_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
39fcee6c90925476268f0fc41f685da6c2087909bdeef851ffba6d2ff7122a66 | nathanmarz/storm-deploy | branch.clj | (ns backtype.storm.branch)
(defn parse-branch [branch]
(map #(Integer/parseInt %) (.split branch "\\.")))
(defn branch> [branch1 branch2]
(->> (map - (parse-branch branch1) (parse-branch branch2))
(take-while #(>= % 0))
(some pos?)))
| null | https://raw.githubusercontent.com/nathanmarz/storm-deploy/b97265bb8b79f84f5907deb826c702e9c4947305/src/clj/backtype/storm/branch.clj | clojure | (ns backtype.storm.branch)
(defn parse-branch [branch]
(map #(Integer/parseInt %) (.split branch "\\.")))
(defn branch> [branch1 branch2]
(->> (map - (parse-branch branch1) (parse-branch branch2))
(take-while #(>= % 0))
(some pos?)))
| |
9045678bb739d597fb638106a54e10e8adf5d0871dee154b667b352a71a2f495 | Frama-C/Frama-C-snapshot | Scope.mli | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
open Cil_types
open Cil_datatype
(** Scope analysis. *)
(** Interface for the Scope plugin.
@see <index.html> internal documentation. *)
module Defs : sig
val get_defs :
Kernel_function.t -> stmt -> lval ->
(Stmt.Hptset.t * Locations.Zone.t option) option
* @return the set of statements that define [ lval ] before [ stmt ] in [ kf ] .
Also returns the zone that is possibly not defined .
Can return [ None ] when the information is not available ( missing ) .
Also returns the zone that is possibly not defined.
Can return [None] when the information is not available (Pdg missing). *)
val get_defs_with_type :
Kernel_function.t -> stmt -> lval ->
((bool * bool) Stmt.Map.t * Locations.Zone.t option) option
* @return a map from the statements that define [ lval ] before [ stmt ] in
[ kf ] . The first boolean indicates the possibility of a direct
modification at this statement , ie . [ lval = ... ] or [ lval = f ( ) ] .
The second boolean indicates a possible indirect modification through
a call .
Also returns the zone that is possibly not defined .
Can return [ None ] when the information is not available ( missing ) .
[kf]. The first boolean indicates the possibility of a direct
modification at this statement, ie. [lval = ...] or [lval = f()].
The second boolean indicates a possible indirect modification through
a call.
Also returns the zone that is possibly not defined.
Can return [None] when the information is not available (Pdg missing).
*)
end
module Datascope : sig
val get_data_scope_at_stmt :
Kernel_function.t -> stmt -> lval ->
Stmt.Hptset.t * (Stmt.Hptset.t * Stmt.Hptset.t)
(** @raise Kernel_function.No_Definition if [kf] has no definition.
@return 3 statement sets related to the value of [lval] before [stmt] :
- the forward selection,
- the both way selection,
- the backward selection. *)
val get_prop_scope_at_stmt :
kernel_function -> stmt -> code_annotation ->
Stmt.Hptset.t * code_annotation list
* compute the set of statements where the given annotation has the same
value as before the given stmt . Also returns the eventual code annotations
that are implied by the one given as argument .
value as before the given stmt. Also returns the eventual code annotations
that are implied by the one given as argument. *)
val check_asserts : unit -> code_annotation list
(** Print how many assertions could be removed based on the previous
analysis ([get_prop_scope_at_stmt]) and return the annotations
that can be removed. *)
val rm_asserts : unit -> unit
(** Same analysis than [check_asserts] but mark the assertions as proven. *)
end
* { 3 Zones }
module Zones : sig
type t_zones = Locations.Zone.t Stmt.Hashtbl.t
val build_zones :
kernel_function -> stmt -> lval -> Stmt.Hptset.t * t_zones
val pretty_zones : Format.formatter -> t_zones -> unit
val get_zones : t_zones -> Cil_types.stmt -> Locations.Zone.t
end
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/scope/Scope.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* Scope analysis.
* Interface for the Scope plugin.
@see <index.html> internal documentation.
* @raise Kernel_function.No_Definition if [kf] has no definition.
@return 3 statement sets related to the value of [lval] before [stmt] :
- the forward selection,
- the both way selection,
- the backward selection.
* Print how many assertions could be removed based on the previous
analysis ([get_prop_scope_at_stmt]) and return the annotations
that can be removed.
* Same analysis than [check_asserts] but mark the assertions as proven. | This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
open Cil_types
open Cil_datatype
module Defs : sig
val get_defs :
Kernel_function.t -> stmt -> lval ->
(Stmt.Hptset.t * Locations.Zone.t option) option
* @return the set of statements that define [ lval ] before [ stmt ] in [ kf ] .
Also returns the zone that is possibly not defined .
Can return [ None ] when the information is not available ( missing ) .
Also returns the zone that is possibly not defined.
Can return [None] when the information is not available (Pdg missing). *)
val get_defs_with_type :
Kernel_function.t -> stmt -> lval ->
((bool * bool) Stmt.Map.t * Locations.Zone.t option) option
* @return a map from the statements that define [ lval ] before [ stmt ] in
[ kf ] . The first boolean indicates the possibility of a direct
modification at this statement , ie . [ lval = ... ] or [ lval = f ( ) ] .
The second boolean indicates a possible indirect modification through
a call .
Also returns the zone that is possibly not defined .
Can return [ None ] when the information is not available ( missing ) .
[kf]. The first boolean indicates the possibility of a direct
modification at this statement, ie. [lval = ...] or [lval = f()].
The second boolean indicates a possible indirect modification through
a call.
Also returns the zone that is possibly not defined.
Can return [None] when the information is not available (Pdg missing).
*)
end
module Datascope : sig
val get_data_scope_at_stmt :
Kernel_function.t -> stmt -> lval ->
Stmt.Hptset.t * (Stmt.Hptset.t * Stmt.Hptset.t)
val get_prop_scope_at_stmt :
kernel_function -> stmt -> code_annotation ->
Stmt.Hptset.t * code_annotation list
* compute the set of statements where the given annotation has the same
value as before the given stmt . Also returns the eventual code annotations
that are implied by the one given as argument .
value as before the given stmt. Also returns the eventual code annotations
that are implied by the one given as argument. *)
val check_asserts : unit -> code_annotation list
val rm_asserts : unit -> unit
end
* { 3 Zones }
module Zones : sig
type t_zones = Locations.Zone.t Stmt.Hashtbl.t
val build_zones :
kernel_function -> stmt -> lval -> Stmt.Hptset.t * t_zones
val pretty_zones : Format.formatter -> t_zones -> unit
val get_zones : t_zones -> Cil_types.stmt -> Locations.Zone.t
end
|
14f1aa7210fea466a08c8abd427c07e16fc97290a965f6b2c3ab6bb01a4cbdc3 | sangkilc/ofuzz | optmanager.ml | (* ofuzz - ocaml fuzzing platform *)
* option manager
@author < sangkil.cha\@gmail.com >
@since 2014 - 03 - 19
@author Sang Kil Cha <sangkil.cha\@gmail.com>
@since 2014-03-19
*)
Copyright ( c ) 2014 ,
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions are met :
* Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
* Redistributions in binary form must reproduce the above copyright
notice , this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution .
* Neither the name of the < organization > nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND
ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL SANG KIL CHA BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES
( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS
Copyright (c) 2014, Sang Kil Cha
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL SANG KIL CHA BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
*)
open BatOptParse
open Fuzztypes
(** knobs that ofuzz utilizes while fuzzing *)
type knobs =
{
(* global *)
verbosity : int;
output_dir : string;
timeout : int;
gui : bool;
full_debugging : bool; (* show even program outputs for debugging *)
(* db *)
db_host : string option;
db_port : int;
db_user : string;
db_password : string;
db_name : string;
db_expid : int;
(* test-gen *)
testgen_alg : testgen_alg;
seed_range : seed_range;
reproduce_seed : (rseed * int) option;
(* test-eval *)
gen_all_tcs : bool; (* generate all test cases (for debugging) *)
gen_crash_tcs : bool;
triage_on_the_fly : bool;
exec_timeout : int; (* execution timeout *)
(* scheduling *)
scheduling : schedule;
round_timeout : int; (* timeout for a round (in round robin) *)
}
(******************************************************************************)
(* Some Constants *)
(******************************************************************************)
let tcdir = "testcases"
let crashdir = "crashes"
let ofuzzlog = "ofuzz.log"
(******************************************************************************)
(* Useful Functions *)
(******************************************************************************)
let use_db knobs = knobs.db_host <> None
let get_tc_dir cwd = Filename.concat cwd tcdir
let get_crash_dir cwd = Filename.concat cwd crashdir
let get_logfile cwd = Filename.concat cwd ofuzzlog
(******************************************************************************)
(* Coerce Functions *)
(******************************************************************************)
let optional_value opt =
try Some (Opt.get opt)
with Opt.No_value -> None
let string_to_tc str =
match String.lowercase str with
| "zzuf" -> ZzufMutational
| "random" | "rand" | "r" -> RandomWithReplacement
| "randomwithoutreplacement" | "rwr" -> RandomWithoutReplacement
| "ball" | "b" -> BallMutational
| "surface" | "sur" | _ -> SurfaceMutational
let string_to_sch str =
match String.lowercase str with
| "round-robin" | "roundrobin" | "round" | "rr" -> RoundRobin
| "weighted-rr" | "weightedrr" | "wrr" -> WeightedRoundRobin
| "uniform-time" | "uniformtime" | "unitime" | _ -> UniformTime
let string_to_seed = Int64.of_string
let string_to_seed_range str =
let regexp = Str.regexp_string ":" in
match Str.split regexp str with
| sbegin::send::[] -> string_to_seed sbegin, string_to_seed send
| _ -> raise (Opt.Option_error ("seed-range", "invalid seed range format"))
let string_to_verbo str =
match String.lowercase str with
| "quiet" | "q" -> Logger.quiet
| "verbose" | "v" -> Logger.verbose
| "normal" | "n" | _ -> Logger.normal
let string_to_repropair str =
let comma = Str.regexp_string "," in
match Str.split comma str with
| rseed::confid::[] -> string_to_seed rseed, int_of_string confid
| _ ->
raise (Opt.Option_error ("reproduce", "invalid format for reproduce"))
(******************************************************************************)
(* Defining Options *)
(******************************************************************************)
(* global *)
let opt_verbosity =
StdOpt.any_option
~default:(Some Logger.normal) ~metavar:"<VERBOSITY>" string_to_verbo
let get_verbosity () = Opt.get opt_verbosity
let opt_output_dir =
StdOpt.str_option ~default:"ofuzz-output" ~metavar:"<DIRNAME>" ()
let get_output_dir () = Opt.get opt_output_dir
let opt_timeout = StdOpt.int_option ~default:3600 ~metavar:"<SEC>" ()
let get_timeout () = Opt.get opt_timeout
let opt_gui = StdOpt.store_true ()
let get_gui () = Opt.get opt_gui
let opt_debugflag = StdOpt.store_true ()
let get_debugflag () = Opt.get opt_debugflag
(* test-gen *)
let opt_testgen_alg =
StdOpt.any_option
~default:(Some SurfaceMutational) ~metavar:"<ALG>" string_to_tc
let get_testgen_alg () = Opt.get opt_testgen_alg
let opt_seedrange =
StdOpt.any_option
~default:(Some default_seed_range)
~metavar:"<BEGIN:END>" string_to_seed_range
let get_seedrange () = Opt.get opt_seedrange
let opt_reproduce =
StdOpt.any_option
~metavar:"<RSEED,CONF_ID>"
string_to_repropair
let get_reproduce () = optional_value opt_reproduce
(* test-eval *)
let opt_genall_tcs = StdOpt.store_true ()
let get_genall_tcs () = Opt.get opt_genall_tcs
let opt_gencrash_tcs = StdOpt.store_true ()
let get_gencrash_tcs () = Opt.get opt_gencrash_tcs
let opt_triage = StdOpt.store_true ()
let get_triage () = Opt.get opt_triage
let opt_exec_timeout = StdOpt.int_option ~default:5 ~metavar:"<SEC>" ()
let get_exec_timeout () = Opt.get opt_exec_timeout
(* scheduling *)
let opt_scheduling =
StdOpt.any_option
~default:(Some UniformTime) ~metavar:"<ALG>" string_to_sch
let get_scheduling () = Opt.get opt_scheduling
let opt_round_timeout = StdOpt.int_option ~default:5 ~metavar:"<SEC>" ()
let get_round_timeout () = Opt.get opt_round_timeout
(* database *)
let opt_dbhost = StdOpt.str_option ~default:"" ~metavar:"<HOSTNAME>" ()
let get_dbhost () =
let dbhost = Opt.get opt_dbhost in
if dbhost = "" then None else Some dbhost
let opt_dbport = StdOpt.int_option ~default:3306 ~metavar:"<PORT>" ()
let get_dbport () = Opt.get opt_dbport
let opt_dbuser = StdOpt.str_option ~default:"fuzzer" ~metavar:"<USERNAME>" ()
let get_dbuser () = Opt.get opt_dbuser
let opt_dbpassword = StdOpt.str_option ~default:"" ~metavar:"<PASSWORD>" ()
let get_dbpassword () =
let get_password_from_cmdline () =
let () = output_string stdout "password for the db: " in
let () = flush stdout in
let open Unix in
let attr = tcgetattr stdin in
let () = attr.c_echo <- false in
let () = tcsetattr stdin TCSAFLUSH attr in
let password = input_line Pervasives.stdin in
let () = attr.c_echo <- true in
let () = tcsetattr stdin TCSAFLUSH attr in
let () = print_endline "" in
password
in
let password = Opt.get opt_dbpassword in
if password = "" && get_dbhost () <> None then get_password_from_cmdline ()
else password
let opt_dbname = StdOpt.str_option ~default:"fuzzing" ~metavar:"<DBNAME>" ()
let get_dbname () = Opt.get opt_dbname
let opt_dbexpid = StdOpt.int_option ~default:0 ~metavar:"<EID>" ()
let get_dbexpid () = Opt.get opt_dbexpid
(******************************************************************************)
(******************************************************************************)
let read_conf_files p files =
if List.length files = 0 then begin
OptParser.usage p ();
Misc.error_exit "\nError: a conf file is required to start ofuzz"
end else
try begin
List.fold_left (fun acc file ->
let lst = Conf.parse file in
List.rev_append lst acc
) [] files
end with
| Conf.WrongFormat reason ->
Misc.error_exit ("\nError (WrongFormat): "^reason)
| Not_found ->
Misc.error_exit ("\nError: file not found")
| e ->
Printf.eprintf " what ? % s " ( Printexc.to_string e ) ;
Misc.error_exit "\nError: cannot read conf file(s)"
let usage = "%prog [options] <ofuzz config file(s)>"
let opt_init () =
let myformatter =
Formatter.indented_formatter ~max_help_position:50 ~width:100
~short_first:false ()
in
let p = OptParser.make ~usage:usage ~formatter:myformatter () in
let grp_testgen = OptParser.add_group p "Options related to Test-Gen" in
let grp_testeval = OptParser.add_group p "Options related to Test-Eval" in
let grp_scheduling = OptParser.add_group p "Scheduling Options" in
let grp_global = OptParser.add_group p "Global Options" in
let grp_db = OptParser.add_group p "Database Options" in
(* global options *)
let () = OptParser.add p
~long_name:"version"
(StdOpt.version_option Ofuzzversion.string)
in
let () = OptParser.add p
~group:grp_global
~help:"debugging mode"
~long_name:"debug"
opt_debugflag
in
let () = OptParser.add p
~group:grp_global
~help:"verbosity (quiet|normal|verbose) (default: normal)"
~short_name:'v' ~long_name:"verbosity"
opt_verbosity
in
let () = OptParser.add p
~group:grp_global
~help:"specify a timeout"
~short_name:'t' ~long_name:"timeout"
opt_timeout
in
let () = OptParser.add p
~group:grp_global
~help:"enable GUI fuzzing"
~long_name:"gui"
opt_gui
in
let () = OptParser.add p
~group:grp_global
~help:"specify the name of the output directory"
~short_name:'o' ~long_name:"output"
opt_output_dir
in
(* test-gen options *)
let () = OptParser.add p
~group:grp_testgen
~help:"test-gen algorithms (rand|rwr|mut|sur|zzuf)"
~long_name:"test-gen-alg"
opt_testgen_alg
in
let () = OptParser.add p
~group:grp_testgen
~help:"specify a seed range tuple"
~short_name:'s' ~long_name:"seed-range"
opt_seedrange
in
let () = OptParser.add p
~group:grp_testgen
~help:"reproduce a test case"
~long_name:"reproduce"
opt_reproduce
in
(* test-eval *)
let () = OptParser.add p
~group:grp_testeval
~help:"specify whether to generate all the test cases \
in the output directoy (only for debugging)"
~long_name:"gen-all-tcs"
opt_genall_tcs
in
let () = OptParser.add p
~group:grp_testeval
~help:"specify whether to generate crash test cases \
in the output directoy"
~long_name:"gen-crash-tcs"
opt_gencrash_tcs
in
let () = OptParser.add p
~group:grp_testeval
~help:"perform bug triaging on the fly"
~long_name:"triage"
opt_triage
in
let () = OptParser.add p
~group:grp_testeval
~help:"execution timeout per exec call (default: 5 sec)"
~long_name:"exec-timeout"
opt_exec_timeout
in
(* scheduling *)
let () = OptParser.add p
~group:grp_scheduling
~help:"specify a scheduling algorithm"
~long_name:"scheduling"
opt_scheduling
in
let () = OptParser.add p
~group:grp_scheduling
~help:"specify a round timeout (round-robin)"
~long_name:"round-timeout"
opt_round_timeout
in
(* database *)
let () = OptParser.add p
~group:grp_db
~help:"specify db host name"
~long_name:"host"
opt_dbhost
in
let () = OptParser.add p
~group:grp_db
~help:"specify db port"
~long_name:"port"
opt_dbport
in
let () = OptParser.add p
~group:grp_db
~help:"specify db username"
~long_name:"user"
opt_dbuser
in
let () = OptParser.add p
~group:grp_db
~help:"specify db password"
~long_name:"password"
opt_dbpassword
in
let () = OptParser.add p
~group:grp_db
~help:"specify db name"
~long_name:"dbname"
opt_dbname
in
let () = OptParser.add p
~group:grp_db
~help:"specify experiment id"
~long_name:"exp-id"
opt_dbexpid
in
(* parsing *)
let rest = OptParser.parse_argv p in
(* reading conf file(s) *)
let conflst = read_conf_files p rest in
(* enable backtrace *)
let () = if get_debugflag () then Printexc.record_backtrace true else () in
{
verbosity = get_verbosity ();
output_dir = get_output_dir ();
timeout = get_timeout ();
gui = get_gui ();
full_debugging = get_debugflag () && (get_verbosity () = Logger.verbose);
db_host = get_dbhost ();
db_port = get_dbport ();
db_user = get_dbuser ();
db_password = get_dbpassword ();
db_name = get_dbname ();
db_expid = get_dbexpid ();
testgen_alg = get_testgen_alg ();
seed_range = get_seedrange ();
reproduce_seed = get_reproduce ();
gen_all_tcs = get_genall_tcs ();
gen_crash_tcs = get_gencrash_tcs ();
triage_on_the_fly = get_triage ();
exec_timeout = get_exec_timeout ();
scheduling = get_scheduling ();
round_timeout = get_round_timeout ();
},
get_testgen_alg (),
get_scheduling (),
conflst
| null | https://raw.githubusercontent.com/sangkilc/ofuzz/ba53cc90cc06512eb90459a7159772d75ebe954f/src/optmanager.ml | ocaml | ofuzz - ocaml fuzzing platform
* knobs that ofuzz utilizes while fuzzing
global
show even program outputs for debugging
db
test-gen
test-eval
generate all test cases (for debugging)
execution timeout
scheduling
timeout for a round (in round robin)
****************************************************************************
Some Constants
****************************************************************************
****************************************************************************
Useful Functions
****************************************************************************
****************************************************************************
Coerce Functions
****************************************************************************
****************************************************************************
Defining Options
****************************************************************************
global
test-gen
test-eval
scheduling
database
****************************************************************************
****************************************************************************
global options
test-gen options
test-eval
scheduling
database
parsing
reading conf file(s)
enable backtrace |
* option manager
@author < sangkil.cha\@gmail.com >
@since 2014 - 03 - 19
@author Sang Kil Cha <sangkil.cha\@gmail.com>
@since 2014-03-19
*)
Copyright ( c ) 2014 ,
All rights reserved .
Redistribution and use in source and binary forms , with or without
modification , are permitted provided that the following conditions are met :
* Redistributions of source code must retain the above copyright
notice , this list of conditions and the following disclaimer .
* Redistributions in binary form must reproduce the above copyright
notice , this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution .
* Neither the name of the < organization > nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission .
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS " AS IS " AND
ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED . IN NO EVENT SHALL SANG KIL CHA BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES
( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT
( INCLUDING NEGLIGENCE OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS
Copyright (c) 2014, Sang Kil Cha
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
* Neither the name of the <organization> nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL SANG KIL CHA BE LIABLE FOR ANY
DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
(INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
*)
open BatOptParse
open Fuzztypes
type knobs =
{
verbosity : int;
output_dir : string;
timeout : int;
gui : bool;
db_host : string option;
db_port : int;
db_user : string;
db_password : string;
db_name : string;
db_expid : int;
testgen_alg : testgen_alg;
seed_range : seed_range;
reproduce_seed : (rseed * int) option;
gen_crash_tcs : bool;
triage_on_the_fly : bool;
scheduling : schedule;
}
let tcdir = "testcases"
let crashdir = "crashes"
let ofuzzlog = "ofuzz.log"
let use_db knobs = knobs.db_host <> None
let get_tc_dir cwd = Filename.concat cwd tcdir
let get_crash_dir cwd = Filename.concat cwd crashdir
let get_logfile cwd = Filename.concat cwd ofuzzlog
let optional_value opt =
try Some (Opt.get opt)
with Opt.No_value -> None
let string_to_tc str =
match String.lowercase str with
| "zzuf" -> ZzufMutational
| "random" | "rand" | "r" -> RandomWithReplacement
| "randomwithoutreplacement" | "rwr" -> RandomWithoutReplacement
| "ball" | "b" -> BallMutational
| "surface" | "sur" | _ -> SurfaceMutational
let string_to_sch str =
match String.lowercase str with
| "round-robin" | "roundrobin" | "round" | "rr" -> RoundRobin
| "weighted-rr" | "weightedrr" | "wrr" -> WeightedRoundRobin
| "uniform-time" | "uniformtime" | "unitime" | _ -> UniformTime
let string_to_seed = Int64.of_string
let string_to_seed_range str =
let regexp = Str.regexp_string ":" in
match Str.split regexp str with
| sbegin::send::[] -> string_to_seed sbegin, string_to_seed send
| _ -> raise (Opt.Option_error ("seed-range", "invalid seed range format"))
let string_to_verbo str =
match String.lowercase str with
| "quiet" | "q" -> Logger.quiet
| "verbose" | "v" -> Logger.verbose
| "normal" | "n" | _ -> Logger.normal
let string_to_repropair str =
let comma = Str.regexp_string "," in
match Str.split comma str with
| rseed::confid::[] -> string_to_seed rseed, int_of_string confid
| _ ->
raise (Opt.Option_error ("reproduce", "invalid format for reproduce"))
let opt_verbosity =
StdOpt.any_option
~default:(Some Logger.normal) ~metavar:"<VERBOSITY>" string_to_verbo
let get_verbosity () = Opt.get opt_verbosity
let opt_output_dir =
StdOpt.str_option ~default:"ofuzz-output" ~metavar:"<DIRNAME>" ()
let get_output_dir () = Opt.get opt_output_dir
let opt_timeout = StdOpt.int_option ~default:3600 ~metavar:"<SEC>" ()
let get_timeout () = Opt.get opt_timeout
let opt_gui = StdOpt.store_true ()
let get_gui () = Opt.get opt_gui
let opt_debugflag = StdOpt.store_true ()
let get_debugflag () = Opt.get opt_debugflag
let opt_testgen_alg =
StdOpt.any_option
~default:(Some SurfaceMutational) ~metavar:"<ALG>" string_to_tc
let get_testgen_alg () = Opt.get opt_testgen_alg
let opt_seedrange =
StdOpt.any_option
~default:(Some default_seed_range)
~metavar:"<BEGIN:END>" string_to_seed_range
let get_seedrange () = Opt.get opt_seedrange
let opt_reproduce =
StdOpt.any_option
~metavar:"<RSEED,CONF_ID>"
string_to_repropair
let get_reproduce () = optional_value opt_reproduce
let opt_genall_tcs = StdOpt.store_true ()
let get_genall_tcs () = Opt.get opt_genall_tcs
let opt_gencrash_tcs = StdOpt.store_true ()
let get_gencrash_tcs () = Opt.get opt_gencrash_tcs
let opt_triage = StdOpt.store_true ()
let get_triage () = Opt.get opt_triage
let opt_exec_timeout = StdOpt.int_option ~default:5 ~metavar:"<SEC>" ()
let get_exec_timeout () = Opt.get opt_exec_timeout
let opt_scheduling =
StdOpt.any_option
~default:(Some UniformTime) ~metavar:"<ALG>" string_to_sch
let get_scheduling () = Opt.get opt_scheduling
let opt_round_timeout = StdOpt.int_option ~default:5 ~metavar:"<SEC>" ()
let get_round_timeout () = Opt.get opt_round_timeout
let opt_dbhost = StdOpt.str_option ~default:"" ~metavar:"<HOSTNAME>" ()
let get_dbhost () =
let dbhost = Opt.get opt_dbhost in
if dbhost = "" then None else Some dbhost
let opt_dbport = StdOpt.int_option ~default:3306 ~metavar:"<PORT>" ()
let get_dbport () = Opt.get opt_dbport
let opt_dbuser = StdOpt.str_option ~default:"fuzzer" ~metavar:"<USERNAME>" ()
let get_dbuser () = Opt.get opt_dbuser
let opt_dbpassword = StdOpt.str_option ~default:"" ~metavar:"<PASSWORD>" ()
let get_dbpassword () =
let get_password_from_cmdline () =
let () = output_string stdout "password for the db: " in
let () = flush stdout in
let open Unix in
let attr = tcgetattr stdin in
let () = attr.c_echo <- false in
let () = tcsetattr stdin TCSAFLUSH attr in
let password = input_line Pervasives.stdin in
let () = attr.c_echo <- true in
let () = tcsetattr stdin TCSAFLUSH attr in
let () = print_endline "" in
password
in
let password = Opt.get opt_dbpassword in
if password = "" && get_dbhost () <> None then get_password_from_cmdline ()
else password
let opt_dbname = StdOpt.str_option ~default:"fuzzing" ~metavar:"<DBNAME>" ()
let get_dbname () = Opt.get opt_dbname
let opt_dbexpid = StdOpt.int_option ~default:0 ~metavar:"<EID>" ()
let get_dbexpid () = Opt.get opt_dbexpid
let read_conf_files p files =
if List.length files = 0 then begin
OptParser.usage p ();
Misc.error_exit "\nError: a conf file is required to start ofuzz"
end else
try begin
List.fold_left (fun acc file ->
let lst = Conf.parse file in
List.rev_append lst acc
) [] files
end with
| Conf.WrongFormat reason ->
Misc.error_exit ("\nError (WrongFormat): "^reason)
| Not_found ->
Misc.error_exit ("\nError: file not found")
| e ->
Printf.eprintf " what ? % s " ( Printexc.to_string e ) ;
Misc.error_exit "\nError: cannot read conf file(s)"
let usage = "%prog [options] <ofuzz config file(s)>"
let opt_init () =
let myformatter =
Formatter.indented_formatter ~max_help_position:50 ~width:100
~short_first:false ()
in
let p = OptParser.make ~usage:usage ~formatter:myformatter () in
let grp_testgen = OptParser.add_group p "Options related to Test-Gen" in
let grp_testeval = OptParser.add_group p "Options related to Test-Eval" in
let grp_scheduling = OptParser.add_group p "Scheduling Options" in
let grp_global = OptParser.add_group p "Global Options" in
let grp_db = OptParser.add_group p "Database Options" in
let () = OptParser.add p
~long_name:"version"
(StdOpt.version_option Ofuzzversion.string)
in
let () = OptParser.add p
~group:grp_global
~help:"debugging mode"
~long_name:"debug"
opt_debugflag
in
let () = OptParser.add p
~group:grp_global
~help:"verbosity (quiet|normal|verbose) (default: normal)"
~short_name:'v' ~long_name:"verbosity"
opt_verbosity
in
let () = OptParser.add p
~group:grp_global
~help:"specify a timeout"
~short_name:'t' ~long_name:"timeout"
opt_timeout
in
let () = OptParser.add p
~group:grp_global
~help:"enable GUI fuzzing"
~long_name:"gui"
opt_gui
in
let () = OptParser.add p
~group:grp_global
~help:"specify the name of the output directory"
~short_name:'o' ~long_name:"output"
opt_output_dir
in
let () = OptParser.add p
~group:grp_testgen
~help:"test-gen algorithms (rand|rwr|mut|sur|zzuf)"
~long_name:"test-gen-alg"
opt_testgen_alg
in
let () = OptParser.add p
~group:grp_testgen
~help:"specify a seed range tuple"
~short_name:'s' ~long_name:"seed-range"
opt_seedrange
in
let () = OptParser.add p
~group:grp_testgen
~help:"reproduce a test case"
~long_name:"reproduce"
opt_reproduce
in
let () = OptParser.add p
~group:grp_testeval
~help:"specify whether to generate all the test cases \
in the output directoy (only for debugging)"
~long_name:"gen-all-tcs"
opt_genall_tcs
in
let () = OptParser.add p
~group:grp_testeval
~help:"specify whether to generate crash test cases \
in the output directoy"
~long_name:"gen-crash-tcs"
opt_gencrash_tcs
in
let () = OptParser.add p
~group:grp_testeval
~help:"perform bug triaging on the fly"
~long_name:"triage"
opt_triage
in
let () = OptParser.add p
~group:grp_testeval
~help:"execution timeout per exec call (default: 5 sec)"
~long_name:"exec-timeout"
opt_exec_timeout
in
let () = OptParser.add p
~group:grp_scheduling
~help:"specify a scheduling algorithm"
~long_name:"scheduling"
opt_scheduling
in
let () = OptParser.add p
~group:grp_scheduling
~help:"specify a round timeout (round-robin)"
~long_name:"round-timeout"
opt_round_timeout
in
let () = OptParser.add p
~group:grp_db
~help:"specify db host name"
~long_name:"host"
opt_dbhost
in
let () = OptParser.add p
~group:grp_db
~help:"specify db port"
~long_name:"port"
opt_dbport
in
let () = OptParser.add p
~group:grp_db
~help:"specify db username"
~long_name:"user"
opt_dbuser
in
let () = OptParser.add p
~group:grp_db
~help:"specify db password"
~long_name:"password"
opt_dbpassword
in
let () = OptParser.add p
~group:grp_db
~help:"specify db name"
~long_name:"dbname"
opt_dbname
in
let () = OptParser.add p
~group:grp_db
~help:"specify experiment id"
~long_name:"exp-id"
opt_dbexpid
in
let rest = OptParser.parse_argv p in
let conflst = read_conf_files p rest in
let () = if get_debugflag () then Printexc.record_backtrace true else () in
{
verbosity = get_verbosity ();
output_dir = get_output_dir ();
timeout = get_timeout ();
gui = get_gui ();
full_debugging = get_debugflag () && (get_verbosity () = Logger.verbose);
db_host = get_dbhost ();
db_port = get_dbport ();
db_user = get_dbuser ();
db_password = get_dbpassword ();
db_name = get_dbname ();
db_expid = get_dbexpid ();
testgen_alg = get_testgen_alg ();
seed_range = get_seedrange ();
reproduce_seed = get_reproduce ();
gen_all_tcs = get_genall_tcs ();
gen_crash_tcs = get_gencrash_tcs ();
triage_on_the_fly = get_triage ();
exec_timeout = get_exec_timeout ();
scheduling = get_scheduling ();
round_timeout = get_round_timeout ();
},
get_testgen_alg (),
get_scheduling (),
conflst
|
409f14e587151a6f463705a339643ea05499221f1ff1d217cc066b74bef1195c | jiesoul/soul-talk | date_utils.clj | (ns soul-talk.utils.date-utils)
(defn now []
())
| null | https://raw.githubusercontent.com/jiesoul/soul-talk/630de08c6549b206d59023764d5f2576d97d1030/api/src/soul_talk/utils/date_utils.clj | clojure | (ns soul-talk.utils.date-utils)
(defn now []
())
| |
6319e2525fd3a217db0de3985cfd3f678d973d4bb0b3be70b3aeba5196dac10c | huangz1990/real-world-haskell-cn | naiveeq.hs | file : ch06 / naiveeq.hs
import Data.Char (isSpace)
data Color = Red | Green | Blue
colorEq :: Color -> Color -> Bool
colorEq Red Red = True
colorEq Green Green = True
colorEq Blue Blue = True
colorEq _ _ = False
stringEq :: [Char] -> [Char] -> Bool
-- Match if both are empty
stringEq [] [] = True
-- If both start with the same char, check the rest
stringEq (x:xs) (y:ys) = x == y && stringEq xs ys
-- Everything else doesn't match
stringEq _ _ = False
instance Show Color where
-- show Red = "Red"
-- show Green = "Green"
show Blue = " Blue "
instance Show Color where
show Red = "Color 1: Red"
show Green = "Color 2: Green"
show Blue = "Color 3: Blue"
instance Read Color where
-- -- readsPrec is the main function for parsing input
-- readsPrec _ value =
-- We pass a list of pairs . Each pair has a string
-- and the desired return value . will try to match
-- the input to one of these strings .
[ ( " Red " , Red ) , ( " Green " , Green ) , ( " Blue " , Blue ) ]
where [ ] = [ ] -- If there is nothing left to try , fai
( ( attempt , result):xs ) =
-- -- Compare the start of the string to be parsed to the
-- -- text we are looking for.
-- if (take (length attempt) value) == attempt
-- -- If we have a match, return the result and the
-- -- remaining input
-- then [(result, drop (length attempt) value)]
-- -- If we don't have a match, try the next pair
-- -- in the list of attempts.
else xs
instance Read Color where
readsPrec _ value = tryParse colors
where
cleanedUpValue = dropWhile isSpace value
tryParse [] = []
tryParse ((attempt, result):xs) =
if (take (length attempt) cleanedUpValue) == attempt
then [(result, drop (length attempt) cleanedUpValue)]
else tryParse xs
colors = [("Red", Red), ("Green", Green), ("Blue", Blue)]
| null | https://raw.githubusercontent.com/huangz1990/real-world-haskell-cn/f67b07dd846b1950d17ff941d650089fcbbe9586/code/ch06/naiveeq.hs | haskell | Match if both are empty
If both start with the same char, check the rest
Everything else doesn't match
show Red = "Red"
show Green = "Green"
-- readsPrec is the main function for parsing input
readsPrec _ value =
We pass a list of pairs . Each pair has a string
and the desired return value . will try to match
the input to one of these strings .
If there is nothing left to try , fai
-- Compare the start of the string to be parsed to the
-- text we are looking for.
if (take (length attempt) value) == attempt
-- If we have a match, return the result and the
-- remaining input
then [(result, drop (length attempt) value)]
-- If we don't have a match, try the next pair
-- in the list of attempts. | file : ch06 / naiveeq.hs
import Data.Char (isSpace)
data Color = Red | Green | Blue
colorEq :: Color -> Color -> Bool
colorEq Red Red = True
colorEq Green Green = True
colorEq Blue Blue = True
colorEq _ _ = False
stringEq :: [Char] -> [Char] -> Bool
stringEq [] [] = True
stringEq (x:xs) (y:ys) = x == y && stringEq xs ys
stringEq _ _ = False
instance Show Color where
show Blue = " Blue "
instance Show Color where
show Red = "Color 1: Red"
show Green = "Color 2: Green"
show Blue = "Color 3: Blue"
instance Read Color where
[ ( " Red " , Red ) , ( " Green " , Green ) , ( " Blue " , Blue ) ]
( ( attempt , result):xs ) =
else xs
instance Read Color where
readsPrec _ value = tryParse colors
where
cleanedUpValue = dropWhile isSpace value
tryParse [] = []
tryParse ((attempt, result):xs) =
if (take (length attempt) cleanedUpValue) == attempt
then [(result, drop (length attempt) cleanedUpValue)]
else tryParse xs
colors = [("Red", Red), ("Green", Green), ("Blue", Blue)]
|
e8774278bd939c2d1f50d5ffdf70423066d2fd02c004453097b60f72e7b8c85e | dbuenzli/mu | test.ml | ---------------------------------------------------------------------------
Copyright ( c ) 2021 The mu programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2021 The mu programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
open B0_std
open Mu
open Mu.Syntax
let t251 =
let d_minor = M.chord M.[ d 4 wn; f 4 wn; a 4 wn ] in
let g_major = M.chord M.[ g 4 wn; b 4 wn; d 5 wn ] in
let c_major = M.chord M.[ c 4 wn; e 4 wn; g 4 wn ] in
M.line [d_minor; g_major; c_major]
let main () = Mu_player.main (Music.map Pnote.of_pitch t251)
let () = if !Sys.interactive then () else main ()
---------------------------------------------------------------------------
Copyright ( c ) 2021 The mu programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2021 The mu programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| null | https://raw.githubusercontent.com/dbuenzli/mu/1bb96303491fe13ee8ed7b4226797e92cbec4c1f/test/test.ml | ocaml | ---------------------------------------------------------------------------
Copyright ( c ) 2021 The mu programmers . All rights reserved .
Distributed under the ISC license , see terms at the end of the file .
---------------------------------------------------------------------------
Copyright (c) 2021 The mu programmers. All rights reserved.
Distributed under the ISC license, see terms at the end of the file.
---------------------------------------------------------------------------*)
open B0_std
open Mu
open Mu.Syntax
let t251 =
let d_minor = M.chord M.[ d 4 wn; f 4 wn; a 4 wn ] in
let g_major = M.chord M.[ g 4 wn; b 4 wn; d 5 wn ] in
let c_major = M.chord M.[ c 4 wn; e 4 wn; g 4 wn ] in
M.line [d_minor; g_major; c_major]
let main () = Mu_player.main (Music.map Pnote.of_pitch t251)
let () = if !Sys.interactive then () else main ()
---------------------------------------------------------------------------
Copyright ( c ) 2021 The mu programmers
Permission to use , copy , modify , and/or distribute this software for any
purpose with or without fee is hereby granted , provided that the above
copyright notice and this permission notice appear in all copies .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
---------------------------------------------------------------------------
Copyright (c) 2021 The mu programmers
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
---------------------------------------------------------------------------*)
| |
62ded65933622cbffa67c90ec039d82b15aabe7ca255d3a50a7f513a6c46a613 | vinted/kafka-elasticsearch-tool | search_after_with_pit.clj | (ns source.elasticsearch.search-after-with-pit
(:require [clojure.tools.logging :as log]
[scroll.pit :as pit]
[scroll :as scroll]))
(defn records [es-host index-name query max-docs keep-alive]
(let [opts {:keep-alive keep-alive}
pit (pit/init es-host index-name opts)
; mutable state is needed because PIT ID might change between calls
latest-pit-id (atom (:id pit))
pit-with-keep-alive (assoc pit :keep_alive keep-alive)]
(lazy-cat
(let [hits (scroll/hits
{:es-host es-host
:index-name index-name
:query (assoc query :pit pit-with-keep-alive)
:opts {:strategy :search-after
; expects an atom
; the contents of an atom will be a string with PIT ID
:latest-pit-id latest-pit-id}})]
(if max-docs (take max-docs hits) hits))
; last element of the lazy-sequence is the output of `do` macro
and inside the ` do ` we terminate the PIT and return nil
; that last nil will not be in the sequence because `lazy-cat` terminates if nil
(do
(log/debugf "PIT terminated with: %s"
(pit/terminate es-host {:id @latest-pit-id}))
nil))))
; TODO: support other options such as keywordize?
(defn fetch [opts]
(let [max-docs (-> opts :max_docs)
es-host (or (-> opts :source :remote :host) ":9200")
index-name (or (-> opts :source :index)
(-> opts :source :remote :index)
"*")
query (or (-> opts :source :query) {:query {:match_all {}}})
keep-alive (or (-> opts :source :remote :connect_timeout) "30s")]
(records es-host index-name query max-docs keep-alive)))
(comment
(source.elasticsearch.search-after-with-pit/fetch
{:max_docs 12
:source {:remote {:host ":9200"}
:index "index_name"}}))
| null | https://raw.githubusercontent.com/vinted/kafka-elasticsearch-tool/ab92598cf47bf7df0b9c64bb0d7436aff544bb03/src/source/elasticsearch/search_after_with_pit.clj | clojure | mutable state is needed because PIT ID might change between calls
expects an atom
the contents of an atom will be a string with PIT ID
last element of the lazy-sequence is the output of `do` macro
that last nil will not be in the sequence because `lazy-cat` terminates if nil
TODO: support other options such as keywordize? | (ns source.elasticsearch.search-after-with-pit
(:require [clojure.tools.logging :as log]
[scroll.pit :as pit]
[scroll :as scroll]))
(defn records [es-host index-name query max-docs keep-alive]
(let [opts {:keep-alive keep-alive}
pit (pit/init es-host index-name opts)
latest-pit-id (atom (:id pit))
pit-with-keep-alive (assoc pit :keep_alive keep-alive)]
(lazy-cat
(let [hits (scroll/hits
{:es-host es-host
:index-name index-name
:query (assoc query :pit pit-with-keep-alive)
:opts {:strategy :search-after
:latest-pit-id latest-pit-id}})]
(if max-docs (take max-docs hits) hits))
and inside the ` do ` we terminate the PIT and return nil
(do
(log/debugf "PIT terminated with: %s"
(pit/terminate es-host {:id @latest-pit-id}))
nil))))
(defn fetch [opts]
(let [max-docs (-> opts :max_docs)
es-host (or (-> opts :source :remote :host) ":9200")
index-name (or (-> opts :source :index)
(-> opts :source :remote :index)
"*")
query (or (-> opts :source :query) {:query {:match_all {}}})
keep-alive (or (-> opts :source :remote :connect_timeout) "30s")]
(records es-host index-name query max-docs keep-alive)))
(comment
(source.elasticsearch.search-after-with-pit/fetch
{:max_docs 12
:source {:remote {:host ":9200"}
:index "index_name"}}))
|
69f638f98f8d522325bb6fa05883c19d367ee82144381823220c9678871f2593 | brendanhay/amazonka | TraceSummary.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
-- |
Module : Amazonka . . Types . TraceSummary
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
module Amazonka.XRay.Types.TraceSummary where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import Amazonka.XRay.Types.AvailabilityZoneDetail
import Amazonka.XRay.Types.ErrorRootCause
import Amazonka.XRay.Types.FaultRootCause
import Amazonka.XRay.Types.Http
import Amazonka.XRay.Types.InstanceIdDetail
import Amazonka.XRay.Types.ResourceARNDetail
import Amazonka.XRay.Types.ResponseTimeRootCause
import Amazonka.XRay.Types.ServiceId
import Amazonka.XRay.Types.TraceUser
import Amazonka.XRay.Types.ValueWithServiceIds
-- | Metadata generated from the segment documents in a trace.
--
-- /See:/ 'newTraceSummary' smart constructor.
data TraceSummary = TraceSummary'
{ -- | Annotations from the trace\'s segment documents.
annotations :: Prelude.Maybe (Prelude.HashMap Prelude.Text [ValueWithServiceIds]),
-- | A list of Availability Zones for any zone corresponding to the trace
-- segments.
availabilityZones :: Prelude.Maybe [AvailabilityZoneDetail],
| The length of time in seconds between the start time of the root segment
-- and the end time of the last segment that completed.
duration :: Prelude.Maybe Prelude.Double,
-- | The root of a trace.
entryPoint :: Prelude.Maybe ServiceId,
| A collection of ErrorRootCause structures corresponding to the trace
-- segments.
errorRootCauses :: Prelude.Maybe [ErrorRootCause],
| A collection of FaultRootCause structures corresponding to the trace
-- segments.
faultRootCauses :: Prelude.Maybe [FaultRootCause],
| The root segment document has a 400 series error .
hasError :: Prelude.Maybe Prelude.Bool,
| The root segment document has a 500 series error .
hasFault :: Prelude.Maybe Prelude.Bool,
| One or more of the segment documents has a 429 throttling error .
hasThrottle :: Prelude.Maybe Prelude.Bool,
-- | Information about the HTTP request served by the trace.
http :: Prelude.Maybe Http,
-- | The unique identifier for the request that generated the trace\'s
-- segments and subsegments.
id :: Prelude.Maybe Prelude.Text,
-- | A list of EC2 instance IDs for any instance corresponding to the trace
-- segments.
instanceIds :: Prelude.Maybe [InstanceIdDetail],
-- | One or more of the segment documents is in progress.
isPartial :: Prelude.Maybe Prelude.Bool,
-- | The matched time stamp of a defined event.
matchedEventTime :: Prelude.Maybe Data.POSIX,
-- | A list of resource ARNs for any resource corresponding to the trace
-- segments.
resourceARNs :: Prelude.Maybe [ResourceARNDetail],
| The length of time in seconds between the start and end times of the
-- root segment. If the service performs work asynchronously, the response
-- time measures the time before the response is sent to the user, while
-- the duration measures the amount of time before the last traced activity
-- completes.
responseTime :: Prelude.Maybe Prelude.Double,
| A collection of structures corresponding to the
-- trace segments.
responseTimeRootCauses :: Prelude.Maybe [ResponseTimeRootCause],
-- | The revision number of a trace.
revision :: Prelude.Maybe Prelude.Int,
-- | Service IDs from the trace\'s segment documents.
serviceIds :: Prelude.Maybe [ServiceId],
-- | Users from the trace\'s segment documents.
users :: Prelude.Maybe [TraceUser]
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
-- |
-- Create a value of 'TraceSummary' with all optional fields omitted.
--
Use < -lens generic - lens > or < optics > to modify other optional fields .
--
-- The following record fields are available, with the corresponding lenses provided
-- for backwards compatibility:
--
-- 'annotations', 'traceSummary_annotations' - Annotations from the trace\'s segment documents.
--
-- 'availabilityZones', 'traceSummary_availabilityZones' - A list of Availability Zones for any zone corresponding to the trace
-- segments.
--
' duration ' , ' traceSummary_duration ' - The length of time in seconds between the start time of the root segment
-- and the end time of the last segment that completed.
--
-- 'entryPoint', 'traceSummary_entryPoint' - The root of a trace.
--
' errorRootCauses ' , ' traceSummary_errorRootCauses ' - A collection of ErrorRootCause structures corresponding to the trace
-- segments.
--
' faultRootCauses ' , ' traceSummary_faultRootCauses ' - A collection of FaultRootCause structures corresponding to the trace
-- segments.
--
' hasError ' , ' traceSummary_hasError ' - The root segment document has a 400 series error .
--
' ' , ' ' - The root segment document has a 500 series error .
--
' hasThrottle ' , ' traceSummary_hasThrottle ' - One or more of the segment documents has a 429 throttling error .
--
-- 'http', 'traceSummary_http' - Information about the HTTP request served by the trace.
--
-- 'id', 'traceSummary_id' - The unique identifier for the request that generated the trace\'s
-- segments and subsegments.
--
-- 'instanceIds', 'traceSummary_instanceIds' - A list of EC2 instance IDs for any instance corresponding to the trace
-- segments.
--
-- 'isPartial', 'traceSummary_isPartial' - One or more of the segment documents is in progress.
--
-- 'matchedEventTime', 'traceSummary_matchedEventTime' - The matched time stamp of a defined event.
--
-- 'resourceARNs', 'traceSummary_resourceARNs' - A list of resource ARNs for any resource corresponding to the trace
-- segments.
--
' responseTime ' , ' traceSummary_responseTime ' - The length of time in seconds between the start and end times of the
-- root segment. If the service performs work asynchronously, the response
-- time measures the time before the response is sent to the user, while
-- the duration measures the amount of time before the last traced activity
-- completes.
--
' responseTimeRootCauses ' , ' traceSummary_responseTimeRootCauses ' - A collection of structures corresponding to the
-- trace segments.
--
-- 'revision', 'traceSummary_revision' - The revision number of a trace.
--
-- 'serviceIds', 'traceSummary_serviceIds' - Service IDs from the trace\'s segment documents.
--
-- 'users', 'traceSummary_users' - Users from the trace\'s segment documents.
newTraceSummary ::
TraceSummary
newTraceSummary =
TraceSummary'
{ annotations = Prelude.Nothing,
availabilityZones = Prelude.Nothing,
duration = Prelude.Nothing,
entryPoint = Prelude.Nothing,
errorRootCauses = Prelude.Nothing,
faultRootCauses = Prelude.Nothing,
hasError = Prelude.Nothing,
hasFault = Prelude.Nothing,
hasThrottle = Prelude.Nothing,
http = Prelude.Nothing,
id = Prelude.Nothing,
instanceIds = Prelude.Nothing,
isPartial = Prelude.Nothing,
matchedEventTime = Prelude.Nothing,
resourceARNs = Prelude.Nothing,
responseTime = Prelude.Nothing,
responseTimeRootCauses = Prelude.Nothing,
revision = Prelude.Nothing,
serviceIds = Prelude.Nothing,
users = Prelude.Nothing
}
-- | Annotations from the trace\'s segment documents.
traceSummary_annotations :: Lens.Lens' TraceSummary (Prelude.Maybe (Prelude.HashMap Prelude.Text [ValueWithServiceIds]))
traceSummary_annotations = Lens.lens (\TraceSummary' {annotations} -> annotations) (\s@TraceSummary' {} a -> s {annotations = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
-- | A list of Availability Zones for any zone corresponding to the trace
-- segments.
traceSummary_availabilityZones :: Lens.Lens' TraceSummary (Prelude.Maybe [AvailabilityZoneDetail])
traceSummary_availabilityZones = Lens.lens (\TraceSummary' {availabilityZones} -> availabilityZones) (\s@TraceSummary' {} a -> s {availabilityZones = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
| The length of time in seconds between the start time of the root segment
-- and the end time of the last segment that completed.
traceSummary_duration :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Double)
traceSummary_duration = Lens.lens (\TraceSummary' {duration} -> duration) (\s@TraceSummary' {} a -> s {duration = a} :: TraceSummary)
-- | The root of a trace.
traceSummary_entryPoint :: Lens.Lens' TraceSummary (Prelude.Maybe ServiceId)
traceSummary_entryPoint = Lens.lens (\TraceSummary' {entryPoint} -> entryPoint) (\s@TraceSummary' {} a -> s {entryPoint = a} :: TraceSummary)
| A collection of ErrorRootCause structures corresponding to the trace
-- segments.
traceSummary_errorRootCauses :: Lens.Lens' TraceSummary (Prelude.Maybe [ErrorRootCause])
traceSummary_errorRootCauses = Lens.lens (\TraceSummary' {errorRootCauses} -> errorRootCauses) (\s@TraceSummary' {} a -> s {errorRootCauses = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
| A collection of FaultRootCause structures corresponding to the trace
-- segments.
traceSummary_faultRootCauses :: Lens.Lens' TraceSummary (Prelude.Maybe [FaultRootCause])
traceSummary_faultRootCauses = Lens.lens (\TraceSummary' {faultRootCauses} -> faultRootCauses) (\s@TraceSummary' {} a -> s {faultRootCauses = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
| The root segment document has a 400 series error .
traceSummary_hasError :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Bool)
traceSummary_hasError = Lens.lens (\TraceSummary' {hasError} -> hasError) (\s@TraceSummary' {} a -> s {hasError = a} :: TraceSummary)
| The root segment document has a 500 series error .
traceSummary_hasFault :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Bool)
traceSummary_hasFault = Lens.lens (\TraceSummary' {hasFault} -> hasFault) (\s@TraceSummary' {} a -> s {hasFault = a} :: TraceSummary)
| One or more of the segment documents has a 429 throttling error .
traceSummary_hasThrottle :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Bool)
traceSummary_hasThrottle = Lens.lens (\TraceSummary' {hasThrottle} -> hasThrottle) (\s@TraceSummary' {} a -> s {hasThrottle = a} :: TraceSummary)
-- | Information about the HTTP request served by the trace.
traceSummary_http :: Lens.Lens' TraceSummary (Prelude.Maybe Http)
traceSummary_http = Lens.lens (\TraceSummary' {http} -> http) (\s@TraceSummary' {} a -> s {http = a} :: TraceSummary)
-- | The unique identifier for the request that generated the trace\'s
-- segments and subsegments.
traceSummary_id :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Text)
traceSummary_id = Lens.lens (\TraceSummary' {id} -> id) (\s@TraceSummary' {} a -> s {id = a} :: TraceSummary)
-- | A list of EC2 instance IDs for any instance corresponding to the trace
-- segments.
traceSummary_instanceIds :: Lens.Lens' TraceSummary (Prelude.Maybe [InstanceIdDetail])
traceSummary_instanceIds = Lens.lens (\TraceSummary' {instanceIds} -> instanceIds) (\s@TraceSummary' {} a -> s {instanceIds = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
-- | One or more of the segment documents is in progress.
traceSummary_isPartial :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Bool)
traceSummary_isPartial = Lens.lens (\TraceSummary' {isPartial} -> isPartial) (\s@TraceSummary' {} a -> s {isPartial = a} :: TraceSummary)
-- | The matched time stamp of a defined event.
traceSummary_matchedEventTime :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.UTCTime)
traceSummary_matchedEventTime = Lens.lens (\TraceSummary' {matchedEventTime} -> matchedEventTime) (\s@TraceSummary' {} a -> s {matchedEventTime = a} :: TraceSummary) Prelude.. Lens.mapping Data._Time
-- | A list of resource ARNs for any resource corresponding to the trace
-- segments.
traceSummary_resourceARNs :: Lens.Lens' TraceSummary (Prelude.Maybe [ResourceARNDetail])
traceSummary_resourceARNs = Lens.lens (\TraceSummary' {resourceARNs} -> resourceARNs) (\s@TraceSummary' {} a -> s {resourceARNs = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
| The length of time in seconds between the start and end times of the
-- root segment. If the service performs work asynchronously, the response
-- time measures the time before the response is sent to the user, while
-- the duration measures the amount of time before the last traced activity
-- completes.
traceSummary_responseTime :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Double)
traceSummary_responseTime = Lens.lens (\TraceSummary' {responseTime} -> responseTime) (\s@TraceSummary' {} a -> s {responseTime = a} :: TraceSummary)
| A collection of structures corresponding to the
-- trace segments.
traceSummary_responseTimeRootCauses :: Lens.Lens' TraceSummary (Prelude.Maybe [ResponseTimeRootCause])
traceSummary_responseTimeRootCauses = Lens.lens (\TraceSummary' {responseTimeRootCauses} -> responseTimeRootCauses) (\s@TraceSummary' {} a -> s {responseTimeRootCauses = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
-- | The revision number of a trace.
traceSummary_revision :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Int)
traceSummary_revision = Lens.lens (\TraceSummary' {revision} -> revision) (\s@TraceSummary' {} a -> s {revision = a} :: TraceSummary)
-- | Service IDs from the trace\'s segment documents.
traceSummary_serviceIds :: Lens.Lens' TraceSummary (Prelude.Maybe [ServiceId])
traceSummary_serviceIds = Lens.lens (\TraceSummary' {serviceIds} -> serviceIds) (\s@TraceSummary' {} a -> s {serviceIds = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
-- | Users from the trace\'s segment documents.
traceSummary_users :: Lens.Lens' TraceSummary (Prelude.Maybe [TraceUser])
traceSummary_users = Lens.lens (\TraceSummary' {users} -> users) (\s@TraceSummary' {} a -> s {users = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
instance Data.FromJSON TraceSummary where
parseJSON =
Data.withObject
"TraceSummary"
( \x ->
TraceSummary'
Prelude.<$> (x Data..:? "Annotations" Data..!= Prelude.mempty)
Prelude.<*> ( x Data..:? "AvailabilityZones"
Data..!= Prelude.mempty
)
Prelude.<*> (x Data..:? "Duration")
Prelude.<*> (x Data..:? "EntryPoint")
Prelude.<*> ( x Data..:? "ErrorRootCauses"
Data..!= Prelude.mempty
)
Prelude.<*> ( x Data..:? "FaultRootCauses"
Data..!= Prelude.mempty
)
Prelude.<*> (x Data..:? "HasError")
Prelude.<*> (x Data..:? "HasFault")
Prelude.<*> (x Data..:? "HasThrottle")
Prelude.<*> (x Data..:? "Http")
Prelude.<*> (x Data..:? "Id")
Prelude.<*> (x Data..:? "InstanceIds" Data..!= Prelude.mempty)
Prelude.<*> (x Data..:? "IsPartial")
Prelude.<*> (x Data..:? "MatchedEventTime")
Prelude.<*> (x Data..:? "ResourceARNs" Data..!= Prelude.mempty)
Prelude.<*> (x Data..:? "ResponseTime")
Prelude.<*> ( x Data..:? "ResponseTimeRootCauses"
Data..!= Prelude.mempty
)
Prelude.<*> (x Data..:? "Revision")
Prelude.<*> (x Data..:? "ServiceIds" Data..!= Prelude.mempty)
Prelude.<*> (x Data..:? "Users" Data..!= Prelude.mempty)
)
instance Prelude.Hashable TraceSummary where
hashWithSalt _salt TraceSummary' {..} =
_salt `Prelude.hashWithSalt` annotations
`Prelude.hashWithSalt` availabilityZones
`Prelude.hashWithSalt` duration
`Prelude.hashWithSalt` entryPoint
`Prelude.hashWithSalt` errorRootCauses
`Prelude.hashWithSalt` faultRootCauses
`Prelude.hashWithSalt` hasError
`Prelude.hashWithSalt` hasFault
`Prelude.hashWithSalt` hasThrottle
`Prelude.hashWithSalt` http
`Prelude.hashWithSalt` id
`Prelude.hashWithSalt` instanceIds
`Prelude.hashWithSalt` isPartial
`Prelude.hashWithSalt` matchedEventTime
`Prelude.hashWithSalt` resourceARNs
`Prelude.hashWithSalt` responseTime
`Prelude.hashWithSalt` responseTimeRootCauses
`Prelude.hashWithSalt` revision
`Prelude.hashWithSalt` serviceIds
`Prelude.hashWithSalt` users
instance Prelude.NFData TraceSummary where
rnf TraceSummary' {..} =
Prelude.rnf annotations
`Prelude.seq` Prelude.rnf availabilityZones
`Prelude.seq` Prelude.rnf duration
`Prelude.seq` Prelude.rnf entryPoint
`Prelude.seq` Prelude.rnf errorRootCauses
`Prelude.seq` Prelude.rnf faultRootCauses
`Prelude.seq` Prelude.rnf hasError
`Prelude.seq` Prelude.rnf hasFault
`Prelude.seq` Prelude.rnf hasThrottle
`Prelude.seq` Prelude.rnf http
`Prelude.seq` Prelude.rnf id
`Prelude.seq` Prelude.rnf instanceIds
`Prelude.seq` Prelude.rnf isPartial
`Prelude.seq` Prelude.rnf matchedEventTime
`Prelude.seq` Prelude.rnf resourceARNs
`Prelude.seq` Prelude.rnf responseTime
`Prelude.seq` Prelude.rnf responseTimeRootCauses
`Prelude.seq` Prelude.rnf revision
`Prelude.seq` Prelude.rnf serviceIds
`Prelude.seq` Prelude.rnf users
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-xray/gen/Amazonka/XRay/Types/TraceSummary.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Stability : auto-generated
| Metadata generated from the segment documents in a trace.
/See:/ 'newTraceSummary' smart constructor.
| Annotations from the trace\'s segment documents.
| A list of Availability Zones for any zone corresponding to the trace
segments.
and the end time of the last segment that completed.
| The root of a trace.
segments.
segments.
| Information about the HTTP request served by the trace.
| The unique identifier for the request that generated the trace\'s
segments and subsegments.
| A list of EC2 instance IDs for any instance corresponding to the trace
segments.
| One or more of the segment documents is in progress.
| The matched time stamp of a defined event.
| A list of resource ARNs for any resource corresponding to the trace
segments.
root segment. If the service performs work asynchronously, the response
time measures the time before the response is sent to the user, while
the duration measures the amount of time before the last traced activity
completes.
trace segments.
| The revision number of a trace.
| Service IDs from the trace\'s segment documents.
| Users from the trace\'s segment documents.
|
Create a value of 'TraceSummary' with all optional fields omitted.
The following record fields are available, with the corresponding lenses provided
for backwards compatibility:
'annotations', 'traceSummary_annotations' - Annotations from the trace\'s segment documents.
'availabilityZones', 'traceSummary_availabilityZones' - A list of Availability Zones for any zone corresponding to the trace
segments.
and the end time of the last segment that completed.
'entryPoint', 'traceSummary_entryPoint' - The root of a trace.
segments.
segments.
'http', 'traceSummary_http' - Information about the HTTP request served by the trace.
'id', 'traceSummary_id' - The unique identifier for the request that generated the trace\'s
segments and subsegments.
'instanceIds', 'traceSummary_instanceIds' - A list of EC2 instance IDs for any instance corresponding to the trace
segments.
'isPartial', 'traceSummary_isPartial' - One or more of the segment documents is in progress.
'matchedEventTime', 'traceSummary_matchedEventTime' - The matched time stamp of a defined event.
'resourceARNs', 'traceSummary_resourceARNs' - A list of resource ARNs for any resource corresponding to the trace
segments.
root segment. If the service performs work asynchronously, the response
time measures the time before the response is sent to the user, while
the duration measures the amount of time before the last traced activity
completes.
trace segments.
'revision', 'traceSummary_revision' - The revision number of a trace.
'serviceIds', 'traceSummary_serviceIds' - Service IDs from the trace\'s segment documents.
'users', 'traceSummary_users' - Users from the trace\'s segment documents.
| Annotations from the trace\'s segment documents.
| A list of Availability Zones for any zone corresponding to the trace
segments.
and the end time of the last segment that completed.
| The root of a trace.
segments.
segments.
| Information about the HTTP request served by the trace.
| The unique identifier for the request that generated the trace\'s
segments and subsegments.
| A list of EC2 instance IDs for any instance corresponding to the trace
segments.
| One or more of the segment documents is in progress.
| The matched time stamp of a defined event.
| A list of resource ARNs for any resource corresponding to the trace
segments.
root segment. If the service performs work asynchronously, the response
time measures the time before the response is sent to the user, while
the duration measures the amount of time before the last traced activity
completes.
trace segments.
| The revision number of a trace.
| Service IDs from the trace\'s segment documents.
| Users from the trace\'s segment documents. | # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
Module : Amazonka . . Types . TraceSummary
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Amazonka.XRay.Types.TraceSummary where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import Amazonka.XRay.Types.AvailabilityZoneDetail
import Amazonka.XRay.Types.ErrorRootCause
import Amazonka.XRay.Types.FaultRootCause
import Amazonka.XRay.Types.Http
import Amazonka.XRay.Types.InstanceIdDetail
import Amazonka.XRay.Types.ResourceARNDetail
import Amazonka.XRay.Types.ResponseTimeRootCause
import Amazonka.XRay.Types.ServiceId
import Amazonka.XRay.Types.TraceUser
import Amazonka.XRay.Types.ValueWithServiceIds
data TraceSummary = TraceSummary'
annotations :: Prelude.Maybe (Prelude.HashMap Prelude.Text [ValueWithServiceIds]),
availabilityZones :: Prelude.Maybe [AvailabilityZoneDetail],
| The length of time in seconds between the start time of the root segment
duration :: Prelude.Maybe Prelude.Double,
entryPoint :: Prelude.Maybe ServiceId,
| A collection of ErrorRootCause structures corresponding to the trace
errorRootCauses :: Prelude.Maybe [ErrorRootCause],
| A collection of FaultRootCause structures corresponding to the trace
faultRootCauses :: Prelude.Maybe [FaultRootCause],
| The root segment document has a 400 series error .
hasError :: Prelude.Maybe Prelude.Bool,
| The root segment document has a 500 series error .
hasFault :: Prelude.Maybe Prelude.Bool,
| One or more of the segment documents has a 429 throttling error .
hasThrottle :: Prelude.Maybe Prelude.Bool,
http :: Prelude.Maybe Http,
id :: Prelude.Maybe Prelude.Text,
instanceIds :: Prelude.Maybe [InstanceIdDetail],
isPartial :: Prelude.Maybe Prelude.Bool,
matchedEventTime :: Prelude.Maybe Data.POSIX,
resourceARNs :: Prelude.Maybe [ResourceARNDetail],
| The length of time in seconds between the start and end times of the
responseTime :: Prelude.Maybe Prelude.Double,
| A collection of structures corresponding to the
responseTimeRootCauses :: Prelude.Maybe [ResponseTimeRootCause],
revision :: Prelude.Maybe Prelude.Int,
serviceIds :: Prelude.Maybe [ServiceId],
users :: Prelude.Maybe [TraceUser]
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
Use < -lens generic - lens > or < optics > to modify other optional fields .
' duration ' , ' traceSummary_duration ' - The length of time in seconds between the start time of the root segment
' errorRootCauses ' , ' traceSummary_errorRootCauses ' - A collection of ErrorRootCause structures corresponding to the trace
' faultRootCauses ' , ' traceSummary_faultRootCauses ' - A collection of FaultRootCause structures corresponding to the trace
' hasError ' , ' traceSummary_hasError ' - The root segment document has a 400 series error .
' ' , ' ' - The root segment document has a 500 series error .
' hasThrottle ' , ' traceSummary_hasThrottle ' - One or more of the segment documents has a 429 throttling error .
' responseTime ' , ' traceSummary_responseTime ' - The length of time in seconds between the start and end times of the
' responseTimeRootCauses ' , ' traceSummary_responseTimeRootCauses ' - A collection of structures corresponding to the
newTraceSummary ::
TraceSummary
newTraceSummary =
TraceSummary'
{ annotations = Prelude.Nothing,
availabilityZones = Prelude.Nothing,
duration = Prelude.Nothing,
entryPoint = Prelude.Nothing,
errorRootCauses = Prelude.Nothing,
faultRootCauses = Prelude.Nothing,
hasError = Prelude.Nothing,
hasFault = Prelude.Nothing,
hasThrottle = Prelude.Nothing,
http = Prelude.Nothing,
id = Prelude.Nothing,
instanceIds = Prelude.Nothing,
isPartial = Prelude.Nothing,
matchedEventTime = Prelude.Nothing,
resourceARNs = Prelude.Nothing,
responseTime = Prelude.Nothing,
responseTimeRootCauses = Prelude.Nothing,
revision = Prelude.Nothing,
serviceIds = Prelude.Nothing,
users = Prelude.Nothing
}
traceSummary_annotations :: Lens.Lens' TraceSummary (Prelude.Maybe (Prelude.HashMap Prelude.Text [ValueWithServiceIds]))
traceSummary_annotations = Lens.lens (\TraceSummary' {annotations} -> annotations) (\s@TraceSummary' {} a -> s {annotations = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
traceSummary_availabilityZones :: Lens.Lens' TraceSummary (Prelude.Maybe [AvailabilityZoneDetail])
traceSummary_availabilityZones = Lens.lens (\TraceSummary' {availabilityZones} -> availabilityZones) (\s@TraceSummary' {} a -> s {availabilityZones = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
| The length of time in seconds between the start time of the root segment
traceSummary_duration :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Double)
traceSummary_duration = Lens.lens (\TraceSummary' {duration} -> duration) (\s@TraceSummary' {} a -> s {duration = a} :: TraceSummary)
traceSummary_entryPoint :: Lens.Lens' TraceSummary (Prelude.Maybe ServiceId)
traceSummary_entryPoint = Lens.lens (\TraceSummary' {entryPoint} -> entryPoint) (\s@TraceSummary' {} a -> s {entryPoint = a} :: TraceSummary)
| A collection of ErrorRootCause structures corresponding to the trace
traceSummary_errorRootCauses :: Lens.Lens' TraceSummary (Prelude.Maybe [ErrorRootCause])
traceSummary_errorRootCauses = Lens.lens (\TraceSummary' {errorRootCauses} -> errorRootCauses) (\s@TraceSummary' {} a -> s {errorRootCauses = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
| A collection of FaultRootCause structures corresponding to the trace
traceSummary_faultRootCauses :: Lens.Lens' TraceSummary (Prelude.Maybe [FaultRootCause])
traceSummary_faultRootCauses = Lens.lens (\TraceSummary' {faultRootCauses} -> faultRootCauses) (\s@TraceSummary' {} a -> s {faultRootCauses = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
| The root segment document has a 400 series error .
traceSummary_hasError :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Bool)
traceSummary_hasError = Lens.lens (\TraceSummary' {hasError} -> hasError) (\s@TraceSummary' {} a -> s {hasError = a} :: TraceSummary)
| The root segment document has a 500 series error .
traceSummary_hasFault :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Bool)
traceSummary_hasFault = Lens.lens (\TraceSummary' {hasFault} -> hasFault) (\s@TraceSummary' {} a -> s {hasFault = a} :: TraceSummary)
| One or more of the segment documents has a 429 throttling error .
traceSummary_hasThrottle :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Bool)
traceSummary_hasThrottle = Lens.lens (\TraceSummary' {hasThrottle} -> hasThrottle) (\s@TraceSummary' {} a -> s {hasThrottle = a} :: TraceSummary)
traceSummary_http :: Lens.Lens' TraceSummary (Prelude.Maybe Http)
traceSummary_http = Lens.lens (\TraceSummary' {http} -> http) (\s@TraceSummary' {} a -> s {http = a} :: TraceSummary)
traceSummary_id :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Text)
traceSummary_id = Lens.lens (\TraceSummary' {id} -> id) (\s@TraceSummary' {} a -> s {id = a} :: TraceSummary)
traceSummary_instanceIds :: Lens.Lens' TraceSummary (Prelude.Maybe [InstanceIdDetail])
traceSummary_instanceIds = Lens.lens (\TraceSummary' {instanceIds} -> instanceIds) (\s@TraceSummary' {} a -> s {instanceIds = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
traceSummary_isPartial :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Bool)
traceSummary_isPartial = Lens.lens (\TraceSummary' {isPartial} -> isPartial) (\s@TraceSummary' {} a -> s {isPartial = a} :: TraceSummary)
traceSummary_matchedEventTime :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.UTCTime)
traceSummary_matchedEventTime = Lens.lens (\TraceSummary' {matchedEventTime} -> matchedEventTime) (\s@TraceSummary' {} a -> s {matchedEventTime = a} :: TraceSummary) Prelude.. Lens.mapping Data._Time
traceSummary_resourceARNs :: Lens.Lens' TraceSummary (Prelude.Maybe [ResourceARNDetail])
traceSummary_resourceARNs = Lens.lens (\TraceSummary' {resourceARNs} -> resourceARNs) (\s@TraceSummary' {} a -> s {resourceARNs = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
| The length of time in seconds between the start and end times of the
traceSummary_responseTime :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Double)
traceSummary_responseTime = Lens.lens (\TraceSummary' {responseTime} -> responseTime) (\s@TraceSummary' {} a -> s {responseTime = a} :: TraceSummary)
| A collection of structures corresponding to the
traceSummary_responseTimeRootCauses :: Lens.Lens' TraceSummary (Prelude.Maybe [ResponseTimeRootCause])
traceSummary_responseTimeRootCauses = Lens.lens (\TraceSummary' {responseTimeRootCauses} -> responseTimeRootCauses) (\s@TraceSummary' {} a -> s {responseTimeRootCauses = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
traceSummary_revision :: Lens.Lens' TraceSummary (Prelude.Maybe Prelude.Int)
traceSummary_revision = Lens.lens (\TraceSummary' {revision} -> revision) (\s@TraceSummary' {} a -> s {revision = a} :: TraceSummary)
traceSummary_serviceIds :: Lens.Lens' TraceSummary (Prelude.Maybe [ServiceId])
traceSummary_serviceIds = Lens.lens (\TraceSummary' {serviceIds} -> serviceIds) (\s@TraceSummary' {} a -> s {serviceIds = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
traceSummary_users :: Lens.Lens' TraceSummary (Prelude.Maybe [TraceUser])
traceSummary_users = Lens.lens (\TraceSummary' {users} -> users) (\s@TraceSummary' {} a -> s {users = a} :: TraceSummary) Prelude.. Lens.mapping Lens.coerced
instance Data.FromJSON TraceSummary where
parseJSON =
Data.withObject
"TraceSummary"
( \x ->
TraceSummary'
Prelude.<$> (x Data..:? "Annotations" Data..!= Prelude.mempty)
Prelude.<*> ( x Data..:? "AvailabilityZones"
Data..!= Prelude.mempty
)
Prelude.<*> (x Data..:? "Duration")
Prelude.<*> (x Data..:? "EntryPoint")
Prelude.<*> ( x Data..:? "ErrorRootCauses"
Data..!= Prelude.mempty
)
Prelude.<*> ( x Data..:? "FaultRootCauses"
Data..!= Prelude.mempty
)
Prelude.<*> (x Data..:? "HasError")
Prelude.<*> (x Data..:? "HasFault")
Prelude.<*> (x Data..:? "HasThrottle")
Prelude.<*> (x Data..:? "Http")
Prelude.<*> (x Data..:? "Id")
Prelude.<*> (x Data..:? "InstanceIds" Data..!= Prelude.mempty)
Prelude.<*> (x Data..:? "IsPartial")
Prelude.<*> (x Data..:? "MatchedEventTime")
Prelude.<*> (x Data..:? "ResourceARNs" Data..!= Prelude.mempty)
Prelude.<*> (x Data..:? "ResponseTime")
Prelude.<*> ( x Data..:? "ResponseTimeRootCauses"
Data..!= Prelude.mempty
)
Prelude.<*> (x Data..:? "Revision")
Prelude.<*> (x Data..:? "ServiceIds" Data..!= Prelude.mempty)
Prelude.<*> (x Data..:? "Users" Data..!= Prelude.mempty)
)
instance Prelude.Hashable TraceSummary where
hashWithSalt _salt TraceSummary' {..} =
_salt `Prelude.hashWithSalt` annotations
`Prelude.hashWithSalt` availabilityZones
`Prelude.hashWithSalt` duration
`Prelude.hashWithSalt` entryPoint
`Prelude.hashWithSalt` errorRootCauses
`Prelude.hashWithSalt` faultRootCauses
`Prelude.hashWithSalt` hasError
`Prelude.hashWithSalt` hasFault
`Prelude.hashWithSalt` hasThrottle
`Prelude.hashWithSalt` http
`Prelude.hashWithSalt` id
`Prelude.hashWithSalt` instanceIds
`Prelude.hashWithSalt` isPartial
`Prelude.hashWithSalt` matchedEventTime
`Prelude.hashWithSalt` resourceARNs
`Prelude.hashWithSalt` responseTime
`Prelude.hashWithSalt` responseTimeRootCauses
`Prelude.hashWithSalt` revision
`Prelude.hashWithSalt` serviceIds
`Prelude.hashWithSalt` users
instance Prelude.NFData TraceSummary where
rnf TraceSummary' {..} =
Prelude.rnf annotations
`Prelude.seq` Prelude.rnf availabilityZones
`Prelude.seq` Prelude.rnf duration
`Prelude.seq` Prelude.rnf entryPoint
`Prelude.seq` Prelude.rnf errorRootCauses
`Prelude.seq` Prelude.rnf faultRootCauses
`Prelude.seq` Prelude.rnf hasError
`Prelude.seq` Prelude.rnf hasFault
`Prelude.seq` Prelude.rnf hasThrottle
`Prelude.seq` Prelude.rnf http
`Prelude.seq` Prelude.rnf id
`Prelude.seq` Prelude.rnf instanceIds
`Prelude.seq` Prelude.rnf isPartial
`Prelude.seq` Prelude.rnf matchedEventTime
`Prelude.seq` Prelude.rnf resourceARNs
`Prelude.seq` Prelude.rnf responseTime
`Prelude.seq` Prelude.rnf responseTimeRootCauses
`Prelude.seq` Prelude.rnf revision
`Prelude.seq` Prelude.rnf serviceIds
`Prelude.seq` Prelude.rnf users
|
f2007d889ef2a94cc0d9019c286059db1e72eea1d54fb20930e8f305253fe251 | incjung/cl-swagger-codegen | make-apis.lisp | (ql:quickload "oauth2")
(defpackage swagger.test
(:use cl oauth2 cl-swagger))
(in-package swagger.test)
TEST 1 ) PETSTORE
(generate-client "" #p"./example/pet-api-client.lisp")
(defun rest-call (host url-path
&key params content basic-authorization
(method :get)
(accept "application/json")
(content-type "application/json"))
"call http-request with basic params and conteent and authorization"
(multiple-value-bind (stream code)
(drakma:http-request (format nil "~a~a" host url-path) :parameters params :content content :basic-authorization basic-authorization :accept accept :content-type content-type :want-stream t :method method)
(if (equal code 200)
(progn (setf (flexi-streams:flexi-stream-external-format stream) :utf-8)
(cl-json:decode-json stream))
(format t "HTTP CODE : ~A ~%" code))))
(defun post-pet (&key params content basic-authorization)
(rest-call "" "/pet" :params params :content content
:basic-authorization basic-authorization
:method :post
:accept "application/json"
:content-type "application/json"))
(post-pet
:content (cl-json:encode-json-to-string '((id . 0)
(:category . ((:id . 0) (:name . "string")))
(:name . "doggie")
("photoUrls" . #("string"))
(:tags . (((:id . 0)
(:name . "string"))))
(:status . "available"))))
= = > curl -X POST " " -H " accept : application / xml " -H " Content - Type : application / json " -d " { \"id\ " : 0 , \"category\ " : : 0 , \"name\ " : \"string\ " } , \"name\ " : \"doggie\ " , \"photoUrls\ " : [ \"string\ " ] , \"tags\ " : [ { \"id\ " : 0 , \"name\ " : \"string\ " } ] , " : \"available\ " } "
= = > ( (: |id| . -9223372036854775808 ) (: (: |id| . 0 ) (: |name| . " string " ) ) (: |name| . " doggie " ) (: |photoUrls| " string " ) (: |tags| ( (: |id| . 0 ) (: |name| . " string " ) ) ) (: . " available " ) )
;;
;; Returns a single pet
;; * path-url : /pet/{petId}
;;
(defun get-pet (path-url &key params content basic-authorization)
(rest-call "" path-url :params params :content content
:basic-authorization basic-authorization
:method :get
:accept "application/json"
:content-type "application/json"))
(get-pet "/pet/0")
TEST 2 ) Shortener URL
(generate-client #p"./example/urlshortener.json" #p"./example/urlshortener-api-client.lisp")
(defparameter *google-key* "----------YOURS--------------")
(defun get-url (&key params content basic-authorization)
(rest-call "" "/url" :params params :content content
:basic-authorization basic-authorization
:method :get
:accept "application/json"
:content-type "application/json"))
(get-url :params '(("key" . "--------YOURS------")
("shortUrl" . "")))
;; curl -X GET "" -H "accept: application/json"
TEST 3 ) Google Calendar
(generate-client #p"./example/cal-swagger.json" #p"./example/cal-api-client.lisp")
(defparameter *client-id* "-----YOURS-----")
(defparameter *client-secret* "-----YOURS-----")
(defparameter *redirect-uri* "urn:ietf:wg:oauth:2.0:oob")
(defparameter *other* '(nil))
(defparameter *other* '(("access_type" . "offline")))
(defparameter *redirect*
(request-code
""
*client-id*
:scope " "
:redirect-uri *redirect-uri*
:other *other*))
;; (defparameter *redirect*
;; (request-code
;; ""
;; *client-id*
;; :scope "-platform"
;; :redirect-uri *redirect-uri*))
(format t "~%~%Go to ~A and come back with the code: " *redirect*)
;;(defparameter *code* (read-line))
(defparameter *code* "4/AABpGWA8iVjaLIhQrS4lrBTH_ugyaXos7Tv-qyEC3YUcWzjLYrlMCk8")
(ql:quickload "oauth2")
(defparameter *token*
(request-token
""
*code*
:redirect-uri *redirect-uri*
:method :post
:other `(("client_id" . ,*client-id*)
("client_secret" . ,*client-secret*))))
(format t "I got a token:~%~A~%" *token*)
(defparameter access-token (token-string *token*))
(defparameter access-token "ya29.GlueBVO2gTnJ3EY-m0roOz0YEuuKeOYKpqvgKXKVF-cZRtkaOaB4vrCRfKB8KhNv8INLHDlgDuBtyM_D96Rpc96mlAVrZ6A8DCbFGFL6VE5-TC38AwB-Mkgdwdj7")
(defparameter refresh-token (token-refresh-token *token*))
(defparameter *refreshed-token*
(refresh-token
""
*token*
:method :post
:other `(("client_id" . ,*client-id*)
("client_secret" . ,*client-secret*)))
)
(format t " I get a refreshed token : ~A~%" *refreshed-token*)
;;
Returns entries on the user's calendar list .
;; * path-url : /users/me/calendarList
;;
(defun get-users-me-calendarlist (&key params content basic-authorization)
(rest-call "" "/users/me/calendarList" :params params :content content
:basic-authorization basic-authorization
:method :get
:accept "application/json"
:content-type "application/json"))
(get-users-me-calendarlist :params `(("access_token" . ,access-token)))
| null | https://raw.githubusercontent.com/incjung/cl-swagger-codegen/23bd1d2e895cccb5a87b5a2a2798e404798e1527/example/make-apis.lisp | lisp |
Returns a single pet
* path-url : /pet/{petId}
curl -X GET "" -H "accept: application/json"
(defparameter *redirect*
(request-code
""
*client-id*
:scope "-platform"
:redirect-uri *redirect-uri*))
(defparameter *code* (read-line))
s calendar list .
* path-url : /users/me/calendarList
| (ql:quickload "oauth2")
(defpackage swagger.test
(:use cl oauth2 cl-swagger))
(in-package swagger.test)
TEST 1 ) PETSTORE
(generate-client "" #p"./example/pet-api-client.lisp")
(defun rest-call (host url-path
&key params content basic-authorization
(method :get)
(accept "application/json")
(content-type "application/json"))
"call http-request with basic params and conteent and authorization"
(multiple-value-bind (stream code)
(drakma:http-request (format nil "~a~a" host url-path) :parameters params :content content :basic-authorization basic-authorization :accept accept :content-type content-type :want-stream t :method method)
(if (equal code 200)
(progn (setf (flexi-streams:flexi-stream-external-format stream) :utf-8)
(cl-json:decode-json stream))
(format t "HTTP CODE : ~A ~%" code))))
(defun post-pet (&key params content basic-authorization)
(rest-call "" "/pet" :params params :content content
:basic-authorization basic-authorization
:method :post
:accept "application/json"
:content-type "application/json"))
(post-pet
:content (cl-json:encode-json-to-string '((id . 0)
(:category . ((:id . 0) (:name . "string")))
(:name . "doggie")
("photoUrls" . #("string"))
(:tags . (((:id . 0)
(:name . "string"))))
(:status . "available"))))
= = > curl -X POST " " -H " accept : application / xml " -H " Content - Type : application / json " -d " { \"id\ " : 0 , \"category\ " : : 0 , \"name\ " : \"string\ " } , \"name\ " : \"doggie\ " , \"photoUrls\ " : [ \"string\ " ] , \"tags\ " : [ { \"id\ " : 0 , \"name\ " : \"string\ " } ] , " : \"available\ " } "
= = > ( (: |id| . -9223372036854775808 ) (: (: |id| . 0 ) (: |name| . " string " ) ) (: |name| . " doggie " ) (: |photoUrls| " string " ) (: |tags| ( (: |id| . 0 ) (: |name| . " string " ) ) ) (: . " available " ) )
(defun get-pet (path-url &key params content basic-authorization)
(rest-call "" path-url :params params :content content
:basic-authorization basic-authorization
:method :get
:accept "application/json"
:content-type "application/json"))
(get-pet "/pet/0")
TEST 2 ) Shortener URL
(generate-client #p"./example/urlshortener.json" #p"./example/urlshortener-api-client.lisp")
(defparameter *google-key* "----------YOURS--------------")
(defun get-url (&key params content basic-authorization)
(rest-call "" "/url" :params params :content content
:basic-authorization basic-authorization
:method :get
:accept "application/json"
:content-type "application/json"))
(get-url :params '(("key" . "--------YOURS------")
("shortUrl" . "")))
TEST 3 ) Google Calendar
(generate-client #p"./example/cal-swagger.json" #p"./example/cal-api-client.lisp")
(defparameter *client-id* "-----YOURS-----")
(defparameter *client-secret* "-----YOURS-----")
(defparameter *redirect-uri* "urn:ietf:wg:oauth:2.0:oob")
(defparameter *other* '(nil))
(defparameter *other* '(("access_type" . "offline")))
(defparameter *redirect*
(request-code
""
*client-id*
:scope " "
:redirect-uri *redirect-uri*
:other *other*))
(format t "~%~%Go to ~A and come back with the code: " *redirect*)
(defparameter *code* "4/AABpGWA8iVjaLIhQrS4lrBTH_ugyaXos7Tv-qyEC3YUcWzjLYrlMCk8")
(ql:quickload "oauth2")
(defparameter *token*
(request-token
""
*code*
:redirect-uri *redirect-uri*
:method :post
:other `(("client_id" . ,*client-id*)
("client_secret" . ,*client-secret*))))
(format t "I got a token:~%~A~%" *token*)
(defparameter access-token (token-string *token*))
(defparameter access-token "ya29.GlueBVO2gTnJ3EY-m0roOz0YEuuKeOYKpqvgKXKVF-cZRtkaOaB4vrCRfKB8KhNv8INLHDlgDuBtyM_D96Rpc96mlAVrZ6A8DCbFGFL6VE5-TC38AwB-Mkgdwdj7")
(defparameter refresh-token (token-refresh-token *token*))
(defparameter *refreshed-token*
(refresh-token
""
*token*
:method :post
:other `(("client_id" . ,*client-id*)
("client_secret" . ,*client-secret*)))
)
(format t " I get a refreshed token : ~A~%" *refreshed-token*)
(defun get-users-me-calendarlist (&key params content basic-authorization)
(rest-call "" "/users/me/calendarList" :params params :content content
:basic-authorization basic-authorization
:method :get
:accept "application/json"
:content-type "application/json"))
(get-users-me-calendarlist :params `(("access_token" . ,access-token)))
|
18d2e8271a83a65f35334eade08d263d034cef65d1b799537890ff88ef0b85ff | Quviq/epoch-eqc | names_eqc.erl | @author < >
( C ) 2019 ,
%%% @doc
%%%
%%% @end
Created : 15 Aug 2019 by < >
-module(names_eqc).
-include_lib("eqc/include/eqc.hrl").
-compile([export_all, nowarn_export_all]).
old Fortuna code :
name_hash(NameAscii) ->
Labels = binary:split(NameAscii, <<".">>, [global]),
hash_labels(lists:reverse(Labels)).
hash_labels([]) ->
empty_hash();
hash_labels([Label | Rest]) ->
LabelHash = hash(Label),
RestHash = hash_labels(Rest),
hash(<<RestHash/binary, LabelHash/binary>>).
empty_hash() ->
<<0:32/unit:8>>.
hash(Bin) ->
aec_hash:hash(aens, Bin).
prop_legacy() ->
?FORALL(Strings, list(utf8()),
begin
Name = iolist_to_binary(lists:join(".", Strings)),
OldHash = (catch {ok, name_hash(Name)}),
NewHash = (catch {ok, aens_hash:name_hash(Name)}),
?WHENFAIL(eqc:format("old hash: ~p\nnew hash: ~p\n",
[OldHash, NewHash]),
case {OldHash, NewHash} of
{{ok, H1}, {ok, H2}} -> collect(ok, true);
{{ok, _}, NewHash} -> false;
{_, {ok, _}} -> false;
_ -> collect(error, true) %% both raise exception
end)
end).
| null | https://raw.githubusercontent.com/Quviq/epoch-eqc/87fb692697f9377b95a3fa3f04efce75d8b52f87/aens_eqc/names_eqc.erl | erlang | @doc
@end
both raise exception | @author < >
( C ) 2019 ,
Created : 15 Aug 2019 by < >
-module(names_eqc).
-include_lib("eqc/include/eqc.hrl").
-compile([export_all, nowarn_export_all]).
old Fortuna code :
name_hash(NameAscii) ->
Labels = binary:split(NameAscii, <<".">>, [global]),
hash_labels(lists:reverse(Labels)).
hash_labels([]) ->
empty_hash();
hash_labels([Label | Rest]) ->
LabelHash = hash(Label),
RestHash = hash_labels(Rest),
hash(<<RestHash/binary, LabelHash/binary>>).
empty_hash() ->
<<0:32/unit:8>>.
hash(Bin) ->
aec_hash:hash(aens, Bin).
prop_legacy() ->
?FORALL(Strings, list(utf8()),
begin
Name = iolist_to_binary(lists:join(".", Strings)),
OldHash = (catch {ok, name_hash(Name)}),
NewHash = (catch {ok, aens_hash:name_hash(Name)}),
?WHENFAIL(eqc:format("old hash: ~p\nnew hash: ~p\n",
[OldHash, NewHash]),
case {OldHash, NewHash} of
{{ok, H1}, {ok, H2}} -> collect(ok, true);
{{ok, _}, NewHash} -> false;
{_, {ok, _}} -> false;
end)
end).
|
ebd51b9b2cef15200437970c4f8ff735853354ef7ce6b5f3f2b6fd94ec192ef9 | charlieg/Sparser | menus.lisp | ;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:(USER LISP) -*-
copyright ( c ) 1990 Content Technologies , Inc. -- all rights reserved
;;;
;;; file: "run"
module : " interface;menus "
;;;
(in-package :user)
;;;-------------------------------------------------
;;; running over designated source directories
;;;-------------------------------------------------
(defun install-AssetNet-menu (&aux tmp)
(if (setq tmp (find-menu "CTI"))
(ask tmp (menu-deinstall)))
(let ((menu
(oneof *menu*
:menu-title "CTI"
:menu-items
(list
(oneof *window-menu-item*
:menu-item-title "Look for articles"
:menu-item-action 'AssetNet-loop )
(oneof *window-menu-item*
:menu-item-title "stop"
:menu-item-action 'terminate-AssetNet-loop )
))))
(ask menu (menu-install))
(ask menu (menu-enable))
menu ))
;; (install-AssetNet-menu) --executed from a menu-bar file
| null | https://raw.githubusercontent.com/charlieg/Sparser/b9bb7d01d2e40f783f3214fc104062db3d15e608/Sparser/code/s/interface/AssetNet/menus.lisp | lisp | -*- Mode:LISP; Syntax:Common-Lisp; Package:(USER LISP) -*-
file: "run"
-------------------------------------------------
running over designated source directories
-------------------------------------------------
(install-AssetNet-menu) --executed from a menu-bar file | copyright ( c ) 1990 Content Technologies , Inc. -- all rights reserved
module : " interface;menus "
(in-package :user)
(defun install-AssetNet-menu (&aux tmp)
(if (setq tmp (find-menu "CTI"))
(ask tmp (menu-deinstall)))
(let ((menu
(oneof *menu*
:menu-title "CTI"
:menu-items
(list
(oneof *window-menu-item*
:menu-item-title "Look for articles"
:menu-item-action 'AssetNet-loop )
(oneof *window-menu-item*
:menu-item-title "stop"
:menu-item-action 'terminate-AssetNet-loop )
))))
(ask menu (menu-install))
(ask menu (menu-enable))
menu ))
|
9a67f9236c2b4971b72986f4b13f4f0035521effc950ddcd01cffd7635559189 | slyrus/cl-bio | defpackage.lisp | Copyright ( c ) 2006 - 2008 ( )
;;; All rights reserved.
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; * Redistributions in binary form must reproduce the above
;;; copyright notice, this list of conditions and the following
;;; disclaimer in the documentation and/or other materials
;;; provided with the distribution.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
;;; ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
;;; GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
;;; WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
(in-package #:cl-user)

;; Package definition for the sequence-alignment component of cl-bio.
;; Exports global and local alignment entry points for nucleic acids
;; (-na) and amino acids (-aa), each with a plain and an affine-gap
;; variant, plus accessors for the resulting score and alignment
;; results (presumably the aligned sequences -- confirm in the
;; alignment implementation).
(defpackage #:bio-align
  (:nicknames #:align)
  (:use #:cl #:bio)
  (:export #:global-align-na
           #:global-align-aa
           #:global-align-na-affine-gaps
           #:global-align-aa-affine-gaps
           #:local-align-na
           #:local-align-aa
           #:local-align-na-affine-gaps
           #:local-align-aa-affine-gaps
           #:alignment-score
           #:alignment-results))
| null | https://raw.githubusercontent.com/slyrus/cl-bio/e6de2bc7f4accaa11466902407e43fae3184973f/align/defpackage.lisp | lisp | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
| Copyright ( c ) 2006 - 2008 ( )
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
(in-package #:cl-user)
(defpackage #:bio-align
(:nicknames #:align)
(:use #:cl #:bio)
(:export #:global-align-na
#:global-align-aa
#:global-align-na-affine-gaps
#:global-align-aa-affine-gaps
#:local-align-na
#:local-align-aa
#:local-align-na-affine-gaps
#:local-align-aa-affine-gaps
#:alignment-score
#:alignment-results))
|
c66508c9a5f6e1259f5e597e6cfd5d17c9c1407e974959ba263f2fd08f5a62da | acl2/acl2 | cgen-testing.lisp | $ ACL2s - Preamble$ ;
( include - book ; ; to fool ACL2 / cert.pl dependency scanner
" portcullis " )
( begin - book t : ttags : all);$ACL2s - Preamble$
(include-book ;; Newline to fool ACL2/cert.pl dependency scanner
"portcullis")
(begin-book t :ttags :all);$ACL2s-Preamble$|#
(in-package "ACL2S")

;; Load the CCG termination analysis, the ACL2s base and custom
;; theories, and Cgen's counterexample-generation machinery used by
;; the test? forms below.
(include-book "acl2s/ccg/ccg" :uncertified-okp nil :dir :system :ttags
              ((:ccg)) :load-compiled-file nil)
(include-book "acl2s/base-theory" :dir :system :ttags :all)
(include-book "acl2s/custom" :dir :system :ttags :all)
(include-book "acl2s/cgen/top" :dir :system :ttags :all)

;; Each form below must succeed: test? searches for counterexamples
;; and succeeds when none is found.  The conjectures exercise Cgen's
;; handling of (chains of) equal/== hypotheses, including trivially
;; true (and) conclusions.
(must-succeed
 (test? (implies (and (equal x y))
                 (and))))

(must-succeed
 (test? (implies (and (equal x y) (equal y x) (equal x y))
                 (and))))

(must-succeed
 (test? (implies (and (equal x y) (equal x (+ z y)))
                 (and))))

(must-succeed
 (test? (implies (and (equal x y))
                 (and (equal x y)))))

(must-succeed
 (test? (implies (and (== x y) (== y x) (== z y))
                 (== x y))))

(must-succeed
 (test? (implies (and (== x y) (== y x) (== z y))
                 (== x z))))

;; Longer equality chain: x == y == b == a through two pair chains.
(must-succeed
 (test? (implies (and (== x y) (== y x) (== z y) (== a b) (== c d) (== b x) (== d y))
                 (== x a))))
| null | https://raw.githubusercontent.com/acl2/acl2/d7853d895191a1f695b6f2ec9b3dedebbc03053e/books/acl2s/cgen-testing.lisp | lisp |
; to fool ACL2 / cert.pl dependency scanner
$ACL2s - Preamble$
Newline to fool ACL2/cert.pl dependency scanner
$ACL2s-Preamble$|# | " portcullis " )
"portcullis")
(in-package "ACL2S")
(include-book "acl2s/ccg/ccg" :uncertified-okp nil :dir :system :ttags
((:ccg)) :load-compiled-file nil)
(include-book "acl2s/base-theory" :dir :system :ttags :all)
(include-book "acl2s/custom" :dir :system :ttags :all)
(include-book "acl2s/cgen/top" :dir :system :ttags :all)
(must-succeed
(test? (implies (and (equal x y))
(and))))
(must-succeed
(test? (implies (and (equal x y) (equal y x) (equal x y))
(and))))
(must-succeed
(test? (implies (and (equal x y) (equal x (+ z y)))
(and))))
(must-succeed
(test? (implies (and (equal x y))
(and (equal x y)))))
(must-succeed
(test? (implies (and (== x y) (== y x) (== z y))
(== x y))))
(must-succeed
(test? (implies (and (== x y) (== y x) (== z y))
(== x z))))
(must-succeed
(test? (implies (and (== x y) (== y x) (== z y) (== a b) (== c d) (== b x) (== d y))
(== x a))))
|
ec7dea45e712f8ef84c02ec68d374465cfded3163a6fd0417ba06e8877057c5a | snapframework/cufp2011 | Main.hs | {-# LANGUAGE OverloadedStrings #-}
module Main where
import Snap.Chat.API.Handlers
import Snap.Chat.ChatRoom
import Snap.Chat.Types
import Snap.Core
import Snap.Http.Server
import Snap.Util.FileServe
import Web.ClientSession
handler :: Key -> ChatRoom -> Snap ()
handler key chatRoom = route [ ("" , root )
, ("api" , apiHandlers key chatRoom)
]
where
root = serveDirectory "static"
main :: IO ()
main = do
key <- getDefaultKey
withChatRoom 200 $ quickHttpServe . handler key
| null | https://raw.githubusercontent.com/snapframework/cufp2011/6519513ef80d561efc9846847860b50a19c7b9f6/src/Snap/Chat/Main.hs | haskell | # LANGUAGE OverloadedStrings # |
module Main where
import Snap.Chat.API.Handlers
import Snap.Chat.ChatRoom
import Snap.Chat.Types
import Snap.Core
import Snap.Http.Server
import Snap.Util.FileServe
import Web.ClientSession
handler :: Key -> ChatRoom -> Snap ()
handler key chatRoom = route [ ("" , root )
, ("api" , apiHandlers key chatRoom)
]
where
root = serveDirectory "static"
main :: IO ()
main = do
key <- getDefaultKey
withChatRoom 200 $ quickHttpServe . handler key
|
0c514a1a336df9619ecf6cdf433b6d30c21146e0f5cbf68541e82333737f2929 | rescript-association/genType | syntaxerr.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1997 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Auxiliary type for reporting syntax errors *)
type error =
Unclosed of Location.t * string * Location.t * string
| Expecting of Location.t * string
| Not_expecting of Location.t * string
| Applicative_path of Location.t
| Variable_in_scope of Location.t * string
| Other of Location.t
| Ill_formed_ast of Location.t * string
| Invalid_package_type of Location.t * string
exception Error of error
exception Escape_error
let prepare_error = function
| Unclosed(opening_loc, opening, closing_loc, closing) ->
Location.errorf ~loc:closing_loc
~sub:[
Location.errorf ~loc:opening_loc
"This '%s' might be unmatched" opening
]
~if_highlight:
(Printf.sprintf "Syntax error: '%s' expected, \
the highlighted '%s' might be unmatched"
closing opening)
"Syntax error: '%s' expected" closing
| Expecting (loc, nonterm) ->
Location.errorf ~loc "Syntax error: %s expected." nonterm
| Not_expecting (loc, nonterm) ->
Location.errorf ~loc "Syntax error: %s not expected." nonterm
| Applicative_path loc ->
Location.errorf ~loc
"Syntax error: applicative paths of the form F(X).t \
are not supported when the option -no-app-func is set."
| Variable_in_scope (loc, var) ->
Location.errorf ~loc
"In this scoped type, variable '%s \
is reserved for the local type %s."
var var
| Other loc ->
Location.errorf ~loc "Syntax error"
| Ill_formed_ast (loc, s) ->
Location.errorf ~loc "broken invariant in parsetree: %s" s
| Invalid_package_type (loc, s) ->
Location.errorf ~loc "invalid package type: %s" s
let () =
Location.register_error_of_exn
(function
| Error err -> Some (prepare_error err)
| _ -> None
)
let report_error ppf err =
Location.report_error ppf (prepare_error err)
let location_of_error = function
| Unclosed(l,_,_,_)
| Applicative_path l
| Variable_in_scope(l,_)
| Other l
| Not_expecting (l, _)
| Ill_formed_ast (l, _)
| Invalid_package_type (l, _)
| Expecting (l, _) -> l
let ill_formed_ast loc s =
raise (Error (Ill_formed_ast (loc, s)))
| null | https://raw.githubusercontent.com/rescript-association/genType/c44251e969fb10d27a38d2bdeff6a5f4d778594f/src/compiler-libs-406/syntaxerr.ml | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Auxiliary type for reporting syntax errors | , projet Cristal , INRIA Rocquencourt
Copyright 1997 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
type error =
Unclosed of Location.t * string * Location.t * string
| Expecting of Location.t * string
| Not_expecting of Location.t * string
| Applicative_path of Location.t
| Variable_in_scope of Location.t * string
| Other of Location.t
| Ill_formed_ast of Location.t * string
| Invalid_package_type of Location.t * string
exception Error of error
exception Escape_error
let prepare_error = function
| Unclosed(opening_loc, opening, closing_loc, closing) ->
Location.errorf ~loc:closing_loc
~sub:[
Location.errorf ~loc:opening_loc
"This '%s' might be unmatched" opening
]
~if_highlight:
(Printf.sprintf "Syntax error: '%s' expected, \
the highlighted '%s' might be unmatched"
closing opening)
"Syntax error: '%s' expected" closing
| Expecting (loc, nonterm) ->
Location.errorf ~loc "Syntax error: %s expected." nonterm
| Not_expecting (loc, nonterm) ->
Location.errorf ~loc "Syntax error: %s not expected." nonterm
| Applicative_path loc ->
Location.errorf ~loc
"Syntax error: applicative paths of the form F(X).t \
are not supported when the option -no-app-func is set."
| Variable_in_scope (loc, var) ->
Location.errorf ~loc
"In this scoped type, variable '%s \
is reserved for the local type %s."
var var
| Other loc ->
Location.errorf ~loc "Syntax error"
| Ill_formed_ast (loc, s) ->
Location.errorf ~loc "broken invariant in parsetree: %s" s
| Invalid_package_type (loc, s) ->
Location.errorf ~loc "invalid package type: %s" s
let () =
Location.register_error_of_exn
(function
| Error err -> Some (prepare_error err)
| _ -> None
)
let report_error ppf err =
Location.report_error ppf (prepare_error err)
let location_of_error = function
| Unclosed(l,_,_,_)
| Applicative_path l
| Variable_in_scope(l,_)
| Other l
| Not_expecting (l, _)
| Ill_formed_ast (l, _)
| Invalid_package_type (l, _)
| Expecting (l, _) -> l
let ill_formed_ast loc s =
raise (Error (Ill_formed_ast (loc, s)))
|
e41edccb6ce9629987c9477cf8826d926d4c507e734c7d39a994f7424d657357 | HugoPeters1024/hs-sleuth | Shift.hs | # LANGUAGE MagicHash #
-- |
-- Module : Data.Text.Internal.Unsafe.Shift
Copyright : ( c ) 2009
--
-- License : BSD-style
-- Maintainer :
-- Stability : experimental
Portability : GHC
--
-- /Warning/: this is an internal module, and does not have a stable
-- API or name. Functions in this module may not check or enforce
-- preconditions expected by public modules. Use at your own risk!
--
-- Fast, unchecked bit shifting functions.
module Data.Text.Internal.Unsafe.Shift
(
UnsafeShift(..)
) where
-- import qualified Data.Bits as Bits
import GHC.Base
import GHC.Word
| This is a workaround for poor optimisation in GHC 6.8.2 . It
-- fails to notice constant-width shifts, and adds a test and branch
to every shift . This imposes about a 10 % performance hit .
--
-- These functions are undefined when the amount being shifted by is
-- greater than the size in bits of a machine Int#.
-- | Unchecked bit shifts.  Instances shift without any range check;
-- behaviour is undefined when the shift amount is not in
-- [0, bit width) for the type.
class UnsafeShift a where
    shiftL :: a -> Int -> a
    shiftR :: a -> Int -> a

instance UnsafeShift Word16 where
    {-# INLINE shiftL #-}
    -- narrow16Word# clamps the full-word result back into 16 bits.
    shiftL (W16# x#) (I# i#) = W16# (narrow16Word# (x# `uncheckedShiftL#` i#))

    {-# INLINE shiftR #-}
    -- Logical (zero-filling) right shift; no narrowing needed.
    shiftR (W16# x#) (I# i#) = W16# (x# `uncheckedShiftRL#` i#)

instance UnsafeShift Word32 where
    {-# INLINE shiftL #-}
    shiftL (W32# x#) (I# i#) = W32# (narrow32Word# (x# `uncheckedShiftL#` i#))

    {-# INLINE shiftR #-}
    shiftR (W32# x#) (I# i#) = W32# (x# `uncheckedShiftRL#` i#)

instance UnsafeShift Word64 where
    {-# INLINE shiftL #-}
    shiftL (W64# x#) (I# i#) = W64# (x# `uncheckedShiftL64#` i#)

    {-# INLINE shiftR #-}
    shiftR (W64# x#) (I# i#) = W64# (x# `uncheckedShiftRL64#` i#)

instance UnsafeShift Int where
    {-# INLINE shiftL #-}
    shiftL (I# x#) (I# i#) = I# (x# `iShiftL#` i#)

    {-# INLINE shiftR #-}
    -- iShiftRA# is an arithmetic shift: the sign bit is replicated.
    shiftR (I# x#) (I# i#) = I# (x# `iShiftRA#` i#)
{-
instance UnsafeShift Integer where
    {-# INLINE shiftL #-}
    shiftL = Bits.shiftL

    {-# INLINE shiftR #-}
    shiftR = Bits.shiftR
-}
| null | https://raw.githubusercontent.com/HugoPeters1024/hs-sleuth/385655e62031959a14a3bac5e9ccd1c42c045f0c/test-project/text-1.2.4.0/Data/Text/Internal/Unsafe/Shift.hs | haskell | |
Module : Data.Text.Internal.Unsafe.Shift
License : BSD-style
Maintainer :
Stability : experimental
/Warning/: this is an internal module, and does not have a stable
API or name. Functions in this module may not check or enforce
preconditions expected by public modules. Use at your own risk!
Fast, unchecked bit shifting functions.
import qualified Data.Bits as Bits
fails to notice constant-width shifts, and adds a test and branch
These functions are undefined when the amount being shifted by is
greater than the size in bits of a machine Int#.
# INLINE shiftL # | # LANGUAGE MagicHash #
Copyright : ( c ) 2009
Portability : GHC
module Data.Text.Internal.Unsafe.Shift
(
UnsafeShift(..)
) where
import GHC.Base
import GHC.Word
| This is a workaround for poor optimisation in GHC 6.8.2 . It
to every shift . This imposes about a 10 % performance hit .
class UnsafeShift a where
shiftL :: a -> Int -> a
shiftR :: a -> Int -> a
instance UnsafeShift Word16 where
# INLINE shiftL #
shiftL (W16# x#) (I# i#) = W16# (narrow16Word# (x# `uncheckedShiftL#` i#))
# INLINE shiftR #
shiftR (W16# x#) (I# i#) = W16# (x# `uncheckedShiftRL#` i#)
instance UnsafeShift Word32 where
# INLINE shiftL #
shiftL (W32# x#) (I# i#) = W32# (narrow32Word# (x# `uncheckedShiftL#` i#))
# INLINE shiftR #
shiftR (W32# x#) (I# i#) = W32# (x# `uncheckedShiftRL#` i#)
instance UnsafeShift Word64 where
# INLINE shiftL #
shiftL (W64# x#) (I# i#) = W64# (x# `uncheckedShiftL64#` i#)
# INLINE shiftR #
shiftR (W64# x#) (I# i#) = W64# (x# `uncheckedShiftRL64#` i#)
instance UnsafeShift Int where
# INLINE shiftL #
shiftL (I# x#) (I# i#) = I# (x# `iShiftL#` i#)
# INLINE shiftR #
shiftR (I# x#) (I# i#) = I# (x# `iShiftRA#` i#)
instance UnsafeShift Integer where
{ - # INLINE shiftL #
instance UnsafeShift Integer where
shiftL = Bits.shiftL
# INLINE shiftR #
shiftR = Bits.shiftR
-}
|
4224fa41165b4cc38d432d0a438bfcfcee213dc40b147621216a4dc0a83009f6 | slegrand45/examples_ocsigen | view.ml | open Types
open Js_of_ocaml_tyxml.Tyxml_js
let part_header (r, f) =
Html5.(
header ~a:[a_class ["mdl-layout__header"]] [
div ~a:[a_class ["mdl-layout__header-row"]] [
div ~a:[a_class ["mdl-layout-spacer"]] [] ;
span ~a:[a_class ["mdl-layout-title"]] [
a ~a:[
a_href "#" ;
a_onclick (fun _ -> (Controller.update (Action.Update_lang I18n.En) (r, f)); true)
] [ txt "En" ] ;
txt " | " ;
a ~a:[
a_href "#" ;
a_onclick (fun _ -> (Controller.update (Action.Update_lang I18n.Fr) (r, f)); true)
] [ txt "Fr" ] ;
] ;
]
])
let part_contact (r, _) =
let rl_title = React.S.map (fun (_, cv) -> I18n.translate (CV.lang cv) (I18n.get_msg I18n.Contact_title Model.msg)) r in
let rl_email = React.S.map (fun (_, cv) -> let id = CV.id cv in (ID.email id)) r in
let part_without_href icon (cv, v) =
Html5.(
let v = I18n.translate (CV.lang cv) v in
if v <> "" then
p [ i ~a:[a_class ["material-icons"; "red-text"]] [ txt icon] ;
txt v ; br () ]
else
p [ txt "" ]
)
in
let part_with_href icon prefix v =
Html5.(
if v <> "" then
p [ i ~a:[a_class ["material-icons"; "red-text"]] [ txt icon] ;
a ~a:[a_href (prefix ^ v)] [ txt v ] ; br () ]
else
p [ txt "" ]
)
in
let rl_address = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> let id = CV.id cv in [(cv, ID.address id)]) r) in
let rl_address = ReactiveData.RList.map (part_without_href "place") rl_address in
let rl_phone = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> let id = CV.id cv in [(cv, ID.phone id)]) r) in
let rl_phone = ReactiveData.RList.map (part_without_href "smartphone") rl_phone in
let rl_github = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> let id = CV.id cv in [ID.github id]) r) in
let rl_github = ReactiveData.RList.map (part_with_href "link" "https://") rl_github in
let rl_web = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> let id = CV.id cv in [ID.web id]) r) in
let rl_web = ReactiveData.RList.map (part_with_href "link" "https://") rl_web in
Html5.(
div ~a:[a_class ["block"]] [
h5 [
i ~a:[a_class ["material-icons"]] [ txt "message"] ;
R.Html5.txt rl_title ;
] ;
R.Html5.div rl_address ;
R.Html5.div rl_phone ;
p [
i ~a:[a_class ["material-icons"; "red-text"]] [ txt "email"] ;
a ~a:[R.Html5.a_href (React.S.map (fun email -> "mailto:" ^ email) rl_email)] [
R.Html5.txt rl_email
] ;
br () ;
] ;
R.Html5.div rl_github ;
R.Html5.div rl_web ;
] ;
)
let one_skill (cv, s) =
let width = Printf.sprintf "width: %d%%;" s.Skill.percent in
Html5.(
div [
h6 [ txt (I18n.translate (CV.lang cv) s.Skill.title) ] ;
div ~a:[a_class ["progress"]] [
div ~a:[a_class ["progress-bar"]; a_style width] []
] ;
br () ;
]
)
let part_skill (r, _) =
let rl_title = React.S.map (fun (_, cv) -> I18n.translate (CV.lang cv) (I18n.get_msg I18n.Skill_title Model.msg)) r in
let rl = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> List.map (fun e -> cv, e) (CV.skill cv)) r) in
let rl = ReactiveData.RList.map one_skill rl in
Html5.(
div ~a:[a_class ["block"]] [
h5 [
i ~a:[a_class ["material-icons"]] [ txt "stars"] ;
R.Html5.txt rl_title ;
] ;
R.Html5.div rl
]
)
let one_language (cv, l) =
Html5.(
div [
h6 [ txt (I18n.translate (CV.lang cv) l.Language.title) ] ;
p [ txt (I18n.translate (CV.lang cv) l.Language.description) ] ;
br () ;
]
)
let part_language (r, _) =
let rl_title = React.S.map (fun (_, cv) -> I18n.translate (CV.lang cv) (I18n.get_msg I18n.Language_title Model.msg)) r in
let rl = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> List.map (fun e -> cv, e) (CV.language cv)) r) in
let rl = ReactiveData.RList.map one_language rl in
Html5.(
div ~a:[a_class ["block"]] [
h5 [
i ~a:[a_class ["material-icons"]] [ txt "language"] ;
R.Html5.txt rl_title ;
] ;
R.Html5.div rl
]
)
let one_work (cv, w) =
let f e =
match e with
| None -> ""
| Some v -> I18n.translate (CV.lang cv) v
in
let company = f w.Experience.company in
let location = f w.Experience.location in
let s_company_location =
match company, location with
| "", "" -> ""
| v, "" | "", v -> v
| v1, v2 -> v1 ^ " - " ^ v2
in
let html_company_location =
match s_company_location with
| "" -> []
| _ -> Html5.([
i ~a:[a_class ["material-icons"; "red-text"]] [ txt "place"] ;
txt s_company_location ;
])
in
let s_date = Date.translate_start_end (CV.lang cv)
w.Experience.date_start w.Experience.date_end
in
let html_date =
match s_date with
| "" -> []
| _ -> Html5.([
i ~a:[a_class ["material-icons"; "red-text"]] [ txt "date_range"] ;
txt s_date ;
])
in
Html5.(
div ~a:[a_class ["block"]] [
h5 [ txt (I18n.translate (CV.lang cv) w.Experience.title) ] ;
p (html_company_location @ html_date) ;
p [
txt (I18n.translate (CV.lang cv) w.Experience.description)
]
]
)
let part_work (r, _) =
let rl_title = React.S.map (fun (_, cv) -> I18n.translate (CV.lang cv) (I18n.get_msg I18n.Work_title Model.msg)) r in
let rl = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> List.map (fun e -> cv, e) (CV.experience cv)) r) in
let rl = ReactiveData.RList.map one_work rl in
Html5.([
h3 [
i ~a:[a_class ["material-icons"]] [ txt "group" ] ;
R.Html5.txt rl_title ;
] ;
R.Html5.div rl
])
let one_education (cv, e) =
let f e =
match e with
| None -> ""
| Some v -> I18n.translate (CV.lang cv) v
in
let s_school = f e.Education.school in
let html_school =
match s_school with
| "" -> []
| _ -> Html5.([
i ~a:[a_class ["material-icons"; "red-text"]] [ txt "place"] ;
txt s_school ;
])
in
let s_date = Date.translate_start_end (CV.lang cv)
e.Education.date_start e.Education.date_end
in
let html_date =
match s_date with
| "" -> []
| _ -> Html5.([
i ~a:[a_class ["material-icons"; "red-text"]] [ txt "date_range"] ;
txt s_date ;
])
in
Html5.(
div ~a:[a_class ["block"]] [
h5 [ txt (I18n.translate (CV.lang cv) e.Education.title) ] ;
p (html_school @ html_date) ;
p [
txt (I18n.translate (CV.lang cv) e.Education.description)
]
]
)
let part_education (r, _) =
let rl_title = React.S.map (fun (_, cv) ->
I18n.translate (CV.lang cv) (I18n.get_msg I18n.Education_title Model.msg)) r
in
let rl f =
let l = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) ->
List.map (fun e -> cv, e) (f (CV.education cv))) r)
in
ReactiveData.RList.map one_education l
in
Html5.([
h3 [
i ~a:[a_class ["material-icons"]] [ txt "school" ] ;
R.Html5.txt rl_title ;
] ;
R.Html5.div (rl Education.diploma) ;
])
let one_portfolio_details cv n info (r, f) =
Html5.(
div ~a:[a_class ["mdl-cell"; "mdl-cell--4-col"; "block"]] [
div ~a:[a_class ["mdl-card"; "mdl-shadow--2dp"]] [
div ~a:[a_class ["mdl-card__actions"; "mdl-card--border"]] [
div ~a:[a_class ["mdl-layout-spacer"]] [] ;
i ~a:[a_class ["material-icons"] ;
a_onclick (fun _ -> (Controller.update (Action.Portfolio_summary n) (r, f)); true)]
[ txt "close" ]
] ;
div ~a:[a_class ["mdl-card__supporting-text"]] [
txt (I18n.translate (CV.lang cv) (info.Portfolio.description))
] ;
]
]
)
let one_portfolio_summary cv n info (r, f) =
Html5.(
div ~a:[a_class ["mdl-cell"; "mdl-cell--4-col"; "block"]] [
div ~a:[a_class ["mdl-card"; "mdl-shadow--2dp"]] [
div ~a:[a_class ["mdl-card__media"]] [
img ~a:[a_class ["portfolio-img"]]
~src:(info.Portfolio.image)
~alt:"Portfolio image" ()
] ;
div ~a:[a_class ["mdl-card__actions"; "mdl-card--border"]] [
txt (I18n.translate (CV.lang cv) (info.Portfolio.title)) ;
div ~a:[a_class ["mdl-layout-spacer"]] [] ;
i ~a:[a_class ["material-icons"] ;
a_onclick (fun _ -> (Controller.update (Action.Portfolio_details n) (r, f)); true)]
[ txt "more_vert" ]
] ;
]
]
)
let part_portfolio (r, f) =
let rl_title = React.S.map (fun (_, cv) ->
I18n.translate (CV.lang cv) (I18n.get_msg I18n.Portfolio_title Model.msg)) r
in
let rlp = ReactiveData.RList.from_signal (
React.S.map (fun (page, cv) ->
let portfolio = CV.portfolio cv in
let l = Page.portfolio page in
let f' (i, p) =
let info = List.nth portfolio i in
match p with
| Page.Summary -> one_portfolio_summary cv i info (r, f)
| Page.Details -> one_portfolio_details cv i info (r, f)
in
List.map f' l
) r
) in
Html5.([
h3 [
i ~a:[a_class ["material-icons"]] [ txt "apps" ] ;
R.Html5.txt rl_title ;
] ;
R.Html5.div ~a:[a_class ["mdl-grid"; "portfolio"]] rlp
])
let part_footer () =
Html5.(
footer ~a:[a_class ["mdl-mini-footer"]] [
div ~a:[a_class ["mdl-mini-footer__left-section"]] [
div ~a:[a_class ["mdl-logo"]] [
txt "Credits: " ;
a ~a:[a_href "-who-is-searching-for-a-job/"]
[ txt "Novorésumé - Darth Vader Résumé" ] ;
txt ", " ;
a ~a:[a_href "-responsive-resume/"]
[ txt "Materialize responsive résumé template" ] ;
txt ", " ;
a ~a:[a_href "-1.jpg.html"]
[ txt "Darth Vader photo" ] ;
txt ", " ;
a ~a:[a_href ""]
[ txt "Portfolio images courtesy of Apolonia, stockimages and zole4 at FreeDigitalPhotos.net" ] ;
]
] ;
div ~a:[a_class ["mdl-mini-footer__right-section"]] [
div ~a:[a_class ["mdl-logo"]] []
] ;
]
)
let view (r, f) =
let rl_name = React.S.map (fun (_, cv) ->
let id = CV.id cv in
(I18n.translate (CV.lang cv) (ID.firstname id))
^ " " ^ (I18n.translate (CV.lang cv) (ID.lastname id))) r
in
let rl_title = React.S.map (fun (_, cv) -> I18n.translate (CV.lang cv) (CV.title cv)) r in
let rl_description = React.S.map (fun (_, cv) -> I18n.translate (CV.lang cv) (CV.description cv)) r in
Html5.(
div ~a:[a_class ["mdl-layout"; "mdl-js-layout"; "mdl-layout--fixed-header"]] [
part_header (r, f) ;
div ~a:[a_class ["container"]] [
div ~a:[a_class ["content"]] [
div (* main *) ~a:[a_class ["mdl-layout__content"]] [
div ~a:[a_class ["page-content"]] [
div ~a:[a_class ["mdl-grid"]] [
div ~a:[a_class ["mdl-cell"; "mdl-cell--3-col"; "left-side"]] [
img ~a:[a_class ["circle"; "user-img"; "responsive-img"]]
~src:"img/darth-vader.jpg"
~alt:"Selfie" () ;
div ~a:[a_class ["block"]] [
h5 ~a:[a_class ["center"; "red-text"]] [ R.Html5.txt rl_name ] ;
p ~a:[a_class ["light"]] [ R.Html5.txt rl_title ] ;
] ;
hr ();
div ~a:[a_class ["block"; "center"]] [
h5 [ R.Html5.txt rl_description ] ;
] ;
part_contact (r, f) ;
part_skill (r, f) ;
part_language (r, f) ;
] ;
div ~a:[a_class ["mdl-cell"; "mdl-cell--9-col"; "right-side"]] (
(part_work (r, f)) @ (part_education (r, f)) @ (part_portfolio (r, f))
)
]
]
]
]
] ;
part_footer () ;
])
| null | https://raw.githubusercontent.com/slegrand45/examples_ocsigen/e2f5efe57caf7a644795ac6b14f6d6e04168e4be/jsoo/curriculum-vitae/view.ml | ocaml | main | open Types
open Js_of_ocaml_tyxml.Tyxml_js
let part_header (r, f) =
Html5.(
header ~a:[a_class ["mdl-layout__header"]] [
div ~a:[a_class ["mdl-layout__header-row"]] [
div ~a:[a_class ["mdl-layout-spacer"]] [] ;
span ~a:[a_class ["mdl-layout-title"]] [
a ~a:[
a_href "#" ;
a_onclick (fun _ -> (Controller.update (Action.Update_lang I18n.En) (r, f)); true)
] [ txt "En" ] ;
txt " | " ;
a ~a:[
a_href "#" ;
a_onclick (fun _ -> (Controller.update (Action.Update_lang I18n.Fr) (r, f)); true)
] [ txt "Fr" ] ;
] ;
]
])
let part_contact (r, _) =
let rl_title = React.S.map (fun (_, cv) -> I18n.translate (CV.lang cv) (I18n.get_msg I18n.Contact_title Model.msg)) r in
let rl_email = React.S.map (fun (_, cv) -> let id = CV.id cv in (ID.email id)) r in
let part_without_href icon (cv, v) =
Html5.(
let v = I18n.translate (CV.lang cv) v in
if v <> "" then
p [ i ~a:[a_class ["material-icons"; "red-text"]] [ txt icon] ;
txt v ; br () ]
else
p [ txt "" ]
)
in
let part_with_href icon prefix v =
Html5.(
if v <> "" then
p [ i ~a:[a_class ["material-icons"; "red-text"]] [ txt icon] ;
a ~a:[a_href (prefix ^ v)] [ txt v ] ; br () ]
else
p [ txt "" ]
)
in
let rl_address = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> let id = CV.id cv in [(cv, ID.address id)]) r) in
let rl_address = ReactiveData.RList.map (part_without_href "place") rl_address in
let rl_phone = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> let id = CV.id cv in [(cv, ID.phone id)]) r) in
let rl_phone = ReactiveData.RList.map (part_without_href "smartphone") rl_phone in
let rl_github = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> let id = CV.id cv in [ID.github id]) r) in
let rl_github = ReactiveData.RList.map (part_with_href "link" "https://") rl_github in
let rl_web = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> let id = CV.id cv in [ID.web id]) r) in
let rl_web = ReactiveData.RList.map (part_with_href "link" "https://") rl_web in
Html5.(
div ~a:[a_class ["block"]] [
h5 [
i ~a:[a_class ["material-icons"]] [ txt "message"] ;
R.Html5.txt rl_title ;
] ;
R.Html5.div rl_address ;
R.Html5.div rl_phone ;
p [
i ~a:[a_class ["material-icons"; "red-text"]] [ txt "email"] ;
a ~a:[R.Html5.a_href (React.S.map (fun email -> "mailto:" ^ email) rl_email)] [
R.Html5.txt rl_email
] ;
br () ;
] ;
R.Html5.div rl_github ;
R.Html5.div rl_web ;
] ;
)
let one_skill (cv, s) =
let width = Printf.sprintf "width: %d%%;" s.Skill.percent in
Html5.(
div [
h6 [ txt (I18n.translate (CV.lang cv) s.Skill.title) ] ;
div ~a:[a_class ["progress"]] [
div ~a:[a_class ["progress-bar"]; a_style width] []
] ;
br () ;
]
)
let part_skill (r, _) =
let rl_title = React.S.map (fun (_, cv) -> I18n.translate (CV.lang cv) (I18n.get_msg I18n.Skill_title Model.msg)) r in
let rl = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> List.map (fun e -> cv, e) (CV.skill cv)) r) in
let rl = ReactiveData.RList.map one_skill rl in
Html5.(
div ~a:[a_class ["block"]] [
h5 [
i ~a:[a_class ["material-icons"]] [ txt "stars"] ;
R.Html5.txt rl_title ;
] ;
R.Html5.div rl
]
)
let one_language (cv, l) =
Html5.(
div [
h6 [ txt (I18n.translate (CV.lang cv) l.Language.title) ] ;
p [ txt (I18n.translate (CV.lang cv) l.Language.description) ] ;
br () ;
]
)
let part_language (r, _) =
let rl_title = React.S.map (fun (_, cv) -> I18n.translate (CV.lang cv) (I18n.get_msg I18n.Language_title Model.msg)) r in
let rl = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> List.map (fun e -> cv, e) (CV.language cv)) r) in
let rl = ReactiveData.RList.map one_language rl in
Html5.(
div ~a:[a_class ["block"]] [
h5 [
i ~a:[a_class ["material-icons"]] [ txt "language"] ;
R.Html5.txt rl_title ;
] ;
R.Html5.div rl
]
)
let one_work (cv, w) =
let f e =
match e with
| None -> ""
| Some v -> I18n.translate (CV.lang cv) v
in
let company = f w.Experience.company in
let location = f w.Experience.location in
let s_company_location =
match company, location with
| "", "" -> ""
| v, "" | "", v -> v
| v1, v2 -> v1 ^ " - " ^ v2
in
let html_company_location =
match s_company_location with
| "" -> []
| _ -> Html5.([
i ~a:[a_class ["material-icons"; "red-text"]] [ txt "place"] ;
txt s_company_location ;
])
in
let s_date = Date.translate_start_end (CV.lang cv)
w.Experience.date_start w.Experience.date_end
in
let html_date =
match s_date with
| "" -> []
| _ -> Html5.([
i ~a:[a_class ["material-icons"; "red-text"]] [ txt "date_range"] ;
txt s_date ;
])
in
Html5.(
div ~a:[a_class ["block"]] [
h5 [ txt (I18n.translate (CV.lang cv) w.Experience.title) ] ;
p (html_company_location @ html_date) ;
p [
txt (I18n.translate (CV.lang cv) w.Experience.description)
]
]
)
let part_work (r, _) =
let rl_title = React.S.map (fun (_, cv) -> I18n.translate (CV.lang cv) (I18n.get_msg I18n.Work_title Model.msg)) r in
let rl = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) -> List.map (fun e -> cv, e) (CV.experience cv)) r) in
let rl = ReactiveData.RList.map one_work rl in
Html5.([
h3 [
i ~a:[a_class ["material-icons"]] [ txt "group" ] ;
R.Html5.txt rl_title ;
] ;
R.Html5.div rl
])
let one_education (cv, e) =
let f e =
match e with
| None -> ""
| Some v -> I18n.translate (CV.lang cv) v
in
let s_school = f e.Education.school in
let html_school =
match s_school with
| "" -> []
| _ -> Html5.([
i ~a:[a_class ["material-icons"; "red-text"]] [ txt "place"] ;
txt s_school ;
])
in
let s_date = Date.translate_start_end (CV.lang cv)
e.Education.date_start e.Education.date_end
in
let html_date =
match s_date with
| "" -> []
| _ -> Html5.([
i ~a:[a_class ["material-icons"; "red-text"]] [ txt "date_range"] ;
txt s_date ;
])
in
Html5.(
div ~a:[a_class ["block"]] [
h5 [ txt (I18n.translate (CV.lang cv) e.Education.title) ] ;
p (html_school @ html_date) ;
p [
txt (I18n.translate (CV.lang cv) e.Education.description)
]
]
)
let part_education (r, _) =
let rl_title = React.S.map (fun (_, cv) ->
I18n.translate (CV.lang cv) (I18n.get_msg I18n.Education_title Model.msg)) r
in
let rl f =
let l = ReactiveData.RList.from_signal (React.S.map (fun (_, cv) ->
List.map (fun e -> cv, e) (f (CV.education cv))) r)
in
ReactiveData.RList.map one_education l
in
Html5.([
h3 [
i ~a:[a_class ["material-icons"]] [ txt "school" ] ;
R.Html5.txt rl_title ;
] ;
R.Html5.div (rl Education.diploma) ;
])
let one_portfolio_details cv n info (r, f) =
Html5.(
div ~a:[a_class ["mdl-cell"; "mdl-cell--4-col"; "block"]] [
div ~a:[a_class ["mdl-card"; "mdl-shadow--2dp"]] [
div ~a:[a_class ["mdl-card__actions"; "mdl-card--border"]] [
div ~a:[a_class ["mdl-layout-spacer"]] [] ;
i ~a:[a_class ["material-icons"] ;
a_onclick (fun _ -> (Controller.update (Action.Portfolio_summary n) (r, f)); true)]
[ txt "close" ]
] ;
div ~a:[a_class ["mdl-card__supporting-text"]] [
txt (I18n.translate (CV.lang cv) (info.Portfolio.description))
] ;
]
]
)
let one_portfolio_summary cv n info (r, f) =
Html5.(
div ~a:[a_class ["mdl-cell"; "mdl-cell--4-col"; "block"]] [
div ~a:[a_class ["mdl-card"; "mdl-shadow--2dp"]] [
div ~a:[a_class ["mdl-card__media"]] [
img ~a:[a_class ["portfolio-img"]]
~src:(info.Portfolio.image)
~alt:"Portfolio image" ()
] ;
div ~a:[a_class ["mdl-card__actions"; "mdl-card--border"]] [
txt (I18n.translate (CV.lang cv) (info.Portfolio.title)) ;
div ~a:[a_class ["mdl-layout-spacer"]] [] ;
i ~a:[a_class ["material-icons"] ;
a_onclick (fun _ -> (Controller.update (Action.Portfolio_details n) (r, f)); true)]
[ txt "more_vert" ]
] ;
]
]
)
let part_portfolio (r, f) =
let rl_title = React.S.map (fun (_, cv) ->
I18n.translate (CV.lang cv) (I18n.get_msg I18n.Portfolio_title Model.msg)) r
in
let rlp = ReactiveData.RList.from_signal (
React.S.map (fun (page, cv) ->
let portfolio = CV.portfolio cv in
let l = Page.portfolio page in
let f' (i, p) =
let info = List.nth portfolio i in
match p with
| Page.Summary -> one_portfolio_summary cv i info (r, f)
| Page.Details -> one_portfolio_details cv i info (r, f)
in
List.map f' l
) r
) in
Html5.([
h3 [
i ~a:[a_class ["material-icons"]] [ txt "apps" ] ;
R.Html5.txt rl_title ;
] ;
R.Html5.div ~a:[a_class ["mdl-grid"; "portfolio"]] rlp
])
let part_footer () =
Html5.(
footer ~a:[a_class ["mdl-mini-footer"]] [
div ~a:[a_class ["mdl-mini-footer__left-section"]] [
div ~a:[a_class ["mdl-logo"]] [
txt "Credits: " ;
a ~a:[a_href "-who-is-searching-for-a-job/"]
[ txt "Novorésumé - Darth Vader Résumé" ] ;
txt ", " ;
a ~a:[a_href "-responsive-resume/"]
[ txt "Materialize responsive résumé template" ] ;
txt ", " ;
a ~a:[a_href "-1.jpg.html"]
[ txt "Darth Vader photo" ] ;
txt ", " ;
a ~a:[a_href ""]
[ txt "Portfolio images courtesy of Apolonia, stockimages and zole4 at FreeDigitalPhotos.net" ] ;
]
] ;
div ~a:[a_class ["mdl-mini-footer__right-section"]] [
div ~a:[a_class ["mdl-logo"]] []
] ;
]
)
let view (r, f) =
let rl_name = React.S.map (fun (_, cv) ->
let id = CV.id cv in
(I18n.translate (CV.lang cv) (ID.firstname id))
^ " " ^ (I18n.translate (CV.lang cv) (ID.lastname id))) r
in
let rl_title = React.S.map (fun (_, cv) -> I18n.translate (CV.lang cv) (CV.title cv)) r in
let rl_description = React.S.map (fun (_, cv) -> I18n.translate (CV.lang cv) (CV.description cv)) r in
Html5.(
div ~a:[a_class ["mdl-layout"; "mdl-js-layout"; "mdl-layout--fixed-header"]] [
part_header (r, f) ;
div ~a:[a_class ["container"]] [
div ~a:[a_class ["content"]] [
div ~a:[a_class ["page-content"]] [
div ~a:[a_class ["mdl-grid"]] [
div ~a:[a_class ["mdl-cell"; "mdl-cell--3-col"; "left-side"]] [
img ~a:[a_class ["circle"; "user-img"; "responsive-img"]]
~src:"img/darth-vader.jpg"
~alt:"Selfie" () ;
div ~a:[a_class ["block"]] [
h5 ~a:[a_class ["center"; "red-text"]] [ R.Html5.txt rl_name ] ;
p ~a:[a_class ["light"]] [ R.Html5.txt rl_title ] ;
] ;
hr ();
div ~a:[a_class ["block"; "center"]] [
h5 [ R.Html5.txt rl_description ] ;
] ;
part_contact (r, f) ;
part_skill (r, f) ;
part_language (r, f) ;
] ;
div ~a:[a_class ["mdl-cell"; "mdl-cell--9-col"; "right-side"]] (
(part_work (r, f)) @ (part_education (r, f)) @ (part_portfolio (r, f))
)
]
]
]
]
] ;
part_footer () ;
])
|
92dc52bcc5f3defce8424602106e5e24d0c4cb041b77e73a56c57fe4e187dd3b | racket/math | test-utils.rkt | #lang typed/racket
(require (except-in typed/rackunit check-equal?))
(provide (all-from-out typed/rackunit)
check-equal?)
;; This gets around the fact that typed/rackunit can no longer test higher-order values for equality,
since TR has firmed up its rules on passing ` Any ' types in and out of untyped code
(define-syntax-rule (check-equal? a b . message)
(check-true (equal? a b) . message))
| null | https://raw.githubusercontent.com/racket/math/dcd2ea1893dc5b45b26c8312997917a15fcd1c4a/math-test/math/tests/test-utils.rkt | racket | This gets around the fact that typed/rackunit can no longer test higher-order values for equality, | #lang typed/racket
(require (except-in typed/rackunit check-equal?))
(provide (all-from-out typed/rackunit)
check-equal?)
since TR has firmed up its rules on passing ` Any ' types in and out of untyped code
(define-syntax-rule (check-equal? a b . message)
(check-true (equal? a b) . message))
|
91582269a4bfa8678bc0e073622df362120c459ec8110c8a29d60fb0533c4152 | gfngfn/toy-macro-ml | main.ml |
open Syntax
let main fname =
let inc = open_in fname in
let lexbuf = Lexing.from_channel inc in
let utast = ParserInterface.process lexbuf in
let (tyenv, env) = Primitives.initial_type_environment in
let (ty, eve) = Typechecker.main tyenv utast in
Format.printf "Type: %a\n" RichPrinting.pp_mono_type ty;
let v = Evaluator.main env eve in
Format.printf "Result0: %a\n" RichPrinting.pp_ev_value_0_single v;
()
let () =
try
Arg.parse [] main ""
with
| ParserInterface.Error(rng) ->
Format.printf "%a: syntax error\n" Range.pp rng
| UnidentifiedToken(rng, s) ->
Format.printf "%a: unidentified token\n" Range.pp rng
| SeeEndOfFileInComment(rng) ->
Format.printf "%a: unclosed comment\n" Range.pp rng
| UnknownBaseType(rng, s) ->
Format.printf "%a: unknown base type %s\n" Range.pp rng s
| Typechecker.UnboundVariable(rng, x) ->
Format.printf "%a: unbound variable '%s'\n" Range.pp rng x
| Typechecker.NotAFunction(rng, ty) ->
Format.printf "%a: not a function; it is of type %a\n"
Range.pp rng
pp_mono_type ty
| Typechecker.InvalidOccurrenceAsToStage(rng, x, stg, stgreq) ->
Format.printf "%a: variable '%s' occurs at %a but is expected to occur at %a\n"
Range.pp rng x pp_stage stg pp_stage stgreq
| Typechecker.InvalidMacroOccurrence(rng, x) ->
Format.printf "%a: variable '%s' is bound to a macro\n"
Range.pp rng x
| Typechecker.NotAMacro(rng, x) ->
Format.printf "%a: variable '%s' is not a macro\n"
Range.pp rng x
| Typechecker.InvalidMacroApplication(rng, x) ->
Format.printf "%a: imvalid macro application of variable '%s'"
Range.pp rng x
| Typechecker.MacroArgContradiction(_, macparamty, macarg) ->
let (rng, sarg) =
match macarg with
| EarlyArg((rng, _)) -> (rng, "an early argument")
| LateArg((rng, _)) -> (rng, "a late argument")
| BindingArg((_, (rng, _))) -> (rng, "a binder/bindee argument")
in
let pp_req ppf = function
| EarlyParamType(ty) ->
Format.fprintf ppf "an early argument '~ ...' of type %a"
pp_mono_type ty
| LateParamType(ty) ->
Format.fprintf ppf "a late argument of type %a"
pp_mono_type ty
| BindingParamType(ty1, ty2) ->
Format.fprintf ppf "a binder/bindee argument where binder is of type %a and bindee is of type %a"
pp_mono_type ty1
pp_mono_type ty2
in
Format.printf "%a: %s is given, but the macro expects %a\n"
Range.pp rng
sarg
pp_req macparamty
| Typechecker.InvalidNumberOfMacroArgs(rng, n, nreq) ->
Format.printf "%a: the macro requires %d argument(s), but here is applied to %d argument(s)\n"
Range.pp rng n nreq
| Typechecker.InvalidPrev(rng) ->
Format.printf "%a: '~ ...' occurs at stage 0\n"
Range.pp rng
| Typechecker.InvalidNext(rng) ->
Format.printf "%a: '@@ ...' occurs at stage 1\n"
Range.pp rng
| Typechecker.InvalidLetMacro(rng) ->
Format.printf "%a: 'letmac ... = ... in ...' occurs at stage 0\n"
Range.pp rng
| Typechecker.ContradictionError(ty1, ty2) ->
let (rng1, _) = ty1 in
let (rng2, _) = ty2 in
let (rng, ty, tyreq, rngreqopt) =
if Range.is_dummy rng1 then
(rng2, ty2, ty1, None)
else
if Range.is_dummy rng2 then
(rng1, ty1, ty2, None)
else
(rng1, ty1, ty2, Some(rng2))
in
begin
match rngreqopt with
| None ->
Format.printf "%a: this expression has type %a but is expected of type %a\n"
Range.pp rng pp_mono_type ty pp_mono_type tyreq
| Some(rngreq) ->
Format.printf "%a: this expression has type %a but is expected of type %a; this constraint is required by %a\n"
Range.pp rng pp_mono_type ty pp_mono_type tyreq Range.pp rngreq
end
| Typechecker.NotACode(rng, ty) ->
Format.printf "%a: this expression is expected of some code type but has type %a\n"
Range.pp rng pp_mono_type ty
| Typechecker.ShouldBeBound(rng, x, x1, ty1) ->
Format.printf "%a: in order to use variable '%s', variable '%s' should be bound to a value of type %a here\n"
Range.pp rng x x1 pp_mono_type ty1
| null | https://raw.githubusercontent.com/gfngfn/toy-macro-ml/7a5eecfc53691adbd91ceba78f8dac3ef8790054/src/main.ml | ocaml |
open Syntax
let main fname =
let inc = open_in fname in
let lexbuf = Lexing.from_channel inc in
let utast = ParserInterface.process lexbuf in
let (tyenv, env) = Primitives.initial_type_environment in
let (ty, eve) = Typechecker.main tyenv utast in
Format.printf "Type: %a\n" RichPrinting.pp_mono_type ty;
let v = Evaluator.main env eve in
Format.printf "Result0: %a\n" RichPrinting.pp_ev_value_0_single v;
()
let () =
try
Arg.parse [] main ""
with
| ParserInterface.Error(rng) ->
Format.printf "%a: syntax error\n" Range.pp rng
| UnidentifiedToken(rng, s) ->
Format.printf "%a: unidentified token\n" Range.pp rng
| SeeEndOfFileInComment(rng) ->
Format.printf "%a: unclosed comment\n" Range.pp rng
| UnknownBaseType(rng, s) ->
Format.printf "%a: unknown base type %s\n" Range.pp rng s
| Typechecker.UnboundVariable(rng, x) ->
Format.printf "%a: unbound variable '%s'\n" Range.pp rng x
| Typechecker.NotAFunction(rng, ty) ->
Format.printf "%a: not a function; it is of type %a\n"
Range.pp rng
pp_mono_type ty
| Typechecker.InvalidOccurrenceAsToStage(rng, x, stg, stgreq) ->
Format.printf "%a: variable '%s' occurs at %a but is expected to occur at %a\n"
Range.pp rng x pp_stage stg pp_stage stgreq
| Typechecker.InvalidMacroOccurrence(rng, x) ->
Format.printf "%a: variable '%s' is bound to a macro\n"
Range.pp rng x
| Typechecker.NotAMacro(rng, x) ->
Format.printf "%a: variable '%s' is not a macro\n"
Range.pp rng x
| Typechecker.InvalidMacroApplication(rng, x) ->
Format.printf "%a: imvalid macro application of variable '%s'"
Range.pp rng x
| Typechecker.MacroArgContradiction(_, macparamty, macarg) ->
let (rng, sarg) =
match macarg with
| EarlyArg((rng, _)) -> (rng, "an early argument")
| LateArg((rng, _)) -> (rng, "a late argument")
| BindingArg((_, (rng, _))) -> (rng, "a binder/bindee argument")
in
let pp_req ppf = function
| EarlyParamType(ty) ->
Format.fprintf ppf "an early argument '~ ...' of type %a"
pp_mono_type ty
| LateParamType(ty) ->
Format.fprintf ppf "a late argument of type %a"
pp_mono_type ty
| BindingParamType(ty1, ty2) ->
Format.fprintf ppf "a binder/bindee argument where binder is of type %a and bindee is of type %a"
pp_mono_type ty1
pp_mono_type ty2
in
Format.printf "%a: %s is given, but the macro expects %a\n"
Range.pp rng
sarg
pp_req macparamty
| Typechecker.InvalidNumberOfMacroArgs(rng, n, nreq) ->
Format.printf "%a: the macro requires %d argument(s), but here is applied to %d argument(s)\n"
Range.pp rng n nreq
| Typechecker.InvalidPrev(rng) ->
Format.printf "%a: '~ ...' occurs at stage 0\n"
Range.pp rng
| Typechecker.InvalidNext(rng) ->
Format.printf "%a: '@@ ...' occurs at stage 1\n"
Range.pp rng
| Typechecker.InvalidLetMacro(rng) ->
Format.printf "%a: 'letmac ... = ... in ...' occurs at stage 0\n"
Range.pp rng
| Typechecker.ContradictionError(ty1, ty2) ->
let (rng1, _) = ty1 in
let (rng2, _) = ty2 in
let (rng, ty, tyreq, rngreqopt) =
if Range.is_dummy rng1 then
(rng2, ty2, ty1, None)
else
if Range.is_dummy rng2 then
(rng1, ty1, ty2, None)
else
(rng1, ty1, ty2, Some(rng2))
in
begin
match rngreqopt with
| None ->
Format.printf "%a: this expression has type %a but is expected of type %a\n"
Range.pp rng pp_mono_type ty pp_mono_type tyreq
| Some(rngreq) ->
Format.printf "%a: this expression has type %a but is expected of type %a; this constraint is required by %a\n"
Range.pp rng pp_mono_type ty pp_mono_type tyreq Range.pp rngreq
end
| Typechecker.NotACode(rng, ty) ->
Format.printf "%a: this expression is expected of some code type but has type %a\n"
Range.pp rng pp_mono_type ty
| Typechecker.ShouldBeBound(rng, x, x1, ty1) ->
Format.printf "%a: in order to use variable '%s', variable '%s' should be bound to a value of type %a here\n"
Range.pp rng x x1 pp_mono_type ty1
| |
06405a2168eb9b7055e05d7ae507d57c5446e7573236588da6134b9e9be832fd | brendanhay/amazonka | DeleteCertificate.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
{-# LANGUAGE StrictData #-}
# LANGUAGE TypeFamilies #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
-- |
Module : Amazonka . Transfer . DeleteCertificate
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
--
-- Deletes the certificate that\'s specified in the @CertificateId@
-- parameter.
module Amazonka.Transfer.DeleteCertificate
( -- * Creating a Request
DeleteCertificate (..),
newDeleteCertificate,
-- * Request Lenses
deleteCertificate_certificateId,
-- * Destructuring the Response
DeleteCertificateResponse (..),
newDeleteCertificateResponse,
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import qualified Amazonka.Request as Request
import qualified Amazonka.Response as Response
import Amazonka.Transfer.Types
-- | /See:/ 'newDeleteCertificate' smart constructor.
data DeleteCertificate = DeleteCertificate'
{ -- | The identifier of the certificate object that you are deleting.
certificateId :: Prelude.Text
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
-- |
Create a value of ' DeleteCertificate ' with all optional fields omitted .
--
Use < -lens generic - lens > or < optics > to modify other optional fields .
--
-- The following record fields are available, with the corresponding lenses provided
-- for backwards compatibility:
--
-- 'certificateId', 'deleteCertificate_certificateId' - The identifier of the certificate object that you are deleting.
newDeleteCertificate ::
-- | 'certificateId'
Prelude.Text ->
DeleteCertificate
newDeleteCertificate pCertificateId_ =
DeleteCertificate' {certificateId = pCertificateId_}
-- | The identifier of the certificate object that you are deleting.
deleteCertificate_certificateId :: Lens.Lens' DeleteCertificate Prelude.Text
deleteCertificate_certificateId = Lens.lens (\DeleteCertificate' {certificateId} -> certificateId) (\s@DeleteCertificate' {} a -> s {certificateId = a} :: DeleteCertificate)
instance Core.AWSRequest DeleteCertificate where
type
AWSResponse DeleteCertificate =
DeleteCertificateResponse
request overrides =
Request.postJSON (overrides defaultService)
response =
Response.receiveNull DeleteCertificateResponse'
instance Prelude.Hashable DeleteCertificate where
hashWithSalt _salt DeleteCertificate' {..} =
_salt `Prelude.hashWithSalt` certificateId
instance Prelude.NFData DeleteCertificate where
rnf DeleteCertificate' {..} =
Prelude.rnf certificateId
instance Data.ToHeaders DeleteCertificate where
toHeaders =
Prelude.const
( Prelude.mconcat
[ "X-Amz-Target"
Data.=# ( "TransferService.DeleteCertificate" ::
Prelude.ByteString
),
"Content-Type"
Data.=# ( "application/x-amz-json-1.1" ::
Prelude.ByteString
)
]
)
instance Data.ToJSON DeleteCertificate where
toJSON DeleteCertificate' {..} =
Data.object
( Prelude.catMaybes
[ Prelude.Just
("CertificateId" Data..= certificateId)
]
)
instance Data.ToPath DeleteCertificate where
toPath = Prelude.const "/"
instance Data.ToQuery DeleteCertificate where
toQuery = Prelude.const Prelude.mempty
-- | /See:/ 'newDeleteCertificateResponse' smart constructor.
data DeleteCertificateResponse = DeleteCertificateResponse'
{
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
-- |
-- Create a value of 'DeleteCertificateResponse' with all optional fields omitted.
--
Use < -lens generic - lens > or < optics > to modify other optional fields .
newDeleteCertificateResponse ::
DeleteCertificateResponse
newDeleteCertificateResponse =
DeleteCertificateResponse'
instance Prelude.NFData DeleteCertificateResponse where
rnf _ = ()
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-transfer/gen/Amazonka/Transfer/DeleteCertificate.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Stability : auto-generated
Deletes the certificate that\'s specified in the @CertificateId@
parameter.
* Creating a Request
* Request Lenses
* Destructuring the Response
| /See:/ 'newDeleteCertificate' smart constructor.
| The identifier of the certificate object that you are deleting.
|
The following record fields are available, with the corresponding lenses provided
for backwards compatibility:
'certificateId', 'deleteCertificate_certificateId' - The identifier of the certificate object that you are deleting.
| 'certificateId'
| The identifier of the certificate object that you are deleting.
| /See:/ 'newDeleteCertificateResponse' smart constructor.
|
Create a value of 'DeleteCertificateResponse' with all optional fields omitted.
| # LANGUAGE DeriveGeneric #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeFamilies #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - binds #
# OPTIONS_GHC -fno - warn - unused - imports #
# OPTIONS_GHC -fno - warn - unused - matches #
Derived from AWS service descriptions , licensed under Apache 2.0 .
Module : Amazonka . Transfer . DeleteCertificate
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Amazonka.Transfer.DeleteCertificate
DeleteCertificate (..),
newDeleteCertificate,
deleteCertificate_certificateId,
DeleteCertificateResponse (..),
newDeleteCertificateResponse,
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Core.Lens.Internal as Lens
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
import qualified Amazonka.Request as Request
import qualified Amazonka.Response as Response
import Amazonka.Transfer.Types
data DeleteCertificate = DeleteCertificate'
certificateId :: Prelude.Text
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
Create a value of ' DeleteCertificate ' with all optional fields omitted .
Use < -lens generic - lens > or < optics > to modify other optional fields .
newDeleteCertificate ::
Prelude.Text ->
DeleteCertificate
newDeleteCertificate pCertificateId_ =
DeleteCertificate' {certificateId = pCertificateId_}
deleteCertificate_certificateId :: Lens.Lens' DeleteCertificate Prelude.Text
deleteCertificate_certificateId = Lens.lens (\DeleteCertificate' {certificateId} -> certificateId) (\s@DeleteCertificate' {} a -> s {certificateId = a} :: DeleteCertificate)
instance Core.AWSRequest DeleteCertificate where
type
AWSResponse DeleteCertificate =
DeleteCertificateResponse
request overrides =
Request.postJSON (overrides defaultService)
response =
Response.receiveNull DeleteCertificateResponse'
instance Prelude.Hashable DeleteCertificate where
hashWithSalt _salt DeleteCertificate' {..} =
_salt `Prelude.hashWithSalt` certificateId
instance Prelude.NFData DeleteCertificate where
rnf DeleteCertificate' {..} =
Prelude.rnf certificateId
instance Data.ToHeaders DeleteCertificate where
toHeaders =
Prelude.const
( Prelude.mconcat
[ "X-Amz-Target"
Data.=# ( "TransferService.DeleteCertificate" ::
Prelude.ByteString
),
"Content-Type"
Data.=# ( "application/x-amz-json-1.1" ::
Prelude.ByteString
)
]
)
instance Data.ToJSON DeleteCertificate where
toJSON DeleteCertificate' {..} =
Data.object
( Prelude.catMaybes
[ Prelude.Just
("CertificateId" Data..= certificateId)
]
)
instance Data.ToPath DeleteCertificate where
toPath = Prelude.const "/"
instance Data.ToQuery DeleteCertificate where
toQuery = Prelude.const Prelude.mempty
data DeleteCertificateResponse = DeleteCertificateResponse'
{
}
deriving (Prelude.Eq, Prelude.Read, Prelude.Show, Prelude.Generic)
Use < -lens generic - lens > or < optics > to modify other optional fields .
newDeleteCertificateResponse ::
DeleteCertificateResponse
newDeleteCertificateResponse =
DeleteCertificateResponse'
instance Prelude.NFData DeleteCertificateResponse where
rnf _ = ()
|
9467006ea05c99081ff3b6f9334d96faf7121f8d5aadebfcc4b5ea7920a34cbd | qoocku/erleos | erleos_sensor_srv_tests.erl | %%% ==========================================================================
@author < > < email >
@since 2011 - 02 - 24
%%% @doc TODO: Add description to erleos_usir_srv_tests
%%% @end
%%% ==========================================================================
-module (erleos_sensor_srv_tests).
-author ("Damian T. Dobroczy\\'nski <> <email>").
-compile (export_all).
-include_lib ("eunit/include/eunit.hrl").
-include ("erleos/include/proto/sensor.hrl").
-include ("erleos/include/proto/usir.hrl").
-include ("erleos/include/proto/adis.hrl").
-record (ctx, {can_ctx, this, router, queues, type}).
ir_setup () ->
create_ctx_for(ir).
us_setup () ->
create_ctx_for(us).
adis_setup () ->
create_ctx_for({adis, undefined}).
usir_setup () ->
create_ctx_for(usir).
tear_down (#ctx{can_ctx = Ctx, this = Ss, type = {adis, undefined}}) ->
[ok = erleos_utils:sync_stop(S) || S <- Ss],
'CAN_msg_router_srv_tests':tear_down1(Ctx);
tear_down (#ctx{can_ctx = Ctx, this = S, type = usir}) ->
{S1, S2} = S,
erleos_utils:sync_stop(S1),
erleos_utils:sync_stop(S2),
'CAN_msg_router_srv_tests':tear_down1(Ctx);
tear_down (#ctx{can_ctx = Ctx, this = S}) ->
erleos_utils:sync_stop(S),
'CAN_msg_router_srv_tests':tear_down1(Ctx).
'(ir) reading messages'(Ctx) ->
test_reading_messages(Ctx).
'(us) reading messages'(Ctx) ->
test_reading_messages(Ctx).
'(usir) reading messages'(#ctx{this = {S1, S2}, router = R, queues = [Qs1, Qs2], type = usir}) ->
P1 = generate_can_messages(us, 16, R, Qs1),
P2 = generate_can_messages(ir, 16, R, Qs2),
test_mqueue_output(ir, Qs1, P1),
test_mqueue_output(us, Qs2, P2),
test_reading(us, S1, P1),
test_reading(ir, S2, P2).
'(adis) reading messages'(Ctx) ->
test_reading_messages(Ctx).
ir_test_ () ->
tests_runner:run(fun ir_setup/0, fun tear_down/1, "(ir)", ?MODULE).
us_test_ () ->
tests_runner:run(fun us_setup/0, fun tear_down/1, "(us)", ?MODULE).
usir_test_ () ->
tests_runner:run(fun usir_setup/0, fun tear_down/1, "(usir)", ?MODULE).
adis_test_ () ->
tests_runner:run(fun adis_setup/0, fun tear_down/1, "(adis)", ?MODULE).
create_ctx_for (Type) ->
we need a mocked CAN driver with CAN msgs router
Ctx = 'CAN_msg_router_srv_tests':setup1(),
% read erleos app config
{ok, [Cfg]} = file:consult(filename:join([filename:dirname(code:which(?MODULE)), "..", "priv", "erleos.config"])),
register(Ds = list_to_atom(lists:foldl(fun proplists:get_value/2, Cfg, [erleos, case Type of
{adis, _} -> adis_can_router;
Other -> usir_can_router
end, candev])),
element(2, Ctx)),
ok = lists:foldl(fun ({Key, Val}, ok) ->
application:set_env(erleos, Key, Val)
end, ok, ErlCfg = proplists:get_value(erleos, Cfg)),
and USIR sensor server(s )
{ok, S} = case Type of
usir ->
Ss = [begin
{ok, S} = erleos_utils:start(erleos_sensor_srv, [{args, [{type, T}, {cmod,
[{mod, list_to_atom("erleos_" ++ T ++ "_sensor_module")},
{args, []}]}]}]),
S
end || T <- ["us", "ir"]],
{ok, list_to_tuple(Ss)};
{adis, undefined} ->
{ok, [begin
{ok, S} = erleos_utils:start(erleos_sensor_srv, [{args, [{type, "adis"}, {cmod,
[{mod, erleos_adis_sensor_module},
{args, [{type, T}]}]}]}]),
S
end || T <- [accel, angvel, pos, linvel, factors, movinfo]]};
{adis, ST} ->
erleos_utils:start(erleos_sensor_srv, [{args, [{type, "adis"}, {cmod,
[{mod, erleos_adis_sensor_module},
{args, [{type, ST}]}]}]}]);
Type ->
erleos_utils:start(erleos_sensor_srv, [{args, [{type, atom_to_list(Type)}, {cmod,
[{mod, list_to_atom("erleos_" ++ atom_to_list(Type) ++ "_sensor_module")},
{args, []}]}]}])
end,
#ctx{can_ctx = Ctx,
this = S,
router = Ds,
queues = case Type of
usir ->
[lists:foldl(fun proplists:get_value/2, ErlCfg, [T, mqueues, output]) || T <- [us_sensor, ir_sensor]];
{adis, undefined} ->
[lists:foldl(fun proplists:get_value/2, ErlCfg, [list_to_atom(T ++ "_sensor"), mqueues, output])
|| T <- ["accel", "angvel", "pos", "linvel", "factors", "movinfo"]];
{adis, ST2} ->
lists:foldl(fun proplists:get_value/2, ErlCfg, [list_to_atom(atom_to_list(ST2)), mqueues, output]);
Other2 ->
lists:foldl(fun proplists:get_value/2, ErlCfg, [list_to_atom(atom_to_list(Other2) ++ "_sensor"), mqueues, output])
end,
type = Type}.
test_reading_messages (#ctx{this = S, router = R, queues = Qs, type = Type}) ->
P = generate_can_messages(Type, 16, R, Qs),
test_mqueue_output(Type, Qs, P),
test_reading(Type, S, P).
test_reading (_, _, {[], _}) ->
ok;
test_reading(Type, S, {Msgs, Ids}) ->
Length = length(Ids),
IdSortFun = get_sort_fun(Type, reading),
Newest = lists:sublist(Msgs, length(Msgs)-Length+1, Length),
Readings = case S of
Ss when is_list(Ss) ->
lists:flatten([erleos_sensor:get_last_reading(Srv) || Srv <- Ss]);
S ->
erleos_sensor:get_last_reading(S)
end,
?assertEqual(length(Readings), length(Newest)),
?assertEqual(Length, length(Readings)),
?assert(lists:foldl(fun
({{Id, _, <<Value:16/little, _>>},
#usir_data{id = Id, value = Value}}, Bool) ->
Bool and true;
({{Id, _, <<AX:16/little, AY:16/little, AZ:16/little>>},
#accel_data{id = Id, ax = AX, ay = AY, az = AZ}}, Bool) ->
Bool and true;
({{Id, _, <<AX:16/little, AY:16/little, AZ:16/little>>},
#angvel_data{id = Id, ax = AX, ay= AY, az = AZ}}, Bool) ->
Bool and true;
({{Id, _, <<X:16/little, Y:16/little, Phi:16/little>>},
#pos_data{id = Id, x = X, y= Y, phi = Phi}}, Bool) ->
Bool and true;
({{Id, _, <<X:16/little>>},
#linvel_data{id = Id, x = X}}, Bool) ->
Bool and true;
({{Id, _, <<N, Z, K:16/little, KV:16/little, KW:16/little>>},
#factors_data{id = Id, n = N, z = Z, k = K, kv = KV, kw = KW}}, Bool) ->
Bool and true;
({{Id, _, <<Info>>},
#movinfo_data{id = Id, info = Info}}, Bool) ->
Bool and true
%;
% (_, _) ->
% false
end, true, lists:zip(lists:sort(IdSortFun, Newest),
lists:sort(fun
(X = #usir_data{}, Y = #usir_data{}) ->
X#usir_data.id =< Y#usir_data.id;
(X, Y) -> true
end,
Readings)))).
test_mqueue_output(_, _, {[], _}) ->
ok;
test_mqueue_output(Type, Qs, {Msgs, Ids}) ->
Reader = get_mqueue_reader(Type),
SFun = get_sort_fun(Type, reading),
check_mqueue_output(Type, Qs, lists:sort(SFun, Msgs), Reader, Ids).
check_mqueue_output (_, [], [], _, []) ->
ok;
check_mqueue_output (Type, Qs, Msgs, Reader, Ids)
when Type =:= ir orelse Type =:= us orelse Type =:= usir ->
Rs = lists:foldl(fun(QName, Acc) ->
Reader(QName, undefined, Reader, Acc)
end, [], Qs),
Reader2 = fun (Data, undefined, _, undefined) -> Data end,
check_mqueue_output(fake, [[R] || R <- lists:sort(Rs)],
lists:sublist(lists:sort(fun({I1, T1, _}, {I2, T2, _}) ->
T1 >= T2 andalso I1 =< I2
end, Msgs), length(Ids)), Reader2, Ids);
check_mqueue_output ({adis, ST}, Qs, Msgs, Reader, Ids) when ST =/= fake->
check_mqueue_output({adis, fake}, Qs, lists:sublist(Msgs, length(Ids)), Reader, Ids);
check_mqueue_output (Type, [Qss | Qs], [Msg | Msgs], Reader, [I1 | Ids]) ->
Reading = lists:foldl(fun(QName, Acc) ->
Reader(QName, undefined, Reader, Acc)
end, undefined, Qss),
Id = case I1 of
{_, I} -> I;
I1 -> I1
end,
{Id, _, Data1} = Msg,
case Reading of
<<>> -> ok;
_ ->
{I2, _, Data2} = erleos_sensor_srv:decode_mqueue_packet(Reading),
?assert(Id =:= I2 andalso cmp_mqueue_datas(Type, Data1, Data2))
end,
check_mqueue_output (Type, Qs, Msgs, Reader, Ids).
get_mqueue_reader ({adis, _}) ->
fun
(QName, undefined, Loop, undefined) ->
{ok, Q} = mqueue:open(QName, [noblock]),
Loop(Q, mqueue:recv(Q), Loop, undefined);
(Q, {ok, Data}, Loop, undefined) ->
Loop(Q, mqueue:recv(Q), Loop, Data);
(Q, {ok, Data}, Loop, Data) ->
Loop(Q, mqueue:recv(Q), Loop, Data);
(Q, {error,_}, _, Acc) ->
mqueue:close(Q),
Acc
end;
get_mqueue_reader (_) ->
fun
(QName, undefined, Loop, Acc) ->
{ok, Q} = mqueue:open(QName, [noblock]),
Loop(Q, mqueue:recv(Q), Loop, Acc);
(Q, {ok, Data}, Loop, Acc) ->
Loop(Q, mqueue:recv(Q), Loop, [Data|Acc]);
(Q, {error,_}, _, Acc) ->
mqueue:close(Q),
lists:reverse(Acc)
end.
cmp_mqueue_datas ({adis, _}, Data1, Data2) ->
case {Data1, Data2} of
{<<A:16/little, B:16/little, C:16/little>>,
<<A:16/little, B:16/little, C:16/little>>} -> true;
{<<A:16/little>>, <<A:16/little>>} -> true;
{<<A, B, C:16/little, D:16/little, E:16/little>>,
<<A, B, C:16/little, D:16/little, E:16/little>>} -> true;
{<<A>>, <<A>>} -> true
end;
cmp_mqueue_datas (_, Data1, Data2) ->
<<V1:16/little, C1>> = Data1,
<<V2:16/little, C2>> = Data2,
case {V2, C2} of
{V1, C1} -> true;
_ -> false
end.
get_sort_fun ({adis, undefined}, mqueue) ->
fun (<<I1:16/little,T1:32/little,_/binary>>,
<<I2:16/little,T2:32/little,_/binary>>) ->
T2 >= T1 andalso I1 =< I2
end;
get_sort_fun ({adis, undefined}, reading) ->
fun ({I1, T1, _}, {I2, T2, _}) ->
T1 >= T2 andalso I1 =< I2
end;
get_sort_fun (_, mqueue) ->
fun (<<I1:16/little,T1:32/little,_V1:16/little,C1>>,
<<I2:16/little,T2:32/little,_V2:16/little,C2>>) ->
{I1,T1,C1}=<{I2,T2,C2}
end;
get_sort_fun (_, reading) ->
fun ({I1, T1, _}, {I2, T2, _}) ->
{I1, T1} =< {I2, T2}
end.
generate_can_messages(Type, N, R, Qs) ->
IdList = get_can_device_ids(Type),
Msgs = generate_can_device_messages(Type, N, IdList),
case Type of
{adis, undefined} ->
[clear_outdated_packets_from_queues(Qss) || Qss <- Qs];
Type ->
clear_outdated_packets_from_queues(Qs)
end,
R ! {can,atom_to_list(R),Msgs},
timer:sleep(200),
{Msgs, IdList}.
get_can_device_ids (Type) when Type =:= ir orelse Type =:= us ->
Ranges = lists:foldl(fun proplists:get_value/2,
application:get_all_env(erleos),
[usir_can_router,case Type of
ir -> ir_data_id;
us -> us_data_id;
adis -> adis_data_ir
end]),
lists:flatten([[Id || Id <- lists:seq(Left,Right)] || {Left,Right} <- Ranges]);
get_can_device_ids ({adis, ST}) ->
Cfg = proplists:get_value(adis_can_router, application:get_all_env(erleos)),
DLst = case ST of
undefined ->
["accel",
"angvel",
"pos",
"linvel",
"factors",
"movinfo"];
ST ->
atom_to_list(ST)
end,
[{list_to_atom(Key ++ "_data"),
proplists:get_value(list_to_atom(Key ++ "_id"), Cfg)} || Key <- DLst].
generate_can_device_messages ({adis, _}, N, IdList) ->
Generator = fun
(VelData, I) when VelData =:= accel_data orelse VelData =:= angvel_data ->
[AX, AY, AZ] = [I, I*2, I*3],
<<AX:16/little, AY:16/little, AZ:16/little>>;
(pos_data, I) ->
[X, Y, Phi] = [I, I*2, I*3 rem 360],
<<X:16/little, Y:16/little, Phi:16/little>>;
(linvel_data, I) ->
<<I:16/little>>;
(factors_data, I) ->
[Z, K, KV, KW] = [I+1, I+2, I*10, I*20],
<<(I rem 8), (Z rem 8), K:16/little, KV:16/little, KW:16/little>>;
(movinfo_data, I) ->
<<(I rem 8)>>
end,
lists:flatten([
[{Id, {I, 0}, Generator(D, I)} || {D, Id} <- IdList]
|| I <- lists:seq(1, N)
]);
generate_can_device_messages (Usir, N, IdList) when Usir =:= usir orelse
Usir =:= us orelse
Usir =:= ir ->
lists:flatten([[{Id,{Cycle,0},<<Cycle:16/little,Cycle>>} || Id <- IdList] || Cycle <- lists:seq(1,N)]).
clear_outdated_packets_from_queues ([]) ->
cleared;
clear_outdated_packets_from_queues ([Q|Tail]) when not is_list(Q) ->
cleared = clear_outdated_packets_from_queues (Q),
clear_outdated_packets_from_queues (Tail);
clear_outdated_packets_from_queues (Qs) ->
Cleaner = fun
(Q, {error, eagain}, _) ->
mqueue:close(Q),
cleared;
(Q, {ok, _}, Loop) ->
Loop(Q, mqueue:recv(Q), Loop)
end,
lists:foldl(fun (QName, cleared) ->
{ok, Q} = mqueue:open(QName, [noblock]),
Fun = fun () -> Cleaner(Q, mqueue:recv(Q), Cleaner) end,
Fun()
end, cleared, Qs).
| null | https://raw.githubusercontent.com/qoocku/erleos/ce14d2cfeea81053307b1fab815e0fbe3b9d1133/test/erleos_sensor_srv_tests.erl | erlang | ==========================================================================
@doc TODO: Add description to erleos_usir_srv_tests
@end
==========================================================================
read erleos app config
;
(_, _) ->
false | @author < > < email >
@since 2011 - 02 - 24
-module (erleos_sensor_srv_tests).
-author ("Damian T. Dobroczy\\'nski <> <email>").
-compile (export_all).
-include_lib ("eunit/include/eunit.hrl").
-include ("erleos/include/proto/sensor.hrl").
-include ("erleos/include/proto/usir.hrl").
-include ("erleos/include/proto/adis.hrl").
-record (ctx, {can_ctx, this, router, queues, type}).
ir_setup () ->
create_ctx_for(ir).
us_setup () ->
create_ctx_for(us).
adis_setup () ->
create_ctx_for({adis, undefined}).
usir_setup () ->
create_ctx_for(usir).
tear_down (#ctx{can_ctx = Ctx, this = Ss, type = {adis, undefined}}) ->
[ok = erleos_utils:sync_stop(S) || S <- Ss],
'CAN_msg_router_srv_tests':tear_down1(Ctx);
tear_down (#ctx{can_ctx = Ctx, this = S, type = usir}) ->
{S1, S2} = S,
erleos_utils:sync_stop(S1),
erleos_utils:sync_stop(S2),
'CAN_msg_router_srv_tests':tear_down1(Ctx);
tear_down (#ctx{can_ctx = Ctx, this = S}) ->
erleos_utils:sync_stop(S),
'CAN_msg_router_srv_tests':tear_down1(Ctx).
'(ir) reading messages'(Ctx) ->
test_reading_messages(Ctx).
'(us) reading messages'(Ctx) ->
test_reading_messages(Ctx).
'(usir) reading messages'(#ctx{this = {S1, S2}, router = R, queues = [Qs1, Qs2], type = usir}) ->
P1 = generate_can_messages(us, 16, R, Qs1),
P2 = generate_can_messages(ir, 16, R, Qs2),
test_mqueue_output(ir, Qs1, P1),
test_mqueue_output(us, Qs2, P2),
test_reading(us, S1, P1),
test_reading(ir, S2, P2).
'(adis) reading messages'(Ctx) ->
test_reading_messages(Ctx).
ir_test_ () ->
tests_runner:run(fun ir_setup/0, fun tear_down/1, "(ir)", ?MODULE).
us_test_ () ->
tests_runner:run(fun us_setup/0, fun tear_down/1, "(us)", ?MODULE).
usir_test_ () ->
tests_runner:run(fun usir_setup/0, fun tear_down/1, "(usir)", ?MODULE).
adis_test_ () ->
tests_runner:run(fun adis_setup/0, fun tear_down/1, "(adis)", ?MODULE).
create_ctx_for (Type) ->
we need a mocked CAN driver with CAN msgs router
Ctx = 'CAN_msg_router_srv_tests':setup1(),
{ok, [Cfg]} = file:consult(filename:join([filename:dirname(code:which(?MODULE)), "..", "priv", "erleos.config"])),
register(Ds = list_to_atom(lists:foldl(fun proplists:get_value/2, Cfg, [erleos, case Type of
{adis, _} -> adis_can_router;
Other -> usir_can_router
end, candev])),
element(2, Ctx)),
ok = lists:foldl(fun ({Key, Val}, ok) ->
application:set_env(erleos, Key, Val)
end, ok, ErlCfg = proplists:get_value(erleos, Cfg)),
and USIR sensor server(s )
{ok, S} = case Type of
usir ->
Ss = [begin
{ok, S} = erleos_utils:start(erleos_sensor_srv, [{args, [{type, T}, {cmod,
[{mod, list_to_atom("erleos_" ++ T ++ "_sensor_module")},
{args, []}]}]}]),
S
end || T <- ["us", "ir"]],
{ok, list_to_tuple(Ss)};
{adis, undefined} ->
{ok, [begin
{ok, S} = erleos_utils:start(erleos_sensor_srv, [{args, [{type, "adis"}, {cmod,
[{mod, erleos_adis_sensor_module},
{args, [{type, T}]}]}]}]),
S
end || T <- [accel, angvel, pos, linvel, factors, movinfo]]};
{adis, ST} ->
erleos_utils:start(erleos_sensor_srv, [{args, [{type, "adis"}, {cmod,
[{mod, erleos_adis_sensor_module},
{args, [{type, ST}]}]}]}]);
Type ->
erleos_utils:start(erleos_sensor_srv, [{args, [{type, atom_to_list(Type)}, {cmod,
[{mod, list_to_atom("erleos_" ++ atom_to_list(Type) ++ "_sensor_module")},
{args, []}]}]}])
end,
#ctx{can_ctx = Ctx,
this = S,
router = Ds,
queues = case Type of
usir ->
[lists:foldl(fun proplists:get_value/2, ErlCfg, [T, mqueues, output]) || T <- [us_sensor, ir_sensor]];
{adis, undefined} ->
[lists:foldl(fun proplists:get_value/2, ErlCfg, [list_to_atom(T ++ "_sensor"), mqueues, output])
|| T <- ["accel", "angvel", "pos", "linvel", "factors", "movinfo"]];
{adis, ST2} ->
lists:foldl(fun proplists:get_value/2, ErlCfg, [list_to_atom(atom_to_list(ST2)), mqueues, output]);
Other2 ->
lists:foldl(fun proplists:get_value/2, ErlCfg, [list_to_atom(atom_to_list(Other2) ++ "_sensor"), mqueues, output])
end,
type = Type}.
test_reading_messages (#ctx{this = S, router = R, queues = Qs, type = Type}) ->
P = generate_can_messages(Type, 16, R, Qs),
test_mqueue_output(Type, Qs, P),
test_reading(Type, S, P).
test_reading (_, _, {[], _}) ->
ok;
test_reading(Type, S, {Msgs, Ids}) ->
Length = length(Ids),
IdSortFun = get_sort_fun(Type, reading),
Newest = lists:sublist(Msgs, length(Msgs)-Length+1, Length),
Readings = case S of
Ss when is_list(Ss) ->
lists:flatten([erleos_sensor:get_last_reading(Srv) || Srv <- Ss]);
S ->
erleos_sensor:get_last_reading(S)
end,
?assertEqual(length(Readings), length(Newest)),
?assertEqual(Length, length(Readings)),
?assert(lists:foldl(fun
({{Id, _, <<Value:16/little, _>>},
#usir_data{id = Id, value = Value}}, Bool) ->
Bool and true;
({{Id, _, <<AX:16/little, AY:16/little, AZ:16/little>>},
#accel_data{id = Id, ax = AX, ay = AY, az = AZ}}, Bool) ->
Bool and true;
({{Id, _, <<AX:16/little, AY:16/little, AZ:16/little>>},
#angvel_data{id = Id, ax = AX, ay= AY, az = AZ}}, Bool) ->
Bool and true;
({{Id, _, <<X:16/little, Y:16/little, Phi:16/little>>},
#pos_data{id = Id, x = X, y= Y, phi = Phi}}, Bool) ->
Bool and true;
({{Id, _, <<X:16/little>>},
#linvel_data{id = Id, x = X}}, Bool) ->
Bool and true;
({{Id, _, <<N, Z, K:16/little, KV:16/little, KW:16/little>>},
#factors_data{id = Id, n = N, z = Z, k = K, kv = KV, kw = KW}}, Bool) ->
Bool and true;
({{Id, _, <<Info>>},
#movinfo_data{id = Id, info = Info}}, Bool) ->
Bool and true
end, true, lists:zip(lists:sort(IdSortFun, Newest),
lists:sort(fun
(X = #usir_data{}, Y = #usir_data{}) ->
X#usir_data.id =< Y#usir_data.id;
(X, Y) -> true
end,
Readings)))).
test_mqueue_output(_, _, {[], _}) ->
ok;
test_mqueue_output(Type, Qs, {Msgs, Ids}) ->
Reader = get_mqueue_reader(Type),
SFun = get_sort_fun(Type, reading),
check_mqueue_output(Type, Qs, lists:sort(SFun, Msgs), Reader, Ids).
check_mqueue_output (_, [], [], _, []) ->
ok;
check_mqueue_output (Type, Qs, Msgs, Reader, Ids)
when Type =:= ir orelse Type =:= us orelse Type =:= usir ->
Rs = lists:foldl(fun(QName, Acc) ->
Reader(QName, undefined, Reader, Acc)
end, [], Qs),
Reader2 = fun (Data, undefined, _, undefined) -> Data end,
check_mqueue_output(fake, [[R] || R <- lists:sort(Rs)],
lists:sublist(lists:sort(fun({I1, T1, _}, {I2, T2, _}) ->
T1 >= T2 andalso I1 =< I2
end, Msgs), length(Ids)), Reader2, Ids);
check_mqueue_output ({adis, ST}, Qs, Msgs, Reader, Ids) when ST =/= fake->
check_mqueue_output({adis, fake}, Qs, lists:sublist(Msgs, length(Ids)), Reader, Ids);
check_mqueue_output (Type, [Qss | Qs], [Msg | Msgs], Reader, [I1 | Ids]) ->
Reading = lists:foldl(fun(QName, Acc) ->
Reader(QName, undefined, Reader, Acc)
end, undefined, Qss),
Id = case I1 of
{_, I} -> I;
I1 -> I1
end,
{Id, _, Data1} = Msg,
case Reading of
<<>> -> ok;
_ ->
{I2, _, Data2} = erleos_sensor_srv:decode_mqueue_packet(Reading),
?assert(Id =:= I2 andalso cmp_mqueue_datas(Type, Data1, Data2))
end,
check_mqueue_output (Type, Qs, Msgs, Reader, Ids).
get_mqueue_reader ({adis, _}) ->
fun
(QName, undefined, Loop, undefined) ->
{ok, Q} = mqueue:open(QName, [noblock]),
Loop(Q, mqueue:recv(Q), Loop, undefined);
(Q, {ok, Data}, Loop, undefined) ->
Loop(Q, mqueue:recv(Q), Loop, Data);
(Q, {ok, Data}, Loop, Data) ->
Loop(Q, mqueue:recv(Q), Loop, Data);
(Q, {error,_}, _, Acc) ->
mqueue:close(Q),
Acc
end;
get_mqueue_reader (_) ->
fun
(QName, undefined, Loop, Acc) ->
{ok, Q} = mqueue:open(QName, [noblock]),
Loop(Q, mqueue:recv(Q), Loop, Acc);
(Q, {ok, Data}, Loop, Acc) ->
Loop(Q, mqueue:recv(Q), Loop, [Data|Acc]);
(Q, {error,_}, _, Acc) ->
mqueue:close(Q),
lists:reverse(Acc)
end.
cmp_mqueue_datas ({adis, _}, Data1, Data2) ->
case {Data1, Data2} of
{<<A:16/little, B:16/little, C:16/little>>,
<<A:16/little, B:16/little, C:16/little>>} -> true;
{<<A:16/little>>, <<A:16/little>>} -> true;
{<<A, B, C:16/little, D:16/little, E:16/little>>,
<<A, B, C:16/little, D:16/little, E:16/little>>} -> true;
{<<A>>, <<A>>} -> true
end;
cmp_mqueue_datas (_, Data1, Data2) ->
<<V1:16/little, C1>> = Data1,
<<V2:16/little, C2>> = Data2,
case {V2, C2} of
{V1, C1} -> true;
_ -> false
end.
get_sort_fun ({adis, undefined}, mqueue) ->
fun (<<I1:16/little,T1:32/little,_/binary>>,
<<I2:16/little,T2:32/little,_/binary>>) ->
T2 >= T1 andalso I1 =< I2
end;
get_sort_fun ({adis, undefined}, reading) ->
fun ({I1, T1, _}, {I2, T2, _}) ->
T1 >= T2 andalso I1 =< I2
end;
get_sort_fun (_, mqueue) ->
fun (<<I1:16/little,T1:32/little,_V1:16/little,C1>>,
<<I2:16/little,T2:32/little,_V2:16/little,C2>>) ->
{I1,T1,C1}=<{I2,T2,C2}
end;
get_sort_fun (_, reading) ->
fun ({I1, T1, _}, {I2, T2, _}) ->
{I1, T1} =< {I2, T2}
end.
generate_can_messages(Type, N, R, Qs) ->
IdList = get_can_device_ids(Type),
Msgs = generate_can_device_messages(Type, N, IdList),
case Type of
{adis, undefined} ->
[clear_outdated_packets_from_queues(Qss) || Qss <- Qs];
Type ->
clear_outdated_packets_from_queues(Qs)
end,
R ! {can,atom_to_list(R),Msgs},
timer:sleep(200),
{Msgs, IdList}.
get_can_device_ids (Type) when Type =:= ir orelse Type =:= us ->
Ranges = lists:foldl(fun proplists:get_value/2,
application:get_all_env(erleos),
[usir_can_router,case Type of
ir -> ir_data_id;
us -> us_data_id;
adis -> adis_data_ir
end]),
lists:flatten([[Id || Id <- lists:seq(Left,Right)] || {Left,Right} <- Ranges]);
get_can_device_ids ({adis, ST}) ->
Cfg = proplists:get_value(adis_can_router, application:get_all_env(erleos)),
DLst = case ST of
undefined ->
["accel",
"angvel",
"pos",
"linvel",
"factors",
"movinfo"];
ST ->
atom_to_list(ST)
end,
[{list_to_atom(Key ++ "_data"),
proplists:get_value(list_to_atom(Key ++ "_id"), Cfg)} || Key <- DLst].
generate_can_device_messages ({adis, _}, N, IdList) ->
Generator = fun
(VelData, I) when VelData =:= accel_data orelse VelData =:= angvel_data ->
[AX, AY, AZ] = [I, I*2, I*3],
<<AX:16/little, AY:16/little, AZ:16/little>>;
(pos_data, I) ->
[X, Y, Phi] = [I, I*2, I*3 rem 360],
<<X:16/little, Y:16/little, Phi:16/little>>;
(linvel_data, I) ->
<<I:16/little>>;
(factors_data, I) ->
[Z, K, KV, KW] = [I+1, I+2, I*10, I*20],
<<(I rem 8), (Z rem 8), K:16/little, KV:16/little, KW:16/little>>;
(movinfo_data, I) ->
<<(I rem 8)>>
end,
lists:flatten([
[{Id, {I, 0}, Generator(D, I)} || {D, Id} <- IdList]
|| I <- lists:seq(1, N)
]);
generate_can_device_messages (Usir, N, IdList) when Usir =:= usir orelse
Usir =:= us orelse
Usir =:= ir ->
lists:flatten([[{Id,{Cycle,0},<<Cycle:16/little,Cycle>>} || Id <- IdList] || Cycle <- lists:seq(1,N)]).
clear_outdated_packets_from_queues ([]) ->
cleared;
clear_outdated_packets_from_queues ([Q|Tail]) when not is_list(Q) ->
cleared = clear_outdated_packets_from_queues (Q),
clear_outdated_packets_from_queues (Tail);
clear_outdated_packets_from_queues (Qs) ->
Cleaner = fun
(Q, {error, eagain}, _) ->
mqueue:close(Q),
cleared;
(Q, {ok, _}, Loop) ->
Loop(Q, mqueue:recv(Q), Loop)
end,
lists:foldl(fun (QName, cleared) ->
{ok, Q} = mqueue:open(QName, [noblock]),
Fun = fun () -> Cleaner(Q, mqueue:recv(Q), Cleaner) end,
Fun()
end, cleared, Qs).
|
e20387c45ceecbadc69087f0b37c4cef0e6a49cca4de49a58d4dcfe6e1dd6044 | juhp/koji-tool | Main.hs | # LANGUAGE CPP #
SPDX - License - Identifier : BSD-3 - Clause
module Main (main) where
import Data.Char (isDigit)
import Data.List.Extra
import SimpleCmd
import SimpleCmdArgs
import Builds
import BuildlogSizes
import User
import Install
import qualified Paths_koji_tool
import Progress
import Find
import Tasks
main :: IO ()
main = do
simpleCmdArgs (Just Paths_koji_tool.version)
"Query and track Koji tasks, and install rpms from Koji."
"see -tool#readme" $
subcommands
[ Subcommand "builds"
"Query Koji builds (by default lists the most recent builds)" $
buildsCmd
<$> hubOpt
<*> optional userOpt
<*> (flagWith' 1 'L' "latest" "Latest build" <|>
optionalWith auto 'l' "limit" "INT" "Maximum number of builds to show [default: 10]" 10)
<*> many (parseBuildState <$> strOptionWith 's' "state" "STATE" "Filter builds by state (building,complete,deleted,fail(ed),cancel(ed)")
<*> optional (Before <$> strOptionWith 'B' "before" "TIMESTAMP" "Builds completed before timedate [default: now]" <|>
After <$> strOptionWith 'F' "from" "TIMESTAMP" "Builds completed after timedate")
<*> (fmap normalizeBuildType <$> optional (strOptionWith 'T' "type" "TYPE" ("Select builds by type: " ++ intercalate "," kojiBuildTypes)))
<*> (flagWith' Detailed 'd' "details" "Show more build details" <|>
flagWith DetailDefault DetailedTasks 't' "tasks" "Show details and tasks")
<*> optional (installArgs <$> strOptionWith 'i' "install" "INSTALLOPTS" "Install the package with 'install' options")
<*> switchWith 'D' "debug" "Pretty-print raw XML result"
<*> (BuildBuild <$> strOptionWith 'b' "build" "NVR/BUILDID" "Show build" <|>
BuildPattern <$> strOptionWith 'p' "pattern" "NVRPAT" "Builds matching glob pattern" <|>
BuildPackage <$> strArg "PACKAGE" <|>
pure BuildQuery)
, Subcommand "tasks"
"Query Koji tasks (by default lists the most recent buildArch tasks)" $
tasksCmd
<$> hubOpt
<*> optional userOpt
<*> (flagWith' 1 'L' "latest" "Latest build or task" <|>
optionalWith auto 'l' "limit" "INT" "Maximum number of tasks to show [default: 10]" 10)
<*> many (parseTaskState <$> strOptionWith 's' "state" "STATE" "Filter tasks by state (open,close(d),cancel(ed),fail(ed),assigned,free)")
<*> many (strOptionWith 'a' "arch" "ARCH" "Task arch")
<*> optional (Before <$> strOptionWith 'B' "before" "TIMESTAMP" "Tasks completed before timedate [default: now]" <|>
After <$> strOptionWith 'F' "from" "TIMESTAMP" "Tasks completed after timedate")
<*> (fmap normalizeMethod <$> optional (strOptionWith 'm' "method" "METHOD" ("Select tasks by method (default 'buildArch'): " ++ intercalate "," kojiMethods)))
<*> switchWith 'd' "details" "Show more details of builds"
<*> switchWith 'D' "debug" "Pretty-print raw XML result"
-- FIXME error if integer (eg mistakenly taskid)
<*> optional (TaskPackage <$> strOptionWith 'P' "only-package" "PKG" "Filter task results to specified package"
<|> TaskNVR <$> strOptionWith 'N' "only-nvr" "PREFIX" "Filter task results by NVR prefix")
<*> switchWith 'T' "tail" "Fetch the tail of build.log"
-- FIXME any way to pass --help to install?
<*> optional (installArgs <$> strOptionWith 'i' "install" "INSTALLOPTS" "Install the package with 'install' options")
<*> (Build <$> strOptionWith 'b' "build" "BUILD" "List child tasks of build"
<|> Pattern <$> strOptionWith 'p' "pattern" "NVRPAT" "Build tasks of matching pattern"
<|> argumentWith (maybeReader readTaskReq) "PACKAGE|TASKID"
<|> pure TaskQuery)
, Subcommand "latest"
"Query latest Koji build for tag" $
latestCmd
<$> hubOpt
<*> switchWith 'D' "debug" "Pretty-print raw XML result"
<*> strArg "TAG"
<*> strArg "PKG"
, Subcommand "install"
"Install rpm packages directly from a Koji build task" $
installCmd
<$> switchWith 'n' "dry-run" "Don't actually download anything"
<*> switchWith 'D' "debug" "More detailed output"
<*> flagWith No Yes 'y' "yes" "Assume yes to questions (implies --all if not installed)"
<*> hubOpt
<*> optional (strOptionWith 'P' "packages-url" "URL"
"KojiFiles packages url [default: Fedora]")
<*> switchWith 'l' "list" "List builds"
<*> switchWith 'L' "latest" "Latest build"
<*> switchWith 't' "check-remote-time" "Check remote rpm timestamps"
<*> optional pkgMgrOpt
<*> existingOpt
<*> optional (strOptionWith 'b' "prefix" "SUBPKGPREFIX" "Prefix to use for subpackages [default: base package]")
<*> selectOpt
<*> optional disttagOpt
<*> (flagWith' ReqNVR 'R' "nvr" "Give an N-V-R instead of package name" <|>
flagWith ReqName ReqNV 'V' "nv" "Give an N-V instead of package name")
<*> some (strArg "PKG|NVR|TASKID...")
, Subcommand "progress"
"Track running Koji tasks by buildlog size" $
progressCmd
<$> switchWith 'D' "debug" "Pretty-print raw XML result"
<*> switchWith 'm' "modules" "Track module builds"
<*> many (TaskId <$> argumentWith auto "TASKID")
, Subcommand "buildlog-sizes" "Show buildlog sizes for nvr patterns" $
buildlogSizesCmd <$> strArg "NVRPATTERN"
, Subcommand "find"
("Simple quick common queries using words like: [" ++
intercalate ", " (wordsList head) ++ "]") $
findCmd
<$> hubOpt
<*> switchWith 'D' "debug" "Debug output including XML results"
<*> many (strArg "PHRASE")
]
where
hubOpt = optional (strOptionWith 'H' "hub" "HUB"
("KojiHub shortname or url (HUB = " ++
intercalate ", " knownHubs ++
") [default: fedora]"))
userOpt :: Parser UserOpt
userOpt =
User <$> strOptionWith 'u' "user" "USER" "Koji user"
<|> flagWith' UserSelf 'M' "mine" "Your tasks (krb fasid)"
selectOpt :: Parser Select
selectOpt =
flagLongWith' All "all" "all subpackages" <|>
flagLongWith' Ask "ask" "ask for each subpackge [default if not installed]" <|>
PkgsReq
<$> many (strOptionWith 'p' "package" "SUBPKG" "Subpackage (glob) to install")
<*> many (strOptionWith 'a' "add" "SUBPKG" "Additional subpackage (glob) to install")
<*> many (strOptionWith 'x' "exclude" "SUBPKG" "Subpackage (glob) not to install")
disttagOpt :: Parser String
disttagOpt = startingDot <$>
strOptionWith 'd' "disttag" "DISTTAG"
"Select a disttag different to system"
startingDot cs =
case cs of
"" -> error' "empty disttag"
(c:_) -> if c == '.' then cs else '.' : cs
normalizeMethod :: String -> String
normalizeMethod m =
case elemIndex (lower m) (map lower kojiMethods) of
Just i -> kojiMethods !! i
Nothing -> error' $! "unknown method: " ++ m
normalizeBuildType :: String -> String
normalizeBuildType m =
case elemIndex (lower m) (map lower kojiBuildTypes) of
Just i -> kojiBuildTypes !! i
Nothing -> error' $! "unknown build type: " ++ m
readTaskReq :: String -> Maybe TaskReq
readTaskReq cs =
Just $ if all isDigit cs then Task (read cs) else Package cs
pkgMgrOpt :: Parser PkgMgr
pkgMgrOpt =
flagLongWith' RPM "rpm" "Use rpm instead of dnf" <|>
flagLongWith' OSTREE "rpm-ostree" "Use rpm-ostree instead of dnf" <|>
flagLongWith' DNF "dnf" "Use dnf to install [default unless ostree]"
existingOpt :: Parser ExistingStrategy
existingOpt =
flagWith' ExistingNoReinstall 'N' "no-reinstall" "Do not reinstall existing NVRs" <|>
flagWith ExistingUpdate ExistingSkip 'S' "skip-existing" "Ignore already installed subpackages (implies --no-reinstall)"
| null | https://raw.githubusercontent.com/juhp/koji-tool/c7be357701c79fdfb89ece2ac9321c8795db8948/src/Main.hs | haskell | FIXME error if integer (eg mistakenly taskid)
FIXME any way to pass --help to install? | # LANGUAGE CPP #
SPDX - License - Identifier : BSD-3 - Clause
module Main (main) where
import Data.Char (isDigit)
import Data.List.Extra
import SimpleCmd
import SimpleCmdArgs
import Builds
import BuildlogSizes
import User
import Install
import qualified Paths_koji_tool
import Progress
import Find
import Tasks
main :: IO ()
main = do
simpleCmdArgs (Just Paths_koji_tool.version)
"Query and track Koji tasks, and install rpms from Koji."
"see -tool#readme" $
subcommands
[ Subcommand "builds"
"Query Koji builds (by default lists the most recent builds)" $
buildsCmd
<$> hubOpt
<*> optional userOpt
<*> (flagWith' 1 'L' "latest" "Latest build" <|>
optionalWith auto 'l' "limit" "INT" "Maximum number of builds to show [default: 10]" 10)
<*> many (parseBuildState <$> strOptionWith 's' "state" "STATE" "Filter builds by state (building,complete,deleted,fail(ed),cancel(ed)")
<*> optional (Before <$> strOptionWith 'B' "before" "TIMESTAMP" "Builds completed before timedate [default: now]" <|>
After <$> strOptionWith 'F' "from" "TIMESTAMP" "Builds completed after timedate")
<*> (fmap normalizeBuildType <$> optional (strOptionWith 'T' "type" "TYPE" ("Select builds by type: " ++ intercalate "," kojiBuildTypes)))
<*> (flagWith' Detailed 'd' "details" "Show more build details" <|>
flagWith DetailDefault DetailedTasks 't' "tasks" "Show details and tasks")
<*> optional (installArgs <$> strOptionWith 'i' "install" "INSTALLOPTS" "Install the package with 'install' options")
<*> switchWith 'D' "debug" "Pretty-print raw XML result"
<*> (BuildBuild <$> strOptionWith 'b' "build" "NVR/BUILDID" "Show build" <|>
BuildPattern <$> strOptionWith 'p' "pattern" "NVRPAT" "Builds matching glob pattern" <|>
BuildPackage <$> strArg "PACKAGE" <|>
pure BuildQuery)
, Subcommand "tasks"
"Query Koji tasks (by default lists the most recent buildArch tasks)" $
tasksCmd
<$> hubOpt
<*> optional userOpt
<*> (flagWith' 1 'L' "latest" "Latest build or task" <|>
optionalWith auto 'l' "limit" "INT" "Maximum number of tasks to show [default: 10]" 10)
<*> many (parseTaskState <$> strOptionWith 's' "state" "STATE" "Filter tasks by state (open,close(d),cancel(ed),fail(ed),assigned,free)")
<*> many (strOptionWith 'a' "arch" "ARCH" "Task arch")
<*> optional (Before <$> strOptionWith 'B' "before" "TIMESTAMP" "Tasks completed before timedate [default: now]" <|>
After <$> strOptionWith 'F' "from" "TIMESTAMP" "Tasks completed after timedate")
<*> (fmap normalizeMethod <$> optional (strOptionWith 'm' "method" "METHOD" ("Select tasks by method (default 'buildArch'): " ++ intercalate "," kojiMethods)))
<*> switchWith 'd' "details" "Show more details of builds"
<*> switchWith 'D' "debug" "Pretty-print raw XML result"
<*> optional (TaskPackage <$> strOptionWith 'P' "only-package" "PKG" "Filter task results to specified package"
<|> TaskNVR <$> strOptionWith 'N' "only-nvr" "PREFIX" "Filter task results by NVR prefix")
<*> switchWith 'T' "tail" "Fetch the tail of build.log"
<*> optional (installArgs <$> strOptionWith 'i' "install" "INSTALLOPTS" "Install the package with 'install' options")
<*> (Build <$> strOptionWith 'b' "build" "BUILD" "List child tasks of build"
<|> Pattern <$> strOptionWith 'p' "pattern" "NVRPAT" "Build tasks of matching pattern"
<|> argumentWith (maybeReader readTaskReq) "PACKAGE|TASKID"
<|> pure TaskQuery)
, Subcommand "latest"
"Query latest Koji build for tag" $
latestCmd
<$> hubOpt
<*> switchWith 'D' "debug" "Pretty-print raw XML result"
<*> strArg "TAG"
<*> strArg "PKG"
, Subcommand "install"
"Install rpm packages directly from a Koji build task" $
installCmd
<$> switchWith 'n' "dry-run" "Don't actually download anything"
<*> switchWith 'D' "debug" "More detailed output"
<*> flagWith No Yes 'y' "yes" "Assume yes to questions (implies --all if not installed)"
<*> hubOpt
<*> optional (strOptionWith 'P' "packages-url" "URL"
"KojiFiles packages url [default: Fedora]")
<*> switchWith 'l' "list" "List builds"
<*> switchWith 'L' "latest" "Latest build"
<*> switchWith 't' "check-remote-time" "Check remote rpm timestamps"
<*> optional pkgMgrOpt
<*> existingOpt
<*> optional (strOptionWith 'b' "prefix" "SUBPKGPREFIX" "Prefix to use for subpackages [default: base package]")
<*> selectOpt
<*> optional disttagOpt
<*> (flagWith' ReqNVR 'R' "nvr" "Give an N-V-R instead of package name" <|>
flagWith ReqName ReqNV 'V' "nv" "Give an N-V instead of package name")
<*> some (strArg "PKG|NVR|TASKID...")
, Subcommand "progress"
"Track running Koji tasks by buildlog size" $
progressCmd
<$> switchWith 'D' "debug" "Pretty-print raw XML result"
<*> switchWith 'm' "modules" "Track module builds"
<*> many (TaskId <$> argumentWith auto "TASKID")
, Subcommand "buildlog-sizes" "Show buildlog sizes for nvr patterns" $
buildlogSizesCmd <$> strArg "NVRPATTERN"
, Subcommand "find"
("Simple quick common queries using words like: [" ++
intercalate ", " (wordsList head) ++ "]") $
findCmd
<$> hubOpt
<*> switchWith 'D' "debug" "Debug output including XML results"
<*> many (strArg "PHRASE")
]
where
hubOpt = optional (strOptionWith 'H' "hub" "HUB"
("KojiHub shortname or url (HUB = " ++
intercalate ", " knownHubs ++
") [default: fedora]"))
userOpt :: Parser UserOpt
userOpt =
User <$> strOptionWith 'u' "user" "USER" "Koji user"
<|> flagWith' UserSelf 'M' "mine" "Your tasks (krb fasid)"
selectOpt :: Parser Select
selectOpt =
flagLongWith' All "all" "all subpackages" <|>
flagLongWith' Ask "ask" "ask for each subpackge [default if not installed]" <|>
PkgsReq
<$> many (strOptionWith 'p' "package" "SUBPKG" "Subpackage (glob) to install")
<*> many (strOptionWith 'a' "add" "SUBPKG" "Additional subpackage (glob) to install")
<*> many (strOptionWith 'x' "exclude" "SUBPKG" "Subpackage (glob) not to install")
disttagOpt :: Parser String
disttagOpt = startingDot <$>
strOptionWith 'd' "disttag" "DISTTAG"
"Select a disttag different to system"
startingDot cs =
case cs of
"" -> error' "empty disttag"
(c:_) -> if c == '.' then cs else '.' : cs
normalizeMethod :: String -> String
normalizeMethod m =
case elemIndex (lower m) (map lower kojiMethods) of
Just i -> kojiMethods !! i
Nothing -> error' $! "unknown method: " ++ m
normalizeBuildType :: String -> String
normalizeBuildType m =
case elemIndex (lower m) (map lower kojiBuildTypes) of
Just i -> kojiBuildTypes !! i
Nothing -> error' $! "unknown build type: " ++ m
readTaskReq :: String -> Maybe TaskReq
readTaskReq cs =
Just $ if all isDigit cs then Task (read cs) else Package cs
pkgMgrOpt :: Parser PkgMgr
pkgMgrOpt =
flagLongWith' RPM "rpm" "Use rpm instead of dnf" <|>
flagLongWith' OSTREE "rpm-ostree" "Use rpm-ostree instead of dnf" <|>
flagLongWith' DNF "dnf" "Use dnf to install [default unless ostree]"
existingOpt :: Parser ExistingStrategy
existingOpt =
flagWith' ExistingNoReinstall 'N' "no-reinstall" "Do not reinstall existing NVRs" <|>
flagWith ExistingUpdate ExistingSkip 'S' "skip-existing" "Ignore already installed subpackages (implies --no-reinstall)"
|
6ac783cc8a046ffb583d88fdc00b972b8f65efebfc19771835ee56ace76d0b15 | melhadad/fuf | verb-group.lisp | ;;; -*- Mode:Lisp; Syntax:Common-Lisp; Package: -*-
;;; -----------------------------------------------------------------------
;;; File: verb-group.lisp
;;; Description: Grammatical systems for verb group
Author :
Created : 19 Dec 1991
Modified : 18 Aug 1992 Added adverb ( inherited from clause )
5 Jul 1995 SURGE 2.2 VERSION
;;; - Fixed problem of wh/inversion in
;;; long-distance (Who do you think won?) use do.
;;; (embedded no).
;;; -----------------------------------------------------------------------
FUF - a functional unification - based text generation system . ( . 5.4 )
;;;
Copyright ( c ) 1987 - 2014 by . all rights reserved .
;;;
;;; Permission to use, copy, and/or distribute for any purpose and
;;; without fee is hereby granted, provided that both the above copyright
;;; notice and this permission notice appear in all copies and derived works.
;;; Fees for distribution or use of this software or derived works may only
;;; be charged with express written permission of the copyright holder.
THIS SOFTWARE IS PROVIDED ` ` AS IS '' WITHOUT EXPRESS OR IMPLIED WARRANTY .
;;; -----------------------------------------------------------------------
(in-package "FUG5")
BRANCH FOR SIMPLE - VERB - GROUP
(def-conj simple-verb-group
(cat simple-verb-group)
(generic-cat verb-group)
(complex none)
(interrogative ((alt (none #(under yes-no) #(under wh))))) ;; default is not
(insistence ((alt (no #(under yes))))) ;; default is not insistent
(alt verb-lexicalization (:demo "Lexicalization of verb")
(((concept given)
(lexical-verb ((concept {^ ^ concept})
(subcat {^ ^ subcat})
(lex {^ ^ lex})
(cat lex-verb))))
((lex given))))
(alt verb-polarity (:index polarity)
(((polarity positive)
(notf none))
((polarity #(under negative))
(notf ((lex "not") (cat adv))))))
;; the tensed feature may be the event or any of the auxs.
(tensed-feature ((person { ^ ^ person})
(number { ^ ^ number})
(ending { ^ ^ ending})))
;; there is always an event verb -- its also called the main verb.
(event ((cat verb)
(lex { ^ ^ lex})))
deal with modality : allow only one type or no modality at all
;; default is no modality
(alt only-one-modal
(((epistemic-modality none)
(deontic-modality none)
(modality none))
((epistemic-modality given)
(deontic-modality none)
(modality epistemic))
((deontic-modality given)
(epistemic-modality none)
(modality deontic))))
(:! tense-selection)
(:! modality)
;; Now, the tense feature should be selected. Deal
;; with voice, interrogative and polarity.
(:! voice-verb-group)
(:! notf-adverb-placement)
;; Now fill the slots for auxiliaries if necessary
(alt (((be-1 none)
(cset ((- be-1))))
((be-1 given)
(be-1 ((lex "be")
(cat verb))))))
(alt (((be-2 none)
(cset ((- be-2))))
((be-2 given)
(be-2 ((lex "be")
(cat verb))))))
(alt (((have-1 none)
(cset ((- have-1))))
((have-1 given)
(have-1 ((lex "have")
(cat verb))))))
(alt (((have-2 none)
(cset ((- have-2))))
((have-2 given)
(have-2 ((lex "have")
(cat verb))))))
(alt (((beg none)
(cset ((- beg going-to))))
((beg given)
(beg ((lex "be")
(cat verb)))
;; we need to add the going to which is never tensed. So,
;; make it a cat modal.
(going-to ((lex "going to")
(cat modal))))))
(alt aux-specified
(:wait aux-specified)
(((aux-specified no)
(aux none))
((aux given))
((aux none)
(cset ((- aux))))))
;; Put everything together. Notf and adverb have already been placed.
;; For interrogative, the tensed-feature has been fronted, so
;; don't put it here
(alt pattern-interrogative (:index interrogative)
(((interrogative none)
(pattern (aux dots have-1 dots beg dots going-to dots
have-2 dots be-1 dots be-2 dots event dots)))
((interrogative interrogative)
(tensed-feature ((gap yes)))
;; If tensed-feature not bound yet, it means aux should be
;; fronted by the question.
(alt aux-or-tensed (:wait {^ tensed-feature lex})
(((tensed-feature ((lex given))))
((tensed-feature {^ aux}))))
(alt notf-interrogative
(((notf none))
((notf ((gap yes)
(lex {^ ^ ^ fronted-not lex})
(cat {^ ^ ^ fronted-not cat}))))))
(alt adverb-interrogative
(:wait aux-specified)
(((adverb none))
((aux given))
;; No aux: front adverb "who never owned this book"
((aux none)
(adverb ((gap yes)
(lex {^ ^ ^ fronted-adverb lex})
(cat {^ ^ ^ fronted-adverb cat}))))))
(pattern (aux dots have-1 dots beg dots going-to dots
have-2 dots be-1 dots be-2 dots event dots))))))
(def-alt tense-selection (:index tense)
(:demo "what is the tense?")
;; ***** Done only for case of non-modal finite tense
SIMPLE TENSES
(
tense 2 , the default :
;; I take the bus.[a]
;; The bus is taken by me.[p]
((tense present)
(tpattern ((:st :equals :rt0)
(:st :none :rt1)
(:et :none :rt1)
(:rt0 :none :rt1)))
(:! aspect-choice)
(time-frame present)
(alt (((simple yes)
(modality none))
((simple no))))
(verb-aspect root)
(tensed-feature ((tense present))))
Tense 1 : past
;; I took the bus.[a]
;; The bus was taken by me.[p]
((tense past)
(tpattern ((:rt0 :precedes :st)
(:st :none :rt1)
(:et :none :rt1)
(:rt0 :none :rt1)))
(alt (((simple yes)
(modality none))
((simple no))))
(time-frame past)
(:! aspect-choice)
(tensed-feature ((tense past))))
tense 3
;; I will take the bus.[a]
;; The bus will be taken by me.[b]
;; for the future tenses, the auxillary "will" is
;; treated as a modal. There is no tensed feature
;; and no agreement is necessary.
((tense future)
(tpattern ((:st :precedes :rt0)
(:st :none :rt1)
(:et :none :rt1)
(:rt0 :none :rt1)))
(:! aspect-choice)
(simple no)
(time-frame future)
(verb-aspect root))
tense 4
;; I had taken the bus.(a)
;; The bus had been taken by me.(p)
((tense past-perfect)
(time-frame past)
(simple no)
(tpattern ((:rt1 :precedes :st)
(:rt0 :precedes :rt1)
(:st :none :rt2)
(:rt1 :none :rt2)
(:rt0 :none :rt2)
(:et :none :rt2)))
(first-verb {^ have-1})
(tensed-feature ((tense past)))
(:! aspect-choice)
(verb-aspect past-participle))
tense 5
;; I have taken the bus
;; The bus has been taken by me.
((tense present-perfect)
(time-frame present)
(simple no)
(tpattern ((:rt0 :precedes :rt1)
(:rt1 :equals :st)
(:st :none :rt2)
(:rt1 :none :rt2)
(:et :none :rt2)
(:rt0 :none :rt2)))
(first-verb {^ have-1})
(tensed-feature ((tense present)))
(:! aspect-choice)
(verb-aspect past-participle))
tense 6
;; I will have taken the bus.[a]
;; The bus will have been taken by me.[p]
((tense future-perfect)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt0)
(:rt0 :precedes :rt1)
(:st :none :rt2)
(:rt1 :none :rt2)
(:et :none :rt2)
(:rt0 :none :rt2)))
(aux ((lex "will")
(cat modal)))
(tensed-feature {^ aux})
(first-verb {^ aux})
(have-1 ((ending root)))
(:! aspect-choice)
(verb-aspect past-participle))
tense 7
;; I was taking the bus.(a)
;; The bus was being taken by me.(p)
((tense past-progressive)
(simple no)
(time-frame past)
(tpattern ((:rt0 :precedes :st)
(:rt0 :includes :et)
(:st :none :rt1)
(:et :none :rt1)
(:rt0 :none :rt1)))
(aspect ((alt aspect-7 (event process))))
(be-1 ((tense past)))
(be-1 { ^ tensed-feature})
(verb-aspect present-participle))
tense 8
;; I am taking the bus.(a)
;; The bus is being taken by me.(p)
((tense present-progressive)
(simple no)
(time-frame present)
(tpattern ((:rt0 :precedes :st)
(:rt0 :includes :et)
(:st :none :rt1)
(:et :none :rt1)
(:rt0 :none :rt1)))
(aspect ((alt aspect-8 (event process))))
(be-1 ((tense present)))
(be-1 { ^ tensed-feature})
(verb-aspect present-participle))
tense 9
;; I will be taking the bus.(a)
;; The bus will be being taken by me.(p)
((tense future-progressive)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt0)
(:rt0 :includes :et)
(:st :none :rt1)
(:et :none :rt1)
(:rt0 :none :rt1)))
(aspect ((alt aspect-9 (event process))))
(aux ((lex "will")
(cat modal)))
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 10
;; I was going to take the bus.[a]
;; The bus was going to be taken by me.[p]
((tense tense-10)
(time-frame past)
(tpattern ((:rt1 :precedes :st) (:rt1 :precedes :rt0)
(:rt1 :none :rt2) (:st :none :rt2)
(:et :none :rt2)))
(simple no)
(:! aspect-choice)
(verb-aspect root)
(beg ((tense past)))
(beg { ^ tensed-feature}))
tense 11
;; I am going to take the bus.[a]
;; The bus is going to be taken by me.[p]
((tense tense-11)
(time-frame present)
(tpattern ((:st :equals :rt1) (:rt1 :precedes :rt0)
(:st :none :rt2) (:et :none :rt2)
(:rt1 :none :rt2) (:rt0 :none :rt2)))
(simple no)
(:! aspect-choice)
(verb-aspect root)
(beg ((tense present)))
(beg { ^ tensed-feature}))
tense 12
;; I will be going to take.[a]
;; The bus will be going to be taken.[b]
((tense tense-12)
(time-frame future)
(tpattern ((:st :precedes :rt1)
(:rt1 :precedes :rt0)
(:st :none :rt2) (:et :none :rt2)
(:rt1 :none :rt2) (:rt0 :none :rt2)))
(simple no)
(:! aspect-choice)
(verb-aspect root)
(beg ((ending root)))
(aux ((lex "will")
(cat modal))))
tense 13
;; I was going to have taken the bus.[a]
;; The bus was going to have been taken by me.[p]
((tense tense-13)
(simple no)
(time-frame past)
(tpattern ((:rt1 :precedes :st) (:rt1 :precedes :rt0)
(:rt0 :precedes :rt2) (:st :none :rt3)
(:rt0 :none :rt3) (:rt1 :none :rt3)
(:rt2 :none :rt3) (:et :none :rt3)))
(:! aspect-choice)
(beg ((tense past)))
(beg { ^ tensed-feature})
(have-2 ((ending root)))
(verb-aspect past-participle))
tense 14
;; I am going to have taken the bus.[a]
;; The bus is going to have been taken by me.[p]
((tense tense-14)
(simple no)
(time-frame present)
(tpattern ((:st :equals :rt1) (:rt1 :precedes :rt0)
(:rt0 :precedes :rt2) (:st :none :rt3)
(:rt0 :none :rt3) (:rt1 :none :rt3)
(:rt2 :none :rt3)))
(:! aspect-choice)
(beg ((tense present)))
(beg { ^ tensed-feature})
(have-2 ((ending root)))
(verb-aspect past-participle))
tense 15
;; I will be going to have taken the bus.[a]
;; The bus will be going to have been taken by me.[p]
((tense tense-15)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt1) (:rt1 :precedes :rt0)
(:rt0 :precedes :rt2) (:st :none :rt3)
(:rt0 :none :rt3) (:rt1 :none :rt3)
(:rt2 :none :rt3)))
(:! aspect-choice)
(aux ((lex "will") (cat modal)))
(beg ((ending root)))
(have-2 ((ending root)))
(verb-aspect past-participle))
tense 16
;; I had been taking the bus.[a]
;; The bus had been being taken by me.[p]
((tense tense-16)
(simple no)
(time-frame past)
(tpattern ((:rt1 :precedes :st) (:rt0 :precedes :rt1)
(:rt0 :during :et) (:st :none :rt2)
(:rt0 :none :rt2) (:rt1 :none :rt2)))
(aspect ((alt (process event))))
(have-1 ((tense past)))
(have-1 { ^ tensed-feature})
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 17
;; I have been taking the bus.[a]
;; The bus has been being taken by me.[p]
((tense tense-17)
(time-frame present)
(simple no)
(tpattern ((:rt1 :equals :st) (:rt0 :precedes :rt1)
(:rt0 :during :et) (:st :none :rt2)
(:rt0 :none :rt2) (:rt1 :none :rt2)))
(aspect ((alt (process event))))
(have-1 ((tense present)))
(have-1 { ^ tensed-feature})
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 18
;; I will have been taking the bus.[a]
;; The bus will have been being taken by me.[p]
((tense tense-18)
(time-frame future)
(simple no)
(tpattern ((:rt1 :precedes :st) (:rt0 :precedes :rt1)
(:rt0 :during :et) (:st :none :rt2)
(:rt0 :none :rt2) (:rt1 :none :rt2)))
(aspect ((alt (process event))))
(aux ((lex "will") (cat modal)))
(have-1 ((ending root)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 19
;; I was going to be taking the bus.[a]
;; The bus was going to be being taking the bus.[p]
((tense tense-19)
(simple no)
(time-frame past)
(tpattern ((:rt1 :precedes :st) (:rt1 :precedes :rt0)
(:rt0 :during :et) (:st :none :rt2)
(:rt0 :none :rt2) (:rt1 :none :rt2)
(:et :none :rt2)))
(aspect ((alt aspect-19 (process event))))
(beg ((tense past)))
(beg { ^ tensed-feature})
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 20
;; I am going to be taking the bus.[a]
;; The bus is going to be being taken by me.[p]
((tense tense-20)
(time-frame present)
(simple no)
(tpattern ((:rt1 :equals :st) (:rt1 :precedes :rt0)
(:rt0 :during :et) (:st :none :rt2)
(:rt0 :none :rt2) (:rt1 :none :rt2)
(:et :none :rt2)))
(aspect ((alt aspect-20 (process event))))
(beg ((tense present)))
(beg { ^ tensed-feature})
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 21
;; I will be going to be taking the bus.[a]
;; The bus will be going to be being taken by me.[p]
((tense tense-21)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt1) (:rt1 :precedes :rt0)
(:rt0 :during :et) (:st :none :rt2)
(:rt0 :none :rt2) (:rt1 :none :rt2)
(:et :none :rt2)))
(aspect ((alt aspect-21 (process event))))
(aux ((lex "will") (cat modal)))
(beg ((ending root)))
(be-1 ((ending root)))
(verb-aspect present-participle))
;; ***** NO SEMANTICS
tense 22
;; I had been going to take the bus.[a]
;; The bus had been going to be taken by me.[p]
((tense tense-22)
(simple no)
(time-frame past)
;; (tpattern ((:rt0 :precedes :et)))
(have-1 ((tense past)))
(have-1 { ^ tensed-feature})
(beg ((ending past-participle)))
(verb-aspect root)
)
;; ***** NO SEMANTICS
tense 23
;; I have been going to take the bus.[a]
;; The bus has been going to be taken by me.[p]
((tense tense-23)
(simple no)
(time-frame present)
;; (tpattern ((:rt0 :precedes :et)))
(have-1 ((tense present)))
(have-1 { ^ tensed-feature})
(beg ((ending past-participle)))
(verb-aspect root)
)
;; ***** NO SEMANTICS
tense 24
;; I will have been going to take the bus.[a]
;; The bus will have been going to be taken by me.[p]
((tense tense-24)
(simple no)
(time-frame future)
;; (tpattern ((:rt0 :precedes :et)))
(aux ((lex "will") (cat modal)))
(have-1 ((ending root)))
(beg ((ending past-participle)))
(verb-aspect root)
)
tense 25
;; I had been going to have taken the bus.[a]
;; The bus had been going to have been taken by me.[p]???
((tense tense-25)
(simple no)
(time-frame past)
(tpattern ((:rt2 :precedes :st)
(:rt1 :precedes :rt2)
(:rt1 :precedes :rt0)
(:rt0 :precedes :rt3)))
(:! aspect-choice)
(verb-aspect past-participle)
(have-1 ((tense past)))
(have-1 {^ tensed-feature})
(beg ((ending past-participle)))
(have-2 ((ending root))))
tense 26
;; I have been going to have taken the bus.[a]
;; The bus has been going to have been taken by me.[p]???
((tense tense-26)
(simple no)
(time-frame present)
(tpattern ((:rt2 :equals :st)
(:rt1 :precedes :rt2)
(:rt1 :precedes :rt0)
(:rt0 :precedes :rt3)))
(:! aspect-choice)
(verb-aspect past-participle)
(have-1 ((tense present)))
(have-1 {^ tensed-feature})
(beg ((ending past-participle)))
(have-2 ((ending root))))
tense 27
;; I will have been going to have taken the bus.[a]
;; The bus will have been going to have been taken by me.[p]???
((tense tense-27)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt2)
(:rt1 :precedes :rt2)
(:rt1 :precedes :rt0)
(:rt0 :precedes :rt3)))
(:! aspect-choice)
(verb-aspect past-participle)
(aux ((lex "will") (cat modal)))
(have-1 ((ending root)))
(beg ((ending past-participle)))
(have-2 ((ending root))))
tense 28
;; I was going to have been taking the bus.[a]
;; The bus was going to have been being taken by me.[p]
((tense tense-28)
(simple no)
(time-frame past)
(tpattern ((:rt1 :precedes :st) (:rt1 :precedes :rt0)
(:rt0 :precedes :rt2) (:rt0 :during :et)))
(aspect ((alt aspect-28 (event process))))
first - verb
(tensed-feature ((tense past)))
(have-2 ((ending root)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 29
;; I am going to have been taking the bus.[a]
;; The bus is going to have been being taken by me.[p]
((tense tense-29)
(simple no)
(time-frame present)
(tpattern ((:rt1 :equals :st) (:rt1 :precedes :rt0)
(:rt0 :precedes :rt2) (:rt0 :during :et)))
(aspect ((alt aspect-29 (event process))))
(beg ((tense present)))
(beg { ^ tensed-feature})
(have-2 ((ending root)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 30
;; I will be going to have been taking the bus.[a]
;; The bus will be going to have been being taken by me.[p]
((tense tense-30)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt1) (:rt1 :precedes :rt0)
(:rt0 :precedes :rt2) (:rt0 :during :et)))
(aspect ((alt aspect-30 (event process))))
(aux ((lex "will") (cat modal)))
(beg ((ending root)))
(have-2 ((ending root)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 31
;; I had been going to be taking the bus.[a]
;; The bus had been going to be being taken by me.[p]
((tense tense-31)
(simple no)
(time-frame past)
(tpattern ((:rt2 :precedes :st) (:rt1 :precedes :rt2)
(:rt1 :precedes :rt0) (:rt0 :during :et)
(:st :none :rt3) (:rt0 :none :rt3) (:rt1 :none :rt3)
(:rt2 :none :rt3) (:et :none :rt3)))
(aspect ((alt (process event))))
(have-1 ((tense past)))
(have-1 { ^ tensed-feature})
(beg ((ending past-participle)))
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 32
;; I have been going to be taking the bus.[a]
;; The bus has been going to be being taken by me.[p]
((tense tense-32)
(simple no)
(time-frame present)
(tpattern ((:rt2 :equals :st) (:rt1 :precedes :rt2)
(:rt1 :precedes :rt0) (:rt0 :during :et)
(:st :none :rt3) (:rt0 :none :rt3) (:rt1 :none :rt3)
(:rt2 :none :rt3) (:et :none :rt3)))
(aspect ((alt (process event))))
(have-1 ((tense present)))
(have-1 { ^ tensed-feature})
(beg ((ending past-participle)))
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 33
;; I will have been going to be taking the bus.[a]
;; The bus will have been going to be being taken by me.[p]
((tense tense-33)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt1) (:rt1 :precedes :rt2)
(:rt1 :precedes :rt0) (:rt0 :during :et)
(:st :none :rt3) (:rt0 :none :rt3) (:rt1 :none :rt3)
(:rt2 :none :rt3) (:et :none :rt3)))
(aspect ((alt (process event))))
(aux ((lex "will") (cat modal)))
(have-1 ((ending root)))
(beg ((ending past-participle)))
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 34
;; I had been going to have been taking the bus.[a]
;; The bus had been going to have been being taken by me.[p]
((tense tense-34)
(simple no)
(time-frame past)
(tpattern ((:rt2 :precedes :st)
(:rt1 :precedes :rt2)
(:rt1 :precedes :rt0)
(:rt0 :precedes :rt3)
(:rt0 :during :et)))
(aspect ((alt (process event))))
(have-1 ((tense past)))
(have-1 { ^ tensed-feature})
(have-2 ((ending root)))
(beg ((ending past-participle)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 35
;; I have been going to have been taking the bus.[a]
;; The bus has been going to have been being taken by me.[p]
((tense tense-35)
(simple no)
(time-frame present)
(tpattern ((:st :equals :rt2)
(:rt1 :precedes :rt2)
(:rt1 :precedes :rt0)
(:rt0 :precedes :rt3)
(:rt0 :during :et)))
(aspect ((alt (process event))))
(have-1 ((tense present)))
(have-1 { ^ tensed-feature})
(have-2 ((ending root)))
(beg ((ending past-participle)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 36
;; I will have been going to have been taking the bus.[a]
;; The bus will have been going to have been being taken by me.[p]
((tense tense-36)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt1)
(:rt1 :precedes :rt2)
(:rt1 :precedes :rt0)
(:rt0 :precedes :rt3)
(:rt0 :during :et)))
(aspect ((alt (process event))))
(aux ((lex "will") (cat modal)))
(have-1 ((ending root)))
(have-2 ((ending root)))
(beg ((ending past-participle)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
))
;; aspect-choice: relate reference time rt0 to event time et according
;; to aspect.  (tpattern encodes Allen-style temporal constraints --
;; assumed from surrounding tense definitions; confirm in FUF manual.)
(def-alt aspect-choice
  (((aspect event)
    ;; punctual event: event time coincides with the reference time
    (tpattern ((:rt0 :equals :et))))
   ((aspect given)
    ;; durative aspect supplied by the input (stative or process):
    ;; the event extends around the reference time
    (aspect ((alt (stative process))))
    (tpattern ((:rt0 :during :et))))))
;; modality: select the modal auxiliary (if any) for the verb group and
;; decide which verb carries tense.  Branches are mutually exclusive;
;; only-one-modal (in simple-verb-group) guarantees at most one of
;; epistemic-modality / deontic-modality is present.
(def-alt modality
  (:demo "what modality is used w/ this verb?")
  (
   ;; No modality: the first verb is tensed directly; future still
   ;; requires the auxiliary "will" (cat modal).
   ((modality none)
    %TRACE-OFF%
    (control (control-demo "No modality in this clause."))
    %TRACE-ON%
    (tensed-feature {^ first-verb})
    (alt time-frame (:index time-frame)
      (((time-frame present))
       ((time-frame past))
       ((time-frame future)
        (aux ((lex "will") (cat modal))))
       )))
   ;; Epistemic "fact": realized exactly like no modality.
   ((epistemic-modality fact)
    (tensed-feature {^ first-verb})
    (alt (:index time-frame)
      (((time-frame present))
       ((time-frame past))
       ((time-frame future)
        (aux ((lex "will") (cat modal))))
       )))
   ;; Epistemic inference: "must" / "must've"; first verb stays in root
   ;; form after the modal.
   ((epistemic-modality inference)
    (first-verb ((ending root)))
    (alt (:index time-frame)
      (((time-frame present)
        (aux ((lex "must") (cat modal))))
       ((time-frame future)
        ;; there is already a will
        )
       ((time-frame past)
        (aux ((lex "must've") (cat modal))))
       )))
   ;; Epistemic possibility: "can" (present/future) / "could have" (past).
   ((epistemic-modality possible)
    (first-verb ((ending root)))
    (alt (:index time-frame)
      (((time-frame present)
        (aux ((lex "can") (cat modal))))
       ((time-frame future)
        (aux ((lex "can") (cat modal))))
       ((time-frame past)
        (aux ((lex "could have")))))))
   ;; Input supplies the epistemic modal lexeme directly.
   ((epistemic-modality given)
    (first-verb ((ending root)))
    (aux ((cat modal)
          (lex {^ ^ epistemic-modality}))))
   ;; Deontic duty: "must" / "must've".
   ((deontic-modality duty)
    (first-verb ((ending root)))
    (alt (:index time-frame)
      (((time-frame present)
        (aux ((lex "must") (cat modal))))
       ((time-frame future)
        (aux ((lex "must") (cat modal))))
       ((time-frame past)
        (aux ((lex "must've") (cat modal))))
       )))
   ;; Input supplies the deontic modal lexeme directly.
   ((deontic-modality given)
    (first-verb ((ending root)))
    (aux ((cat modal)
          (lex {^ ^ deontic-modality}))))
   ;; Generic: input supplies a modal lexeme under (modality ...).
   ((modality given)
    (first-verb ((ending root)))
    (aux ((cat modal)
          (lex {^ ^ modality}))))
   ))
;; voice-verb-group: configure the verb group according to voice.
;; Decides which verb carries tense, whether the auxiliary "do" is
;; needed (negation / question / insistence in simple tenses), and the
;; be-2 passive auxiliary.
;; NOTE(review): comment markers below were lost in extraction and have
;; been restored as ';;' comments.
(def-alt voice-verb-group (:index voice)
  (:wait {^ event lex})
  (
   ;; First: very special case of "to be"
   ;; passive is "to be" - no auxiliary for negation and question
   ((event ((lex "be")))
    (alt simple-be (:index simple)
      ;; for simple tenses, don't add "do"
      (((simple yes)
        ;; (event {^ first-verb})
        (event {^ tensed-feature}))
       ((simple no)
        (event ((ending {^ ^ verb-aspect})))))))
   ;; then special case of all copulae: passive is themselves
   ;; and all other verbs at active
   ((alt (((voice active))
          ((copula #(under yes)))))
    (alt simple-do (:index simple)
      (
       ;; For simple tenses, the auxillary "do" must be added
       ;; e.g. I do not open the door
       ;; or Did you open the door?
       ((simple yes)
        (alt simple-polarity (:index polarity)
          (((polarity negative) ;; e.g. I did not see it.
            (aux ((lex "do") (cat verb)))
            (aux {^ tensed-feature})
            (event ((ending root))))
           ((polarity positive) ;; e.g. I saw it
            (alt simple-interrogative (:index interrogative)
              (:wait {^ scope synt-funct})
              ((;; When wh question where scope is subject NOT EMBEDDED
                ;; don't use aux.
                ;; Example: Who do you think really won the prize?
                (interrogative wh)
                ({^ scope} ((synt-funct #(under subject))
                            (clause-level ((embedded no)))))
                (aux none)
                (aux-specified yes)
                (event {^ tensed-feature}))
               ((interrogative interrogative)
                ;; yes/no or embedded wh question: front-able "do"
                (aux ((lex "do") (cat verb)))
                (aux {^ tensed-feature})
                (aux-specified yes)
                (event ((ending root))))
               ((alt simple-insistence (:index insistence)
                  ;; declarative: "do" only for insistence ("I DO like it")
                  (((insistence no)
                    (aux none)
                    (aux-specified yes)
                    (event {^ tensed-feature}))
                   ((insistence yes)
                    (aux ((lex "do") (cat verb)))
                    (aux {^ tensed-feature})
                    (aux-specified yes)
                    (event ((ending root)))))))))))))
       ((simple no)
        ;; there is an aux of one kind or another,
        ;; the default ending of the event should be
        ;; set event ending???
        (aux-specified yes)
        (event ((ending {^ ^ verb-aspect})))))))
   ;; Passive (non-copula): event is a past participle, be-2 is the
   ;; passive auxiliary and carries tense in simple tenses.
   ((voice passive)
    (copula no)
    (aux-specified yes)
    (alt passive-simple (:index simple)
      (;; no auxilliary necessarily
       ((simple yes)
        (event ((ending past-participle)))
        (be-2 {^ tensed-feature}))
       ;; there's an auxilliary
       ((simple no)
        (event ((ending past-participle)))
        (be-2 ((ending {^ ^ verb-aspect})))))))))
;; notf-adverb-placement: position the negation particle (notf) and the
;; adverb inside the verb group.  The cascade walks the auxiliary slots
;; in surface order (aux, have-1, beg, have-2, be-1, be-2) and places
;; "not" + adverb right after the first one present; with no auxiliary
;; at all, the adverb precedes the event and "not" follows it.
;; NOTE(review): the TODO line below lost its ';;' marker in extraction
;; and has been restored as a comment.
(def-alt notf-adverb-placement (:index polarity)
  ;; ***** WORK ON NEGATION OF NON-FINITE
  ;; @@TODO: HERE - AUX NONE must wait - for what?????
  (((polarity positive)
    ;; positive polarity: no negation particle at all
    (adverb none))
   ((alt aux-notf (:index aux)
      (:wait aux-specified)
      (((aux given)
        (pattern (dots aux notf adverb dots)))
       ((aux none)
        (alt have-1-notf (:index have-1)
          (((have-1 given)
            (pattern (dots have-1 notf adverb dots)))
           ((have-1 none)
            (alt beg-notf (:index beg)
              (((beg given)
                (pattern (dots beg notf adverb dots)))
               ((beg none)
                (alt have-2-notf (:index have-2)
                  (((have-2 given)
                    (pattern (dots have-2 notf adverb dots)))
                   ((have-2 none)
                    (alt be-1-notf (:index be-1)
                      (((be-1 given)
                        (pattern (dots be-1 notf adverb dots)))
                       ((be-1 none)
                        (alt be-2-notf (:index be-2)
                          (((be-2 given)
                            (pattern (dots be-2 notf adverb dots)))
                           ((be-2 none)
                            ;; there are no aux'es but the polarity
                            ;; is negative. It must be the "be"
                            ;; special case: I am not a tree.
                            ;; or else there is only an adverb:
                            ;; "I never do it."
                            (pattern (dots adverb event notf dots)))))
                        )))                     ; close be-1-notf
                    )))                         ; close have-2-notf
                )))                             ; close beg-notf
            )))                                 ; close have-1-notf
        )))                                     ; close aux-notf
    )))                                         ; close branch / def-alt
;; Register irregular conjugation tables with the morphology module.
;; Each entry lists five surface forms -- presumably (root, 3rd-sg
;; present, past, present-participle, past-participle); confirm against
;; the store-verbs definition.
(store-verbs '(("run up" "runs up" "ran up" "running up" "run up")
               ("open" "opens" "opened" "opening" "opened")))
;; ============================================================
(provide "verb-group")
;; ============================================================
| null | https://raw.githubusercontent.com/melhadad/fuf/57bd0e31afc6aaa03b85f45f4c7195af701508b8/surge/code/verb-group.lisp | lisp | -*- Mode:Lisp; Syntax:Common-Lisp; Package: -*-
-----------------------------------------------------------------------
File: verb-group.lisp
Description: Grammatical systems for verb group
- Fixed problem of wh/inversion in
long-distance (Who do you think won?) use do.
(embedded no).
-----------------------------------------------------------------------
Permission to use, copy, and/or distribute for any purpose and
without fee is hereby granted, provided that both the above copyright
notice and this permission notice appear in all copies and derived works.
Fees for distribution or use of this software or derived works may only
be charged with express written permission of the copyright holder.
-----------------------------------------------------------------------
default is not
default is not insistent
the tensed feature may be the event or any of the auxs.
there is always an event verb -- its also called the main verb.
default is no modality
Now, the tense feature should be selected. Deal
with voice, interrogative and polarity.
Now fill the slots for auxiliaries if necessary
we need to add the going to which is never tensed. So,
make it a cat modal.
Put everything together. Notf and adverb have already been placed.
For interrogative, the tensed-feature has been fronted, so
don't put it here
If tensed-feature not bound yet, it means aux should be
fronted by the question.
No aux: front adverb "who never owned this book"
***** Done only for case of non-modal finite tense
I take the bus.[a]
The bus is taken by me.[p]
I took the bus.[a]
The bus was taken by me.[p]
I will take the bus.[a]
The bus will be taken by me.[b]
for the future tenses, the auxillary "will" is
treated as a modal. There is no tensed feature
and no agreement is necessary.
I had taken the bus.(a)
The bus had been taken by me.(p)
I have taken the bus
The bus has been taken by me.
I will have taken the bus.[a]
The bus will have been taken by me.[p]
I was taking the bus.(a)
The bus was being taken by me.(p)
I am taking the bus.(a)
The bus is being taken by me.(p)
I will be taking the bus.(a)
The bus will be being taken by me.(p)
I was going to take the bus.[a]
The bus was going to be taken by me.[p]
I am going to take the bus.[a]
The bus is going to be taken by me.[p]
I will be going to take.[a]
The bus will be going to be taken.[b]
I was going to have taken the bus.[a]
The bus was going to have been taken by me.[p]
I am going to have taken the bus.[a]
The bus is going to have been taken by me.[p]
I will be going to have taken the bus.[a]
The bus will be going to have been taken by me.[p]
I had been taking the bus.[a]
The bus had been being taken by me.[p]
I have been taking the bus.[a]
The bus has been being taken by me.[p]
I will have been taking the bus.[a]
The bus will have been being taken by me.[p]
I was going to be taking the bus.[a]
The bus was going to be being taking the bus.[p]
I am going to be taking the bus.[a]
The bus is going to be being taken by me.[p]
I will be going to be taking the bus.[a]
The bus will be going to be being taken by me.[p]
***** NO SEMANTICS
I had been going to take the bus.[a]
The bus had been going to be taken by me.[p]
(tpattern ((:rt0 :precedes :et)))
***** NO SEMANTICS
I have been going to take the bus.[a]
The bus has been going to be taken by me.[p]
(tpattern ((:rt0 :precedes :et)))
***** NO SEMANTICS
I will have been going to take the bus.[a]
The bus will have been going to be taken by me.[p]
(tpattern ((:rt0 :precedes :et)))
I had been going to have taken the bus.[a]
The bus had been going to have been taken by me.[p]???
I have been going to have taken the bus.[a]
The bus has been going to have been taken by me.[p]???
I will have been going to have taken the bus.[a]
The bus will have been going to have been taken by me.[p]???
I was going to have been taking the bus.[a]
The bus was going to have been being taken by me.[p]
I am going to have been taking the bus.[a]
The bus is going to have been being taken by me.[p]
I will be going to have been taking the bus.[a]
The bus will be going to have been being taken by me.[p]
I had been going to be taking the bus.[a]
The bus had been going to be being taken by me.[p]
I have been going to be taking the bus.[a]
The bus has been going to be being taken by me.[p]
I will have been going to be taking the bus.[a]
The bus will have been going to be being taken by me.[p]
I had been going to have been taking the bus.[a]
The bus had been going to have been being taken by me.[p]
I have been going to have been taking the bus.[a]
The bus has been going to have been being taken by me.[p]
I will have been going to have been taking the bus.[a]
The bus will have been going to have been being taken by me.[p]
there is already a will
passive is "to be" - no auxiliary for negation and question
for simple tenses, don't add "do"
and all other verbs at active
For simple tenses, the auxillary "do" must be added
e.g. I do not open the door
or Did you open the door?
e.g. I did not see it.
e.g. I saw it
When wh question where scope is subject NOT EMBEDDED
don't use aux.
Example: Who do you think really won the prize?
the default ending of the event should be
set event ending???
no auxilliary necessarily
there's an auxilliary
***** WORK ON NEGATION OF NON-FINITE
there are no aux'es but the polarity
is negative. It must be the "be"
special case: I am not a tree.
or else there is only an adverb:
"I never do it."
============================================================
============================================================ | Author :
Created : 19 Dec 1991
Modified : 18 Aug 1992 Added adverb ( inherited from clause )
5 Jul 1995 SURGE 2.2 VERSION
FUF - a functional unification - based text generation system . ( . 5.4 )
Copyright ( c ) 1987 - 2014 by . all rights reserved .
THIS SOFTWARE IS PROVIDED ` ` AS IS '' WITHOUT EXPRESS OR IMPLIED WARRANTY .
(in-package "FUG5")
BRANCH FOR SIMPLE - VERB - GROUP
;; simple-verb-group: the non-complex verb group constituent.
;; Lexicalizes the verb, handles polarity, tense, modality, voice and
;; negation placement (via the :! sub-alternations), fills the auxiliary
;; slots, and orders the constituents.
;; NOTE(review): one comment line below had lost its ';;' marker in
;; extraction and has been restored; path spacing ({^ ^ ...}) has been
;; normalized to standard FUF notation.
(def-conj simple-verb-group
  (cat simple-verb-group)
  (generic-cat verb-group)
  (complex none)
  ;; Lexicalization: either a concept is given (lexical-chooser picks
  ;; the lex via lexical-verb) or the lex is supplied directly.
  (alt verb-lexicalization (:demo "Lexicalization of verb")
    (((concept given)
      (lexical-verb ((concept {^ ^ concept})
                     (subcat {^ ^ subcat})
                     (lex {^ ^ lex})
                     (cat lex-verb))))
     ((lex given))))
  ;; Negative polarity introduces the particle "not" (placed later by
  ;; notf-adverb-placement).
  (alt verb-polarity (:index polarity)
    (((polarity positive)
      (notf none))
     ((polarity #(under negative))
      (notf ((lex "not") (cat adv))))))
  ;; The tensed verb agrees with the subject: person/number/ending are
  ;; percolated from the verb group.
  (tensed-feature ((person {^ ^ person})
                   (number {^ ^ number})
                   (ending {^ ^ ending})))
  ;; The event is the main verb.
  (event ((cat verb)
          (lex {^ ^ lex})))
  ;; deal with modality: allow only one type or no modality at all
  (alt only-one-modal
    (((epistemic-modality none)
      (deontic-modality none)
      (modality none))
     ((epistemic-modality given)
      (deontic-modality none)
      (modality epistemic))
     ((deontic-modality given)
      (epistemic-modality none)
      (modality deontic))))
  ;; Delegate to the tense / modality / voice / negation systems.
  (:! tense-selection)
  (:! modality)
  (:! voice-verb-group)
  (:! notf-adverb-placement)
  ;; Fill each auxiliary slot when the tense system requested it;
  ;; otherwise remove it from the constituent set (cset).
  (alt (((be-1 none)
         (cset ((- be-1))))
        ((be-1 given)
         (be-1 ((lex "be")
                (cat verb))))))
  (alt (((be-2 none)
         (cset ((- be-2))))
        ((be-2 given)
         (be-2 ((lex "be")
                (cat verb))))))
  (alt (((have-1 none)
         (cset ((- have-1))))
        ((have-1 given)
         (have-1 ((lex "have")
                  (cat verb))))))
  (alt (((have-2 none)
         (cset ((- have-2))))
        ((have-2 given)
         (have-2 ((lex "have")
                  (cat verb))))))
  ;; "beg" is the "be going to" construction: tensed "be" plus the
  ;; untensed modal "going to".
  (alt (((beg none)
         (cset ((- beg going-to))))
        ((beg given)
         (beg ((lex "be")
               (cat verb)))
         (going-to ((lex "going to")
                    (cat modal))))))
  ;; Resolve whether an aux exists at all (waited on by other systems).
  (alt aux-specified
    (:wait aux-specified)
    (((aux-specified no)
      (aux none))
     ((aux given))
     ((aux none)
      (cset ((- aux))))))
  ;; Surface order of the verb group.  For interrogatives the tensed
  ;; feature (and notf / adverb) are gapped here -- presumably fronted
  ;; by the clause grammar via fronted-not / fronted-adverb; confirm.
  (alt pattern-interrogative (:index interrogative)
    (((interrogative none)
      (pattern (aux dots have-1 dots beg dots going-to dots
                have-2 dots be-1 dots be-2 dots event dots)))
     ((interrogative interrogative)
      (tensed-feature ((gap yes)))
      (alt aux-or-tensed (:wait {^ tensed-feature lex})
        (((tensed-feature ((lex given))))
         ((tensed-feature {^ aux}))))
      (alt notf-interrogative
        (((notf none))
         ((notf ((gap yes)
                 (lex {^ ^ ^ fronted-not lex})
                 (cat {^ ^ ^ fronted-not cat}))))))
      (alt adverb-interrogative
        (:wait aux-specified)
        (((adverb none))
         ((aux given))
         ((aux none)
          (adverb ((gap yes)
                   (lex {^ ^ ^ fronted-adverb lex})
                   (cat {^ ^ ^ fronted-adverb cat}))))))
      (pattern (aux dots have-1 dots beg dots going-to dots
                have-2 dots be-1 dots be-2 dots event dots))))))
(def-alt tense-selection (:index tense)
(:demo "what is the tense?")
SIMPLE TENSES
(
tense 2 , the default :
((tense present)
(tpattern ((:st :equals :rt0)
(:st :none :rt1)
(:et :none :rt1)
(:rt0 :none :rt1)))
(:! aspect-choice)
(time-frame present)
(alt (((simple yes)
(modality none))
((simple no))))
(verb-aspect root)
(tensed-feature ((tense present))))
Tense 1 : past
((tense past)
(tpattern ((:rt0 :precedes :st)
(:st :none :rt1)
(:et :none :rt1)
(:rt0 :none :rt1)))
(alt (((simple yes)
(modality none))
((simple no))))
(time-frame past)
(:! aspect-choice)
(tensed-feature ((tense past))))
tense 3
((tense future)
(tpattern ((:st :precedes :rt0)
(:st :none :rt1)
(:et :none :rt1)
(:rt0 :none :rt1)))
(:! aspect-choice)
(simple no)
(time-frame future)
(verb-aspect root))
tense 4
((tense past-perfect)
(time-frame past)
(simple no)
(tpattern ((:rt1 :precedes :st)
(:rt0 :precedes :rt1)
(:st :none :rt2)
(:rt1 :none :rt2)
(:rt0 :none :rt2)
(:et :none :rt2)))
(first-verb {^ have-1})
(tensed-feature ((tense past)))
(:! aspect-choice)
(verb-aspect past-participle))
tense 5
((tense present-perfect)
(time-frame present)
(simple no)
(tpattern ((:rt0 :precedes :rt1)
(:rt1 :equals :st)
(:st :none :rt2)
(:rt1 :none :rt2)
(:et :none :rt2)
(:rt0 :none :rt2)))
(first-verb {^ have-1})
(tensed-feature ((tense present)))
(:! aspect-choice)
(verb-aspect past-participle))
tense 6
((tense future-perfect)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt0)
(:rt0 :precedes :rt1)
(:st :none :rt2)
(:rt1 :none :rt2)
(:et :none :rt2)
(:rt0 :none :rt2)))
(aux ((lex "will")
(cat modal)))
(tensed-feature {^ aux})
(first-verb {^ aux})
(have-1 ((ending root)))
(:! aspect-choice)
(verb-aspect past-participle))
tense 7
((tense past-progressive)
(simple no)
(time-frame past)
(tpattern ((:rt0 :precedes :st)
(:rt0 :includes :et)
(:st :none :rt1)
(:et :none :rt1)
(:rt0 :none :rt1)))
(aspect ((alt aspect-7 (event process))))
(be-1 ((tense past)))
(be-1 { ^ tensed-feature})
(verb-aspect present-participle))
tense 8
((tense present-progressive)
(simple no)
(time-frame present)
(tpattern ((:rt0 :precedes :st)
(:rt0 :includes :et)
(:st :none :rt1)
(:et :none :rt1)
(:rt0 :none :rt1)))
(aspect ((alt aspect-8 (event process))))
(be-1 ((tense present)))
(be-1 { ^ tensed-feature})
(verb-aspect present-participle))
tense 9
((tense future-progressive)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt0)
(:rt0 :includes :et)
(:st :none :rt1)
(:et :none :rt1)
(:rt0 :none :rt1)))
(aspect ((alt aspect-9 (event process))))
(aux ((lex "will")
(cat modal)))
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 10
((tense tense-10)
(time-frame past)
(tpattern ((:rt1 :precedes :st) (:rt1 :precedes :rt0)
(:rt1 :none :rt2) (:st :none :rt2)
(:et :none :rt2)))
(simple no)
(:! aspect-choice)
(verb-aspect root)
(beg ((tense past)))
(beg { ^ tensed-feature}))
tense 11
((tense tense-11)
(time-frame present)
(tpattern ((:st :equals :rt1) (:rt1 :precedes :rt0)
(:st :none :rt2) (:et :none :rt2)
(:rt1 :none :rt2) (:rt0 :none :rt2)))
(simple no)
(:! aspect-choice)
(verb-aspect root)
(beg ((tense present)))
(beg { ^ tensed-feature}))
tense 12
((tense tense-12)
(time-frame future)
(tpattern ((:st :precedes :rt1)
(:rt1 :precedes :rt0)
(:st :none :rt2) (:et :none :rt2)
(:rt1 :none :rt2) (:rt0 :none :rt2)))
(simple no)
(:! aspect-choice)
(verb-aspect root)
(beg ((ending root)))
(aux ((lex "will")
(cat modal))))
tense 13
((tense tense-13)
(simple no)
(time-frame past)
(tpattern ((:rt1 :precedes :st) (:rt1 :precedes :rt0)
(:rt0 :precedes :rt2) (:st :none :rt3)
(:rt0 :none :rt3) (:rt1 :none :rt3)
(:rt2 :none :rt3) (:et :none :rt3)))
(:! aspect-choice)
(beg ((tense past)))
(beg { ^ tensed-feature})
(have-2 ((ending root)))
(verb-aspect past-participle))
tense 14
((tense tense-14)
(simple no)
(time-frame present)
(tpattern ((:st :equals :rt1) (:rt1 :precedes :rt0)
(:rt0 :precedes :rt2) (:st :none :rt3)
(:rt0 :none :rt3) (:rt1 :none :rt3)
(:rt2 :none :rt3)))
(:! aspect-choice)
(beg ((tense present)))
(beg { ^ tensed-feature})
(have-2 ((ending root)))
(verb-aspect past-participle))
tense 15
((tense tense-15)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt1) (:rt1 :precedes :rt0)
(:rt0 :precedes :rt2) (:st :none :rt3)
(:rt0 :none :rt3) (:rt1 :none :rt3)
(:rt2 :none :rt3)))
(:! aspect-choice)
(aux ((lex "will") (cat modal)))
(beg ((ending root)))
(have-2 ((ending root)))
(verb-aspect past-participle))
tense 16
((tense tense-16)
(simple no)
(time-frame past)
(tpattern ((:rt1 :precedes :st) (:rt0 :precedes :rt1)
(:rt0 :during :et) (:st :none :rt2)
(:rt0 :none :rt2) (:rt1 :none :rt2)))
(aspect ((alt (process event))))
(have-1 ((tense past)))
(have-1 { ^ tensed-feature})
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 17
((tense tense-17)
(time-frame present)
(simple no)
(tpattern ((:rt1 :equals :st) (:rt0 :precedes :rt1)
(:rt0 :during :et) (:st :none :rt2)
(:rt0 :none :rt2) (:rt1 :none :rt2)))
(aspect ((alt (process event))))
(have-1 ((tense present)))
(have-1 { ^ tensed-feature})
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 18
((tense tense-18)
(time-frame future)
(simple no)
(tpattern ((:rt1 :precedes :st) (:rt0 :precedes :rt1)
(:rt0 :during :et) (:st :none :rt2)
(:rt0 :none :rt2) (:rt1 :none :rt2)))
(aspect ((alt (process event))))
(aux ((lex "will") (cat modal)))
(have-1 ((ending root)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 19
((tense tense-19)
(simple no)
(time-frame past)
(tpattern ((:rt1 :precedes :st) (:rt1 :precedes :rt0)
(:rt0 :during :et) (:st :none :rt2)
(:rt0 :none :rt2) (:rt1 :none :rt2)
(:et :none :rt2)))
(aspect ((alt aspect-19 (process event))))
(beg ((tense past)))
(beg { ^ tensed-feature})
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 20
((tense tense-20)
(time-frame present)
(simple no)
(tpattern ((:rt1 :equals :st) (:rt1 :precedes :rt0)
(:rt0 :during :et) (:st :none :rt2)
(:rt0 :none :rt2) (:rt1 :none :rt2)
(:et :none :rt2)))
(aspect ((alt aspect-20 (process event))))
(beg ((tense present)))
(beg { ^ tensed-feature})
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 21
((tense tense-21)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt1) (:rt1 :precedes :rt0)
(:rt0 :during :et) (:st :none :rt2)
(:rt0 :none :rt2) (:rt1 :none :rt2)
(:et :none :rt2)))
(aspect ((alt aspect-21 (process event))))
(aux ((lex "will") (cat modal)))
(beg ((ending root)))
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 22
((tense tense-22)
(simple no)
(time-frame past)
(have-1 ((tense past)))
(have-1 { ^ tensed-feature})
(beg ((ending past-participle)))
(verb-aspect root)
)
tense 23
((tense tense-23)
(simple no)
(time-frame present)
(have-1 ((tense present)))
(have-1 { ^ tensed-feature})
(beg ((ending past-participle)))
(verb-aspect root)
)
tense 24
((tense tense-24)
(simple no)
(time-frame future)
(aux ((lex "will") (cat modal)))
(have-1 ((ending root)))
(beg ((ending past-participle)))
(verb-aspect root)
)
tense 25
((tense tense-25)
(simple no)
(time-frame past)
(tpattern ((:rt2 :precedes :st)
(:rt1 :precedes :rt2)
(:rt1 :precedes :rt0)
(:rt0 :precedes :rt3)))
(:! aspect-choice)
(verb-aspect past-participle)
(have-1 ((tense past)))
(have-1 {^ tensed-feature})
(beg ((ending past-participle)))
(have-2 ((ending root))))
tense 26
((tense tense-26)
(simple no)
(time-frame present)
(tpattern ((:rt2 :equals :st)
(:rt1 :precedes :rt2)
(:rt1 :precedes :rt0)
(:rt0 :precedes :rt3)))
(:! aspect-choice)
(verb-aspect past-participle)
(have-1 ((tense present)))
(have-1 {^ tensed-feature})
(beg ((ending past-participle)))
(have-2 ((ending root))))
tense 27
((tense tense-27)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt2)
(:rt1 :precedes :rt2)
(:rt1 :precedes :rt0)
(:rt0 :precedes :rt3)))
(:! aspect-choice)
(verb-aspect past-participle)
(aux ((lex "will") (cat modal)))
(have-1 ((ending root)))
(beg ((ending past-participle)))
(have-2 ((ending root))))
tense 28
((tense tense-28)
(simple no)
(time-frame past)
(tpattern ((:rt1 :precedes :st) (:rt1 :precedes :rt0)
(:rt0 :precedes :rt2) (:rt0 :during :et)))
(aspect ((alt aspect-28 (event process))))
first - verb
(tensed-feature ((tense past)))
(have-2 ((ending root)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 29
((tense tense-29)
(simple no)
(time-frame present)
(tpattern ((:rt1 :equals :st) (:rt1 :precedes :rt0)
(:rt0 :precedes :rt2) (:rt0 :during :et)))
(aspect ((alt aspect-29 (event process))))
(beg ((tense present)))
(beg { ^ tensed-feature})
(have-2 ((ending root)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 30
((tense tense-30)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt1) (:rt1 :precedes :rt0)
(:rt0 :precedes :rt2) (:rt0 :during :et)))
(aspect ((alt aspect-30 (event process))))
(aux ((lex "will") (cat modal)))
(beg ((ending root)))
(have-2 ((ending root)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 31
((tense tense-31)
(simple no)
(time-frame past)
(tpattern ((:rt2 :precedes :st) (:rt1 :precedes :rt2)
(:rt1 :precedes :rt0) (:rt0 :during :et)
(:st :none :rt3) (:rt0 :none :rt3) (:rt1 :none :rt3)
(:rt2 :none :rt3) (:et :none :rt3)))
(aspect ((alt (process event))))
(have-1 ((tense past)))
(have-1 { ^ tensed-feature})
(beg ((ending past-participle)))
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 32
((tense tense-32)
(simple no)
(time-frame present)
(tpattern ((:rt2 :equals :st) (:rt1 :precedes :rt2)
(:rt1 :precedes :rt0) (:rt0 :during :et)
(:st :none :rt3) (:rt0 :none :rt3) (:rt1 :none :rt3)
(:rt2 :none :rt3) (:et :none :rt3)))
(aspect ((alt (process event))))
(have-1 ((tense present)))
(have-1 { ^ tensed-feature})
(beg ((ending past-participle)))
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 33
((tense tense-33)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt1) (:rt1 :precedes :rt2)
(:rt1 :precedes :rt0) (:rt0 :during :et)
(:st :none :rt3) (:rt0 :none :rt3) (:rt1 :none :rt3)
(:rt2 :none :rt3) (:et :none :rt3)))
(aspect ((alt (process event))))
(aux ((lex "will") (cat modal)))
(have-1 ((ending root)))
(beg ((ending past-participle)))
(be-1 ((ending root)))
(verb-aspect present-participle))
tense 34
((tense tense-34)
(simple no)
(time-frame past)
(tpattern ((:rt2 :precedes :st)
(:rt1 :precedes :rt2)
(:rt1 :precedes :rt0)
(:rt0 :precedes :rt3)
(:rt0 :during :et)))
(aspect ((alt (process event))))
(have-1 ((tense past)))
(have-1 { ^ tensed-feature})
(have-2 ((ending root)))
(beg ((ending past-participle)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 35
((tense tense-35)
(simple no)
(time-frame present)
(tpattern ((:st :equals :rt2)
(:rt1 :precedes :rt2)
(:rt1 :precedes :rt0)
(:rt0 :precedes :rt3)
(:rt0 :during :et)))
(aspect ((alt (process event))))
(have-1 ((tense present)))
(have-1 { ^ tensed-feature})
(have-2 ((ending root)))
(beg ((ending past-participle)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
tense 36
((tense tense-36)
(simple no)
(time-frame future)
(tpattern ((:st :precedes :rt1)
(:rt1 :precedes :rt2)
(:rt1 :precedes :rt0)
(:rt0 :precedes :rt3)
(:rt0 :during :et)))
(aspect ((alt (process event))))
(aux ((lex "will") (cat modal)))
(have-1 ((ending root)))
(have-2 ((ending root)))
(beg ((ending past-participle)))
(be-1 ((ending past-participle)))
(verb-aspect present-participle))
))
(def-alt aspect-choice
(((aspect event)
(tpattern ((:rt0 :equals :et))))
((aspect given)
(aspect ((alt (stative process))))
(tpattern ((:rt0 :during :et))))))
(def-alt modality
(:demo "what modality is used w/ this verb?")
(
((modality none)
%TRACE-OFF%
(control (control-demo "No modality in this clause."))
%TRACE-ON%
(tensed-feature {^ first-verb})
(alt time-frame (:index time-frame)
(((time-frame present))
((time-frame past))
((time-frame future)
(aux ((lex "will") (cat modal))))
)))
((epistemic-modality fact)
(tensed-feature {^ first-verb})
(alt (:index time-frame)
(((time-frame present))
((time-frame past))
((time-frame future)
(aux ((lex "will") (cat modal))))
)))
((epistemic-modality inference)
(first-verb ((ending root)))
(alt (:index time-frame)
(((time-frame present)
(aux ((lex "must") (cat modal))))
((time-frame future)
)
((time-frame past)
(aux ((lex "must've") (cat modal))))
)))
((epistemic-modality possible)
(first-verb ((ending root)))
(alt (:index time-frame)
(((time-frame present)
(aux ((lex "can") (cat modal))))
((time-frame future)
(aux ((lex "can") (cat modal))))
((time-frame past)
(aux ((lex "could have")))))))
((epistemic-modality given)
(first-verb ((ending root)))
(aux ((cat modal)
(lex {^ ^ epistemic-modality}))))
((deontic-modality duty)
(first-verb ((ending root)))
(alt (:index time-frame)
(((time-frame present)
(aux ((lex "must") (cat modal))))
((time-frame future)
(aux ((lex "must") (cat modal))))
((time-frame past)
(aux ((lex "must've") (cat modal))))
)))
((deontic-modality given)
(first-verb ((ending root)))
(aux ((cat modal)
(lex {^ ^ deontic-modality}))))
((modality given)
(first-verb ((ending root)))
(aux ((cat modal)
(lex {^ ^ modality}))))
))
(def-alt voice-verb-group (:index voice)
(:wait {^ event lex})
(
First : very special case of " to be "
((event ((lex "be")))
(alt simple-be (:index simple)
(((simple yes)
( event { ^ first - verb } )
(event {^ tensed-feature}))
((simple no)
(event ((ending {^ ^ verb-aspect})))))))
then special case of all copulae : passive is themselves
((alt (((voice active))
((copula #(under yes)))))
(alt simple-do (:index simple)
(
((simple yes)
(alt simple-polarity (:index polarity)
(aux ((lex "do") (cat verb)))
(aux {^ tensed-feature})
(event ((ending root))))
(alt simple-interrogative (:index interrogative)
(:wait {^ scope synt-funct})
(interrogative wh)
({^ scope} ((synt-funct #(under subject))
(clause-level ((embedded no)))))
(aux none)
(aux-specified yes)
(event { ^ tensed-feature}))
((interrogative interrogative)
(aux ((lex "do") (cat verb)))
(aux {^ tensed-feature})
(aux-specified yes)
(event ((ending root))))
((alt simple-insistence (:index insistence)
(((insistence no)
(aux none)
(aux-specified yes)
(event { ^ tensed-feature}))
((insistence yes)
(aux ((lex "do") (cat verb)))
(aux {^ tensed-feature})
(aux-specified yes)
(event ((ending root)))))))))))))
((simple no)
there is an aux of one kind or another ,
(aux-specified yes)
(event ((ending {^ ^ verb-aspect})))))))
((voice passive)
(copula no)
(aux-specified yes)
(alt passive-simple (:index simple)
((simple yes)
(event ((ending past-participle)))
(be-2 { ^ tensed-feature}))
((simple no)
(event ((ending past-participle)))
(be-2 ((ending { ^ ^ verb-aspect}))))) )) ))
(def-alt notf-adverb-placement (:index polarity)
@@TODO : HERE - AUX NONE must wait - for what ? ? ? ? ?
(((polarity positive)
(adverb none))
(
(alt aux-notf (:index aux)
(:wait aux-specified)
(((aux given)
(pattern (dots aux notf adverb dots)))
((aux none)
(alt have-1-notf (:index have-1)
(((have-1 given)
(pattern (dots have-1 notf adverb dots)))
((have-1 none)
(alt beg-notf (:index beg)
(((beg given)
(pattern (dots beg notf adverb dots)))
((beg none)
(alt have-2-notf (:index have-2)
(((have-2 given)
(pattern (dots have-2 notf adverb dots)))
((have-2 none)
(alt be-1-notf (:index be-1)
(((be-1 given)
(pattern (dots be-1 notf adverb dots)))
((be-1 none)
(alt be-2-notf (:index be-2)
(((be-2 given)
(pattern (dots be-2 notf adverb dots)))
((be-2 none)
(pattern (dots adverb event notf dots))))
)))
)))
)))
)))
)))
)))
)
(store-verbs '(("run up" "runs up" "ran up" "running up" "run up")
("open" "opens" "opened" "opening" "opened")))
(provide "verb-group")
|
a9b20169e8b80085a132acb503098e94251acc930603e23705a7f3f77085c934 | spechub/Hets | OMS.hs | module PGIP.GraphQL.Resolver.OMS (resolve) where
import PGIP.GraphQL.Resolver.ToResult
import PGIP.GraphQL.Result as GraphQLResult
import PGIP.GraphQL.Result.Action as GraphQLResultAction
import PGIP.GraphQL.Result.Mapping as GraphQLResultMapping
import PGIP.GraphQL.Result.PremiseSelection as GraphQLResultPremiseSelection
import PGIP.GraphQL.Result.ReasonerConfiguration as GraphQLResultReasonerConfiguration
import PGIP.GraphQL.Result.ReasoningAttempt as GraphQLResultReasoningAttempt
import PGIP.GraphQL.Result.Sentence as GraphQLResultSentence
import PGIP.GraphQL.Result.StringReference (StringReference (..))
import PGIP.GraphQL.Result.Symbol as GraphQLResultSymbol
import PGIP.Shared
import Driver.Options
import Persistence.Database
import Persistence.Schema as DatabaseSchema
import Persistence.Utils
import Database.Esqueleto
import Control.Monad.IO.Class (MonadIO (..))
import Control.Monad.Fail ()
resolve :: HetcatsOpts -> Cache -> String -> IO (Maybe GraphQLResult.Result)
resolve opts _ locIdVar =
onDatabase (databaseConfig opts) $ resolveDB locIdVar
resolveDB :: (MonadIO m, MonadFail m) => String -> DBMonad m (Maybe GraphQLResult.Result)
resolveDB locIdVar = do
omsL <-
select $ from $ \(oms `InnerJoin` loc_id_bases
`InnerJoin` conservativity_statuses
`LeftOuterJoin` file_ranges
`LeftOuterJoin` loc_id_basesFreeNormalForm
`LeftOuterJoin` signature_morphismsFreeNormalForm
`InnerJoin` languages
`InnerJoin` logics
`LeftOuterJoin` loc_id_basesNormalForm
`LeftOuterJoin` signature_morphismsNormalForm) -> do
on (signature_morphismsNormalForm ?. SignatureMorphismId ==. oms ^. OMSNormalFormSignatureMorphismId)
on (loc_id_basesNormalForm ?. LocIdBaseId ==. coerceId (oms ^. OMSNormalFormId))
on (logics ^. LogicId ==. oms ^. OMSLogicId)
on (languages ^. LanguageId ==. oms ^. OMSLanguageId)
on (signature_morphismsFreeNormalForm ?. SignatureMorphismId ==. oms ^. OMSFreeNormalFormSignatureMorphismId)
on (loc_id_basesFreeNormalForm ?. LocIdBaseId ==. coerceId (oms ^. OMSFreeNormalFormId))
on (file_ranges ?. FileRangeId ==. oms ^. OMSNameFileRangeId)
on (conservativity_statuses ^. ConservativityStatusId ==. oms ^. OMSConservativityStatusId)
on (loc_id_bases ^. LocIdBaseId ==. coerceId (oms ^. OMSId))
where_ (loc_id_bases ^. LocIdBaseLocId ==. val locIdVar)
return (oms, loc_id_bases, conservativity_statuses, file_ranges,
loc_id_basesFreeNormalForm, signature_morphismsFreeNormalForm,
languages, logics,
loc_id_basesNormalForm, signature_morphismsNormalForm)
case omsL of
[] -> return Nothing
(omsEntity, locIdBaseOMS@(Entity omsKey _ ), conservativityStatusEntity, fileRangeM,
freeNormalFormLocIdBaseM, freeNormalFormSignatureMorphismM,
languageEntity, logicEntity,
normalFormLocIdBaseM, normalFormSignatureMorphismM) : _ -> do
consistencyCheckAttemptResults <- resolveConsistencyChecks omsKey
mappingSourceResults <- resolveMappings omsKey MappingSourceId
mappingTargetResults <- resolveMappings omsKey MappingTargetId
sentenceResults <- resolveSentences omsKey
serializationResult <- case oMSSerializationId $ entityVal omsEntity of
Nothing -> return Nothing
Just serializationKey -> resolveSerialization serializationKey
return $ Just $ GraphQLResult.OMSResult $
omsToResult omsEntity locIdBaseOMS conservativityStatusEntity
fileRangeM freeNormalFormLocIdBaseM freeNormalFormSignatureMorphismM
languageEntity logicEntity
normalFormLocIdBaseM normalFormSignatureMorphismM
consistencyCheckAttemptResults mappingSourceResults mappingTargetResults
sentenceResults serializationResult
resolveConsistencyChecks :: (MonadIO m, MonadFail m)
of the OMS
-> DBMonad m [GraphQLResultReasoningAttempt.ReasoningAttempt]
resolveConsistencyChecks omsKey = do
consistencyCheckAttemptL <-
select $ from $ \(reasoning_attempts `InnerJoin` consistency_check_attempts
`InnerJoin` reasoner_configurations
`LeftOuterJoin` reasoners) -> do
on (reasoners ?. ReasonerId ==. reasoning_attempts ^. ReasoningAttemptUsedReasonerId)
on (reasoner_configurations ^. ReasonerConfigurationId ==.
reasoning_attempts ^. ReasoningAttemptReasonerConfigurationId)
on (coerceId (consistency_check_attempts ^. ConsistencyCheckAttemptId) ==.
reasoning_attempts ^. ReasoningAttemptId)
where_ (consistency_check_attempts ^. ConsistencyCheckAttemptOmsId ==.
just (val omsKey))
return (reasoning_attempts, reasoners, reasoner_configurations)
mapM resolveReasoningAttempt consistencyCheckAttemptL
resolveMappings :: MonadIO m
=> LocIdBaseId
-> EntityField DatabaseSchema.Mapping LocIdBaseId
-> DBMonad m [GraphQLResultMapping.Mapping]
resolveMappings omsKey column = do
mappingData <-
select $ from $ \(mappingsSql `InnerJoin` loc_id_bases
`InnerJoin` signature_morphisms
`LeftOuterJoin` conservativity_statuses
`InnerJoin` loc_id_basesSource
`InnerJoin` loc_id_basesTarget
`LeftOuterJoin` loc_id_basesOMS
`LeftOuterJoin` languages) -> do
on (languages ?. LanguageId ==. mappingsSql ^. MappingFreenessParameterLanguageId)
on (loc_id_basesOMS ?. LocIdBaseId ==. mappingsSql ^. MappingFreenessParameterOMSId)
on (loc_id_basesTarget ^. LocIdBaseId ==. mappingsSql ^. MappingTargetId)
on (loc_id_basesSource ^. LocIdBaseId ==. mappingsSql ^. MappingSourceId)
on (conservativity_statuses ?. ConservativityStatusId ==.
mappingsSql ^. MappingConservativityStatusId)
on (mappingsSql ^. MappingSignatureMorphismId ==.
signature_morphisms ^. SignatureMorphismId)
on (loc_id_bases ^. LocIdBaseId ==. coerceId (mappingsSql ^. MappingId))
where_ (mappingsSql ^. column ==. val omsKey)
return (mappingsSql, loc_id_bases, signature_morphisms,
conservativity_statuses, loc_id_basesSource, loc_id_basesTarget,
loc_id_basesOMS, languages)
return $
map (\ (mapping, locIdBase, signatureMorphismEntity, conservativityStatusM,
locIdBaseSource, locIdBaseTarget, freenesParameterOMSLocIdM,
freenessParameterLanguageM) ->
mappingToResult mapping locIdBase signatureMorphismEntity
conservativityStatusM locIdBaseSource locIdBaseTarget
freenesParameterOMSLocIdM freenessParameterLanguageM
) mappingData
getReasonerOutput :: MonadIO m
=> ReasoningAttemptId
-> DBMonad m (Maybe (Entity DatabaseSchema.ReasonerOutput))
getReasonerOutput reasoningAttemptKey = do
reasonerOutputL <- select $ from $ \ reasoner_outputs -> do
where_ (reasoner_outputs ^. ReasonerOutputReasoningAttemptId ==.
val reasoningAttemptKey)
return reasoner_outputs
return $ case reasonerOutputL of
[] -> Nothing
reasonerOutputEntity : _ -> Just reasonerOutputEntity
resolveReasoningAttempt :: (MonadIO m, MonadFail m)
=> ( Entity DatabaseSchema.ReasoningAttempt
, Maybe (Entity DatabaseSchema.Reasoner)
, Entity DatabaseSchema.ReasonerConfiguration
)
-> DBMonad m GraphQLResultReasoningAttempt.ReasoningAttempt
resolveReasoningAttempt (reasoningAttemptEntity, reasonerEntityM, reasonerConfigurationEntity) = do
actionResult <-
resolveAction $ reasoningAttemptActionId $ entityVal reasoningAttemptEntity
reasonerOutputEntityM <- getReasonerOutput $ entityKey reasoningAttemptEntity
reasonerConfigurationResult <-
resolveReasonerConfiguration reasonerConfigurationEntity
return $ reasoningAttemptToResult reasoningAttemptEntity reasonerOutputEntityM
reasonerEntityM actionResult reasonerConfigurationResult
resolveAction :: (MonadIO m, MonadFail m)
=> DatabaseSchema.ActionId
-> DBMonad m GraphQLResultAction.Action
resolveAction actionKey = do
Just actionValue <- get actionKey
return $ actionToResult $ Entity actionKey actionValue
resolveSentences :: (MonadIO m, MonadFail m)
=> LocIdBaseId -> DBMonad m [GraphQLResultSentence.Sentence]
resolveSentences omsKey = do
sentenceL <-
select $ from $ \(sentencesSql `InnerJoin` loc_id_bases
`LeftOuterJoin` file_ranges
`LeftOuterJoin` conjectures) -> do
on (coerceId (conjectures ?. ConjectureId) ==. loc_id_bases ^. LocIdBaseId)
on (file_ranges ?. FileRangeId ==. sentencesSql ^. SentenceFileRangeId)
on (loc_id_bases ^. LocIdBaseId ==. coerceId (sentencesSql ^. SentenceId))
where_ (sentencesSql ^. SentenceOmsId ==. val omsKey)
return (sentencesSql, loc_id_bases, file_ranges, conjectures)
mapM (\ (sentenceEntity, locIdBaseEntity, fileRangeM, conjectureM) -> do
symbolResults <- resolveSymbols $ entityKey sentenceEntity
case conjectureM of
Nothing -> return $
axiomToResult sentenceEntity locIdBaseEntity fileRangeM symbolResults
Just conjectureEntity -> resolveConjecture sentenceEntity
locIdBaseEntity fileRangeM conjectureEntity symbolResults
) sentenceL
resolveConjecture :: (MonadIO m, MonadFail m)
=> Entity DatabaseSchema.Sentence
-> Entity DatabaseSchema.LocIdBase
-> Maybe (Entity DatabaseSchema.FileRange)
-> Entity DatabaseSchema.Conjecture
-> [GraphQLResultSymbol.Symbol]
-> DBMonad m GraphQLResultSentence.Sentence
resolveConjecture sentenceEntity locIdBaseEntity fileRangeM conjectureEntity symbolResults = do
actionResult <- resolveAction $ conjectureActionId $ entityVal conjectureEntity
proofAttemptResults <- resolveProofAttempts $ entityKey locIdBaseEntity
return $ conjectureToResult sentenceEntity locIdBaseEntity fileRangeM
conjectureEntity actionResult symbolResults proofAttemptResults
resolveProofAttempts :: (MonadIO m, MonadFail m)
=> LocIdBaseId
-> DBMonad m [GraphQLResultReasoningAttempt.ReasoningAttempt]
resolveProofAttempts conjectureKey = do
proofAttemptL <-
select $ from $ \(reasoning_attempts `InnerJoin` proof_attempts
`InnerJoin` reasoner_configurations
`LeftOuterJoin` reasoners) -> do
on (reasoners ?. ReasonerId ==.
reasoning_attempts ^. ReasoningAttemptUsedReasonerId)
on (reasoner_configurations ^. ReasonerConfigurationId ==.
reasoning_attempts ^. ReasoningAttemptReasonerConfigurationId)
on (coerceId (proof_attempts ^. ProofAttemptId) ==.
reasoning_attempts ^. ReasoningAttemptId)
where_ (proof_attempts ^. ProofAttemptConjectureId ==.
just (val conjectureKey))
return (reasoning_attempts, reasoners, reasoner_configurations)
mapM resolveReasoningAttempt proofAttemptL
resolveReasonerConfiguration :: MonadIO m
=> Entity DatabaseSchema.ReasonerConfiguration
-> DBMonad m GraphQLResultReasonerConfiguration.ReasonerConfiguration
resolveReasonerConfiguration reasonerConfigurationEntity@(Entity reasonerConfigurationKey reasonerConfigurationValue) = do
reasonerL <-
select $ from $ \ reasoners -> do
where_ (reasoners ?. ReasonerId ==.
val (reasonerConfigurationConfiguredReasonerId reasonerConfigurationValue))
return reasoners
let reasonerResultM = case reasonerL of
[] -> Nothing
reasonerEntity : _ -> reasonerEntity
premiseSelectionsL <-
select $ from $ \ premise_selections -> do
where_ (premise_selections ^. PremiseSelectionReasonerConfigurationId ==.
val reasonerConfigurationKey)
return premise_selections
premiseSelectionResults <- mapM resolvePremiseSelection premiseSelectionsL
return $ reasonerConfigurationToResult reasonerConfigurationEntity
reasonerResultM premiseSelectionResults
resolvePremiseSelection :: MonadIO m
=> Entity DatabaseSchema.PremiseSelection
-> DBMonad m GraphQLResultPremiseSelection.PremiseSelection
resolvePremiseSelection (Entity premiseSelectionKey _) = do
premises <-
select $ from $ \ (loc_id_bases `InnerJoin` premise_selected_sentences
`InnerJoin` premise_selections) -> do
on (loc_id_bases ^. LocIdBaseId ==.
premise_selected_sentences ^. PremiseSelectedSentencePremiseId)
on (premise_selected_sentences ^. PremiseSelectedSentencePremiseSelectionId ==.
premise_selections ^. PremiseSelectionId)
where_ (premise_selections ^. PremiseSelectionId ==. val premiseSelectionKey)
return loc_id_bases
return $ premiseSelectionToResult premises
resolveSerialization :: MonadIO m
=> SerializationId
-> DBMonad m (Maybe StringReference)
resolveSerialization serializationKey = do
serializationL <-
select $ from $ \(serializations) -> do
where_ (serializations ^. SerializationId ==. val serializationKey)
return serializations
case serializationL of
[] -> return Nothing
Entity _ serializationValue : _ ->
return $ Just $ StringReference $ serializationSlug serializationValue
resolveSymbols :: MonadIO m
=> SentenceId -> DBMonad m [GraphQLResultSymbol.Symbol]
resolveSymbols sentenceKey = do
symbolL <-
select $ from $ \(symbols `InnerJoin` sentences_symbols
`InnerJoin` sentencesSql
`InnerJoin` loc_id_bases
`LeftOuterJoin` file_ranges) -> do
on (file_ranges ?. FileRangeId ==. symbols ^. SymbolFileRangeId)
on (loc_id_bases ^. LocIdBaseId ==. coerceId (symbols ^. SymbolId))
on (coerceId (sentencesSql ^. SentenceId) ==.
sentences_symbols ^. SentenceSymbolSentenceId)
on (sentences_symbols ^. SentenceSymbolSymbolId ==.
coerceId (symbols ^. SymbolId))
where_ (sentences_symbols ^. SentenceSymbolSentenceId ==.
val (toSqlKey $ fromSqlKey sentenceKey))
return (loc_id_bases, symbols, file_ranges)
return $ map symbolToResultUncurried symbolL
| null | https://raw.githubusercontent.com/spechub/Hets/4cedaf8dbdb8909955e0066b465c331973043bbf/PGIP/GraphQL/Resolver/OMS.hs | haskell | module PGIP.GraphQL.Resolver.OMS (resolve) where
import PGIP.GraphQL.Resolver.ToResult
import PGIP.GraphQL.Result as GraphQLResult
import PGIP.GraphQL.Result.Action as GraphQLResultAction
import PGIP.GraphQL.Result.Mapping as GraphQLResultMapping
import PGIP.GraphQL.Result.PremiseSelection as GraphQLResultPremiseSelection
import PGIP.GraphQL.Result.ReasonerConfiguration as GraphQLResultReasonerConfiguration
import PGIP.GraphQL.Result.ReasoningAttempt as GraphQLResultReasoningAttempt
import PGIP.GraphQL.Result.Sentence as GraphQLResultSentence
import PGIP.GraphQL.Result.StringReference (StringReference (..))
import PGIP.GraphQL.Result.Symbol as GraphQLResultSymbol
import PGIP.Shared
import Driver.Options
import Persistence.Database
import Persistence.Schema as DatabaseSchema
import Persistence.Utils
import Database.Esqueleto
import Control.Monad.IO.Class (MonadIO (..))
import Control.Monad.Fail ()
resolve :: HetcatsOpts -> Cache -> String -> IO (Maybe GraphQLResult.Result)
resolve opts _ locIdVar =
onDatabase (databaseConfig opts) $ resolveDB locIdVar
resolveDB :: (MonadIO m, MonadFail m) => String -> DBMonad m (Maybe GraphQLResult.Result)
resolveDB locIdVar = do
omsL <-
select $ from $ \(oms `InnerJoin` loc_id_bases
`InnerJoin` conservativity_statuses
`LeftOuterJoin` file_ranges
`LeftOuterJoin` loc_id_basesFreeNormalForm
`LeftOuterJoin` signature_morphismsFreeNormalForm
`InnerJoin` languages
`InnerJoin` logics
`LeftOuterJoin` loc_id_basesNormalForm
`LeftOuterJoin` signature_morphismsNormalForm) -> do
on (signature_morphismsNormalForm ?. SignatureMorphismId ==. oms ^. OMSNormalFormSignatureMorphismId)
on (loc_id_basesNormalForm ?. LocIdBaseId ==. coerceId (oms ^. OMSNormalFormId))
on (logics ^. LogicId ==. oms ^. OMSLogicId)
on (languages ^. LanguageId ==. oms ^. OMSLanguageId)
on (signature_morphismsFreeNormalForm ?. SignatureMorphismId ==. oms ^. OMSFreeNormalFormSignatureMorphismId)
on (loc_id_basesFreeNormalForm ?. LocIdBaseId ==. coerceId (oms ^. OMSFreeNormalFormId))
on (file_ranges ?. FileRangeId ==. oms ^. OMSNameFileRangeId)
on (conservativity_statuses ^. ConservativityStatusId ==. oms ^. OMSConservativityStatusId)
on (loc_id_bases ^. LocIdBaseId ==. coerceId (oms ^. OMSId))
where_ (loc_id_bases ^. LocIdBaseLocId ==. val locIdVar)
return (oms, loc_id_bases, conservativity_statuses, file_ranges,
loc_id_basesFreeNormalForm, signature_morphismsFreeNormalForm,
languages, logics,
loc_id_basesNormalForm, signature_morphismsNormalForm)
case omsL of
[] -> return Nothing
(omsEntity, locIdBaseOMS@(Entity omsKey _ ), conservativityStatusEntity, fileRangeM,
freeNormalFormLocIdBaseM, freeNormalFormSignatureMorphismM,
languageEntity, logicEntity,
normalFormLocIdBaseM, normalFormSignatureMorphismM) : _ -> do
consistencyCheckAttemptResults <- resolveConsistencyChecks omsKey
mappingSourceResults <- resolveMappings omsKey MappingSourceId
mappingTargetResults <- resolveMappings omsKey MappingTargetId
sentenceResults <- resolveSentences omsKey
serializationResult <- case oMSSerializationId $ entityVal omsEntity of
Nothing -> return Nothing
Just serializationKey -> resolveSerialization serializationKey
return $ Just $ GraphQLResult.OMSResult $
omsToResult omsEntity locIdBaseOMS conservativityStatusEntity
fileRangeM freeNormalFormLocIdBaseM freeNormalFormSignatureMorphismM
languageEntity logicEntity
normalFormLocIdBaseM normalFormSignatureMorphismM
consistencyCheckAttemptResults mappingSourceResults mappingTargetResults
sentenceResults serializationResult
resolveConsistencyChecks :: (MonadIO m, MonadFail m)
of the OMS
-> DBMonad m [GraphQLResultReasoningAttempt.ReasoningAttempt]
resolveConsistencyChecks omsKey = do
consistencyCheckAttemptL <-
select $ from $ \(reasoning_attempts `InnerJoin` consistency_check_attempts
`InnerJoin` reasoner_configurations
`LeftOuterJoin` reasoners) -> do
on (reasoners ?. ReasonerId ==. reasoning_attempts ^. ReasoningAttemptUsedReasonerId)
on (reasoner_configurations ^. ReasonerConfigurationId ==.
reasoning_attempts ^. ReasoningAttemptReasonerConfigurationId)
on (coerceId (consistency_check_attempts ^. ConsistencyCheckAttemptId) ==.
reasoning_attempts ^. ReasoningAttemptId)
where_ (consistency_check_attempts ^. ConsistencyCheckAttemptOmsId ==.
just (val omsKey))
return (reasoning_attempts, reasoners, reasoner_configurations)
mapM resolveReasoningAttempt consistencyCheckAttemptL
resolveMappings :: MonadIO m
=> LocIdBaseId
-> EntityField DatabaseSchema.Mapping LocIdBaseId
-> DBMonad m [GraphQLResultMapping.Mapping]
resolveMappings omsKey column = do
mappingData <-
select $ from $ \(mappingsSql `InnerJoin` loc_id_bases
`InnerJoin` signature_morphisms
`LeftOuterJoin` conservativity_statuses
`InnerJoin` loc_id_basesSource
`InnerJoin` loc_id_basesTarget
`LeftOuterJoin` loc_id_basesOMS
`LeftOuterJoin` languages) -> do
on (languages ?. LanguageId ==. mappingsSql ^. MappingFreenessParameterLanguageId)
on (loc_id_basesOMS ?. LocIdBaseId ==. mappingsSql ^. MappingFreenessParameterOMSId)
on (loc_id_basesTarget ^. LocIdBaseId ==. mappingsSql ^. MappingTargetId)
on (loc_id_basesSource ^. LocIdBaseId ==. mappingsSql ^. MappingSourceId)
on (conservativity_statuses ?. ConservativityStatusId ==.
mappingsSql ^. MappingConservativityStatusId)
on (mappingsSql ^. MappingSignatureMorphismId ==.
signature_morphisms ^. SignatureMorphismId)
on (loc_id_bases ^. LocIdBaseId ==. coerceId (mappingsSql ^. MappingId))
where_ (mappingsSql ^. column ==. val omsKey)
return (mappingsSql, loc_id_bases, signature_morphisms,
conservativity_statuses, loc_id_basesSource, loc_id_basesTarget,
loc_id_basesOMS, languages)
return $
map (\ (mapping, locIdBase, signatureMorphismEntity, conservativityStatusM,
locIdBaseSource, locIdBaseTarget, freenesParameterOMSLocIdM,
freenessParameterLanguageM) ->
mappingToResult mapping locIdBase signatureMorphismEntity
conservativityStatusM locIdBaseSource locIdBaseTarget
freenesParameterOMSLocIdM freenessParameterLanguageM
) mappingData
getReasonerOutput :: MonadIO m
=> ReasoningAttemptId
-> DBMonad m (Maybe (Entity DatabaseSchema.ReasonerOutput))
getReasonerOutput reasoningAttemptKey = do
reasonerOutputL <- select $ from $ \ reasoner_outputs -> do
where_ (reasoner_outputs ^. ReasonerOutputReasoningAttemptId ==.
val reasoningAttemptKey)
return reasoner_outputs
return $ case reasonerOutputL of
[] -> Nothing
reasonerOutputEntity : _ -> Just reasonerOutputEntity
resolveReasoningAttempt :: (MonadIO m, MonadFail m)
=> ( Entity DatabaseSchema.ReasoningAttempt
, Maybe (Entity DatabaseSchema.Reasoner)
, Entity DatabaseSchema.ReasonerConfiguration
)
-> DBMonad m GraphQLResultReasoningAttempt.ReasoningAttempt
resolveReasoningAttempt (reasoningAttemptEntity, reasonerEntityM, reasonerConfigurationEntity) = do
actionResult <-
resolveAction $ reasoningAttemptActionId $ entityVal reasoningAttemptEntity
reasonerOutputEntityM <- getReasonerOutput $ entityKey reasoningAttemptEntity
reasonerConfigurationResult <-
resolveReasonerConfiguration reasonerConfigurationEntity
return $ reasoningAttemptToResult reasoningAttemptEntity reasonerOutputEntityM
reasonerEntityM actionResult reasonerConfigurationResult
resolveAction :: (MonadIO m, MonadFail m)
=> DatabaseSchema.ActionId
-> DBMonad m GraphQLResultAction.Action
resolveAction actionKey = do
Just actionValue <- get actionKey
return $ actionToResult $ Entity actionKey actionValue
resolveSentences :: (MonadIO m, MonadFail m)
=> LocIdBaseId -> DBMonad m [GraphQLResultSentence.Sentence]
resolveSentences omsKey = do
sentenceL <-
select $ from $ \(sentencesSql `InnerJoin` loc_id_bases
`LeftOuterJoin` file_ranges
`LeftOuterJoin` conjectures) -> do
on (coerceId (conjectures ?. ConjectureId) ==. loc_id_bases ^. LocIdBaseId)
on (file_ranges ?. FileRangeId ==. sentencesSql ^. SentenceFileRangeId)
on (loc_id_bases ^. LocIdBaseId ==. coerceId (sentencesSql ^. SentenceId))
where_ (sentencesSql ^. SentenceOmsId ==. val omsKey)
return (sentencesSql, loc_id_bases, file_ranges, conjectures)
mapM (\ (sentenceEntity, locIdBaseEntity, fileRangeM, conjectureM) -> do
symbolResults <- resolveSymbols $ entityKey sentenceEntity
case conjectureM of
Nothing -> return $
axiomToResult sentenceEntity locIdBaseEntity fileRangeM symbolResults
Just conjectureEntity -> resolveConjecture sentenceEntity
locIdBaseEntity fileRangeM conjectureEntity symbolResults
) sentenceL
resolveConjecture :: (MonadIO m, MonadFail m)
=> Entity DatabaseSchema.Sentence
-> Entity DatabaseSchema.LocIdBase
-> Maybe (Entity DatabaseSchema.FileRange)
-> Entity DatabaseSchema.Conjecture
-> [GraphQLResultSymbol.Symbol]
-> DBMonad m GraphQLResultSentence.Sentence
resolveConjecture sentenceEntity locIdBaseEntity fileRangeM conjectureEntity symbolResults = do
actionResult <- resolveAction $ conjectureActionId $ entityVal conjectureEntity
proofAttemptResults <- resolveProofAttempts $ entityKey locIdBaseEntity
return $ conjectureToResult sentenceEntity locIdBaseEntity fileRangeM
conjectureEntity actionResult symbolResults proofAttemptResults
resolveProofAttempts :: (MonadIO m, MonadFail m)
=> LocIdBaseId
-> DBMonad m [GraphQLResultReasoningAttempt.ReasoningAttempt]
resolveProofAttempts conjectureKey = do
proofAttemptL <-
select $ from $ \(reasoning_attempts `InnerJoin` proof_attempts
`InnerJoin` reasoner_configurations
`LeftOuterJoin` reasoners) -> do
on (reasoners ?. ReasonerId ==.
reasoning_attempts ^. ReasoningAttemptUsedReasonerId)
on (reasoner_configurations ^. ReasonerConfigurationId ==.
reasoning_attempts ^. ReasoningAttemptReasonerConfigurationId)
on (coerceId (proof_attempts ^. ProofAttemptId) ==.
reasoning_attempts ^. ReasoningAttemptId)
where_ (proof_attempts ^. ProofAttemptConjectureId ==.
just (val conjectureKey))
return (reasoning_attempts, reasoners, reasoner_configurations)
mapM resolveReasoningAttempt proofAttemptL
-- | Resolve a reasoner configuration: look up the reasoner it is
-- configured for (if any) and all premise selections that used it, and
-- combine them into the GraphQL result form.
resolveReasonerConfiguration :: MonadIO m
                             => Entity DatabaseSchema.ReasonerConfiguration
                             -> DBMonad m GraphQLResultReasonerConfiguration.ReasonerConfiguration
resolveReasonerConfiguration reasonerConfigurationEntity@(Entity reasonerConfigurationKey reasonerConfigurationValue) = do
  reasonerL <-
    select $ from $ \ reasoners -> do
      -- The lambda binds rows as Maybe entities (note `?.`), presumably so
      -- the nullable ConfiguredReasonerId column can be compared directly.
      where_ (reasoners ?. ReasonerId ==.
               val (reasonerConfigurationConfiguredReasonerId reasonerConfigurationValue))
      return reasoners
  -- Each list element is already a Maybe, so the head is used as-is.
  let reasonerResultM = case reasonerL of
        [] -> Nothing
        reasonerEntity : _ -> reasonerEntity
  premiseSelectionsL <-
    select $ from $ \ premise_selections -> do
      where_ (premise_selections ^. PremiseSelectionReasonerConfigurationId ==.
               val reasonerConfigurationKey)
      return premise_selections
  premiseSelectionResults <- mapM resolvePremiseSelection premiseSelectionsL
  return $ reasonerConfigurationToResult reasonerConfigurationEntity
    reasonerResultM premiseSelectionResults
-- | Fetch the LocIdBase rows of every premise chosen by the given premise
-- selection and wrap them in the GraphQL result form.
resolvePremiseSelection :: MonadIO m
                        => Entity DatabaseSchema.PremiseSelection
                        -> DBMonad m GraphQLResultPremiseSelection.PremiseSelection
resolvePremiseSelection (Entity premiseSelectionKey _) = do
  premises <-
    select $ from $ \ (loc_id_bases `InnerJoin` premise_selected_sentences
                                    `InnerJoin` premise_selections) -> do
      -- NOTE: esqueleto matches `on` clauses to joins in reverse order of
      -- appearance — keep this ordering in sync with the join structure.
      on (loc_id_bases ^. LocIdBaseId ==.
           premise_selected_sentences ^. PremiseSelectedSentencePremiseId)
      on (premise_selected_sentences ^. PremiseSelectedSentencePremiseSelectionId ==.
           premise_selections ^. PremiseSelectionId)
      where_ (premise_selections ^. PremiseSelectionId ==. val premiseSelectionKey)
      return loc_id_bases
  return $ premiseSelectionToResult premises
-- | Look up a serialization by key and return a reference to its slug,
-- or Nothing when no row with that key exists.
resolveSerialization :: MonadIO m
                     => SerializationId
                     -> DBMonad m (Maybe StringReference)
resolveSerialization serializationKey = do
  rows <-
    select $ from $ \ serializations -> do
      where_ (serializations ^. SerializationId ==. val serializationKey)
      return serializations
  case rows of
    Entity _ row : _ -> return (Just (StringReference (serializationSlug row)))
    []               -> return Nothing
-- | Fetch every symbol used by the given sentence, joined with its
-- LocIdBase row and optional file range (outer-joined), and convert each
-- row to its GraphQL result form.
resolveSymbols :: MonadIO m
               => SentenceId -> DBMonad m [GraphQLResultSymbol.Symbol]
resolveSymbols sentenceKey = do
  symbolL <-
    select $ from $ \(symbols `InnerJoin` sentences_symbols
                              `InnerJoin` sentencesSql
                              `InnerJoin` loc_id_bases
                              `LeftOuterJoin` file_ranges) -> do
      -- NOTE: esqueleto matches `on` clauses to joins in reverse order of
      -- appearance — keep this ordering in sync with the join structure.
      on (file_ranges ?. FileRangeId ==. symbols ^. SymbolFileRangeId)
      -- Symbol shares the LocIdBase key space; coerceId re-casts the key.
      on (loc_id_bases ^. LocIdBaseId ==. coerceId (symbols ^. SymbolId))
      on (coerceId (sentencesSql ^. SentenceId) ==.
           sentences_symbols ^. SentenceSymbolSentenceId)
      on (sentences_symbols ^. SentenceSymbolSymbolId ==.
           coerceId (symbols ^. SymbolId))
      -- Re-cast the sentence key into the column's key type.
      where_ (sentences_symbols ^. SentenceSymbolSentenceId ==.
               val (toSqlKey $ fromSqlKey sentenceKey))
      return (loc_id_bases, symbols, file_ranges)
  return $ map symbolToResultUncurried symbolL
| |
f6d5acc8cd399813f6cd752e4a7ff780da9901e7c034116ea10006962080cc25 | kach/recreational-rosette | battleship.rkt | ;; A Battleship puzzle solver.
This is not the two player Battleship game -- it is a one player
;; logic puzzle. See
;;
;;
author : ( )
license : MIT
#lang rosette
(require rosette/lib/angelic)
;; Maximum number used is around max(width, height) + max ship size
;; For a 10x10 grid with ships of up to size 4, a bitwidth of 5 would
;; suffice (not 4 because negative numbers)
;; Make it 10 to be safe
(current-bitwidth 10)
;;;;;;;;;;;;;;
;; Ship ADT ;;
;;;;;;;;;;;;;;
(struct ship (size x y vertical?) #:transparent)
;; True when cell (x, y) lies on ship `s`: the ship occupies `size`
;; consecutive cells starting at its origin, downward when vertical,
;; rightward otherwise.
(define (in-ship? s x y)
  (define sx (ship-x s))
  (define sy (ship-y s))
  (define len (ship-size s))
  (if (ship-vertical? s)
      (and (= x sx) (<= sy y (+ sy len -1)))
      (and (= y sy) (<= sx x (+ sx len -1)))))
;; Creates a ship of the given (concrete) size whose position and
;; orientation are fresh symbolic values, constrained so the origin cell
;; stays on the board.
;; NOTE(review): only the origin is constrained — a ship may extend past
;; the right/bottom edge.  Off-grid cells read as water via `ref`, and the
;; row/column sum constraints presumably rule such placements out; confirm
;; if the sums ever do not total the fleet size.
(define (make-symbolic-ship size height width)
  (define-symbolic* x y integer?)
  (assert (>= x 0))
  (assert (< x width))
  (assert (>= y 0))
  (assert (< y height))
  (define-symbolic* vertical? boolean?)
  (ship size x y vertical?))
;;;;;;;;;;;;;;;;
;; Puzzle ADT ;;
;;;;;;;;;;;;;;;;
(struct puzzle (height width row-sums col-sums ships matrix) #:transparent)
;; Builds a puzzle from a fleet and the row/column sums.  The sum lists
;; determine the board dimensions; the matrix caches, for every cell,
;; whether some ship occupies it.
(define (make-puzzle ships row-sums col-sums)
  (define height (length row-sums))
  (define width (length col-sums))
  (define matrix
    (for/vector ([y height])
      (for/vector ([x width])
        (for/or ([s ships]) (in-ship? s x y)))))
  (puzzle height width row-sums col-sums ships matrix))
;; Builds a fully symbolic puzzle over the standard battleship fleet:
;; one 4-ship, two 3-ships, three 2-ships and four 1-ships, each with a
;; symbolic position and orientation.
(define (make-symbolic-puzzle row-sums col-sums)
  (define h (length row-sums))
  (define w (length col-sums))
  (make-puzzle
   (for/list ([size '(4 3 3 2 2 2 1 1 1 1)])
     (make-symbolic-ship size h w))
   row-sums col-sums))
;; Reads the occupancy of cell (x, y).  Out-of-bounds coordinates yield
;; `default` (water by default); in-bounds lookups read the precomputed
;; matrix (y selects the row, x the cell).  The matrix could be avoided by
;; testing every ship with `in-ship?` here instead, at extra solver cost.
(define (ref puzzle x y [default #f])
  (cond
    [(or (< x 0) (>= x (puzzle-width puzzle))) default]
    [(or (< y 0) (>= y (puzzle-height puzzle))) default]
    [else (vector-ref (vector-ref (puzzle-matrix puzzle) y) x)]))
;; Prints the board as an ASCII grid: column indices across the top, row
;; indices on the left with row sums on the right, and column sums along
;; the bottom.  "S" marks a ship cell, "-" water.  The (if (>= n 10) "" " ")
;; padding keeps one- and two-digit numbers aligned.
;; NOTE(review): whitespace inside the format strings may have been mangled
;; by the source dump — verify column alignment against the original.
(define (print-puzzle puzzle)
  (printf " ")
  (for ([x (puzzle-width puzzle)])
    (printf "~a~a" x (if (>= x 10) "" " ")))
  (printf " ~%~%")
  (for ([y (puzzle-height puzzle)]
        [row-sum (puzzle-row-sums puzzle)])
    (printf "~a~a " y (if (>= y 10) "" " "))
    (for ([x (puzzle-width puzzle)])
      (printf "~a " (if (ref puzzle x y) "S" "-")))
    (printf " ~a~a~%" (if (>= row-sum 10) "" " ") row-sum))
  ;; Display column sums on the last line
  (printf "~% ")
  (for ([col-sum (puzzle-col-sums puzzle)])
    (printf "~a~a" col-sum (if (>= col-sum 10) "" " ")))
  (printf " ~%"))
;;;;;;;;;;;;;;;;;
;; Constraints ;;
;;;;;;;;;;;;;;;;;
;; All submarines are surrounded by water
;; Asserts, for every ship, that each cell adjacent to it — at both ends,
;; along both sides, and diagonally (the side ranges run from -1 to size,
;; covering the corners) — is water.  `ref`'s #f default makes off-board
;; neighbours count as water automatically.  `for/all` case-splits on the
;; (possibly symbolic) ship list so the `match` below sees concrete
;; `ship` structs.
(define (isolation-constraints puzzle)
  (for/all ([ships (puzzle-ships puzzle)])
    (for ([s ships])
      (match s
        [(ship ssize sx sy svertical?)
         (if svertical?
             (begin
               ;; Water at the ends
               (assert (not (ref puzzle sx (- sy 1) #f)))
               (assert (not (ref puzzle sx (+ sy ssize) #f)))
               ;; Water on the sides, including diagonally
               (for ([y (range -1 (+ ssize 1))])
                 (assert (not (ref puzzle (+ sx 1) (+ y sy) #f)))
                 (assert (not (ref puzzle (- sx 1) (+ y sy) #f)))))
             (begin
               ;; Water at the ends
               (assert (not (ref puzzle (- sx 1) sy #f)))
               (assert (not (ref puzzle (+ sx ssize) sy #f)))
               ;; Water on the sides, including diagonally
               (for ([x (range -1 (+ ssize 1))])
                 (assert (not (ref puzzle (+ sx x) (+ sy 1) #f)))
                 (assert (not (ref puzzle (+ sx x) (- sy 1) #f))))))]))))
;; Sums a list of (possibly symbolic) numbers.
(define (sum lst)
  (apply + lst))
;; Column sums
;; Asserts that column x contains exactly `result` ship cells, by summing
;; a 0/1 indicator over every row of the column.
(define (assert-col-sum puzzle x result)
  (assert (= (sum (map (lambda (y) (if (ref puzzle x y) 1 0))
                       (range (puzzle-height puzzle))))
             result)))
;; Row sums
;; Asserts that row y contains exactly `result` ship cells, by summing a
;; 0/1 indicator over every column of the row.
(define (assert-row-sum puzzle y result)
  (assert (= (sum (map (lambda (x) (if (ref puzzle x y) 1 0))
                       (range (puzzle-width puzzle))))
             result)))
;; Asserts every constraint of the puzzle: the puzzle-specific clues from
;; `init-fn`, water separation between ships, and the row/column counts.
(define (all-constraints puzzle init-fn)
  ;; Constraints based on given submarine and water locations
  (init-fn puzzle)
  ;; Constraints based on subs being separated
  (isolation-constraints puzzle)
  ;; Constraints based on row and column sums
  (for ([x (in-range (puzzle-width puzzle))]
        [col-sum (puzzle-col-sums puzzle)])
    (assert-col-sum puzzle x col-sum))
  (for ([y (in-range (puzzle-height puzzle))]
        [row-sum (puzzle-row-sums puzzle)])
    (assert-row-sum puzzle y row-sum)))
;; Builds a fully symbolic puzzle, asserts all constraints (which land in
;; Rosette's assertion store as a side effect), hands that store to the
;; solver via (solve (void)), and concretizes the puzzle with the model.
(define (solve-puzzle #:init-fn fn #:row-sums row-sums #:column-sums col-sums)
  (define puzzle (make-symbolic-puzzle row-sums col-sums))
  (all-constraints puzzle fn)
  (define synth (time (solve (void))))
  (evaluate puzzle synth))
;; Solves the puzzle, then reports the concrete ship placements and the
;; solved board.
(define (solve-and-print-puzzle #:init-fn fn
                                #:row-sums row-sums #:column-sums col-sums)
  (let ([solved (solve-puzzle #:init-fn fn
                              #:row-sums row-sums #:column-sums col-sums)])
    (printf "Place ships as follows: ~a~%~%" (puzzle-ships solved))
    (print-puzzle solved)))
;; Solving the example puzzle
(define (example-puzzle-fn puzzle)
  ;; Constraints from the ship pieces
  (assert (ref puzzle 5 2))
  ;; (5,2) is a middle segment: its ship continues through it either
  ;; vertically or horizontally.
  ;; NOTE(review): the horizontal alternative was garbled in the original
  ;; source (only the word "Horizontal" survived); (4,2)/(6,2) are the
  ;; natural horizontal neighbours — confirm against the original puzzle.
  (assert (or (and (ref puzzle 5 1) (ref puzzle 5 3))   ;; Vertical
              (and (ref puzzle 4 2) (ref puzzle 6 2)))) ;; Horizontal
  (assert (ref puzzle 3 5))
  (assert (ref puzzle 3 6))
  (assert (not (ref puzzle 3 4)))
  ;; Constraints from the water
  (assert (not (ref puzzle 0 0)))
  (assert (not (ref puzzle 2 8))))
#;(solve-and-print-puzzle #:init-fn example-puzzle-fn
#:row-sums '(2 3 5 1 1 1 1 0 1 5)
#:column-sums '(4 0 3 3 0 3 1 1 4 1))
;; Solution for the example puzzle
#;(define example-puzzle-soln
(make-puzzle
(list (ship 4 6 9 #f)
(ship 3 5 1 #t) (ship 3 8 0 #t)
(ship 2 2 2 #f) (ship 2 3 5 #t) (ship 2 0 1 #t)
(ship 1 2 9 #f) (ship 1 2 0 #f) (ship 1 0 8 #t) (ship 1 0 4 #t))
'(2 3 5 1 1 1 1 0 1 5)
'(4 0 3 3 0 3 1 1 4 1)))
#;(all-constraints example-puzzle-soln (const #t))
;; Example puzzle from Microsoft College Puzzle Challenge 2017, Sea Shanties
;; Clue constraints for the CPC 2017 puzzle: the known piece occupies
;; (2,2) and (3,2) but not (1,2), and (0,8) is known water.
(define (cpc-puzzle-fn puzzle)
  ;; Constraints from the ship piece
  (assert (not (ref puzzle 1 2)))
  (assert (ref puzzle 2 2))
  (assert (ref puzzle 3 2))
  ;; Constraints from the water
  (assert (not (ref puzzle 0 8))))
(solve-and-print-puzzle #:init-fn cpc-puzzle-fn
#:row-sums '(2 1 2 2 4 4 1 2 1 1)
#:column-sums '(3 0 1 2 2 4 2 3 1 2))
| null | https://raw.githubusercontent.com/kach/recreational-rosette/797a8c462e8cdc820bd257428b195193f8bad995/battleship/battleship.rkt | racket | A Battleship puzzle solver.
logic puzzle. See
;
;
We could also get rid of the matrix entirely, and instead
every time we use ref we would run:
(ormap (lambda (s) (in-ship? s x y)) (puzzle-ships puzzle))
Display column sums on the last line
Constraints ;;
All submarines are surrounded by water
Water at the ends
Water on the sides, including diagonally
Water at the ends
Water on the sides, including diagonally
Column sums
Row sums
Constraints based on given submarine and water locations
Constraints based on subs being separated
Constraints based on row and column sums
Solving the example puzzle
Constraints from the ship pieces
Vertical
Constraints from the water
(solve-and-print-puzzle #:init-fn example-puzzle-fn
Solution for the example puzzle
(define example-puzzle-soln
(all-constraints example-puzzle-soln (const #t))
Constraints from the ship piece
Constraints from the water | This is not the two player Battleship game -- it is a one player
author : ( )
license : MIT
#lang rosette
(require rosette/lib/angelic)
Maximum number used is around max(width , height ) + max ship size
For a 10x10 grid with ships of up to size 4 , a bitwidth of 5 would
suffice ( not 4 because negative numbers )
Make it 10 to be safe
(current-bitwidth 10)
(struct ship (size x y vertical?) #:transparent)
(define (in-ship? s x y)
(match s
[(ship ssize sx sy svertical?)
(if svertical?
(and (= x sx) (<= sy y (+ sy ssize -1)))
(and (= y sy) (<= sx x (+ sx ssize -1))))]))
(define (make-symbolic-ship size height width)
(define-symbolic* x y integer?)
(assert (>= x 0))
(assert (< x width))
(assert (>= y 0))
(assert (< y height))
(define-symbolic* vertical? boolean?)
(ship size x y vertical?))
(struct puzzle (height width row-sums col-sums ships matrix) #:transparent)
(define (make-puzzle ships row-sums col-sums)
(let* ([h (length row-sums)]
[w (length col-sums)]
[matrix
(for/vector ([y h])
(for/vector ([x w])
(ormap (lambda (s) (in-ship? s x y)) ships)))])
(puzzle h w row-sums col-sums ships matrix)))
(define (make-symbolic-puzzle row-sums col-sums)
(make-puzzle
(map (lambda (size)
(make-symbolic-ship size (length row-sums) (length col-sums)))
'(4 3 3 2 2 2 1 1 1 1))
row-sums col-sums))
(define (ref puzzle x y [default #f])
(if (or (< x 0) (>= x (puzzle-width puzzle))
(< y 0) (>= y (puzzle-height puzzle)))
default
(vector-ref (vector-ref (puzzle-matrix puzzle) y) x)))
(define (print-puzzle puzzle)
(printf " ")
(for ([x (puzzle-width puzzle)])
(printf "~a~a" x (if (>= x 10) "" " ")))
(printf " ~%~%")
(for ([y (puzzle-height puzzle)]
[row-sum (puzzle-row-sums puzzle)])
(printf "~a~a " y (if (>= y 10) "" " "))
(for ([x (puzzle-width puzzle)])
(printf "~a " (if (ref puzzle x y) "S" "-")))
(printf " ~a~a~%" (if (>= row-sum 10) "" " ") row-sum))
(printf "~% ")
(for ([col-sum (puzzle-col-sums puzzle)])
(printf "~a~a" col-sum (if (>= col-sum 10) "" " ")))
(printf " ~%"))
(define (isolation-constraints puzzle)
(for/all ([ships (puzzle-ships puzzle)])
(for ([s ships])
(match s
[(ship ssize sx sy svertical?)
(if svertical?
(begin
(assert (not (ref puzzle sx (- sy 1) #f)))
(assert (not (ref puzzle sx (+ sy ssize) #f)))
(for ([y (range -1 (+ ssize 1))])
(assert (not (ref puzzle (+ sx 1) (+ y sy) #f)))
(assert (not (ref puzzle (- sx 1) (+ y sy) #f)))))
(begin
(assert (not (ref puzzle (- sx 1) sy #f)))
(assert (not (ref puzzle (+ sx ssize) sy #f)))
(for ([x (range -1 (+ ssize 1))])
(assert (not (ref puzzle (+ sx x) (+ sy 1) #f)))
(assert (not (ref puzzle (+ sx x) (- sy 1) #f))))))]))))
(define (sum lst)
(foldl + 0 lst))
(define (assert-col-sum puzzle x result)
(assert (= (sum (map (lambda (y) (if (ref puzzle x y) 1 0))
(range (puzzle-height puzzle))))
result)))
(define (assert-row-sum puzzle y result)
(assert (= (sum (map (lambda (x) (if (ref puzzle x y) 1 0))
(range (puzzle-width puzzle))))
result)))
(define (all-constraints puzzle init-fn)
(init-fn puzzle)
(isolation-constraints puzzle)
(for-each (curry assert-col-sum puzzle)
(range (puzzle-width puzzle))
(puzzle-col-sums puzzle))
(for-each (curry assert-row-sum puzzle)
(range (puzzle-height puzzle))
(puzzle-row-sums puzzle)))
(define (solve-puzzle #:init-fn fn #:row-sums row-sums #:column-sums col-sums)
(define puzzle (make-symbolic-puzzle row-sums col-sums))
(all-constraints puzzle fn)
(define synth (time (solve (void))))
(evaluate puzzle synth))
(define (solve-and-print-puzzle #:init-fn fn
#:row-sums row-sums #:column-sums col-sums)
(define puzzle-soln
(solve-puzzle #:init-fn fn #:row-sums row-sums #:column-sums col-sums))
(printf "Place ships as follows: ~a~%~%" (puzzle-ships puzzle-soln))
(print-puzzle puzzle-soln))
(define (example-puzzle-fn puzzle)
(assert (ref puzzle 5 2))
Horizontal
(assert (ref puzzle 3 5))
(assert (ref puzzle 3 6))
(assert (not (ref puzzle 3 4)))
(assert (not (ref puzzle 0 0)))
(assert (not (ref puzzle 2 8))))
#:row-sums '(2 3 5 1 1 1 1 0 1 5)
#:column-sums '(4 0 3 3 0 3 1 1 4 1))
(make-puzzle
(list (ship 4 6 9 #f)
(ship 3 5 1 #t) (ship 3 8 0 #t)
(ship 2 2 2 #f) (ship 2 3 5 #t) (ship 2 0 1 #t)
(ship 1 2 9 #f) (ship 1 2 0 #f) (ship 1 0 8 #t) (ship 1 0 4 #t))
'(2 3 5 1 1 1 1 0 1 5)
'(4 0 3 3 0 3 1 1 4 1)))
Example puzzle from Microsoft College Puzzle Challenge 2017 , Sea Shanties
(define (cpc-puzzle-fn puzzle)
(assert (not (ref puzzle 1 2)))
(assert (ref puzzle 2 2))
(assert (ref puzzle 3 2))
(assert (not (ref puzzle 0 8))))
(solve-and-print-puzzle #:init-fn cpc-puzzle-fn
#:row-sums '(2 1 2 2 4 4 1 2 1 1)
#:column-sums '(3 0 1 2 2 4 2 3 1 2))
|
cd4e4ce776b44c0e67ef3034876e1efdba125b1f152aae4ee2b0ecc2958e83fd | michaelklishin/neocons | bolt.clj | Copyright ( c ) 2011 - 2018 , , and The ClojureWerkz
Team
;;
;; The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 ( -1.0.php )
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns clojurewerkz.neocons.bolt
(:require [clojure.string :as string])
(:import (java.util Map)
(org.neo4j.driver.v1 AuthTokens Config Driver
GraphDatabase Record Session
StatementResult StatementRunner
Transaction TransactionWork Values)))
(defn- env-var
  "Returns the value of environment variable `s`, or nil when it is unset.
   Uses the single-argument System/getenv lookup instead of materializing
   the whole environment map and calling `get` on it."
  [^String s]
  (System/getenv s))
(defn connect
  "Creates a Bolt driver for the given URL.  With no explicit credentials
   the NEO4J_LOGIN/NEO4J_PASSWORD environment variables are consulted;
   when either is blank the driver is created without authentication."
  (^Driver [^String url]
   (let [login    (env-var "NEO4J_LOGIN")
         password (env-var "NEO4J_PASSWORD")]
     (if (and (not (string/blank? login))
              (not (string/blank? password)))
       (connect url login password)
       (GraphDatabase/driver url))))
  (^Driver [^String url ^String username ^String password]
   (GraphDatabase/driver url (AuthTokens/basic username password)))
  (^Driver [^String url ^String username ^String password ^Config config]
   (GraphDatabase/driver url (AuthTokens/basic username password) config)))
(defn create-session
  "Opens a new session on the given Bolt driver."
  ^Session [^Driver driver]
  (.session driver))
(defn query
  "Runs a Cypher query on the given session or transaction, with `params`
   defaulting to an empty map.  Returns a lazy seq of result records, each
   converted to a Clojure map."
  ([^StatementRunner runner ^String qry]
   (query runner qry {}))
  ([^StatementRunner runner ^String qry ^Map params]
   (for [^Record record (iterator-seq (.run runner qry params))]
     (into {} (.asMap record)))))
(defn begin-tx
  "Begins an explicit transaction on the given session."
  ^Transaction [^Session session]
  (.beginTransaction session))
(defn run-tx
  "Runs a Cypher query inside an open transaction.  Returns a lazy seq of
   result records, each converted to a Clojure map."
  [^Transaction transaction ^String qry ^Map params]
  (for [^Record record (iterator-seq (.run transaction qry params))]
    (into {} (.asMap record))))
(defn tx-successful
  "Marks the transaction as successful; per the Neo4j Java driver 1.x API
   it will then be committed when the transaction is closed."
  [^Transaction transaction]
  (.success transaction))
(defn tx-failure
  "Marks the transaction as failed; per the Neo4j Java driver 1.x API it
   will then be rolled back when the transaction is closed."
  [^Transaction transaction]
  (.failure transaction))
| null | https://raw.githubusercontent.com/michaelklishin/neocons/30f30e95686a01f7a34082600bc1221877c2acbd/src/clojure/clojurewerkz/neocons/bolt.clj | clojure |
The use and distribution terms for this software are covered by the
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright ( c ) 2011 - 2018 , , and The ClojureWerkz
Team
Eclipse Public License 1.0 ( -1.0.php )
(ns clojurewerkz.neocons.bolt
(:require [clojure.string :as string])
(:import (java.util Map)
(org.neo4j.driver.v1 AuthTokens Config Driver
GraphDatabase Record Session
StatementResult StatementRunner
Transaction TransactionWork Values)))
(defn- env-var
[^String s]
(get (System/getenv) s))
(defn connect
(^Driver [^String url]
(let [login (env-var "NEO4J_LOGIN")
password (env-var "NEO4J_PASSWORD")]
(if (or (string/blank? login)
(string/blank? password))
(GraphDatabase/driver url)
(connect url login password))))
(^Driver [^String url ^String username ^String password]
(GraphDatabase/driver url (AuthTokens/basic username password)))
(^Driver [^String url ^String username ^String password ^Config config]
(GraphDatabase/driver url (AuthTokens/basic username password) config)))
(defn create-session
^Session [^Driver driver]
(.session driver))
(defn query
([^StatementRunner runner ^String qry]
(query runner qry {}))
([^StatementRunner runner ^String qry ^Map params]
(map (fn [^Record r]
(into {} (.asMap r)))
(iterator-seq (.run runner qry params)))))
(defn begin-tx
^Transaction [^Session session]
(.beginTransaction session))
(defn run-tx
[^Transaction transaction ^String qry ^Map params]
(map (fn [^Record r] (into {} (.asMap r)))
(iterator-seq (.run transaction qry params))))
(defn tx-successful
[^Transaction transaction]
(.success transaction))
(defn tx-failure
[^Transaction transaction]
(.failure transaction))
|
38e82a56cf584d8aec8b1b17aa8bbcf1e7b510066ed006dc1fbd6d271ee9b171 | clojure-interop/aws-api | AWSStorageGateway.clj | (ns com.amazonaws.services.storagegateway.AWSStorageGateway
"Interface for accessing AWS Storage Gateway.
Note: Do not directly implement this interface, new methods are added to it regularly. Extend from
AbstractAWSStorageGateway instead.
AWS Storage Gateway Service
AWS Storage Gateway is the service that connects an on-premises software appliance with cloud-based storage to
provide seamless and secure integration between an organization's on-premises IT environment and the AWS storage
infrastructure. The service enables you to securely upload data to the AWS cloud for cost effective backup and rapid
disaster recovery.
Use the following links to get started using the AWS Storage Gateway Service API Reference:
AWS Storage Gateway Required Request Headers: Describes the required headers that you must send with every POST
request to AWS Storage Gateway.
Signing Requests: AWS Storage Gateway requires that you authenticate every request you send; this topic
describes how sign such a request.
Error Responses: Provides reference information about AWS Storage Gateway errors.
Operations in AWS
Storage Gateway: Contains detailed descriptions of all AWS Storage Gateway operations, their request parameters,
response elements, possible errors, and examples of requests and responses.
AWS Storage Gateway Regions and
Endpoints: Provides a list of each AWS region and endpoints available for use with AWS Storage Gateway.
AWS Storage Gateway resource IDs are in uppercase. When you use these resource IDs with the Amazon EC2 API, EC2
expects resource IDs in lowercase. You must change your resource ID to lowercase to use it with the EC2 API. For
example, in Storage Gateway the ID for a volume might be vol-AA22BB012345DAF670. When you use this ID
with the EC2 API, you must change it to vol-aa22bb012345daf670. Otherwise, the EC2 API might not behave
as expected.
IDs for Storage Gateway volumes and Amazon EBS snapshots created from gateway volumes are changing to a longer
format. Starting in December 2016, all new volumes and snapshots will be created with a 17-character string. Starting
in April 2016, you will be able to use these longer IDs so you can test your systems with the new format. For more
information, see Longer EC2 and EBS Resource IDs.
For example, a volume Amazon Resource Name (ARN) with the longer volume ID format looks like the following:
arn:aws:storagegateway:us-west-2:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABBCCDDEEFFG.
A snapshot ID with the longer ID format looks like the following: snap-78e226633445566ee.
For more information, see Announcement: Heads-up – Longer
AWS Storage Gateway volume and snapshot IDs coming in 2016."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.storagegateway AWSStorageGateway]))
(defn join-domain
  "Adds a file gateway to an Active Directory domain.  Only supported for
  file gateways that support the SMB file protocol.

  join-domain-request - JoinDomainInput - `com.amazonaws.services.storagegateway.model.JoinDomainRequest`

  returns: `com.amazonaws.services.storagegateway.model.JoinDomainResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - an invalid gateway request was issued to the service; see its error and message fields."
  (^com.amazonaws.services.storagegateway.model.JoinDomainResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.JoinDomainRequest join-domain-request]
   (.joinDomain this join-domain-request)))
(defn create-cachedi-scsi-volume
  "Creates a cached volume on a specified cached volume gateway (cached
  volume gateway type only).  Cache storage must already be allocated via
  AddCache.  The request names the gateway, the volume size in bytes, the
  iSCSI target name, the IP address to expose the target on, and a unique
  client token; the response carries the volume ARN, its size, and the
  iSCSI target ARN initiators connect to.  Optionally, SourceVolumeARN
  names an existing volume whose latest recovery point is copied, in which
  case VolumeSizeInBytes must be at least the size of the copied volume.

  create-cachedi-scsi-volume-request - `com.amazonaws.services.storagegateway.model.CreateCachediSCSIVolumeRequest`

  returns: `com.amazonaws.services.storagegateway.model.CreateCachediSCSIVolumeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - an invalid gateway request was issued to the service; see its error and message fields."
  (^com.amazonaws.services.storagegateway.model.CreateCachediSCSIVolumeResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CreateCachediSCSIVolumeRequest create-cachedi-scsi-volume-request]
   (.createCachediSCSIVolume this create-cachedi-scsi-volume-request)))
(defn add-cache
  "Configures one or more gateway local disks as cache (cached volume,
  tape and file gateway types).  The request names the gateway ARN to add
  cache to and the disk IDs to configure as cache.

  add-cache-request - `com.amazonaws.services.storagegateway.model.AddCacheRequest`

  returns: `com.amazonaws.services.storagegateway.model.AddCacheResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - an invalid gateway request was issued to the service; see its error and message fields."
  (^com.amazonaws.services.storagegateway.model.AddCacheResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.AddCacheRequest add-cache-request]
   (.addCache this add-cache-request)))
(defn describe-nfs-file-shares
  "Gets a description for one or more Network File System (NFS) file
  shares from a file gateway.  Only supported for file gateways.

  describe-nfs-file-shares-request - DescribeNFSFileSharesInput - `com.amazonaws.services.storagegateway.model.DescribeNFSFileSharesRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeNFSFileSharesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - an invalid gateway request was issued to the service; see its error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeNFSFileSharesResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeNFSFileSharesRequest describe-nfs-file-shares-request]
   (.describeNFSFileShares this describe-nfs-file-shares-request)))
(defn describe-maintenance-start-time
  "Returns the gateway's weekly maintenance start time (day and time of
  week), expressed in the gateway's time zone.

  describe-maintenance-start-time-request - A JSON object containing the of the gateway. - `com.amazonaws.services.storagegateway.model.DescribeMaintenanceStartTimeRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeMaintenanceStartTimeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - an invalid gateway request was issued to the service; see its error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeMaintenanceStartTimeResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeMaintenanceStartTimeRequest describe-maintenance-start-time-request]
   (.describeMaintenanceStartTime this describe-maintenance-start-time-request)))
(defn describe-cache
  "Returns information about the cache of a gateway (cached volume, tape
  and file gateway types): the disk IDs configured as cache and the amount
  of cache allocated and used.

  describe-cache-request - `com.amazonaws.services.storagegateway.model.DescribeCacheRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeCacheResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - an invalid gateway request was issued to the service; see its error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeCacheResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeCacheRequest describe-cache-request]
   (.describeCache this describe-cache-request)))
(defn list-tags-for-resource
  "Lists the tags that have been added to the specified resource (cached
  volume, stored volume and tape gateway types only).

  list-tags-for-resource-request - ListTagsForResourceInput - `com.amazonaws.services.storagegateway.model.ListTagsForResourceRequest`

  returns: `com.amazonaws.services.storagegateway.model.ListTagsForResourceResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - an invalid gateway request was issued to the service; see its error and message fields."
  (^com.amazonaws.services.storagegateway.model.ListTagsForResourceResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ListTagsForResourceRequest list-tags-for-resource-request]
   (.listTagsForResource this list-tags-for-resource-request))
  (^com.amazonaws.services.storagegateway.model.ListTagsForResourceResult [^AWSStorageGateway this]
   (.listTagsForResource this)))
(defn assign-tape-pool
  "Assigns a tape to a tape pool for archiving.  A tape assigned to a pool
  is archived in the S3 storage class associated with that pool; ejecting
  the tape from the backup application archives it directly into that
  storage class (Glacier or Deep Archive).
  Valid values: \"GLACIER\", \"DEEP_ARCHIVE\"

  assign-tape-pool-request - `com.amazonaws.services.storagegateway.model.AssignTapePoolRequest`

  returns: `com.amazonaws.services.storagegateway.model.AssignTapePoolResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - an invalid gateway request was issued to the service; see its error and message fields."
  (^com.amazonaws.services.storagegateway.model.AssignTapePoolResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.AssignTapePoolRequest assign-tape-pool-request]
   (.assignTapePool this assign-tape-pool-request)))
(defn set-region
  "Deprecated. use AwsClientBuilder#setRegion(String)

  region - The region this client will communicate with. See Region.getRegion(com.amazonaws.regions.Regions) for accessing a given region. Must not be null and must be a region where the service is available. - `com.amazonaws.regions.Region`"
  ([^AWSStorageGateway this ^com.amazonaws.regions.Region region]
   (.setRegion this region)))
(defn shutdown-gateway
  "Shuts down a gateway, identified by its ARN in the request body.  This
  stops the gateway service component inside the gateway's VM, not the
  host VM itself; shut down the gateway component first if you intend to
  stop the VM.  After shutdown only StartGateway,
  DescribeGatewayInformation and ListGateways may be called, applications
  cannot read from or write to the gateway's volumes, and no snapshots are
  taken.  The call returns 200 OK immediately even though the gateway may
  take some time to shut down (poll DescribeGatewayInformation to check).
  If the gateway will not be used again, delete it with DeleteGateway to
  stop software charges.

  shutdown-gateway-request - A JSON object containing the of the gateway to shut down. - `com.amazonaws.services.storagegateway.model.ShutdownGatewayRequest`

  returns: `com.amazonaws.services.storagegateway.model.ShutdownGatewayResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - an invalid gateway request was issued to the service; see its error and message fields."
  (^com.amazonaws.services.storagegateway.model.ShutdownGatewayResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ShutdownGatewayRequest shutdown-gateway-request]
   (.shutdownGateway this shutdown-gateway-request)))
(defn describe-tape-recovery-points
  "Returns the virtual tape recovery points available for the specified
  tape gateway (tape gateway type only).  A recovery point is a
  point-in-time view of a virtual tape at which all of its data is
  consistent; tapes with recovery points can be recovered to a new gateway
  after a crash.

  describe-tape-recovery-points-request - DescribeTapeRecoveryPointsInput - `com.amazonaws.services.storagegateway.model.DescribeTapeRecoveryPointsRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeTapeRecoveryPointsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - an invalid gateway request was issued to the service; see its error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeTapeRecoveryPointsResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeTapeRecoveryPointsRequest describe-tape-recovery-points-request]
   (.describeTapeRecoveryPoints this describe-tape-recovery-points-request)))
(defn update-smb-file-share
  "Updates a Server Message Block (SMB) file share. Set an input field to null to
  leave the corresponding share field unchanged. Supported only for file gateways.

  File gateways require AWS Security Token Service (AWS STS) to be activated in the
  AWS Region in which the gateway is created; see `Activating and Deactivating AWS
  STS in an AWS Region` in the AWS Identity and Access Management User Guide.
  File gateways don't support creating hard or symbolic links on a file share.

  update-smb-file-share-request - UpdateSMBFileShareInput - `com.amazonaws.services.storagegateway.model.UpdateSMBFileShareRequest`

  returns: Result of the UpdateSMBFileShare operation returned by the service. - `com.amazonaws.services.storagegateway.model.UpdateSMBFileShareResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateSMBFileShareResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.UpdateSMBFileShareRequest update-smb-file-share-request]
   (.updateSMBFileShare this update-smb-file-share-request)))
(defn list-local-disks
  "Returns a list of the gateway's local disks; the gateway is identified by the
  Amazon Resource Name (ARN) in the request body. All disks are returned, whether
  configured as working storage, cache storage, stored volume, or unconfigured.
  Each entry carries a DiskStatus of present (available to use), missing (no longer
  connected to the gateway), or mismatch (occupied by a disk with incorrect
  metadata, or the disk content is corrupted).

  list-local-disks-request - A JSON object containing the of the gateway. - `com.amazonaws.services.storagegateway.model.ListLocalDisksRequest`

  returns: Result of the ListLocalDisks operation returned by the service. - `com.amazonaws.services.storagegateway.model.ListLocalDisksResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ListLocalDisksResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.ListLocalDisksRequest list-local-disks-request]
   (.listLocalDisks this list-local-disks-request)))
(defn update-smb-security-strategy
  "Updates the SMB security strategy on a file gateway. Supported only by file
  gateways.

  update-smb-security-strategy-request - `com.amazonaws.services.storagegateway.model.UpdateSMBSecurityStrategyRequest`

  returns: Result of the UpdateSMBSecurityStrategy operation returned by the service. - `com.amazonaws.services.storagegateway.model.UpdateSMBSecurityStrategyResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateSMBSecurityStrategyResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.UpdateSMBSecurityStrategyRequest update-smb-security-strategy-request]
   (.updateSMBSecurityStrategy this update-smb-security-strategy-request)))
(defn describe-chap-credentials
  "Returns an array of Challenge-Handshake Authentication Protocol (CHAP)
  credentials for a specified iSCSI target, one entry per target-initiator pair.

  describe-chap-credentials-request - A JSON object containing the Amazon Resource Name (ARN) of the iSCSI volume target. - `com.amazonaws.services.storagegateway.model.DescribeChapCredentialsRequest`

  returns: Result of the DescribeChapCredentials operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeChapCredentialsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeChapCredentialsResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DescribeChapCredentialsRequest describe-chap-credentials-request]
   (.describeChapCredentials this describe-chap-credentials-request)))
(defn describe-smb-file-shares
  "Gets a description of one or more Server Message Block (SMB) file shares from a
  file gateway. Supported only for file gateways.

  describe-smb-file-shares-request - DescribeSMBFileSharesInput - `com.amazonaws.services.storagegateway.model.DescribeSMBFileSharesRequest`

  returns: Result of the DescribeSMBFileShares operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeSMBFileSharesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeSMBFileSharesResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DescribeSMBFileSharesRequest describe-smb-file-shares-request]
   (.describeSMBFileShares this describe-smb-file-shares-request)))
(defn delete-volume
  "Deletes a storage volume previously created with CreateCachediSCSIVolume or
  CreateStorediSCSIVolume; identify it by its Amazon Resource Name (ARN) in the
  request. Supported only in the cached volume and stored volume types. For stored
  volume gateways the backing local disk is not deleted and may be reused for
  another storage volume.

  Before deleting, ensure there are no iSCSI connections to the volume and no
  snapshot in progress — use the Amazon EC2 DescribeSnapshots API (see the Amazon
  Elastic Compute Cloud API Reference) to query snapshot status.

  delete-volume-request - A JSON object containing the DeleteVolumeInput$VolumeARN to delete. - `com.amazonaws.services.storagegateway.model.DeleteVolumeRequest`

  returns: Result of the DeleteVolume operation returned by the service. - `com.amazonaws.services.storagegateway.model.DeleteVolumeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteVolumeResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DeleteVolumeRequest delete-volume-request]
   (.deleteVolume this delete-volume-request)))
(defn delete-chap-credentials
  "Deletes the Challenge-Handshake Authentication Protocol (CHAP) credentials for a
  specified iSCSI target and initiator pair.

  delete-chap-credentials-request - A JSON object containing one or more of the following fields: DeleteChapCredentialsInput$InitiatorName DeleteChapCredentialsInput$TargetARN - `com.amazonaws.services.storagegateway.model.DeleteChapCredentialsRequest`

  returns: Result of the DeleteChapCredentials operation returned by the service. - `com.amazonaws.services.storagegateway.model.DeleteChapCredentialsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteChapCredentialsResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DeleteChapCredentialsRequest delete-chap-credentials-request]
   (.deleteChapCredentials this delete-chap-credentials-request)))
(defn delete-tape
  "Deletes the specified virtual tape. Supported only in the tape gateway type.

  delete-tape-request - DeleteTapeInput - `com.amazonaws.services.storagegateway.model.DeleteTapeRequest`

  returns: Result of the DeleteTape operation returned by the service. - `com.amazonaws.services.storagegateway.model.DeleteTapeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteTapeResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DeleteTapeRequest delete-tape-request]
   (.deleteTape this delete-tape-request)))
(defn create-tape-with-barcode
  "Creates a virtual tape using your own barcode. You write data to the virtual
  tape and then archive it. A barcode is unique and cannot be reused once it has
  been used on a tape — including tapes that have since been deleted. Supported
  only in the tape gateway type.

  Cache storage must be allocated to the gateway before a virtual tape can be
  created; use the AddCache operation to add cache storage.

  create-tape-with-barcode-request - CreateTapeWithBarcodeInput - `com.amazonaws.services.storagegateway.model.CreateTapeWithBarcodeRequest`

  returns: Result of the CreateTapeWithBarcode operation returned by the service. - `com.amazonaws.services.storagegateway.model.CreateTapeWithBarcodeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.CreateTapeWithBarcodeResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.CreateTapeWithBarcodeRequest create-tape-with-barcode-request]
   (.createTapeWithBarcode this create-tape-with-barcode-request)))
(defn delete-snapshot-schedule
  "Deletes the snapshot schedule of a volume, identified by its Amazon Resource
  Name (ARN) in the request. Snapshots can be taken on a scheduled or ad hoc
  basis; see `Working with Snapshots` for details. Supported only in the stored
  and cached volume gateway types.

  Listing or deleting an individual snapshot requires the Amazon EC2 API — see
  the Amazon Elastic Compute Cloud API Reference.

  delete-snapshot-schedule-request - `com.amazonaws.services.storagegateway.model.DeleteSnapshotScheduleRequest`

  returns: Result of the DeleteSnapshotSchedule operation returned by the service. - `com.amazonaws.services.storagegateway.model.DeleteSnapshotScheduleResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteSnapshotScheduleResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DeleteSnapshotScheduleRequest delete-snapshot-schedule-request]
   (.deleteSnapshotSchedule this delete-snapshot-schedule-request)))
(defn attach-volume
  "Connects a volume to an iSCSI connection and attaches it to the specified
  gateway. Detaching and re-attaching a volume lets you recover data from one
  gateway on a different gateway without creating a snapshot, and makes it easier
  to move volumes from an on-premises gateway to one hosted on an Amazon EC2
  instance.

  attach-volume-request - AttachVolumeInput - `com.amazonaws.services.storagegateway.model.AttachVolumeRequest`

  returns: Result of the AttachVolume operation returned by the service. - `com.amazonaws.services.storagegateway.model.AttachVolumeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.AttachVolumeResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.AttachVolumeRequest attach-volume-request]
   (.attachVolume this attach-volume-request)))
(defn describe-working-storage
  "Returns information about a gateway's working storage. Supported only in the
  stored volumes gateway type; deprecated in the cached volumes API version
  (20120630) — use DescribeUploadBuffer instead.

  Working storage is also referred to as upload buffer; DescribeUploadBuffer can
  also be used to add upload buffer to a stored volume gateway. The response lists
  the disk IDs configured as working storage plus the amount of working storage
  allocated and used.

  describe-working-storage-request - A JSON object containing the of the gateway. - `com.amazonaws.services.storagegateway.model.DescribeWorkingStorageRequest`

  returns: Result of the DescribeWorkingStorage operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeWorkingStorageResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeWorkingStorageResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DescribeWorkingStorageRequest describe-working-storage-request]
   (.describeWorkingStorage this describe-working-storage-request)))
(defn describe-vtl-devices
  "Returns a description of the virtual tape library (VTL) devices for the
  specified tape gateway; the response contains VTL device information. Supported
  only in the tape gateway type.

  describe-vtl-devices-request - DescribeVTLDevicesInput - `com.amazonaws.services.storagegateway.model.DescribeVTLDevicesRequest`

  returns: Result of the DescribeVTLDevices operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeVTLDevicesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeVTLDevicesResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DescribeVTLDevicesRequest describe-vtl-devices-request]
   (.describeVTLDevices this describe-vtl-devices-request)))
(defn cancel-archival
  "Cancels the archiving of a virtual tape to the virtual tape shelf (VTS) after
  the archiving process has been initiated. Supported only in the tape gateway
  type.

  cancel-archival-request - CancelArchivalInput - `com.amazonaws.services.storagegateway.model.CancelArchivalRequest`

  returns: Result of the CancelArchival operation returned by the service. - `com.amazonaws.services.storagegateway.model.CancelArchivalResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.CancelArchivalResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.CancelArchivalRequest cancel-archival-request]
   (.cancelArchival this cancel-archival-request)))
(defn update-maintenance-start-time
  "Updates a gateway's weekly maintenance start time, including the day and time
  of week. The maintenance time is expressed in the gateway's time zone.

  update-maintenance-start-time-request - A JSON object containing the following fields: UpdateMaintenanceStartTimeInput$DayOfMonth UpdateMaintenanceStartTimeInput$DayOfWeek UpdateMaintenanceStartTimeInput$HourOfDay UpdateMaintenanceStartTimeInput$MinuteOfHour - `com.amazonaws.services.storagegateway.model.UpdateMaintenanceStartTimeRequest`

  returns: Result of the UpdateMaintenanceStartTime operation returned by the service. - `com.amazonaws.services.storagegateway.model.UpdateMaintenanceStartTimeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateMaintenanceStartTimeResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.UpdateMaintenanceStartTimeRequest update-maintenance-start-time-request]
   (.updateMaintenanceStartTime this update-maintenance-start-time-request)))
(defn delete-file-share
  "Deletes a file share from a file gateway. Supported only for file gateways.

  delete-file-share-request - DeleteFileShareInput - `com.amazonaws.services.storagegateway.model.DeleteFileShareRequest`

  returns: Result of the DeleteFileShare operation returned by the service. - `com.amazonaws.services.storagegateway.model.DeleteFileShareResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteFileShareResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DeleteFileShareRequest delete-file-share-request]
   (.deleteFileShare this delete-file-share-request)))
(defn update-snapshot-schedule
  "Updates the snapshot schedule configured for a gateway volume. Supported only
  in the cached volume and stored volume gateway types.

  The default schedule is one snapshot every 24 hours, starting at the volume's
  creation time. The request identifies the gateway volume whose schedule to
  update plus the schedule details: when snapshots begin on a day and their
  frequency in hours.

  update-snapshot-schedule-request - A JSON object containing one or more of the following fields: UpdateSnapshotScheduleInput$Description UpdateSnapshotScheduleInput$RecurrenceInHours UpdateSnapshotScheduleInput$StartAt UpdateSnapshotScheduleInput$VolumeARN - `com.amazonaws.services.storagegateway.model.UpdateSnapshotScheduleRequest`

  returns: Result of the UpdateSnapshotSchedule operation returned by the service. - `com.amazonaws.services.storagegateway.model.UpdateSnapshotScheduleResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateSnapshotScheduleResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.UpdateSnapshotScheduleRequest update-snapshot-schedule-request]
   (.updateSnapshotSchedule this update-snapshot-schedule-request)))
(defn activate-gateway
  "Activates a gateway previously deployed on your host. Activation specifies the
  region used for storing snapshots or tapes, the time zone for the gateway's
  scheduled snapshot window, an activation key, and a gateway name, and associates
  the gateway with your account (see UpdateGatewayInformation for more).
  The gateway VM must be turned on before the gateway can be activated.

  activate-gateway-request - A JSON object containing one or more of the following fields: ActivateGatewayInput$ActivationKey ActivateGatewayInput$GatewayName ActivateGatewayInput$GatewayRegion ActivateGatewayInput$GatewayTimezone ActivateGatewayInput$GatewayType ActivateGatewayInput$TapeDriveType ActivateGatewayInput$MediumChangerType - `com.amazonaws.services.storagegateway.model.ActivateGatewayRequest`

  returns: Result of the ActivateGateway operation returned by the service. - `com.amazonaws.services.storagegateway.model.ActivateGatewayResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ActivateGatewayResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.ActivateGatewayRequest activate-gateway-request]
   (.activateGateway this activate-gateway-request)))
(defn list-volume-recovery-points
  "Lists the recovery points for a specified gateway. Supported only in the cached
  volume gateway type.

  Each cache volume has one recovery point: a point in time at which all volume
  data is consistent and from which a snapshot or a new cached volume clone can be
  created. To snapshot from a recovery point, use
  CreateSnapshotFromVolumeRecoveryPoint.

  list-volume-recovery-points-request - `com.amazonaws.services.storagegateway.model.ListVolumeRecoveryPointsRequest`

  returns: Result of the ListVolumeRecoveryPoints operation returned by the service. - `com.amazonaws.services.storagegateway.model.ListVolumeRecoveryPointsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ListVolumeRecoveryPointsResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.ListVolumeRecoveryPointsRequest list-volume-recovery-points-request]
   (.listVolumeRecoveryPoints this list-volume-recovery-points-request)))
(defn list-tapes
  "Lists virtual tapes in your virtual tape library (VTL) and your virtual tape
  shelf (VTS), selected by one or more tape Amazon Resource Names (ARNs); with no
  tape ARN specified, all virtual tapes in both the VTL and VTS are listed.
  Supported only in the tape gateway type.

  Pagination: by default at most 100 tapes are returned. The optional Limit field
  caps the number of tapes per response; a truncated response includes a Marker to
  pass in the next request for the following set of tapes.

  list-tapes-request - A JSON object that contains one or more of the following fields: ListTapesInput$Limit ListTapesInput$Marker ListTapesInput$TapeARNs - `com.amazonaws.services.storagegateway.model.ListTapesRequest`

  returns: Result of the ListTapes operation returned by the service. - `com.amazonaws.services.storagegateway.model.ListTapesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ListTapesResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.ListTapesRequest list-tapes-request]
   (.listTapes this list-tapes-request)))
(defn describe-upload-buffer
  "Returns information about a gateway's upload buffer. Supported for the stored
  volume, cached volume, and tape gateway types. The response lists the disk IDs
  configured as upload buffer space plus the amount of upload buffer space
  allocated and used.

  describe-upload-buffer-request - `com.amazonaws.services.storagegateway.model.DescribeUploadBufferRequest`

  returns: Result of the DescribeUploadBuffer operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeUploadBufferResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeUploadBufferResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DescribeUploadBufferRequest describe-upload-buffer-request]
   (.describeUploadBuffer this describe-upload-buffer-request)))
(defn shutdown
  "Shuts down this client object, releasing any resources that might be held open.
  Optional: callers are not expected to invoke it, but may do so to explicitly
  release open resources. A client that has been shut down must not be used to
  make further requests."
  ([^AWSStorageGateway this]
   (.shutdown this)))
(defn start-gateway
  "Starts a gateway that was previously shut down (see ShutdownGateway). Once
  started, other API calls can be made, applications can read from and write to
  the gateway's storage volumes, and snapshot backups can be taken. Specify which
  gateway to start via its Amazon Resource Name (ARN) in the request.

  The request returns 200 OK immediately, but the gateway may take some time to
  become ready; call DescribeGatewayInformation and check the status before
  making additional API calls. For more information, see ActivateGateway.

  start-gateway-request - A JSON object containing the of the gateway to start. - `com.amazonaws.services.storagegateway.model.StartGatewayRequest`

  returns: Result of the StartGateway operation returned by the service. - `com.amazonaws.services.storagegateway.model.StartGatewayResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.StartGatewayResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.StartGatewayRequest start-gateway-request]
   (.startGateway this start-gateway-request)))
(defn set-local-console-password
  "Sets the password for the VM local console. On first login you use the VM's
  default credentials; setting a new password is recommended, and the default
  password is not required to do so.

  set-local-console-password-request - SetLocalConsolePasswordInput - `com.amazonaws.services.storagegateway.model.SetLocalConsolePasswordRequest`

  returns: Result of the SetLocalConsolePassword operation returned by the service. - `com.amazonaws.services.storagegateway.model.SetLocalConsolePasswordResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.SetLocalConsolePasswordResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.SetLocalConsolePasswordRequest set-local-console-password-request]
   (.setLocalConsolePassword this set-local-console-password-request)))
(defn disable-gateway
  "Disables a tape gateway that is no longer functioning — for example, when the
  gateway VM is damaged — so that virtual tapes can be recovered. Use this for a
  tape gateway that is unreachable or not functioning; supported only in the tape
  gateway type. Once disabled, a gateway cannot be re-enabled.

  disable-gateway-request - DisableGatewayInput - `com.amazonaws.services.storagegateway.model.DisableGatewayRequest`

  returns: Result of the DisableGateway operation returned by the service. - `com.amazonaws.services.storagegateway.model.DisableGatewayResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DisableGatewayResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DisableGatewayRequest disable-gateway-request]
   (.disableGateway this disable-gateway-request)))
(defn retrieve-tape-archive
  "Retrieves an archived virtual tape from the virtual tape shelf (VTS) to a tape
  gateway. Tapes archived in the VTS are not associated with any gateway; once
  retrieved, a tape is associated with a gateway even though it is also listed in
  the VTS (the archive).

  A tape successfully retrieved to one gateway cannot be retrieved again to
  another gateway without first archiving it again. Supported only in the tape
  gateway type.

  retrieve-tape-archive-request - RetrieveTapeArchiveInput - `com.amazonaws.services.storagegateway.model.RetrieveTapeArchiveRequest`

  returns: Result of the RetrieveTapeArchive operation returned by the service. - `com.amazonaws.services.storagegateway.model.RetrieveTapeArchiveResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.RetrieveTapeArchiveResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.RetrieveTapeArchiveRequest retrieve-tape-archive-request]
   (.retrieveTapeArchive this retrieve-tape-archive-request)))
(defn detach-volume
  "Disconnects a volume from an iSCSI connection and then detaches the volume from
  the specified gateway. Detaching and attaching a volume enables you to recover
  your data from one gateway to a different gateway without creating a snapshot.
  It also makes it easier to move your volumes from an on-premises gateway to a
  gateway hosted on an Amazon EC2 instance.

  detach-volume-request - DetachVolumeInput - `com.amazonaws.services.storagegateway.model.DetachVolumeRequest`

  returns: Result of the DetachVolume operation returned by the service. - `com.amazonaws.services.storagegateway.model.DetachVolumeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DetachVolumeResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DetachVolumeRequest detach-volume-request]
    (-> this (.detachVolume detach-volume-request))))
(defn list-gateways
  "Lists the gateways owned by an AWS account in the request's region, ordered
  by gateway ARN. Returns at most 100 gateways by default; supports pagination
  via the Limit and Marker request fields (a truncated response carries a
  marker for fetching the next page).

  list-gateways-request - `com.amazonaws.services.storagegateway.model.ListGatewaysRequest`

  returns: `com.amazonaws.services.storagegateway.model.ListGatewaysResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ListGatewaysResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ListGatewaysRequest list-gateways-request]
   (.listGateways this list-gateways-request))
  (^com.amazonaws.services.storagegateway.model.ListGatewaysResult
   [^AWSStorageGateway this]
   (.listGateways this)))
(defn create-tapes
  "Creates one or more virtual tapes that can be written to and then archived.
  Tape gateway type only. Cache storage must already be allocated to the
  gateway (see the AddCache operation) before tapes can be created.

  create-tapes-request - `com.amazonaws.services.storagegateway.model.CreateTapesRequest`

  returns: `com.amazonaws.services.storagegateway.model.CreateTapesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.CreateTapesResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CreateTapesRequest create-tapes-request]
   (.createTapes this create-tapes-request)))
(defn set-endpoint
  "Overrides the service endpoint this client communicates with. Deprecated in
  the SDK: prefer AwsClientBuilder#setEndpointConfiguration, e.g.
  builder.setEndpointConfiguration(new EndpointConfiguration(endpoint, signingRegion)).

  endpoint - the endpoint (ex: \"storagegateway.us-east-1.amazonaws.com\") or a
  full URL including the protocol, of the region-specific AWS endpoint - `java.lang.String`"
  ([^AWSStorageGateway this ^java.lang.String endpoint]
   (.setEndpoint this endpoint)))
(defn update-chap-credentials
  "Updates the Challenge-Handshake Authentication Protocol (CHAP) credentials
  for a given iSCSI target (CHAP is off by default). All existing connections
  on the target are closed by the update; initiators must reconnect with the
  new credentials.

  update-chap-credentials-request - `com.amazonaws.services.storagegateway.model.UpdateChapCredentialsRequest`

  returns: `com.amazonaws.services.storagegateway.model.UpdateChapCredentialsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateChapCredentialsResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.UpdateChapCredentialsRequest update-chap-credentials-request]
   (.updateChapCredentials this update-chap-credentials-request)))
(defn add-working-storage
  "Configures one or more local disks as working storage (upload buffer) for a
  stored volume gateway. Deprecated since cached volume API version 20120630 -
  use AddUploadBuffer instead. The request names the gateway ARN and the disk
  IDs to configure.

  add-working-storage-request - `com.amazonaws.services.storagegateway.model.AddWorkingStorageRequest`

  returns: `com.amazonaws.services.storagegateway.model.AddWorkingStorageResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.AddWorkingStorageResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.AddWorkingStorageRequest add-working-storage-request]
   (.addWorkingStorage this add-working-storage-request)))
(defn refresh-cache
  "Refreshes the cache for a file share by finding objects in the S3 bucket
  that were added, removed or replaced since the gateway last listed and
  cached the bucket's contents. File gateway type only. The call only
  initiates the refresh - a success response does not mean the refresh has
  finished; subscribe to the CloudWatch refresh-complete event to learn when
  it has.

  refresh-cache-request - `com.amazonaws.services.storagegateway.model.RefreshCacheRequest`

  returns: `com.amazonaws.services.storagegateway.model.RefreshCacheResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.RefreshCacheResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.RefreshCacheRequest refresh-cache-request]
   (.refreshCache this refresh-cache-request)))
(defn create-nfs-file-share
  "Creates a Network File System (NFS) file share - an S3-backed file system
  mount point - on an existing file gateway. File gateway type only. Requires
  AWS STS to be activated in the gateway's region, and does not support
  creating hard or symbolic links on the share.

  create-nfs-file-share-request - `com.amazonaws.services.storagegateway.model.CreateNFSFileShareRequest`

  returns: `com.amazonaws.services.storagegateway.model.CreateNFSFileShareResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.CreateNFSFileShareResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CreateNFSFileShareRequest create-nfs-file-share-request]
   (.createNFSFileShare this create-nfs-file-share-request)))
(defn delete-tape-archive
  "Deletes the specified virtual tape from the virtual tape shelf (VTS).
  Tape gateway type only.

  delete-tape-archive-request - `com.amazonaws.services.storagegateway.model.DeleteTapeArchiveRequest`

  returns: `com.amazonaws.services.storagegateway.model.DeleteTapeArchiveResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteTapeArchiveResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DeleteTapeArchiveRequest delete-tape-archive-request]
   (.deleteTapeArchive this delete-tape-archive-request)))
(defn update-nfs-file-share
  "Updates an NFS file share's settings: default S3 storage class, metadata
  defaults, allowed NFS clients, squash settings, and write status. To leave a
  field unchanged, set the corresponding input field to null. File gateway
  type only.

  update-nfs-file-share-request - `com.amazonaws.services.storagegateway.model.UpdateNFSFileShareRequest`

  returns: `com.amazonaws.services.storagegateway.model.UpdateNFSFileShareResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateNFSFileShareResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.UpdateNFSFileShareRequest update-nfs-file-share-request]
   (.updateNFSFileShare this update-nfs-file-share-request)))
(defn update-gateway-software-now
  "Immediately triggers a gateway virtual machine (VM) software update. The
  200 OK response is returned right away even though the update may take some
  time; poll DescribeGatewayInformation for the STATE_RUNNING state. The
  update forces a system restart of the gateway, so consider raising iSCSI
  initiator timeouts to minimize application disruption.

  update-gateway-software-now-request - `com.amazonaws.services.storagegateway.model.UpdateGatewaySoftwareNowRequest`

  returns: `com.amazonaws.services.storagegateway.model.UpdateGatewaySoftwareNowResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateGatewaySoftwareNowResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.UpdateGatewaySoftwareNowRequest update-gateway-software-now-request]
   (.updateGatewaySoftwareNow this update-gateway-software-now-request)))
(defn list-volume-initiators
  "Lists the iSCSI initiators connected to a volume, useful for determining
  whether the volume is in use. Cached volume and stored volume gateway types
  only.

  list-volume-initiators-request - `com.amazonaws.services.storagegateway.model.ListVolumeInitiatorsRequest`

  returns: `com.amazonaws.services.storagegateway.model.ListVolumeInitiatorsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ListVolumeInitiatorsResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ListVolumeInitiatorsRequest list-volume-initiators-request]
   (.listVolumeInitiators this list-volume-initiators-request)))
(defn remove-tags-from-resource
  "Removes one or more tags from the specified resource. Cached volume, stored
  volume and tape gateway types only.

  remove-tags-from-resource-request - `com.amazonaws.services.storagegateway.model.RemoveTagsFromResourceRequest`

  returns: `com.amazonaws.services.storagegateway.model.RemoveTagsFromResourceResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.RemoveTagsFromResourceResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.RemoveTagsFromResourceRequest remove-tags-from-resource-request]
   (.removeTagsFromResource this remove-tags-from-resource-request))
  (^com.amazonaws.services.storagegateway.model.RemoveTagsFromResourceResult
   [^AWSStorageGateway this]
   (.removeTagsFromResource this)))
(defn reset-cache
  "Resets all cache disks that encountered an error so they can be
  reconfigured as cache storage (an errored cache disk blocks read/write on
  virtual tapes in the gateway). Cached volume and tape types only. Resetting
  discards the gateway's cache storage - data not yet uploaded to S3 can be
  lost - and leaves no configured cache disks, so at least one new cache disk
  must then be configured for the gateway to function.

  reset-cache-request - `com.amazonaws.services.storagegateway.model.ResetCacheRequest`

  returns: `com.amazonaws.services.storagegateway.model.ResetCacheResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ResetCacheResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ResetCacheRequest reset-cache-request]
   (.resetCache this reset-cache-request)))
(defn describe-storedi-scsi-volumes
  "Describes the gateway volumes named in the request (all must belong to one
  gateway); volume information is returned sorted by volume ARN. Stored volume
  gateway type only.

  describe-storedi-scsi-volumes-request - `com.amazonaws.services.storagegateway.model.DescribeStorediSCSIVolumesRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeStorediSCSIVolumesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeStorediSCSIVolumesResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeStorediSCSIVolumesRequest describe-storedi-scsi-volumes-request]
   (.describeStorediSCSIVolumes this describe-storedi-scsi-volumes-request)))
(defn describe-smb-settings
  "Gets a description of a Server Message Block (SMB) file share's settings
  from a file gateway. File gateway type only.

  describe-smb-settings-request - `com.amazonaws.services.storagegateway.model.DescribeSMBSettingsRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeSMBSettingsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeSMBSettingsResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeSMBSettingsRequest describe-smb-settings-request]
   (.describeSMBSettings this describe-smb-settings-request)))
(defn describe-tape-archives
  "Describes the specified virtual tapes in the virtual tape shelf (VTS).
  Without a specific TapeARN, describes all virtual tapes found in the VTS
  for the account. Tape gateway type only.

  describe-tape-archives-request - `com.amazonaws.services.storagegateway.model.DescribeTapeArchivesRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeTapeArchivesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeTapeArchivesResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeTapeArchivesRequest describe-tape-archives-request]
   (.describeTapeArchives this describe-tape-archives-request))
  (^com.amazonaws.services.storagegateway.model.DescribeTapeArchivesResult
   [^AWSStorageGateway this]
   (.describeTapeArchives this)))
(defn update-vtl-device-type
  "Updates the medium changer type of a tape gateway, allowing a different
  type to be selected after the gateway has been activated. Tape gateway type
  only.

  update-vtl-device-type-request - `com.amazonaws.services.storagegateway.model.UpdateVTLDeviceTypeRequest`

  returns: `com.amazonaws.services.storagegateway.model.UpdateVTLDeviceTypeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateVTLDeviceTypeResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.UpdateVTLDeviceTypeRequest update-vtl-device-type-request]
   (.updateVTLDeviceType this update-vtl-device-type-request)))
(defn describe-bandwidth-rate-limit
  "Returns a gateway's bandwidth rate limits (unset by default, meaning no
  rate limiting). A limit value is returned only if the limit is set; with no
  limits set, only the gateway ARN appears in the response body. The gateway
  is identified by its ARN in the request.

  describe-bandwidth-rate-limit-request - `com.amazonaws.services.storagegateway.model.DescribeBandwidthRateLimitRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeBandwidthRateLimitResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeBandwidthRateLimitResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeBandwidthRateLimitRequest describe-bandwidth-rate-limit-request]
   (.describeBandwidthRateLimit this describe-bandwidth-rate-limit-request)))
(defn describe-tapes
  "Describes the virtual tapes with the given ARNs; without a TapeARN,
  describes all virtual tapes associated with the specified gateway. Tape
  gateway type only.

  describe-tapes-request - `com.amazonaws.services.storagegateway.model.DescribeTapesRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeTapesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeTapesResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeTapesRequest describe-tapes-request]
   (.describeTapes this describe-tapes-request)))
(defn create-smb-file-share
  "Creates a Server Message Block (SMB) file share - an S3-backed file system
  mount point - on an existing file gateway. File gateway type only. Requires
  AWS STS to be activated in the gateway's AWS Region, and does not support
  creating hard or symbolic links on the share.

  create-smb-file-share-request - `com.amazonaws.services.storagegateway.model.CreateSMBFileShareRequest`

  returns: `com.amazonaws.services.storagegateway.model.CreateSMBFileShareResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.CreateSMBFileShareResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CreateSMBFileShareRequest create-smb-file-share-request]
   (.createSMBFileShare this create-smb-file-share-request)))
(defn set-smb-guest-password
  "Sets the password for the smbguest user - the account used when a file
  share's authentication method is GuestAccess.

  set-smb-guest-password-request - `com.amazonaws.services.storagegateway.model.SetSMBGuestPasswordRequest`

  returns: `com.amazonaws.services.storagegateway.model.SetSMBGuestPasswordResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.SetSMBGuestPasswordResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.SetSMBGuestPasswordRequest set-smb-guest-password-request]
   (.setSMBGuestPassword this set-smb-guest-password-request)))
(defn delete-bandwidth-rate-limit
  "Deletes a gateway's bandwidth rate limits - the upload limit, the download
  limit, or both; deleting only one leaves the other unchanged. The gateway is
  identified by its ARN in the request.

  delete-bandwidth-rate-limit-request - `com.amazonaws.services.storagegateway.model.DeleteBandwidthRateLimitRequest`

  returns: `com.amazonaws.services.storagegateway.model.DeleteBandwidthRateLimitResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteBandwidthRateLimitResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DeleteBandwidthRateLimitRequest delete-bandwidth-rate-limit-request]
   (.deleteBandwidthRateLimit this delete-bandwidth-rate-limit-request)))
(defn add-tags-to-resource
  "Adds one or more key/value tags to a resource for categorization (by
  purpose, owner, environment, team, etc.). Taggable resources: storage
  gateways of all types, storage volumes, virtual tapes, and NFS/SMB file
  shares - up to 50 tags per resource. Tapes and volumes recovered to a new
  gateway keep their tags.

  add-tags-to-resource-request - `com.amazonaws.services.storagegateway.model.AddTagsToResourceRequest`

  returns: `com.amazonaws.services.storagegateway.model.AddTagsToResourceResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.AddTagsToResourceResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.AddTagsToResourceRequest add-tags-to-resource-request]
   (.addTagsToResource this add-tags-to-resource-request)))
(defn describe-cachedi-scsi-volumes
  "Describes the gateway volumes named in the request (all must belong to one
  gateway); volume information is returned sorted by volume ARN. Cached volume
  gateway types only.

  describe-cachedi-scsi-volumes-request - `com.amazonaws.services.storagegateway.model.DescribeCachediSCSIVolumesRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeCachediSCSIVolumesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeCachediSCSIVolumesResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeCachediSCSIVolumesRequest describe-cachedi-scsi-volumes-request]
   (.describeCachediSCSIVolumes this describe-cachedi-scsi-volumes-request)))
(defn get-cached-response-metadata
  "Returns diagnostic metadata for a previously executed successful request -
  useful for debugging and not part of the operation's result data. Metadata
  is cached only briefly, so retrieve it as soon as possible after executing
  the request.

  request - the originally executed request - `com.amazonaws.AmazonWebServiceRequest`

  returns: the response metadata for the request, or null if none is
  available - `com.amazonaws.ResponseMetadata`"
  (^com.amazonaws.ResponseMetadata
   [^AWSStorageGateway this ^com.amazonaws.AmazonWebServiceRequest request]
   (.getCachedResponseMetadata this request)))
(defn notify-when-uploaded
  "Requests a CloudWatch Events notification once all files written to an NFS
  file share up to the time of this call have been uploaded to Amazon S3. The
  notification can be routed to event targets such as Amazon SNS or an AWS
  Lambda function. File gateway type only. See \"Getting File Upload
  Notification\" in the Storage Gateway User Guide.

  notify-when-uploaded-request - `com.amazonaws.services.storagegateway.model.NotifyWhenUploadedRequest`

  returns: `com.amazonaws.services.storagegateway.model.NotifyWhenUploadedResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.NotifyWhenUploadedResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.NotifyWhenUploadedRequest notify-when-uploaded-request]
   (.notifyWhenUploaded this notify-when-uploaded-request)))
(defn list-volumes
  "List the iSCSI stored volumes of a gateway, sorted by volume ARN. Only the volume
  ARNs are returned; use DescribeStorediSCSIVolumes or DescribeCachediSCSIVolumes for
  additional volume details. Supports pagination: at most 100 volumes are returned by
  default, the optional Limit field caps the page size, and a Marker field in a
  truncated response can be passed back to fetch the next page. Only supported in the
  cached volume and stored volume gateway types.

  request - A JSON object that contains one or more of the following fields:
  ListVolumesInput$Limit ListVolumesInput$Marker -
  `com.amazonaws.services.storagegateway.model.ListVolumesRequest`

  returns: Result of the ListVolumes operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.ListVolumesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.ListVolumesResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ListVolumesRequest request]
    (.listVolumes this request)))
(defn retrieve-tape-recovery-point
  "Retrieve the recovery point for the specified virtual tape. Only supported in the
  tape gateway type. A recovery point is a point-in-time view of a virtual tape at
  which all the data on the tape is consistent; if your gateway crashes, virtual tapes
  with recovery points can be recovered to a new gateway. The tape can be retrieved to
  only one gateway (a tape gateway), the retrieved tape is read-only, and there is no
  charge for retrieving recovery points.

  request - RetrieveTapeRecoveryPointInput -
  `com.amazonaws.services.storagegateway.model.RetrieveTapeRecoveryPointRequest`

  returns: Result of the RetrieveTapeRecoveryPoint operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.RetrieveTapeRecoveryPointResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.RetrieveTapeRecoveryPointResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.RetrieveTapeRecoveryPointRequest request]
    (.retrieveTapeRecoveryPoint this request)))
(defn delete-gateway
  "Delete a gateway, identified by the Amazon Resource Name (ARN) of the gateway in the
  request. The gateway virtual machine (VM) on your host computer is NOT deleted.

  A deleted gateway cannot be reactivated. Completed snapshots of the gateway volumes
  are kept, but pending snapshots will not complete; after deleting a gateway, your
  next step is to remove it from your environment. Existing Amazon EBS snapshots
  persist and continue to be billed; remove them by canceling your Amazon EC2
  subscription, or delete them individually via the Amazon EC2 console. For more
  information, see the AWS Storage Gateway Detail Page.

  request - A JSON object containing the ID of the gateway to delete. -
  `com.amazonaws.services.storagegateway.model.DeleteGatewayRequest`

  returns: Result of the DeleteGateway operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.DeleteGatewayResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.DeleteGatewayResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DeleteGatewayRequest request]
    (.deleteGateway this request)))
(defn add-upload-buffer
  "Configure one or more gateway local disks as upload buffer for a specified gateway.
  Supported for the stored volume, cached volume and tape gateway types. The request
  names the gateway by Amazon Resource Name (ARN) and lists the disk IDs to configure
  as upload buffer.

  request - `com.amazonaws.services.storagegateway.model.AddUploadBufferRequest`

  returns: Result of the AddUploadBuffer operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.AddUploadBufferResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.AddUploadBufferResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.AddUploadBufferRequest request]
    (.addUploadBuffer this request)))
(defn describe-gateway-information
  "Return metadata about a gateway: its name, network interfaces, configured time zone,
  and state (whether the gateway is running or not). The gateway to describe is
  identified by its Amazon Resource Name (ARN) in the request.

  request - A JSON object containing the ID of the gateway. -
  `com.amazonaws.services.storagegateway.model.DescribeGatewayInformationRequest`

  returns: Result of the DescribeGatewayInformation operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.DescribeGatewayInformationResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.DescribeGatewayInformationResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeGatewayInformationRequest request]
    (.describeGatewayInformation this request)))
(defn create-storedi-scsi-volume
  "Create a volume on a specified gateway. Only supported in the stored volume gateway
  type. The volume size is inferred from the disk size; you can preserve existing data
  on the disk, create the volume from an existing snapshot, or create an empty volume
  (which erases any existing data on the disk). The request specifies the gateway and
  the disk; the response carries the volume's Amazon Resource Name (ARN), its size,
  and the iSCSI target ARN that initiators use to connect to the volume target.

  request - A JSON object containing one or more of the following fields:
  CreateStorediSCSIVolumeInput$DiskId CreateStorediSCSIVolumeInput$NetworkInterfaceId
  CreateStorediSCSIVolumeInput$PreserveExistingData
  CreateStorediSCSIVolumeInput$SnapshotId CreateStorediSCSIVolumeInput$TargetName -
  `com.amazonaws.services.storagegateway.model.CreateStorediSCSIVolumeRequest`

  returns: Result of the CreateStorediSCSIVolume operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.CreateStorediSCSIVolumeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.CreateStorediSCSIVolumeResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CreateStorediSCSIVolumeRequest request]
    (.createStorediSCSIVolume this request)))
(defn create-snapshot-from-volume-recovery-point
  "Initiate a snapshot of a gateway volume from a volume recovery point. Only supported
  in the cached volume gateway type. A volume recovery point is a point in time at
  which all data of the volume is consistent and from which a snapshot can be created;
  list recovery points with ListVolumeRecoveryPoints. Identify the volume by its
  Amazon Resource Name (ARN) and supply a snapshot description; the snapshot and its
  description appear in the AWS Storage Gateway console, and the returned snapshot ID
  can be used to check progress or later create a volume from the snapshot.

  To list or delete a snapshot, use the Amazon EC2 API; see the Amazon Elastic Compute
  Cloud API Reference.

  request - `com.amazonaws.services.storagegateway.model.CreateSnapshotFromVolumeRecoveryPointRequest`

  returns: Result of the CreateSnapshotFromVolumeRecoveryPoint operation returned by
  the service. -
  `com.amazonaws.services.storagegateway.model.CreateSnapshotFromVolumeRecoveryPointResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.CreateSnapshotFromVolumeRecoveryPointResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CreateSnapshotFromVolumeRecoveryPointRequest request]
    (.createSnapshotFromVolumeRecoveryPoint this request)))
(defn list-file-shares
  "Get a list of the file shares for a specific file gateway, or of the file shares
  belonging to the calling user account. Only supported for file gateways.

  request - ListFileShareInput -
  `com.amazonaws.services.storagegateway.model.ListFileSharesRequest`

  returns: Result of the ListFileShares operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.ListFileSharesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.ListFileSharesResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ListFileSharesRequest request]
    (.listFileShares this request)))
(defn cancel-retrieval
  "Cancel retrieval of a virtual tape from the virtual tape shelf (VTS) to a gateway
  after the retrieval process is initiated; the virtual tape is returned to the VTS.
  Only supported in the tape gateway type.

  request - CancelRetrievalInput -
  `com.amazonaws.services.storagegateway.model.CancelRetrievalRequest`

  returns: Result of the CancelRetrieval operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.CancelRetrievalResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.CancelRetrievalResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CancelRetrievalRequest request]
    (.cancelRetrieval this request)))
(defn update-gateway-information
  "Update a gateway's metadata, which includes the gateway's name and time zone. The
  gateway to update is identified by its Amazon Resource Name (ARN) in the request.
  For gateways activated after September 2, 2015, the ARN contains the gateway ID
  rather than the gateway name, so changing the gateway's name has no effect on its
  ARN.

  request - `com.amazonaws.services.storagegateway.model.UpdateGatewayInformationRequest`

  returns: Result of the UpdateGatewayInformation operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.UpdateGatewayInformationResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.UpdateGatewayInformationResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.UpdateGatewayInformationRequest request]
    (.updateGatewayInformation this request)))
(defn create-snapshot
  "Initiate a snapshot of a volume. AWS Storage Gateway can back up point-in-time
  snapshots of your data to Amazon S3 for durable off-site recovery and import the
  data into an Amazon EBS volume in Amazon EC2; snapshots can be taken on a scheduled
  or ad hoc basis, and this API takes an ad hoc snapshot (see Editing a Snapshot
  Schedule for scheduling).

  Identify the volume by its Amazon Resource Name (ARN) and provide a description;
  the snapshot and description appear in the AWS Storage Gateway console, and the
  returned snapshot ID can be used to check progress or later create a volume from
  the snapshot. Only supported in the stored and cached volume gateway types. To list
  or delete a snapshot, use the Amazon EC2 API (DescribeSnapshots or DeleteSnapshot
  in the EC2 API reference). Note that volume and snapshot IDs are changing to a
  longer-length ID format; see the important note on the Welcome page.

  request - A JSON object containing one or more of the following fields:
  CreateSnapshotInput$SnapshotDescription CreateSnapshotInput$VolumeARN -
  `com.amazonaws.services.storagegateway.model.CreateSnapshotRequest`

  returns: Result of the CreateSnapshot operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.CreateSnapshotResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.CreateSnapshotResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CreateSnapshotRequest request]
    (.createSnapshot this request)))
(defn update-bandwidth-rate-limit
  "Update the bandwidth rate limits of a gateway. Both the upload and download rate
  limits can be updated, or only one of the two; any limit you don't set keeps its
  existing value. By default a gateway has no rate limits and may use the maximum
  available bandwidth. The gateway to update is identified by its Amazon Resource
  Name (ARN) in the request.

  request - A JSON object containing one or more of the following fields:
  UpdateBandwidthRateLimitInput$AverageDownloadRateLimitInBitsPerSec
  UpdateBandwidthRateLimitInput$AverageUploadRateLimitInBitsPerSec -
  `com.amazonaws.services.storagegateway.model.UpdateBandwidthRateLimitRequest`

  returns: Result of the UpdateBandwidthRateLimit operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.UpdateBandwidthRateLimitResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.UpdateBandwidthRateLimitResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.UpdateBandwidthRateLimitRequest request]
    (.updateBandwidthRateLimit this request)))
(defn describe-snapshot-schedule
  "Describe the snapshot schedule for the specified gateway volume, including the
  intervals at which snapshots are automatically initiated on the volume. Only
  supported in the cached volume and stored volume types.

  request - A JSON object containing the DescribeSnapshotScheduleInput$VolumeARN of
  the volume. -
  `com.amazonaws.services.storagegateway.model.DescribeSnapshotScheduleRequest`

  returns: Result of the DescribeSnapshotSchedule operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.DescribeSnapshotScheduleResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.DescribeSnapshotScheduleResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeSnapshotScheduleRequest request]
    (.describeSnapshotSchedule this request)))
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.storagegateway/src/com/amazonaws/services/storagegateway/AWSStorageGateway.clj | clojure | this topic
for more information, see UpdateGatewayInformation.
however, for added security, you might use it.
(ns com.amazonaws.services.storagegateway.AWSStorageGateway
  "Interface for accessing AWS Storage Gateway.
  Note: Do not directly implement this interface, new methods are added to it regularly. Extend from
  AbstractAWSStorageGateway instead.

  AWS Storage Gateway Service

  AWS Storage Gateway is the service that connects an on-premises software appliance with cloud-based storage to
  provide seamless and secure integration between an organization's on-premises IT environment and the AWS storage
  infrastructure. The service enables you to securely upload data to the AWS cloud for cost effective backup and rapid
  disaster recovery.

  Use the following links to get started using the AWS Storage Gateway Service API Reference:

  AWS Storage Gateway Required Request Headers: Describes the required headers that you must send with every POST
  request to AWS Storage Gateway.
  Signing Requests: describes how to sign such a request.
  Error Responses: Provides reference information about AWS Storage Gateway errors.
  Operations in AWS Storage Gateway: Contains detailed descriptions of all AWS Storage Gateway operations, their
  request parameters, response elements, possible errors, and examples of requests and responses.
  AWS Storage Gateway Regions and Endpoints: Provides a list of each AWS region and endpoints available for use
  with AWS Storage Gateway.

  AWS Storage Gateway resource IDs are in uppercase. When you use these resource IDs with the Amazon EC2 API, EC2
  expects resource IDs in lowercase. You must change your resource ID to lowercase to use it with the EC2 API. For
  example, in Storage Gateway the ID for a volume might be vol-AA22BB012345DAF670. When you use this ID
  with the EC2 API, you must change it to vol-aa22bb012345daf670. Otherwise, the EC2 API might not behave
  as expected.

  IDs for Storage Gateway volumes and Amazon EBS snapshots created from gateway volumes are changing to a longer
  format. Starting in December 2016, all new volumes and snapshots will be created with a 17-character string. Starting
  in April 2016, you will be able to use these longer IDs so you can test your systems with the new format. For more
  information, see Longer EC2 and EBS Resource IDs.

  For example, a volume Amazon Resource Name (ARN) with the longer volume ID format looks like the following:
  arn:aws:storagegateway:us-west-2:111122223333:gateway/sgw-12A3456B/volume/vol-1122AABBCCDDEEFFG.
  A snapshot ID with the longer ID format looks like the following: snap-78e226633445566ee.

  For more information, see Announcement: Heads-up – Longer
  AWS Storage Gateway volume and snapshot IDs coming in 2016."
  (:refer-clojure :only [require comment defn ->])
  (:import [com.amazonaws.services.storagegateway AWSStorageGateway]))
(defn join-domain
  "Add a file gateway to an Active Directory domain. Only supported for file gateways
  that support the SMB file protocol.

  request - JoinDomainInput -
  `com.amazonaws.services.storagegateway.model.JoinDomainRequest`

  returns: Result of the JoinDomain operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.JoinDomainResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.JoinDomainResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.JoinDomainRequest request]
    (.joinDomain this request)))
(defn create-cachedi-scsi-volume
  "Create a cached volume on a specified cached volume gateway. Only supported in the
  cached volume gateway type. Cache storage must already be allocated to the gateway
  (use the AddCache operation to add it). The request specifies the gateway, the
  volume size in bytes, the iSCSI target name, an IP address on which to expose the
  target, and a unique client token; the response describes the created volume,
  including its Amazon Resource Name (ARN), its size, and the iSCSI target ARN that
  initiators use to connect to the volume target.

  Optionally, an existing volume's ARN can be supplied as SourceVolumeARN to create an
  exact copy of that volume's latest recovery point; in that case VolumeSizeInBytes
  must be equal to or larger than the size of the copied volume, in bytes.

  request - `com.amazonaws.services.storagegateway.model.CreateCachediSCSIVolumeRequest`

  returns: Result of the CreateCachediSCSIVolume operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.CreateCachediSCSIVolumeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.CreateCachediSCSIVolumeResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CreateCachediSCSIVolumeRequest request]
    (.createCachediSCSIVolume this request)))
(defn add-cache
  "Configure one or more gateway local disks as cache for a gateway. Only supported in
  the cached volume, tape and file gateway types (see Storage Gateway Concepts). The
  request names the gateway by Amazon Resource Name (ARN) and lists the disk IDs to
  configure as cache.

  request - `com.amazonaws.services.storagegateway.model.AddCacheRequest`

  returns: Result of the AddCache operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.AddCacheResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.AddCacheResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.AddCacheRequest request]
    (.addCache this request)))
(defn describe-nfs-file-shares
  "Get a description for one or more Network File System (NFS) file shares from a file
  gateway. Only supported for file gateways.

  request - DescribeNFSFileSharesInput -
  `com.amazonaws.services.storagegateway.model.DescribeNFSFileSharesRequest`

  returns: Result of the DescribeNFSFileShares operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.DescribeNFSFileSharesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.DescribeNFSFileSharesResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeNFSFileSharesRequest request]
    (.describeNFSFileShares this request)))
(defn describe-maintenance-start-time
  "Return the gateway's weekly maintenance start time, including the day and time of
  the week. Values are in terms of the gateway's time zone.

  request - A JSON object containing the ARN of the gateway. -
  `com.amazonaws.services.storagegateway.model.DescribeMaintenanceStartTimeRequest`

  returns: Result of the DescribeMaintenanceStartTime operation returned by the
  service. -
  `com.amazonaws.services.storagegateway.model.DescribeMaintenanceStartTimeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.DescribeMaintenanceStartTimeResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeMaintenanceStartTimeRequest request]
    (.describeMaintenanceStartTime this request)))
(defn describe-cache
  "Return information about the cache of a gateway: the disk IDs configured as cache
  and the amount of cache allocated and used. Only supported in the cached volume,
  tape and file gateway types.

  request - `com.amazonaws.services.storagegateway.model.DescribeCacheRequest`

  returns: Result of the DescribeCache operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.DescribeCacheResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.DescribeCacheResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeCacheRequest request]
    (.describeCache this request)))
(defn list-tags-for-resource
  "List the tags that have been added to the specified resource. Only supported in the
  cached volume, stored volume and tape gateway types.

  request - ListTagsForResourceInput -
  `com.amazonaws.services.storagegateway.model.ListTagsForResourceRequest`

  returns: Result of the ListTagsForResource operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.ListTagsForResourceResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.ListTagsForResourceResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ListTagsForResourceRequest request]
    (.listTagsForResource this request))
  (^com.amazonaws.services.storagegateway.model.ListTagsForResourceResult [^AWSStorageGateway this]
    (.listTagsForResource this)))
(defn assign-tape-pool
  "Assign a tape to a tape pool for archiving. The tape is archived in the S3 storage
  class associated with the pool: when your backup application ejects the tape, it is
  archived directly into the corresponding S3 storage class (Glacier or Deep Archive).

  Valid values: \"GLACIER\", \"DEEP_ARCHIVE\"

  request - `com.amazonaws.services.storagegateway.model.AssignTapePoolRequest`

  returns: Result of the AssignTapePool operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.AssignTapePoolResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.AssignTapePoolResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.AssignTapePoolRequest request]
    (.assignTapePool this request)))
(defn set-region
  "Deprecated. Use AwsClientBuilder#setRegion(String) instead.

  region - The region this client will communicate with. See
  Region.getRegion(com.amazonaws.regions.Regions) for accessing a given region. Must
  not be null and must be a region where the service is available. -
  `com.amazonaws.regions.Region`"
  ([^AWSStorageGateway this ^com.amazonaws.regions.Region region]
    (.setRegion this region)))
(defn shutdown-gateway
  "Shut down a gateway, identified by the Amazon Resource Name (ARN) of the gateway in
  the request body. Only the gateway service component running in the gateway's
  virtual machine (VM) is shut down, not the host VM; if you intend to shut down the
  VM itself, shut down the gateway component first to avoid unpredictable conditions.

  After shutdown, only StartGateway, DescribeGatewayInformation, and ListGateways may
  be called (see ActivateGateway). Applications cannot read from or write to the
  gateway's storage volumes, and no snapshots are taken. The call returns 200 OK
  immediately, but the gateway may take some time to shut down; poll
  DescribeGatewayInformation to check the status. If you do not intend to use the
  gateway again, delete it with DeleteGateway so you no longer pay software charges
  associated with it.

  request - A JSON object containing the ARN of the gateway to shut down. -
  `com.amazonaws.services.storagegateway.model.ShutdownGatewayRequest`

  returns: Result of the ShutdownGateway operation returned by the service. -
  `com.amazonaws.services.storagegateway.model.ShutdownGatewayResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException -
  an invalid gateway request was issued to the service; see the error and message
  fields for details."
  (^com.amazonaws.services.storagegateway.model.ShutdownGatewayResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ShutdownGatewayRequest request]
    (.shutdownGateway this request)))
(defn describe-tape-recovery-points
  "Lists the virtual tape recovery points available for the given tape gateway.
  A recovery point is a point-in-time view at which all data on the tape is
  consistent; tapes with recovery points can be recovered to a new gateway if
  the original crashes. Tape gateway type only.

  describe-tape-recovery-points-request - DescribeTapeRecoveryPointsInput - `com.amazonaws.services.storagegateway.model.DescribeTapeRecoveryPointsRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeTapeRecoveryPointsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeTapeRecoveryPointsResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeTapeRecoveryPointsRequest req]
   (.describeTapeRecoveryPoints this req)))
(defn update-smb-file-share
  "Updates a Server Message Block (SMB) file share; pass null for any field that
  should stay unchanged. File gateways only.

  File gateways require AWS STS to be activated in the AWS Region where the
  gateway is created (see 'Activating and Deactivating AWS STS in an AWS
  Region' in the IAM User Guide). Hard and symbolic links are not supported on
  a file share.

  update-smb-file-share-request - UpdateSMBFileShareInput - `com.amazonaws.services.storagegateway.model.UpdateSMBFileShareRequest`

  returns: `com.amazonaws.services.storagegateway.model.UpdateSMBFileShareResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateSMBFileShareResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.UpdateSMBFileShareRequest req]
   (.updateSMBFileShare this req)))
(defn list-local-disks
  "Lists the local disks of the gateway named by the ARN in the request body.
  All disks are returned, whether configured as working storage, cache
  storage, stored volume, or unconfigured. Each entry carries a DiskStatus of
  present (usable), missing (no longer attached), or mismatch (the disk node
  holds a disk with wrong metadata or corrupted content).

  list-local-disks-request - JSON object carrying the gateway ARN. - `com.amazonaws.services.storagegateway.model.ListLocalDisksRequest`

  returns: `com.amazonaws.services.storagegateway.model.ListLocalDisksResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ListLocalDisksResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ListLocalDisksRequest req]
   (.listLocalDisks this req)))
(defn update-smb-security-strategy
  "Updates the SMB security strategy on a file gateway. File gateways only.

  update-smb-security-strategy-request - `com.amazonaws.services.storagegateway.model.UpdateSMBSecurityStrategyRequest`

  returns: `com.amazonaws.services.storagegateway.model.UpdateSMBSecurityStrategyResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateSMBSecurityStrategyResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.UpdateSMBSecurityStrategyRequest req]
   (.updateSMBSecurityStrategy this req)))
(defn describe-chap-credentials
  "Returns the Challenge-Handshake Authentication Protocol (CHAP) credentials of
  a given iSCSI target, one entry per target-initiator pair.

  describe-chap-credentials-request - JSON object carrying the ARN of the iSCSI volume target. - `com.amazonaws.services.storagegateway.model.DescribeChapCredentialsRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeChapCredentialsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeChapCredentialsResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeChapCredentialsRequest req]
   (.describeChapCredentials this req)))
(defn describe-smb-file-shares
  "Describes one or more Server Message Block (SMB) file shares on a file
  gateway. File gateways only.

  describe-smb-file-shares-request - DescribeSMBFileSharesInput - `com.amazonaws.services.storagegateway.model.DescribeSMBFileSharesRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeSMBFileSharesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeSMBFileSharesResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeSMBFileSharesRequest req]
   (.describeSMBFileShares this req)))
(defn delete-volume
  "Deletes a storage volume previously created with CreateCachediSCSIVolume
  or CreateStorediSCSIVolume. Cached volume and stored volume types only. For
  stored volume gateways the backing local disk is not deleted and can be
  reused for another volume.

  Before deleting, ensure no iSCSI connections are open to the volume and no
  snapshot is in progress; query snapshot status via the Amazon EC2
  DescribeSnapshots API (Amazon Elastic Compute Cloud API Reference). The
  request must carry the ARN of the volume to delete.

  delete-volume-request - JSON object carrying DeleteVolumeInput$VolumeARN. - `com.amazonaws.services.storagegateway.model.DeleteVolumeRequest`

  returns: `com.amazonaws.services.storagegateway.model.DeleteVolumeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteVolumeResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DeleteVolumeRequest req]
   (.deleteVolume this req)))
(defn delete-chap-credentials
  "Deletes the Challenge-Handshake Authentication Protocol (CHAP) credentials
  of a given iSCSI target-initiator pair.

  delete-chap-credentials-request - JSON object with one or more of: DeleteChapCredentialsInput$InitiatorName, DeleteChapCredentialsInput$TargetARN. - `com.amazonaws.services.storagegateway.model.DeleteChapCredentialsRequest`

  returns: `com.amazonaws.services.storagegateway.model.DeleteChapCredentialsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteChapCredentialsResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DeleteChapCredentialsRequest req]
   (.deleteChapCredentials this req)))
(defn delete-tape
  "Deletes the specified virtual tape. Tape gateway type only.

  delete-tape-request - DeleteTapeInput - `com.amazonaws.services.storagegateway.model.DeleteTapeRequest`

  returns: `com.amazonaws.services.storagegateway.model.DeleteTapeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteTapeResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DeleteTapeRequest req]
   (.deleteTape this req)))
(defn create-tape-with-barcode
  "Creates a virtual tape using a barcode you supply; you can then write data
  to the tape and archive it. Barcodes are unique and cannot be reused once
  used on any tape, including deleted ones. Tape gateway type only.

  Cache storage must be allocated to the gateway beforehand; use the AddCache
  operation to add it.

  create-tape-with-barcode-request - CreateTapeWithBarcodeInput - `com.amazonaws.services.storagegateway.model.CreateTapeWithBarcodeRequest`

  returns: `com.amazonaws.services.storagegateway.model.CreateTapeWithBarcodeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.CreateTapeWithBarcodeResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CreateTapeWithBarcodeRequest req]
   (.createTapeWithBarcode this req)))
(defn delete-snapshot-schedule
  "Deletes the snapshot schedule of a volume, identified by its ARN in the
  request. Snapshots can be taken on a schedule or ad hoc; this action removes
  the schedule (see 'Working with Snapshots'). Stored and cached volume
  gateway types only.

  Listing or deleting a snapshot itself is done through the Amazon EC2 API
  (Amazon Elastic Compute Cloud API Reference).

  delete-snapshot-schedule-request - `com.amazonaws.services.storagegateway.model.DeleteSnapshotScheduleRequest`

  returns: `com.amazonaws.services.storagegateway.model.DeleteSnapshotScheduleResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteSnapshotScheduleResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DeleteSnapshotScheduleRequest req]
   (.deleteSnapshotSchedule this req)))
(defn attach-volume
  "Connects a volume to an iSCSI connection and attaches it to the specified
  gateway. Detach/attach lets you recover data onto a different gateway
  without a snapshot, and eases moving volumes from an on-premises gateway to
  one hosted on an Amazon EC2 instance.

  attach-volume-request - AttachVolumeInput - `com.amazonaws.services.storagegateway.model.AttachVolumeRequest`

  returns: `com.amazonaws.services.storagegateway.model.AttachVolumeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.AttachVolumeResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.AttachVolumeRequest req]
   (.attachVolume this req)))
(defn describe-working-storage
  "Describes the working storage of a gateway. Stored volumes gateway type
  only; deprecated in the cached volumes API version (20120630) — use
  DescribeUploadBuffer instead.

  Working storage is also known as upload buffer; DescribeUploadBuffer can
  likewise be used to add upload buffer to a stored volume gateway. The
  response lists the disk IDs configured as working storage together with the
  allocated and used amounts.

  describe-working-storage-request - JSON object carrying the gateway ARN. - `com.amazonaws.services.storagegateway.model.DescribeWorkingStorageRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeWorkingStorageResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeWorkingStorageResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeWorkingStorageRequest req]
   (.describeWorkingStorage this req)))
(defn describe-vtl-devices
  "Describes the virtual tape library (VTL) devices of the specified tape
  gateway; the response contains the VTL device information. Tape gateway
  type only.

  describe-vtl-devices-request - DescribeVTLDevicesInput - `com.amazonaws.services.storagegateway.model.DescribeVTLDevicesRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeVTLDevicesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeVTLDevicesResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeVTLDevicesRequest req]
   (.describeVTLDevices this req)))
(defn cancel-archival
  "Cancels archiving of a virtual tape to the virtual tape shelf (VTS) after
  the archiving process has been initiated. Tape gateway type only.

  cancel-archival-request - CancelArchivalInput - `com.amazonaws.services.storagegateway.model.CancelArchivalRequest`

  returns: `com.amazonaws.services.storagegateway.model.CancelArchivalResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.CancelArchivalResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CancelArchivalRequest req]
   (.cancelArchival this req)))
(defn update-maintenance-start-time
  "Updates a gateway's weekly maintenance start time (day and time of week,
  expressed in the gateway's time zone).

  update-maintenance-start-time-request - JSON object with: UpdateMaintenanceStartTimeInput$DayOfMonth, UpdateMaintenanceStartTimeInput$DayOfWeek, UpdateMaintenanceStartTimeInput$HourOfDay, UpdateMaintenanceStartTimeInput$MinuteOfHour. - `com.amazonaws.services.storagegateway.model.UpdateMaintenanceStartTimeRequest`

  returns: `com.amazonaws.services.storagegateway.model.UpdateMaintenanceStartTimeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateMaintenanceStartTimeResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.UpdateMaintenanceStartTimeRequest req]
   (.updateMaintenanceStartTime this req)))
(defn delete-file-share
  "Deletes a file share from a file gateway. File gateways only.

  delete-file-share-request - DeleteFileShareInput - `com.amazonaws.services.storagegateway.model.DeleteFileShareRequest`

  returns: `com.amazonaws.services.storagegateway.model.DeleteFileShareResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteFileShareResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DeleteFileShareRequest req]
   (.deleteFileShare this req)))
(defn update-snapshot-schedule
  "Updates the snapshot schedule of a gateway volume. Cached volume and stored
  volume gateway types only.

  The default schedule is one snapshot every 24 hours starting at the volume's
  creation time. The request identifies the volume, the daily start time, and
  the snapshot frequency in hours.

  update-snapshot-schedule-request - JSON object with one or more of: UpdateSnapshotScheduleInput$Description, UpdateSnapshotScheduleInput$RecurrenceInHours, UpdateSnapshotScheduleInput$StartAt, UpdateSnapshotScheduleInput$VolumeARN. - `com.amazonaws.services.storagegateway.model.UpdateSnapshotScheduleRequest`

  returns: `com.amazonaws.services.storagegateway.model.UpdateSnapshotScheduleResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateSnapshotScheduleResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.UpdateSnapshotScheduleRequest req]
   (.updateSnapshotSchedule this req)))
(defn activate-gateway
  "Activates a gateway previously deployed on your host. Activation supplies
  the region for storing snapshots or tapes, the time zone for the scheduled
  snapshot window, an activation key, and a gateway name. The gateway VM must
  be powered on before activation.

  activate-gateway-request - JSON object with one or more of: ActivateGatewayInput$ActivationKey, ActivateGatewayInput$GatewayName, ActivateGatewayInput$GatewayRegion, ActivateGatewayInput$GatewayTimezone, ActivateGatewayInput$GatewayType, ActivateGatewayInput$TapeDriveType, ActivateGatewayInput$MediumChangerType. - `com.amazonaws.services.storagegateway.model.ActivateGatewayRequest`

  returns: `com.amazonaws.services.storagegateway.model.ActivateGatewayResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ActivateGatewayResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ActivateGatewayRequest req]
   (.activateGateway this req)))
(defn list-volume-recovery-points
  "Lists the recovery points of a gateway. Cached volume gateway type only.

  Each cache volume has one recovery point: a point in time at which all the
  volume's data is consistent and from which you can create a snapshot or
  clone a new cached volume (see CreateSnapshotFromVolumeRecoveryPoint).

  list-volume-recovery-points-request - `com.amazonaws.services.storagegateway.model.ListVolumeRecoveryPointsRequest`

  returns: `com.amazonaws.services.storagegateway.model.ListVolumeRecoveryPointsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ListVolumeRecoveryPointsResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ListVolumeRecoveryPointsRequest req]
   (.listVolumeRecoveryPoints this req)))
(defn list-tapes
  "Lists virtual tapes in the virtual tape library (VTL) and virtual tape
  shelf (VTS), selected by tape ARN; with no ARNs given, all tapes in both the
  VTL and VTS are listed. Tape gateway type only.

  Supports pagination: at most 100 tapes are returned by default, the Limit
  field caps the count, and a truncated response includes a Marker to pass in
  the next request.

  list-tapes-request - JSON object with one or more of: ListTapesInput$Limit, ListTapesInput$Marker, ListTapesInput$TapeARNs. - `com.amazonaws.services.storagegateway.model.ListTapesRequest`

  returns: `com.amazonaws.services.storagegateway.model.ListTapesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ListTapesResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ListTapesRequest req]
   (.listTapes this req)))
(defn describe-upload-buffer
  "Describes the upload buffer of a gateway. Supported by the stored volume,
  cached volume and tape gateway types. The response lists the disk IDs
  configured as upload buffer space plus the allocated and used amounts.

  describe-upload-buffer-request - `com.amazonaws.services.storagegateway.model.DescribeUploadBufferRequest`

  returns: `com.amazonaws.services.storagegateway.model.DescribeUploadBufferResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeUploadBufferResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeUploadBufferRequest req]
   (.describeUploadBuffer this req)))
(defn shutdown
  "Releases any resources this client object holds open. Optional: callers may
  invoke it to free resources explicitly, but are not required to. A client
  that has been shut down must not issue further requests."
  ([^AWSStorageGateway this]
   (.shutdown this)))
(defn start-gateway
  "Starts a gateway that was previously shut down (see ShutdownGateway); once
  started, other API calls work again, applications can read and write the
  gateway's storage volumes, and snapshot backups can be taken.

  The call returns 200 OK immediately even though startup may take time; call
  DescribeGatewayInformation and verify the status before making further API
  calls (see ActivateGateway). The gateway to start is identified by its ARN
  in the request.

  start-gateway-request - JSON object carrying the ARN of the gateway to start. - `com.amazonaws.services.storagegateway.model.StartGatewayRequest`

  returns: `com.amazonaws.services.storagegateway.model.StartGatewayResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.StartGatewayResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.StartGatewayRequest req]
   (.startGateway this req)))
(defn set-local-console-password
  "Sets the password of the VM local console. The first login uses the default
  credentials; setting a new password is recommended and does not require
  knowing the default one.

  set-local-console-password-request - SetLocalConsolePasswordInput - `com.amazonaws.services.storagegateway.model.SetLocalConsolePasswordRequest`

  returns: `com.amazonaws.services.storagegateway.model.SetLocalConsolePasswordResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.SetLocalConsolePasswordResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.SetLocalConsolePasswordRequest req]
   (.setLocalConsolePassword this req)))
(defn disable-gateway
  "Disables a tape gateway that is no longer functioning — for instance when
  the gateway VM is damaged — so that its virtual tapes can be recovered. Use
  only for a gateway that is unreachable or not working. Tape gateway type
  only.

  A disabled gateway cannot be re-enabled.

  disable-gateway-request - DisableGatewayInput - `com.amazonaws.services.storagegateway.model.DisableGatewayRequest`

  returns: `com.amazonaws.services.storagegateway.model.DisableGatewayResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - invalid gateway request; see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DisableGatewayResult
   [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DisableGatewayRequest req]
   (.disableGateway this req)))
(defn retrieve-tape-archive
  "Retrieves an archived virtual tape from the virtual tape shelf (VTS) to a
  tape gateway. Archived tapes are not associated with any gateway; once
  retrieved, a tape becomes associated with the target gateway even though it
  is still listed in the VTS (archive). A retrieved tape cannot be retrieved
  again to another gateway until it has been archived again. Only supported
  by the tape gateway type.

  retrieve-tape-archive-request - RetrieveTapeArchiveInput - `com.amazonaws.services.storagegateway.model.RetrieveTapeArchiveRequest`

  returns: Result of the RetrieveTapeArchive operation returned by the service. - `com.amazonaws.services.storagegateway.model.RetrieveTapeArchiveResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.RetrieveTapeArchiveResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.RetrieveTapeArchiveRequest retrieve-tape-archive-request]
   (.retrieveTapeArchive this retrieve-tape-archive-request)))
(defn detach-volume
  "Disconnects a volume from its iSCSI connection and detaches it from the
  specified gateway. Detach/attach lets you recover data onto a different
  gateway without taking a snapshot, and simplifies moving volumes from an
  on-premises gateway to a gateway hosted on an Amazon EC2 instance.

  detach-volume-request - AttachVolumeInput - `com.amazonaws.services.storagegateway.model.DetachVolumeRequest`

  returns: Result of the DetachVolume operation returned by the service. - `com.amazonaws.services.storagegateway.model.DetachVolumeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DetachVolumeResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DetachVolumeRequest detach-volume-request]
   (.detachVolume this detach-volume-request)))
(defn list-gateways
  "Lists the gateways owned by an AWS account in the region given in the
  request, ordered by gateway Amazon Resource Name (ARN). At most 100
  gateways are returned by default; pagination is supported, and a truncated
  response includes a marker to pass in the next request to fetch the next
  page.

  list-gateways-request - A JSON object containing zero or more of the following fields: ListGatewaysInput$Limit ListGatewaysInput$Marker - `com.amazonaws.services.storagegateway.model.ListGatewaysRequest`

  returns: Result of the ListGateways operation returned by the service. - `com.amazonaws.services.storagegateway.model.ListGatewaysResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ListGatewaysResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.ListGatewaysRequest list-gateways-request]
   (.listGateways this list-gateways-request))
  (^com.amazonaws.services.storagegateway.model.ListGatewaysResult
   [^AWSStorageGateway this]
   (.listGateways this)))
(defn create-tapes
  "Creates one or more virtual tapes to write data to and later archive.
  Only supported by the tape gateway type. Cache storage must already be
  allocated to the gateway (see the AddCache operation) before tapes can be
  created.

  create-tapes-request - CreateTapesInput - `com.amazonaws.services.storagegateway.model.CreateTapesRequest`

  returns: Result of the CreateTapes operation returned by the service. - `com.amazonaws.services.storagegateway.model.CreateTapesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.CreateTapesResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.CreateTapesRequest create-tapes-request]
   (.createTapes this create-tapes-request)))
(defn set-endpoint
  "Deprecated. Prefer
  AwsClientBuilder#setEndpointConfiguration(AwsClientBuilder.EndpointConfiguration)
  when constructing the client.

  endpoint - The endpoint (ex: \"storagegateway.us-east-1.amazonaws.com\") or a full URL, including the protocol (ex: \"-east-1.amazonaws.com\") of the region specific AWS endpoint this client will communicate with. - `java.lang.String`"
  ([^AWSStorageGateway this
    ^java.lang.String endpoint]
   (.setEndpoint this endpoint)))
(defn update-chap-credentials
  "Updates the Challenge-Handshake Authentication Protocol (CHAP) credentials
  for a specified iSCSI target. When CHAP credentials are updated, all
  existing connections on the target are closed and initiators must reconnect
  with the new credentials.

  update-chap-credentials-request - A JSON object containing one or more of the following fields: UpdateChapCredentialsInput$InitiatorName UpdateChapCredentialsInput$SecretToAuthenticateInitiator UpdateChapCredentialsInput$SecretToAuthenticateTarget UpdateChapCredentialsInput$TargetARN - `com.amazonaws.services.storagegateway.model.UpdateChapCredentialsRequest`

  returns: Result of the UpdateChapCredentials operation returned by the service. - `com.amazonaws.services.storagegateway.model.UpdateChapCredentialsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateChapCredentialsResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.UpdateChapCredentialsRequest update-chap-credentials-request]
   (.updateChapCredentials this update-chap-credentials-request)))
(defn add-working-storage
  "Configures one or more gateway local disks as working storage. Only
  supported by the stored volume gateway type, and deprecated in cached
  volume API version 20120630 — use AddUploadBuffer instead. Working storage
  is also referred to as upload buffer; AddUploadBuffer can likewise add
  upload buffer to a stored volume gateway. The request names the gateway
  ARN plus one or more disk IDs to configure as working storage.

  add-working-storage-request - A JSON object containing one or more of the following fields: AddWorkingStorageInput$DiskIds - `com.amazonaws.services.storagegateway.model.AddWorkingStorageRequest`

  returns: Result of the AddWorkingStorage operation returned by the service. - `com.amazonaws.services.storagegateway.model.AddWorkingStorageResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.AddWorkingStorageResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.AddWorkingStorageRequest add-working-storage-request]
   (.addWorkingStorage this add-working-storage-request)))
(defn refresh-cache
  "Refreshes the cache for the specified file share by finding objects in
  the Amazon S3 bucket that were added, removed, or replaced since the
  gateway last listed and cached the bucket's contents. Only supported by
  the file gateway type. The call merely initiates the refresh: a success
  response does not mean the refresh has finished. Subscribe to the
  refresh-complete CloudWatch event notification to know when the operation
  has completed before checking for new files on the gateway file share (see
  Getting Notified About File Operations).

  refresh-cache-request - RefreshCacheInput - `com.amazonaws.services.storagegateway.model.RefreshCacheRequest`

  returns: Result of the RefreshCache operation returned by the service. - `com.amazonaws.services.storagegateway.model.RefreshCacheResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.RefreshCacheResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.RefreshCacheRequest refresh-cache-request]
   (.refreshCache this refresh-cache-request)))
(defn create-nfs-file-share
  "Creates a Network File System (NFS) file share on an existing file
  gateway. A file share is a file system mount point backed by Amazon S3
  cloud storage, exposed over NFS. Only supported for file gateways.
  AWS Security Token Service (AWS STS) must be activated in the gateway's
  region before a file share can be created; see Activating and Deactivating
  AWS STS in an AWS Region in the IAM User Guide. File gateway does not
  support creating hard or symbolic links on a file share.

  create-nfs-file-share-request - CreateNFSFileShareInput - `com.amazonaws.services.storagegateway.model.CreateNFSFileShareRequest`

  returns: Result of the CreateNFSFileShare operation returned by the service. - `com.amazonaws.services.storagegateway.model.CreateNFSFileShareResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.CreateNFSFileShareResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.CreateNFSFileShareRequest create-nfs-file-share-request]
   (.createNFSFileShare this create-nfs-file-share-request)))
(defn delete-tape-archive
  "Deletes the specified virtual tape from the virtual tape shelf (VTS).
  Only supported by the tape gateway type.

  delete-tape-archive-request - DeleteTapeArchiveInput - `com.amazonaws.services.storagegateway.model.DeleteTapeArchiveRequest`

  returns: Result of the DeleteTapeArchive operation returned by the service. - `com.amazonaws.services.storagegateway.model.DeleteTapeArchiveResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteTapeArchiveResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DeleteTapeArchiveRequest delete-tape-archive-request]
   (.deleteTapeArchive this delete-tape-archive-request)))
(defn update-nfs-file-share
  "Updates a Network File System (NFS) file share. Only supported by the
  file gateway type. Settings that can be updated:

    - default storage class for the S3 bucket
    - metadata defaults for the S3 bucket
    - allowed NFS clients for the file share
    - squash settings
    - write status of the file share

  To leave a field unchanged, set the corresponding input field to null.

  update-nfs-file-share-request - UpdateNFSFileShareInput - `com.amazonaws.services.storagegateway.model.UpdateNFSFileShareRequest`

  returns: Result of the UpdateNFSFileShare operation returned by the service. - `com.amazonaws.services.storagegateway.model.UpdateNFSFileShareResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateNFSFileShareResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.UpdateNFSFileShareRequest update-nfs-file-share-request]
   (.updateNFSFileShare this update-nfs-file-share-request)))
(defn update-gateway-software-now
  "Updates the gateway virtual machine (VM) software; the request triggers
  the update immediately. A 200 OK response is returned right away, but the
  update may take some time to complete — call DescribeGatewayInformation to
  verify the gateway reaches the STATE_RUNNING state. The update forces a
  system restart of the gateway; raising iSCSI initiator timeouts minimizes
  disruption (see Customizing Your Windows iSCSI Settings and Customizing
  Your Linux iSCSI Settings).

  update-gateway-software-now-request - A JSON object containing the of the gateway to update. - `com.amazonaws.services.storagegateway.model.UpdateGatewaySoftwareNowRequest`

  returns: Result of the UpdateGatewaySoftwareNow operation returned by the service. - `com.amazonaws.services.storagegateway.model.UpdateGatewaySoftwareNowResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateGatewaySoftwareNowResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.UpdateGatewaySoftwareNowRequest update-gateway-software-now-request]
   (.updateGatewaySoftwareNow this update-gateway-software-now-request)))
(defn list-volume-initiators
  "Lists the iSCSI initiators connected to a volume, which can be used to
  determine whether the volume is in use. Only supported by the cached
  volume and stored volume gateway types.

  list-volume-initiators-request - ListVolumeInitiatorsInput - `com.amazonaws.services.storagegateway.model.ListVolumeInitiatorsRequest`

  returns: Result of the ListVolumeInitiators operation returned by the service. - `com.amazonaws.services.storagegateway.model.ListVolumeInitiatorsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ListVolumeInitiatorsResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.ListVolumeInitiatorsRequest list-volume-initiators-request]
   (.listVolumeInitiators this list-volume-initiators-request)))
(defn remove-tags-from-resource
  "Removes one or more tags from the specified resource. Only supported by
  the cached volume, stored volume, and tape gateway types.

  remove-tags-from-resource-request - RemoveTagsFromResourceInput - `com.amazonaws.services.storagegateway.model.RemoveTagsFromResourceRequest`

  returns: Result of the RemoveTagsFromResource operation returned by the service. - `com.amazonaws.services.storagegateway.model.RemoveTagsFromResourceResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.RemoveTagsFromResourceResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.RemoveTagsFromResourceRequest remove-tags-from-resource-request]
   (.removeTagsFromResource this remove-tags-from-resource-request))
  (^com.amazonaws.services.storagegateway.model.RemoveTagsFromResourceResult
   [^AWSStorageGateway this]
   (.removeTagsFromResource this)))
(defn reset-cache
  "Resets all cache disks that have encountered an error, making them
  available for reconfiguration as cache storage. When a cache disk errors
  (for example because it was corrupted or removed), the gateway prevents
  read and write operations on virtual tapes. Resetting the cache discards
  the gateway's cache storage, after which the disks can be reconfigured as
  cache disks. Only supported by the cached volume and tape gateway types.
  Data on the cache disks that has not yet been uploaded to Amazon S3 can be
  lost, and at least one new cache disk must be configured afterwards for
  the gateway to function properly.

  reset-cache-request - `com.amazonaws.services.storagegateway.model.ResetCacheRequest`

  returns: Result of the ResetCache operation returned by the service. - `com.amazonaws.services.storagegateway.model.ResetCacheResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.ResetCacheResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.ResetCacheRequest reset-cache-request]
   (.resetCache this reset-cache-request)))
(defn describe-storedi-scsi-volumes
  "Returns descriptions of the gateway volumes named in the request, which
  must all belong to one gateway. The response is sorted by volume ARN.
  Only supported by the stored volume gateway type.

  describe-storedi-scsi-volumes-request - A JSON object containing a list of DescribeStorediSCSIVolumesInput$VolumeARNs. - `com.amazonaws.services.storagegateway.model.DescribeStorediSCSIVolumesRequest`

  returns: Result of the DescribeStorediSCSIVolumes operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeStorediSCSIVolumesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeStorediSCSIVolumesResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DescribeStorediSCSIVolumesRequest describe-storedi-scsi-volumes-request]
   (.describeStorediSCSIVolumes this describe-storedi-scsi-volumes-request)))
(defn describe-smb-settings
  "Gets a description of Server Message Block (SMB) file share settings from
  a file gateway. Only supported for file gateways.

  describe-smb-settings-request - `com.amazonaws.services.storagegateway.model.DescribeSMBSettingsRequest`

  returns: Result of the DescribeSMBSettings operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeSMBSettingsResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeSMBSettingsResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DescribeSMBSettingsRequest describe-smb-settings-request]
   (.describeSMBSettings this describe-smb-settings-request)))
(defn describe-tape-archives
  "Returns descriptions of the specified virtual tapes in the virtual tape
  shelf (VTS). If no specific TapeARN is given, AWS Storage Gateway
  describes all virtual tapes found in the VTS associated with the account.
  Only supported by the tape gateway type.

  describe-tape-archives-request - DescribeTapeArchivesInput - `com.amazonaws.services.storagegateway.model.DescribeTapeArchivesRequest`

  returns: Result of the DescribeTapeArchives operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeTapeArchivesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeTapeArchivesResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DescribeTapeArchivesRequest describe-tape-archives-request]
   (.describeTapeArchives this describe-tape-archives-request))
  (^com.amazonaws.services.storagegateway.model.DescribeTapeArchivesResult
   [^AWSStorageGateway this]
   (.describeTapeArchives this)))
(defn update-vtl-device-type
  "Updates the type of medium changer in a tape gateway. A medium changer
  type is chosen when a tape gateway is activated; this operation lets you
  select a different type afterwards. Only supported by the tape gateway
  type.

  update-vtl-device-type-request - `com.amazonaws.services.storagegateway.model.UpdateVTLDeviceTypeRequest`

  returns: Result of the UpdateVTLDeviceType operation returned by the service. - `com.amazonaws.services.storagegateway.model.UpdateVTLDeviceTypeResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.UpdateVTLDeviceTypeResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.UpdateVTLDeviceTypeRequest update-vtl-device-type-request]
   (.updateVTLDeviceType this update-vtl-device-type-request)))
(defn describe-bandwidth-rate-limit
  "Returns the bandwidth rate limits of a gateway. Limits are unset by
  default (no bandwidth rate limiting in effect), and a value is only
  returned for a limit that has been set; if none are set, only the gateway
  ARN appears in the response body. The gateway to describe is identified by
  its Amazon Resource Name (ARN) in the request.

  describe-bandwidth-rate-limit-request - A JSON object containing the of the gateway. - `com.amazonaws.services.storagegateway.model.DescribeBandwidthRateLimitRequest`

  returns: Result of the DescribeBandwidthRateLimit operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeBandwidthRateLimitResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeBandwidthRateLimitResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DescribeBandwidthRateLimitRequest describe-bandwidth-rate-limit-request]
   (.describeBandwidthRateLimit this describe-bandwidth-rate-limit-request)))
(defn describe-tapes
  "Returns a description of the virtual tapes identified by Amazon Resource
  Name (ARN). If no TapeARN is specified, all virtual tapes associated with
  the specified gateway are described. Only supported by the tape gateway
  type.

  describe-tapes-request - DescribeTapesInput - `com.amazonaws.services.storagegateway.model.DescribeTapesRequest`

  returns: Result of the DescribeTapes operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeTapesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeTapesResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DescribeTapesRequest describe-tapes-request]
   (.describeTapes this describe-tapes-request)))
(defn create-smb-file-share
  "Creates a Server Message Block (SMB) file share on an existing file
  gateway. A file share is a file system mount point backed by Amazon S3
  cloud storage, exposed over SMB. Only supported for file gateways.
  AWS Security Token Service (AWS STS) must be activated in the gateway's
  AWS Region before a file share can be created; see Activating and
  Deactivating AWS STS in an AWS Region in the IAM User Guide. File gateways
  do not support creating hard or symbolic links on a file share.

  create-smb-file-share-request - CreateSMBFileShareInput - `com.amazonaws.services.storagegateway.model.CreateSMBFileShareRequest`

  returns: Result of the CreateSMBFileShare operation returned by the service. - `com.amazonaws.services.storagegateway.model.CreateSMBFileShareResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.CreateSMBFileShareResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.CreateSMBFileShareRequest create-smb-file-share-request]
   (.createSMBFileShare this create-smb-file-share-request)))
(defn set-smb-guest-password
  "Sets the password for the guest user smbguest — the user applied when a
  file share's authentication method is set to GuestAccess.

  set-smb-guest-password-request - SetSMBGuestPasswordInput - `com.amazonaws.services.storagegateway.model.SetSMBGuestPasswordRequest`

  returns: Result of the SetSMBGuestPassword operation returned by the service. - `com.amazonaws.services.storagegateway.model.SetSMBGuestPasswordResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.SetSMBGuestPasswordResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.SetSMBGuestPasswordRequest set-smb-guest-password-request]
   (.setSMBGuestPassword this set-smb-guest-password-request)))
(defn delete-bandwidth-rate-limit
  "Deletes the bandwidth rate limits of a gateway — the upload limit, the
  download limit, or both. Deleting only one limit leaves the other
  unchanged. The gateway is identified by its Amazon Resource Name (ARN) in
  the request.

  delete-bandwidth-rate-limit-request - A JSON object containing the following fields: DeleteBandwidthRateLimitInput$BandwidthType - `com.amazonaws.services.storagegateway.model.DeleteBandwidthRateLimitRequest`

  returns: Result of the DeleteBandwidthRateLimit operation returned by the service. - `com.amazonaws.services.storagegateway.model.DeleteBandwidthRateLimitResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DeleteBandwidthRateLimitResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DeleteBandwidthRateLimitRequest delete-bandwidth-rate-limit-request]
   (.deleteBandwidthRateLimit this delete-bandwidth-rate-limit-request)))
(defn add-tags-to-resource
  "Adds one or more tags (key/value metadata you define) to the specified
  resource, e.g. to categorize resources by purpose, owner, environment, or
  team. Taggable AWS Storage Gateway resources:

    - storage gateways of all types
    - storage volumes
    - virtual tapes
    - NFS and SMB file shares

  Up to 50 tags may be created per resource. Virtual tapes and storage
  volumes recovered to a new gateway keep their tags.

  add-tags-to-resource-request - AddTagsToResourceInput - `com.amazonaws.services.storagegateway.model.AddTagsToResourceRequest`

  returns: Result of the AddTagsToResource operation returned by the service. - `com.amazonaws.services.storagegateway.model.AddTagsToResourceResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.AddTagsToResourceResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.AddTagsToResourceRequest add-tags-to-resource-request]
   (.addTagsToResource this add-tags-to-resource-request)))
(defn describe-cachedi-scsi-volumes
  "Returns descriptions of the gateway volumes named in the request, which
  must all belong to one gateway. The response is sorted by volume Amazon
  Resource Name (ARN). Only supported by the cached volume gateway types.

  describe-cachedi-scsi-volumes-request - `com.amazonaws.services.storagegateway.model.DescribeCachediSCSIVolumesRequest`

  returns: Result of the DescribeCachediSCSIVolumes operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeCachediSCSIVolumesResult`

  throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
  (^com.amazonaws.services.storagegateway.model.DescribeCachediSCSIVolumesResult
   [^AWSStorageGateway this
    ^com.amazonaws.services.storagegateway.model.DescribeCachediSCSIVolumesRequest describe-cachedi-scsi-volumes-request]
   (.describeCachediSCSIVolumes this describe-cachedi-scsi-volumes-request)))
(defn get-cached-response-metadata
"Returns additional metadata for a previously executed successful request, typically used for debugging issues
where a service isn't acting as expected. This data isn't considered part of the result data returned by an
operation, so it's available through this separate, diagnostic interface.
Response metadata is only cached for a limited period of time, so if you need to access this extra diagnostic
information for an executed request, you should use this method to retrieve it as soon as possible after
executing a request.
request - The originally executed request. - `com.amazonaws.AmazonWebServiceRequest`
returns: The response metadata for the specified request, or null if none is available. - `com.amazonaws.ResponseMetadata`"
(^com.amazonaws.ResponseMetadata [^AWSStorageGateway this ^com.amazonaws.AmazonWebServiceRequest request]
(-> this (.getCachedResponseMetadata request))))
(defn notify-when-uploaded
"Sends you notification through CloudWatch Events when all files written to your NFS file share have been uploaded
to Amazon S3.
AWS Storage Gateway can send a notification through Amazon CloudWatch Events when all files written to your file
share up to that point in time have been uploaded to Amazon S3. These files include files written to the NFS file
share up to the time that you make a request for notification. When the upload is done, Storage Gateway sends you
notification through an Amazon CloudWatch Event. You can configure CloudWatch Events to send the notification
through event targets such as Amazon SNS or AWS Lambda function. This operation is only supported for file
gateways.
For more information, see Getting File Upload Notification in the Storage Gateway User Guide
(
.com/storagegateway/latest/userguide/monitoring-file-gateway.html#get-upload-notification).
notify-when-uploaded-request - `com.amazonaws.services.storagegateway.model.NotifyWhenUploadedRequest`
returns: Result of the NotifyWhenUploaded operation returned by the service. - `com.amazonaws.services.storagegateway.model.NotifyWhenUploadedResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.NotifyWhenUploadedResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.NotifyWhenUploadedRequest notify-when-uploaded-request]
(-> this (.notifyWhenUploaded notify-when-uploaded-request))))
(defn list-volumes
"Lists the iSCSI stored volumes of a gateway. Results are sorted by volume ARN. The response includes only the
volume ARNs. If you want additional volume information, use the DescribeStorediSCSIVolumes or the
DescribeCachediSCSIVolumes API.
The operation supports pagination. By default, the operation returns a maximum of up to 100 volumes. You can
optionally specify the Limit field in the body to limit the number of volumes in the response. If
the number of volumes returned in the response is truncated, the response includes a Marker field. You can use
this Marker value in your subsequent request to retrieve the next set of volumes. This operation is only
supported in the cached volume and stored volume gateway types.
list-volumes-request - A JSON object that contains one or more of the following fields: ListVolumesInput$Limit ListVolumesInput$Marker - `com.amazonaws.services.storagegateway.model.ListVolumesRequest`
returns: Result of the ListVolumes operation returned by the service. - `com.amazonaws.services.storagegateway.model.ListVolumesResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.ListVolumesResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ListVolumesRequest list-volumes-request]
(-> this (.listVolumes list-volumes-request))))
(defn retrieve-tape-recovery-point
"Retrieves the recovery point for the specified virtual tape. This operation is only supported in the tape gateway
type.
A recovery point is a point in time view of a virtual tape at which all the data on the tape is consistent. If
your gateway crashes, virtual tapes that have recovery points can be recovered to a new gateway.
The virtual tape can be retrieved to only one gateway. The retrieved tape is read-only. The virtual tape can be
retrieved to only a tape gateway. There is no charge for retrieving recovery points.
retrieve-tape-recovery-point-request - RetrieveTapeRecoveryPointInput - `com.amazonaws.services.storagegateway.model.RetrieveTapeRecoveryPointRequest`
returns: Result of the RetrieveTapeRecoveryPoint operation returned by the service. - `com.amazonaws.services.storagegateway.model.RetrieveTapeRecoveryPointResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.RetrieveTapeRecoveryPointResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.RetrieveTapeRecoveryPointRequest retrieve-tape-recovery-point-request]
(-> this (.retrieveTapeRecoveryPoint retrieve-tape-recovery-point-request))))
(defn delete-gateway
"Deletes a gateway. To specify which gateway to delete, use the Amazon Resource Name (ARN) of the gateway in your
your host computer.
After you delete a gateway, you cannot reactivate it. Completed snapshots of the gateway volumes are not deleted
upon deleting the gateway, however, pending snapshots will not complete. After you delete a gateway, your next
step is to remove it from your environment.
however , your existing Amazon EBS snapshots
persist and you will continue to be billed for these snapshots. You can choose to remove all remaining Amazon EBS
snapshots by canceling your Amazon EC2 subscription. If you prefer not to cancel your Amazon EC2 subscription,
you can delete your snapshots using the Amazon EC2 console. For more information, see the AWS Storage Gateway Detail Page.
delete-gateway-request - A JSON object containing the ID of the gateway to delete. - `com.amazonaws.services.storagegateway.model.DeleteGatewayRequest`
returns: Result of the DeleteGateway operation returned by the service. - `com.amazonaws.services.storagegateway.model.DeleteGatewayResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.DeleteGatewayResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DeleteGatewayRequest delete-gateway-request]
(-> this (.deleteGateway delete-gateway-request))))
(defn add-upload-buffer
"Configures one or more gateway local disks as upload buffer for a specified gateway. This operation is supported
for the stored volume, cached volume and tape gateway types.
In the request, you specify the gateway Amazon Resource Name (ARN) to which you want to add upload buffer, and
one or more disk IDs that you want to configure as upload buffer.
add-upload-buffer-request - `com.amazonaws.services.storagegateway.model.AddUploadBufferRequest`
returns: Result of the AddUploadBuffer operation returned by the service. - `com.amazonaws.services.storagegateway.model.AddUploadBufferResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.AddUploadBufferResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.AddUploadBufferRequest add-upload-buffer-request]
(-> this (.addUploadBuffer add-upload-buffer-request))))
(defn describe-gateway-information
"Returns metadata about a gateway such as its name, network interfaces, configured time zone, and the state
(whether the gateway is running or not). To specify which gateway to describe, use the Amazon Resource Name (ARN)
of the gateway in your request.
describe-gateway-information-request - A JSON object containing the ID of the gateway. - `com.amazonaws.services.storagegateway.model.DescribeGatewayInformationRequest`
returns: Result of the DescribeGatewayInformation operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeGatewayInformationResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.DescribeGatewayInformationResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeGatewayInformationRequest describe-gateway-information-request]
(-> this (.describeGatewayInformation describe-gateway-information-request))))
(defn create-storedi-scsi-volume
"Creates a volume on a specified gateway. This operation is only supported in the stored volume gateway type.
The size of the volume to create is inferred from the disk size. You can choose to preserve existing data on the
disk, create volume from an existing snapshot, or create an empty volume. If you choose to create an empty
gateway volume, then any existing data on the disk is erased.
In the request you must specify the gateway and the disk information on which you are creating the volume. In
response, the gateway creates the volume and returns volume information such as the volume Amazon Resource Name
(ARN), its size, and the iSCSI target ARN that initiators can use to connect to the volume target.
create-storedi-scsi-volume-request - A JSON object containing one or more of the following fields: CreateStorediSCSIVolumeInput$DiskId CreateStorediSCSIVolumeInput$NetworkInterfaceId CreateStorediSCSIVolumeInput$PreserveExistingData CreateStorediSCSIVolumeInput$SnapshotId CreateStorediSCSIVolumeInput$TargetName - `com.amazonaws.services.storagegateway.model.CreateStorediSCSIVolumeRequest`
returns: Result of the CreateStorediSCSIVolume operation returned by the service. - `com.amazonaws.services.storagegateway.model.CreateStorediSCSIVolumeResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.CreateStorediSCSIVolumeResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CreateStorediSCSIVolumeRequest create-storedi-scsi-volume-request]
(-> this (.createStorediSCSIVolume create-storedi-scsi-volume-request))))
(defn create-snapshot-from-volume-recovery-point
"Initiates a snapshot of a gateway from a volume recovery point. This operation is only supported in the cached
volume gateway type.
A volume recovery point is a point in time at which all data of the volume is consistent and from which you can
create a snapshot. To get a list of volume recovery point for cached volume gateway, use
ListVolumeRecoveryPoints.
In the CreateSnapshotFromVolumeRecoveryPoint request, you identify the volume by providing its
Amazon Resource Name (ARN). You must also provide a description for the snapshot. When the gateway takes a
snapshot of the specified volume, the snapshot and its description appear in the AWS Storage Gateway console. In
response, the gateway returns you a snapshot ID. You can use this snapshot ID to check the snapshot progress or
later use it when you want to create a volume from a snapshot.
To list or delete a snapshot, you must use the Amazon EC2 API. For more information, in Amazon Elastic Compute
Cloud API Reference.
create-snapshot-from-volume-recovery-point-request - `com.amazonaws.services.storagegateway.model.CreateSnapshotFromVolumeRecoveryPointRequest`
returns: Result of the CreateSnapshotFromVolumeRecoveryPoint operation returned by the service. - `com.amazonaws.services.storagegateway.model.CreateSnapshotFromVolumeRecoveryPointResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.CreateSnapshotFromVolumeRecoveryPointResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CreateSnapshotFromVolumeRecoveryPointRequest create-snapshot-from-volume-recovery-point-request]
(-> this (.createSnapshotFromVolumeRecoveryPoint create-snapshot-from-volume-recovery-point-request))))
(defn list-file-shares
"Gets a list of the file shares for a specific file gateway, or the list of file shares that belong to the calling
user account. This operation is only supported for file gateways.
list-file-shares-request - ListFileShareInput - `com.amazonaws.services.storagegateway.model.ListFileSharesRequest`
returns: Result of the ListFileShares operation returned by the service. - `com.amazonaws.services.storagegateway.model.ListFileSharesResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.ListFileSharesResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.ListFileSharesRequest list-file-shares-request]
(-> this (.listFileShares list-file-shares-request))))
(defn cancel-retrieval
"Cancels retrieval of a virtual tape from the virtual tape shelf (VTS) to a gateway after the retrieval process is
initiated. The virtual tape is returned to the VTS. This operation is only supported in the tape gateway type.
cancel-retrieval-request - CancelRetrievalInput - `com.amazonaws.services.storagegateway.model.CancelRetrievalRequest`
returns: Result of the CancelRetrieval operation returned by the service. - `com.amazonaws.services.storagegateway.model.CancelRetrievalResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.CancelRetrievalResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CancelRetrievalRequest cancel-retrieval-request]
(-> this (.cancelRetrieval cancel-retrieval-request))))
(defn update-gateway-information
"Updates a gateway's metadata, which includes the gateway's name and time zone. To specify which gateway to
update, use the Amazon Resource Name (ARN) of the gateway in your request.
For Gateways activated after September 2, 2015, the gateway's ARN contains the gateway ID rather than the gateway
name. However, changing the name of the gateway has no effect on the gateway's ARN.
update-gateway-information-request - `com.amazonaws.services.storagegateway.model.UpdateGatewayInformationRequest`
returns: Result of the UpdateGatewayInformation operation returned by the service. - `com.amazonaws.services.storagegateway.model.UpdateGatewayInformationResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.UpdateGatewayInformationResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.UpdateGatewayInformationRequest update-gateway-information-request]
(-> this (.updateGatewayInformation update-gateway-information-request))))
(defn create-snapshot
"Initiates a snapshot of a volume.
AWS Storage Gateway provides the ability to back up point-in-time snapshots of your data to Amazon Simple Storage
(S3) for durable off-site recovery, as well as import the data to an Amazon Elastic Block Store (EBS) volume in
Amazon Elastic Compute Cloud (EC2). You can take snapshots of your gateway volume on a scheduled or ad hoc basis.
This API enables you to take ad-hoc snapshot. For more information, see Editing a Snapshot Schedule.
In the CreateSnapshot request you identify the volume by providing its Amazon Resource Name (ARN). You must also
provide description for the snapshot. When AWS Storage Gateway takes the snapshot of specified volume, the
snapshot and description appears in the AWS Storage Gateway Console. In response, AWS Storage Gateway returns you
a snapshot ID. You can use this snapshot ID to check the snapshot progress or later use it when you want to
create a volume from a snapshot. This operation is only supported in stored and cached volume gateway type.
To list or delete a snapshot, you must use the Amazon EC2 API. For more information, see DescribeSnapshots or
DeleteSnapshot in the EC2
API reference.
Volume and snapshot IDs are changing to a longer length ID format. For more information, see the important note
on the Welcome page.
create-snapshot-request - A JSON object containing one or more of the following fields: CreateSnapshotInput$SnapshotDescription CreateSnapshotInput$VolumeARN - `com.amazonaws.services.storagegateway.model.CreateSnapshotRequest`
returns: Result of the CreateSnapshot operation returned by the service. - `com.amazonaws.services.storagegateway.model.CreateSnapshotResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.CreateSnapshotResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.CreateSnapshotRequest create-snapshot-request]
(-> this (.createSnapshot create-snapshot-request))))
(defn update-bandwidth-rate-limit
"Updates the bandwidth rate limits of a gateway. You can update both the upload and download bandwidth rate limit
or specify only one of the two. If you don't set a bandwidth rate limit, the existing rate limit remains.
By default, a gateway's bandwidth rate limits are not set. If you don't set any limit, the gateway does not have
any limitations on its bandwidth usage and could potentially use the maximum available bandwidth.
To specify which gateway to update, use the Amazon Resource Name (ARN) of the gateway in your request.
update-bandwidth-rate-limit-request - A JSON object containing one or more of the following fields: UpdateBandwidthRateLimitInput$AverageDownloadRateLimitInBitsPerSec UpdateBandwidthRateLimitInput$AverageUploadRateLimitInBitsPerSec - `com.amazonaws.services.storagegateway.model.UpdateBandwidthRateLimitRequest`
returns: Result of the UpdateBandwidthRateLimit operation returned by the service. - `com.amazonaws.services.storagegateway.model.UpdateBandwidthRateLimitResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.UpdateBandwidthRateLimitResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.UpdateBandwidthRateLimitRequest update-bandwidth-rate-limit-request]
(-> this (.updateBandwidthRateLimit update-bandwidth-rate-limit-request))))
(defn describe-snapshot-schedule
"Describes the snapshot schedule for the specified gateway volume. The snapshot schedule information includes
intervals at which snapshots are automatically initiated on the volume. This operation is only supported in the
cached volume and stored volume types.
describe-snapshot-schedule-request - A JSON object containing the DescribeSnapshotScheduleInput$VolumeARN of the volume. - `com.amazonaws.services.storagegateway.model.DescribeSnapshotScheduleRequest`
returns: Result of the DescribeSnapshotSchedule operation returned by the service. - `com.amazonaws.services.storagegateway.model.DescribeSnapshotScheduleResult`
throws: com.amazonaws.services.storagegateway.model.InvalidGatewayRequestException - An exception occurred because an invalid gateway request was issued to the service. For more information, see the error and message fields."
(^com.amazonaws.services.storagegateway.model.DescribeSnapshotScheduleResult [^AWSStorageGateway this ^com.amazonaws.services.storagegateway.model.DescribeSnapshotScheduleRequest describe-snapshot-schedule-request]
(-> this (.describeSnapshotSchedule describe-snapshot-schedule-request))))
|
954102984bf91e0b20ce8c0e63e0a4628b25648b79f8e28d333c7f9f1d81d213 | jaked/ocamljs | pa_jquery.ml | open Camlp4
module Id : Sig.Id =
struct
let name = "pa_jquery"
let version = "0.1"
end
module Make (Syntax : Sig.Camlp4Syntax) =
struct
open Sig
include Syntax
DELETE_RULE Gram expr: SELF; "#"; label END;
EXTEND Gram
expr: BEFORE "apply"
[ "#" LEFTA
[ e = SELF; "#"; lab = label -> <:expr< $e$ # $lab$ >> ]
];
END
end
let module M = Register.OCamlSyntaxExtension(Id)(Make) in ()
| null | https://raw.githubusercontent.com/jaked/ocamljs/378080ff1c8033bb15ed2bd29bf1443e301d7af8/src/jquery/pa_jquery.ml | ocaml | open Camlp4
module Id : Sig.Id =
struct
let name = "pa_jquery"
let version = "0.1"
end
module Make (Syntax : Sig.Camlp4Syntax) =
struct
open Sig
include Syntax
DELETE_RULE Gram expr: SELF; "#"; label END;
EXTEND Gram
expr: BEFORE "apply"
[ "#" LEFTA
[ e = SELF; "#"; lab = label -> <:expr< $e$ # $lab$ >> ]
];
END
end
let module M = Register.OCamlSyntaxExtension(Id)(Make) in ()
| |
3ffe27da915bef377314694c286d73d19329d68fc2362059970f0655cda14e89 | jserot/lascar | main.ml | (* This is a reformulation of ../gensig1 in which the output [s] is assigned to states *)
open Utils
open Lascar
module S =
struct
type t = Off | On [@@deriving show {with_path=false}]
let compare = Stdlib.compare
let to_string = show
end
open S
module F = Fsm.Make(S)(Fsm_value.Int)
let mk n =
let open F in
create
~inps:["start",[0;1]]
~outps:["s",[0;1]]
~vars:["k", ListExt.range Fun.id 0 n]
~states:[Off,["s",0]; On,["s",1]]
~istate:([], Off)
~trans:[
Off, mk_trans "start=1 | k:=0", On;
On, ([Test ("k", "<", EConst n)], mk_acts "k:=k+1"), On;
On, ([Test ("k", "=", EConst n)], []), Off
]
let m1 = mk 2
let _ = F.dot_output "m1" m1
module FF = Conv.Fsm(F)
let m2 = FF.defactorize ~init:(Some ([],(Off,["k",0]))) [] m1
let _ = FF.dot_output ~options:[Dot.RankdirLR] "m2" m2
let _ = FF.dot_output_execs "m2_execs" ~options:[Dot.RankdirLR] 8 m2
| null | https://raw.githubusercontent.com/jserot/lascar/79bd11cd0d47545bccfc3a3571f37af065915c83/examples/fsm/gensig2/main.ml | ocaml | This is a reformulation of ../gensig1 in which the output [s] is assigned to states |
open Utils
open Lascar
module S =
struct
type t = Off | On [@@deriving show {with_path=false}]
let compare = Stdlib.compare
let to_string = show
end
open S
module F = Fsm.Make(S)(Fsm_value.Int)
let mk n =
let open F in
create
~inps:["start",[0;1]]
~outps:["s",[0;1]]
~vars:["k", ListExt.range Fun.id 0 n]
~states:[Off,["s",0]; On,["s",1]]
~istate:([], Off)
~trans:[
Off, mk_trans "start=1 | k:=0", On;
On, ([Test ("k", "<", EConst n)], mk_acts "k:=k+1"), On;
On, ([Test ("k", "=", EConst n)], []), Off
]
let m1 = mk 2
let _ = F.dot_output "m1" m1
module FF = Conv.Fsm(F)
let m2 = FF.defactorize ~init:(Some ([],(Off,["k",0]))) [] m1
let _ = FF.dot_output ~options:[Dot.RankdirLR] "m2" m2
let _ = FF.dot_output_execs "m2_execs" ~options:[Dot.RankdirLR] 8 m2
|
f43fc81e7a615bb225debb7e1e51104184a0b1b0d1b2cffa446214f45111f4f9 | semmons99/clojure-euler | prob-029.clj | problem 029 ; ; ; ; ; ; ; ; ; ;
(use '[clojure.contrib.math :only (expt)])
(defn prob-029 []
(count (distinct (for [a (range 2 101)
b (range 2 101)]
(expt a b)))))
| null | https://raw.githubusercontent.com/semmons99/clojure-euler/3480bc313b9df7f282dadf6e0b48d96230f1bfc1/prob-029.clj | clojure | ; ; ; ; ; ; ; ; ; | (use '[clojure.contrib.math :only (expt)])
(defn prob-029 []
(count (distinct (for [a (range 2 101)
b (range 2 101)]
(expt a b)))))
|
14942026676949412a375211b82ec6b648ab69832188c7fef3b536b3f861bcb8 | yakaz/yamerl | string_list.erl | -module('string_list').
-include_lib("eunit/include/eunit.hrl").
single_test_() ->
?_assertMatch(
{yamerl_parser,
string,
[],
<<>>,
19,
true,
[],
0,
20,
4,
2,
false,
4,
2,
utf8,
false,
undefined,
_,
_,
[],
{bcoll,root,0,-1,1,1,-1,1,1},
false,
false,
false,
[{impl_key,false,false,undefined,undefined,1,1}],
false,
false,
_,
[],
0,
11,
10,
undefined,
undefined,
_,
false,
[],
[
{yamerl_stream_end,4,2},
{yamerl_doc_end,4,2},
{yamerl_collection_end,4,1,flow,sequence},
{yamerl_scalar,3,3,
{yamerl_tag,3,3,{non_specific,"?"}},
flow,plain,"two"},
{yamerl_sequence_entry,3,3},
{yamerl_scalar,2,3,
{yamerl_tag,2,3,{non_specific,"?"}},
flow,plain,"one"},
{yamerl_sequence_entry,2,3},
{yamerl_collection_start,1,1,
{yamerl_tag,1,1,{non_specific,"?"}},
flow,sequence},
{yamerl_doc_start,1,1,{1,2},_},
{yamerl_stream_start,1,1,utf8}
]
},
yamerl_parser:string("[\r\n one,\r\n two\r\n]")
).
| null | https://raw.githubusercontent.com/yakaz/yamerl/0032607a7b27fa2b548fc9a02d7ae6b53469c0c5/test/parsing/string_list.erl | erlang | -module('string_list').
-include_lib("eunit/include/eunit.hrl").
single_test_() ->
?_assertMatch(
{yamerl_parser,
string,
[],
<<>>,
19,
true,
[],
0,
20,
4,
2,
false,
4,
2,
utf8,
false,
undefined,
_,
_,
[],
{bcoll,root,0,-1,1,1,-1,1,1},
false,
false,
false,
[{impl_key,false,false,undefined,undefined,1,1}],
false,
false,
_,
[],
0,
11,
10,
undefined,
undefined,
_,
false,
[],
[
{yamerl_stream_end,4,2},
{yamerl_doc_end,4,2},
{yamerl_collection_end,4,1,flow,sequence},
{yamerl_scalar,3,3,
{yamerl_tag,3,3,{non_specific,"?"}},
flow,plain,"two"},
{yamerl_sequence_entry,3,3},
{yamerl_scalar,2,3,
{yamerl_tag,2,3,{non_specific,"?"}},
flow,plain,"one"},
{yamerl_sequence_entry,2,3},
{yamerl_collection_start,1,1,
{yamerl_tag,1,1,{non_specific,"?"}},
flow,sequence},
{yamerl_doc_start,1,1,{1,2},_},
{yamerl_stream_start,1,1,utf8}
]
},
yamerl_parser:string("[\r\n one,\r\n two\r\n]")
).
| |
8dd38780faf55f10b73469776396d4dc664266d18d9d059bf4c3cae07beda77c | rjnw/sham | id.rkt | #lang sham/cryptol
#:compile-with sham
(def
[id : {a} a -> a]
[id a = a])
(test id-pass (== (id true) true))
(test id-fail (== (id true) false))
| null | https://raw.githubusercontent.com/rjnw/sham/6e0524b1eb01bcda83ae7a5be6339da4257c6781/sham-examples/sham/cryptol/tests/id.rkt | racket | #lang sham/cryptol
#:compile-with sham
(def
[id : {a} a -> a]
[id a = a])
(test id-pass (== (id true) true))
(test id-fail (== (id true) false))
| |
c646d07862ee94161eeef7d6c7e2be41d53ad57cb763c0d4dd20b38c6a63ef84 | backtracking/mlpost | ctypes.ml | type matrix = Cairo.matrix = {
mutable xx : float;
mutable yx : float;
mutable xy : float;
mutable yy : float;
mutable x0 : float;
mutable y0 : float;
}
type point = { x : float; y : float }
| null | https://raw.githubusercontent.com/backtracking/mlpost/bd4305289fd64d531b9f42d64dd641d72ab82fd5/src/ctypes.ml | ocaml | type matrix = Cairo.matrix = {
mutable xx : float;
mutable yx : float;
mutable xy : float;
mutable yy : float;
mutable x0 : float;
mutable y0 : float;
}
type point = { x : float; y : float }
| |
e59251690588bec64efb41063bcb7dcbab4dfce0b81ffad8bdd894472e53c4db | typeclasses/dsv | Position.hs | # LANGUAGE NoImplicitPrelude #
# LANGUAGE DerivingStrategies , GeneralizedNewtypeDeriving #
module DSV.Position
( RowNumber (..)
, ColumnNumber (..)
, ColumnName (..)
, Position (..)
, At (..)
, AtHeader (..)
) where
import DSV.IO
import DSV.Numbers
import DSV.Prelude
newtype RowNumber =
RowNumber Positive
deriving stock (Eq, Ord)
deriving newtype (Num, Show)
newtype ColumnNumber =
ColumnNumber Positive
deriving stock (Eq, Ord)
deriving newtype (Num, Show)
newtype ColumnName str =
ColumnName str
deriving stock (Eq, Ord, Show)
data Position row col =
Position row col
deriving stock (Eq, Ord, Show)
data At p a = At p {- ^ Position -} a
deriving stock (Eq, Ord, Show)
deriving anyclass Exception
data AtHeader a = AtHeader a
deriving stock (Eq, Ord, Show)
deriving anyclass Exception
| null | https://raw.githubusercontent.com/typeclasses/dsv/ae4eb823e27e4c569c4f9b097441985cf865fbab/dsv/library/DSV/Position.hs | haskell | ^ Position | # LANGUAGE NoImplicitPrelude #
# LANGUAGE DerivingStrategies , GeneralizedNewtypeDeriving #
module DSV.Position
( RowNumber (..)
, ColumnNumber (..)
, ColumnName (..)
, Position (..)
, At (..)
, AtHeader (..)
) where
import DSV.IO
import DSV.Numbers
import DSV.Prelude
newtype RowNumber =
RowNumber Positive
deriving stock (Eq, Ord)
deriving newtype (Num, Show)
newtype ColumnNumber =
ColumnNumber Positive
deriving stock (Eq, Ord)
deriving newtype (Num, Show)
newtype ColumnName str =
ColumnName str
deriving stock (Eq, Ord, Show)
data Position row col =
Position row col
deriving stock (Eq, Ord, Show)
deriving stock (Eq, Ord, Show)
deriving anyclass Exception
data AtHeader a = AtHeader a
deriving stock (Eq, Ord, Show)
deriving anyclass Exception
|
ab1da203914df0a369cbe7b86c694bb181056333efcc483494504367dcfdf3e0 | haskell/aeson | Text.hs | {-# LANGUAGE BangPatterns #-}
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
-- |
-- Module: Data.Aeson.Text
Copyright : ( c ) 2012 - 2016
( c ) 2011 MailRank , Inc.
License : BSD3
Maintainer : < >
-- Stability: experimental
-- Portability: portable
--
Most frequently , you 'll probably want to encode straight to UTF-8
-- (the standard JSON encoding) using 'encode'.
--
-- You can use the conversions to 'Builder's when embedding JSON messages as
-- parts of a protocol.
module Data.Aeson.Text
(
encodeToLazyText
, encodeToTextBuilder
) where
import Prelude.Compat
import Data.Aeson.Types (Value(..), ToJSON(..))
import Data.Aeson.Encoding (encodingToLazyByteString)
import qualified Data.Aeson.KeyMap as KM
import Data.Scientific (FPFormat(..), Scientific, base10Exponent)
import Data.Text.Lazy.Builder (Builder)
import qualified Data.Text.Lazy.Builder as TB
import Data.Text.Lazy.Builder.Scientific (formatScientificBuilder)
import Numeric (showHex)
import qualified Data.Aeson.Key as Key
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LT
import qualified Data.Vector as V
-- | Encode a JSON 'Value' to a "Data.Text.Lazy"
--
-- /Note:/ uses 'toEncoding'
encodeToLazyText :: ToJSON a => a -> LT.Text
encodeToLazyText = LT.decodeUtf8 . encodingToLazyByteString . toEncoding
-- | Encode a JSON 'Value' to a "Data.Text" 'Builder', which can be
-- embedded efficiently in a text-based protocol.
--
-- If you are going to immediately encode straight to a
' L.ByteString ' , it is more efficient to use ' encode ' ( lazy ByteString )
-- or @'fromEncoding' . 'toEncoding'@ (ByteString.Builder) instead.
--
/Note:/ Uses ' toJSON '
encodeToTextBuilder :: ToJSON a => a -> Builder
encodeToTextBuilder =
go . toJSON
where
go Null = "null"
go (Bool b) = if b then "true" else "false"
go (Number s) = fromScientific s
go (String s) = string s
go (Array v)
| V.null v = "[]"
| otherwise =
TB.singleton '[' <>
go (V.unsafeHead v) <>
V.foldr f (TB.singleton ']') (V.unsafeTail v)
where f a z = TB.singleton ',' <> go a <> z
go (Object m) =
case KM.toList m of
(x:xs) -> TB.singleton '{' <> one x <> foldr f (TB.singleton '}') xs
_ -> "{}"
where f a z = TB.singleton ',' <> one a <> z
one (k,v) = string (Key.toText k) <> TB.singleton ':' <> go v
string :: T.Text -> Builder
string s = TB.singleton '"' <> quote s <> TB.singleton '"'
where
quote q = case T.uncons t of
Nothing -> TB.fromText h
Just (!c,t') -> TB.fromText h <> escape c <> quote t'
where (h,t) = T.break isEscape q
isEscape c = c == '\"' ||
c == '\\' ||
c < '\x20'
escape '\"' = "\\\""
escape '\\' = "\\\\"
escape '\n' = "\\n"
escape '\r' = "\\r"
escape '\t' = "\\t"
escape c
| c < '\x20' = TB.fromString $ "\\u" ++ replicate (4 - length h) '0' ++ h
| otherwise = TB.singleton c
where h = showHex (fromEnum c) ""
fromScientific :: Scientific -> Builder
fromScientific s = formatScientificBuilder format prec s
where
(format, prec)
| base10Exponent s < 0 = (Generic, Nothing)
| otherwise = (Fixed, Just 0)
| null | https://raw.githubusercontent.com/haskell/aeson/241f6d69cd2b77cee5ebe75e301b775c36b05017/src/Data/Aeson/Text.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE OverloadedStrings #
|
Module: Data.Aeson.Text
Stability: experimental
Portability: portable
(the standard JSON encoding) using 'encode'.
You can use the conversions to 'Builder's when embedding JSON messages as
parts of a protocol.
| Encode a JSON 'Value' to a "Data.Text.Lazy"
/Note:/ uses 'toEncoding'
| Encode a JSON 'Value' to a "Data.Text" 'Builder', which can be
embedded efficiently in a text-based protocol.
If you are going to immediately encode straight to a
or @'fromEncoding' . 'toEncoding'@ (ByteString.Builder) instead.
| # LANGUAGE NoImplicitPrelude #
Copyright : ( c ) 2012 - 2016
( c ) 2011 MailRank , Inc.
License : BSD3
Maintainer : < >
Most frequently , you 'll probably want to encode straight to UTF-8
module Data.Aeson.Text
(
encodeToLazyText
, encodeToTextBuilder
) where
import Prelude.Compat
import Data.Aeson.Types (Value(..), ToJSON(..))
import Data.Aeson.Encoding (encodingToLazyByteString)
import qualified Data.Aeson.KeyMap as KM
import Data.Scientific (FPFormat(..), Scientific, base10Exponent)
import Data.Text.Lazy.Builder (Builder)
import qualified Data.Text.Lazy.Builder as TB
import Data.Text.Lazy.Builder.Scientific (formatScientificBuilder)
import Numeric (showHex)
import qualified Data.Aeson.Key as Key
import qualified Data.Text as T
import qualified Data.Text.Lazy as LT
import qualified Data.Text.Lazy.Encoding as LT
import qualified Data.Vector as V
encodeToLazyText :: ToJSON a => a -> LT.Text
encodeToLazyText = LT.decodeUtf8 . encodingToLazyByteString . toEncoding
' L.ByteString ' , it is more efficient to use ' encode ' ( lazy ByteString )
/Note:/ Uses ' toJSON '
encodeToTextBuilder :: ToJSON a => a -> Builder
encodeToTextBuilder =
go . toJSON
where
go Null = "null"
go (Bool b) = if b then "true" else "false"
go (Number s) = fromScientific s
go (String s) = string s
go (Array v)
| V.null v = "[]"
| otherwise =
TB.singleton '[' <>
go (V.unsafeHead v) <>
V.foldr f (TB.singleton ']') (V.unsafeTail v)
where f a z = TB.singleton ',' <> go a <> z
go (Object m) =
case KM.toList m of
(x:xs) -> TB.singleton '{' <> one x <> foldr f (TB.singleton '}') xs
_ -> "{}"
where f a z = TB.singleton ',' <> one a <> z
one (k,v) = string (Key.toText k) <> TB.singleton ':' <> go v
string :: T.Text -> Builder
string s = TB.singleton '"' <> quote s <> TB.singleton '"'
where
quote q = case T.uncons t of
Nothing -> TB.fromText h
Just (!c,t') -> TB.fromText h <> escape c <> quote t'
where (h,t) = T.break isEscape q
isEscape c = c == '\"' ||
c == '\\' ||
c < '\x20'
escape '\"' = "\\\""
escape '\\' = "\\\\"
escape '\n' = "\\n"
escape '\r' = "\\r"
escape '\t' = "\\t"
escape c
| c < '\x20' = TB.fromString $ "\\u" ++ replicate (4 - length h) '0' ++ h
| otherwise = TB.singleton c
where h = showHex (fromEnum c) ""
fromScientific :: Scientific -> Builder
fromScientific s = formatScientificBuilder format prec s
where
(format, prec)
| base10Exponent s < 0 = (Generic, Nothing)
| otherwise = (Fixed, Just 0)
|
0182875209991afbca64a25f5940bad7b4aafc5fe49266230f3fc287f6b90f39 | mfoemmel/erlang-otp | wxCommandEvent.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2008 - 2009 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%% This file is generated DO NOT EDIT
%% @doc See external documentation: <a href="">wxCommandEvent</a>.
%% <dl><dt>Use {@link wxEvtHandler:connect/3.} with EventType:</dt>
< dd><em > command_button_clicked</em > , < em > command_checkbox_clicked</em > , < em > command_choice_selected</em > , < em > , < em > command_listbox_doubleclicked</em > , < em > command_text_updated</em > , < em > command_text_enter</em > , < em > command_menu_selected</em > , < em > > , < em > command_radiobox_selected</em > , < em > command_radiobutton_selected</em > , < em > command_scrollbar_updated</em > , < em > command_vlbox_selected</em > , < em > command_combobox_selected</em > , < em > command_tool_rclicked</em > , < em > command_tool_enter</em > , < em > command_checklistbox_toggled</em > , < em > command_togglebutton_clicked</em > , < em > command_left_click</em > , < em > command_left_dclick</em > , < em > command_right_click</em > , < em > command_set_focus</em > , < em > command_kill_focus</em > , < em > command_enter</em></dd></dl >
%% See also the message variant {@link wxEvtHandler:wxCommand(). #wxCommand{}} event record type.
%%
%% <p>This class is derived (and can use functions) from:
%% <br />{@link wxEvent}
%% </p>
%% @type wxCommandEvent(). An object reference, The representation is internal
%% and can be changed without notice. It can't be used for comparsion
%% stored on disc or distributed for use on other nodes.
-module(wxCommandEvent).
-include("wxe.hrl").
-export([getClientData/1,getExtraLong/1,getInt/1,getSelection/1,getString/1,
isChecked/1,isSelection/1,setInt/2,setString/2]).
%% inherited exports
-export([getId/1,getSkipped/1,getTimestamp/1,isCommandEvent/1,parent_class/1,
resumePropagation/2,shouldPropagate/1,skip/1,skip/2,stopPropagation/1]).
%% @hidden
parent_class(wxEvent) -> true;
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
%% @spec (This::wxCommandEvent()) -> term()
%% @doc See <a href="#wxcommandeventgetclientobject">external documentation</a>.
getClientData(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_getClientData,
<<ThisRef:32/?UI>>).
%% @spec (This::wxCommandEvent()) -> integer()
%% @doc See <a href="#wxcommandeventgetextralong">external documentation</a>.
getExtraLong(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_GetExtraLong,
<<ThisRef:32/?UI>>).
%% @spec (This::wxCommandEvent()) -> integer()
%% @doc See <a href="#wxcommandeventgetint">external documentation</a>.
getInt(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_GetInt,
<<ThisRef:32/?UI>>).
%% @spec (This::wxCommandEvent()) -> integer()
%% @doc See <a href="#wxcommandeventgetselection">external documentation</a>.
getSelection(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_GetSelection,
<<ThisRef:32/?UI>>).
%% @spec (This::wxCommandEvent()) -> string()
%% @doc See <a href="#wxcommandeventgetstring">external documentation</a>.
getString(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_GetString,
<<ThisRef:32/?UI>>).
%% @spec (This::wxCommandEvent()) -> bool()
%% @doc See <a href="#wxcommandeventischecked">external documentation</a>.
isChecked(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_IsChecked,
<<ThisRef:32/?UI>>).
%% @spec (This::wxCommandEvent()) -> bool()
%% @doc See <a href="#wxcommandeventisselection">external documentation</a>.
isSelection(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_IsSelection,
<<ThisRef:32/?UI>>).
%% @spec (This::wxCommandEvent(), I::integer()) -> ok
%% @doc See <a href="#wxcommandeventsetint">external documentation</a>.
setInt(#wx_ref{type=ThisT,ref=ThisRef},I)
when is_integer(I) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:cast(?wxCommandEvent_SetInt,
<<ThisRef:32/?UI,I:32/?UI>>).
%% @spec (This::wxCommandEvent(), S::string()) -> ok
@doc See < a href=" / manuals / stable / wx_wxcommandevent.html#wxcommandeventsetstring">external documentation</a > .
setString(#wx_ref{type=ThisT,ref=ThisRef},S)
when is_list(S) ->
?CLASS(ThisT,wxCommandEvent),
S_UC = unicode:characters_to_binary([S,0]),
wxe_util:cast(?wxCommandEvent_SetString,
<<ThisRef:32/?UI,(byte_size(S_UC)):32/?UI,(S_UC)/binary, 0:(((8- ((0+byte_size(S_UC)) band 16#7)) band 16#7))/unit:8>>).
%% From wxEvent
%% @hidden
stopPropagation(This) -> wxEvent:stopPropagation(This).
%% @hidden
skip(This, Options) -> wxEvent:skip(This, Options).
%% @hidden
skip(This) -> wxEvent:skip(This).
%% @hidden
shouldPropagate(This) -> wxEvent:shouldPropagate(This).
%% @hidden
resumePropagation(This,PropagationLevel) -> wxEvent:resumePropagation(This,PropagationLevel).
%% @hidden
isCommandEvent(This) -> wxEvent:isCommandEvent(This).
%% @hidden
getTimestamp(This) -> wxEvent:getTimestamp(This).
%% @hidden
getSkipped(This) -> wxEvent:getSkipped(This).
%% @hidden
getId(This) -> wxEvent:getId(This).
| null | https://raw.githubusercontent.com/mfoemmel/erlang-otp/9c6fdd21e4e6573ca6f567053ff3ac454d742bc2/lib/wx/src/gen/wxCommandEvent.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
This file is generated DO NOT EDIT
@doc See external documentation: <a href="">wxCommandEvent</a>.
<dl><dt>Use {@link wxEvtHandler:connect/3.} with EventType:</dt>
See also the message variant {@link wxEvtHandler:wxCommand(). #wxCommand{}} event record type.
<p>This class is derived (and can use functions) from:
<br />{@link wxEvent}
</p>
@type wxCommandEvent(). An object reference, The representation is internal
and can be changed without notice. It can't be used for comparsion
stored on disc or distributed for use on other nodes.
inherited exports
@hidden
@spec (This::wxCommandEvent()) -> term()
@doc See <a href="#wxcommandeventgetclientobject">external documentation</a>.
@spec (This::wxCommandEvent()) -> integer()
@doc See <a href="#wxcommandeventgetextralong">external documentation</a>.
@spec (This::wxCommandEvent()) -> integer()
@doc See <a href="#wxcommandeventgetint">external documentation</a>.
@spec (This::wxCommandEvent()) -> integer()
@doc See <a href="#wxcommandeventgetselection">external documentation</a>.
@spec (This::wxCommandEvent()) -> string()
@doc See <a href="#wxcommandeventgetstring">external documentation</a>.
@spec (This::wxCommandEvent()) -> bool()
@doc See <a href="#wxcommandeventischecked">external documentation</a>.
@spec (This::wxCommandEvent()) -> bool()
@doc See <a href="#wxcommandeventisselection">external documentation</a>.
@spec (This::wxCommandEvent(), I::integer()) -> ok
@doc See <a href="#wxcommandeventsetint">external documentation</a>.
@spec (This::wxCommandEvent(), S::string()) -> ok
From wxEvent
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden
@hidden | Copyright Ericsson AB 2008 - 2009 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
< dd><em > command_button_clicked</em > , < em > command_checkbox_clicked</em > , < em > command_choice_selected</em > , < em > , < em > command_listbox_doubleclicked</em > , < em > command_text_updated</em > , < em > command_text_enter</em > , < em > command_menu_selected</em > , < em > > , < em > command_radiobox_selected</em > , < em > command_radiobutton_selected</em > , < em > command_scrollbar_updated</em > , < em > command_vlbox_selected</em > , < em > command_combobox_selected</em > , < em > command_tool_rclicked</em > , < em > command_tool_enter</em > , < em > command_checklistbox_toggled</em > , < em > command_togglebutton_clicked</em > , < em > command_left_click</em > , < em > command_left_dclick</em > , < em > command_right_click</em > , < em > command_set_focus</em > , < em > command_kill_focus</em > , < em > command_enter</em></dd></dl >
-module(wxCommandEvent).
-include("wxe.hrl").
-export([getClientData/1,getExtraLong/1,getInt/1,getSelection/1,getString/1,
isChecked/1,isSelection/1,setInt/2,setString/2]).
-export([getId/1,getSkipped/1,getTimestamp/1,isCommandEvent/1,parent_class/1,
resumePropagation/2,shouldPropagate/1,skip/1,skip/2,stopPropagation/1]).
parent_class(wxEvent) -> true;
parent_class(_Class) -> erlang:error({badtype, ?MODULE}).
getClientData(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_getClientData,
<<ThisRef:32/?UI>>).
getExtraLong(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_GetExtraLong,
<<ThisRef:32/?UI>>).
getInt(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_GetInt,
<<ThisRef:32/?UI>>).
getSelection(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_GetSelection,
<<ThisRef:32/?UI>>).
getString(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_GetString,
<<ThisRef:32/?UI>>).
isChecked(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_IsChecked,
<<ThisRef:32/?UI>>).
isSelection(#wx_ref{type=ThisT,ref=ThisRef}) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:call(?wxCommandEvent_IsSelection,
<<ThisRef:32/?UI>>).
setInt(#wx_ref{type=ThisT,ref=ThisRef},I)
when is_integer(I) ->
?CLASS(ThisT,wxCommandEvent),
wxe_util:cast(?wxCommandEvent_SetInt,
<<ThisRef:32/?UI,I:32/?UI>>).
@doc See < a href=" / manuals / stable / wx_wxcommandevent.html#wxcommandeventsetstring">external documentation</a > .
setString(#wx_ref{type=ThisT,ref=ThisRef},S)
when is_list(S) ->
?CLASS(ThisT,wxCommandEvent),
S_UC = unicode:characters_to_binary([S,0]),
wxe_util:cast(?wxCommandEvent_SetString,
<<ThisRef:32/?UI,(byte_size(S_UC)):32/?UI,(S_UC)/binary, 0:(((8- ((0+byte_size(S_UC)) band 16#7)) band 16#7))/unit:8>>).
stopPropagation(This) -> wxEvent:stopPropagation(This).
skip(This, Options) -> wxEvent:skip(This, Options).
skip(This) -> wxEvent:skip(This).
shouldPropagate(This) -> wxEvent:shouldPropagate(This).
resumePropagation(This,PropagationLevel) -> wxEvent:resumePropagation(This,PropagationLevel).
isCommandEvent(This) -> wxEvent:isCommandEvent(This).
getTimestamp(This) -> wxEvent:getTimestamp(This).
getSkipped(This) -> wxEvent:getSkipped(This).
getId(This) -> wxEvent:getId(This).
|
a27064e569c5d4f792265735f712f4b1de834132ab4dc0b9c68117af1db21cef | racket/slideshow | fullscreen.rkt | #lang racket
(require "fullscreen/base.rkt"
"pict.rkt")
(provide (except-out (all-from-out racket
"fullscreen/base.rkt"
"pict.rkt")
printable<%>))
(module reader syntax/module-reader
slideshow/fullscreen)
| null | https://raw.githubusercontent.com/racket/slideshow/4588507e83e9aa859c6841e655b98417d46987e6/slideshow-lib/slideshow/fullscreen.rkt | racket | #lang racket
(require "fullscreen/base.rkt"
"pict.rkt")
(provide (except-out (all-from-out racket
"fullscreen/base.rkt"
"pict.rkt")
printable<%>))
(module reader syntax/module-reader
slideshow/fullscreen)
| |
2a59277680bb939362eb2b278f2bd6e40b3f5fa50f3b0e7a65683669f6d878af | etoroxlabs/lira | IntermediateCompilerTest.hs | MIT License
--
Copyright ( c ) 2019 eToroX Labs
--
-- Permission is hereby granted, free of charge, to any person obtaining a copy
-- of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
-- to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
-- furnished to do so, subject to the following conditions:
--
-- The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
--
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
-- IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
-- FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
-- AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
-- OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
-- SOFTWARE.
module IntermediateCompilerTest
( tests
)
where
import qualified Data.Map.Strict as Map
import Lira.Contract
import Lira.Contract.Intermediate
import Lira.Contract.Parser
import Lira.Backends.IntermediateCompiler
import LiraParserTest hiding (tests)
import LiraTestHelpers
import Test.Hspec
tests :: Spec
tests = do
canonicalNestedIfWithinTest
activateMapSimple
marginRefundMapSimple
activateMapCanonicalIw
refundMapCanonicalIw
basicTransferTest
timeTranslationIMemExpTest
zeroContractCodeTest
canonicalNestedIfWithinTest :: Spec
canonicalNestedIfWithinTest = do
it "canonical nested if-within" $ do
intermediateCompile (parse' canonical_iw_source)
`shouldBe` intermediateContract
where
intermediateContract =
IntermediateContract parties transfers memExps activateMap marginRefundMap
parties =
[ Bound "0x1234567890123456789012345678901234567891"
, Bound "0x1234567890123456789012345678901234567892"
, Bound "0x1234567890123456789012345678901234567893"
, Bound "0x1234567890123456789012345678901234567894"
, Bound "0x1234567890123456789012345678901234567895"
, Bound "0x1234567890123456789012345678901234567896"
, Bound "0x1234567890123456789012345678901234567897"
, Bound "0x1234567890123456789012345678901234567898"
]
transfers =
[ TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567891"
, from = 0
, to = 0
, memExpPath = [(0, True), (1, True), (2, True)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567892"
, from = 1
, to = 1
, memExpPath = [(0, True), (1, True), (2, False)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567893"
, from = 2
, to = 2
, memExpPath = [(0, True), (1, False), (3, True), (4, True)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567894"
, from = 3
, to = 3
, memExpPath = [(0, True), (1, False), (3, True), (4, False)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567895"
, from = 4
, to = 4
, memExpPath = [(0, True), (1, False), (3, False)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567896"
, from = 5
, to = 5
, memExpPath = [(0, False), (5, True)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567897"
, from = 6
, to = 6
, memExpPath = [(0, False), (5, False), (6, True)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567898"
, from = 7
, to = 7
, memExpPath = [(0, False), (5, False), (6, False)]
}
]
memExps =
[ IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 1
, _IMemExpIdent = 0
, _IMemExp = GtExp (MultExp (Lit (IntVal 1)) (Lit (IntVal 1)))
(Lit (IntVal 1))
}
, IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 2
, _IMemExpIdent = 1
, _IMemExp = GtExp (MultExp (Lit (IntVal 2)) (Lit (IntVal 2)))
(Lit (IntVal 2))
}
, IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 3
, _IMemExpIdent = 2
, _IMemExp = GtExp (MultExp (Lit (IntVal 3)) (Lit (IntVal 3)))
(Lit (IntVal 3))
}
, IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 4
, _IMemExpIdent = 3
, _IMemExp = GtExp (MultExp (Lit (IntVal 4)) (Lit (IntVal 4)))
(Lit (IntVal 4))
}
, IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 5
, _IMemExpIdent = 4
, _IMemExp = GtExp (MultExp (Lit (IntVal 5)) (Lit (IntVal 5)))
(Lit (IntVal 5))
}
, IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 6
, _IMemExpIdent = 5
, _IMemExp = GtExp (MultExp (Lit (IntVal 6)) (Lit (IntVal 6)))
(Lit (IntVal 6))
}
, IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 7
, _IMemExpIdent = 6
, _IMemExp = GtExp (MultExp (Lit (IntVal 7)) (Lit (IntVal 7)))
(Lit (IntVal 7))
}
]
activateMap = Map.fromList
[ (("0x1234567890123456789012345678901234567891", 0), 1)
, (("0x1234567890123456789012345678901234567892", 1), 1)
, (("0x1234567890123456789012345678901234567893", 2), 1)
, (("0x1234567890123456789012345678901234567894", 3), 1)
, (("0x1234567890123456789012345678901234567895", 4), 1)
, (("0x1234567890123456789012345678901234567896", 5), 1)
, (("0x1234567890123456789012345678901234567897", 6), 1)
, (("0x1234567890123456789012345678901234567898", 7), 1)
]
marginRefundMap = Map.fromList
[ ( [(0, False)]
, [ ("0x1234567890123456789012345678901234567891", 0, 1)
, ("0x1234567890123456789012345678901234567892", 1, 1)
, ("0x1234567890123456789012345678901234567893", 2, 1)
, ("0x1234567890123456789012345678901234567894", 3, 1)
, ("0x1234567890123456789012345678901234567895", 4, 1)
]
)
, ( [(0, False), (5, False)]
, [("0x1234567890123456789012345678901234567896", 5, 1)]
)
, ( [(0, False), (5, False), (6, False)]
, [("0x1234567890123456789012345678901234567897", 6, 1)]
)
, ( [(0, False), (5, False), (6, True)]
, [("0x1234567890123456789012345678901234567898", 7, 1)]
)
, ( [(0, False), (5, True)]
, [ ("0x1234567890123456789012345678901234567897", 6, 1)
, ("0x1234567890123456789012345678901234567898", 7, 1)
]
)
, ( [(0, True)]
, [ ("0x1234567890123456789012345678901234567896", 5, 1)
, ("0x1234567890123456789012345678901234567897", 6, 1)
, ("0x1234567890123456789012345678901234567898", 7, 1)
]
)
, ( [(0, True), (1, False)]
, [ ("0x1234567890123456789012345678901234567891", 0, 1)
, ("0x1234567890123456789012345678901234567892", 1, 1)
]
)
, ( [(0, True), (1, False), (3, False)]
, [ ("0x1234567890123456789012345678901234567893", 2, 1)
, ("0x1234567890123456789012345678901234567894", 3, 1)
]
)
, ( [(0, True), (1, False), (3, True)]
, [("0x1234567890123456789012345678901234567895", 4, 1)]
)
, ( [(0, True), (1, False), (3, True), (4, False)]
, [("0x1234567890123456789012345678901234567893", 2, 1)]
)
, ( [(0, True), (1, False), (3, True), (4, True)]
, [("0x1234567890123456789012345678901234567894", 3, 1)]
)
, ( [(0, True), (1, True)]
, [ ("0x1234567890123456789012345678901234567893", 2, 1)
, ("0x1234567890123456789012345678901234567894", 3, 1)
, ("0x1234567890123456789012345678901234567895", 4, 1)
]
)
, ( [(0, True), (1, True), (2, False)]
, [("0x1234567890123456789012345678901234567891", 0, 1)]
)
, ( [(0, True), (1, True), (2, True)]
, [("0x1234567890123456789012345678901234567892", 1, 1)]
)
]
-- Test that the activateMap function returns a correct map given a function
activateMapSimple :: Spec
activateMapSimple = do
it "activateMapSimple" $ do
activateMap (intermediateCompile (parse' src)) `shouldBe` activateMap'
where
src
= "both( if true within seconds(1) then scale(1, 1, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) else scale(7, 7, transfer(0xdddddddddddddddddddddddddddddddddddddddd,0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee,0xffffffffffffffffffffffffffffffffffffffff)), if true within seconds(2) then scale(17, 17, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) else if true within seconds(3) then scale(53, 53, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) else scale(101, 101, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) )"
activateMap' =
(Map.fromList
[ (("0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0), 102)
, (("0xdddddddddddddddddddddddddddddddddddddddd", 2), 7)
]
)
marginRefundMapSimple :: Spec
marginRefundMapSimple = do
it "marginRefundMapSimple" $ do
marginRefundMap (intermediateCompile (parse' src)) `shouldBe` refundMap
where
src
= "both( if true within seconds(1) then scale(1, 1, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) else scale(7, 7, transfer(0xdddddddddddddddddddddddddddddddddddddddd,0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee,0xffffffffffffffffffffffffffffffffffffffff)), if true within seconds(2) then scale(17, 17, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) else if true within seconds(3) then scale(53, 53, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) else scale(101, 101, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) )"
refundMap =
(Map.fromList
[ ([(0, False)], [("0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0, 1)])
, ([(0, True)] , [("0xdddddddddddddddddddddddddddddddddddddddd", 2, 7)])
, ( [(1, False), (2, True)]
, [("0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0, 48)]
)
, ([(1, True)], [("0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0, 84)])
]
)
Test that the activateMap works for the canonical IW source code
activateMapCanonicalIw :: Spec
activateMapCanonicalIw = do
it "activateMapCanonicalIw" $ do
activateMap (intermediateCompile (parse' canonical_iw_source))
`shouldBe` activateMap'
where
activateMap' =
(Map.fromList
[ (("0x1234567890123456789012345678901234567891", 0), 1)
, (("0x1234567890123456789012345678901234567892", 1), 1)
, (("0x1234567890123456789012345678901234567893", 2), 1)
, (("0x1234567890123456789012345678901234567894", 3), 1)
, (("0x1234567890123456789012345678901234567895", 4), 1)
, (("0x1234567890123456789012345678901234567896", 5), 1)
, (("0x1234567890123456789012345678901234567897", 6), 1)
, (("0x1234567890123456789012345678901234567898", 7), 1)
]
)
refundMapCanonicalIw :: Spec
refundMapCanonicalIw = do
it "refundMapCanonicalIw" $ do
marginRefundMap (intermediateCompile (parse' canonical_iw_source))
`shouldBe` activateMap
where
activateMap =
(Map.fromList
[ ( [(0, False)]
, [ ("0x1234567890123456789012345678901234567891", 0, 1)
, ("0x1234567890123456789012345678901234567892", 1, 1)
, ("0x1234567890123456789012345678901234567893", 2, 1)
, ("0x1234567890123456789012345678901234567894", 3, 1)
, ("0x1234567890123456789012345678901234567895", 4, 1)
]
)
, ( [(0, False), (5, False)]
, [("0x1234567890123456789012345678901234567896", 5, 1)]
)
, ( [(0, False), (5, False), (6, False)]
, [("0x1234567890123456789012345678901234567897", 6, 1)]
)
, ( [(0, False), (5, False), (6, True)]
, [("0x1234567890123456789012345678901234567898", 7, 1)]
)
, ( [(0, False), (5, True)]
, [ ("0x1234567890123456789012345678901234567897", 6, 1)
, ("0x1234567890123456789012345678901234567898", 7, 1)
]
)
, ( [(0, True)]
, [ ("0x1234567890123456789012345678901234567896", 5, 1)
, ("0x1234567890123456789012345678901234567897", 6, 1)
, ("0x1234567890123456789012345678901234567898", 7, 1)
]
)
, ( [(0, True), (1, False)]
, [ ("0x1234567890123456789012345678901234567891", 0, 1)
, ("0x1234567890123456789012345678901234567892", 1, 1)
]
)
, ( [(0, True), (1, False), (3, False)]
, [ ("0x1234567890123456789012345678901234567893", 2, 1)
, ("0x1234567890123456789012345678901234567894", 3, 1)
]
)
, ( [(0, True), (1, False), (3, True)]
, [("0x1234567890123456789012345678901234567895", 4, 1)]
)
, ( [(0, True), (1, False), (3, True), (4, False)]
, [("0x1234567890123456789012345678901234567893", 2, 1)]
)
, ( [(0, True), (1, False), (3, True), (4, True)]
, [("0x1234567890123456789012345678901234567894", 3, 1)]
)
, ( [(0, True), (1, True)]
, [ ("0x1234567890123456789012345678901234567893", 2, 1)
, ("0x1234567890123456789012345678901234567894", 3, 1)
, ("0x1234567890123456789012345678901234567895", 4, 1)
]
)
, ( [(0, True), (1, True), (2, False)]
, [("0x1234567890123456789012345678901234567891", 0, 1)]
)
, ( [(0, True), (1, True), (2, True)]
, [("0x1234567890123456789012345678901234567892", 1, 1)]
)
]
)
timeTranslationIMemExpTest :: Spec
timeTranslationIMemExpTest = do
it "translates time properly" $ do
intermediateCompile contract `shouldBe` intermediateContract
where
contract :: Contract
contract =
makeContract defaultAddressMap
$ "translate(minutes(2), if (obs(bool, O, 0)) within minutes(2) "
++ "then transfer(T, A, B) "
++ "else scale(2, 2, transfer(T, A, B)))"
intermediateContract :: IntermediateContract
intermediateContract =
IntermediateContract parties transfers memExps activateMap marginRefundMap
parties = [Bound oneAddr, Bound twoAddr]
transfers =
[ TransferCall { maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 120
, tokenAddress = tokAddr
, from = 0
, to = 1
, memExpPath = [(0, True)]
}
, TransferCall { maxAmount = 2
, amount = MultExp (Lit (IntVal 1)) (Lit (IntVal 2))
, delay = 120
, tokenAddress = tokAddr
, from = 0
, to = 1
, memExpPath = [(0, False)]
}
]
memExps = [IMemExp 120 240 0 (Lit (Observable OBool obsAddr "0"))]
activateMap = Map.fromList [((tokAddr, 0), 2)]
marginRefundMap = Map.fromList [([(0, True)], [(tokAddr, 0, 1)])]
zeroContractCodeTest :: Spec
zeroContractCodeTest = do
it "translates zero contracts into no TCs" $ do
intermediateCompile Zero `shouldBe` emptyContract
it "translates an if-within that contains a zero contract" $ do
intermediateCompile contract `shouldBe` intermediateContract
where
contract :: Contract
contract =
makeContract defaultAddressMap
$ "if obs(bool, O, 0) within seconds(10) "
++ "then transfer(T, A, B) else zero"
intermediateContract :: IntermediateContract
intermediateContract =
IntermediateContract parties transfers memExps activateMap marginRefundMap
parties = [Bound oneAddr, Bound twoAddr]
transfers =
[ TransferCall { maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = tokAddr
, from = 0
, to = 1
, memExpPath = [(0, True)]
}
]
memExps =
[ IMemExp { _IMemExpBegin = 0
, _IMemExpEnd = 10
, _IMemExpIdent = 0
, _IMemExp = Lit (Observable OBool obsAddr "0")
}
]
activateMap = Map.fromList [((tokAddr, 0), 1)]
marginRefundMap = Map.fromList [([(0, False)], [(tokAddr, 0, 1)])]
basicTransferTest :: Spec
basicTransferTest = do
it "compiles a basic transfer" $ do
intermediateCompile transfer `shouldBe` transferIC
where
transfer :: Contract
transfer = Transfer { tokenAddress_ = tokAddr
, to_ = Bound oneAddr
, from_ = Bound twoAddr
}
transferIC :: IntermediateContract
transferIC = IntermediateContract
[Bound twoAddr, Bound oneAddr]
[ TransferCall { maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = tokAddr
, to = 1
, from = 0
, memExpPath = []
}
]
[]
(Map.fromList [((tokAddr, 0), 1)])
Map.empty
| null | https://raw.githubusercontent.com/etoroxlabs/lira/33fae6d37c5467d0a59ab9e9759636f2468b3653/test/IntermediateCompilerTest.hs | haskell |
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
Test that the activateMap function returns a correct map given a function | MIT License
Copyright ( c ) 2019 eToroX Labs
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
module IntermediateCompilerTest
( tests
)
where
import qualified Data.Map.Strict as Map
import Lira.Contract
import Lira.Contract.Intermediate
import Lira.Contract.Parser
import Lira.Backends.IntermediateCompiler
import LiraParserTest hiding (tests)
import LiraTestHelpers
import Test.Hspec
-- | Top-level test suite: runs every intermediate-compiler spec defined in
-- this module.
tests :: Spec
tests = do
  canonicalNestedIfWithinTest
  activateMapSimple
  marginRefundMapSimple
  activateMapCanonicalIw
  refundMapCanonicalIw
  basicTransferTest
  timeTranslationIMemExpTest
  zeroContractCodeTest
canonicalNestedIfWithinTest :: Spec
canonicalNestedIfWithinTest = do
it "canonical nested if-within" $ do
intermediateCompile (parse' canonical_iw_source)
`shouldBe` intermediateContract
where
intermediateContract =
IntermediateContract parties transfers memExps activateMap marginRefundMap
parties =
[ Bound "0x1234567890123456789012345678901234567891"
, Bound "0x1234567890123456789012345678901234567892"
, Bound "0x1234567890123456789012345678901234567893"
, Bound "0x1234567890123456789012345678901234567894"
, Bound "0x1234567890123456789012345678901234567895"
, Bound "0x1234567890123456789012345678901234567896"
, Bound "0x1234567890123456789012345678901234567897"
, Bound "0x1234567890123456789012345678901234567898"
]
transfers =
[ TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567891"
, from = 0
, to = 0
, memExpPath = [(0, True), (1, True), (2, True)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567892"
, from = 1
, to = 1
, memExpPath = [(0, True), (1, True), (2, False)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567893"
, from = 2
, to = 2
, memExpPath = [(0, True), (1, False), (3, True), (4, True)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567894"
, from = 3
, to = 3
, memExpPath = [(0, True), (1, False), (3, True), (4, False)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567895"
, from = 4
, to = 4
, memExpPath = [(0, True), (1, False), (3, False)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567896"
, from = 5
, to = 5
, memExpPath = [(0, False), (5, True)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567897"
, from = 6
, to = 6
, memExpPath = [(0, False), (5, False), (6, True)]
}
, TransferCall
{ maxAmount = 1
, amount = Lit (IntVal 1)
, delay = 0
, tokenAddress = "0x1234567890123456789012345678901234567898"
, from = 7
, to = 7
, memExpPath = [(0, False), (5, False), (6, False)]
}
]
memExps =
[ IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 1
, _IMemExpIdent = 0
, _IMemExp = GtExp (MultExp (Lit (IntVal 1)) (Lit (IntVal 1)))
(Lit (IntVal 1))
}
, IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 2
, _IMemExpIdent = 1
, _IMemExp = GtExp (MultExp (Lit (IntVal 2)) (Lit (IntVal 2)))
(Lit (IntVal 2))
}
, IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 3
, _IMemExpIdent = 2
, _IMemExp = GtExp (MultExp (Lit (IntVal 3)) (Lit (IntVal 3)))
(Lit (IntVal 3))
}
, IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 4
, _IMemExpIdent = 3
, _IMemExp = GtExp (MultExp (Lit (IntVal 4)) (Lit (IntVal 4)))
(Lit (IntVal 4))
}
, IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 5
, _IMemExpIdent = 4
, _IMemExp = GtExp (MultExp (Lit (IntVal 5)) (Lit (IntVal 5)))
(Lit (IntVal 5))
}
, IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 6
, _IMemExpIdent = 5
, _IMemExp = GtExp (MultExp (Lit (IntVal 6)) (Lit (IntVal 6)))
(Lit (IntVal 6))
}
, IMemExp
{ _IMemExpBegin = 0
, _IMemExpEnd = 7
, _IMemExpIdent = 6
, _IMemExp = GtExp (MultExp (Lit (IntVal 7)) (Lit (IntVal 7)))
(Lit (IntVal 7))
}
]
activateMap = Map.fromList
[ (("0x1234567890123456789012345678901234567891", 0), 1)
, (("0x1234567890123456789012345678901234567892", 1), 1)
, (("0x1234567890123456789012345678901234567893", 2), 1)
, (("0x1234567890123456789012345678901234567894", 3), 1)
, (("0x1234567890123456789012345678901234567895", 4), 1)
, (("0x1234567890123456789012345678901234567896", 5), 1)
, (("0x1234567890123456789012345678901234567897", 6), 1)
, (("0x1234567890123456789012345678901234567898", 7), 1)
]
marginRefundMap = Map.fromList
[ ( [(0, False)]
, [ ("0x1234567890123456789012345678901234567891", 0, 1)
, ("0x1234567890123456789012345678901234567892", 1, 1)
, ("0x1234567890123456789012345678901234567893", 2, 1)
, ("0x1234567890123456789012345678901234567894", 3, 1)
, ("0x1234567890123456789012345678901234567895", 4, 1)
]
)
, ( [(0, False), (5, False)]
, [("0x1234567890123456789012345678901234567896", 5, 1)]
)
, ( [(0, False), (5, False), (6, False)]
, [("0x1234567890123456789012345678901234567897", 6, 1)]
)
, ( [(0, False), (5, False), (6, True)]
, [("0x1234567890123456789012345678901234567898", 7, 1)]
)
, ( [(0, False), (5, True)]
, [ ("0x1234567890123456789012345678901234567897", 6, 1)
, ("0x1234567890123456789012345678901234567898", 7, 1)
]
)
, ( [(0, True)]
, [ ("0x1234567890123456789012345678901234567896", 5, 1)
, ("0x1234567890123456789012345678901234567897", 6, 1)
, ("0x1234567890123456789012345678901234567898", 7, 1)
]
)
, ( [(0, True), (1, False)]
, [ ("0x1234567890123456789012345678901234567891", 0, 1)
, ("0x1234567890123456789012345678901234567892", 1, 1)
]
)
, ( [(0, True), (1, False), (3, False)]
, [ ("0x1234567890123456789012345678901234567893", 2, 1)
, ("0x1234567890123456789012345678901234567894", 3, 1)
]
)
, ( [(0, True), (1, False), (3, True)]
, [("0x1234567890123456789012345678901234567895", 4, 1)]
)
, ( [(0, True), (1, False), (3, True), (4, False)]
, [("0x1234567890123456789012345678901234567893", 2, 1)]
)
, ( [(0, True), (1, False), (3, True), (4, True)]
, [("0x1234567890123456789012345678901234567894", 3, 1)]
)
, ( [(0, True), (1, True)]
, [ ("0x1234567890123456789012345678901234567893", 2, 1)
, ("0x1234567890123456789012345678901234567894", 3, 1)
, ("0x1234567890123456789012345678901234567895", 4, 1)
]
)
, ( [(0, True), (1, True), (2, False)]
, [("0x1234567890123456789012345678901234567891", 0, 1)]
)
, ( [(0, True), (1, True), (2, True)]
, [("0x1234567890123456789012345678901234567892", 1, 1)]
)
]
activateMapSimple :: Spec
activateMapSimple = do
it "activateMapSimple" $ do
activateMap (intermediateCompile (parse' src)) `shouldBe` activateMap'
where
src
= "both( if true within seconds(1) then scale(1, 1, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) else scale(7, 7, transfer(0xdddddddddddddddddddddddddddddddddddddddd,0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee,0xffffffffffffffffffffffffffffffffffffffff)), if true within seconds(2) then scale(17, 17, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) else if true within seconds(3) then scale(53, 53, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) else scale(101, 101, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) )"
activateMap' =
(Map.fromList
[ (("0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0), 102)
, (("0xdddddddddddddddddddddddddddddddddddddddd", 2), 7)
]
)
marginRefundMapSimple :: Spec
marginRefundMapSimple = do
it "marginRefundMapSimple" $ do
marginRefundMap (intermediateCompile (parse' src)) `shouldBe` refundMap
where
src
= "both( if true within seconds(1) then scale(1, 1, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) else scale(7, 7, transfer(0xdddddddddddddddddddddddddddddddddddddddd,0xeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee,0xffffffffffffffffffffffffffffffffffffffff)), if true within seconds(2) then scale(17, 17, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) else if true within seconds(3) then scale(53, 53, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) else scale(101, 101, transfer(0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa,0xbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb,0xcccccccccccccccccccccccccccccccccccccccc)) )"
refundMap =
(Map.fromList
[ ([(0, False)], [("0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0, 1)])
, ([(0, True)] , [("0xdddddddddddddddddddddddddddddddddddddddd", 2, 7)])
, ( [(1, False), (2, True)]
, [("0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0, 48)]
)
, ([(1, True)], [("0xaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa", 0, 84)])
]
)
Test that the activateMap works for the canonical IW source code
-- | The activate map of the canonical if-within contract requires a margin
-- of 1 for each of the eight (token address, party index) pairs.
activateMapCanonicalIw :: Spec
activateMapCanonicalIw = do
  it "activateMapCanonicalIw" $ do
    activateMap (intermediateCompile (parse' canonical_iw_source))
      `shouldBe` activateMap'
 where
  activateMap' =
    (Map.fromList
      [ (("0x1234567890123456789012345678901234567891", 0), 1)
      , (("0x1234567890123456789012345678901234567892", 1), 1)
      , (("0x1234567890123456789012345678901234567893", 2), 1)
      , (("0x1234567890123456789012345678901234567894", 3), 1)
      , (("0x1234567890123456789012345678901234567895", 4), 1)
      , (("0x1234567890123456789012345678901234567896", 5), 1)
      , (("0x1234567890123456789012345678901234567897", 6), 1)
      , (("0x1234567890123456789012345678901234567898", 7), 1)
      ]
    )
refundMapCanonicalIw :: Spec
refundMapCanonicalIw = do
it "refundMapCanonicalIw" $ do
marginRefundMap (intermediateCompile (parse' canonical_iw_source))
`shouldBe` activateMap
where
activateMap =
(Map.fromList
[ ( [(0, False)]
, [ ("0x1234567890123456789012345678901234567891", 0, 1)
, ("0x1234567890123456789012345678901234567892", 1, 1)
, ("0x1234567890123456789012345678901234567893", 2, 1)
, ("0x1234567890123456789012345678901234567894", 3, 1)
, ("0x1234567890123456789012345678901234567895", 4, 1)
]
)
, ( [(0, False), (5, False)]
, [("0x1234567890123456789012345678901234567896", 5, 1)]
)
, ( [(0, False), (5, False), (6, False)]
, [("0x1234567890123456789012345678901234567897", 6, 1)]
)
, ( [(0, False), (5, False), (6, True)]
, [("0x1234567890123456789012345678901234567898", 7, 1)]
)
, ( [(0, False), (5, True)]
, [ ("0x1234567890123456789012345678901234567897", 6, 1)
, ("0x1234567890123456789012345678901234567898", 7, 1)
]
)
, ( [(0, True)]
, [ ("0x1234567890123456789012345678901234567896", 5, 1)
, ("0x1234567890123456789012345678901234567897", 6, 1)
, ("0x1234567890123456789012345678901234567898", 7, 1)
]
)
, ( [(0, True), (1, False)]
, [ ("0x1234567890123456789012345678901234567891", 0, 1)
, ("0x1234567890123456789012345678901234567892", 1, 1)
]
)
, ( [(0, True), (1, False), (3, False)]
, [ ("0x1234567890123456789012345678901234567893", 2, 1)
, ("0x1234567890123456789012345678901234567894", 3, 1)
]
)
, ( [(0, True), (1, False), (3, True)]
, [("0x1234567890123456789012345678901234567895", 4, 1)]
)
, ( [(0, True), (1, False), (3, True), (4, False)]
, [("0x1234567890123456789012345678901234567893", 2, 1)]
)
, ( [(0, True), (1, False), (3, True), (4, True)]
, [("0x1234567890123456789012345678901234567894", 3, 1)]
)
, ( [(0, True), (1, True)]
, [ ("0x1234567890123456789012345678901234567893", 2, 1)
, ("0x1234567890123456789012345678901234567894", 3, 1)
, ("0x1234567890123456789012345678901234567895", 4, 1)
]
)
, ( [(0, True), (1, True), (2, False)]
, [("0x1234567890123456789012345678901234567891", 0, 1)]
)
, ( [(0, True), (1, True), (2, True)]
, [("0x1234567890123456789012345678901234567892", 1, 1)]
)
]
)
-- | 'translate(minutes(2), ...)' shifts the transfer delays and the
-- memory-expression observation window by 120 seconds.
timeTranslationIMemExpTest :: Spec
timeTranslationIMemExpTest = do
  it "translates time properly" $ do
    intermediateCompile contract `shouldBe` intermediateContract
 where
  contract :: Contract
  contract =
    makeContract defaultAddressMap
      $ "translate(minutes(2), if (obs(bool, O, 0)) within minutes(2) "
      ++ "then transfer(T, A, B) "
      ++ "else scale(2, 2, transfer(T, A, B)))"
  intermediateContract :: IntermediateContract
  intermediateContract =
    IntermediateContract parties transfers memExps activateMap marginRefundMap
  parties = [Bound oneAddr, Bound twoAddr]
  -- Both branches become transfer calls delayed by the translated 120s; the
  -- branch actually taken is selected by the memory-expression path.
  transfers =
    [ TransferCall { maxAmount = 1
                   , amount = Lit (IntVal 1)
                   , delay = 120
                   , tokenAddress = tokAddr
                   , from = 0
                   , to = 1
                   , memExpPath = [(0, True)]
                   }
    , TransferCall { maxAmount = 2
                   , amount = MultExp (Lit (IntVal 1)) (Lit (IntVal 2))
                   , delay = 120
                   , tokenAddress = tokAddr
                   , from = 0
                   , to = 1
                   , memExpPath = [(0, False)]
                   }
    ]
  -- Observation window shifted from [0,120) to [120,240).
  memExps = [IMemExp 120 240 0 (Lit (Observable OBool obsAddr "0"))]
  -- Worst-case margin is the scaled (else) branch: 2.
  activateMap = Map.fromList [((tokAddr, 0), 2)]
  -- If the condition turns true only amount 1 is needed, so 1 is refundable.
  marginRefundMap = Map.fromList [([(0, True)], [(tokAddr, 0, 1)])]
-- | 'Zero' compiles to the empty contract, and a zero branch inside an
-- if-within simply produces no transfer call for that branch.
zeroContractCodeTest :: Spec
zeroContractCodeTest = do
  it "translates zero contracts into no TCs" $ do
    intermediateCompile Zero `shouldBe` emptyContract
  it "translates an if-within that contains a zero contract" $ do
    intermediateCompile contract `shouldBe` intermediateContract
 where
  contract :: Contract
  contract =
    makeContract defaultAddressMap
      $ "if obs(bool, O, 0) within seconds(10) "
      ++ "then transfer(T, A, B) else zero"
  intermediateContract :: IntermediateContract
  intermediateContract =
    IntermediateContract parties transfers memExps activateMap marginRefundMap
  parties = [Bound oneAddr, Bound twoAddr]
  -- Only the then-branch yields a transfer call; the zero else-branch
  -- contributes nothing.
  transfers =
    [ TransferCall { maxAmount = 1
                   , amount = Lit (IntVal 1)
                   , delay = 0
                   , tokenAddress = tokAddr
                   , from = 0
                   , to = 1
                   , memExpPath = [(0, True)]
                   }
    ]
  memExps =
    [ IMemExp { _IMemExpBegin = 0
              , _IMemExpEnd = 10
              , _IMemExpIdent = 0
              , _IMemExp = Lit (Observable OBool obsAddr "0")
              }
    ]
  activateMap = Map.fromList [((tokAddr, 0), 1)]
  -- If the condition settles to False the whole margin is refundable.
  marginRefundMap = Map.fromList [([(0, False)], [(tokAddr, 0, 1)])]
-- | A bare 'Transfer' compiles to a single unconditional transfer call.
basicTransferTest :: Spec
basicTransferTest =
  it "compiles a basic transfer" $
    intermediateCompile sourceContract `shouldBe` expectedIC
 where
  -- Input: a direct token transfer between two bound parties.
  sourceContract :: Contract
  sourceContract = Transfer { tokenAddress_ = tokAddr
                            , to_ = Bound oneAddr
                            , from_ = Bound twoAddr
                            }
  -- Expected: one transfer call, no memory expressions, margin 1 for
  -- (tokAddr, party 0).
  expectedIC :: IntermediateContract
  expectedIC = IntermediateContract parties calls [] margins Map.empty
  parties = [Bound twoAddr, Bound oneAddr]
  calls =
    [ TransferCall { maxAmount = 1
                   , amount = Lit (IntVal 1)
                   , delay = 0
                   , tokenAddress = tokAddr
                   , to = 1
                   , from = 0
                   , memExpPath = []
                   }
    ]
  margins = Map.fromList [((tokAddr, 0), 1)]
|
2592bca7964e5bcb31daf9f7c662fa3d99b522d2cc12279a4b6f505a77dcfd35 | kokuyouwind/haskell-exercises | Chapter5.hs | module Src.Chapter5
(
applyPair
, applyN
, squares
, fromBinary
, tails
, powerSet
, pointed1
, pointFree1
, pointed2
, pointFree2
, pointed3
, pointFree3
, pointed4
, pointFree4
, pointed5
, pointFree5
, church
, unchurch
, csucc
, cadd
, cmul
, cpred
, cTrue
, cFalse
, churchb
, unchurchb
, cnot
, cand
, cor
, cif
, cis0
, ceven
, cevennot0
, clte2
) where
-- (1.1) Apply a function to both components of a homogeneous pair.
applyPair :: (a -> b) -> (a, a) -> (b, b)
applyPair f (x, y) = (f x, f y)

-- (1.2) Compose a function with itself @n@ times; @applyN f 0 == id@.
-- For n <= 0 the empty replicate makes this the identity (total function).
applyN :: (a -> a) -> Int -> a -> a
applyN f n = foldr (.) id (replicate n f)

-- (1.3) Squares of the first @n@ positive integers: @squares 3 == [1,4,9]@.
-- NOTE(review): the exercise statement is not visible here; confirm whether
-- the intended range is [1..n] or [0..n-1].
squares :: Int -> [Int]
squares n = [ k * k | k <- [1 .. n] ]

-- (2.1) Interpret a list of binary digits as an Int, most significant digit
-- first: @fromBinary [1,0,1] == 5@.
-- NOTE(review): assumes MSB-first digit order — confirm against the exercise.
fromBinary :: [Int] -> Int
fromBinary = foldl (\acc d -> 2 * acc + d) 0

-- (2.2) Every suffix of a list, longest first, ending with the empty list:
-- @tails [1,2] == [[1,2],[2],[]]@ (same contract as 'Data.List.tails').
tails :: [a] -> [[a]]
tails xs = xs : case xs of
  []       -> []
  (_ : ys) -> tails ys

-- (2.3) All subsets of a list; relative element order is preserved inside
-- each subset.
powerSet :: [a] -> [[a]]
powerSet []       = [[]]
powerSet (x : xs) = let rest = powerSet xs in rest ++ map (x :) rest
-- (3.1)
pointed1 :: [Int] -> [Int]
pointed1 xs = map negate (map (+10) (filter (>0) xs))

-- | Point-free equivalent of 'pointed1'.
pointFree1 :: [Int] -> [Int]
pointFree1 = map negate . map (+10) . filter (>0)

-- (3.2)
pointed2 :: [[Int]] -> [Int]
pointed2 xss = scanl (+) 0 (map (foldl (*) 1) (filter (\xs -> length xs >= 2) xss))

-- | Point-free equivalent of 'pointed2'.
pointFree2 :: [[Int]] -> [Int]
pointFree2 = scanl (+) 0 . map (foldl (*) 1) . filter ((>= 2) . length)

-- (3.3)
pointed3 :: [a -> a] -> a -> a
pointed3 fs x = foldl (\x f -> f x) x fs

-- | Point-free equivalent of 'pointed3': thread the value through the list
-- of functions ('flip ($)' applies a function to the accumulator).
pointFree3 :: [a -> a] -> a -> a
pointFree3 = flip (foldl (flip ($)))

-- (3.4)
pointed4 :: (a -> [b]) -> [a] -> [b]
pointed4 f xs = concat (map f xs)

-- | Point-free equivalent of 'pointed4' (i.e. 'concatMap').
pointFree4 :: (a -> [b]) -> [a] -> [b]
pointFree4 = (concat .) . map

-- (3.5)
pointed5 :: (Int -> [Int]) -> [Int] -> [Int]
pointed5 f xs = foldl (\ys g -> g ys) xs (replicate 3 (\zs -> concat (map f zs)))

-- | Point-free equivalent of 'pointed5': build the three copies of the
-- concat-map step, then fold the input list through them.
pointFree5 :: (Int -> [Int]) -> [Int] -> [Int]
pointFree5 = flip (foldl (flip ($))) . replicate 3 . (concat .) . map
-- (4.1.1) Encode a non-negative Int as a Church numeral: n-fold application
-- of f to z.
church n f z = iterate f z !! n
-- (4.1.2) Decode a Church numeral by counting applications of (+1) over 0.
unchurch c = c (+ 1) 0
-- (4.1.3) Successor: one extra application of f.
csucc c f z = f (c f z)
-- (4.1.4) Addition: apply f c2 times, then c1 more times.
cadd c1 c2 f z = c1 f (c2 f z)
-- (4.1.5) Multiplication: iterate the "apply f c2 times" step c1 times.
cmul c1 c2 f z = c1 (c2 f) z
-- (4.1.6) Predecessor with @cpred 0 == 0@, via the classic continuation
-- trick: each step turns "discard f, use z" into "apply f one time fewer".
cpred c f z = c (\g h -> h (g f)) (\_ -> z) (\x -> x)
-- (4.2 pre-defined) Church booleans are two-way selectors.
cTrue :: t -> t -> t
cTrue = \t f -> t
cFalse :: t -> t -> t
cFalse = \t f -> f
-- (4.2.1) Encode a Bool as a Church boolean.
churchb b = \t f -> if b then t else f
-- (4.2.2) Decode by selecting between the two Bool literals.
unchurchb cb = cb True False
-- (4.2.3) Negation swaps the two branches.
cnot cb = \t f -> cb f t
-- (4.2.4) Conjunction: when cb1 is true defer to cb2, otherwise false.
cand cb1 cb2 = \t f -> cb1 (cb2 t f) f
-- (4.2.5) Disjunction: when cb1 is true pick t, otherwise defer to cb2.
cor cb1 cb2 = \t f -> cb1 t (cb2 t f)
-- (4.3 pre-defined)
cif :: (Bool -> Bool -> Bool) -> t -> t -> t
cif cb t f = if unchurchb cb then t else f
-- (4.3.1) Zero test: a single application of the step flips to false, so
-- only the numeral 0 keeps the initial cTrue.
cis0 c = c (\_ -> cFalse) cTrue
-- (4.3.2) Parity: negate the truth value once per application, starting
-- from true (0 is even).
ceven c = c cnot cTrue
-- (4.3.3) Even and non-zero.
cevennot0 c = cand (ceven c) (cnot (cis0 c))
-- (4.3.4) c <= 2: drop c elements from trues-then-falses and inspect the
-- head (indices 0, 1, 2 are cTrue, everything after is cFalse).
clte2 c = head (c tail (cTrue : cTrue : cTrue : repeat cFalse))
| null | https://raw.githubusercontent.com/kokuyouwind/haskell-exercises/dbc68ba477ce525c75fe7c31301f2b632da3b08a/Src/Chapter5.hs | haskell | Rewrite HERE!
Rewrite HERE!
Rewrite HERE!
Rewrite HERE!
Rewrite HERE!
Rewrite HERE!
Rewrite HERE!
Rewrite HERE!
Rewrite HERE!
Rewrite HERE!
Rewrite HERE!
(4.1.1)
Rewrite HERE!
Rewrite HERE!
(4.1.3)
Rewrite HERE!
Rewrite HERE!
Rewrite HERE!
(4.1.6)
Rewrite HERE!
Rewrite HERE!
Rewrite HERE!
Rewrite HERE!
(4.2.4)
Rewrite HERE!
Rewrite HERE!
(4.3.1)
Rewrite HERE!
Rewrite HERE!
(4.3.3)
Rewrite HERE!
(4.3.4)
Rewrite HERE! | module Src.Chapter5
(
applyPair
, applyN
, squares
, fromBinary
, tails
, powerSet
, pointed1
, pointFree1
, pointed2
, pointFree2
, pointed3
, pointFree3
, pointed4
, pointFree4
, pointed5
, pointFree5
, church
, unchurch
, csucc
, cadd
, cmul
, cpred
, cTrue
, cFalse
, churchb
, unchurchb
, cnot
, cand
, cor
, cif
, cis0
, ceven
, cevennot0
, clte2
) where
( 1.1 )
applyPair :: (a -> b) -> (a, a) -> (b, b)
( 1.2 )
applyN :: (a -> a) -> Int -> a -> a
( 1.3 )
squares :: Int -> [Int]
( 2.1 )
fromBinary :: [Int] -> Int
( 2.2 )
tails :: [a] -> [[a]]
( 2.3 )
powerSet :: [a] -> [[a]]
( 3.1 )
pointed1 :: [Int] -> [Int]
pointed1 xs = map negate (map (+10) (filter (>0) xs))
pointFree1 :: [Int] -> [Int]
( 3.2 )
pointed2 :: [[Int]] -> [Int]
pointed2 xss = scanl (+) 0 (map (foldl (*) 1) (filter (\xs -> length xs >= 2) xss))
pointFree2 :: [[Int]] -> [Int]
( 3.3 )
pointed3 :: [a -> a] -> a -> a
pointed3 fs x = foldl (\x f -> f x) x fs
pointFree3 :: [a -> a] -> a -> a
( 3.4 )
pointed4 :: (a -> [b]) -> [a] -> [b]
pointed4 f xs = concat (map f xs)
pointFree4 :: (a -> [b]) -> [a] -> [b]
( 3.5 )
pointed5 :: (Int -> [Int]) -> [Int] -> [Int]
pointed5 f xs = foldl (\ys g -> g ys) xs (replicate 3 (\zs -> concat (map f zs)))
pointFree5 :: (Int -> [Int]) -> [Int] -> [Int]
( 4.1.2 )
( 4.1.4 )
( 4.1.5 )
( 4.2 pre - defined )
cTrue :: t -> t -> t
cTrue = \t f -> t
cFalse :: t -> t -> t
cFalse = \t f -> f
( 4.2.1 )
( 4.2.2 )
( 4.2.3 )
( 4.2.5 )
( 4.3 pre - defined )
cif :: (Bool -> Bool -> Bool) -> t -> t -> t
cif cb t f = if unchurchb cb then t else f
( 4.3.2 )
|
502268dd49d74396fcd7ee2b07e8951435b8665b40f6ef6b664db3d97c4a832d | hopv/MoCHi | print.mli | open Syntax
type config =
{ty : bool; (** print types of arguments *)
as_ocaml : bool; (** print terms in OCaml syntax *)
for_dmochi : bool; (** print terms for dmochi when as_ocaml=true *)
top : bool; (** print let/type as in top-level *)
unused : bool; (** print unused arguments *)
depth : int} (** max depth of printing terms *)
val config_default : config ref
val set_as_ocaml : unit -> unit
val set_unused : unit -> unit
val set_depth : int -> unit
val location : Format.formatter -> Location.t -> unit
val typ : Format.formatter -> typ -> unit
val id : Format.formatter -> id -> unit
val id_typ : Format.formatter -> id -> unit
val pattern : Format.formatter -> pattern -> unit
val const : Format.formatter -> const -> unit
val desc : Format.formatter -> desc -> unit
val term : Format.formatter -> term -> unit
val term_top : Format.formatter -> term -> unit
val term' : Format.formatter -> term -> unit
val term_typ : Format.formatter -> term -> unit
val term_typ_top : Format.formatter -> term -> unit
val defs : Format.formatter -> (id * (id list * term)) list -> unit
val constr : Format.formatter -> term -> unit
val attr : Format.formatter -> attr list -> unit
val decls : Format.formatter -> declaration list -> unit
val as_ocaml : Format.formatter -> term -> unit
val as_ocaml_typ : Format.formatter -> term -> unit
val term_custom : config -> Format.formatter -> term -> unit
val int : Format.formatter -> int -> unit
val float : Format.formatter -> float -> unit
val char : Format.formatter -> char -> unit
val bool : Format.formatter -> bool -> unit
val string : Format.formatter -> string -> unit
val option : (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a option -> unit
val pair : (Format.formatter -> 'a -> unit) -> (Format.formatter -> 'b -> unit) -> Format.formatter -> ('a * 'b) -> unit
val ( * ) : (Format.formatter -> 'a -> unit) -> (Format.formatter -> 'b -> unit) -> Format.formatter -> ('a * 'b) -> unit
val triple : (Format.formatter -> 'a -> unit) -> (Format.formatter -> 'b -> unit) -> (Format.formatter -> 'c -> unit) -> Format.formatter -> ('a * 'b * 'c) -> unit
val list : (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a list -> unit
val ignore : string -> Format.formatter -> 'a -> unit
val __ : Format.formatter -> 'a -> unit
val string_of_const : const -> string
val string_of_binop : binop -> string
val string_of_typ : typ -> string
| null | https://raw.githubusercontent.com/hopv/MoCHi/b0ac0d626d64b1e3c779d8e98cb232121cc3196a/src/print.mli | ocaml | * print types of arguments
* print terms in OCaml syntax
* print terms for dmochi when as_ocaml=true
* print let/type as in top-level
* print unused arguments
* max depth of printing terms | open Syntax
type config =
val config_default : config ref
val set_as_ocaml : unit -> unit
val set_unused : unit -> unit
val set_depth : int -> unit
val location : Format.formatter -> Location.t -> unit
val typ : Format.formatter -> typ -> unit
val id : Format.formatter -> id -> unit
val id_typ : Format.formatter -> id -> unit
val pattern : Format.formatter -> pattern -> unit
val const : Format.formatter -> const -> unit
val desc : Format.formatter -> desc -> unit
val term : Format.formatter -> term -> unit
val term_top : Format.formatter -> term -> unit
val term' : Format.formatter -> term -> unit
val term_typ : Format.formatter -> term -> unit
val term_typ_top : Format.formatter -> term -> unit
val defs : Format.formatter -> (id * (id list * term)) list -> unit
val constr : Format.formatter -> term -> unit
val attr : Format.formatter -> attr list -> unit
val decls : Format.formatter -> declaration list -> unit
val as_ocaml : Format.formatter -> term -> unit
val as_ocaml_typ : Format.formatter -> term -> unit
val term_custom : config -> Format.formatter -> term -> unit
val int : Format.formatter -> int -> unit
val float : Format.formatter -> float -> unit
val char : Format.formatter -> char -> unit
val bool : Format.formatter -> bool -> unit
val string : Format.formatter -> string -> unit
val option : (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a option -> unit
val pair : (Format.formatter -> 'a -> unit) -> (Format.formatter -> 'b -> unit) -> Format.formatter -> ('a * 'b) -> unit
val ( * ) : (Format.formatter -> 'a -> unit) -> (Format.formatter -> 'b -> unit) -> Format.formatter -> ('a * 'b) -> unit
val triple : (Format.formatter -> 'a -> unit) -> (Format.formatter -> 'b -> unit) -> (Format.formatter -> 'c -> unit) -> Format.formatter -> ('a * 'b * 'c) -> unit
val list : (Format.formatter -> 'a -> unit) -> Format.formatter -> 'a list -> unit
val ignore : string -> Format.formatter -> 'a -> unit
val __ : Format.formatter -> 'a -> unit
val string_of_const : const -> string
val string_of_binop : binop -> string
val string_of_typ : typ -> string
|
ef4db668ea56d504a29d378114240e52629f4af9eeca6cd568dad98bd237191f | haskus/packages | View.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE BangPatterns #-}
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE GADTs #-}
# LANGUAGE DataKinds #
-- | A view (e.g. a slice) of a buffer
--
-- Suppose we have a big buffer B.
--
-- We can have buffer views on B, say vb1 and vb2.
--
-- B <----- vb1
-- ^------- vb2
--
-- These views don't duplicate B's contents and they keep B alive.
-- If the views are much smaller than B, it may not be what we want: a lot of
-- space is wasted and we would better duplicate B's data required by the views
-- and free B.
--
To support this , we can use " weak buffer views " , say wvb1 and wvb2 .
--
-- B <~~~~~ wvb1
^~~~~~~~ wvb2
--
-- If/when B is collected, new buffers are created from it for the views:
--
-- B1 <----- wvb1
B2 < ----- wvb2
--
-- We can also create "weak view views", say wvv1 and wvv2:
--
-- B <~~~~~ wvb1 <~~~~~ wvv1
-- ^~~~~~~~~ wvv2
--
-- If/when B is collected before wvb1, the sharing is kept while the required
-- contents of B is duplicated:
--
-- B' <---- wvb1 <~~~~~ wvv1
-- ^~~~~~~~~ wvv2
--
-- When wvb1 is collected, we can be in one of the following state depending if
-- B has been collected already or not:
--
-- B <~~~~~~~~~~~~~~~~~ wvv1
-- ^~~~~~~~~~~~~~~~~~~~ wvv2
--
-- B' <~~~~~ wvv1
-- ^~~~~~~~~ wvv2
--
module Haskus.Memory.View
( View (..)
, ViewSource (..)
, ViewPattern (..)
, viewReadWord8
, newBufferView
, newBufferWeakView
, newViewWeakView
, copyBufferWithPattern
, viewToBuffer
, showViewState
, patternSize
, unsafePatternSize
)
where
import Data.IORef
import System.Mem.Weak
import Control.Concurrent
import Haskus.Utils.Monad
import Haskus.Number.Word
import Haskus.Memory.Buffer
-- | The source of a view
--
Weak views are used so that the underlying buffer can be freed by the GC .
-- When it happens and if the view is still alive the contents of the buffer
-- used by the view is copied into a fresh (usually smaller) buffer.
--
-- Weak views can also be used as sources: in this case, when the source
view is GCed , the current view is updated to point to the source of the
-- source.
--
data ViewSource
= SourceBuffer Buffer
-- ^ The source is a buffer. The view keeps the buffer alive
| SourceWeakBuffer (Weak Buffer)
-- ^ The source is a weak buffer. If the buffer is collected, its contents
-- is copied in to a new buffer and the view is updated to use it.
| SourceWeakView (Weak ViewIORef)
-- ^ The source is a weak view. If the source view is collected, the
-- current view is updated to use whatever the source view uses as a
-- source (another view or a buffer).
-- This mechanism makes buffer contents cascade into smaller views while
-- preserving some sharing.
-- | A view on a buffer
newtype View = View ViewIORef
type ViewIORef = IORef (ViewSource,ViewPattern)
-- | A view pattern
data ViewPattern
= PatternFull -- ^ The whole buffer
^ 1D slice
^ Offset of the first cell
, pattern1DSize :: {-# UNPACK #-} !Word -- ^ Number of cells
}
| Pattern2D -- ^ 2D slice
^ Offset of the first line
, pattern2DWidth :: {-# UNPACK #-} !Word -- ^ Width (line size)
, pattern2DHeight :: {-# UNPACK #-} !Word -- ^ Height (number of lines)
^ Stride ( space between two lines )
}
| PatternOn ViewPattern ViewPattern -- ^ Composed pattern
deriving (Show)
-- | Compute an actual offset when used with the given pattern
--
-- Maps a logical cell index of the view onto a physical offset in the
-- underlying source.
patternOffset :: ViewPattern -> Word -> Word
patternOffset pat off = case pat of
  PatternFull -> off
  Pattern1D off2 _sz -> off2+off
  -- Row-major walk: a line occupies (width + stride) cells in the source.
  -- NOTE(review): assumes pattern2DWidth > 0, otherwise `quotRem` divides
  -- by zero — confirm callers never build a zero-width Pattern2D.
  Pattern2D off2 w _h stride -> let (y,x) = off `quotRem` w in off2+y*(w+stride)+x
  -- Composition: resolve through the outer pattern first, then the inner one.
  PatternOn p1 p2 -> patternOffset p2 (patternOffset p1 off)
-- | Compute the effective size occupied by a pattern
--
-- Partial: 'PatternFull' has no intrinsic size (it depends on the buffer),
-- hence the error. Use 'patternSize' when the buffer size is known.
unsafePatternSize :: ViewPattern -> Word
unsafePatternSize = \case
  PatternFull -> error "Don't call unsafePatternSize on PatternFull"
  Pattern1D _off sz -> sz
  Pattern2D _off w h _stride -> w * h
  -- A composed pattern takes the size of its outer layer.
  -- NOTE(review): if p1 is 'PatternFull' this errors even though p2 could
  -- provide a size; 'patternApplyOn' collapses trivial combinations, but
  -- directly-constructed values could still hit this — confirm.
  PatternOn p1 _p2 -> unsafePatternSize p1
-- | Effective number of cells selected by a pattern, given the size of the
-- underlying buffer (only used when the pattern is 'PatternFull').
patternSize :: ViewPattern -> Word -> Word
patternSize PatternFull bufSize = bufSize
patternSize (Pattern1D _ len) _ = len
patternSize (Pattern2D _ width height _) _ = width * height
patternSize (PatternOn outer inner) bufSize =
  patternSize outer (patternSize inner bufSize)
-- | Combine two patterns
--
-- Remove trivial patterns combinations
--
-- @patternApplyOn p1 p2@ applies @p1@ on top of @p2@ (p1's coordinates are
-- relative to p2). Trivial combinations are collapsed instead of producing
-- a 'PatternOn' node.
patternApplyOn :: ViewPattern -> ViewPattern -> ViewPattern
patternApplyOn p1 p2 = case (p1, p2) of
  (PatternFull,p) -> p
  (p,PatternFull) -> p
  -- two 1D slices compose into a single 1D slice: offsets add up and the
  -- size is the outer (smaller) slice's size
  (Pattern1D o1 s1, Pattern1D o2 _s2) -> Pattern1D (o1+o2) s1
  _ -> PatternOn p1 p2
-- | Read a Word8 from a view
--
-- The logical offset is translated through the view pattern before the
-- read is delegated to the underlying buffer (or, recursively, to the
-- source view).
viewReadWord8 :: View -> Word -> IO Word8
viewReadWord8 view off = withValidView view readBuf readBuf readView
 where
  readBuf b pat = bufferReadWord8 b (patternOffset pat off)
  readView v pat = viewReadWord8 v (patternOffset pat off)
-- | Wait for a view to be valid then use one of the 3 passed functions on it
-- depending on its source type (Buffer, WeakBuffer, WeakView).
--
-- A weak source may be momentarily unreachable while its finalizer rebuilds
-- the view (see 'bufferWeakViewFinalier'): in that case we 'yield' and spin
-- until the view's IORef has been updated with the new source.
withValidView
  :: MonadIO m
  => View
  -> (Buffer -> ViewPattern -> m a)  -- ^ used when the source is a strong buffer
  -> (Buffer -> ViewPattern -> m a)  -- ^ used when the source is a live weak buffer
  -> (View -> ViewPattern -> m a)    -- ^ used when the source is a live weak view
  -> m a
withValidView (View ref) fb fwb fwv = go True
 where
  go _firstRun = do
    (src,pat) <- liftIO (readIORef ref)
    let waitForSource = do
          -- the source is gone for now. Some thread must be copying back
          -- to life so we give it some space to run with `yield` and then
          -- we retry
          liftIO yield
          -- TODO: We execute the spin-lock in a thread to avoid locking
          -- the finalizer thread
          -- if firstRun
          --    then forkIO (go False)
          --    else go False
          go False
    case src of
      SourceBuffer b -> fb b pat
      SourceWeakBuffer wb -> liftIO (deRefWeak wb) >>= \case
        Nothing -> waitForSource
        Just b -> fwb b pat
      SourceWeakView wv -> liftIO (deRefWeak wv) >>= \case
        Nothing -> waitForSource
        Just v2 -> fwv (View v2) pat
-- | Create a view on a buffer
--
-- The buffer is strongly referenced: the view keeps it alive.
newBufferView :: MonadIO m => Buffer -> ViewPattern -> m View
newBufferView b pat = do
  ref <- liftIO (newIORef (SourceBuffer b, pat))
  pure (View ref)
-- | Create a weak view on a buffer
--
-- The buffer is weakly referenced and can be GCed. When it happens, its
-- contents is stored into a new buffer.
--
-- You should only use this for views that are much smaller than the original
-- buffer so that the copying cost is balanced by the memory occupation
-- difference.
--
newBufferWeakView :: MonadIO m => Buffer -> ViewPattern -> m View
newBufferWeakView b pat = do
  -- temporarily create a View that non-weakly references the buffer, so the
  -- buffer cannot be collected before the finalizer is installed
  v <- View <$> (liftIO $ newIORef (SourceBuffer b,pat))
  -- assign the weak buffer source to the view
  assignBufferWeakView v b pat
  return v
-- | Retarget a view so that it only weakly references the given buffer.
--
-- A finalizer attached to the buffer duplicates the viewed region when the
-- buffer is collected (see 'bufferWeakViewFinalier').
assignBufferWeakView
  :: MonadIO m
  => View
  -> Buffer
  -> ViewPattern
  -> m ()
assignBufferWeakView (View ref) b pat = do
  -- create a weak reference to the view: the buffer's finalizer must not
  -- keep the view alive itself
  wViewRef <- liftIO $ mkWeakIORef ref (return ())
  -- associate a finalizer to the buffer that will duplicate the viewed part
  -- of the buffer when it is collected
  let finalizer = bufferWeakViewFinalier b pat wViewRef
  wb <- liftIO (mkWeakPtr b (Just finalizer))
  -- update the view to reference the weak buffer
  liftIO (writeIORef ref (SourceWeakBuffer wb,pat))
-- | Finalizer run when a weakly-referenced source buffer is collected.
--
-- If the view is still alive, the bytes selected by the pattern are
-- copied into a fresh buffer (or the original buffer is resurrected
-- when the pattern covers it entirely) and the view's state is
-- rewritten to a strong source with 'PatternFull'.
bufferWeakViewFinalier
   :: Buffer         -- ^ Source buffer
   -> ViewPattern    -- ^ View pattern
   -> Weak ViewIORef -- ^ Weak IORef of the view
   -> IO ()
bufferWeakViewFinalier b pat wViewRef = deRefWeak wViewRef >>= \case
   Nothing -> return () -- the view is dead
   Just viewRef -> do
      bsz <- bufferSize b
      newSrc <- case pat of
         -- this is stupid (the view covers the whole buffer) but let's resurrect b
         PatternFull -> return (SourceBuffer b)
         Pattern1D 0 psz | psz == bsz -> return (SourceBuffer b)
         Pattern2D 0 w h 0 | w*h == bsz -> return (SourceBuffer b)
         -- NOTE(review): this validity check only fires for patterns that
         -- missed the full-coverage guard just above -- confirm intended
         Pattern2D _ w h _ | w == 0 || h == 0 -> error "Invalid Pattern2D: width or height set to 0"
         _ -> do
            -- we allocate a new buffer and copy the contents in it
            b' <- copyBufferWithPattern b pat
            return (SourceBuffer b')
      -- update the view IORef: the new source is fully covered
      writeIORef viewRef (newSrc,PatternFull)
-- | Create a weak view on a view.
--
-- The new view initially shares the source view's own source with the
-- two patterns composed; the source view is then referenced weakly so
-- that it can be collected independently.
newViewWeakView :: MonadIO m => View -> ViewPattern -> m View
newViewWeakView src@(View srcRef) pat = do
   -- create a new view. For now it only combines the two patterns
   -- and uses the same source.
   v <- liftIO $ do
      (srcSrc,srcPat) <- readIORef srcRef
      View <$> newIORef (srcSrc, pat `patternApplyOn` srcPat)
   -- assign it the weak view source
   assignViewWeakView v src pat
   return v
-- | Make the first view weakly reference the second one as its source.
--
-- A finalizer attached to the source view's IORef repoints the first
-- view to whatever the source was using when the source is collected.
assignViewWeakView :: MonadIO m => View -> View -> ViewPattern -> m ()
assignViewWeakView (View ref) (View srcRef) pat = do
   -- create a weak reference on the current view (its IORef in fact)
   weakView <- liftIO $ mkWeakIORef ref (return ())
   -- create a finalizer for srcRef. We can reference srcRef directly but not
   -- the current view which must be accessed through its weak reference
   -- "weakView"
   let finalizer = viewWeakViewFinalizer weakView srcRef pat
   -- the finalizer is attached to the IORef of the source view
   wSrcRef <- liftIO $ mkWeakIORef srcRef finalizer
   -- we update the view
   liftIO (writeIORef ref (SourceWeakView wSrcRef,pat))
   -- we don't want the finalizer to run before we write the IORef
   -- FIXME: liftIO (touch srcRef)
-- | Finalizer run when a weakly-referenced source *view* is collected.
--
-- If the dependent view is still alive, it is re-attached directly to
-- the collected view's own source (buffer, weak buffer or weak view),
-- composing the patterns so the selected bytes stay the same.
viewWeakViewFinalizer :: Weak ViewIORef -> ViewIORef -> ViewPattern -> IO ()
viewWeakViewFinalizer weakView srcRef pat = deRefWeak weakView >>= \case
   Nothing -> return () -- the view is dead
   Just viewRef -> do
      let v = View viewRef
      -- wait for the source to be valid and then handle it
      withValidView (View srcRef)
         (\srcB srcPat -> do
            let newPat = pat `patternApplyOn` srcPat
            assignBufferWeakView v srcB newPat
         )
         (\srcWB srcPat -> do
            let newPat = pat `patternApplyOn` srcPat
            assignBufferWeakView v srcWB newPat
         )
         (\srcV srcPat -> do
            let newPat = pat `patternApplyOn` srcPat
            assignViewWeakView v srcV newPat
         )
-- | Allocate a new buffer initialized with the contents of the source
-- buffer according to the given pattern.
--
-- The returned buffer has exactly @patternSize pat bsz@ cells.
copyBufferWithPattern :: Buffer -> ViewPattern -> IO Buffer
copyBufferWithPattern b pat = do
   bsz <- bufferSize b
   let !sz = patternSize pat bsz
   b' <- newBuffer sz
   case pat of
      -- A full pattern is a plain whole-buffer copy. This case IS
      -- reachable (it used to be `error "Unreachable code"`): e.g.
      -- 'viewToBuffer' applied to a view whose pattern was collapsed to
      -- 'PatternFull' by 'bufferWeakViewFinalier' ends up here.
      PatternFull -> bufferCopy b 0 b' 0 bsz
      Pattern1D poff psz -> bufferCopy b poff b' 0 psz
      -- copy row by row, skipping the stride gap between lines
      Pattern2D poff w h stride -> forM_ [0..h-1] $ \r ->
         bufferCopy b (poff + r*(w+stride)) b' (r*w) w
      -- composed patterns: byte-per-byte fallback (slow but correct)
      PatternOn _p1 _p2 -> forM_ [0..sz-1] $ \off -> do
         v <- bufferReadWord8 b (patternOffset pat off)
         bufferWriteWord8 b' off v
   return b'
-- | Materialize a view into a standalone buffer.
--
-- The chain of views is walked down to a concrete buffer while the
-- patterns met along the way are composed; the bytes selected by the
-- composed pattern are then copied into a fresh buffer.
viewToBuffer :: View -> IO Buffer
viewToBuffer = flatten PatternFull
   where
      flatten :: ViewPattern -> View -> IO Buffer
      flatten acc v = do
         let fromBuffer b p = copyBufferWithPattern b (acc `patternApplyOn` p)
         withValidView v
            fromBuffer
            fromBuffer
            (\v2 p -> flatten (acc `patternApplyOn` p) v2)
-- | Display the state of a View
--
-- >>> :set -XOverloadedLists
-- >>> import System.Mem
-- >>> v <- newBufferWeakView ([10,11,12,13,14,15,16,17] :: Buffer) (Pattern1D 2 4)
-- >>> v2 <- newViewWeakView v (Pattern1D 1 1)
--
-- > putStr =<< showViewState v2
-- View source: weak view
-- Source size: 4
-- View pattern: Pattern1D {pattern1DOffset = 1, pattern1DSize = 1}
-- Wasted space: 75%
-- Source:
--    View source: weak buffer
--    Source size: 8
--    View pattern: Pattern1D {pattern1DOffset = 2, pattern1DSize = 4}
--    Wasted space: 50%
--
-- > performGC
-- > putStr =<< showViewState v2
-- View source: weak view
-- Source size: 4
-- View pattern: Pattern1D {pattern1DOffset = 1, pattern1DSize = 1}
-- Wasted space: 75%
-- Source:
--    View source: buffer
--    Source size: 4
--    View pattern: PatternFull
--    Wasted space: 0%
--
-- Renders one "View source / Source size / View pattern / Wasted space"
-- paragraph per level of the view chain; nested sources are indented.
-- The helper also returns the pattern size so the caller one level up
-- can compute its own "wasted space" ratio.
showViewState :: View -> IO String
showViewState = fmap fst . go
   where
      go v = withValidView v
         (\b pat -> do
            sz <- bufferSize b
            let psz = patternSize pat sz
            return (unlines
               [ "View source: buffer"
               , "Source size: " ++ show sz
               , "View pattern: " ++ show pat
               , "Wasted space: " ++ show (100 - ((psz * 100) `div` sz)) ++ "%"
               ], psz)
         )
         (\b pat -> do
            sz <- bufferSize b
            let psz = patternSize pat sz
            return (unlines
               [ "View source: weak buffer"
               , "Source size: " ++ show sz
               , "View pattern: " ++ show pat
               , "Wasted space: " ++ show (100 - ((psz * 100) `div` sz)) ++ "%"
               ], psz)
         )
         (\v2 pat -> do
            -- recurse into the source view first, then indent its report
            (r,sz) <- go v2
            let psz = patternSize pat sz
            return (unlines $
               [ "View source: weak view"
               , "Source size: " ++ show sz
               , "View pattern: " ++ show pat
               , "Wasted space: " ++ show (100 - ((psz * 100) `div` sz)) ++ "%"
               , "Source:"
               ] ++ fmap ("   " ++) (lines r), psz)
         )
| null | https://raw.githubusercontent.com/haskus/packages/6d4a64dc26b55622af86b8b45a30a10f61d52e4d/haskus-binary/src/lib/Haskus/Memory/View.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE RankNTypes #
# LANGUAGE GADTs #
| A view (e.g. a slice) of a buffer
Suppose we have a big buffer B.
We can have buffer views on B, say vb1 and vb2.
B <----- vb1
^------- vb2
These views don't duplicate B's contents and they keep B alive.
If the views are much smaller than B, it may not be what we want: a lot of
space is wasted and we would better duplicate B's data required by the views
and free B.
B <~~~~~ wvb1
If/when B is collected, new buffers are created from it for the views:
B1 <----- wvb1
--- wvb2
We can also create "weak view views", say wvv1 and wvv2:
B <~~~~~ wvb1 <~~~~~ wvv1
^~~~~~~~~ wvv2
If/when B is collected before wvb1, the sharing is kept while the required
contents of B is duplicated:
B' <---- wvb1 <~~~~~ wvv1
^~~~~~~~~ wvv2
When wvb1 is collected, we can be in one of the following state depending if
B has been collected already or not:
B <~~~~~~~~~~~~~~~~~ wvv1
^~~~~~~~~~~~~~~~~~~~ wvv2
B' <~~~~~ wvv1
^~~~~~~~~ wvv2
| The source of a view
When it happens and if the view is still alive the contents of the buffer
used by the view is copied into a fresh (usually smaller) buffer.
Weak views can also be used as sources: in this case, when the source
source.
^ The source is a buffer. The view keeps the buffer alive
^ The source is a weak buffer. If the buffer is collected, its contents
is copied in to a new buffer and the view is updated to use it.
^ The source is a weak view. If the source view is collected, the
current view is updated to use whatever the source view uses as a
source (another view or a buffer).
This mechanism makes buffer contents cascade into smaller views while
preserving some sharing.
| A view on a buffer
| A view pattern
^ The whole buffer
# UNPACK #
^ Number of cells
^ 2D slice
# UNPACK #
^ Width (line size)
# UNPACK #
^ Height (number of lines)
^ Composed pattern
| Compute an actual offset when used with the given pattern
| Compute the effective size occupied by a pattern
| Compute the effective size occupied by a pattern
Remove trivial patterns combinations
| Read a Word8 from a view
depending on its source type (Buffer, WeakBuffer, WeakView).
the source is gone for now. Some thread must be copying back
to life so we give it some space to run with `yield` and then
we retry
TODO: We execute the spin-lock in a thread to avoid locking
the finalizer thread
if firstRun
then forkIO (go False)
else go False
| Create a view on a buffer
| Create a weak view on a buffer
contents is stored into a new buffer.
You should only use this for views that are much smaller than the original
buffer so that the copying cost is balanced by the memory occupation
difference.
temporarily create a View that non-weakly references the buffer
assign the weak buffer source to the view
create a weak reference to the view
associate a finalizer to the buffer that will copy the duplicate the
buffer when it is collected
update the view to reference the weak buffer
^ Source buffer
^ View pattern
^ Weak IORef of the view
the view is dead
this is stupid (the view covers the whole buffer) but let's resurrect b
we allocate a new buffer and copy the contents in it
update the view IORef
| Create a weak view on a view
and uses the same source.
assign it the weak view source
create a weak reference on the current view (its IORef in fact)
create a finalizer for srcRef. We can reference srcRef directly but not
the current view which must be accessed through its weak reference
"weakView"
the finalizer is attached to the IORef of the source view
we update the view
we don't want the finalizer to run before we write the IORef
FIXME: liftIO (touch srcRef)
the view is dead
wait for the source to be valid and then handle it
| Allocate a new buffer initialized with the contents of the source buffer
according to the given pattern
Not very efficient to copy byte by byte...
| Convert a view into an actual buffer
| Display the state of a View
>>> :set -XOverloadedLists
> putStr =<< showViewState v2
View source: weak view
Source:
View source: weak buffer
> performGC
> putStr =<< showViewState v2
View source: weak view
Source:
View source: buffer
| # LANGUAGE LambdaCase #
# LANGUAGE DataKinds #
To support this , we can use " weak buffer views " , say wvb1 and wvb2 .
^~~~~~~~ wvb2
module Haskus.Memory.View
( View (..)
, ViewSource (..)
, ViewPattern (..)
, viewReadWord8
, newBufferView
, newBufferWeakView
, newViewWeakView
, copyBufferWithPattern
, viewToBuffer
, showViewState
, patternSize
, unsafePatternSize
)
where
import Data.IORef
import System.Mem.Weak
import Control.Concurrent
import Haskus.Utils.Monad
import Haskus.Number.Word
import Haskus.Memory.Buffer
Weak views are used so that the underlying buffer can be freed by the GC .
view is GCed , the current view is updated to point to the source of the
-- | The source of a view.
data ViewSource
   = SourceBuffer Buffer -- ^ strong reference: the view keeps the buffer alive
   | SourceWeakBuffer (Weak Buffer) -- ^ weak buffer: resurrected into a fresh buffer on collection
   | SourceWeakView (Weak ViewIORef) -- ^ weak view: repointed to the source's own source on collection
-- | A view on a buffer.
newtype View = View ViewIORef
-- | Mutable state of a view: its current source and the pattern applied to it.
type ViewIORef = IORef (ViewSource,ViewPattern)
data ViewPattern
^ 1D slice
^ Offset of the first cell
}
^ Offset of the first line
^ Stride ( space between two lines )
}
deriving (Show)
-- | Translate an offset expressed in the view's coordinate space into
-- an offset in the source's coordinate space.
patternOffset :: ViewPattern -> Word -> Word
patternOffset pat off = case pat of
   PatternFull -> off
   Pattern1D off2 _sz -> off2+off
   -- rows are w cells wide and separated by a gap of `stride` cells
   Pattern2D off2 w _h stride -> let (y,x) = off `quotRem` w in off2+y*(w+stride)+x
   -- composed: map through the outer pattern p1, then the inner one p2
   PatternOn p1 p2 -> patternOffset p2 (patternOffset p1 off)
-- | Number of cells selected by a pattern, without the source size.
--
-- Partial: 'PatternFull' needs the source size, hence the 'error';
-- callers must rule it out first (use 'patternSize' otherwise).
unsafePatternSize :: ViewPattern -> Word
unsafePatternSize = \case
   PatternFull -> error "Don't call unsafePatternSize on PatternFull"
   Pattern1D _off sz -> sz
   Pattern2D _off w h _stride -> w * h
   -- a composition selects exactly what its outer pattern selects
   PatternOn p1 _p2 -> unsafePatternSize p1
-- | Number of cells selected by a pattern, given the size of the
-- underlying source (which only 'PatternFull' actually needs).
patternSize :: ViewPattern -> Word -> Word
patternSize p sourceSize = case p of
   PatternFull -> sourceSize
   Pattern1D _off n -> n
   Pattern2D _off w h _gap -> w * h
   -- the inner pattern selects inside the source, the outer one inside that
   PatternOn outer inner -> patternSize outer (patternSize inner sourceSize)
-- | Combine two patterns: @p1 `patternApplyOn` p2@ selects through p1
-- inside the selection made by p2. Trivial combinations are simplified
-- on the fly instead of building a 'PatternOn' node.
patternApplyOn :: ViewPattern -> ViewPattern -> ViewPattern
patternApplyOn p1 p2 = case (p1, p2) of
   (PatternFull,p) -> p
   (p,PatternFull) -> p
   -- 1D on 1D: offsets add up; the outer size wins
   (Pattern1D o1 s1, Pattern1D o2 _s2) -> Pattern1D (o1+o2) s1
   _ -> PatternOn p1 p2
-- | Read a Word8 from a view.
--
-- The offset is expressed in the view's coordinate space and is
-- translated through the pattern at each level of the view chain.
viewReadWord8 :: View -> Word -> IO Word8
viewReadWord8 view off =
   withValidView view
      (\b pat -> bufferReadWord8 b (patternOffset pat off))
      (\b pat -> bufferReadWord8 b (patternOffset pat off))
      (\v pat -> viewReadWord8 v (patternOffset pat off))
| Wait for a view to be valid then use one of the 3 passed functions on it
withValidView
:: MonadIO m
=> View
-> (Buffer -> ViewPattern -> m a)
-> (Buffer -> ViewPattern -> m a)
-> (View -> ViewPattern -> m a)
-> m a
withValidView (View ref) fb fwb fwv = go True
where
go _firstRun = do
(src,pat) <- liftIO (readIORef ref)
let waitForSource = do
liftIO yield
go False
case src of
SourceBuffer b -> fb b pat
SourceWeakBuffer wb -> liftIO (deRefWeak wb) >>= \case
Nothing -> waitForSource
Just b -> fwb b pat
SourceWeakView wv -> liftIO (deRefWeak wv) >>= \case
Nothing -> waitForSource
Just v2 -> fwv (View v2) pat
newBufferView :: MonadIO m => Buffer -> ViewPattern -> m View
newBufferView b pat = View <$> liftIO (newIORef (SourceBuffer b,pat))
The buffer is weakly referenced and can be GCed . When it happens , its
newBufferWeakView :: MonadIO m => Buffer -> ViewPattern -> m View
newBufferWeakView b pat = do
v <- View <$> (liftIO $ newIORef (SourceBuffer b,pat))
assignBufferWeakView v b pat
return v
assignBufferWeakView
:: MonadIO m
=> View
-> Buffer
-> ViewPattern
-> m ()
assignBufferWeakView (View ref) b pat = do
wViewRef <- liftIO $ mkWeakIORef ref (return ())
let finalizer = bufferWeakViewFinalier b pat wViewRef
wb <- liftIO (mkWeakPtr b (Just finalizer))
liftIO (writeIORef ref (SourceWeakBuffer wb,pat))
bufferWeakViewFinalier
-> IO ()
bufferWeakViewFinalier b pat wViewRef = deRefWeak wViewRef >>= \case
Just viewRef -> do
bsz <- bufferSize b
newSrc <- case pat of
PatternFull -> return (SourceBuffer b)
Pattern1D 0 psz | psz == bsz -> return (SourceBuffer b)
Pattern2D 0 w h 0 | w*h == bsz -> return (SourceBuffer b)
Pattern2D _ w h _ | w == 0 || h == 0 -> error "Invalid Pattern2D: width or height set to 0"
_ -> do
b' <- copyBufferWithPattern b pat
return (SourceBuffer b')
writeIORef viewRef (newSrc,PatternFull)
newViewWeakView :: MonadIO m => View -> ViewPattern -> m View
newViewWeakView src@(View srcRef) pat = do
create a new view . For now it only combines the two patterns
v <- liftIO $ do
(srcSrc,srcPat) <- readIORef srcRef
View <$> newIORef (srcSrc, pat `patternApplyOn` srcPat)
assignViewWeakView v src pat
return v
assignViewWeakView :: MonadIO m => View -> View -> ViewPattern -> m ()
assignViewWeakView (View ref) (View srcRef) pat = do
weakView <- liftIO $ mkWeakIORef ref (return ())
let finalizer = viewWeakViewFinalizer weakView srcRef pat
wSrcRef <- liftIO $ mkWeakIORef srcRef finalizer
liftIO (writeIORef ref (SourceWeakView wSrcRef,pat))
viewWeakViewFinalizer :: Weak ViewIORef -> ViewIORef -> ViewPattern -> IO ()
viewWeakViewFinalizer weakView srcRef pat = deRefWeak weakView >>= \case
Just viewRef -> do
let v = View viewRef
withValidView (View srcRef)
(\srcB srcPat -> do
let newPat = pat `patternApplyOn` srcPat
assignBufferWeakView v srcB newPat
)
(\srcWB srcPat -> do
let newPat = pat `patternApplyOn` srcPat
assignBufferWeakView v srcWB newPat
)
(\srcV srcPat -> do
let newPat = pat `patternApplyOn` srcPat
assignViewWeakView v srcV newPat
)
copyBufferWithPattern :: Buffer -> ViewPattern -> IO Buffer
copyBufferWithPattern b pat = do
bsz <- bufferSize b
let !sz = patternSize pat bsz
b' <- newBuffer sz
case pat of
PatternFull -> error "Unreachable code"
Pattern1D poff psz -> bufferCopy b poff b' 0 psz
Pattern2D poff w h stride -> forM_ [0..h-1] $ \r ->
bufferCopy b (poff + r*(w+stride)) b' (r*w) w
PatternOn _p1 _p2 -> forM_ [0..sz-1] $ \off -> do
v <- bufferReadWord8 b (patternOffset pat off)
bufferWriteWord8 b' off v
return b'
viewToBuffer :: View -> IO Buffer
viewToBuffer = go PatternFull
where
go :: ViewPattern -> View -> IO Buffer
go pat v = withValidView v
(\b pat2 -> copyBufferWithPattern b (pat `patternApplyOn` pat2))
(\b pat2 -> copyBufferWithPattern b (pat `patternApplyOn` pat2))
(\v2 pat2 -> go (pat `patternApplyOn` pat2) v2)
> > > import System . Mem
> > > v < - newBufferWeakView ( [ 10,11,12,13,14,15,16,17 ] : : Buffer ) ( Pattern1D 2 4 )
> > > v2 < - newViewWeakView v ( Pattern1D 1 1 )
Source size : 4
View pattern : Pattern1D { pattern1DOffset = 1 , pattern1DSize = 1 }
Wasted space : 75 %
Source size : 8
View pattern : Pattern1D { pattern1DOffset = 2 , pattern1DSize = 4 }
Wasted space : 50 %
Source size : 4
View pattern : Pattern1D { pattern1DOffset = 1 , pattern1DSize = 1 }
Wasted space : 75 %
Source size : 4
View pattern : PatternFull
Wasted space : 0 %
showViewState :: View -> IO String
showViewState = fmap fst . go
where
go v = withValidView v
(\b pat -> do
sz <- bufferSize b
let psz = patternSize pat sz
return (unlines
[ "View source: buffer"
, "Source size: " ++ show sz
, "View pattern: " ++ show pat
, "Wasted space: " ++ show (100 - ((psz * 100) `div` sz)) ++ "%"
], psz)
)
(\b pat -> do
sz <- bufferSize b
let psz = patternSize pat sz
return (unlines
[ "View source: weak buffer"
, "Source size: " ++ show sz
, "View pattern: " ++ show pat
, "Wasted space: " ++ show (100 - ((psz * 100) `div` sz)) ++ "%"
], psz)
)
(\v2 pat -> do
(r,sz) <- go v2
let psz = patternSize pat sz
return (unlines $
[ "View source: weak view"
, "Source size: " ++ show sz
, "View pattern: " ++ show pat
, "Wasted space: " ++ show (100 - ((psz * 100) `div` sz)) ++ "%"
, "Source:"
] ++ fmap (" " ++) (lines r), psz)
)
|
0a5b63ad30a077c30bd31e0723ff4166e1be0d8c5b33e42a2c7a81558fc1c32a | cedlemo/OCaml-GI-ctypes-bindings-generator | Link_button.ml | open Ctypes
open Foreign
type t = unit ptr
let t_typ : t typ = ptr void
let create =
foreign "gtk_link_button_new" (string @-> returning (ptr Widget.t_typ))
let create_with_label =
foreign "gtk_link_button_new_with_label" (string @-> string_opt @-> returning (ptr Widget.t_typ))
let get_uri =
foreign "gtk_link_button_get_uri" (t_typ @-> returning (string_opt))
let get_visited =
foreign "gtk_link_button_get_visited" (t_typ @-> returning (bool))
let set_uri =
foreign "gtk_link_button_set_uri" (t_typ @-> string @-> returning (void))
let set_visited =
foreign "gtk_link_button_set_visited" (t_typ @-> bool @-> returning (void))
| null | https://raw.githubusercontent.com/cedlemo/OCaml-GI-ctypes-bindings-generator/21a4d449f9dbd6785131979b91aa76877bad2615/tools/Gtk3/Link_button.ml | ocaml | open Ctypes
open Foreign
type t = unit ptr
let t_typ : t typ = ptr void
let create =
foreign "gtk_link_button_new" (string @-> returning (ptr Widget.t_typ))
let create_with_label =
foreign "gtk_link_button_new_with_label" (string @-> string_opt @-> returning (ptr Widget.t_typ))
let get_uri =
foreign "gtk_link_button_get_uri" (t_typ @-> returning (string_opt))
let get_visited =
foreign "gtk_link_button_get_visited" (t_typ @-> returning (bool))
let set_uri =
foreign "gtk_link_button_set_uri" (t_typ @-> string @-> returning (void))
let set_visited =
foreign "gtk_link_button_set_visited" (t_typ @-> bool @-> returning (void))
| |
ee464357619182f573aabd97637ac805a12b3bdf99a1865d36976b1a22ec88f0 | vvvvalvalval/datascript-declarative-model-example | ds_model.clj | (ns twitteur.ds-model
(:require [datascript.core :as dt]))
;;;; Model meta-data
These 2 values are DataScript Transaction Requests , i.e data structures defining writes to a DataScript database
NOTE in a real - world codebase , these 2 would typically live in different files .
(def user-model
  "DataScript transaction data declaring the :twitteur/User entity type
  and its attributes (id, email, name, follows, n_followers, tweets)."
  [{:twitteur.entity-type/name :twitteur/User
    :twitteur.schema/doc "a User is a person who has signed up to Twitteur."
    :twitteur.entity-type/attributes
    [{:twitteur.attribute/name :user/id
      :twitteur.schema/doc "The unique ID of this user."
      :twitteur.attribute/ref-typed? false
      :twitteur.attribute.scalar/type :uuid
      :twitteur.attribute/unique-identity true}
     {:twitteur.attribute/name :user/email
      :twitteur.schema/doc "The email address of this user (not visible to other users)."
      :twitteur.attribute/ref-typed? false
      :twitteur.attribute.scalar/type :string
      :twitteur.attribute.security/private? true} ;; here's a domain-specific security rule
     {:twitteur.attribute/name :user/name
      :twitteur.schema/doc "The public name of this user on Twitteur."
      :twitteur.attribute/ref-typed? false
      :twitteur.attribute.scalar/type :string}
     {:twitteur.attribute/name :user/follows
      :twitteur.schema/doc "The Twitteur users whom this user follows."
      :twitteur.attribute/ref-typed? true ;; this attribute is a reference-typed
      :twitteur.attribute.ref-typed/many? true
      :twitteur.attribute.ref-typed/type {:twitteur.entity-type/name :twitteur/User}}
     {:twitteur.attribute/name :user/n_followers
      :twitteur.schema/doc "How many users follow this user."
      :twitteur.attribute/ref-typed? false
      ;; NOTE(review): ref-typed/many? on a scalar (ref-typed? false) attribute
      ;; looks like a copy-paste leftover -- confirm (the dml version below
      ;; declares :user/n_followers as a plain scalar).
      :twitteur.attribute.ref-typed/many? true
      :twitteur.attribute.scalar/type :long
      :twitteur.attribute/derived? true} ;; this attribute is not stored in DB
     {:twitteur.attribute/name :user/tweets
      :twitteur.schema/doc "The tweets posted by this user."
      :twitteur.attribute/ref-typed? true
      :twitteur.attribute.ref-typed/many? true
      :twitteur.attribute.ref-typed/type {:twitteur.entity-type/name :twitteur/Tweet}
      :twitteur.attribute/derived? true}
     ]}])
(def tweet-model
  "DataScript transaction data declaring the :twitteur/Tweet entity type.
  Entity Type and Attributes are written as separate maps here (joined by
  :twitteur.attribute/name, a unique identity) to show that DataScript
  accepts equivalent layouts."
  ;; NOTE: to demonstrate the flexibility of DataScript, we choose a different but equivalent data layout
  ;; in this one, we define the Entity Type and the Attributes separately
  [;; Entity Type
   {:twitteur.entity-type/name :twitteur/Tweet
    :twitteur.schema/doc "a Tweet is a short message posted by a User on Twitteur, published to all her Followers."
    :twitteur.entity-type/attributes
    [{:twitteur.attribute/name :tweet/id}
     {:twitteur.attribute/name :tweet/content}
     {:twitteur.attribute/name :tweet/author}
     {:twitteur.attribute/name :tweet/time}]}
   ;; Attributes
   {:twitteur.attribute/name :tweet/id
    :twitteur.schema/doc "The unique ID of this Tweet"
    :twitteur.attribute/ref-typed? false
    :twitteur.attribute.scalar/type :uuid
    :twitteur.attribute/unique-identity true}
   {:twitteur.attribute/name :tweet/content
    :twitteur.schema/doc "The textual message of this Tweet"
    :twitteur.attribute/ref-typed? false
    :twitteur.attribute.scalar/type :string}
   {:twitteur.attribute/name :tweet/author
    :twitteur.schema/doc "The Twitteur user who wrote this Tweet."
    :twitteur.attribute/ref-typed? true
    :twitteur.attribute.ref-typed/many? false
    :twitteur.attribute.ref-typed/type {:twitteur.entity-type/name :twitteur/User}}
   {:twitteur.attribute/name :tweet/time
    :twitteur.schema/doc "The time at which this Tweet was published, as a timestamp."
    :twitteur.attribute/ref-typed? false
    :twitteur.attribute.scalar/type :long}])
;;;; Writing this metadata to a DataScript db
(require '[datascript.core :as dt])
(def meta-schema
  "DataScript schema for the *meta* database holding the domain model:
  entity types and attributes are unique by name, and type-level links
  (attributes list, ref-typed target) are modelled as refs."
  {:twitteur.entity-type/name {:db/unique :db.unique/identity}
   :twitteur.entity-type/attributes {:db/valueType :db.type/ref
                                     :db/cardinality :db.cardinality/many}
   :twitteur.attribute/name {:db/unique :db.unique/identity}
   :twitteur.attribute.ref-typed/type {:db/valueType :db.type/ref
                                       :db/cardinality :db.cardinality/one}})
(defn empty-model-db
  "Returns a fresh, empty DataScript database value equipped with the
  meta-schema used to describe the domain model."
  []
  (-> meta-schema dt/create-conn dt/db))
(def model-db
  "A DataScript database value, holding a representation of our Domain Model."
  (dt/db-with
    (empty-model-db)
    ;; Composing DataScript transactions is as simple as that: concat
    ;; (both models are plain vectors of tx maps)
    (concat
      user-model
      tweet-model)))
;;;; Let's query this a bit
(comment
;; What are all the attributes names in our Domain Model ?
(sort
(dt/q
'[:find [?attrName ...] :where
[?attr :twitteur.attribute/name ?attrName]]
model-db))
=> (:tweet/author :tweet/content :tweet/id :tweet/time :user/email :user/follows :user/id :user/n_followers :user/name)
;; What do we know about :tweet/author?
(def tweet-author-attr
(dt/entity model-db [:twitteur.attribute/name :tweet/author]))
tweet-author-attr
=> {:db/id 10}
(dt/touch tweet-author-attr)
=>
{:twitteur.schema/doc "The Twitteur user who wrote this Tweet.",
:twitteur.attribute/name :tweet/author,
:twitteur.attribute/ref-typed? true,
:twitteur.attribute.ref-typed/many? false,
:twitteur.attribute.ref-typed/type {:db/id 1},
:db/id 10}
(-> tweet-author-attr :twitteur.attribute.ref-typed/type dt/touch)
=>
{:twitteur.schema/doc "a User is a person who has signed up to Twitteur.",
:twitteur.entity-type/attributes #{{:db/id 4} {:db/id 6} {:db/id 3} {:db/id 2} {:db/id 5}},
:twitteur.entity-type/name :twitteur/User,
:db/id 1}
;; What attributes have type :twitteur/User?
(dt/q '[:find ?attrName ?to-many? :in $ ?type :where
[?attr :twitteur.attribute.ref-typed/type ?type]
[?attr :twitteur.attribute/name ?attrName]
[?attr :twitteur.attribute.ref-typed/many? ?to-many?]]
model-db [:twitteur.entity-type/name :twitteur/User])
=> #{[:tweet/author false] [:user/follows true]}
;; What attributes are derived, and therefore should not be stored in the database?
(->>
(dt/q '[:find [?attr ...] :where
[?attr :twitteur.attribute/derived? true]]
model-db)
(map #(dt/entity model-db %))
(sort-by :twitteur.attribute/name)
(mapv dt/touch))
=>
[{:twitteur.schema/doc "The tweets posted by this user.",
:twitteur.attribute/derived? true,
:twitteur.attribute/name :user/follows,
:twitteur.attribute/ref-typed? true,
:twitteur.attribute.ref-typed/many? true,
:twitteur.attribute.ref-typed/type {:db/id 7},
:db/id 5}
{:twitteur.schema/doc "How many users follow this user.",
:twitteur.attribute/derived? true,
:twitteur.attribute/name :user/n_followers,
:twitteur.attribute/ref-typed? false,
:twitteur.attribute.ref-typed/many? true,
:twitteur.attribute.scalar/type :long,
:db/id 6}]
;; What attributes are private, and therefore should not be exposed publicly?
(set
(dt/q '[:find [?attrName ...] :where
[?attr :twitteur.attribute.security/private? true]
[?attr :twitteur.attribute/name ?attrName]]
model-db))
=> #{:user/email}
)
;;;; Let's make our schema code more readable,
;;;; by using some concision helpers
(require '[twitteur.utils.model.dml :as dml])
(def user-model
  "Same :twitteur/User model as above, rebuilt with the dml concision
  helpers. No macro magic: each dml/* call just returns a data map."
  [(dml/entity-type :twitteur/User
     "a User is a person who has signed up to Twitteur."
     {:twitteur.entity-type/attributes
      [(dml/scalar :user/id :uuid (dml/unique-id) "The unique ID of this user.")
       (dml/scalar :user/email :string (dml/private) "The email address of this user (not visible to other users).")
       (dml/scalar :user/name :string "The public name of this user on Twitteur.")
       (dml/to-many :user/follows :twitteur/User "The Twitteur users whom this user follows.")
       (dml/scalar :user/n_followers :long (dml/derived) "How many users follow this user.")
       (dml/to-many :user/tweets :twitteur/Tweet (dml/derived) "The tweets posted by this user.")
       ]})])
(def tweet-model
  "Same :twitteur/Tweet model as above, rebuilt with the dml concision
  helpers; still a plain data structure."
  [(dml/entity-type :twitteur/Tweet
     "a Tweet is a short message posted by a User on Twitteur, published to all her Followers."
     {:twitteur.entity-type/attributes
      [(dml/scalar :tweet/id :uuid "The unique ID of this Tweet" (dml/unique-id))
       (dml/scalar :tweet/content :string "The textual message of this Tweet")
       (dml/to-one :tweet/author :twitteur/User "The Twitteur user who wrote this Tweet.")
       (dml/scalar :tweet/time :long "The time at which this Tweet was published, as a timestamp.")
       ]})])
;; Note that there's no macro magic above: user-model and tweet-model are still plain data structures,
;; we just use the dml/... functions to assemble them in a more readable way.
;; In particular, you can evaluate any sub-expression above in the REPL and see exactly
;; how it translates to a data structure. | null | https://raw.githubusercontent.com/vvvvalvalval/datascript-declarative-model-example/9028bb9ff0ccb8cd40ab0a5ab46530f5b8e823c3/src/twitteur/ds_model.clj | clojure | Model meta-data
here's a domain-specific security rule
this attribute is a reference-typed
this attribute is not stored in DB
in this one, we define the Entity Type and the Attributes separately
Entity Type
Attributes
Writing this metadata to a DataScript db
Composing DataScript transactions is as simple as that: concat
Let's query this a bit
What are all the attributes names in our Domain Model ?
What do we know about :tweet/author?
What attributes have type :twitteur/User?
What attributes are derived, and therefore should not be stored in the database?
What attributes are private, and therefore should not be exposed publicly?
Let's make our schema code more readable,
by using some concision helpers
Note that there's no macro magic above: user-model and tweet-model are still plain data structures,
we just use the dml/... functions to assemble them in a more readable way.
In particular, you can evaluate any sub-expression above in the REPL and see exactly
how it translates to a data structure. | (ns twitteur.ds-model
(:require [datascript.core :as dt]))
These 2 values are DataScript Transaction Requests , i.e data structures defining writes to a DataScript database
NOTE in a real - world codebase , these 2 would typically live in different files .
(def user-model
[{:twitteur.entity-type/name :twitteur/User
:twitteur.schema/doc "a User is a person who has signed up to Twitteur."
:twitteur.entity-type/attributes
[{:twitteur.attribute/name :user/id
:twitteur.schema/doc "The unique ID of this user."
:twitteur.attribute/ref-typed? false
:twitteur.attribute.scalar/type :uuid
:twitteur.attribute/unique-identity true}
{:twitteur.attribute/name :user/email
:twitteur.schema/doc "The email address of this user (not visible to other users)."
:twitteur.attribute/ref-typed? false
:twitteur.attribute.scalar/type :string
{:twitteur.attribute/name :user/name
:twitteur.schema/doc "The public name of this user on Twitteur."
:twitteur.attribute/ref-typed? false
:twitteur.attribute.scalar/type :string}
{:twitteur.attribute/name :user/follows
:twitteur.schema/doc "The Twitteur users whom this user follows."
:twitteur.attribute.ref-typed/many? true
:twitteur.attribute.ref-typed/type {:twitteur.entity-type/name :twitteur/User}}
{:twitteur.attribute/name :user/n_followers
:twitteur.schema/doc "How many users follow this user."
:twitteur.attribute/ref-typed? false
:twitteur.attribute.ref-typed/many? true
:twitteur.attribute.scalar/type :long
{:twitteur.attribute/name :user/tweets
:twitteur.schema/doc "The tweets posted by this user."
:twitteur.attribute/ref-typed? true
:twitteur.attribute.ref-typed/many? true
:twitteur.attribute.ref-typed/type {:twitteur.entity-type/name :twitteur/Tweet}
:twitteur.attribute/derived? true}
]}])
(def tweet-model
NOTE : to demonstrate the flexibility of DataScript , we choose a different but equivalent data layout
{:twitteur.entity-type/name :twitteur/Tweet
:twitteur.schema/doc "a Tweet is a short message posted by a User on Twitteur, published to all her Followers."
:twitteur.entity-type/attributes
[{:twitteur.attribute/name :tweet/id}
{:twitteur.attribute/name :tweet/content}
{:twitteur.attribute/name :tweet/author}
{:twitteur.attribute/name :tweet/time}]}
{:twitteur.attribute/name :tweet/id
:twitteur.schema/doc "The unique ID of this Tweet"
:twitteur.attribute/ref-typed? false
:twitteur.attribute.scalar/type :uuid
:twitteur.attribute/unique-identity true}
{:twitteur.attribute/name :tweet/content
:twitteur.schema/doc "The textual message of this Tweet"
:twitteur.attribute/ref-typed? false
:twitteur.attribute.scalar/type :string}
{:twitteur.attribute/name :tweet/author
:twitteur.schema/doc "The Twitteur user who wrote this Tweet."
:twitteur.attribute/ref-typed? true
:twitteur.attribute.ref-typed/many? false
:twitteur.attribute.ref-typed/type {:twitteur.entity-type/name :twitteur/User}}
{:twitteur.attribute/name :tweet/time
:twitteur.schema/doc "The time at which this Tweet was published, as a timestamp."
:twitteur.attribute/ref-typed? false
:twitteur.attribute.scalar/type :long}])
(require '[datascript.core :as dt])
(def meta-schema
{:twitteur.entity-type/name {:db/unique :db.unique/identity}
:twitteur.entity-type/attributes {:db/valueType :db.type/ref
:db/cardinality :db.cardinality/many}
:twitteur.attribute/name {:db/unique :db.unique/identity}
:twitteur.attribute.ref-typed/type {:db/valueType :db.type/ref
:db/cardinality :db.cardinality/one}})
(defn empty-model-db
[]
(let [conn (dt/create-conn meta-schema)]
(dt/db conn)))
(def model-db
"A DataScript database value, holding a representation of our Domain Model."
(dt/db-with
(empty-model-db)
(concat
user-model
tweet-model)))
(comment
(sort
(dt/q
'[:find [?attrName ...] :where
[?attr :twitteur.attribute/name ?attrName]]
model-db))
=> (:tweet/author :tweet/content :tweet/id :tweet/time :user/email :user/follows :user/id :user/n_followers :user/name)
(def tweet-author-attr
(dt/entity model-db [:twitteur.attribute/name :tweet/author]))
tweet-author-attr
=> {:db/id 10}
(dt/touch tweet-author-attr)
=>
{:twitteur.schema/doc "The Twitteur user who wrote this Tweet.",
:twitteur.attribute/name :tweet/author,
:twitteur.attribute/ref-typed? true,
:twitteur.attribute.ref-typed/many? false,
:twitteur.attribute.ref-typed/type {:db/id 1},
:db/id 10}
(-> tweet-author-attr :twitteur.attribute.ref-typed/type dt/touch)
=>
{:twitteur.schema/doc "a User is a person who has signed up to Twitteur.",
:twitteur.entity-type/attributes #{{:db/id 4} {:db/id 6} {:db/id 3} {:db/id 2} {:db/id 5}},
:twitteur.entity-type/name :twitteur/User,
:db/id 1}
(dt/q '[:find ?attrName ?to-many? :in $ ?type :where
[?attr :twitteur.attribute.ref-typed/type ?type]
[?attr :twitteur.attribute/name ?attrName]
[?attr :twitteur.attribute.ref-typed/many? ?to-many?]]
model-db [:twitteur.entity-type/name :twitteur/User])
=> #{[:tweet/author false] [:user/follows true]}
(->>
(dt/q '[:find [?attr ...] :where
[?attr :twitteur.attribute/derived? true]]
model-db)
(map #(dt/entity model-db %))
(sort-by :twitteur.attribute/name)
(mapv dt/touch))
=>
[{:twitteur.schema/doc "The tweets posted by this user.",
:twitteur.attribute/derived? true,
:twitteur.attribute/name :user/follows,
:twitteur.attribute/ref-typed? true,
:twitteur.attribute.ref-typed/many? true,
:twitteur.attribute.ref-typed/type {:db/id 7},
:db/id 5}
{:twitteur.schema/doc "How many users follow this user.",
:twitteur.attribute/derived? true,
:twitteur.attribute/name :user/n_followers,
:twitteur.attribute/ref-typed? false,
:twitteur.attribute.ref-typed/many? true,
:twitteur.attribute.scalar/type :long,
:db/id 6}]
(set
(dt/q '[:find [?attrName ...] :where
[?attr :twitteur.attribute.security/private? true]
[?attr :twitteur.attribute/name ?attrName]]
model-db))
=> #{:user/email}
)
(require '[twitteur.utils.model.dml :as dml])
(def user-model
[(dml/entity-type :twitteur/User
"a User is a person who has signed up to Twitteur."
{:twitteur.entity-type/attributes
[(dml/scalar :user/id :uuid (dml/unique-id) "The unique ID of this user.")
(dml/scalar :user/email :string (dml/private) "The email address of this user (not visible to other users).")
(dml/scalar :user/name :string "The public name of this user on Twitteur.")
(dml/to-many :user/follows :twitteur/User "The Twitteur users whom this user follows.")
(dml/scalar :user/n_followers :long (dml/derived) "How many users follow this user.")
(dml/to-many :user/tweets :twitteur/Tweet (dml/derived) "The tweets posted by this user.")
]})])
(def tweet-model
[(dml/entity-type :twitteur/Tweet
"a Tweet is a short message posted by a User on Twitteur, published to all her Followers."
{:twitteur.entity-type/attributes
[(dml/scalar :tweet/id :uuid "The unique ID of this Tweet" (dml/unique-id))
(dml/scalar :tweet/content :string "The textual message of this Tweet")
(dml/to-one :tweet/author :twitteur/User "The Twitteur user who wrote this Tweet.")
(dml/scalar :tweet/time :long "The time at which this Tweet was published, as a timestamp.")
]})])
|
44f6438b7460def705754612dc437cdb8e7d69081868ae1a9af37e022562adc0 | deepakjois/hs-logo | Logo.hs | {-# LANGUAGE DeriveDataTypeable #-}
module Main where
import Diagrams.Prelude
import Diagrams.Backend.SVG.CmdLine
import Diagrams.TwoD.Path.Turtle
import Logo.Types
import Logo.TokenParser
import Logo.Builtins
import Logo.Evaluator
import System.Environment (getProgName, withArgs)
import System.Console.CmdArgs.Implicit
import qualified Data.Map as M
-- | Command-line options for the interpreter.
data LogoOpts = LogoOpts
  { output :: String -- output file to write to
  , src :: Maybe String -- source file to read from
  } deriving (Show, Data, Typeable)

-- | cmdargs specification for 'LogoOpts'.  @prog@ is the executable name
-- shown in @--help@; the source file is the positional argument.
logoOpts :: String -> LogoOpts
logoOpts prog = LogoOpts
  { output = "logo.svg"
            &= typFile
            &= help "Output image file (default=logo.svg)"
  , src = def
         &= typFile
         &= args
  }
  &= summary "hs-logo Logo Interpreter v0.1"
  &= program prog

-- | Parse the command line and hand the source file to 'renderLogo';
-- aborts when no source file was given.
main :: IO ()
main = do
  prog <- getProgName
  opts <- cmdArgs (logoOpts prog)
  case src opts of
    Nothing -> error "Source file not specified"
    Just s -> renderLogo s (output opts)

-- | Tokenize the source at @s@, run it through the turtle interpreter and
-- render a 400x400 SVG to @o@.  Line width is scaled to the drawing
-- (0.5% of its width); the diagram is centered and padded by 10%.
renderLogo :: String -> String -> IO ()
renderLogo s o = do
  tokens <- readSource s
  diag <- drawTurtleT (evaluateSourceTokens tokens)
  withArgs ["-o", o, "-w", "400", "-h", "400"] $ defaultMain (diag # lwG (0.005 * width diag) # centerXY # pad 1.1)
-- | Read a Logo source file and tokenize it; aborts with the parse error
-- on failure.
readSource :: FilePath -> IO [LogoToken]
readSource path = do
  contents <- readFile path
  either (error . show) return (tokenize path contents)
-- | Evaluate a token stream in a fresh context: the builtin procedure
-- table plus two empty maps (presumably user-defined procedures and
-- variables — confirm against 'LogoContext' in Logo.Types).  Any
-- evaluation error aborts the program.
evaluateSourceTokens :: [LogoToken] -> TurtleIO ()
evaluateSourceTokens tokens = do
  let initialContext = LogoContext builtins M.empty M.empty
  res <- evaluateWithContext tokens initialContext
  case res of
    Left err -> error $ show err
    Right _ -> return ()
| null | https://raw.githubusercontent.com/deepakjois/hs-logo/888a65bfc40de463bd8dd9fe28aac729a3b803bd/src/Logo.hs | haskell | # LANGUAGE DeriveDataTypeable #
output file to write to
source file to read from | module Main where
import Diagrams.Prelude
import Diagrams.Backend.SVG.CmdLine
import Diagrams.TwoD.Path.Turtle
import Logo.Types
import Logo.TokenParser
import Logo.Builtins
import Logo.Evaluator
import System.Environment (getProgName, withArgs)
import System.Console.CmdArgs.Implicit
import qualified Data.Map as M
data LogoOpts = LogoOpts
} deriving (Show, Data, Typeable)
logoOpts :: String -> LogoOpts
logoOpts prog = LogoOpts
{ output = "logo.svg"
&= typFile
&= help "Output image file (default=logo.svg)"
, src = def
&= typFile
&= args
}
&= summary "hs-logo Logo Interpreter v0.1"
&= program prog
main :: IO ()
main = do
prog <- getProgName
opts <- cmdArgs (logoOpts prog)
case src opts of
Nothing -> error "Source file not specified"
Just s -> renderLogo s (output opts)
renderLogo :: String -> String -> IO ()
renderLogo s o = do
tokens <- readSource s
diag <- drawTurtleT (evaluateSourceTokens tokens)
withArgs ["-o", o, "-w", "400", "-h", "400"] $ defaultMain (diag # lwG (0.005 * width diag) # centerXY # pad 1.1)
readSource :: FilePath -> IO [LogoToken]
readSource f = do
tokens <- tokenize f <$> readFile f
case tokens of
Left x -> error $ show x
Right t -> return t
evaluateSourceTokens :: [LogoToken] -> TurtleIO ()
evaluateSourceTokens tokens = do
let initialContext = LogoContext builtins M.empty M.empty
res <- evaluateWithContext tokens initialContext
case res of
Left err -> error $ show err
Right _ -> return ()
|
6431d19394abb53bc6158d93a328500f0049da3fd06e0af6b0c0eaf26ab67b88 | OCamlPro/typerex-lint | plugin_typedtree.check_identifier.4.ml | Pervasives.at_exit (fun () -> print_string "end")
| null | https://raw.githubusercontent.com/OCamlPro/typerex-lint/6d9e994c8278fb65e1f7de91d74876531691120c/tools/ocp-lint-doc/examples/plugin_typedtree.check_identifier.4.ml | ocaml | Pervasives.at_exit (fun () -> print_string "end")
| |
e04b3200142b74c1e96ac14035accc8dbaaf817ec40fa79f4113ce937811cd54 | ocurrent/ocurrent-deployer | test_index.ml | module Index = Deployer.Index
(* Smoke test for the Index module: after recording jobs for a commit,
   [get_job_ids] returns exactly the variant job ids that are [Some _]
   (the [None] variant contributes nothing), and a later [record] for the
   same commit replaces the earlier set.  NOTE(review): replacement
   semantics are inferred from the expected ["job2"]; confirm against
   [Index.record]. *)
let test_simple () =
  let owner = "owner" in
  let name = "name" in
  let repo = { Current_github.Repo_id.owner; name } in
  let hash = "abc" in
  (* Force the lazy DB handle before [Index.init] — presumably so the
     backing database exists; confirm in Current.Db. *)
  let _ = Lazy.force Current.Db.v in
  Index.init ();
  Index.record ~repo ~hash [ ("build", Some "job1"); ("deploy", None) ];
  Alcotest.(check (list string)) "Job-ids" ["job1"] @@ Index.get_job_ids ~owner ~name ~hash;
  Index.record ~repo ~hash [ ("build", Some "job2") ];
  Alcotest.(check (list string)) "Job-ids" ["job2"] @@ List.sort String.compare @@ Index.get_job_ids ~owner ~name ~hash

(* Test suite exported to the Alcotest runner. *)
let tests = [
  Alcotest_lwt.test_case_sync "simple" `Quick test_simple;
]
| null | https://raw.githubusercontent.com/ocurrent/ocurrent-deployer/e60eb1677ec70c8b9e1b28ef312eff97368f3b4c/test/test_index.ml | ocaml | module Index = Deployer.Index
let test_simple () =
let owner = "owner" in
let name = "name" in
let repo = { Current_github.Repo_id.owner; name } in
let hash = "abc" in
let _ = Lazy.force Current.Db.v in
Index.init ();
Index.record ~repo ~hash [ ("build", Some "job1"); ("deploy", None) ];
Alcotest.(check (list string)) "Job-ids" ["job1"] @@ Index.get_job_ids ~owner ~name ~hash;
Index.record ~repo ~hash [ ("build", Some "job2") ];
Alcotest.(check (list string)) "Job-ids" ["job2"] @@ List.sort String.compare @@ Index.get_job_ids ~owner ~name ~hash
let tests = [
Alcotest_lwt.test_case_sync "simple" `Quick test_simple;
]
| |
501bcdca9f238bbb82b12705d28cbb3e02d49eebe442c3b779942efee464ee3c | louispan/data-diverse | ATraversable.hs | # LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
module Data.Diverse.ATraversable where
import Data.Diverse.TypeLevel
| Given a ' Data . Diverse . Case ' that transforms each type in the typelist within an
' Applicative ' context @m@ , convert a @f xs@ to @m ( f ( ' TraverseResults ' c m xs))@ ,
-- where @('TraverseResults' c m xs)@ corresponds to @('CaseResults' (c m) xs)@ with the
-- @m@ layer peeled off from each result.
--
-- This is primarily meant to be used with 'Data.Diverse.Case.Case's from the
" Data . Diverse . CaseIxed " module .
-- Effectful analogue of 'fmap' over a typelist container @f xs@: the case
-- @c@ runs each element in @m@, and the result container's typelist is the
-- per-element result types with the @m@ layer removed.
class ATraversable f c m xs where
    atraverse
        :: ( Applicative m
           -- Constraining to 'IsTraversalCase' surfaces a clear type error
           -- when the 'Case' is stuck (most likely because the kind does
           -- not match).
           , IsTraversalCase c
           -- Deferring evaluation of the traversal results avoids a second
           -- (confusing) type error when the 'Case' is stuck.
           , xs' ~ (TraverseResults c m xs)
           )
        => c m xs
        -> f xs
        -> m (f xs')
| null | https://raw.githubusercontent.com/louispan/data-diverse/4033c90c44dab5824f76d64b7128bb6dea2b5dc7/src/Data/Diverse/ATraversable.hs | haskell | where @('TraverseResults' c m xs)@ corresponds to @('CaseResults' (c m) xs)@ with the
@m@ layer peeled off from each result.
This is primarily meant to be used with 'Data.Diverse.Case.Case's from the
Throws a type error when the 'Case' is stuck
(most likely because the kind does not match).
(confusing) type error when the 'Case' is stuck. | # LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
module Data.Diverse.ATraversable where
import Data.Diverse.TypeLevel
| Given a ' Data . Diverse . Case ' that transforms each type in the typelist within an
' Applicative ' context @m@ , convert a @f xs@ to @m ( f ( ' TraverseResults ' c m xs))@ ,
" Data . Diverse . CaseIxed " module .
class ATraversable f c m xs where
atraverse
:: ( Applicative m
, IsTraversalCase c
Defers the evaluation of the traversal results , to avoid getting another
, xs' ~ (TraverseResults c m xs)
)
=> c m xs
-> f xs
-> m (f xs')
|
42a15fde8afe2485db55e1625b49a627b8a18c0e0da211e5c0729a2254c26fd6 | brownplt/TeJaS | patSets.ml | open Prelude
module StringSet = Set.Make (String)
module StringSetExt = SetExt.Make (StringSet)
type t =
| Finite of StringSet.t
| CoFinite of StringSet.t
(* Stub kept to satisfy the module interface. *)
let parse pos str =
  failwith "PatSets.parse not implemented"

(* The pattern matching exactly one string. *)
let singleton str = Finite (StringSet.singleton str)

(* If [v] denotes exactly one string, return it; otherwise [None]
   (co-finite sets are always infinite). *)
let singleton_string v =
  match v with
  | CoFinite _ -> None
  | Finite set ->
    if StringSet.cardinal set = 1 then Some (StringSet.choose set) else None

(* The empty language and the language of all strings. *)
let empty = Finite StringSet.empty
let all = CoFinite StringSet.empty

(* Only the finite representation can be empty: a co-finite set excludes
   finitely many strings and therefore always has members. *)
let is_empty v =
  match v with
  | Finite set -> StringSet.is_empty set
  | CoFinite _ -> false
(* Intersection.  An empty operand short-circuits to [empty]; otherwise the
   finite/co-finite cases follow the usual complement algebra. *)
let intersect v1 v2 =
  if is_empty v1 || is_empty v2 then empty
  else
    match v1, v2 with
    | Finite s1, Finite s2 -> Finite (StringSet.inter s1 s2)
    | CoFinite s1, CoFinite s2 -> CoFinite (StringSet.union s1 s2)
    | Finite f, CoFinite cf
    | CoFinite cf, Finite f -> Finite (StringSet.diff f cf)

(* N-ary intersection; by convention the intersection of no sets is
   [empty]. *)
let intersections ts =
  let step acc t = match acc with
    | None -> Some t
    | Some u -> Some (intersect u t)
  in
  match List.fold_left step None ts with
  | Some u -> u
  | None -> empty

(* Union.  An empty operand short-circuits to the other operand; the
   finite/co-finite cases are the De Morgan duals of [intersect]. *)
let union v1 v2 =
  if is_empty v1 then v2
  else if is_empty v2 then v1
  else
    match v1, v2 with
    | Finite s1, Finite s2 -> Finite (StringSet.union s1 s2)
    | CoFinite s1, CoFinite s2 -> CoFinite (StringSet.inter s1 s2)
    | Finite f, CoFinite cf
    | CoFinite cf, Finite f -> CoFinite (StringSet.diff cf f)

(* N-ary union; the union of no sets is [empty]. *)
let unions ts =
  let step acc t = match acc with
    | None -> Some t
    | Some u -> Some (union u t)
  in
  match List.fold_left step None ts with
  | Some u -> u
  | None -> empty
(* Complement: swap the finite and co-finite representations. *)
let negate = function
  | Finite set -> CoFinite set
  | CoFinite set -> Finite set

(* Set difference via complement: v1 - v2 = v1 ∩ ¬v2. *)
let subtract v1 v2 = intersect v1 (negate v2)

(* Regular-language operators with no finite/co-finite representation. *)
let concat _ _ =
  failwith "concat not implemented--probably should not be"

let star _ = failwith "star not implemented for patSets"

(* The set of one-character strings whose character lies between [first]
   and [last], inclusive.  Fails on a reversed range. *)
let one_range first last =
  let lo = Char.code first and hi = Char.code last in
  if lo > hi then
    failwith (sprintf "Bad character range %s-%s"
                (Char.escaped first)
                (Char.escaped last))
  else
    let rec build i acc =
      if i > hi then acc
      else build (i + 1) (StringSet.add (String.make 1 (Char.chr i)) acc)
    in
    build lo StringSet.empty

(* A finite pattern covering every character of every given range. *)
let range ranges =
  let add acc (first, last) = StringSet.union acc (one_range first last) in
  Finite (List.fold_left add StringSet.empty ranges)
(* Do the two languages share at least one string? *)
let is_overlapped v1 v2 =
  match v1, v2 with
  | Finite s1, Finite s2 ->
    not (StringSet.is_empty (StringSet.inter s1 s2))
  | CoFinite _, CoFinite _ ->
    (* Each co-finite set excludes only finitely many strings, so some
       string lies outside both exclusion sets. *)
    true
  | Finite fin, CoFinite excl
  | CoFinite excl, Finite fin ->
    (* Disjoint exactly when every member of the finite set is excluded
       by the co-finite one. *)
    not (StringSet.subset fin excl)

(* Language inclusion.  Note the containment direction flips for two
   co-finite sets: excluding fewer strings means denoting more. *)
let is_subset v1 v2 =
  match v1, v2 with
  | Finite s1, Finite s2 -> StringSet.subset s1 s2
  | CoFinite s1, CoFinite s2 -> StringSet.subset s2 s1
  | Finite fin, CoFinite excl ->
    (* Every member of the finite set must avoid the exclusion set. *)
    StringSet.is_empty (StringSet.inter fin excl)
  | CoFinite _, Finite _ ->
    (* An infinite language never fits inside a finite one. *)
    false

(* Representation equality: same constructor, same underlying set.  (A
   finite and a co-finite value never denote the same language.) *)
let is_equal v1 v2 =
  match v1, v2 with
  | Finite s1, Finite s2
  | CoFinite s1, CoFinite s2 -> StringSet.equal s1 s2
  | _ -> false
(* Render a pattern set with the FormatExt combinators: a lone string
   prints bare, a finite set prints as a set literal, the universal set
   prints as "/.*/", and any other co-finite set prints as a negated
   set literal. *)
let pretty_helper v =
  let open FormatExt in
  match v with
  | Finite set when StringSet.cardinal set = 1 -> text (StringSet.choose set)
  | Finite set -> StringSetExt.p_set text set
  | CoFinite set when StringSet.is_empty set -> text "/.*/"
  | CoFinite set ->
    horz [ squish [ text "-"; StringSetExt.p_set text set; text "-" ] ]

let pretty v = FormatExt.to_string pretty_helper v

(* Produce some string belonging to the set, or [None] if it is empty.
   For a co-finite set we pick a run of 'X's whose length differs from
   the length of every excluded string, so it cannot be excluded. *)
let example v =
  match v with
  | Finite set ->
    if StringSet.is_empty set then None else Some (StringSet.choose set)
  | CoFinite set ->
    let lengths =
      List.sort compare (List.map String.length (StringSetExt.to_list set)) in
    (* First length absent from the sorted length list. *)
    let rec first_gap = function
      | l1 :: l2 :: rest ->
        if l1 + 1 < l2 then l1 + 1 else first_gap (l2 :: rest)
      | [ l ] -> l + 1
      | [] -> 1
    in
    Some (String.make (first_gap lengths) 'X')
(* Compile an explicit string set to an NFA: the union of one singleton
   automaton per string, or the empty automaton for the empty set. *)
let set_to_nfa set =
  let module R = PatReg in
  let step str acc =
    match acc with
    | None -> Some (R.singleton str)
    | Some nfa -> Some (R.union (R.singleton str) nfa)
  in
  match StringSet.fold step set None with
  | None -> R.empty
  | Some nfa -> nfa

(* Compile a pattern set to an NFA, negating the automaton for the
   co-finite case. *)
let to_nfa = function
  | Finite set -> set_to_nfa set
  | CoFinite set -> PatReg.negate (set_to_nfa set)
| null | https://raw.githubusercontent.com/brownplt/TeJaS/a8ad7e5e9ad938db205074469bbde6a688ec913e/src/patterns/patSets.ml | ocaml | There is always some element not in the set of excluded strings that
is common to both co-finite sets.
The finite set must be contained in the excluded elements of the
co-finite set. Any element not explicitly excluded is in cfset.
The finite set must be in the complement | open Prelude
module StringSet = Set.Make (String)
module StringSetExt = SetExt.Make (StringSet)
type t =
| Finite of StringSet.t
| CoFinite of StringSet.t
let parse pos str =
failwith "PatSets.parse not implemented"
let singleton str = Finite (StringSet.singleton str)
let singleton_string v = match v with
| CoFinite _ -> None
| Finite set -> match StringSet.cardinal set with
| 1 -> Some (StringSet.choose set)
| _ -> None
let empty = Finite StringSet.empty
let all = CoFinite StringSet.empty
let is_empty v = match v with
| Finite set -> StringSet.is_empty set
| CoFinite _ -> false
let intersect v1 v2 =
if (is_empty v1) then empty
else if (is_empty v2) then empty
else match v1, v2 with
| Finite set1, Finite set2 -> Finite (StringSet.inter set1 set2)
| CoFinite set1, CoFinite set2 -> CoFinite (StringSet.union set1 set2)
| Finite fset, CoFinite cfset
| CoFinite cfset, Finite fset -> Finite (StringSet.diff fset cfset)
let intersections ts = match (List.fold_left
(fun u t -> match u with
| None -> Some t
| Some u -> Some (intersect u t))
None
ts) with
| Some u -> u
| None -> empty
let union v1 v2 =
if (is_empty v1) then v2
else if (is_empty v2) then v1
else match v1, v2 with
| Finite set1, Finite set2 -> Finite (StringSet.union set1 set2)
| CoFinite set1, CoFinite set2 -> CoFinite (StringSet.inter set1 set2)
| Finite fset, CoFinite cfset
| CoFinite cfset, Finite fset -> CoFinite (StringSet.diff cfset fset)
let unions ts = match (List.fold_left
(fun u t -> match u with
| None -> Some t
| Some u -> Some (union u t))
None
ts) with
| Some u -> u
| None -> empty
let negate v = match v with
| Finite set -> CoFinite set
| CoFinite set -> Finite set
let subtract v1 v2 = intersect v1 (negate v2)
let concat _ _ =
failwith "concat not implemented--probably should not be"
let star _ = failwith "star not implemented for patSets"
let one_range first last =
let ascii_first, ascii_last = Char.code first, Char.code last in
if not (ascii_last >= ascii_first) then
failwith (sprintf "Bad character range %s-%s"
(Char.escaped first)
(Char.escaped last))
else
let rec f i chars =
if i > ascii_last then chars
else f (i + 1) (StringSet.add (String.make 1 (Char.chr i)) chars) in
f ascii_first StringSet.empty
let range ranges =
Finite (List.fold_left (fun r (first, last) -> StringSet.union r (one_range first last)) StringSet.empty ranges)
let is_overlapped v1 v2 = match v1, v2 with
| Finite set1, Finite set2 ->
not (StringSet.is_empty (StringSet.inter set1 set2))
| CoFinite _, CoFinite _ ->
true
| Finite fset, CoFinite cfset
| CoFinite cfset, Finite fset ->
not (StringSet.subset fset cfset)
let is_subset v1 v2 = match v1, v2 with
| Finite set1, Finite set2 -> StringSet.subset set1 set2
| CoFinite set1, CoFinite set2 -> StringSet.subset set2 set1
| Finite fset, CoFinite cfset ->
StringSet.is_empty (StringSet.inter fset cfset)
| CoFinite _, Finite _ -> false
let is_equal v1 v2 = match v1, v2 with
| Finite set1, Finite set2
| CoFinite set1, CoFinite set2 ->
StringSet.equal set1 set2
| _ -> false
let pretty_helper v =
let open FormatExt in
match v with
| Finite set ->
if StringSet.cardinal set = 1 then
text (StringSet.choose set)
else
StringSetExt.p_set text set
| CoFinite set ->
if StringSet.is_empty set then
text "/.*/"
else
horz [ squish [ text "-"; StringSetExt.p_set text set; text "-" ] ]
let pretty v =
FormatExt.to_string pretty_helper v
let example v = match v with
| Finite set ->
if StringSet.is_empty set then
None
else
Some (StringSet.choose set)
| CoFinite set ->
let lengths = List.sort (compare) (List.map String.length (StringSetExt.to_list set)) in
let min_gap =
let rec helper ls = match ls with
| l1::l2::ls -> if (l1 + 1 < l2) then l1 + 1 else helper (l2::ls)
| [l] -> l+1
| [] -> 1 in
helper lengths in
Some (String.make min_gap 'X')
let rec set_to_nfa set =
let module R = PatReg in
match (StringSet.fold
(fun str nfa -> match nfa with
| None -> Some (R.singleton str)
| Some nfa -> Some (R.union (R.singleton str) nfa))
set
None) with
| None -> R.empty
| Some nfa -> nfa
let to_nfa v = match v with
| Finite set -> set_to_nfa set
| CoFinite set -> PatReg.negate (set_to_nfa set)
|
2a354dec3c9a601aab48e4e46327fe542418630036b86b132acbf36957f49e8d | irastypain/sicp-on-language-racket | exercise_2_28.rkt | #lang racket
;; Flatten a (possibly nested) tree of pairs into the flat list of its
;; leaves, in left-to-right order.  (SICP exercise 2.28.)
(define (fringe tree)
  (cond
    [(null? tree) '()]
    [(pair? tree) (append (fringe (car tree)) (fringe (cdr tree)))]
    [else (list tree)]))
(provide fringe) | null | https://raw.githubusercontent.com/irastypain/sicp-on-language-racket/0052f91d3c2432a00e7e15310f416cb77eeb4c9c/src/chapter02/exercise_2_28.rkt | racket | #lang racket
Процедура , которая приводит дерево к
(define (fringe tree)
(if (null? tree)
`()
(if (pair? tree)
(append (fringe (car tree)) (fringe (cdr tree)))
(list tree))))
(provide fringe) | |
4c100ad3b9336144d57bcdfec427e561495db763d76229508cd233d1a346ac50 | sourcegraph/srclib-haskell | Haddock.hs | # LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE ScopedTypeVariables #-}
# LANGUAGE UnicodeSyntax #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - orphans #
# OPTIONS_GHC -fwarn - unused - imports #
module Haddock where
import ClassyPrelude hiding ((</>), (<.>), maximumBy)
import qualified Prelude
import Prelude.Unicode
import Control.Category.Unicode
import qualified Imports as Imp
import qualified Data.IntMap as IntMap
import qualified Data.Maybe as May
import qualified Data.Map as M
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.List as L
import Data.Foldable (maximumBy)
import Text.Printf (printf)
import qualified Documentation.Haddock as H
import Shelly hiding (FilePath, path, (</>), (<.>), canonicalize, trace)
import qualified Shelly
import qualified Filesystem.Path.CurrentOS as Path
import System.Posix.Process (getProcessID)
import Text.Regex.TDFA
import qualified Cabal as C
import Locations as Loc
import qualified Srclib as Src
import Distribution.Hackage.DB (Hackage, readHackage)
-- | A graphed definition: module, name, kind, and source span.
-- NOTE(review): appears unused within this module — confirm callers
-- before removing.
data Def = Def ModulePath Text Src.Kind Span

-- | A local binding: name, namespace (value\/type), its location in the
-- Haddock temp output, whether it is exported, and a uniquing key.
data Bind = Bind String H.NameSpc H.FileLoc Bool Text
  deriving (Show,Ord,Eq)

-- | The unique key of a Haddock-reported local binding.
localUniq (H.Local _ u _ _) = u

-- | Index a list by a key function (later elements win on duplicate
-- keys, per 'M.fromList').
mkIndex ∷ Ord k ⇒ (a → k) → [a] → Map k a
mkIndex f = map (\r→(f r,r)) ⋙ M.fromList
-- | Compose a function with itself @n@ times: @composeN n f = f . f . … . f@
-- (@n@ copies); @composeN 0 f@ is 'id', as is any non-positive @n@.
--
-- The previous definition recursed as @composeN (n-1) (f.f)@, squaring the
-- function at every step, so it applied @f@ 2^(n-1) times instead of @n@
-- times (and looped forever for negative @n@).
composeN :: Int -> (a -> a) -> (a -> a)
composeN n f
  | n <= 0    = id
  | otherwise = f . composeN (n - 1) f
-- | Iterate 'renames' a fixed number of times so that chains of renames
-- (a ↦ b, b ↦ c, …) are followed through to their final binding.
allTheRenames ∷ H.SymGraph → H.SymGraph
allTheRenames = composeN 10 renames

-- | Graphs combine component-wise by list concatenation.
-- NOTE(review): orphan instance for a type owned by the Haddock fork.
instance Monoid H.SymGraph where
  mempty = H.SymGraph [] [] []
  mappend (H.SymGraph a b c) (H.SymGraph d e f) = H.SymGraph (a++d) (b++e) (c++f)

-- | One step of rename resolution: every local binding (in references and
-- exports) whose unique key appears in the rename table is replaced by the
-- binding it renames to, when that target occurs among the graph's local
-- definitions.  (The @renames@ parameter deliberately shadows the
-- function's own name.)
renames ∷ H.SymGraph → H.SymGraph
renames (H.SymGraph refs exports renames) =
  H.SymGraph (rename <$> refs) (rnm <$> exports) renames
  where rnmTbl = M.fromList renames
        defsByUniq = mkIndex localUniq $ lefts $ snd <$> refs
        rename (fl,Left(l)) = (fl, Left(rnm l))
        rename x = x
        rnm def@(H.Local nm u nmSpc floc) = fromMaybe def $ do
          otherUniq ← M.lookup u rnmTbl
          realDef ← M.lookup otherUniq defsByUniq
          return realDef
-- | Every local binding referenced anywhere in the graph, converted to
-- 'Bind' (which records whether the binding is exported).
-- NOTE(review): the @exports@ binding is unused, and the helper trio
-- below is duplicated verbatim in 'sgReferences' — candidates for
-- consolidation.
allLocalBindings ∷ H.SymGraph → Set Bind
allLocalBindings gr = Set.fromList internal
  where exports = H.sgExports gr
        internal = mkBind <$> (lefts $ snd <$> H.sgReferences gr)
        isExported l = not $ L.null $ flip L.filter (H.sgExports gr) $ theSame l
        theSame (H.Local _ uniq1 _ _) (H.Local _ uniq2 _ _) = uniq1 ≡ uniq2
        mkBind ∷ H.LocalBinding → Bind
        mkBind l@(H.Local nm u nmSpc floc) =
          Bind nm nmSpc floc (isExported l) (T.pack u)

-- | The graph's references, with local bindings converted to 'Bind';
-- global bindings pass through unchanged.
sgReferences ∷ H.SymGraph → [(H.FileLoc,(Either Bind H.GlobalBinding))]
sgReferences gr = map f $ H.sgReferences gr
  where f (loc,Left l) = (loc,Left $ mkBind l)
        f (loc,Right r) = (loc,Right r)
        isExported l = not $ L.null $ flip L.filter (H.sgExports gr) $ theSame l
        theSame (H.Local _ uniq1 _ _) (H.Local _ uniq2 _ _) = uniq1 ≡ uniq2
        mkBind ∷ H.LocalBinding → Bind
        mkBind l@(H.Local nm u nmSpc floc) =
          Bind nm nmSpc floc (isExported l) (T.pack u)
-- | Project the source-path component of a path-DB entry.
srcPath ∷ (RepoPath, SrcPath, FileShape) → SrcPath
srcPath (_,s,_) = s

-- | Heuristically map a Haddock temp-file name back to a real source
-- file: look up modules whose temp name and @tmp@ contain one another,
-- keep DB entries whose source path matches any of those modules, and
-- pick the entry with the longest path (falling back to a "bogus" entry
-- when nothing matches).
fudgeTmpFile ∷ PathDB → String → (RepoPath, SrcPath, FileShape)
fudgeTmpFile (db,tmps) tmp = chooseFrom matches
  where modul = M.elems $ M.filterWithKey (\k v → looksLike tmp k) tmps
        looksLike a b = (a `L.isInfixOf` b) ∨ (b `L.isInfixOf` a)
        q m = Set.filter (srcPath ⋙ srcPathMatch m) db
        matches = Set.unions $ q <$> modul
        longest (_,Src(FP f),_) (_,Src(FP φ),_) = (compare `on` length) f φ
        fallback = (Repo(FP["bogus"]), Src(FP["Bogus"]), Shape [] IntMap.empty)
        chooseFrom s = if Set.null s then fallback else maximumBy longest s

-- | Repo-relative names of every source file in the path DB.
pdbSourceFileNames ∷ PathDB → [Text]
pdbSourceFileNames (srcFiles, _) = fnStr <$> Set.toList srcFiles
  where fnStr (repo, src, _) = srclibPath $ Loc.srcToRepo repo src

-- | Module path of the source file a temp file maps to.
tmpFileModule ∷ PathDB → String → ModulePath
tmpFileModule db tmp = fileToModulePath $ srcPath $ fudgeTmpFile db tmp

-- | Convert a Haddock line/column location in a temp file to a 'Span' in
-- the corresponding real source file ('bogusSpan' when out of range).
tmpFileSpan ∷ PathDB → H.FileLoc → Span
tmpFileSpan pdb@(db,tmps) (H.FileLoc fnS (l,c) (λ,ξ)) =
  fromMaybe bogusSpan $ mkSpan rp shape (LineCol l c) (LineCol λ ξ)
    where (r,s,shape) = fudgeTmpFile (db,tmps) fnS
          m = tmpFileModule pdb fnS
          rp = srcToRepo r s

-- | Source span of a local binding.
localSpan ∷ PathDB → Bind → Span
localSpan db (Bind _ _ loc e _) = tmpFileSpan db loc

-- | Last-resort module path for a binding whose file is not in the DB;
-- logs a warning via 'trace' and treats the file name as a module path.
bogusModule ∷ PathDB → H.LocalBinding → ModulePath
bogusModule (_,tmps) (H.Local fnS _ _ _) = trace warning $ parseModulePath $ T.pack fnS
  where warning = "SymGraph is inconsistent. This file does not occur: " <> fnS
                <> "\n" <> show tmps

-- | Module path of the file a local binding lives in.
localModule ∷ PathDB → Bind → ModulePath
localModule db@(_,tmps) loc@(Bind _ _ (H.FileLoc fnS _ _) _ _) =
  tmpFileModule db fnS

-- | Is the text a dotted version number, e.g. @1.2.3@?
isVersionNumber ∷ Text → Bool
isVersionNumber = T.unpack ⋙ (=~ ("^([0-9]+\\.)*[0-9]+$"::String))

-- | Strip a trailing @-<version>@ component from a package identifier
-- (e.g. @text-1.2.3@ → @text@); leaves other names untouched.
fudgePkgName ∷ Text → Text
fudgePkgName pkg =
  T.intercalate "-" $ reverse $ case reverse $ T.split (≡'-') pkg of
    [] → []
    full@(last:before) → if isVersionNumber last then before else full

-- | Build a srclib def path: global when there is no uniquing key,
-- local (with the key) otherwise.
makeSrclibPath ∷ H.NameSpc → Text → ModulePath → Text → Maybe Text → Src.Path
makeSrclibPath kind pkg mp nm uniq = case uniq of
  Nothing -> Src.PGlobal pkg mp nm (convertKind kind)
  Just u -> Src.PLocal pkg mp nm (convertKind kind) u

-- | Map Haddock namespaces onto srclib kinds.
convertKind ∷ H.NameSpc → Src.Kind
convertKind H.Value = Src.Value
convertKind H.Type = Src.Type

-- | Start position of a Haddock file location.
locStartLC ∷ H.FileLoc → LineCol
locStartLC (H.FileLoc _ (l,c) _) = LineCol l c

-- | Convert a local binding to a srclib 'Src.Def'.  Exported bindings get
-- a global path; non-exported ones carry their uniquing key.
convertDef ∷ Text → PathDB → Bind → Src.Def
convertDef pkg db l@(Bind nm nmSpc loc exported u) =
  Src.Def spath spath (T.pack nm) kind file start end exported False
  where (Span file start width) = localSpan db l
        end = start+width
        kind = convertKind nmSpc
        modul = localModule db l
        spath = makeSrclibPath nmSpc pkg modul (T.pack nm) pos
        pos = if exported then Nothing else Just u

-- | Def path of a binding defined in another package (version suffix
-- stripped from the package name).
globalPath ∷ H.GlobalBinding → Src.Path
globalPath glob@(H.Global nm nmSpc modul pkg) =
  makeSrclibPath nmSpc (fudgePkgName $ T.pack pkg) mp (T.pack nm) Nothing
  where mp = (parseModulePath $ T.pack modul)

-- NOTE(review): apparently unused within this chunk — confirm before
-- removing.
baseRepo = "github.com/bsummer4/packages-base"
-- | Total head: 'Nothing' on an empty list, @'Just' x@ otherwise.
-- This is exactly 'Data.Maybe.listToMaybe'; kept as a named alias for
-- readability at call sites.
safeHead :: [a] -> Maybe a
safeHead = May.listToMaybe
-- | Convert one symbol-graph reference to a srclib 'Src.Ref'.  A
-- reference counts as a definition site when it starts exactly where its
-- (local) binding starts.
convertRef ∷ (Text→Text) → Text → PathDB → (H.FileLoc,Either Bind H.GlobalBinding) → Src.Ref
convertRef lookupRepo pkg db (loc,bind) =
  Src.Ref repoURI "HaskellPackage" defUnit defPath isDef file start end
  where repoURI = lookupRepo defUnit
        defUnit = Src.pathPkg defPath
        startsAt (H.FileLoc _ s1 _) (H.FileLoc _ s2 _) = s1≡s2
        isDef = case bind of Right _ → False
                             Left (Bind _ _ bloc _ _) → loc `startsAt` bloc
        defPath = either (Src.defPath ⋘ convertDef pkg db) globalPath bind
        (Span file start width) = tmpFileSpan db loc
        end = start + width

-- TODO Drops duplicated refs and defs. This is a quick hack, but IT IS
-- WRONG.  Haskell allows multiple definitions of the same thing — e.g. a
-- type signature plus several pattern-match clauses are all definitions
-- from the grapher's perspective — so deduplicating by path/position
-- discards legitimate entries.
fudgeGraph ∷ Src.Graph → Src.Graph
fudgeGraph (Src.Graph defs refs) = Src.Graph (fudgeDefs defs) (fudgeRefs refs)
  where fudgeBy ∷ Ord b ⇒ (a → b) → [a] → [a]
        fudgeBy f = M.elems . M.fromList . map (\x→(f x,x))
        fudgeDefs ∷ [Src.Def] → [Src.Def]
        fudgeDefs = fudgeBy Src.defPath
        fudgeRefs ∷ [Src.Ref] → [Src.Ref]
        fudgeRefs = fudgeBy $ Src.refStart &&& Src.refEnd

-- | Debug formatter for a positioned name.
-- NOTE(review): the opening "(" is never closed in the output.
pos ∷ (Int,Int,Text) → Text
pos (x,y,nm) = tshow x <> ":" <> tshow y <> " (" <> nm

-- | Human-readable dump of a graph's refs and defs (debugging aid).
summary ∷ Src.Graph → Text
summary (Src.Graph dfs rfs) =
  unlines("[refs]" : (pos<$>refs)) <> unlines("[defs]" : (pos<$>defs))
  where defs = (\x→(Src.defDefStart x, Src.defDefEnd x, tshow $ Src.defPath x)) <$> dfs
        refs = (\x→(Src.refStart x, Src.refEnd x, tshow $ Src.refDefPath x)) <$> rfs

-- | Convert a whole symbol graph (after rename resolution) to a srclib
-- graph.  NOTE(review): 'traceShowId'/'traceShowId' here emit debug noise
-- to stderr on every call — consider removing once stable.
convertGraph ∷ (Text→Text) → Text → PathDB → H.SymGraph → Src.Graph
convertGraph lookupRepo pkgWithJunk db agr = fudgeGraph $ Src.Graph defs refs
  where pkg = traceShowId $ fudgePkgName $ traceShowId pkgWithJunk
        defs = convertDef pkg db <$> Set.toList(allLocalBindings gr)
        refs = convertRef lookupRepo pkg db <$> sgReferences gr
        gr = allTheRenames agr
-- | Parse a symbol-graph file: one 'Read'-serialized record per line.
-- The partial @Prelude.read@ is replaced by ClassyPrelude's 'readMay', so
-- a corrupt line now fails with a message naming the offending line rather
-- than an anonymous @Prelude.read: no parse@.  Valid input parses exactly
-- as before.  (Read\/Show remains a fragile on-disk format — TODO replace
-- with a real serialization.)
readSymGraphFile :: String -> H.SymGraphFile
readSymGraphFile = map parseLine . lines
  where parseLine l = case readMay l of
          Just row -> row
          Nothing  -> error ("readSymGraphFile: unparseable line: " <> l)

-- | Read and parse the symbol-graph file produced by the Haddock pass.
loadSymGraphFile :: Path.FilePath -> IO H.SymGraphFile
loadSymGraphFile path = readSymGraphFile <$> readFile path
-- | Map each temp-file name in the symbol-graph file to its module path.
tmpFiles ∷ H.SymGraphFile → Map String ModulePath
tmpFiles = M.fromList . map (\(f,m,_,_)→(f,parseModulePath$T.pack m))

-- NOTE(review): orphan compatibility instance so Shelly's 'FilePath'
-- works where a 'Semigroup' is expected; relies on the existing 'Monoid'.
instance Semigroup Shelly.FilePath where
  a <> b = a `mappend` b

type PkgName = Text
type RepoURI = Text

-- | Resolve every cabal dependency against the local Hackage snapshot and
-- map each dependency's source-unit name to its repository clone URL.
repoMap ∷ C.CabalInfo → IO (Map PkgName RepoURI)
repoMap info = do
  hack ← readHackage
  ds ← mapM(C.resolve hack ⋙ return) $ M.toList $ C.cabalDependencies info
  return $ M.fromList $ (\d → (Src.depToUnit d, Src.depToRepoCloneURL d)) <$> ds

-- | Resolve a module path to its (repo, package) pair.
type ModuleLookup = ModulePath → (Src.URI,Src.Pkg)
-- | Convert a (file, line, column) position to a byte offset.
type SrcLocLookup = (String,Int,Int) → Int

-- | Build a refs-only graph from per-file module-import references: each
-- import of a module becomes a ref to that module's def in whichever
-- package owns it.
convertModuleGraph ∷ ModuleLookup → SrcLocLookup → [(Text,[Imp.ModuleRef])] → Src.Graph
convertModuleGraph toRepoAndPkg toOffset refMap =
  Src.Graph [] $ concat $ fmap cvt . snd <$> refMap
  where repoFile = Repo . fromMaybe mempty . Loc.parseRelativePath . T.pack
        cvt ∷ Imp.ModuleRef → Src.Ref
        cvt (fn,(sl,sc),(el,ec),mp) =
          let (repo,pkg) = toRepoAndPkg mp
          in Src.Ref
               { Src.refDefRepo = repo
               , Src.refDefUnitType = "HaskellPackage"
               , Src.refDefUnit = pkg
               , Src.refDefPath = Src.PModule pkg mp
               , Src.refIsDef = False
               , Src.refFile = repoFile fn
               , Src.refStart = toOffset (fn,sl,sc)
               , Src.refEnd = toOffset (fn,el,ec)
               }

-- | Second component of a triple.
_3_2 ∷ (a,b,c) → b
_3_2 (_,b,_) = b

-- | (repo path, module path) for every source file in the path DB.
ourModules ∷ PathDB → [(RepoPath,ModulePath)]
ourModules = fmap f . Set.toList . fst
  where f (a,b,c) = (srcToRepo a b, fileToModulePath b)

-- | Display name of a module; an empty path means the Main module.
moduleName (MP[]) = "Main"
moduleName (MP (m:_)) = m

-- | A defs-only graph containing one module-level def per source file
-- (spans are zeroed; modules are always exported).
moduleDefs ∷ Src.Pkg → [(RepoPath,ModulePath)] → Src.Graph
moduleDefs pkg = flip Src.Graph [] . fmap cvt
  where cvt (filename,mp) = Src.Def
          { Src.defPath = Src.PModule pkg mp
          , Src.defTreePath = Src.PModule pkg mp
          , Src.defName = moduleName mp
          , Src.defKind = Src.Module
          , Src.defFile = filename
          , Src.defDefStart = 0
          , Src.defDefEnd = 0
          , Src.defExported = True
          , Src.defTest = False
          }
-- We generate a lot of temporary directories:
--  - We copy the root directory of a source unit to keep cabal from
--    writing data to the source directory.
--  - We use a new cabal sandbox per build.
--  - We tell cabal to use a separate build directory.  (This is probably
--    not necessary.)
--  - The graphing process generates a `symbol-graph` file.

-- | Build the source unit with cabal, run the patched Haddock to dump a
-- @symbol-graph@ file, and combine it with module-import references and
-- per-module defs into one 'Src.Graph'.  Returns the graph together with
-- a cleanup action that deletes the temp directories; because the graph
-- is produced lazily, the caller must run cleanup only after consuming
-- the graph.
graph ∷ C.CabalInfo → IO (Src.Graph, IO ())
graph info = do
  pid ← toInteger <$> getProcessID
  repos ← repoMap info
  modules ∷ Map Loc.ModulePath Src.Pkg ← C.moduleMap info
  let lookupRepo ∷ PkgName → RepoURI
      lookupRepo = fromMaybe "" . flip M.lookup repos
      mkParam k v = "--" <> k <> "=" <> v <> ""
      -- Temp paths are namespaced by our process id.
      mkTmp ∷ Text → Text
      mkTmp n = "/tmp/srclib-haskell-" <> n <> "." <> fromString(show pid)
      symbolGraph = mkTmp "symbol-graph"
      sandbox = mkTmp "sandbox"
      buildDir = mkTmp "build-directory"
      workDir = mkTmp "work-directory"
      subDir = srclibPath $ C.cabalPkgDir info
      workSubDir = workDir <> "/" <> subDir
      cleanup = shelly $ do
        let tmps = [symbolGraph, sandbox, buildDir, workDir]
            tmpFilePaths ∷ [Path.FilePath]
            tmpFilePaths = fromText <$> tmps
        mapM_ rm_rf tmpFilePaths
  let toStderr = log_stdout_with $ T.unpack ⋙ hPutStrLn stderr
  let cabal_ = run_ "cabal"
  -- Copy the tree into the work dir, then configure/build and run the
  -- Haddock pass with -G pointed at our symbol-graph file.  autoreconf
  -- and dependency installation are best-effort (errExit False).
  shelly $ toStderr $ do
    mkdir_p (fromText workDir)
    let wd = T.unpack workDir
        tarcmd = T.pack $ printf "(tar c *) | (cd '%s'; tar x)" wd
    run_ "/bin/sh" ["-c", tarcmd]
    cd (fromText workDir)
    errExit False $ run_ "autoreconf" []
    cd (fromText workSubDir)
    errExit False $ run_ "autoreconf" []
    cabal_ ["sandbox", "init", mkParam "sandbox" sandbox]
    errExit False $
      cabal_ [ "install", "--only-dependencies"
             , "-j4"
             , "--disable-optimization"
             , "--force-reinstalls"
             ]
    cabal_ ["configure", mkParam "builddir" buildDir]
    cabal_ [ "haddock", "--executables", "--internal"
           , mkParam "haddock-options" ("-G" <> symbolGraph)
           , mkParam "builddir" buildDir
           ]
  -- NOTE(review): badLoad is never used — confirm before removing.
  let badLoad = error $ T.unpack $ "Unable to load file: " <> symbolGraph
  graphs ← loadSymGraphFile $ Path.decodeString $ T.unpack symbolGraph
  let packageName = C.cabalPkgName info
  pdb ← mkDB info graphs
  let _4 (_,_,_,x) = x
  let completeSymGraph = mconcat $ _4 <$> graphs
  let haddockResults = fudgeGraph $ convertGraph lookupRepo packageName pdb completeSymGraph
  -- Per-file module-import references, from the real source files.
  modRefs ← forM (pdbSourceFileNames pdb) $ \fn → do
    source ← readFile $ fpFromText fn
    return (fn, Imp.moduleRefs (T.unpack fn) source)
  let _3 (a,b,c) = c
      toOffsets ∷ (String,Int,Int) → Int
      toOffsets (fn,l,c) = fromMaybe 0 $ join $ flip Loc.lineColOffset (Loc.LineCol l c) <$> shape
        where frm = Repo $ fromMaybe mempty $ Loc.parseRelativePath $ T.pack fn
              shapes = Set.toList $ fst pdb
              shape = _3 <$> L.find (\(rp,sp,_) → frm ≡ Loc.srcToRepo rp sp) shapes
      toRepoAndPkg ∷ ModulePath → (Src.URI,Src.Pkg)
      toRepoAndPkg mp = trace "toRepoAndPkg" $ traceShow mp $ traceShowId (repo,pkg)
        where repo = fromMaybe "" $ M.lookup pkg repos
              pkg = fromMaybe packageName $ M.lookup mp modules
  let moduleGraph = convertModuleGraph toRepoAndPkg toOffsets modRefs
  let results = moduleGraph ++ haddockResults ++ moduleDefs packageName (ourModules pdb)
  -- We can't cleanup here, since we're using lazy IO. Processing the graph file
  -- hasn't (necessarily) happened yet.
  return (results,cleanup)
-- | Is @parent@ a prefix directory of @child@?  (FP stores components in
-- reverse order — presumably innermost first, hence 'isSuffixOf';
-- confirm against Locations.)
isParent ∷ RepoPath → RepoPath → Bool
isParent (Repo(FP parent)) (Repo(FP child)) = parent `isSuffixOf` child

-- | Strip @parent@ from the front of @child@, yielding a source-relative
-- path.  Partial: callers must check 'isParent' first.
-- TODO avoid the fromJust by pattern matching on the result of
-- stripPrefix instead of having separate isParent and stripIt.
stripIt ∷ RepoPath → RepoPath → SrcPath
stripIt (Repo(FP parent)) (Repo(FP child)) =
  Src $ FP $ reverse $ May.fromJust $ stripPrefix (reverse parent) (reverse child)

-- | For every (source dir, source file) pair where the dir contains the
-- file: the dir, the file's dir-relative path, and a deferred action that
-- computes the file's shape.
fe ∷ C.CabalInfo → [(RepoPath, SrcPath, IO FileShape)]
fe info = do
  srcDir ← Set.toList $ C.cabalSrcDirs info
  repoPath ← Set.toList $ C.cabalSrcFiles info
  let srcPath = stripIt srcDir repoPath
  guard $ isParent srcDir repoPath
  return (srcDir, srcPath, fileShape(srclibPath repoPath))

-- | Build the path DB: run every deferred file-shape action and pair the
-- result set with the temp-file/module map from the symbol-graph file.
mkDB ∷ C.CabalInfo → H.SymGraphFile → IO PathDB
mkDB info graphFile = do
  let ugg (srcDir, srcPath, action) = do result ← action
                                         return (srcDir, srcPath, result)
  cols ← Set.fromList <$> mapM ugg (fe info)
  return (cols,tmpFiles graphFile)
| null | https://raw.githubusercontent.com/sourcegraph/srclib-haskell/b138c73a7d06680300dce4a39d2b98974064b29a/src/Haddock.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE ScopedTypeVariables #
The issue is that haskell allows multiple definitions for the same
handle different pattern matches are all definitions from the perspective
of the grapher.
TODO Using `read` directly is not safe.
We generate a lot of temporary directories:
- We copy the root directory of a source unit to keep cabal from
writting data to the source directory.
- We use a new cabal sandbox per build.
probably not necessary).
- The graphing process generates a `symbol-graph` file.
hasn't (necessarily) happened yet.
TODO I can avoid the fromJust by pattern matching on the result of | # LANGUAGE LambdaCase #
# LANGUAGE UnicodeSyntax #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - orphans #
# OPTIONS_GHC -fwarn - unused - imports #
module Haddock where
import ClassyPrelude hiding ((</>), (<.>), maximumBy)
import qualified Prelude
import Prelude.Unicode
import Control.Category.Unicode
import qualified Imports as Imp
import qualified Data.IntMap as IntMap
import qualified Data.Maybe as May
import qualified Data.Map as M
import qualified Data.Set as Set
import qualified Data.Text as T
import qualified Data.List as L
import Data.Foldable (maximumBy)
import Text.Printf (printf)
import qualified Documentation.Haddock as H
import Shelly hiding (FilePath, path, (</>), (<.>), canonicalize, trace)
import qualified Shelly
import qualified Filesystem.Path.CurrentOS as Path
import System.Posix.Process (getProcessID)
import Text.Regex.TDFA
import qualified Cabal as C
import Locations as Loc
import qualified Srclib as Src
import Distribution.Hackage.DB (Hackage, readHackage)
data Def = Def ModulePath Text Src.Kind Span
data Bind = Bind String H.NameSpc H.FileLoc Bool Text
deriving (Show,Ord,Eq)
localUniq (H.Local _ u _ _) = u
mkIndex ∷ Ord k ⇒ (a → k) → [a] → Map k a
mkIndex f = map (\r→(f r,r)) ⋙ M.fromList
composeN ∷ Int → (a → a) → (a → a)
composeN 0 _ = id
composeN 1 f = f
composeN n f = composeN (n-1) (f.f)
allTheRenames ∷ H.SymGraph → H.SymGraph
allTheRenames = composeN 10 renames
instance Monoid H.SymGraph where
mempty = H.SymGraph [] [] []
mappend (H.SymGraph a b c) (H.SymGraph d e f) = H.SymGraph (a++d) (b++e) (c++f)
renames ∷ H.SymGraph → H.SymGraph
renames (H.SymGraph refs exports renames) =
H.SymGraph (rename <$> refs) (rnm <$> exports) renames
where rnmTbl = M.fromList renames
defsByUniq = mkIndex localUniq $ lefts $ snd <$> refs
rename (fl,Left(l)) = (fl, Left(rnm l))
rename x = x
rnm def@(H.Local nm u nmSpc floc) = fromMaybe def $ do
otherUniq ← M.lookup u rnmTbl
realDef ← M.lookup otherUniq defsByUniq
return realDef
allLocalBindings ∷ H.SymGraph → Set Bind
allLocalBindings gr = Set.fromList internal
where exports = H.sgExports gr
internal = mkBind <$> (lefts $ snd <$> H.sgReferences gr)
isExported l = not $ L.null $ flip L.filter (H.sgExports gr) $ theSame l
theSame (H.Local _ uniq1 _ _) (H.Local _ uniq2 _ _) = uniq1 ≡ uniq2
mkBind ∷ H.LocalBinding → Bind
mkBind l@(H.Local nm u nmSpc floc) =
Bind nm nmSpc floc (isExported l) (T.pack u)
sgReferences ∷ H.SymGraph → [(H.FileLoc,(Either Bind H.GlobalBinding))]
sgReferences gr = map f $ H.sgReferences gr
where f (loc,Left l) = (loc,Left $ mkBind l)
f (loc,Right r) = (loc,Right r)
isExported l = not $ L.null $ flip L.filter (H.sgExports gr) $ theSame l
theSame (H.Local _ uniq1 _ _) (H.Local _ uniq2 _ _) = uniq1 ≡ uniq2
mkBind ∷ H.LocalBinding → Bind
mkBind l@(H.Local nm u nmSpc floc) =
Bind nm nmSpc floc (isExported l) (T.pack u)
srcPath ∷ (RepoPath, SrcPath, FileShape) → SrcPath
srcPath (_,s,_) = s
fudgeTmpFile ∷ PathDB → String → (RepoPath, SrcPath, FileShape)
fudgeTmpFile (db,tmps) tmp = chooseFrom matches
where modul = M.elems $ M.filterWithKey (\k v → looksLike tmp k) tmps
looksLike a b = (a `L.isInfixOf` b) ∨ (b `L.isInfixOf` a)
q m = Set.filter (srcPath ⋙ srcPathMatch m) db
matches = Set.unions $ q <$> modul
longest (_,Src(FP f),_) (_,Src(FP φ),_) = (compare `on` length) f φ
fallback = (Repo(FP["bogus"]), Src(FP["Bogus"]), Shape [] IntMap.empty)
chooseFrom s = if Set.null s then fallback else maximumBy longest s
pdbSourceFileNames ∷ PathDB → [Text]
pdbSourceFileNames (srcFiles, _) = fnStr <$> Set.toList srcFiles
where fnStr (repo, src, _) = srclibPath $ Loc.srcToRepo repo src
tmpFileModule ∷ PathDB → String → ModulePath
tmpFileModule db tmp = fileToModulePath $ srcPath $ fudgeTmpFile db tmp
tmpFileSpan ∷ PathDB → H.FileLoc → Span
tmpFileSpan pdb@(db,tmps) (H.FileLoc fnS (l,c) (λ,ξ)) =
fromMaybe bogusSpan $ mkSpan rp shape (LineCol l c) (LineCol λ ξ)
where (r,s,shape) = fudgeTmpFile (db,tmps) fnS
m = tmpFileModule pdb fnS
rp = srcToRepo r s
localSpan ∷ PathDB → Bind → Span
localSpan db (Bind _ _ loc e _) = tmpFileSpan db loc
bogusModule ∷ PathDB → H.LocalBinding → ModulePath
bogusModule (_,tmps) (H.Local fnS _ _ _) = trace warning $ parseModulePath $ T.pack fnS
where warning = "SymGraph is inconsistent. This file does not occur: " <> fnS
<> "\n" <> show tmps
localModule ∷ PathDB → Bind → ModulePath
localModule db@(_,tmps) loc@(Bind _ _ (H.FileLoc fnS _ _) _ _) =
tmpFileModule db fnS
isVersionNumber ∷ Text → Bool
isVersionNumber = T.unpack ⋙ (=~ ("^([0-9]+\\.)*[0-9]+$"::String))
fudgePkgName ∷ Text → Text
fudgePkgName pkg =
T.intercalate "-" $ reverse $ case reverse $ T.split (≡'-') pkg of
[] → []
full@(last:before) → if isVersionNumber last then before else full
makeSrclibPath ∷ H.NameSpc → Text → ModulePath → Text → Maybe Text → Src.Path
makeSrclibPath kind pkg mp nm uniq = case uniq of
Nothing -> Src.PGlobal pkg mp nm (convertKind kind)
Just u -> Src.PLocal pkg mp nm (convertKind kind) u
convertKind ∷ H.NameSpc → Src.Kind
convertKind H.Value = Src.Value
convertKind H.Type = Src.Type
locStartLC ∷ H.FileLoc → LineCol
locStartLC (H.FileLoc _ (l,c) _) = LineCol l c
convertDef ∷ Text → PathDB → Bind → Src.Def
convertDef pkg db l@(Bind nm nmSpc loc exported u) =
Src.Def spath spath (T.pack nm) kind file start end exported False
where (Span file start width) = localSpan db l
end = start+width
kind = convertKind nmSpc
modul = localModule db l
spath = makeSrclibPath nmSpc pkg modul (T.pack nm) pos
pos = if exported then Nothing else Just u
globalPath ∷ H.GlobalBinding → Src.Path
globalPath glob@(H.Global nm nmSpc modul pkg) =
makeSrclibPath nmSpc (fudgePkgName $ T.pack pkg) mp (T.pack nm) Nothing
where mp = (parseModulePath $ T.pack modul)
baseRepo = "github.com/bsummer4/packages-base"
safeHead ∷ [a] → Maybe a
safeHead [] = Nothing
safeHead (a:_) = Just a
convertRef ∷ (Text→Text) → Text → PathDB → (H.FileLoc,Either Bind H.GlobalBinding) → Src.Ref
convertRef lookupRepo pkg db (loc,bind) =
Src.Ref repoURI "HaskellPackage" defUnit defPath isDef file start end
where repoURI = lookupRepo defUnit
defUnit = Src.pathPkg defPath
startsAt (H.FileLoc _ s1 _) (H.FileLoc _ s2 _) = s1≡s2
isDef = case bind of Right _ → False
Left (Bind _ _ bloc _ _) → loc `startsAt` bloc
defPath = either (Src.defPath ⋘ convertDef pkg db) globalPath bind
(Span file start width) = tmpFileSpan db loc
end = start + width
TODO Drops duplicated refs and defs . This is a quick hack , but IT IS WRONG .
things . For example , a type declaration and two defintions that
fudgeGraph ∷ Src.Graph → Src.Graph
fudgeGraph (Src.Graph defs refs) = Src.Graph (fudgeDefs defs) (fudgeRefs refs)
where fudgeBy ∷ Ord b ⇒ (a → b) → [a] → [a]
fudgeBy f = M.elems . M.fromList . map (\x→(f x,x))
fudgeDefs ∷ [Src.Def] → [Src.Def]
fudgeDefs = fudgeBy Src.defPath
fudgeRefs ∷ [Src.Ref] → [Src.Ref]
fudgeRefs = fudgeBy $ Src.refStart &&& Src.refEnd
pos ∷ (Int,Int,Text) → Text
pos (x,y,nm) = tshow x <> ":" <> tshow y <> " (" <> nm
summary ∷ Src.Graph → Text
summary (Src.Graph dfs rfs) =
unlines("[refs]" : (pos<$>refs)) <> unlines("[defs]" : (pos<$>defs))
where defs = (\x→(Src.defDefStart x, Src.defDefEnd x, tshow $ Src.defPath x)) <$> dfs
refs = (\x→(Src.refStart x, Src.refEnd x, tshow $ Src.refDefPath x)) <$> rfs
convertGraph ∷ (Text→Text) → Text → PathDB → H.SymGraph → Src.Graph
convertGraph lookupRepo pkgWithJunk db agr = fudgeGraph $ Src.Graph defs refs
where pkg = traceShowId $ fudgePkgName $ traceShowId pkgWithJunk
defs = convertDef pkg db <$> Set.toList(allLocalBindings gr)
refs = convertRef lookupRepo pkg db <$> sgReferences gr
gr = allTheRenames agr
TODO Read / Show is a terrible approach to serializing to disk !
readSymGraphFile ∷ String → H.SymGraphFile
readSymGraphFile = fmap Prelude.read . lines
loadSymGraphFile ∷ Path.FilePath → IO H.SymGraphFile
loadSymGraphFile = readFile >=> (readSymGraphFile ⋙ return)
tmpFiles ∷ H.SymGraphFile → Map String ModulePath
tmpFiles = M.fromList . map (\(f,m,_,_)→(f,parseModulePath$T.pack m))
instance Semigroup Shelly.FilePath where
a <> b = a `mappend` b
type PkgName = Text
type RepoURI = Text
repoMap ∷ C.CabalInfo → IO (Map PkgName RepoURI)
repoMap info = do
hack ← readHackage
ds ← mapM(C.resolve hack ⋙ return) $ M.toList $ C.cabalDependencies info
return $ M.fromList $ (\d → (Src.depToUnit d, Src.depToRepoCloneURL d)) <$> ds
type ModuleLookup = ModulePath → (Src.URI,Src.Pkg)
type SrcLocLookup = (String,Int,Int) → Int
convertModuleGraph ∷ ModuleLookup → SrcLocLookup → [(Text,[Imp.ModuleRef])] → Src.Graph
convertModuleGraph toRepoAndPkg toOffset refMap =
Src.Graph [] $ concat $ fmap cvt . snd <$> refMap
where repoFile = Repo . fromMaybe mempty . Loc.parseRelativePath . T.pack
cvt ∷ Imp.ModuleRef → Src.Ref
cvt (fn,(sl,sc),(el,ec),mp) =
let (repo,pkg) = toRepoAndPkg mp
in Src.Ref
{ Src.refDefRepo = repo
, Src.refDefUnitType = "HaskellPackage"
, Src.refDefUnit = pkg
, Src.refDefPath = Src.PModule pkg mp
, Src.refIsDef = False
, Src.refFile = repoFile fn
, Src.refStart = toOffset (fn,sl,sc)
, Src.refEnd = toOffset (fn,el,ec)
}
_3_2 ∷ (a,b,c) → b
_3_2 (_,b,_) = b
ourModules ∷ PathDB → [(RepoPath,ModulePath)]
ourModules = fmap f . Set.toList . fst
where f (a,b,c) = (srcToRepo a b, fileToModulePath b)
moduleName (MP[]) = "Main"
moduleName (MP (m:_)) = m
moduleDefs ∷ Src.Pkg → [(RepoPath,ModulePath)] → Src.Graph
moduleDefs pkg = flip Src.Graph [] . fmap cvt
where cvt (filename,mp) = Src.Def
{ Src.defPath = Src.PModule pkg mp
, Src.defTreePath = Src.PModule pkg mp
, Src.defName = moduleName mp
, Src.defKind = Src.Module
, Src.defFile = filename
, Src.defDefStart = 0
, Src.defDefEnd = 0
, Src.defExported = True
, Src.defTest = False
}
- We use tell to use a separate build directory . ( This is
graph ∷ C.CabalInfo → IO (Src.Graph, IO ())
graph info = do
pid ← toInteger <$> getProcessID
repos ← repoMap info
modules ∷ Map Loc.ModulePath Src.Pkg ← C.moduleMap info
let lookupRepo ∷ PkgName → RepoURI
lookupRepo = fromMaybe "" . flip M.lookup repos
mkParam k v = "--" <> k <> "=" <> v <> ""
mkTmp ∷ Text → Text
mkTmp n = "/tmp/srclib-haskell-" <> n <> "." <> fromString(show pid)
symbolGraph = mkTmp "symbol-graph"
sandbox = mkTmp "sandbox"
buildDir = mkTmp "build-directory"
workDir = mkTmp "work-directory"
subDir = srclibPath $ C.cabalPkgDir info
workSubDir = workDir <> "/" <> subDir
cleanup = shelly $ do
let tmps = [symbolGraph, sandbox, buildDir, workDir]
tmpFilePaths ∷ [Path.FilePath]
tmpFilePaths = fromText <$> tmps
mapM_ rm_rf tmpFilePaths
let toStderr = log_stdout_with $ T.unpack ⋙ hPutStrLn stderr
let cabal_ = run_ "cabal"
shelly $ toStderr $ do
mkdir_p (fromText workDir)
let wd = T.unpack workDir
tarcmd = T.pack $ printf "(tar c *) | (cd '%s'; tar x)" wd
run_ "/bin/sh" ["-c", tarcmd]
cd (fromText workDir)
errExit False $ run_ "autoreconf" []
cd (fromText workSubDir)
errExit False $ run_ "autoreconf" []
cabal_ ["sandbox", "init", mkParam "sandbox" sandbox]
errExit False $
cabal_ [ "install", "--only-dependencies"
, "-j4"
, "--disable-optimization"
, "--force-reinstalls"
]
cabal_ ["configure", mkParam "builddir" buildDir]
cabal_ [ "haddock", "--executables", "--internal"
, mkParam "haddock-options" ("-G" <> symbolGraph)
, mkParam "builddir" buildDir
]
let badLoad = error $ T.unpack $ "Unable to load file: " <> symbolGraph
graphs ← loadSymGraphFile $ Path.decodeString $ T.unpack symbolGraph
let packageName = C.cabalPkgName info
pdb ← mkDB info graphs
let _4 (_,_,_,x) = x
let completeSymGraph = mconcat $ _4 <$> graphs
let haddockResults = fudgeGraph $ convertGraph lookupRepo packageName pdb completeSymGraph
modRefs ← forM (pdbSourceFileNames pdb) $ \fn → do
source ← readFile $ fpFromText fn
return (fn, Imp.moduleRefs (T.unpack fn) source)
let _3 (a,b,c) = c
toOffsets ∷ (String,Int,Int) → Int
toOffsets (fn,l,c) = fromMaybe 0 $ join $ flip Loc.lineColOffset (Loc.LineCol l c) <$> shape
where frm = Repo $ fromMaybe mempty $ Loc.parseRelativePath $ T.pack fn
shapes = Set.toList $ fst pdb
shape = _3 <$> L.find (\(rp,sp,_) → frm ≡ Loc.srcToRepo rp sp) shapes
toRepoAndPkg ∷ ModulePath → (Src.URI,Src.Pkg)
toRepoAndPkg mp = trace "toRepoAndPkg" $ traceShow mp $ traceShowId (repo,pkg)
where repo = fromMaybe "" $ M.lookup pkg repos
pkg = fromMaybe packageName $ M.lookup mp modules
let moduleGraph = convertModuleGraph toRepoAndPkg toOffsets modRefs
let results = moduleGraph ++ haddockResults ++ moduleDefs packageName (ourModules pdb)
We ca n't cleanup here , since we 're using lazy IO . Processing the graph file
return (results,cleanup)
isParent ∷ RepoPath → RepoPath → Bool
isParent (Repo(FP parent)) (Repo(FP child)) = parent `isSuffixOf` child
instead of having a separate isParent and stripIt .
stripIt ∷ RepoPath → RepoPath → SrcPath
stripIt (Repo(FP parent)) (Repo(FP child)) =
Src $ FP $ reverse $ May.fromJust $ stripPrefix (reverse parent) (reverse child)
fe ∷ C.CabalInfo → [(RepoPath, SrcPath, IO FileShape)]
fe info = do
srcDir ← Set.toList $ C.cabalSrcDirs info
repoPath ← Set.toList $ C.cabalSrcFiles info
let srcPath = stripIt srcDir repoPath
guard $ isParent srcDir repoPath
return (srcDir, srcPath, fileShape(srclibPath repoPath))
mkDB ∷ C.CabalInfo → H.SymGraphFile → IO PathDB
mkDB info graphFile = do
let ugg (srcDir, srcPath, action) = do result ← action
return (srcDir, srcPath, result)
cols ← Set.fromList <$> mapM ugg (fe info)
return (cols,tmpFiles graphFile)
|
21e7b61ea985f263c7b59bf8d40fb0ab85598e21b54790143b461ad19079c0f6 | facebook/infer | Procedures.ml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module F = Format
module L = Logging
let get_all ~filter () =
let query_str = "SELECT proc_attributes FROM procedures" in
let adb = Database.get_database AnalysisDatabase in
let cdb = Database.get_database CaptureDatabase in
let adb_stmt = Sqlite3.prepare adb query_str in
let cdb_stmt = Sqlite3.prepare cdb query_str in
let run_query_fold db log stmt init =
SqliteUtils.result_fold_rows db ~log stmt ~init ~f:(fun rev_results stmt ->
let attrs = Sqlite3.column stmt 0 |> ProcAttributes.SQLite.deserialize in
let source_file = attrs.ProcAttributes.translation_unit in
let proc_name = ProcAttributes.get_proc_name attrs in
if filter source_file proc_name then proc_name :: rev_results else rev_results )
in
run_query_fold cdb "reading all procedure names capturedb" cdb_stmt []
|> run_query_fold adb "reading all procedure names analysisdb" adb_stmt
let select_proc_names_interactive ~filter =
let proc_names = get_all ~filter () |> List.rev in
let proc_names_len = List.length proc_names in
match (proc_names, Config.select) with
| [], _ ->
F.eprintf "No procedures found" ;
None
| _, Some (`Select n) when n >= proc_names_len ->
L.die UserError "Cannot select result #%d out of only %d procedures" n proc_names_len
| [proc_name], _ ->
F.eprintf "Selected proc name: %a@." Procname.pp proc_name ;
Some proc_names
| _, Some `All ->
Some proc_names
| _, Some (`Select n) ->
let proc_names_array = List.to_array proc_names in
Some [proc_names_array.(n)]
| _, None ->
let proc_names_array = List.to_array proc_names in
Array.iteri proc_names_array ~f:(fun i proc_name ->
F.eprintf "%d: %a@\n" i Procname.pp proc_name ) ;
let rec ask_user_input () =
F.eprintf "Select one number (type 'a' for selecting all, 'q' for quit): " ;
Out_channel.flush stderr ;
let input = String.strip In_channel.(input_line_exn stdin) in
if String.equal (String.lowercase input) "a" then Some proc_names
else if String.equal (String.lowercase input) "q" then (
F.eprintf "Quit interactive mode" ;
None )
else
match int_of_string_opt input with
| Some n when 0 <= n && n < Array.length proc_names_array ->
Some [proc_names_array.(n)]
| _ ->
F.eprintf "Invalid input" ;
ask_user_input ()
in
ask_user_input ()
let pp_all ~filter ~proc_name:proc_name_cond ~defined ~source_file:source_file_cond ~proc_attributes
~proc_cfg fmt () =
let db = Database.get_database CaptureDatabase in
let deserialize_bool_int = function
| Sqlite3.Data.INT int64 -> (
match Int64.to_int_exn int64 with 0 -> false | _ -> true )
| _ ->
L.die InternalError "deserialize_int"
in
let pp_if ?(new_line = false) condition title pp fmt x =
if condition then (
if new_line then F.fprintf fmt "@[<v2>" else F.fprintf fmt "@[<h>" ;
F.fprintf fmt "%s:@ %a@]@;" title pp x )
in
let pp_column_if stmt ?new_line condition title deserialize pp fmt column =
if condition then
(* repeat the [condition] check so that we do not deserialize if there's nothing to do *)
pp_if ?new_line condition title pp fmt (Sqlite3.column stmt column |> deserialize)
in
let pp_row stmt fmt source_file proc_name =
let[@warning "-partial-match"] (Sqlite3.Data.TEXT proc_uid) = Sqlite3.column stmt 0 in
let dump_cfg fmt cfg_opt =
match cfg_opt with
| None ->
F.pp_print_string fmt "not found"
| Some cfg ->
let path = DotCfg.emit_proc_desc source_file cfg in
F.fprintf fmt "'%s'" path
in
F.fprintf fmt "@[<v2>%s@,%a%a%a%a%a@]@\n" proc_uid
(pp_if source_file_cond "source_file" SourceFile.pp)
source_file
(pp_if proc_name_cond "proc_name" Procname.pp)
proc_name
(pp_column_if stmt defined "defined" deserialize_bool_int Bool.pp)
1
(pp_column_if stmt ~new_line:true proc_attributes "attributes"
ProcAttributes.SQLite.deserialize ProcAttributes.pp )
2
(pp_column_if stmt ~new_line:false proc_cfg "control-flow graph" Procdesc.SQLite.deserialize
dump_cfg )
3
in
(* we could also register this statement but it's typically used only once per run so just prepare
it inside the function *)
Sqlite3.prepare db
{|
SELECT
proc_uid,
cfg IS NOT NULL,
proc_attributes,
cfg
FROM procedures ORDER BY proc_uid
|}
|> Container.iter ~fold:(SqliteUtils.result_fold_rows db ~log:"print all procedures")
~f:(fun stmt ->
let attrs = Sqlite3.column stmt 2 |> ProcAttributes.SQLite.deserialize in
let proc_name = ProcAttributes.get_proc_name attrs in
let source_file = attrs.ProcAttributes.translation_unit in
if filter source_file proc_name then pp_row stmt fmt source_file proc_name )
| null | https://raw.githubusercontent.com/facebook/infer/4ee1c2a8e783ee770b61547a38ff1715d6ed75fe/infer/src/backend/Procedures.ml | ocaml | repeat the [condition] check so that we do not deserialize if there's nothing to do
we could also register this statement but it's typically used only once per run so just prepare
it inside the function |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
open! IStd
module F = Format
module L = Logging
let get_all ~filter () =
let query_str = "SELECT proc_attributes FROM procedures" in
let adb = Database.get_database AnalysisDatabase in
let cdb = Database.get_database CaptureDatabase in
let adb_stmt = Sqlite3.prepare adb query_str in
let cdb_stmt = Sqlite3.prepare cdb query_str in
let run_query_fold db log stmt init =
SqliteUtils.result_fold_rows db ~log stmt ~init ~f:(fun rev_results stmt ->
let attrs = Sqlite3.column stmt 0 |> ProcAttributes.SQLite.deserialize in
let source_file = attrs.ProcAttributes.translation_unit in
let proc_name = ProcAttributes.get_proc_name attrs in
if filter source_file proc_name then proc_name :: rev_results else rev_results )
in
run_query_fold cdb "reading all procedure names capturedb" cdb_stmt []
|> run_query_fold adb "reading all procedure names analysisdb" adb_stmt
let select_proc_names_interactive ~filter =
let proc_names = get_all ~filter () |> List.rev in
let proc_names_len = List.length proc_names in
match (proc_names, Config.select) with
| [], _ ->
F.eprintf "No procedures found" ;
None
| _, Some (`Select n) when n >= proc_names_len ->
L.die UserError "Cannot select result #%d out of only %d procedures" n proc_names_len
| [proc_name], _ ->
F.eprintf "Selected proc name: %a@." Procname.pp proc_name ;
Some proc_names
| _, Some `All ->
Some proc_names
| _, Some (`Select n) ->
let proc_names_array = List.to_array proc_names in
Some [proc_names_array.(n)]
| _, None ->
let proc_names_array = List.to_array proc_names in
Array.iteri proc_names_array ~f:(fun i proc_name ->
F.eprintf "%d: %a@\n" i Procname.pp proc_name ) ;
let rec ask_user_input () =
F.eprintf "Select one number (type 'a' for selecting all, 'q' for quit): " ;
Out_channel.flush stderr ;
let input = String.strip In_channel.(input_line_exn stdin) in
if String.equal (String.lowercase input) "a" then Some proc_names
else if String.equal (String.lowercase input) "q" then (
F.eprintf "Quit interactive mode" ;
None )
else
match int_of_string_opt input with
| Some n when 0 <= n && n < Array.length proc_names_array ->
Some [proc_names_array.(n)]
| _ ->
F.eprintf "Invalid input" ;
ask_user_input ()
in
ask_user_input ()
let pp_all ~filter ~proc_name:proc_name_cond ~defined ~source_file:source_file_cond ~proc_attributes
~proc_cfg fmt () =
let db = Database.get_database CaptureDatabase in
let deserialize_bool_int = function
| Sqlite3.Data.INT int64 -> (
match Int64.to_int_exn int64 with 0 -> false | _ -> true )
| _ ->
L.die InternalError "deserialize_int"
in
let pp_if ?(new_line = false) condition title pp fmt x =
if condition then (
if new_line then F.fprintf fmt "@[<v2>" else F.fprintf fmt "@[<h>" ;
F.fprintf fmt "%s:@ %a@]@;" title pp x )
in
let pp_column_if stmt ?new_line condition title deserialize pp fmt column =
if condition then
pp_if ?new_line condition title pp fmt (Sqlite3.column stmt column |> deserialize)
in
let pp_row stmt fmt source_file proc_name =
let[@warning "-partial-match"] (Sqlite3.Data.TEXT proc_uid) = Sqlite3.column stmt 0 in
let dump_cfg fmt cfg_opt =
match cfg_opt with
| None ->
F.pp_print_string fmt "not found"
| Some cfg ->
let path = DotCfg.emit_proc_desc source_file cfg in
F.fprintf fmt "'%s'" path
in
F.fprintf fmt "@[<v2>%s@,%a%a%a%a%a@]@\n" proc_uid
(pp_if source_file_cond "source_file" SourceFile.pp)
source_file
(pp_if proc_name_cond "proc_name" Procname.pp)
proc_name
(pp_column_if stmt defined "defined" deserialize_bool_int Bool.pp)
1
(pp_column_if stmt ~new_line:true proc_attributes "attributes"
ProcAttributes.SQLite.deserialize ProcAttributes.pp )
2
(pp_column_if stmt ~new_line:false proc_cfg "control-flow graph" Procdesc.SQLite.deserialize
dump_cfg )
3
in
Sqlite3.prepare db
{|
SELECT
proc_uid,
cfg IS NOT NULL,
proc_attributes,
cfg
FROM procedures ORDER BY proc_uid
|}
|> Container.iter ~fold:(SqliteUtils.result_fold_rows db ~log:"print all procedures")
~f:(fun stmt ->
let attrs = Sqlite3.column stmt 2 |> ProcAttributes.SQLite.deserialize in
let proc_name = ProcAttributes.get_proc_name attrs in
let source_file = attrs.ProcAttributes.translation_unit in
if filter source_file proc_name then pp_row stmt fmt source_file proc_name )
|
07770b1875761c1983c18d18fe7daa37f24816a2911bda1f963e3d64313ad983 | janestreet/ecaml | major_mode.mli | include Major_mode_intf.Major_mode
| null | https://raw.githubusercontent.com/janestreet/ecaml/bd95b93799ccf809be26436b8379410c29282c4f/src/major_mode.mli | ocaml | include Major_mode_intf.Major_mode
| |
3bf79e607f4575f8fb3daef53a48d13641b38d5ee69afe0c4b779f126b23f4cd | jonase/eastwood | classify_invoke.clj | Copyright ( c ) , Rich Hickey & contributors .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns eastwood.copieddeps.dep2.clojure.tools.analyzer.passes.jvm.classify-invoke
(:require [eastwood.copieddeps.dep1.clojure.tools.analyzer.utils :refer [arglist-for-arity protocol-node? source-info]]
[eastwood.copieddeps.dep2.clojure.tools.analyzer.jvm.utils
:refer [specials prim-interface]]
[eastwood.copieddeps.dep2.clojure.tools.analyzer.passes.jvm.validate :refer [validate]]))
(defn classify-invoke
"If the AST node is an :invoke, check the node in function position,
* if it is a keyword, transform the node in a :keyword-invoke node;
* if it is the clojure.core/instance? var and the first argument is a
literal class, transform the node in a :instance? node to be inlined by
the emitter
* if it is a protocol function var, transform the node in a :protocol-invoke
node
* if it is a regular function with primitive type hints that match a
clojure.lang.IFn$[primitive interface], transform the node in a :prim-invoke
node"
{:pass-info {:walk :post :depends #{#'validate}}}
[{:keys [op args tag env form] :as ast}]
(if-not (= op :invoke)
ast
(let [argc (count args)
the-fn (:fn ast)
op (:op the-fn)
var? (= :var op)
the-var (:var the-fn)]
(cond
(and (= :const op)
(= :keyword (:type the-fn)))
(if (<= 1 argc 2)
(if (and (not (namespace (:val the-fn)))
(= 1 argc))
(merge (dissoc ast :fn :args)
{:op :keyword-invoke
:target (first args)
:keyword the-fn
:children [:keyword :target]})
ast)
(throw (ex-info (str "Cannot invoke keyword with " argc " arguments")
(merge {:form form}
(source-info env)))))
(and (= 2 argc)
var?
(= #'clojure.core/instance? the-var)
(= :const (:op (first args)))
(= :class (:type (first args))))
(merge (dissoc ast :fn :args)
{:op :instance?
:class (:val (first args))
:target (second args)
:form form
:env env
:o-tag Boolean/TYPE
:tag (or tag Boolean/TYPE)
:children [:target]})
(and var? (protocol-node? the-var (:meta the-fn)))
(if (>= argc 1)
(merge (dissoc ast :fn)
{:op :protocol-invoke
:protocol-fn the-fn
:target (first args)
:args (vec (rest args))
:children [:protocol-fn :target :args]})
(throw (ex-info "Cannot invoke protocol method with no args"
(merge {:form form}
(source-info env)))))
:else
(let [arglist (arglist-for-arity the-fn argc)
arg-tags (mapv (comp specials str :tag meta) arglist)
ret-tag (-> arglist meta :tag str specials)
tags (conj arg-tags ret-tag)]
(if-let [prim-interface (prim-interface (mapv #(if (nil? %) Object %) tags))]
(merge ast
{:op :prim-invoke
:prim-interface prim-interface
:args (mapv (fn [arg tag] (assoc arg :tag tag)) args arg-tags)
:o-tag ret-tag
:tag (or tag ret-tag)})
ast))))))
| null | https://raw.githubusercontent.com/jonase/eastwood/c5b7d9f8ad8f8b38dc7138d853cc65f6987d6058/copied-deps/eastwood/copieddeps/dep2/clojure/tools/analyzer/passes/jvm/classify_invoke.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
| Copyright ( c ) , Rich Hickey & contributors .
(ns eastwood.copieddeps.dep2.clojure.tools.analyzer.passes.jvm.classify-invoke
(:require [eastwood.copieddeps.dep1.clojure.tools.analyzer.utils :refer [arglist-for-arity protocol-node? source-info]]
[eastwood.copieddeps.dep2.clojure.tools.analyzer.jvm.utils
:refer [specials prim-interface]]
[eastwood.copieddeps.dep2.clojure.tools.analyzer.passes.jvm.validate :refer [validate]]))
(defn classify-invoke
"If the AST node is an :invoke, check the node in function position,
* if it is the clojure.core/instance? var and the first argument is a
literal class, transform the node in a :instance? node to be inlined by
the emitter
* if it is a protocol function var, transform the node in a :protocol-invoke
node
* if it is a regular function with primitive type hints that match a
clojure.lang.IFn$[primitive interface], transform the node in a :prim-invoke
node"
{:pass-info {:walk :post :depends #{#'validate}}}
[{:keys [op args tag env form] :as ast}]
(if-not (= op :invoke)
ast
(let [argc (count args)
the-fn (:fn ast)
op (:op the-fn)
var? (= :var op)
the-var (:var the-fn)]
(cond
(and (= :const op)
(= :keyword (:type the-fn)))
(if (<= 1 argc 2)
(if (and (not (namespace (:val the-fn)))
(= 1 argc))
(merge (dissoc ast :fn :args)
{:op :keyword-invoke
:target (first args)
:keyword the-fn
:children [:keyword :target]})
ast)
(throw (ex-info (str "Cannot invoke keyword with " argc " arguments")
(merge {:form form}
(source-info env)))))
(and (= 2 argc)
var?
(= #'clojure.core/instance? the-var)
(= :const (:op (first args)))
(= :class (:type (first args))))
(merge (dissoc ast :fn :args)
{:op :instance?
:class (:val (first args))
:target (second args)
:form form
:env env
:o-tag Boolean/TYPE
:tag (or tag Boolean/TYPE)
:children [:target]})
(and var? (protocol-node? the-var (:meta the-fn)))
(if (>= argc 1)
(merge (dissoc ast :fn)
{:op :protocol-invoke
:protocol-fn the-fn
:target (first args)
:args (vec (rest args))
:children [:protocol-fn :target :args]})
(throw (ex-info "Cannot invoke protocol method with no args"
(merge {:form form}
(source-info env)))))
:else
(let [arglist (arglist-for-arity the-fn argc)
arg-tags (mapv (comp specials str :tag meta) arglist)
ret-tag (-> arglist meta :tag str specials)
tags (conj arg-tags ret-tag)]
(if-let [prim-interface (prim-interface (mapv #(if (nil? %) Object %) tags))]
(merge ast
{:op :prim-invoke
:prim-interface prim-interface
:args (mapv (fn [arg tag] (assoc arg :tag tag)) args arg-tags)
:o-tag ret-tag
:tag (or tag ret-tag)})
ast))))))
|
aa6550214abaa653eb4b0478eab5c5132732357ddab4260cc4288936d6ed49d5 | noprompt/meander | epsilon.cljc | (ns ^:no-doc meander.substitute.epsilon
(:refer-clojure :exclude [compile])
#?(:cljs (:require-macros [meander.substitute.epsilon]))
(:require [clojure.set :as set]
[clojure.walk :as walk]
[meander.match.epsilon :as r.match]
[meander.match.runtime.epsilon :as r.match.runtime]
[meander.match.syntax.epsilon :as r.match.syntax]
[meander.syntax.epsilon :as r.syntax]
[meander.substitute.runtime.epsilon :as r.subst.runtime]
[meander.substitute.syntax.epsilon :as r.subst.syntax :include-macros true]
[meander.util.epsilon :as r.util])
#?(:clj (:import (clojure.lang ExceptionInfo))))
;; ---------------------------------------------------------------------
;; Environment utilities
(defn stateful-memory-variables
{:private true}
[node]
(distinct
(r.match/search node
(meander.syntax.epsilon/$
{:tag ::r.subst.syntax/cata
:argument (meander.syntax.epsilon/$ {:tag :mvr :as ?mvr-node})})
?mvr-node
(meander.syntax.epsilon/$
{:tag (r.match.syntax/or :rp* :rp+ :rpl :rpm)
:cat (meander.syntax.epsilon/$ {:tag :mvr :as ?mvr-node})})
?mvr-node
(meander.syntax.epsilon/$
{:tag :rpm
:mvr {:tag :mvr :as ?mvr-node}})
?mvr-node
;; TODO: This definition is weak.
(meander.syntax.epsilon/$
{:tag :wth
:bindings [_ ...
{:ref {:symbol ?symbol}
:pattern (meander.syntax.epsilon/$ {:tag :mvr :as ?mvr-node})} .
_ ...]})
?mvr-node)))
(defn memory-variable-data
{:private true}
[node]
(r.match/search (stateful-memory-variables node)
(_ ... {:symbol ?symbol} . _ ...)
{:memory-variable/symbol ?symbol
:memory-variable/state :iterating
:iterator/symbol (with-meta (symbol (str (name ?symbol) "__counter"))
{:tag 'java.util.Iterator})}))
(defn make-env
[node]
{:wth-refs {}
:data (into #{} (memory-variable-data node))})
(defn get-wth-refs
{:private true}
[env]
(get env :wth-refs))
(defn get-wth-ref-pattern
{:private true}
[env ref-node]
(get-in env [:wth-refs ref-node]))
(defn add-wth-refs
{:private true}
[env ref-map]
(update env :wth-refs merge ref-map))
;; ---------------------------------------------------------------------
;; Compilation
(defn compile-ground
"This function is used to compile the `:value` of `:lit` nodes."
{:private true}
[x]
(cond
(symbol? x)
`(quote ~x)
(seq? x)
(if (= (first x) 'quote)
x
(if (= (first x) `list)
(cons (first x) (map compile-ground (rest x)))
(if (seq x)
(cons `list (map compile-ground x))
())))
(map? x)
(into {}
(map
(fn [[k v]]
[(compile-ground k) (compile-ground v)]))
x)
(coll? x)
(into (empty x) (map compile-ground) x)
:else
x))
(defn iterator-has-next-form
  "Return a form which tests whether `iterator-form` (an expression
  yielding a `java.util.Iterator`) has more elements."
  {:private true}
  [iterator-form]
  (list '.hasNext iterator-form))
(defn iterator-next-form
  "Return a form which advances `iterator-form` and yields its next
  element, or nil when the iterator is exhausted."
  {:private true}
  [iterator-form]
  (list 'if
        (list '.hasNext iterator-form)
        (list '.next iterator-form)))
(defn iterator-rest-form
  "Return a form which drains all remaining elements of
  `iterator-form` into a vector."
  {:private true}
  [iterator-form]
  `(vec (r.subst.runtime/iterator-seq ~iterator-form)))
(defmulti compile*
  "Compile a substitution syntax tree `node` to a Clojure form,
  threading the compilation environment through. Dispatches on the
  node's `:tag`. Every method returns a pair of `[form env]`."
  {:arglists '([node env])}
  (fn [node _env] (get node :tag)))
(defn compile-all*
  "Compile every node in `nodes` left to right, threading the
  environment through each compilation. Returns `[forms env]` where
  `forms` is a vector of the compiled forms in order."
  [nodes env]
  (loop [remaining (seq nodes)
         forms []
         env env]
    (if remaining
      (let [[form env*] (compile* (first remaining) env)]
        (recur (next remaining) (conj forms form) env*))
      [forms env])))
(defmethod compile* ::r.subst.syntax/apply
  [node env]
  ;; Compile the argument and wrap it in a call to the user supplied
  ;; function.
  (r.match/match node
    {:function ?function
     :argument ?argument}
    (let [[form env] (compile* ?argument env)]
      [`(~?function ~form) env])))
(defmethod compile* ::r.subst.syntax/cata
  [node env]
  ;; Compile a recursive (cata) substitution. When a cata symbol is
  ;; bound, emit a call to it on the compiled argument, converting a
  ;; failed result into a thrown FAIL. Otherwise record the error in
  ;; the environment so `compile` can report ::CATA_NOT_BOUND.
  (r.match/match node
    {:argument ?argument :as ?node}
    (if-some [cata-symbol (get env :cata-symbol)]
      (let [[argument env] (compile* ?argument env)
            result (gensym "R__")
            form `(let [~result (~cata-symbol ~argument)]
                    (if (r.match.runtime/fail? ~result)
                      (throw r.subst.runtime/FAIL)
                      (nth ~result 0)))]
        [form env])
      (let [env (update env :data conj {:error :cata-not-bound})]
        [`(throw (ex-info "cata not bound" {})) env]))))
(defmethod compile* :ctn
  [node env]
  ;; A containment pattern compiles to its subpattern, optionally
  ;; applied to a compiled context form.
  (let [pattern-node (:pattern node)
        context-node (:context node)]
    (if (nil? context-node)
      (compile* pattern-node env)
      (let [[pattern-form env] (compile* pattern-node env)
            [context-form env] (compile* context-node env)]
        [(list context-form pattern-form) env]))))
(defmethod compile* :cat
  [node env]
  (r.match/match node
    ;; Base case 1.
    {:elements []}
    [() env]

    ;; Base case 2.
    {:elements ()}
    [() env]

    ;; Normalize elements to vector and recur.
    {:elements (& _ :as ?elements)}
    (compile* {:tag :cat :elements (vec ?elements)} env)

    ;; Process each element in the sequence from left to right.
    ;; Handle unquote splicing.
    {:elements [{:tag :uns, :expr ?expr} & ?tail]}
    (r.match/match (compile* ?expr env)
      [?expr-form ?expr-env]
      (r.match/match (compile* {:tag :cat
                                :elements ?tail}
                               ?expr-env)
        [?tail-form ?tail-env]
        [`(concat ~?expr-form ~?tail-form) ?tail-env]))

    ;; Handle anything else.
    {:elements [?head & ?tail]}
    (r.match/match (compile* ?head env)
      [?head-form ?head-env]
      (r.match/match (compile* {:tag :cat
                                :elements ?tail}
                               ?head-env)
        [?tail-form ?tail-env]
        [`(cons ~?head-form ~?tail-form) ?tail-env]))))
(defmethod compile* :drp
  [_node env]
  ;; Drop patterns contribute no forms to the substitution output.
  (vector () env))
(defmethod compile* :lit
  [node env]
  ;; Ground literals compile via `compile-ground`.
  (vector (compile-ground (get node :value)) env))
(defmethod compile* :lvr
  [node env]
  ;; A logic variable compiles to a reference to its binding symbol.
  (vector (get node :symbol) env))
(defmethod compile* :map
  [node env]
  ;; Compile a map pattern: start from the `:as` pattern (when
  ;; present), merge in the compiled key/value entries, then merge a
  ;; compiled rest-map pattern over it when present.
  (let [[form env] (if-some [as-node (:as node)]
                     (let [[form env] (compile* as-node env)]
                       [`(into {} ~form) env])
                     [{} env])
        [forms env] (compile-all* (into [] cat (:map node)) env)
        form `(merge ~form ~(into {} (map vec (partition 2 forms))))
        [form env] (if-some [rest-node (:rest-map node)]
                     (let [[rest-form env] (compile* rest-node env)]
                       [`(let [form# ~form]
                           (merge ~rest-form form#))
                        env])
                     [form env])
        ;; Search for keys containing memory variables that have
        ;; associated iterator symbols in the environment.
        iterator-symbols (r.match/search [node env]
                           [{:map {(r.match.syntax/apply r.syntax/variables #{{:tag :mvr :symbol ?symbol}}) _}}
                            {:data #{{:memory-variable/symbol ?symbol
                                      :iterator/symbol (r.match.syntax/pred symbol? ?iterator-symbol)}}}]
                           ?iterator-symbol)
        ;; If there are any iterator symbols we need to check to see
        ;; if all of them have a value available at runtime.
        form (if (seq iterator-symbols)
               `(if (and ~@(map iterator-has-next-form iterator-symbols))
                  ~form
                  {})
               form)]
    [form env]))
(defmethod compile* :merge
  [node env]
  ;; Compile every subpattern and emit a form that pours all of the
  ;; resulting maps into a single map.
  (let [result (compile-all* (:patterns node) env)
        forms (nth result 0)
        env* (nth result 1)]
    [`(into {} cat [~@forms]) env*]))
(defmethod compile* :mvr
  [node env]
  (r.match/find [node env]
    ;; The memory variable has already been drained; it yields nil.
    [{:symbol ?symbol}
     {:data #{{:memory-variable/symbol ?symbol
               :memory-variable/state :finished}}}]
    [nil env]

    ;; Check for an associated iterator.
    [{:symbol ?symbol}
     {:data #{{:memory-variable/symbol ?symbol
               :iterator/symbol (r.match.syntax/pred symbol? ?iterator-symbol)}}}]
    [(iterator-next-form ?iterator-symbol) env]

    ;; Check for an associated counter.
    [{:symbol ?symbol}
     {:data #{{:memory-variable/symbol ?symbol
               :counter/memory-variable-symbol ?symbol
               :counter/value ?value
               :as ?element}
              ^& ?rest-data}}]
    (let [element* (update ?element :counter/value inc)
          data* (conj ?rest-data element*)
          env* (assoc env :data data*)]
      [`(nth ~?symbol ~?value nil) env*])

    ;; Associate a counter.
    [{:symbol ?symbol}
     {:data #{{:memory-variable/symbol ?symbol
               :as ?value}
              ^:as ?data}}]
    (let [value* (merge ?value {:counter/memory-variable-symbol ?symbol
                                :counter/value 1})
          data* (conj ?data value*)
          env* (assoc env :data data*)]
      [`(nth ~?symbol 0 nil) env*])

    ;; No entry for the variable yet; create one with a counter.
    [{:symbol ?symbol}
     {:data #{^:as ?data}}]
    (let [memory-variable {:memory-variable/symbol ?symbol
                           :counter/memory-variable-symbol ?symbol
                           :counter/value 1}
          data* (conj ?data memory-variable)
          env* (assoc env :data data*)]
      [`(nth ~?symbol 0 nil) env*])))
(defmethod compile* :prt
  [node env]
  ;; Compile the left and right parts of a partition and join them
  ;; with `concat`, threading the environment left to right.
  (r.match/match node
    {:left ?left
     :right ?right}
    (r.match/match (compile* ?left env)
      [?left-form ?left-env]
      (r.match/match (compile* ?right ?left-env)
        [?right-form ?right-env]
        [`(concat ~?left-form ~?right-form) ?right-env]))))
(defmethod compile* :quo
  [node env]
  ;; Quoted patterns evaluate to their form unchanged.
  [(list 'quote (:form node)) env])
(defmethod compile* :ref
  [node env]
  ;; A reference compiles to a zero-argument call of the function the
  ;; `:wth` handler generates for it.
  [(list (:symbol node)) env])
(defmethod compile* :rp*
  [node env]
  (let [mvrs (r.syntax/memory-variables
              (r.syntax/substitute-refs node (get-wth-refs env)))]
    ;; If there are memory variables, compile a while loop that runs
    ;; until one of them has exhausted its values.
    (if (seq mvrs)
      (let [;; Compile a conjunction of checks which will be performed
            ;; at the top of each loop. Each check verifies a memory
            ;; variable still has values to retrieve.
            checks (r.match/search [mvrs env]
                     [#{{:symbol ?symbol}}
                      {:data #{{:memory-variable/symbol ?symbol
                                :iterator/symbol ?iterator-symbol}}}]
                     (iterator-has-next-form ?iterator-symbol))
            ;; Compile each element of the corresponding `:cat` node
            ;; one at a time.
            [element-forms elements-env] (compile-all* (:elements (:cat node)) env)
            return-symbol (gensym "return__")]
        [`(loop [~return-symbol (transient [])]
            (if (and ~@checks)
              (recur
               ~(reduce (fn [ret form] `(conj! ~ret ~form))
                        return-symbol element-forms))
              (persistent! ~return-symbol)))
         elements-env])
      ;; This should happen in a separate check phase.
      (throw (ex-info "No memory variables found for operator (...)"
                      {:node (r.syntax/unparse node)
                       :env env})))))
(defmethod compile* :rp+
  [node env]
  ;; Repeat the compiled elements exactly `:n` times (a literal count).
  (r.match/match node
    {:cat {:elements ?elements}
     :n ?n}
    (let [[forms env] (compile-all* ?elements env)
          n-symbol (gensym "n__")
          return-symbol (gensym "return__")
          form `(loop [~return-symbol (transient [])
                       ~n-symbol ~?n]
                  ;; Yield n substitutions.
                  (if (zero? ~n-symbol)
                    (persistent! ~return-symbol)
                    (recur ~(reduce (fn [ret form] `(conj! ~ret ~form))
                                    return-symbol forms)
                           (unchecked-dec ~n-symbol))))]
      [form env])))
(defmethod compile* :rpl [node env]
  ;; Repeat the compiled elements the number of times held by the
  ;; logic variable `:lvr` at runtime.
  (r.match/match node
    {:cat {:elements ?elements}
     :lvr ?lvr}
    (let [[forms env] (compile-all* ?elements env)
          [n-form env] (compile* ?lvr env)
          n-symbol (gensym "n__")
          return-symbol (gensym "return__")
          form `(loop [~return-symbol (transient [])
                       ~n-symbol ~n-form]
                  ;; Yield ?n substitutions.
                  (if (zero? ~n-symbol)
                    (persistent! ~return-symbol)
                    (recur ~(reduce (fn [ret form] `(conj! ~ret ~form))
                                    return-symbol forms)
                           (unchecked-dec ~n-symbol))))]
      [form env])))
(defmethod compile* :rpm [node env]
  ;; Repeat the compiled elements the number of times held by the
  ;; memory variable `:mvr` at runtime.
  (r.match/match node
    {:cat {:elements ?elements}
     :mvr ?mvr}
    (let [[forms env] (compile-all* ?elements env)
          [n-form env] (compile* ?mvr env)
          n-symbol (gensym "n__")
          return-symbol (gensym "return__")
          ;; Yield !n substitutions. Note that unlike `:rpl`
          ;; and `:rp+` we need to guard against the
          ;; possibility of a `nil` value in the case the
          ;; memory variable has been exhausted.
          form `(loop [~return-symbol (transient [])
                       ~n-symbol (or ~n-form 0)]
                  (if (zero? ~n-symbol)
                    (persistent! ~return-symbol)
                    (recur ~(reduce (fn [ret form] `(conj! ~ret ~form))
                                    return-symbol forms)
                           (unchecked-dec ~n-symbol))))]
      [form env])))
(defmethod compile* :rst
  [node env]
  ;; Compile a rest pattern: yield whatever values remain in its
  ;; memory variable and mark that variable `:finished` in the
  ;; environment.
  (r.match/find [node env]
    ;; Check for associated memory variable in a `:finished` state.
    [{:mvr {:symbol ?symbol}}
     {:data #{{:memory-variable/symbol ?symbol
               :memory-variable/state :finished}}}]
    [nil env]

    ;; Check for associated iterator that is not in a `:finished`
    ;; state.
    [{:mvr {:symbol ?symbol}}
     {:data #{{:memory-variable/symbol ?symbol
               :iterator/symbol (r.match.syntax/pred symbol? ?iterator-symbol)
               :as ?memory-variable}
              ^& ?rest-data}}]
    (let [memory-variable* (assoc ?memory-variable :memory-variable/state :finished)
          data* (conj ?rest-data memory-variable*)
          env* (assoc env :data data*)]
      [(iterator-rest-form ?iterator-symbol) env*])

    ;; Check for associated counter.
    [{:mvr {:symbol ?symbol}}
     {:data #{{:memory-variable/symbol ?symbol
               :counter/value ?value
               :as ?memory-variable}
              ^& ?rest-data}}]
    (let [memory-variable* (assoc ?memory-variable :memory-variable/state :finished)
          data* (conj ?rest-data memory-variable*)
          env* (assoc env :data data*)]
      [`(subvec ~?symbol (min ~?value (count ~?symbol)))
       env*])

    ;; Update existing memory variable state.
    [{:mvr {:symbol ?symbol}}
     {:data #{{:memory-variable/symbol ?symbol
               :as ?memory-variable}
              ^& ?rest-data}}]
    (let [memory-variable* (assoc ?memory-variable :memory-variable/state :finished)
          data* (conj ?rest-data memory-variable*)
          env* (assoc env :data data*)]
      [?symbol env*])

    ;; Insert memory variable in a finished state.
    [{:mvr {:symbol ?symbol}}
     {:data #{^:as ?data}}]
    (let [memory-variable {:memory-variable/symbol ?symbol
                           :memory-variable/state :finished}
          data* (conj ?data memory-variable)
          env* (assoc env :data data*)]
      [?symbol env*])))
(defmethod compile* :set
  [node env]
  ;; Compile each element into a literal set, then wrap with `into`
  ;; forms for the `:as` pattern and/or rest pattern when present.
  (let [[element-forms env] (compile-all* (:elements node) env)
        base-form (set element-forms)
        as-node (:as node)
        [form env] (if (nil? as-node)
                     [base-form env]
                     (let [[as-form as-env] (compile* as-node env)]
                       [`(into ~base-form ~as-form) as-env]))
        rest-node (:rest node)
        [form env] (if (nil? rest-node)
                     [form env]
                     (let [[rest-form rest-env] (compile* rest-node env)]
                       [`(into ~form ~rest-form) rest-env]))]
    [form env]))
(defmethod compile* :seq
  [node env]
  ;; Compile the partition and realize it as a list.
  (r.match/match node
    {:prt ?prt}
    (r.match/match (compile* ?prt env)
      [?form ?env]
      [`(list* ~?form) ?env])))
(defmethod compile* :tail
  [node env]
  ;; A tail pattern is transparent: compiling it is compiling its
  ;; inner pattern.
  (let [inner (get node :pattern)]
    (compile* inner env)))
(defmethod compile* :unq
  [node env]
  ;; Unquote: the user's expression is emitted verbatim.
  (vector (get node :expr) env))
(defmethod compile* :uns
  [node env]
  ;; Unquote splicing: the expression is emitted verbatim; the actual
  ;; splicing is handled by the `:cat` compiler.
  (vector (get node :expr) env))
(defmethod compile* :vec
  [node env]
  ;; Compile the partition and pour it into a vector.
  (r.match/match node
    {:prt ?prt}
    (r.match/match (compile* ?prt env)
      [?form ?env]
      [`(into [] ~?form) ?env])))
(defmethod compile* :wth
  [node env]
  ;; `with` patterns compile to a `letfn` whose functions implement
  ;; the bound reference patterns used by the body.
  (let [;; Get all of the references used in the body and in the
        ;; bindings.
        ref-set (into (r.syntax/references node)
                      (comp (map :pattern)
                            (mapcat r.syntax/references))
                      (:bindings node))
        ;; Update the compilation environment for subnodes.
        env* (add-wth-refs env (r.syntax/make-ref-map node))
        [body-form env**] (compile* (:body node) env*)
        ;; Restore the original refs.
        env** (assoc env** :wth-refs (:wth-refs env))]
    ;; Compile functions only for the references used.
    [`(letfn [~@(r.match/search [node ref-set]
                  (r.syntax/with [%ref {:ref {:symbol ?symbol :as ?ref}
                                        :pattern ?pattern}
                                  %bindings (r.match.syntax/or [_ ... %ref . _ ...] (_ ... %ref . _ ...))]
                    [{:bindings %bindings} #{?ref}])
                  (let [[form _] (compile* ?pattern env*)]
                    `(~?symbol [] ~form)))]
        ~body-form)
     env**]))
(defn rewrite-clojure*
  "Perform one `clojure.walk/prewalk` pass over `form`, applying
  peephole simplifications to the code generated by `compile*`
  (collapsing trivial `concat`/`cons`/`conj`/`letfn`/`list*`/`merge`/
  `or`/`into` forms). Forms matching no rule are left unchanged."
  {:private true}
  [form]
  (clojure.walk/prewalk
   (fn f [form]
     (r.match/match form
       ;; concat rules
       ;; ------------
       (clojure.core/concat ?x)
       ?x

       (clojure.core/concat ?x ())
       ?x

       (clojure.core/concat () ?x)
       ?x

       (clojure.core/concat (clojure.core/list & ?args1) (clojure.core/list & ?args2) & ?rest-args)
       `(clojure.core/concat (clojure.core/list ~@?args1 ~@?args2) ~@ ?rest-args)

       (clojure.core/concat (clojure.core/list & _ :as ?list))
       ?list

       ;; cons rules
       ;; ----------
       (clojure.core/cons ?x ())
       `(list ~?x)

       (clojure.core/cons ?x (clojure.core/list . !xs ...))
       `(list ~?x ~@!xs)

       ;; conj rules
       ;; ----------
       (clojure.core/conj [!ys ...] . !xs ...)
       `[~@!ys ~@!xs]

       ;; letfn rules
       ;; -----------
       (clojure.core/letfn [] ?body)
       ?body

       (clojure.core/letfn [] & ?body)
       `(do ~?body)

       ;; list* rules
       ;; -----------
       (clojure.core/list* (clojure.core/list & _ :as ?list-form))
       ?list-form

       (clojure.core/list* . !list*-args ... (clojure.core/list* & ?last-list*-args))
       `(clojure.core/list* ~@!list*-args ~@?last-list*-args)

       ;; merge rules
       ;; -----------
       (clojure.core/merge {:as ?m1} {:as ?m2})
       (merge ?m1 ?m2)

       ;; or rules
       ;; --------
       (clojure.core/or (if ?test ?then) ?else)
       `(if ~?test ~?then ~?else)

       (clojure.core/or (if ?test ?then nil) ?else)
       `(if ~?test ~?then ~?else)

       ;; into rules
       ;; ----------
       (clojure.core/into [!xs ...] (clojure.core/cons ?x ?y))
       `(clojure.core/into (conj ~!xs ~?x) ~?y)

       (clojure.core/into [!xs ...] (clojure.core/list . !ys ...))
       `[~@!xs ~@!ys]

       (clojure.core/into [& _ :as ?vector] nil)
       ?vector

       (clojure.core/into [& _ :as ?vector] ())
       ?vector

       (clojure.core/into [] (clojure.core/subvec & _ :as ?subvec-form))
       ?subvec-form

       (clojure.core/into [] (clojure.core/loop [?ret (clojure.core/transient [])]
                               . _ ... .
                               (clojure.core/persistent! ?ret)
                               :as ?loop-form))
       ?loop-form

       (clojure.core/into {} (clojure.core/loop [?ret (clojure.core/transient [])]
                               . !forms ... .
                               (clojure.core/persistent! ?ret)))
       `(clojure.core/loop [~?ret (clojure.core/transient {})]
          ~@!forms
          (clojure.core/persistent! ~?ret))

       (clojure.core/into {:as ?m1} {:as ?m2})
       (merge ?m1 ?m2)

       (clojure.core/into {:as ?m} [[_ _] ... :as ?v])
       (into ?m ?v)

       (clojure.core/into #{^:as ?s1} #{^:as ?s2})
       (into ?s1 ?s2)

       (clojure.core/into (clojure.core/into #{^:as ?s1} ?x) #{^:as ?s2})
       `(clojure.core/into ~(into ?s1 ?s2) ~?x)

       ;; else
       ;; ----
       ?x
       ?x))
   form))
(defn rewrite-clojure
  "Repeatedly apply `rewrite-clojure*` to `form` until a fixed point
  is reached."
  {:private true}
  [form]
  (loop [current form]
    (let [rewritten (rewrite-clojure* current)]
      (if (= rewritten current)
        current
        (recur rewritten)))))
(defn iter-bindings
  "Build a flat `let`-style binding vector pairing each iterator
  symbol in `env` with a form that creates an iterator over its memory
  variable's values."
  {:private true}
  [env]
  (into [] cat
        (r.match/search env
          {:data #{{:memory-variable/symbol ?memory-variable-symbol
                    :iterator/symbol (r.match.syntax/pred some? ?iterator-symbol)}}}
          [?iterator-symbol `(r.subst.runtime/iterator ~?memory-variable-symbol)])))
(defn compile
  "Compile the substitution AST `node` to Clojure code. Returns
  `::CATA_NOT_BOUND` when the pattern used `cata` without a bound cata
  symbol in `env`; otherwise returns the peephole-rewritten form,
  wrapped in iterator bindings when needed and, when substitution-side
  cata handling is enabled, in a try/catch that converts a thrown
  substitution failure into FAIL."
  [node env]
  (let [node (r.subst.syntax/expand-ast node)
        env (merge env (make-env node))
        [form env] (compile* node env)]
    (r.match/find env
      ;; A cata pattern was compiled without a bound cata symbol.
      {:data #{{:error :cata-not-bound}}}
      ::CATA_NOT_BOUND

      _
      (let [form* (rewrite-clojure form)
            iter-bindings (iter-bindings env)
            form* (if (seq iter-bindings)
                    `(let ~iter-bindings ~form*)
                    form*)
            form* (if (and (not (get env :match-cata?))
                           (get env :subst-cata?))
                    (if (r.subst.syntax/contains-cata-node? node)
                      `(try
                         [~form*]
                         (catch ~(if (r.util/cljs-env? env) 'cljs.core/ExceptionInfo 'clojure.lang.ExceptionInfo) e#
                           (if (r.subst.runtime/fail? e#)
                             r.match.runtime/FAIL
                             (throw e#))))
                      [form*])
                    form*)]
        form*))))
(defmacro substitute
  "Expand `pattern` into code which constructs the substitution it
  describes. Signals an error when the pattern uses `cata` in a
  context where it is not allowed."
  [pattern]
  (let [node (r.subst.syntax/parse pattern &env)
        expansion (compile node &env)]
    (when (= ::CATA_NOT_BOUND expansion)
      (throw (ex-info "cata not allowed here" {:pattern pattern})))
    expansion))
| null | https://raw.githubusercontent.com/noprompt/meander/c24c7f1477879ee491fbc09fbf75a8777a705a20/src/meander/substitute/epsilon.cljc | clojure | ---------------------------------------------------------------------
Environment utilities
TODO: This definition is weak.
---------------------------------------------------------------------
Compilation
Normalize elements to vector and recur.
Process each element in the sequence from left to right.
Handle unquote splicing.
Handle anything else.
Search for keys containing memory variables that have
associated iterator symbols in the environment.
If there are any iterator symbols we need to check to see
if all of them have a value available at runtime.
Check for an associated iterator.
Check for an associated counter.
Associate a counter.
If there are memory variables, compile a while loop that runs
Compile a conjunction of checks which will be performed
at the top of each loop. Each check verifies a memory
variable still has values to retrieve.
Compile each element of the corresponding `:cat` node
one at a time.
This should happen in a separate check phase.
Yield n substitutions.
Yield ?n substitutions.
Yield !n substitutions. Note that unlike `:rpl`
and `:rp+` we need to guard against the
possibility of a `nil` value in the case the
memory variable has been exauhsted.
Check for associated memory variable in a `:finished` state.
Check for associated iterator that is not in a `:finished`
state.
Check for associated counter.
Update existing memory variable state.
Insert memory variable in a finished state.
Get all of the references used in the body and in the
bindings.
Update the compilation environment for subnodes.
Restore the original refs.
Compile functions only for the references used.
concat rules
------------
cons rules
----------
conj rules
----------
lefn rules
-----------
list* rules
-----------
merge rules
-----------
or rules
--------
into rules
----------
else
---- | (ns ^:no-doc meander.substitute.epsilon
(:refer-clojure :exclude [compile])
#?(:cljs (:require-macros [meander.substitute.epsilon]))
(:require [clojure.set :as set]
[clojure.walk :as walk]
[meander.match.epsilon :as r.match]
[meander.match.runtime.epsilon :as r.match.runtime]
[meander.match.syntax.epsilon :as r.match.syntax]
[meander.syntax.epsilon :as r.syntax]
[meander.substitute.runtime.epsilon :as r.subst.runtime]
[meander.substitute.syntax.epsilon :as r.subst.syntax :include-macros true]
[meander.util.epsilon :as r.util])
#?(:clj (:import (clojure.lang ExceptionInfo))))
(defn stateful-memory-variables
{:private true}
[node]
(distinct
(r.match/search node
(meander.syntax.epsilon/$
{:tag ::r.subst.syntax/cata
:argument (meander.syntax.epsilon/$ {:tag :mvr :as ?mvr-node})})
?mvr-node
(meander.syntax.epsilon/$
{:tag (r.match.syntax/or :rp* :rp+ :rpl :rpm)
:cat (meander.syntax.epsilon/$ {:tag :mvr :as ?mvr-node})})
?mvr-node
(meander.syntax.epsilon/$
{:tag :rpm
:mvr {:tag :mvr :as ?mvr-node}})
?mvr-node
(meander.syntax.epsilon/$
{:tag :wth
:bindings [_ ...
{:ref {:symbol ?symbol}
:pattern (meander.syntax.epsilon/$ {:tag :mvr :as ?mvr-node})} .
_ ...]})
?mvr-node)))
(defn memory-variable-data
{:private true}
[node]
(r.match/search (stateful-memory-variables node)
(_ ... {:symbol ?symbol} . _ ...)
{:memory-variable/symbol ?symbol
:memory-variable/state :iterating
:iterator/symbol (with-meta (symbol (str (name ?symbol) "__counter"))
{:tag 'java.util.Iterator})}))
(defn make-env
[node]
{:wth-refs {}
:data (into #{} (memory-variable-data node))})
(defn get-wth-refs
{:private true}
[env]
(get env :wth-refs))
(defn get-wth-ref-pattern
{:private true}
[env ref-node]
(get-in env [:wth-refs ref-node]))
(defn add-wth-refs
{:private true}
[env ref-map]
(update env :wth-refs merge ref-map))
(defn compile-ground
"This function is used to compile the `:value` of `:lit` nodes."
{:private true}
[x]
(cond
(symbol? x)
`(quote ~x)
(seq? x)
(if (= (first x) 'quote)
x
(if (= (first x) `list)
(cons (first x) (map compile-ground (rest x)))
(if (seq x)
(cons `list (map compile-ground x))
())))
(map? x)
(into {}
(map
(fn [[k v]]
[(compile-ground k) (compile-ground v)]))
x)
(coll? x)
(into (empty x) (map compile-ground) x)
:else
x))
(defn iterator-has-next-form
{:private true}
[iterator-form]
`(.hasNext ~iterator-form))
(defn iterator-next-form
{:private true}
[iterator-form]
`(if (.hasNext ~iterator-form)
(.next ~iterator-form)))
(defn iterator-rest-form
{:private true}
[iterator-form]
`(vec (r.subst.runtime/iterator-seq ~iterator-form)))
(defmulti compile*
""
{:arglists '([node env])}
(fn [node _] (:tag node)))
(defn compile-all*
[nodes env]
(reduce
(fn [[forms env] node]
(let [[form env*] (compile* node env)]
[(conj forms form) env*]))
[[] env]
nodes))
(defmethod compile* ::r.subst.syntax/apply
[node env]
(r.match/match node
{:function ?function
:argument ?argument}
(let [[form env] (compile* ?argument env)]
[`(~?function ~form) env])))
(defmethod compile* ::r.subst.syntax/cata
[node env]
(r.match/match node
{:argument ?argument :as ?node}
(if-some [cata-symbol (get env :cata-symbol)]
(let [[argument env] (compile* ?argument env)
result (gensym "R__")
form `(let [~result (~cata-symbol ~argument)]
(if (r.match.runtime/fail? ~result)
(throw r.subst.runtime/FAIL)
(nth ~result 0)))]
[form env])
(let [env (update env :data conj {:error :cata-not-bound})]
[`(throw (ex-info "cata not bound" {})) env]))))
(defmethod compile* :ctn
[node env]
(let [pattern (:pattern node)]
(if-some [context (:context node)]
(let [[pattern-form env] (compile* pattern env)
[context-form env] (compile* context env)]
[`(~context-form ~pattern-form) env])
(compile* (:pattern node) env))))
(defmethod compile* :cat
[node env]
(r.match/match node
Base case 1 .
{:elements []}
[() env]
Base case 2 .
{:elements ()}
[() env]
{:elements (& _ :as ?elements)}
(compile* {:tag :cat :elements (vec ?elements)} env)
{:elements [{:tag :uns, :expr ?expr} & ?tail]}
(r.match/match (compile* ?expr env)
[?expr-form ?expr-env]
(r.match/match (compile* {:tag :cat
:elements ?tail}
?expr-env)
[?tail-form ?tail-env]
[`(concat ~?expr-form ~?tail-form) ?tail-env]))
{:elements [?head & ?tail]}
(r.match/match (compile* ?head env)
[?head-form ?head-env]
(r.match/match (compile* {:tag :cat
:elements ?tail}
?head-env)
[?tail-form ?tail-env]
[`(cons ~?head-form ~?tail-form) ?tail-env]))))
(defmethod compile* :drp
[_ env]
[() env])
(defmethod compile* :lit
[node env]
[(compile-ground (:value node)) env])
(defmethod compile* :lvr
[node env]
[(:symbol node) env])
(defmethod compile* :map
[node env]
(let [[form env] (if-some [as-node (:as node)]
(let [[form env] (compile* as-node env)]
[`(into {} ~form) env])
[{} env])
[forms env] (compile-all* (into [] cat (:map node)) env)
form `(merge ~form ~(into {} (map vec (partition 2 forms))))
[form env] (if-some [rest-node (:rest-map node)]
(let [[rest-form env] (compile* rest-node env)]
[`(let [form# ~form]
(merge ~rest-form form#))
env])
[form env])
iterator-symbols (r.match/search [node env]
[{:map {(r.match.syntax/apply r.syntax/variables #{{:tag :mvr :symbol ?symbol}}) _}}
{:data #{{:memory-variable/symbol ?symbol
:iterator/symbol (r.match.syntax/pred symbol? ?iterator-symbol)}}}]
?iterator-symbol)
form (if (seq iterator-symbols)
`(if (and ~@(map iterator-has-next-form iterator-symbols))
~form
{})
form)]
[form env]))
(defmethod compile* :merge
[node env]
(let [[forms env*] (compile-all* (:patterns node) env)]
[`(into {} cat [~@forms]) env*]))
(defmethod compile* :mvr
[node env]
(r.match/find [node env]
[{:symbol ?symbol}
{:data #{{:memory-variable/symbol ?symbol
:memory-variable/state :finished}}}]
[nil env]
[{:symbol ?symbol}
{:data #{{:memory-variable/symbol ?symbol
:iterator/symbol (r.match.syntax/pred symbol? ?iterator-symbol)}}}]
[(iterator-next-form ?iterator-symbol) env]
[{:symbol ?symbol}
{:data #{{:memory-variable/symbol ?symbol
:counter/memory-variable-symbol ?symbol
:counter/value ?value
:as ?element}
^& ?rest-data}}]
(let [element* (update ?element :counter/value inc)
data* (conj ?rest-data element*)
env* (assoc env :data data*)]
[`(nth ~?symbol ~?value nil) env*])
[{:symbol ?symbol}
{:data #{{:memory-variable/symbol ?symbol
:as ?value}
^:as ?data}}]
(let [value* (merge ?value {:counter/memory-variable-symbol ?symbol
:counter/value 1})
data* (conj ?data value*)
env* (assoc env :data data*)]
[`(nth ~?symbol 0 nil) env*])
[{:symbol ?symbol}
{:data #{^:as ?data}}]
(let [memory-variable {:memory-variable/symbol ?symbol
:counter/memory-variable-symbol ?symbol
:counter/value 1}
data* (conj ?data memory-variable)
env* (assoc env :data data*)]
[`(nth ~?symbol 0 nil) env*])))
(defmethod compile* :prt
[node env]
(r.match/match node
{:left ?left
:right ?right}
(r.match/match (compile* ?left env)
[?left-form ?left-env]
(r.match/match (compile* ?right ?left-env)
[?right-form ?right-env]
[`(concat ~?left-form ~?right-form) ?right-env]))))
(defmethod compile* :quo
[node env]
[`(quote ~(:form node)) env])
(defmethod compile* :ref
[node env]
[`(~(:symbol node)) env])
(defmethod compile* :rp*
[node env]
(let [mvrs (r.syntax/memory-variables
(r.syntax/substitute-refs node (get-wth-refs env)))]
until one of them has exauhsted its values .
(if (seq mvrs)
checks (r.match/search [mvrs env]
[#{{:symbol ?symbol}}
{:data #{{:memory-variable/symbol ?symbol
:iterator/symbol ?iterator-symbol}}}]
(iterator-has-next-form ?iterator-symbol))
[element-forms elements-env] (compile-all* (:elements (:cat node)) env)
return-symbol (gensym "return__")]
[`(loop [~return-symbol (transient [])]
(if (and ~@checks)
(recur
~(reduce (fn [ret form] `(conj! ~ret ~form))
return-symbol element-forms))
(persistent! ~return-symbol)))
elements-env])
(throw (ex-info "No memory variables found for operator (...)"
{:node (r.syntax/unparse node)
:env env})))))
(defmethod compile* :rp+
[node env]
(r.match/match node
{:cat {:elements ?elements}
:n ?n}
(let [[forms env] (compile-all* ?elements env)
n-symbol (gensym "n__")
return-symbol (gensym "return__")
form `(loop [~return-symbol (transient [])
~n-symbol ~?n]
(if (zero? ~n-symbol)
(persistent! ~return-symbol)
(recur ~(reduce (fn [ret form] `(conj! ~ret ~form))
return-symbol forms)
(unchecked-dec ~n-symbol))))]
[form env])))
(defmethod compile* :rpl [node env]
(r.match/match node
{:cat {:elements ?elements}
:lvr ?lvr}
(let [[forms env] (compile-all* ?elements env)
[n-form env] (compile* ?lvr env)
n-symbol (gensym "n__")
return-symbol (gensym "return__")
form `(loop [~return-symbol (transient [])
~n-symbol ~n-form]
(if (zero? ~n-symbol)
(persistent! ~return-symbol)
(recur ~(reduce (fn [ret form] `(conj! ~ret ~form))
return-symbol forms)
(unchecked-dec ~n-symbol))))]
[form env])))
(defmethod compile* :rpm [node env]
(r.match/match node
{:cat {:elements ?elements}
:mvr ?mvr}
(let [[forms env] (compile-all* ?elements env)
[n-form env] (compile* ?mvr env)
n-symbol (gensym "n__")
return-symbol (gensym "return__")
form `(loop [~return-symbol (transient [])
~n-symbol (or ~n-form 0)]
(if (zero? ~n-symbol)
(persistent! ~return-symbol)
(recur ~(reduce (fn [ret form] `(conj! ~ret ~form))
return-symbol forms)
(unchecked-dec ~n-symbol))))]
[form env])))
(defmethod compile* :rst
[node env]
(r.match/find [node env]
[{:mvr {:symbol ?symbol}}
{:data #{{:memory-variable/symbol ?symbol
:memory-variable/state :finished}}}]
[nil env]
[{:mvr {:symbol ?symbol}}
{:data #{{:memory-variable/symbol ?symbol
:iterator/symbol (r.match.syntax/pred symbol? ?iterator-symbol)
:as ?memory-variable}
^& ?rest-data}}]
(let [memory-variable* (assoc ?memory-variable :memory-variable/state :finished)
data* (conj ?rest-data memory-variable*)
env* (assoc env :data data*)]
[(iterator-rest-form ?iterator-symbol) env*])
[{:mvr {:symbol ?symbol}}
{:data #{{:memory-variable/symbol ?symbol
:counter/value ?value
:as ?memory-variable}
^& ?rest-data}}]
(let [memory-variable* (assoc ?memory-variable :memory-variable/state :finished)
data* (conj ?rest-data memory-variable*)
env* (assoc env :data data*)]
[`(subvec ~?symbol (min ~?value (count ~?symbol)))
env*])
[{:mvr {:symbol ?symbol}}
{:data #{{:memory-variable/symbol ?symbol
:as ?memory-variable}
^& ?rest-data}}]
(let [memory-variable* (assoc ?memory-variable :memory-variable/state :finished)
data* (conj ?rest-data memory-variable*)
env* (assoc env :data data*)]
[?symbol env*])
[{:mvr {:symbol ?symbol}}
{:data #{^:as ?data}}]
(let [memory-variable {:memory-variable/symbol ?symbol
:memory-variable/state :finished}
data* (conj ?data memory-variable)
env* (assoc env :data data*)]
[?symbol env*])))
(defmethod compile* :set
[node env]
(let [[forms env] (compile-all* (:elements node) env)
form (into #{} forms)
[form env] (if-some [as-node (:as node)]
(let [[as-form as-env] (compile* as-node env)]
[`(into ~form ~as-form) as-env])
[form env])
[form env] (if-some [rest-node (:rest node)]
(let [[rest-form rest-env] (compile* rest-node env)]
[`(into ~form ~rest-form) rest-env])
[form env])]
[form env]))
(defmethod compile* :seq
[node env]
(r.match/match node
{:prt ?prt}
(r.match/match (compile* ?prt env)
[?form ?env]
[`(list* ~?form) ?env])))
(defmethod compile* :tail
[node env]
(compile* (:pattern node) env))
(defmethod compile* :unq
[node env]
[(:expr node) env])
(defmethod compile* :uns
[node env]
[(:expr node) env])
(defmethod compile* :vec
[node env]
(r.match/match node
{:prt ?prt}
(r.match/match (compile* ?prt env)
[?form ?env]
[`(into [] ~?form) ?env])))
(defmethod compile* :wth
[node env]
ref-set (into (r.syntax/references node)
(comp (map :pattern)
(mapcat r.syntax/references))
(:bindings node))
env* (add-wth-refs env (r.syntax/make-ref-map node))
[body-form env**] (compile* (:body node) env*)
env** (assoc env** :wth-refs (:wth-refs env))]
[`(letfn [~@(r.match/search [node ref-set]
(r.syntax/with [%ref {:ref {:symbol ?symbol :as ?ref}
:pattern ?pattern}
%bindings (r.match.syntax/or [_ ... %ref . _ ...] (_ ... %ref . _ ...))]
[{:bindings %bindings} #{?ref}])
(let [[form _] (compile* ?pattern env*)]
`(~?symbol [] ~form)))]
~body-form)
env**]))
(defn rewrite-clojure*
{:private true}
[form]
(clojure.walk/prewalk
(fn f [form]
(r.match/match form
(clojure.core/concat ?x)
?x
(clojure.core/concat ?x ())
?x
(clojure.core/concat () ?x)
?x
(clojure.core/concat (clojure.core/list & ?args1) (clojure.core/list & ?args2) & ?rest-args)
`(clojure.core/concat (clojure.core/list ~@?args1 ~@?args2) ~@ ?rest-args)
(clojure.core/concat (clojure.core/list & _ :as ?list))
?list
(clojure.core/cons ?x ())
`(list ~?x)
(clojure.core/cons ?x (clojure.core/list . !xs ...))
`(list ~?x ~@!xs)
(clojure.core/conj [!ys ...] . !xs ...)
`[~@!ys ~@!xs]
(clojure.core/letfn [] ?body)
?body
(clojure.core/letfn [] & ?body)
`(do ~?body)
(clojure.core/list* (clojure.core/list & _ :as ?list-form))
?list-form
(clojure.core/list* . !list*-args ... (clojure.core/list* & ?last-list*-args))
`(clojure.core/list* ~@!list*-args ~@?last-list*-args)
(clojure.core/merge {:as ?m1} {:as ?m2})
(merge ?m1 ?m2)
(clojure.core/or (if ?test ?then) ?else)
`(if ~?test ~?then ~?else)
(clojure.core/or (if ?test ?then nil) ?else)
`(if ~?test ~?then ~?else)
(clojure.core/into [!xs ...] (clojure.core/cons ?x ?y))
`(clojure.core/into (conj ~!xs ~?x) ~?y)
(clojure.core/into [!xs ...] (clojure.core/list . !ys ...))
`[~@!xs ~@!ys]
(clojure.core/into [& _ :as ?vector] nil)
?vector
(clojure.core/into [& _ :as ?vector] ())
?vector
(clojure.core/into [] (clojure.core/subvec & _ :as ?subvec-form))
?subvec-form
(clojure.core/into [] (clojure.core/loop [?ret (clojure.core/transient [])]
. _ ... .
(clojure.core/persistent! ?ret)
:as ?loop-form))
?loop-form
(clojure.core/into {} (clojure.core/loop [?ret (clojure.core/transient [])]
. !forms ... .
(clojure.core/persistent! ?ret)))
`(clojure.core/loop [~?ret (clojure.core/transient {})]
~@!forms
(clojure.core/persistent! ~?ret))
(clojure.core/into {:as ?m1} {:as ?m2})
(merge ?m1 ?m2)
(clojure.core/into {:as ?m} [[_ _] ... :as ?v])
(into ?m ?v)
(clojure.core/into #{^:as ?s1} #{^:as ?s2})
(into ?s1 ?s2)
(clojure.core/into (clojure.core/into #{^:as ?s1} ?x) #{^:as ?s2})
`(clojure.core/into ~(into ?s1 ?s2) ~?x)
?x
?x))
form))
(defn rewrite-clojure
{:private true}
[form]
(let [form* (rewrite-clojure* form)]
(if (= form form*)
form
(recur form*))))
(defn iter-bindings
{:private true}
[env]
(into [] cat
(r.match/search env
{:data #{{:memory-variable/symbol ?memory-variable-symbol
:iterator/symbol (r.match.syntax/pred some? ?iterator-symbol)}}}
[?iterator-symbol `(r.subst.runtime/iterator ~?memory-variable-symbol)])))
(defn compile [node env]
(let [node (r.subst.syntax/expand-ast node)
env (merge env (make-env node))
[form env] (compile* node env)]
(r.match/find env
{:data #{{:error :cata-not-bound}}}
::CATA_NOT_BOUND
_
(let [form* (rewrite-clojure form)
iter-bindings (iter-bindings env)
form* (if (seq iter-bindings)
`(let ~iter-bindings ~form*)
form*)
form* (if (and (not (get env :match-cata?))
(get env :subst-cata?))
(if (r.subst.syntax/contains-cata-node? node)
`(try
[~form*]
(catch ~(if (r.util/cljs-env? env) 'cljs.core/ExceptionInfo 'clojure.lang.ExceptionInfo) e#
(if (r.subst.runtime/fail? e#)
r.match.runtime/FAIL
(throw e#))))
[form*])
form*)]
form*))))
(defmacro substitute
[pattern]
(let [node (r.subst.syntax/parse pattern &env)
x (compile node &env)]
(if (= ::CATA_NOT_BOUND x)
(throw (ex-info "cata not allowed here" {:pattern pattern}))
x)))
|
ab1b92a6618d77af789471bf7801889e375170e1add0741df8adc16cc924d22e | CommonDoc/common-doc | gnuplot.lisp | (in-package :cl-user)
(defpackage common-doc.gnuplot
(:use :cl)
(:import-from :common-doc
:text
:children
:make-text
:make-image
:define-node)
(:import-from :common-doc.macro
:macro-node
:expand-macro)
(:import-from :common-doc.file
:absolute-path
:relativize-pathname)
(:export :gnuplot
:image-path
:*gnuplot-command*
:*gnuplot-default-term*)
(:documentation "gnuplot contrib package."))
(in-package :common-doc.gnuplot)
;;; Configuration
(defvar *gnuplot-command* "gnuplot"
"The path/executable name used for @c(gnuplot). The default is @c(\"gnuplot\").
It is either a string, which indicates an executable name, or full
path to the executable. Or it is a list of strings. If it is a list,
the first element is the executable and the rest are command line
arguments.")
(defvar *gnuplot-default-term*
#+darwin "png"
#-darwin "pngcairo"
"Default terminal to use for gnuplot. The default is @c(pngcairo) except for mac OSX because
@c(pngcairo) is not available. On OSX the default terminal is @c(png).")
;;; Classes
(define-node gnuplot (macro-node)
((path :reader image-path
:initarg :path
:type string
:attribute-name "path"
:documentation "Path to file where the image will be stored.")
(term :reader term
:initarg :term
:type string
:attribute-name "term"
:documentation "Terminal used by gnuplot as in @c(set term <term>). The default is taken from @c(*gnuplot-default-term*)."))
(:tag-name "gnuplot")
(:documentation "gnuplot plot."))
;;; Macroexpansion
(defmethod expand-macro ((plot gnuplot))
"Take the gnuplot source code from the children and the image name, render it
with gnuplot into an image."
(let* ((pathname (absolute-path (image-path plot)))
The gnuplot commands
(text (text (first (children plot))))
The gnuplot commands to set output format , file etc .
(input (format nil "set term ~S; set output ~S; ~A~%"
(or (term plot) *gnuplot-default-term*)
(namestring pathname)
text))
The gnuplot command
(command "gnuplot"))
;; Run
(handler-case
(progn
(with-input-from-string (stream input)
(uiop:run-program command :input stream))
(make-image (namestring (relativize-pathname pathname))))
(t (e)
(make-text (format nil "gnuplot error: ~A" e))))))
| null | https://raw.githubusercontent.com/CommonDoc/common-doc/bcde4cfee3d34482d9830c8f9ea45454c73cf5aa/contrib/gnuplot/gnuplot.lisp | lisp | Configuration
Classes
Macroexpansion
Run | (in-package :cl-user)
(defpackage common-doc.gnuplot
(:use :cl)
(:import-from :common-doc
:text
:children
:make-text
:make-image
:define-node)
(:import-from :common-doc.macro
:macro-node
:expand-macro)
(:import-from :common-doc.file
:absolute-path
:relativize-pathname)
(:export :gnuplot
:image-path
:*gnuplot-command*
:*gnuplot-default-term*)
(:documentation "gnuplot contrib package."))
(in-package :common-doc.gnuplot)
(defvar *gnuplot-command* "gnuplot"
"The path/executable name used for @c(gnuplot). The default is @c(\"gnuplot\").
It is either a string, which indicates an executable name, or full
path to the executable. Or it is a list of strings. If it is a list,
the first element is the executable and the rest are command line
arguments.")
(defvar *gnuplot-default-term*
#+darwin "png"
#-darwin "pngcairo"
"Default terminal to use for gnuplot. The default is @c(pngcairo) except for mac OSX because
@c(pngcairo) is not available. On OSX the default terminal is @c(png).")
(define-node gnuplot (macro-node)
((path :reader image-path
:initarg :path
:type string
:attribute-name "path"
:documentation "Path to file where the image will be stored.")
(term :reader term
:initarg :term
:type string
:attribute-name "term"
:documentation "Terminal used by gnuplot as in @c(set term <term>). The default is taken from @c(*gnuplot-default-term*)."))
(:tag-name "gnuplot")
(:documentation "gnuplot plot."))
(defmethod expand-macro ((plot gnuplot))
"Take the gnuplot source code from the children and the image name, render it
with gnuplot into an image."
(let* ((pathname (absolute-path (image-path plot)))
The gnuplot commands
(text (text (first (children plot))))
The gnuplot commands to set output format , file etc .
(input (format nil "set term ~S; set output ~S; ~A~%"
(or (term plot) *gnuplot-default-term*)
(namestring pathname)
text))
The gnuplot command
(command "gnuplot"))
(handler-case
(progn
(with-input-from-string (stream input)
(uiop:run-program command :input stream))
(make-image (namestring (relativize-pathname pathname))))
(t (e)
(make-text (format nil "gnuplot error: ~A" e))))))
|
88d728b82c099efd0cc89925ec8894f727ee004a62816b473037e4850e9f57b6 | ocaml-multicore/tezos | test_qty.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
(** Testing
-------
Component: Protocol (quantities)
Invocation: dune exec src/proto_alpha/lib_protocol/test/unit/main.exe \
-- test "^\[Unit\] qty$"
Subject: On tez quantities.
*)
open Protocol
let known_ok_tez_literals =
[
(0L, "0");
(10L, "0.00001");
(100L, "0.0001");
(1_000L, "0.001");
(10_000L, "0.01");
(100_000L, "0.1");
(1_000_000L, "1");
(10_000_000L, "10");
(100_000_000L, "100");
(1_000_000_000L, "1000");
(10_000_000_000L, "10000");
(100_000_000_000L, "100000");
(1_000_000_000_000L, "1000000");
(1_000_000_000_001L, "1000000.000001");
(1_000_000_000_010L, "1000000.00001");
(1_000_000_000_100L, "1000000.0001");
(1_000_000_001_000L, "1000000.001");
(1_000_000_010_000L, "1000000.01");
(1_000_000_100_000L, "1000000.1");
(123_123_123_123_123_123L, "123123123123.123123");
(999_999_999_999_999_999L, "999999999999.999999");
]
let known_bad_tez_literals =
[
"10000.";
"100,.";
"100,";
"1,0000";
"0.0000,1";
"0.00,1";
"0,1";
"HAHA";
"0.000,000,1";
"0.0000000";
"9,999,999,999,999.999,999";
]
let fail expected given msg =
Format.kasprintf
Stdlib.failwith
"@[%s@ expected: %s@ got: %s@]"
msg
expected
given
let fail_msg fmt = Format.kasprintf (fail "" "") fmt
let default_printer _ = ""
(** Literals which are supposed to be parsed correctly. *)
let test_known_tez_literals () =
List.iter
(fun (v, s) ->
let vv = Tez_repr.of_mutez v in
let vs = Tez_repr.of_string s in
let vs' =
Tez_repr.of_string (String.concat "" (String.split_on_char ',' s))
in
let vv =
match vv with None -> fail_msg "could not unopt %Ld" v | Some vv -> vv
in
let vs =
match vs with None -> fail_msg "could not unopt %s" s | Some vs -> vs
in
let vs' =
match vs' with
| None -> fail_msg "could not unopt %s" s
| Some vs' -> vs'
in
assert (vv = vs) ;
assert (vv = vs') ;
assert (Tez_repr.to_string vv = s))
known_ok_tez_literals ;
List.iter
(fun s ->
let vs = Tez_repr.of_string s in
assert (vs = None))
known_bad_tez_literals ;
return_unit
(** Randomly generated tez value which is printed into a string then
parsed again for their equality. *)
let test_random_tez_literals () =
for _ = 0 to 100_000 do
let v = Random.int64 12L in
let vv = Tez_repr.of_mutez v in
let vv =
match vv with None -> fail_msg "could not unopt %Ld" v | Some vv -> vv
in
let s = Tez_repr.to_string vv in
let vs = Tez_repr.of_string s in
let s' = String.concat "" (String.split_on_char ',' s) in
let vs' = Tez_repr.of_string s' in
assert (vs <> None) ;
assert (vs' <> None) ;
(match vs with
| None -> assert false
| Some vs ->
let rev = Tez_repr.to_mutez vs in
assert (v = rev)) ;
match vs' with
| None -> assert false
| Some vs' ->
let rev = Tez_repr.to_mutez vs' in
assert (v = rev)
done ;
return_unit
let tests =
[
("tez-literals", fun _ -> test_known_tez_literals ());
("rnd-tez-literals", fun _ -> test_random_tez_literals ());
]
let wrap (n, f) =
Alcotest_lwt.test_case n `Quick (fun _ () ->
f () >|= function
| Ok () -> ()
| Error error ->
Format.kasprintf Stdlib.failwith "%a" pp_print_trace error)
let tests = List.map wrap tests
| null | https://raw.githubusercontent.com/ocaml-multicore/tezos/e4fd21a1cb02d194b3162ab42d512b7c985ee8a9/src/proto_alpha/lib_protocol/test/unit/test_qty.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
* Testing
-------
Component: Protocol (quantities)
Invocation: dune exec src/proto_alpha/lib_protocol/test/unit/main.exe \
-- test "^\[Unit\] qty$"
Subject: On tez quantities.
* Literals which are supposed to be parsed correctly.
* Randomly generated tez value which is printed into a string then
parsed again for their equality. | Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Protocol
let known_ok_tez_literals =
[
(0L, "0");
(10L, "0.00001");
(100L, "0.0001");
(1_000L, "0.001");
(10_000L, "0.01");
(100_000L, "0.1");
(1_000_000L, "1");
(10_000_000L, "10");
(100_000_000L, "100");
(1_000_000_000L, "1000");
(10_000_000_000L, "10000");
(100_000_000_000L, "100000");
(1_000_000_000_000L, "1000000");
(1_000_000_000_001L, "1000000.000001");
(1_000_000_000_010L, "1000000.00001");
(1_000_000_000_100L, "1000000.0001");
(1_000_000_001_000L, "1000000.001");
(1_000_000_010_000L, "1000000.01");
(1_000_000_100_000L, "1000000.1");
(123_123_123_123_123_123L, "123123123123.123123");
(999_999_999_999_999_999L, "999999999999.999999");
]
let known_bad_tez_literals =
[
"10000.";
"100,.";
"100,";
"1,0000";
"0.0000,1";
"0.00,1";
"0,1";
"HAHA";
"0.000,000,1";
"0.0000000";
"9,999,999,999,999.999,999";
]
let fail expected given msg =
Format.kasprintf
Stdlib.failwith
"@[%s@ expected: %s@ got: %s@]"
msg
expected
given
let fail_msg fmt = Format.kasprintf (fail "" "") fmt
let default_printer _ = ""
let test_known_tez_literals () =
List.iter
(fun (v, s) ->
let vv = Tez_repr.of_mutez v in
let vs = Tez_repr.of_string s in
let vs' =
Tez_repr.of_string (String.concat "" (String.split_on_char ',' s))
in
let vv =
match vv with None -> fail_msg "could not unopt %Ld" v | Some vv -> vv
in
let vs =
match vs with None -> fail_msg "could not unopt %s" s | Some vs -> vs
in
let vs' =
match vs' with
| None -> fail_msg "could not unopt %s" s
| Some vs' -> vs'
in
assert (vv = vs) ;
assert (vv = vs') ;
assert (Tez_repr.to_string vv = s))
known_ok_tez_literals ;
List.iter
(fun s ->
let vs = Tez_repr.of_string s in
assert (vs = None))
known_bad_tez_literals ;
return_unit
let test_random_tez_literals () =
for _ = 0 to 100_000 do
let v = Random.int64 12L in
let vv = Tez_repr.of_mutez v in
let vv =
match vv with None -> fail_msg "could not unopt %Ld" v | Some vv -> vv
in
let s = Tez_repr.to_string vv in
let vs = Tez_repr.of_string s in
let s' = String.concat "" (String.split_on_char ',' s) in
let vs' = Tez_repr.of_string s' in
assert (vs <> None) ;
assert (vs' <> None) ;
(match vs with
| None -> assert false
| Some vs ->
let rev = Tez_repr.to_mutez vs in
assert (v = rev)) ;
match vs' with
| None -> assert false
| Some vs' ->
let rev = Tez_repr.to_mutez vs' in
assert (v = rev)
done ;
return_unit
let tests =
[
("tez-literals", fun _ -> test_known_tez_literals ());
("rnd-tez-literals", fun _ -> test_random_tez_literals ());
]
let wrap (n, f) =
Alcotest_lwt.test_case n `Quick (fun _ () ->
f () >|= function
| Ok () -> ()
| Error error ->
Format.kasprintf Stdlib.failwith "%a" pp_print_trace error)
let tests = List.map wrap tests
|
1f6577741a40ce92506b80db0d6972cafd615271319f861e0ad7160f70702d18 | fourier/ppath | generic.lisp | (defpackage ppath.details.generic
(:use :cl :alexandria :ppath.details.constants)
(:export path-error string-type getenv getcwd concat getpid get-temp-path
commonprefix splitext split-components))
(in-package ppath.details.generic)
(define-condition path-error
(error)
((function :initarg :function
:initform 'unknown
:reader path-error-function)
(reason :initarg :reason
:reader reason))
(:report (lambda (condition stream)
(format stream "Path processing: ~a" (reason condition)))))
(deftype string-type ()
#+lispworks7
'lw:simple-bmp-string
#+lispworks6 'lw:simple-text-string
#-lispworks 'string)
(declaim (notinline getenv))
(defun getenv (name)
"Get system environment variable value."
The function is a wrapper around uiop : declared notinline so
;; the tests could override it
(uiop:getenv name))
(declaim (notinline getcwd))
(defun getcwd ()
"Get the current working directory as a string"
;; Using uiop:getcwd.
The function is a wrapper around uiop : declared notinline so
;; the tests could override it
(namestring (uiop:getcwd)))
(defun concat (&rest strs)
"Concatenate strings in a portable manner, converting to unicode string if necessary"
(let ((str-type
#+lispworks
(let ((lw-strtype 'string-type))
(if (some (lambda (x) (subtypep (type-of x) lw-strtype)) strs)
lw-strtype
'string))
#-lispworks 'string))
(apply #'concatenate str-type (mapcar #'string strs))))
(defun getpid ()
"Return the current process id"
#+windows (ppath.details.nt.cffi:getpid)
#-windows (ppath.details.posix.cffi:getpid))
(defun get-temp-path ()
"Return the path to the temporary files directory"
#+windows (ppath.details.nt.cffi:get-temp-path)
#-windows "/tmp/")
(defun commonprefix (&rest paths)
"Get the common prefix substring of all strings in PATHS.
PATHS components could also be lists of strings, like results of
SPLIT operation on paths.
If no common prefix return empty string."
(unless paths (return-from commonprefix ""))
(reduce (lambda (x y)
(subseq x 0 (or (mismatch x y :test #'equal) (length x)))) paths))
(declaim (inline sep-p))
(defun sep-p (c)
(declare (type character c))
(declare (optimize (speed 3) (safety 0)))
(or (char= c #\\) (char= c #\/)))
(defun splitext (path)
"Split path to path and extension. Extension is the text
after the last dot.
Invariant: (concatenate 'string root ext) == p)"
(let ((ext-pos (or (position #\. path :from-end t) -1))
(sep-pos (or (position-if #'sep-p path :from-end t) -1)))
(if (>= sep-pos ext-pos) ; encountered slash from right
(cons path "") ; return whole path
;; check if between slash and dot exist other letters,
otherwise its a full path like
(loop with i = (1+ sep-pos)
while (< i ext-pos)
unless (char= (char path i) #\.) do
(return (cons (subseq path 0 ext-pos) (subseq path ext-pos)))
end
do (incf i)
finally (return (cons path ""))))))
(defun wildcard-to-regex (pattern &key (case-sensitive-p t) (beginning-of-string t) (end-of-string t))
"Convert file wildcards to regular expressions. By default the regular
expression is case sensitive. This is regulated by keyword argument
CASE-SENSITIVE-P.
Parameters BEGINNING-OF-STRING and END-OF-STRING identify whether the beginning of the string (^)
or end of string ($) marker should be present.
Supported patters:
* - everything
? - any character
[range] - any character in range
[!range] - any character not in range
Note that directory separator characters '/' and '\\'
are treated the same way as other characters, so
the function is used to treat complex paths they
better to be splitted.
Example:
=> (wildcard-to-regex \"Photo*.jpg\")
\"^Photo.*\\\\.jpg$\"
=> (wildcard-to-regex \"Photo*.jpg\" :case-sensitive-p nil)
\"(?i)^Photo.*\\\\.jpg$\""
(let ((regex
(loop for i below (length pattern)
for c = (char pattern i)
if (char= c #\*) ; process * mask
collect ".*" into result
else if (char= c #\?) ; process ?
collect "." into result
else if (char= c #\[) ; range found
collect ;;(extract-range i)
(if-let (close-pos (position #\] pattern :start i)) ;; find closing ]
;; found, replace \ with \\
(let ((res (ppcre:regex-replace-all "\\" (subseq pattern (1+ i ) close-pos) "\\\\" )))
(setf i close-pos) ; increase current position to the end of range
(format nil "[~a]"
(cond ((char= (char res 0) #\!)
(concatenate 'string "^" (subseq res 1)))
((char= (char res 0) #\^)
(concatenate 'string "\\" res))
(t res))))
;; no closing range character found, assuming special
"\\[")
into result
else ; finally just append rest (quoting specials of course)
collect (ppcre:quote-meta-chars (string c)) into result
end
finally
(return (apply #'concatenate 'string result)))))
(concatenate 'string
(unless case-sensitive-p "(?i)")
(when beginning-of-string "^")
regex
(when end-of-string "$"))))
(defun split-components (path)
"Splits the path to the list of elements using
slash as a separator. Separators are not omitted.
Example:
(split-components \"/abc/def/gh//12\")
=> (\"/\" \"abc\" \"/\" \"def\" \"/\" \"gh\" \"//\" \"12\")"
(unless (emptyp path)
(let (components)
(loop with is-sep = (sep-p (char path 0))
with current-word = nil
for x across path
for c = (sep-p x)
if (eql c is-sep) do
(push x current-word)
else do
(progn
(push current-word components)
(setf is-sep c)
(setf current-word nil)
(push x current-word))
end
finally (push current-word components))
(nreverse
(mapcar (compose (rcurry #'coerce 'string-type) #'nreverse) components)))))
| null | https://raw.githubusercontent.com/fourier/ppath/eb1a8173b4d1d691ea9a7699412123462f58c3ce/src/details/generic.lisp | lisp | the tests could override it
Using uiop:getcwd.
the tests could override it
encountered slash from right
return whole path
check if between slash and dot exist other letters,
process * mask
process ?
range found
(extract-range i)
find closing ]
found, replace \ with \\
increase current position to the end of range
no closing range character found, assuming special
finally just append rest (quoting specials of course)
| (defpackage ppath.details.generic
(:use :cl :alexandria :ppath.details.constants)
(:export path-error string-type getenv getcwd concat getpid get-temp-path
commonprefix splitext split-components))
(in-package ppath.details.generic)
(define-condition path-error
(error)
((function :initarg :function
:initform 'unknown
:reader path-error-function)
(reason :initarg :reason
:reader reason))
(:report (lambda (condition stream)
(format stream "Path processing: ~a" (reason condition)))))
(deftype string-type ()
#+lispworks7
'lw:simple-bmp-string
#+lispworks6 'lw:simple-text-string
#-lispworks 'string)
(declaim (notinline getenv))
(defun getenv (name)
"Get system environment variable value."
The function is a wrapper around uiop : declared notinline so
(uiop:getenv name))
(declaim (notinline getcwd))
(defun getcwd ()
"Get the current working directory as a string"
The function is a wrapper around uiop : declared notinline so
(namestring (uiop:getcwd)))
(defun concat (&rest strs)
"Concatenate strings in a portable manner, converting to unicode string if necessary"
(let ((str-type
#+lispworks
(let ((lw-strtype 'string-type))
(if (some (lambda (x) (subtypep (type-of x) lw-strtype)) strs)
lw-strtype
'string))
#-lispworks 'string))
(apply #'concatenate str-type (mapcar #'string strs))))
(defun getpid ()
"Return the current process id"
#+windows (ppath.details.nt.cffi:getpid)
#-windows (ppath.details.posix.cffi:getpid))
(defun get-temp-path ()
"Return the path to the temporary files directory"
#+windows (ppath.details.nt.cffi:get-temp-path)
#-windows "/tmp/")
(defun commonprefix (&rest paths)
"Get the common prefix substring of all strings in PATHS.
PATHS components could also be lists of strings, like results of
SPLIT operation on paths.
If no common prefix return empty string."
(unless paths (return-from commonprefix ""))
(reduce (lambda (x y)
(subseq x 0 (or (mismatch x y :test #'equal) (length x)))) paths))
(declaim (inline sep-p))
(defun sep-p (c)
(declare (type character c))
(declare (optimize (speed 3) (safety 0)))
(or (char= c #\\) (char= c #\/)))
(defun splitext (path)
"Split path to path and extension. Extension is the text
after the last dot.
Invariant: (concatenate 'string root ext) == p)"
(let ((ext-pos (or (position #\. path :from-end t) -1))
(sep-pos (or (position-if #'sep-p path :from-end t) -1)))
otherwise its a full path like
(loop with i = (1+ sep-pos)
while (< i ext-pos)
unless (char= (char path i) #\.) do
(return (cons (subseq path 0 ext-pos) (subseq path ext-pos)))
end
do (incf i)
finally (return (cons path ""))))))
(defun wildcard-to-regex (pattern &key (case-sensitive-p t) (beginning-of-string t) (end-of-string t))
"Convert file wildcards to regular expressions. By default the regular
expression is case sensitive. This is regulated by keyword argument
CASE-SENSITIVE-P.
Parameters BEGINNING-OF-STRING and END-OF-STRING identify whether the beginning of the string (^)
or end of string ($) marker should be present.
Supported patters:
* - everything
? - any character
[range] - any character in range
[!range] - any character not in range
Note that directory separator characters '/' and '\\'
are treated the same way as other characters, so
the function is used to treat complex paths they
better to be splitted.
Example:
=> (wildcard-to-regex \"Photo*.jpg\")
\"^Photo.*\\\\.jpg$\"
=> (wildcard-to-regex \"Photo*.jpg\" :case-sensitive-p nil)
\"(?i)^Photo.*\\\\.jpg$\""
(let ((regex
(loop for i below (length pattern)
for c = (char pattern i)
collect ".*" into result
collect "." into result
(let ((res (ppcre:regex-replace-all "\\" (subseq pattern (1+ i ) close-pos) "\\\\" )))
(format nil "[~a]"
(cond ((char= (char res 0) #\!)
(concatenate 'string "^" (subseq res 1)))
((char= (char res 0) #\^)
(concatenate 'string "\\" res))
(t res))))
"\\[")
into result
collect (ppcre:quote-meta-chars (string c)) into result
end
finally
(return (apply #'concatenate 'string result)))))
(concatenate 'string
(unless case-sensitive-p "(?i)")
(when beginning-of-string "^")
regex
(when end-of-string "$"))))
(defun split-components (path)
"Splits the path to the list of elements using
slash as a separator. Separators are not omitted.
Example:
(split-components \"/abc/def/gh//12\")
=> (\"/\" \"abc\" \"/\" \"def\" \"/\" \"gh\" \"//\" \"12\")"
(unless (emptyp path)
(let (components)
(loop with is-sep = (sep-p (char path 0))
with current-word = nil
for x across path
for c = (sep-p x)
if (eql c is-sep) do
(push x current-word)
else do
(progn
(push current-word components)
(setf is-sep c)
(setf current-word nil)
(push x current-word))
end
finally (push current-word components))
(nreverse
(mapcar (compose (rcurry #'coerce 'string-type) #'nreverse) components)))))
|
c8662e7897c9c12c9221a68d8105c303204ec78ade6fb5546fb9ea8d932fa12c | pflanze/chj-schemelib | interrupts.scm | Copyright 2013 - 2019 by < >
;;; This file is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
;;; (at your option) any later version.
(require ;;cj-queue
cj-alist
cj-env
;;(list-util map-iota)
)
;; (compile #t)
;; (exports
; ; many SIGxxx names . how to export those automatically ?
SIGCHLD
;; SIGPOLL
;; ;;...
; ; = SIGPOLL
;; signal-number->name ;; takes int. dies on non-existing signals.[could this be made part of type sys?]
;; interrupt-install-handler! ;; (interrupt-install-handler! signal-number handler)
;; interrupt-remove-handler! ;; (interrupt-remove-handler! signal-number)
;; )
;; (exports-on-request
;; make-sigqueue
;; sigqueue-empty?
;; sigqueue-endpos
;; sigqueue-full?
;; sigqueue-overflow
;; sigqueue-overflow-reset!
;; sigqueue-positions
;; sigqueue-startpos
;; sigqueue-take!
;; sigqueue-usage
;; sigqueue-add!
;; sigqueue-remove!
;; sigqueue-ref-signo
;; ;; constant-from-c:
SIGQUEUE_ERROR
;; SIGQUEUE_ERROR2
SIGQUEUE_SUCCESS
;; ;; ?:
;; sig-errstr
;; ;; ~lowlevel handling:
;; init
;; call-with-locks
;; global_queue
;; signal-take!
;; interrupt-dispatch
;; sig-lock!
;; sig-unlock!
;; sig-locked?
;; handlers
;; handlers-set! ;;(this is for alist-set!)
;; )
;; compiletime:
;; make-gen-code
(define-constant-from-C SIGCHLD)
(define-constant-from-C SIGINT)
(define-constant-from-C SIGHUP)
(define-constant-from-C SIGPOLL)
;;XX and more...
cj Fri , 27 Jan 2006 12:25:15 +0100
based on code sent by on 4 Jan 2006
;; this is unix centric, and expects BSD like signal behaviour.
;; I develop on Debian.
;; (Maybe some time, this could go into a |posix| module?)
;; todo: this assumes that the order in which raised interrupts are
;; delivered to scheme code is the same as the one in which the C
;; signal handler is called. Is this a safe assumption?
;; todo: make this thread safe
;; create a retrieve-current setting infrastructure, which then can be
;; given instead of procedure?
;; or, do it with parameters instead ? how?
cj Thu , 28 Dec 2006 01:35:31 +0100
;; NOTE: this is using 'signal', not 'sigaction'. But as long as
;; nobody's interested in realtime signals (and/or special signals
;; like async etc.), that probably doesn't make a difference (since
;; non-realtime signals can be lost).
;; -- compilation infrastructure
simplified COPY from gperl-lowlevel.scm
(##define-macro (define-constant-from-C name)
`(define ,name
((c-lambda ()
int
,(string-append "___result="
(symbol->string name)
";")))))
;;/COPY
(compile-time
(define (make-gen-code check-code return-code)
(lambda (name/args c-name argtypes returntype error-message
#!optional
error-message2)
(let ((name (car name/args))
(args (cdr name/args)))
`(define ,name/args
(let ((res ((c-lambda ,argtypes ,returntype ,c-name) ,@args)))
(if ,check-code
(error (string-append
,(string-append (symbol->string name) ": ")
,(if error-message2
`(if (= res SIGQUEUE_ERROR2)
,error-message2
,error-message)
error-message)))
,return-code)))))))
(##define-macro (define-c/int_or_error . args)
(apply (make-gen-code '(= res SIGQUEUE_ERROR) 'res) args))
(##define-macro (define-c/status . args)
(apply (make-gen-code '(not (= res 0)) #!void) args))
;; ----- sigqueue interface ---------------
(c-declare "#include \"interrupts_Cpart.c\"")
( c - define - type sigqueue ( pointer ( struct " sigqueue " ) sigqueue * " " ) )
(c-define-type sigqueue (pointer (struct "sigqueue") sigqueue "sigqueue_release"))
; aber nein, default handling of gambit is with the stars.
( c - define - type sigqueue ( pointer ( struct " sigqueue " ) sigqueue * " " ) )
; but why. how would I deal with a structure without pointer? todo ask.
;; hey and even stuff like latin1-string do not have stars.
(define-constant-from-C SIGQUEUE_SUCCESS)
(define-constant-from-C SIGQUEUE_ERROR)
(define-constant-from-C SIGQUEUE_ERROR2)
(define make-sigqueue (c-lambda () sigqueue "make_sigqueue"))
(define sigqueue-usage (c-lambda (sigqueue) int "sigqueue_usage"))
(define (sigqueue-full? q)
(= ((c-lambda (sigqueue) int "sigqueue_isfull") q)
1))
(define (sigqueue-empty? q)
(= ((c-lambda (sigqueue) int "sigqueue_isempty") q)
1))
(define-c/status (sigqueue-add! q signo) "sigqueue_add" (sigqueue int) int "queue is full")
(define-c/int_or_error (sigqueue-ref-signo q) "sigqueue_ref_signo" (sigqueue) int "queue is empty")
(define-c/status (sigqueue-remove! q) "sigqueue_remove" (sigqueue) int "queue is empty")
(define-c/status (sig-lock!) "sig_lock" () int "already locked" "sigprocmask gave error");;todo show OS error
(define-c/status (sig-unlock!) "sig_unlock" () int "not locked" "sigprocmask gave error");;todo show OS error
(define (sig-locked?)
((c-lambda () bool "___result= is_locked;")))
;; scheme addons:
(define (sigqueue-take! q)
(let ((res (sigqueue-ref-signo q)))
(sigqueue-remove! q)
res))
(define (sigqueue-startpos q)
((c-lambda (sigqueue) int "___result= ___arg1->startpos;") q))
(define (sigqueue-endpos q)
((c-lambda (sigqueue) int "___result= ___arg1->endpos;") q))
(define (sigqueue-positions q)
(values (sigqueue-startpos q)
(sigqueue-endpos q)))
(define (sigqueue-overflow q)
((c-lambda (sigqueue) bool "___result= ___arg1->overflow;") q))
(define (sigqueue-overflow-reset! q)
((c-lambda (sigqueue) void "___arg1->overflow=false;") q))
;; ---- the list of possible signals and their names: -------------
;; Build, at macro-expansion time, the alist %signal-number->name%
;; mapping signal numbers to SIGxxx symbols, plus one
;; (define SIGxxx <number>) per known signal, by shelling out to
;; `kill -l <n>`.
;; FIX: the second line of the comment below had lost its ";;" marker in
;; this copy, leaving an unbalanced double quote that broke the reader.
(insert-result-of
 (begin
   ;; Get all signal names. This assumes that the shell command "kill -l
   ;; <number>" will return the signal name without the SIG prefix.
   (define (signal-number->maybe-name num)
     (let* ((port (open-process
                   (list path: "kill"
                         arguments: (list "-l" (number->string num)))))
            (line (read-line port))
            (res (close-port port)))
       ;;(warn "got kill res code-or-so =" res)
       ;; is always #!void. still ask how todo this.
       (if (eof-object? line)
           #f
           (string-append "SIG" line))))
   (let ((alis (filter (lambda (v) v)
                       (map (lambda (num)
                              (cond ((signal-number->maybe-name num)
                                     => (lambda (name)
                                          (cons num
                                                (string->symbol name))))
                                    (else #f)))
                            (iota
                             ;; the number of valid signal numbers
                             30
                             ;; the lowest signal number
                             1)))))
     `(begin
        (define %signal-number->name% ',alis)
        ,@(map (lambda (p)
                 `(define ,(cdr p) ,(car p)))
              alis)))))
(define-if-not-defined SIGIO SIGPOLL)
;; returns symbol. [should we take err+success continuations? ...]
;; NOTE(review): the defining line was missing in this copy (extraction
;; damage); reconstructed from the exports list and the body below.
(define (signal-number->name num)
  (number-alist-ref %signal-number->name% num
                    (lambda ()
                      (error "signal-number->name: unknown signal number:" num))))
;; ------- interfacing scheme with handler setup functions: ------
;; Human-readable message for the last C-side failure -- presumably a
;; strerror(3)-style string; confirm sig_errstr in interrupts_Cpart.c.
(define sig-errstr (c-lambda ()
                             ;; latin1-string hm not available anymore? wl fair
                             char-string
                             "sig_errstr"))
;; Generate Scheme wrappers install_signal / uninstall_signal around the
;; same-named C functions; each takes a signal number and raises a
;; Scheme error (carrying the OS error string from sig-errstr) when the
;; C call returns nonzero.
(insert-result-of
 (cons 'begin
       (map (lambda (CNAME)
              `(define (,(string->symbol CNAME)
                        num)
                 (or (= ((c-lambda (int) int ,CNAME) num) 0)
                     (error (string-append ,(string-append CNAME ": system error: ")
                                           (sig-errstr))))))
            '("install_signal" "uninstall_signal"))))
;; Registry mapping signal number -> handler thunk.
;; define-if-not-defined keeps existing registrations across reloads of
;; this file.
(define-if-not-defined interrupts:handlers
  ;; signal to handler
  (make-table))
;; Install HANDLER (a thunk) for SIGNAL-NUMBER; SIGNAL-NUMBER may also
;; be a list of numbers, in which case HANDLER is installed for each.
;; SIGINT is special-cased: Gambit already routes it through
;; current-user-interrupt-handler, so we hook in there instead of
;; installing our own C-side handler.
(define (interrupt-install-handler! signal-number handler)
  (if (and (##fixnum? signal-number)
           (> signal-number 0))
      ;; check for SIGINT and then set current-user-interrupt-handler instead?
      (if (= signal-number SIGINT)
          (current-user-interrupt-handler handler)
          (begin
            (table-set! interrupts:handlers signal-number handler)
            (install_signal signal-number)))
      (if (list? signal-number)
          ;; BUGFIX: the recursive call needs both arguments; the
          ;; previous bare for-each over the list dropped the handler.
          (for-each (lambda (signo)
                      (interrupt-install-handler! signo handler))
                    signal-number)
          (error "interrupt-install-handler!: signal-number argument is not an ordinal number:" signal-number))))
;; Remove the handler previously installed for SIGNAL-NUMBER (or for
;; each member when given a list). Errors when no handler is installed.
;; FIX: an unprefixed "(todo: ...)" note in the SIGINT branch would have
;; been evaluated as a call; restored as a comment. Also, the existence
;; check now happens before uninstall_signal so that raising the
;; "no handler installed" error does not leave the C side already
;; uninstalled.
(define (interrupt-remove-handler! signal-number) ;; signal-number may also be a list of numbers
  (if (and (##fixnum? signal-number)
           (> signal-number 0))
      (if (= signal-number SIGINT)
          ;; ok? and: do we want it to reinstall the default gambit one?
          ;; or rather remove it really..?
          ;; (todo: should we route signal 2 through this interrupts
          ;; module as well?)
          (current-user-interrupt-handler ##default-user-interrupt-handler)
          (begin
            (or (table-ref interrupts:handlers signal-number #f)
                (error "interrupt-remove-handler!: no handler installed for signal:"
                       signal-number))
            (uninstall_signal signal-number)
            ;; table-set! without a value removes the key (Gambit).
            (table-set! interrupts:handlers signal-number)))
      (if (list? signal-number)
          (for-each interrupt-remove-handler! signal-number)
          (error "interrupt-remove-handler!: signal-number argument is not an ordinal number:" signal-number))))
;; The shared C<->Scheme signal queue; set by (init) below.
;; Kept across reloads via define-if-not-defined.
(define-if-not-defined global_queue #f)
;; FIX: a stray unprefixed note ("çç neue funktionen?" -- German for
;; "new functions?") followed here and would have broken the reader;
;; preserved as this comment.
;; Run THUNK with signal delivery locked out (sig-lock! / sig-unlock!),
;; restoring the unlocked state even on non-local exit, courtesy of
;; dynamic-wind.
(define (call-with-locks thunk)
  (dynamic-wind
      sig-lock!
      thunk
      sig-unlock!))
;; Pop one signal number from the shared queue, with the C-side signal
;; handler locked out for the duration of the access.
(define (signal-take!)
  (call-with-locks (lambda ()
                     (sigqueue-take! global_queue))))
;; Called by the Gambit runtime as a result of the C side's
;; ___EXT(___raise_interrupt)(___INTR_7) call and the
;; (##interrupt-vector-set! 7 interrupt-dispatch) mapping done in init.
;; (These two comment lines had lost their ";;" markers in this copy.)
(define (interrupt-dispatch)
  (let ((signum (call-with-locks
                 (lambda ()
                   (when (sigqueue-overflow global_queue)
                     (warn "Warning: signal queue has been overflown!")
                     (sigqueue-overflow-reset! global_queue))
                   (sigqueue-take! global_queue)))))
    (cond ((table-ref interrupts:handlers signum #f)
           => (lambda (handler)
                ;; BUGFIX: actually invoke the handler; the previous
                ;; (or ...) form only looked it up and returned it
                ;; without calling it.
                (handler)))
          (else
           (warn "no scheme signal handler installed anymore for signal:" signum)
           (exit 123) ;;;?todo
           ))))
;; Initialise the C side (allocates the shared queue, sets up the
;; low-level handler machinery), fetch the queue pointer into
;; global_queue, and hook our dispatcher into Gambit's interrupt vector
;; slot 7.
(define (init)
  (let ((res ((c-lambda () int "init"))))
    (cond ((= res SIGQUEUE_SUCCESS)
           ;; 'extract' the queue:
           ;; (if (not global_queue) ... ehr no, forget it. It is newly
           ;; initialized in C; we need it here too. The only alternative
           ;; would be the other direction, give it from scheme to C. But
           ;; we only lose not-yet-delivered signals, so no really big
           ;; deal anyway.)  [this note had lost its ";;" marker and
           ;; would have been evaluated as code]
           (set! global_queue ((c-lambda () sigqueue "___result_voidstar= global_queue;")))
           (##interrupt-vector-set! 7 interrupt-dispatch))
          ((= res SIGQUEUE_ERROR)
           (error "interrupts init: could not allocate memory for sigqueue"))
          (else
           (error "interrupts init: ?? unknown error")))))
(init);yes on each load init it. since C part needs it.right?.
; --------
; test:
;; Manual smoke tests. Both branches are disabled (#f), so this expands
;; to (begin); flip a #f to #t to exercise handler installation by
;; sending USR1/USR2 to the current process.
(insert-result-of
 (cond (#f
        '(begin
           (interrupt-install-handler! SIGINT (lambda () (warn "sigint called")))
           (interrupt-install-handler! SIGUSR1 (lambda () (warn "sigusr1 called")))
           (interrupt-install-handler! SIGUSR2 (lambda () (warn "sigusr2 called")))
           (define my-pid (##os-getpid))
           (display "executing kill -USR1\n")
           (shell-command (string-append "kill -USR1 " (number->string my-pid)))
           (display "executing kill -USR2\n")
           (shell-command (string-append "kill -USR2 " (number->string my-pid)))
           (display "done\n")))
       (#f
        '(begin
           (define (test port)
             (interrupt-install-handler! SIGINT (lambda () (display "i" port)))
             (interrupt-install-handler! SIGUSR1 (lambda () (display "1" port)))
             (interrupt-install-handler! SIGUSR2 (lambda () (display "2" port)))
             (interrupt-install-handler! SIGRTMIN (lambda () (error "got RT signal")))
             )))
       (else
        '(begin))))
| null | https://raw.githubusercontent.com/pflanze/chj-schemelib/59ff8476e39f207c2f1d807cfc9670581c8cedd3/posix/interrupts.scm | scheme | This file is free software; you can redistribute it and/or modify
(at your option) any later version.
cj-queue
(list-util map-iota)
(compile #t)
(exports
; many SIGxxx names . how to export those automatically ?
SIGPOLL
;;...
; = SIGPOLL
signal-number->name ;; takes int. dies on non-existing signals.[could this be made part of type sys?]
interrupt-install-handler! ;; (interrupt-install-handler! signal-number handler)
interrupt-remove-handler! ;; (interrupt-remove-handler! signal-number)
)
(exports-on-request
make-sigqueue
sigqueue-empty?
sigqueue-endpos
sigqueue-full?
sigqueue-overflow
sigqueue-overflow-reset!
sigqueue-positions
sigqueue-startpos
sigqueue-take!
sigqueue-usage
sigqueue-add!
sigqueue-remove!
sigqueue-ref-signo
;; constant-from-c:
SIGQUEUE_ERROR2
;; ?:
sig-errstr
;; ~lowlevel handling:
init
call-with-locks
global_queue
signal-take!
interrupt-dispatch
sig-lock!
sig-unlock!
sig-locked?
handlers
handlers-set! ;;(this is for alist-set!)
)
compiletime:
make-gen-code
XX and more...
this is unix centric, and expects BSD like signal behaviour.
I develop on Debian.
(Maybe some time, this could go into a |posix| module?)
todo: this assumes that the order in which raised interrupts are
delivered to scheme code is the same as the one in which the C
signal handler is called. Is this a safe assumption?
todo: make this thread safe
create a retrieve-current setting infrastructure, which then can be
given instead of procedure?
or, do it with parameters instead ? how?
NOTE: this is using 'signal', not 'sigaction'. But as long as
nobody's interested in realtime signals (and/or special signals
like async etc.), that probably doesn't make a difference (since
non-realtime signals can be lost).
-- compilation infrastructure
/COPY
----- sigqueue interface ---------------
But no: Gambit's default handling is with the stars (the pointer-typed variants).
but why. how would I deal with a structure without pointer? todo ask.
hey and even stuff like latin1-string do not have stars.
todo show OS error
todo show OS error
scheme addons:
---- the list of possible signals and their names: -------------
Get all signal names. This assumes that the shell command "kill -l
(warn "got kill res code-or-so =" res)
is always #!void. still ask how todo this.
the number of valid signal numbers
the lowest signal number
------- interfacing scheme with handler setup functions: ------
latin1-string hm not available anymore? wl fair
signal to handler
signal-number may also be a list of numbers
check for SIGINT and then set current-user-interrupt-handler instead?
signal-number may also be a list of numbers
ok?
and: do we want it to reinstall the default gambit one? or rather remove it really..?
called by scheme runtime as result of the C
call and the
?todo
'extract' the queue:
yes on each load init it. since C part needs it.right?.
--------
test: | Copyright 2013 - 2019 by < >
it under the terms of the GNU General Public License ( GPL ) as published
by the Free Software Foundation , either version 2 of the License , or
cj-alist
cj-env
)
SIGCHLD
SIGQUEUE_ERROR
SIGQUEUE_SUCCESS
;; Signal numbers pulled in from the C headers at build time.
(define-constant-from-C SIGCHLD)
(define-constant-from-C SIGINT)
(define-constant-from-C SIGHUP)
(define-constant-from-C SIGPOLL)
cj Fri , 27 Jan 2006 12:25:15 +0100
based on code sent by on 4 Jan 2006
cj Thu , 28 Dec 2006 01:35:31 +0100
simplified COPY from gperl-lowlevel.scm
;; (define-constant-from-C NAME) binds NAME to the value of the C
;; constant of the same name, read via a generated c-lambda.
(##define-macro (define-constant-from-C name)
  `(define ,name
     ((c-lambda ()
                int
                ,(string-append "___result="
                                (symbol->string name)
                                ";")))))
;; Code generator shared by define-c/status and define-c/int_or_error:
;; produces a definition that calls the C function C-NAME with the given
;; argument/return types and turns a failing result (as decided by
;; CHECK-CODE, which references the variable `res`) into a Scheme error
;; prefixed with the wrapper's name. When ERROR-MESSAGE2 is supplied, a
;; result of SIGQUEUE_ERROR2 selects it over ERROR-MESSAGE.
(compile-time
 (define (make-gen-code check-code return-code)
   (lambda (name/args c-name argtypes returntype error-message
            #!optional
            error-message2)
     (let ((name (car name/args))
           (args (cdr name/args)))
       `(define ,name/args
          (let ((res ((c-lambda ,argtypes ,returntype ,c-name) ,@args)))
            (if ,check-code
                (error (string-append
                        ,(string-append (symbol->string name) ": ")
                        ,(if error-message2
                             `(if (= res SIGQUEUE_ERROR2)
                                  ,error-message2
                                  ,error-message)
                             error-message)))
                ,return-code)))))))
;; Raise when the C call returns SIGQUEUE_ERROR; otherwise yield the int.
(##define-macro (define-c/int_or_error . args)
  (apply (make-gen-code '(= res SIGQUEUE_ERROR) 'res) args))
;; Raise on any nonzero status; otherwise yield void.
(##define-macro (define-c/status . args)
  (apply (make-gen-code '(not (= res 0)) #!void) args))
;; Pull in the C implementation (queue, signal handler, lock helpers).
(c-declare "#include \"interrupts_Cpart.c\"")
;; FIX: two commented-out c-define-type alternatives surrounding the
;; live one had lost their ";;" markers (and had their text mangled) in
;; this copy; restored as comments:
;; (c-define-type sigqueue (pointer (struct "sigqueue") sigqueue* ""))
;; Foreign-pointer type for struct sigqueue; sigqueue_release is called
;; when the Scheme wrapper object is reclaimed.
(c-define-type sigqueue (pointer (struct "sigqueue") sigqueue "sigqueue_release"))
(define-constant-from-C SIGQUEUE_ERROR)
(define-constant-from-C SIGQUEUE_ERROR2)
(define make-sigqueue (c-lambda () sigqueue "make_sigqueue"))
(define sigqueue-usage (c-lambda (sigqueue) int "sigqueue_usage"))
(define (sigqueue-full? q)
(= ((c-lambda (sigqueue) int "sigqueue_isfull") q)
1))
(define (sigqueue-empty? q)
(= ((c-lambda (sigqueue) int "sigqueue_isempty") q)
1))
(define-c/status (sigqueue-add! q signo) "sigqueue_add" (sigqueue int) int "queue is full")
(define-c/int_or_error (sigqueue-ref-signo q) "sigqueue_ref_signo" (sigqueue) int "queue is empty")
(define-c/status (sigqueue-remove! q) "sigqueue_remove" (sigqueue) int "queue is empty")
(define (sig-locked?)
((c-lambda () bool "___result= is_locked;")))
(define (sigqueue-take! q)
(let ((res (sigqueue-ref-signo q)))
(sigqueue-remove! q)
res))
(define (sigqueue-startpos q)
((c-lambda (sigqueue) int "___result= ___arg1->startpos;") q))
(define (sigqueue-endpos q)
((c-lambda (sigqueue) int "___result= ___arg1->endpos;") q))
(define (sigqueue-positions q)
(values (sigqueue-startpos q)
(sigqueue-endpos q)))
(define (sigqueue-overflow q)
((c-lambda (sigqueue) bool "___result= ___arg1->overflow;") q))
(define (sigqueue-overflow-reset! q)
((c-lambda (sigqueue) void "___arg1->overflow=false;") q))
(insert-result-of
(begin
< number > " will return the signal name without the SIG prefix .
(define (signal-number->maybe-name num)
(let* ((port (open-process
(list path: "kill"
arguments: (list "-l" (number->string num)))))
(line (read-line port))
(res (close-port port)))
(if (eof-object? line)
#f
(string-append "SIG" line))))
(let ((alis (filter (lambda (v)v)
(map (lambda (num)
(cond ((signal-number->maybe-name num)
=> (lambda (name)
(cons num
(string->symbol name))))
(else #f)))
(iota
30
1)))))
`(begin
(define %signal-number->name% ',alis)
,@ (map (lambda (p)
`(define ,(cdr p) ,(car p)))
alis)))))
(define-if-not-defined SIGIO SIGPOLL)
returns symbol . [ should we take err+success continuations ? ... ]
(number-alist-ref %signal-number->name% num
(lambda ()
(error "signal-number->name: unknown signal number:" num))))
(define sig-errstr (c-lambda ()
char-string
"sig_errstr"))
(insert-result-of
(cons 'begin
(map (lambda (CNAME)
`(define (,(string->symbol CNAME)
num)
(or (= ((c-lambda (int) int ,CNAME) num) 0)
(error (string-append ,(string-append CNAME ": system error: ")
(sig-errstr))))))
'("install_signal" "uninstall_signal"))))
(define-if-not-defined interrupts:handlers
(make-table))
(if (and (##fixnum? signal-number)
(> signal-number 0))
(if (= signal-number SIGINT)
(current-user-interrupt-handler handler)
(begin
(table-set! interrupts:handlers signal-number handler)
(install_signal signal-number)))
(if (list? signal-number)
(for-each interrupt-install-handler! signal-number)
(error "interrupt-install-handler!: signal-number argument is not an ordinal number:" signal-number))))
(if (and (##fixnum? signal-number)
(> signal-number 0))
(if (= signal-number SIGINT)
(begin
( todo : should we route signal 2 through this interrupts module as well ? )
))
(begin
(uninstall_signal signal-number)
(or (table-ref interrupts:handlers signal-number #f)
(error "interrupt-remove-handler!: no handler installed for signal:"
signal-number))
(table-set! interrupts:handlers signal-number)))
(if (list? signal-number)
(for-each interrupt-remove-handler! signal-number)
(error "interrupt-remove-handler!: signal-number argument is not an ordinal number:" signal-number))))
(define-if-not-defined global_queue #f)
çç neue funktionen . ?
(define (call-with-locks thunk)
(dynamic-wind
sig-lock!
thunk
sig-unlock!))
(define (signal-take!)
(call-with-locks (lambda ()
(sigqueue-take! global_queue))))
(define (interrupt-dispatch)
( # # interrupt - vector - set ! 7 interrupt - dispatch ) mapping .
(let ((signum (call-with-locks
(lambda ()
(when (sigqueue-overflow global_queue)
(warn "Warning: signal queue has been overflown!")
(sigqueue-overflow-reset! global_queue))
(sigqueue-take! global_queue)))))
(or (table-ref interrupts:handlers signum #f)
(begin
(warn "no scheme signal handler installed anymore for signal:" signum)
))))
(define (init)
(let ((res ((c-lambda () int "init"))))
(cond ((= res SIGQUEUE_SUCCESS)
( if ( not global_queue ) ehr no , forget it . newly initialized in C , we need it here to . The only alternative would be the other direction , give from scheme to C. But we only loose not - yet - delivered signals , so no really big deal anyway .
(set! global_queue ((c-lambda () sigqueue "___result_voidstar= global_queue;")))
(##interrupt-vector-set! 7 interrupt-dispatch))
((= res SIGQUEUE_ERROR)
(error "interrupts init: could not allocate memory for sigqueue"))
(else
(error "interrupts init: ?? unknown error")))))
(insert-result-of
(cond (#f
'(begin
(interrupt-install-handler! SIGINT (lambda () (warn "sigint called")))
(interrupt-install-handler! SIGUSR1 (lambda () (warn "sigusr1 called")))
(interrupt-install-handler! SIGUSR2 (lambda () (warn "sigusr2 called")))
(define my-pid (##os-getpid))
(display "executing kill -USR1\n")
(shell-command (string-append "kill -USR1 " (number->string my-pid)))
(display "executing kill -USR2\n")
(shell-command (string-append "kill -USR2 " (number->string my-pid)))
(display "done\n")))
(#f
'(begin
(define (test port)
(interrupt-install-handler! SIGINT (lambda () (display "i" port)))
(interrupt-install-handler! SIGUSR1 (lambda () (display "1" port)))
(interrupt-install-handler! SIGUSR2 (lambda () (display "2" port)))
(interrupt-install-handler! SIGRTMIN (lambda () (error "got RT signal")))
)))
(else
'(begin))))
|
4764fba6852d5400fbbf2b275d1cf93af496b2b7de8332f54233da09b03b08c1 | haskell-servant/servant-cassava | Cassava.hs | {-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE DeriveGeneric #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE GADTs #
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
{-# LANGUAGE ScopedTypeVariables #-}
| A @CSV@ empty datatype with ` MimeRender ` and ` MimeUnrender ` instances for
-- @cassava@'s encoding and decoding classes.
--
-- >>> type Eg = Get '[CSV' 'HasHeader MyEncodeOptions] [(Int, String)]
--
-- Default encoding and decoding options are also provided, along with the
-- @CSV@ type synonym that uses them.
--
> > > type = Get ' [ CSV ] [ ( Int , String ) ]
--
module Servant.CSV.Cassava ( module Servant.CSV.Cassava
, HasHeader(..)
) where
import Prelude ()
import Prelude.Compat
import Data.Csv
import Data.ByteString.Lazy (ByteString)
import Data.Proxy (Proxy (..))
import Data.Typeable (Typeable)
import Data.Vector (Vector, toList)
import GHC.Generics (Generic)
import qualified Network.HTTP.Media as M
import Servant.API (Accept (..), MimeRender (..),
MimeUnrender (..))
-- | Phantom type used as a servant content type: @hasHeader@ records
-- whether a header row is included, @opt@ selects an 'EncodeOpts'
-- instance.
data CSV' (hasHeader :: HasHeader) opt deriving (Typeable)

-- | The common case: header row present, 'DefaultOpts'.
type CSV = CSV' 'HasHeader DefaultOpts
-- | 'HasHeader' singleton: a runtime witness for the type-level index.
data SHasHeader (hasHeader :: HasHeader) where
  SHasHeader :: SHasHeader 'HasHeader
  SNoHeader :: SHasHeader 'NoHeader

-- | Class to provide 'SHasHeader' implicitly.
class SHasHeaderI (hasHeader :: HasHeader) where shasheader :: SHasHeader hasHeader
instance SHasHeaderI 'HasHeader where shasheader = SHasHeader
instance SHasHeaderI 'NoHeader where shasheader = SNoHeader
-- | Demote the singleton to a 'Bool' ('True' = header row present).
shasheaderToBool :: SHasHeader hasHeader -> Bool
shasheaderToBool sh = case sh of
  SHasHeader -> True
  SNoHeader  -> False

-- | Demote the singleton to the plain 'HasHeader' value.
lowerSHasHeader :: SHasHeader hasHeader -> HasHeader
lowerSHasHeader sh = case sh of
  SHasHeader -> HasHeader
  SNoHeader  -> NoHeader
-- | Default options, instances providing 'defaultDecodeOptions' and
-- 'defaultEncodeOptions', and content type @text/csv;charset=utf-8@.
-- (Both Haddock lines here had lost their @--@ markers in this copy,
-- which is invalid Haskell; restored.)
data DefaultOpts deriving (Typeable, Generic)

-- | Options that work for tab delimited data, with content type
-- @text/tab-separated-values;charset=utf-8@.
data TabSeparatedOpts deriving (Typeable, Generic)
-- | Content type can be determined to coincide with encode opts (see
-- 'csvContentType': the configured delimiter decides CSV vs. TSV).
instance EncodeOpts opt => Accept (CSV' hasHeader opt) where
  contentType _ = csvContentType (Proxy :: Proxy opt)
-- * Encoding

-- ** Instances

-- | Encode with 'encodeByNameWith'. The 'Header' param is used for
-- determining the order of headers and fields. (This Haddock line had
-- lost its @--@ marker and the function name in this copy; the name is
-- restored from the body.)
instance ( ToNamedRecord a, EncodeOpts opt, SHasHeaderI hasHeader
         ) => MimeRender (CSV' hasHeader opt) (Header, [a]) where
  mimeRender _ (hdr, vals) = encodeByNameWith opts hdr vals
    where
      opts = encodeOpts' (Proxy :: Proxy opt) (Proxy :: Proxy hasHeader)
-- | A class to determine how to encode a list of elements
--
-- * 'HasHeader' encode with 'encodeDefaultOrderedByNameWith'
--
-- * 'NoHeader' encode with 'encodeWith'
--
-- Currently, it's not possible to encode without headers using 'encodeDefaultOrderedByNameWith'.
--
class EncodeList (hasHeader :: HasHeader) a where
  encodeList :: Proxy hasHeader -> EncodeOptions -> [a] -> ByteString

-- | 'encodeDefaultOrderedByNameWith', forcing the header row on.
instance (DefaultOrdered a, ToNamedRecord a) => EncodeList 'HasHeader a where
  encodeList _ opts vals = encodeDefaultOrderedByNameWith opts { encIncludeHeader = True } vals

-- | 'encodeWith', forcing the header row off.
instance (ToRecord a) => EncodeList 'NoHeader a where
  encodeList _ opts vals = encodeWith opts { encIncludeHeader = False } vals
-- | Encode a plain list, with or without a header row according to
-- @hasHeader@, using the options selected by the @opt@ phantom.
instance ( EncodeOpts opt, EncodeList hasHeader a
         ) => MimeRender (CSV' hasHeader opt) [a] where
  mimeRender _ xs =
    encodeList (Proxy :: Proxy hasHeader) (encodeOpts (Proxy :: Proxy opt)) xs
-- | Encode with 'encodeByNameWith'. The 'Header' param is used for
-- determining the order of headers and fields. (Haddock marker and
-- function name restored; the line had lost its @--@ in this copy.)
instance ( ToNamedRecord a, EncodeOpts opt, SHasHeaderI hasHeader
         ) => MimeRender (CSV' hasHeader opt) (Header, Vector a) where
  mimeRender _ (hdr, vals) = encodeByNameWith opts hdr (toList vals)
    where
      opts = encodeOpts' (Proxy :: Proxy opt) (Proxy :: Proxy hasHeader)

-- | Encode a 'Vector' by conversion to a list (see the list instance).
instance ( EncodeOpts opt, EncodeList hasHeader a
         ) => MimeRender (CSV' hasHeader opt) (Vector a) where
  mimeRender _ = encodeList (Proxy :: Proxy hasHeader) opts . toList
    where
      opts = encodeOpts (Proxy :: Proxy opt)
-- ** Encode/Decode Options

-- | Options bundle selected by the @opt@ phantom parameter of 'CSV''.
-- 'decodeOpts' and 'csvContentType' have defaults derived from
-- 'encodeOpts', so instances normally only define 'encodeOpts'.
class EncodeOpts opt where
  encodeOpts :: Proxy opt -> EncodeOptions

  -- | By default, decode with the same delimiter used for encoding.
  decodeOpts :: Proxy opt -> DecodeOptions
  decodeOpts p = DecodeOptions
    { decDelimiter = encDelimiter e
    }
    where
      e = encodeOpts p

  -- | Pick the media type from the delimiter: tab gets its own content
  -- type, everything else is reported as CSV. (The @ord '\t' = 9@ note
  -- below had lost its comment marker in this copy; restored.)
  csvContentType :: Proxy opt -> M.MediaType
  csvContentType p = case encDelimiter (encodeOpts p) of
    -- ord '\t' = 9
    9 -> "text" M.// "tab-separated-values" M./: ("charset", "utf-8")
    _ -> "text" M.// "csv" M./: ("charset", "utf-8")
-- | Encoding options for @opt@, with @encIncludeHeader@ forced to match
-- the @hasHeader@ index.
encodeOpts'
  :: forall opt hasHeader. (EncodeOpts opt, SHasHeaderI hasHeader)
  => Proxy opt -> Proxy hasHeader -> EncodeOptions
encodeOpts' optProxy _ =
  base { encIncludeHeader = withHeader }
  where
    base       = encodeOpts optProxy
    withHeader = shasheaderToBool (shasheader :: SHasHeader hasHeader)
-- | Stock cassava defaults: comma delimiter, hence @text/csv@.
instance EncodeOpts DefaultOpts where
  encodeOpts _ = defaultEncodeOptions
  decodeOpts _ = defaultDecodeOptions

-- | Tab-delimited variant; the delimiter makes 'csvContentType' report
-- @text/tab-separated-values@. (The @ord '\t' = 9@ note had lost its
-- comment marker in this copy; restored.)
instance EncodeOpts TabSeparatedOpts where
  -- ord '\t' = 9
  encodeOpts _ = defaultEncodeOptions { encDelimiter = 9 }
  decodeOpts _ = defaultDecodeOptions { decDelimiter = 9 }
-- * Decoding

-- ** Instances

-- | Decode with 'decodeByNameWith'; the parsed 'Header' is returned
-- alongside the rows.
instance ( FromNamedRecord a, EncodeOpts opt
         ) => MimeUnrender (CSV' 'HasHeader opt) (Header, [a]) where
  mimeUnrender _ bs = fmap toList <$> decodeByNameWith (decodeOpts p) bs
    where p = Proxy :: Proxy opt

-- | Decode with 'decodeWith'.
instance ( FromRecord a, EncodeOpts opt, SHasHeaderI hasHeader
         ) => MimeUnrender (CSV' hasHeader opt) [a] where
  mimeUnrender _ = fmap toList . decodeWith (decodeOpts p) (lowerSHasHeader sh)
    where
      p = Proxy :: Proxy opt
      sh = shasheader :: SHasHeader hasHeader

-- | Decode with 'decodeByNameWith', keeping the rows as a 'Vector'.
instance ( FromNamedRecord a, EncodeOpts opt
         ) => MimeUnrender (CSV' 'HasHeader opt) (Header, Vector a) where
  mimeUnrender _ = decodeByNameWith (decodeOpts p)
    where p = Proxy :: Proxy opt

-- | Decode with 'decodeWith'.
instance ( FromRecord a, EncodeOpts opt, SHasHeaderI hasHeader
         ) => MimeUnrender (CSV' hasHeader opt) (Vector a) where
  mimeUnrender _ = decodeWith (decodeOpts p) (lowerSHasHeader sh)
    where
      p = Proxy :: Proxy opt
      sh = shasheader :: SHasHeader hasHeader
# LANGUAGE DeriveDataTypeable #
# LANGUAGE DeriveGeneric #
# LANGUAGE ScopedTypeVariables #
@cassava@'s encoding and decoding classes.
>>> type Eg = Get '[CSV' 'HasHeader MyEncodeOptions] [(Int, String)]
Default encoding and decoding options are also provided, along with the
@CSV@ type synonym that uses them.
| 'HasHeader singleton.
| Class to provide 'SHasHeader' implicitly.
| Content type can be determined to coincide with encode opts.
* Encoding
** Instances
the order of headers and fields.
| A class to determine how to encode a list of elements
* 'HasHeader' encode with 'encodeDefaultOrderedByNameWith'
* 'NoHeader' encode with 'encodeWith'
Currently, it's not possible to encode without headers using 'encodeDefaultOrderedByNameWith'.
| 'encodeDefaultOrderedByNameWith'
| 'encodeWith'
the order of headers and fields.
** Encode/Decode Options
* Decoding
** Instances
| Decode with 'decodeByNameWith'.
| Decode with 'decodeWith'.
| Decode with 'decodeWith'. | # LANGUAGE FlexibleInstances #
# LANGUAGE GADTs #
# LANGUAGE KindSignatures #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE OverloadedStrings #
| A @CSV@ empty datatype with ` MimeRender ` and ` MimeUnrender ` instances for
> > > type = Get ' [ CSV ] [ ( Int , String ) ]
module Servant.CSV.Cassava ( module Servant.CSV.Cassava
, HasHeader(..)
) where
import Prelude ()
import Prelude.Compat
import Data.Csv
import Data.ByteString.Lazy (ByteString)
import Data.Proxy (Proxy (..))
import Data.Typeable (Typeable)
import Data.Vector (Vector, toList)
import GHC.Generics (Generic)
import qualified Network.HTTP.Media as M
import Servant.API (Accept (..), MimeRender (..),
MimeUnrender (..))
data CSV' (hasHeader :: HasHeader) opt deriving (Typeable)
type CSV = CSV' 'HasHeader DefaultOpts
data SHasHeader (hasHeader :: HasHeader) where
SHasHeader :: SHasHeader 'HasHeader
SNoHeader :: SHasHeader 'NoHeader
class SHasHeaderI (hasHeader :: HasHeader) where shasheader :: SHasHeader hasHeader
instance SHasHeaderI 'HasHeader where shasheader = SHasHeader
instance SHasHeaderI 'NoHeader where shasheader = SNoHeader
shasheaderToBool :: SHasHeader hasHeader -> Bool
shasheaderToBool SHasHeader = True
shasheaderToBool SNoHeader = False
lowerSHasHeader :: SHasHeader hasHeader -> HasHeader
lowerSHasHeader SHasHeader = HasHeader
lowerSHasHeader SNoHeader = NoHeader
| Default options , instances providing ' defaultDecodeOptions ' and ' defaultEncodeOptions ' , and content type @text / csv;charset = utf-8@
data DefaultOpts deriving (Typeable, Generic)
| Options that work for tab delimited data , with content type @text / tab - separated - values;charset = utf-8@
data TabSeparatedOpts deriving (Typeable, Generic)
instance EncodeOpts opt => Accept (CSV' hasHeader opt) where
contentType _ = csvContentType (Proxy :: Proxy opt)
| Encode with ' ' . The ' Header ' param is used for determining
instance ( ToNamedRecord a, EncodeOpts opt, SHasHeaderI hasHeader
) => MimeRender (CSV' hasHeader opt) (Header, [a]) where
mimeRender _ (hdr, vals) = encodeByNameWith opts hdr vals
where
opts = encodeOpts' (Proxy :: Proxy opt) (Proxy :: Proxy hasHeader)
class EncodeList (hasHeader :: HasHeader) a where
encodeList :: Proxy hasHeader -> EncodeOptions -> [a] -> ByteString
instance (DefaultOrdered a, ToNamedRecord a) => EncodeList 'HasHeader a where
encodeList _ opts vals = encodeDefaultOrderedByNameWith opts { encIncludeHeader = True } vals
instance (ToRecord a) => EncodeList 'NoHeader a where
encodeList _ opts vals = encodeWith opts { encIncludeHeader = False } vals
instance ( EncodeOpts opt, EncodeList hasHeader a
) => MimeRender (CSV' hasHeader opt) [a] where
mimeRender _ = encodeList (Proxy :: Proxy hasHeader) opts
where
opts = encodeOpts (Proxy :: Proxy opt)
| Encode with ' ' . The ' Header ' param is used for determining
instance ( ToNamedRecord a, EncodeOpts opt, SHasHeaderI hasHeader
) => MimeRender (CSV' hasHeader opt) (Header, Vector a) where
mimeRender _ (hdr, vals) = encodeByNameWith opts hdr (toList vals)
where
opts = encodeOpts' (Proxy :: Proxy opt) (Proxy :: Proxy hasHeader)
instance ( EncodeOpts opt, EncodeList hasHeader a
) => MimeRender (CSV' hasHeader opt) (Vector a) where
mimeRender _ = encodeList (Proxy :: Proxy hasHeader) opts . toList
where
opts = encodeOpts (Proxy :: Proxy opt)
-- | Encoding options, plus a decoding configuration and a content type
-- that are derived from them by default.
class EncodeOpts opt where
  encodeOpts :: Proxy opt -> EncodeOptions

  decodeOpts :: Proxy opt -> DecodeOptions
  decodeOpts p = DecodeOptions
    { decDelimiter = encDelimiter e
    }
    where
      e = encodeOpts p

  csvContentType :: Proxy opt -> M.MediaType
  csvContentType p = case encDelimiter (encodeOpts p) of
    -- ord '\t' == 9
    9 -> "text" M.// "tab-separated-values" M./: ("charset", "utf-8")
    _ -> "text" M.// "csv" M./: ("charset", "utf-8")

-- | Like 'encodeOpts', but with 'encIncludeHeader' forced to agree with
-- the type-level @hasHeader@ flag.
encodeOpts'
  :: forall opt hasHeader. (EncodeOpts opt, SHasHeaderI hasHeader)
  => Proxy opt -> Proxy hasHeader -> EncodeOptions
encodeOpts' p _ = (encodeOpts p)
  { encIncludeHeader = shasheaderToBool (shasheader :: SHasHeader hasHeader)
  }

instance EncodeOpts DefaultOpts where
  encodeOpts _ = defaultEncodeOptions
  decodeOpts _ = defaultDecodeOptions

instance EncodeOpts TabSeparatedOpts where
  -- ord '\t' == 9
  encodeOpts _ = defaultEncodeOptions { encDelimiter = 9 }
  decodeOpts _ = defaultDecodeOptions { decDelimiter = 9 }
-- Decoding: header-full payloads go through 'decodeByNameWith'; plain
-- record streams go through 'decodeWith', with the header flag lowered
-- from the type level by 'lowerSHasHeader'.
instance ( FromNamedRecord a, EncodeOpts opt
         ) => MimeUnrender (CSV' 'HasHeader opt) (Header, [a]) where
  mimeUnrender _ bs = fmap toList <$> decodeByNameWith (decodeOpts p) bs
    where p = Proxy :: Proxy opt

instance ( FromRecord a, EncodeOpts opt, SHasHeaderI hasHeader
         ) => MimeUnrender (CSV' hasHeader opt) [a] where
  mimeUnrender _ = fmap toList . decodeWith (decodeOpts p) (lowerSHasHeader sh)
    where
      p = Proxy :: Proxy opt
      sh = shasheader :: SHasHeader hasHeader

instance ( FromNamedRecord a, EncodeOpts opt
         ) => MimeUnrender (CSV' 'HasHeader opt) (Header, Vector a) where
  mimeUnrender _ = decodeByNameWith (decodeOpts p)
    where p = Proxy :: Proxy opt

instance ( FromRecord a, EncodeOpts opt, SHasHeaderI hasHeader
         ) => MimeUnrender (CSV' hasHeader opt) (Vector a) where
  mimeUnrender _ = decodeWith (decodeOpts p) (lowerSHasHeader sh)
    where
      p = Proxy :: Proxy opt
      sh = shasheader :: SHasHeader hasHeader
|
eb3d21686be5a0abe00874884ad26e624057f6f433f49a7ad1adbd57415848df | inconvergent/weird | simplify-path.lisp |
(in-package :simplify)
;; An adjustable vector of point indices (veq:pn = positive fixnum).
(deftype int-vector () `(vector veq:pn))

; TODO: make 3d version similar to vert-utils?

;; Ramer-Douglas-Peucker simplification on the index range [left, right]
;; of pts: find the interior vertex farthest from the chord
;; (pts[left] -> pts[right]); if it lies farther than lim, keep it and
;; recurse on both halves, otherwise keep only the endpoints.
;; Returns a sorted int-vector of surviving indices.
(veq:fvdef -simplify (pts lim &key left right)
  (declare #.*opt* (veq:fvec pts) (veq:ff lim) (veq:pn left right))
  (let ((res (make-adjustable-vector :type 'veq:pn))
        (dmax -1f0) ; largest chord distance seen so far
        (index 0))  ; index of the vertex at that distance
    (declare (int-vector res) (veq:pn index) (veq:ff dmax))
    (veq:f2let ((sl (veq:f2$ pts left))
                (sr (veq:f2$ pts right)))
      ;; scan interior vertices for the one farthest from the chord
      (loop for i of-type veq:pn from (1+ left) below right
            do (let ((d (veq:f2segdst sl sr (veq:f2$ pts i))))
                 (declare (veq:ff d))
                 (when (> d dmax) (setf dmax d index i)))))
    (if (> dmax lim)
        ;; split at the farthest vertex; drop the duplicated split index
        ;; from the left half before appending the right half
        (progn (loop with ps of-type int-vector =
                 (-simplify pts lim :left left :right index)
                     for i from 0 below (1- (length ps))
                     do (vextend (aref ps i) res))
               (loop for i across (-simplify pts lim :left index :right right)
                     do (vextend i res)))
        ;; nothing sticks out far enough: keep only the endpoints
        (progn (vextend left res)
               (vextend right res)))
    (sort res #'<)))

; :8338
;; Simplify the polyline PTS with -simplify and rebuild a new (shorter)
;; point array from the surviving indices.
(defun path (pts &key (lim 1f0))
  (declare #.*opt* (veq:fvec pts) (veq:ff lim))
  "
  simplify path, pts.
  lim is the max allowed distance from a dropped pt to the simplified line.
  returns (values new-path kept-indices)
  "
  (let ((inds (-simplify pts lim
                :left 0 :right (1- (round (/ (length pts) 2))))))
    (declare (vector inds))
    ; this is kind of inefficient. but it works just fine.
    (values (veq:f$_ (loop for i of-type veq:pn across inds
                  collect (veq:lst (veq:f2$ pts i)) of-type list))
            inds)))
| null | https://raw.githubusercontent.com/inconvergent/weird/106d154ec2cd0e4ec977c3672ba717d6305c1056/src/draw/simplify-path.lisp | lisp | TODO: make 3d version similar to vert-utils?
:8338
this is kind of inefficient. but it works just fine. |
(in-package :simplify)
;; An adjustable vector of point indices (veq:pn = positive fixnum).
(deftype int-vector () `(vector veq:pn))

;; Ramer-Douglas-Peucker simplification on the index range [left, right]
;; of pts: find the interior vertex farthest from the chord
;; (pts[left] -> pts[right]); if it lies farther than lim, keep it and
;; recurse on both halves, otherwise keep only the endpoints.
;; Returns a sorted int-vector of surviving indices.
(veq:fvdef -simplify (pts lim &key left right)
  (declare #.*opt* (veq:fvec pts) (veq:ff lim) (veq:pn left right))
  (let ((res (make-adjustable-vector :type 'veq:pn))
        (dmax -1f0) ; largest chord distance seen so far
        (index 0))  ; index of the vertex at that distance
    (declare (int-vector res) (veq:pn index) (veq:ff dmax))
    (veq:f2let ((sl (veq:f2$ pts left))
                (sr (veq:f2$ pts right)))
      ;; scan interior vertices for the one farthest from the chord
      (loop for i of-type veq:pn from (1+ left) below right
            do (let ((d (veq:f2segdst sl sr (veq:f2$ pts i))))
                 (declare (veq:ff d))
                 (when (> d dmax) (setf dmax d index i)))))
    (if (> dmax lim)
        ;; split at the farthest vertex; drop the duplicated split index
        ;; from the left half before appending the right half
        (progn (loop with ps of-type int-vector =
                 (-simplify pts lim :left left :right index)
                     for i from 0 below (1- (length ps))
                     do (vextend (aref ps i) res))
               (loop for i across (-simplify pts lim :left index :right right)
                     do (vextend i res)))
        ;; nothing sticks out far enough: keep only the endpoints
        (progn (vextend left res)
               (vextend right res)))
    (sort res #'<)))

;; Simplify the polyline PTS with -simplify and rebuild a new (shorter)
;; point array from the surviving indices.
(defun path (pts &key (lim 1f0))
  (declare #.*opt* (veq:fvec pts) (veq:ff lim))
  "
  simplify path, pts.
  lim is the max allowed distance from a dropped pt to the simplified line.
  returns (values new-path kept-indices)
  "
  (let ((inds (-simplify pts lim
                :left 0 :right (1- (round (/ (length pts) 2))))))
    (declare (vector inds))
    ; this is kind of inefficient. but it works just fine.
    (values (veq:f$_ (loop for i of-type veq:pn across inds
                  collect (veq:lst (veq:f2$ pts i)) of-type list))
            inds)))
|
cd334243e2c60a983518269008aa7159497000e43827ecc822420f384a1b3df0 | ntoronto/drbayes | language.rkt | #lang typed/racket/base
(require "language/functions.rkt"
"language/macros.rkt")
(provide (all-from-out
"language/functions.rkt"
"language/macros.rkt"))
| null | https://raw.githubusercontent.com/ntoronto/drbayes/e59eb7c7867118bf4c77ca903e133c7530e612a3/drbayes/private/language.rkt | racket | #lang typed/racket/base
(require "language/functions.rkt"
"language/macros.rkt")
(provide (all-from-out
"language/functions.rkt"
"language/macros.rkt"))
| |
7f0a162f0561aeb7d9e80863e93992c166b144a9b4f80b21995296964fcce6e6 | antifuchs/sbcl | filesys.lisp | ;;;; file system interface functions -- fairly Unix-centric, but with
;;;; differences between Unix and Win32 papered over.
;;;; This software is part of the SBCL system. See the README file for
;;;; more information.
;;;;
;;;; This software is derived from the CMU CL system, which was
;;;; written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB!IMPL")
;;;; Unix pathname host support
;;; FIXME: the below shouldn't really be here, but in documentation
;;; (chapter 19 makes a lot of requirements for documenting
;;; implementation-dependent decisions), but anyway it's probably not
;;; what we currently do.
;;;
;;; Unix namestrings have the following format:
;;;
;;; namestring := [ directory ] [ file [ type [ version ]]]
;;; directory := [ "/" ] { file "/" }*
;;; file := [^/]*
;;; type := "." [^/.]*
;;; version := "." ([0-9]+ | "*")
;;;
;;; Note: this grammar is ambiguous. The string foo.bar.5 can be
;;; parsed as either just the file specified or as specifying the
;;; file, type, and version. Therefore, we use the following rules
;;; when confronted with an ambiguous file.type.version string:
;;;
;;; - If the first character is a dot, it's part of the file. It is not
;;; considered a dot in the following rules.
;;;
;;; - Otherwise, the last dot separates the file and the type.
;;;
;;; Wildcard characters:
;;;
;;; If the directory, file, type components contain any of the
;;; following characters, it is considered part of a wildcard pattern
;;; and has the following meaning.
;;;
;;; ? - matches any one character
;;; * - matches any zero or more characters.
;;; [abc] - matches any of a, b, or c.
;;; {str1,str2,...,strn} - matches any of str1, str2, ..., or strn.
;;; (FIXME: no it doesn't)
;;;
;;; Any of these special characters can be preceded by a backslash to
;;; cause it to be treated as a regular character.
(defun remove-backslashes (namestr start end)
  #!+sb-doc
  "Remove any occurrences of #\\ from the string because we've already
checked for whatever they may have protected."
  (declare (type simple-string namestr)
           (type index start end))
  ;; Copy NAMESTR[start,end) into a fresh string, dropping each
  ;; backslash but keeping the character it protects.
  (let ((result (make-string (- end start) :element-type 'character))
        (dst 0)
        (quoted nil))
    (loop for src of-type index from start below end
          for char = (schar namestr src)
          do (cond (quoted
                    ;; previous char was a backslash: keep this one verbatim
                    (setf (schar result dst) char)
                    (setf quoted nil)
                    (incf dst))
                   ((char= char #\\)
                    (setq quoted t))
                   (t
                    (setf (schar result dst) char)
                    (incf dst))))
    ;; A trailing backslash protects nothing: complain.
    (when quoted
      (error 'namestring-parse-error
             :complaint "backslash in a bad place"
             :namestring namestr
             :offset (1- end)))
    (%shrink-vector result dst)))
;; Parse NAMESTR between START and END into either a plain string (no
;; wildcard characters), :WILD (a lone "*"), or a PATTERN object mixing
;; literal runs with :SINGLE-CHAR-WILD, :MULTI-CHAR-WILD and
;; (:CHARACTER-SET . "chars") pieces. A backslash quotes the following
;; character.
(defun maybe-make-pattern (namestr start end)
  (declare (type simple-string namestr)
           (type index start end))
  (collect ((pattern))
    (let ((quoted nil)            ; the previous character was a backslash
          (any-quotes nil)        ; current literal run contains a backslash
          (last-regular-char nil) ; start of the current literal run, or NIL
          (index start))
      (flet ((flush-pending-regulars ()
               ;; Emit the literal run accumulated so far, stripping
               ;; backslashes from it if it contained any.
               (when last-regular-char
                 (pattern (if any-quotes
                              (remove-backslashes namestr
                                                  last-regular-char
                                                  index)
                              (subseq namestr last-regular-char index)))
                 (setf any-quotes nil)
                 (setf last-regular-char nil))))
        (loop
         (when (>= index end)
           (return))
         (let ((char (schar namestr index)))
           (cond (quoted
                  (incf index)
                  (setf quoted nil))
                 ((char= char #\\)
                  (setf quoted t)
                  (setf any-quotes t)
                  (unless last-regular-char
                    (setf last-regular-char index))
                  (incf index))
                 ((char= char #\?)
                  (flush-pending-regulars)
                  (pattern :single-char-wild)
                  (incf index))
                 ((char= char #\*)
                  (flush-pending-regulars)
                  (pattern :multi-char-wild)
                  (incf index))
                 ((char= char #\[)
                  (flush-pending-regulars)
                  (let ((close-bracket
                         (position #\] namestr :start index :end end)))
                    (unless close-bracket
                      (error 'namestring-parse-error
                             :complaint "#\\[ with no corresponding #\\]"
                             :namestring namestr
                             :offset index))
                    (pattern (cons :character-set
                                   (subseq namestr
                                           (1+ index)
                                           close-bracket)))
                    (setf index (1+ close-bracket))))
                 (t
                  (unless last-regular-char
                    (setf last-regular-char index))
                  (incf index)))))
        (flush-pending-regulars)))
    ;; Collapse degenerate results: no pieces -> "", a single
    ;; :MULTI-CHAR-WILD -> :WILD, a single literal -> the string itself.
    (cond ((null (pattern))
           "")
          ((null (cdr (pattern)))
           (let ((piece (first (pattern))))
             (typecase piece
               ((member :multi-char-wild) :wild)
               (simple-string piece)
               (t
                (make-pattern (pattern))))))
          (t
           (make-pattern (pattern))))))
;; Convert one pathname component back to namestring syntax: :WILD -> "*",
;; a literal string with any of * ? [ escaped by a backslash, or a PATTERN
;; rendered piece by piece.
(defun unparse-physical-piece (thing)
  (etypecase thing
    ((member :wild) "*")
    (simple-string
     ;; First size the result (each special character gains a backslash),
     ;; then copy with escapes.
     (let* ((srclen (length thing))
            (dstlen srclen))
       (dotimes (i srclen)
         (case (schar thing i)
           ((#\* #\? #\[)
            (incf dstlen))))
       (let ((result (make-string dstlen))
             (dst 0))
         (dotimes (src srclen)
           (let ((char (schar thing src)))
             (case char
               ((#\* #\? #\[)
                (setf (schar result dst) #\\)
                (incf dst)))
             (setf (schar result dst) char)
             (incf dst)))
         result)))
    (pattern
     (with-output-to-string (s)
       (dolist (piece (pattern-pieces thing))
         (etypecase piece
           (simple-string
            ;; NOTE(review): literal pieces are written unescaped here,
            ;; unlike the SIMPLE-STRING case above -- confirm intended.
            (write-string piece s))
           (symbol
            (ecase piece
              (:multi-char-wild
               (write-string "*" s))
              (:single-char-wild
               (write-string "?" s))))
           (cons
            (case (car piece)
              (:character-set
               (write-string "[" s)
               (write-string (cdr piece) s)
               (write-string "]" s))
              (t
               (error "invalid pattern piece: ~S" piece))))))))))
;; Build a one-argument predicate that tells whether a pathname component
;; matches PIECE: :WILD matches anything, a PATTERN matches strings via
;; PATTERN-MATCHES, and anything else matches by EQUAL.
(defun make-matcher (piece)
  (typecase piece
    ((member :wild)
     (constantly t))
    (pattern
     (lambda (component)
       (and (stringp component)
            (pattern-matches piece component))))
    (t
     (lambda (component)
       (equal piece component)))))
(/show0 "filesys.lisp 160")
;; Split the file part of NAMESTR (between START and END) into name and
;; type components at the last dot, ignoring a dot in the very first
;; position (dotfiles). The version is always :NEWEST on this host.
(defun extract-name-type-and-version (namestr start end)
  (declare (type simple-string namestr)
           (type index start end))
  (let ((last-dot (position #\. namestr :start (1+ start) :end end
                            :from-end t)))
    (if last-dot
        (values (maybe-make-pattern namestr start last-dot)
                (maybe-make-pattern namestr (1+ last-dot) end)
                :newest)
        (values (maybe-make-pattern namestr start end)
                nil
                :newest))))
(/show0 "filesys.lisp 200")
;;;; Grabbing the kind of file when we have a namestring.
;; Return a keyword describing what NAMESTRING names on disk -- :FILE,
;; :DIRECTORY, :SYMLINK (non-Windows only) or :SPECIAL -- or NIL when
;; stat fails (e.g. the file does not exist).
(defun native-file-kind (namestring)
  (multiple-value-bind (existsp errno ino mode)
      ;; lstat so that a symlink itself is classified, rather than its
      ;; target; Windows has no lstat.
      #!-win32
      (sb!unix:unix-lstat namestring)
      #!+win32
      (sb!unix:unix-stat namestring)
    (declare (ignore errno ino))
    (when existsp
      (let ((ifmt (logand mode sb!unix:s-ifmt)))
        (case ifmt
          (#.sb!unix:s-ifreg :file)
          (#.sb!unix:s-ifdir :directory)
          #!-win32
          (#.sb!unix:s-iflnk :symlink)
          (t :special))))))
;;;; TRUENAME, PROBE-FILE, FILE-AUTHOR, FILE-WRITE-DATE.
;;; Rewritten in 12/2007 by RMK, replacing 13+ year old CMU code that
;;; made a mess of things in order to support search lists (which SBCL
;;; has never had). These are now all relatively straightforward
;;; wrappers around stat(2) and realpath(2), with the same basic logic
;;; in all cases. The wrinkles to be aware of:
;;;
;;; * SBCL defines the truename of an existing, dangling or
;;; self-referring symlink to be the symlink itself.
;;; * The old version of PROBE-FILE merged the pathspec against
;;; *DEFAULT-PATHNAME-DEFAULTS* twice, and so lost when *D-P-D*
;;; was a relative pathname. Even if the case where *D-P-D* is a
;;; relative pathname is problematic, there's no particular reason
;;; to get that wrong, so let's try not to.
;;; * Note that while stat(2) is probably atomic, getting the truename
;;; for a filename involves poking all over the place, and so is
;;; subject to race conditions if other programs mutate the file
;;; system while we're resolving symlinks. So it's not implausible for
;;; realpath(3) to fail even if stat(2) succeeded. There's nothing
;;; obvious we can do about this, however.
;;; * Windows' apparent analogue of realpath(3) is called
;;;   GetFullPathName, and it's a bit less useful than realpath(3).
;;;   In particular, while realpath(3) errors in case the file doesn't
;;;   exist, GetFullPathName seems to return a filename in all cases.
;;;   As realpath(3) is not atomic anyway, we only ever call it when
;;; we think a file exists, so just be careful when rewriting this
;;; routine.
;;;
;;; Given a pathname designator, some quality to query for, return one
;;; of a pathname, a universal time, or a string (a file-author), or
;;; NIL. QUERY-FOR may be one of :TRUENAME, :EXISTENCE, :WRITE-DATE,
;;; :AUTHOR. If ERRORP is false, return NIL in case the file system
;;; returns an error code; otherwise, signal an error. Accepts
;;; logical pathnames, too (but never returns LPNs). For internal
;;; use.
(defun query-file-system (pathspec query-for &optional (errorp t))
  (let ((pathname (translate-logical-pathname
                   (merge-pathnames
                    (pathname pathspec)
                    (sane-default-pathname-defaults)))))
    (when (wild-pathname-p pathname)
      (error 'simple-file-error
             :pathname pathname
             :format-control "~@<can't find the ~A of wild pathname ~A~
                              (physicalized from ~A).~:>"
             :format-arguments (list query-for pathname pathspec)))
    (flet ((fail (note-format pathname errno)
             (if errorp
                 (simple-file-perror note-format pathname errno)
                 (return-from query-file-system nil))))
      (let ((filename (native-namestring pathname :as-file t)))
        (multiple-value-bind (existsp errno ino mode nlink uid gid rdev size
                              atime mtime)
            (sb!unix:unix-stat filename)
          (declare (ignore ino nlink gid rdev size atime
                           #!+win32 uid))
          (if existsp
              (case query-for
                (:existence (nth-value
                             0
                             (parse-native-namestring
                              filename
                              (pathname-host pathname)
                              (sane-default-pathname-defaults)
                              :as-directory (eql (logand mode sb!unix:s-ifmt)
                                                 sb!unix:s-ifdir))))
                (:truename (nth-value
                            0
                            (parse-native-namestring
                             ;; Note: in case the file is stat'able, POSIX
                             ;; realpath(3) gets us a canonical absolute
                             ;; filename, even if the post-merge PATHNAME
                             ;; is not absolute...
                             (multiple-value-bind (realpath errno)
                                 (sb!unix:unix-realpath filename)
                               (if realpath
                                   realpath
                                   (fail "couldn't resolve ~A" filename errno)))
                             (pathname-host pathname)
                             (sane-default-pathname-defaults)
                             ;; ... but without any trailing slash.
                             :as-directory (eql (logand mode sb!unix:s-ifmt)
                                                sb!unix:s-ifdir))))
                (:author
                 #!-win32
                 (sb!unix:uid-username uid))
                (:write-date (+ unix-to-universal-time mtime)))
              (progn
                ;; SBCL has for many years had a policy that a pathname
                ;; that names an existing, dangling or self-referential
                ;; symlink denotes the symlink itself. stat(2) fails
                ;; and sets errno to ENOENT or ELOOP respectively, but
                ;; we must distinguish cases where the symlink exists
                ;; from ones where there's a loop in the apparent
                ;; containing directory.
                #!-win32
                (multiple-value-bind (linkp ignore ino mode nlink uid gid rdev
                                      size atime mtime)
                    (sb!unix:unix-lstat filename)
                  (declare (ignore ignore ino mode nlink gid rdev size atime))
                  (when (and (or (= errno sb!unix:enoent)
                                 (= errno sb!unix:eloop))
                             linkp)
                    (return-from query-file-system
                      (case query-for
                        (:existence
                         ;; We do this reparse so as to return a
                         ;; normalized pathname.
                         (parse-native-namestring
                          filename (pathname-host pathname)))
                        (:truename
                         ;; So here's a trick: since lstat(2) succeeded,
                         ;; FILENAME exists, so its directory exists and
                         ;; only the non-directory part is loopy. So
                         ;; let's resolve FILENAME's directory part with
                         ;; realpath(3), in order to get a canonical
                         ;; absolute name for the directory, and then
                         ;; return a pathname having PATHNAME's name,
                         ;; type, and version, but the rest from the
                         ;; truename of the directory. Since we turned
                         ;; PATHNAME into FILENAME "as a file", FILENAME
                         ;; does not end in a slash, and so we get the
                         ;; directory part of FILENAME by reparsing
                         ;; FILENAME and masking off its name, type, and
                         ;; version bits. But note not to call ourselves
                         ;; recursively, because we don't want to
                         ;; re-merge against *DEFAULT-PATHNAME-DEFAULTS*,
                         ;; since PATHNAME may be a relative pathname.
                         (merge-pathnames
                          (nth-value
                           0
                           (parse-native-namestring
                            (multiple-value-bind (realpath errno)
                                (sb!unix:unix-realpath
                                 (native-namestring
                                  (make-pathname
                                   :name :unspecific
                                   :type :unspecific
                                   :version :unspecific
                                   :defaults (parse-native-namestring
                                              filename
                                              (pathname-host pathname)
                                              (sane-default-pathname-defaults)))))
                              (if realpath
                                  realpath
                                  (fail "couldn't resolve ~A" filename errno)))
                            (pathname-host pathname)
                            (sane-default-pathname-defaults)
                            :as-directory t))
                          pathname))
                        (:author (sb!unix:uid-username uid))
                        (:write-date (+ unix-to-universal-time mtime))))))
                ;; If we're still here, the file doesn't exist; error.
                (fail
                 (format nil "failed to find the ~A of ~~A" query-for)
                 pathspec errno))))))))
;; The four ANSI file-system query functions are thin wrappers around
;; QUERY-FILE-SYSTEM above.
(defun probe-file (pathspec)
  #!+sb-doc
  "Return the truename of PATHSPEC if the truename can be found,
or NIL otherwise. See TRUENAME for more information."
  (query-file-system pathspec :truename nil))

(defun truename (pathspec)
  #!+sb-doc
  "If PATHSPEC is a pathname that names an existing file, return
a pathname that denotes a canonicalized name for the file. If
pathspec is a stream associated with a file, return a pathname
that denotes a canonicalized name for the file associated with
the stream.

An error of type FILE-ERROR is signalled if no such file exists
or if the file system is such that a canonicalized file name
cannot be determined or if the pathname is wild.

Under Unix, the TRUENAME of a symlink that links to itself or to
a file that doesn't exist is considered to be the name of the
broken symlink itself."
  ;; Note that eventually this routine might be different for streams
  ;; than for other pathname designators.
  (if (streamp pathspec)
      (query-file-system pathspec :truename)
      (query-file-system pathspec :truename)))

(defun file-author (pathspec)
  #!+sb-doc
  "Return the author of the file specified by PATHSPEC. Signal an
error of type FILE-ERROR if no such file exists, or if PATHSPEC
is a wild pathname."
  (query-file-system pathspec :author))

(defun file-write-date (pathspec)
  #!+sb-doc
  "Return the write date of the file specified by PATHSPEC.
An error of type FILE-ERROR is signaled if no such file exists,
or if PATHSPEC is a wild pathname."
  (query-file-system pathspec :write-date))
;;;; miscellaneous other operations
(/show0 "filesys.lisp 700")
(defun rename-file (file new-name)
  #!+sb-doc
  "Rename FILE to have the specified NEW-NAME. If FILE is a stream open to a
file, then the associated file is renamed."
  (let* ((original (truename file))
         (original-namestring (native-namestring original :as-file t))
         ;; Per ANSI, NEW-NAME is merged against the old name first.
         (new-name (merge-pathnames new-name original))
         (new-namestring (native-namestring (physicalize-pathname new-name)
                                            :as-file t)))
    (unless new-namestring
      (error 'simple-file-error
             :pathname new-name
             :format-control "~S can't be created."
             :format-arguments (list new-name)))
    (multiple-value-bind (res error)
        (sb!unix:unix-rename original-namestring new-namestring)
      (unless res
        (error 'simple-file-error
               :pathname new-name
               :format-control "~@<couldn't rename ~2I~_~A ~I~_to ~2I~_~A: ~
                                ~I~_~A~:>"
               :format-arguments (list original new-name (strerror error))))
      ;; Keep an open stream's recorded name in sync with the rename.
      (when (streamp file)
        (file-name file new-name))
      ;; ANSI return values: defaulted new name, old truename, new truename.
      (values new-name original (truename new-name)))))
(defun delete-file (file)
  #!+sb-doc
  "Delete the specified FILE.

If FILE is a stream, on Windows the stream is closed immediately. On Unix
platforms the stream remains open, allowing IO to continue: the OS resources
associated with the deleted file remain available till the stream is closed as
per standard Unix unlink() behaviour."
  (let* ((pathname (translate-logical-pathname file))
         (namestring (native-namestring pathname :as-file t)))
    (truename file) ; for error-checking side-effect
    ;; Windows can't unlink a file that is still open.
    #!+win32
    (when (streamp file)
      (close file))
    (multiple-value-bind (res err) (sb!unix:unix-unlink namestring)
      (unless res
        (simple-file-perror "couldn't delete ~A" namestring err))))
  t)
(defun delete-directory (pathspec &key recursive)
  "Deletes the directory designated by PATHSPEC (a pathname designator).
Returns the truename of the directory deleted.

If RECURSIVE is false \(the default), signals an error unless the directory is
empty. If RECURSIVE is true, first deletes all files and subdirectories. If
RECURSIVE is true and the directory contains symbolic links, the links are
deleted, not the files and directories they point to.

Signals an error if PATHSPEC designates a file instead of a directory, or if
the directory could not be deleted for any reason.

\(DELETE-DIRECTORY \"/tmp/foo\") and \(DELETE-DIRECTORY \"/tmp/foo/\") both
delete the \"foo\" subdirectory of \"/tmp\", or signal an error if it does not
exist or is a file.

Experimental: interface subject to change."
  (declare (type pathname-designator pathspec))
  (with-pathname (pathname pathspec)
    (let ((truename (truename (translate-logical-pathname pathname))))
      (labels ((recurse (dir)
                 ;; Depth-first: clear subdirectories, then files, then
                 ;; remove DIR itself.
                 (map-directory #'recurse dir
                                :files nil
                                :directories t
                                :classify-symlinks nil)
                 (map-directory #'delete-file dir
                                :files t
                                :directories nil
                                :classify-symlinks nil)
                 (delete-dir dir))
               (delete-dir (dir)
                 ;; rmdir via the C library; fails unless DIR is empty.
                 (let* ((namestring (native-namestring dir :as-file t))
                        (res (alien-funcall (extern-alien #!-win32 "rmdir"
                                                          #!+win32 "_rmdir"
                                                          (function int c-string))
                                            namestring)))
                   (if (minusp res)
                       (simple-file-perror "Could not delete directory ~A:~% ~A"
                                           namestring (get-errno))
                       dir))))
        (if recursive
            (recurse truename)
            (delete-dir truename))))))
;; Pathname of the SBCL installation directory per the SBCL_HOME
;; environment variable, or NIL when it is unset or empty.
(defun sbcl-homedir-pathname ()
  (let ((home (posix-getenv "SBCL_HOME")))
    ;; SBCL_HOME isn't set for :EXECUTABLE T embedded cores
    (unless (or (null home) (string= home ""))
      (parse-native-namestring home
                               #!-win32 sb!impl::*unix-host*
                               #!+win32 sb!impl::*win32-host*
                               *default-pathname-defaults*
                               :as-directory t))))
;; Namestring of USERNAME's home directory, or of the current user's when
;; USERNAME is not supplied. For the current user the HOME environment
;; variable wins; otherwise (on Unix) fall back to the passwd database.
(defun user-homedir-namestring (&optional username)
  (if username
      (sb!unix:user-homedir username)
      (let ((env-home (posix-getenv "HOME")))
        (cond ((and env-home (plusp (length env-home)))
               env-home)
              (t
               #!-win32
               (sb!unix:uid-homedir (sb!unix:unix-getuid)))))))
;;; (This is an ANSI Common Lisp function.)
;;; (This is an ANSI Common Lisp function.)
(defun user-homedir-pathname (&optional host)
  #!+sb-doc
  "Return the home directory of the user as a pathname. If the HOME
environment variable has been specified, the directory it designates
is returned; otherwise obtains the home directory from the operating
system. HOST argument is ignored by SBCL."
  (declare (ignore host))
  ;; NTH-VALUE-style VALUES to drop the secondary value of
  ;; PARSE-NATIVE-NAMESTRING and return just the pathname.
  (values
   (parse-native-namestring
    (or (user-homedir-namestring)
        ;; On Windows without HOME, ask the shell for the profile folder.
        #!+win32
        (sb!win32::get-folder-namestring sb!win32::csidl_profile))
    #!-win32 sb!impl::*unix-host*
    #!+win32 sb!impl::*win32-host*
    *default-pathname-defaults*
    :as-directory t)))
;;;; DIRECTORY
(defun directory (pathspec &key (resolve-symlinks t))
  #!+sb-doc
  "Return a list of PATHNAMEs, each the TRUENAME of a file that matched the
given pathname. Note that the interaction between this ANSI-specified
TRUENAMEing and the semantics of the Unix filesystem (symbolic links..) means
this function can sometimes return files which don't have the same directory
as PATHNAME. If :RESOLVE-SYMLINKS is NIL, don't resolve symbolic links in
matching filenames."
  (let (;; We create one entry in this hash table for each truename,
        ;; as an asymptotically efficient way of removing duplicates
        ;; (which can arise when e.g. multiple symlinks map to the
        ;; same truename).
        (truenames (make-hash-table :test #'equal)))
    (labels ((record (pathname)
               (let ((truename (if resolve-symlinks
                                   ;; FIXME: Why not TRUENAME? As reported by
                                   ;; Milan Zamazal sbcl-devel 2003-10-05, using
                                   ;; TRUENAME causes a race condition whereby
                                   ;; removal of a file during the directory
                                   ;; operation causes an error. It's not clear
                                   ;; what the right thing to do is, though. --
                                   ;; CSR, 2003-10-13
                                   (query-file-system pathname :truename nil)
                                   (query-file-system pathname :existence nil))))
                 (when truename
                   (setf (gethash (namestring truename) truenames)
                         truename))))
             (do-physical-pathnames (pathname)
               (aver (not (logical-pathname-p pathname)))
               (let* (;; KLUDGE: Since we don't canonize pathnames on
                      ;; construction, we really have to do it here to get
                      ;; #p"foo/." mean the same as #p"foo/./".
                      (pathname (canonicalize-pathname pathname))
                      (name (pathname-name pathname))
                      (type (pathname-type pathname))
                      (match-name (make-matcher name))
                      (match-type (make-matcher type)))
                 (map-matching-directories
                  (if (or name type)
                      (lambda (directory)
                        (map-matching-entries #'record
                                              directory
                                              match-name
                                              match-type))
                      #'record)
                  pathname)))
             (do-pathnames (pathname)
               (if (logical-pathname-p pathname)
                   (let ((host (intern-logical-host (pathname-host pathname))))
                     (dolist (x (logical-host-canon-transls host))
                       (destructuring-bind (from to) x
                         (let ((intersections
                                (pathname-intersections pathname from)))
                           (dolist (p intersections)
                             (do-pathnames (translate-pathname p from to)))))))
                   (do-physical-pathnames pathname))))
      (declare (truly-dynamic-extent #'record))
      (do-pathnames (merge-pathnames pathspec)))
    (mapcar #'cdr
            ;; Sorting isn't required by the ANSI spec, but sorting into some
            ;; canonical order seems good just on the grounds that the
            ;; implementation should have repeatable behavior when possible.
            (sort (loop for namestring being each hash-key in truenames
                        using (hash-value truename)
                        collect (cons namestring truename))
                  #'string<
                  :key #'car))))
(defun canonicalize-pathname (pathname)
  ;; We're really only interested in :UNSPECIFIC -> NIL, :BACK and :UP,
  ;; and dealing with #p"foo/.." and #p"foo/."
  (labels ((simplify (piece)
             ;; :UNSPECIFIC components are treated as absent.
             (unless (eq :unspecific piece)
               piece))
           (canonicalize-directory (directory)
             ;; Resolve :BACK/:UP against the preceding component where
             ;; possible, keeping the list in reverse while building.
             (let (pieces)
               (dolist (piece directory)
                 (if (and pieces (member piece '(:back :up)))
                     ;; FIXME: We should really canonicalize when we construct
                     ;; pathnames. This is just wrong.
                     (case (car pieces)
                       ((:absolute :wild-inferiors)
                        (error 'simple-file-error
                               :format-control "Invalid use of ~S after ~S."
                               :format-arguments (list piece (car pieces))
                               :pathname pathname))
                       ((:relative :up :back)
                        (push piece pieces))
                       (t
                        (pop pieces)))
                     (push piece pieces)))
               (nreverse pieces))))
    (let ((name (simplify (pathname-name pathname)))
          (type (simplify (pathname-type pathname)))
          (dir (canonicalize-directory (pathname-directory pathname))))
      (cond ((equal "." name)
             (cond ((not type)
                    ;; #p"foo/." -> #p"foo/"
                    (make-pathname :name nil :defaults pathname))
                   ((equal "" type)
                    ;; #p"foo/.." was parsed as name "." type ""
                    (make-pathname :name nil
                                   :type nil
                                   :directory (butlast dir)
                                   :defaults pathname))))
            (t
             (make-pathname :name name :type type
                            :directory dir
                            :defaults pathname))))))
;;; Given a native namestring, provides a WITH-HASH-TABLE-ITERATOR style
;;; interface to mapping over namestrings of entries in the corresponding
;;; directory.
(defmacro with-native-directory-iterator ((iterator namestring &key errorp) &body body)
  ;; (ITERATOR) expands to a call of the closure supplied by
  ;; CALL-WITH-NATIVE-DIRECTORY-ITERATOR, returning the next entry name
  ;; (or NIL when the directory is exhausted).
  (with-unique-names (one-iter)
    `(dx-flet
        ((iterate (,one-iter)
           (declare (type function ,one-iter))
           (macrolet ((,iterator ()
                        `(funcall ,',one-iter)))
             ,@body)))
       (call-with-native-directory-iterator #'iterate ,namestring ,errorp))))
;; Functional core of WITH-NATIVE-DIRECTORY-ITERATOR: open NAMESTRING,
;; hand FUNCTION a zero-argument iterator closure, and make sure the
;; directory stream is closed again.
(defun call-with-native-directory-iterator (function namestring errorp)
  (declare (type (or null string) namestring)
           (function function))
  (let (dp)
    (when namestring
      (dx-flet
          ((one-iter ()
             ;; Name of the next entry, skipping the "." and ".."
             ;; pseudo-entries; NIL at end of directory.
             (tagbody
              :next
              (let ((ent (sb!unix:unix-readdir dp nil)))
                (when ent
                  (let ((name (sb!unix:unix-dirent-name ent)))
                    (when name
                      (cond ((equal "." name)
                             (go :next))
                            ((equal ".." name)
                             (go :next))
                            (t
                             (return-from one-iter name))))))))))
        (unwind-protect
             (progn
               (setf dp (sb!unix:unix-opendir namestring errorp))
               (when dp
                 (funcall function #'one-iter)))
          ;; Always release the directory stream, even on non-local exit.
          (when dp
            (sb!unix:unix-closedir dp nil)))))))
;;; This is our core directory access interface that we use to implement
;;; DIRECTORY.
(defun map-directory (function directory &key (files t) (directories t)
                      (classify-symlinks) (errorp t))
  #!+sb-doc
  "Map over entries in DIRECTORY. Keyword arguments specify which entries to
map over, and how:

  :FILES
     If true, call FUNCTION with the pathname of each file in DIRECTORY.
     Defaults to T.

  :DIRECTORIES
     If true, call FUNCTION with a pathname for each subdirectory of DIRECTORY.
     If :AS-FILES, the pathname used is a pathname designating the subdirectory
     as a file in DIRECTORY. Otherwise the pathname used is a directory
     pathname. Defaults to T.

  :CLASSIFY-SYMLINKS
     If T, the decision to call FUNCTION with the pathname of a symbolic link
     depends on the resolution of the link: if it points to a directory, it is
     considered a directory entry, otherwise a file entry. If false, all
     symbolic links are considered file entries. Defaults to T. In both cases
     the pathname used for the symbolic link is not fully resolved, but names it
     as an immediate child of DIRECTORY.

  :ERRORP
     If true, signal an error if DIRECTORY does not exist, cannot be read, etc.
     Defaults to T.

Experimental: interface subject to change."
  (declare (pathname-designator directory))
  (let* ((fun (%coerce-callable-to-fun function))
         (as-files (eq :as-files directories))
         (physical (physicalize-pathname directory))
         ;; Not QUERY-FILE-SYSTEM :TRUENAME, since it doesn't work on Windows
         ;; network shares.
         (realname (sb!unix:unix-realpath (native-namestring physical :as-file t)))
         (canonical (if realname
                        (parse-native-namestring realname
                                                 (pathname-host physical)
                                                 (sane-default-pathname-defaults)
                                                 :as-directory t)
                        (return-from map-directory nil)))
         (dirname (native-namestring canonical)))
    (flet ((map-it (name dirp)
             ;; Call FUN on NAME parsed relative to the requested
             ;; directory, as a directory pathname if DIRP (unless
             ;; :DIRECTORIES :AS-FILES was requested).
             (funcall fun
                      (merge-pathnames (parse-native-namestring
                                        name nil physical
                                        :as-directory (and dirp (not as-files)))
                                       physical))))
      (with-native-directory-iterator (next dirname :errorp errorp)
        (loop for name = (next)
              while name
              do (let* ((full (concatenate 'string dirname name))
                        (kind (native-file-kind full)))
                   (when kind
                     (case kind
                       (:directory
                        (when directories
                          (map-it name t)))
                       (:symlink
                        (if classify-symlinks
                            (let* ((tmpname (merge-pathnames
                                             (parse-native-namestring
                                              name nil physical :as-directory nil)
                                             physical))
                                   (truename (query-file-system tmpname :truename nil)))
                              (if (or (not truename)
                                      (or (pathname-name truename) (pathname-type truename)))
                                  (when files
                                    (funcall fun tmpname))
                                  (when directories
                                    (map-it name t))))
                            (when files
                              (map-it name nil))))
                       (t
                        ;; Anything else parses as a file.
                        (when files
                          (map-it name nil)))))))))))
;;; Part of DIRECTORY: implements matching the directory spec. Calls FUNCTION
;;; with all DIRECTORIES that match the directory portion of PATHSPEC.
(defun map-matching-directories (function pathspec)
  ;; Split PATHSPEC's directory component at the first wild element:
  ;; everything before it is a literal prefix we can visit directly.
  (let* ((dir (pathname-directory pathspec))
         (length (length dir))
         (wild (position-if (lambda (elt)
                              (or (eq :wild elt) (typep elt 'pattern)))
                            dir))
         (wild-inferiors (position :wild-inferiors dir))
         (end (cond ((and wild wild-inferiors)
                     (min wild wild-inferiors))
                    (t
                     (or wild wild-inferiors length))))
         (rest (subseq dir end))
         ;; Directory-only pathname for the literal prefix.
         (starting-point (make-pathname :directory (subseq dir 0 end)
                                        :device (pathname-device pathspec)
                                        :host (pathname-host pathspec)
                                        :name nil
                                        :type nil
                                        :version nil)))
    (cond (wild-inferiors
           (map-wild-inferiors function rest starting-point))
          (wild
           (map-wild function rest starting-point))
          (t
           ;; Nothing wild -- the directory matches itself.
           (funcall function starting-point))))
  nil)
;; The final component of PATHNAME's directory list, or NIL if none.
(defun last-directory-piece (pathname)
  (first (last (pathname-directory pathname))))
;;; Part of DIRECTORY: implements iterating over a :WILD or pattern component
;;; in the directory spec.
(defun map-wild (function more directory)
  ;; THIS is the wild element to match at this level; NEXT is the one
  ;; after it (if any), which decides how to continue.
  (let ((this (pop more))
        (next (car more)))
    (flet ((cont (subdirectory)
             (cond ((not more)
                    ;; end of the line
                    (funcall function subdirectory))
                   ((or (eq :wild next) (typep next 'pattern))
                    (map-wild function more subdirectory))
                   ((eq :wild-inferiors next)
                    (map-wild-inferiors function more subdirectory))
                   (t
                    ;; NEXT is literal again: restart the generic
                    ;; matcher on the extended directory.
                    (let ((this (pathname-directory subdirectory)))
                      (map-matching-directories
                       function
                       (make-pathname :directory (append this more)
                                      :defaults subdirectory)))))))
      (map-directory
       (if (eq :wild this)
           #'cont
           (lambda (sub)
             (when (pattern-matches this (last-directory-piece sub))
               (funcall #'cont sub))))
       directory
       :files nil
       :directories t
       :errorp nil))))
;;; Part of DIRECTORY: implements iterating over a :WILD-INFERIORS component
;;; in the directory spec.
(defun map-wild-inferiors (function more directory)
  ;; Part of DIRECTORY: the head of MORE is :WILD-INFERIORS.  A run of
  ;; consecutive :WILD / :WILD-INFERIORS components collapses into one
  ;; "match any number of intermediate directories".
  (let* ((tail (member-if (lambda (piece)
                            (not (member piece '(:wild :wild-inferiors))))
                          more))
         (next (car tail))
         (rest (cdr tail)))
    ;; With nothing left after the wildcards, DIRECTORY itself matches.
    (unless tail
      (funcall function directory))
    (map-directory
     (if (null tail)
         (lambda (subdirectory)
           ;; Everything below DIRECTORY matches, recursively.
           (funcall function subdirectory)
           (map-wild-inferiors function tail subdirectory))
         (lambda (subdirectory)
           (let ((pieces (pathname-directory subdirectory)))
             (when (equal next (car (last pieces)))
               ;; SUBDIRECTORY's last component matches the next
               ;; non-wild piece: match the rest of the spec here.
               (map-matching-directories
                function
                (make-pathname :directory (append pieces rest)
                               :defaults subdirectory)))
             ;; Either way, keep descending -- the :WILD-INFERIORS can
             ;; also absorb this level.
             (map-wild-inferiors function tail subdirectory))))
     directory
     :files nil
     :directories t
     :errorp nil)))
;;; Part of DIRECTORY: implements iterating over entries in a directory, and
;;; matching them.
(defun map-matching-entries (function directory match-name match-type)
  ;; Part of DIRECTORY: call FUNCTION on every entry of DIRECTORY whose
  ;; name and type satisfy MATCH-NAME and MATCH-TYPE respectively.
  (flet ((maybe-call (entry)
           (when (and (funcall match-name (pathname-name entry))
                      (funcall match-type (pathname-type entry)))
             (funcall function entry))))
    (map-directory #'maybe-call
                   directory
                   :files t
                   :directories :as-files
                   :errorp nil)))
;;; NOTE: There is a fair amount of hair below that is probably not
;;; strictly necessary.
;;;
;;; The issue is the following: what does (DIRECTORY "SYS:*;") mean?
;;; Until 2004-01, SBCL's behaviour was unquestionably wrong, as it
;;; did not translate the logical pathname at all, but instead treated
;;; it as a physical one. Other Lisps seem to treat this call as
;;; equivalent to (DIRECTORY (TRANSLATE-LOGICAL-PATHNAME "SYS:*;")),
;;; which is fine as far as it goes, but not very interesting, and
;;; arguably counterintuitive. (PATHNAME-MATCH-P "SYS:SRC;" "SYS:*;")
;;; is true, so why should "SYS:SRC;" not show up in the call to
;;; DIRECTORY? (assuming the physical pathname corresponding to it
;;; exists, of course).
;;;
;;; So, the interpretation that I am pushing is for all pathnames
;;; matching the input pathname to be queried. This means that we
;;; need to compute the intersection of the input pathname and the
;;; logical host FROM translations, and then translate the resulting
;;; pathname using the host to the TO translation; this treatment is
;;; recursively invoked until we get a physical pathname, whereupon
;;; our physical DIRECTORY implementation takes over.
;;; FIXME: this is an incomplete implementation. It only works when
;;; both are logical pathnames (which is OK, because that's the only
;;; case when we call it), but there are other pitfalls as well: see
;;; the DIRECTORY-HELPER below for some, but others include a lack of
;;; pattern handling.
;;; The above was written by CSR, I (RMK) believe. The argument that
;;; motivates the interpretation is faulty, however: PATHNAME-MATCH-P
;;; returns true for (PATHNAME-MATCH-P #P"/tmp/*/" #P"/tmp/../"), but
;;; the latter pathname is not in the result of DIRECTORY on the
;;; former. Indeed, if DIRECTORY were constrained to return the
;;; truename for every pathname for which PATHNAME-MATCH-P returned
;;; true and which denoted a filename that named an existing file,
;;; (DIRECTORY #P"/tmp/**/") would be required to list every file on a
;;; Unix system, since any file can be named as though it were "below"
;;; /tmp, given the dotdot entries. So I think the strongest
;;; "consistency" we can define between PATHNAME-MATCH-P and DIRECTORY
;;; is that PATHNAME-MATCH-P returns true of everything DIRECTORY
;;; returns, but not vice versa.
;;; In any case, even if the motivation were sound, DIRECTORY on a
;;; wild logical pathname has no portable semantics. I see nothing in
;;; ANSI that requires implementations to support wild physical
;;; pathnames, and so there need not be any translation of a wild
;;; logical pathname to a physical pathname. So a program that calls
;;; DIRECTORY on a wild logical pathname is doing something
;;; non-portable at best. And if the only sensible semantics for
;;; DIRECTORY on a wild logical pathname is something like the
;;; following, it would be just as well if it signaled an error, since
;;; a program can't possibly rely on the result of an intersection of
;;; user-defined translations with a file system probe. (Potentially
;;; useful kinds of "pathname" that might not support wildcards could
;;; include pathname hosts that model unqueryable namespaces like HTTP
;;; URIs, or that model namespaces that it's not convenient to
;;; investigate, such as the namespace of TCP ports that some network
;;; host listens on. I happen to think it a bad idea to try to
;;; shoehorn such namespaces into a pathnames system, but people
;;; sometimes claim to want pathnames for these things.) -- RMK
;;; 2007-12-31.
(defun pathname-intersections (one two)
  ;; Intersect two logical pathnames component-wise, returning a list
  ;; of pathnames (the directory intersection can have several
  ;; solutions when :WILD-INFERIORS is involved).
  (aver (logical-pathname-p one))
  (aver (logical-pathname-p two))
  (labels
      ((combine (a b same-p)
         ;; Shared wildcard logic: :WILD matches anything, and a
         ;; missing or :UNSPECIFIC component defers to the other side;
         ;; otherwise the two sides must agree under SAME-P.
         (cond
           ((eq a :wild) b)
           ((eq b :wild) a)
           ((or (null a) (eq a :unspecific)) b)
           ((or (null b) (eq b :unspecific)) a)
           ((funcall same-p a b) a)
           (t nil)))
       (intersect-version (one two)
         (aver (typep one '(or null (member :newest :wild :unspecific)
                               integer)))
         (aver (typep two '(or null (member :newest :wild :unspecific)
                               integer)))
         (combine one two #'eql))
       (intersect-name/type (one two)
         (aver (typep one '(or null (member :wild :unspecific) string)))
         (aver (typep two '(or null (member :wild :unspecific) string)))
         (combine one two #'string=))
       (intersect-directory (one two)
         (aver (typep one '(or null (member :wild :unspecific) list)))
         (aver (typep two '(or null (member :wild :unspecific) list)))
         (cond
           ((eq one :wild) two)
           ((eq two :wild) one)
           ;; NOTE(review): these defaulting branches return a single
           ;; directory *list* rather than a list of directory lists,
           ;; unlike the general case below; presumably unreachable
           ;; for the pathnames DIRECTORY feeds in -- confirm.
           ((or (null one) (eq one :unspecific)) two)
           ((or (null two) (eq two :unspecific)) one)
           (t (aver (eq (car one) (car two)))
              (mapcar
               (lambda (tail) (cons (car one) tail))
               (intersect-directory-helper (cdr one) (cdr two)))))))
    (let ((version (intersect-version
                    (pathname-version one) (pathname-version two)))
          (name (intersect-name/type
                 (pathname-name one) (pathname-name two)))
          (type (intersect-name/type
                 (pathname-type one) (pathname-type two)))
          (host (pathname-host one)))
      (mapcar (lambda (dir)
                (make-pathname :host host :name name :type type
                               :version version :directory dir))
              (intersect-directory
               (pathname-directory one) (pathname-directory two))))))
;;; FIXME: written as its own function because I (CSR) don't
;;; understand it, so helping both debuggability and modularity. In
;;; case anyone is motivated to rewrite it, it returns a list of
;;; sublists representing the intersection of the two input directory
;;; paths (excluding the initial :ABSOLUTE or :RELATIVE).
;;;
;;; FIXME: Does not work with :UP or :BACK
;;; FIXME: Does not work with patterns
;;;
;;; FIXME: PFD suggests replacing this implementation with a DFA
;;; conversion of an NDFA. Find out (a) what this means and (b) if it
;;; turns out to be worth it.
(defun intersect-directory-helper (one two)
  ;; SIMPLE-INTERSECTION intersects two components neither of which is
  ;; :WILD-INFERIORS: :WILD absorbs the other component, and two plain
  ;; strings intersect only when they are STRING=.
  (flet ((simple-intersection (cone ctwo)
           (cond
             ((eq cone :wild) ctwo)
             ((eq ctwo :wild) cone)
             (t (aver (typep cone 'string))
                (aver (typep ctwo 'string))
                (if (string= cone ctwo) cone nil)))))
    (macrolet
        ;; Expands into a loop over every split point (from LOWER-BOUND
        ;; up to the full length of BOUNDING-SEQUENCE) at which the
        ;; leading :WILD-INFERIORS of the other path could absorb a
        ;; prefix of BOUNDING-SEQUENCE.  For each split, the consumed
        ;; prefix is prepended to every intersection of the two
        ;; remainders, and a :WILD-INFERIORS sitting exactly at the
        ;; split point is preserved.  ORDER is true when
        ;; BOUNDING-SEQUENCE came from ONE, false when from TWO.
        ((loop-possible-wild-inferiors-matches
             (lower-bound bounding-sequence order)
           (let ((index (gensym)) (g2 (gensym)) (g3 (gensym)) (l (gensym)))
             `(let ((,l (length ,bounding-sequence)))
                (loop for ,index from ,lower-bound to ,l
                      append (mapcar (lambda (,g2)
                                       (append
                                        (butlast ,bounding-sequence (- ,l ,index))
                                        ,g2))
                                     (mapcar
                                      (lambda (,g3)
                                        (append
                                         (if (eq (car (nthcdr ,index ,bounding-sequence))
                                                 :wild-inferiors)
                                             '(:wild-inferiors)
                                             nil) ,g3))
                                      (intersect-directory-helper
                                       ,@(if order
                                             `((nthcdr ,index one) (cdr two))
                                             `((cdr one) (nthcdr ,index two)))))))))))
      (cond
        ;; Both paths start with :WILD-INFERIORS: either both wildcards
        ;; advance together, or one of them absorbs a prefix of the
        ;; other path (lower bound 2 -- apparently to step past the two
        ;; leading components; TODO confirm).  DELETE-DUPLICATES
        ;; collapses the overlap between the three enumerations.
        ((and (eq (car one) :wild-inferiors)
              (eq (car two) :wild-inferiors))
         (delete-duplicates
          (append (mapcar (lambda (x) (cons :wild-inferiors x))
                          (intersect-directory-helper (cdr one) (cdr two)))
                  (loop-possible-wild-inferiors-matches 2 one t)
                  (loop-possible-wild-inferiors-matches 2 two nil))
          :test 'equal))
        ;; Only one path starts with :WILD-INFERIORS: it may absorb any
        ;; prefix, possibly empty, of the other path.
        ((eq (car one) :wild-inferiors)
         (delete-duplicates (loop-possible-wild-inferiors-matches 0 two nil)
                            :test 'equal))
        ((eq (car two) :wild-inferiors)
         (delete-duplicates (loop-possible-wild-inferiors-matches 0 one t)
                            :test 'equal))
        ;; Two exhausted paths intersect in exactly one way -- the
        ;; empty path -- hence a one-element list of NIL, not NIL.
        ((and (null one) (null two)) (list nil))
        ;; One path exhausted but not the other: no intersection.
        ((null one) nil)
        ((null two) nil)
        ;; Plain leading components: they must themselves intersect,
        ;; and each intersection of the tails is extended with the
        ;; intersected component.
        (t (and (simple-intersection (car one) (car two))
                (mapcar (lambda (x) (cons (simple-intersection
                                           (car one) (car two)) x))
                        (intersect-directory-helper (cdr one) (cdr two)))))))))
(defun ensure-directories-exist (pathspec &key verbose (mode #o777))
  #!+sb-doc
  "Test whether the directories containing the specified file
  actually exist, and attempt to create them if they do not.
  The MODE argument is a CMUCL/SBCL-specific extension to control
  the Unix permission bits."
  ;; Merge against the defaults and strip logical-pathname-ness so all
  ;; the work below operates on a physical pathname.
  (let ((pathname (physicalize-pathname (merge-pathnames (pathname pathspec))))
        (created-p nil))
    (when (wild-pathname-p pathname)
      (error 'simple-file-error
             :format-control "bad place for a wild pathname"
             :pathname pathspec))
    (let ((dir (pathname-directory pathname)))
      ;; Walk successively longer prefixes of the directory component,
      ;; creating each level that does not already exist.
      (loop for i from 1 upto (length dir)
            do (let ((newpath (make-pathname
                               :host (pathname-host pathname)
                               :device (pathname-device pathname)
                               :directory (subseq dir 0 i))))
                 (unless (probe-file newpath)
                   (let ((namestring (coerce (native-namestring newpath)
                                             'string)))
                     (when verbose
                       (format *standard-output*
                               "~&creating directory: ~A~%"
                               namestring))
                     ;; The mkdir(2) return value is ignored here;
                     ;; success is judged by re-probing the path below.
                     (sb!unix:unix-mkdir namestring mode)
                     (unless (probe-file newpath)
                       (restart-case (error
                                      'simple-file-error
                                      :pathname pathspec
                                      :format-control
                                      "can't create directory ~A"
                                      :format-arguments (list namestring))
                         (retry ()
                           :report "Retry directory creation."
                           ;; Starts over from the top rather than
                           ;; retrying just this level.
                           (ensure-directories-exist
                            pathspec
                            :verbose verbose :mode mode))
                         (continue ()
                           :report
                           "Continue as if directory creation was successful."
                           nil)))
                     ;; NOTE(review): CREATED-P is set even when the
                     ;; CONTINUE restart was chosen above, i.e. even if
                     ;; nothing was actually created -- confirm this is
                     ;; intended.
                     (setf created-p t)))))
      (values pathspec created-p))))
(/show0 "filesys.lisp 1000")
| null | https://raw.githubusercontent.com/antifuchs/sbcl/789bf105edca031aff991ad16a5fd207812336a0/src/code/filesys.lisp | lisp | file system interface functions -- fairly Unix-centric, but with
differences between Unix and Win32 papered over.
more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
Unix pathname host support
FIXME: the below shouldn't really be here, but in documentation
implementation-dependent decisions), but anyway it's probably not
what we currently do.
Unix namestrings have the following format:
namestring := [ directory ] [ file [ type [ version ]]]
directory := [ "/" ] { file "/" }*
file := [^/]*
type := "." [^/.]*
version := "." ([0-9]+ | "*")
Note: this grammar is ambiguous. The string foo.bar.5 can be
parsed as either just the file specified or as specifying the
file, type, and version. Therefore, we use the following rules
when confronted with an ambiguous file.type.version string:
considered a dot in the following rules.
- Otherwise, the last dot separates the file and the type.
If the directory, file, type components contain any of the
following characters, it is considered part of a wildcard pattern
and has the following meaning.
(FIXME: no it doesn't)
Any of these special characters can be preceded by a backslash to
cause it to be treated as a regular character.
Grabbing the kind of file when we have a namestring.
TRUENAME, PROBE-FILE, FILE-AUTHOR, FILE-WRITE-DATE.
has never had). These are now all relatively straightforward
wrappers around stat(2) and realpath(2), with the same basic logic
in all cases. The wrinkles to be aware of:
self-referring symlink to be the symlink itself.
* The old version of PROBE-FILE merged the pathspec against
*DEFAULT-PATHNAME-DEFAULTS* twice, and so lost when *D-P-D*
was a relative pathname. Even if the case where *D-P-D* is a
relative pathname is problematic, there's no particular reason
to get that wrong, so let's try not to.
* Note that while stat(2) is probably atomic, getting the truename
for a filename involves poking all over the place, and so is
subject to race conditions if other programs mutate the file
system while we're resolving symlinks. So it's not implausible for
realpath(3) to fail even if stat(2) succeeded. There's nothing
obvious we can do about this, however.
we think a file exists, so just be careful when rewriting this
routine.
of a pathname, a universal time, or a string (a file-author), or
returns an error code; otherwise, signal an error. Accepts
logical pathnames, too (but never returns LPNs). For internal
use.
realpath(3) gets us a canonical absolute
filename, even if the post-merge PATHNAME
is not absolute...
... but without any trailing slash.
that names an existing, dangling or self-referential
symlink denotes the symlink itself. stat(2) fails
we must distinguish cases where the symlink exists
from ones where there's a loop in the apparent
containing directory.
We do this reparse so as to return a
normalized pathname.
FILENAME exists, so its directory exists and
only the non-directory part is loopy. So
let's resolve FILENAME's directory part with
realpath(3), in order to get a canonical
absolute name for the directory, and then
type, and version, but the rest from the
truename of the directory. Since we turned
PATHNAME into FILENAME "as a file", FILENAME
does not end in a slash, and so we get the
directory part of FILENAME by reparsing
FILENAME and masking off its name, type, and
version bits. But note not to call ourselves
recursively, because we don't want to
since PATHNAME may be a relative pathname.
If we're still here, the file doesn't exist; error.
Note that eventually this routine might be different for streams
than for other pathname designators.
miscellaneous other operations
for error-checking side-effect
SBCL_HOME isn't set for :EXECUTABLE T embedded cores
(This is an ANSI Common Lisp function.)
otherwise obtains the home directory from the operating
DIRECTORY
as an asymptotically efficient way of removing duplicates
(which can arise when e.g. multiple symlinks map to the
same truename).
FIXME: Why not not TRUENAME? As reported by
TRUENAME causes a race condition whereby
removal of a file during the directory
operation causes an error. It's not clear
what the right thing to do is, though. --
we really have to do it here to get #p"foo/." mean the same
as #p"foo/./".
canonical order seems good just on the grounds that the
implementation should have repeatable behavior when possible.
and dealing with #p"foo/.." and #p"foo/."
FIXME: We should really canonicalize when we construct
pathnames. This is just wrong.
Given a native namestring, provides a WITH-HASH-TABLE-ITERATOR style
interface to mapping over namestrings of entries in the corresponding
directory.
This is our core directory access interface that we use to implement
DIRECTORY.
network shares.
Anything else parses as a file.
Part of DIRECTORY: implements matching the directory spec. Calls FUNCTION
Nothing wild -- the directory matches itself.
Part of DIRECTORY: implements iterating over a :WILD or pattern component
in the directory spec.
end of the line
Part of DIRECTORY: implements iterating over a :WILD-INFERIORS component
in the directory spec.
Part of DIRECTORY: implements iterating over entries in a directory, and
matching them.
NOTE: There is a fair amount of hair below that is probably not
strictly necessary.
The issue is the following: what does (DIRECTORY "SYS:*;") mean?
did not translate the logical pathname at all, but instead treated
which is fine as far as it goes, but not very interesting, and
DIRECTORY? (assuming the physical pathname corresponding to it
exists, of course).
So, the interpretation that I am pushing is for all pathnames
matching the input pathname to be queried. This means that we
need to compute the intersection of the input pathname and the
logical host FROM translations, and then translate the resulting
pathname using the host to the TO translation; this treatment is
recursively invoked until we get a physical pathname, whereupon
our physical DIRECTORY implementation takes over.
FIXME: this is an incomplete implementation. It only works when
both are logical pathnames (which is OK, because that's the only
case when we call it), but there are other pitfalls as well: see
the DIRECTORY-HELPER below for some, but others include a lack of
pattern handling.
The above was written by CSR, I (RMK) believe. The argument that
motivates the interpretation is faulty, however: PATHNAME-MATCH-P
returns true for (PATHNAME-MATCH-P #P"/tmp/*/" #P"/tmp/../"), but
the latter pathname is not in the result of DIRECTORY on the
former. Indeed, if DIRECTORY were constrained to return the
truename for every pathname for which PATHNAME-MATCH-P returned
true and which denoted a filename that named an existing file,
(DIRECTORY #P"/tmp/**/") would be required to list every file on a
Unix system, since any file can be named as though it were "below"
/tmp, given the dotdot entries. So I think the strongest
"consistency" we can define between PATHNAME-MATCH-P and DIRECTORY
is that PATHNAME-MATCH-P returns true of everything DIRECTORY
returns, but not vice versa.
In any case, even if the motivation were sound, DIRECTORY on a
wild logical pathname has no portable semantics. I see nothing in
ANSI that requires implementations to support wild physical
pathnames, and so there need not be any translation of a wild
DIRECTORY on a wild logical pathname is doing something
non-portable at best. And if the only sensible semantics for
DIRECTORY on a wild logical pathname is something like the
following, it would be just as well if it signaled an error, since
a program can't possibly rely on the result of an intersection of
user-defined translations with a file system probe. (Potentially
useful kinds of "pathname" that might not support wildcards could
include pathname hosts that model unqueryable namespaces like HTTP
URIs, or that model namespaces that it's not convenient to
investigate, such as the namespace of TCP ports that some network
host listens on. I happen to think it a bad idea to try to
shoehorn such namespaces into a pathnames system, but people
sometimes claim to want pathnames for these things.) -- RMK
FIXME: written as its own function because I (CSR) don't
understand it, so helping both debuggability and modularity. In
case anyone is motivated to rewrite it, it returns a list of
paths (excluding the initial :ABSOLUTE or :RELATIVE).
FIXME: Does not work with :UP or :BACK
FIXME: Does not work with patterns
turns out to be worth it. |
This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB!IMPL")
( chapter 19 makes a lot of requirements for documenting
- If the first character is a dot , it 's part of the file . It is not
Wildcard characters :
? - matches any one character
* - matches any zero or more characters .
[ abc ] - matches any of a , b , or c.
{ str1,str2, ... ,strn } - matches any of str1 , str2 , ... , or .
(defun remove-backslashes (namestr start end)
#!+sb-doc
"Remove any occurrences of #\\ from the string because we've already
checked for whatever they may have protected."
(declare (type simple-string namestr)
(type index start end))
(let* ((result (make-string (- end start) :element-type 'character))
(dst 0)
(quoted nil))
(do ((src start (1+ src)))
((= src end))
(cond (quoted
(setf (schar result dst) (schar namestr src))
(setf quoted nil)
(incf dst))
(t
(let ((char (schar namestr src)))
(cond ((char= char #\\)
(setq quoted t))
(t
(setf (schar result dst) char)
(incf dst)))))))
(when quoted
(error 'namestring-parse-error
:complaint "backslash in a bad place"
:namestring namestr
:offset (1- end)))
(%shrink-vector result dst)))
(defun maybe-make-pattern (namestr start end)
(declare (type simple-string namestr)
(type index start end))
(collect ((pattern))
(let ((quoted nil)
(any-quotes nil)
(last-regular-char nil)
(index start))
(flet ((flush-pending-regulars ()
(when last-regular-char
(pattern (if any-quotes
(remove-backslashes namestr
last-regular-char
index)
(subseq namestr last-regular-char index)))
(setf any-quotes nil)
(setf last-regular-char nil))))
(loop
(when (>= index end)
(return))
(let ((char (schar namestr index)))
(cond (quoted
(incf index)
(setf quoted nil))
((char= char #\\)
(setf quoted t)
(setf any-quotes t)
(unless last-regular-char
(setf last-regular-char index))
(incf index))
((char= char #\?)
(flush-pending-regulars)
(pattern :single-char-wild)
(incf index))
((char= char #\*)
(flush-pending-regulars)
(pattern :multi-char-wild)
(incf index))
((char= char #\[)
(flush-pending-regulars)
(let ((close-bracket
(position #\] namestr :start index :end end)))
(unless close-bracket
(error 'namestring-parse-error
:complaint "#\\[ with no corresponding #\\]"
:namestring namestr
:offset index))
(pattern (cons :character-set
(subseq namestr
(1+ index)
close-bracket)))
(setf index (1+ close-bracket))))
(t
(unless last-regular-char
(setf last-regular-char index))
(incf index)))))
(flush-pending-regulars)))
(cond ((null (pattern))
"")
((null (cdr (pattern)))
(let ((piece (first (pattern))))
(typecase piece
((member :multi-char-wild) :wild)
(simple-string piece)
(t
(make-pattern (pattern))))))
(t
(make-pattern (pattern))))))
(defun unparse-physical-piece (thing)
(etypecase thing
((member :wild) "*")
(simple-string
(let* ((srclen (length thing))
(dstlen srclen))
(dotimes (i srclen)
(case (schar thing i)
((#\* #\? #\[)
(incf dstlen))))
(let ((result (make-string dstlen))
(dst 0))
(dotimes (src srclen)
(let ((char (schar thing src)))
(case char
((#\* #\? #\[)
(setf (schar result dst) #\\)
(incf dst)))
(setf (schar result dst) char)
(incf dst)))
result)))
(pattern
(with-output-to-string (s)
(dolist (piece (pattern-pieces thing))
(etypecase piece
(simple-string
(write-string piece s))
(symbol
(ecase piece
(:multi-char-wild
(write-string "*" s))
(:single-char-wild
(write-string "?" s))))
(cons
(case (car piece)
(:character-set
(write-string "[" s)
(write-string (cdr piece) s)
(write-string "]" s))
(t
(error "invalid pattern piece: ~S" piece))))))))))
(defun make-matcher (piece)
(cond ((eq piece :wild)
(constantly t))
((typep piece 'pattern)
(lambda (other)
(when (stringp other)
(pattern-matches piece other))))
(t
(lambda (other)
(equal piece other)))))
(/show0 "filesys.lisp 160")
(defun extract-name-type-and-version (namestr start end)
(declare (type simple-string namestr)
(type index start end))
(let* ((last-dot (position #\. namestr :start (1+ start) :end end
:from-end t)))
(cond
(last-dot
(values (maybe-make-pattern namestr start last-dot)
(maybe-make-pattern namestr (1+ last-dot) end)
:newest))
(t
(values (maybe-make-pattern namestr start end)
nil
:newest)))))
(/show0 "filesys.lisp 200")
(defun native-file-kind (namestring)
(multiple-value-bind (existsp errno ino mode)
#!-win32
(sb!unix:unix-lstat namestring)
#!+win32
(sb!unix:unix-stat namestring)
(declare (ignore errno ino))
(when existsp
(let ((ifmt (logand mode sb!unix:s-ifmt)))
(case ifmt
(#.sb!unix:s-ifreg :file)
(#.sb!unix:s-ifdir :directory)
#!-win32
(#.sb!unix:s-iflnk :symlink)
(t :special))))))
Rewritten in 12/2007 by RMK , replacing 13 + year old CMU code that
made a mess of things in order to support search lists ( which SBCL
* defines the truename of an existing , dangling or
* Windows ' apparent analogue of ) is called
GetFullPathName , and it 's a bit less useful than ) .
In particular , while ) errors in case the file does n't
exist , GetFullPathName seems to return a filename in all cases .
As ) is not atomic anyway , we only ever call it when
Given a pathname designator , some quality to query for , return one
NIL . QUERY - FOR may be one of : TRUENAME , : , : WRITE - DATE ,
: AUTHOR . If ERRORP is false , return NIL in case the file system
(defun query-file-system (pathspec query-for &optional (errorp t))
(let ((pathname (translate-logical-pathname
(merge-pathnames
(pathname pathspec)
(sane-default-pathname-defaults)))))
(when (wild-pathname-p pathname)
(error 'simple-file-error
:pathname pathname
:format-control "~@<can't find the ~A of wild pathname ~A~
(physicalized from ~A).~:>"
:format-arguments (list query-for pathname pathspec)))
(flet ((fail (note-format pathname errno)
(if errorp
(simple-file-perror note-format pathname errno)
(return-from query-file-system nil))))
(let ((filename (native-namestring pathname :as-file t)))
(multiple-value-bind (existsp errno ino mode nlink uid gid rdev size
atime mtime)
(sb!unix:unix-stat filename)
(declare (ignore ino nlink gid rdev size atime
#!+win32 uid))
(if existsp
(case query-for
(:existence (nth-value
0
(parse-native-namestring
filename
(pathname-host pathname)
(sane-default-pathname-defaults)
:as-directory (eql (logand mode sb!unix:s-ifmt)
sb!unix:s-ifdir))))
(:truename (nth-value
0
(parse-native-namestring
Note : in case the file is stat'able , POSIX
(multiple-value-bind (realpath errno)
(sb!unix:unix-realpath filename)
(if realpath
realpath
(fail "couldn't resolve ~A" filename errno)))
(pathname-host pathname)
(sane-default-pathname-defaults)
:as-directory (eql (logand mode sb!unix:s-ifmt)
sb!unix:s-ifdir))))
(:author
#!-win32
(sb!unix:uid-username uid))
(:write-date (+ unix-to-universal-time mtime)))
(progn
SBCL has for many years had a policy that a pathname
and sets errno to ENOENT or ELOOP respectively , but
#!-win32
(multiple-value-bind (linkp ignore ino mode nlink uid gid rdev
size atime mtime)
(sb!unix:unix-lstat filename)
(declare (ignore ignore ino mode nlink gid rdev size atime))
(when (and (or (= errno sb!unix:enoent)
(= errno sb!unix:eloop))
linkp)
(return-from query-file-system
(case query-for
(:existence
(parse-native-namestring
filename (pathname-host pathname)))
(:truename
So here 's a trick : since succeded ,
return a pathname having PATHNAME 's name ,
re - merge against * DEFAULT - PATHNAME - DEFAULTS * ,
(merge-pathnames
(nth-value
0
(parse-native-namestring
(multiple-value-bind (realpath errno)
(sb!unix:unix-realpath
(native-namestring
(make-pathname
:name :unspecific
:type :unspecific
:version :unspecific
:defaults (parse-native-namestring
filename
(pathname-host pathname)
(sane-default-pathname-defaults)))))
(if realpath
realpath
(fail "couldn't resolve ~A" filename errno)))
(pathname-host pathname)
(sane-default-pathname-defaults)
:as-directory t))
pathname))
(:author (sb!unix:uid-username uid))
(:write-date (+ unix-to-universal-time mtime))))))
(fail
(format nil "failed to find the ~A of ~~A" query-for)
pathspec errno))))))))
(defun probe-file (pathspec)
#!+sb-doc
"Return the truename of PATHSPEC if the truename can be found,
or NIL otherwise. See TRUENAME for more information."
(query-file-system pathspec :truename nil))
(defun truename (pathspec)
#!+sb-doc
"If PATHSPEC is a pathname that names an existing file, return
a pathname that denotes a canonicalized name for the file. If
pathspec is a stream associated with a file, return a pathname
that denotes a canonicalized name for the file associated with
the stream.
An error of type FILE-ERROR is signalled if no such file exists
or if the file system is such that a canonicalized file name
cannot be determined or if the pathname is wild.
Under Unix, the TRUENAME of a symlink that links to itself or to
a file that doesn't exist is considered to be the name of the
broken symlink itself."
(if (streamp pathspec)
(query-file-system pathspec :truename)
(query-file-system pathspec :truename)))
(defun file-author (pathspec)
#!+sb-doc
"Return the author of the file specified by PATHSPEC. Signal an
error of type FILE-ERROR if no such file exists, or if PATHSPEC
is a wild pathname."
(query-file-system pathspec :author))
(defun file-write-date (pathspec)
#!+sb-doc
"Return the write date of the file specified by PATHSPEC.
An error of type FILE-ERROR is signaled if no such file exists,
or if PATHSPEC is a wild pathname."
(query-file-system pathspec :write-date))
(/show0 "filesys.lisp 700")
(defun rename-file (file new-name)
#!+sb-doc
"Rename FILE to have the specified NEW-NAME. If FILE is a stream open to a
file, then the associated file is renamed."
(let* ((original (truename file))
(original-namestring (native-namestring original :as-file t))
(new-name (merge-pathnames new-name original))
(new-namestring (native-namestring (physicalize-pathname new-name)
:as-file t)))
(unless new-namestring
(error 'simple-file-error
:pathname new-name
:format-control "~S can't be created."
:format-arguments (list new-name)))
(multiple-value-bind (res error)
(sb!unix:unix-rename original-namestring new-namestring)
(unless res
(error 'simple-file-error
:pathname new-name
:format-control "~@<couldn't rename ~2I~_~A ~I~_to ~2I~_~A: ~
~I~_~A~:>"
:format-arguments (list original new-name (strerror error))))
(when (streamp file)
(file-name file new-name))
(values new-name original (truename new-name)))))
(defun delete-file (file)
#!+sb-doc
"Delete the specified FILE.
If FILE is a stream, on Windows the stream is closed immediately. On Unix
plaforms the stream remains open, allowing IO to continue: the OS resources
associated with the deleted file remain available till the stream is closed as
per standard Unix unlink() behaviour."
(let* ((pathname (translate-logical-pathname file))
(namestring (native-namestring pathname :as-file t)))
#!+win32
(when (streamp file)
(close file))
(multiple-value-bind (res err) (sb!unix:unix-unlink namestring)
(unless res
(simple-file-perror "couldn't delete ~A" namestring err))))
t)
(defun delete-directory (pathspec &key recursive)
"Deletes the directory designated by PATHSPEC (a pathname designator).
Returns the truename of the directory deleted.
If RECURSIVE is false \(the default), signals an error unless the directory is
empty. If RECURSIVE is true, first deletes all files and subdirectories. If
RECURSIVE is true and the directory contains symbolic links, the links are
deleted, not the files and directories they point to.
Signals an error if PATHSPEC designates a file instead of a directory, or if
the directory could not be deleted for any reason.
\(DELETE-DIRECTORY \"/tmp/foo\") and \(DELETE-DIRECTORY \"/tmp/foo/\") both
delete the \"foo\" subdirectory of \"/tmp\", or signal an error if it does not
exist or is a file.
Experimental: interface subject to change."
(declare (type pathname-designator pathspec))
(with-pathname (pathname pathspec)
(let ((truename (truename (translate-logical-pathname pathname))))
(labels ((recurse (dir)
(map-directory #'recurse dir
:files nil
:directories t
:classify-symlinks nil)
(map-directory #'delete-file dir
:files t
:directories nil
:classify-symlinks nil)
(delete-dir dir))
(delete-dir (dir)
(let* ((namestring (native-namestring dir :as-file t))
(res (alien-funcall (extern-alien #!-win32 "rmdir"
#!+win32 "_rmdir"
(function int c-string))
namestring)))
(if (minusp res)
(simple-file-perror "Could not delete directory ~A:~% ~A"
namestring (get-errno))
dir))))
(if recursive
(recurse truename)
(delete-dir truename))))))
(defun sbcl-homedir-pathname ()
(let ((sbcl-home (posix-getenv "SBCL_HOME")))
(when (and sbcl-home (not (string= sbcl-home "")))
(parse-native-namestring sbcl-home
#!-win32 sb!impl::*unix-host*
#!+win32 sb!impl::*win32-host*
*default-pathname-defaults*
:as-directory t))))
(defun user-homedir-namestring (&optional username)
(if username
(sb!unix:user-homedir username)
(let ((env-home (posix-getenv "HOME")))
(if (and env-home (not (string= env-home "")))
env-home
#!-win32
(sb!unix:uid-homedir (sb!unix:unix-getuid))))))
(defun user-homedir-pathname (&optional host)
#!+sb-doc
"Return the home directory of the user as a pathname. If the HOME
environment variable has been specified, the directory it designates
system. HOST argument is ignored by SBCL."
(declare (ignore host))
(values
(parse-native-namestring
(or (user-homedir-namestring)
#!+win32
(sb!win32::get-folder-namestring sb!win32::csidl_profile))
#!-win32 sb!impl::*unix-host*
#!+win32 sb!impl::*win32-host*
*default-pathname-defaults*
:as-directory t)))
(defun directory (pathspec &key (resolve-symlinks t))
  #!+sb-doc
  "Return a list of PATHNAMEs, each the TRUENAME of a file that matched the
given pathname. Note that the interaction between this ANSI-specified
TRUENAMEing and the semantics of the Unix filesystem (symbolic links..) means
this function can sometimes return files which don't have the same directory
as PATHNAME. If :RESOLVE-SYMLINKS is NIL, don't resolve symbolic links in
matching filenames."
  (let (;; We create one entry in this hash table for each truename,
        ;; as an asymptotically fast way of removing duplicates.
        (truenames (make-hash-table :test #'equal)))
    (labels ((record (pathname)
               (let ((truename (if resolve-symlinks
                                   ;; FIXME: as reported by Milan Zamazal
                                   ;; sbcl-devel 2003-10-05, using TRUENAME
                                   ;; here is racy against concurrent file
                                   ;; removal. -- CSR, 2003-10-13
                                   (query-file-system pathname :truename nil)
                                   (query-file-system pathname :existence nil))))
                 (when truename
                   (setf (gethash (namestring truename) truenames)
                         truename))))
             (do-physical-pathnames (pathname)
               (aver (not (logical-pathname-p pathname)))
               (let* (;; KLUDGE: Since we don't canonize pathnames on construction,
                      ;; we have to do it here to get matching right.
                      (pathname (canonicalize-pathname pathname))
                      (name (pathname-name pathname))
                      (type (pathname-type pathname))
                      (match-name (make-matcher name))
                      (match-type (make-matcher type)))
                 (map-matching-directories
                  (if (or name type)
                      ;; If there is a name or type component, record the
                      ;; matching entries of each matching directory...
                      (lambda (directory)
                        (map-matching-entries #'record
                                              directory
                                              match-name
                                              match-type))
                      ;; ...otherwise record the directories themselves.
                      #'record)
                  pathname)))
             (do-pathnames (pathname)
               (if (logical-pathname-p pathname)
                   ;; Translate logical pathnames via each applicable
                   ;; translation and recurse on the results.
                   (let ((host (intern-logical-host (pathname-host pathname))))
                     (dolist (x (logical-host-canon-transls host))
                       (destructuring-bind (from to) x
                         (let ((intersections
                                (pathname-intersections pathname from)))
                           (dolist (p intersections)
                             (do-pathnames (translate-pathname p from to)))))))
                   (do-physical-pathnames pathname))))
      (declare (truly-dynamic-extent #'record))
      (do-pathnames (merge-pathnames pathspec)))
    (mapcar #'cdr
            ;; Sorting isn't required by the ANSI spec, but sorting into some
            ;; canonical order seems good-neighborly.
            (sort (loop for namestring being each hash-key in truenames
                        using (hash-value truename)
                        collect (cons namestring truename))
                  #'string<
                  :key #'car))))
;;; Simplify PATHNAME for directory traversal: fold :BACK/:UP into the
;;; directory list, map :UNSPECIFIC name/type to NIL, and normalize the
;;; "." and ".." name-and-type spellings.
(defun canonicalize-pathname (pathname)
  ;; We're really only interested in :UNSPECIFIC -> NIL, :BACK and :UP,
  ;; and dealing with #p"foo/." and #p"foo/..".
  (labels ((simplify (piece)
             ;; :UNSPECIFIC is treated the same as NIL.
             (unless (eq :unspecific piece)
               piece))
           (canonicalize-directory (directory)
             (let (pieces)
               (dolist (piece directory)
                 (if (and pieces (member piece '(:back :up)))
                     ;; Fold out the piece before this one, unless doing
                     ;; so is meaningless or an error.
                     (case (car pieces)
                       ((:absolute :wild-inferiors)
                        (error 'simple-file-error
                               :format-control "Invalid use of ~S after ~S."
                               :format-arguments (list piece (car pieces))
                               :pathname pathname))
                       ((:relative :up :back)
                        (push piece pieces))
                       (t
                        (pop pieces)))
                     (push piece pieces)))
               (nreverse pieces))))
    (let ((name (simplify (pathname-name pathname)))
          (type (simplify (pathname-type pathname)))
          (dir (canonicalize-directory (pathname-directory pathname))))
      (cond ((equal "." name)
             (cond ((not type)
                    ;; Name "." and no type: #p"foo/." names the directory.
                    (make-pathname :name nil :defaults pathname))
                   ((equal "" type)
                    ;; Name "." and empty type: #p"foo/.." drops the last
                    ;; directory component.
                    (make-pathname :name nil
                                   :type nil
                                   :directory (butlast dir)
                                   :defaults pathname))))
            (t
             (make-pathname :name name :type type
                            :directory dir
                            :defaults pathname))))))
;;; Bind ITERATOR as a zero-argument local macro over the native
;;; directory named by NAMESTRING for the extent of BODY. Each call of
;;; (ITERATOR) yields the next entry name -- "." and ".." are skipped by
;;; the underlying iterator -- or NIL when the directory is exhausted.
;;; ERRORP is passed through to the directory-opening call.
(defmacro with-native-directory-iterator ((iterator namestring &key errorp) &body body)
  (with-unique-names (one-iter)
    `(dx-flet
         ((iterate (,one-iter)
            (declare (type function ,one-iter))
            ;; The local macro simply funcalls the iterator closure that
            ;; CALL-WITH-NATIVE-DIRECTORY-ITERATOR passes in.
            (macrolet ((,iterator ()
                         `(funcall ,',one-iter)))
              ,@body)))
       (call-with-native-directory-iterator #'iterate ,namestring ,errorp))))
;;; Helper for WITH-NATIVE-DIRECTORY-ITERATOR: open the directory named
;;; by NAMESTRING, call FUNCTION with a closure that returns successive
;;; entry names (skipping "." and "..", returning NIL at the end), and
;;; guarantee the directory stream is closed on exit. A NIL NAMESTRING
;;; is a no-op.
(defun call-with-native-directory-iterator (function namestring errorp)
  (declare (type (or null string) namestring)
           (function function))
  (let (dp)
    (when namestring
      (dx-flet
          ((one-iter ()
             (tagbody
              :next
                (let ((ent (sb!unix:unix-readdir dp nil)))
                  (when ent
                    (let ((name (sb!unix:unix-dirent-name ent)))
                      (when name
                        (cond ((equal "." name)
                               (go :next))
                              ((equal ".." name)
                               (go :next))
                              (t
                               (return-from one-iter name))))))))))
        (unwind-protect
             (progn
               (setf dp (sb!unix:unix-opendir namestring errorp))
               (when dp
                 (funcall function #'one-iter)))
          ;; Always close the native handle, even on non-local exit.
          (when dp
            (sb!unix:unix-closedir dp nil)))))))
(defun map-directory (function directory &key (files t) (directories t)
                                              (classify-symlinks) (errorp t))
  #!+sb-doc
  "Map over entries in DIRECTORY. Keyword arguments specify which entries to
map over, and how:
:FILES
If true, call FUNCTION with the pathname of each file in DIRECTORY.
Defaults to T.
:DIRECTORIES
If true, call FUNCTION with a pathname for each subdirectory of DIRECTORY.
If :AS-FILES, the pathname used is a pathname designating the subdirectory
as a file in DIRECTORY. Otherwise the pathname used is a directory
pathname. Defaults to T.
:CLASSIFY-SYMLINKS
If T, the decision to call FUNCTION with the pathname of a symbolic link
depends on the resolution of the link: if it points to a directory, it is
considered a directory entry, otherwise a file entry. If false, all
symbolic links are considered file entries. Defaults to T. In both cases
the pathname used for the symbolic link is not fully resolved, but names it
as an immediate child of DIRECTORY.
:ERRORP
If true, signal an error if DIRECTORY does not exist, cannot be read, etc.
Defaults to T.
Experimental: interface subject to change."
  (declare (pathname-designator directory))
  (let* ((fun (%coerce-callable-to-fun function))
         (as-files (eq :as-files directories))
         (physical (physicalize-pathname directory))
         ;; Not QUERY-FILE-SYSTEM :TRUENAME, since it doesn't work on Windows.
         (realname (sb!unix:unix-realpath (native-namestring physical :as-file t)))
         (canonical (if realname
                        (parse-native-namestring realname
                                                 (pathname-host physical)
                                                 (sane-default-pathname-defaults)
                                                 :as-directory t)
                        ;; Directory doesn't exist: nothing to map over.
                        (return-from map-directory nil)))
         (dirname (native-namestring canonical)))
    (flet ((map-it (name dirp)
             (funcall fun
                      (merge-pathnames (parse-native-namestring
                                        name nil physical
                                        :as-directory (and dirp (not as-files)))
                                       physical))))
      (with-native-directory-iterator (next dirname :errorp errorp)
        (loop for name = (next)
              while name
              do (let* ((full (concatenate 'string dirname name))
                        (kind (native-file-kind full)))
                   (when kind
                     (case kind
                       (:directory
                        (when directories
                          (map-it name t)))
                       (:symlink
                        (if classify-symlinks
                            ;; Resolve the link just far enough to decide
                            ;; whether it designates a directory.
                            (let* ((tmpname (merge-pathnames
                                             (parse-native-namestring
                                              name nil physical :as-directory nil)
                                             physical))
                                   (truename (query-file-system tmpname :truename nil)))
                              (if (or (not truename)
                                      (or (pathname-name truename) (pathname-type truename)))
                                  (when files
                                    (funcall fun tmpname))
                                  (when directories
                                    (map-it name t))))
                            (when files
                              (map-it name nil))))
                       (t
                        (when files
                          (map-it name nil)))))))))))
;;; Part of DIRECTORY: call FUNCTION
;;; with all DIRECTORIES that match the directory portion of PATHSPEC.
(defun map-matching-directories (function pathspec)
  (let* ((dir (pathname-directory pathspec))
         (length (length dir))
         (wild (position-if (lambda (elt)
                              (or (eq :wild elt) (typep elt 'pattern)))
                            dir))
         (wild-inferiors (position :wild-inferiors dir))
         ;; END is the index of the first wild component (if any); the
         ;; prefix before it names a concrete starting directory.
         (end (cond ((and wild wild-inferiors)
                     (min wild wild-inferiors))
                    (t
                     (or wild wild-inferiors length))))
         (rest (subseq dir end))
         (starting-point (make-pathname :directory (subseq dir 0 end)
                                        :device (pathname-device pathspec)
                                        :host (pathname-host pathspec)
                                        :name nil
                                        :type nil
                                        :version nil)))
    (cond (wild-inferiors
           (map-wild-inferiors function rest starting-point))
          (wild
           (map-wild function rest starting-point))
          (t
           ;; Nothing wild: the starting point itself is the only match.
           (funcall function starting-point))))
  nil)
;;; Return the final component of PATHNAME's directory list, or NIL if
;;; the directory list is empty.
(defun last-directory-piece (pathname)
  (first (last (pathname-directory pathname))))
;;; Part of DIRECTORY: THIS, the first component of MORE, is :WILD or a
;;; pattern. Map FUNCTION over the subdirectories of DIRECTORY that
;;; match THIS, dispatching on the next remaining component to decide
;;; how to continue matching.
(defun map-wild (function more directory)
  (let ((this (pop more))
        (next (car more)))
    (flet ((cont (subdirectory)
             (cond ((not more)
                    ;; Nothing left to match: this subdirectory is a hit.
                    (funcall function subdirectory))
                   ((or (eq :wild next) (typep next 'pattern))
                    (map-wild function more subdirectory))
                   ((eq :wild-inferiors next)
                    (map-wild-inferiors function more subdirectory))
                   (t
                    ;; Remaining components are literal: append them and
                    ;; match directly.
                    (let ((this (pathname-directory subdirectory)))
                      (map-matching-directories
                       function
                       (make-pathname :directory (append this more)
                                      :defaults subdirectory)))))))
      (map-directory
       (if (eq :wild this)
           #'cont
           ;; THIS is a pattern: filter on the last directory component.
           (lambda (sub)
             (when (pattern-matches this (last-directory-piece sub))
               (funcall #'cont sub))))
       directory
       :files nil
       :directories t
       :errorp nil))))
;;; Part of DIRECTORY: match a :WILD-INFERIORS component by walking the
;;; whole subtree under DIRECTORY, calling FUNCTION on every directory
;;; that satisfies the remaining components in MORE.
(defun map-wild-inferiors (function more directory)
  ;; Wild components immediately following :WILD-INFERIORS add nothing.
  (loop while (member (car more) '(:wild :wild-inferiors))
        do (pop more))
  (let ((next (car more))
        (rest (cdr more)))
    (unless more
      ;; :WILD-INFERIORS also matches zero intermediate directories.
      (funcall function directory))
    (map-directory
     (cond ((not more)
            ;; No trailing literal components: every directory in the
            ;; subtree matches.
            (lambda (pathname)
              (funcall function pathname)
              (map-wild-inferiors function more pathname)))
           (t
            ;; Trailing literal components: only report subtrees whose
            ;; last component equals NEXT, then keep descending.
            (lambda (pathname)
              (let ((this (pathname-directory pathname)))
                (when (equal next (car (last this)))
                  (map-matching-directories
                   function
                   (make-pathname :directory (append this rest)
                                  :defaults pathname)))
                (map-wild-inferiors function more pathname)))))
     directory
     :files nil
     :directories t
     :errorp nil)))
;;; Part of DIRECTORY: call FUNCTION on every entry of DIRECTORY whose
;;; name and type satisfy MATCH-NAME and MATCH-TYPE respectively.
;;; Subdirectories are presented as files (:AS-FILES) so that their
;;; name/type components are matchable.
(defun map-matching-entries (function directory match-name match-type)
  (flet ((maybe-call (file)
           (when (and (funcall match-name (pathname-name file))
                      (funcall match-type (pathname-type file)))
             (funcall function file))))
    (map-directory #'maybe-call
                   directory
                   :files t
                   :directories :as-files
                   :errorp nil)))
;;; NOTE on DIRECTORY and logical pathnames:
;;; Until 2004-01, SBCL's behaviour was unquestionably wrong, as it
;;; did not translate a logical pathname argument, but instead treated
;;; it as a physical one. Other Lisps seem to to treat this call as
;;; equivalent to (DIRECTORY (TRANSLATE-LOGICAL-PATHNAME "SYS:*;")),
;;; which is arguably counterintuitive. (PATHNAME-MATCH-P "SYS:SRC;" "SYS:*;")
;;; is true, so why should "SYS:SRC;" not show up in the call to
;;; DIRECTORY? We now translate each matching
;;; logical pathname to a physical pathname. So a program that calls
;;; DIRECTORY with a logical pathname argument gets physical results.
;;; -- CSR, 2007-12-31.
;;; Given two logical pathnames ONE and TWO, return a list of pathnames
;;; describing their intersection; used by DIRECTORY to decide which
;;; translations of a logical pathname can match.
(defun pathname-intersections (one two)
  (aver (logical-pathname-p one))
  (aver (logical-pathname-p two))
  (labels
      (;; A version intersects :WILD / NIL / :UNSPECIFIC, or an EQL
       ;; version; otherwise the intersection is empty (NIL).
       (intersect-version (one two)
         (aver (typep one '(or null (member :newest :wild :unspecific)
                            integer)))
         (aver (typep two '(or null (member :newest :wild :unspecific)
                            integer)))
         (cond
           ((eq one :wild) two)
           ((eq two :wild) one)
           ((or (null one) (eq one :unspecific)) two)
           ((or (null two) (eq two :unspecific)) one)
           ((eql one two) one)
           (t nil)))
       ;; Same rules for name and type, with STRING= equality.
       (intersect-name/type (one two)
         (aver (typep one '(or null (member :wild :unspecific) string)))
         (aver (typep two '(or null (member :wild :unspecific) string)))
         (cond
           ((eq one :wild) two)
           ((eq two :wild) one)
           ((or (null one) (eq one :unspecific)) two)
           ((or (null two) (eq two :unspecific)) one)
           ((string= one two) one)
           (t nil)))
       ;; Directory intersection can produce several results; the
       ;; componentwise work happens in INTERSECT-DIRECTORY-HELPER.
       (intersect-directory (one two)
         (aver (typep one '(or null (member :wild :unspecific) list)))
         (aver (typep two '(or null (member :wild :unspecific) list)))
         (cond
           ((eq one :wild) two)
           ((eq two :wild) one)
           ((or (null one) (eq one :unspecific)) two)
           ((or (null two) (eq two :unspecific)) one)
           (t (aver (eq (car one) (car two)))
              (mapcar
               (lambda (x) (cons (car one) x))
               (intersect-directory-helper (cdr one) (cdr two)))))))
    ;; Build one result pathname per directory-list intersection.
    (let ((version (intersect-version
                    (pathname-version one) (pathname-version two)))
          (name (intersect-name/type
                 (pathname-name one) (pathname-name two)))
          (type (intersect-name/type
                 (pathname-type one) (pathname-type two)))
          (host (pathname-host one)))
      (mapcar (lambda (d)
                (make-pathname :host host :name name :type type
                               :version version :directory d))
              (intersect-directory
               (pathname-directory one) (pathname-directory two))))))
;;; Return a list of
;;; sublists representing the intersection of the two input directory
;;; lists.
;;; FIXME: PFD suggests replacing this implementation with a DFA
;;; conversion of a NDFA. Find out (a) what this means and (b) if it
;;; is worth it.
(defun intersect-directory-helper (one two)
  ;; Intersect two directory-component lists (the CDRs of directory
  ;; lists, i.e. without the leading :ABSOLUTE/:RELATIVE).
  (flet (;; Intersection of two single non-:WILD-INFERIORS components.
         (simple-intersection (cone ctwo)
           (cond
             ((eq cone :wild) ctwo)
             ((eq ctwo :wild) cone)
             (t (aver (typep cone 'string))
                (aver (typep ctwo 'string))
                (if (string= cone ctwo) cone nil)))))
    (macrolet
        (;; Expand to a LOOP that enumerates every way a leading
         ;; :WILD-INFERIORS in one list can absorb a prefix (of length
         ;; >= LOWER-BOUND) of BOUNDING-SEQUENCE; ORDER selects which
         ;; argument carries the :WILD-INFERIORS.
         (loop-possible-wild-inferiors-matches
             (lower-bound bounding-sequence order)
           (let ((index (gensym)) (g2 (gensym)) (g3 (gensym)) (l (gensym)))
             `(let ((,l (length ,bounding-sequence)))
                (loop for ,index from ,lower-bound to ,l
                      append (mapcar (lambda (,g2)
                                       (append
                                        (butlast ,bounding-sequence (- ,l ,index))
                                        ,g2))
                                     (mapcar
                                      (lambda (,g3)
                                        (append
                                         (if (eq (car (nthcdr ,index ,bounding-sequence))
                                                 :wild-inferiors)
                                             '(:wild-inferiors)
                                             nil) ,g3))
                                      (intersect-directory-helper
                                       ,@(if order
                                             `((nthcdr ,index one) (cdr two))
                                             `((cdr one) (nthcdr ,index two)))))))))))
      (cond
        ;; Both lists start with :WILD-INFERIORS: combine the results of
        ;; keeping it plus every split of either side.
        ((and (eq (car one) :wild-inferiors)
              (eq (car two) :wild-inferiors))
         (delete-duplicates
          (append (mapcar (lambda (x) (cons :wild-inferiors x))
                          (intersect-directory-helper (cdr one) (cdr two)))
                  (loop-possible-wild-inferiors-matches 2 one t)
                  (loop-possible-wild-inferiors-matches 2 two nil))
          :test 'equal))
        ((eq (car one) :wild-inferiors)
         (delete-duplicates (loop-possible-wild-inferiors-matches 0 two nil)
                            :test 'equal))
        ((eq (car two) :wild-inferiors)
         (delete-duplicates (loop-possible-wild-inferiors-matches 0 one t)
                            :test 'equal))
        ;; Base cases: both empty intersect as the empty directory; one
        ;; empty while the other is not means no intersection.
        ((and (null one) (null two)) (list nil))
        ((null one) nil)
        ((null two) nil)
        ;; Plain components: intersect the heads and recurse on tails.
        (t (and (simple-intersection (car one) (car two))
                (mapcar (lambda (x) (cons (simple-intersection
                                           (car one) (car two)) x))
                        (intersect-directory-helper (cdr one) (cdr two)))))))))
(defun ensure-directories-exist (pathspec &key verbose (mode #o777))
  #!+sb-doc
  "Test whether the directories containing the specified file
actually exist, and attempt to create them if they do not.
The MODE argument is a CMUCL/SBCL-specific extension to control
the Unix permission bits."
  (let ((pathname (physicalize-pathname (merge-pathnames (pathname pathspec))))
        (created-p nil))
    (when (wild-pathname-p pathname)
      (error 'simple-file-error
             :format-control "bad place for a wild pathname"
             :pathname pathspec))
    (let ((dir (pathname-directory pathname)))
      ;; Walk successively longer prefixes of the directory list,
      ;; creating each missing level in turn.
      (loop for i from 1 upto (length dir)
            do (let ((newpath (make-pathname
                               :host (pathname-host pathname)
                               :device (pathname-device pathname)
                               :directory (subseq dir 0 i))))
                 (unless (probe-file newpath)
                   (let ((namestring (coerce (native-namestring newpath)
                                             'string)))
                     (when verbose
                       (format *standard-output*
                               "~&creating directory: ~A~%"
                               namestring))
                     (sb!unix:unix-mkdir namestring mode)
                     ;; MKDIR's return value is ignored; success is
                     ;; judged by re-probing, with restarts offered if
                     ;; the directory still isn't there.
                     (unless (probe-file newpath)
                       (restart-case (error
                                      'simple-file-error
                                      :pathname pathspec
                                      :format-control
                                      "can't create directory ~A"
                                      :format-arguments (list namestring))
                         (retry ()
                           :report "Retry directory creation."
                           (ensure-directories-exist
                            pathspec
                            :verbose verbose :mode mode))
                         (continue ()
                           :report
                           "Continue as if directory creation was successful."
                           nil)))
                     (setf created-p t)))))
      ;; Per ANSI: return the original pathspec, plus whether anything
      ;; was (attempted to be) created.
      (values pathspec created-p))))
(/show0 "filesys.lisp 1000")
|
fd4b7c4627f92a42cb5182c4e978ca4c839704431eacfda07aa3d74fd1c06b56 | kadena-io/chainweb-node | TransactionExec.hs | # LANGUAGE AllowAmbiguousTypes #
{-# LANGUAGE BangPatterns #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RankNTypes #-}
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
-- |
-- Module: Chainweb.Pact.TransactionExec
-- Copyright: Copyright © 2018 Kadena LLC.
-- License: (see the file LICENSE)
-- Maintainer: <>, <>
-- Stability: experimental
--
-- Pact command execution and coin-contract transaction logic for Chainweb
--
module Chainweb.Pact.TransactionExec
( -- * Transaction Execution
applyCmd
, applyGenesisCmd
, applyLocal
, applyExec
, applyExec'
, applyContinuation
, applyContinuation'
, runPayload
, readInitModules
, enablePactEvents'
, enforceKeysetFormats'
, disablePact40Natives
-- * Gas Execution
, buyGas
-- * Coinbase Execution
, applyCoinbase
, EnforceCoinbaseFailure(..)
-- * Command Helpers
, publicMetaOf
, networkIdOf
, gasSupplyOf
-- * Utilities
, buildExecParsedCode
, mkMagicCapSlot
, listErrMsg
, initialGasOf
) where
import Control.DeepSeq
import Control.Lens
import Control.Monad
import Control.Monad.Catch
import Control.Monad.Reader
import Control.Monad.State.Strict
import Control.Monad.Trans.Maybe
import Data.Aeson hiding ((.=))
import qualified Data.Aeson as A
import Data.Bifunctor
import qualified Data.ByteString as B
import qualified Data.ByteString.Short as SB
import Data.Decimal (Decimal, roundTo)
import Data.Default (def)
import Data.Foldable (for_, traverse_, foldl')
import Data.IORef
import qualified Data.HashMap.Strict as HM
import Data.Maybe
import qualified Data.Set as S
import Data.Text (Text)
import qualified Data.Text as T
-- internal Pact modules
import Pact.Eval (eval, liftTerm)
import Pact.Gas (freeGasEnv)
import Pact.Interpreter
import Pact.Native.Capabilities (evalCap)
import Pact.Parse (ParsedDecimal(..))
import Pact.Runtime.Capabilities (popCapStack)
import Pact.Runtime.Utils (lookupModule)
import Pact.Types.Capability
import Pact.Types.Command
import Pact.Types.Hash as Pact
import Pact.Types.KeySet
import Pact.Types.Logger hiding (logError)
import Pact.Types.PactValue
import Pact.Types.Pretty
import Pact.Types.RPC
import Pact.Types.Runtime
import Pact.Types.Server
import Pact.Types.SPV
-- internal Chainweb modules
import Chainweb.BlockHeader
import Chainweb.BlockHeight
import Chainweb.Mempool.Mempool (requestKeyToTransactionHash)
import Chainweb.Miner.Pact
import Chainweb.Pact.Service.Types
import Chainweb.Pact.Templates
import Chainweb.Pact.Transactions.UpgradeTransactions
import Chainweb.Pact.Types hiding (logError)
import Chainweb.Transaction
import Chainweb.Utils (encodeToByteString, sshow, tryAllSynchronous, T2(..), T3(..))
import Chainweb.Version as V
-- -------------------------------------------------------------------------- --
-- | "Magic" capability 'COINBASE' used in the coin contract to
-- constrain coinbase calls.
--
magic_COINBASE :: CapSlot UserCapability
magic_COINBASE = mkMagicCapSlot "COINBASE"
-- | "Magic" capability 'GAS' used in the coin contract to
-- constrain gas buy/redeem calls.
--
magic_GAS :: CapSlot UserCapability
magic_GAS = mkMagicCapSlot "GAS"
-- | "Magic" capability 'GENESIS' used in the coin contract to
-- constrain genesis-only allocations
--
magic_GENESIS :: CapSlot UserCapability
magic_GENESIS = mkMagicCapSlot "GENESIS"
-- | The main entry point to executing transactions. From here,
-- 'applyCmd' assembles the command environment for a command and
-- orchestrates gas buys/redemption, and executing payloads.
--
applyCmd
    :: ChainwebVersion
    -> Logger
      -- ^ Pact logger
    -> Maybe Logger
      -- ^ Pact gas logger
    -> PactDbEnv p
      -- ^ Pact db environment
    -> Miner
      -- ^ The miner chosen to mine the block
    -> GasModel
      -- ^ Gas model (pact Service config)
    -> TxContext
      -- ^ tx metadata and parent header
    -> SPVSupport
      -- ^ SPV support (validates cont proofs)
    -> Command (Payload PublicMeta ParsedCode)
      -- ^ command with payload to execute
    -> Gas
      -- ^ initial gas used
    -> ModuleCache
      -- ^ cached module state
    -> ApplyCmdExecutionContext
      -- ^ is this a local or send execution context?
    -> IO (T3 (CommandResult [TxLog Value]) ModuleCache (S.Set PactWarning))
applyCmd v logger gasLogger pdbenv miner gasModel txCtx spv cmd initialGas mcache0 callCtx = do
    T2 cr st <- runTransactionM cenv txst applyBuyGas
    let cache = _txCache st
        warns = _txWarnings st
    pure $ T3 cr cache warns
  where
    -- Before chainweb-2.17 gas purchase ran on the free gas model.
    stGasModel
      | chainweb217Pact' = gasModel
      | otherwise = _geGasModel freeGasEnv
    txst = TransactionState mcache0 mempty 0 Nothing stGasModel mempty

    -- Fork-gated pact feature flags, derived from the current height.
    executionConfigNoHistory = mkExecutionConfig
      $ FlagDisableHistoryInTransactionalMode
      : ( [ FlagOldReadOnlyBehavior | isPactBackCompatV16 ]
      ++ [ FlagPreserveModuleNameBug | not isModuleNameFix ]
      ++ [ FlagPreserveNsModuleInstallBug | not isModuleNameFix2 ]
      ++ enablePactEvents' txCtx
      ++ enablePact40 txCtx
      ++ enablePact420 txCtx
      ++ enforceKeysetFormats' txCtx
      ++ enablePactModuleMemcheck txCtx
      ++ enablePact43 txCtx
      ++ enablePact431 txCtx
      ++ enablePact44 txCtx
      ++ enablePact45 txCtx
      ++ enableNewTrans txCtx
      ++ enablePact46 txCtx )

    cenv = TransactionEnv Transactional pdbenv logger gasLogger (ctxToPublicData txCtx) spv nid gasPrice
      requestKey (fromIntegral gasLimit) executionConfigNoHistory

    requestKey = cmdToRequestKey cmd
    gasPrice = view cmdGasPrice cmd
    gasLimit = view cmdGasLimit cmd
    nid = networkIdOf cmd
    currHeight = ctxCurrentBlockHeight txCtx
    isModuleNameFix = enableModuleNameFix v currHeight
    isModuleNameFix2 = enableModuleNameFix2 v currHeight
    isPactBackCompatV16 = pactBackCompat_v16 v currHeight
    chainweb213Pact' = chainweb213Pact (ctxVersion txCtx) (ctxCurrentBlockHeight txCtx)
    chainweb217Pact' = chainweb217Pact After (ctxVersion txCtx) (ctxCurrentBlockHeight txCtx)
    toEmptyPactError (PactError errty _ _ _) = PactError errty def [] mempty

    toOldListErr pe = pe { peDoc = listErrMsg }
    isOldListErr = \case
      PactError EvalError _ _ doc -> "Unknown primitive" `T.isInfixOf` renderCompactText' doc
      _ -> False

    -- On payload failure the full gas limit is charged before redeeming.
    redeemAllGas r = do
      txGasUsed .= fromIntegral gasLimit
      applyRedeem r

    applyBuyGas =
      -- A gas-purchase failure rejects the transaction outright.
      catchesPactError (buyGas isPactBackCompatV16 cmd miner) >>= \case
        Left e -> view txRequestKey >>= \rk ->
          throwM $ BuyGasFailure $ GasPurchaseFailure (requestKeyToTransactionHash rk) e
        Right _ -> checkTooBigTx initialGas gasLimit applyPayload redeemAllGas

    applyPayload = do
      txGasModel .= gasModel
      if chainweb217Pact' then txGasUsed += initialGas
      else txGasUsed .= initialGas
      cr <- catchesPactError $! runPayload cmd managedNamespacePolicy
      case cr of
        Left e
          -- 2.17+: scrub error details from results of sends.
          | chainweb217Pact' -> do
              let e' = case callCtx of
                    ApplyLocal -> e
                    ApplySend -> toEmptyPactError e
              r <- jsonErrorResult e' "tx failure for request key when running cmd"
              redeemAllGas r
          | chainweb213Pact' || not (isOldListErr e) -> do
              r <- jsonErrorResult e "tx failure for request key when running cmd"
              redeemAllGas r
          -- Pre-2.13: replay-compatible legacy "list" error message.
          | otherwise -> do
              r <- jsonErrorResult (toOldListErr e) "tx failure for request key when running cmd"
              redeemAllGas r
        Right r -> applyRedeem r

    applyRedeem cr = do
      -- Gas redemption itself runs on the free gas model.
      txGasModel .= (_geGasModel freeGasEnv)
      r <- catchesPactError $! redeemGas cmd
      case r of
        Left e ->
          -- redeem gas failure is fatal (block-failing) so miner doesn't lose coins
          fatal $ "tx failure for request key while redeeming gas: " <> sshow e
        Right es -> do
          logs <- use txLogs
          return $! set crLogs (Just logs) $ over crEvents (es ++) cr
-- | Verbatim legacy error text produced by the old gas table for
-- unknown primitives; substituted via 'toOldListErr' in 'applyCmd' on
-- the pre-'chainweb213Pact' code path. Must remain byte-identical for
-- historical replay.
listErrMsg :: Doc
listErrMsg =
    "Unknown primitive \"list\" in determining cost of GUnreduced\nCallStack (from HasCallStack):\n error, called at src/Pact/Gas/Table.hs:209:22 in pact-4.2.0-fe223ad86f1795ba381192792f450820557e59c2926c747bf2aa6e398394bee6:Pact.Gas.Table"
-- | Apply a genesis command: runs with zero gas, the free gas model,
-- the GENESIS and COINBASE magic capabilities installed, and several
-- later pact features disabled.
applyGenesisCmd
    :: Logger
      -- ^ Pact logger
    -> PactDbEnv p
      -- ^ Pact db environment
    -> SPVSupport
      -- ^ SPV support (validates cont proofs)
    -> Command (Payload PublicMeta ParsedCode)
      -- ^ command with payload to execute
    -> IO (T2 (CommandResult [TxLog Value]) ModuleCache)
applyGenesisCmd logger dbEnv spv cmd =
    second _txCache <$!> runTransactionM tenv txst go
  where
    nid = networkIdOf cmd
    rk = cmdToRequestKey cmd
    tenv = TransactionEnv
        { _txMode = Transactional
        , _txDbEnv = dbEnv
        , _txLogger = logger
        , _txGasLogger = Nothing
        , _txPublicData = def
        , _txSpvSupport = spv
        , _txNetworkId = nid
        , _txGasPrice = 0.0
        , _txRequestKey = rk
        , _txGasLimit = 0
        , _txExecutionConfig = mkExecutionConfig
          [ FlagDisablePact40
          , FlagDisablePact420
          , FlagDisableInlineMemCheck
          , FlagDisablePact43
          , FlagDisablePact44
          , FlagDisablePact45
          ]
        }
    txst = TransactionState
        { _txCache = mempty
        , _txLogs = mempty
        , _txGasUsed = 0
        , _txGasId = Nothing
        , _txGasModel = _geGasModel freeGasEnv
        , _txWarnings = mempty
        }

    interp = initStateInterpreter
      $ initCapabilities [magic_GENESIS, magic_COINBASE]

    go = do
      cr <- catchesPactError $! runGenesis cmd permissiveNamespacePolicy interp
      case cr of
        -- Genesis failure is fatal: the chain cannot start without it.
        Left e -> fatal $ "Genesis command failed: " <> sshow e
        Right r -> r <$ debug "successful genesis tx for request key"
-- | Apply the coinbase (miner reward) transaction for a block,
-- followed by any forking upgrade transactions due at this height.
applyCoinbase
    :: ChainwebVersion
    -> Logger
      -- ^ Pact logger
    -> PactDbEnv p
      -- ^ Pact db environment
    -> Miner
      -- ^ The miner chosen to mine the block
    -> ParsedDecimal
      -- ^ reward
    -> TxContext
      -- ^ tx metadata and parent header
    -> EnforceCoinbaseFailure
      -- ^ enforce coinbase failure or not
    -> CoinbaseUsePrecompiled
      -- ^ always enable precompilation
    -> ModuleCache
    -> IO (T2 (CommandResult [TxLog Value]) (Maybe ModuleCache))
applyCoinbase v logger dbEnv (Miner mid mks@(MinerKeys mk)) reward@(ParsedDecimal d) txCtx
  (EnforceCoinbaseFailure enfCBFailure) (CoinbaseUsePrecompiled enablePC) mc
  | fork1_3InEffect || enablePC = do
    -- Precompiled coinbase term: no parsing at block time.
    when chainweb213Pact' $ enforceKeyFormats
        (\k -> throwM $ CoinbaseFailure $ "Invalid miner key: " <> sshow k)
        mk
    let (cterm, cexec) = mkCoinbaseTerm mid mks reward
        interp = Interpreter $ \_ -> do put initState; fmap pure (eval cterm)
    go interp cexec
  | otherwise = do
    cexec <- mkCoinbaseCmd mid mks reward
    let interp = initStateInterpreter initState
    go interp cexec
  where
    chainweb213Pact' = chainweb213Pact v bh
    fork1_3InEffect = vuln797Fix v cid bh
    throwCritical = fork1_3InEffect || enfCBFailure
    ec = mkExecutionConfig $
         [ FlagDisableModuleInstall
         , FlagDisableHistoryInTransactionalMode ] ++
         enablePactEvents' txCtx ++
         enablePact40 txCtx ++
         enablePact420 txCtx ++
         enablePactModuleMemcheck txCtx ++
         enablePact43 txCtx ++
         enablePact431 txCtx ++
         enablePact44 txCtx ++
         enablePact45 txCtx
    tenv = TransactionEnv Transactional dbEnv logger Nothing (ctxToPublicData txCtx) noSPVSupport
           Nothing 0.0 rk 0 ec
    txst = TransactionState mc mempty 0 Nothing (_geGasModel freeGasEnv) mempty
    initState = setModuleCache mc $ initCapabilities [magic_COINBASE]
    rk = RequestKey chash
    parent = _tcParentHeader txCtx

    bh = ctxCurrentBlockHeight txCtx
    cid = V._chainId parent
    chash = Pact.Hash $ SB.toShort $ encodeToByteString $ _blockHash $ _parentHeader parent
        -- NOTE: it holds that @ _pdPrevBlockHash pd == encode _blockHash@
        -- NOTE: chash includes the /quoted/ text of the parent header.

    go interp cexec = evalTransactionM tenv txst $! do
      cr <- catchesPactError $!
        applyExec' 0 interp cexec mempty chash managedNamespacePolicy
      case cr of
        Left e
          | throwCritical -> throwM $ CoinbaseFailure $ sshow e
          | otherwise -> (`T2` Nothing) <$> jsonErrorResult e "coinbase tx failure"
        Right er -> do
          debug
            $! "successful coinbase of "
            <> (T.take 18 $ sshow d)
            <> " to "
            <> sshow mid

          upgradedModuleCache <- applyUpgrades v cid bh
          void $! applyTwentyChainUpgrade v cid bh

          -- NOTE: When adding new forking transactions that are injected
          -- into a block's coinbase transaction, please add a corresponding case
          -- in Rosetta's `matchLogs` function and follow the pattern.
          --
          -- Otherwise, Rosetta tooling has no idea that these upgrade transactions
          -- occurred.
          -- This is especially important if the transaction changes an account's balance.
          -- Rosetta tooling will error out if an account's balance changed and it
          -- didn't see the transaction that caused the change.
          --

          logs <- use txLogs

          return $! T2
            (CommandResult rk (_erTxId er) (PactResult (Right (last $ _erOutput er)))
              (_erGas er) (Just $ logs) (_erExec er) Nothing (_erEvents er))
            upgradedModuleCache
-- | Run a command in local mode ('TransactionEnv' mode 'Local'):
-- no gas is bought or redeemed, and the tx context's public data is
-- attached to the result metadata.
applyLocal
    :: Logger
      -- ^ Pact logger
    -> Maybe Logger
      -- ^ Pact gas logger
    -> PactDbEnv p
      -- ^ Pact db environment
    -> GasModel
      -- ^ Gas model (pact Service config)
    -> TxContext
      -- ^ tx metadata and parent header
    -> SPVSupport
      -- ^ SPV support (validates cont proofs)
    -> Command PayloadWithText
      -- ^ command with payload to execute
    -> ModuleCache
    -> ExecutionConfig
    -> IO (CommandResult [TxLog Value])
applyLocal logger gasLogger dbEnv gasModel txCtx spv cmdIn mc execConfig =
    evalTransactionM tenv txst go
  where
    cmd = payloadObj <$> cmdIn
    rk = cmdToRequestKey cmd
    nid = networkIdOf cmd
    chash = toUntypedHash $ _cmdHash cmd
    signers = _pSigners $ _cmdPayload cmd
    gasPrice = view cmdGasPrice cmd
    gasLimit = view cmdGasLimit cmd
    tenv = TransactionEnv Local dbEnv logger gasLogger (ctxToPublicData txCtx) spv nid gasPrice
           rk (fromIntegral gasLimit) execConfig
    txst = TransactionState mc mempty 0 Nothing gasModel mempty
    -- Size-based gas charge for the raw command text.
    gas0 = initialGasOf (_cmdPayload cmdIn)

    applyPayload m = do
      interp <- gasInterpreter gas0
      cr <- catchesPactError $! case m of
        Exec em ->
          applyExec gas0 interp em signers chash managedNamespacePolicy
        Continuation cm ->
          applyContinuation gas0 interp cm signers chash managedNamespacePolicy
      case cr of
        Left e -> jsonErrorResult e "applyLocal"
        Right r -> return $! r { _crMetaData = Just (toJSON $ ctxToPublicData' txCtx) }

    go = checkTooBigTx gas0 gasLimit (applyPayload $ _pPayload $ _cmdPayload cmd) return
-- | Populate the initial module cache by evaluating references to the
-- core modules (coin and, pre-2.17, its companion interfaces).
readInitModules
    :: Logger
      -- ^ Pact logger
    -> PactDbEnv p
      -- ^ Pact db environment
    -> TxContext
      -- ^ tx metadata and parent header
    -> IO ModuleCache
readInitModules logger dbEnv txCtx
  | chainweb217Pact' = evalTransactionM tenv txst goCw217
  | otherwise = evalTransactionM tenv txst go
  where
    -- guarding chainweb 2.17 here to allow for
    -- cache purging everything but coin and its
    -- dependencies.
    chainweb217Pact' = chainweb217Pact
      After
      (ctxVersion txCtx)
      (ctxCurrentBlockHeight txCtx)
    parent = _tcParentHeader txCtx
    v = _chainwebVersion parent
    h = _blockHeight (_parentHeader parent) + 1
    rk = RequestKey chash
    nid = Nothing
    chash = pactInitialHash
    tenv = TransactionEnv Local dbEnv logger Nothing (ctxToPublicData txCtx) noSPVSupport nid 0.0
           rk 0 def
    txst = TransactionState mempty mempty 0 Nothing (_geGasModel freeGasEnv) mempty
    interp = defaultInterpreter
    die msg = throwM $ PactInternalError $ "readInitModules: " <> msg
    mkCmd = buildExecParsedCode (Just (v, h)) Nothing
    -- Run CMD and return its first output, failing loudly otherwise.
    run msg cmd = do
      er <- catchesPactError $!
        applyExec' 0 interp cmd [] chash permissiveNamespacePolicy
      case er of
        Left e -> die $ msg <> ": failed: " <> sshow e
        Right r -> case _erOutput r of
          [] -> die $ msg <> ": empty result"
          (o:_) -> return o

    go :: TransactionM p ModuleCache
    go = do
      -- see if fungible-v2 is there
      checkCmd <- liftIO $ mkCmd "(contains \"fungible-v2\" (list-modules))"
      checkFv2 <- run "check fungible-v2" checkCmd
      hasFv2 <- case checkFv2 of
        (PLiteral (LBool b)) -> return b
        t -> die $ "got non-bool result from module read: " <> T.pack (showPretty t)

      -- see if fungible-xchain-v1 is there
      checkCmdx <- liftIO $ mkCmd "(contains \"fungible-xchain-v1\" (list-modules))"
      checkFx <- run "check fungible-xchain-v1" checkCmdx
      hasFx <- case checkFx of
        (PLiteral (LBool b)) -> return b
        t -> die $ "got non-bool result from module read: " <> T.pack (showPretty t)

      -- load modules by referencing members
      refModsCmd <- liftIO $ mkCmd $ T.intercalate " " $
        [ "coin.MINIMUM_PRECISION"
        , "ns.GUARD_SUCCESS"
        , "gas-payer-v1.GAS_PAYER"
        , "fungible-v1.account-details"] ++
        [ "fungible-v2.account-details" | hasFv2 ] ++
        [ "(let ((m:module{fungible-xchain-v1} coin)) 1)" | hasFx ]
      void $ run "load modules" refModsCmd

      -- return loaded cache
      use txCache

    -- Only load coin and its dependencies for chainweb >= 2.17
    -- Note: no need to check if things are there, because this
    -- requires a block height that witnesses the invariant.
    --
    -- if this changes, we must change the filter in 'updateInitCache'
    goCw217 :: TransactionM p ModuleCache
    goCw217 = do
      coinDepCmd <- liftIO $ mkCmd "coin.MINIMUM_PRECISION"
      void $ run "load modules" coinDepCmd
      use txCache
-- | Apply (forking) upgrade transactions and module cache updates
-- at a particular blockheight.
--
-- This is the place where we consistently /introduce/ new transactions
-- into the blockchain along with module cache updates. The only other
-- places are Pact Service startup and the
-- empty-module-cache-after-initial-rewind case caught in 'execTransactions'
-- which both hit the database.
--
applyUpgrades
  :: ChainwebVersion
  -> V.ChainId
  -> BlockHeight
  -> TransactionM p (Maybe ModuleCache)
applyUpgrades v cid height
     -- At most one upgrade applies at a given height; the result is
     -- the module-cache update to install, or Nothing.
     | coinV2Upgrade v cid height = applyCoinV2
     | pact4coin3Upgrade At v height = applyCoinV3
     | chainweb214Pact At v height = applyCoinV4
     | chainweb215Pact At v height = applyCoinV5
     | chainweb217Pact At v height = filterModuleCache
     | otherwise = return Nothing
  where
    -- Grant coin-module admin so remediation txs may modify the coin
    -- contract in place.
    installCoinModuleAdmin = set (evalCapabilities . capModuleAdmin) $ S.singleton (ModuleName "coin" Nothing)

    applyCoinV2 = applyTxs (upgradeTransactions v cid) [FlagDisableInlineMemCheck, FlagDisablePact43, FlagDisablePact45]

    applyCoinV3 = applyTxs coinV3Transactions [FlagDisableInlineMemCheck, FlagDisablePact43, FlagDisablePact45]

    applyCoinV4 = applyTxs coinV4Transactions [FlagDisablePact45]

    applyCoinV5 = applyTxs coinV5Transactions [FlagDisablePact45]

    -- At the 2.17 fork the cache is purged down to the coin module.
    filterModuleCache = do
      mc <- use txCache
      pure $ Just $ HM.filterWithKey (\k _ -> k == "coin") mc

    applyTxs txsIO flags = do
      infoLog "Applying upgrade!"
      txs <- map (fmap payloadObj) <$> liftIO txsIO
      --
      -- In order to prime the module cache with all new modules for subsequent
      -- blocks, the caches from each tx are collected and the union of all
      -- those caches is returned. The calling code adds this new cache to the
      -- init cache in the pact service state (_psInitCache).
      --
      let execConfig = mkExecutionConfig flags
      caches <- local (set txExecutionConfig execConfig) $ mapM applyTx txs
      return $ Just (HM.unions caches)

    interp = initStateInterpreter
        $ installCoinModuleAdmin
        $ initCapabilities [mkMagicCapSlot "REMEDIATE"]

    applyTx tx = do
      infoLog $ "Running upgrade tx " <> sshow (_cmdHash tx)
      tryAllSynchronous (runGenesis tx permissiveNamespacePolicy interp) >>= \case
        Right _ -> use txCache
        -- An upgrade tx must not fail silently: rethrow.
        Left e -> do
          logError $ "Upgrade transaction failed! " <> sshow e
          throwM e
-- | Inject the 20-chain rebalance transactions at the fork height on
-- the relevant chain. Unlike 'applyUpgrades' this does not produce a
-- module cache update.
applyTwentyChainUpgrade
    :: ChainwebVersion
    -> V.ChainId
    -> BlockHeight
    -> TransactionM p ()
applyTwentyChainUpgrade v cid bh
    | to20ChainRebalance v cid bh = do
        txlist <- liftIO $ twentyChainUpgradeTransactions v cid

        infoLog $ "Applying 20-chain upgrades on chain " <> sshow cid

        let txs = fmap payloadObj <$> txlist

        --
        -- Note (emily): This function does not need to care about
        -- module caching, because it is already seeded with the correct cache
        -- state, and is not updating the module cache, unlike 'applyUpgrades'.
        --
        traverse_ applyTx txs
    | otherwise = return ()
  where
    applyTx tx = do
      infoLog $ "Running 20-chain upgrade tx " <> sshow (_cmdHash tx)
      let i = initStateInterpreter
            $ initCapabilities [mkMagicCapSlot "REMEDIATE"]
      r <- tryAllSynchronous (runGenesis tx permissiveNamespacePolicy i)
      case r of
        -- Failure is rethrown: the rebalance must apply completely.
        Left e -> do
          logError $ "Upgrade transaction failed: " <> sshow e
          void $! throwM e
        Right _ -> return ()
-- | Wrap a 'PactError' into a failed 'CommandResult', logging the message
-- and request key at INFO. The full gas limit is reported as consumed,
-- since a payload error charges all gas.
jsonErrorResult
    :: PactError
    -> Text
    -> TransactionM p (CommandResult [TxLog Value])
jsonErrorResult err msg = do
    logs <- use txLogs
    gas <- view txGasLimit -- error means all gas was charged
    rk <- view txRequestKey
    l <- view txLogger

    liftIO
      $! logLog l "INFO"
      $! T.unpack msg
      <> ": " <> show rk
      <> ": " <> show err

    return $! CommandResult rk Nothing (PactResult (Left err))
      gas (Just logs) Nothing Nothing []
-- | Dispatch a command payload to 'applyExec' or 'applyContinuation',
-- seeding the interpreter with the gas already consumed so far.
runPayload
    :: Command (Payload PublicMeta ParsedCode)
    -> NamespacePolicy
    -> TransactionM p (CommandResult [TxLog Value])
runPayload cmd nsp = do
    g0 <- use txGasUsed
    interp <- gasInterpreter g0

    case payload of
      Exec pm ->
        applyExec g0 interp pm signers chash nsp
      Continuation ym ->
        applyContinuation g0 interp ym signers chash nsp
  where
    signers = _pSigners $ _cmdPayload cmd
    chash = toUntypedHash $ _cmdHash cmd
    payload = _pPayload $ _cmdPayload cmd
-- | Run genesis transaction payloads with custom interpreter.
-- Genesis transactions charge no gas (initial gas is 0).
runGenesis
    :: Command (Payload PublicMeta ParsedCode)
    -> NamespacePolicy
    -> Interpreter p
    -> TransactionM p (CommandResult [TxLog Value])
runGenesis cmd nsp interp = case payload of
    Exec pm ->
      applyExec 0 interp pm signers chash nsp
    Continuation ym ->
      applyContinuation 0 interp ym signers chash nsp
  where
    signers = _pSigners $ _cmdPayload cmd
    chash = toUntypedHash $ _cmdHash cmd
    payload = _pPayload $ _cmdPayload cmd
-- | Execute an 'ExecMsg' and return the result with module cache
--
applyExec
    :: Gas
    -> Interpreter p
    -> ExecMsg ParsedCode
    -> [Signer]
    -> Hash
    -> NamespacePolicy
    -> TransactionM p (CommandResult [TxLog Value])
applyExec initialGas interp em senderSigs hsh nsp = do
    EvalResult{..} <- applyExec' initialGas interp em senderSigs hsh nsp
    for_ _erLogGas $ \gl -> gasLog $ "gas logs: " <> sshow gl
    logs <- use txLogs
    rk <- view txRequestKey

    -- concat tx warnings with eval warnings
    txWarnings <>= _erWarnings

    -- applyExec enforces non-empty expression set so `last` ok
    -- forcing it here for lazy errors. TODO NFData the Pacts
    lastResult <- return $!! last _erOutput
    return $! CommandResult rk _erTxId (PactResult (Right lastResult))
      _erGas (Just logs) _erExec Nothing _erEvents
-- | Variation on 'applyExec' that returns 'EvalResult' as opposed to
-- wrapping it up in a JSON result.
--
applyExec'
    :: Gas
    -> Interpreter p
    -> ExecMsg ParsedCode
    -> [Signer]
    -> Hash
    -> NamespacePolicy
    -> TransactionM p EvalResult
applyExec' initialGas interp (ExecMsg parsedCode execData) senderSigs hsh nsp
    | null (_pcExps parsedCode) = throwCmdEx "No expressions found"
    | otherwise = do

      pactFlags <- asks _txExecutionConfig

      -- remove fork-gated natives from the environment per execution flags
      eenv <- mkEvalEnv nsp (MsgData execData Nothing hsh senderSigs)
        <&> disablePact40Natives pactFlags
        <&> disablePact420Natives pactFlags
        <&> disablePact43Natives pactFlags
        <&> disablePact431Natives pactFlags
        <&> disablePact46Natives pactFlags

      setEnvGas initialGas eenv

      er <- liftIO $! evalExec interp eenv parsedCode

      for_ (_erExec er) $ \pe -> debug
        $ "applyExec: new pact added: "
        <> sshow (_pePactId pe, _peStep pe, _peYield pe, _peExecuted pe)

      -- set log + cache updates + used gas
      setTxResultState er

      return er
-- | Disable Pact events below the fork height that activates them.
enablePactEvents' :: TxContext -> [ExecutionFlag]
enablePactEvents' ctx =
    if enablePactEvents (ctxVersion ctx) (ctxCurrentBlockHeight ctx)
      then []
      else [FlagDisablePactEvents]
-- | Enforce keyset key formats once the corresponding fork is active.
enforceKeysetFormats' :: TxContext -> [ExecutionFlag]
enforceKeysetFormats' ctx =
    if enforceKeysetFormats (ctxVersion ctx) (ctxCurrentBlockHeight ctx)
      then [FlagEnforceKeyFormats]
      else []
-- | Disable Pact 4.0 features below the coin-v3 fork height.
enablePact40 :: TxContext -> [ExecutionFlag]
enablePact40 ctx =
    if pact4coin3Upgrade After (ctxVersion ctx) (ctxCurrentBlockHeight ctx)
      then []
      else [FlagDisablePact40]
-- | Disable Pact 4.2.0 features below their fork height.
enablePact420 :: TxContext -> [ExecutionFlag]
enablePact420 ctx =
    if pact420Upgrade (ctxVersion ctx) (ctxCurrentBlockHeight ctx)
      then []
      else [FlagDisablePact420]
-- | Disable inline module memory checking below the chainweb-2.13 fork.
enablePactModuleMemcheck :: TxContext -> [ExecutionFlag]
enablePactModuleMemcheck ctx =
    if chainweb213Pact (ctxVersion ctx) (ctxCurrentBlockHeight ctx)
      then []
      else [FlagDisableInlineMemCheck]
-- | Disable Pact 4.3 features below the chainweb-2.14 fork.
enablePact43 :: TxContext -> [ExecutionFlag]
enablePact43 ctx =
    if chainweb214Pact After (ctxVersion ctx) (ctxCurrentBlockHeight ctx)
      then []
      else [FlagDisablePact43]
-- | Disable Pact 4.3.1 features below the chainweb-2.15 fork.
enablePact431 :: TxContext -> [ExecutionFlag]
enablePact431 ctx =
    if chainweb215Pact After (ctxVersion ctx) (ctxCurrentBlockHeight ctx)
      then []
      else [FlagDisablePact431]
-- | Disable Pact 4.4 features below the chainweb-2.16 fork.
enablePact44 :: TxContext -> [ExecutionFlag]
enablePact44 ctx =
    if chainweb216Pact After (ctxVersion ctx) (ctxCurrentBlockHeight ctx)
      then []
      else [FlagDisablePact44]
-- | Disable Pact 4.5 features below the chainweb-2.17 fork.
enablePact45 :: TxContext -> [ExecutionFlag]
enablePact45 ctx =
    if chainweb217Pact After (ctxVersion ctx) (ctxCurrentBlockHeight ctx)
      then []
      else [FlagDisablePact45]
-- | Disable the new transcendental-function implementation below its fork.
enableNewTrans :: TxContext -> [ExecutionFlag]
enableNewTrans ctx =
    if pact44NewTrans (ctxVersion ctx) (ctxCurrentBlockHeight ctx)
      then []
      else [FlagDisableNewTrans]
-- | Disable Pact 4.6 features below the chainweb-2.18 fork.
enablePact46 :: TxContext -> [ExecutionFlag]
enablePact46 ctx =
    if chainweb218Pact (ctxVersion ctx) (ctxCurrentBlockHeight ctx)
      then []
      else [FlagDisablePact46]
-- | Execute a 'ContMsg' and return the command result and module cache
--
applyContinuation
    :: Gas
    -> Interpreter p
    -> ContMsg
    -> [Signer]
    -> Hash
    -> NamespacePolicy
    -> TransactionM p (CommandResult [TxLog Value])
applyContinuation initialGas interp cm senderSigs hsh nsp = do
    EvalResult{..} <- applyContinuation' initialGas interp cm senderSigs hsh nsp
    for_ _erLogGas $ \gl -> gasLog $ "gas logs: " <> sshow gl
    logs <- use txLogs
    rk <- view txRequestKey

    -- set tx warnings to eval warnings
    txWarnings <>= _erWarnings

    -- last safe here because cont msg is guaranteed one exp
    return $! (CommandResult rk _erTxId (PactResult (Right (last _erOutput)))
      _erGas (Just logs) _erExec Nothing) _erEvents
-- | Seed the eval environment's gas ref with the given initial gas.
setEnvGas :: Gas -> EvalEnv e -> TransactionM p ()
setEnvGas initialGas eenv = liftIO $ writeIORef (view eeGas eenv) initialGas
-- | Execute a 'ContMsg' and return just eval result, not wrapped in a
-- 'CommandResult' wrapper
--
applyContinuation'
    :: Gas
    -> Interpreter p
    -> ContMsg
    -> [Signer]
    -> Hash
    -> NamespacePolicy
    -> TransactionM p EvalResult
applyContinuation' initialGas interp cm@(ContMsg pid s rb d _) senderSigs hsh nsp = do

    pactFlags <- asks _txExecutionConfig

    -- remove fork-gated natives from the environment per execution flags
    eenv <- mkEvalEnv nsp (MsgData d pactStep hsh senderSigs)
      <&> disablePact40Natives pactFlags
      <&> disablePact420Natives pactFlags
      <&> disablePact43Natives pactFlags
      <&> disablePact46Natives pactFlags

    setEnvGas initialGas eenv

    er <- liftIO $! evalContinuation interp eenv cm

    -- set log + cache updates + used gas
    setTxResultState er

    return er
  where
    pactStep = Just $ PactStep s rb pid Nothing
-- | Build and execute 'coin.buygas' command from miner info and user command
-- info (see 'TransactionExec.applyCmd')
--
-- see: 'pact/coin-contract/coin.pact#fund-tx'
--
buyGas :: Bool -> Command (Payload PublicMeta ParsedCode) -> Miner -> TransactionM p ()
buyGas isPactBackCompatV16 cmd (Miner mid mks) = go
  where
    sender = view (cmdPayload . pMeta . pmSender) cmd

    initState mc logGas =
      set evalLogGas (guard logGas >> Just [("GBuyGas",0)]) $ setModuleCache mc $ initCapabilities [magic_GAS]

    -- wrap the buy-gas evaluation in a GAS_PAYER capability acquisition,
    -- if one of the tx signers carries such a capability
    run input = do
      (findPayer isPactBackCompatV16 cmd) >>= \r -> case r of
        Nothing -> input
        Just withPayerCap -> withPayerCap input

    (Hash chash) = toUntypedHash (_cmdHash cmd)
    bgHash = Hash (chash <> "-buygas")

    go = do
      mcache <- use txCache
      supply <- gasSupplyOf <$> view txGasLimit <*> view txGasPrice
      logGas <- isJust <$> view txGasLogger

      let (buyGasTerm, buyGasCmd) = mkBuyGasTerm mid mks sender supply
          interp mc = Interpreter $ \_input ->
            put (initState mc logGas) >> run (pure <$> eval buyGasTerm)

      result <- applyExec' 0 (interp mcache) buyGasCmd
        (_pSigners $ _cmdPayload cmd) bgHash managedNamespacePolicy

      case _erExec result of
        Nothing ->
          -- should never occur: would mean coin.fund-tx is not a pact
          fatal "buyGas: Internal error - empty continuation"
        Just pe -> void $! txGasId .= (Just $! GasId (_pePactId pe))
-- | Locate a gas payer for the command: a signer carrying a @GAS_PAYER@
-- capability from a module that implements the @gas-payer-v1@ interface.
-- Returns a wrapper that acquires that capability around the given
-- evaluation, or 'Nothing' when no payer capability is present.
findPayer
  :: Bool
  -> Command (Payload PublicMeta ParsedCode)
  -> Eval e (Maybe (Eval e [Term Name] -> Eval e [Term Name]))
findPayer isPactBackCompatV16 cmd = runMaybeT $ do
    (!m,!qn,!as) <- MaybeT findPayerCap
    pMod <- MaybeT $ lookupModule qn m
    capRef <- MaybeT $ return $ lookupIfaceModRef qn pMod
    return $ runCap (getInfo qn) capRef as
  where
    setEnvMsgBody v e = set eeMsgBody v e

    -- scan message signatures for a GAS_PAYER capability
    findPayerCap :: Eval e (Maybe (ModuleName,QualifiedName,[PactValue]))
    findPayerCap = preview $ eeMsgSigs . folded . folded . to sigPayerCap . _Just

    sigPayerCap (SigCapability q@(QualifiedName m n _) as)
      | n == "GAS_PAYER" = Just (m,q,as)
    sigPayerCap _ = Nothing

    gasPayerIface = ModuleName "gas-payer-v1" Nothing

    -- the payer module must implement the gas-payer-v1 interface
    lookupIfaceModRef (QualifiedName _ n _) (ModuleData (MDModule Module{..}) refs _)
      | gasPayerIface `elem` _mInterfaces = HM.lookup n refs
    lookupIfaceModRef _ _ = Nothing

    mkApp i r as = App (TVar r i) (map (liftTerm . fromPactValue) as) i

    -- acquire the GAS_PAYER cap, run the action, then pop the cap stack;
    -- pre-v16-compat mode leaves the message body unenriched
    runCap i capRef as input = do
      let msgBody = enrichedMsgBody cmd
          enrichMsgBody | isPactBackCompatV16 = id
                        | otherwise = setEnvMsgBody msgBody
      ar <- local enrichMsgBody $
        evalCap i CapCallStack False $ mkApp i capRef as

      case ar of
        NewlyAcquired -> do
          r <- input
          popCapStack (const (return ()))
          return r
        _ -> evalError' i "Internal error, GAS_PAYER already acquired"
-- | Reconstruct the transaction's message body as a JSON 'Value' so that a
-- gas-payer capability can inspect the tx it is being asked to pay for.
enrichedMsgBody :: Command (Payload PublicMeta ParsedCode) -> Value
enrichedMsgBody cmd = case (_pPayload $ _cmdPayload cmd) of
  Exec (ExecMsg (ParsedCode _ exps) userData) ->
    object [ "tx-type" A..= ( "exec" :: Text)
           , "exec-code" A..= map renderCompactText exps
           , "exec-user-data" A..= pactFriendlyUserData userData ]
  Continuation (ContMsg pid step isRollback userData proof) ->
    object [ "tx-type" A..= ("cont" :: Text)
           , "cont-pact-id" A..= pid
           , "cont-step" A..= (LInteger $ toInteger step)
           , "cont-is-rollback" A..= LBool isRollback
           , "cont-user-data" A..= pactFriendlyUserData userData
           , "cont-has-proof" A..= (LBool $ isJust proof)
           ]
  where
    -- normalize Null user data to an empty object
    pactFriendlyUserData Null = object []
    pactFriendlyUserData v = v
-- | Build and execute 'coin.redeem-gas' command from miner info and previous
-- command results (see 'TransactionExec.applyCmd')
--
-- see: 'pact/coin-contract/coin.pact#fund-tx'
--
redeemGas :: Command (Payload PublicMeta ParsedCode) -> TransactionM p [PactEvent]
redeemGas cmd = do
    mcache <- use txCache

    gid <- use txGasId >>= \case
      Nothing -> fatal $! "redeemGas: no gas id in scope for gas refunds"
      Just g -> return g

    fee <- gasSupplyOf <$> use txGasUsed <*> view txGasPrice

    _crEvents <$> applyContinuation 0 (initState mcache) (redeemGasCmd fee gid)
      (_pSigners $ _cmdPayload cmd) (toUntypedHash $ _cmdHash cmd)
      managedNamespacePolicy
  where
    initState mc = initStateInterpreter
      $ setModuleCache mc
      $ initCapabilities [magic_GAS]

    -- step 1 of the fund-tx pact (step 0 was 'buyGas')
    redeemGasCmd fee (GasId pid) =
      ContMsg pid 1 False (object [ "fee" A..= fee ]) Nothing
-- ---------------------------------------------------------------------------- --
-- Utilities
-- | Initialize a fresh eval state with magic capabilities.
-- This is the way we inject the correct guards into the environment
-- during Pact code execution
--
initCapabilities :: [CapSlot UserCapability] -> EvalState
initCapabilities cs = set (evalCapabilities . capStack) cs def
{-# INLINABLE initCapabilities #-}
-- | Build an 'Interpreter' that seeds evaluation with the given state
-- before running the wrapped action.
initStateInterpreter :: EvalState -> Interpreter e
initStateInterpreter s = Interpreter $ \start -> put s >> start
-- | Check whether the cost of running a tx is more than the allowed
-- gas limit and do some action depending on the outcome
--
checkTooBigTx
    :: Gas
    -> GasLimit
    -> TransactionM p (CommandResult [TxLog Value])
    -> (CommandResult [TxLog Value] -> TransactionM p (CommandResult [TxLog Value]))
    -> TransactionM p (CommandResult [TxLog Value])
checkTooBigTx initialGas gasLimit next onFail
    | initialGas >= (fromIntegral gasLimit) = do
        txGasUsed .= (fromIntegral gasLimit) -- all gas is consumed

        let !pe = PactError GasError def []
              $ "Tx too big (" <> pretty initialGas <> "), limit "
              <> pretty gasLimit

        r <- jsonErrorResult pe "Tx too big"
        onFail r
    | otherwise = next
-- | Build an interpreter whose initial state carries the current module
-- cache, optional gas logging, and the gas already charged for tx size.
gasInterpreter :: Gas -> TransactionM db (Interpreter p)
gasInterpreter g = do
    mc <- use txCache
    logGas <- isJust <$> view txGasLogger
    return $ initStateInterpreter
        $ set evalLogGas (guard logGas >> Just [("GTxSize",g)]) -- enables gas logging
        $ setModuleCache mc def
-- | Initial gas charged for transaction size
-- ignoring the size of a continuation proof, if present
--
initialGasOf :: PayloadWithText -> Gas
initialGasOf payload =
    let feePerByte :: Rational
        feePerByte = 0.01

        -- size of an attached continuation proof, excluded from the fee
        proofBytes = case _pPayload (payloadObj payload) of
          Continuation (ContMsg _ _ _ _ (Just (ContProof p))) -> B.length p
          _ -> 0

        txBytes = SB.length (payloadBytes payload) - proofBytes
        byteCost = fromIntegral txBytes * feePerByte
    in ceiling (byteCost + txSizeAccelerationFee byteCost)
{-# INLINE initialGasOf #-}
-- | Superlinear penalty on transaction size: the per-byte cost scaled by
-- a 512-byte normalizer and raised to the 7th power, so small txs pay
-- almost nothing extra while very large txs pay steeply.
txSizeAccelerationFee :: Rational -> Rational
txSizeAccelerationFee costPerByte = total
  where
    total = (costPerByte / bytePenalty) ^ power
    bytePenalty = 512
    power :: Integer
    power = 7
{-# INLINE txSizeAccelerationFee #-}
-- | Disable certain natives around pact 4 / coin v3 upgrade
--
disablePact40Natives :: ExecutionConfig -> EvalEnv e -> EvalEnv e
disablePact40Natives =
  disablePactNatives ["enumerate" , "distinct" , "emit-event" , "concat" , "str-to-list"] FlagDisablePact40
{-# INLINE disablePact40Natives #-}
-- | Remove the given native names from the environment's ref store when
-- the supplied execution flag is set; otherwise leave the env unchanged.
disablePactNatives :: [Text] -> ExecutionFlag -> ExecutionConfig -> EvalEnv e -> EvalEnv e
disablePactNatives bannedNatives flag ec = if has (ecFlags . ix flag) ec
  then over (eeRefStore . rsNatives) (\k -> foldl' (flip HM.delete) k bannedNatives)
  else id
{-# INLINE disablePactNatives #-}
-- | Disable certain natives around pact 4.2.0
--
disablePact420Natives :: ExecutionConfig -> EvalEnv e -> EvalEnv e
disablePact420Natives = disablePactNatives ["zip", "fold-db"] FlagDisablePact420
{-# INLINE disablePact420Natives #-}
-- | Disable certain natives around pact 4.3
--
disablePact43Natives :: ExecutionConfig -> EvalEnv e -> EvalEnv e
disablePact43Natives = disablePactNatives ["create-principal", "validate-principal", "continue"] FlagDisablePact43
{-# INLINE disablePact43Natives #-}
-- | Disable certain natives around pact 4.3.1
--
disablePact431Natives :: ExecutionConfig -> EvalEnv e -> EvalEnv e
disablePact431Natives = disablePactNatives ["is-principal", "typeof-principal"] FlagDisablePact431
{-# INLINE disablePact431Natives #-}
-- | Disable certain natives around pact 4.6
--
disablePact46Natives :: ExecutionConfig -> EvalEnv e -> EvalEnv e
disablePact46Natives = disablePactNatives ["point-add", "scalar-mult", "pairing-check"] FlagDisablePact46
{-# INLINE disablePact46Natives #-}
-- | Set the module cache of a pact 'EvalState'
--
setModuleCache
  :: ModuleCache
  -> EvalState
  -> EvalState
setModuleCache mcache es =
  let allDeps = foldMap (allModuleExports . fst) mcache
  in set (evalRefs . rsQualifiedDeps) allDeps $ set (evalRefs . rsLoadedModules) mcache $ es
{-# INLINE setModuleCache #-}
-- | Fold an 'EvalResult' back into transaction state: append its logs,
-- adopt its loaded-module cache, and record the gas it consumed.
setTxResultState :: EvalResult -> TransactionM db ()
setTxResultState er = do
    txLogs %= (<> _erLogs er)
    txCache .= _erLoadedModules er
    txGasUsed .= _erGas er
{-# INLINE setTxResultState #-}
-- | Make an 'EvalEnv' given a tx env + state
--
mkEvalEnv
    :: NamespacePolicy
    -> MsgData
    -> TransactionM db (EvalEnv db)
mkEvalEnv nsp msg = do
    tenv <- ask
    genv <- GasEnv
      <$> view (txGasLimit . to fromIntegral)
      <*> view txGasPrice
      <*> use txGasModel
    liftIO $ setupEvalEnv (_txDbEnv tenv) Nothing (_txMode tenv)
      msg initRefStore genv
      nsp (_txSpvSupport tenv) (_txPublicData tenv) (_txExecutionConfig tenv)
-- | Managed namespace policy CAF
--
managedNamespacePolicy :: NamespacePolicy
managedNamespacePolicy = SmartNamespacePolicy False
  (QualifiedName (ModuleName "ns" Nothing) "validate" def)
{-# NOINLINE managedNamespacePolicy #-}
-- | Builder for "magic" capabilities given a magic cap name
--
mkMagicCapSlot :: Text -> CapSlot UserCapability
mkMagicCapSlot c = CapSlot CapCallStack cap []
  where
    mn = ModuleName "coin" Nothing
    fqn = QualifiedName mn c def
    cap = SigCapability fqn []
{-# INLINE mkMagicCapSlot #-}
-- | Build the 'ExecMsg' for some pact code fed to the function. The 'value'
-- parameter is for any possible environmental data that needs to go into
-- the 'ExecMsg'.
--
buildExecParsedCode
    :: Maybe (ChainwebVersion, BlockHeight)
    -> Maybe Value
    -> Text
    -> IO (ExecMsg ParsedCode)
buildExecParsedCode chainCtx value code = maybe (go Null) go value
  where
    go val = case parsePact chainCtx code of
      Right !t -> pure $! ExecMsg t val
      -- if we can't construct coin contract calls, this should
      -- fail fast
      Left err -> internalError $ "buildExecParsedCode: parse failed: " <> T.pack err
-- | Retrieve public metadata from a command
--
publicMetaOf :: Command (Payload PublicMeta ParsedCode) -> PublicMeta
publicMetaOf = _pMeta . _cmdPayload
{-# INLINE publicMetaOf #-}
-- | Retrieve the optional Network identifier from a command
--
networkIdOf :: Command (Payload PublicMeta ParsedCode) -> Maybe NetworkId
networkIdOf = _pNetworkId . _cmdPayload
{-# INLINE networkIdOf #-}
-- | Calculate the gas fee (pact-generate gas cost * user-specified gas price),
-- rounding to the nearest stu.
--
gasSupplyOf :: Gas -> GasPrice -> GasSupply
gasSupplyOf gas (GasPrice (ParsedDecimal gp)) = GasSupply (ParsedDecimal gs)
  where
    gs = toCoinUnit ((fromIntegral gas) * gp)
{-# INLINE gasSupplyOf #-}
-- | Round to the nearest coin unit (12 decimal places).
--
toCoinUnit :: Decimal -> Decimal
toCoinUnit = roundTo 12
{-# INLINE toCoinUnit #-}
-- | Emit a gas log line at INFO tagged with the request key, but only
-- when a gas logger is configured.
gasLog :: Text -> TransactionM db ()
gasLog m = do
    mlg <- view txGasLogger
    rk <- view txRequestKey
    case mlg of
      Nothing -> return ()
      Just lg ->
        liftIO $! logLog lg "INFO" $! T.unpack m <> ": " <> show rk
-- | Log request keys at DEBUG when successful
--
debug :: Text -> TransactionM db ()
debug msg = do
    lgr <- view txLogger
    key <- view txRequestKey
    liftIO $! logLog lgr "DEBUG" $! T.unpack msg <> ": " <> show key
-- | Denotes fatal failure points in the tx exec process
--
fatal :: Text -> TransactionM db a
fatal e = do
    l <- view txLogger
    rk <- view txRequestKey

    liftIO
      $! logLog l "ERROR"
      $! "critical transaction failure: "
      <> sshow rk <> ": " <> T.unpack e

    throwM $ PactTransactionExecError (fromUntypedHash $ unRequestKey rk) e
-- | Log a message at ERROR via the tx logger.
logError :: Text -> TransactionM db ()
logError msg = do
    l <- view txLogger
    liftIO $! logLog l "ERROR" (T.unpack msg)
-- | Log a message at INFO via the tx logger.
infoLog :: Text -> TransactionM db ()
infoLog msg = do
    l <- view txLogger
    liftIO $! logLog l "INFO" (T.unpack msg)
| null | https://raw.githubusercontent.com/kadena-io/chainweb-node/2dcdc28b6ccfa2102a16051165ed5d4e100b9084/src/Chainweb/Pact/TransactionExec.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE OverloadedStrings #
# LANGUAGE RankNTypes #
|
License : (see the file LICENSE)
Stability : experimental
* Transaction Execution
* Gas Execution
* Coinbase Execution
* Command Helpers
* Utilities
-- internal Pact modules
-------------------------------------------------------------------------- --
constrain coinbase calls.
constrain gas buy/redeem calls.
constrain genesis-only allocations
| The main entry point to executing transactions. From here,
'applyCmd' assembles the command environment for a command and
orchestrates gas buys/redemption, and executing payloads.
^ Pact logger
^ Pact gas logger
^ Pact db environment
^ The miner chosen to mine the block
^ Gas model (pact Service config)
^ command with payload to execute
^ initial gas used
^ cached module state
^ is this a local or send execution context?
redeem gas failure is fatal (block-failing) so miner doesn't lose coins
^ Pact logger
^ Pact db environment
^ command with payload to execute
^ Pact logger
^ Pact db environment
^ The miner chosen to mine the block
^ enforce coinbase failure or not
^ always enable precompilation
NOTE: it holds that @ _pdPrevBlockHash pd == encode _blockHash@
NOTE: chash includes the /quoted/ text of the parent header.
into a block's coinbase transaction, please add a corresponding case
occurred.
This is especially important if the transaction changes an account's balance.
didn't see the transaction that caused the change.
^ Pact logger
^ Pact gas logger
^ Pact db environment
^ Gas model (pact Service config)
^ command with payload to execute
^ Pact logger
^ Pact db environment
cache purging everything but coin and its
dependencies.
see if fungible-v2 is there
see if fungible-xchain-v1 is there
load modules by referencing members
return loaded cache
Note: no need to check if things are there, because this
requires a block height that witnesses the invariant.
if this changes, we must change the filter in 'updateInitCache'
| Apply (forking) upgrade transactions and module cache updates
This is the place where we consistently /introduce/ new transactions
into the blockchain along with module cache updates. The only other
empty-module-cache-after-initial-rewind case caught in 'execTransactions'
which both hit the database.
In order to prime the module cache with all new modules for subsequent
those caches is returned. The calling code adds this new cache to the
init cache in the pact service state (_psInitCache).
Note (emily): This function does not need to care about
module caching, because it is already seeded with the correct cache
state, and is not updating the module cache, unlike 'applyUpgrades'.
error means all gas was charged
| Run genesis transaction payloads with custom interpreter
concat tx warnings with eval warnings
applyExec enforces non-empty expression set so `last` ok
wrapping it up in a JSON result.
set log + cache updates + used gas
| Execute a 'ContMsg' and return the command result and module cache
| Execute a 'ContMsg' and return just eval result, not wrapped in a
| Build and execute 'coin.buygas' command from miner info and user command
info (see 'TransactionExec.applyCmd')
should never occur: would mean coin.fund-tx is not a pact
| Build and execute 'coin.redeem-gas' command from miner info and previous
command results (see 'TransactionExec.applyCmd')
---------------------------------------------------------------------------- --
| Initialize a fresh eval state with magic capabilities.
This is the way we inject the correct guards into the environment
during Pact code execution
gas limit and do some action depending on the outcome
all gas is consumed
enables gas logging
| Initial gas charged for transaction size
ignoring the size of a continuation proof, if present
# INLINE initialGasOf #
| Disable certain natives around pact 4.2.0
| Disable certain natives around pact 4.2.0
# INLINE disablePact43Natives #
| Set tx result state
# INLINE setTxResultState #
| Builder for "magic" capabilities given a magic cap name
parameter is for any possible environmental data that needs to go into
if we can't construct coin contract calls, this should
fail fast
| Retrieve public metadata from a command
# INLINE networkIdOf #
| Calculate the gas fee (pact-generate gas cost * user-specified gas price),
# INLINE toCoinUnit #
| Log request keys at DEBUG when successful
| # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
Module : Chainweb . Pact . TransactionExec
Copyright : Copyright © 2018 Kadena LLC .
Maintainer : < > , < >
Pact command execution and coin - contract transaction logic for Chainweb
module Chainweb.Pact.TransactionExec
applyCmd
, applyGenesisCmd
, applyLocal
, applyExec
, applyExec'
, applyContinuation
, applyContinuation'
, runPayload
, readInitModules
, enablePactEvents'
, enforceKeysetFormats'
, disablePact40Natives
, buyGas
, applyCoinbase
, EnforceCoinbaseFailure(..)
, publicMetaOf
, networkIdOf
, gasSupplyOf
, buildExecParsedCode
, mkMagicCapSlot
, listErrMsg
, initialGasOf
) where
import Control.DeepSeq
import Control.Lens
import Control.Monad
import Control.Monad.Catch
import Control.Monad.Reader
import Control.Monad.State.Strict
import Control.Monad.Trans.Maybe
import Data.Aeson hiding ((.=))
import qualified Data.Aeson as A
import Data.Bifunctor
import qualified Data.ByteString as B
import qualified Data.ByteString.Short as SB
import Data.Decimal (Decimal, roundTo)
import Data.Default (def)
import Data.Foldable (for_, traverse_, foldl')
import Data.IORef
import qualified Data.HashMap.Strict as HM
import Data.Maybe
import qualified Data.Set as S
import Data.Text (Text)
import qualified Data.Text as T
import Pact.Eval (eval, liftTerm)
import Pact.Gas (freeGasEnv)
import Pact.Interpreter
import Pact.Native.Capabilities (evalCap)
import Pact.Parse (ParsedDecimal(..))
import Pact.Runtime.Capabilities (popCapStack)
import Pact.Runtime.Utils (lookupModule)
import Pact.Types.Capability
import Pact.Types.Command
import Pact.Types.Hash as Pact
import Pact.Types.KeySet
import Pact.Types.Logger hiding (logError)
import Pact.Types.PactValue
import Pact.Types.Pretty
import Pact.Types.RPC
import Pact.Types.Runtime
import Pact.Types.Server
import Pact.Types.SPV
-- internal Chainweb modules
import Chainweb.BlockHeader
import Chainweb.BlockHeight
import Chainweb.Mempool.Mempool (requestKeyToTransactionHash)
import Chainweb.Miner.Pact
import Chainweb.Pact.Service.Types
import Chainweb.Pact.Templates
import Chainweb.Pact.Transactions.UpgradeTransactions
import Chainweb.Pact.Types hiding (logError)
import Chainweb.Transaction
import Chainweb.Utils (encodeToByteString, sshow, tryAllSynchronous, T2(..), T3(..))
import Chainweb.Version as V
-- | "Magic" capability 'COINBASE' used in the coin contract to
-- constrain coinbase calls.
--
magic_COINBASE :: CapSlot UserCapability
magic_COINBASE = mkMagicCapSlot "COINBASE"
-- | "Magic" capability 'GAS' used in the coin contract to
-- constrain gas buy/redeem calls.
--
magic_GAS :: CapSlot UserCapability
magic_GAS = mkMagicCapSlot "GAS"
-- | "Magic" capability 'GENESIS' used in the coin contract to
-- constrain genesis-only allocations.
--
magic_GENESIS :: CapSlot UserCapability
magic_GENESIS = mkMagicCapSlot "GENESIS"
-- | The main entry point to executing transactions. From here,
-- 'applyCmd' assembles the command environment for a command and
-- orchestrates gas buys/redemption, and executing payloads.
--
applyCmd
    :: ChainwebVersion
    -> Logger
      -- ^ Pact logger
    -> Maybe Logger
      -- ^ Pact gas logger
    -> PactDbEnv p
      -- ^ Pact db environment
    -> Miner
      -- ^ The miner chosen to mine the block
    -> GasModel
      -- ^ Gas model (pact Service config)
    -> TxContext
      -- ^ tx metadata and parent header
    -> SPVSupport
      -- ^ SPV support (validates cont proofs)
    -> Command (Payload PublicMeta ParsedCode)
      -- ^ command with payload to execute
    -> Gas
      -- ^ initial gas used
    -> ModuleCache
      -- ^ cached module state
    -> ApplyCmdExecutionContext
      -- ^ is this a local or send execution context?
    -> IO (T3 (CommandResult [TxLog Value]) ModuleCache (S.Set PactWarning))
applyCmd v logger gasLogger pdbenv miner gasModel txCtx spv cmd initialGas mcache0 callCtx = do
    T2 cr st <- runTransactionM cenv txst applyBuyGas

    let cache = _txCache st
        warns = _txWarnings st

    pure $ T3 cr cache warns
  where
    -- buy-gas runs under the free gas model pre-chainweb217
    stGasModel
      | chainweb217Pact' = gasModel
      | otherwise = _geGasModel freeGasEnv
    txst = TransactionState mcache0 mempty 0 Nothing stGasModel mempty

    executionConfigNoHistory = mkExecutionConfig
      $ FlagDisableHistoryInTransactionalMode
      : ( [ FlagOldReadOnlyBehavior | isPactBackCompatV16 ]
      ++ [ FlagPreserveModuleNameBug | not isModuleNameFix ]
      ++ [ FlagPreserveNsModuleInstallBug | not isModuleNameFix2 ]
      ++ enablePactEvents' txCtx
      ++ enablePact40 txCtx
      ++ enablePact420 txCtx
      ++ enforceKeysetFormats' txCtx
      ++ enablePactModuleMemcheck txCtx
      ++ enablePact43 txCtx
      ++ enablePact431 txCtx
      ++ enablePact44 txCtx
      ++ enablePact45 txCtx
      ++ enableNewTrans txCtx
      ++ enablePact46 txCtx )

    cenv = TransactionEnv Transactional pdbenv logger gasLogger (ctxToPublicData txCtx) spv nid gasPrice
      requestKey (fromIntegral gasLimit) executionConfigNoHistory

    requestKey = cmdToRequestKey cmd
    gasPrice = view cmdGasPrice cmd
    gasLimit = view cmdGasLimit cmd
    nid = networkIdOf cmd
    currHeight = ctxCurrentBlockHeight txCtx
    isModuleNameFix = enableModuleNameFix v currHeight
    isModuleNameFix2 = enableModuleNameFix2 v currHeight
    isPactBackCompatV16 = pactBackCompat_v16 v currHeight
    chainweb213Pact' = chainweb213Pact (ctxVersion txCtx) (ctxCurrentBlockHeight txCtx)
    chainweb217Pact' = chainweb217Pact After (ctxVersion txCtx) (ctxCurrentBlockHeight txCtx)
    toEmptyPactError (PactError errty _ _ _) = PactError errty def [] mempty

    toOldListErr pe = pe { peDoc = listErrMsg }
    isOldListErr = \case
      PactError EvalError _ _ doc -> "Unknown primitive" `T.isInfixOf` renderCompactText' doc
      _ -> False

    redeemAllGas r = do
      txGasUsed .= fromIntegral gasLimit
      applyRedeem r

    -- a failed gas purchase is thrown as 'BuyGasFailure' so the caller can
    -- reject the tx outright
    applyBuyGas =
      catchesPactError (buyGas isPactBackCompatV16 cmd miner) >>= \case
        Left e -> view txRequestKey >>= \rk ->
          throwM $ BuyGasFailure $ GasPurchaseFailure (requestKeyToTransactionHash rk) e
        Right _ -> checkTooBigTx initialGas gasLimit applyPayload redeemAllGas

    applyPayload = do
      txGasModel .= gasModel
      if chainweb217Pact' then txGasUsed += initialGas
      else txGasUsed .= initialGas

      cr <- catchesPactError $! runPayload cmd managedNamespacePolicy
      case cr of
        Left e
          -- post-chainweb217, send errors are scrubbed before inclusion
          | chainweb217Pact' -> do
              let e' = case callCtx of
                    ApplyLocal -> e
                    ApplySend -> toEmptyPactError e
              r <- jsonErrorResult e' "tx failure for request key when running cmd"
              redeemAllGas r
          | chainweb213Pact' || not (isOldListErr e) -> do
              r <- jsonErrorResult e "tx failure for request key when running cmd"
              redeemAllGas r
          -- replay the historical "list" gas-table error verbatim
          | otherwise -> do
              r <- jsonErrorResult (toOldListErr e) "tx failure for request key when running cmd"
              redeemAllGas r
        Right r -> applyRedeem r

    applyRedeem cr = do
      txGasModel .= (_geGasModel freeGasEnv)

      r <- catchesPactError $! redeemGas cmd
      case r of
        Left e ->
          -- redeem gas failure is fatal (block-failing) so miner doesn't lose coins
          fatal $ "tx failure for request key while redeeming gas: " <> sshow e
        Right es -> do
          logs <- use txLogs
          return $! set crLogs (Just logs) $ over crEvents (es ++) cr
-- | The historical error text produced when the pact-4.2.0 gas table
-- missed the "list" primitive; replayed verbatim by 'applyCmd' for
-- pre-chainweb213 blocks so old failures reproduce byte-for-byte.
listErrMsg :: Doc
listErrMsg =
  "Unknown primitive \"list\" in determining cost of GUnreduced\nCallStack (from HasCallStack):\n error, called at src/Pact/Gas/Table.hs:209:22 in pact-4.2.0-fe223ad86f1795ba381192792f450820557e59c2926c747bf2aa6e398394bee6:Pact.Gas.Table"
-- | Apply a genesis transaction: runs under the magic GENESIS and
-- COINBASE capabilities with zero gas and a permissive namespace policy.
--
applyGenesisCmd
    :: Logger
      -- ^ Pact logger
    -> PactDbEnv p
      -- ^ Pact db environment
    -> SPVSupport
      -- ^ SPV support (validates cont proofs)
    -> Command (Payload PublicMeta ParsedCode)
      -- ^ command with payload to execute
    -> IO (T2 (CommandResult [TxLog Value]) ModuleCache)
applyGenesisCmd logger dbEnv spv cmd =
    second _txCache <$!> runTransactionM tenv txst go
  where
    nid = networkIdOf cmd
    rk = cmdToRequestKey cmd
    tenv = TransactionEnv
        { _txMode = Transactional
        , _txDbEnv = dbEnv
        , _txLogger = logger
        , _txGasLogger = Nothing
        , _txPublicData = def
        , _txSpvSupport = spv
        , _txNetworkId = nid
        , _txGasPrice = 0.0
        , _txRequestKey = rk
        , _txGasLimit = 0
        , _txExecutionConfig = mkExecutionConfig
          [ FlagDisablePact40
          , FlagDisablePact420
          , FlagDisableInlineMemCheck
          , FlagDisablePact43
          , FlagDisablePact44
          , FlagDisablePact45
          ]
        }
    txst = TransactionState
        { _txCache = mempty
        , _txLogs = mempty
        , _txGasUsed = 0
        , _txGasId = Nothing
        , _txGasModel = _geGasModel freeGasEnv
        , _txWarnings = mempty
        }

    interp = initStateInterpreter
      $ initCapabilities [magic_GENESIS, magic_COINBASE]

    go = do
      cr <- catchesPactError $! runGenesis cmd permissiveNamespacePolicy interp
      case cr of
        -- genesis failures are fatal: the chain cannot start without them
        Left e -> fatal $ "Genesis command failed: " <> sshow e
        Right r -> r <$ debug "successful genesis tx for request key"
-- | Apply the block's coinbase transaction, paying the miner reward and
-- running any fork-activated upgrade transactions afterwards.
--
applyCoinbase
    :: ChainwebVersion
    -> Logger
      -- ^ Pact logger
    -> PactDbEnv p
      -- ^ Pact db environment
    -> Miner
      -- ^ The miner chosen to mine the block
    -> ParsedDecimal
      -- ^ reward
    -> TxContext
      -- ^ tx metadata and parent header
    -> EnforceCoinbaseFailure
      -- ^ enforce coinbase failure or not
    -> CoinbaseUsePrecompiled
      -- ^ always enable precompilation
    -> ModuleCache
    -> IO (T2 (CommandResult [TxLog Value]) (Maybe ModuleCache))
applyCoinbase v logger dbEnv (Miner mid mks@(MinerKeys mk)) reward@(ParsedDecimal d) txCtx
  (EnforceCoinbaseFailure enfCBFailure) (CoinbaseUsePrecompiled enablePC) mc
    | fork1_3InEffect || enablePC = do
      when chainweb213Pact' $ enforceKeyFormats
        (\k -> throwM $ CoinbaseFailure $ "Invalid miner key: " <> sshow k)
        mk
      let (cterm, cexec) = mkCoinbaseTerm mid mks reward
          interp = Interpreter $ \_ -> do put initState; fmap pure (eval cterm)
      go interp cexec
    | otherwise = do
      cexec <- mkCoinbaseCmd mid mks reward
      let interp = initStateInterpreter initState
      go interp cexec
  where
    chainweb213Pact' = chainweb213Pact v bh
    fork1_3InEffect = vuln797Fix v cid bh
    throwCritical = fork1_3InEffect || enfCBFailure
    ec = mkExecutionConfig $
      [ FlagDisableModuleInstall
      , FlagDisableHistoryInTransactionalMode ] ++
      enablePactEvents' txCtx ++
      enablePact40 txCtx ++
      enablePact420 txCtx ++
      enablePactModuleMemcheck txCtx ++
      enablePact43 txCtx ++
      enablePact431 txCtx ++
      enablePact44 txCtx ++
      enablePact45 txCtx
    tenv = TransactionEnv Transactional dbEnv logger Nothing (ctxToPublicData txCtx) noSPVSupport
           Nothing 0.0 rk 0 ec
    txst = TransactionState mc mempty 0 Nothing (_geGasModel freeGasEnv) mempty
    initState = setModuleCache mc $ initCapabilities [magic_COINBASE]

    rk = RequestKey chash
    parent = _tcParentHeader txCtx

    bh = ctxCurrentBlockHeight txCtx
    cid = V._chainId parent
    -- NOTE: chash includes the /quoted/ text of the parent header.
    chash = Pact.Hash $ SB.toShort $ encodeToByteString $ _blockHash $ _parentHeader parent

    go interp cexec = evalTransactionM tenv txst $! do
      cr <- catchesPactError $!
        applyExec' 0 interp cexec mempty chash managedNamespacePolicy

      case cr of
        Left e
          | throwCritical -> throwM $ CoinbaseFailure $ sshow e
          | otherwise -> (`T2` Nothing) <$> jsonErrorResult e "coinbase tx failure"
        Right er -> do
          debug
            $! "successful coinbase of "
            <> (T.take 18 $ sshow d)
            <> " to "
            <> sshow mid

          upgradedModuleCache <- applyUpgrades v cid bh
          void $! applyTwentyChainUpgrade v cid bh

          -- NOTE: When adding new forking transactions that are injected
          -- into a block's coinbase transaction, please add a corresponding case
          -- in Rosetta's `matchLogs` function and follow the pattern.
          --
          -- Otherwise, Rosetta tooling has no idea that these upgrade transactions
          -- occurred.
          -- This is especially important if the transaction changes an account's balance.
          -- Rosetta tooling will error out if an account's balance changed and it
          -- didn't see the transaction that caused the change.
          --

          logs <- use txLogs

          return $! T2
            (CommandResult rk (_erTxId er) (PactResult (Right (last $ _erOutput er)))
              (_erGas er) (Just $ logs) (_erExec er) Nothing (_erEvents er))
            upgradedModuleCache
applyLocal
:: Logger
-> Maybe Logger
-> PactDbEnv p
-> GasModel
-> TxContext
^ tx metadata and parent header
-> SPVSupport
^ SPV support ( validates cont proofs )
-> Command PayloadWithText
-> ModuleCache
-> ExecutionConfig
-> IO (CommandResult [TxLog Value])
applyLocal logger gasLogger dbEnv gasModel txCtx spv cmdIn mc execConfig =
evalTransactionM tenv txst go
where
cmd = payloadObj <$> cmdIn
rk = cmdToRequestKey cmd
nid = networkIdOf cmd
chash = toUntypedHash $ _cmdHash cmd
signers = _pSigners $ _cmdPayload cmd
gasPrice = view cmdGasPrice cmd
gasLimit = view cmdGasLimit cmd
tenv = TransactionEnv Local dbEnv logger gasLogger (ctxToPublicData txCtx) spv nid gasPrice
rk (fromIntegral gasLimit) execConfig
txst = TransactionState mc mempty 0 Nothing gasModel mempty
gas0 = initialGasOf (_cmdPayload cmdIn)
applyPayload m = do
interp <- gasInterpreter gas0
cr <- catchesPactError $! case m of
Exec em ->
applyExec gas0 interp em signers chash managedNamespacePolicy
Continuation cm ->
applyContinuation gas0 interp cm signers chash managedNamespacePolicy
case cr of
Left e -> jsonErrorResult e "applyLocal"
Right r -> return $! r { _crMetaData = Just (toJSON $ ctxToPublicData' txCtx) }
go = checkTooBigTx gas0 gasLimit (applyPayload $ _pPayload $ _cmdPayload cmd) return
readInitModules
:: Logger
-> PactDbEnv p
-> TxContext
^ tx metadata and parent header
-> IO ModuleCache
readInitModules logger dbEnv txCtx
| chainweb217Pact' = evalTransactionM tenv txst goCw217
| otherwise = evalTransactionM tenv txst go
where
guarding 2.17 here to allow for
chainweb217Pact' = chainweb217Pact
After
(ctxVersion txCtx)
(ctxCurrentBlockHeight txCtx)
parent = _tcParentHeader txCtx
v = _chainwebVersion parent
h = _blockHeight (_parentHeader parent) + 1
rk = RequestKey chash
nid = Nothing
chash = pactInitialHash
tenv = TransactionEnv Local dbEnv logger Nothing (ctxToPublicData txCtx) noSPVSupport nid 0.0
rk 0 def
txst = TransactionState mempty mempty 0 Nothing (_geGasModel freeGasEnv) mempty
interp = defaultInterpreter
die msg = throwM $ PactInternalError $ "readInitModules: " <> msg
mkCmd = buildExecParsedCode (Just (v, h)) Nothing
run msg cmd = do
er <- catchesPactError $!
applyExec' 0 interp cmd [] chash permissiveNamespacePolicy
case er of
Left e -> die $ msg <> ": failed: " <> sshow e
Right r -> case _erOutput r of
[] -> die $ msg <> ": empty result"
(o:_) -> return o
go :: TransactionM p ModuleCache
go = do
checkCmd <- liftIO $ mkCmd "(contains \"fungible-v2\" (list-modules))"
checkFv2 <- run "check fungible-v2" checkCmd
hasFv2 <- case checkFv2 of
(PLiteral (LBool b)) -> return b
t -> die $ "got non-bool result from module read: " <> T.pack (showPretty t)
checkCmdx <- liftIO $ mkCmd "(contains \"fungible-xchain-v1\" (list-modules))"
checkFx <- run "check fungible-xchain-v1" checkCmdx
hasFx <- case checkFx of
(PLiteral (LBool b)) -> return b
t -> die $ "got non-bool result from module read: " <> T.pack (showPretty t)
refModsCmd <- liftIO $ mkCmd $ T.intercalate " " $
[ "coin.MINIMUM_PRECISION"
, "ns.GUARD_SUCCESS"
, "gas-payer-v1.GAS_PAYER"
, "fungible-v1.account-details"] ++
[ "fungible-v2.account-details" | hasFv2 ] ++
[ "(let ((m:module{fungible-xchain-v1} coin)) 1)" | hasFx ]
void $ run "load modules" refModsCmd
use txCache
Only load coin and its dependencies for chainweb > = 2.17
goCw217 :: TransactionM p ModuleCache
goCw217 = do
coinDepCmd <- liftIO $ mkCmd "coin.MINIMUM_PRECISION"
void $ run "load modules" coinDepCmd
use txCache
at a particular blockheight .
places are Pact Service startup and the
applyUpgrades
:: ChainwebVersion
-> V.ChainId
-> BlockHeight
-> TransactionM p (Maybe ModuleCache)
applyUpgrades v cid height
| coinV2Upgrade v cid height = applyCoinV2
| pact4coin3Upgrade At v height = applyCoinV3
| chainweb214Pact At v height = applyCoinV4
| chainweb215Pact At v height = applyCoinV5
| chainweb217Pact At v height = filterModuleCache
| otherwise = return Nothing
where
installCoinModuleAdmin = set (evalCapabilities . capModuleAdmin) $ S.singleton (ModuleName "coin" Nothing)
applyCoinV2 = applyTxs (upgradeTransactions v cid) [FlagDisableInlineMemCheck, FlagDisablePact43, FlagDisablePact45]
applyCoinV3 = applyTxs coinV3Transactions [FlagDisableInlineMemCheck, FlagDisablePact43, FlagDisablePact45]
applyCoinV4 = applyTxs coinV4Transactions [FlagDisablePact45]
applyCoinV5 = applyTxs coinV5Transactions [FlagDisablePact45]
filterModuleCache = do
mc <- use txCache
pure $ Just $ HM.filterWithKey (\k _ -> k == "coin") mc
applyTxs txsIO flags = do
infoLog "Applying upgrade!"
txs <- map (fmap payloadObj) <$> liftIO txsIO
blocks , the caches from each tx are collected and the union of all
let execConfig = mkExecutionConfig flags
caches <- local (set txExecutionConfig execConfig) $ mapM applyTx txs
return $ Just (HM.unions caches)
interp = initStateInterpreter
$ installCoinModuleAdmin
$ initCapabilities [mkMagicCapSlot "REMEDIATE"]
applyTx tx = do
infoLog $ "Running upgrade tx " <> sshow (_cmdHash tx)
tryAllSynchronous (runGenesis tx permissiveNamespacePolicy interp) >>= \case
Right _ -> use txCache
Left e -> do
logError $ "Upgrade transaction failed! " <> sshow e
throwM e
applyTwentyChainUpgrade
:: ChainwebVersion
-> V.ChainId
-> BlockHeight
-> TransactionM p ()
applyTwentyChainUpgrade v cid bh
| to20ChainRebalance v cid bh = do
txlist <- liftIO $ twentyChainUpgradeTransactions v cid
infoLog $ "Applying 20-chain upgrades on chain " <> sshow cid
let txs = fmap payloadObj <$> txlist
traverse_ applyTx txs
| otherwise = return ()
where
applyTx tx = do
infoLog $ "Running 20-chain upgrade tx " <> sshow (_cmdHash tx)
let i = initStateInterpreter
$ initCapabilities [mkMagicCapSlot "REMEDIATE"]
r <- tryAllSynchronous (runGenesis tx permissiveNamespacePolicy i)
case r of
Left e -> do
logError $ "Upgrade transaction failed: " <> sshow e
void $! throwM e
Right _ -> return ()
jsonErrorResult
:: PactError
-> Text
-> TransactionM p (CommandResult [TxLog Value])
jsonErrorResult err msg = do
logs <- use txLogs
rk <- view txRequestKey
l <- view txLogger
liftIO
$! logLog l "INFO"
$! T.unpack msg
<> ": " <> show rk
<> ": " <> show err
return $! CommandResult rk Nothing (PactResult (Left err))
gas (Just logs) Nothing Nothing []
runPayload
:: Command (Payload PublicMeta ParsedCode)
-> NamespacePolicy
-> TransactionM p (CommandResult [TxLog Value])
runPayload cmd nsp = do
g0 <- use txGasUsed
interp <- gasInterpreter g0
case payload of
Exec pm ->
applyExec g0 interp pm signers chash nsp
Continuation ym ->
applyContinuation g0 interp ym signers chash nsp
where
signers = _pSigners $ _cmdPayload cmd
chash = toUntypedHash $ _cmdHash cmd
payload = _pPayload $ _cmdPayload cmd
runGenesis
:: Command (Payload PublicMeta ParsedCode)
-> NamespacePolicy
-> Interpreter p
-> TransactionM p (CommandResult [TxLog Value])
runGenesis cmd nsp interp = case payload of
Exec pm ->
applyExec 0 interp pm signers chash nsp
Continuation ym ->
applyContinuation 0 interp ym signers chash nsp
where
signers = _pSigners $ _cmdPayload cmd
chash = toUntypedHash $ _cmdHash cmd
payload = _pPayload $ _cmdPayload cmd
| Execute an ' ' and Return the result with module cache
applyExec
:: Gas
-> Interpreter p
-> ExecMsg ParsedCode
-> [Signer]
-> Hash
-> NamespacePolicy
-> TransactionM p (CommandResult [TxLog Value])
applyExec initialGas interp em senderSigs hsh nsp = do
EvalResult{..} <- applyExec' initialGas interp em senderSigs hsh nsp
for_ _erLogGas $ \gl -> gasLog $ "gas logs: " <> sshow gl
logs <- use txLogs
rk <- view txRequestKey
txWarnings <>= _erWarnings
forcing it here for lazy errors . TODO NFData the Pacts
lastResult <- return $!! last _erOutput
return $! CommandResult rk _erTxId (PactResult (Right lastResult))
_erGas (Just logs) _erExec Nothing _erEvents
| Variation on ' applyExec ' that returns ' ' as opposed to
applyExec'
:: Gas
-> Interpreter p
-> ExecMsg ParsedCode
-> [Signer]
-> Hash
-> NamespacePolicy
-> TransactionM p EvalResult
applyExec' initialGas interp (ExecMsg parsedCode execData) senderSigs hsh nsp
| null (_pcExps parsedCode) = throwCmdEx "No expressions found"
| otherwise = do
pactFlags <- asks _txExecutionConfig
eenv <- mkEvalEnv nsp (MsgData execData Nothing hsh senderSigs)
<&> disablePact40Natives pactFlags
<&> disablePact420Natives pactFlags
<&> disablePact43Natives pactFlags
<&> disablePact431Natives pactFlags
<&> disablePact46Natives pactFlags
setEnvGas initialGas eenv
er <- liftIO $! evalExec interp eenv parsedCode
for_ (_erExec er) $ \pe -> debug
$ "applyExec: new pact added: "
<> sshow (_pePactId pe, _peStep pe, _peYield pe, _peExecuted pe)
setTxResultState er
return er
enablePactEvents' :: TxContext -> [ExecutionFlag]
enablePactEvents' tc
| enablePactEvents (ctxVersion tc) (ctxCurrentBlockHeight tc) = []
| otherwise = [FlagDisablePactEvents]
enforceKeysetFormats' :: TxContext -> [ExecutionFlag]
enforceKeysetFormats' tc
| enforceKeysetFormats (ctxVersion tc) (ctxCurrentBlockHeight tc) = [FlagEnforceKeyFormats]
| otherwise = []
enablePact40 :: TxContext -> [ExecutionFlag]
enablePact40 tc
| pact4coin3Upgrade After (ctxVersion tc) (ctxCurrentBlockHeight tc) = []
| otherwise = [FlagDisablePact40]
enablePact420 :: TxContext -> [ExecutionFlag]
enablePact420 tc
| pact420Upgrade (ctxVersion tc) (ctxCurrentBlockHeight tc) = []
| otherwise = [FlagDisablePact420]
enablePactModuleMemcheck :: TxContext -> [ExecutionFlag]
enablePactModuleMemcheck tc
| chainweb213Pact (ctxVersion tc) (ctxCurrentBlockHeight tc) = []
| otherwise = [FlagDisableInlineMemCheck]
enablePact43 :: TxContext -> [ExecutionFlag]
enablePact43 tc
| chainweb214Pact After (ctxVersion tc) (ctxCurrentBlockHeight tc) = []
| otherwise = [FlagDisablePact43]
enablePact431 :: TxContext -> [ExecutionFlag]
enablePact431 tc
| chainweb215Pact After (ctxVersion tc) (ctxCurrentBlockHeight tc) = []
| otherwise = [FlagDisablePact431]
enablePact44 :: TxContext -> [ExecutionFlag]
enablePact44 tc
| chainweb216Pact After (ctxVersion tc) (ctxCurrentBlockHeight tc) = []
| otherwise = [FlagDisablePact44]
enablePact45 :: TxContext -> [ExecutionFlag]
enablePact45 tc
| chainweb217Pact After (ctxVersion tc) (ctxCurrentBlockHeight tc) = []
| otherwise = [FlagDisablePact45]
enableNewTrans :: TxContext -> [ExecutionFlag]
enableNewTrans tc
| pact44NewTrans (ctxVersion tc) (ctxCurrentBlockHeight tc) = []
| otherwise = [FlagDisableNewTrans]
enablePact46 :: TxContext -> [ExecutionFlag]
enablePact46 tc
| chainweb218Pact (ctxVersion tc) (ctxCurrentBlockHeight tc) = []
| otherwise = [FlagDisablePact46]
applyContinuation
:: Gas
-> Interpreter p
-> ContMsg
-> [Signer]
-> Hash
-> NamespacePolicy
-> TransactionM p (CommandResult [TxLog Value])
applyContinuation initialGas interp cm senderSigs hsh nsp = do
EvalResult{..} <- applyContinuation' initialGas interp cm senderSigs hsh nsp
for_ _erLogGas $ \gl -> gasLog $ "gas logs: " <> sshow gl
logs <- use txLogs
rk <- view txRequestKey
set tx warnings to eval warnings
txWarnings <>= _erWarnings
last safe here because cont msg is guaranteed one exp
return $! (CommandResult rk _erTxId (PactResult (Right (last _erOutput)))
_erGas (Just logs) _erExec Nothing) _erEvents
setEnvGas :: Gas -> EvalEnv e -> TransactionM p ()
setEnvGas initialGas = liftIO . views eeGas (`writeIORef` initialGas)
' CommandResult ' wrapper
applyContinuation'
:: Gas
-> Interpreter p
-> ContMsg
-> [Signer]
-> Hash
-> NamespacePolicy
-> TransactionM p EvalResult
applyContinuation' initialGas interp cm@(ContMsg pid s rb d _) senderSigs hsh nsp = do
pactFlags <- asks _txExecutionConfig
eenv <- mkEvalEnv nsp (MsgData d pactStep hsh senderSigs)
<&> disablePact40Natives pactFlags
<&> disablePact420Natives pactFlags
<&> disablePact43Natives pactFlags
<&> disablePact46Natives pactFlags
setEnvGas initialGas eenv
er <- liftIO $! evalContinuation interp eenv cm
setTxResultState er
return er
where
pactStep = Just $ PactStep s rb pid Nothing
see : ' pact / coin - contract / coin.pact#fund - tx '
buyGas :: Bool -> Command (Payload PublicMeta ParsedCode) -> Miner -> TransactionM p ()
buyGas isPactBackCompatV16 cmd (Miner mid mks) = go
where
sender = view (cmdPayload . pMeta . pmSender) cmd
initState mc logGas =
set evalLogGas (guard logGas >> Just [("GBuyGas",0)]) $ setModuleCache mc $ initCapabilities [magic_GAS]
run input = do
(findPayer isPactBackCompatV16 cmd) >>= \r -> case r of
Nothing -> input
Just withPayerCap -> withPayerCap input
(Hash chash) = toUntypedHash (_cmdHash cmd)
bgHash = Hash (chash <> "-buygas")
go = do
mcache <- use txCache
supply <- gasSupplyOf <$> view txGasLimit <*> view txGasPrice
logGas <- isJust <$> view txGasLogger
let (buyGasTerm, buyGasCmd) = mkBuyGasTerm mid mks sender supply
interp mc = Interpreter $ \_input ->
put (initState mc logGas) >> run (pure <$> eval buyGasTerm)
result <- applyExec' 0 (interp mcache) buyGasCmd
(_pSigners $ _cmdPayload cmd) bgHash managedNamespacePolicy
case _erExec result of
Nothing ->
fatal "buyGas: Internal error - empty continuation"
Just pe -> void $! txGasId .= (Just $! GasId (_pePactId pe))
findPayer
:: Bool
-> Command (Payload PublicMeta ParsedCode)
-> Eval e (Maybe (Eval e [Term Name] -> Eval e [Term Name]))
findPayer isPactBackCompatV16 cmd = runMaybeT $ do
(!m,!qn,!as) <- MaybeT findPayerCap
pMod <- MaybeT $ lookupModule qn m
capRef <- MaybeT $ return $ lookupIfaceModRef qn pMod
return $ runCap (getInfo qn) capRef as
where
setEnvMsgBody v e = set eeMsgBody v e
findPayerCap :: Eval e (Maybe (ModuleName,QualifiedName,[PactValue]))
findPayerCap = preview $ eeMsgSigs . folded . folded . to sigPayerCap . _Just
sigPayerCap (SigCapability q@(QualifiedName m n _) as)
| n == "GAS_PAYER" = Just (m,q,as)
sigPayerCap _ = Nothing
gasPayerIface = ModuleName "gas-payer-v1" Nothing
lookupIfaceModRef (QualifiedName _ n _) (ModuleData (MDModule Module{..}) refs _)
| gasPayerIface `elem` _mInterfaces = HM.lookup n refs
lookupIfaceModRef _ _ = Nothing
mkApp i r as = App (TVar r i) (map (liftTerm . fromPactValue) as) i
runCap i capRef as input = do
let msgBody = enrichedMsgBody cmd
enrichMsgBody | isPactBackCompatV16 = id
| otherwise = setEnvMsgBody msgBody
ar <- local enrichMsgBody $
evalCap i CapCallStack False $ mkApp i capRef as
case ar of
NewlyAcquired -> do
r <- input
popCapStack (const (return ()))
return r
_ -> evalError' i "Internal error, GAS_PAYER already acquired"
enrichedMsgBody :: Command (Payload PublicMeta ParsedCode) -> Value
enrichedMsgBody cmd = case (_pPayload $ _cmdPayload cmd) of
Exec (ExecMsg (ParsedCode _ exps) userData) ->
object [ "tx-type" A..= ( "exec" :: Text)
, "exec-code" A..= map renderCompactText exps
, "exec-user-data" A..= pactFriendlyUserData userData ]
Continuation (ContMsg pid step isRollback userData proof) ->
object [ "tx-type" A..= ("cont" :: Text)
, "cont-pact-id" A..= pid
, "cont-step" A..= (LInteger $ toInteger step)
, "cont-is-rollback" A..= LBool isRollback
, "cont-user-data" A..= pactFriendlyUserData userData
, "cont-has-proof" A..= (LBool $ isJust proof)
]
where
pactFriendlyUserData Null = object []
pactFriendlyUserData v = v
see : ' pact / coin - contract / coin.pact#fund - tx '
redeemGas :: Command (Payload PublicMeta ParsedCode) -> TransactionM p [PactEvent]
redeemGas cmd = do
mcache <- use txCache
gid <- use txGasId >>= \case
Nothing -> fatal $! "redeemGas: no gas id in scope for gas refunds"
Just g -> return g
fee <- gasSupplyOf <$> use txGasUsed <*> view txGasPrice
_crEvents <$> applyContinuation 0 (initState mcache) (redeemGasCmd fee gid)
(_pSigners $ _cmdPayload cmd) (toUntypedHash $ _cmdHash cmd)
managedNamespacePolicy
where
initState mc = initStateInterpreter
$ setModuleCache mc
$ initCapabilities [magic_GAS]
redeemGasCmd fee (GasId pid) =
ContMsg pid 1 False (object [ "fee" A..= fee ]) Nothing
Utilities
initCapabilities :: [CapSlot UserCapability] -> EvalState
initCapabilities cs = set (evalCapabilities . capStack) cs def
# INLINABLE initCapabilities #
initStateInterpreter :: EvalState -> Interpreter e
initStateInterpreter s = Interpreter (put s >>)
| Check whether the cost of running a tx is more than the allowed
checkTooBigTx
:: Gas
-> GasLimit
-> TransactionM p (CommandResult [TxLog Value])
-> (CommandResult [TxLog Value] -> TransactionM p (CommandResult [TxLog Value]))
-> TransactionM p (CommandResult [TxLog Value])
checkTooBigTx initialGas gasLimit next onFail
| initialGas >= (fromIntegral gasLimit) = do
let !pe = PactError GasError def []
$ "Tx too big (" <> pretty initialGas <> "), limit "
<> pretty gasLimit
r <- jsonErrorResult pe "Tx too big"
onFail r
| otherwise = next
gasInterpreter :: Gas -> TransactionM db (Interpreter p)
gasInterpreter g = do
mc <- use txCache
logGas <- isJust <$> view txGasLogger
return $ initStateInterpreter
$ setModuleCache mc def
initialGasOf :: PayloadWithText -> Gas
initialGasOf payload = gasFee
where
feePerByte :: Rational = 0.01
contProofSize =
case _pPayload (payloadObj payload) of
Continuation (ContMsg _ _ _ _ (Just (ContProof p))) -> B.length p
_ -> 0
txSize = SB.length (payloadBytes payload) - contProofSize
costPerByte = fromIntegral txSize * feePerByte
sizePenalty = txSizeAccelerationFee costPerByte
gasFee = ceiling (costPerByte + sizePenalty)
txSizeAccelerationFee :: Rational -> Rational
txSizeAccelerationFee costPerByte = total
where
total = (costPerByte / bytePenalty) ^ power
bytePenalty = 512
power :: Integer = 7
# INLINE txSizeAccelerationFee #
| Disable certain natives around pact 4 / coin v3 upgrade
disablePact40Natives :: ExecutionConfig -> EvalEnv e -> EvalEnv e
disablePact40Natives =
disablePactNatives ["enumerate" , "distinct" , "emit-event" , "concat" , "str-to-list"] FlagDisablePact40
# INLINE disablePact40Natives #
disablePactNatives :: [Text] -> ExecutionFlag -> ExecutionConfig -> EvalEnv e -> EvalEnv e
disablePactNatives bannedNatives flag ec = if has (ecFlags . ix flag) ec
then over (eeRefStore . rsNatives) (\k -> foldl' (flip HM.delete) k bannedNatives)
else id
# INLINE disablePactNatives #
disablePact420Natives :: ExecutionConfig -> EvalEnv e -> EvalEnv e
disablePact420Natives = disablePactNatives ["zip", "fold-db"] FlagDisablePact420
# INLINE disablePact420Natives #
disablePact43Natives :: ExecutionConfig -> EvalEnv e -> EvalEnv e
disablePact43Natives = disablePactNatives ["create-principal", "validate-principal", "continue"] FlagDisablePact43
disablePact431Natives :: ExecutionConfig -> EvalEnv e -> EvalEnv e
disablePact431Natives = disablePactNatives ["is-principal", "typeof-principal"] FlagDisablePact431
# INLINE disablePact431Natives #
disablePact46Natives :: ExecutionConfig -> EvalEnv e -> EvalEnv e
disablePact46Natives = disablePactNatives ["point-add", "scalar-mult", "pairing-check"] FlagDisablePact46
# INLINE disablePact46Natives #
| Set the module cache of a pact ' EvalState '
setModuleCache
:: ModuleCache
-> EvalState
-> EvalState
setModuleCache mcache es =
let allDeps = foldMap (allModuleExports . fst) mcache
in set (evalRefs . rsQualifiedDeps) allDeps $ set (evalRefs . rsLoadedModules) mcache $ es
# INLINE setModuleCache #
setTxResultState :: EvalResult -> TransactionM db ()
setTxResultState er = do
txLogs <>= (_erLogs er)
txCache .= (_erLoadedModules er)
txGasUsed .= (_erGas er)
| Make an ' EvalEnv ' given a tx env + state
mkEvalEnv
:: NamespacePolicy
-> MsgData
-> TransactionM db (EvalEnv db)
mkEvalEnv nsp msg = do
tenv <- ask
genv <- GasEnv
<$> view (txGasLimit . to fromIntegral)
<*> view txGasPrice
<*> use txGasModel
liftIO $ setupEvalEnv (_txDbEnv tenv) Nothing (_txMode tenv)
msg initRefStore genv
nsp (_txSpvSupport tenv) (_txPublicData tenv) (_txExecutionConfig tenv)
| Managed namespace policy CAF
managedNamespacePolicy :: NamespacePolicy
managedNamespacePolicy = SmartNamespacePolicy False
(QualifiedName (ModuleName "ns" Nothing) "validate" def)
# NOINLINE managedNamespacePolicy #
mkMagicCapSlot :: Text -> CapSlot UserCapability
mkMagicCapSlot c = CapSlot CapCallStack cap []
where
mn = ModuleName "coin" Nothing
fqn = QualifiedName mn c def
cap = SigCapability fqn []
# INLINE mkMagicCapSlot #
| Build the ' ' for some pact code fed to the function . The ' value '
the ' ' .
buildExecParsedCode
:: Maybe (ChainwebVersion, BlockHeight)
-> Maybe Value
-> Text
-> IO (ExecMsg ParsedCode)
buildExecParsedCode chainCtx value code = maybe (go Null) go value
where
go val = case parsePact chainCtx code of
Right !t -> pure $! ExecMsg t val
Left err -> internalError $ "buildExecParsedCode: parse failed: " <> T.pack err
publicMetaOf :: Command (Payload PublicMeta ParsedCode) -> PublicMeta
publicMetaOf = _pMeta . _cmdPayload
# INLINE publicMetaOf #
| Retrieve the optional Network identifier from a command
networkIdOf :: Command (Payload PublicMeta ParsedCode) -> Maybe NetworkId
networkIdOf = _pNetworkId . _cmdPayload
rounding to the nearest stu .
gasSupplyOf :: Gas -> GasPrice -> GasSupply
gasSupplyOf gas (GasPrice (ParsedDecimal gp)) = GasSupply (ParsedDecimal gs)
where
gs = toCoinUnit ((fromIntegral gas) * gp)
# INLINE gasSupplyOf #
| Round to the nearest
toCoinUnit :: Decimal -> Decimal
toCoinUnit = roundTo 12
gasLog :: Text -> TransactionM db ()
gasLog m = do
l <- view txGasLogger
rk <- view txRequestKey
for_ l $ \logger ->
liftIO $! logLog logger "INFO" $! T.unpack m <> ": " <> show rk
debug :: Text -> TransactionM db ()
debug s = do
l <- view txLogger
rk <- view txRequestKey
liftIO $! logLog l "DEBUG" $! T.unpack s <> ": " <> show rk
| Denotes fatal failure points in the tx exec process
fatal :: Text -> TransactionM db a
fatal e = do
l <- view txLogger
rk <- view txRequestKey
liftIO
$! logLog l "ERROR"
$! "critical transaction failure: "
<> sshow rk <> ": " <> T.unpack e
throwM $ PactTransactionExecError (fromUntypedHash $ unRequestKey rk) e
logError :: Text -> TransactionM db ()
logError msg = view txLogger >>= \l -> liftIO $! logLog l "ERROR" (T.unpack msg)
infoLog :: Text -> TransactionM db ()
infoLog msg = view txLogger >>= \l -> liftIO $! logLog l "INFO" (T.unpack msg)
|
1c72e25ebf574f096b1ea71f153a5f762ce5e478a98aad14cae1173dde5cba25 | janestreet/universe | signal.ml | type t = int
(* this function is a copy&paste from stdune *)
let name =
let table =
let open Sys in
[ (sigabrt, "ABRT")
; (sigalrm, "ALRM")
; (sigfpe, "FPE")
; (sighup, "HUP")
; (sigill, "ILL")
; (sigint, "INT")
; (sigkill, "KILL")
; (sigpipe, "PIPE")
; (sigquit, "QUIT")
; (sigsegv, "SEGV")
; (sigterm, "TERM")
; (sigusr1, "USR1")
; (sigusr2, "USR2")
; (sigchld, "CHLD")
; (sigcont, "CONT")
; (sigstop, "STOP")
; (sigtstp, "TSTP")
; (sigttin, "TTIN")
; (sigttou, "TTOU")
; (sigvtalrm, "VTALRM")
; (sigprof, "PROF")
; (sigbus, "BUS")
; (sigpoll, "POLL")
; (sigsys, "SYS")
; (sigtrap, "TRAP")
; (sigurg, "URG")
; (sigxcpu, "XCPU")
; (sigxfsz, "XFSZ")
]
in
fun (n : int) ->
match List.assoc_opt n table with
| None ->
if n > 0 then
Printf.sprintf "%d" n
else
Printf.sprintf "caml:%d" n
| Some s -> s
| null | https://raw.githubusercontent.com/janestreet/universe/b6cb56fdae83f5d55f9c809f1c2a2b50ea213126/shexp/process-lib/src/signal.ml | ocaml | this function is a copy&paste from stdune | type t = int
let name =
let table =
let open Sys in
[ (sigabrt, "ABRT")
; (sigalrm, "ALRM")
; (sigfpe, "FPE")
; (sighup, "HUP")
; (sigill, "ILL")
; (sigint, "INT")
; (sigkill, "KILL")
; (sigpipe, "PIPE")
; (sigquit, "QUIT")
; (sigsegv, "SEGV")
; (sigterm, "TERM")
; (sigusr1, "USR1")
; (sigusr2, "USR2")
; (sigchld, "CHLD")
; (sigcont, "CONT")
; (sigstop, "STOP")
; (sigtstp, "TSTP")
; (sigttin, "TTIN")
; (sigttou, "TTOU")
; (sigvtalrm, "VTALRM")
; (sigprof, "PROF")
; (sigbus, "BUS")
; (sigpoll, "POLL")
; (sigsys, "SYS")
; (sigtrap, "TRAP")
; (sigurg, "URG")
; (sigxcpu, "XCPU")
; (sigxfsz, "XFSZ")
]
in
fun (n : int) ->
match List.assoc_opt n table with
| None ->
if n > 0 then
Printf.sprintf "%d" n
else
Printf.sprintf "caml:%d" n
| Some s -> s
|
7cbb567ec6b55610566dbf52db38b164fefb4326d4eb15b6c6b23d343e630019 | ucsd-progsys/dsolve | stable_sort2.ml | let show x = x
let rec split_aux lst left right =
match lst with
| [] -> (left, right)
| [x] -> (x :: left, right)
| x :: y :: ys -> split_aux ys (x :: left) (y :: right)
let split lst =
split_aux lst [] []
let rec len xs =
match xs with
| [] -> 0
| x::xs' -> 1 + len xs'
let rec chop k l l' =
if k = 0 then (l,l') else begin
match l with
| x::t -> chop (k-1) t (x::l')
| _ -> assert false
end
let rec rev k ys zs =
match ys with
| [] -> zs
| y::ys' -> rev y ys' (y::zs)
let reverse xs =
match xs with [] -> []
| x::xs' -> rev x (x::xs') []
let rec rev_append w l1 l2 =
match l1 with
| [] -> l2
| a :: l -> rev_append (show a) l (a :: l2)
let rec rev_append_rev w l1 l2 =
match l1 with
| [] -> l2
| a :: l -> rev_append_rev (show a) l (a :: l2)
let rev_merge l1 l2 =
let rec revm w l1 l2 accu =
match l1, l2 with
| [], l2 -> rev_append w l2 accu
| l1, [] -> rev_append w l1 accu
| h1::t1, h2::t2 ->
if h1 <= h2
then revm h1 t1 (h2::t2) (h1::accu)
else revm h2 (h1::t1) t2 (h2::accu) in
match l1, l2 with
| [],[] -> []
| [], h2::t2 -> revm h2 [] (h2::t2) []
| h1::t1,[] -> revm h1 (h1::t1) [] []
| h1::t1, h2::t2 ->
if h1 <= h2 then revm h1 (h1::t1) (h2::t2) []
else revm h2 (h1::t1) (h2::t2) []
let rev_merge_rev l1 l2 =
let rec revm w l1 l2 accu =
match l1, l2 with
| [], l2 -> rev_append_rev w l2 accu
| l1, [] -> rev_append_rev w l1 accu
| h1::t1, h2::t2 ->
if h1 > h2
then revm h1 t1 (h2::t2) (h1::accu)
else revm h2 (h1::t1) t2 (h2::accu) in
match l1, l2 with
| [],[] -> []
| [], h2::t2 -> revm h2 [] (h2::t2) []
| h1::t1,[] -> revm h1 (h1::t1) [] []
| h1::t1, h2::t2 ->
if h1 > h2 then revm h1 (h1::t1) (h2::t2) []
else revm h2 (h1::t1) (h2::t2) []
let rec ssort l =
match l with
| [] -> []
| x1::[] -> [x1]
| x1 :: x2 :: [] ->
if x1 <= x2 then [x1; x2] else [x2; x1]
| x1 :: x2 :: x3 :: [] ->
if x1 <= x2 then begin
if x2 <= x3 then [x1; x2; x3]
else if x1 <= x3 then [x1; x3; x2]
else [x3; x1; x2]
end else begin
if x1 <= x3 then [x2; x1; x3]
else if x2 <= x3 then [x2; x3; x1]
else [x3; x2; x1]
end
| x1 :: x2 :: x3 :: _ ->
let (l1,l2) = split l in
let s1 = ssort l1 in
let s2 = ssort l2 in
let s = rev_merge s1 s2 in
reverse s
let stable_sort l =
let rec sort n l =
match l with
| [] -> []
| x1 :: [] -> [x1]
| x1 :: x2 :: [] ->
if x1 <= x2 then [x1; x2] else [x2; x1]
| x1 :: x2 :: x3 :: [] ->
if x1 <= x2 then begin
if x2 <= x3 then [x1; x2; x3]
else if x1 <= x3 then [x1; x3; x2]
else [x3; x1; x2]
end else begin
if x1 <= x3 then [x2; x1; x3]
else if x2 <= x3 then [x2; x3; x1]
else [x3; x2; x1]
end
| l ->
let n1 = n asr 1 in
let n2 = n - n1 in
let (l1,l2) = chop n1 l [] in
let s1 = rev_sort n1 l1 in
let s2 = rev_sort n2 l2 in
rev_merge_rev s1 s2
and rev_sort n l =
match l with
| [] -> []
| x1 :: [] -> [x1]
| x1 :: x2 :: [] ->
if x1 > x2 then [x1; x2] else [x2; x1]
| x1 :: x2 :: x3 :: [] ->
if x1 > x2 then begin
if x2 > x3 then [x1; x2; x3]
else if x1 > x3 then [x1; x3; x2]
else [x3; x1; x2]
end else begin
if x1 > x3 then [x2; x1; x3]
else if x2 > x3 then [x2; x3; x1]
else [x3; x2; x1]
end
| l ->
let n1 = n asr 1 in
let n2 = n - n1 in
let (l1,l2) = chop n1 l [] in
let s1 = sort n1 l1 in
let s2 = sort n2 l2 in
rev_merge s1 s2
in
let n = len l in
if n < 2 then l else
sort n l
let rec sortcheck l =
match l with
| [] -> ()
| x :: [] -> ()
| x :: y :: ys ->
assert (x <= y); sortcheck (y :: ys)
let check xs =
let xs' = ssort xs in
let _ = sortcheck xs' in
let xs'' = stable_sort xs in
let _ = sortcheck xs'' in
()
| null | https://raw.githubusercontent.com/ucsd-progsys/dsolve/bfbbb8ed9bbf352d74561e9f9127ab07b7882c0c/postests/stable_sort2.ml | ocaml | let show x = x
let rec split_aux lst left right =
match lst with
| [] -> (left, right)
| [x] -> (x :: left, right)
| x :: y :: ys -> split_aux ys (x :: left) (y :: right)
let split lst =
split_aux lst [] []
let rec len xs =
match xs with
| [] -> 0
| x::xs' -> 1 + len xs'
let rec chop k l l' =
if k = 0 then (l,l') else begin
match l with
| x::t -> chop (k-1) t (x::l')
| _ -> assert false
end
let rec rev k ys zs =
match ys with
| [] -> zs
| y::ys' -> rev y ys' (y::zs)
let reverse xs =
match xs with [] -> []
| x::xs' -> rev x (x::xs') []
let rec rev_append w l1 l2 =
match l1 with
| [] -> l2
| a :: l -> rev_append (show a) l (a :: l2)
let rec rev_append_rev w l1 l2 =
match l1 with
| [] -> l2
| a :: l -> rev_append_rev (show a) l (a :: l2)
let rev_merge l1 l2 =
let rec revm w l1 l2 accu =
match l1, l2 with
| [], l2 -> rev_append w l2 accu
| l1, [] -> rev_append w l1 accu
| h1::t1, h2::t2 ->
if h1 <= h2
then revm h1 t1 (h2::t2) (h1::accu)
else revm h2 (h1::t1) t2 (h2::accu) in
match l1, l2 with
| [],[] -> []
| [], h2::t2 -> revm h2 [] (h2::t2) []
| h1::t1,[] -> revm h1 (h1::t1) [] []
| h1::t1, h2::t2 ->
if h1 <= h2 then revm h1 (h1::t1) (h2::t2) []
else revm h2 (h1::t1) (h2::t2) []
let rev_merge_rev l1 l2 =
let rec revm w l1 l2 accu =
match l1, l2 with
| [], l2 -> rev_append_rev w l2 accu
| l1, [] -> rev_append_rev w l1 accu
| h1::t1, h2::t2 ->
if h1 > h2
then revm h1 t1 (h2::t2) (h1::accu)
else revm h2 (h1::t1) t2 (h2::accu) in
match l1, l2 with
| [],[] -> []
| [], h2::t2 -> revm h2 [] (h2::t2) []
| h1::t1,[] -> revm h1 (h1::t1) [] []
| h1::t1, h2::t2 ->
if h1 > h2 then revm h1 (h1::t1) (h2::t2) []
else revm h2 (h1::t1) (h2::t2) []
let rec ssort l =
match l with
| [] -> []
| x1::[] -> [x1]
| x1 :: x2 :: [] ->
if x1 <= x2 then [x1; x2] else [x2; x1]
| x1 :: x2 :: x3 :: [] ->
if x1 <= x2 then begin
if x2 <= x3 then [x1; x2; x3]
else if x1 <= x3 then [x1; x3; x2]
else [x3; x1; x2]
end else begin
if x1 <= x3 then [x2; x1; x3]
else if x2 <= x3 then [x2; x3; x1]
else [x3; x2; x1]
end
| x1 :: x2 :: x3 :: _ ->
let (l1,l2) = split l in
let s1 = ssort l1 in
let s2 = ssort l2 in
let s = rev_merge s1 s2 in
reverse s
let stable_sort l =
let rec sort n l =
match l with
| [] -> []
| x1 :: [] -> [x1]
| x1 :: x2 :: [] ->
if x1 <= x2 then [x1; x2] else [x2; x1]
| x1 :: x2 :: x3 :: [] ->
if x1 <= x2 then begin
if x2 <= x3 then [x1; x2; x3]
else if x1 <= x3 then [x1; x3; x2]
else [x3; x1; x2]
end else begin
if x1 <= x3 then [x2; x1; x3]
else if x2 <= x3 then [x2; x3; x1]
else [x3; x2; x1]
end
| l ->
let n1 = n asr 1 in
let n2 = n - n1 in
let (l1,l2) = chop n1 l [] in
let s1 = rev_sort n1 l1 in
let s2 = rev_sort n2 l2 in
rev_merge_rev s1 s2
and rev_sort n l =
match l with
| [] -> []
| x1 :: [] -> [x1]
| x1 :: x2 :: [] ->
if x1 > x2 then [x1; x2] else [x2; x1]
| x1 :: x2 :: x3 :: [] ->
if x1 > x2 then begin
if x2 > x3 then [x1; x2; x3]
else if x1 > x3 then [x1; x3; x2]
else [x3; x1; x2]
end else begin
if x1 > x3 then [x2; x1; x3]
else if x2 > x3 then [x2; x3; x1]
else [x3; x2; x1]
end
| l ->
let n1 = n asr 1 in
let n2 = n - n1 in
let (l1,l2) = chop n1 l [] in
let s1 = sort n1 l1 in
let s2 = sort n2 l2 in
rev_merge s1 s2
in
let n = len l in
if n < 2 then l else
sort n l
let rec sortcheck l =
match l with
| [] -> ()
| x :: [] -> ()
| x :: y :: ys ->
assert (x <= y); sortcheck (y :: ys)
let check xs =
let xs' = ssort xs in
let _ = sortcheck xs' in
let xs'' = stable_sort xs in
let _ = sortcheck xs'' in
()
| |
a6b811e3904562e6e5dbd0a48185b1914ee552693e19ef1576a011028714f710 | Kalimehtar/gtk-cffi | adjustment.lisp | ;;;
;;; adjustment.lisp -- GtkAdjustment
;;;
Copyright ( C ) 2012 , < >
;;;
(in-package :gtk-cffi)
(defclass adjustment (g-object)
())
(defcfun gtk-adjustment-new :pointer
(value :double) (lower :double) (upper :double)
(step-increment :double) (page-increment :double) (page-size :double))
(defmethod gconstructor ((adjustment adjustment) &key value lower upper
step-increment page-increment page-size)
(initialize adjustment '(value lower upper
step-increment page-increment page-size))
(gtk-adjustment-new value lower upper
step-increment page-increment page-size))
(defslots adjustment
value :double
lower :double
page-increment :double
page-size :double
step-increment :double
upper :double)
(deffuns adjustment
(clamp-page :void (lower :double) (upper :double))
(changed :void)
(value-changed :void)
(:get minimum-increment :double))
(defcfun gtk-adjustment-configure :pointer
(adjustment pobject) (value :double) (lower :double) (upper :double)
(step-increment :double) (page-increment :double) (page-size :double))
(defmethod reinitialize-instance ((adjustment adjustment)
&key value lower upper
step-increment page-increment page-size)
(gtk-adjustment-configure adjustment value lower upper
step-increment page-increment page-size))
| null | https://raw.githubusercontent.com/Kalimehtar/gtk-cffi/fbd8a40a2bbda29f81b1a95ed2530debfe2afe9b/gtk/adjustment.lisp | lisp |
adjustment.lisp -- GtkAdjustment
| Copyright ( C ) 2012 , < >
(in-package :gtk-cffi)
(defclass adjustment (g-object)
())
(defcfun gtk-adjustment-new :pointer
(value :double) (lower :double) (upper :double)
(step-increment :double) (page-increment :double) (page-size :double))
(defmethod gconstructor ((adjustment adjustment) &key value lower upper
step-increment page-increment page-size)
(initialize adjustment '(value lower upper
step-increment page-increment page-size))
(gtk-adjustment-new value lower upper
step-increment page-increment page-size))
(defslots adjustment
value :double
lower :double
page-increment :double
page-size :double
step-increment :double
upper :double)
(deffuns adjustment
(clamp-page :void (lower :double) (upper :double))
(changed :void)
(value-changed :void)
(:get minimum-increment :double))
(defcfun gtk-adjustment-configure :pointer
(adjustment pobject) (value :double) (lower :double) (upper :double)
(step-increment :double) (page-increment :double) (page-size :double))
(defmethod reinitialize-instance ((adjustment adjustment)
&key value lower upper
step-increment page-increment page-size)
(gtk-adjustment-configure adjustment value lower upper
step-increment page-increment page-size))
|
5b417fced224b24a0bcdaf30fa4f0b7f7c0cae4d530895f7e85352cdb67bd2d2 | janestreet/sexp_grammar | test_disobedient_generator.ml | open! Base
open! Import
open Disobedient_generator.Private
(* silence unused constructor warnings *)
[@@@warning "-37"]
(* silence unused type warnings *)
[@@@warning "-34"]
module type S = sig
type t [@@deriving sexp_grammar]
end
(* This expect test is deterministic, but unstable. Small changes in percentages are
expected if quickcheck distributions or random seeds change. *)
let%expect_test "Yield of invalid sexps" =
let test (module M : S) =
let generator = create_unfiltered M.t_sexp_grammar in
let accepts =
Staged.unstage (Sexp_grammar.validate_sexp M.t_sexp_grammar) >> Result.is_ok
in
Base_quickcheck.Test.with_sample_exn generator ~f:(fun sequence ->
let valid = Sequence.count sequence ~f:accepts in
let num_values = Sequence.length sequence in
let wasted = Core.Percent.of_mult Float.(of_int valid /. of_int num_values) in
print_s [%message (wasted : Core.Percent.t)])
in
(* variants *)
test
(module struct
type t =
| T0
| T1 of [ `A of int list option ]
| T2 of
{ required : bool * float
; optional : string option [@sexp.option]
}
[@@deriving sexp_grammar]
end);
[%expect {| (wasted 19.67%) |}];
(* polymorphic variants *)
test
(module struct
type t =
[ `T0
| `T1 of [ `A of int list option ]
| `T2 of bool * int
]
[@@deriving sexp_grammar]
end);
[%expect {| (wasted 5.77%) |}];
(* records *)
test
(module struct
type t =
{ bool : bool
; float : float
; int_list : int list
}
[@@deriving sexp_grammar]
end);
[%expect {| (wasted 18.23%) |}];
(* very permissive record, as in some config files *)
test
(module struct
type t =
{ default : bool [@default true]
; option : int option [@sexp.option]
; list : bool list [@sexp.list]
}
[@@deriving sexp_grammar] [@@allow_extra_fields]
end);
[%expect {| (wasted 58.92%) |}];
ignore ()
;;
| null | https://raw.githubusercontent.com/janestreet/sexp_grammar/2906f35acd036522f6d9dc4d4229673f05f2ea56/validation/test/test_disobedient_generator.ml | ocaml | silence unused constructor warnings
silence unused type warnings
This expect test is deterministic, but unstable. Small changes in percentages are
expected if quickcheck distributions or random seeds change.
variants
polymorphic variants
records
very permissive record, as in some config files | open! Base
open! Import
open Disobedient_generator.Private
[@@@warning "-37"]
[@@@warning "-34"]
module type S = sig
type t [@@deriving sexp_grammar]
end
let%expect_test "Yield of invalid sexps" =
let test (module M : S) =
let generator = create_unfiltered M.t_sexp_grammar in
let accepts =
Staged.unstage (Sexp_grammar.validate_sexp M.t_sexp_grammar) >> Result.is_ok
in
Base_quickcheck.Test.with_sample_exn generator ~f:(fun sequence ->
let valid = Sequence.count sequence ~f:accepts in
let num_values = Sequence.length sequence in
let wasted = Core.Percent.of_mult Float.(of_int valid /. of_int num_values) in
print_s [%message (wasted : Core.Percent.t)])
in
test
(module struct
type t =
| T0
| T1 of [ `A of int list option ]
| T2 of
{ required : bool * float
; optional : string option [@sexp.option]
}
[@@deriving sexp_grammar]
end);
[%expect {| (wasted 19.67%) |}];
test
(module struct
type t =
[ `T0
| `T1 of [ `A of int list option ]
| `T2 of bool * int
]
[@@deriving sexp_grammar]
end);
[%expect {| (wasted 5.77%) |}];
test
(module struct
type t =
{ bool : bool
; float : float
; int_list : int list
}
[@@deriving sexp_grammar]
end);
[%expect {| (wasted 18.23%) |}];
test
(module struct
type t =
{ default : bool [@default true]
; option : int option [@sexp.option]
; list : bool list [@sexp.list]
}
[@@deriving sexp_grammar] [@@allow_extra_fields]
end);
[%expect {| (wasted 58.92%) |}];
ignore ()
;;
|
7ae4fdbd3adba393c634a81d77985e9edf6e55c63df4a434164b31a93b356d5e | fyquah/hardcaml_zprize | prove_modulo_adder_subtractor_pipe.mli | open Core
val test
: op:[ `Add | `Sub ]
-> bits:int
-> stages:int
-> num_inputs:int
-> unit Or_error.t
| null | https://raw.githubusercontent.com/fyquah/hardcaml_zprize/553b1be10ae9b977decbca850df6ee2d0595e7ff/libs/field_ops/test/prove_modulo_adder_subtractor_pipe.mli | ocaml | open Core
val test
: op:[ `Add | `Sub ]
-> bits:int
-> stages:int
-> num_inputs:int
-> unit Or_error.t
| |
c92eff63bd5013f0104ad985207b58da402d157ee6aab343f423c935902e8b4c | ddmcdonald/sparser | create-categories.lisp | ;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:(SPARSER COMMON-LISP) -*-
copyright ( c ) 2013 - 2019 -- all rights reserved
Copyright ( c ) 2007 - 2010 BBNT Solutions LLC . All Rights Reserved
;;;
;;; File: "create-categories"
Module : " grammar;rules : SDM&P :
Version : July 2019
Initiated 2/9/07 . Elaborated through 8/6 . Refactored the head form
elevator 2/4/08 . Added cases through 4/24 , then through 6/16 .
;; 8/14/08 Fell through the assumption that there would be a form category
to elevate : edge over a period in paragraph running under fire
settings . ( 2/10/10 ) Considered extending reify - segment - head - if - needed
;; for the case of literal words in rules, but it lead to coniptions with "."
0.2 4/17/10 Uppercase category names were returning nil . Changed
;; elevation of segment edge of verb cases to vg from VP.
( 1/23/12 ) cleaned up . Trying to find duplication . 4/1/13 found it .
( 4/14/14 ) Added case to generalize - segment - edge
1/17/2015 make the segment edge for " GTP - mediated " a verb+ed .
4/26/13 put guards in revise - form - of - nospace - edge - if - necessary to
;; accommodate fall-through from incomplete ns-operations.
(in-package :sparser)
;;;--------------------------
;;; Generalizing form labels
;;;--------------------------
(defun elevate-head-edge-form-if-needed (edge)
"Called from refify-segment-head-if-needed, which is called when
we want to just-cover-segment rather than attempt to analyze the
debris."
(let* ((form-category (edge-form edge))
(symbol (when form-category
;; edges formed by :literal-in-a-rule don't have
;; form categories.
(cat-symbol form-category))))
(when symbol
(case symbol
((category::np-head
category::number)) ;; "the one that"
(category::quantifier) ;; "[just no] pleasing"
((category::common-noun
category::common-noun/plural
category::proper-noun) ;; over a name-word
(setf (edge-form edge) category::np-head))
(category::modal) ;; "can can they ...
(category::verb) ;; "are are"
(category::vg) ;; staying here w/o better evidence
(category::verb+ed
(setf (edge-form edge) (category-named 'vg)))
(category::ends-in-s)
(category::ends-in-ing)
(category::ends-in-ed)
(otherwise
(unless *cfg-flag*
(break "New case of form category of edge over segment head: ~a"
(edge-form edge))))))))
(defun generalize-segment-edge-form-if-needed (edge)
"Called from most segment analyzing routines.
We have completed a minimal phrase with this edge. Its form may still
reflect a word-level category. We want to elevate that to its phrase-level
equivalent."
(declare (special *delay-generalization-of-verb-edge-form*))
(let* ((form-category (edge-form edge))
(symbol (when form-category (cat-symbol form-category))))
(when symbol
(case symbol
;; no change cases
((category::np
category::proper-name
category::proper-noun
category::pronoun
category::wh-pronoun
category::reflexive/pronoun
))
(category::adjp)
(category::subordinate-conjunction)
(category::conjunction)
(category::interjection)
(category::adjunct)
(category::comparative)
(category::comparative-adjp)
(category::superlative-adjp)
(category::superlative-adjective)
(category::possessive)
(category::quantifier)
(category::adjective)
(category::spatial-adjective)
(category::temporal-adjective)
(category::adverb)
(category::preposition)
(category::spatial-preposition)
(category::spatio-temporal-preposition)
(category::comparative)
(category::comparative-adjp)
(category::comparative-adjective)
(category::comparative-adverb)
(category::superlative)
(category::superlative-adjective)
(category::superlative-adverb)
(category::superlative-adjp)
(category::s)
(category::subj+verb)
(category::vp)
(category::vg)
(category::vg+ing)
(category::vg+ed)
(category::vp+ing)
(category::vp+ed)
(category::pp)
(category::vg+passive)
(category::subordinate-clause)
(category::to-comp) ;; happens in handling of "to be <adj>"
;; categories giving morphological properties
;; cases where we want to generalize
((category::n-bar
category::number
category::noun
category::common-noun
category::common-noun/plural
category::np-head
category::det
category::demonstrative) ;; "that"
(setf (edge-form edge) category::np))
((category::verb
category::verb+s
;;category::verb+ed
category::verb+present
category::verb+past
category::verb+passive
category::infinitive
category::modal)
(unless *delay-generalization-of-verb-edge-form*
(setf (edge-form edge) category::vg)))
((category::verb+ing)
(setf (edge-form edge) category::vg+ing))
((category::verb+ed)
(setf (edge-form edge) category::vg+ed))
(category::transitive-clause-without-object)
(category::post-ordinal)
(otherwise
(unless *cfg-flag*
(push-debug `(,edge))
(break "~a is a new case of form category of edge over segment: ~a~
~%in ~a"
(edge-form edge) edge)))))))
(defun elevate-form-given-subcat (new-edge edge pattern)
;; called from check-for-subcatorized-pps when the pattern has succeeded
;; and created the new edge. ///Ought to specifiy this explicitly,
;; but this move of elevating
assumes that the subcat terms go to the right
(edge-form edge)))
(case (cat-symbol head-form)
(category::s)
(category::vg
(setf (edge-form new-edge) category::vp))
(category::adjective
(setf (edge-form new-edge) category::adjp))
(category::np)
(otherwise
(push-debug `(,new-edge ,edge ,pattern))
(break "New case of head edge needing elevation: ~a" head-form)))))
(defun revise-form-of-nospace-edge-if-necessary (edge right-edge)
"Does what it's name suggests. Called from several edge-forming
no-space cases that don't have enough information to be sure
about the edge they're creating."
(unless edge
Got a null edge on " "
(return-from revise-form-of-nospace-edge-if-necessary nil))
(when (eq right-edge :find-it)
(setq right-edge (edge-right-daughter edge)))
(when (and right-edge (edge-p right-edge))
;; Happens when called from collect-ns-seqment-into-word
;; because the parse at that level returned a unary edge/
(let ((current-form (edge-form edge))
(form-of-last-edge (when right-edge (edge-form right-edge))))
(cond
((and form-of-last-edge
(verb-category? form-of-last-edge))
as in January sentnece 1 " GAP – mediated hydrolysis "
(setf (edge-form edge) form-of-last-edge))
((eq current-form category::np)
(setf (edge-form edge) category::n-bar))
((or (noun-category? current-form)
(eq current-form category::n-bar)))
(t ;; usually it's a verbal category
(setf (edge-form edge) category::n-bar)))
;; But we might want to overrule that if the left edge
;; of the pair carries more information
(when (and form-of-last-edge
(eq form-of-last-edge category::adjective))
;; and what others? any modifier-category?
(setf (edge-form edge) category::adjective)))))
;;;-----------------------
;;; operations over heads
;;;-----------------------
(defun reify-segment-head-if-needed ()
Runs for side - effects within the routines of sdm / analyze - segment
(let ((edge (edge-over-segment-head)))
(if edge
(then
(elevate-head-edge-form-if-needed edge)
(let ((referent (edge-referent edge)))
(unless (typep referent 'individual) ;; gets psi as well
(typecase referent
(referential-category
(set-edge-referent edge
(instantiate-reified-segment-category referent)))
(mixin-category) ;; "can"
(word) ;; "."
(otherwise
(break "New case: ~a~%~a" (type-of referent) referent))))))
(reify-segment-head-as-a-category))))
(defun reify-segment-head-as-a-category ()
(multiple-value-bind (word pos-before pos-after)
(head-word-of-segment)
(multiple-value-bind (category rule)
(find-or-define-kind (word-pname word))
(let ((edge (install-preterminal-edge rule word pos-before pos-after)))
(setf (edge-form edge) (category-named 'np-head))
(set-edge-referent edge
(instantiate-reified-segment-category category))
edge))))
(defun instantiate-reified-segment-category (ref-category)
;; Very simple with just the category to go on.
(find-or-make/individual ref-category nil))
| null | https://raw.githubusercontent.com/ddmcdonald/sparser/5a16c18417f725575e8c8c4a58fde433519e86cb/Sparser/code/s/grammar/rules/sdmp/create-categories.lisp | lisp | -*- Mode:LISP; Syntax:Common-Lisp; Package:(SPARSER COMMON-LISP) -*-
File: "create-categories"
rules : SDM&P :
8/14/08 Fell through the assumption that there would be a form category
for the case of literal words in rules, but it lead to coniptions with "."
elevation of segment edge of verb cases to vg from VP.
accommodate fall-through from incomplete ns-operations.
--------------------------
Generalizing form labels
--------------------------
edges formed by :literal-in-a-rule don't have
form categories.
"the one that"
"[just no] pleasing"
over a name-word
"can can they ...
"are are"
staying here w/o better evidence
no change cases
happens in handling of "to be <adj>"
categories giving morphological properties
cases where we want to generalize
"that"
category::verb+ed
called from check-for-subcatorized-pps when the pattern has succeeded
and created the new edge. ///Ought to specifiy this explicitly,
but this move of elevating
Happens when called from collect-ns-seqment-into-word
because the parse at that level returned a unary edge/
usually it's a verbal category
But we might want to overrule that if the left edge
of the pair carries more information
and what others? any modifier-category?
-----------------------
operations over heads
-----------------------
gets psi as well
"can"
"."
Very simple with just the category to go on. | copyright ( c ) 2013 - 2019 -- all rights reserved
Copyright ( c ) 2007 - 2010 BBNT Solutions LLC . All Rights Reserved
Version : July 2019
Initiated 2/9/07 . Elaborated through 8/6 . Refactored the head form
elevator 2/4/08 . Added cases through 4/24 , then through 6/16 .
to elevate : edge over a period in paragraph running under fire
settings . ( 2/10/10 ) Considered extending reify - segment - head - if - needed
0.2 4/17/10 Uppercase category names were returning nil . Changed
( 1/23/12 ) cleaned up . Trying to find duplication . 4/1/13 found it .
( 4/14/14 ) Added case to generalize - segment - edge
1/17/2015 make the segment edge for " GTP - mediated " a verb+ed .
4/26/13 put guards in revise - form - of - nospace - edge - if - necessary to
(in-package :sparser)
(defun elevate-head-edge-form-if-needed (edge)
"Called from refify-segment-head-if-needed, which is called when
we want to just-cover-segment rather than attempt to analyze the
debris."
(let* ((form-category (edge-form edge))
(symbol (when form-category
(cat-symbol form-category))))
(when symbol
(case symbol
((category::np-head
((category::common-noun
category::common-noun/plural
(setf (edge-form edge) category::np-head))
(category::verb+ed
(setf (edge-form edge) (category-named 'vg)))
(category::ends-in-s)
(category::ends-in-ing)
(category::ends-in-ed)
(otherwise
(unless *cfg-flag*
(break "New case of form category of edge over segment head: ~a"
(edge-form edge))))))))
(defun generalize-segment-edge-form-if-needed (edge)
"Called from most segment analyzing routines.
We have completed a minimal phrase with this edge. Its form may still
reflect a word-level category. We want to elevate that to its phrase-level
equivalent."
(declare (special *delay-generalization-of-verb-edge-form*))
(let* ((form-category (edge-form edge))
(symbol (when form-category (cat-symbol form-category))))
(when symbol
(case symbol
((category::np
category::proper-name
category::proper-noun
category::pronoun
category::wh-pronoun
category::reflexive/pronoun
))
(category::adjp)
(category::subordinate-conjunction)
(category::conjunction)
(category::interjection)
(category::adjunct)
(category::comparative)
(category::comparative-adjp)
(category::superlative-adjp)
(category::superlative-adjective)
(category::possessive)
(category::quantifier)
(category::adjective)
(category::spatial-adjective)
(category::temporal-adjective)
(category::adverb)
(category::preposition)
(category::spatial-preposition)
(category::spatio-temporal-preposition)
(category::comparative)
(category::comparative-adjp)
(category::comparative-adjective)
(category::comparative-adverb)
(category::superlative)
(category::superlative-adjective)
(category::superlative-adverb)
(category::superlative-adjp)
(category::s)
(category::subj+verb)
(category::vp)
(category::vg)
(category::vg+ing)
(category::vg+ed)
(category::vp+ing)
(category::vp+ed)
(category::pp)
(category::vg+passive)
(category::subordinate-clause)
((category::n-bar
category::number
category::noun
category::common-noun
category::common-noun/plural
category::np-head
category::det
(setf (edge-form edge) category::np))
((category::verb
category::verb+s
category::verb+present
category::verb+past
category::verb+passive
category::infinitive
category::modal)
(unless *delay-generalization-of-verb-edge-form*
(setf (edge-form edge) category::vg)))
((category::verb+ing)
(setf (edge-form edge) category::vg+ing))
((category::verb+ed)
(setf (edge-form edge) category::vg+ed))
(category::transitive-clause-without-object)
(category::post-ordinal)
(otherwise
(unless *cfg-flag*
(push-debug `(,edge))
(break "~a is a new case of form category of edge over segment: ~a~
~%in ~a"
(edge-form edge) edge)))))))
(defun elevate-form-given-subcat (new-edge edge pattern)
assumes that the subcat terms go to the right
(edge-form edge)))
(case (cat-symbol head-form)
(category::s)
(category::vg
(setf (edge-form new-edge) category::vp))
(category::adjective
(setf (edge-form new-edge) category::adjp))
(category::np)
(otherwise
(push-debug `(,new-edge ,edge ,pattern))
(break "New case of head edge needing elevation: ~a" head-form)))))
(defun revise-form-of-nospace-edge-if-necessary (edge right-edge)
"Does what it's name suggests. Called from several edge-forming
no-space cases that don't have enough information to be sure
about the edge they're creating."
(unless edge
Got a null edge on " "
(return-from revise-form-of-nospace-edge-if-necessary nil))
(when (eq right-edge :find-it)
(setq right-edge (edge-right-daughter edge)))
(when (and right-edge (edge-p right-edge))
(let ((current-form (edge-form edge))
(form-of-last-edge (when right-edge (edge-form right-edge))))
(cond
((and form-of-last-edge
(verb-category? form-of-last-edge))
as in January sentnece 1 " GAP – mediated hydrolysis "
(setf (edge-form edge) form-of-last-edge))
((eq current-form category::np)
(setf (edge-form edge) category::n-bar))
((or (noun-category? current-form)
(eq current-form category::n-bar)))
(setf (edge-form edge) category::n-bar)))
(when (and form-of-last-edge
(eq form-of-last-edge category::adjective))
(setf (edge-form edge) category::adjective)))))
(defun reify-segment-head-if-needed ()
Runs for side - effects within the routines of sdm / analyze - segment
(let ((edge (edge-over-segment-head)))
(if edge
(then
(elevate-head-edge-form-if-needed edge)
(let ((referent (edge-referent edge)))
(typecase referent
(referential-category
(set-edge-referent edge
(instantiate-reified-segment-category referent)))
(otherwise
(break "New case: ~a~%~a" (type-of referent) referent))))))
(reify-segment-head-as-a-category))))
(defun reify-segment-head-as-a-category ()
(multiple-value-bind (word pos-before pos-after)
(head-word-of-segment)
(multiple-value-bind (category rule)
(find-or-define-kind (word-pname word))
(let ((edge (install-preterminal-edge rule word pos-before pos-after)))
(setf (edge-form edge) (category-named 'np-head))
(set-edge-referent edge
(instantiate-reified-segment-category category))
edge))))
(defun instantiate-reified-segment-category (ref-category)
(find-or-make/individual ref-category nil))
|
25f922e2d283fac1e798390adc282d841cd616bb906b4afd804e50ac47a0517d | mflatt/macro-dsl-tutorial | point-with-method.rkt | #lang racket/base
(require "with-method.rkt"
racket/math)
(define point-class
(class
(hash 'get-x
(lambda (this) (get-field this 'x))
'get-y
(lambda (this) (get-field this 'y))
'set-x
(lambda (this v) (set-field! this 'x v))
'set-y
(lambda (this v) (set-field! this 'y v))
'rotate
(lambda (this degrees)
(define pt (make-rectangular
(get-field this 'x)
(get-field this 'y)))
(define new-pt (make-polar
(magnitude pt)
(+ (angle pt) (* pi (/ degrees 180)))))
(set-field! this 'x (real-part new-pt))
(set-field! this 'y (imag-part new-pt))))
(hash 'x 0
'y 1)))
(define a-pt (make-object point-class 0 5))
(send a-pt set-x 10)
(send a-pt rotate 90)
(send a-pt get-x)
(send a-pt get-y)
(define N 100000)
(time
(for ([i (in-range N)])
(send a-pt rotate 1)))
(time
(with-method ([rot (a-pt rotate)])
(for ([i (in-range N)])
(rot 1))))
| null | https://raw.githubusercontent.com/mflatt/macro-dsl-tutorial/76e979d24c2e05bd1457e3a859645cddfee0cbd1/objects/point-with-method.rkt | racket | #lang racket/base
(require "with-method.rkt"
racket/math)
(define point-class
(class
(hash 'get-x
(lambda (this) (get-field this 'x))
'get-y
(lambda (this) (get-field this 'y))
'set-x
(lambda (this v) (set-field! this 'x v))
'set-y
(lambda (this v) (set-field! this 'y v))
'rotate
(lambda (this degrees)
(define pt (make-rectangular
(get-field this 'x)
(get-field this 'y)))
(define new-pt (make-polar
(magnitude pt)
(+ (angle pt) (* pi (/ degrees 180)))))
(set-field! this 'x (real-part new-pt))
(set-field! this 'y (imag-part new-pt))))
(hash 'x 0
'y 1)))
(define a-pt (make-object point-class 0 5))
(send a-pt set-x 10)
(send a-pt rotate 90)
(send a-pt get-x)
(send a-pt get-y)
(define N 100000)
(time
(for ([i (in-range N)])
(send a-pt rotate 1)))
(time
(with-method ([rot (a-pt rotate)])
(for ([i (in-range N)])
(rot 1))))
| |
20a05dbc79b2c31e806e46603cabf2618541967d2e36363b49f4cad361d3c5fa | facebook/flow | ephemeralConnection.ml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
This module wraps the socket connection between the Flow server monitor and one of the short-
* lived clients .
* lived clients. *)
include FlowServerMonitorConnection.Make (struct
type in_message = ServerProt.Request.command_with_context
type out_message = MonitorProt.monitor_to_client_message
end)
| null | https://raw.githubusercontent.com/facebook/flow/741104e69c43057ebd32804dd6bcc1b5e97548ea/src/monitor/connections/ephemeralConnection.ml | ocaml |
* Copyright ( c ) Meta Platforms , Inc. and affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Meta Platforms, Inc. and affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
This module wraps the socket connection between the Flow server monitor and one of the short-
* lived clients .
* lived clients. *)
include FlowServerMonitorConnection.Make (struct
type in_message = ServerProt.Request.command_with_context
type out_message = MonitorProt.monitor_to_client_message
end)
| |
c9a9bff3d2319bea69503cc93d7de3ea4c6871058cba93db2b45159c3d87b80c | MLstate/opalang | baseArg.mli |
Copyright © 2011 MLstate
This file is part of .
is free software : you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License , version 3 , as published by
the Free Software Foundation .
is distributed in the hope that it will be useful , but WITHOUT ANY
WARRANTY ; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE . See the GNU Affero General Public License for
more details .
You should have received a copy of the GNU Affero General Public License
along with . If not , see < / > .
Copyright © 2011 MLstate
This file is part of Opa.
Opa is free software: you can redistribute it and/or modify it under the
terms of the GNU Affero General Public License, version 3, as published by
the Free Software Foundation.
Opa is distributed in the hope that it will be useful, but WITHOUT ANY
WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
more details.
You should have received a copy of the GNU Affero General Public License
along with Opa. If not, see </>.
*)
type simple_completion =
| Nothing (** no completion possible *)
| File of string (** completion matching the bash pattern *)
| Dir (** any directory *)
| Oneof of string list (** a finite set of possibilities *)
type completion = {params : simple_completion list; stop : bool}
(**
The type of bash completions
The list represents the completion of consecutive arguments
The boolean is true when completion should stop here (after '--' for instance)
*)
type spec =
| Unit of (unit -> unit)
| Bool of (bool -> unit)
| Set of bool ref
| Clear of bool ref
| String of (string -> unit)
| Set_string of string ref
| Int of (int -> unit)
| Set_int of int ref
| Float of (float -> unit)
| Set_float of float ref
| Tuple of spec list
| Symbol of string list * (string -> unit)
| Rest of (string -> unit)
| Complete of spec * completion
* to be used when the spec does not allow automatic completion , but you can specify one
for example , with [ ( " --impl " , Arg . String _ , " " ) ] , it ca n't be guessed that
it should be completed with a file , so you should say
[ ( " --impl " , Arg . Complete ( Arg . String _ , { params=[File " * " ] ; stop = false } ) , " " ) ] instead
for example, with [("--impl", Arg.String _, "")], it can't be guessed that
it should be completed with a file, so you should say
[("--impl", Arg.Complete (Arg.String _, {params=[File "*"];stop=false}), "")] instead
*)
type key = string
type doc = string
type usage_msg = string
type anon_fun = string -> unit
exception Help of string
exception Bad of string
val parse : (key * spec * doc) list -> anon_fun -> usage_msg -> unit
val parse_argv : ?current:int ref -> string array ->
(key * spec * doc) list -> anon_fun -> usage_msg -> unit
val usage : (key * spec * doc) list -> usage_msg -> unit
val write_simple_manpage :
cmdname:string ->
section:int ->
?centerfooter:string ->
?leftfooter:string ->
?centerheader:string ->
?summary:string ->
?synopsis:string ->
?description:string ->
?options:(string * spec * string) list ->
?other:(string * string) list -> out_channel -> unit
val align : (key * spec * doc) list -> (key * spec * doc) list
(** beware, if you wish to call [add_bash_completion], you should do it before calling [align] *)
val current : int ref
val sort : (key * spec * doc) list -> (key * spec * doc) list
* sort the options by alphabetical order on the key .
if a key appear more than once in the spec list ,
only the first occurrence is keeped , while any other
occurrence is simply removed ( no error )
if a key appear more than once in the spec list,
only the first occurrence is keeped, while any other
occurrence is simply removed (no error) *)
val spec_fun_of_assoc : ('a -> unit) -> (string * 'a) list -> spec
val spec_of_assoc : 'a ref -> (string * 'a) list -> spec
val spec_opt_of_assoc : 'a option ref -> (string * 'a) list -> spec
val spec_of_opt_assoc : 'a ref -> 'a -> (string * 'a) list -> spec
val add_bash_completion :
?name:string ->
?names:string list ->
?default:simple_completion ->
(string * spec * string) list ->
(string * spec * string) list
(**
   Adds a --bash-completion option to the command line that generates a
   "bash_completion" for the given command line parser.
   Should be the last option added.
   @param name The name of the executable that completion will work on (eg qmlflat)
   @param names Same as [name], but several names can be given (eg [\[qmlflat;qmlflat.native\]])
   @param default The completion for anonymous argument
   @param arg The arguments for which completion will happen
   @return The same arguments with a new option --bash-completion
*)
(**
   Often, options in command line can be given grouped.
   Like : [-I "foo,bar,foobar"]
   This function splits a given argument into words. Char separators
   are : [';' ; ',' ; ' ']
*)
val split : string -> string list
| null | https://raw.githubusercontent.com/MLstate/opalang/424b369160ce693406cece6ac033d75d85f5df4f/ocamllib/libbase/baseArg.mli | ocaml | * no completion possible
* completion matching the bash pattern
* any directory
* a finite set of possibilities
*
The type of bash completions
The list represents the completion of consecutive arguments
The boolean is true when completion should stop here (after '--' for instance)
* beware, if you wish to call [add_bash_completion], you should do it before calling [align] |
(*
    Copyright © 2011 MLstate

    This file is part of Opa.

    Opa is free software: you can redistribute it and/or modify it under the
    terms of the GNU Affero General Public License, version 3, as published by
    the Free Software Foundation.

    Opa is distributed in the hope that it will be useful, but WITHOUT ANY
    WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
    FOR A PARTICULAR PURPOSE. See the GNU Affero General Public License for
    more details.

    You should have received a copy of the GNU Affero General Public License
    along with Opa. If not, see <http://www.gnu.org/licenses/>.
*)
type simple_completion =
type completion = {params : simple_completion list; stop : bool}
type spec =
| Unit of (unit -> unit)
| Bool of (bool -> unit)
| Set of bool ref
| Clear of bool ref
| String of (string -> unit)
| Set_string of string ref
| Int of (int -> unit)
| Set_int of int ref
| Float of (float -> unit)
| Set_float of float ref
| Tuple of spec list
| Symbol of string list * (string -> unit)
| Rest of (string -> unit)
| Complete of spec * completion
(** To be used when the spec does not allow automatic completion, but you
    can specify one.  For example, with [("--impl", Arg.String _, "")], it
    can't be guessed that it should be completed with a file, so you should
    say [("--impl", Arg.Complete (Arg.String _, {params=[File "*"];stop=false}), "")]
    instead. *)
type key = string
type doc = string
type usage_msg = string
type anon_fun = string -> unit
exception Help of string
exception Bad of string
val parse : (key * spec * doc) list -> anon_fun -> usage_msg -> unit
val parse_argv : ?current:int ref -> string array ->
(key * spec * doc) list -> anon_fun -> usage_msg -> unit
val usage : (key * spec * doc) list -> usage_msg -> unit
val write_simple_manpage :
cmdname:string ->
section:int ->
?centerfooter:string ->
?leftfooter:string ->
?centerheader:string ->
?summary:string ->
?synopsis:string ->
?description:string ->
?options:(string * spec * string) list ->
?other:(string * string) list -> out_channel -> unit
val align : (key * spec * doc) list -> (key * spec * doc) list
val current : int ref
val sort : (key * spec * doc) list -> (key * spec * doc) list
(** Sort the options by alphabetical order on the key.
    If a key appears more than once in the spec list,
    only the first occurrence is kept, while any other
    occurrence is simply removed (no error). *)
val spec_fun_of_assoc : ('a -> unit) -> (string * 'a) list -> spec
val spec_of_assoc : 'a ref -> (string * 'a) list -> spec
val spec_opt_of_assoc : 'a option ref -> (string * 'a) list -> spec
val spec_of_opt_assoc : 'a ref -> 'a -> (string * 'a) list -> spec
val add_bash_completion :
?name:string ->
?names:string list ->
?default:simple_completion ->
(string * spec * string) list ->
(string * spec * string) list
(**
   Adds a --bash-completion option to the command line that generates a
   "bash_completion" for the given command line parser.
   Should be the last option added.
   @param name The name of the executable that completion will work on (eg qmlflat)
   @param names Same as [name], but several names can be given (eg [\[qmlflat;qmlflat.native\]])
   @param default The completion for anonymous argument
   @param arg The arguments for which completion will happen
   @return The same arguments with a new option --bash-completion
*)
(**
   Often, options in command line can be given grouped.
   Like : [-I "foo,bar,foobar"]
   This function splits a given argument into words. Char separators
   are : [';' ; ',' ; ' ']
*)
val split : string -> string list
|
6b738f5b09a54ff22f84a601fc73d0dc9d507bbb1b2bd5bba6e44dc98266e30e | MagnusS/okra | test_reports.ml |
(*
 * Copyright (c) 2021 Magnus Skjegstad
 *
 * Permission to use, copy, modify, and distribute this software for any
 * purpose with or without fee is hereby granted, provided that the above
 * copyright notice and this permission notice appear in all copies.
 *
 * THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
 * WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
 * MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
 * ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
 * WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
 * ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
 * OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
 *)
(* Parse an aggregated weekly-report file [f] (markdown) into an Okra report.
   The input channel is closed even if parsing raises, via [Fun.protect];
   the original leaked the channel on a parse error. *)
let aggregate f =
  let ic = open_in f in
  Fun.protect
    ~finally:(fun () -> close_in_noerr ic)
    (fun () -> Okra.Report.of_markdown (Omd.of_channel ic))
(* Count how many KRs in report [p] carry the identifier [kr_id]. *)
let contains_kr_cnt p kr_id =
  let count = ref 0 in
  Okra.Report.iter (fun kr -> if kr.id = kr_id then incr count) p;
  !count

(* True when report [p] mentions [kr_id] at least once. *)
let contains_kr p kr_id = contains_kr_cnt p kr_id > 0
(* Fixture expectation: no unresolved New_KR entries remain, and KR123/124/125
   each appear exactly once after aggregation. *)
let test_newkr_replaced1 () =
  let res = aggregate "./reports/newkr_replaced1.acc" in
  Alcotest.(check bool) "new KR replaced" true (not (contains_kr res New_KR));
  Alcotest.(check int) "KR123 exists once" 1 (contains_kr_cnt res (ID "KR123"));
  Alcotest.(check int) "KR124 exists once" 1 (contains_kr_cnt res (ID "KR124"));
  Alcotest.(check int) "KR125 exists once" 1 (contains_kr_cnt res (ID "KR125"))
(* Fixture expectation: a New_KR entry survives aggregation, and the
   per-engineer time table attributes 1.0 to "eng1". *)
let test_newkr_exists1 () =
  let res = aggregate "./reports/newkr_exists1.acc" in
  Alcotest.(check bool) "new KR exists" true (contains_kr res New_KR);
  let res = Okra.Aggregate.by_engineer res in
  Alcotest.(check (float 0.0)) "eng1 time" 1.0 (Hashtbl.find res "eng1")
(* Fixture expectation: repeated mentions of KR123 collapse into a single
   item, and "eng1" ends up with 4.0 units of time in total. *)
let test_kr_agg1 () =
  let res = aggregate "./reports/kr_agg1.acc" in
  Alcotest.(check bool) "KR123 exists" true (contains_kr res (ID "KR123"));
  Alcotest.(check int)
    "KR123 aggregated into one item" 1
    (contains_kr_cnt res (ID "KR123"));
  let res = Okra.Aggregate.by_engineer res in
  (* also check that time adds up *)
  Alcotest.(check (float 0.0)) "eng1 time" 4.0 (Hashtbl.find res "eng1")
(* Alcotest test-case list exposed by this module. *)
let tests =
  [
    ("Test_kr_aggregation", `Quick, test_kr_agg1);
    ("Test_newkr_exists", `Quick, test_newkr_exists1);
    ("Test_newkr_replaced", `Quick, test_newkr_replaced1);
  ]
| null | https://raw.githubusercontent.com/MagnusS/okra/c4a922f3c9b86876acaaeca21a4770fc5224dbb9/test/test_reports.ml | ocaml | also check that time adds up |
* Copyright ( c ) 2021 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2021 Magnus Skjegstad <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
(* NOTE(review): this section duplicates the definitions above -- it is the
   extracted "code" column of the same dataset row.  Kept verbatim. *)
(* Parse an aggregated report file into an Okra report value. *)
let aggregate f =
  let ic = open_in f in
  let omd = Omd.of_channel ic in
  let p = Okra.Report.of_markdown omd in
  close_in ic;
  p
(* Count occurrences of [kr_id] among the KRs of report [p]. *)
let contains_kr_cnt p kr_id =
  let found = ref 0 in
  Okra.Report.iter
    (fun kr -> if kr.id = kr_id then found := !found + 1 else ())
    p;
  !found
(* True when [kr_id] occurs at least once in [p]. *)
let contains_kr p kr_id = contains_kr_cnt p kr_id > 0
(* Expect no unresolved New_KR entries; KR123/124/125 each appear once. *)
let test_newkr_replaced1 () =
  let res = aggregate "./reports/newkr_replaced1.acc" in
  Alcotest.(check bool) "new KR replaced" true (not (contains_kr res New_KR));
  Alcotest.(check int) "KR123 exists once" 1 (contains_kr_cnt res (ID "KR123"));
  Alcotest.(check int) "KR124 exists once" 1 (contains_kr_cnt res (ID "KR124"));
  Alcotest.(check int) "KR125 exists once" 1 (contains_kr_cnt res (ID "KR125"))
(* Expect a surviving New_KR entry and 1.0 time units attributed to "eng1". *)
let test_newkr_exists1 () =
  let res = aggregate "./reports/newkr_exists1.acc" in
  Alcotest.(check bool) "new KR exists" true (contains_kr res New_KR);
  let res = Okra.Aggregate.by_engineer res in
  Alcotest.(check (float 0.0)) "eng1 time" 1.0 (Hashtbl.find res "eng1")
(* Expect repeated KR123 mentions to aggregate to one item totalling 4.0. *)
let test_kr_agg1 () =
  let res = aggregate "./reports/kr_agg1.acc" in
  Alcotest.(check bool) "KR123 exists" true (contains_kr res (ID "KR123"));
  Alcotest.(check int)
    "KR123 aggregated into one item" 1
    (contains_kr_cnt res (ID "KR123"));
  let res = Okra.Aggregate.by_engineer res in
  (* also check that time adds up *)
  Alcotest.(check (float 0.0)) "eng1 time" 4.0 (Hashtbl.find res "eng1")
(* Alcotest test-case list for this module. *)
let tests =
  [
    ("Test_kr_aggregation", `Quick, test_kr_agg1);
    ("Test_newkr_exists", `Quick, test_newkr_exists1);
    ("Test_newkr_replaced", `Quick, test_newkr_replaced1);
  ]
|
66f25f6fecf845dd07ea9d78a700d346cddcb692c6d4d66a77d822d07000f334 | ruricolist/serapeum | generalized-arrays.lisp | (in-package :serapeum.tests)
;; Tests for Serapeum's generalized-array (APL-style) operators.
(def-suite generalized-arrays :in serapeum)
(in-suite generalized-arrays)
;; Return the sequence 1..n (1-based iota, built on RANGE).
(defun count* (n)
  (range 1 (1+ n)))
;; RESHAPE folds a flat count into shape (2 3 3 2); 2*3*3*2 = 36 cells,
;; so the raveled (flattened) result ends with 36.
(test reshape
  (let* ((count (count* 48))
         (shape '(2 3 3 2))
         (array (reshape shape count)))
    (is (equal (shape array) '(2 3 3 2)))
    (is (= (last-elt (ravel array)) 36))))
;; RAVEL flattens the index array produced by TELL in row-major order.
(test ravel
  (is (array= (ravel (tell '(2 3)))
              #((0 0) (0 1) (0 2) (1 0) (1 1) (1 2)))))
;; EACH maps a function over every cell of its argument.
(test each
  (is
   (array=
    '((1) (1 2) (1 2 3) (1 2 3 4))
    (each #'count* (count* 4)))))
;; Count, for each element of VALUES, its number of occurrences in ARRAY.
(defun frequency (values array)
  (flet ((eq? (x y) (eif (equal x y) 1 0)))
    (each #'sum
          (each-left
           values
           (op (each-right _ #'eq? _))
           array))))
;; EACH-LEFT fixes the left operand while mapping over the right.
(test each-left
  (is
   (array=
    '("abc" "abcd" "abcde")
    (each-left '(3 4 5) #'reshape "abcde")))
  (assert
   (array= '(5 3 1)
           (frequency "abc" "The cat sat on the baseball bat"))))
;; 1 if A is divisible by B, else 0 (numeric boolean for mask arithmetic).
(defun divisible-by (a b)
  (if (zerop (mod a b)) 1 0))
;; EACH-RIGHT fixes the right operand; 8 is divisible by 1,2,4,8 -> mask
;; #*11010001 over 1..8, whose sum is 4.
(test each-right
  (is
   (array= #*11010001
           (each-right 8 #'divisible-by (count* 8))))
  (is
   (= 4 (sum (each-right 8 #'divisible-by (count* 8))))))
;; Number of divisors of N: sum of the divisibility mask over 1..N.
(defun number-of-divisors (n)
  (sum (each-right n #'divisible-by (count* n))))
;; A prime has exactly two divisors (1 and itself).
(defun prime? (n)
  (= 2 (number-of-divisors n)))
;; Map the primality predicate over a whole array of numbers.
(defun sieve (numbers)
  (each #'prime? numbers))
(defun primes (n)
  (sieve (range (1+ n))))
(test prime-example
  (is (= 4 (number-of-divisors 8)))
  (is (prime? 7))
  (is (not (prime? 8)))
  (is (array= #(nil t t nil t nil t nil) (sieve (count* 8)))))
;; SUM agrees with REDUCE #'+ on empty, singleton, even- and odd-length inputs
;; (the two lengths exercise any pairwise/chunked summation path).
(test sum
  (is (= 0 (sum '())))
  (is (= 1 (sum '(1))))
  (let ((xs (range 1000)))
    (is (= (reduce #'+ xs)
           (sum xs))))
  (let ((xs (range 1001)))
    (is (= (reduce #'+ xs)
           (sum xs)))))
;; PROD agrees with REDUCE #'* on the same input shapes.
(test prod
  (is (= 1 (prod '())))
  (is (= 1 (prod '(1))))
  (let ((xs (range 1000)))
    (is (= (reduce #'* xs)
           (prod xs))))
  (let ((xs (range 1001)))
    (is (= (reduce #'* xs)
           (prod xs)))))
| null | https://raw.githubusercontent.com/ruricolist/serapeum/d98b4863d7cdcb8a1ed8478cc44ab41bdad5635b/tests/generalized-arrays.lisp | lisp | (in-package :serapeum.tests)
(def-suite generalized-arrays :in serapeum)
(in-suite generalized-arrays)
(defun count* (n)
(range 1 (1+ n)))
(test reshape
(let* ((count (count* 48))
(shape '(2 3 3 2))
(array (reshape shape count)))
(is (equal (shape array) '(2 3 3 2)))
(is (= (last-elt (ravel array)) 36))))
(test ravel
(is (array= (ravel (tell '(2 3)))
#((0 0) (0 1) (0 2) (1 0) (1 1) (1 2)))))
(test each
(is
(array=
'((1) (1 2) (1 2 3) (1 2 3 4))
(each #'count* (count* 4)))))
(defun frequency (values array)
(flet ((eq? (x y) (eif (equal x y) 1 0)))
(each #'sum
(each-left
values
(op (each-right _ #'eq? _))
array))))
(test each-left
(is
(array=
'("abc" "abcd" "abcde")
(each-left '(3 4 5) #'reshape "abcde")))
(assert
(array= '(5 3 1)
(frequency "abc" "The cat sat on the baseball bat"))))
(defun divisible-by (a b)
(if (zerop (mod a b)) 1 0))
(test each-right
(is
(array= #*11010001
(each-right 8 #'divisible-by (count* 8))))
(is
(= 4 (sum (each-right 8 #'divisible-by (count* 8))))))
(defun number-of-divisors (n)
(sum (each-right n #'divisible-by (count* n))))
(defun prime? (n)
(= 2 (number-of-divisors n)))
(defun sieve (numbers)
(each #'prime? numbers))
(defun primes (n)
(sieve (range (1+ n))))
(test prime-example
(is (= 4 (number-of-divisors 8)))
(is (prime? 7))
(is (not (prime? 8)))
(is (array= #(nil t t nil t nil t nil) (sieve (count* 8)))))
(test sum
(is (= 0 (sum '())))
(is (= 1 (sum '(1))))
(let ((xs (range 1000)))
(is (= (reduce #'+ xs)
(sum xs))))
(let ((xs (range 1001)))
(is (= (reduce #'+ xs)
(sum xs)))))
(test prod
(is (= 1 (prod '())))
(is (= 1 (prod '(1))))
(let ((xs (range 1000)))
(is (= (reduce #'* xs)
(prod xs))))
(let ((xs (range 1001)))
(is (= (reduce #'* xs)
(prod xs)))))
| |
4c999950fe5bec4c66e339b60e292e72d73b9e784564a9f60e96e388104a0ec9 | DaMSL/K3 | Evaluation.hs | {-# LANGUAGE DoAndIfThenElse #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE PatternGuards #-}
{-# LANGUAGE Rank2Types #-}
{-# LANGUAGE TemplateHaskell #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
{-# LANGUAGE TypeFamilies #-}
module Language.K3.Interpreter.Evaluation where
import Control.Arrow hiding ( (+++) )
import Control.Concurrent.MVar
import Control.Monad.Reader
import Control.Monad.State
import Data.Fixed
import Data.List
import Data.Maybe
import Data.Word (Word8)
import Language.K3.Core.Annotation
import Language.K3.Core.Annotation.Analysis
import Language.K3.Core.Common
import Language.K3.Core.Declaration
import Language.K3.Core.Expression
import Language.K3.Core.Literal
import Language.K3.Core.Type
import Language.K3.Core.Utils
import Language.K3.Analysis.Core
import Language.K3.Interpreter.Data.Types
import Language.K3.Interpreter.Data.Accessors
import Language.K3.Interpreter.Values
import Language.K3.Interpreter.Collection
import Language.K3.Interpreter.Utils
import Language.K3.Interpreter.Builtins
import Language.K3.Runtime.Engine
import Language.K3.Utils.Logger
import Language.K3.Utils.Pretty
$(loggingFunctions)
-- | Monadic message-passing primitive for the interpreter: delivers a
--   value to the named trigger at the given peer address through the
--   underlying engine.
sendE :: Address -> Identifier -> Value -> Interpretation ()
sendE addr n = liftEngine . send addr n
{- Interpretation -}

-- | Default values for specific types.
--   Base types map to their zero/empty value; option, indirection, tuple,
--   record and collection types are built recursively from the defaults of
--   their component types, with mutability qualifiers derived from the type
--   ('vQualOfType' / 'onQualifiedType').  Types with no sensible default
--   fall through to a run-time type error.
defaultValue :: K3 Type -> Interpretation Value
defaultValue (tag -> TBool) = return $ VBool False
defaultValue (tag -> TByte) = return $ VByte 0
defaultValue (tag -> TInt) = return $ VInt 0
defaultValue (tag -> TReal) = return $ VReal 0.0
defaultValue (tag -> TString) = return $ VString ""
defaultValue t@(tag -> TOption) = return $ VOption (Nothing, vQualOfType t)
defaultValue (tag -> TAddress) = return $ VAddress defaultAddress
-- An indirection wraps a fresh MVar around the child's default, tagged with
-- a memory-entity tag ('memEntTag') for identity tracking.
defaultValue (tag &&& children -> (TIndirection, [x])) =
  defaultValue x >>= (\y -> (\i tg -> (i, onQualifiedType x MemImmut MemMut, tg))
       <$> liftIO (newMVar y) <*> memEntTag y) >>= return . VIndirection
defaultValue (tag &&& children -> (TTuple, ch)) =
  mapM (\ct -> defaultValue ct >>= return . (, vQualOfType ct)) ch >>= return . VTuple
defaultValue (tag &&& children -> (TRecord ids, ch)) =
  mapM (\ct -> defaultValue ct >>= return . (, vQualOfType ct)) ch >>= return . VRecord . membersFromList . zip ids
-- A collection default is an empty collection realized against the type's
-- composed annotations (or an unannotated empty collection otherwise).
defaultValue (tag &&& annotations -> (TCollection, anns)) =
  (getComposedAnnotationT anns) >>= maybe (emptyCollection annIds) emptyAnnotatedCollection
  where annIds = namedTAnnotations anns

{- TODO: defaults for the remaining type constructors:
      TSource
      TSink
      TTrigger
      TBuiltIn TypeBuiltIn
      TForall [TypeVarDecl]
      TDeclaredVar Identifier
-}
defaultValue t = throwE . RunTimeTypeError $ "Cannot create default value for " ++ show t
-- | Interpretation of Constants.
--   Literal constants map directly onto their value representations; the
--   expression's annotations supply the mutability qualifier for 'CNone'
--   and the collection realization for 'CEmpty'.
constant :: Constant -> [Annotation Expression] -> Interpretation Value
constant (CBool b) _ = return $ VBool b
constant (CByte w) _ = return $ VByte w
constant (CInt i) _ = return $ VInt i
constant (CReal r) _ = return $ VReal r
constant (CString s) _ = return $ VString s
-- None carries the qualifier (mutable/immutable) drawn from the annotations.
constant (CNone _) anns = return $ VOption (Nothing, vQualOfAnnsE anns)
-- Empty-collection literals are realized against their composed annotations,
-- falling back to a plain (unannotated) empty collection.
constant (CEmpty _) anns =
  (getComposedAnnotationE anns) >>= maybe (emptyCollection annIds) emptyAnnotatedCollection
  where annIds = namedEAnnotations anns
-- | Applies one of three operator variants according to the runtime types of
--   the two numeric operands, with implicit promotion:
--     byte op byte            -> byte
--     byte/int combinations   -> int
--     any real operand        -> real
--   Any non-numeric operand pair falls through to the supplied error action.
numericOp :: (Word8 -> Word8 -> Word8)
          -> (Int -> Int -> Int)
          -> (Double -> Double -> Double)
          -> Interpretation Value -> Value -> Value -> Interpretation Value
numericOp byteOpF intOpF realOpF err a b =
  case (a, b) of
    (VByte x, VByte y) -> return . VByte $ byteOpF x y
    (VByte x, VInt y) -> return . VInt $ intOpF (fromIntegral x) y
    (VByte x, VReal y) -> return . VReal $ realOpF (fromIntegral x) y
    (VInt x, VByte y) -> return . VInt $ intOpF x (fromIntegral y)
    (VInt x, VInt y) -> return . VInt $ intOpF x y
    (VInt x, VReal y) -> return . VReal $ realOpF (fromIntegral x) y
    (VReal x, VByte y) -> return . VReal $ realOpF x (fromIntegral y)
    (VReal x, VInt y) -> return . VReal $ realOpF x (fromIntegral y)
    (VReal x, VReal y) -> return . VReal $ realOpF x y
    _ -> err
-- | Shared handling for numeric binary operators.  Both operand
--   expressions are evaluated (left first), then dispatched through
--   'numericOp', which performs byte/int/real promotion.
numeric :: (forall a. Num a => a -> a -> a)
        -> K3 Expression -> K3 Expression -> Interpretation Value
numeric op lhs rhs =
    expression lhs >>= \lv ->
      expression rhs >>= \rv ->
        numericOp op op op typeErr lv rv
  where typeErr = throwE $ RunTimeTypeError "Arithmetic Type Mis-Match"
-- | Similar to 'numeric' above, except that a zero value for the second
--   argument (the denominator) is rejected with a run-time error before
--   the operator is applied.
numericExceptZero :: (Word8 -> Word8 -> Word8)
                  -> (Int -> Int -> Int)
                  -> (Double -> Double -> Double)
                  -> K3 Expression -> K3 Expression -> Interpretation Value
numericExceptZero byteOpF intOpF realOpF lhs rhs = do
    lv <- expression lhs
    rv <- expression rhs
    rejectZero rv
    numericOp byteOpF intOpF realOpF typeErr lv rv
  where
    typeErr = throwE $ RunTimeTypeError "Arithmetic Type Mis-Match"
    zeroErr = throwE $ RunTimeInterpretationError "Zero denominator"
    -- Fail on a zero denominator of any numeric variant; all other values
    -- (including non-numerics, caught later by 'numericOp') pass through.
    rejectZero (VByte 0) = zeroErr
    rejectZero (VInt 0)  = zeroErr
    rejectZero (VReal 0) = zeroErr
    rejectZero _         = return ()
-- | Shared handling for boolean binary operators: both operands must
--   evaluate to 'VBool' values.
logic :: (Bool -> Bool -> Bool) -> K3 Expression -> K3 Expression -> Interpretation Value
logic op lhs rhs = do
    lv <- expression lhs
    rv <- expression rhs
    applyOp lv rv
  where
    applyOp (VBool x) (VBool y) = return . VBool $ x `op` y
    applyOp _ _ = throwE $ RunTimeTypeError "Invalid Boolean Operation"
-- | Shared handling for comparison operators.  The comparator itself is a
--   monadic value comparison (e.g. 'valueEq'); operands are evaluated
--   left-to-right and handed to it.
comparison :: (Value -> Value -> Interpretation Value)
           -> K3 Expression -> K3 Expression -> Interpretation Value
comparison cmp lhs rhs =
  expression lhs >>= \lv -> expression rhs >>= \rv -> cmp lv rv
-- | Shared handling for string binary operators: both operands must
--   evaluate to 'VString' values.
textual :: (String -> String -> String)
        -> K3 Expression -> K3 Expression -> Interpretation Value
textual op lhs rhs = do
    lv <- expression lhs
    rv <- expression rhs
    applyOp lv rv
  where
    applyOp (VString x) (VString y) = return . VString $ x `op` y
    applyOp _ _ = throwE $ RunTimeTypeError "Invalid String Operation"
-- | Interpretation of unary operators.

-- Arithmetic negation, defined on ints and reals only.
unary ONeg e = do
  v <- expression e
  case v of
    VInt i  -> return . VInt  $ negate i
    VReal r -> return . VReal $ negate r
    _       -> throwE $ RunTimeTypeError "Invalid Negation"

-- Boolean complement.
unary ONot e = do
  v <- expression e
  case v of
    VBool b -> return . VBool $ not b
    _       -> throwE $ RunTimeTypeError "Invalid Complement"

-- Any other operator is not unary.
unary _ _ = throwE $ RunTimeTypeError "Invalid Unary Operator"
-- | Interpretation of binary operators.
binary :: Operator -> K3 Expression -> K3 Expression -> Interpretation Value
-- | Standard numeric operators.
binary OAdd = numeric (+)
binary OSub = numeric (-)
binary OMul = numeric (*)
-- | Division and modulo handled similarly, but accounting for zero-division errors.
binary ODiv = numericExceptZero div div (/)
binary OMod = numericExceptZero mod mod mod'
-- | Logical operators
binary OAnd = logic (&&)
binary OOr = logic (||)
-- | Comparison operators
binary OEqu = comparison valueEq
binary ONeq = comparison valueNeq
binary OLth = comparison valueLt
binary OLeq = comparison valueLte
binary OGth = comparison valueGt
binary OGeq = comparison valueGte
-- | String operators
binary OConcat = textual (++)
-- | Function Application
-- The function and then the argument are evaluated; the argument is
-- freshened via 'freshenValue' (presumably a deep copy giving the callee
-- independent state -- TODO confirm its semantics).  The body runs with the
-- function's captured closure merged into the environment ('mergeE'); the
-- closure is pruned again afterwards ('pruneE') and the result freshened
-- before being returned.
binary OApp = \f x -> do
  f' <- expression f
  x' <- expression x >>= freshenValue
  case f' of
    VFunction (b, cl, _) -> withClosure cl $ b x'
    _ -> throwE $ RunTimeTypeError $ "Invalid Function Application on:\n" ++ pretty f
  where withClosure cl doApp = mergeE cl >> doApp >>= \r -> pruneE cl >> freshenValue r
-- | Message Passing
-- A send target must be a (trigger, address) pair; the payload is delivered
-- through the engine and the whole expression evaluates to unit.
binary OSnd = \target x -> do
  target' <- expression target
  x' <- expression x
  case target' of
    VTuple [(VTrigger (n, _, _), _), (VAddress addr, _)] -> sendE addr n x' >> return vunit
    _ -> throwE $ RunTimeTypeError "Invalid Trigger Target"
-- | Sequential expressions
-- Evaluates the first expression for its effects, yielding the second.
binary OSeq = \e1 e2 -> expression e1 >> expression e2
binary _ = \_ _ -> throwE $ RunTimeInterpretationError "Invalid binary operation"
-- | Interpretation of Expressions
expression :: K3 Expression -> Interpretation Value
expression e_ = traceExpression $ do
result <- expr e_
void $ buildProxyPath e_
return result
where
traceExpression :: Interpretation a -> Interpretation a
traceExpression m = do
let suOpt = spanUid (annotations e_)
pushed <- maybe (return False) (\su -> pushTraceUID su >> return True) suOpt
result <- m
void $ if pushed then popTraceUID else return ()
case suOpt of
Nothing -> return ()
Just (_, uid) -> do
(watched, wvars) <- (,) <$> isWatchedExpression uid <*> getWatchedVariables uid
void $ if watched then logIStateMI else return ()
void $ mapM_ (prettyWatchedVar $ maximum $ map length wvars) wvars
return result
prettyWatchedVar :: Int -> Identifier -> Interpretation ()
prettyWatchedVar w i =
lookupE i >>= liftEngine . prettyIEnvEntry defaultPrintConfig
>>= liftIO . putStrLn . ((i ++ replicate (max (w - length i) 0) ' ' ++ " => ") ++)
-- TODO: dataspace bind aliases
buildProxyPath :: K3 Expression -> Interpretation ()
buildProxyPath e =
case e @~ isBindAliasAnnotation of
Just (EAnalysis (BindAlias i)) -> appendAlias (Named i)
Just (EAnalysis (BindFreshAlias i)) -> appendAlias (Temporary i)
Just (EAnalysis (BindAliasExtension i)) -> appendAliasExtension i
Nothing -> return ()
Just _ -> throwE $ RunTimeInterpretationError "Invalid bind alias annotation matching"
isBindAliasAnnotation :: Annotation Expression -> Bool
isBindAliasAnnotation (EAnalysis (BindAlias _)) = True
isBindAliasAnnotation (EAnalysis (BindFreshAlias _)) = True
isBindAliasAnnotation (EAnalysis (BindAliasExtension _)) = True
isBindAliasAnnotation _ = False
refreshEntry :: Identifier -> IEnvEntry Value -> Value -> Interpretation ()
refreshEntry n (IVal _) v = replaceE n (IVal v)
refreshEntry _ (MVal mv) v = liftIO (modifyMVar_ mv $ const $ return v)
lookupVQ :: Identifier -> Interpretation (Value, VQualifier)
lookupVQ i = lookupE i >>= valueQOfEntry
-- | Performs a write-back for a bind expression.
-- This retrieves the current binding values from the environment
-- and reconstructs a path value to replace the bind target.
refreshBindings :: Binder -> ProxyPath -> Value -> Interpretation ()
refreshBindings (BIndirection i) proxyPath bindV =
lookupVQ i >>= \case
(iV, MemMut) ->
replaceProxyPath proxyPath bindV iV (\oldV newPathV ->
case oldV of
VIndirection (mv, MemMut, _) ->
liftIO (modifyMVar_ mv $ const $ return newPathV) >> return oldV
_ -> throwE $ RunTimeTypeError "Invalid bind indirection target")
(_, _) -> return () -- Skip writeback to an immutable value.
refreshBindings (BTuple ts) proxyPath bindV =
mapM lookupVQ ts >>= \vqs ->
if any (/= MemImmut) $ map snd vqs
then replaceProxyPath proxyPath bindV (VTuple vqs) (\_ newPathV -> return newPathV)
else return () -- Skip writeback if all fields are immutable.
refreshBindings (BRecord ids) proxyPath bindV =
mapM lookupVQ (map snd ids) >>= \vqs ->
if any (/= MemImmut) $ map snd vqs
then replaceProxyPath proxyPath bindV
(VRecord $ membersFromList $ zip (map fst ids) vqs)
(\oldV newPathV -> mergeRecords oldV newPathV)
else return () -- Skip writebsack if all fields are immutable.
replaceProxyPath :: ProxyPath -> Value -> Value
-> (Value -> Value -> Interpretation Value)
-> Interpretation ()
replaceProxyPath proxyPath origV newComponentV refreshF =
case proxyPath of
(Named n):t -> do
entry <- lookupE n
oldV <- valueOfEntry entry
pathV <- reconstructPathValue t newComponentV oldV
refreshF oldV pathV >>= refreshEntry n entry
(Temporary _):t -> reconstructPathValue t newComponentV origV >>= refreshF origV >> return ()
_ -> throwE $ RunTimeInterpretationError "Invalid path in bind writeback"
reconstructPathValue :: ProxyPath -> Value -> Value -> Interpretation Value
reconstructPathValue [] newR@(VRecord _) oldR@(VRecord _) = mergeRecords oldR newR
reconstructPathValue [] v _ = return v
reconstructPathValue (Dereference:t) v (VIndirection (iv, q, tg)) =
liftIO (readMVar iv)
>>= reconstructPathValue t v
>>= \nv -> liftIO (modifyMVar_ iv $ const $ return nv) >> return (VIndirection (iv, q, tg))
reconstructPathValue (MatchOption:t) v (VOption (Just ov, q)) =
reconstructPathValue t v ov >>= \nv -> return $ VOption (Just nv, q)
reconstructPathValue ((TupleField i):t) v (VTuple vs) =
let (x,y) = splitAt i vs in
reconstructPathValue t v (fst $ last x) >>= \nv -> return $ VTuple ((init x) ++ [(nv, snd $ last x)] ++ y)
reconstructPathValue ((RecordField n):t) v (VRecord ivs) = do
fields <- flip mapMembers ivs (\fn (fv, fq) ->
if fn == n then reconstructPathValue t v fv >>= return . (, fq)
else return (fv, fq))
return $ VRecord fields
reconstructPathValue _ _ _ =
throwE $ RunTimeInterpretationError "Invalid path in bind writeback reconstruction"
| Merge two records , restricting to the domain of the first record , and
preferring values from the second argument for duplicates .
mergeRecords :: Value -> Value -> Interpretation Value
mergeRecords (VRecord r1) (VRecord r2) =
mapBindings (\n v -> maybe (return v) return $ lookupBinding n r2) r1 >>= return . VRecord
mergeRecords _ _ =
throwE $ RunTimeTypeError "Invalid bind record target"
expr :: K3 Expression -> Interpretation Value
-- | Interpretation of constant expressions.
expr (details -> (EConstant c, _, as)) = constant c as
-- | Interpretation of variable lookups.
expr (details -> (EVariable i, _, _)) =
lookupE i >>= \e -> valueOfEntry e >>= syncCollectionE i e
-- | Interpretation of option type construction expressions.
expr (tag &&& children -> (ESome, [x])) =
expression x >>= freshenValue >>= return . VOption . (, vQualOfExpr x) . Just
expr (details -> (ESome, _, _)) =
throwE $ RunTimeTypeError "Invalid Construction of Option"
-- | Interpretation of indirection type construction expressions.
expr (tag &&& children -> (EIndirect, [x])) = do
new_val <- expression x >>= freshenValue
(\a b -> VIndirection (a, vQualOfExpr x, b)) <$> liftIO (newMVar new_val) <*> memEntTag new_val
expr (details -> (EIndirect, _, _)) =
throwE $ RunTimeTypeError "Invalid Construction of Indirection"
-- | Interpretation of tuple construction expressions.
expr (tag &&& children -> (ETuple, cs)) =
mapM (\e -> expression e >>= freshenValue >>= return . (, vQualOfExpr e)) cs >>= return . VTuple
-- | Interpretation of record construction expressions.
expr (tag &&& children -> (ERecord is, cs)) =
mapM (\e -> expression e >>= freshenValue >>= return . (, vQualOfExpr e)) cs >>= return . VRecord . membersFromList . zip is
-- | Interpretation of function construction.
expr (details -> (ELambda i, [b], _)) =
mkFunction $ \v -> insertE i (IVal v) >> expression b >>= removeE i
where
mkFunction f = (\cl tg -> VFunction (f, cl, tg)) <$> closure <*> memEntTag f
TODO : currently , this definition of a closure captures
-- annotation member variables during annotation member initialization.
-- This invalidates the use of annotation member function contextualization
-- since the context is overridden by the closure whenever applying the
-- member function.
closure :: Interpretation (Closure Value)
closure = do
globals <- get >>= return . getGlobals
vars <- return $ filter (\n -> n /= i && n `notElem` globals) $ freeVariables b
vals <- mapM lookupE vars
envFromList $ zip vars vals
-- | Interpretation of unary/binary operators.
expr (details -> (EOperate otag, cs, _))
| otag `elem` [ONeg, ONot], [a] <- cs = unary otag a
| otherwise, [a, b] <- cs = binary otag a b
| otherwise = undefined
-- | Interpretation of Record Projection.
expr (details -> (EProject i, [r], _)) = expression r >>= syncCollection >>= \case
VRecord vm -> maybe (unknownField i) (return . fst) $ lookupMember i vm
VCollection (_, c) -> do
if null (realizationId c) then unannotatedCollection
else maybe (unknownCollectionMember i c) (return . fst)
$ lookupMember i $ collectionNS $ namespace c
v -> throwE . RunTimeTypeError $ "Invalid projection on value: " ++ show v
where unknownField i' = throwE . RunTimeTypeError $ "Unknown record field " ++ i'
unannotatedCollection = throwE . RunTimeTypeError $ "Invalid projection on an unannotated collection"
unknownCollectionMember i' c = throwE . RunTimeTypeError $ "Unknown collection member " ++ i' ++ " in collection " ++ show c
expr (details -> (EProject _, _, _)) = throwE $ RunTimeTypeError "Invalid Record Projection"
-- | Interpretation of Let-In Constructions.
expr (tag &&& children -> (ELetIn i, [e, b])) = do
entry <- expression e >>= freshenValue >>= entryOfValueE (e @~ isEQualified)
insertE i entry >> expression b >>= removeE i
expr (details -> (ELetIn _, _, _)) = throwE $ RunTimeTypeError "Invalid LetIn Construction"
-- | Interpretation of Assignment.
expr (details -> (EAssign i, [e], _)) = do
entry <- lookupE i
case entry of
MVal mv -> expression e >>= freshenValue >>= \v -> liftIO (modifyMVar_ mv $ const $ return v) >> return v
IVal _ -> throwE $ RunTimeInterpretationError
$ "Invalid assignment to an immutable variable: " ++ i
expr (details -> (EAssign _, _, _)) = throwE $ RunTimeTypeError "Invalid Assignment"
-- | Interpretation of If-Then-Else constructs.
expr (details -> (EIfThenElse, [p, t, e], _)) = expression p >>= \case
VBool True -> expression t
VBool False -> expression e
_ -> throwE $ RunTimeTypeError "Invalid Conditional Predicate"
expr (details -> (EAddress, [h, p], _)) = do
hv <- expression h
pv <- expression p
case (hv, pv) of
(VString host, VInt port) -> return $ VAddress $ Address (host, port)
_ -> throwE $ RunTimeTypeError "Invalid address"
expr (details -> (ESelf, _, _)) = lookupE annotationSelfId >>= valueOfEntry
-- | Interpretation of Case-Matches.
-- Case expressions behave like bind-as, i.e., w/ isolated bindings and writeback
expr (details -> (ECaseOf i, [e, s, n], _)) = do
void $ pushProxyFrame
targetV <- expression e
case targetV of
VOption (Just v, q) -> do
pp <- getProxyPath >>= \case
Just ((Named pn):t) -> return $ (Named pn):t
Just ((Temporary pn):t) -> return $ (Temporary pn):t
_ -> throwE $ RunTimeTypeError "Invalid proxy path in case-of expression"
void $ popProxyFrame
entry <- entryOfValueQ v q
insertE i entry
sV <- expression s
void $ lookupVQ i >>= \case
(iV, MemMut) -> replaceProxyPath pp targetV (VOption (Just iV, MemMut)) (\_ newPathV -> return newPathV)
_ -> return () -- Skip writeback for immutable values.
removeE i sV
VOption (Nothing, _) -> popProxyFrame >> expression n
_ -> throwE $ RunTimeTypeError "Invalid Argument to Case-Match"
expr (details -> (ECaseOf _, _, _)) = throwE $ RunTimeTypeError "Invalid Case-Match"
-- | Interpretation of Binding.
-- TODO: For now, all bindings are added in mutable fashion. This should be extracted from
-- the type inferred for the bind target expression.
expr (details -> (EBindAs b, [e, f], _)) = do
void $ pushProxyFrame
pc <- getPrintConfig <$> get
bv <- expression e
bp <- getProxyPath >>= \case
Just ((Named n):t) -> return $ (Named n):t
Just ((Temporary n):t) -> return $ (Temporary n):t
_ -> throwE $ RunTimeTypeError "Invalid bind path in bind-as expression"
void $ popProxyFrame
case (b, bv) of
(BIndirection i, VIndirection (r,q,_)) -> do
entry <- liftIO (readMVar r) >>= flip entryOfValueQ q
void $ insertE i entry
fV <- expression f
void $ refreshBindings b bp bv
removeE i fV
(BTuple ts, VTuple vs) -> do
let tupMems = membersFromList $ zip ts vs
bindAndRefresh bp bv tupMems
(BRecord ids, VRecord ivs) -> do
let (idls, ivls) = (map fst ids, boundNames ivs)
-- Testing the intersection with the bindings ensures every bound name
-- has a value, while also allowing us to bind a subset of the values.
if idls `intersect` ivls == idls
then do
let recordMems = membersFromList $ joinByKeys (,) idls ids ivs
bindAndRefresh bp bv recordMems
else throwE $ RunTimeTypeError "Invalid Bind-Pattern"
(binder, binderV) ->
throwE $ RunTimeTypeError $
"Bind Mis-Match: value is " ++ showPC (pc {convertToTuples=False}) binderV
++ " but bind is " ++ show binder
where
bindAndRefresh bp bv mems = do
bindings <- bindMembers mems
fV <- expression f
void $ refreshBindings b bp bv
unbindMembers bindings >> return fV
joinByKeys joinF keys l r =
catMaybes $ map (\k -> lookup k l >>= (\matchL -> lookupMember k r >>= return . joinF matchL)) keys
expr (details -> (EBindAs _,_,_)) = throwE $ RunTimeTypeError "Invalid Bind Construction"
expr _ = throwE $ RunTimeInterpretationError "Invalid Expression"
{- Literal interpretation -}
-- | Evaluates a literal AST node to its runtime value.
--   Scalar literals map directly onto their value constructors. Composite
--   literals (options, indirections, tuples, records, collections, addresses)
--   evaluate their children recursively, pairing each child value with the
--   mutability qualifier derived from that child's annotations.
literal :: K3 Literal -> Interpretation Value
literal (tag -> LBool b)   = return $ VBool b
literal (tag -> LByte b)   = return $ VByte b
literal (tag -> LInt i)    = return $ VInt i
literal (tag -> LReal r)   = return $ VReal r
literal (tag -> LString s) = return $ VString s
literal l@(tag -> LNone _) = return $ VOption (Nothing, vQualOfLit l)

literal (tag &&& children -> (LSome, [x])) =
  VOption . (, vQualOfLit x) . Just <$> literal x
literal (details -> (LSome, _, _)) = throwE $ RunTimeTypeError "Invalid option literal"

-- Indirections allocate a fresh MVar holding the child value, together with a
-- memoization entry tag for the stored value.
literal (tag &&& children -> (LIndirect, [x])) = do
  v  <- literal x
  mv <- liftIO $ newMVar v
  tg <- memEntTag v
  return $ VIndirection (mv, vQualOfLit x, tg)
literal (details -> (LIndirect, _, _)) = throwE $ RunTimeTypeError "Invalid indirection literal"

literal (tag &&& children -> (LTuple, ch)) = VTuple <$> mapM fieldOf ch
  where fieldOf c = (, vQualOfLit c) <$> literal c
literal (details -> (LTuple, _, _)) = throwE $ RunTimeTypeError "Invalid tuple literal"

literal (tag &&& children -> (LRecord ids, ch)) =
  VRecord . membersFromList . zip ids <$> mapM fieldOf ch
  where fieldOf c = (, vQualOfLit c) <$> literal c
literal (details -> (LRecord _, _, _)) = throwE $ RunTimeTypeError "Invalid record literal"

-- An empty collection is realized from the composed annotation when one is
-- available, and from the named annotations alone otherwise.
literal (details -> (LEmpty _, [], anns)) =
  getComposedAnnotationL anns
    >>= maybe (emptyCollection $ namedLAnnotations anns) emptyAnnotatedCollection
literal (details -> (LEmpty _, _, _)) = throwE $ RunTimeTypeError "Invalid empty literal"

-- Non-empty collection literals evaluate their elements first, then build an
-- initial collection through the composed annotation realization if present.
literal (details -> (LCollection _, elems, anns)) = do
  vals <- mapM literal elems
  getComposedAnnotationL anns >>= \case
    Nothing      -> initialCollection (namedLAnnotations anns) vals
    Just comboId -> initialAnnotatedCollection comboId vals

-- Addresses must evaluate to a string host and an integer port.
literal (details -> (LAddress, [h,p], _)) = do
  hostPort <- mapM literal [h, p]
  case hostPort of
    [VString a, VInt b] -> return . VAddress $ Address (a,b)
    _ -> throwE $ RunTimeTypeError "Invalid address literal"
literal (details -> (LAddress, _, _)) = throwE $ RunTimeTypeError "Invalid address literal"

literal _ = throwE $ RunTimeTypeError "Invalid literal"
{- Declaration interpretation -}
-- | Rebinds a trigger's environment entry, installing the supplied function
--   value as the trigger's body. Any non-function value is rejected with a
--   runtime type error.
replaceTrigger :: (HasSpan a, HasUID a) => Identifier -> [a] -> Value -> Interpretation ()
replaceTrigger n _ v = case v of
  VFunction (f, _, tg) -> replaceE n . IVal $ VTrigger (n, Just f, tg)
  _                    -> throwE $ RunTimeTypeError ("Invalid body for trigger " ++ n)
-- | Interprets a global declaration of the given name and type, adding its
--   value to the interpretation environment. Dispatch is by the global's
--   type: sinks become triggers, sources and functions are skipped (they are
--   initialized elsewhere), collections are realized from their annotation
--   combination, and all remaining globals are initialized from their
--   initializer expression or a type-derived default value.
global :: Identifier -> K3 Type -> Maybe (K3 Expression) -> Interpretation ()
global n (details -> (TSink, _, anns)) (Just e) = expression e >>= replaceTrigger n anns
global _ (details -> (TSink, _, _)) Nothing = throwE $ RunTimeInterpretationError "Invalid sink trigger"
-- ^ Interpret and add sink triggers to the program environment.

-- | Sources have already been translated into K3 code.
global _ (tag -> TSource) _ = return ()

-- | Functions have already been initialized as part of the program environment.
global _ (isTFunction -> True) _ = return ()

-- | Add collection declaration, generating the collection type given by the
--   annotation combination on demand. @n@ is the name of the variable.
--   If the name is already bound, only the composed annotation is realized.
global n t@(details -> (TCollection, _, _)) eOpt = elemE n >>= \case
    True -> void . getComposedAnnotationT $ annotations t
    False -> (getComposedAnnotationT $ annotations t) >>= initializeCollection . maybe "" id
  where
    -- An empty comboId encodes the absence of a composed annotation
    -- realization (see the 'maybe "" id' above), selecting the fallback cases.
    initializeCollection comboId = case eOpt of
      Nothing | not (null comboId) -> emptyAnnotatedCollection comboId >>= entryOfValueT (t @~ isTQualified) >>= insertE n
      Just e | not (null comboId) -> expression e >>= verifyInitialCollection comboId
      -- TODO: error on these cases. All collections must have at least the builtin Collection annotation.
      Nothing -> emptyCollection (namedTAnnotations $ annotations t) >>= entryOfValueT (t @~ isTQualified) >>= insertE n
      Just e -> expression e >>= entryOfValueT (t @~ isTQualified) >>= insertE n

    -- Check that the initializer produced a collection realized with the same
    -- annotation combination as the declaration before binding it.
    verifyInitialCollection comboId = \case
      v@(VCollection (_, Collection _ _ cId)) ->
        if comboId == cId then entryOfValueT (t @~ isTQualified) v >>= insertE n
        else collInitError comboId cId
      _ -> collValError

    collInitError c c' = throwE . RunTimeTypeError $ "Invalid annotations on collection initializer for " ++ n ++ ": " ++ c ++ " and " ++ c'
    collValError = throwE . RunTimeTypeError $ "Invalid collection value " ++ n

-- | Instantiate all other globals in the interpretation environment,
--   evaluating the initializer when present and a default value otherwise.
--   Already-bound names are left untouched.
global n t eOpt = elemE n >>= \case
  True -> return ()
  False -> maybe (defaultValue t) expression eOpt >>= entryOfValueT (t @~ isTQualified) >>= insertE n
-- TODO: qualify names?
-- | Interprets every declaration within a role, in order.
--   The role identifier itself is currently unused.
role :: Identifier -> [K3 Declaration] -> Interpretation ()
role _ = mapM_ declaration
-- | Interprets a top-level declaration, extending the interpretation
--   environment with globals, triggers, role contents, and annotation
--   definitions. Child declarations are interpreted after their parent.
declaration :: K3 Declaration -> Interpretation ()
declaration (tag &&& children -> (DGlobal n t eO, ch)) =
  debugDecl n t $ global n t eO >> mapM_ declaration ch
declaration (details -> (DTrigger n t e, cs, anns)) =
  debugDecl n t $ (expression e >>= replaceTrigger n anns) >> mapM_ declaration cs
declaration (tag &&& children -> (DRole r, ch)) = role r ch
declaration (tag -> DDataAnnotation n vdecls members) = annotation n vdecls members
-- Report unsupported declaration forms through the interpretation error
-- channel, matching the file's other catch-alls, rather than crashing with an
-- imprecise 'undefined' exception.
declaration _ = throwE $ RunTimeInterpretationError "Invalid declaration"
{- Annotations -}
-- | Interprets a data annotation declaration, initializing its members and
--   registering them as an annotation definition. An already-registered
--   definition is left untouched (idempotent re-registration).
annotation :: Identifier -> [TypeVarDecl] -> [AnnMemDecl] -> Interpretation ()
annotation n _ memberDecls = tryLookupADef n >>= \case
    Nothing -> addAnnotationDef
    Just _ -> return ()
  where
    -- Initialize members in four passes (lifted functions, lifted attributes,
    -- non-lifted functions, non-lifted attributes), then remove the temporary
    -- environment bindings and record the finished definition.
    addAnnotationDef = do
      (annMems, bindings) <- foldM initializeMembers
                                   (emptyMembers, emptyBindings)
                                   [liftedAttrFuns, liftedAttrs, attrFuns, attrs]
      _ <- unbindMembers bindings
      void $ modifyADefs $ (:) (n, annMems)

    -- | Initialize members, while adding each member declaration to the environment to
    --   support linear immediate initializer access.
    initializeMembers mbAcc spec = foldM (memberWithBindings spec) mbAcc memberDecls

    -- Initialize one member against a (lifted-ness, type predicate) spec;
    -- members that do not match the spec leave the accumulator unchanged.
    memberWithBindings (isLifted, matchF) mbAcc mem = do
      ivOpt <- annotationMember n isLifted matchF mem
      maybe (return mbAcc) (bindAndAppendMem mbAcc) ivOpt

    -- Wrap the member value per its qualifier (MVar for mutables) and insert
    -- it into both the member accumulator and the environment, so subsequent
    -- member initializers can reference it.
    bindAndAppendMem (memAcc, bindAcc) (memN,(v,q)) = do
      entry <- case q of
        MemImmut -> return $ IVal v
        MemMut -> liftIO (newMVar v) >>= return . MVal
      void $ insertE memN entry
      return (insertMember memN (v,q) memAcc, insertBinding memN entry bindAcc)

    -- Pass specifications: (lifted?, type predicate).
    (liftedAttrFuns, liftedAttrs) = ((True, isTFunction), (True, not . isTFunction))
    (attrFuns, attrs) = ((False, isTFunction), (False, not . isTFunction))
-- | Initializes a single annotation member that matches the requested
--   lifted-ness and type predicate, yielding its name together with the
--   initialized value and mutability qualifier, or Nothing when the member
--   declaration does not match.
annotationMember :: Identifier -> Bool -> (K3 Type -> Bool) -> AnnMemDecl
                 -> Interpretation (Maybe (Identifier, (Value, VQualifier)))
annotationMember annId matchLifted matchF annMem = case (matchLifted, annMem) of
  -- Members with an explicit initializer are evaluated now; members without
  -- one fall back to builtin implementations looked up by annotation and name.
  (True, Lifted Provides n t (Just e) _) | matchF t -> initializeMember n t e
  (False, Attribute Provides n t (Just e) _) | matchF t -> initializeMember n t e
  (True, Lifted Provides n t Nothing _) | matchF t -> builtinLiftedAttribute annId n t >>= return . builtinQual t
  (False, Attribute Provides n t Nothing _) | matchF t -> builtinAttribute annId n t >>= return . builtinQual t
  _ -> return Nothing
  where
    -- Evaluate the member's initializer expression and attach its qualifier.
    initializeMember n t e = expression e >>= \v -> return . Just $ (n, (v, memberQual t))

    -- Attach the member's qualifier to a builtin-provided value, if any.
    builtinQual t (Just (n,v)) = Just (n, (v, memberQual t))
    builtinQual _ Nothing = Nothing

    -- A member is mutable only when its type carries an explicit TMutable
    -- qualifier; otherwise it defaults to immutable.
    memberQual t = case t @~ isTQualified of
      Just TMutable -> MemMut
      _ -> MemImmut
| null | https://raw.githubusercontent.com/DaMSL/K3/51749157844e76ae79dba619116fc5ad9d685643/src/Language/K3/Interpreter/Evaluation.hs | haskell | # LANGUAGE DoAndIfThenElse #
# LANGUAGE Rank2Types #
| Monadic message passing primitive for the interpreter.
Interpretation
| Default values for specific types
| Interpretation of Constants.
| Common Numeric-Operation handling, with casing for int/real promotion.
| Common boolean operation handling.
| Common comparison operation handling.
| Common string operation handling.
| Interpretation of unary operators.
| Interpretation of unary negation of numbers.
| Interpretation of unary negation of booleans.
| Interpretation of binary operators.
| Standard numeric operators.
| Logical operators
| Comparison operators
| String operators
| Function Application
| Message Passing
| Sequential expressions
| Interpretation of Expressions
TODO: dataspace bind aliases
| Performs a write-back for a bind expression.
This retrieves the current binding values from the environment
and reconstructs a path value to replace the bind target.
Skip writeback to an immutable value.
Skip writeback if all fields are immutable.
Skip writebsack if all fields are immutable.
| Interpretation of constant expressions.
| Interpretation of variable lookups.
| Interpretation of option type construction expressions.
| Interpretation of indirection type construction expressions.
| Interpretation of tuple construction expressions.
| Interpretation of record construction expressions.
| Interpretation of function construction.
annotation member variables during annotation member initialization.
This invalidates the use of annotation member function contextualization
since the context is overridden by the closure whenever applying the
member function.
| Interpretation of unary/binary operators.
| Interpretation of Record Projection.
| Interpretation of Let-In Constructions.
| Interpretation of Assignment.
| Interpretation of If-Then-Else constructs.
| Interpretation of Case-Matches.
Case expressions behave like bind-as, i.e., w/ isolated bindings and writeback
Skip writeback for immutable values.
| Interpretation of Binding.
TODO: For now, all bindings are added in mutable fashion. This should be extracted from
the type inferred for the bind target expression.
Testing the intersection with the bindings ensures every bound name
has a value, while also allowing us to bind a subset of the values.
Literal interpretation
Declaration interpretation
^ Interpret and add sink triggers to the program environment.
| Functions have already been initialized as part of the program environment.
| Add collection declaration, generating the collection type given by the annotation
combination on demand.
n is the name of the variable
TODO: error on these cases. All collections must have at least the builtin Collection annotation.
| Instantiate all other globals in the interpretation environment.
Annotations
| Initialize members, while adding each member declaration to the environment to
support linear immediate initializer access. | # LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
# LANGUAGE PatternGuards #
# LANGUAGE TemplateHaskell #
# LANGUAGE TupleSections #
# LANGUAGE ViewPatterns #
# LANGUAGE TypeFamilies #
module Language.K3.Interpreter.Evaluation where
import Control.Arrow hiding ( (+++) )
import Control.Concurrent.MVar
import Control.Monad.Reader
import Control.Monad.State
import Data.Fixed
import Data.List
import Data.Maybe
import Data.Word (Word8)
import Language.K3.Core.Annotation
import Language.K3.Core.Annotation.Analysis
import Language.K3.Core.Common
import Language.K3.Core.Declaration
import Language.K3.Core.Expression
import Language.K3.Core.Literal
import Language.K3.Core.Type
import Language.K3.Core.Utils
import Language.K3.Analysis.Core
import Language.K3.Interpreter.Data.Types
import Language.K3.Interpreter.Data.Accessors
import Language.K3.Interpreter.Values
import Language.K3.Interpreter.Collection
import Language.K3.Interpreter.Utils
import Language.K3.Interpreter.Builtins
import Language.K3.Runtime.Engine
import Language.K3.Utils.Logger
import Language.K3.Utils.Pretty
$(loggingFunctions)
sendE :: Address -> Identifier -> Value -> Interpretation ()
sendE addr n val = liftEngine $ send addr n val
defaultValue :: K3 Type -> Interpretation Value
defaultValue (tag -> TBool) = return $ VBool False
defaultValue (tag -> TByte) = return $ VByte 0
defaultValue (tag -> TInt) = return $ VInt 0
defaultValue (tag -> TReal) = return $ VReal 0.0
defaultValue (tag -> TString) = return $ VString ""
defaultValue t@(tag -> TOption) = return $ VOption (Nothing, vQualOfType t)
defaultValue (tag -> TAddress) = return $ VAddress defaultAddress
defaultValue (tag &&& children -> (TIndirection, [x])) =
defaultValue x >>= (\y -> (\i tg -> (i, onQualifiedType x MemImmut MemMut, tg))
<$> liftIO (newMVar y) <*> memEntTag y) >>= return . VIndirection
defaultValue (tag &&& children -> (TTuple, ch)) =
mapM (\ct -> defaultValue ct >>= return . (, vQualOfType ct)) ch >>= return . VTuple
defaultValue (tag &&& children -> (TRecord ids, ch)) =
mapM (\ct -> defaultValue ct >>= return . (, vQualOfType ct)) ch >>= return . VRecord . membersFromList . zip ids
defaultValue (tag &&& annotations -> (TCollection, anns)) =
(getComposedAnnotationT anns) >>= maybe (emptyCollection annIds) emptyAnnotatedCollection
where annIds = namedTAnnotations anns
TODO :
TSource
TSink
TTrigger
TBuiltIn TypeBuiltIn
TForall [ TypeVarDecl ]
TDeclaredVar Identifier
TSource
TSink
TTrigger
TBuiltIn TypeBuiltIn
TForall [TypeVarDecl]
TDeclaredVar Identifier
-}
defaultValue t = throwE . RunTimeTypeError $ "Cannot create default value for " ++ show t
constant :: Constant -> [Annotation Expression] -> Interpretation Value
constant (CBool b) _ = return $ VBool b
constant (CByte w) _ = return $ VByte w
constant (CInt i) _ = return $ VInt i
constant (CReal r) _ = return $ VReal r
constant (CString s) _ = return $ VString s
constant (CNone _) anns = return $ VOption (Nothing, vQualOfAnnsE anns)
constant (CEmpty _) anns =
(getComposedAnnotationE anns) >>= maybe (emptyCollection annIds) emptyAnnotatedCollection
where annIds = namedEAnnotations anns
numericOp :: (Word8 -> Word8 -> Word8)
-> (Int -> Int -> Int)
-> (Double -> Double -> Double)
-> Interpretation Value -> Value -> Value -> Interpretation Value
numericOp byteOpF intOpF realOpF err a b =
case (a, b) of
(VByte x, VByte y) -> return . VByte $ byteOpF x y
(VByte x, VInt y) -> return . VInt $ intOpF (fromIntegral x) y
(VByte x, VReal y) -> return . VReal $ realOpF (fromIntegral x) y
(VInt x, VByte y) -> return . VInt $ intOpF x (fromIntegral y)
(VInt x, VInt y) -> return . VInt $ intOpF x y
(VInt x, VReal y) -> return . VReal $ realOpF (fromIntegral x) y
(VReal x, VByte y) -> return . VReal $ realOpF x (fromIntegral y)
(VReal x, VInt y) -> return . VReal $ realOpF x (fromIntegral y)
(VReal x, VReal y) -> return . VReal $ realOpF x y
_ -> err
numeric :: (forall a. Num a => a -> a -> a)
-> K3 Expression -> K3 Expression -> Interpretation Value
numeric op a b = do
a' <- expression a
b' <- expression b
numericOp op op op err a' b'
where err = throwE $ RunTimeTypeError "Arithmetic Type Mis-Match"
| Similar to numeric above , except disallow a zero value for the second argument .
numericExceptZero :: (Word8 -> Word8 -> Word8)
-> (Int -> Int -> Int)
-> (Double -> Double -> Double)
-> K3 Expression -> K3 Expression -> Interpretation Value
numericExceptZero byteOpF intOpF realOpF a b = do
a' <- expression a
b' <- expression b
void $ case b' of
VByte 0 -> throwE $ RunTimeInterpretationError "Zero denominator"
VInt 0 -> throwE $ RunTimeInterpretationError "Zero denominator"
VReal 0 -> throwE $ RunTimeInterpretationError "Zero denominator"
_ -> return ()
numericOp byteOpF intOpF realOpF err a' b'
where err = throwE $ RunTimeTypeError "Arithmetic Type Mis-Match"
logic :: (Bool -> Bool -> Bool) -> K3 Expression -> K3 Expression -> Interpretation Value
logic op a b = do
a' <- expression a
b' <- expression b
case (a', b') of
(VBool x, VBool y) -> return $ VBool $ op x y
_ -> throwE $ RunTimeTypeError "Invalid Boolean Operation"
comparison :: (Value -> Value -> Interpretation Value)
-> K3 Expression -> K3 Expression -> Interpretation Value
comparison op a b = do
a' <- expression a
b' <- expression b
op a' b'
textual :: (String -> String -> String)
-> K3 Expression -> K3 Expression -> Interpretation Value
textual op a b = do
a' <- expression a
b' <- expression b
case (a', b') of
(VString s1, VString s2) -> return . VString $ op s1 s2
_ -> throwE $ RunTimeTypeError "Invalid String Operation"
unary :: Operator -> K3 Expression -> Interpretation Value
unary ONeg a = expression a >>= \case
VInt i -> return $ VInt (negate i)
VReal r -> return $ VReal (negate r)
_ -> throwE $ RunTimeTypeError "Invalid Negation"
unary ONot a = expression a >>= \case
VBool b -> return $ VBool (not b)
_ -> throwE $ RunTimeTypeError "Invalid Complement"
unary _ _ = throwE $ RunTimeTypeError "Invalid Unary Operator"
binary :: Operator -> K3 Expression -> K3 Expression -> Interpretation Value
binary OAdd = numeric (+)
binary OSub = numeric (-)
binary OMul = numeric (*)
| Division and modulo handled similarly , but accounting zero - division errors .
binary ODiv = numericExceptZero div div (/)
binary OMod = numericExceptZero mod mod mod'
binary OAnd = logic (&&)
binary OOr = logic (||)
binary OEqu = comparison valueEq
binary ONeq = comparison valueNeq
binary OLth = comparison valueLt
binary OLeq = comparison valueLte
binary OGth = comparison valueGt
binary OGeq = comparison valueGte
binary OConcat = textual (++)
binary OApp = \f x -> do
f' <- expression f
x' <- expression x >>= freshenValue
case f' of
VFunction (b, cl, _) -> withClosure cl $ b x'
_ -> throwE $ RunTimeTypeError $ "Invalid Function Application on:\n" ++ pretty f
where withClosure cl doApp = mergeE cl >> doApp >>= \r -> pruneE cl >> freshenValue r
binary OSnd = \target x -> do
target' <- expression target
x' <- expression x
case target' of
VTuple [(VTrigger (n, _, _), _), (VAddress addr, _)] -> sendE addr n x' >> return vunit
_ -> throwE $ RunTimeTypeError "Invalid Trigger Target"
binary OSeq = \e1 e2 -> expression e1 >> expression e2
binary _ = \_ _ -> throwE $ RunTimeInterpretationError "Invalid binary operation"
expression :: K3 Expression -> Interpretation Value
expression e_ = traceExpression $ do
result <- expr e_
void $ buildProxyPath e_
return result
where
traceExpression :: Interpretation a -> Interpretation a
traceExpression m = do
let suOpt = spanUid (annotations e_)
pushed <- maybe (return False) (\su -> pushTraceUID su >> return True) suOpt
result <- m
void $ if pushed then popTraceUID else return ()
case suOpt of
Nothing -> return ()
Just (_, uid) -> do
(watched, wvars) <- (,) <$> isWatchedExpression uid <*> getWatchedVariables uid
void $ if watched then logIStateMI else return ()
void $ mapM_ (prettyWatchedVar $ maximum $ map length wvars) wvars
return result
prettyWatchedVar :: Int -> Identifier -> Interpretation ()
prettyWatchedVar w i =
lookupE i >>= liftEngine . prettyIEnvEntry defaultPrintConfig
>>= liftIO . putStrLn . ((i ++ replicate (max (w - length i) 0) ' ' ++ " => ") ++)
buildProxyPath :: K3 Expression -> Interpretation ()
buildProxyPath e =
case e @~ isBindAliasAnnotation of
Just (EAnalysis (BindAlias i)) -> appendAlias (Named i)
Just (EAnalysis (BindFreshAlias i)) -> appendAlias (Temporary i)
Just (EAnalysis (BindAliasExtension i)) -> appendAliasExtension i
Nothing -> return ()
Just _ -> throwE $ RunTimeInterpretationError "Invalid bind alias annotation matching"
isBindAliasAnnotation :: Annotation Expression -> Bool
isBindAliasAnnotation (EAnalysis (BindAlias _)) = True
isBindAliasAnnotation (EAnalysis (BindFreshAlias _)) = True
isBindAliasAnnotation (EAnalysis (BindAliasExtension _)) = True
isBindAliasAnnotation _ = False
refreshEntry :: Identifier -> IEnvEntry Value -> Value -> Interpretation ()
refreshEntry n (IVal _) v = replaceE n (IVal v)
refreshEntry _ (MVal mv) v = liftIO (modifyMVar_ mv $ const $ return v)
lookupVQ :: Identifier -> Interpretation (Value, VQualifier)
lookupVQ i = lookupE i >>= valueQOfEntry
refreshBindings :: Binder -> ProxyPath -> Value -> Interpretation ()
refreshBindings (BIndirection i) proxyPath bindV =
lookupVQ i >>= \case
(iV, MemMut) ->
replaceProxyPath proxyPath bindV iV (\oldV newPathV ->
case oldV of
VIndirection (mv, MemMut, _) ->
liftIO (modifyMVar_ mv $ const $ return newPathV) >> return oldV
_ -> throwE $ RunTimeTypeError "Invalid bind indirection target")
refreshBindings (BTuple ts) proxyPath bindV =
mapM lookupVQ ts >>= \vqs ->
if any (/= MemImmut) $ map snd vqs
then replaceProxyPath proxyPath bindV (VTuple vqs) (\_ newPathV -> return newPathV)
refreshBindings (BRecord ids) proxyPath bindV =
mapM lookupVQ (map snd ids) >>= \vqs ->
if any (/= MemImmut) $ map snd vqs
then replaceProxyPath proxyPath bindV
(VRecord $ membersFromList $ zip (map fst ids) vqs)
(\oldV newPathV -> mergeRecords oldV newPathV)
replaceProxyPath :: ProxyPath -> Value -> Value
-> (Value -> Value -> Interpretation Value)
-> Interpretation ()
replaceProxyPath proxyPath origV newComponentV refreshF =
case proxyPath of
(Named n):t -> do
entry <- lookupE n
oldV <- valueOfEntry entry
pathV <- reconstructPathValue t newComponentV oldV
refreshF oldV pathV >>= refreshEntry n entry
(Temporary _):t -> reconstructPathValue t newComponentV origV >>= refreshF origV >> return ()
_ -> throwE $ RunTimeInterpretationError "Invalid path in bind writeback"
reconstructPathValue :: ProxyPath -> Value -> Value -> Interpretation Value
reconstructPathValue [] newR@(VRecord _) oldR@(VRecord _) = mergeRecords oldR newR
reconstructPathValue [] v _ = return v
reconstructPathValue (Dereference:t) v (VIndirection (iv, q, tg)) =
liftIO (readMVar iv)
>>= reconstructPathValue t v
>>= \nv -> liftIO (modifyMVar_ iv $ const $ return nv) >> return (VIndirection (iv, q, tg))
reconstructPathValue (MatchOption:t) v (VOption (Just ov, q)) =
reconstructPathValue t v ov >>= \nv -> return $ VOption (Just nv, q)
reconstructPathValue ((TupleField i):t) v (VTuple vs) =
let (x,y) = splitAt i vs in
reconstructPathValue t v (fst $ last x) >>= \nv -> return $ VTuple ((init x) ++ [(nv, snd $ last x)] ++ y)
reconstructPathValue ((RecordField n):t) v (VRecord ivs) = do
fields <- flip mapMembers ivs (\fn (fv, fq) ->
if fn == n then reconstructPathValue t v fv >>= return . (, fq)
else return (fv, fq))
return $ VRecord fields
reconstructPathValue _ _ _ =
throwE $ RunTimeInterpretationError "Invalid path in bind writeback reconstruction"
| Merge two records , restricting to the domain of the first record , and
preferring values from the second argument for duplicates .
mergeRecords :: Value -> Value -> Interpretation Value
mergeRecords (VRecord r1) (VRecord r2) =
mapBindings (\n v -> maybe (return v) return $ lookupBinding n r2) r1 >>= return . VRecord
mergeRecords _ _ =
throwE $ RunTimeTypeError "Invalid bind record target"
expr :: K3 Expression -> Interpretation Value
expr (details -> (EConstant c, _, as)) = constant c as
expr (details -> (EVariable i, _, _)) =
lookupE i >>= \e -> valueOfEntry e >>= syncCollectionE i e
expr (tag &&& children -> (ESome, [x])) =
expression x >>= freshenValue >>= return . VOption . (, vQualOfExpr x) . Just
expr (details -> (ESome, _, _)) =
throwE $ RunTimeTypeError "Invalid Construction of Option"
expr (tag &&& children -> (EIndirect, [x])) = do
new_val <- expression x >>= freshenValue
(\a b -> VIndirection (a, vQualOfExpr x, b)) <$> liftIO (newMVar new_val) <*> memEntTag new_val
expr (details -> (EIndirect, _, _)) =
throwE $ RunTimeTypeError "Invalid Construction of Indirection"
expr (tag &&& children -> (ETuple, cs)) =
mapM (\e -> expression e >>= freshenValue >>= return . (, vQualOfExpr e)) cs >>= return . VTuple
expr (tag &&& children -> (ERecord is, cs)) =
mapM (\e -> expression e >>= freshenValue >>= return . (, vQualOfExpr e)) cs >>= return . VRecord . membersFromList . zip is
expr (details -> (ELambda i, [b], _)) =
mkFunction $ \v -> insertE i (IVal v) >> expression b >>= removeE i
where
mkFunction f = (\cl tg -> VFunction (f, cl, tg)) <$> closure <*> memEntTag f
TODO : currently , this definition of a closure captures
closure :: Interpretation (Closure Value)
closure = do
globals <- get >>= return . getGlobals
vars <- return $ filter (\n -> n /= i && n `notElem` globals) $ freeVariables b
vals <- mapM lookupE vars
envFromList $ zip vars vals
expr (details -> (EOperate otag, cs, _))
| otag `elem` [ONeg, ONot], [a] <- cs = unary otag a
| otherwise, [a, b] <- cs = binary otag a b
| otherwise = undefined
expr (details -> (EProject i, [r], _)) = expression r >>= syncCollection >>= \case
VRecord vm -> maybe (unknownField i) (return . fst) $ lookupMember i vm
VCollection (_, c) -> do
if null (realizationId c) then unannotatedCollection
else maybe (unknownCollectionMember i c) (return . fst)
$ lookupMember i $ collectionNS $ namespace c
v -> throwE . RunTimeTypeError $ "Invalid projection on value: " ++ show v
where unknownField i' = throwE . RunTimeTypeError $ "Unknown record field " ++ i'
unannotatedCollection = throwE . RunTimeTypeError $ "Invalid projection on an unannotated collection"
unknownCollectionMember i' c = throwE . RunTimeTypeError $ "Unknown collection member " ++ i' ++ " in collection " ++ show c
expr (details -> (EProject _, _, _)) = throwE $ RunTimeTypeError "Invalid Record Projection"
expr (tag &&& children -> (ELetIn i, [e, b])) = do
entry <- expression e >>= freshenValue >>= entryOfValueE (e @~ isEQualified)
insertE i entry >> expression b >>= removeE i
expr (details -> (ELetIn _, _, _)) = throwE $ RunTimeTypeError "Invalid LetIn Construction"
expr (details -> (EAssign i, [e], _)) = do
entry <- lookupE i
case entry of
MVal mv -> expression e >>= freshenValue >>= \v -> liftIO (modifyMVar_ mv $ const $ return v) >> return v
IVal _ -> throwE $ RunTimeInterpretationError
$ "Invalid assignment to an immutable variable: " ++ i
expr (details -> (EAssign _, _, _)) = throwE $ RunTimeTypeError "Invalid Assignment"
expr (details -> (EIfThenElse, [p, t, e], _)) = expression p >>= \case
VBool True -> expression t
VBool False -> expression e
_ -> throwE $ RunTimeTypeError "Invalid Conditional Predicate"
expr (details -> (EAddress, [h, p], _)) = do
hv <- expression h
pv <- expression p
case (hv, pv) of
(VString host, VInt port) -> return $ VAddress $ Address (host, port)
_ -> throwE $ RunTimeTypeError "Invalid address"
expr (details -> (ESelf, _, _)) = lookupE annotationSelfId >>= valueOfEntry
expr (details -> (ECaseOf i, [e, s, n], _)) = do
void $ pushProxyFrame
targetV <- expression e
case targetV of
VOption (Just v, q) -> do
pp <- getProxyPath >>= \case
Just ((Named pn):t) -> return $ (Named pn):t
Just ((Temporary pn):t) -> return $ (Temporary pn):t
_ -> throwE $ RunTimeTypeError "Invalid proxy path in case-of expression"
void $ popProxyFrame
entry <- entryOfValueQ v q
insertE i entry
sV <- expression s
void $ lookupVQ i >>= \case
(iV, MemMut) -> replaceProxyPath pp targetV (VOption (Just iV, MemMut)) (\_ newPathV -> return newPathV)
removeE i sV
VOption (Nothing, _) -> popProxyFrame >> expression n
_ -> throwE $ RunTimeTypeError "Invalid Argument to Case-Match"
expr (details -> (ECaseOf _, _, _)) = throwE $ RunTimeTypeError "Invalid Case-Match"
expr (details -> (EBindAs b, [e, f], _)) = do
void $ pushProxyFrame
pc <- getPrintConfig <$> get
bv <- expression e
bp <- getProxyPath >>= \case
Just ((Named n):t) -> return $ (Named n):t
Just ((Temporary n):t) -> return $ (Temporary n):t
_ -> throwE $ RunTimeTypeError "Invalid bind path in bind-as expression"
void $ popProxyFrame
case (b, bv) of
(BIndirection i, VIndirection (r,q,_)) -> do
entry <- liftIO (readMVar r) >>= flip entryOfValueQ q
void $ insertE i entry
fV <- expression f
void $ refreshBindings b bp bv
removeE i fV
(BTuple ts, VTuple vs) -> do
let tupMems = membersFromList $ zip ts vs
bindAndRefresh bp bv tupMems
(BRecord ids, VRecord ivs) -> do
let (idls, ivls) = (map fst ids, boundNames ivs)
if idls `intersect` ivls == idls
then do
let recordMems = membersFromList $ joinByKeys (,) idls ids ivs
bindAndRefresh bp bv recordMems
else throwE $ RunTimeTypeError "Invalid Bind-Pattern"
(binder, binderV) ->
throwE $ RunTimeTypeError $
"Bind Mis-Match: value is " ++ showPC (pc {convertToTuples=False}) binderV
++ " but bind is " ++ show binder
where
bindAndRefresh bp bv mems = do
bindings <- bindMembers mems
fV <- expression f
void $ refreshBindings b bp bv
unbindMembers bindings >> return fV
joinByKeys joinF keys l r =
catMaybes $ map (\k -> lookup k l >>= (\matchL -> lookupMember k r >>= return . joinF matchL)) keys
expr (details -> (EBindAs _,_,_)) = throwE $ RunTimeTypeError "Invalid Bind Construction"
expr _ = throwE $ RunTimeInterpretationError "Invalid Expression"
literal :: K3 Literal -> Interpretation Value
literal (tag -> LBool b) = return $ VBool b
literal (tag -> LByte b) = return $ VByte b
literal (tag -> LInt i) = return $ VInt i
literal (tag -> LReal r) = return $ VReal r
literal (tag -> LString s) = return $ VString s
literal l@(tag -> LNone _) = return $ VOption (Nothing, vQualOfLit l)
literal (tag &&& children -> (LSome, [x])) = literal x >>= return . VOption . (, vQualOfLit x) . Just
literal (details -> (LSome, _, _)) = throwE $ RunTimeTypeError "Invalid option literal"
literal (tag &&& children -> (LIndirect, [x])) = literal x >>= (\y -> (\i tg -> (i, vQualOfLit x, tg)) <$> liftIO (newMVar y) <*> memEntTag y) >>= return . VIndirection
literal (details -> (LIndirect, _, _)) = throwE $ RunTimeTypeError "Invalid indirection literal"
literal (tag &&& children -> (LTuple, ch)) = mapM (\l -> literal l >>= return . (, vQualOfLit l)) ch >>= return . VTuple
literal (details -> (LTuple, _, _)) = throwE $ RunTimeTypeError "Invalid tuple literal"
literal (tag &&& children -> (LRecord ids, ch)) = mapM (\l -> literal l >>= return . (, vQualOfLit l)) ch >>= return . VRecord . membersFromList . zip ids
literal (details -> (LRecord _, _, _)) = throwE $ RunTimeTypeError "Invalid record literal"
literal (details -> (LEmpty _, [], anns)) =
getComposedAnnotationL anns >>= maybe (emptyCollection annIds) emptyAnnotatedCollection
where annIds = namedLAnnotations anns
literal (details -> (LEmpty _, _, _)) = throwE $ RunTimeTypeError "Invalid empty literal"
literal (details -> (LCollection _, elems, anns)) = do
cElems <- mapM literal elems
realizationOpt <- getComposedAnnotationL anns
case realizationOpt of
Nothing -> initialCollection (namedLAnnotations anns) cElems
Just comboId -> initialAnnotatedCollection comboId cElems
literal (details -> (LAddress, [h,p], _)) = mapM literal [h,p] >>= \case
[VString a, VInt b] -> return . VAddress $ Address (a,b)
_ -> throwE $ RunTimeTypeError "Invalid address literal"
literal (details -> (LAddress, _, _)) = throwE $ RunTimeTypeError "Invalid address literal"
literal _ = throwE $ RunTimeTypeError "Invalid literal"
replaceTrigger :: (HasSpan a, HasUID a) => Identifier -> [a] -> Value -> Interpretation ()
replaceTrigger n _ (VFunction (f,_,tg)) = replaceE n (IVal $ VTrigger (n, Just f, tg))
replaceTrigger n _ _ = throwE $ RunTimeTypeError ("Invalid body for trigger " ++ n)
global :: Identifier -> K3 Type -> Maybe (K3 Expression) -> Interpretation ()
global n (details -> (TSink, _, anns)) (Just e) = expression e >>= replaceTrigger n anns
global _ (details -> (TSink, _, _)) Nothing = throwE $ RunTimeInterpretationError "Invalid sink trigger"
| Sources have already been translated into K3 code
global _ (tag -> TSource) _ = return ()
global _ (isTFunction -> True) _ = return ()
global n t@(details -> (TCollection, _, _)) eOpt = elemE n >>= \case
True -> void . getComposedAnnotationT $ annotations t
False -> (getComposedAnnotationT $ annotations t) >>= initializeCollection . maybe "" id
where
initializeCollection comboId = case eOpt of
Nothing | not (null comboId) -> emptyAnnotatedCollection comboId >>= entryOfValueT (t @~ isTQualified) >>= insertE n
Just e | not (null comboId) -> expression e >>= verifyInitialCollection comboId
Nothing -> emptyCollection (namedTAnnotations $ annotations t) >>= entryOfValueT (t @~ isTQualified) >>= insertE n
Just e -> expression e >>= entryOfValueT (t @~ isTQualified) >>= insertE n
verifyInitialCollection comboId = \case
v@(VCollection (_, Collection _ _ cId)) ->
if comboId == cId then entryOfValueT (t @~ isTQualified) v >>= insertE n
else collInitError comboId cId
_ -> collValError
collInitError c c' = throwE . RunTimeTypeError $ "Invalid annotations on collection initializer for " ++ n ++ ": " ++ c ++ " and " ++ c'
collValError = throwE . RunTimeTypeError $ "Invalid collection value " ++ n
global n t eOpt = elemE n >>= \case
True -> return ()
False -> maybe (defaultValue t) expression eOpt >>= entryOfValueT (t @~ isTQualified) >>= insertE n
TODO : qualify names ?
role :: Identifier -> [K3 Declaration] -> Interpretation ()
role _ subDecls = mapM_ declaration subDecls
declaration :: K3 Declaration -> Interpretation ()
declaration (tag &&& children -> (DGlobal n t eO, ch)) =
debugDecl n t $ global n t eO >> mapM_ declaration ch
declaration (details -> (DTrigger n t e, cs, anns)) =
debugDecl n t $ (expression e >>= replaceTrigger n anns) >> mapM_ declaration cs
declaration (tag &&& children -> (DRole r, ch)) = role r ch
declaration (tag -> DDataAnnotation n vdecls members) = annotation n vdecls members
declaration _ = undefined
annotation :: Identifier -> [TypeVarDecl] -> [AnnMemDecl] -> Interpretation ()
annotation n _ memberDecls = tryLookupADef n >>= \case
Nothing -> addAnnotationDef
Just _ -> return ()
where
addAnnotationDef = do
(annMems, bindings) <- foldM initializeMembers
(emptyMembers, emptyBindings)
[liftedAttrFuns, liftedAttrs, attrFuns, attrs]
_ <- unbindMembers bindings
void $ modifyADefs $ (:) (n, annMems)
initializeMembers mbAcc spec = foldM (memberWithBindings spec) mbAcc memberDecls
memberWithBindings (isLifted, matchF) mbAcc mem = do
ivOpt <- annotationMember n isLifted matchF mem
maybe (return mbAcc) (bindAndAppendMem mbAcc) ivOpt
bindAndAppendMem (memAcc, bindAcc) (memN,(v,q)) = do
entry <- case q of
MemImmut -> return $ IVal v
MemMut -> liftIO (newMVar v) >>= return . MVal
void $ insertE memN entry
return (insertMember memN (v,q) memAcc, insertBinding memN entry bindAcc)
(liftedAttrFuns, liftedAttrs) = ((True, isTFunction), (True, not . isTFunction))
(attrFuns, attrs) = ((False, isTFunction), (False, not . isTFunction))
annotationMember :: Identifier -> Bool -> (K3 Type -> Bool) -> AnnMemDecl
-> Interpretation (Maybe (Identifier, (Value, VQualifier)))
annotationMember annId matchLifted matchF annMem = case (matchLifted, annMem) of
(True, Lifted Provides n t (Just e) _) | matchF t -> initializeMember n t e
(False, Attribute Provides n t (Just e) _) | matchF t -> initializeMember n t e
(True, Lifted Provides n t Nothing _) | matchF t -> builtinLiftedAttribute annId n t >>= return . builtinQual t
(False, Attribute Provides n t Nothing _) | matchF t -> builtinAttribute annId n t >>= return . builtinQual t
_ -> return Nothing
where initializeMember n t e = expression e >>= \v -> return . Just $ (n, (v, memberQual t))
builtinQual t (Just (n,v)) = Just (n, (v, memberQual t))
builtinQual _ Nothing = Nothing
memberQual t = case t @~ isTQualified of
Just TMutable -> MemMut
_ -> MemImmut
|
533d4ff8f3b21c81f93e5a00bd6c55fc9476b893f1adbefce865120d8649cb66 | w3ntao/programming-in-haskell | Exercise_6_8_6.hs | # OPTIONS_GHC -Wall #
module Exercise_6_8_6 where
import Prelude hiding (and, concat, replicate, (!!), elem)
and :: [Bool] -> Bool
and [] = True
and (x : xs) = if not x
then False
else and xs
concat :: [[a]] -> [a]
concat [] = []
concat (x : xs) = x ++ concat xs
replicate :: Int -> a -> [a]
replicate 0 _ = []
replicate n x = x : replicate (n-1) x
(!!) :: [a] -> Int -> a
xs !! 0 = (head xs)
xs !! n = (tail xs) !! (n-1)
elem :: Eq a => a -> [a] -> Bool
elem _ [] = False
elem x xs = if x == head xs
then True
else elem x (tail xs) | null | https://raw.githubusercontent.com/w3ntao/programming-in-haskell/c2769fa19d8507aad209818c83f67e82c3698f07/Chapter-6/Exercise_6_8_6.hs | haskell | # OPTIONS_GHC -Wall #
module Exercise_6_8_6 where
import Prelude hiding (and, concat, replicate, (!!), elem)
and :: [Bool] -> Bool
and [] = True
and (x : xs) = if not x
then False
else and xs
concat :: [[a]] -> [a]
concat [] = []
concat (x : xs) = x ++ concat xs
replicate :: Int -> a -> [a]
replicate 0 _ = []
replicate n x = x : replicate (n-1) x
(!!) :: [a] -> Int -> a
xs !! 0 = (head xs)
xs !! n = (tail xs) !! (n-1)
elem :: Eq a => a -> [a] -> Bool
elem _ [] = False
elem x xs = if x == head xs
then True
else elem x (tail xs) | |
72c97dea9e4d69db30f572e3550a5a5eb3605c653047ecf27a069d5e24b95bfa | ocaml-sf/learn-ocaml-corpus | test.ml | open Report
open Test_lib
let sample_char () =
Char.chr (Random.int 7 * 3 + Char.code 'a')
let sample_trie, sample_string =
let sample_word () =
String.init (Random.int 2 + 2) (fun i -> sample_char ()) in
let prefix s =
String.sub s 0 (Random.int (String.length s - 1)) in
let of_list l =
List.fold_left (fun trie w -> Solution.insert trie w (sample_int ())) empty l in
let domain = ref [||] in
let sample_trie () =
domain := sample_array ~min_size:2 ~max_size: 6 ~dups: false sample_word () ;
domain := Array.concat [ [| prefix (!domain).(0) ; prefix (!domain).(1) |] ; !domain ] ;
of_list (Array.to_list !domain) in
let sample_string =
sample_alternatively
[ (fun () -> (!domain).(Random.int (Array.length !domain))) ;
sample_word ] in
sample_trie, sample_string
let sample_char_to_children () =
let Trie (_, res) = sample_trie () in
res
let rec canon_char_to_children l =
let l = List.map (fun (c, t) -> (c, canon_trie t)) l in
List.sort (fun (a, _) (b, _) -> compare a b) l
and canon_trie (Trie (c, l)) =
Trie (c, canon_char_to_children l)
let exercise_1 =
set_progress "Grading exercise 1." ;
Section ([ Text "Exercise 1: " ; Code "children_from_char" ],
test_function_2_against_solution
~test: (test_canon_ok (function None -> None | Some t -> Some (canon_trie t)))
[%ty: char_to_children -> char -> trie option] "children_from_char"
[])
let exercise_2 =
set_progress "Grading exercise 2." ;
Section ([ Text "Exercise 2: " ; Code "update_children" ],
test_function_3_against_solution
~test: (test_canon_ok canon_char_to_children)
[%ty: char_to_children -> char -> trie -> char_to_children] "update_children"
[])
let exercise_3 =
set_progress "Grading exercise 3." ;
Section ([ Text "Exercise 3: " ; Code "lookup" ],
test_function_2_against_solution
[%ty: trie -> string -> int option] "lookup"
[])
let exercise_4 =
set_progress "Grading exercise 4." ;
Section ([ Text "Exercise 4: " ; Code "insert" ],
test_function_3_against_solution
~test: (test_canon_ok canon_trie)
[%ty: trie -> string -> int -> trie] "insert"
[])
let () =
set_result @@
ast_sanity_check code_ast @@ fun () ->
[ exercise_1 ; exercise_2 ; exercise_3 ; exercise_4 ]
| null | https://raw.githubusercontent.com/ocaml-sf/learn-ocaml-corpus/7dcf4d72b49863a3e37e41b3c3097aa4c6101a69/exercises/mooc/week3/seq2/ex2/test.ml | ocaml | open Report
open Test_lib
let sample_char () =
Char.chr (Random.int 7 * 3 + Char.code 'a')
let sample_trie, sample_string =
let sample_word () =
String.init (Random.int 2 + 2) (fun i -> sample_char ()) in
let prefix s =
String.sub s 0 (Random.int (String.length s - 1)) in
let of_list l =
List.fold_left (fun trie w -> Solution.insert trie w (sample_int ())) empty l in
let domain = ref [||] in
let sample_trie () =
domain := sample_array ~min_size:2 ~max_size: 6 ~dups: false sample_word () ;
domain := Array.concat [ [| prefix (!domain).(0) ; prefix (!domain).(1) |] ; !domain ] ;
of_list (Array.to_list !domain) in
let sample_string =
sample_alternatively
[ (fun () -> (!domain).(Random.int (Array.length !domain))) ;
sample_word ] in
sample_trie, sample_string
let sample_char_to_children () =
let Trie (_, res) = sample_trie () in
res
let rec canon_char_to_children l =
let l = List.map (fun (c, t) -> (c, canon_trie t)) l in
List.sort (fun (a, _) (b, _) -> compare a b) l
and canon_trie (Trie (c, l)) =
Trie (c, canon_char_to_children l)
let exercise_1 =
set_progress "Grading exercise 1." ;
Section ([ Text "Exercise 1: " ; Code "children_from_char" ],
test_function_2_against_solution
~test: (test_canon_ok (function None -> None | Some t -> Some (canon_trie t)))
[%ty: char_to_children -> char -> trie option] "children_from_char"
[])
let exercise_2 =
set_progress "Grading exercise 2." ;
Section ([ Text "Exercise 2: " ; Code "update_children" ],
test_function_3_against_solution
~test: (test_canon_ok canon_char_to_children)
[%ty: char_to_children -> char -> trie -> char_to_children] "update_children"
[])
let exercise_3 =
set_progress "Grading exercise 3." ;
Section ([ Text "Exercise 3: " ; Code "lookup" ],
test_function_2_against_solution
[%ty: trie -> string -> int option] "lookup"
[])
let exercise_4 =
set_progress "Grading exercise 4." ;
Section ([ Text "Exercise 4: " ; Code "insert" ],
test_function_3_against_solution
~test: (test_canon_ok canon_trie)
[%ty: trie -> string -> int -> trie] "insert"
[])
let () =
set_result @@
ast_sanity_check code_ast @@ fun () ->
[ exercise_1 ; exercise_2 ; exercise_3 ; exercise_4 ]
| |
d712bbeea67260ff6e354920d745900a5b4545fa6a05a06fec1ef89c2b16158d | bobbae/gosling-emacs | killer.ml | ; -*-mlisp-*-
;
; This file contains alternate definitions for the delete/yank commands.
; They offer the advantage of a kill ring (holding 9 saves), with
; appending of consecutive kills to a single element of the ring. (also,
; yank-from-kill-ring leaves the mark at the beginning of the yank,
; facilitating un-yanking if desired.)
;
; The key assignments follow the defaults (see the end of the file), except
; that the function yank-next-from-kill-ring, which has no default
; counterpart, is bound to M-Y. Use it as follows to go around the ring:
; type C-Y to get back the last thing you killed. If you don't like it,
; travel back in time by typing M-Y repeatedly, replacing the un-kill with
; its predecessor on the ring. Eventually you get back where you started.
;
; The rules for appending/prepending successive kills follow those for teco
; emacs: successive line or word deletions in the same direction get stuck
; together. Character deletions preceded by kills of larger units also get
; tacked on.
;
; The lack of arrays or other indirection, plus the problem of knowing whether
; the last thing you did was a kill, combine to place this among the
; grisliest pieces of code ever written. A. Witkin 2/81
;
; This was modified by SWT Wed Sep 23 1981 to use the (previous-command)
; function and this-command variable to determine if this invocation
; was immediately preceded by a kill command. The function
; (previous-command) returns the value of the variable this-command
; had after the execution of the most recently executed command. This is
; usually the value of the key which invoked the command. The kill
; functions set this to a 'unique' value.
;
; &kr0 - &kr8 are, I regret to say, our kill-ring.
; &kr-flag, if != 0, signals that we're in a recursive kill. [Now only used
; to remember direction of the previous kill].
; &kr-dir if >0 says we're going forward, <0, backward.
; &kr-ptr "points" to the current "element."
;
(declare-global &kr-ptr &kr0 &kr1 &kr2 &kr3 &kr4 &kr5
&kr6 &kr7 &kr8 &kr-flag &kr-dir &kr-me-too)
(progn i ; otherwise you get a "0"
(while (< i 9)
(set (concat "&kr" i) "")
(setq i (+ 1 i))))
(defun
(Delete-next-character string
(setq string (char-to-string (following-char))); save it
(delete-next-character) ; kill it
(if (&Was-kill) (forward-maybe))); maybe append if in a successive kill.
(Delete-previous-character string
(setq string (char-to-string (preceding-char)))
(delete-previous-character)
(if (&Was-kill) (backward-maybe)))
(Delete-next-word string
(set-mark)
(forward-word)
(setq string (region-to-string))
(delete-to-killbuffer)
(forward-maybe))
(Delete-previous-word string
(set-mark)
(backward-word)
(setq string (region-to-string))
(delete-to-killbuffer)
(backward-maybe))
(Kill-to-end-of-line string
(set-mark)
(if (= (following-char) 10) ; if at eol, gobble newline char.
(forward-character)
(end-of-line))
(setq string (region-to-string))
(delete-to-killbuffer)
(forward-maybe))
(region-to-kill-ring ; put on ring w/o killing
(setq &kr-ptr (% (+ 1 &kr-ptr) 9)); increment ptr mod 9
(set (concat "&kr" &kr-ptr) (region-to-string)))
(delete-region-to-kill-ring
(region-to-kill-ring)
(delete-to-killbuffer))
(yank-from-kill-ring s
(set-mark)
(execute-mlisp-line (concat "(insert-string &kr" &kr-ptr ")"))); boo, hiss!
(yank-next-from-kill-ring s k
(setq s (region-to-string))
(execute-mlisp-line (concat "(setq k &kr" &kr-ptr ")"))
(if (!= s k)
(error-message "")
(progn (delete-to-killbuffer)
(set-mark)
(setq &kr-ptr (- &kr-ptr 1))
(if (< &kr-ptr 0) (setq &kr-ptr 8))
(execute-mlisp-line
(concat "(insert-string &kr" &kr-ptr ")")))))
(forward-maybe ; set dir to forward, call work function,
(setq &kr-dir 1) ; reset dir, and return
(maybe-append-to-kill-ring)
(setq &kr-dir 0))
(backward-maybe ; set dir to forward, call work function,
(setq &kr-dir -1) ; reset dir, and return
(maybe-append-to-kill-ring)
(setq &kr-dir 0))
; This is the guy who figures out whether to append or prepend, rather than
; incrementing ptr.
(maybe-append-to-kill-ring s
(if (!= (&Was-kill) &kr-dir) ; -> shouldn't append or prepend'
(progn (setq &kr-ptr (% (+ 1 &kr-ptr) 9)); increment
(set (concat "&kr" &kr-ptr) string));save
(if (> &kr-dir 0) ; if forward...
(execute-mlisp-line ; append
(concat
"(set (concat ""&kr"" &kr-ptr) (concat &kr"
&kr-ptr " string))"))
(execute-mlisp-line ; else prepend
(concat
"(set (concat ""&kr"" &kr-ptr) (concat string &kr"
&kr-ptr "))" ))))
(setq this-command 1802071148); = 'k'<<24+'i'<<16+'l'<<8+'l'
(setq &kr-flag &kr-dir) ; also remember the direction
)
(&Was-kill ; check to see if previous command was a
; kill
(if (= (previous-command) 1802071148); see above
&kr-flag ; if was kill, return direction of kill
0 ; else return 0
)
)
); Immer muss Man den Verben zu end lassen.
; bind some keys
(bind-to-key "Delete-next-character" '')
(bind-to-key "Delete-previous-character" '')
(bind-to-key "Delete-next-word" (+ 128 'd'))
(bind-to-key "Delete-previous-word" (+ 128 ''))
(bind-to-key "Kill-to-end-of-line" '')
(bind-to-key "region-to-kill-ring" (+ 128 'w'))
(bind-to-key "delete-region-to-kill-ring" '')
(bind-to-key "yank-from-kill-ring" '')
(bind-to-key "yank-next-from-kill-ring" (+ 128 'y'))
| null | https://raw.githubusercontent.com/bobbae/gosling-emacs/8fdda532abbffb0c952251a0b5a4857e0f27495a/lib/maclib/utah/killer.ml | ocaml | ; -*-mlisp-*-
;
; This file contains alternate definitions for the delete/yank commands.
; They offer the advantage of a kill ring (holding 9 saves), with
; appending of consecutive kills to a single element of the ring. (also,
; yank-from-kill-ring leaves the mark at the beginning of the yank,
; facilitating un-yanking if desired.)
;
; The key assignments follow the defaults (see the end of the file), except
; that the function yank-next-from-kill-ring, which has no default
; counterpart, is bound to M-Y. Use it as follows to go around the ring:
; type C-Y to get back the last thing you killed. If you don't like it,
; travel back in time by typing M-Y repeatedly, replacing the un-kill with
; its predecessor on the ring. Eventually you get back where you started.
;
; The rules for appending/prepending successive kills follow those for teco
; emacs: successive line or word deletions in the same direction get stuck
; together. Character deletions preceded by kills of larger units also get
; tacked on.
;
; The lack of arrays or other indirection, plus the problem of knowing whether
; the last thing you did was a kill, combine to place this among the
; grisliest pieces of code ever written. A. Witkin 2/81
;
; This was modified by SWT Wed Sep 23 1981 to use the (previous-command)
; function and this-command variable to determine if this invocation
; was immediately preceded by a kill command. The function
; (previous-command) returns the value of the variable this-command
; had after the execution of the most recently executed command. This is
; usually the value of the key which invoked the command. The kill
; functions set this to a 'unique' value.
;
; &kr0 - &kr8 are, I regret to say, our kill-ring.
; &kr-flag, if != 0, signals that we're in a recursive kill. [Now only used
; to remember direction of the previous kill].
; &kr-dir if >0 says we're going forward, <0, backward.
; &kr-ptr "points" to the current "element."
;
(declare-global &kr-ptr &kr0 &kr1 &kr2 &kr3 &kr4 &kr5
&kr6 &kr7 &kr8 &kr-flag &kr-dir &kr-me-too)
(progn i ; otherwise you get a "0"
(while (< i 9)
(set (concat "&kr" i) "")
(setq i (+ 1 i))))
(defun
(Delete-next-character string
(setq string (char-to-string (following-char))); save it
(delete-next-character) ; kill it
(if (&Was-kill) (forward-maybe))); maybe append if in a successive kill.
(Delete-previous-character string
(setq string (char-to-string (preceding-char)))
(delete-previous-character)
(if (&Was-kill) (backward-maybe)))
(Delete-next-word string
(set-mark)
(forward-word)
(setq string (region-to-string))
(delete-to-killbuffer)
(forward-maybe))
(Delete-previous-word string
(set-mark)
(backward-word)
(setq string (region-to-string))
(delete-to-killbuffer)
(backward-maybe))
(Kill-to-end-of-line string
(set-mark)
(if (= (following-char) 10) ; if at eol, gobble newline char.
(forward-character)
(end-of-line))
(setq string (region-to-string))
(delete-to-killbuffer)
(forward-maybe))
(region-to-kill-ring ; put on ring w/o killing
(setq &kr-ptr (% (+ 1 &kr-ptr) 9)); increment ptr mod 9
(set (concat "&kr" &kr-ptr) (region-to-string)))
(delete-region-to-kill-ring
(region-to-kill-ring)
(delete-to-killbuffer))
(yank-from-kill-ring s
(set-mark)
(execute-mlisp-line (concat "(insert-string &kr" &kr-ptr ")"))); boo, hiss!
(yank-next-from-kill-ring s k
(setq s (region-to-string))
(execute-mlisp-line (concat "(setq k &kr" &kr-ptr ")"))
(if (!= s k)
(error-message "")
(progn (delete-to-killbuffer)
(set-mark)
(setq &kr-ptr (- &kr-ptr 1))
(if (< &kr-ptr 0) (setq &kr-ptr 8))
(execute-mlisp-line
(concat "(insert-string &kr" &kr-ptr ")")))))
(forward-maybe ; set dir to forward, call work function,
(setq &kr-dir 1) ; reset dir, and return
(maybe-append-to-kill-ring)
(setq &kr-dir 0))
(backward-maybe ; set dir to forward, call work function,
(setq &kr-dir -1) ; reset dir, and return
(maybe-append-to-kill-ring)
(setq &kr-dir 0))
; This is the guy who figures out whether to append or prepend, rather than
; incrementing ptr.
(maybe-append-to-kill-ring s
(if (!= (&Was-kill) &kr-dir) ; -> shouldn't append or prepend'
(progn (setq &kr-ptr (% (+ 1 &kr-ptr) 9)); increment
(set (concat "&kr" &kr-ptr) string));save
(if (> &kr-dir 0) ; if forward...
(execute-mlisp-line ; append
(concat
"(set (concat ""&kr"" &kr-ptr) (concat &kr"
&kr-ptr " string))"))
(execute-mlisp-line ; else prepend
(concat
"(set (concat ""&kr"" &kr-ptr) (concat string &kr"
&kr-ptr "))" ))))
(setq this-command 1802071148); = 'k'<<24+'i'<<16+'l'<<8+'l'
(setq &kr-flag &kr-dir) ; also remember the direction
)
(&Was-kill ; check to see if previous command was a
; kill
(if (= (previous-command) 1802071148); see above
&kr-flag ; if was kill, return direction of kill
0 ; else return 0
)
)
); Immer muss Man den Verben zu end lassen.
; bind some keys
(bind-to-key "Delete-next-character" '')
(bind-to-key "Delete-previous-character" '')
(bind-to-key "Delete-next-word" (+ 128 'd'))
(bind-to-key "Delete-previous-word" (+ 128 ''))
(bind-to-key "Kill-to-end-of-line" '')
(bind-to-key "region-to-kill-ring" (+ 128 'w'))
(bind-to-key "delete-region-to-kill-ring" '')
(bind-to-key "yank-from-kill-ring" '')
(bind-to-key "yank-next-from-kill-ring" (+ 128 'y'))
| |
40b23f71c4d175c0a40d018da5a566f6aad9f706873868d61b7417c935876c8a | JustusAdam/language-haskell | T0015.hs | SYNTAX TEST " source.haskell " " Deriving for ADTs with sums and records "
data Feed =
BuildTask {
blokName :: String,
outPathBuild :: FilePath,
relPath :: FilePath,
srcFile :: Files.File
}
deriving Eq
-- <~~-------- meta.deriving.haskell
data Query
= All
| Some {
feeds :: [String],
cats :: [String],
dates :: QueryDate
}
deriving Read
-- <~~---------- meta.deriving.haskell
data QueryDate
= AnyDate
| Between String String
deriving Read
-- <~~---------- meta.deriving.haskell
| null | https://raw.githubusercontent.com/JustusAdam/language-haskell/c9ee1b3ee166c44db9ce350920ba502fcc868245/test/tickets/T0015.hs | haskell | <~~-------- meta.deriving.haskell
<~~---------- meta.deriving.haskell
<~~---------- meta.deriving.haskell | SYNTAX TEST " source.haskell " " Deriving for ADTs with sums and records "
data Feed =
BuildTask {
blokName :: String,
outPathBuild :: FilePath,
relPath :: FilePath,
srcFile :: Files.File
}
deriving Eq
data Query
= All
| Some {
feeds :: [String],
cats :: [String],
dates :: QueryDate
}
deriving Read
data QueryDate
= AnyDate
| Between String String
deriving Read
|
c020387dbe9cf1c46ab88f7068f41c99237b60d174e11f8c9f995d1caf00b889 | marigold-dev/deku | block.ml | open Deku_stdlib
open Deku_crypto
open Deku_concepts
open Deku_protocol
open Deku_ledger
type block =
| Block of {
key : Key.t;
signature : Signature.t;
hash : Block_hash.t;
author : Key_hash.t;
level : Level.t;
(* TODO: nonce *)
previous : Block_hash.t;
tezos_operations : Tezos_operation.t list;
withdrawal_handles_hash : Ledger.Withdrawal_handle.hash;
payload_hash : BLAKE2b.t;
payload : string;
}
type t = block
let equal a b =
let (Block { hash = a; _ }) = a in
let (Block { hash = b; _ }) = b in
Block_hash.equal a b
let compare a b =
let (Block { hash = a; _ }) = a in
let (Block { hash = b; _ }) = b in
Block_hash.compare a b
let header_encoding =
let open Data_encoding in
tup5 Key_hash.encoding Level.encoding Block_hash.encoding
(list Tezos_operation.encoding)
Ledger.Withdrawal_handle.Withdrawal_handle_hash.encoding
let hash ~author ~level ~previous ~tezos_operations ~withdrawal_handles_hash
~payload =
let payload =
let payload = BLAKE2b.hash payload in
let header =
Data_encoding.Binary.to_string_exn header_encoding
(author, level, previous, tezos_operations, withdrawal_handles_hash)
in
let header = BLAKE2b.hash header in
BLAKE2b.both header payload
in
let hash =
let state_root_hash = BLAKE2b.hash "FIXME: we need to add the state root" in
Block_hash.hash ~block_level:level ~block_payload_hash:payload
~state_root_hash ~withdrawal_handles_hash
in
(payload, hash)
let encoding =
let open Data_encoding in
conv_with_guard
(fun block ->
let (Block
{
key;
signature;
hash = _;
author;
level;
previous;
tezos_operations;
withdrawal_handles_hash;
payload;
payload_hash = _;
}) =
block
in
( (key, signature),
(author, level, previous, tezos_operations, withdrawal_handles_hash),
payload ))
(fun ( (key, signature),
(author, level, previous, tezos_operations, withdrawal_handles_hash),
payload ) ->
let payload_hash, hash =
hash ~author ~level ~previous ~tezos_operations ~withdrawal_handles_hash
~payload
in
match
Key_hash.(equal author (of_key key))
&&
let hash = Block_hash.to_blake2b hash in
Signature.verify key signature hash
with
| true ->
let block =
Block
{
key;
signature;
hash;
author;
level;
previous;
tezos_operations;
withdrawal_handles_hash;
payload;
payload_hash;
}
in
Ok block
| false -> Error "Invalid_signature")
(tup3 Signature.key_encoding header_encoding string)
let produce ~identity ~level ~previous ~payload ~tezos_operations
~withdrawal_handles_hash =
let payload = Payload.encode ~payload in
let author = Identity.key_hash identity in
let payload_hash, block_hash =
hash ~author ~level ~previous ~tezos_operations ~withdrawal_handles_hash
~payload
in
let key = Identity.key identity in
let signature =
let hash = Block_hash.to_blake2b block_hash in
Identity.sign ~hash identity
in
Block
{
key;
signature;
hash = block_hash;
author;
level;
previous;
payload;
payload_hash;
tezos_operations;
withdrawal_handles_hash;
}
let sign ~identity block =
let (Block { hash; _ }) = block in
let hash = Block_hash.to_blake2b hash in
Verified_signature.sign hash identity
let pp fmt (Block { hash; level; _ }) =
let hash = Block_hash.to_b58 hash in
let open Deku_stdlib in
Format.fprintf fmt "Block [hash: %s, level: %a]" hash N.pp (Level.to_n level)
module Set = Set.Make (struct
type t = block
let compare = compare
let encoding = encoding
end)
| null | https://raw.githubusercontent.com/marigold-dev/deku/cdf82852196b55f755f40850515580be4fd9a3fa/deku-p/src/core/consensus/block.ml | ocaml | TODO: nonce | open Deku_stdlib
open Deku_crypto
open Deku_concepts
open Deku_protocol
open Deku_ledger
type block =
| Block of {
key : Key.t;
signature : Signature.t;
hash : Block_hash.t;
author : Key_hash.t;
level : Level.t;
previous : Block_hash.t;
tezos_operations : Tezos_operation.t list;
withdrawal_handles_hash : Ledger.Withdrawal_handle.hash;
payload_hash : BLAKE2b.t;
payload : string;
}
type t = block
let equal a b =
let (Block { hash = a; _ }) = a in
let (Block { hash = b; _ }) = b in
Block_hash.equal a b
let compare a b =
let (Block { hash = a; _ }) = a in
let (Block { hash = b; _ }) = b in
Block_hash.compare a b
let header_encoding =
let open Data_encoding in
tup5 Key_hash.encoding Level.encoding Block_hash.encoding
(list Tezos_operation.encoding)
Ledger.Withdrawal_handle.Withdrawal_handle_hash.encoding
let hash ~author ~level ~previous ~tezos_operations ~withdrawal_handles_hash
~payload =
let payload =
let payload = BLAKE2b.hash payload in
let header =
Data_encoding.Binary.to_string_exn header_encoding
(author, level, previous, tezos_operations, withdrawal_handles_hash)
in
let header = BLAKE2b.hash header in
BLAKE2b.both header payload
in
let hash =
let state_root_hash = BLAKE2b.hash "FIXME: we need to add the state root" in
Block_hash.hash ~block_level:level ~block_payload_hash:payload
~state_root_hash ~withdrawal_handles_hash
in
(payload, hash)
let encoding =
let open Data_encoding in
conv_with_guard
(fun block ->
let (Block
{
key;
signature;
hash = _;
author;
level;
previous;
tezos_operations;
withdrawal_handles_hash;
payload;
payload_hash = _;
}) =
block
in
( (key, signature),
(author, level, previous, tezos_operations, withdrawal_handles_hash),
payload ))
(fun ( (key, signature),
(author, level, previous, tezos_operations, withdrawal_handles_hash),
payload ) ->
let payload_hash, hash =
hash ~author ~level ~previous ~tezos_operations ~withdrawal_handles_hash
~payload
in
match
Key_hash.(equal author (of_key key))
&&
let hash = Block_hash.to_blake2b hash in
Signature.verify key signature hash
with
| true ->
let block =
Block
{
key;
signature;
hash;
author;
level;
previous;
tezos_operations;
withdrawal_handles_hash;
payload;
payload_hash;
}
in
Ok block
| false -> Error "Invalid_signature")
(tup3 Signature.key_encoding header_encoding string)
let produce ~identity ~level ~previous ~payload ~tezos_operations
~withdrawal_handles_hash =
let payload = Payload.encode ~payload in
let author = Identity.key_hash identity in
let payload_hash, block_hash =
hash ~author ~level ~previous ~tezos_operations ~withdrawal_handles_hash
~payload
in
let key = Identity.key identity in
let signature =
let hash = Block_hash.to_blake2b block_hash in
Identity.sign ~hash identity
in
Block
{
key;
signature;
hash = block_hash;
author;
level;
previous;
payload;
payload_hash;
tezos_operations;
withdrawal_handles_hash;
}
let sign ~identity block =
let (Block { hash; _ }) = block in
let hash = Block_hash.to_blake2b hash in
Verified_signature.sign hash identity
let pp fmt (Block { hash; level; _ }) =
let hash = Block_hash.to_b58 hash in
let open Deku_stdlib in
Format.fprintf fmt "Block [hash: %s, level: %a]" hash N.pp (Level.to_n level)
(* Ordered sets of blocks.  Elements are ordered with the polymorphic
   [compare]; [encoding] is supplied so the functor can (de)serialize
   set elements with the codec defined above. *)
module Set = Set.Make (struct
  type t = block
  let compare = compare
  let encoding = encoding
end)
|
699f21a28648230cf3732d13d4c2856c7505ad65fff58f361e5e229ac1872323 | clojure/core.typed | hset_utils.clj | Copyright ( c ) , contributors .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (http://opensource.org/licenses/eclipse-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns clojure.core.typed.checker.hset-utils)
(def valid-fixed?
  "Predicate for values allowed as fixed members of a heterogeneous set:
  strings, symbols, keywords, nil, numbers, characters and booleans.
  Returns a truthy value for a valid member, nil otherwise."
  (some-fn string? symbol? keyword? nil? number?
           char? boolean?))
| null | https://raw.githubusercontent.com/clojure/core.typed/f5b7d00bbb29d09000d7fef7cca5b40416c9fa91/typed/checker.jvm/src/clojure/core/typed/checker/hset_utils.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software. | Copyright ( c ) , contributors .
(ns clojure.core.typed.checker.hset-utils)
(def valid-fixed? (some-fn string? symbol? keyword? nil? number?
char? boolean?))
|
5043aa3d30ec1f1b09ac6e0601760e513f126377ca0985624c8f437e57045c10 | appleshan/cl-http | w4-client.lisp | ;;;
;;; **********************************************************************
This code was written by and has been placed in
;;; the Public domain, and is provided 'as is'.
;;;
;;; **********************************************************************
;;;
CMUCL specific W4 Web Walker client support .
(in-package "HTTP")
;;; Binary stream copy into a byte vector.
;;;
(defun stream-copy-into-8-bit-array (stream length)
  "Read exactly LENGTH octets from STREAM into a fresh (unsigned-byte 8)
vector and return it.  Signals an end-of-file error if STREAM is
exhausted early."
  (declare (fixnum length)
           (optimize (speed 3)))
  (let ((vector (make-array length :element-type '(unsigned-byte 8))))
    (loop for i fixnum from 0 below length
          ;; READ-BYTE with eof-error-p T errors at premature EOF.
          do (setf (aref vector i) (read-byte stream t)))
    vector))
Binary stream copy until EOF into a byte vector .
;;;
(defun stream-copy-until-eof-into-8-bit-array (stream)
  "Read octets from STREAM until end-of-file and return them in an
\(unsigned-byte 8) vector whose fill pointer gives the octet count."
  (declare (optimize (speed 3)))
  ;; Amortized-linear capture: the buffer starts at 1000 octets and is
  ;; doubled each time it fills.
  (let ((res (make-array 1000 :element-type '(unsigned-byte 8)))
        (len 1000)
        (index 0))
    (declare (fixnum len index))
    (loop
     (let ((byte (read-byte stream nil nil)))  ; NIL (not an error) at EOF
       (cond (byte
              (when (= index len)
                ;; Buffer full: double it and copy the data across.
                (setq len (* len 2))
                (let ((new (make-array len :element-type '(unsigned-byte 8))))
                  (replace new res)
                  (setq res new)))
              (setf (aref res index) byte)
              (incf index))
             (t
              ;; EOF: expose only INDEX octets by poking the array's fill
              ;; pointer directly.  NOTE(review): LISP::%ARRAY-FILL-POINTER
              ;; is a CMUCL internal and the array was not made with
              ;; :FILL-POINTER -- verify against the target CMUCL version.
              (setf (lisp::%array-fill-pointer res) index)
              (return res)))))))
;;; Text or CRLF stream copy into a string.
;;;
(defun stream-copy-into-string (stream length)
  "Read exactly LENGTH characters from STREAM into a fresh string and
return it.  Signals an end-of-file error if STREAM is exhausted early."
  (declare (fixnum length)
           (optimize (speed 3)))
  (let ((string (make-string length)))
    (loop for i fixnum from 0 below length
          ;; READ-CHAR with eof-error-p T errors at premature EOF.
          do (setf (schar string i) (read-char stream t)))
    string))
Text or CRLF stream copy until EOF into a string .
;;;
(defun stream-copy-until-eof-into-string (stream)
  "Read characters from STREAM until end-of-file and return them in a
string whose fill pointer gives the character count."
  (declare (optimize (speed 3)))
  ;; Amortized-linear capture: the buffer starts at 1000 characters and
  ;; is doubled each time it fills.
  (let ((res (make-string 1000))
        (len 1000)
        (index 0))
    (declare (fixnum len index))
    (loop
     (let ((ch (read-char stream nil nil)))  ; NIL (not an error) at EOF
       (cond (ch
              (when (= index len)
                ;; Buffer full: double it and copy the data across.
                (setq len (* len 2))
                (let ((new (make-string len)))
                  (replace new res)
                  (setq res new)))
              (setf (schar res index) ch)
              (incf index))
             (t
              ;; EOF: expose only INDEX characters via the CMUCL-internal
              ;; fill-pointer slot.  NOTE(review): the string was not made
              ;; with :FILL-POINTER -- verify on the target CMUCL version.
              (setf (lisp::%array-fill-pointer res) index)
              (return res)))))))
;;; Fast chunked transfer capture.
;;;
(defmethod chunked-input-capture (stream copy-mode headers)
  "Decode a chunked transfer-encoded entity from STREAM and capture it
whole: as a string for COPY-MODE :TEXT or :CRLF, or as an
\(unsigned-byte 8) vector for :BINARY.  HEADERS supplies the response
headers required by the chunked-decoding machinery."
  (declare (optimize (speed 3)))
  (with-chunked-transfer-decoding (stream :headers headers)
    (ecase copy-mode
      ((:text :crlf) (stream-copy-until-eof-into-string stream))
      (:binary (stream-copy-until-eof-into-8-bit-array stream)))))
;;; Modified version from client/w4-client.lisp, using the above
;;; function definitions, and implementing the flushing of reply
;;; bodies upon error giving more reliable chunk-transfer operation.
;;;
(defun %get-url-headers-and-body (url headers report-stream authorization)
  "Perform an HTTP GET on URL and return five values: the response body
\(string or octet vector, NIL when none was captured), the durable
response headers, the numeric status code, any redirection URL lists
collected, and the server's HTTP version.  HEADERS is a plist of extra
request headers, REPORT-STREAM receives debug logging, and
AUTHORIZATION, when supplied, is sent with the request."
  ;; HANDLING-REDIRECTS and WITH-HTTP-REQUEST are cl-http macros;
  ;; WITH-HTTP-REQUEST anaphorically binds CLIENT and REMOTE-STREAM.
  (handling-redirects (url)
    (with-http-request
      (url :get
           :request-headers (compute-standard-request-headers
                              url :authorization authorization
                              :header-plist headers
                              ;; Only send our default User-Agent when the
                              ;; caller did not supply one in HEADERS.
                              :user-agent (if (getf headers :user-agent)
                                              nil
                                              *server-version*)))
      (let ((status (client-status client))
            (http-version (client-connection-version client))
            (response-headers (client-response-headers client))
            response-body redirection)
        (case status
          ;; Success responses that carry an entity body.
          ((200 205 206)
           (let* ((content-type (get-header :content-type response-headers))
                  (copy-mode (mime-content-type-copy-mode content-type))
                  (content-length
                    (get-header :content-length response-headers))
                  (transfer-encoding
                    (get-header :transfer-encoding response-headers)))
             (setq response-body
                   (cond (content-length
                          ;; Sized entity: read exactly CONTENT-LENGTH units.
                          (ecase copy-mode
                            ((:text :crlf)
                             (stream-copy-into-string remote-stream
                                                      content-length))
                            (:binary
                             ;; NOTE(review): binds STREAM but reads from
                             ;; REMOTE-STREAM, and calls
                             ;; BINARY-STREAM-COPY-INTO-8-BIT-ARRAY rather
                             ;; than the local STREAM-COPY-INTO-8-BIT-ARRAY
                             ;; defined above -- verify this is intended.
                             (with-binary-stream (stream :input)
                               (binary-stream-copy-into-8-bit-array
                                 remote-stream content-length)))))
                         ;; Pre-1.1 servers mark the end of the entity by
                         ;; closing the connection: read until EOF.
                         ((member http-version '(:http/1.0 :http/0.9))
                          (ecase copy-mode
                            ((:text :crlf)
                             (stream-copy-until-eof-into-string remote-stream))
                            (:binary
                             (stream-copy-until-eof-into-8-bit-array
                               remote-stream))))
                         ((eq transfer-encoding :chunked)
                          (chunked-input-capture remote-stream copy-mode
                                                 response-headers))
                         (t (error 'server-not-implemented
                                   :close-connection t :url url
                                   :format-string "The HTTP transfer decoding, ~A, is not implemented."
                                   :format-args (list transfer-encoding)))))))
          ;; Status codes whose body we do not keep: drain the entity so
          ;; the connection stays usable.
          ((201 202 203 204 300 402 403 405 406 407 415)
           (flush-input-entity remote-stream response-headers http-version))
          ;; Redirections: collect the new URLs, drain, then signal so
          ;; HANDLING-REDIRECTS can retry with the alternates.
          ((301 302)
           (let ((alternate-urls
                   (mapcar #'url:intern-url
                           (ensure-list
                             (or (get-header :location response-headers)
                                 (get-header :content-location
                                             response-headers))))))
             (flush-input-entity remote-stream response-headers http-version)
             (push alternate-urls redirection)
             (signal (ecase status
                       (301 'document-moved-permanently)
                       (302 'document-moved-temporarily))
                     :new-urls alternate-urls :version http-version)))
          ;; do something about authentication -- JCMa 12/10/1996.
          (401
           (destructuring-bind (&optional authentication-method . realm)
               (get-header :WWW-Authenticate response-headers)
             (declare (ignore authentication-method realm))
             (flush-input-entity remote-stream response-headers http-version)
             nil))
          ;; Not found: optionally log a common-logfile entry, then drain.
          (404
           (when *debug-client*
             (fresh-line report-stream)
             (%write-common-logfile-entry
               (host-string url)
               (concatenate 'string (url:name-string url) " GET")
               status 0 "-" *log-times-in-gmt* report-stream))
           (flush-input-entity remote-stream response-headers http-version))
          ((nil) (setq status 408))     ; didn't return a status code
          ((408 411 414 500 501 502 503 504 505)
           (flush-input-entity remote-stream response-headers http-version))
          ;; Anything else: signal a condition appropriate to the code.
          (t (client-signal-http-code
               url status :get
               :headers response-headers
               :reason (client-reason client)
               :version http-version)))
        (values response-body (durable-response-headers client) status redirection
                http-version)))))
| null | https://raw.githubusercontent.com/appleshan/cl-http/a7ec6bf51e260e9bb69d8e180a103daf49aa0ac2/cmucl/client/w4-client.lisp | lisp |
**********************************************************************
the Public domain, and is provided 'as is'.
**********************************************************************
Binary stream copy into a byte vector.
Text or CRLF stream copy into a string.
Fast chunked transfer capture.
Modified version from client/w4-client.lisp, using the above
function definitions, and implementing the flushing of reply
bodies upon error giving more reliable chunk-transfer operation.
do something about authentication -- JCMa 12/10/1996.
didn't return a status code | This code was written by and has been placed in
CMUCL specific W4 Web Walker client support .
(in-package "HTTP")
(defun stream-copy-into-8-bit-array (stream length)
(declare (fixnum length)
(optimize (speed 3)))
(let ((vector (make-array length :element-type '(unsigned-byte 8))))
(dotimes (i length)
(declare (fixnum i))
(let ((byte (read-byte stream t)))
(setf (aref vector i) byte)))
vector))
Binary stream copy until EOF into a byte vector .
(defun stream-copy-until-eof-into-8-bit-array (stream)
(declare (optimize (speed 3)))
(let ((res (make-array 1000 :element-type '(unsigned-byte 8)))
(len 1000)
(index 0))
(declare (fixnum len index))
(loop
(let ((byte (read-byte stream nil nil)))
(cond (byte
(when (= index len)
(setq len (* len 2))
(let ((new (make-array len :element-type '(unsigned-byte 8))))
(replace new res)
(setq res new)))
(setf (aref res index) byte)
(incf index))
(t
(setf (lisp::%array-fill-pointer res) index)
(return res)))))))
(defun stream-copy-into-string (stream length)
(declare (fixnum length)
(optimize (speed 3)))
(let ((string (make-string length)))
(dotimes (i length)
(declare (fixnum i))
(let ((char (read-char stream t)))
(setf (aref string i) char)))
string))
Text or CRLF stream copy until EOF into a string .
(defun stream-copy-until-eof-into-string (stream)
(declare (optimize (speed 3)))
(let ((res (make-string 1000))
(len 1000)
(index 0))
(declare (fixnum len index))
(loop
(let ((ch (read-char stream nil nil)))
(cond (ch
(when (= index len)
(setq len (* len 2))
(let ((new (make-string len)))
(replace new res)
(setq res new)))
(setf (schar res index) ch)
(incf index))
(t
(setf (lisp::%array-fill-pointer res) index)
(return res)))))))
(defmethod chunked-input-capture (stream copy-mode headers)
(declare (optimize (speed 3)))
(with-chunked-transfer-decoding (stream :headers headers)
(ecase copy-mode
((:text :crlf) (stream-copy-until-eof-into-string stream))
(:binary (stream-copy-until-eof-into-8-bit-array stream)))))
(defun %get-url-headers-and-body (url headers report-stream authorization)
(handling-redirects (url)
(with-http-request
(url :get
:request-headers (compute-standard-request-headers
url :authorization authorization
:header-plist headers
:user-agent (if (getf headers :user-agent)
nil
*server-version*)))
(let ((status (client-status client))
(http-version (client-connection-version client))
(response-headers (client-response-headers client))
response-body redirection)
(case status
((200 205 206)
(let* ((content-type (get-header :content-type response-headers))
(copy-mode (mime-content-type-copy-mode content-type))
(content-length
(get-header :content-length response-headers))
(transfer-encoding
(get-header :transfer-encoding response-headers)))
(setq response-body
(cond (content-length
(ecase copy-mode
((:text :crlf)
(stream-copy-into-string remote-stream
content-length))
(:binary
(with-binary-stream (stream :input)
(binary-stream-copy-into-8-bit-array
remote-stream content-length)))))
((member http-version '(:http/1.0 :http/0.9))
(ecase copy-mode
((:text :crlf)
(stream-copy-until-eof-into-string remote-stream))
(:binary
(stream-copy-until-eof-into-8-bit-array
remote-stream))))
((eq transfer-encoding :chunked)
(chunked-input-capture remote-stream copy-mode
response-headers))
(t (error 'server-not-implemented
:close-connection t :url url
:format-string "The HTTP transfer decoding, ~A, is not implemented."
:format-args (list transfer-encoding)))))))
((201 202 203 204 300 402 403 405 406 407 415)
(flush-input-entity remote-stream response-headers http-version))
((301 302)
(let ((alternate-urls
(mapcar #'url:intern-url
(ensure-list
(or (get-header :location response-headers)
(get-header :content-location
response-headers))))))
(flush-input-entity remote-stream response-headers http-version)
(push alternate-urls redirection)
(signal (ecase status
(301 'document-moved-permanently)
(302 'document-moved-temporarily))
:new-urls alternate-urls :version http-version)))
(401
(destructuring-bind (&optional authentication-method . realm)
(get-header :WWW-Authenticate response-headers)
(declare (ignore authentication-method realm))
(flush-input-entity remote-stream response-headers http-version)
nil))
(404
(when *debug-client*
(fresh-line report-stream)
(%write-common-logfile-entry
(host-string url)
(concatenate 'string (url:name-string url) " GET")
status 0 "-" *log-times-in-gmt* report-stream))
(flush-input-entity remote-stream response-headers http-version))
((408 411 414 500 501 502 503 504 505)
(flush-input-entity remote-stream response-headers http-version))
(t (client-signal-http-code
url status :get
:headers response-headers
:reason (client-reason client)
:version http-version)))
(values response-body (durable-response-headers client) status redirection
http-version)))))
|
ef44d02e32a9c98e83652753317a3bc277870f228666869d2d11469d8d47f057 | achirkin/vulkan | VK_MVK_macos_surface.hs | # OPTIONS_GHC -fno - warn - orphans #
# OPTIONS_HADDOCK not - home #
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
# LANGUAGE FlexibleInstances #
# LANGUAGE ForeignFunctionInterface #
{-# LANGUAGE MagicHash #-}
{-# LANGUAGE PatternSynonyms #-}
{-# LANGUAGE Strict #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE ViewPatterns #-}
module Graphics.Vulkan.Ext.VK_MVK_macos_surface
(AHardwareBuffer(), ANativeWindow(), CAMetalLayer(), VkBool32(..),
VkDeviceAddress(..), VkDeviceSize(..), VkFlags(..),
VkSampleMask(..), VkAndroidSurfaceCreateFlagsKHR(..),
VkBufferViewCreateFlags(..),
VkBuildAccelerationStructureFlagsNV(..),
VkCommandPoolTrimFlags(..), VkCommandPoolTrimFlagsKHR(..),
VkDebugUtilsMessengerCallbackDataFlagsEXT(..),
VkDebugUtilsMessengerCreateFlagsEXT(..),
VkDescriptorBindingFlagsEXT(..), VkDescriptorPoolResetFlags(..),
VkDescriptorUpdateTemplateCreateFlags(..),
VkDescriptorUpdateTemplateCreateFlagsKHR(..),
VkDeviceCreateFlags(..), VkDirectFBSurfaceCreateFlagsEXT(..),
VkDisplayModeCreateFlagsKHR(..),
VkDisplaySurfaceCreateFlagsKHR(..), VkEventCreateFlags(..),
VkExternalFenceFeatureFlagsKHR(..),
VkExternalFenceHandleTypeFlagsKHR(..),
VkExternalMemoryFeatureFlagsKHR(..),
VkExternalMemoryHandleTypeFlagsKHR(..),
VkExternalSemaphoreFeatureFlagsKHR(..),
VkExternalSemaphoreHandleTypeFlagsKHR(..),
VkFenceImportFlagsKHR(..), VkGeometryFlagsNV(..),
VkGeometryInstanceFlagsNV(..), VkHeadlessSurfaceCreateFlagsEXT(..),
VkIOSSurfaceCreateFlagsMVK(..),
VkImagePipeSurfaceCreateFlagsFUCHSIA(..),
VkInstanceCreateFlags(..), VkMacOSSurfaceCreateFlagsMVK(..),
VkMemoryAllocateFlagsKHR(..), VkMemoryMapFlags(..),
VkMetalSurfaceCreateFlagsEXT(..), VkPeerMemoryFeatureFlagsKHR(..),
VkPipelineColorBlendStateCreateFlags(..),
VkPipelineCoverageModulationStateCreateFlagsNV(..),
VkPipelineCoverageReductionStateCreateFlagsNV(..),
VkPipelineCoverageToColorStateCreateFlagsNV(..),
VkPipelineDepthStencilStateCreateFlags(..),
VkPipelineDiscardRectangleStateCreateFlagsEXT(..),
VkPipelineDynamicStateCreateFlags(..),
VkPipelineInputAssemblyStateCreateFlags(..),
VkPipelineLayoutCreateFlags(..),
VkPipelineMultisampleStateCreateFlags(..),
VkPipelineRasterizationConservativeStateCreateFlagsEXT(..),
VkPipelineRasterizationDepthClipStateCreateFlagsEXT(..),
VkPipelineRasterizationStateCreateFlags(..),
VkPipelineRasterizationStateStreamCreateFlagsEXT(..),
VkPipelineTessellationStateCreateFlags(..),
VkPipelineVertexInputStateCreateFlags(..),
VkPipelineViewportStateCreateFlags(..),
VkPipelineViewportSwizzleStateCreateFlagsNV(..),
VkQueryPoolCreateFlags(..), VkResolveModeFlagsKHR(..),
VkSemaphoreCreateFlags(..), VkSemaphoreImportFlagsKHR(..),
VkSemaphoreWaitFlagsKHR(..),
VkStreamDescriptorSurfaceCreateFlagsGGP(..),
VkValidationCacheCreateFlagsEXT(..), VkViSurfaceCreateFlagsNN(..),
VkWaylandSurfaceCreateFlagsKHR(..),
VkWin32SurfaceCreateFlagsKHR(..), VkXcbSurfaceCreateFlagsKHR(..),
VkXlibSurfaceCreateFlagsKHR(..), VkMacOSSurfaceCreateInfoMVK,
VkStructureType(..), -- > #include "vk_platform.h"
VkCreateMacOSSurfaceMVK,
pattern VkCreateMacOSSurfaceMVK, HS_vkCreateMacOSSurfaceMVK,
PFN_vkCreateMacOSSurfaceMVK, module Graphics.Vulkan.Marshal,
VkInternalAllocationType(..), VkResult(..),
VkSystemAllocationScope(..), newVkAllocationFunction,
newVkDebugReportCallbackEXT, newVkDebugUtilsMessengerCallbackEXT,
newVkFreeFunction, newVkInternalAllocationNotification,
newVkInternalFreeNotification, newVkReallocationFunction,
newVkVoidFunction, unwrapVkAllocationFunction,
unwrapVkDebugReportCallbackEXT,
unwrapVkDebugUtilsMessengerCallbackEXT, unwrapVkFreeFunction,
unwrapVkInternalAllocationNotification,
unwrapVkInternalFreeNotification, unwrapVkReallocationFunction,
unwrapVkVoidFunction, HS_vkAllocationFunction,
HS_vkDebugReportCallbackEXT, HS_vkDebugUtilsMessengerCallbackEXT,
HS_vkFreeFunction, HS_vkInternalAllocationNotification,
HS_vkInternalFreeNotification, HS_vkReallocationFunction,
HS_vkVoidFunction, PFN_vkAllocationFunction,
PFN_vkDebugReportCallbackEXT, PFN_vkDebugUtilsMessengerCallbackEXT,
PFN_vkFreeFunction, PFN_vkInternalAllocationNotification,
PFN_vkInternalFreeNotification, PFN_vkReallocationFunction,
PFN_vkVoidFunction, VkAccelerationStructureKHR,
VkAccelerationStructureKHR_T(), VkAccelerationStructureNV,
VkAccelerationStructureNV_T(), VkBuffer, VkBufferView,
VkBufferView_T(), VkBuffer_T(), VkCommandBuffer,
VkCommandBuffer_T(), VkCommandPool, VkCommandPool_T(),
VkDebugReportCallbackEXT, VkDebugReportCallbackEXT_T(),
VkDebugUtilsMessengerEXT, VkDebugUtilsMessengerEXT_T(),
VkDeferredOperationKHR, VkDeferredOperationKHR_T(),
VkDescriptorPool, VkDescriptorPool_T(), VkDescriptorSet,
VkDescriptorSetLayout, VkDescriptorSetLayout_T(),
VkDescriptorSet_T(), VkDescriptorUpdateTemplate,
VkDescriptorUpdateTemplateKHR, VkDescriptorUpdateTemplateKHR_T(),
VkDescriptorUpdateTemplate_T(), VkDevice, VkDeviceMemory,
VkDeviceMemory_T(), VkDevice_T(), VkDisplayKHR, VkDisplayKHR_T(),
VkDisplayModeKHR, VkDisplayModeKHR_T(), VkEvent, VkEvent_T(),
VkFence, VkFence_T(), VkFramebuffer, VkFramebuffer_T(), VkImage,
VkImageView, VkImageView_T(), VkImage_T(),
VkIndirectCommandsLayoutNV, VkIndirectCommandsLayoutNV_T(),
VkInstance, VkInstance_T(), VkPerformanceConfigurationINTEL,
VkPerformanceConfigurationINTEL_T(), VkPhysicalDevice,
VkPhysicalDevice_T(), VkPipeline, VkPipelineCache,
VkPipelineCache_T(), VkPipelineLayout, VkPipelineLayout_T(),
VkPipeline_T(), VkPrivateDataSlotEXT, VkPrivateDataSlotEXT_T(),
VkQueryPool, VkQueryPool_T(), VkQueue, VkQueue_T(), VkRenderPass,
VkRenderPass_T(), VkSampler, VkSamplerYcbcrConversion,
VkSamplerYcbcrConversionKHR, VkSamplerYcbcrConversionKHR_T(),
VkSamplerYcbcrConversion_T(), VkSampler_T(), VkSemaphore,
VkSemaphore_T(), VkShaderModule, VkShaderModule_T(), VkSurfaceKHR,
VkSurfaceKHR_T(), VkSwapchainKHR, VkSwapchainKHR_T(),
VkValidationCacheEXT, VkValidationCacheEXT_T(),
VkAllocationCallbacks, VK_MVK_MACOS_SURFACE_SPEC_VERSION,
pattern VK_MVK_MACOS_SURFACE_SPEC_VERSION,
VK_MVK_MACOS_SURFACE_EXTENSION_NAME,
pattern VK_MVK_MACOS_SURFACE_EXTENSION_NAME,
pattern VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK)
where
import GHC.Ptr (Ptr (..))
import Graphics.Vulkan.Marshal
import Graphics.Vulkan.Marshal.Proc (VulkanProc (..))
import Graphics.Vulkan.Types.BaseTypes
import Graphics.Vulkan.Types.Bitmasks
import Graphics.Vulkan.Types.Enum.InternalAllocationType
import Graphics.Vulkan.Types.Enum.Result
import Graphics.Vulkan.Types.Enum.StructureType
import Graphics.Vulkan.Types.Enum.SystemAllocationScope
import Graphics.Vulkan.Types.Funcpointers
import Graphics.Vulkan.Types.Handles
import Graphics.Vulkan.Types.Struct.AllocationCallbacks
import Graphics.Vulkan.Types.Struct.PlatformMacosMvk
-- | Pattern synonym matching (and constructing) the C string
--   @\"vkCreateMacOSSurfaceMVK\"@ -- the symbol name under which the
--   entry point is resolved in the Vulkan loader.
pattern VkCreateMacOSSurfaceMVK :: CString
pattern VkCreateMacOSSurfaceMVK <-
        (is_VkCreateMacOSSurfaceMVK -> True)
  where
    VkCreateMacOSSurfaceMVK = _VkCreateMacOSSurfaceMVK
# INLINE _ VkCreateMacOSSurfaceMVK #
-- | The literal, NUL-terminated name of the Vulkan entry point.
_VkCreateMacOSSurfaceMVK :: CString
_VkCreateMacOSSurfaceMVK = Ptr "vkCreateMacOSSurfaceMVK\NUL"#
# INLINE is_VkCreateMacOSSurfaceMVK #
-- | True exactly when the given C string names @vkCreateMacOSSurfaceMVK@.
is_VkCreateMacOSSurfaceMVK :: CString -> Bool
is_VkCreateMacOSSurfaceMVK name
  = cmpCStrings _VkCreateMacOSSurfaceMVK name == EQ
type VkCreateMacOSSurfaceMVK = "vkCreateMacOSSurfaceMVK"
| Success codes : ' VK_SUCCESS ' .
--
-- Error codes: 'VK_ERROR_OUT_OF_HOST_MEMORY', 'VK_ERROR_OUT_OF_DEVICE_MEMORY', 'VK_ERROR_NATIVE_WINDOW_IN_USE_KHR'.
--
> vkCreateMacOSSurfaceMVK
> ( VkInstance instance
-- > , const VkMacOSSurfaceCreateInfoMVK* pCreateInfo
-- > , const VkAllocationCallbacks* pAllocator
-- > , VkSurfaceKHR* pSurface
-- > )
--
< -extensions/html/vkspec.html#vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK registry at www.khronos.org >
type HS_vkCreateMacOSSurfaceMVK =
VkInstance -- ^ instance
->
Ptr VkMacOSSurfaceCreateInfoMVK -- ^ pCreateInfo
->
Ptr VkAllocationCallbacks -- ^ pAllocator
-> Ptr VkSurfaceKHR -- ^ pSurface
-> IO VkResult
type PFN_vkCreateMacOSSurfaceMVK =
FunPtr HS_vkCreateMacOSSurfaceMVK
foreign import ccall unsafe "dynamic"
unwrapVkCreateMacOSSurfaceMVKUnsafe ::
PFN_vkCreateMacOSSurfaceMVK -> HS_vkCreateMacOSSurfaceMVK
foreign import ccall safe "dynamic"
unwrapVkCreateMacOSSurfaceMVKSafe ::
PFN_vkCreateMacOSSurfaceMVK -> HS_vkCreateMacOSSurfaceMVK
-- | Ties the symbol name @vkCreateMacOSSurfaceMVK@ to its Haskell
--   function type and to the unsafe/safe dynamic FFI wrappers declared
--   above, for dynamic resolution through 'VulkanProc'.
instance VulkanProc "vkCreateMacOSSurfaceMVK" where
    type VkProcType "vkCreateMacOSSurfaceMVK" =
         HS_vkCreateMacOSSurfaceMVK
    vkProcSymbol = _VkCreateMacOSSurfaceMVK
    {-# INLINE vkProcSymbol #-}
    unwrapVkProcPtrUnsafe = unwrapVkCreateMacOSSurfaceMVKUnsafe
    {-# INLINE unwrapVkProcPtrUnsafe #-}
    unwrapVkProcPtrSafe = unwrapVkCreateMacOSSurfaceMVKSafe
    {-# INLINE unwrapVkProcPtrSafe #-}
pattern VK_MVK_MACOS_SURFACE_SPEC_VERSION :: (Num a, Eq a) => a
pattern VK_MVK_MACOS_SURFACE_SPEC_VERSION = 3
type VK_MVK_MACOS_SURFACE_SPEC_VERSION = 3
pattern VK_MVK_MACOS_SURFACE_EXTENSION_NAME :: CString
pattern VK_MVK_MACOS_SURFACE_EXTENSION_NAME <-
(is_VK_MVK_MACOS_SURFACE_EXTENSION_NAME -> True)
where
VK_MVK_MACOS_SURFACE_EXTENSION_NAME
= _VK_MVK_MACOS_SURFACE_EXTENSION_NAME
# INLINE _ VK_MVK_MACOS_SURFACE_EXTENSION_NAME #
_VK_MVK_MACOS_SURFACE_EXTENSION_NAME :: CString
_VK_MVK_MACOS_SURFACE_EXTENSION_NAME
= Ptr "VK_MVK_macos_surface\NUL"#
# INLINE is_VK_MVK_MACOS_SURFACE_EXTENSION_NAME #
-- | True exactly when the given C string names @VK_MVK_macos_surface@.
is_VK_MVK_MACOS_SURFACE_EXTENSION_NAME :: CString -> Bool
is_VK_MVK_MACOS_SURFACE_EXTENSION_NAME name
  = cmpCStrings _VK_MVK_MACOS_SURFACE_EXTENSION_NAME name == EQ
type VK_MVK_MACOS_SURFACE_EXTENSION_NAME = "VK_MVK_macos_surface"
-- | @sType@ tag used by 'VkMacOSSurfaceCreateInfoMVK'
--   (numeric value 1000123000).
pattern VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK ::
        VkStructureType
pattern VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK =
        VkStructureType 1000123000
| null | https://raw.githubusercontent.com/achirkin/vulkan/b2e0568c71b5135010f4bba939cd8dcf7a05c361/vulkan-api/src-gen/Graphics/Vulkan/Ext/VK_MVK_macos_surface.hs | haskell | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE MagicHash #
# LANGUAGE PatternSynonyms #
# LANGUAGE Strict #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
> #include "vk_platform.h"
Error codes: 'VK_ERROR_OUT_OF_HOST_MEMORY', 'VK_ERROR_OUT_OF_DEVICE_MEMORY', 'VK_ERROR_NATIVE_WINDOW_IN_USE_KHR'.
> , const VkMacOSSurfaceCreateInfoMVK* pCreateInfo
> , const VkAllocationCallbacks* pAllocator
> , VkSurfaceKHR* pSurface
> )
^ instance
^ pCreateInfo
^ pAllocator
^ pSurface | # OPTIONS_GHC -fno - warn - orphans #
# OPTIONS_HADDOCK not - home #
# LANGUAGE FlexibleInstances #
# LANGUAGE ForeignFunctionInterface #
module Graphics.Vulkan.Ext.VK_MVK_macos_surface
(AHardwareBuffer(), ANativeWindow(), CAMetalLayer(), VkBool32(..),
VkDeviceAddress(..), VkDeviceSize(..), VkFlags(..),
VkSampleMask(..), VkAndroidSurfaceCreateFlagsKHR(..),
VkBufferViewCreateFlags(..),
VkBuildAccelerationStructureFlagsNV(..),
VkCommandPoolTrimFlags(..), VkCommandPoolTrimFlagsKHR(..),
VkDebugUtilsMessengerCallbackDataFlagsEXT(..),
VkDebugUtilsMessengerCreateFlagsEXT(..),
VkDescriptorBindingFlagsEXT(..), VkDescriptorPoolResetFlags(..),
VkDescriptorUpdateTemplateCreateFlags(..),
VkDescriptorUpdateTemplateCreateFlagsKHR(..),
VkDeviceCreateFlags(..), VkDirectFBSurfaceCreateFlagsEXT(..),
VkDisplayModeCreateFlagsKHR(..),
VkDisplaySurfaceCreateFlagsKHR(..), VkEventCreateFlags(..),
VkExternalFenceFeatureFlagsKHR(..),
VkExternalFenceHandleTypeFlagsKHR(..),
VkExternalMemoryFeatureFlagsKHR(..),
VkExternalMemoryHandleTypeFlagsKHR(..),
VkExternalSemaphoreFeatureFlagsKHR(..),
VkExternalSemaphoreHandleTypeFlagsKHR(..),
VkFenceImportFlagsKHR(..), VkGeometryFlagsNV(..),
VkGeometryInstanceFlagsNV(..), VkHeadlessSurfaceCreateFlagsEXT(..),
VkIOSSurfaceCreateFlagsMVK(..),
VkImagePipeSurfaceCreateFlagsFUCHSIA(..),
VkInstanceCreateFlags(..), VkMacOSSurfaceCreateFlagsMVK(..),
VkMemoryAllocateFlagsKHR(..), VkMemoryMapFlags(..),
VkMetalSurfaceCreateFlagsEXT(..), VkPeerMemoryFeatureFlagsKHR(..),
VkPipelineColorBlendStateCreateFlags(..),
VkPipelineCoverageModulationStateCreateFlagsNV(..),
VkPipelineCoverageReductionStateCreateFlagsNV(..),
VkPipelineCoverageToColorStateCreateFlagsNV(..),
VkPipelineDepthStencilStateCreateFlags(..),
VkPipelineDiscardRectangleStateCreateFlagsEXT(..),
VkPipelineDynamicStateCreateFlags(..),
VkPipelineInputAssemblyStateCreateFlags(..),
VkPipelineLayoutCreateFlags(..),
VkPipelineMultisampleStateCreateFlags(..),
VkPipelineRasterizationConservativeStateCreateFlagsEXT(..),
VkPipelineRasterizationDepthClipStateCreateFlagsEXT(..),
VkPipelineRasterizationStateCreateFlags(..),
VkPipelineRasterizationStateStreamCreateFlagsEXT(..),
VkPipelineTessellationStateCreateFlags(..),
VkPipelineVertexInputStateCreateFlags(..),
VkPipelineViewportStateCreateFlags(..),
VkPipelineViewportSwizzleStateCreateFlagsNV(..),
VkQueryPoolCreateFlags(..), VkResolveModeFlagsKHR(..),
VkSemaphoreCreateFlags(..), VkSemaphoreImportFlagsKHR(..),
VkSemaphoreWaitFlagsKHR(..),
VkStreamDescriptorSurfaceCreateFlagsGGP(..),
VkValidationCacheCreateFlagsEXT(..), VkViSurfaceCreateFlagsNN(..),
VkWaylandSurfaceCreateFlagsKHR(..),
VkWin32SurfaceCreateFlagsKHR(..), VkXcbSurfaceCreateFlagsKHR(..),
VkXlibSurfaceCreateFlagsKHR(..), VkMacOSSurfaceCreateInfoMVK,
VkCreateMacOSSurfaceMVK,
pattern VkCreateMacOSSurfaceMVK, HS_vkCreateMacOSSurfaceMVK,
PFN_vkCreateMacOSSurfaceMVK, module Graphics.Vulkan.Marshal,
VkInternalAllocationType(..), VkResult(..),
VkSystemAllocationScope(..), newVkAllocationFunction,
newVkDebugReportCallbackEXT, newVkDebugUtilsMessengerCallbackEXT,
newVkFreeFunction, newVkInternalAllocationNotification,
newVkInternalFreeNotification, newVkReallocationFunction,
newVkVoidFunction, unwrapVkAllocationFunction,
unwrapVkDebugReportCallbackEXT,
unwrapVkDebugUtilsMessengerCallbackEXT, unwrapVkFreeFunction,
unwrapVkInternalAllocationNotification,
unwrapVkInternalFreeNotification, unwrapVkReallocationFunction,
unwrapVkVoidFunction, HS_vkAllocationFunction,
HS_vkDebugReportCallbackEXT, HS_vkDebugUtilsMessengerCallbackEXT,
HS_vkFreeFunction, HS_vkInternalAllocationNotification,
HS_vkInternalFreeNotification, HS_vkReallocationFunction,
HS_vkVoidFunction, PFN_vkAllocationFunction,
PFN_vkDebugReportCallbackEXT, PFN_vkDebugUtilsMessengerCallbackEXT,
PFN_vkFreeFunction, PFN_vkInternalAllocationNotification,
PFN_vkInternalFreeNotification, PFN_vkReallocationFunction,
PFN_vkVoidFunction, VkAccelerationStructureKHR,
VkAccelerationStructureKHR_T(), VkAccelerationStructureNV,
VkAccelerationStructureNV_T(), VkBuffer, VkBufferView,
VkBufferView_T(), VkBuffer_T(), VkCommandBuffer,
VkCommandBuffer_T(), VkCommandPool, VkCommandPool_T(),
VkDebugReportCallbackEXT, VkDebugReportCallbackEXT_T(),
VkDebugUtilsMessengerEXT, VkDebugUtilsMessengerEXT_T(),
VkDeferredOperationKHR, VkDeferredOperationKHR_T(),
VkDescriptorPool, VkDescriptorPool_T(), VkDescriptorSet,
VkDescriptorSetLayout, VkDescriptorSetLayout_T(),
VkDescriptorSet_T(), VkDescriptorUpdateTemplate,
VkDescriptorUpdateTemplateKHR, VkDescriptorUpdateTemplateKHR_T(),
VkDescriptorUpdateTemplate_T(), VkDevice, VkDeviceMemory,
VkDeviceMemory_T(), VkDevice_T(), VkDisplayKHR, VkDisplayKHR_T(),
VkDisplayModeKHR, VkDisplayModeKHR_T(), VkEvent, VkEvent_T(),
VkFence, VkFence_T(), VkFramebuffer, VkFramebuffer_T(), VkImage,
VkImageView, VkImageView_T(), VkImage_T(),
VkIndirectCommandsLayoutNV, VkIndirectCommandsLayoutNV_T(),
VkInstance, VkInstance_T(), VkPerformanceConfigurationINTEL,
VkPerformanceConfigurationINTEL_T(), VkPhysicalDevice,
VkPhysicalDevice_T(), VkPipeline, VkPipelineCache,
VkPipelineCache_T(), VkPipelineLayout, VkPipelineLayout_T(),
VkPipeline_T(), VkPrivateDataSlotEXT, VkPrivateDataSlotEXT_T(),
VkQueryPool, VkQueryPool_T(), VkQueue, VkQueue_T(), VkRenderPass,
VkRenderPass_T(), VkSampler, VkSamplerYcbcrConversion,
VkSamplerYcbcrConversionKHR, VkSamplerYcbcrConversionKHR_T(),
VkSamplerYcbcrConversion_T(), VkSampler_T(), VkSemaphore,
VkSemaphore_T(), VkShaderModule, VkShaderModule_T(), VkSurfaceKHR,
VkSurfaceKHR_T(), VkSwapchainKHR, VkSwapchainKHR_T(),
VkValidationCacheEXT, VkValidationCacheEXT_T(),
VkAllocationCallbacks, VK_MVK_MACOS_SURFACE_SPEC_VERSION,
pattern VK_MVK_MACOS_SURFACE_SPEC_VERSION,
VK_MVK_MACOS_SURFACE_EXTENSION_NAME,
pattern VK_MVK_MACOS_SURFACE_EXTENSION_NAME,
pattern VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK)
where
import GHC.Ptr (Ptr (..))
import Graphics.Vulkan.Marshal
import Graphics.Vulkan.Marshal.Proc (VulkanProc (..))
import Graphics.Vulkan.Types.BaseTypes
import Graphics.Vulkan.Types.Bitmasks
import Graphics.Vulkan.Types.Enum.InternalAllocationType
import Graphics.Vulkan.Types.Enum.Result
import Graphics.Vulkan.Types.Enum.StructureType
import Graphics.Vulkan.Types.Enum.SystemAllocationScope
import Graphics.Vulkan.Types.Funcpointers
import Graphics.Vulkan.Types.Handles
import Graphics.Vulkan.Types.Struct.AllocationCallbacks
import Graphics.Vulkan.Types.Struct.PlatformMacosMvk
pattern VkCreateMacOSSurfaceMVK :: CString
pattern VkCreateMacOSSurfaceMVK <-
(is_VkCreateMacOSSurfaceMVK -> True)
where
VkCreateMacOSSurfaceMVK = _VkCreateMacOSSurfaceMVK
# INLINE _ VkCreateMacOSSurfaceMVK #
_VkCreateMacOSSurfaceMVK :: CString
_VkCreateMacOSSurfaceMVK = Ptr "vkCreateMacOSSurfaceMVK\NUL"#
# INLINE is_VkCreateMacOSSurfaceMVK #
is_VkCreateMacOSSurfaceMVK :: CString -> Bool
is_VkCreateMacOSSurfaceMVK
= (EQ ==) . cmpCStrings _VkCreateMacOSSurfaceMVK
type VkCreateMacOSSurfaceMVK = "vkCreateMacOSSurfaceMVK"
| Success codes : ' VK_SUCCESS ' .
> vkCreateMacOSSurfaceMVK
> ( VkInstance instance
< -extensions/html/vkspec.html#vkCreateMacOSSurfaceMVK vkCreateMacOSSurfaceMVK registry at www.khronos.org >
type HS_vkCreateMacOSSurfaceMVK =
->
->
-> IO VkResult
type PFN_vkCreateMacOSSurfaceMVK =
FunPtr HS_vkCreateMacOSSurfaceMVK
foreign import ccall unsafe "dynamic"
unwrapVkCreateMacOSSurfaceMVKUnsafe ::
PFN_vkCreateMacOSSurfaceMVK -> HS_vkCreateMacOSSurfaceMVK
foreign import ccall safe "dynamic"
unwrapVkCreateMacOSSurfaceMVKSafe ::
PFN_vkCreateMacOSSurfaceMVK -> HS_vkCreateMacOSSurfaceMVK
instance VulkanProc "vkCreateMacOSSurfaceMVK" where
type VkProcType "vkCreateMacOSSurfaceMVK" =
HS_vkCreateMacOSSurfaceMVK
vkProcSymbol = _VkCreateMacOSSurfaceMVK
# INLINE vkProcSymbol #
unwrapVkProcPtrUnsafe = unwrapVkCreateMacOSSurfaceMVKUnsafe
# INLINE unwrapVkProcPtrUnsafe #
unwrapVkProcPtrSafe = unwrapVkCreateMacOSSurfaceMVKSafe
# INLINE unwrapVkProcPtrSafe #
pattern VK_MVK_MACOS_SURFACE_SPEC_VERSION :: (Num a, Eq a) => a
pattern VK_MVK_MACOS_SURFACE_SPEC_VERSION = 3
type VK_MVK_MACOS_SURFACE_SPEC_VERSION = 3
pattern VK_MVK_MACOS_SURFACE_EXTENSION_NAME :: CString
pattern VK_MVK_MACOS_SURFACE_EXTENSION_NAME <-
(is_VK_MVK_MACOS_SURFACE_EXTENSION_NAME -> True)
where
VK_MVK_MACOS_SURFACE_EXTENSION_NAME
= _VK_MVK_MACOS_SURFACE_EXTENSION_NAME
# INLINE _ VK_MVK_MACOS_SURFACE_EXTENSION_NAME #
_VK_MVK_MACOS_SURFACE_EXTENSION_NAME :: CString
_VK_MVK_MACOS_SURFACE_EXTENSION_NAME
= Ptr "VK_MVK_macos_surface\NUL"#
# INLINE is_VK_MVK_MACOS_SURFACE_EXTENSION_NAME #
is_VK_MVK_MACOS_SURFACE_EXTENSION_NAME :: CString -> Bool
is_VK_MVK_MACOS_SURFACE_EXTENSION_NAME
= (EQ ==) . cmpCStrings _VK_MVK_MACOS_SURFACE_EXTENSION_NAME
type VK_MVK_MACOS_SURFACE_EXTENSION_NAME = "VK_MVK_macos_surface"
pattern VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK ::
VkStructureType
pattern VK_STRUCTURE_TYPE_MACOS_SURFACE_CREATE_INFO_MVK =
VkStructureType 1000123000
|
d519cfe3ab6b2303f440e5d7052215b705126ad3e5cca4018f1dc746e10ff9e0 | poscat0x04/telegram-types | SetMyCommands.hs | module Web.Telegram.Types.Internal.API.SetMyCommands where
import Common
import Web.Telegram.Types.Internal.BotCommand
-- | Request payload for the Telegram Bot API @setMyCommands@ method:
-- @commands@ is the complete list of commands to register for the bot.
newtype SetMyCommands = SetMyCommands
  {commands :: [BotCommand]}
  deriving stock (Show, Eq)
-- Template Haskell splices (helpers imported from the project's Common
-- module): field-label generation, JSON codecs using the snake_case
-- options, and the API-method wiring for 'SetMyCommands'.
mkLabel ''SetMyCommands
deriveToJSON snake ''SetMyCommands
makeMethod ''SetMyCommands
| null | https://raw.githubusercontent.com/poscat0x04/telegram-types/3de0710640f5303638a83e409001b0342299aeb8/src/Web/Telegram/Types/Internal/API/SetMyCommands.hs | haskell | module Web.Telegram.Types.Internal.API.SetMyCommands where
import Common
import Web.Telegram.Types.Internal.BotCommand
newtype SetMyCommands = SetMyCommands
{commands :: [BotCommand]}
deriving stock (Show, Eq)
mkLabel ''SetMyCommands
deriveToJSON snake ''SetMyCommands
makeMethod ''SetMyCommands
| |
2dbdd652345fcb513a8b7c1f4ad24cacacd801c5603ed49ec294a15f8c6fa23e | rotty/texinfo | indexing.scm | ( texinfo indexing ) -- indexing stexinfo
Copyright ( C ) 2003,2004 < wingo at pobox dot com >
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;;
;; This program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
You should have received a copy of the GNU General Public License
;; along with this program. If not, see </>.
;;; Commentary:
;;
;;@c texinfo formatting
;;Given a piece of stexi, return an index of a specified variety.
;;
Note that currently , - extract - index } does n't differentiate
;;between different kinds of index entries. That's a bug ;)
;;; Code:
(define-module (texinfo indexing)
#:use-module (sxml simple)
#:use-module (scheme documentation)
#:use-module (srfi srfi-13)
#:export (stexi-extract-index))
;; Extract the value of the 'name attribute from a definition element.
;; NOTE(review): appears unused within this module -- confirm callers
;; before removing.
(define (def-name def)
(cadr (assq 'name (cdadr def))))
;; stexi tags of texinfo definition elements that produce index entries.
(define defines
'(deftp defcv defivar deftypeivar defop deftypeop defmethod
deftypemethod defopt defvr defvar deftypevr deftypevar deffn
deftypefn defspec defmac defun deftypefun))
;; stexi tags of explicit texinfo index-entry commands.
(define indices
'(cindex findex vindex kindex pindex tindex))
(define (stexi-extract-index tree manual-name kind)
"Given an stexi tree @var{tree}, index all of the entries of type
@var{kind}. @var{kind} can be one of the predefined texinfo indices
(@code{concept}, @code{variable}, @code{function}, @code{key},
@code{program}, @code{type}) or one of the special symbols @code{auto}
or @code{all}. @code{auto} will scan the stext for a @code{(printindex)}
statement, and @code{all} will generate an index from all entries,
regardless of type.
The returned index is a list of pairs, the @sc{car} of which is the
entry (a string) and the @sc{cdr} of which is a node name (a string)."
;; NOTE(review): manual-name and kind are currently ignored by the body
;; (see the module commentary: entry kinds are not differentiated yet).
;; Walk the tree looking for an (anchor ...) node immediately followed
;; by a definition element or an index command, and record
;; (entry-string . anchor-node-name) pairs.
(let loop ((in tree) (entries '()))
(cond
((null? in)
entries)
((pair? (car in))
(cond
;; anchor followed by a definition element: the entry is the
;; definition's 'name attribute, the node is the anchor's 'name.
((and (pair? (cdr in)) (pair? (cadr in))
(eq? (caar in) 'anchor) (memq (caadr in) defines))
(loop (cddr in) (acons (cadr (assq 'name (cdr (cadadr in))))
(cadr (assq 'name (cdadar in)))
entries)))
;; anchor followed by an index command: the entry is the
;; command's text content, the node is the anchor's 'name.
((and (pair? (cdr in)) (pair? (cadr in))
(eq? (caar in) 'anchor) (memq (caadr in) indices))
(loop (cddr in) (acons (sxml->string (cadr in))
(cadr (assq 'name (cdadar in)))
entries)))
;; Otherwise recurse into the subtree, threading the accumulated
;; entries through the inner call before continuing rightwards.
(else
(loop (cdr in) (loop (car in) entries)))))
(else
(loop (cdr in) entries)))))
arch - tag : 216d29d3 - 1ed9 - 433f-9c19 - 0dc4d6b439b6
| null | https://raw.githubusercontent.com/rotty/texinfo/362a147831ba3ae8926eea6af33b2708d79e05dd/scheme/texinfo/private/indexing.scm | scheme | This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
Commentary:
@c texinfo formatting
Given a piece of stexi, return an index of a specified variety.
between different kinds of index entries. That's a bug ;)
Code: | ( texinfo indexing ) -- indexing stexinfo
Copyright ( C ) 2003,2004 < wingo at pobox dot com >
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
Note that currently , - extract - index } does n't differentiate
(define-module (texinfo indexing)
#:use-module (sxml simple)
#:use-module (scheme documentation)
#:use-module (srfi srfi-13)
#:export (stexi-extract-index))
(define (def-name def)
(cadr (assq 'name (cdadr def))))
(define defines
'(deftp defcv defivar deftypeivar defop deftypeop defmethod
deftypemethod defopt defvr defvar deftypevr deftypevar deffn
deftypefn defspec defmac defun deftypefun))
(define indices
'(cindex findex vindex kindex pindex tindex))
(define (stexi-extract-index tree manual-name kind)
"Given an stexi tree @var{tree}, index all of the entries of type
@var{kind}. @var{kind} can be one of the predefined texinfo indices
(@code{concept}, @code{variable}, @code{function}, @code{key},
@code{program}, @code{type}) or one of the special symbols @code{auto}
or @code{all}. @code{auto} will scan the stext for a @code{(printindex)}
statement, and @code{all} will generate an index from all entries,
regardless of type.
The returned index is a list of pairs, the @sc{car} of which is the
entry (a string) and the @sc{cdr} of which is a node name (a string)."
(let loop ((in tree) (entries '()))
(cond
((null? in)
entries)
((pair? (car in))
(cond
((and (pair? (cdr in)) (pair? (cadr in))
(eq? (caar in) 'anchor) (memq (caadr in) defines))
(loop (cddr in) (acons (cadr (assq 'name (cdr (cadadr in))))
(cadr (assq 'name (cdadar in)))
entries)))
((and (pair? (cdr in)) (pair? (cadr in))
(eq? (caar in) 'anchor) (memq (caadr in) indices))
(loop (cddr in) (acons (sxml->string (cadr in))
(cadr (assq 'name (cdadar in)))
entries)))
(else
(loop (cdr in) (loop (car in) entries)))))
(else
(loop (cdr in) entries)))))
arch - tag : 216d29d3 - 1ed9 - 433f-9c19 - 0dc4d6b439b6
|
54975951655bb43da06cdb4048454be4b0323eec1521ba40d9e36cefef38071d | baskeboler/cljs-karaoke-client | queue.cljs | (ns cljs-karaoke.remote-control.queue
(:require [cljs.core.async :as async :refer [go go-loop <! >! chan]]))
;; Channel holding commands received from the remote control; `defonce`
;; keeps the same channel across hot reloads.
(defonce remote-commands-queue (chan))
;; NOTE: `^:export` (keyword metadata) is required so the var survives
;; advanced compilation; the previous `^export` only attached a `:tag`
;; type hint and exported nothing.
(defn ^:export queue-remote-command
  "Asynchronously puts remote-control command `cmd` onto
  `remote-commands-queue`; returns the `go` block's channel."
  [cmd]
  (go
    (>! remote-commands-queue cmd)))
(println "Remote control queue initiated.")
| null | https://raw.githubusercontent.com/baskeboler/cljs-karaoke-client/bb6512435eaa436d35034886be99213625847ee0/src/main/cljs_karaoke/remote_control/queue.cljs | clojure | (ns cljs-karaoke.remote-control.queue
(:require [cljs.core.async :as async :refer [go go-loop <! >! chan]]))
(defonce remote-commands-queue (chan))
(defn ^export queue-remote-command [cmd]
(go
(>! remote-commands-queue cmd)))
(println "Remote control queue initiated.")
| |
4a4264e4adce0ac501f1d8bff2c0a2f31abc5a2de1392ceb486e50fcccc9da57 | MyDataFlow/ttalk-server | meck_history_tests.erl | %%%============================================================================
Copyright 2013
%%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%% -2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%============================================================================
-module(meck_history_tests).
-include_lib("eunit/include/eunit.hrl").
%% meck:num_calls/3 with an integer third argument counts only the
%% recorded calls of that arity (foo/2 vs foo/3 are tallied separately;
%% an arity with no calls yields 0).
num_calls_with_arity_test() ->
%% Given
meck:new(test, [non_strict]),
meck:expect(test, foo, 2, ok),
meck:expect(test, foo, 3, ok),
%% When
test:foo(1, 2, 3),
test:foo(1, 2),
test:foo(1, 2, 3),
test:foo(1, 2, 3),
test:foo(1, 2),
%% Then
?assertMatch(2, meck:num_calls(test, foo, 2)),
?assertMatch(3, meck:num_calls(test, foo, 3)),
?assertMatch(0, meck:num_calls(test, foo, 4)),
%% Clean
meck:unload().
%% meck:capture/5 extracts argument #2 of the first/last/Nth matching
%% call to test:foo/3 (bar calls and foo/4 calls are excluded by the
%% args spec); asking for a position past the match count fails with
%% not_found.
capture_different_positions_test() ->
%% Given
meck:new(test, [non_strict]),
meck:expect(test, foo, 3, ok),
meck:expect(test, foo, 4, ok),
meck:expect(test, bar, 3, ok),
test:foo(1001, 2001, 3001, 4001),
test:bar(1002, 2002, 3002),
test:foo(1003, 2003, 3003),
test:bar(1004, 2004, 3004),
test:foo(1005, 2005, 3005),
test:foo(1006, 2006, 3006),
test:bar(1007, 2007, 3007),
test:foo(1008, 2008, 3008),
%% When/Then
?assertMatch(2003, meck:capture(first, test, foo, ['_', '_', '_'], 2)),
?assertMatch(2008, meck:capture(last, test, foo, ['_', '_', '_'], 2)),
?assertMatch(2006, meck:capture(3, test, foo, ['_', '_', '_'], 2)),
?assertError(not_found, meck:capture(5, test, foo, ['_', '_', '_'], 2)),
%% Clean
meck:unload().
%% The args spec of meck:capture can be '_' (any call), an integer
%% arity, a pattern list of wildcards/literals, or a matcher built with
%% meck:is/1; each narrows which recorded call the argument is captured
%% from.
capture_different_args_specs_test() ->
%% Given
meck:new(test, [non_strict]),
meck:expect(test, foo, 2, ok),
meck:expect(test, foo, 3, ok),
meck:expect(test, foo, 4, ok),
meck:expect(test, bar, 3, ok),
test:foo(1001, 2001, 3001, 4001),
test:bar(1002, 2002, 3002),
test:foo(1003, 2003, 3003),
test:bar(1004, 2004, 3004),
test:foo(1005, 2005),
test:foo(1006, 2006, 3006),
test:bar(1007, 2007, 3007),
test:foo(1008, 2008, 3008),
%% When/Then
?assertMatch(2001, meck:capture(first, test, foo, '_', 2)),
?assertMatch(2003, meck:capture(first, test, foo, 3, 2)),
?assertMatch(2005, meck:capture(first, test, foo, ['_', '_'], 2)),
?assertMatch(2006, meck:capture(first, test, foo, [1006, '_', '_'], 2)),
?assertMatch(2008, meck:capture(first, test, foo, ['_', '_', meck:is(hamcrest_matchers:greater_than(3006))], 2)),
%% Clean
meck:unload().
| null | https://raw.githubusercontent.com/MyDataFlow/ttalk-server/07a60d5d74cd86aedd1f19c922d9d3abf2ebf28d/deps/meck/test/meck_history_tests.erl | erlang | ============================================================================
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
============================================================================
Given
When
Then
Clean
Given
When/Then
Clean
Given
When/Then
Clean | Copyright 2013
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(meck_history_tests).
-include_lib("eunit/include/eunit.hrl").
num_calls_with_arity_test() ->
meck:new(test, [non_strict]),
meck:expect(test, foo, 2, ok),
meck:expect(test, foo, 3, ok),
test:foo(1, 2, 3),
test:foo(1, 2),
test:foo(1, 2, 3),
test:foo(1, 2, 3),
test:foo(1, 2),
?assertMatch(2, meck:num_calls(test, foo, 2)),
?assertMatch(3, meck:num_calls(test, foo, 3)),
?assertMatch(0, meck:num_calls(test, foo, 4)),
meck:unload().
capture_different_positions_test() ->
meck:new(test, [non_strict]),
meck:expect(test, foo, 3, ok),
meck:expect(test, foo, 4, ok),
meck:expect(test, bar, 3, ok),
test:foo(1001, 2001, 3001, 4001),
test:bar(1002, 2002, 3002),
test:foo(1003, 2003, 3003),
test:bar(1004, 2004, 3004),
test:foo(1005, 2005, 3005),
test:foo(1006, 2006, 3006),
test:bar(1007, 2007, 3007),
test:foo(1008, 2008, 3008),
?assertMatch(2003, meck:capture(first, test, foo, ['_', '_', '_'], 2)),
?assertMatch(2008, meck:capture(last, test, foo, ['_', '_', '_'], 2)),
?assertMatch(2006, meck:capture(3, test, foo, ['_', '_', '_'], 2)),
?assertError(not_found, meck:capture(5, test, foo, ['_', '_', '_'], 2)),
meck:unload().
capture_different_args_specs_test() ->
meck:new(test, [non_strict]),
meck:expect(test, foo, 2, ok),
meck:expect(test, foo, 3, ok),
meck:expect(test, foo, 4, ok),
meck:expect(test, bar, 3, ok),
test:foo(1001, 2001, 3001, 4001),
test:bar(1002, 2002, 3002),
test:foo(1003, 2003, 3003),
test:bar(1004, 2004, 3004),
test:foo(1005, 2005),
test:foo(1006, 2006, 3006),
test:bar(1007, 2007, 3007),
test:foo(1008, 2008, 3008),
?assertMatch(2001, meck:capture(first, test, foo, '_', 2)),
?assertMatch(2003, meck:capture(first, test, foo, 3, 2)),
?assertMatch(2005, meck:capture(first, test, foo, ['_', '_'], 2)),
?assertMatch(2006, meck:capture(first, test, foo, [1006, '_', '_'], 2)),
?assertMatch(2008, meck:capture(first, test, foo, ['_', '_', meck:is(hamcrest_matchers:greater_than(3006))], 2)),
meck:unload().
|
aec7c52087b0acbe2a7f7fa40e054404851bfb93d11e0962152e1294db535a4c | lpw25/talks | fiber.ml | open Import
(* Type-equality witness: a value of type [('a, 'b) Eq.t] can only be
   built when ['a] and ['b] are the same type. *)
module Eq = struct
type ('a, 'b) t = T : ('a, 'a) t
(* [cast w x] converts [x : a] to type [b]; safe because matching on
   the witness [T] refines [b] to [a]. *)
let cast (type a) (type b) (T : (a, b) t) (x : a) : b = x
end
(* Typed keys for fiber-local variables.  Each key carries its own
   constructor of the extensible variant [Key.t], which lets [eq]
   recover a type-equality witness, plus a unique integer [id] used for
   map lookups. *)
module Var0 = struct
module Key = struct
type 'a t = ..
end
module type T = sig
type t
type 'a Key.t += T : t Key.t
val id : int
end
type 'a t = (module T with type t = 'a)
(* Source of fresh key ids.  NOTE(review): a plain ref, so key creation
   assumes single-threaded use -- confirm. *)
let next = ref 0
(* Allocate a fresh key: a new extensible-variant constructor paired
   with the next id. *)
let create (type a) () =
let n = !next in
next := n + 1;
let module M = struct
type t = a
type 'a Key.t += T : t Key.t
let id = n
end in
(module M : T with type t = a)
let id (type a) (module M : T with type t = a) = M.id
(* Recover the equality witness between two keys.  Trips the assertion
   if the keys differ, so callers must only invoke this after matching
   ids (as [get]/[get_exn] below do). *)
let eq (type a) (type b)
(module A : T with type t = a)
(module B : T with type t = b) : (a, b) Eq.t =
match A.T with
| B.T -> Eq.T
| _ -> assert false
end
(* A key packed together with a value of the key's type; stored
   heterogeneously in an [Int_map.t] indexed by [Var0.id]. *)
module Binding = struct
type t = T : 'a Var0.t * 'a -> t
end
module Int_map = Map.Make(Int)
(* Execution context threaded through the scheduler. *)
type ctx =
{ on_error : exn -> unit; (* This callback must never raise *)
fibers : int ref; (* Number of fibers running in this execution
context *)
vars : Binding.t Int_map.t;
(* Fiber-local variable bindings, keyed by [Var0.id]. *)
on_release : unit -> unit;
(* Called when [fibers] drops to zero (see [deref]). *)
suspended : task Queue.t; }
(* Runnable work parked until the scheduler picks it up. *)
and 'a cont =
('a, ![], unit) continuation
and task =
| Cont : 'a * 'a cont -> task
| Cont_unit : unit cont -> task
| Exec :
ctx * 'a * ('a -[Async : 'o. 'o op -> 'o]-> 'b)
* (ctx -> 'b -> unit) -> task
(* A continuation waiting on an ivar or a mutex, remembering the
   context it must be resumed in. *)
and 'a waiting =
Waiting : ctx * 'a cont -> 'a waiting
and 'a ivar_state =
| Full of 'a
| Empty of 'a waiting Queue.t
and 'a ivar = { mutable state : 'a ivar_state }
and mutex =
{ mutable locked : bool;
mutable waiters : unit waiting Queue.t; }
(* The operations a fiber can perform; each has a matching handler
   clause in the [exec] dispatch below. *)
and 'a op =
| Never : 'a op
| Fork :
'a * ('a -[Async : 'o. 'o op -> 'o]-> 'b) -> 'b ivar op
| NFork :
'a list * ('a -[Async : 'o. 'o op -> 'o]-> 'b) ->
'b ivar list op
| Fork_and_join :
(unit -[Async : 'o. 'o op -> 'o]-> 'a) *
(unit -[Async : 'o. 'o op -> 'o]-> 'b) ->
('a * 'b) op
| Parallel_map :
'a list * ('a -[Async : 'o. 'o op -> 'o]-> 'b) -> 'b list op
| Parallel_iter :
'a list * ('a -[Async : 'o. 'o op -> 'o]-> unit) -> unit op
| Get : 'a Var0.t -> 'a option op
| Get_exn : 'a Var0.t -> 'a op
| Set :
'a Var0.t * 'a * (unit -[Async : 'o. 'o op -> 'o]-> 'b) -> 'b op
| With_error_handler :
(unit -[Async : 'o. 'o op -> 'o]-> 'a) * (exn -> unit) -> 'a op
| Wait_errors :
(unit -[Async : 'o. 'o op -> 'o]-> 'a) -> ('a, unit) result op
| Fill : 'b ivar * 'b -> unit op
| Read : 'a ivar -> 'a op
| Lock : mutex -> unit op
| Unlock : mutex -> unit op
| Yield : unit op
effect async = ![ Async : 'o. 'o op -> 'o ]
(* Rendez-vous state for [fork_and_join]: records which side finished
   first. *)
type ('a, 'b) fork_and_join_state =
| Nothing_yet
| Got_a of 'a
| Got_b of 'b
(* Root context: errors re-raise, one fiber, no vars, nothing to do on
   release. *)
let initial_context () =
{ on_error = raise;
fibers = ref 1;
vars = Int_map.empty;
on_release = ignore;
suspended = Queue.create (); }
(* Child context sharing the parent's queue/vars but with its own
   fiber count and release callback (used by [wait_errors]). *)
let subcontext ctx ~on_release =
{ ctx with on_release; fibers = ref 1 }
let set_vars ctx vars =
{ ctx with vars }
let set_error_handler ctx ~on_error =
{ ctx with on_error }
(* Park a task on the context's run queue. *)
let enqueue ctx s =
Queue.push s ctx.suspended
let release ctx =
ctx.on_release ()
(* Bump the fiber reference count by [n]. *)
let add_refs ctx n =
ctx.fibers := !(ctx.fibers) + n
(* Wake a parked waiter by queueing its continuation with value [x]. *)
let activate (Waiting(ctx, cont)) x =
enqueue ctx (Cont(x, cont))
(* Convert an array whose slots must all be [Some] into the list of
   their payloads, preserving array order.  A [None] slot is a logic
   error and trips the assertion. *)
let list_of_option_array a =
  let rec build idx tail =
    if idx = 0 then tail
    else
      match a.(idx - 1) with
      | Some v -> build (idx - 1) (v :: tail)
      | None -> assert false
  in
  build (Array.length a) []
(* Heart of the scheduler.  [exec ctx x f g] runs fiber [f] on input
   [x] in context [ctx]; when [f] returns, the result is handed to the
   callback [g].  Every [Async] operation performed by [f] is
   dispatched, together with its continuation [k], to the matching
   handler among the mutually-recursive clauses below. *)
let rec exec :
'a 'b. ctx -> 'a ->
('a -[async]-> 'b) -> (ctx -> 'b -> unit) -> unit =
fun ctx x f g ->
match f x with
| res -> g ctx res
| exception exn ->
forward_error true ctx exn
| effect Async(op), k -> begin
match op with
| Never -> never ctx k
| Fork(x, f) -> fork x f ctx k
| NFork(l, f) -> nfork l f ctx k
| Fork_and_join(fa, fb) -> fork_and_join fa fb ctx k
| Parallel_map(l, f) -> parallel_map l f ctx k
| Parallel_iter(l, f) -> parallel_iter l f ctx k
| Get var -> get var ctx k
| Get_exn var -> get_exn var ctx k
| Set(var, x, f) -> set var x f ctx k
| With_error_handler(f, h) -> with_error_handler f h ctx k
| Wait_errors f -> wait_errors f ctx k
| Fill(ivar, x) -> fill ivar x ctx k
| Read ivar -> read ivar ctx k
| Lock mutex -> lock mutex ctx k
| Unlock mutex -> unlock mutex ctx k
| Yield -> yield ctx k
end
(* Resume the next suspended task, if any; returns silently when the
   run queue is empty. *)
and schedule : ctx -> unit =
fun ctx ->
match Queue.pop ctx.suspended with
| exception Queue.Empty -> ()
| Cont(x, k) -> continue k x
| Cont_unit k -> continue k ()
| Exec(ctx', x, f, g) -> exec ctx' x f g
(* Drop one fiber reference; when the count reaches zero the context is
   released, otherwise control goes back to the scheduler. *)
and deref : 'a. ctx -> unit =
fun ctx ->
let n = !(ctx.fibers) - 1 in
assert (n >= 0);
ctx.fibers := n;
if n = 0 then release ctx
else schedule ctx
(* Report [exn] to the context's error handler; [drf] says whether to
   also [deref] afterwards.  If the handler itself raises we cannot
   propagate further, so the failure is printed. *)
and forward_error : 'a. bool -> ctx -> exn -> unit =
fun drf ctx exn ->
let bt = Printexc.get_raw_backtrace () in
match ctx.on_error exn with
| () -> if drf then deref ctx
| exception exn2 ->
(* We can't abort the execution at this point, so we just dump
the error on stderr *)
let bt2 = Printexc.get_backtrace () in
let s =
(Printf.sprintf "%s\n%s\nOriginal exception was: %s\n%s"
(Printexc.to_string exn2) bt2
(Printexc.to_string exn) (Printexc.raw_backtrace_to_string bt))
|> String.split_lines
|> List.map ~f:(Printf.sprintf "| %s")
|> String.concat ~sep:"\n"
in
let line = String.make 71 '-' in
Format.eprintf
"/%s\n\
| @{<error>Internal error@}: \
Fiber.Execution_context.forward_error: error handler raised.\n\
%s\n\
\\%s@."
line s line
(* [Never]: abandon the continuation entirely and run something else. *)
and never : 'a. ctx -> 'a cont -> unit =
fun ctx _ ->
schedule ctx
(* Completion callback for forked fibers: fill [ivar] with the result,
   wake everything blocked on it, then go back to the scheduler. *)
and finish : 'a. 'a ivar -> ctx -> 'a -> unit =
fun ivar ctx x ->
match ivar.state with
| Full _ -> assert false
| Empty q ->
ivar.state <- Full x;
Queue.iter (fun handler -> activate handler x) q;
schedule ctx
(* [Fork]: hand the caller a fresh ivar (via [k]) and run [f x] as a
   new fiber whose result fills that ivar. *)
and fork :
'a 'b. 'a -> ('a -[async]-> 'b) -> ctx ->
'b ivar cont -> unit =
fun x f ctx k ->
let ivar = { state = Empty (Queue.create ()) } in
add_refs ctx 1;
enqueue ctx (Cont(ivar, k));
exec ctx x f (finish ivar)
(* [NFork]: like [fork] but for a whole list, returning one ivar per
   element.  The first element is run directly; the rest are enqueued. *)
and nfork :
'a 'b. 'a list -> ('a -[async]-> 'b) ->
ctx -> 'b ivar list cont -> unit =
fun l f ctx k ->
match l with
| [] -> continue k []
| [x] ->
let ivar = { state = Empty (Queue.create ()) } in
add_refs ctx 1;
enqueue ctx (Cont([ivar], k));
exec ctx x f (finish ivar)
| first :: rest ->
let n = List.length rest in
add_refs ctx n;
let rest_ivars =
List.map rest ~f:(fun x ->
let ivar = { state = Empty (Queue.create ()) } in
enqueue ctx (Exec(ctx, x, f, finish ivar));
ivar)
in
let first_ivar = { state = Empty (Queue.create ()) } in
let ivars = first_ivar :: rest_ivars in
enqueue ctx (Cont(ivars, k));
exec ctx first f (finish first_ivar)
(* [Fork_and_join]: run [fa] and [fb] concurrently; whichever finishes
   second resumes [k] with the pair, the first merely records its
   result and drops its reference. *)
and fork_and_join :
'a 'b. (unit -[async]-> 'a) -> (unit -[async]-> 'b) ->
ctx -> ('a * 'b) cont -> unit =
fun fa fb ctx k ->
let state = ref Nothing_yet in
let finish_a ctx a =
match !state with
| Nothing_yet -> state := Got_a a; deref ctx
| Got_a _ -> assert false
| Got_b b -> continue k (a, b)
in
let finish_b ctx b =
match !state with
| Nothing_yet -> state := Got_b b; deref ctx
| Got_a a -> continue k (a, b)
| Got_b _ -> assert false
in
add_refs ctx 1;
enqueue ctx (Exec(ctx, (), fb, finish_b));
exec ctx () fa finish_a
(* [Parallel_map]: run [f] over every element concurrently, collecting
   results by position; the last fiber to finish resumes [k] with the
   full list. *)
and parallel_map :
'a 'b. 'a list -> ('a -[async]-> 'b) ->
ctx -> 'b list cont -> unit =
fun l f ctx k ->
match l with
| [] -> continue k []
| [x] ->
exec ctx x f (fun _ x -> continue k [x])
| first :: rest ->
let n = List.length l in
add_refs ctx (n - 1);
let left_over = ref n in
let results = Array.make n None in
let finish_i i ctx x =
results.(i) <- Some x;
decr left_over;
if !left_over = 0 then begin
continue k (list_of_option_array results)
end else begin
deref ctx
end
in
List.iteri rest ~f:(fun i x ->
enqueue ctx (Exec(ctx, x, f, finish_i (i + 1))));
exec ctx first f (finish_i 0)
(* [Parallel_iter]: as [parallel_map] but results are discarded; only a
   countdown of remaining fibers is kept. *)
and parallel_iter :
'a. 'a list -> ('a -[async]-> unit) ->
ctx -> unit cont -> unit =
fun l f ctx k ->
match l with
| [] -> continue k ()
| [x] -> exec ctx x f (fun _ _ -> continue k ())
| first :: rest ->
let n = List.length l in
add_refs ctx (n - 1);
let left_over = ref n in
let finish ctx () =
decr left_over;
if !left_over = 0 then begin
continue k ()
end else begin
deref ctx
end
in
List.iter rest ~f:(fun x ->
enqueue ctx (Exec(ctx, x, f, finish)));
exec ctx first f finish
(* [Get]: look up a fiber-local variable by its integer id; the [eq]
   witness converts the stored payload back to the key's type. *)
and get :
'a. 'a Var0.t -> ctx -> 'a option cont -> unit =
fun var ctx k ->
match Int_map.find (Var0.id var) ctx.vars with
| exception Not_found -> continue k None
| Binding.T (var', v) ->
let eq = Var0.eq var' var in
continue k (Some (Eq.cast eq v))
(* [Get_exn]: as [get] but a missing binding aborts the fiber with
   [Failure "Fiber.Var.find_exn"]. *)
and get_exn :
'a. 'a Var0.t -> ctx -> 'a cont -> unit =
fun var ctx k ->
match Int_map.find (Var0.id var) ctx.vars with
| exception Not_found -> discontinue k (Failure "Fiber.Var.find_exn")
| Binding.T (var', v) ->
let eq = Var0.eq var' var in
continue k (Eq.cast eq v)
(* [Set]: run [f] in a context extended with the binding; the original
   context (and bindings) is untouched for the continuation's siblings. *)
and set :
'a 'b. 'a Var0.t -> 'a -> (unit -[async]-> 'b) ->
ctx -> 'b cont -> unit =
fun (type t) (var : t Var0.t) x f ctx k ->
let (module M) = var in
let data = Binding.T (var, x) in
let ctx' = set_vars ctx (Int_map.add M.id data ctx.vars) in
exec ctx' () f (fun _ res -> continue k res)
(* [With_error_handler]: run [f] with [err] installed as the error
   handler; failures raised by [err] itself are forwarded to the outer
   handler without dropping a reference. *)
and with_error_handler :
'a. (unit -[async]-> 'a) -> (exn -> unit) ->
ctx -> 'a cont -> unit =
fun f err ctx k ->
let on_error exn =
try
err exn
with exn ->
forward_error false ctx exn
in
let ctx' = set_error_handler ctx ~on_error in
exec ctx' () f (fun _ x -> continue k x)
(* [Wait_errors]: run [f] in a sub-context; [k] is resumed only once
   every fiber of that sub-context has finished, with [Ok v] if [f]
   completed or [Error ()] if it died before storing a result. *)
and wait_errors :
'a. (unit -[async]-> 'a) ->
ctx -> ('a, unit) Result.t cont -> unit =
fun f ctx k ->
let result = ref (Result.Error ()) in
let on_release () = continue k !result in
let ctx' = subcontext ctx ~on_release in
let finish ctx' x =
result := Ok x;
deref ctx'
in
exec ctx' () f finish
(* [Fill]: store [x] in the ivar and wake every reader; filling twice
   aborts the fiber with [Failure "Fiber.Ivar.fill"]. *)
and fill : 'a. 'a ivar -> 'a -> ctx -> unit cont -> unit =
fun ivar x ctx k ->
match ivar.state with
| Full _ -> discontinue k (Failure "Fiber.Ivar.fill")
| Empty q ->
ivar.state <- Full x;
Queue.iter (fun handler -> activate handler x) q;
enqueue ctx (Cont_unit(k));
schedule ctx
(* [Read]: return immediately if the ivar is full, otherwise park the
   continuation on the ivar's wait queue. *)
and read : 'a. 'a ivar -> ctx -> 'a cont -> unit =
fun ivar ctx k ->
match ivar.state with
| Full x -> continue k x
| Empty q ->
Queue.push (Waiting(ctx, k)) q;
schedule ctx
(* [Lock]: take the mutex, or park on its waiter queue if held. *)
and lock : 'a. mutex -> ctx -> unit cont -> unit =
fun lock ctx k ->
if lock.locked then begin
Queue.push (Waiting(ctx, k)) lock.waiters;
schedule ctx
end else begin
lock.locked <- true;
continue k ()
end
(* [Unlock]: hand the mutex to the next waiter if any (it stays
   locked), otherwise mark it free. *)
and unlock : 'a. mutex -> ctx -> unit cont -> unit =
fun lock _ctx k ->
assert lock.locked;
if Queue.is_empty lock.waiters then begin
lock.locked <- false
end else begin
activate (Queue.pop lock.waiters) ()
end;
continue k ()
(* [Yield]: requeue the current fiber and let others run first. *)
and yield : 'a. ctx -> unit cont -> unit =
fun ctx k ->
enqueue ctx (Cont_unit(k));
schedule ctx
(* Public fiber combinators: thin wrappers that [perform] the matching
   [op]; semantics live in the handlers above. *)
(* A fiber that never completes. *)
let never : unit -[async]-> 'a =
fun () ->
perform Async(Never)
(* Start [f ()] concurrently, returning an ivar for its result. *)
let fork : (unit -[async]-> 'b) -[async]-> 'b ivar =
fun f ->
perform Async(Fork((), f))
(* Start [f x] for every [x] in [l], returning one ivar per element. *)
let nfork_map : 'a list -> f:('a -[async]-> 'b) -[async]-> 'b ivar list =
fun l ~f ->
perform Async(NFork(l, f))
(* Start every thunk in [l] concurrently. *)
let nfork : (unit -[async]-> 'a) list -[async]-> 'a ivar list =
fun l ->
perform Async(NFork(l, fun f -> f ()))
(* Run both thunks concurrently and wait for both results. *)
let fork_and_join :
(unit -[async]-> 'a) -> (unit -[async]-> 'b) -[async]->
'a * 'b =
fun fa fb ->
perform Async(Fork_and_join(fa, fb))
(* As [fork_and_join], keeping only the second result. *)
let fork_and_join_unit :
(unit -[async]-> unit) -> (unit -[async]-> 'a)
-[async]-> 'a =
fun fa fb ->
snd (perform Async(Fork_and_join(fa, fb)))
(* Map [f] over [l] with all elements processed concurrently. *)
let parallel_map :
'a list -> f:('a -[async]-> 'b) -[async]-> 'b list =
fun l ~f ->
perform Async(Parallel_map(l, f))
(* Iterate [f] over [l] with all elements processed concurrently. *)
let parallel_iter :
'a list -> f:('a -[async]-> unit) -[async]-> unit =
fun l ~f ->
perform Async(Parallel_iter(l, f))
(* Fiber-local variables: dynamically-scoped bindings visible to the
   fiber that [set] them and its children, via the [Get]/[Set] ops. *)
module Var = struct
type 'a t = 'a Var0.t
let create = Var0.create
(* Current binding of [t], or [None] if unbound. *)
let get : 'a Var0.t -[async]-> 'a option =
fun t -> perform Async(Get t)
(* Current binding of [t]; fails with [Failure "Fiber.Var.find_exn"]
   if unbound. *)
let get_exn : 'a Var0.t -[async]-> 'a =
fun t -> perform Async(Get_exn t)
(* Run [f] with [t] bound to [x]; the binding is scoped to [f]. *)
let set : 'a Var0.t -> 'a -> (unit -[async]-> 'b) -[async]-> 'b =
fun t x f -> perform Async(Set(t, x, f))
end
(* Run [f] with [on_error] installed as the error handler for any
   exception escaping a fiber in its dynamic extent. *)
let with_error_handler :
(unit -[async]-> 'a) ->
on_error:(exn -> unit) -[async]-> 'a =
fun f ~on_error ->
perform Async(With_error_handler(f, on_error))
(* Run [f] and wait for all fibers it spawned; [Error ()] means [f]
   failed (errors themselves go to the installed handler). *)
let wait_errors :
(unit -[async]-> 'a) -[async]-> ('a, unit) result =
fun f ->
perform Async(Wait_errors f)
(* Run [f], folding every error it reports into an accumulator with
   [on_error]; once the sub-context is fully released, return [Ok v] on
   success or [Error acc] on failure. *)
let fold_errors f ~init ~on_error =
  let errors = ref init in
  let record exn = errors := on_error exn !errors in
  let outcome =
    wait_errors (fun () -> with_error_handler f ~on_error:record)
  in
  match outcome with
  | Ok _ as ok -> ok
  | Error () -> Error !errors

(* Like [fold_errors], gathering the reported exceptions into a list
   (most recently reported first). *)
let collect_errors f =
  fold_errors f ~init:[] ~on_error:(fun exn exns -> exn :: exns)
(* Run [f], then run [finally] whether [f] succeeded or failed.  On the
   failure path the error was already reported, so the fiber simply
   never resumes ([never ()]) after the finalizer. *)
let finalize :
(unit -[async]-> 'a) -> finally:(unit -[async]-> unit)
-[async]-> 'a =
fun f ~finally ->
let res = wait_errors f in
finally ();
match res with
| Ok x -> x
| Error () -> never ()
(* Write-once synchronization cells. *)
module Ivar = struct
type 'a t = 'a ivar
let create () = { state = Empty (Queue.create ()) }
(* Store [x] and wake all readers; filling an already-full ivar fails
   with [Failure "Fiber.Ivar.fill"]. *)
let fill : 'a ivar -> 'a -[async]-> unit =
fun t x -> perform Async(Fill(t, x))
(* Block until the ivar is filled, then return its value. *)
let read : 'a ivar -[async]-> 'a =
fun t -> perform Async(Read t)
end
(* A future is just the read side of an ivar. *)
module Future = struct
type 'a t = 'a Ivar.t
let wait = Ivar.read
end
(* Cooperative mutex for fibers. *)
module Mutex = struct
type t = mutex
let create () =
{ locked = false;
waiters = Queue.create (); }
(* Run [f] holding [t]; the lock is released via [finalize] even when
   [f] fails. *)
let with_lock : mutex -> (unit -[async]-> 'a) -[async]-> 'a =
fun t f ->
perform Async(Lock t);
finalize f
~finally:(fun () -> perform Async(Unlock t))
end
(* Requeue the current fiber, giving other runnable fibers a turn. *)
let yield : unit -[async]-> unit =
fun () -> perform Async(Yield)
(* Raised by [run] when the computation suspends forever. *)
exception Never
(* Run fiber [f] on [x] to completion in a fresh root context.
   @raise Never if the scheduler runs dry before a result is produced
   (e.g. the computation blocked on [never] or an unfilled ivar). *)
let run f x =
  let result = ref None in
  let ctx = initial_context () in
  (* The context argument is unused; name it [_ctx] (matching the
     convention used by [unlock]) to avoid an unused-variable warning. *)
  let finish _ctx y =
    result := Some y
  in
  exec ctx x f finish;
  match !result with
  | None -> raise Never
  | Some res -> res
| null | https://raw.githubusercontent.com/lpw25/talks/89b565f66be567795d08c520aea3b03d527d64b2/nyc-2018/fiber.ml | ocaml | This callback must never raise
Number of fibers running in this execution
context
We can't abort the execution at this point, so we just dump
the error on stderr | open Import
module Eq = struct
type ('a, 'b) t = T : ('a, 'a) t
let cast (type a) (type b) (T : (a, b) t) (x : a) : b = x
end
module Var0 = struct
module Key = struct
type 'a t = ..
end
module type T = sig
type t
type 'a Key.t += T : t Key.t
val id : int
end
type 'a t = (module T with type t = 'a)
let next = ref 0
let create (type a) () =
let n = !next in
next := n + 1;
let module M = struct
type t = a
type 'a Key.t += T : t Key.t
let id = n
end in
(module M : T with type t = a)
let id (type a) (module M : T with type t = a) = M.id
let eq (type a) (type b)
(module A : T with type t = a)
(module B : T with type t = b) : (a, b) Eq.t =
match A.T with
| B.T -> Eq.T
| _ -> assert false
end
module Binding = struct
type t = T : 'a Var0.t * 'a -> t
end
module Int_map = Map.Make(Int)
type ctx =
vars : Binding.t Int_map.t;
on_release : unit -> unit;
suspended : task Queue.t; }
and 'a cont =
('a, ![], unit) continuation
and task =
| Cont : 'a * 'a cont -> task
| Cont_unit : unit cont -> task
| Exec :
ctx * 'a * ('a -[Async : 'o. 'o op -> 'o]-> 'b)
* (ctx -> 'b -> unit) -> task
and 'a waiting =
Waiting : ctx * 'a cont -> 'a waiting
and 'a ivar_state =
| Full of 'a
| Empty of 'a waiting Queue.t
and 'a ivar = { mutable state : 'a ivar_state }
and mutex =
{ mutable locked : bool;
mutable waiters : unit waiting Queue.t; }
and 'a op =
| Never : 'a op
| Fork :
'a * ('a -[Async : 'o. 'o op -> 'o]-> 'b) -> 'b ivar op
| NFork :
'a list * ('a -[Async : 'o. 'o op -> 'o]-> 'b) ->
'b ivar list op
| Fork_and_join :
(unit -[Async : 'o. 'o op -> 'o]-> 'a) *
(unit -[Async : 'o. 'o op -> 'o]-> 'b) ->
('a * 'b) op
| Parallel_map :
'a list * ('a -[Async : 'o. 'o op -> 'o]-> 'b) -> 'b list op
| Parallel_iter :
'a list * ('a -[Async : 'o. 'o op -> 'o]-> unit) -> unit op
| Get : 'a Var0.t -> 'a option op
| Get_exn : 'a Var0.t -> 'a op
| Set :
'a Var0.t * 'a * (unit -[Async : 'o. 'o op -> 'o]-> 'b) -> 'b op
| With_error_handler :
(unit -[Async : 'o. 'o op -> 'o]-> 'a) * (exn -> unit) -> 'a op
| Wait_errors :
(unit -[Async : 'o. 'o op -> 'o]-> 'a) -> ('a, unit) result op
| Fill : 'b ivar * 'b -> unit op
| Read : 'a ivar -> 'a op
| Lock : mutex -> unit op
| Unlock : mutex -> unit op
| Yield : unit op
effect async = ![ Async : 'o. 'o op -> 'o ]
type ('a, 'b) fork_and_join_state =
| Nothing_yet
| Got_a of 'a
| Got_b of 'b
let initial_context () =
{ on_error = raise;
fibers = ref 1;
vars = Int_map.empty;
on_release = ignore;
suspended = Queue.create (); }
let subcontext ctx ~on_release =
{ ctx with on_release; fibers = ref 1 }
let set_vars ctx vars =
{ ctx with vars }
let set_error_handler ctx ~on_error =
{ ctx with on_error }
let enqueue ctx s =
Queue.push s ctx.suspended
let release ctx =
ctx.on_release ()
let add_refs ctx n =
ctx.fibers := !(ctx.fibers) + n
let activate (Waiting(ctx, cont)) x =
enqueue ctx (Cont(x, cont))
let list_of_option_array a =
let rec loop arr i acc =
if i = 0 then
acc
else
let i = i - 1 in
match arr.(i) with
| None -> assert false
| Some x ->
loop arr i (x :: acc)
in
loop a (Array.length a) []
let rec exec :
'a 'b. ctx -> 'a ->
('a -[async]-> 'b) -> (ctx -> 'b -> unit) -> unit =
fun ctx x f g ->
match f x with
| res -> g ctx res
| exception exn ->
forward_error true ctx exn
| effect Async(op), k -> begin
match op with
| Never -> never ctx k
| Fork(x, f) -> fork x f ctx k
| NFork(l, f) -> nfork l f ctx k
| Fork_and_join(fa, fb) -> fork_and_join fa fb ctx k
| Parallel_map(l, f) -> parallel_map l f ctx k
| Parallel_iter(l, f) -> parallel_iter l f ctx k
| Get var -> get var ctx k
| Get_exn var -> get_exn var ctx k
| Set(var, x, f) -> set var x f ctx k
| With_error_handler(f, h) -> with_error_handler f h ctx k
| Wait_errors f -> wait_errors f ctx k
| Fill(ivar, x) -> fill ivar x ctx k
| Read ivar -> read ivar ctx k
| Lock mutex -> lock mutex ctx k
| Unlock mutex -> unlock mutex ctx k
| Yield -> yield ctx k
end
and schedule : ctx -> unit =
fun ctx ->
match Queue.pop ctx.suspended with
| exception Queue.Empty -> ()
| Cont(x, k) -> continue k x
| Cont_unit k -> continue k ()
| Exec(ctx', x, f, g) -> exec ctx' x f g
and deref : 'a. ctx -> unit =
fun ctx ->
let n = !(ctx.fibers) - 1 in
assert (n >= 0);
ctx.fibers := n;
if n = 0 then release ctx
else schedule ctx
and forward_error : 'a. bool -> ctx -> exn -> unit =
fun drf ctx exn ->
let bt = Printexc.get_raw_backtrace () in
match ctx.on_error exn with
| () -> if drf then deref ctx
| exception exn2 ->
let bt2 = Printexc.get_backtrace () in
let s =
(Printf.sprintf "%s\n%s\nOriginal exception was: %s\n%s"
(Printexc.to_string exn2) bt2
(Printexc.to_string exn) (Printexc.raw_backtrace_to_string bt))
|> String.split_lines
|> List.map ~f:(Printf.sprintf "| %s")
|> String.concat ~sep:"\n"
in
let line = String.make 71 '-' in
Format.eprintf
"/%s\n\
| @{<error>Internal error@}: \
Fiber.Execution_context.forward_error: error handler raised.\n\
%s\n\
\\%s@."
line s line
and never : 'a. ctx -> 'a cont -> unit =
fun ctx _ ->
schedule ctx
and finish : 'a. 'a ivar -> ctx -> 'a -> unit =
fun ivar ctx x ->
match ivar.state with
| Full _ -> assert false
| Empty q ->
ivar.state <- Full x;
Queue.iter (fun handler -> activate handler x) q;
schedule ctx
and fork :
'a 'b. 'a -> ('a -[async]-> 'b) -> ctx ->
'b ivar cont -> unit =
fun x f ctx k ->
let ivar = { state = Empty (Queue.create ()) } in
add_refs ctx 1;
enqueue ctx (Cont(ivar, k));
exec ctx x f (finish ivar)
and nfork :
'a 'b. 'a list -> ('a -[async]-> 'b) ->
ctx -> 'b ivar list cont -> unit =
fun l f ctx k ->
match l with
| [] -> continue k []
| [x] ->
let ivar = { state = Empty (Queue.create ()) } in
add_refs ctx 1;
enqueue ctx (Cont([ivar], k));
exec ctx x f (finish ivar)
| first :: rest ->
let n = List.length rest in
add_refs ctx n;
let rest_ivars =
List.map rest ~f:(fun x ->
let ivar = { state = Empty (Queue.create ()) } in
enqueue ctx (Exec(ctx, x, f, finish ivar));
ivar)
in
let first_ivar = { state = Empty (Queue.create ()) } in
let ivars = first_ivar :: rest_ivars in
enqueue ctx (Cont(ivars, k));
exec ctx first f (finish first_ivar)
and fork_and_join :
'a 'b. (unit -[async]-> 'a) -> (unit -[async]-> 'b) ->
ctx -> ('a * 'b) cont -> unit =
fun fa fb ctx k ->
let state = ref Nothing_yet in
let finish_a ctx a =
match !state with
| Nothing_yet -> state := Got_a a; deref ctx
| Got_a _ -> assert false
| Got_b b -> continue k (a, b)
in
let finish_b ctx b =
match !state with
| Nothing_yet -> state := Got_b b; deref ctx
| Got_a a -> continue k (a, b)
| Got_b _ -> assert false
in
add_refs ctx 1;
enqueue ctx (Exec(ctx, (), fb, finish_b));
exec ctx () fa finish_a
and parallel_map :
'a 'b. 'a list -> ('a -[async]-> 'b) ->
ctx -> 'b list cont -> unit =
fun l f ctx k ->
match l with
| [] -> continue k []
| [x] ->
exec ctx x f (fun _ x -> continue k [x])
| first :: rest ->
let n = List.length l in
add_refs ctx (n - 1);
let left_over = ref n in
let results = Array.make n None in
let finish_i i ctx x =
results.(i) <- Some x;
decr left_over;
if !left_over = 0 then begin
continue k (list_of_option_array results)
end else begin
deref ctx
end
in
List.iteri rest ~f:(fun i x ->
enqueue ctx (Exec(ctx, x, f, finish_i (i + 1))));
exec ctx first f (finish_i 0)
and parallel_iter :
'a. 'a list -> ('a -[async]-> unit) ->
ctx -> unit cont -> unit =
fun l f ctx k ->
match l with
| [] -> continue k ()
| [x] -> exec ctx x f (fun _ _ -> continue k ())
| first :: rest ->
let n = List.length l in
add_refs ctx (n - 1);
let left_over = ref n in
let finish ctx () =
decr left_over;
if !left_over = 0 then begin
continue k ()
end else begin
deref ctx
end
in
List.iter rest ~f:(fun x ->
enqueue ctx (Exec(ctx, x, f, finish)));
exec ctx first f finish
and get :
'a. 'a Var0.t -> ctx -> 'a option cont -> unit =
fun var ctx k ->
match Int_map.find (Var0.id var) ctx.vars with
| exception Not_found -> continue k None
| Binding.T (var', v) ->
let eq = Var0.eq var' var in
continue k (Some (Eq.cast eq v))
and get_exn :
'a. 'a Var0.t -> ctx -> 'a cont -> unit =
fun var ctx k ->
match Int_map.find (Var0.id var) ctx.vars with
| exception Not_found -> discontinue k (Failure "Fiber.Var.find_exn")
| Binding.T (var', v) ->
let eq = Var0.eq var' var in
continue k (Eq.cast eq v)
and set :
'a 'b. 'a Var0.t -> 'a -> (unit -[async]-> 'b) ->
ctx -> 'b cont -> unit =
fun (type t) (var : t Var0.t) x f ctx k ->
let (module M) = var in
let data = Binding.T (var, x) in
let ctx' = set_vars ctx (Int_map.add M.id data ctx.vars) in
exec ctx' () f (fun _ res -> continue k res)
and with_error_handler :
'a. (unit -[async]-> 'a) -> (exn -> unit) ->
ctx -> 'a cont -> unit =
fun f err ctx k ->
let on_error exn =
try
err exn
with exn ->
forward_error false ctx exn
in
let ctx' = set_error_handler ctx ~on_error in
exec ctx' () f (fun _ x -> continue k x)
and wait_errors :
'a. (unit -[async]-> 'a) ->
ctx -> ('a, unit) Result.t cont -> unit =
fun f ctx k ->
let result = ref (Result.Error ()) in
let on_release () = continue k !result in
let ctx' = subcontext ctx ~on_release in
let finish ctx' x =
result := Ok x;
deref ctx'
in
exec ctx' () f finish
and fill : 'a. 'a ivar -> 'a -> ctx -> unit cont -> unit =
fun ivar x ctx k ->
match ivar.state with
| Full _ -> discontinue k (Failure "Fiber.Ivar.fill")
| Empty q ->
ivar.state <- Full x;
Queue.iter (fun handler -> activate handler x) q;
enqueue ctx (Cont_unit(k));
schedule ctx
and read : 'a. 'a ivar -> ctx -> 'a cont -> unit =
fun ivar ctx k ->
match ivar.state with
| Full x -> continue k x
| Empty q ->
Queue.push (Waiting(ctx, k)) q;
schedule ctx
and lock : 'a. mutex -> ctx -> unit cont -> unit =
fun lock ctx k ->
if lock.locked then begin
Queue.push (Waiting(ctx, k)) lock.waiters;
schedule ctx
end else begin
lock.locked <- true;
continue k ()
end
and unlock : 'a. mutex -> ctx -> unit cont -> unit =
fun lock _ctx k ->
assert lock.locked;
if Queue.is_empty lock.waiters then begin
lock.locked <- false
end else begin
activate (Queue.pop lock.waiters) ()
end;
continue k ()
and yield : 'a. ctx -> unit cont -> unit =
fun ctx k ->
enqueue ctx (Cont_unit(k));
schedule ctx
let never : unit -[async]-> 'a =
fun () ->
perform Async(Never)
let fork : (unit -[async]-> 'b) -[async]-> 'b ivar =
fun f ->
perform Async(Fork((), f))
let nfork_map : 'a list -> f:('a -[async]-> 'b) -[async]-> 'b ivar list =
fun l ~f ->
perform Async(NFork(l, f))
let nfork : (unit -[async]-> 'a) list -[async]-> 'a ivar list =
fun l ->
perform Async(NFork(l, fun f -> f ()))
let fork_and_join :
(unit -[async]-> 'a) -> (unit -[async]-> 'b) -[async]->
'a * 'b =
fun fa fb ->
perform Async(Fork_and_join(fa, fb))
let fork_and_join_unit :
(unit -[async]-> unit) -> (unit -[async]-> 'a)
-[async]-> 'a =
fun fa fb ->
snd (perform Async(Fork_and_join(fa, fb)))
let parallel_map :
'a list -> f:('a -[async]-> 'b) -[async]-> 'b list =
fun l ~f ->
perform Async(Parallel_map(l, f))
let parallel_iter :
'a list -> f:('a -[async]-> unit) -[async]-> unit =
fun l ~f ->
perform Async(Parallel_iter(l, f))
module Var = struct
type 'a t = 'a Var0.t
let create = Var0.create
let get : 'a Var0.t -[async]-> 'a option =
fun t -> perform Async(Get t)
let get_exn : 'a Var0.t -[async]-> 'a =
fun t -> perform Async(Get_exn t)
let set : 'a Var0.t -> 'a -> (unit -[async]-> 'b) -[async]-> 'b =
fun t x f -> perform Async(Set(t, x, f))
end
let with_error_handler :
(unit -[async]-> 'a) ->
on_error:(exn -> unit) -[async]-> 'a =
fun f ~on_error ->
perform Async(With_error_handler(f, on_error))
let wait_errors :
(unit -[async]-> 'a) -[async]-> ('a, unit) result =
fun f ->
perform Async(Wait_errors f)
let fold_errors f ~init ~on_error =
let acc = ref init in
let on_error exn =
acc := on_error exn !acc
in
match wait_errors (fun () -> with_error_handler f ~on_error) with
| Ok _ as ok -> ok
| Error () -> Error !acc
let collect_errors f =
fold_errors f
~init:[]
~on_error:(fun e l -> e :: l)
let finalize :
(unit -[async]-> 'a) -> finally:(unit -[async]-> unit)
-[async]-> 'a =
fun f ~finally ->
let res = wait_errors f in
finally ();
match res with
| Ok x -> x
| Error () -> never ()
module Ivar = struct
type 'a t = 'a ivar
let create () = { state = Empty (Queue.create ()) }
let fill : 'a ivar -> 'a -[async]-> unit =
fun t x -> perform Async(Fill(t, x))
let read : 'a ivar -[async]-> 'a =
fun t -> perform Async(Read t)
end
module Future = struct
type 'a t = 'a Ivar.t
let wait = Ivar.read
end
module Mutex = struct
type t = mutex
let create () =
{ locked = false;
waiters = Queue.create (); }
let with_lock : mutex -> (unit -[async]-> 'a) -[async]-> 'a =
fun t f ->
perform Async(Lock t);
finalize f
~finally:(fun () -> perform Async(Unlock t))
end
let yield : unit -[async]-> unit =
fun () -> perform Async(Yield)
exception Never
let run f x =
let result = ref None in
let ctx = initial_context () in
let finish ctx y =
result := Some y
in
exec ctx x f finish;
match !result with
| None -> raise Never
| Some res -> res
|
07b5463c8a0e001e1ec3b826b9a63ad79f6b7685f9a92ca9a4f6b3f0586097f0 | simonmar/par-tutorial | kmeans2.hs | -- Modified version of kmeans.hs: uses divide-and-conquer instead of flat parList.
import System.IO
import KMeansCommon
import Data.Array
import Text.Printf
import Data.List
import Data.Function
import Data.Binary (decodeFile)
import Debug.Trace
import Control.Parallel.Strategies
import Control.DeepSeq
import System.Environment
import Data.Time.Clock
import Control.Exception
main = do
points <- decodeFile "points.bin"
clusters <- getClusters "clusters"
let nclusters = length clusters
args <- getArgs
npoints <- evaluate (length points)
t0 <- getCurrentTime
final_clusters <- case args of
["seq"] -> kmeans_seq nclusters points clusters
["par",n] -> kmeans_par (read n) nclusters points clusters npoints
_other -> error "args"
t1 <- getCurrentTime
print final_clusters
printf "Total time: %.2f\n" (realToFrac (diffUTCTime t1 t0) :: Double)
split :: Int -> [a] -> [[a]]
split numChunks l = splitSize (ceiling $ fromIntegral (length l) / fromIntegral numChunks) l
where
splitSize _ [] = []
splitSize i v = xs : splitSize i ys
where (xs,ys) = splitAt i v
data Tree a = Leaf a
| Node (Tree a) (Tree a)
mkPointTree :: Int -> [Vector] -> Int -> Tree [Vector]
mkPointTree depth points npoints
| depth >= threshold = Leaf points
| otherwise = Node (mkPointTree (depth+1) xs half)
(mkPointTree (depth+1) ys half)
where
half = npoints `quot` 2
(xs,ys) = splitAt half points
threshold = 10 :: Int
kmeans_par :: Int -> Int -> [Vector] -> [Cluster] -> Int -> IO [Cluster]
kmeans_par mappers nclusters points clusters npoints = do
let
tree = mkPointTree 0 points npoints
loop :: Int -> [Cluster] -> IO [Cluster]
loop n clusters | n > tooMany = do printf "giving up."; return clusters
loop n clusters = do
hPrintf stderr "iteration %d\n" n
hPutStr stderr (unlines (map show clusters))
let
divconq :: Tree [Vector] -> [Cluster]
divconq (Leaf points) = step nclusters clusters points
divconq (Node left right) = runEval $ do
c1 <- rpar $ divconq left
c2 <- rpar $ divconq right
rdeepseq c1
rdeepseq c2
return $! reduce nclusters [c1,c2]
clusters' = divconq tree
if clusters' == clusters
then return clusters
else loop (n+1) clusters'
--
final <- loop 0 clusters
return final
kmeans_seq :: Int -> [Vector] -> [Cluster] -> IO [Cluster]
kmeans_seq nclusters points clusters = do
let
loop :: Int -> [Cluster] -> IO [Cluster]
loop n clusters | n > tooMany = do printf "giving up."; return clusters
loop n clusters = do
hPrintf stderr "iteration %d\n" n
hPutStr stderr (unlines (map show clusters))
let clusters' = step nclusters clusters points
if clusters' == clusters
then return clusters
else loop (n+1) clusters'
--
loop 0 clusters
tooMany = 50
reduce :: Int -> [[Cluster]] -> [Cluster]
reduce nclusters css =
concatMap combine (elems (accumArray (flip (:)) [] (0,nclusters) [ (clId c, c) | c <- concat css]))
where
combine [] = []
combine (c:cs) = [foldr combineClusters c cs]
step :: Int -> [Cluster] -> [Vector] -> [Cluster]
step nclusters clusters points
= makeNewClusters (assign nclusters clusters points)
-- assign each vector to the nearest cluster centre
assign :: Int -> [Cluster] -> [Vector] -> Array Int [Vector]
assign nclusters clusters points =
accumArray (flip (:)) [] (0, nclusters-1)
[ (clId (nearest p), p) | p <- points ]
where
nearest p = fst $ minimumBy (compare `on` snd)
[ (c, sqDistance (clCent c) p) | c <- clusters ]
makeNewClusters :: Array Int [Vector] -> [Cluster]
makeNewClusters arr =
filter ((>0) . clCount) $
[ makeCluster i ps | (i,ps) <- assocs arr ]
-- v. important: filter out any clusters that have
-- no points. This can happen when a cluster is not
-- close to any points. If we leave these in, then
the NaNs mess up all the future calculations .
| null | https://raw.githubusercontent.com/simonmar/par-tutorial/f9061ea177800eb4ed9660bcabc8d8d836e1c73c/code/kmeans/kmeans2.hs | haskell | Modified version of kmeans.hs: uses divide-and-conquer instead of flat parList.
assign each vector to the nearest cluster centre
v. important: filter out any clusters that have
no points. This can happen when a cluster is not
close to any points. If we leave these in, then |
import System.IO
import KMeansCommon
import Data.Array
import Text.Printf
import Data.List
import Data.Function
import Data.Binary (decodeFile)
import Debug.Trace
import Control.Parallel.Strategies
import Control.DeepSeq
import System.Environment
import Data.Time.Clock
import Control.Exception
main = do
points <- decodeFile "points.bin"
clusters <- getClusters "clusters"
let nclusters = length clusters
args <- getArgs
npoints <- evaluate (length points)
t0 <- getCurrentTime
final_clusters <- case args of
["seq"] -> kmeans_seq nclusters points clusters
["par",n] -> kmeans_par (read n) nclusters points clusters npoints
_other -> error "args"
t1 <- getCurrentTime
print final_clusters
printf "Total time: %.2f\n" (realToFrac (diffUTCTime t1 t0) :: Double)
split :: Int -> [a] -> [[a]]
split numChunks l = splitSize (ceiling $ fromIntegral (length l) / fromIntegral numChunks) l
where
splitSize _ [] = []
splitSize i v = xs : splitSize i ys
where (xs,ys) = splitAt i v
data Tree a = Leaf a
| Node (Tree a) (Tree a)
mkPointTree :: Int -> [Vector] -> Int -> Tree [Vector]
mkPointTree depth points npoints
| depth >= threshold = Leaf points
| otherwise = Node (mkPointTree (depth+1) xs half)
(mkPointTree (depth+1) ys half)
where
half = npoints `quot` 2
(xs,ys) = splitAt half points
threshold = 10 :: Int
kmeans_par :: Int -> Int -> [Vector] -> [Cluster] -> Int -> IO [Cluster]
kmeans_par mappers nclusters points clusters npoints = do
let
tree = mkPointTree 0 points npoints
loop :: Int -> [Cluster] -> IO [Cluster]
loop n clusters | n > tooMany = do printf "giving up."; return clusters
loop n clusters = do
hPrintf stderr "iteration %d\n" n
hPutStr stderr (unlines (map show clusters))
let
divconq :: Tree [Vector] -> [Cluster]
divconq (Leaf points) = step nclusters clusters points
divconq (Node left right) = runEval $ do
c1 <- rpar $ divconq left
c2 <- rpar $ divconq right
rdeepseq c1
rdeepseq c2
return $! reduce nclusters [c1,c2]
clusters' = divconq tree
if clusters' == clusters
then return clusters
else loop (n+1) clusters'
final <- loop 0 clusters
return final
kmeans_seq :: Int -> [Vector] -> [Cluster] -> IO [Cluster]
kmeans_seq nclusters points clusters = do
let
loop :: Int -> [Cluster] -> IO [Cluster]
loop n clusters | n > tooMany = do printf "giving up."; return clusters
loop n clusters = do
hPrintf stderr "iteration %d\n" n
hPutStr stderr (unlines (map show clusters))
let clusters' = step nclusters clusters points
if clusters' == clusters
then return clusters
else loop (n+1) clusters'
loop 0 clusters
tooMany = 50
reduce :: Int -> [[Cluster]] -> [Cluster]
reduce nclusters css =
concatMap combine (elems (accumArray (flip (:)) [] (0,nclusters) [ (clId c, c) | c <- concat css]))
where
combine [] = []
combine (c:cs) = [foldr combineClusters c cs]
step :: Int -> [Cluster] -> [Vector] -> [Cluster]
step nclusters clusters points
= makeNewClusters (assign nclusters clusters points)
assign :: Int -> [Cluster] -> [Vector] -> Array Int [Vector]
assign nclusters clusters points =
accumArray (flip (:)) [] (0, nclusters-1)
[ (clId (nearest p), p) | p <- points ]
where
nearest p = fst $ minimumBy (compare `on` snd)
[ (c, sqDistance (clCent c) p) | c <- clusters ]
makeNewClusters :: Array Int [Vector] -> [Cluster]
makeNewClusters arr =
filter ((>0) . clCount) $
[ makeCluster i ps | (i,ps) <- assocs arr ]
the NaNs mess up all the future calculations .
|
d4d32fb042b3cb05e819ac10c5f92757ceef3ea77b658353aaea968ba29193b6 | poroh/ersip | ersip_dialog_test.erl | %%%
Copyright ( c ) 2018 , 2021 Dmitry Poroh
%%% All rights reserved.
Distributed under the terms of the MIT License . See the LICENSE file .
%%%
%%% Common dialog support test
%%%
%%% TODO:
%%% - Check that Record-route are ignored for target_refresher
%%% messages
%%%
-module(ersip_dialog_test).
-include_lib("eunit/include/eunit.hrl").
%%===================================================================
%% Cases
%%===================================================================
dialog_create_test() ->
InvReq = invite_request(),
InvSipMsg = ersip_request:sipmsg(InvReq),
InvResp180UAS = invite_reply(180, InvSipMsg),
?assertEqual(no_dialog, ersip_dialog:uas_dialog_id(InvSipMsg)),
{UASDialogEarly, InvResp180UAC} = ersip_dialog:uas_new(InvSipMsg, InvResp180UAS),
?assertMatch({ok, _}, ersip_dialog:uac_new(InvReq, InvResp180UAC)),
{ok, UACDialogEarly} = ersip_dialog:uac_new(InvReq, InvResp180UAC),
InvResp200UAS = invite_reply(200, InvSipMsg),
{UASDialogConfirmed, InvResp200UAC} = ersip_dialog:uas_pass_response(InvSipMsg, InvResp200UAS, UASDialogEarly),
{ok, UACDialogConfirmed} = ersip_dialog:uac_update(InvResp200UAS, UACDialogEarly),
%% ====================
%% Sending BYE through UAC dialog:
{_UACDialog1, ByeSipMsgA} = ersip_dialog:uac_request(bye_sipmsg(), UACDialogConfirmed),
%% --------------------
Check that dialog identifier of UAS is equal to calculated by request :
?assertMatch({ok, _}, ersip_dialog:uas_dialog_id(ByeSipMsgA)),
{ok, ByeUASDialogId} = ersip_dialog:uas_dialog_id(ByeSipMsgA),
?assertEqual(ersip_dialog:id(UASDialogConfirmed), ByeUASDialogId),
%% --------------------
%% Check that message is filled in according to dialog:
1 . The URI in the To field of the request MUST be set to the
remote URI from the dialog state .
?assertEqual(ersip_hdr_fromto:uri(ersip_sipmsg:get(to, InvSipMsg)),
ersip_hdr_fromto:uri(ersip_sipmsg:get(to, ByeSipMsgA))),
2 . The tag in the To header field of the request MUST be set to
%% the remote tag of the dialog ID.
?assertEqual(ersip_hdr_fromto:tag(ersip_sipmsg:get(to, InvResp180UAC)),
ersip_hdr_fromto:tag(ersip_sipmsg:get(to, ByeSipMsgA))),
3 . The From URI of the request MUST be set to the local URI
%% from the dialog state.
?assertEqual(ersip_hdr_fromto:uri(ersip_sipmsg:get(from, InvSipMsg)),
ersip_hdr_fromto:uri(ersip_sipmsg:get(from, ByeSipMsgA))),
4 . The tag in the From header field of the request MUST be set to the local tag
%% of the dialog ID
?assertEqual(ersip_hdr_fromto:tag(ersip_sipmsg:get(from, InvSipMsg)),
ersip_hdr_fromto:tag(ersip_sipmsg:get(from, ByeSipMsgA))),
5 . The Call - ID of the request MUST be set to the Call - ID of the dialog .
?assertEqual(ersip_sipmsg:get(callid, InvSipMsg),
ersip_sipmsg:get(callid, ByeSipMsgA)),
6 . Requests within a dialog MUST contain strictly monotonically
%% increasing and contiguous CSeq sequence numbers
( increasing - by - one ) in each direction ( excepting ACK and CANCEL
%% of course, whose numbers equal the requests being acknowledged
%% or cancelled).
?assert(ersip_hdr_cseq:number(ersip_sipmsg:get(cseq, InvSipMsg))
< ersip_hdr_cseq:number(ersip_sipmsg:get(cseq, ByeSipMsgA))),
7 . The method field in the CSeq header field value MUST match
%% the method of the request.
?assertEqual(ersip_sipmsg:method(ByeSipMsgA),
ersip_hdr_cseq:method(ersip_sipmsg:get(cseq, ByeSipMsgA))),
8 . If the route set is empty , the UAC MUST place the remote target URI
into the Request - URI . The UAC MUST NOT add a Route header field to
%% the request.
[RemoteContactA] = ersip_sipmsg:get(contact, InvResp200UAC),
?assertEqual(ersip_hdr_contact:uri(RemoteContactA),
ersip_sipmsg:ruri(ByeSipMsgA)),
%% ====================
Sending BYE through UAS dialog :
{_UASDialog1, ByeSipMsgB} = ersip_dialog:uac_request(bye_sipmsg(), UASDialogConfirmed),
%% --------------------
Check that dialog identifier of UAC is equal to calculated by request :
?assertMatch({ok, _}, ersip_dialog:uas_dialog_id(ByeSipMsgB)),
{ok, ByeBDialogId} = ersip_dialog:uas_dialog_id(ByeSipMsgB),
?assertEqual(ersip_dialog:id(UACDialogConfirmed), ByeBDialogId),
%% --------------------
%% Check that message is filled in according to dialog:
1 . The URI in the To field of the request MUST be set to the
remote URI from the dialog state .
?assertEqual(ersip_hdr_fromto:uri(ersip_sipmsg:get(from, InvSipMsg)),
ersip_hdr_fromto:uri(ersip_sipmsg:get(to, ByeSipMsgB))),
2 . The tag in the To header field of the request MUST be set to
%% the remote tag of the dialog ID.
?assertEqual(ersip_hdr_fromto:tag(ersip_sipmsg:get(from, InvResp180UAC)),
ersip_hdr_fromto:tag(ersip_sipmsg:get(to, ByeSipMsgB))),
3 . The From URI of the request MUST be set to the local URI
%% from the dialog state.
?assertEqual(ersip_hdr_fromto:uri(ersip_sipmsg:get(to, InvSipMsg)),
ersip_hdr_fromto:uri(ersip_sipmsg:get(from, ByeSipMsgB))),
4 . The tag in the From header field of the request MUST be set to the local tag
%% of the dialog ID
?assertEqual(ersip_hdr_fromto:tag(ersip_sipmsg:get(to, InvResp180UAS)),
ersip_hdr_fromto:tag(ersip_sipmsg:get(from, ByeSipMsgB))),
5 . The Call - ID of the request MUST be set to the Call - ID of the dialog .
?assertEqual(ersip_sipmsg:get(callid, InvSipMsg),
ersip_sipmsg:get(callid, ByeSipMsgB)),
6 . Requests within a dialog MUST contain strictly monotonically
%% increasing and contiguous CSeq sequence numbers
( increasing - by - one ) in each direction ( excepting ACK and CANCEL
%% of course, whose numbers equal the requests being acknowledged
%% or cancelled).
%% Filled with new value
7 . The method field in the CSeq header field value MUST match
%% the method of the request.
?assertEqual(ersip_sipmsg:method(ByeSipMsgB),
ersip_hdr_cseq:method(ersip_sipmsg:get(cseq, ByeSipMsgB))),
8 . If the route set is empty , the UAC MUST place the remote target URI
into the Request - URI . The UAC MUST NOT add a Route header field to
%% the request.
[RemoteContactB] = ersip_sipmsg:get(contact, InvSipMsg),
?assertEqual(ersip_hdr_contact:uri(RemoteContactB),
ersip_sipmsg:ruri(ByeSipMsgB)),
ok.
uas_dialog_rfc2543_compiance_test() ->
A UAS MUST be prepared to receive a
%% request without a tag in the From field, in which case the tag is
%% considered to have a value of null.
%%
This is to maintain backwards compatibility with RFC 2543 , which
%% did not mandate From tags.
InvReq = invite_request(),
InvSipMsg = clear_tag(from, ersip_request:sipmsg(InvReq)),
InvResp200 = invite_reply(200, InvSipMsg),
{Dialog, _} = ersip_dialog:uas_new(InvSipMsg, InvResp200),
%% If the value of the remote or local tags is null, the tag
%% parameter MUST be omitted from the To or From header fields,
%% respectively.
{_, ByeSipMsgB} = ersip_dialog:uac_request(bye_sipmsg(), Dialog),
?assertEqual(undefined, ersip_hdr_fromto:tag(ersip_sipmsg:get(to, ByeSipMsgB))),
%% Check that message sent without from tag is mached dialog
%% created by initial invite.
{ok, DialogA} = ersip_dialog:uac_new(InvReq, InvResp200),
{_, ByeSipMsgA0} = ersip_dialog:uac_request(bye_sipmsg(), DialogA),
ByeSipMsgA = clear_tag(from, ByeSipMsgA0),
{ok, ByeADialogId} = ersip_dialog:uas_dialog_id(ByeSipMsgA),
?assertEqual(ersip_dialog:id(Dialog), ByeADialogId),
ok.
uac_dialog_rfc2543_compiance_test() ->
%% A UAC MUST be prepared to receive a response without a tag in
%% the To field, in which case the tag is considered to have a
%% value of null.
%%
This is to maintain backwards compatibility with RFC 2543 ,
%% which did not mandate To tags.
InvReq = invite_request(),
InvSipMsg = ersip_request:sipmsg(InvReq),
InvResp200 = clear_tag(to, invite_reply(200, InvSipMsg)),
?assertEqual(undefined, ersip_hdr_fromto:tag(ersip_sipmsg:get(to, InvResp200))),
?assertMatch({ok, _}, ersip_dialog:uac_new(InvReq, InvResp200)),
{ok, Dialog} = ersip_dialog:uac_new(InvReq, InvResp200),
%% If the value of the remote or local tags is null, the tag
%% parameter MUST be omitted from the To or From header fields,
%% respectively.
{_, ByeSipMsg} = ersip_dialog:uac_request(bye_sipmsg(), Dialog),
?assertEqual(undefined, ersip_hdr_fromto:tag(ersip_sipmsg:get(to, ByeSipMsg))),
%% Check that message sent without from tag is mached dialog
%% created by initial invite.
{DialogB, _} = ersip_dialog:uas_new(InvSipMsg, InvResp200),
{_, ByeSipMsgB0} = ersip_dialog:uac_request(bye_sipmsg(), DialogB),
ByeSipMsgB = clear_tag(from, ByeSipMsgB0),
{ok, ByeBDialogId} = ersip_dialog:uas_dialog_id(ByeSipMsgB),
?assertEqual(ersip_dialog:id(Dialog), ByeBDialogId),
ok.
indialog_ack_and_cancel_cseq_test() ->
%% Requests within a dialog MUST contain strictly monotonically
%% increasing and contiguous CSeq sequence numbers (increasing-by-one)
in each direction ( excepting ACK and CANCEL of course , whose numbers
%% equal the requests being acknowledged or cancelled).
{UASDialog0, UACDialog0} = create_uas_uac_dialogs(invite_request()),
{UASDialog1, ReInviteSipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), UASDialog0),
{_, AckSipMsg} = ersip_dialog:uac_request(ack_sipmsg(), UASDialog1),
{_, CancelSipMsg} = ersip_dialog:uac_request(cancel_sipmsg(), UASDialog1),
?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(AckSipMsg)),
?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(CancelSipMsg)),
{UACDialog1, UACReInviteSipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), UACDialog0),
{_, UACAckSipMsg} = ersip_dialog:uac_request(ack_sipmsg(), UACDialog1),
{_, UACCancelSipMsg} = ersip_dialog:uac_request(cancel_sipmsg(), UACDialog1),
?assertEqual(cseq_number(UACReInviteSipMsg), cseq_number(UACAckSipMsg)),
?assertEqual(cseq_number(UACReInviteSipMsg), cseq_number(UACCancelSipMsg)),
ok.
indialog_ack_and_cancel_cseq_no_cseq_test() ->
%% Requests within a dialog MUST contain strictly monotonically
%% increasing and contiguous CSeq sequence numbers (increasing-by-one)
in each direction ( excepting ACK and CANCEL of course , whose numbers
%% equal the requests being acknowledged or cancelled).
{UASDialog0, UACDialog0} = create_uas_uac_dialogs(invite_request()),
{UASDialog1, ReInviteSipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), UASDialog0),
{_, AckSipMsg} = ersip_dialog:uac_request(del_cseq(ack_sipmsg()), UASDialog1),
{_, CancelSipMsg} = ersip_dialog:uac_request(del_cseq(cancel_sipmsg()), UASDialog1),
?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(AckSipMsg)),
?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(CancelSipMsg)),
{UACDialog1, UACReInviteSipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), UACDialog0),
{_, UACAckSipMsg} = ersip_dialog:uac_request(del_cseq(ack_sipmsg()), UACDialog1),
{_, UACCancelSipMsg} = ersip_dialog:uac_request(del_cseq(cancel_sipmsg()), UACDialog1),
?assertEqual(cseq_number(UACReInviteSipMsg), cseq_number(UACAckSipMsg)),
?assertEqual(cseq_number(UACReInviteSipMsg), cseq_number(UACCancelSipMsg)),
ok.
indialog_ack_and_cancel_cseq_no_cseq_with_info_test() ->
%% Requests within a dialog MUST contain strictly monotonically
%% increasing and contiguous CSeq sequence numbers (increasing-by-one)
in each direction ( excepting ACK and CANCEL of course , whose numbers
%% equal the requests being acknowledged or cancelled).
{_, UACDialog0} = create_uas_uac_dialogs(invite_request()),
{UACDialog1, ReInviteSipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), UACDialog0),
{UACDialog2, InfoSipMsg} = ersip_dialog:uac_request(info_sipmsg(#{}), UACDialog1),
{_, AckSipMsg} = ersip_dialog:uac_request(del_cseq(ack_sipmsg()), UACDialog2),
{_, CancelSipMsg} = ersip_dialog:uac_request(del_cseq(cancel_sipmsg()), UACDialog2),
?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(AckSipMsg)),
?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(CancelSipMsg)),
?assertEqual(cseq_number(ReInviteSipMsg) + 1, cseq_number(InfoSipMsg)),
ok.
indialog_request_after_repeated_ack_no_cseq_test() ->
%% Requests within a dialog MUST contain strictly monotonically
%% increasing and contiguous CSeq sequence numbers (increasing-by-one)
in each direction ( excepting ACK and CANCEL of course , whose numbers
%% equal the requests being acknowledged or cancelled).
{_, UACDialog0} = create_uas_uac_dialogs(invite_request()),
{UACDialog1, ReInviteSipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), UACDialog0),
{UACDialog2, AckSipMsg1} = ersip_dialog:uac_request(del_cseq(ack_sipmsg()), UACDialog1),
{UACDialog3, InfoSipMsg1} = ersip_dialog:uac_request(info_sipmsg(#{}), UACDialog2),
{UACDialog4, AckSipMsg2} = ersip_dialog:uac_request(del_cseq(ack_sipmsg()), UACDialog3),
{_, InfoSipMsg2} = ersip_dialog:uac_request(info_sipmsg(#{}), UACDialog4),
?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(AckSipMsg1)),
?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(AckSipMsg2)),
?assertEqual(cseq_number(ReInviteSipMsg) + 1, cseq_number(InfoSipMsg1)),
?assertEqual(cseq_number(ReInviteSipMsg) + 2, cseq_number(InfoSipMsg2)),
ok.
uas_message_checking_cseq_test() ->
    %% RFC 3261:
    %% 1. If the remote sequence number is empty, it MUST be set to
    %%    the value of the sequence number in the CSeq header field value
    %%    in the request.
    {UASDialog0, UACDialog0} = create_uas_uac_dialogs(invite_request()),
    %% Note that UAC dialog has empty remote sequence number, so we
    %% use initially UAC side as UAS for CSeq checking:
    CSeq = <<"3251">>,
    {_, ReInviteSipMsg} =
        ersip_dialog:uac_request(reinvite_sipmsg(#{cseq => CSeq}), UASDialog0),
    ?assertEqual(empty, ersip_dialog:remote_seq(UACDialog0)),
    {ok, UpdatedDialog} = ersip_dialog:uas_process(ReInviteSipMsg, target_refresh, UACDialog0),
    ?assertEqual(binary_to_integer(CSeq), ersip_dialog:remote_seq(UpdatedDialog)),
    %% If the remote sequence number was not empty, but the sequence
    %% number of the request is lower than the remote sequence number,
    %% the request is out of order and MUST be rejected with a 500
    %% (Server Internal Error) response.
    {_, ReInviteSipMsg1} = ersip_dialog:uac_request(reinvite_sipmsg(), UASDialog0),
    ReInviteSipMsg2 = set_cseq_number(3250, ReInviteSipMsg1),
    ?assertMatch({reply, _}, ersip_dialog:uas_process(ReInviteSipMsg2, target_refresh, UpdatedDialog)),
    {reply, Resp500} = ersip_dialog:uas_process(ReInviteSipMsg2, target_refresh, UpdatedDialog),
    ?assertEqual(500, ersip_sipmsg:status(Resp500)),
    %% Check that in-order message updates cseq:
    CSeqNew = 3252,
    ReInviteSipMsg3 = set_cseq_number(CSeqNew, ReInviteSipMsg1),
    ?assertMatch({ok, _}, ersip_dialog:uas_process(ReInviteSipMsg3, target_refresh, UpdatedDialog)),
    {ok, UpdatedDialog1} = ersip_dialog:uas_process(ReInviteSipMsg3, target_refresh, UpdatedDialog),
    ?assertEqual(CSeqNew, ersip_dialog:remote_seq(UpdatedDialog1)),
    ok.
loose_routing_dialog_test() ->
    %% Route-set construction and loose routing for both dialog sides.
    %% Create dialogs with defined route set:
    {BobDialog, AliceDialog} = create_uas_uac_dialogs(invite_request(), fun loose_route/2),
    {_, ReInviteFromBob} = ersip_dialog:uac_request(reinvite_sipmsg(), BobDialog),
    RouteBob = ersip_sipmsg:get(route, ReInviteFromBob),
    ?assertEqual(ersip_uri:make(<<"sip:">>), ersip_sipmsg:ruri(ReInviteFromBob)),
    %% Check requirements:
    %%
    %% Creating route set (UAS side):
    %% 1. The route set MUST be set to the list of URIs in the
    %%    Record-Route header field from the request, taken in order
    %%    and preserving all URI parameters.
    %%
    %% Filling loose-route request:
    %% 2. If the route set is not empty, and the first URI in the
    %%    route set contains the lr parameter (see Section 19.1.1),
    %%    the UAC MUST place the remote target URI into the
    %%    Request-URI and MUST include a Route header field containing
    %%    the route set values in order, including all parameters.
    ?assertEqual(ersip_uri:make(<<"sip:biloxi.com;lr">>), ersip_hdr_route:uri(ersip_route_set:first(RouteBob))),
    ?assertEqual(ersip_uri:make(<<"sip:atlanta.com;lr">>), ersip_hdr_route:uri(ersip_route_set:last(RouteBob))),
    {_, ReInviteFromAlice} = ersip_dialog:uac_request(reinvite_sipmsg(), AliceDialog),
    RouteAlice = ersip_sipmsg:get(route, ReInviteFromAlice),
    ?assertEqual(ersip_uri:make(<<"sip:bob@192.0.2.4">>), ersip_sipmsg:ruri(ReInviteFromAlice)),
    %% Check requirements:
    %%
    %% Creating route set (UAC side):
    %% 1. The route set MUST be set to the list of URIs in the
    %%    Record-Route header field from the response, taken in
    %%    reverse order and preserving all URI parameters.
    %%
    %% Filling loose-route request:
    %% 2. If the route set is not empty, and the first URI in the
    %%    route set contains the lr parameter (see Section 19.1.1),
    %%    the UAC MUST place the remote target URI into the
    %%    Request-URI and MUST include a Route header field containing
    %%    the route set values in order, including all parameters.
    ?assertEqual(ersip_uri:make(<<"sip:atlanta.com;lr">>), ersip_hdr_route:uri(ersip_route_set:first(RouteAlice))),
    ?assertEqual(ersip_uri:make(<<"sip:biloxi.com;lr">>), ersip_hdr_route:uri(ersip_route_set:last(RouteAlice))),
    ok.
strict_routing_dialog_test() ->
    %% Create dialogs with defined route set:
    {BobDialog, AliceDialog} = create_uas_uac_dialogs(invite_request(), fun strict_route/2),
    %% Check requirements:
    %%
    %% If the route set is not empty, and its first URI does not
    %% contain the lr parameter, the UAC MUST place the first URI from
    %% the route set into the Request-URI, stripping any parameters
    %% that are not allowed in a Request-URI.  The UAC MUST add a
    %% Route header field containing the remainder of the route set
    %% values in order, including all parameters.  The UAC MUST then
    %% place the remote target URI into the Route header field as the
    %% last value.
    {_, ReInviteFromBob} = ersip_dialog:uac_request(reinvite_sipmsg(), BobDialog),
    RouteBob = ersip_sipmsg:get(route, ReInviteFromBob),
    ?assertEqual(ersip_uri:make(<<"sip:biloxi.com">>), ersip_sipmsg:ruri(ReInviteFromBob)),
    ?assertEqual(ersip_uri:make(<<"sip:atlanta.com">>), ersip_hdr_route:uri(ersip_route_set:first(RouteBob))),
    ?assertEqual(ersip_uri:make(<<"sip:">>), ersip_hdr_route:uri(ersip_route_set:last(RouteBob))),
    {_, ReInviteFromAlice} = ersip_dialog:uac_request(reinvite_sipmsg(), AliceDialog),
    RouteAlice = ersip_sipmsg:get(route, ReInviteFromAlice),
    ?assertEqual(ersip_uri:make(<<"sip:atlanta.com">>), ersip_sipmsg:ruri(ReInviteFromAlice)),
    ?assertEqual(ersip_uri:make(<<"sip:biloxi.com">>), ersip_hdr_route:uri(ersip_route_set:first(RouteAlice))),
    ?assertEqual(ersip_uri:make(<<"sip:bob@192.0.2.4">>), ersip_hdr_route:uri(ersip_route_set:last(RouteAlice))),
    ok.
target_refresh_test() ->
    %% A target refresh request (re-INVITE) updates the peer's remote
    %% target: from the request Contact on the UAS side and from the
    %% response Contact on the UAC side.
    {BobDialog, AliceDialog} = create_uas_uac_dialogs(invite_request()),
    NewBobContact = <<"sip:bob-new@192.0.2.5">>,
    {BobDialog1, ReInviteFromBob} = ersip_dialog:uac_request(reinvite_sipmsg(#{contact => NewBobContact}), BobDialog),
    {ok, AliceDialogRefreshed} = ersip_dialog:uas_process(ReInviteFromBob, target_refresh, AliceDialog),
    AliceReInviteResp0 = ersip_sipmsg:reply(200, ReInviteFromBob),
    NewAliceContact = <<"sip:">>,
    AliceReInviteResp = ersip_sipmsg:set(contact, make_contact(NewAliceContact), AliceReInviteResp0),
    {ok, BobDialogRefreshed} = ersip_dialog:uac_trans_result(AliceReInviteResp, target_refresh, BobDialog1),
    ?assertEqual(ersip_uri:make(NewAliceContact), remote_target(BobDialogRefreshed)),
    ?assertEqual(ersip_uri:make(NewBobContact), remote_target(AliceDialogRefreshed)),
    ok.
neg_400_on_star_contact_test() ->
    %% A wildcard ("*") Contact in a dialog-forming INVITE must be
    %% rejected by uas_verify/1 with a 400 reply.
    BaseInvite = ersip_request:sipmsg(invite_request()),
    StarInvite = ersip_sipmsg:set(contact, ersip_hdr_contact_list:make_star(), BaseInvite),
    VerifyResult = ersip_dialog:uas_verify(StarInvite),
    ?assertMatch({reply, _}, VerifyResult),
    {reply, Reply} = VerifyResult,
    ?assertEqual(400, ersip_sipmsg:status(Reply)),
    ok.
neg_400_on_multiple_contact_test() ->
    %% More than one Contact in a dialog-forming INVITE must be
    %% rejected by uas_verify/1 with a 400 reply.
    BaseInvite = ersip_request:sipmsg(invite_request()),
    TwoContacts = [ersip_hdr_contact:make(<<"sip:">>),
                   ersip_hdr_contact:make(<<"sip:bob@192.0.2.4">>)],
    BadInvite = ersip_sipmsg:set(contact, TwoContacts, BaseInvite),
    VerifyResult = ersip_dialog:uas_verify(BadInvite),
    ?assertMatch({reply, _}, VerifyResult),
    {reply, Reply} = VerifyResult,
    ?assertEqual(400, ersip_sipmsg:status(Reply)),
    ok.
neg_400_on_no_contact_test() ->
    %% An INVITE with an empty Contact list must be rejected by
    %% uas_verify/1 with a 400 reply.
    BaseInvite = ersip_request:sipmsg(invite_request()),
    BadInvite = ersip_sipmsg:set(contact, [], BaseInvite),
    VerifyResult = ersip_dialog:uas_verify(BadInvite),
    ?assertMatch({reply, _}, VerifyResult),
    {reply, Reply} = VerifyResult,
    ?assertEqual(400, ersip_sipmsg:status(Reply)),
    ok.
neg_400_on_bad_record_route_test() ->
    %% An unparsable Record-Route header must make uas_verify/1
    %% reply 400.
    BadInvite = create_sipmsg(invite_request_bin(#{record_route => <<"aaaa">>}),
                              make_default_source(), []),
    VerifyResult = ersip_dialog:uas_verify(BadInvite),
    ?assertMatch({reply, _}, VerifyResult),
    {reply, Reply} = VerifyResult,
    ?assertEqual(400, ersip_sipmsg:status(Reply)),
    ok.
uas_verify_test() ->
    %% uas_verify/1 accepts a well-formed INVITE both without and with
    %% a Record-Route header.
    lists:foreach(
      fun(Opts) ->
              SipMsg = create_sipmsg(invite_request_bin(Opts), make_default_source(), []),
              ?assertEqual(ok, ersip_dialog:uas_verify(SipMsg))
      end,
      [#{}, #{record_route => <<"<sip:atlanta.com>">>}]),
    ok.
uac_trans_result_terminates_dialog_test() ->
    {BobDialog, _} = create_uas_uac_dialogs(invite_request()),
    {BobDialog1, ReInviteFromBob} = ersip_dialog:uac_request(reinvite_sipmsg(), BobDialog),
    %% If the response for a request within a dialog is a 481
    %% (Call/Transaction Does Not Exist) or a 408 (Request Timeout),
    %% the UAC SHOULD terminate the dialog.  A UAC SHOULD also
    %% terminate a dialog if no response at all is received for the
    %% request (the client transaction would inform the TU about the
    %% timeout.)
    %% 1. 481
    AliceReInviteResp481 = ersip_sipmsg:reply(481, ReInviteFromBob),
    ?assertEqual(terminate_dialog, ersip_dialog:uac_trans_result(AliceReInviteResp481, target_refresh, BobDialog1)),
    %% 2. 408
    AliceReInviteResp408 = ersip_sipmsg:reply(408, ReInviteFromBob),
    ?assertEqual(terminate_dialog, ersip_dialog:uac_trans_result(AliceReInviteResp408, target_refresh, BobDialog1)),
    %% 3. timeout
    ?assertEqual(terminate_dialog, ersip_dialog:uac_trans_result(timeout, target_refresh, BobDialog1)),
    %% Dialog is not terminated on other response codes:
    [begin
         Reply = ersip_sipmsg:reply(Code, ReInviteFromBob),
         ?assertMatch({ok, _}, ersip_dialog:uac_trans_result(Reply, target_refresh, BobDialog1))
     end || Code <- [200, 299, 400, 407, 409, 499, 500, 599, 600, 699]],
    ok.
no_contact_means_no_refresh_test() ->
    %% Check last "if present" in clause:
    %%
    %% When a UAS receives a target refresh request, it MUST replace the
    %% dialog's remote target URI with the URI from the Contact header field
    %% in that request, if present.
    NoContact = <<>>,
    {BobDialog, AliceDialog} = create_uas_uac_dialogs(invite_request()),
    BobRURI = remote_target(AliceDialog),
    AliceRURI = remote_target(BobDialog),
    %% Re-INVITE and its 200 response carry no Contact at all:
    {BobDialog1, ReInviteFromBob} = ersip_dialog:uac_request(reinvite_sipmsg(#{contact => NoContact}), BobDialog),
    {ok, AliceDialogAfter} = ersip_dialog:uas_process(ReInviteFromBob, target_refresh, AliceDialog),
    AliceReInviteResp0 = ersip_sipmsg:reply(200, ReInviteFromBob),
    AliceReInviteResp = ersip_sipmsg:remove(contact, AliceReInviteResp0),
    {ok, BobDialogAfter} = ersip_dialog:uac_trans_result(AliceReInviteResp, target_refresh, BobDialog1),
    %% Remote targets must stay unchanged on both sides:
    ?assertEqual(AliceRURI, remote_target(BobDialogAfter)),
    ?assertEqual(BobRURI, remote_target(AliceDialogAfter)),
    ok.
regular_requests_means_no_refresh_test() ->
    %% A regular (non target-refresh) request must not update the
    %% remote target, even when it carries a new Contact.
    {BobDialog, AliceDialog} = create_uas_uac_dialogs(invite_request()),
    BobRURI = remote_target(AliceDialog),
    AliceRURI = remote_target(BobDialog),
    NewBobContact = <<"sip:bob-new@192.0.2.5">>,
    {BobDialog1, InfoFromBob} = ersip_dialog:uac_request(info_sipmsg(#{contact => NewBobContact}), BobDialog),
    {ok, AliceDialogAfter} = ersip_dialog:uas_process(InfoFromBob, regular, AliceDialog),
    AliceInfoResp0 = ersip_sipmsg:reply(200, InfoFromBob),
    NewAliceContact = <<"sip:">>,
    AliceInfoResp = ersip_sipmsg:set(contact, make_contact(NewAliceContact), AliceInfoResp0),
    {ok, BobDialogAfter} = ersip_dialog:uac_trans_result(AliceInfoResp, regular, BobDialog1),
    %% Remote targets must stay unchanged on both sides:
    ?assertEqual(AliceRURI, remote_target(BobDialogAfter)),
    ?assertEqual(BobRURI, remote_target(AliceDialogAfter)),
    ok.
bad_request_on_bad_contact_test() ->
    %% A target-refresh request carrying an unusable Contact (wildcard
    %% or non-SIP scheme) must be answered with 400 by the UAS dialog.
    {BobDialog, AliceDialog} = create_uas_uac_dialogs(invite_request()),
    {_, ReInvite} = ersip_dialog:uac_request(reinvite_sipmsg(), BobDialog),
    BadContacts = [star, make_contact(<<"unknown:x.y">>)],
    [check_400_uas_resp(ersip_sipmsg:set(contact, Bad, ReInvite), AliceDialog)
     || Bad <- BadContacts],
    ok.
bad_contact_is_ignored_by_uac_test() ->
    %% A malformed (star) Contact in a 2xx response must not break the
    %% UAC dialog.
    {BobDialog, _AliceDialog} = create_uas_uac_dialogs(invite_request()),
    {BobDialog1, ReInviteFromBob} = ersip_dialog:uac_request(reinvite_sipmsg(), BobDialog),
    AliceReInviteResp0 = ersip_sipmsg:reply(200, ReInviteFromBob),
    AliceReInviteResp = ersip_sipmsg:set(contact, star, AliceReInviteResp0),
    %% BobDialog1 is already bound, so this match also asserts the
    %% dialog state is returned unchanged:
    {ok, BobDialog1} = ersip_dialog:uac_trans_result(AliceReInviteResp, target_refresh, BobDialog1),
    ok.
second_provisional_response_test() ->
    %% Check that second provisional response does not change state of
    %% the dialog on UAS side:
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp180UAS = invite_reply(180, InvSipMsg),
    {UASDialogEarly, _} = ersip_dialog:uas_new(InvSipMsg, InvResp180UAS),
    %% UASDialogEarly is bound, so the match pins the unchanged dialog:
    ?assertMatch({UASDialogEarly, _}, ersip_dialog:uas_pass_response(InvSipMsg, InvResp180UAS, UASDialogEarly)),
    ok.
uas_check_contact_test() ->
    %% The URI provided in the Contact header field MUST be a SIP or
    %% SIPS URI.  If the request that initiated the dialog contained a
    %% SIPS URI in the Request-URI or in the top Record-Route header
    %% field value, if there was any, or the Contact header field if
    %% there was no Record-Route header field, the Contact header
    %% field in the response MUST be a SIPS URI.
    InvSipMsg0 = ersip_request:sipmsg(invite_request()),
    InvSipMsg = ersip_sipmsg:set_ruri(ersip_uri:make(<<"sips:">>), InvSipMsg0),
    InvResp200 = invite_reply(200, InvSipMsg),
    %% 1. Check that we can not create dialog with SIP URI:
    InvResp200Sip = ersip_sipmsg:set(contact, make_contact(<<"sip:bob@192.0.2.4">>), InvResp200),
    ?assertError({cannot_create_dialog, _}, ersip_dialog:uas_new(InvSipMsg, InvResp200Sip)),
    %% 2. Check that we can create dialog with SIPS URI.
    %% (Previously InvResp200Sips was constructed but never passed to
    %% uas_new/2, so the positive half of this check was dead code.)
    InvResp200Sips = ersip_sipmsg:set(contact, make_contact(<<"sips:bob@192.0.2.4">>), InvResp200),
    ?assertMatch({_, _}, ersip_dialog:uas_new(InvSipMsg, InvResp200Sips)),
    %% 3. Check that we can not create dialog with star contact:
    InvResp200Star = ersip_sipmsg:set(contact, star, InvResp200),
    ?assertError({cannot_create_dialog, _}, ersip_dialog:uas_new(InvSipMsg, InvResp200Star)),
    %% 4. Check that we can not create dialog with star contact in request:
    InvSipMsgStar = ersip_sipmsg:set(contact, star, InvSipMsg),
    ?assertError({cannot_create_dialog, _}, ersip_dialog:uas_new(InvSipMsgStar, InvResp200Sip)),
    %% 5. Check that if top record route contains SIPS RURI then
    %% Contact is checked to be SIPS URI.
    InvSipMsgSIPSRR = set_routes(record_route, [<<"sip:atlanta.com;lr">>, <<"sips:biloxi.com;lr">>], InvSipMsg),
    ?assertError({cannot_create_dialog, _}, ersip_dialog:uas_new(InvSipMsgSIPSRR, InvResp200Sip)),
    %% 6. Check that a SIPS contact is accepted with SIPS top
    %% record-route as well (was also previously unasserted):
    ?assertMatch({_, _}, ersip_dialog:uas_new(InvSipMsgSIPSRR, InvResp200Sips)),
    %% 7. Bad contact format:
    InvSipMsgBadContct = ersip_request:sipmsg(invite_request(#{contact => <<"@">>})),
    ?assertError({cannot_create_dialog, _}, ersip_dialog:uas_new(InvSipMsgBadContct, InvResp200)),
    ok.
uac_check_contact_test() ->
    %% 12.1.2 UAC Behavior
    %% If the request has a Request-URI or a topmost Route header
    %% field value with a SIPS URI, the Contact header field MUST
    %% contain a SIPS URI.
    InvReq1 = invite_request(#{ruri => <<"sips:">>,
                               contact => <<"sip:">>}),
    check_new_uac_error(InvReq1),
    InvReq2 = invite_request(#{ruri => <<"sip:">>,
                               contact => <<"sip:">>,
                               route => [<<"sips:biloxi.com">>]}),
    check_new_uac_error(InvReq2),
    %% Check successful construction:
    InvReq3 = invite_request(#{ruri => <<"sips:">>,
                               contact => <<"sips:">>}),
    check_new_uac_ok(InvReq3),
    InvReq4 = invite_request(#{ruri => <<"sip:">>,
                               contact => <<"sips:">>,
                               route => [<<"sips:biloxi.com">>]}),
    check_new_uac_ok(InvReq4),
    ok.
uas_update_after_confirmed_test() ->
    %% Passing a 2xx response through an already-confirmed UAS dialog
    %% must leave it unchanged (BobDialog is bound, so the match below
    %% also asserts that).
    {BobDialog, _} = create_uas_uac_dialogs(invite_request()),
    InvSipMsg = ersip_request:sipmsg(invite_request()),
    Resp200 = invite_reply(200, InvSipMsg),
    ?assertMatch({BobDialog, _RespSipMsg}, ersip_dialog:uas_pass_response(InvSipMsg, Resp200, BobDialog)),
    ok.
is_secure_test() ->
    InvSipMsg = create_sipmsg(invite_request_bin(#{ruri => <<"sips:">>,
                                                   contact => <<"sips:">>}),
                              tls_source(default_peer()), []),
    Target = ersip_uri:make(<<"sips:127.0.0.1;transport=tls">>),
    InvReq = ersip_request:new(InvSipMsg, ersip_branch:make_random(7), Target),
    InvResp200 = invite_reply(200, InvSipMsg),
    InvResp200Sips = ersip_sipmsg:set(contact, make_contact(<<"sips:bob@192.0.2.4">>), InvResp200),
    %% 12.1.1 UAS behavior
    %% If the request arrived over TLS, and the Request-URI contained
    %% a SIPS URI, the "secure" flag is set to TRUE.
    {BobDialog, InvResp} = ersip_dialog:uas_new(InvSipMsg, InvResp200Sips),
    ?assertEqual(true, ersip_dialog:is_secure(BobDialog)),
    %% 12.1.2 UAC Behavior
    %% If the request was sent over TLS, and the Request-URI contained a
    %% SIPS URI, the "secure" flag is set to TRUE.
    {ok, AliceDialog} = ersip_dialog:uac_new(InvReq, InvResp),
    ?assertEqual(true, ersip_dialog:is_secure(AliceDialog)),
    ok.
check_no_secure_when_on_undefined_source_test() ->
    %% Without any source information the dialog must not be
    %% considered secure, even with SIPS RURI and SIPS contacts.
    InvSipMsg = create_sipmsg(invite_request_bin(#{ruri => <<"sips:">>,
                                                   contact => <<"sips:">>}),
                              undefined, []),
    SipsResp = ersip_sipmsg:set(contact,
                                make_contact(<<"sips:bob@192.0.2.4">>),
                                invite_reply(200, InvSipMsg)),
    {Dialog, _} = ersip_dialog:uas_new(InvSipMsg, SipsResp),
    ?assertEqual(false, ersip_dialog:is_secure(Dialog)),
    ok.
uac_create_dialog_no_contact_in_resp_test() ->
    %% A provisional response without Contact still creates a UAC
    %% dialog; the remote target falls back to the request's next hop.
    Req = invite_request(),
    Resp180 = ersip_sipmsg:set(contact, [],
                               invite_reply(180, ersip_request:sipmsg(Req))),
    UACNewResult = ersip_dialog:uac_new(Req, Resp180),
    ?assertMatch({ok, _}, UACNewResult),
    {ok, Dialog} = UACNewResult,
    ?assertEqual(ersip_request:nexthop(Req), ersip_dialog:target(Dialog)),
    ok.
uas_create_dialog_no_contact_in_resp_test() ->
    %% UAS dialog creation also tolerates a response without Contact.
    Req = invite_request(),
    ReqSipMsg = ersip_request:sipmsg(Req),
    Resp180 = ersip_sipmsg:set(contact, [], invite_reply(180, ReqSipMsg)),
    ?assertMatch({_, _}, ersip_dialog:uas_new(ReqSipMsg, Resp180)),
    ok.
uas_negative_response_terminate_test() ->
    %% Passing a final negative (487) response through an early UAS
    %% dialog terminates it.
    ReqSipMsg = ersip_request:sipmsg(invite_request()),
    {EarlyDialog, _} = ersip_dialog:uas_new(ReqSipMsg, invite_reply(180, ReqSipMsg)),
    Resp487 = invite_reply(487, ReqSipMsg),
    ?assertEqual(terminate_dialog,
                 ersip_dialog:uas_pass_response(ReqSipMsg, Resp487, EarlyDialog)),
    ok.
uac_notify_dialog_test() ->
    %% A dialog established by NOTIFY must continue the CSeq numbering
    %% of the dialog-forming request.
    {_, UACDialog0} = notify_create_uas_uac_dialogs(notify_request()),
    {_, NotifyInviteSipMsg} = ersip_dialog:uac_request(notify_sipmsg(), UACDialog0),
    ?assertEqual(cseq_number(notify_sipmsg())+1, cseq_number(NotifyInviteSipMsg)),
    ok.
%% Check that provisional response creates dialogs (UAC/UAS) in early state.
%%
%% Check that in-dialog messages (PRACK, INFO, ...) in early
%% dialog does not switch dialog state to confirmed state.
early_dialog_test() ->
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp180UAS = invite_reply(180, InvSipMsg),
    {UASDialogEarly, InvResp180UAC} = ersip_dialog:uas_new(InvSipMsg, InvResp180UAS),
    ?assertEqual(true, ersip_dialog:is_early(UASDialogEarly)),
    {ok, UACDialogEarly} = ersip_dialog:uac_new(InvReq, InvResp180UAC),
    ?assertEqual(true, ersip_dialog:is_early(UACDialogEarly)),
    %% In-dialog INFO round trip keeps both sides early:
    {UACDialogEarly1, InfoSipMsg} = ersip_dialog:uac_request(info_sipmsg(#{}), UACDialogEarly),
    ?assertEqual(true, ersip_dialog:is_early(UACDialogEarly1)),
    {ok, UASDialogEarly1} = ersip_dialog:uas_process(InfoSipMsg, regular, UASDialogEarly),
    ?assertEqual(true, ersip_dialog:is_early(UASDialogEarly1)),
    InfoResp200UAS = ersip_sipmsg:reply(200, InfoSipMsg),
    {UASDialogEarly2, InfoResp200UAC} = ersip_dialog:uas_pass_response(InfoSipMsg, InfoResp200UAS, UASDialogEarly1),
    ?assertEqual(true, ersip_dialog:is_early(UASDialogEarly2)),
    {ok, UACDialogEarly2} = ersip_dialog:uac_trans_result(InfoResp200UAC, regular, UACDialogEarly1),
    ?assertEqual(true, ersip_dialog:is_early(UACDialogEarly2)),
    %% Check that ersip_dialog:uac_update works the same way as uac_trans_result for in (early) dialog requests
    ?assertEqual({ok, UACDialogEarly2}, ersip_dialog:uac_update(InfoResp200UAC, UACDialogEarly1)),
    %% Only the 2xx on the dialog-forming INVITE confirms both sides:
    InvResp200UAS = invite_reply(200, InvSipMsg),
    {UASDialogConfirmed, InvResp200UAC} = ersip_dialog:uas_pass_response(InvSipMsg, InvResp200UAS, UASDialogEarly2),
    ?assertEqual(false, ersip_dialog:is_early(UASDialogConfirmed)),
    {ok, UACDialogConfirmed} = ersip_dialog:uac_update(InvResp200UAC, UACDialogEarly2),
    ?assertEqual(false, ersip_dialog:is_early(UACDialogConfirmed)),
    ok.
%% Check that UAC terminates early dialog on non2xx response
uac_terminates_early_dialog_on_non2xx_response_test() ->
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp180UAS = invite_reply(180, InvSipMsg),
    {UASDialogEarly, InvResp180UAC} = ersip_dialog:uas_new(InvSipMsg, InvResp180UAS),
    {ok, UACDialogEarly} = ersip_dialog:uac_new(InvReq, InvResp180UAC),
    InvResp400 = invite_reply(400, InvSipMsg),
    %% Both sides must drop the early dialog on a 400 final response:
    ?assertEqual(terminate_dialog, ersip_dialog:uas_pass_response(InvSipMsg, InvResp400, UASDialogEarly)),
    ?assertEqual(terminate_dialog, ersip_dialog:uac_update(InvResp400, UACDialogEarly)),
    ok.
%% Check that UAC terminates early dialog on timeout
uac_terminates_early_dialog_on_timeout_test() ->
    Req = invite_request(),
    ReqSipMsg = ersip_request:sipmsg(Req),
    {_, Resp180} = ersip_dialog:uas_new(ReqSipMsg, invite_reply(180, ReqSipMsg)),
    {ok, EarlyDialog} = ersip_dialog:uac_new(Req, Resp180),
    ?assertEqual(terminate_dialog, ersip_dialog:uac_update(timeout, EarlyDialog)),
    ok.
%% Check that uas_create + uas_pass_response works the same way as uas_new.
%% Confirmed dialog
uas_create_pass_response_pair_confirmed_test() ->
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp200 = invite_reply(200, InvSipMsg),
    {UASDialog, UACResp200} = ersip_dialog:uas_new(InvSipMsg, InvResp200),
    ?assertEqual(UASDialog, ersip_dialog:uas_create(InvSipMsg, InvResp200)),
    ?assertEqual({UASDialog, UACResp200}, ersip_dialog:uas_pass_response(InvSipMsg, InvResp200, UASDialog)),
    ok.
%% Check that uas_create + uas_pass_response works the same way as uas_new.
%% Early dialog
uas_create_pass_response_pair_early_test() ->
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp180 = invite_reply(180, InvSipMsg),
    {UASDialog, UACResp180} = ersip_dialog:uas_new(InvSipMsg, InvResp180),
    ?assertEqual(UASDialog, ersip_dialog:uas_create(InvSipMsg, InvResp180)),
    ?assertEqual({UASDialog, UACResp180}, ersip_dialog:uas_pass_response(InvSipMsg, InvResp180, UASDialog)),
    ok.
%% Check that uas_create + uas_pass_response works the same way as uas_new.
%% SIPS in RURI & SIP URI in contact
uas_create_pass_response_pair_sip_schema_mismatch_test() ->
    InvReq = invite_request(),
    InvSipMsg0 = ersip_request:sipmsg(InvReq),
    InvSipMsg = ersip_sipmsg:set_ruri(ersip_uri:make(<<"sips:">>), InvSipMsg0),
    InvResp200SIPS = invite_reply(200, InvSipMsg),
    InvResp200 = ersip_sipmsg:set(contact, make_contact(<<"sip:bob@192.0.2.4">>), InvResp200SIPS),
    %% uas_create/2 must apply the same scheme check as uas_new/2:
    ?assertError({cannot_create_dialog, _}, ersip_dialog:uas_create(InvSipMsg, InvResp200)),
    ok.
%%===================================================================
%% Helpers
%%===================================================================
-define(crlf, "\r\n").
invite_request() ->
    %% Out-of-dialog INVITE built with all default options.
    invite_request(#{}).
invite_request(Opts) ->
    %% Out-of-dialog INVITE ersip_request with a random branch and a
    %% loopback next-hop target; Opts is forwarded to invite_request_bin/1.
    SipMsg = create_sipmsg(invite_request_bin(Opts), make_default_source(), []),
    NextHop = ersip_uri:make(<<"sip:127.0.0.1">>),
    ersip_request:new(SipMsg, ersip_branch:make_random(7), NextHop).
invite_request_bin() ->
    %% INVITE rendered with all default options.
    invite_request_bin(#{}).
%% Renders a raw INVITE message binary.  Recognized options:
%%   ruri         - request URI binary
%%   record_route - adds a Record-Route header with the given value
%%   contact      - Contact header value; <<>> omits the header entirely
%%   route        - list of Route header values (one header per element)
invite_request_bin(Options) ->
    RURI = maps:get(ruri, Options, <<"sip:">>),
    RecordRoute = case Options of
                      #{record_route := RR} ->
                          <<"Record-Route: ", RR/binary, ?crlf>>;
                      _ ->
                          <<>>
                  end,
    Contact = case Options of
                  #{contact := <<>>} ->
                      <<>>;
                  #{contact := ContactVal} ->
                      <<"Contact: ", ContactVal/binary, ?crlf>>;
                  _ ->
                      <<"Contact: <sip:>", ?crlf>>
              end,
    Route = case Options of
                #{route := Routes} ->
                    IORoutes = [<<"Route: ", R/binary, ?crlf>> || R <- Routes],
                    iolist_to_binary(IORoutes);
                _ ->
                    <<>>
            end,
    <<"INVITE ", RURI/binary, " SIP/2.0" ?crlf
      "Via: SIP/2.0/UDP pc33.atlanta.com;branch=z9hG4bKnashds8" ?crlf
      "Max-Forwards: 70" ?crlf
      "To: Bob <sip:>" ?crlf
      "From: Alice <sip:>;tag=1928301774" ?crlf
      "Call-ID: a84b4c76e66710" ?crlf
      "CSeq: 314159 INVITE" ?crlf,
      Contact/binary,
      RecordRoute/binary,
      Route/binary,
      "Content-Type: application/sdp" ?crlf
      "Content-Length: 4" ?crlf
      ?crlf
      "Test">>.
invite_reply(Code, InvSipMsg) ->
    %% Replies to the INVITE with the given status code, attaching
    %% Bob's contact.
    BobContact = make_contact(<<"sip:bob@192.0.2.4">>),
    ersip_sipmsg:set(contact, BobContact, ersip_sipmsg:reply(Code, InvSipMsg)).
create_uas_uac_dialogs(Req) ->
    %% Same as create_uas_uac_dialogs/2 with a pass-through proxy
    %% function (no Record-Route rewriting).
    create_uas_uac_dialogs(Req, fun(_, ReqResp) -> ReqResp end).
create_uas_uac_dialogs(Req, ProxyFun) ->
    %% Establishes a confirmed dialog pair from the INVITE request Req
    %% via 180 (early) and 200 (confirmed) responses.
    %% ProxyFun(request | response, SipMsg) emulates a proxy on the
    %% path (e.g. adding Record-Route headers).
    %% Returns {UASDialog, UACDialog}.
    InvSipMsg0 = ersip_request:sipmsg(Req),
    InvSipMsg = ProxyFun(request, InvSipMsg0),
    InvResp180UAS = invite_reply(180, InvSipMsg),
    {UASDialogEarly, InvResp180UAC0} = ersip_dialog:uas_new(InvSipMsg, InvResp180UAS),
    InvResp180UAC = ProxyFun(response, InvResp180UAC0),
    ?assertMatch({ok, _}, ersip_dialog:uac_new(Req, InvResp180UAC)),
    {ok, UACDialogEarly} = ersip_dialog:uac_new(Req, InvResp180UAC),
    InvResp200UAS = invite_reply(200, InvSipMsg),
    {UASDialogConfirmed, _} = ersip_dialog:uas_pass_response(InvSipMsg, InvResp200UAS, UASDialogEarly),
    InvResp200UAC = ProxyFun(response, InvResp200UAS),
    {ok, UACDialogConfirmed} = ersip_dialog:uac_update(InvResp200UAC, UACDialogEarly),
    {UASDialogConfirmed, UACDialogConfirmed}.
notify_request() ->
    %% Out-of-dialog NOTIFY ersip_request with a random branch and a
    %% loopback next-hop target.
    SipMsg = create_sipmsg(notify_request_bin(), make_default_source(), []),
    NextHop = ersip_uri:make(<<"sip:127.0.0.1">>),
    ersip_request:new(SipMsg, ersip_branch:make_random(7), NextHop).
notify_request_bin() ->
    %% NOTIFY rendered with all default options.
    notify_request_bin(#{}).
%% Renders a raw NOTIFY message binary; only the request URI (ruri)
%% option is recognized.
notify_request_bin(Options) ->
    RURI = maps:get(ruri, Options, <<"sip:">>),
    Contact = <<"Contact: <sip:>", ?crlf>>,
    <<"NOTIFY ", RURI/binary, " SIP/2.0" ?crlf
      "Via: SIP/2.0/UDP pc33.atlanta.com;branch=z9hG4bKnashds8" ?crlf
      "Max-Forwards: 70" ?crlf
      "To: Bob <sip:>" ?crlf
      "From: Alice <sip:>;tag=1928301774" ?crlf
      "Call-ID: a84b4c76e66710" ?crlf
      %% CSeq method must match the request method (was "INVITE", a
      %% copy-paste from invite_request_bin/1).
      "CSeq: 314159 NOTIFY" ?crlf,
      Contact/binary,
      %% Subscription-State previously lacked its ?crlf terminator, so
      %% it was glued onto the Content-Type line into one malformed
      %% header.
      "Subscription-State: active;expires=3600" ?crlf
      "Content-Type: text/plain" ?crlf
      "Content-Length: 4" ?crlf
      ?crlf
      "Test">>.
notify_sipmsg() ->
    %% Default NOTIFY parsed into an ersip_sipmsg.
    create_sipmsg(notify_request_bin(), make_default_source(), []).
notify_create_uas_uac_dialogs(Req) ->
    %% Same as notify_create_uas_uac_dialogs/2 with a pass-through
    %% proxy function.
    notify_create_uas_uac_dialogs(Req, fun(_, ReqResp) -> ReqResp end).
notify_create_uas_uac_dialogs(Req, ProxyFun) ->
    %% Establishes a confirmed dialog pair directly from a 200
    %% response to the NOTIFY request Req (no early state involved).
    %% Returns {UASDialog, UACDialog}.
    NotifySipMsg0 = ersip_request:sipmsg(Req),
    NotifySipMsg = ProxyFun(request, NotifySipMsg0),
    NotifyResp200UAS = invite_reply(200, NotifySipMsg),
    NotifyResp200UAC = ProxyFun(response, NotifyResp200UAS),
    {UASDialogConfirmed, _} = ersip_dialog:uas_new(NotifySipMsg, NotifyResp200UAS),
    {ok, UACDialogConfirmed} = ersip_dialog:uac_new(Req, NotifyResp200UAC),
    {UASDialogConfirmed, UACDialogConfirmed}.
bye_sipmsg() ->
    %% Minimal BYE parsed into an ersip_sipmsg.
    create_sipmsg(bye_bin(), make_default_source(), []).
bye_bin() ->
    %% Deliberately minimal BYE.  NOTE(review): it lacks Via, From,
    %% To, Call-ID and CSeq — presumably only usable where the parser
    %% or test tolerates that; confirm against its call sites.
    <<"BYE sip: SIP/2.0" ?crlf
      "Max-Forwards: 70" ?crlf
      ?crlf>>.
reinvite_sipmsg() ->
    %% Re-INVITE rendered with all default options.
    reinvite_sipmsg(#{}).
reinvite_sipmsg(UserOpts) ->
    %% Renders an in-dialog re-INVITE.  Supported options:
    %%   cseq    - CSeq number as binary (default <<"314160">>)
    %%   contact - contact URI binary; <<>> omits the Contact header
    CSeq = maps:get(cseq, UserOpts, <<"314160">>),
    Contact =
        case maps:get(contact, UserOpts, <<"sip:">>) of
            <<>> -> <<>>;
            ContactURI -> <<"Contact: ", ContactURI/binary, ?crlf>>
        end,
    Bin =
        <<"INVITE sip: SIP/2.0" ?crlf
          "Max-Forwards: 70" ?crlf
          "Content-Type: application/sdp" ?crlf
          "Content-Length: 4" ?crlf,
          Contact/binary,
          "CSeq: ", CSeq/binary, " INVITE" ?crlf
          ?crlf
          "Test">>,
    create_sipmsg(Bin, make_default_source(), []).
info_sipmsg(UserOpts) ->
    %% Renders an in-dialog INFO request.  Supported options:
    %%   cseq    - CSeq number as binary (default <<"314160">>)
    %%   contact - contact URI binary; <<>> omits the Contact header
    CSeq = maps:get(cseq, UserOpts, <<"314160">>),
    Contact =
        case maps:get(contact, UserOpts, <<"sip:">>) of
            <<>> -> <<>>;
            ContactURI -> <<"Contact: ", ContactURI/binary, ?crlf>>
        end,
    Bin =
        <<"INFO sip: SIP/2.0" ?crlf
          "From: Alice <sip:>" ?crlf
          "Max-Forwards: 70" ?crlf,
          Contact/binary,
          "CSeq: ", CSeq/binary, " INFO" ?crlf
          ?crlf>>,
    create_sipmsg(Bin, make_default_source(), []).
ack_sipmsg() ->
    %% ACK carrying the re-INVITE's CSeq number (314160).
    Bin =
        <<"ACK sip: SIP/2.0" ?crlf
          "Max-Forwards: 70" ?crlf
          "Content-Type: application/sdp" ?crlf
          "Content-Length: 4" ?crlf
          "CSeq: 314160 ACK" ?crlf
          ?crlf
          "Test">>,
    create_sipmsg(Bin, make_default_source(), []).
cancel_sipmsg() ->
    %% CANCEL carrying the re-INVITE's CSeq number (314160).
    Bin =
        <<"CANCEL sip: SIP/2.0" ?crlf
          "Max-Forwards: 70" ?crlf
          "Content-Type: application/sdp" ?crlf
          "Content-Length: 4" ?crlf
          "CSeq: 314160 CANCEL" ?crlf
          ?crlf
          "Test">>,
    create_sipmsg(Bin, make_default_source(), []).
make_default_source() ->
    %% TCP source from the default loopback peer.
    tcp_source(default_peer()).
default_peer() ->
    %% {Host, Port} of the default test peer: 127.0.0.1:5060.
    {ersip_host:make({127, 0, 0, 1}), 5060}.
tcp_source(Peer) ->
    %% NOTE(review): the first argument is always default_peer(); Peer
    %% only sets the other side of the source.  All current callers
    %% pass default_peer() anyway — confirm this is intentional.
    ersip_source:new(default_peer(), Peer, ersip_transport:tcp(), undefined).
tls_source(Peer) ->
    %% Like tcp_source/1 but over TLS (used by the secure-dialog tests).
    ersip_source:new(default_peer(), Peer, ersip_transport:tls(), undefined).
create_sipmsg(Msg, Source, HeadersToParse) when is_binary(Msg) ->
    %% Parses a raw message binary into an ersip_sipmsg and attaches
    %% the given source.  HeadersToParse is passed to
    %% ersip_sipmsg:parse/2 to control eager header parsing.
    %% Crashes (badmatch) on any parse error — fine for tests.
    P = ersip_parser:new_dgram(Msg),
    {{ok, PMsg}, _P2} = ersip_parser:parse(P),
    PMsg1 = ersip_msg:set_source(Source, PMsg),
    {ok, SipMsg} = ersip_sipmsg:parse(PMsg1, HeadersToParse),
    SipMsg.
make_contact(ContactBin) when is_binary(ContactBin) ->
    %% Wraps a contact URI binary into a one-element contact list.
    [ersip_hdr_contact:make(ContactBin)].
clear_tag(H, SipMsg) when H == from; H == to ->
    %% Drops the tag parameter from the From or To header.
    FromOrTo0 = ersip_sipmsg:get(H, SipMsg),
    FromOrTo = ersip_hdr_fromto:set_tag(undefined, FromOrTo0),
    ersip_sipmsg:set(H, FromOrTo, SipMsg).
cseq_number(SipMsg) ->
    %% Numeric part of the message's CSeq header.
    ersip_hdr_cseq:number(ersip_sipmsg:get(cseq, SipMsg)).
set_cseq_number(Seq, Req) ->
    %% Replaces the numeric part of the request's CSeq header.
    WithNumber = ersip_hdr_cseq:set_number(Seq, ersip_sipmsg:get(cseq, Req)),
    ersip_sipmsg:set(cseq, WithNumber, Req).
loose_route(request, ReqSipMsg) ->
    %% Proxy emulation for create_uas_uac_dialogs/2: adds
    %% loose-routing (;lr) Record-Route entries to the request.
    RRRoutes = [<<"sip:atlanta.com;lr">>, <<"sip:biloxi.com;lr">>],
    set_routes(record_route, RRRoutes, ReqSipMsg);
loose_route(response, RespSipMsg) ->
    %% Responses are passed through untouched.
    RespSipMsg.
strict_route(request, ReqSipMsg) ->
    %% Proxy emulation for create_uas_uac_dialogs/2: adds
    %% strict-routing (no ;lr) Record-Route entries to the request.
    RRRoutes = [<<"sip:atlanta.com">>, <<"sip:biloxi.com">>],
    set_routes(record_route, RRRoutes, ReqSipMsg);
strict_route(response, RespSipMsg) ->
    %% Responses are passed through untouched.
    RespSipMsg.
set_routes(Header, Routes, SipMsg) ->
    %% Replaces Header (route/record_route) with a route set built
    %% from the URI binaries in Routes.
    RRSet0 = ersip_route_set:new(),
    RRSet = add_routes(Routes, RRSet0),
    ersip_sipmsg:set(Header, RRSet, SipMsg).
add_routes(URIs, RouteSet) ->
    %% Prepends each URI (in list order) to the route set, so the last
    %% list element ends up first in the resulting set.
    lists:foldl(
      fun(URI, Acc) ->
              ersip_route_set:add_first(ersip_hdr_route:new(ersip_uri:make(URI)), Acc)
      end, RouteSet, URIs).
remote_target(Dialog) ->
    %% Trick to extract remote target is to send message and get RURI
    %% from it (no public accessor is used here).
    {_, SipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), Dialog),
    ersip_sipmsg:ruri(SipMsg).
check_400_uas_resp(Req, Dialog) ->
    %% The dialog must refuse the target-refresh request with a 400 reply.
    Result = ersip_dialog:uas_process(Req, target_refresh, Dialog),
    ?assertMatch({reply, _}, Result),
    {reply, Resp} = Result,
    ?assertEqual(400, ersip_sipmsg:status(Resp)).
check_new_uac_error(Req) ->
    %% uac_new/2 must reject dialog construction for this request.
    Resp200 = invite_reply(200, ersip_request:sipmsg(Req)),
    ?assertMatch({error, _}, ersip_dialog:uac_new(Req, Resp200)).
check_new_uac_ok(Req) ->
    %% uac_new/2 must successfully construct a dialog for this request.
    Resp200 = invite_reply(200, ersip_request:sipmsg(Req)),
    ?assertMatch({ok, _}, ersip_dialog:uac_new(Req, Resp200)).
del_cseq(SipMsg) ->
    %% Removes the CSeq header so the dialog has to generate one.
    ersip_sipmsg:remove(cseq, SipMsg).
| null | https://raw.githubusercontent.com/poroh/ersip/60afa92a53898f6692f3a30d04b5b88a580c8153/test/ersip_dialog_test.erl | erlang |
All rights reserved.
Common dialog support test
TODO:
- Check that Record-route are ignored for target_refresher
messages
===================================================================
Cases
===================================================================
====================
Sending BYE through UAC dialog:
--------------------
--------------------
Check that message is filled in according to dialog:
the remote tag of the dialog ID.
from the dialog state.
of the dialog ID
increasing and contiguous CSeq sequence numbers
of course, whose numbers equal the requests being acknowledged
or cancelled).
the method of the request.
the request.
====================
--------------------
--------------------
Check that message is filled in according to dialog:
the remote tag of the dialog ID.
from the dialog state.
of the dialog ID
increasing and contiguous CSeq sequence numbers
of course, whose numbers equal the requests being acknowledged
or cancelled).
Filled with new value
the method of the request.
the request.
request without a tag in the From field, in which case the tag is
considered to have a value of null.
did not mandate From tags.
If the value of the remote or local tags is null, the tag
parameter MUST be omitted from the To or From header fields,
respectively.
Check that a message sent without a From tag is matched to the dialog
created by the initial INVITE.
A UAC MUST be prepared to receive a response without a tag in
the To field, in which case the tag is considered to have a
value of null.
which did not mandate To tags.
If the value of the remote or local tags is null, the tag
parameter MUST be omitted from the To or From header fields,
respectively.
Check that a message sent without a From tag is matched to the dialog
created by the initial INVITE.
Requests within a dialog MUST contain strictly monotonically
increasing and contiguous CSeq sequence numbers (increasing-by-one)
equal the requests being acknowledged or cancelled).
Requests within a dialog MUST contain strictly monotonically
increasing and contiguous CSeq sequence numbers (increasing-by-one)
equal the requests being acknowledged or cancelled).
Requests within a dialog MUST contain strictly monotonically
increasing and contiguous CSeq sequence numbers (increasing-by-one)
equal the requests being acknowledged or cancelled).
Requests within a dialog MUST contain strictly monotonically
increasing and contiguous CSeq sequence numbers (increasing-by-one)
equal the requests being acknowledged or cancelled).
the value of the sequence number in the CSeq header field value
in the request.
If the remote sequence number was not empty, but the sequence
number of the request is lower than the remote sequence number,
Check that in-order message updates cseq:
Create dialogs with defined route set:
Check requirements:
Creating route set:
and preserving all URI parameters.
Filling loose-route request:
route set contains the lr parameter (see Section 19.1.1),
the route set values in order, including all parameters.
Check requirements:
Creating route set:
reverse order and preserving all URI parameters.
Filling loose-route request:
route set contains the lr parameter (see Section 19.1.1),
the route set values in order, including all parameters.
Create dialogs with defined route set:
Check requirements:
the route set into the Request-URI, stripping any parameters
last value.
Create dialogs with defined route set:
terminate a dialog if no response at all is received for the
request (the client transaction would inform the TU about the
timeout.)
Dialog does not terminated on other response codes:
Check last "if present" in clause:
in that request, if present.
SIPS URI. If the request that initiated the dialog contained a
field value, if there was any, or the Contact header field if
there was no Record-Route header field, the Contact header
field in the response MUST be a SIPS URI.
Contact is checked to be SIPS URI.
12.1.2 UAC Behavior
field value with a SIPS URI, the Contact header field MUST
contain a SIPS URI.
Check successful construction:
a SIPS URI, the "secure" flag is set to TRUE.
12.1.2 UAC Behavior
SIPS URI, the "secure" flag is set to TRUE.
dialog does not switch dialog state to confirmed state.
Check that ersip_dialog:uac_update works the same way as uac_trans_result for in (early) dialog requests
Check that uas_create + uas_pass_response works the same way as uas_new.
Confirmed dialog
Check that uas_create + uas_pass_response works the same way as uas_new.
Early dialog
Check that uas_create + uas_pass_response works the same way as uas_new.
===================================================================
Helpers
===================================================================
Add proxy record route:
Add proxy record route:
Trick to extract remote target is to send message and get RURI
from it. | Copyright ( c ) 2018 , 2021 Dmitry Poroh
Distributed under the terms of the MIT License . See the LICENSE file .
-module(ersip_dialog_test).
-include_lib("eunit/include/eunit.hrl").
%% End-to-end check of dialog establishment (UAS and UAC sides) and of
%% the RFC 3261 section 12.2.1.1 rules for requests sent within the
%% dialog (here: BYE sent from each side).
dialog_create_test() ->
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp180UAS = invite_reply(180, InvSipMsg),
    ?assertEqual(no_dialog, ersip_dialog:uas_dialog_id(InvSipMsg)),
    {UASDialogEarly, InvResp180UAC} = ersip_dialog:uas_new(InvSipMsg, InvResp180UAS),
    ?assertMatch({ok, _}, ersip_dialog:uac_new(InvReq, InvResp180UAC)),
    {ok, UACDialogEarly} = ersip_dialog:uac_new(InvReq, InvResp180UAC),
    InvResp200UAS = invite_reply(200, InvSipMsg),
    {UASDialogConfirmed, InvResp200UAC} = ersip_dialog:uas_pass_response(InvSipMsg, InvResp200UAS, UASDialogEarly),
    {ok, UACDialogConfirmed} = ersip_dialog:uac_update(InvResp200UAS, UACDialogEarly),
    %% Sending BYE through UAC dialog:
    {_UACDialog1, ByeSipMsgA} = ersip_dialog:uac_request(bye_sipmsg(), UACDialogConfirmed),
    %% Check that dialog identifier of UAS is equal to the one calculated by request:
    ?assertMatch({ok, _}, ersip_dialog:uas_dialog_id(ByeSipMsgA)),
    {ok, ByeUASDialogId} = ersip_dialog:uas_dialog_id(ByeSipMsgA),
    ?assertEqual(ersip_dialog:id(UASDialogConfirmed), ByeUASDialogId),
    %% 1. The URI in the To field of the request MUST be set to the
    %%    remote URI from the dialog state.
    ?assertEqual(ersip_hdr_fromto:uri(ersip_sipmsg:get(to, InvSipMsg)),
                 ersip_hdr_fromto:uri(ersip_sipmsg:get(to, ByeSipMsgA))),
    %% 2. The tag in the To header field of the request MUST be set to
    %%    the remote tag of the dialog ID.
    ?assertEqual(ersip_hdr_fromto:tag(ersip_sipmsg:get(to, InvResp180UAC)),
                 ersip_hdr_fromto:tag(ersip_sipmsg:get(to, ByeSipMsgA))),
    %% 3. The From URI of the request MUST be set to the local URI
    %%    from the dialog state.
    ?assertEqual(ersip_hdr_fromto:uri(ersip_sipmsg:get(from, InvSipMsg)),
                 ersip_hdr_fromto:uri(ersip_sipmsg:get(from, ByeSipMsgA))),
    %% 4. The tag in the From header field of the request MUST be set
    %%    to the local tag of the dialog ID.
    ?assertEqual(ersip_hdr_fromto:tag(ersip_sipmsg:get(from, InvSipMsg)),
                 ersip_hdr_fromto:tag(ersip_sipmsg:get(from, ByeSipMsgA))),
    %% 5. The Call-ID of the request MUST be set to the Call-ID of the dialog.
    ?assertEqual(ersip_sipmsg:get(callid, InvSipMsg),
                 ersip_sipmsg:get(callid, ByeSipMsgA)),
    %% 6. Requests within a dialog MUST contain strictly monotonically
    %%    increasing and contiguous CSeq sequence numbers
    %%    (increasing-by-one) in each direction (excepting ACK and
    %%    CANCEL of course, whose numbers equal the requests being
    %%    acknowledged or cancelled).
    ?assert(ersip_hdr_cseq:number(ersip_sipmsg:get(cseq, InvSipMsg))
            < ersip_hdr_cseq:number(ersip_sipmsg:get(cseq, ByeSipMsgA))),
    %% 7. The method field in the CSeq header field value MUST match
    %%    the method of the request.
    ?assertEqual(ersip_sipmsg:method(ByeSipMsgA),
                 ersip_hdr_cseq:method(ersip_sipmsg:get(cseq, ByeSipMsgA))),
    %% 8. If the route set is empty, the UAC MUST place the remote
    %%    target URI into the Request-URI.  The UAC MUST NOT add a
    %%    Route header field to the request.
    [RemoteContactA] = ersip_sipmsg:get(contact, InvResp200UAC),
    ?assertEqual(ersip_hdr_contact:uri(RemoteContactA),
                 ersip_sipmsg:ruri(ByeSipMsgA)),
    %% Sending BYE through UAS dialog:
    {_UASDialog1, ByeSipMsgB} = ersip_dialog:uac_request(bye_sipmsg(), UASDialogConfirmed),
    %% Check that dialog identifier of UAC is equal to the one calculated by request:
    ?assertMatch({ok, _}, ersip_dialog:uas_dialog_id(ByeSipMsgB)),
    {ok, ByeBDialogId} = ersip_dialog:uas_dialog_id(ByeSipMsgB),
    ?assertEqual(ersip_dialog:id(UACDialogConfirmed), ByeBDialogId),
    %% 1. The URI in the To field of the request MUST be set to the
    %%    remote URI from the dialog state.
    ?assertEqual(ersip_hdr_fromto:uri(ersip_sipmsg:get(from, InvSipMsg)),
                 ersip_hdr_fromto:uri(ersip_sipmsg:get(to, ByeSipMsgB))),
    %% 2. The tag in the To header field of the request MUST be set to
    %%    the remote tag of the dialog ID.
    ?assertEqual(ersip_hdr_fromto:tag(ersip_sipmsg:get(from, InvResp180UAC)),
                 ersip_hdr_fromto:tag(ersip_sipmsg:get(to, ByeSipMsgB))),
    %% 3. The From URI of the request MUST be set to the local URI
    %%    from the dialog state.
    ?assertEqual(ersip_hdr_fromto:uri(ersip_sipmsg:get(to, InvSipMsg)),
                 ersip_hdr_fromto:uri(ersip_sipmsg:get(from, ByeSipMsgB))),
    %% 4. The tag in the From header field of the request MUST be set
    %%    to the local tag of the dialog ID.
    ?assertEqual(ersip_hdr_fromto:tag(ersip_sipmsg:get(to, InvResp180UAS)),
                 ersip_hdr_fromto:tag(ersip_sipmsg:get(from, ByeSipMsgB))),
    %% 5. The Call-ID of the request MUST be set to the Call-ID of the dialog.
    ?assertEqual(ersip_sipmsg:get(callid, InvSipMsg),
                 ersip_sipmsg:get(callid, ByeSipMsgB)),
    %% 7. The method field in the CSeq header field value MUST match
    %%    the method of the request.
    ?assertEqual(ersip_sipmsg:method(ByeSipMsgB),
                 ersip_hdr_cseq:method(ersip_sipmsg:get(cseq, ByeSipMsgB))),
    %% 8. If the route set is empty, the UAC MUST place the remote
    %%    target URI into the Request-URI.  The UAC MUST NOT add a
    %%    Route header field to the request.
    [RemoteContactB] = ersip_sipmsg:get(contact, InvSipMsg),
    ?assertEqual(ersip_hdr_contact:uri(RemoteContactB),
                 ersip_sipmsg:ruri(ByeSipMsgB)),
    ok.
%% A UAS MUST be prepared to receive a request without a tag in the
%% From field, in which case the tag is considered to have a value of
%% null.  This is to maintain backwards compatibility with RFC 2543,
%% which did not mandate From tags.
uas_dialog_rfc2543_compiance_test() ->
    InvReq = invite_request(),
    InvSipMsg = clear_tag(from, ersip_request:sipmsg(InvReq)),
    InvResp200 = invite_reply(200, InvSipMsg),
    {Dialog, _} = ersip_dialog:uas_new(InvSipMsg, InvResp200),
    {_, ByeSipMsgB} = ersip_dialog:uac_request(bye_sipmsg(), Dialog),
    %% If the value of the remote or local tags is null, the tag
    %% parameter MUST be omitted from the To or From header fields,
    %% respectively.
    ?assertEqual(undefined, ersip_hdr_fromto:tag(ersip_sipmsg:get(to, ByeSipMsgB))),
    %% Check that a message sent without a From tag is matched to the
    %% dialog created by the initial INVITE.
    {ok, DialogA} = ersip_dialog:uac_new(InvReq, InvResp200),
    {_, ByeSipMsgA0} = ersip_dialog:uac_request(bye_sipmsg(), DialogA),
    ByeSipMsgA = clear_tag(from, ByeSipMsgA0),
    {ok, ByeADialogId} = ersip_dialog:uas_dialog_id(ByeSipMsgA),
    ?assertEqual(ersip_dialog:id(Dialog), ByeADialogId),
    ok.
%% A UAC MUST be prepared to receive a response without a tag in the
%% To field, in which case the tag is considered to have a value of
%% null.  This is to maintain backwards compatibility with RFC 2543,
%% which did not mandate To tags.
uac_dialog_rfc2543_compiance_test() ->
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp200 = clear_tag(to, invite_reply(200, InvSipMsg)),
    ?assertEqual(undefined, ersip_hdr_fromto:tag(ersip_sipmsg:get(to, InvResp200))),
    ?assertMatch({ok, _}, ersip_dialog:uac_new(InvReq, InvResp200)),
    {ok, Dialog} = ersip_dialog:uac_new(InvReq, InvResp200),
    {_, ByeSipMsg} = ersip_dialog:uac_request(bye_sipmsg(), Dialog),
    %% If the value of the remote or local tags is null, the tag
    %% parameter MUST be omitted from the To or From header fields,
    %% respectively.
    ?assertEqual(undefined, ersip_hdr_fromto:tag(ersip_sipmsg:get(to, ByeSipMsg))),
    %% Check that a message sent without a From tag is matched to the
    %% dialog created by the initial INVITE.
    {DialogB, _} = ersip_dialog:uas_new(InvSipMsg, InvResp200),
    {_, ByeSipMsgB0} = ersip_dialog:uac_request(bye_sipmsg(), DialogB),
    ByeSipMsgB = clear_tag(from, ByeSipMsgB0),
    {ok, ByeBDialogId} = ersip_dialog:uas_dialog_id(ByeSipMsgB),
    ?assertEqual(ersip_dialog:id(Dialog), ByeBDialogId),
    ok.
%% Requests within a dialog MUST contain strictly monotonically
%% increasing and contiguous CSeq sequence numbers (increasing-by-one)
%% in each direction (excepting ACK and CANCEL of course, whose
%% numbers equal the requests being acknowledged or cancelled).
indialog_ack_and_cancel_cseq_test() ->
    {UASDialog0, UACDialog0} = create_uas_uac_dialogs(invite_request()),
    {UASDialog1, ReInviteSipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), UASDialog0),
    {_, AckSipMsg} = ersip_dialog:uac_request(ack_sipmsg(), UASDialog1),
    {_, CancelSipMsg} = ersip_dialog:uac_request(cancel_sipmsg(), UASDialog1),
    ?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(AckSipMsg)),
    ?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(CancelSipMsg)),
    {UACDialog1, UACReInviteSipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), UACDialog0),
    {_, UACAckSipMsg} = ersip_dialog:uac_request(ack_sipmsg(), UACDialog1),
    {_, UACCancelSipMsg} = ersip_dialog:uac_request(cancel_sipmsg(), UACDialog1),
    ?assertEqual(cseq_number(UACReInviteSipMsg), cseq_number(UACAckSipMsg)),
    ?assertEqual(cseq_number(UACReInviteSipMsg), cseq_number(UACCancelSipMsg)),
    ok.

%% Same as above, but ACK/CANCEL are passed without a CSeq header, so
%% the dialog must fill in the INVITE's sequence number itself.
indialog_ack_and_cancel_cseq_no_cseq_test() ->
    {UASDialog0, UACDialog0} = create_uas_uac_dialogs(invite_request()),
    {UASDialog1, ReInviteSipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), UASDialog0),
    {_, AckSipMsg} = ersip_dialog:uac_request(del_cseq(ack_sipmsg()), UASDialog1),
    {_, CancelSipMsg} = ersip_dialog:uac_request(del_cseq(cancel_sipmsg()), UASDialog1),
    ?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(AckSipMsg)),
    ?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(CancelSipMsg)),
    {UACDialog1, UACReInviteSipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), UACDialog0),
    {_, UACAckSipMsg} = ersip_dialog:uac_request(del_cseq(ack_sipmsg()), UACDialog1),
    {_, UACCancelSipMsg} = ersip_dialog:uac_request(del_cseq(cancel_sipmsg()), UACDialog1),
    ?assertEqual(cseq_number(UACReInviteSipMsg), cseq_number(UACAckSipMsg)),
    ?assertEqual(cseq_number(UACReInviteSipMsg), cseq_number(UACCancelSipMsg)),
    ok.

%% ACK/CANCEL keep the INVITE's CSeq number even after another
%% in-dialog request (INFO) has advanced the local sequence number.
indialog_ack_and_cancel_cseq_no_cseq_with_info_test() ->
    {_, UACDialog0} = create_uas_uac_dialogs(invite_request()),
    {UACDialog1, ReInviteSipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), UACDialog0),
    {UACDialog2, InfoSipMsg} = ersip_dialog:uac_request(info_sipmsg(#{}), UACDialog1),
    {_, AckSipMsg} = ersip_dialog:uac_request(del_cseq(ack_sipmsg()), UACDialog2),
    {_, CancelSipMsg} = ersip_dialog:uac_request(del_cseq(cancel_sipmsg()), UACDialog2),
    ?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(AckSipMsg)),
    ?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(CancelSipMsg)),
    ?assertEqual(cseq_number(ReInviteSipMsg) + 1, cseq_number(InfoSipMsg)),
    ok.

%% Repeated ACKs without CSeq do not advance the local sequence
%% number; subsequent requests keep increasing by one.
indialog_request_after_repeated_ack_no_cseq_test() ->
    {_, UACDialog0} = create_uas_uac_dialogs(invite_request()),
    {UACDialog1, ReInviteSipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), UACDialog0),
    {UACDialog2, AckSipMsg1} = ersip_dialog:uac_request(del_cseq(ack_sipmsg()), UACDialog1),
    {UACDialog3, InfoSipMsg1} = ersip_dialog:uac_request(info_sipmsg(#{}), UACDialog2),
    {UACDialog4, AckSipMsg2} = ersip_dialog:uac_request(del_cseq(ack_sipmsg()), UACDialog3),
    {_, InfoSipMsg2} = ersip_dialog:uac_request(info_sipmsg(#{}), UACDialog4),
    ?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(AckSipMsg1)),
    ?assertEqual(cseq_number(ReInviteSipMsg), cseq_number(AckSipMsg2)),
    ?assertEqual(cseq_number(ReInviteSipMsg) + 1, cseq_number(InfoSipMsg1)),
    ?assertEqual(cseq_number(ReInviteSipMsg) + 2, cseq_number(InfoSipMsg2)),
    ok.
%% UAS-side CSeq checking for in-dialog requests (RFC 3261 12.2.2).
uas_message_checking_cseq_test() ->
    %% 1. If the remote sequence number is empty, it MUST be set to
    %%    the value of the sequence number in the CSeq header field
    %%    value in the request.
    {UASDialog0, UACDialog0} = create_uas_uac_dialogs(invite_request()),
    %% Note that UAC dialog has empty remote sequence number, so we
    %% initially use the UAC side as UAS for CSeq checking:
    CSeq = <<"3251">>,
    {_, ReInviteSipMsg} =
        ersip_dialog:uac_request(reinvite_sipmsg(#{cseq => CSeq}), UASDialog0),
    ?assertEqual(empty, ersip_dialog:remote_seq(UACDialog0)),
    {ok, UpdatedDialog} = ersip_dialog:uas_process(ReInviteSipMsg, target_refresh, UACDialog0),
    ?assertEqual(binary_to_integer(CSeq), ersip_dialog:remote_seq(UpdatedDialog)),
    %% If the remote sequence number was not empty, but the sequence
    %% number of the request is lower than the remote sequence number,
    %% the request is out of order and MUST be rejected with a 500
    %% (Server Internal Error) response.
    {_, ReInviteSipMsg1} = ersip_dialog:uac_request(reinvite_sipmsg(), UASDialog0),
    ReInviteSipMsg2 = set_cseq_number(3250, ReInviteSipMsg1),
    ?assertMatch({reply, _}, ersip_dialog:uas_process(ReInviteSipMsg2, target_refresh, UpdatedDialog)),
    {reply, Resp500} = ersip_dialog:uas_process(ReInviteSipMsg2, target_refresh, UpdatedDialog),
    ?assertEqual(500, ersip_sipmsg:status(Resp500)),
    %% Check that an in-order message updates the remote CSeq:
    CSeqNew = 3252,
    ReInviteSipMsg3 = set_cseq_number(CSeqNew, ReInviteSipMsg1),
    ?assertMatch({ok, _}, ersip_dialog:uas_process(ReInviteSipMsg3, target_refresh, UpdatedDialog)),
    {ok, UpdatedDialog1} = ersip_dialog:uas_process(ReInviteSipMsg3, target_refresh, UpdatedDialog),
    ?assertEqual(CSeqNew, ersip_dialog:remote_seq(UpdatedDialog1)),
    ok.
%% Create dialogs with a loose-routing (lr) record-route set and check
%% the resulting route sets and RURIs on both sides.
%% NOTE(review): some URI literals below look truncated to <<"sip:">>
%% (user@host part apparently stripped) — verify against VCS history.
loose_routing_dialog_test() ->
    {BobDialog, AliceDialog} = create_uas_uac_dialogs(invite_request(), fun loose_route/2),
    {_, ReInviteFromBob} = ersip_dialog:uac_request(reinvite_sipmsg(), BobDialog),
    RouteBob = ersip_sipmsg:get(route, ReInviteFromBob),
    ?assertEqual(ersip_uri:make(<<"sip:">>), ersip_sipmsg:ruri(ReInviteFromBob)),
    %% 1. The route set MUST be set to the list of URIs in the
    %%    Record-Route header field from the request, taken in order
    %%    and preserving all URI parameters.
    %% 2. If the route set is not empty, and the first URI in the
    %%    route set contains the lr parameter (see Section 19.1.1),
    %%    the UAC MUST place the remote target URI into the
    %%    Request-URI and MUST include a Route header field containing
    %%    the route set values in order, including all parameters.
    ?assertEqual(ersip_uri:make(<<"sip:biloxi.com;lr">>), ersip_hdr_route:uri(ersip_route_set:first(RouteBob))),
    ?assertEqual(ersip_uri:make(<<"sip:atlanta.com;lr">>), ersip_hdr_route:uri(ersip_route_set:last(RouteBob))),
    {_, ReInviteFromAlice} = ersip_dialog:uac_request(reinvite_sipmsg(), AliceDialog),
    RouteAlice = ersip_sipmsg:get(route, ReInviteFromAlice),
    ?assertEqual(ersip_uri:make(<<"sip:bob@192.0.2.4">>), ersip_sipmsg:ruri(ReInviteFromAlice)),
    %% 1. The route set MUST be set to the list of URIs in the
    %%    Record-Route header field from the response, taken in
    %%    reverse order and preserving all URI parameters.
    %% 2. If the route set is not empty, and the first URI in the
    %%    route set contains the lr parameter, the UAC MUST place the
    %%    remote target URI into the Request-URI and MUST include a
    %%    Route header field containing the route set values in order.
    ?assertEqual(ersip_uri:make(<<"sip:atlanta.com;lr">>), ersip_hdr_route:uri(ersip_route_set:first(RouteAlice))),
    ?assertEqual(ersip_uri:make(<<"sip:biloxi.com;lr">>), ersip_hdr_route:uri(ersip_route_set:last(RouteAlice))),
    ok.

%% Create dialogs with a strict-routing (no lr parameter) record-route
%% set and check RURI/Route construction on both sides.
strict_routing_dialog_test() ->
    {BobDialog, AliceDialog} = create_uas_uac_dialogs(invite_request(), fun strict_route/2),
    %% If the route set is not empty, and its first URI does not
    %% contain the lr parameter, the UAC MUST place the first URI from
    %% the route set into the Request-URI, stripping any parameters
    %% that are not allowed in a Request-URI.  The UAC MUST add a
    %% Route header field containing the remainder of the route set
    %% values in order, including all parameters.  The UAC MUST then
    %% place the remote target URI into the Route header field as the
    %% last value.
    {_, ReInviteFromBob} = ersip_dialog:uac_request(reinvite_sipmsg(), BobDialog),
    RouteBob = ersip_sipmsg:get(route, ReInviteFromBob),
    ?assertEqual(ersip_uri:make(<<"sip:biloxi.com">>), ersip_sipmsg:ruri(ReInviteFromBob)),
    ?assertEqual(ersip_uri:make(<<"sip:atlanta.com">>), ersip_hdr_route:uri(ersip_route_set:first(RouteBob))),
    ?assertEqual(ersip_uri:make(<<"sip:">>), ersip_hdr_route:uri(ersip_route_set:last(RouteBob))),
    {_, ReInviteFromAlice} = ersip_dialog:uac_request(reinvite_sipmsg(), AliceDialog),
    RouteAlice = ersip_sipmsg:get(route, ReInviteFromAlice),
    ?assertEqual(ersip_uri:make(<<"sip:atlanta.com">>), ersip_sipmsg:ruri(ReInviteFromAlice)),
    ?assertEqual(ersip_uri:make(<<"sip:biloxi.com">>), ersip_hdr_route:uri(ersip_route_set:first(RouteAlice))),
    ?assertEqual(ersip_uri:make(<<"sip:bob@192.0.2.4">>), ersip_hdr_route:uri(ersip_route_set:last(RouteAlice))),
    ok.
%% A target refresh request (re-INVITE with a new Contact) must update
%% the remote target on both sides of the dialog.
%% NOTE(review): NewAliceContact looks truncated to <<"sip:">> (URI
%% user@host apparently stripped) — verify against VCS history.
target_refresh_test() ->
{BobDialog, AliceDialog} = create_uas_uac_dialogs(invite_request()),
NewBobContact = <<"sip:bob-new@192.0.2.5">>,
{BobDialog1, ReInviteFromBob} = ersip_dialog:uac_request(reinvite_sipmsg(#{contact => NewBobContact}), BobDialog),
{ok, AliceDialogRefreshed} = ersip_dialog:uas_process(ReInviteFromBob, target_refresh, AliceDialog),
AliceReInviteResp0 = ersip_sipmsg:reply(200, ReInviteFromBob),
NewAliceContact = <<"sip:">>,
AliceReInviteResp = ersip_sipmsg:set(contact, make_contact(NewAliceContact), AliceReInviteResp0),
{ok, BobDialogRefreshed} = ersip_dialog:uac_trans_result(AliceReInviteResp, target_refresh, BobDialog1),
?assertEqual(ersip_uri:make(NewAliceContact), remote_target(BobDialogRefreshed)),
?assertEqual(ersip_uri:make(NewBobContact), remote_target(AliceDialogRefreshed)),
ok.
%% A dialog-forming request with a star ("*") Contact must be rejected
%% by ersip_dialog:uas_verify/1 with a 400 (Bad Request) reply.
neg_400_on_star_contact_test() ->
InvSipMsg0 = ersip_request:sipmsg(invite_request()),
InvSipMsg = ersip_sipmsg:set(contact, ersip_hdr_contact_list:make_star(), InvSipMsg0),
?assertMatch({reply, _}, ersip_dialog:uas_verify(InvSipMsg)),
{reply, Resp400} = ersip_dialog:uas_verify(InvSipMsg),
?assertEqual(400, ersip_sipmsg:status(Resp400)),
ok.
%% A dialog-forming request with more than one Contact must be
%% rejected with a 400 reply.
neg_400_on_multiple_contact_test() ->
InvSipMsg0 = ersip_request:sipmsg(invite_request()),
ContactList = [ersip_hdr_contact:make(<<"sip:">>), ersip_hdr_contact:make(<<"sip:bob@192.0.2.4">>)],
InvSipMsg = ersip_sipmsg:set(contact, ContactList, InvSipMsg0),
?assertMatch({reply, _}, ersip_dialog:uas_verify(InvSipMsg)),
{reply, Resp400} = ersip_dialog:uas_verify(InvSipMsg),
?assertEqual(400, ersip_sipmsg:status(Resp400)),
ok.
%% A dialog-forming request with no Contact at all must be rejected
%% with a 400 reply.
neg_400_on_no_contact_test() ->
InvSipMsg0 = ersip_request:sipmsg(invite_request()),
ContactList = [],
InvSipMsg = ersip_sipmsg:set(contact, ContactList, InvSipMsg0),
?assertMatch({reply, _}, ersip_dialog:uas_verify(InvSipMsg)),
{reply, Resp400} = ersip_dialog:uas_verify(InvSipMsg),
?assertEqual(400, ersip_sipmsg:status(Resp400)),
ok.
%% A malformed Record-Route header must cause a 400 reply.
neg_400_on_bad_record_route_test() ->
InvSipMsg = create_sipmsg(invite_request_bin(#{record_route => <<"aaaa">>}), make_default_source(), []),
?assertMatch({reply, _}, ersip_dialog:uas_verify(InvSipMsg)),
{reply, Resp400} = ersip_dialog:uas_verify(InvSipMsg),
?assertEqual(400, ersip_sipmsg:status(Resp400)),
ok.
%% Well-formed requests (with or without Record-Route) pass
%% uas_verify/1.
uas_verify_test() ->
InvSipMsg1 = create_sipmsg(invite_request_bin(#{}), make_default_source(), []),
?assertEqual(ok, ersip_dialog:uas_verify(InvSipMsg1)),
InvSipMsg2 = create_sipmsg(invite_request_bin(#{record_route => <<"<sip:atlanta.com>">>}), make_default_source(), []),
?assertEqual(ok, ersip_dialog:uas_verify(InvSipMsg2)),
ok.
%% Dialog termination rules for in-dialog transaction results.
uac_trans_result_terminates_dialog_test() ->
    {BobDialog, _} = create_uas_uac_dialogs(invite_request()),
    {BobDialog1, ReInviteFromBob} = ersip_dialog:uac_request(reinvite_sipmsg(), BobDialog),
    %% If the response for a request within a dialog is a 481
    %% (Call/Transaction Does Not Exist) or a 408 (Request Timeout),
    %% the UAC SHOULD terminate the dialog.  A UAC SHOULD also
    %% terminate a dialog if no response at all is received for the
    %% request (the client transaction would inform the TU about the
    %% timeout.)
    %% 1. 481
    AliceReInviteResp481 = ersip_sipmsg:reply(481, ReInviteFromBob),
    ?assertEqual(terminate_dialog, ersip_dialog:uac_trans_result(AliceReInviteResp481, target_refresh, BobDialog1)),
    %% 2. 408
    AliceReInviteResp408 = ersip_sipmsg:reply(408, ReInviteFromBob),
    ?assertEqual(terminate_dialog, ersip_dialog:uac_trans_result(AliceReInviteResp408, target_refresh, BobDialog1)),
    %% 3. timeout
    ?assertEqual(terminate_dialog, ersip_dialog:uac_trans_result(timeout, target_refresh, BobDialog1)),
    %% The dialog is not terminated on other response codes:
    [begin
         Reply = ersip_sipmsg:reply(Code, ReInviteFromBob),
         ?assertMatch({ok, _}, ersip_dialog:uac_trans_result(Reply, target_refresh, BobDialog1))
     end || Code <- [200, 299, 400, 407, 409, 499, 500, 599, 600, 699]],
    ok.
%% When a UAS receives a target refresh request, it MUST replace the
%% dialog's remote target URI with the URI from the Contact header
%% field in that request, if present.  With no Contact, the remote
%% target must stay unchanged on both sides.
no_contact_means_no_refresh_test() ->
    NoContact = <<>>,
    {BobDialog, AliceDialog} = create_uas_uac_dialogs(invite_request()),
    BobRURI = remote_target(AliceDialog),
    AliceRURI = remote_target(BobDialog),
    {BobDialog1, ReInviteFromBob} = ersip_dialog:uac_request(reinvite_sipmsg(#{contact => NoContact}), BobDialog),
    {ok, AliceDialogAfter} = ersip_dialog:uas_process(ReInviteFromBob, target_refresh, AliceDialog),
    AliceReInviteResp0 = ersip_sipmsg:reply(200, ReInviteFromBob),
    AliceReInviteResp = ersip_sipmsg:remove(contact, AliceReInviteResp0),
    {ok, BobDialogAfter} = ersip_dialog:uac_trans_result(AliceReInviteResp, target_refresh, BobDialog1),
    ?assertEqual(AliceRURI, remote_target(BobDialogAfter)),
    ?assertEqual(BobRURI, remote_target(AliceDialogAfter)),
    ok.
%% A non-target-refresh ("regular") request must not change the remote
%% target, even when it carries a new Contact header.
%% NOTE(review): NewAliceContact looks truncated to <<"sip:">> (URI
%% user@host apparently stripped) — verify against VCS history.
regular_requests_means_no_refresh_test() ->
{BobDialog, AliceDialog} = create_uas_uac_dialogs(invite_request()),
BobRURI = remote_target(AliceDialog),
AliceRURI = remote_target(BobDialog),
NewBobContact = <<"sip:bob-new@192.0.2.5">>,
{BobDialog1, InfoFromBob} = ersip_dialog:uac_request(info_sipmsg(#{contact => NewBobContact}), BobDialog),
{ok, AliceDialogAfter} = ersip_dialog:uas_process(InfoFromBob, regular, AliceDialog),
AliceInfoResp0 = ersip_sipmsg:reply(200, InfoFromBob),
NewAliceContact = <<"sip:">>,
AliceInfoResp = ersip_sipmsg:set(contact, make_contact(NewAliceContact), AliceInfoResp0),
{ok, BobDialogAfter} = ersip_dialog:uac_trans_result(AliceInfoResp, regular, BobDialog1),
?assertEqual(AliceRURI, remote_target(BobDialogAfter)),
?assertEqual(BobRURI, remote_target(AliceDialogAfter)),
ok.
%% A star or unparsable Contact in a target-refresh request must be
%% rejected by the UAS side with a 400 (via check_400_uas_resp/2).
bad_request_on_bad_contact_test() ->
{BobDialog, AliceDialog} = create_uas_uac_dialogs(invite_request()),
{_, ReInviteFromBob0} = ersip_dialog:uac_request(reinvite_sipmsg(), BobDialog),
check_400_uas_resp(ersip_sipmsg:set(contact, star, ReInviteFromBob0), AliceDialog),
check_400_uas_resp(ersip_sipmsg:set(contact, make_contact(<<"unknown:x.y">>), ReInviteFromBob0), AliceDialog),
ok.
%% A bad Contact in a response is ignored by the UAC: the dialog state
%% is left unchanged (note the match against the same BobDialog1).
bad_contact_is_ignored_by_uac_test() ->
{BobDialog, _AliceDialog} = create_uas_uac_dialogs(invite_request()),
{BobDialog1, ReInviteFromBob} = ersip_dialog:uac_request(reinvite_sipmsg(), BobDialog),
AliceReInviteResp0 = ersip_sipmsg:reply(200, ReInviteFromBob),
AliceReInviteResp = ersip_sipmsg:set(contact, star, AliceReInviteResp0),
{ok, BobDialog1} = ersip_dialog:uac_trans_result(AliceReInviteResp, target_refresh, BobDialog1),
ok.
%% Check that a second provisional response does not change the state
%% of the dialog on the UAS side.
second_provisional_response_test() ->
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp180UAS = invite_reply(180, InvSipMsg),
    {UASDialogEarly, _} = ersip_dialog:uas_new(InvSipMsg, InvResp180UAS),
    ?assertMatch({UASDialogEarly, _}, ersip_dialog:uas_pass_response(InvSipMsg, InvResp180UAS, UASDialogEarly)),
    ok.
%% The URI provided in the Contact header field MUST be a SIP or SIPS
%% URI.  If the request that initiated the dialog contained a SIPS URI
%% in the Request-URI or in the top Record-Route header field value,
%% the Contact header field in the response MUST be a SIPS URI.
%% NOTE(review): cases 2 and 6 only bind InvResp200Sips without an
%% accompanying uas_new assertion — possibly lines were lost; verify
%% against VCS history.
uas_check_contact_test() ->
    InvSipMsg0 = ersip_request:sipmsg(invite_request()),
    InvSipMsg = ersip_sipmsg:set_ruri(ersip_uri:make(<<"sips:">>), InvSipMsg0),
    InvResp200 = invite_reply(200, InvSipMsg),
    %% 1. Check that we cannot create a dialog with a SIP contact URI:
    InvResp200Sip = ersip_sipmsg:set(contact, make_contact(<<"sip:bob@192.0.2.4">>), InvResp200),
    ?assertError({cannot_create_dialog, _}, ersip_dialog:uas_new(InvSipMsg, InvResp200Sip)),
    %% 2. Check that we can create a dialog with a SIPS contact URI:
    InvResp200Sips = ersip_sipmsg:set(contact, make_contact(<<"sips:bob@192.0.2.4">>), InvResp200),
    %% 3. Check that we cannot create a dialog with a star contact:
    InvResp200Star = ersip_sipmsg:set(contact, star, InvResp200),
    ?assertError({cannot_create_dialog, _}, ersip_dialog:uas_new(InvSipMsg, InvResp200Star)),
    %% 4. Check that we cannot create a dialog with a star contact in
    %%    the request:
    InvSipMsgStar = ersip_sipmsg:set(contact, star, InvSipMsg),
    ?assertError({cannot_create_dialog, _}, ersip_dialog:uas_new(InvSipMsgStar, InvResp200Sip)),
    %% 5. Check that if the top Record-Route contains a SIPS URI then
    %%    a SIP contact is rejected:
    InvSipMsgSIPSRR = set_routes(record_route, [<<"sip:atlanta.com;lr">>, <<"sips:biloxi.com;lr">>], InvSipMsg),
    ?assertError({cannot_create_dialog, _}, ersip_dialog:uas_new(InvSipMsgSIPSRR, InvResp200Sip)),
    %% 6. Check that we can create a dialog with a SIPS URI:
    InvResp200Sips = ersip_sipmsg:set(contact, make_contact(<<"sips:bob@192.0.2.4">>), InvResp200),
    %% 7. Bad contact format:
    InvSipMsgBadContct = ersip_request:sipmsg(invite_request(#{contact => <<"@">>})),
    ?assertError({cannot_create_dialog, _}, ersip_dialog:uas_new(InvSipMsgBadContct, InvResp200)),
    ok.
%% If the request has a Request-URI or a topmost Route header field
%% value with a SIPS URI, the Contact header field MUST contain a
%% SIPS URI.
uac_check_contact_test() ->
    InvReq1 = invite_request(#{ruri => <<"sips:">>,
                               contact => <<"sip:">>}),
    check_new_uac_error(InvReq1),
    InvReq2 = invite_request(#{ruri => <<"sip:">>,
                               contact => <<"sip:">>,
                               route => [<<"sips:biloxi.com">>]}),
    check_new_uac_error(InvReq2),
    InvReq3 = invite_request(#{ruri => <<"sips:">>,
                               contact => <<"sips:">>}),
    check_new_uac_ok(InvReq3),
    InvReq4 = invite_request(#{ruri => <<"sip:">>,
                               contact => <<"sips:">>,
                               route => [<<"sips:biloxi.com">>]}),
    check_new_uac_ok(InvReq4),
    ok.
%% Passing a 2xx response through an already-confirmed UAS dialog must
%% leave the dialog state unchanged (match against the same BobDialog).
uas_update_after_confirmed_test() ->
{BobDialog, _} = create_uas_uac_dialogs(invite_request()),
InvSipMsg = ersip_request:sipmsg(invite_request()),
Resp200 = invite_reply(200, InvSipMsg),
?assertMatch({BobDialog, _RespSipMsg}, ersip_dialog:uas_pass_response(InvSipMsg, Resp200, BobDialog)),
ok.
%% The "secure" flag of the dialog (RFC 3261 sections 12.1.1/12.1.2).
is_secure_test() ->
    InvSipMsg = create_sipmsg(invite_request_bin(#{ruri => <<"sips:">>,
                                                   contact => <<"sips:">>}),
                              tls_source(default_peer()), []),
    Target = ersip_uri:make(<<"sips:127.0.0.1;transport=tls">>),
    InvReq = ersip_request:new(InvSipMsg, ersip_branch:make_random(7), Target),
    InvResp200 = invite_reply(200, InvSipMsg),
    InvResp200Sips = ersip_sipmsg:set(contact, make_contact(<<"sips:bob@192.0.2.4">>), InvResp200),
    %% 12.1.1 UAS behavior:
    %% If the request arrived over TLS, and the Request-URI contained
    %% a SIPS URI, the "secure" flag is set to TRUE.
    {BobDialog, InvResp} = ersip_dialog:uas_new(InvSipMsg, InvResp200Sips),
    ?assertEqual(true, ersip_dialog:is_secure(BobDialog)),
    %% 12.1.2 UAC behavior:
    %% If the request was sent over TLS, and the Request-URI contained
    %% a SIPS URI, the "secure" flag is set to TRUE.
    {ok, AliceDialog} = ersip_dialog:uac_new(InvReq, InvResp),
    ?assertEqual(true, ersip_dialog:is_secure(AliceDialog)),
    ok.
%% With an undefined message source the dialog must not be marked
%% secure even though both RURI and Contact are SIPS URIs.
check_no_secure_when_on_undefined_source_test() ->
InvSipMsg = create_sipmsg(invite_request_bin(#{ruri => <<"sips:">>,
contact => <<"sips:">>}),
undefined, []),
InvResp200 = invite_reply(200, InvSipMsg),
InvResp200Sips = ersip_sipmsg:set(contact, make_contact(<<"sips:bob@192.0.2.4">>), InvResp200),
{BobDialog, _} = ersip_dialog:uas_new(InvSipMsg, InvResp200Sips),
?assertEqual(false, ersip_dialog:is_secure(BobDialog)),
ok.
%% If a provisional response to INVITE carries no Contact, the UAC
%% falls back to the request's next hop as the dialog target.
uac_create_dialog_no_contact_in_resp_test() ->
InvReq = invite_request(),
InvSipMsg = ersip_request:sipmsg(InvReq),
InvResp180UAS_0 = invite_reply(180, InvSipMsg),
InvResp180UAS = ersip_sipmsg:set(contact, [], InvResp180UAS_0),
?assertMatch({ok, _}, ersip_dialog:uac_new(InvReq, InvResp180UAS)),
{ok, Dialog} = ersip_dialog:uac_new(InvReq, InvResp180UAS),
?assertEqual(ersip_request:nexthop(InvReq), ersip_dialog:target(Dialog)),
ok.
%% The UAS side can also create a dialog when its own response has no
%% Contact header (must not crash).
uas_create_dialog_no_contact_in_resp_test() ->
InvReq = invite_request(),
InvSipMsg = ersip_request:sipmsg(InvReq),
InvResp180_0 = invite_reply(180, InvSipMsg),
InvResp180 = ersip_sipmsg:set(contact, [], InvResp180_0),
{_, _} = ersip_dialog:uas_new(InvSipMsg, InvResp180),
ok.
%% A final negative (487) response passed through an early UAS dialog
%% terminates the dialog.
uas_negative_response_terminate_test() ->
InvReq = invite_request(),
InvSipMsg = ersip_request:sipmsg(InvReq),
InvResp180 = invite_reply(180, InvSipMsg),
InvResp487 = invite_reply(487, InvSipMsg),
{Dialog, _} = ersip_dialog:uas_new(InvSipMsg, InvResp180),
?assertEqual(terminate_dialog, ersip_dialog:uas_pass_response(InvSipMsg, InvResp487, Dialog)),
ok.
%% In-dialog requests in a NOTIFY-created dialog advance the local
%% CSeq by one.
uac_notify_dialog_test() ->
{_, UACDialog0} = notify_create_uas_uac_dialogs(notify_request()),
{_, NotifyInviteSipMsg} = ersip_dialog:uac_request(notify_sipmsg(), UACDialog0),
?assertEqual(cseq_number(notify_sipmsg())+1, cseq_number(NotifyInviteSipMsg)),
ok.
%% Check that a provisional response creates dialogs (UAC/UAS) in the
%% early state, and that in-dialog messages (PRACK, INFO, ...) in an
%% early dialog do not switch the dialog state to confirmed.
early_dialog_test() ->
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp180UAS = invite_reply(180, InvSipMsg),
    {UASDialogEarly, InvResp180UAC} = ersip_dialog:uas_new(InvSipMsg, InvResp180UAS),
    ?assertEqual(true, ersip_dialog:is_early(UASDialogEarly)),
    {ok, UACDialogEarly} = ersip_dialog:uac_new(InvReq, InvResp180UAC),
    ?assertEqual(true, ersip_dialog:is_early(UACDialogEarly)),
    {UACDialogEarly1, InfoSipMsg} = ersip_dialog:uac_request(info_sipmsg(#{}), UACDialogEarly),
    ?assertEqual(true, ersip_dialog:is_early(UACDialogEarly1)),
    {ok, UASDialogEarly1} = ersip_dialog:uas_process(InfoSipMsg, regular, UASDialogEarly),
    ?assertEqual(true, ersip_dialog:is_early(UASDialogEarly1)),
    InfoResp200UAS = ersip_sipmsg:reply(200, InfoSipMsg),
    {UASDialogEarly2, InfoResp200UAC} = ersip_dialog:uas_pass_response(InfoSipMsg, InfoResp200UAS, UASDialogEarly1),
    ?assertEqual(true, ersip_dialog:is_early(UASDialogEarly2)),
    {ok, UACDialogEarly2} = ersip_dialog:uac_trans_result(InfoResp200UAC, regular, UACDialogEarly1),
    ?assertEqual(true, ersip_dialog:is_early(UACDialogEarly2)),
    %% Check that ersip_dialog:uac_update works the same way as
    %% uac_trans_result for in-(early-)dialog requests:
    ?assertEqual({ok, UACDialogEarly2}, ersip_dialog:uac_update(InfoResp200UAC, UACDialogEarly1)),
    %% A final 2xx confirms the dialog on both sides:
    InvResp200UAS = invite_reply(200, InvSipMsg),
    {UASDialogConfirmed, InvResp200UAC} = ersip_dialog:uas_pass_response(InvSipMsg, InvResp200UAS, UASDialogEarly2),
    ?assertEqual(false, ersip_dialog:is_early(UASDialogConfirmed)),
    {ok, UACDialogConfirmed} = ersip_dialog:uac_update(InvResp200UAC, UACDialogEarly2),
    ?assertEqual(false, ersip_dialog:is_early(UACDialogConfirmed)),
    ok.
Check that UAC terminates early dialog on non2xx response
%% A final non-2xx answer to the INVITE must terminate the early dialog
%% on both the UAS and the UAC side.
uac_terminates_early_dialog_on_non2xx_response_test() ->
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp180UAS = invite_reply(180, InvSipMsg),
    {UASDialogEarly, InvResp180UAC} = ersip_dialog:uas_new(InvSipMsg, InvResp180UAS),
    {ok, UACDialogEarly} = ersip_dialog:uac_new(InvReq, InvResp180UAC),
    InvResp400 = invite_reply(400, InvSipMsg),
    ?assertEqual(terminate_dialog, ersip_dialog:uas_pass_response(InvSipMsg, InvResp400, UASDialogEarly)),
    ?assertEqual(terminate_dialog, ersip_dialog:uac_update(InvResp400, UACDialogEarly)),
    ok.
%% Check that UAC terminates an early dialog on transaction timeout.
%% An INVITE transaction timeout must terminate the UAC's early dialog.
uac_terminates_early_dialog_on_timeout_test() ->
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp180UAS = invite_reply(180, InvSipMsg),
    {_, InvResp180UAC} = ersip_dialog:uas_new(InvSipMsg, InvResp180UAS),
    {ok, UACDialogEarly} = ersip_dialog:uac_new(InvReq, InvResp180UAC),
    ?assertEqual(terminate_dialog, ersip_dialog:uac_update(timeout, UACDialogEarly)),
    ok.
%% uas_create + uas_pass_response must be equivalent to uas_new for a
%% final 2xx (confirmed dialog) ...
uas_create_pass_response_pair_confirmed_test() ->
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp200 = invite_reply(200, InvSipMsg),
    {UASDialog, UACResp200} = ersip_dialog:uas_new(InvSipMsg, InvResp200),
    ?assertEqual(UASDialog, ersip_dialog:uas_create(InvSipMsg, InvResp200)),
    ?assertEqual({UASDialog, UACResp200}, ersip_dialog:uas_pass_response(InvSipMsg, InvResp200, UASDialog)),
    ok.
%% ... and for a provisional 180 (early dialog).
uas_create_pass_response_pair_early_test() ->
    InvReq = invite_request(),
    InvSipMsg = ersip_request:sipmsg(InvReq),
    InvResp180 = invite_reply(180, InvSipMsg),
    {UASDialog, UACResp180} = ersip_dialog:uas_new(InvSipMsg, InvResp180),
    ?assertEqual(UASDialog, ersip_dialog:uas_create(InvSipMsg, InvResp180)),
    ?assertEqual({UASDialog, UACResp180}, ersip_dialog:uas_pass_response(InvSipMsg, InvResp180, UASDialog)),
    ok.
%% SIPS in the Request-URI combined with a plain SIP URI in Contact.
%% If the request used a SIPS Request-URI, a non-SIPS Contact in the
%% response must make dialog creation fail with cannot_create_dialog.
uas_create_pass_response_pair_sip_schema_mismatch_test() ->
    InvReq = invite_request(),
    InvSipMsg0 = ersip_request:sipmsg(InvReq),
    InvSipMsg = ersip_sipmsg:set_ruri(ersip_uri:make(<<"sips:">>), InvSipMsg0),
    InvResp200SIPS = invite_reply(200, InvSipMsg),
    InvResp200 = ersip_sipmsg:set(contact, make_contact(<<"sip:bob@192.0.2.4">>), InvResp200SIPS),
    ?assertError({cannot_create_dialog, _}, ersip_dialog:uas_create(InvSipMsg, InvResp200)),
    ok.
-define(crlf, "\r\n").
%% Build an out-of-dialog INVITE as an ersip_request with a random
%% branch, targeted at 127.0.0.1.  The zero-arity form delegates to
%% invite_request/1 with empty options (invite_request_bin() is defined
%% as invite_request_bin(#{}), so behavior is unchanged).
invite_request() ->
    invite_request(#{}).
%% Opts are forwarded to invite_request_bin/1 (ruri, record_route,
%% contact, route).
invite_request(Opts) ->
    InvSipMsg = create_sipmsg(invite_request_bin(Opts), make_default_source(), []),
    Target = ersip_uri:make(<<"sip:127.0.0.1">>),
    ersip_request:new(InvSipMsg, ersip_branch:make_random(7), Target).
invite_request_bin() ->
    invite_request_bin(#{}).
%% Build a raw INVITE binary.  Recognized Options:
%%   ruri         - Request-URI (binary)
%%   record_route - single Record-Route header value
%%   contact      - Contact value; <<>> suppresses the header entirely
%%   route        - list of Route header values
invite_request_bin(Options) ->
    RURI = maps:get(ruri, Options, <<"sip:">>),
    RecordRoute = case Options of
                      #{record_route := RR} ->
                          <<"Record-Route: ", RR/binary, ?crlf>>;
                      _ ->
                          <<>>
                  end,
    Contact = case Options of
                  #{contact := <<>>} ->
                      %% explicitly empty => no Contact header at all
                      <<>>;
                  #{contact := ContactVal} ->
                      <<"Contact: ", ContactVal/binary, ?crlf>>;
                  _ ->
                      <<"Contact: <sip:>", ?crlf>>
              end,
    Route = case Options of
                #{route := Routes} ->
                    IORoutes = [<<"Route: ", R/binary, ?crlf>> || R <- Routes],
                    iolist_to_binary(IORoutes);
                _ ->
                    <<>>
            end,
    <<"INVITE ", RURI/binary, " SIP/2.0" ?crlf
      "Via: SIP/2.0/UDP pc33.atlanta.com;branch=z9hG4bKnashds8" ?crlf
      "Max-Forwards: 70" ?crlf
      "To: Bob <sip:>" ?crlf
      "From: Alice <sip:>;tag=1928301774" ?crlf
      "Call-ID: a84b4c76e66710" ?crlf
      "CSeq: 314159 INVITE" ?crlf,
      Contact/binary,
      RecordRoute/binary,
      Route/binary,
      "Content-Type: application/sdp" ?crlf
      "Content-Length: 4" ?crlf
      ?crlf
      "Test">>.
%% Reply to an INVITE with Code and attach Bob's Contact to the response.
invite_reply(Code, InvSipMsg) ->
    BobContact = make_contact(<<"sip:bob@192.0.2.4">>),
    ersip_sipmsg:set(contact, BobContact, ersip_sipmsg:reply(Code, InvSipMsg)).
%% Establish a UAS/UAC dialog pair: 180 creates the early dialogs, the
%% INVITE 200 confirms them.  ProxyFun(request | response, SipMsg) lets
%% callers mangle messages in transit (routing tests); the default is
%% the identity.
create_uas_uac_dialogs(Req) ->
    create_uas_uac_dialogs(Req, fun(_, ReqResp) -> ReqResp end).
create_uas_uac_dialogs(Req, ProxyFun) ->
    InvSipMsg0 = ersip_request:sipmsg(Req),
    InvSipMsg = ProxyFun(request, InvSipMsg0),
    InvResp180UAS = invite_reply(180, InvSipMsg),
    {UASDialogEarly, InvResp180UAC0} = ersip_dialog:uas_new(InvSipMsg, InvResp180UAS),
    InvResp180UAC = ProxyFun(response, InvResp180UAC0),
    ?assertMatch({ok, _}, ersip_dialog:uac_new(Req, InvResp180UAC)),
    {ok, UACDialogEarly} = ersip_dialog:uac_new(Req, InvResp180UAC),
    InvResp200UAS = invite_reply(200, InvSipMsg),
    {UASDialogConfirmed, InvResp200UAC0} = ersip_dialog:uas_pass_response(InvSipMsg, InvResp200UAS, UASDialogEarly),
    %% Feed the response produced by uas_pass_response (not the raw UAS
    %% reply) through the proxy, mirroring the 180 handling above.
    InvResp200UAC = ProxyFun(response, InvResp200UAC0),
    {ok, UACDialogConfirmed} = ersip_dialog:uac_update(InvResp200UAC, UACDialogEarly),
    {UASDialogConfirmed, UACDialogConfirmed}.
%% Out-of-dialog NOTIFY as an ersip_request with a random branch.
notify_request() ->
    NotifySipMsg = create_sipmsg(notify_request_bin(), make_default_source(), []),
    Target = ersip_uri:make(<<"sip:127.0.0.1">>),
    ersip_request:new(NotifySipMsg, ersip_branch:make_random(7), Target).
notify_request_bin() ->
    notify_request_bin(#{}).
%% Build a raw NOTIFY binary.  Recognized Options: ruri.
%% Fixes: the Subscription-State header lacked its CRLF (adjacent
%% string literals concatenate, so it ran into Content-Type), and the
%% CSeq method said INVITE for a NOTIFY request (RFC 3261 requires the
%% CSeq method to match the request method).
notify_request_bin(Options) ->
    RURI = maps:get(ruri, Options, <<"sip:">>),
    Contact = <<"Contact: <sip:>", ?crlf>>,
    <<"NOTIFY ", RURI/binary, " SIP/2.0" ?crlf
      "Via: SIP/2.0/UDP pc33.atlanta.com;branch=z9hG4bKnashds8" ?crlf
      "Max-Forwards: 70" ?crlf
      "To: Bob <sip:>" ?crlf
      "From: Alice <sip:>;tag=1928301774" ?crlf
      "Call-ID: a84b4c76e66710" ?crlf
      "CSeq: 314159 NOTIFY" ?crlf,
      Contact/binary,
      "Subscription-State: active;expires=3600" ?crlf
      "Content-Type: text/plain" ?crlf
      "Content-Length: 4" ?crlf
      ?crlf
      "Test">>.
notify_sipmsg() ->
    create_sipmsg(notify_request_bin(), make_default_source(), []).
%% Establish a UAS/UAC dialog pair directly from a NOTIFY 200 (no early
%% phase).  invite_reply merely attaches a Contact to a Code reply, so
%% reusing it for NOTIFY is fine here.
notify_create_uas_uac_dialogs(Req) ->
    notify_create_uas_uac_dialogs(Req, fun(_, ReqResp) -> ReqResp end).
notify_create_uas_uac_dialogs(Req, ProxyFun) ->
    NotifySipMsg0 = ersip_request:sipmsg(Req),
    NotifySipMsg = ProxyFun(request, NotifySipMsg0),
    NotifyResp200UAS = invite_reply(200, NotifySipMsg),
    NotifyResp200UAC = ProxyFun(response, NotifyResp200UAS),
    {UASDialogConfirmed, _} = ersip_dialog:uas_new(NotifySipMsg, NotifyResp200UAS),
    {ok, UACDialogConfirmed} = ersip_dialog:uac_new(Req, NotifyResp200UAC),
    {UASDialogConfirmed, UACDialogConfirmed}.
%% Deliberately minimal BYE (no Via/From/To/Call-ID/CSeq); presumably
%% used to exercise in-dialog filling or error paths -- TODO confirm
%% against the callers.
bye_sipmsg() ->
    create_sipmsg(bye_bin(), make_default_source(), []).
bye_bin() ->
    <<"BYE sip: SIP/2.0" ?crlf
      "Max-Forwards: 70" ?crlf
      ?crlf>>.
reinvite_sipmsg() ->
    reinvite_sipmsg(#{}).
%% Partial re-INVITE (dialog fields like From/To/Call-ID are expected to
%% be filled in by the dialog).  UserOpts: cseq (binary), contact
%% (binary; <<>> suppresses the Contact header).
reinvite_sipmsg(UserOpts) ->
    FullOpts = maps:merge(#{cseq => <<"314160">>,
                            contact => <<"sip:">>},
                          UserOpts),
    #{cseq := CSeq,
      contact := ContactOpt
     } = FullOpts,
    Contact = case ContactOpt of
                  <<>> -> <<>>;
                  _ -> <<"Contact: ", ContactOpt/binary, ?crlf>>
              end,
    Bin =
        <<"INVITE sip: SIP/2.0" ?crlf
          "Max-Forwards: 70" ?crlf
          "Content-Type: application/sdp" ?crlf
          "Content-Length: 4" ?crlf,
          Contact/binary,
          "CSeq: ", CSeq/binary, " INVITE" ?crlf
          ?crlf
          "Test">>,
    create_sipmsg(Bin, make_default_source(), []).
%% Partial in-dialog INFO request.  UserOpts: cseq (binary), contact
%% (binary; <<>> suppresses the Contact header).
info_sipmsg(UserOpts) ->
    FullOpts = maps:merge(#{cseq => <<"314160">>,
                            contact => <<"sip:">>},
                          UserOpts),
    #{cseq := CSeq,
      contact := ContactOpt
     } = FullOpts,
    Contact = case ContactOpt of
                  <<>> -> <<>>;
                  _ -> <<"Contact: ", ContactOpt/binary, ?crlf>>
              end,
    Bin =
        <<"INFO sip: SIP/2.0" ?crlf
          "From: Alice <sip:>" ?crlf
          "Max-Forwards: 70" ?crlf,
          Contact/binary,
          "CSeq: ", CSeq/binary, " INFO" ?crlf
          ?crlf>>,
    create_sipmsg(Bin, make_default_source(), []).
%% Partial in-dialog ACK request (dialog fields to be filled in).
ack_sipmsg() ->
    Bin =
        <<"ACK sip: SIP/2.0" ?crlf
          "Max-Forwards: 70" ?crlf
          "Content-Type: application/sdp" ?crlf
          "Content-Length: 4" ?crlf
          "CSeq: 314160 ACK" ?crlf
          ?crlf
          "Test">>,
    create_sipmsg(Bin, make_default_source(), []).
%% Partial in-dialog CANCEL request (dialog fields to be filled in).
cancel_sipmsg() ->
    Bin =
        <<"CANCEL sip: SIP/2.0" ?crlf
          "Max-Forwards: 70" ?crlf
          "Content-Type: application/sdp" ?crlf
          "Content-Length: 4" ?crlf
          "CSeq: 314160 CANCEL" ?crlf
          ?crlf
          "Test">>,
    create_sipmsg(Bin, make_default_source(), []).
%% Message source descriptors.  ersip_source:new takes
%% (local address, peer, transport, transport-specific data).
make_default_source() ->
    tcp_source(default_peer()).
default_peer() ->
    {ersip_host:make({127, 0, 0, 1}), 5060}.
tcp_source(Peer) ->
    ersip_source:new(default_peer(), Peer, ersip_transport:tcp(), undefined).
tls_source(Peer) ->
    ersip_source:new(default_peer(), Peer, ersip_transport:tls(), undefined).
%% Parse a raw binary into an ersip_sipmsg originating from Source.
%% HeadersToParse limits eager header parsing ([] = parse lazily).
%% Crashes (badmatch) on malformed input, which is fine for tests.
create_sipmsg(Msg, Source, HeadersToParse) when is_binary(Msg) ->
    P = ersip_parser:new_dgram(Msg),
    {{ok, PMsg}, _P2} = ersip_parser:parse(P),
    PMsg1 = ersip_msg:set_source(Source, PMsg),
    {ok, SipMsg} = ersip_sipmsg:parse(PMsg1, HeadersToParse),
    SipMsg.
%% Wrap a single Contact header value into the list form ersip expects.
make_contact(ContactBin) when is_binary(ContactBin) ->
    [ersip_hdr_contact:make(ContactBin)].
%% Drop the tag parameter from the From or To header of SipMsg.
clear_tag(H, SipMsg) when H == from; H == to ->
    FromOrTo0 = ersip_sipmsg:get(H, SipMsg),
    FromOrTo = ersip_hdr_fromto:set_tag(undefined, FromOrTo0),
    ersip_sipmsg:set(H, FromOrTo, SipMsg).
%% Read the numeric part of the CSeq header.
cseq_number(SipMsg) ->
    ersip_hdr_cseq:number(ersip_sipmsg:get(cseq, SipMsg)).
%% Replace the numeric part of the CSeq header.
set_cseq_number(Seq, Req) ->
    CSeq0 = ersip_sipmsg:get(cseq, Req),
    CSeq = ersip_hdr_cseq:set_number(Seq, CSeq0),
    ersip_sipmsg:set(cseq, CSeq, Req).
%% ProxyFun that installs a loose-routing Record-Route set (URIs with
%% the ;lr parameter) on requests and leaves responses untouched.
loose_route(request, ReqSipMsg) ->
    RRRoutes = [<<"sip:atlanta.com;lr">>, <<"sip:biloxi.com;lr">>],
    set_routes(record_route, RRRoutes, ReqSipMsg);
loose_route(response, RespSipMsg) ->
    RespSipMsg.
%% Same, but strict-routing URIs (no ;lr parameter).
strict_route(request, ReqSipMsg) ->
    RRRoutes = [<<"sip:atlanta.com">>, <<"sip:biloxi.com">>],
    set_routes(record_route, RRRoutes, ReqSipMsg);
strict_route(response, RespSipMsg) ->
    RespSipMsg.
%% Replace Header (route/record_route) of SipMsg with the given URIs.
set_routes(Header, Routes, SipMsg) ->
    RRSet0 = ersip_route_set:new(),
    RRSet = add_routes(Routes, RRSet0),
    ersip_sipmsg:set(Header, RRSet, SipMsg).
%% Fold the URI binaries into the route set; add_first means the first
%% URI of the list ends up deepest in the set, exactly as before.
add_routes(URIs, RouteSet) ->
    lists:foldl(fun(URI, Acc) ->
                        Route = ersip_hdr_route:new(ersip_uri:make(URI)),
                        ersip_route_set:add_first(Route, Acc)
                end,
                RouteSet, URIs).
%% Request-URI the UAC would use for the next in-dialog request.
remote_target(Dialog) ->
    {_, SipMsg} = ersip_dialog:uac_request(reinvite_sipmsg(), Dialog),
    ersip_sipmsg:ruri(SipMsg).
%% Assert that the UAS rejects Req within Dialog with a 400 response.
check_400_uas_resp(Req, Dialog) ->
    ?assertMatch({reply, _}, ersip_dialog:uas_process(Req, target_refresh, Dialog)),
    {reply, Resp400} = ersip_dialog:uas_process(Req, target_refresh, Dialog),
    ?assertEqual(400, ersip_sipmsg:status(Resp400)).
%% Assert that UAC dialog creation from Req fails on a 200 response.
check_new_uac_error(Req) ->
    InvSipMsg = ersip_request:sipmsg(Req),
    InvResp200 = invite_reply(200, InvSipMsg),
    ?assertMatch({error, _}, ersip_dialog:uac_new(Req, InvResp200)).
%% Assert that UAC dialog creation from Req succeeds on a 200 response.
check_new_uac_ok(Req) ->
    InvSipMsg = ersip_request:sipmsg(Req),
    InvResp200 = invite_reply(200, InvSipMsg),
    ?assertMatch({ok, _}, ersip_dialog:uac_new(Req, InvResp200)).
%% Remove the CSeq header entirely (exercises error paths).
del_cseq(SipMsg) ->
    ersip_sipmsg:remove(cseq, SipMsg).
|
1726660f93e221514fcc6ee19072a11d53a57dc65b7b865124f907ef935fb041 | donaldsonjw/bigloo | sha2.scm | ;*=====================================================================*/
* serrano / prgm / project / bigloo / runtime / Unsafe / sha2.scm * /
;* ------------------------------------------------------------- */
* Author : and * /
* Creation : Mon May 26 08:40:27 2008 * /
* Last change : Tue Jun 17 17:28:49 2014 ( serrano ) * /
* Copyright : 2008 - 14 , * /
;* ------------------------------------------------------------- */
;* SHA-256 Bigloo implementation */
;*=====================================================================*/
;; This code has been inspired by a C implementation written by
;;  and distributed under the following copyright:
;;
;;  FILE: sha2.c
;;  AUTHOR: Aaron D. Gifford <http://www.aarongifford.com/>
;;
;;  Copyright (c) 2000-2001, Aaron D. Gifford
;;  All rights reserved.
;;
;;  Redistribution and use in source and binary forms, with or without
;;  modification, are permitted provided that the following conditions
;;  are met:
;;  1. Redistributions of source code must retain the above copyright
;;     notice, this list of conditions and the following disclaimer.
;;  2. Redistributions in binary form must reproduce the above copyright
;;     notice, this list of conditions and the following disclaimer in the
;;     documentation and/or other materials provided with the distribution.
;;  3. Neither the name of the copyright holder nor the names of contributors
;;     may be used to endorse or promote products derived from this software
;;     without specific prior written permission.
;;
;;  THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTOR(S) ``AS IS'' AND
;;  ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
;;  IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
;;  ARE DISCLAIMED.  IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTOR(S) BE LIABLE
;;  FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
;;  DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
;;  OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
;;  HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT
;;  LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
;;  OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
;;  SUCH DAMAGE.
;;
;;  $Id: sha2.c,v 1.1 2001/11/08 00:01:51 adg Exp adg $
;*---------------------------------------------------------------------*/
;* The module */
;*---------------------------------------------------------------------*/
(module __sha2
(use __type
__bigloo
__bexit
__object
__thread
__rgc
__bit
__bignum
__r4_numbers_6_5
__r4_numbers_6_5_fixnum
__r4_numbers_6_5_flonum
__r4_numbers_6_5_flonum_dtoa
__r4_booleans_6_1
__r4_symbols_6_4
__r4_vectors_6_8
__r4_control_features_6_9
__r4_pairs_and_lists_6_3
__r4_characters_6_6
__r4_equivalence_6_2
__r4_strings_6_7
__r4_ports_6_10_1
__r4_input_6_10_2
__r5_control_features_6_4
__mmap
__foreign
__error
__evenv
__os
__structure)
(import __param
__hmac
__tvector)
(from __srfi4)
(export (sha256sum::bstring ::obj)
(sha256sum-string::bstring ::bstring)
(sha256sum-mmap::bstring ::mmap)
(sha256sum-port::bstring ::input-port)
(sha256sum-file::bstring ::bstring)
(hmac-sha256sum-string::bstring ::bstring ::bstring)))
;*---------------------------------------------------------------------*/
;* u32vector-set! ... */
;*---------------------------------------------------------------------*/
;; Store VAL (an ulong) into u32vector VEC at index I, truncating the
;; value to 32 bits via $ulong->uint32.  Shadows the srfi4 binding.
(define-macro (u32vector-set! vec i val)
   `((@ u32vector-set! __srfi4) ,vec ,i ($ulong->uint32 ,val)))
;*---------------------------------------------------------------------*/
;* u32vector-ref ... */
;*---------------------------------------------------------------------*/
;; Read VEC[I] back as an ulong (widening from uint32).
(define-macro (u32vector-ref vec i)
   `($uint32->ulong ((@ u32vector-ref __srfi4) ,vec ,i)))
;*---------------------------------------------------------------------*/
;* u32 ... */
;*---------------------------------------------------------------------*/
;; Assemble a 32-bit value from two 16-bit halves: (hi << 16) | lo.
(define-macro (u32 hi lo)
   `(bit-or (bit-lsh ,hi 16) ,lo))
;*---------------------------------------------------------------------*/
;*    u16 ...                                                          */
;*---------------------------------------------------------------------*/
;; Assemble a 16-bit value from two bytes: (hi << 8) | lo.
(define-macro (u16 hi lo)
   `(bit-or (bit-lsh ,hi 8) ,lo))
;*---------------------------------------------------------------------*/
;* u32-hi ... */
;*---------------------------------------------------------------------*/
;; High 16 bits of a 32-bit word.
(define-macro (u32-hi w)
   `(bit-ursh ,w 16))
;*---------------------------------------------------------------------*/
;*    u32-low ...                                                      */
;*---------------------------------------------------------------------*/
;; Low 16 bits of a 32-bit word ((1 << 16) - 1 is the #xffff mask).
(define-macro (u32-lo w)
   `(bit-and ,w (-fx (bit-lsh 1 16) 1)))
;*---------------------------------------------------------------------*/
;* addu32 ... */
;* ------------------------------------------------------------- */
;* unsigned binary addition */
;*---------------------------------------------------------------------*/
(define (addu32::ulong n1::ulong n2::ulong)
   ;; 32-bit addition performed on 16-bit halves so that no
   ;; intermediate grows past the fixnum range.  The high half is
   ;; masked to 16 bits (this also drops any garbage above bit 31 of
   ;; the inputs); a carry out of the low half may push the result one
   ;; bit past 32 -- that bit is discarded later, either by
   ;; $ulong->uint32 on store or by this same #xffff mask on the next
   ;; addition.
   (let* ((h1::ulong (bit-ursh n1 16))
	  (h2::ulong (bit-ursh n2 16))
	  (h::ulong (bit-and #xffff (+fx h1 h2)))
	  (l1::ulong (bit-and n1 #xffff))
	  (l2::ulong (bit-and n2 #xffff))
	  (l::ulong (+fx l1 l2))
	  (lh::ulong (bit-lsh h 16)))
      (+fx lh l)))
;*---------------------------------------------------------------------*/
;* u32+ ... */
;*---------------------------------------------------------------------*/
;; Variadic 32-bit addition: expands (u32+ a b c ...) into a chain of
;; binary addu32 calls; (u32+) is 0 and (u32+ a) is a.
(define-expander u32+
   (lambda (x e)
      (match-case x
	 (()
	  0)
	 ((?- ?a)
	  (e a e))
	 ((?- ?a . ?b)
	  (e `(addu32 ,a (u32+ ,@b)) e)))))
;*---------------------------------------------------------------------*/
;* rotr32 ... */
;*---------------------------------------------------------------------*/
(define (rotr32::ulong x::ulong n::int)
   ;; Rotate X right by N bits.  On 64-bit hosts (bit-lsh x (- 32 n))
   ;; can leave bits above bit 31; as with addu32 these stray bits are
   ;; discarded when values are stored ($ulong->uint32) or re-added.
   (bit-or (bit-ursh x n) (bit-lsh x (-fx 32 n))))
;*---------------------------------------------------------------------*/
;*    u32-fill! ...                                                    */
;*---------------------------------------------------------------------*/
(define (u32-fill! str offset w::ulong)
   ;; Write W at OFFSET of STR as 8 hex digits.  STR is pre-filled with
   ;; #\0 and each 16-bit half is right-aligned in its 4-character
   ;; slot, so leading zeros are preserved.
   (let* ((s1 (integer->string (u32-hi w) 16))
	  (l1 (string-length s1))
	  (s2 (integer->string (u32-lo w) 16))
	  (l2 (string-length s2)))
      (blit-string! s1 0 str (+fx offset (-fx 4 l1)) l1)
      (blit-string! s2 0 str (+fx offset (+fx 4 (-fx 4 l2))) l2)))
;*---------------------------------------------------------------------*/
;* state->string ... */
;*---------------------------------------------------------------------*/
(define (state->string state::u32vector)
   ;; Render the eight 32-bit state words as a 64-character hex digest.
   (let ((digest (make-string 64 #\0)))
      (do ((i 0 (+fx i 1)))
	  ((=fx i 8) digest)
	  (u32-fill! digest (*fx i 8) (u32vector-ref state i)))))
;*---------------------------------------------------------------------*/
;* bit-or* ... */
;*---------------------------------------------------------------------*/
;; N-ary bitwise OR: folds into nested binary bit-or calls.
(define-expander bit-or*
   (lambda (x e)
      (match-case x
	 ((?- ?a ?b)
	  (e `(bit-or ,a ,b) e))
	 ((?- ?a . ?b)
	  (e `(bit-or ,a (bit-or* ,@b)) e)))))
;*---------------------------------------------------------------------*/
;*    bit-xor* ...                                                     */
;*---------------------------------------------------------------------*/
;; N-ary bitwise XOR: folds into nested binary bit-xor calls.
(define-expander bit-xor*
   (lambda (x e)
      (match-case x
	 ((?- ?a ?b)
	  (e `(bit-xor ,a ,b) e))
	 ((?- ?a . ?b)
	  (e `(bit-xor ,a (bit-xor* ,@b)) e)))))
;*---------------------------------------------------------------------*/
;* K256 ... */
;*---------------------------------------------------------------------*/
;; The 64 SHA-256 round constants (FIPS 180-2, section 4.2.2): the
;; first 32 bits of the fractional parts of the cube roots of the
;; first 64 primes, stored as (u32 hi16 lo16) pairs.
(define K256
   (let ((v (make-u32vector 64)))
      (u32vector-set! v 0 (u32 #x428a #x2f98))
      (u32vector-set! v 1 (u32 #x7137 #x4491))
      (u32vector-set! v 2 (u32 #xb5c0 #xfbcf))
      (u32vector-set! v 3 (u32 #xe9b5 #xdba5))
      (u32vector-set! v 4 (u32 #x3956 #xc25b))
      (u32vector-set! v 5 (u32 #x59f1 #x11f1))
      (u32vector-set! v 6 (u32 #x923f #x82a4))
      (u32vector-set! v 7 (u32 #xab1c #x5ed5))
      (u32vector-set! v 8 (u32 #xd807 #xaa98))
      (u32vector-set! v 9 (u32 #x1283 #x5b01))
      (u32vector-set! v 10 (u32 #x2431 #x85be))
      (u32vector-set! v 11 (u32 #x550c #x7dc3))
      (u32vector-set! v 12 (u32 #x72be #x5d74))
      (u32vector-set! v 13 (u32 #x80de #xb1fe))
      (u32vector-set! v 14 (u32 #x9bdc #x06a7))
      (u32vector-set! v 15 (u32 #xc19b #xf174))
      (u32vector-set! v 16 (u32 #xe49b #x69c1))
      (u32vector-set! v 17 (u32 #xefbe #x4786))
      (u32vector-set! v 18 (u32 #x0fc1 #x9dc6))
      (u32vector-set! v 19 (u32 #x240c #xa1cc))
      (u32vector-set! v 20 (u32 #x2de9 #x2c6f))
      (u32vector-set! v 21 (u32 #x4a74 #x84aa))
      (u32vector-set! v 22 (u32 #x5cb0 #xa9dc))
      (u32vector-set! v 23 (u32 #x76f9 #x88da))
      (u32vector-set! v 24 (u32 #x983e #x5152))
      (u32vector-set! v 25 (u32 #xa831 #xc66d))
      (u32vector-set! v 26 (u32 #xb003 #x27c8))
      (u32vector-set! v 27 (u32 #xbf59 #x7fc7))
      (u32vector-set! v 28 (u32 #xc6e0 #x0bf3))
      (u32vector-set! v 29 (u32 #xd5a7 #x9147))
      (u32vector-set! v 30 (u32 #x06ca #x6351))
      (u32vector-set! v 31 (u32 #x1429 #x2967))
      (u32vector-set! v 32 (u32 #x27b7 #x0a85))
      (u32vector-set! v 33 (u32 #x2e1b #x2138))
      (u32vector-set! v 34 (u32 #x4d2c #x6dfc))
      (u32vector-set! v 35 (u32 #x5338 #x0d13))
      (u32vector-set! v 36 (u32 #x650a #x7354))
      (u32vector-set! v 37 (u32 #x766a #x0abb))
      (u32vector-set! v 38 (u32 #x81c2 #xc92e))
      (u32vector-set! v 39 (u32 #x9272 #x2c85))
      (u32vector-set! v 40 (u32 #xa2bf #xe8a1))
      (u32vector-set! v 41 (u32 #xa81a #x664b))
      (u32vector-set! v 42 (u32 #xc24b #x8b70))
      (u32vector-set! v 43 (u32 #xc76c #x51a3))
      (u32vector-set! v 44 (u32 #xd192 #xe819))
      (u32vector-set! v 45 (u32 #xd699 #x0624))
      (u32vector-set! v 46 (u32 #xf40e #x3585))
      (u32vector-set! v 47 (u32 #x106a #xa070))
      (u32vector-set! v 48 (u32 #x19a4 #xc116))
      (u32vector-set! v 49 (u32 #x1e37 #x6c08))
      (u32vector-set! v 50 (u32 #x2748 #x774c))
      (u32vector-set! v 51 (u32 #x34b0 #xbcb5))
      (u32vector-set! v 52 (u32 #x391c #x0cb3))
      (u32vector-set! v 53 (u32 #x4ed8 #xaa4a))
      (u32vector-set! v 54 (u32 #x5b9c #xca4f))
      (u32vector-set! v 55 (u32 #x682e #x6ff3))
      (u32vector-set! v 56 (u32 #x748f #x82ee))
      (u32vector-set! v 57 (u32 #x78a5 #x636f))
      (u32vector-set! v 58 (u32 #x84c8 #x7814))
      (u32vector-set! v 59 (u32 #x8cc7 #x0208))
      (u32vector-set! v 60 (u32 #x90be #xfffa))
      (u32vector-set! v 61 (u32 #xa450 #x6ceb))
      (u32vector-set! v 62 (u32 #xbef9 #xa3f7))
      (u32vector-set! v 63 (u32 #xc671 #x78f2))
      v))
;*---------------------------------------------------------------------*/
;* sha256-initial-hash-value ... */
;*---------------------------------------------------------------------*/
(define (sha256-initial-hash-value)
   ;; Fresh H(0) state (FIPS 180-2, section 5.3.2): the first 32 bits
   ;; of the fractional parts of the square roots of the first 8 primes.
   (let ((v (make-u32vector 8)))
      (u32vector-set! v 0 (u32 #x6a09 #xe667))
      (u32vector-set! v 1 (u32 #xbb67 #xae85))
      (u32vector-set! v 2 (u32 #x3c6e #xf372))
      (u32vector-set! v 3 (u32 #xa54f #xf53a))
      (u32vector-set! v 4 (u32 #x510e #x527f))
      (u32vector-set! v 5 (u32 #x9b05 #x688c))
      (u32vector-set! v 6 (u32 #x1f83 #xd9ab))
      (u32vector-set! v 7 (u32 #x5be0 #xcd19))
      v))
;*---------------------------------------------------------------------*/
;* Ch ... */
;*---------------------------------------------------------------------*/
;; FIPS 180-2 "choose": bits of Y where X is 1, bits of Z where X is 0.
(define (Ch::ulong x::ulong y::ulong z::ulong)
   (bit-xor (bit-and x y) (bit-and (bit-not x) z)))
;*---------------------------------------------------------------------*/
;*    Maj ...                                                          */
;*---------------------------------------------------------------------*/
;; FIPS 180-2 "majority" of the bits of X, Y and Z.
(define (Maj::ulong x::ulong y::ulong z::ulong)
   (bit-xor* (bit-and x y) (bit-and x z) (bit-and y z)))
;*---------------------------------------------------------------------*/
;*    Sigma0-256 ...                                                   */
;*---------------------------------------------------------------------*/
;; FIPS 180-2 Sigma0: rotr2 ^ rotr13 ^ rotr22.
(define (Sigma0-256::ulong x::ulong)
   (bit-xor* (rotr32 x 2) (rotr32 x 13) (rotr32 x 22)))
;*---------------------------------------------------------------------*/
;*    sigma0-256 ...                                                   */
;*---------------------------------------------------------------------*/
;; FIPS 180-2 sigma0: rotr7 ^ rotr18 ^ shr3.
(define (sigma0-256::ulong x::ulong)
   (bit-xor* (rotr32 x 7) (rotr32 x 18) (bit-ursh x 3)))
;*---------------------------------------------------------------------*/
;*    Sigma1-256 ...                                                   */
;*---------------------------------------------------------------------*/
;; FIPS 180-2 Sigma1: rotr6 ^ rotr11 ^ rotr25.
(define (Sigma1-256::ulong x::ulong)
   (bit-xor* (rotr32 x 6) (rotr32 x 11) (rotr32 x 25)))
;*---------------------------------------------------------------------*/
* - 256 ... * /
;*---------------------------------------------------------------------*/
;; FIPS 180-2 sigma1: rotr17 ^ rotr19 ^ shr10.
(define (sigma1-256::ulong x::ulong)
   (bit-xor* (rotr32 x 17) (rotr32 x 19) (bit-ursh x 10)))
;*---------------------------------------------------------------------*/
;*    get-a/b/c/d/e/f/g/h ...                                          */
;*---------------------------------------------------------------------*/
;; Named accessors for the eight 32-bit words of the hash state.
(define (get-a::ulong st) (u32vector-ref st 0))
(define (get-b::ulong st) (u32vector-ref st 1))
(define (get-c::ulong st) (u32vector-ref st 2))
(define (get-d::ulong st) (u32vector-ref st 3))
(define (get-e::ulong st) (u32vector-ref st 4))
(define (get-f::ulong st) (u32vector-ref st 5))
(define (get-g::ulong st) (u32vector-ref st 6))
(define (get-h::ulong st) (u32vector-ref st 7))
;*---------------------------------------------------------------------*/
;* sha256-internal-transform ... */
;*---------------------------------------------------------------------*/
(define (sha256-internal-transform state::u32vector buffer::u32vector)
   ;; Apply the SHA-256 compression function to STATE for one 64-byte
   ;; block held in BUFFER (16 big-endian 32-bit words).  BUFFER is
   ;; reused in place as the circular message schedule W[0..15].
   ;; Fixes/cleanups: the schedule index mask was written #lxF (an
   ;; llong literal) instead of the fixnum #xf used by xf-ref; removed
   ;; the unused local helper get-u32 and the unused binding oj; use
   ;; +fx for the loop counter.
   (define (compress e f g h w::ulong j)
      ;; T1 = h + Sigma1(e) + Ch(e,f,g) + K256[j] + W[j]
      (u32+ h (Sigma1-256 e) (Ch e f g)
	 (u32vector-ref K256 j)
	 w))
   (define (xf-ref::ulong v::u32vector n::long)
      ;; circular access in the 16-word message schedule
      (u32vector-ref v (bit-and n #xf)))
   (define (set-state! state
	      a::ulong b::ulong c::ulong d::ulong
	      e::ulong f::ulong g::ulong h::ulong)
      ;; fold the working variables back into the intermediate hash
      (let ((oa::ulong (get-a state))
	    (ob::ulong (get-b state))
	    (oc::ulong (get-c state))
	    (od::ulong (get-d state))
	    (oe::ulong (get-e state))
	    (of::ulong (get-f state))
	    (og::ulong (get-g state))
	    (oh::ulong (get-h state)))
	 (u32vector-set! state 0 (u32+ oa a))
	 (u32vector-set! state 1 (u32+ ob b))
	 (u32vector-set! state 2 (u32+ oc c))
	 (u32vector-set! state 3 (u32+ od d))
	 (u32vector-set! state 4 (u32+ oe e))
	 (u32vector-set! state 5 (u32+ of f))
	 (u32vector-set! state 6 (u32+ og g))
	 (u32vector-set! state 7 (u32+ oh h))))
   (let loop ((a::ulong (get-a state))
	      (b::ulong (get-b state))
	      (c::ulong (get-c state))
	      (d::ulong (get-d state))
	      (e::ulong (get-e state))
	      (f::ulong (get-f state))
	      (g::ulong (get-g state))
	      (h::ulong (get-h state))
	      (j::long 0))
      (cond
	 ((<fx j 16)
	  ;; rounds 0-15 use the block words directly
	  (let* ((w::ulong (u32vector-ref buffer j))
		 (T1::ulong (compress e f g h w j))
		 (T2::ulong (u32+ (Sigma0-256 a) (Maj a b c))))
	     (loop (u32+ T1 T2) a b c (u32+ d T1) e f g (+fx j 1))))
	 ((<fx j 64)
	  ;; rounds 16-63 extend the schedule in place:
	  ;; W[j mod 16] = sigma1(W[j+14]) + W[j+9] + sigma0(W[j+1]) + W[j]
	  (let* ((s0 (sigma0-256 (xf-ref buffer (+fx j 1))))
		 (s1 (sigma1-256 (xf-ref buffer (+fx j 14))))
		 (ndx (bit-and j #xf))
		 (w::ulong (u32+ (xf-ref buffer j) s1 (xf-ref buffer (+fx j 9)) s0))
		 (T1::ulong (compress e f g h w j))
		 (T2::ulong (u32+ (Sigma0-256 a) (Maj a b c))))
	     (u32vector-set! buffer ndx w)
	     (loop (u32+ T1 T2) a b c (u32+ d T1) e f g (+fx j 1))))
	 (else
	  (set-state! state a b c d e f g h)
	  state))))
;*---------------------------------------------------------------------*/
;* sha256-update ... */
;*---------------------------------------------------------------------*/
(define (sha256-update state::u32vector buffer::u32vector
	   o::obj fill-word!::procedure)
   ;; Hash the whole message O into STATE, one 64-byte block at a time.
   ;; FILL-WORD! stores one big-endian word of O into BUFFER and
   ;; returns the number of message bytes it consumed; once the data is
   ;; exhausted it emits the 0x80 padding byte (counted once) and then
   ;; zeros.  L accumulates the bytes consumed so far (pad included).
   (define (fill-buffer! buffer i)
      ;; fills 16 words of 4 bytes, returns the number of read bytes
      (let loop ((j 0)
		 (i i)
		 (n 0))
	 (if (<fx j 16)
	     (loop (+fx j 1) (+fx i 4) (+ n (fill-word! buffer j o i)))
	     n)))
   (let loop ((i 0)
	      (l 0))
      (let ((bytes (fill-buffer! buffer i)))
	 (cond
	    ((=fx bytes 64)
	     ;; a full buffer
	     (sha256-internal-transform state buffer)
	     (loop (+fx i 64) (+fx l 64)))
	    ((>=fx (-fx 64 bytes) 8)
	     ;; we have room for the length of the message.  The length
	     ;; is a 64 bit integer but we only use 32 bit values here.
	     ;; L counts the 0x80 pad byte, hence the -1 correction.
	     (let ((ulen::ulong (*fx 8 (+fx (-fx l 1) bytes))))
		(u32vector-set! buffer 15 ulen))
	     (sha256-internal-transform state buffer))
	    (else
	     ;; no room for the length in this block: flush it and emit
	     ;; the length in the next (all-zero) block.  The next read
	     ;; offset is i+64; the previous code looped with 64+bytes,
	     ;; which could fall back inside the message and re-read
	     ;; data, producing a wrong digest for some lengths.
	     (sha256-internal-transform state buffer)
	     (loop (+fx i 64) (+fx l bytes)))))))
;*---------------------------------------------------------------------*/
;* sha256sum-mmap ... */
;*---------------------------------------------------------------------*/
(define (sha256sum-mmap mm)
   ;; SHA-256 digest (64-char hex string) of a whole memory-mapped file.
   (define (u32mmap-ref::ulong mm::mmap i::long)
      ;; byte I of the map as an unsigned integer
      (char->integer ($mmap-ref mm i)))
   (define (fill-word32-mmap! v32::u32vector i::long mm::mmap n::long)
      ;; Store into V32[I] the big-endian 32-bit word at byte offset N
      ;; of MM.  Returns the number of message bytes consumed:
      ;;   4   -> a full data word
      ;;   0   -> past the end of the already-padded message
      ;;   k+1 -> partial final word: k data bytes plus the 0x80 pad
      ;; (the pad byte is counted once; sha256-update compensates).
      (let ((l (mmap-length mm)))
	 (cond
	    ((<=fx (+fx n 4) l)
	     (let* ((v0::ulong (u32mmap-ref mm n))
		    (v1::ulong (u32mmap-ref mm (+fx n 1)))
		    (v2::ulong (u32mmap-ref mm (+fx n 2)))
		    (v3::ulong (u32mmap-ref mm (+fx n 3)))
		    (v::ulong (u32 (u16 v0 v1) (u16 v2 v3))))
		(u32vector-set! v32 i v)
		4))
	    ((>=fx n (+fx 1 l))
	     ;; beyond the pad byte: pure zero padding
	     (u32vector-set! v32 i 0)
	     0)
	    (else
	     ;; partial word: copy the k remaining bytes, append 0x80,
	     ;; zero-fill the rest (v starts zeroed)
	     (let ((v (make-u32vector 4 0))
		   (k (-fx 4 (-fx (+fx n 4) l))))
		(let loop ((j 0))
		   (if (=fx j k)
		       (begin
			  (u32vector-set! v j #x80)
			  (let* ((v0::ulong (u32vector-ref v 0))
				 (v1::ulong (u32vector-ref v 1))
				 (v2::ulong (u32vector-ref v 2))
				 (v3::ulong (u32vector-ref v 3))
				 (v::ulong (u32 (u16 v0 v1) (u16 v2 v3))))
			     (u32vector-set! v32 i v)
			     (+fx j 1)))
		       (begin
			  (u32vector-set! v j (u32mmap-ref mm (+ n j)))
			  (loop (+fx j 1))))))))))
   (let ((state (sha256-initial-hash-value))
	 (buffer (make-u32vector 16)))
      (sha256-update state buffer mm fill-word32-mmap!)
      (state->string state)))
;*---------------------------------------------------------------------*/
;* sha256sum-string ... */
;*---------------------------------------------------------------------*/
(define (sha256sum-string str)
   ;; SHA-256 digest (64-char hex string) of an in-memory string.
   (define (u32string-ref::ulong str i)
      ;; byte I of the string as an unsigned integer
      (char->integer (string-ref str i)))
   (define (fill-word32-string! v32::u32vector i::long str::bstring n::long)
      ;; Same contract as fill-word32-mmap!: store the big-endian word
      ;; at offset N into V32[I] and return the bytes consumed
      ;; (4 = full word, 0 = past the pad, k+1 = k data bytes + 0x80).
      (let ((l (string-length str)))
	 (cond
	    ((<=fx (+fx n 4) l)
	     (let* ((v0::ulong (u32string-ref str n))
		    (v1::ulong (u32string-ref str (+fx n 1)))
		    (v2::ulong (u32string-ref str (+fx n 2)))
		    (v3::ulong (u32string-ref str (+fx n 3)))
		    (v::ulong (u32 (u16 v0 v1) (u16 v2 v3))))
		(u32vector-set! v32 i v)
		4))
	    ((>=fx n (+fx 1 l))
	     (u32vector-set! v32 i 0)
	     0)
	    (else
	     (let ((v (make-u32vector 4 0))
		   (k (-fx 4 (-fx (+fx n 4) l))))
		(let loop ((j 0))
		   (if (=fx j k)
		       (begin
			  (u32vector-set! v j #x80)
			  (let* ((v0::ulong (u32vector-ref v 0))
				 (v1::ulong (u32vector-ref v 1))
				 (v2::ulong (u32vector-ref v 2))
				 (v3::ulong (u32vector-ref v 3))
				 (v::ulong (u32 (u16 v0 v1) (u16 v2 v3))))
			     (u32vector-set! v32 i v)
			     (+fx j 1)))
		       (begin
			  (u32vector-set! v j (u32string-ref str (+ n j)))
			  (loop (+fx j 1))))))))))
   (let ((state (sha256-initial-hash-value))
	 (buffer (make-u32vector 16)))
      (sha256-update state buffer str fill-word32-string!)
      (state->string state)))
;*---------------------------------------------------------------------*/
;* sha256sum-port ... */
;*---------------------------------------------------------------------*/
(define (sha256sum-port p)
   ;; SHA-256 digest (64-char hex string) of everything readable on P.
   ;; BUF holds the word being assembled; LEN counts the bytes actually
   ;; read so far (mutated by fill-word32-port!).
   (define buf (make-u32vector 4))
   (define len 0)
   (define (read-word! p::input-port)
      ;; read up to 4 bytes into BUF, zero-padding at EOF;
      ;; returns the number of real bytes read
      (let loop ((i 0))
	 (if (=fx i 4)
	     i
	     (let ((c (read-byte p)))
		(if (eof-object? c)
		    (let liip ((j i))
		       (if (=fx j 4)
			   i
			   (begin
			      (u32vector-set! buf j 0)
			      (liip (+fx j 1)))))
		    (begin
		       (u32vector-set! buf i ($byte->ulong c))
		       (loop (+fx i 1))))))))
   (define (fill-word32-port! v32::u32vector i::long p::input-port n::long)
      ;; Same contract as fill-word32-mmap!, but reading from the port:
      ;; store the big-endian word at logical offset N and return the
      ;; bytes consumed (4 = full word, 0 = past the pad, k+1 = k data
      ;; bytes + the 0x80 pad).
      (let ((l (read-word! p)))
	 (set! len (+fx len l))
	 (cond
	    ((<=fx (+fx n 4) len)
	     (let* ((v0::ulong (u32vector-ref buf 0))
		    (v1::ulong (u32vector-ref buf 1))
		    (v2::ulong (u32vector-ref buf 2))
		    (v3::ulong (u32vector-ref buf 3))
		    (v::ulong (u32 (u16 v0 v1) (u16 v2 v3))))
		(u32vector-set! v32 i v)
		4))
	    ((>=fx n (+fx 1 len))
	     (u32vector-set! v32 i 0)
	     0)
	    (else
	     (let ((v (make-u32vector 4 0))
		   (k (-fx 4 (-fx (+fx n 4) len))))
		(let loop ((j 0))
		   (if (=fx j k)
		       (begin
			  (u32vector-set! v j #x80)
			  (let* ((v0::ulong (u32vector-ref v 0))
				 (v1::ulong (u32vector-ref v 1))
				 (v2::ulong (u32vector-ref v 2))
				 (v3::ulong (u32vector-ref v 3))
				 (v::ulong (u32 (u16 v0 v1) (u16 v2 v3))))
			     (u32vector-set! v32 i v)
			     (+fx j 1)))
		       (begin
			  (u32vector-set! v j (u32vector-ref buf j))
			  (loop (+fx j 1))))))))))
   (let ((state (sha256-initial-hash-value))
	 (buffer (make-u32vector 16)))
      (sha256-update state buffer p fill-word32-port!)
      (state->string state)))
;*---------------------------------------------------------------------*/
;* sha256sum-file ... */
;*---------------------------------------------------------------------*/
(define (sha256sum-file fname)
   ;; Digest a file, preferring the mmap path and falling back to a
   ;; plain input port when mapping fails.  NOTE(review): only :write
   ;; #f is passed to open-mmap -- presumably reading defaults to
   ;; enabled; confirm against the Bigloo open-mmap documentation.
   (let ((mm (open-mmap fname :write #f)))
      (if (mmap? mm)
	  (unwind-protect
	     (sha256sum-mmap mm)
	     (close-mmap mm))
	  (let ((p (open-input-file fname)))
	     (unwind-protect
		(sha256sum-port p)
		(close-input-port p))))))
;*---------------------------------------------------------------------*/
;* sha256sum ... */
;*---------------------------------------------------------------------*/
(define (sha256sum obj)
   ;; Generic entry point: dispatch on the argument type
   ;; (mmap, string or input port); errors on anything else.
   (cond
      ((mmap? obj)
       (sha256sum-mmap obj))
      ((string? obj)
       (sha256sum-string obj))
      ((input-port? obj)
       (sha256sum-port obj))
      (else
       (error "sha256sum" "Illegal argument" obj))))
;*---------------------------------------------------------------------*/
;* hmac-sh256sum-string ... */
;*---------------------------------------------------------------------*/
(define (hmac-sha256sum-string key msg)
   ;; HMAC (RFC 2104 construction, via __hmac) over SHA-256.
   (hmac-string key msg sha256sum-string))
| null | https://raw.githubusercontent.com/donaldsonjw/bigloo/a4d06e409d0004e159ce92b9908719510a18aed5/runtime/Unsafe/sha2.scm | scheme | *=====================================================================*/
* ------------------------------------------------------------- */
* ------------------------------------------------------------- */
* SHA-256 Bigloo implementation */
*=====================================================================*/
This code has been inspired by a C implementation written by
FILE: sha2.c
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
may be used to endorse or promote products derived from this software
without specific prior written permission.
ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR OR CONTRIBUTOR(S) BE LIABLE
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS
OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION)
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY
OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
*---------------------------------------------------------------------*/
* The module */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* u32vector-set! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* u32vector-ref ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* u32 ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* u16 ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* u32-hi ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* u32-low ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* addu32 ... */
* ------------------------------------------------------------- */
* unsigned binary addition */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* u32+ ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* rotr32 ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* u32-fill! ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* state->string ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* bit-or* ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* bit-xor* ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* K256 ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* sha256-initial-hash-value ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* Ch ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* Maj ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* sigma0-256 ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* get-a/b/c/d/e/f/g/h ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* sha256-internal-transform ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* sha256-update ... */
*---------------------------------------------------------------------*/
a full buffer
we have room for the length of the message. The length is
we don't have space for the length
*---------------------------------------------------------------------*/
* sha256sum-mmap ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* sha256sum-string ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* sha256sum-port ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* sha256sum-file ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* sha256sum ... */
*---------------------------------------------------------------------*/
*---------------------------------------------------------------------*/
* hmac-sh256sum-string ... */
*---------------------------------------------------------------------*/ | * serrano / prgm / project / bigloo / runtime / Unsafe / sha2.scm * /
* Author : and * /
* Creation : Mon May 26 08:40:27 2008 * /
* Last change : Tue Jun 17 17:28:49 2014 ( serrano ) * /
* Copyright : 2008 - 14 , * /
and distributed under the following copyright :
AUTHOR : < >
Copyright ( c ) 2000 - 2001 ,
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
3 . Neither the name of the copyright holder nor the names of contributors
THIS SOFTWARE IS PROVIDED BY THE AUTHOR AND CONTRIBUTOR(S ) ` ` AS IS '' AND
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
$ I d : sha2.c , v 1.1 2001/11/08 00:01:51 adg Exp adg $
(module __sha2
(use __type
__bigloo
__bexit
__object
__thread
__rgc
__bit
__bignum
__r4_numbers_6_5
__r4_numbers_6_5_fixnum
__r4_numbers_6_5_flonum
__r4_numbers_6_5_flonum_dtoa
__r4_booleans_6_1
__r4_symbols_6_4
__r4_vectors_6_8
__r4_control_features_6_9
__r4_pairs_and_lists_6_3
__r4_characters_6_6
__r4_equivalence_6_2
__r4_strings_6_7
__r4_ports_6_10_1
__r4_input_6_10_2
__r5_control_features_6_4
__mmap
__foreign
__error
__evenv
__os
__structure)
(import __param
__hmac
__tvector)
(from __srfi4)
(export (sha256sum::bstring ::obj)
(sha256sum-string::bstring ::bstring)
(sha256sum-mmap::bstring ::mmap)
(sha256sum-port::bstring ::input-port)
(sha256sum-file::bstring ::bstring)
(hmac-sha256sum-string::bstring ::bstring ::bstring)))
(define-macro (u32vector-set! vec i val)
`((@ u32vector-set! __srfi4) ,vec ,i ($ulong->uint32 ,val)))
(define-macro (u32vector-ref vec i)
`($uint32->ulong ((@ u32vector-ref __srfi4) ,vec ,i)))
(define-macro (u32 hi lo)
`(bit-or (bit-lsh ,hi 16) ,lo))
(define-macro (u16 hi lo)
`(bit-or (bit-lsh ,hi 8) ,lo))
(define-macro (u32-hi w)
`(bit-ursh ,w 16))
(define-macro (u32-lo w)
`(bit-and ,w (-fx (bit-lsh 1 16) 1)))
(define (addu32::ulong n1::ulong n2::ulong)
(let* ((h1::ulong (bit-ursh n1 16))
(h2::ulong (bit-ursh n2 16))
(h::ulong (bit-and #xffff (+fx h1 h2)))
(l1::ulong (bit-and n1 #xffff))
(l2::ulong (bit-and n2 #xffff))
(l::ulong (+fx l1 l2))
(lh::ulong (bit-lsh h 16)))
(+fx lh l)))
(define-expander u32+
(lambda (x e)
(match-case x
(()
0)
((?- ?a)
(e a e))
((?- ?a . ?b)
(e `(addu32 ,a (u32+ ,@b)) e)))))
(define (rotr32::ulong x::ulong n::int)
(bit-or (bit-ursh x n) (bit-lsh x (-fx 32 n))))
(define (u32-fill! str offset w::ulong)
(let* ((s1 (integer->string (u32-hi w) 16))
(l1 (string-length s1))
(s2 (integer->string (u32-lo w) 16))
(l2 (string-length s2)))
(blit-string! s1 0 str (+fx offset (-fx 4 l1)) l1)
(blit-string! s2 0 str (+fx offset (+fx 4 (-fx 4 l2))) l2)))
(define (state->string state::u32vector)
(let ((r (make-string 64 #\0)))
(u32-fill! r 0 (u32vector-ref state 0))
(u32-fill! r 8 (u32vector-ref state 1))
(u32-fill! r 16 (u32vector-ref state 2))
(u32-fill! r 24 (u32vector-ref state 3))
(u32-fill! r 32 (u32vector-ref state 4))
(u32-fill! r 40 (u32vector-ref state 5))
(u32-fill! r 48 (u32vector-ref state 6))
(u32-fill! r 56 (u32vector-ref state 7))
r))
(define-expander bit-or*
(lambda (x e)
(match-case x
((?- ?a ?b)
(e `(bit-or ,a ,b) e))
((?- ?a . ?b)
(e `(bit-or ,a (bit-or* ,@b)) e)))))
(define-expander bit-xor*
(lambda (x e)
(match-case x
((?- ?a ?b)
(e `(bit-xor ,a ,b) e))
((?- ?a . ?b)
(e `(bit-xor ,a (bit-xor* ,@b)) e)))))
(define K256
(let ((v (make-u32vector 64)))
(u32vector-set! v 0 (u32 #x428a #x2f98))
(u32vector-set! v 1 (u32 #x7137 #x4491))
(u32vector-set! v 2 (u32 #xb5c0 #xfbcf))
(u32vector-set! v 3 (u32 #xe9b5 #xdba5))
(u32vector-set! v 4 (u32 #x3956 #xc25b))
(u32vector-set! v 5 (u32 #x59f1 #x11f1))
(u32vector-set! v 6 (u32 #x923f #x82a4))
(u32vector-set! v 7 (u32 #xab1c #x5ed5))
(u32vector-set! v 8 (u32 #xd807 #xaa98))
(u32vector-set! v 9 (u32 #x1283 #x5b01))
(u32vector-set! v 10 (u32 #x2431 #x85be))
(u32vector-set! v 11 (u32 #x550c #x7dc3))
(u32vector-set! v 12 (u32 #x72be #x5d74))
(u32vector-set! v 13 (u32 #x80de #xb1fe))
(u32vector-set! v 14 (u32 #x9bdc #x06a7))
(u32vector-set! v 15 (u32 #xc19b #xf174))
(u32vector-set! v 16 (u32 #xe49b #x69c1))
(u32vector-set! v 17 (u32 #xefbe #x4786))
(u32vector-set! v 18 (u32 #x0fc1 #x9dc6))
(u32vector-set! v 19 (u32 #x240c #xa1cc))
(u32vector-set! v 20 (u32 #x2de9 #x2c6f))
(u32vector-set! v 21 (u32 #x4a74 #x84aa))
(u32vector-set! v 22 (u32 #x5cb0 #xa9dc))
(u32vector-set! v 23 (u32 #x76f9 #x88da))
(u32vector-set! v 24 (u32 #x983e #x5152))
(u32vector-set! v 25 (u32 #xa831 #xc66d))
(u32vector-set! v 26 (u32 #xb003 #x27c8))
(u32vector-set! v 27 (u32 #xbf59 #x7fc7))
(u32vector-set! v 28 (u32 #xc6e0 #x0bf3))
(u32vector-set! v 29 (u32 #xd5a7 #x9147))
(u32vector-set! v 30 (u32 #x06ca #x6351))
(u32vector-set! v 31 (u32 #x1429 #x2967))
(u32vector-set! v 32 (u32 #x27b7 #x0a85))
(u32vector-set! v 33 (u32 #x2e1b #x2138))
(u32vector-set! v 34 (u32 #x4d2c #x6dfc))
(u32vector-set! v 35 (u32 #x5338 #x0d13))
(u32vector-set! v 36 (u32 #x650a #x7354))
(u32vector-set! v 37 (u32 #x766a #x0abb))
(u32vector-set! v 38 (u32 #x81c2 #xc92e))
(u32vector-set! v 39 (u32 #x9272 #x2c85))
(u32vector-set! v 40 (u32 #xa2bf #xe8a1))
(u32vector-set! v 41 (u32 #xa81a #x664b))
(u32vector-set! v 42 (u32 #xc24b #x8b70))
(u32vector-set! v 43 (u32 #xc76c #x51a3))
(u32vector-set! v 44 (u32 #xd192 #xe819))
(u32vector-set! v 45 (u32 #xd699 #x0624))
(u32vector-set! v 46 (u32 #xf40e #x3585))
(u32vector-set! v 47 (u32 #x106a #xa070))
(u32vector-set! v 48 (u32 #x19a4 #xc116))
(u32vector-set! v 49 (u32 #x1e37 #x6c08))
(u32vector-set! v 50 (u32 #x2748 #x774c))
(u32vector-set! v 51 (u32 #x34b0 #xbcb5))
(u32vector-set! v 52 (u32 #x391c #x0cb3))
(u32vector-set! v 53 (u32 #x4ed8 #xaa4a))
(u32vector-set! v 54 (u32 #x5b9c #xca4f))
(u32vector-set! v 55 (u32 #x682e #x6ff3))
(u32vector-set! v 56 (u32 #x748f #x82ee))
(u32vector-set! v 57 (u32 #x78a5 #x636f))
(u32vector-set! v 58 (u32 #x84c8 #x7814))
(u32vector-set! v 59 (u32 #x8cc7 #x0208))
(u32vector-set! v 60 (u32 #x90be #xfffa))
(u32vector-set! v 61 (u32 #xa450 #x6ceb))
(u32vector-set! v 62 (u32 #xbef9 #xa3f7))
(u32vector-set! v 63 (u32 #xc671 #x78f2))
v))
(define (sha256-initial-hash-value)
(let ((v (make-u32vector 8)))
(u32vector-set! v 0 (u32 #x6a09 #xe667))
(u32vector-set! v 1 (u32 #xbb67 #xae85))
(u32vector-set! v 2 (u32 #x3c6e #xf372))
(u32vector-set! v 3 (u32 #xa54f #xf53a))
(u32vector-set! v 4 (u32 #x510e #x527f))
(u32vector-set! v 5 (u32 #x9b05 #x688c))
(u32vector-set! v 6 (u32 #x1f83 #xd9ab))
(u32vector-set! v 7 (u32 #x5be0 #xcd19))
v))
(define (Ch::ulong x::ulong y::ulong z::ulong)
(bit-xor (bit-and x y) (bit-and (bit-not x) z)))
(define (Maj::ulong x::ulong y::ulong z::ulong)
(bit-xor* (bit-and x y) (bit-and x z) (bit-and y z)))
* Sigma0 - 256 ... * /
(define (Sigma0-256::ulong x::ulong)
(bit-xor* (rotr32 x 2) (rotr32 x 13) (rotr32 x 22)))
(define (sigma0-256::ulong x::ulong)
(bit-xor* (rotr32 x 7) (rotr32 x 18) (bit-ursh x 3)))
* Sigma1 - 256 ... * /
(define (Sigma1-256::ulong x::ulong)
(bit-xor* (rotr32 x 6) (rotr32 x 11) (rotr32 x 25)))
* - 256 ... * /
(define (sigma1-256::ulong x::ulong)
(bit-xor* (rotr32 x 17) (rotr32 x 19) (bit-ursh x 10)))
(define (get-a::ulong st) (u32vector-ref st 0))
(define (get-b::ulong st) (u32vector-ref st 1))
(define (get-c::ulong st) (u32vector-ref st 2))
(define (get-d::ulong st) (u32vector-ref st 3))
(define (get-e::ulong st) (u32vector-ref st 4))
(define (get-f::ulong st) (u32vector-ref st 5))
(define (get-g::ulong st) (u32vector-ref st 6))
(define (get-h::ulong st) (u32vector-ref st 7))
(define (sha256-internal-transform state::u32vector buffer::u32vector)
(define (compress e f g h w::ulong j)
(u32+ h (Sigma1-256 e) (Ch e f g)
(u32vector-ref K256 j)
w))
(define (xf-ref::ulong v::u32vector n::long)
(u32vector-ref v (bit-and n #xf)))
(define (get-u32::ulong W n)
(bit-or* (bit-lsh (u32vector-ref W (+ n 0)) 24)
(bit-lsh (u32vector-ref W (+ n 1)) 16)
(bit-lsh (u32vector-ref W (+ n 2)) 8)
(u32vector-ref W (+ n 3))))
(define (set-state! state
a::ulong b::ulong c::ulong d::ulong
e::ulong f::ulong g::ulong h::ulong)
(let ((oa::ulong (get-a state))
(ob::ulong (get-b state))
(oc::ulong (get-c state))
(od::ulong (get-d state))
(oe::ulong (get-e state))
(of::ulong (get-f state))
(og::ulong (get-g state))
(oh::ulong (get-h state))
(oj::long 0))
(u32vector-set! state 0 (u32+ oa a))
(u32vector-set! state 1 (u32+ ob b))
(u32vector-set! state 2 (u32+ oc c))
(u32vector-set! state 3 (u32+ od d))
(u32vector-set! state 4 (u32+ oe e))
(u32vector-set! state 5 (u32+ of f))
(u32vector-set! state 6 (u32+ og g))
(u32vector-set! state 7 (u32+ oh h))))
(let loop ((a::ulong (get-a state))
(b::ulong (get-b state))
(c::ulong (get-c state))
(d::ulong (get-d state))
(e::ulong (get-e state))
(f::ulong (get-f state))
(g::ulong (get-g state))
(h::ulong (get-h state))
(j::long 0))
(cond
((<fx j 16)
(let* ((w::ulong (u32vector-ref buffer j))
(T1::ulong (compress e f g h w j))
(T2::ulong (u32+ (Sigma0-256 a) (Maj a b c))))
(loop (u32+ T1 T2) a b c (u32+ d T1) e f g (+ j 1))))
((<fx j 64)
(let* ((s0 (sigma0-256 (xf-ref buffer (+ j 1))))
(s1 (sigma1-256 (xf-ref buffer (+ j 14))))
(ndx (bit-and j #lxF))
(w::ulong (u32+ (xf-ref buffer j) s1 (xf-ref buffer (+fx j 9)) s0))
(T1::ulong (compress e f g h w j))
(T2::ulong (u32+ (Sigma0-256 a) (Maj a b c))))
(u32vector-set! buffer ndx w)
(loop (u32+ T1 T2) a b c (u32+ d T1) e f g (+ j 1))))
(else
(set-state! state a b c d e f g h)
state))))
(define (sha256-update state::u32vector buffer::u32vector
o::obj fill-word!::procedure)
(define (fill-buffer! buffer i)
fills 16 words of 4 bytes , returns the number of read bytes
(let loop ((j 0)
(i i)
(n 0))
(if (<fx j 16)
(loop (+fx j 1) (+fx i 4) (+ n (fill-word! buffer j o i)))
n)))
(let loop ((i 0)
(l 0))
(let ((bytes (fill-buffer! buffer i)))
(cond
((=fx bytes 64)
(sha256-internal-transform state buffer)
(loop (+fx i 64) (+fx l 64)))
((>=fx (-fx 64 bytes) 8)
a 64 bits integer but we are using here 32bits values
(let ((ulen::ulong (*fx 8 (+fx (-fx l 1) bytes))))
(u32vector-set! buffer 15 ulen))
(sha256-internal-transform state buffer))
(else
(sha256-internal-transform state buffer)
(loop (+fx 64 bytes) (+fx l bytes)))))))
(define (sha256sum-mmap mm)
(define (u32mmap-ref::ulong mm::mmap i::long)
(char->integer ($mmap-ref mm i)))
(define (fill-word32-mmap! v32::u32vector i::long mm::mmap n::long)
(let ((l (mmap-length mm)))
(cond
((<=fx (+fx n 4) l)
(let* ((v0::ulong (u32mmap-ref mm n))
(v1::ulong (u32mmap-ref mm (+fx n 1)))
(v2::ulong (u32mmap-ref mm (+fx n 2)))
(v3::ulong (u32mmap-ref mm (+fx n 3)))
(v::ulong (u32 (u16 v0 v1) (u16 v2 v3))))
(u32vector-set! v32 i v)
4))
((>=fx n (+fx 1 l))
(u32vector-set! v32 i 0)
0)
(else
(let ((v (make-u32vector 4 0))
(k (-fx 4 (-fx (+fx n 4) l))))
(let loop ((j 0))
(if (=fx j k)
(begin
(u32vector-set! v j #x80)
(let* ((v0::ulong (u32vector-ref v 0))
(v1::ulong (u32vector-ref v 1))
(v2::ulong (u32vector-ref v 2))
(v3::ulong (u32vector-ref v 3))
(v::ulong (u32 (u16 v0 v1) (u16 v2 v3))))
(u32vector-set! v32 i v)
(+fx j 1)))
(begin
(u32vector-set! v j (u32mmap-ref mm (+ n j)))
(loop (+fx j 1))))))))))
(let ((state (sha256-initial-hash-value))
(buffer (make-u32vector 16)))
(sha256-update state buffer mm fill-word32-mmap!)
(state->string state)))
(define (sha256sum-string str)
(define (u32string-ref::ulong str i)
(char->integer (string-ref str i)))
(define (fill-word32-string! v32::u32vector i::long str::bstring n::long)
(let ((l (string-length str)))
(cond
((<=fx (+fx n 4) l)
(let* ((v0::ulong (u32string-ref str n))
(v1::ulong (u32string-ref str (+fx n 1)))
(v2::ulong (u32string-ref str (+fx n 2)))
(v3::ulong (u32string-ref str (+fx n 3)))
(v::ulong (u32 (u16 v0 v1) (u16 v2 v3))))
(u32vector-set! v32 i v)
4))
((>=fx n (+fx 1 l))
(u32vector-set! v32 i 0)
0)
(else
(let ((v (make-u32vector 4 0))
(k (-fx 4 (-fx (+fx n 4) l))))
(let loop ((j 0))
(if (=fx j k)
(begin
(u32vector-set! v j #x80)
(let* ((v0::ulong (u32vector-ref v 0))
(v1::ulong (u32vector-ref v 1))
(v2::ulong (u32vector-ref v 2))
(v3::ulong (u32vector-ref v 3))
(v::ulong (u32 (u16 v0 v1) (u16 v2 v3))))
(u32vector-set! v32 i v)
(+fx j 1)))
(begin
(u32vector-set! v j (u32string-ref str (+ n j)))
(loop (+fx j 1))))))))))
(let ((state (sha256-initial-hash-value))
(buffer (make-u32vector 16)))
(sha256-update state buffer str fill-word32-string!)
(state->string state)))
(define (sha256sum-port p)
(define buf (make-u32vector 4))
(define len 0)
(define (read-word! p::input-port)
(let loop ((i 0))
(if (=fx i 4)
i
(let ((c (read-byte p)))
(if (eof-object? c)
(let liip ((j i))
(if (=fx j 4)
i
(begin
(u32vector-set! buf j 0)
(liip (+fx j 1)))))
(begin
(u32vector-set! buf i ($byte->ulong c))
(loop (+fx i 1))))))))
(define (fill-word32-port! v32::u32vector i::long p::input-port n::long)
(let ((l (read-word! p)))
(set! len (+fx len l))
(cond
((<=fx (+fx n 4) len)
(let* ((v0::ulong (u32vector-ref buf 0))
(v1::ulong (u32vector-ref buf 1))
(v2::ulong (u32vector-ref buf 2))
(v3::ulong (u32vector-ref buf 3))
(v::ulong (u32 (u16 v0 v1) (u16 v2 v3))))
(u32vector-set! v32 i v)
4))
((>=fx n (+fx 1 len))
(u32vector-set! v32 i 0)
0)
(else
(let ((v (make-u32vector 4 0))
(k (-fx 4 (-fx (+fx n 4) len))))
(let loop ((j 0))
(if (=fx j k)
(begin
(u32vector-set! v j #x80)
(let* ((v0::ulong (u32vector-ref v 0))
(v1::ulong (u32vector-ref v 1))
(v2::ulong (u32vector-ref v 2))
(v3::ulong (u32vector-ref v 3))
(v::ulong (u32 (u16 v0 v1) (u16 v2 v3))))
(u32vector-set! v32 i v)
(+fx j 1)))
(begin
(u32vector-set! v j (u32vector-ref buf j))
(loop (+fx j 1))))))))))
(let ((state (sha256-initial-hash-value))
(buffer (make-u32vector 16)))
(sha256-update state buffer p fill-word32-port!)
(state->string state)))
(define (sha256sum-file fname)
(let ((mm (open-mmap fname :write #f)))
(if (mmap? mm)
(unwind-protect
(sha256sum-mmap mm)
(close-mmap mm))
(let ((p (open-input-file fname)))
(unwind-protect
(sha256sum-port p)
(close-input-port p))))))
(define (sha256sum obj)
(cond
((mmap? obj)
(sha256sum-mmap obj))
((string? obj)
(sha256sum-string obj))
((input-port? obj)
(sha256sum-port obj))
(else
(error "sha256sum" "Illegal argument" obj))))
(define (hmac-sha256sum-string key msg)
(hmac-string key msg sha256sum-string))
|
ec5b0cb89ff1e7bf73a838c036f3cadb229172a2a4a7ab35258541071e9448a6 | clojure-interop/aws-api | AWSStorageGatewayClientBuilder.clj | (ns com.amazonaws.services.storagegateway.AWSStorageGatewayClientBuilder
"Fluent builder for AWSStorageGateway. Use of the builder is preferred
over using constructors of the client class."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.storagegateway AWSStorageGatewayClientBuilder]))
(defn *standard
"returns: Create new instance of builder with all defaults set. - `com.amazonaws.services.storagegateway.AWSStorageGatewayClientBuilder`"
(^com.amazonaws.services.storagegateway.AWSStorageGatewayClientBuilder []
(AWSStorageGatewayClientBuilder/standard )))
(defn *default-client
"returns: Default client using the DefaultAWSCredentialsProviderChain and
DefaultAwsRegionProviderChain chain - `com.amazonaws.services.storagegateway.AWSStorageGateway`"
(^com.amazonaws.services.storagegateway.AWSStorageGateway []
(AWSStorageGatewayClientBuilder/defaultClient )))
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.storagegateway/src/com/amazonaws/services/storagegateway/AWSStorageGatewayClientBuilder.clj | clojure | (ns com.amazonaws.services.storagegateway.AWSStorageGatewayClientBuilder
"Fluent builder for AWSStorageGateway. Use of the builder is preferred
over using constructors of the client class."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.storagegateway AWSStorageGatewayClientBuilder]))
(defn *standard
"returns: Create new instance of builder with all defaults set. - `com.amazonaws.services.storagegateway.AWSStorageGatewayClientBuilder`"
(^com.amazonaws.services.storagegateway.AWSStorageGatewayClientBuilder []
(AWSStorageGatewayClientBuilder/standard )))
(defn *default-client
"returns: Default client using the DefaultAWSCredentialsProviderChain and
DefaultAwsRegionProviderChain chain - `com.amazonaws.services.storagegateway.AWSStorageGateway`"
(^com.amazonaws.services.storagegateway.AWSStorageGateway []
(AWSStorageGatewayClientBuilder/defaultClient )))
| |
f21d22fd372e233bcb4b8bbc8ea6921e5a4b729ec16ad7a493931e52e5fb3641 | pokepay/paras | compiler.lisp | (defpackage #:paras/compiler
(:use #:cl
#:paras/errors
#:paras/types)
(:shadowing-import-from #:paras/errors
#:end-of-file
#:undefined-function)
(:import-from #:paras/builtin)
(:import-from #:paras/user)
(:export #:compiled-form
#:compiled-form-bindings
#:compiled-form-body
#:compile-code
#:recompile-form))
(in-package #:paras/compiler)
(defstruct compiled-form
bindings
code
body)
(defun package-external-symbols (package)
(let ((symbols '()))
(do-external-symbols (s package symbols)
(push s symbols))))
(defun function-allowed-p (function-name)
(let ((package (symbol-package function-name))
(modules (list* "PARAS/SPECIAL" "PARAS/BUILTIN" paras/builtin:*modules*)))
(when (or (find (package-name package)
modules
:test #'string=)
(some (lambda (module-name)
(find module-name (package-nicknames package) :test 'equal))
modules))
(do-external-symbols (symbol package)
(when (eq symbol function-name)
(return-from function-allowed-p t))))))
(defun compile-code (code &optional (bindings '()))
(let ((*package* (find-package '#:paras-user)))
(check-type code paras-type)
(labels ((recur (code)
(typecase code
(cons
(let ((fn (first code)))
(unless (and (symbolp fn)
(handler-case (symbol-function fn)
(cl:undefined-function () nil))
(function-allowed-p fn))
;; The function is not allowed to be called.
(error 'undefined-function :name fn))
(if (macro-function fn)
(macroexpand code)
(macroexpand
(cons fn
(mapcar #'recur (rest code)))))))
(paras-variable-type
(handler-case (symbol-value code)
(cl:unbound-variable ()
(error 'undefined-variable :name code)))
code)
(paras-constant-type code)
(otherwise (error 'type-not-allowed :value code)))))
(make-compiled-form
:bindings bindings
:code code
:body
(progv
(mapcar #'car bindings)
(mapcar #'cdr bindings)
(recur code))))))
(defun recompile-form (form &optional (bindings '() bindings-specified-p))
(check-type form compiled-form)
(compile-code (compiled-form-code form)
(if bindings-specified-p
bindings
(compiled-form-bindings form))))
| null | https://raw.githubusercontent.com/pokepay/paras/4809fd49f279aaf06e6e4feb3edc0f2a87371cd4/compiler.lisp | lisp | The function is not allowed to be called. | (defpackage #:paras/compiler
(:use #:cl
#:paras/errors
#:paras/types)
(:shadowing-import-from #:paras/errors
#:end-of-file
#:undefined-function)
(:import-from #:paras/builtin)
(:import-from #:paras/user)
(:export #:compiled-form
#:compiled-form-bindings
#:compiled-form-body
#:compile-code
#:recompile-form))
(in-package #:paras/compiler)
(defstruct compiled-form
bindings
code
body)
(defun package-external-symbols (package)
(let ((symbols '()))
(do-external-symbols (s package symbols)
(push s symbols))))
(defun function-allowed-p (function-name)
(let ((package (symbol-package function-name))
(modules (list* "PARAS/SPECIAL" "PARAS/BUILTIN" paras/builtin:*modules*)))
(when (or (find (package-name package)
modules
:test #'string=)
(some (lambda (module-name)
(find module-name (package-nicknames package) :test 'equal))
modules))
(do-external-symbols (symbol package)
(when (eq symbol function-name)
(return-from function-allowed-p t))))))
(defun compile-code (code &optional (bindings '()))
(let ((*package* (find-package '#:paras-user)))
(check-type code paras-type)
(labels ((recur (code)
(typecase code
(cons
(let ((fn (first code)))
(unless (and (symbolp fn)
(handler-case (symbol-function fn)
(cl:undefined-function () nil))
(function-allowed-p fn))
(error 'undefined-function :name fn))
(if (macro-function fn)
(macroexpand code)
(macroexpand
(cons fn
(mapcar #'recur (rest code)))))))
(paras-variable-type
(handler-case (symbol-value code)
(cl:unbound-variable ()
(error 'undefined-variable :name code)))
code)
(paras-constant-type code)
(otherwise (error 'type-not-allowed :value code)))))
(make-compiled-form
:bindings bindings
:code code
:body
(progv
(mapcar #'car bindings)
(mapcar #'cdr bindings)
(recur code))))))
(defun recompile-form (form &optional (bindings '() bindings-specified-p))
(check-type form compiled-form)
(compile-code (compiled-form-code form)
(if bindings-specified-p
bindings
(compiled-form-bindings form))))
|
2ff3cb59948ca95aafdd112a71ec25968a780beca5161f9086cd443cd5776458 | Daniel-Diaz/processing | random.hs |
{-# LANGUAGE OverloadedStrings #-}
import Graphics.Web.Processing.Mid
import Graphics.Web.Processing.Html
import Control.Applicative ((<$>))
main :: IO ()
main = writeHtml "processing.js" "random.pde" "Random demo" "random.html" randomDemo
randomDemo :: ProcScript
randomDemo = execScriptM $ do
vi <- newVar 0
vj <- newVar 0
rv <- newVar 0
gv <- newVar 0
bv <- newVar 0
on Setup $ do
size screenWidth screenHeight
background $ Color 255 255 255 255
setFrameRate 30
on Draw $ do
strokeWeight $ div screenWidth 40
-- Random color
random rv 0 50
r <- pround <$> readVar rv
random gv 0 255
g <- pround <$> readVar gv
random bv 0 255
b <- pround <$> readVar bv
stroke $ Color r g b 100
-- Top position
i <- readVar vi
-- Bottom position
random vj 0 (intToFloat screenWidth)
j <- readVar vj
-- Line
line (intToFloat i,0) (j,intToFloat screenHeight)
-- Update top position
ifM (i #>= screenWidth)
(writeVar vi 0)
(writeVar vi $ i + div screenWidth 250)
| null | https://raw.githubusercontent.com/Daniel-Diaz/processing/f7e6661e533abf7a8d47abd273b0a8ab61eb47ad/examples/random.hs | haskell | # LANGUAGE OverloadedStrings #
Random color
Top position
Bottom position
Line
Update top position |
import Graphics.Web.Processing.Mid
import Graphics.Web.Processing.Html
import Control.Applicative ((<$>))
main :: IO ()
main = writeHtml "processing.js" "random.pde" "Random demo" "random.html" randomDemo
randomDemo :: ProcScript
randomDemo = execScriptM $ do
vi <- newVar 0
vj <- newVar 0
rv <- newVar 0
gv <- newVar 0
bv <- newVar 0
on Setup $ do
size screenWidth screenHeight
background $ Color 255 255 255 255
setFrameRate 30
on Draw $ do
strokeWeight $ div screenWidth 40
random rv 0 50
r <- pround <$> readVar rv
random gv 0 255
g <- pround <$> readVar gv
random bv 0 255
b <- pround <$> readVar bv
stroke $ Color r g b 100
i <- readVar vi
random vj 0 (intToFloat screenWidth)
j <- readVar vj
line (intToFloat i,0) (j,intToFloat screenHeight)
ifM (i #>= screenWidth)
(writeVar vi 0)
(writeVar vi $ i + div screenWidth 250)
|
ad367006b07fd59784806329aa10c345897bb6fe8020e6780dccb27b26a8240a | rd--/hsc3 | Enum.hs | -- | Data types for enumerated and non signal unit generator inputs.
module Sound.Sc3.Ugen.Enum where
import Sound.Sc3.Common.Envelope {- hsc3 -}
import Sound.Sc3.Common.Enum {- hsc3 -}
import Sound.Sc3.Ugen.Ugen {- hsc3 -}
-- | Type specialised envelope curve.
type EnvCurve = Envelope_Curve Ugen
| Lift to Ugen .
from_buffer :: Buffer Ugen -> Ugen
from_buffer b =
case b of
Buffer_Id i -> constant i
Buffer u -> u
| null | https://raw.githubusercontent.com/rd--/hsc3/024d45b6b5166e5cd3f0142fbf65aeb6ef642d46/Sound/Sc3/Ugen/Enum.hs | haskell | | Data types for enumerated and non signal unit generator inputs.
hsc3
hsc3
hsc3
| Type specialised envelope curve. | module Sound.Sc3.Ugen.Enum where
type EnvCurve = Envelope_Curve Ugen
| Lift to Ugen .
from_buffer :: Buffer Ugen -> Ugen
from_buffer b =
case b of
Buffer_Id i -> constant i
Buffer u -> u
|
7819ca9e05783085b3d3105d315cc6d71bc442ad9bd37c568f25546829e7f90e | carl-eastlund/dracula | syntax-checks.rkt | #lang racket/base
(provide (all-defined-out))
;; Constants must be surrounded with asterisks.
(define (legal-constant-name? x)
(and (identifier? x)
(regexp-match-exact?
(regexp "[*].+[*]")
(symbol->string (syntax-e x)))))
;; is stx an identifier whose name starts with a colon?
(define (keyword-syntax? stx)
(and (identifier? stx)
(let ([str (symbol->string (syntax-e stx))])
(eq? (string-ref str 0) #\:))))
| null | https://raw.githubusercontent.com/carl-eastlund/dracula/a937f4b40463779246e3544e4021c53744a33847/lang/syntax-checks.rkt | racket | Constants must be surrounded with asterisks.
is stx an identifier whose name starts with a colon? | #lang racket/base
(provide (all-defined-out))
(define (legal-constant-name? x)
(and (identifier? x)
(regexp-match-exact?
(regexp "[*].+[*]")
(symbol->string (syntax-e x)))))
(define (keyword-syntax? stx)
(and (identifier? stx)
(let ([str (symbol->string (syntax-e stx))])
(eq? (string-ref str 0) #\:))))
|
f7c482d0f8f0a1e51494c4e3afa1434a81c6b896f8fa47cafcb013423e49c4a2 | rbonichon/smtpp | obfuscator.ml | (*********************************************************************************)
Copyright ( c ) 2015 , INRIA , Universite de Nancy 2 and Universidade Federal
do Rio Grande do Norte .
(* *)
(* Permission to use, copy, modify, and distribute this software for any *)
(* purpose with or without fee is hereby granted, provided that the above *)
(* copyright notice and this permission notice appear in all copies. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
(* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF *)
(* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR *)
(* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES *)
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
(* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF *)
(* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. *)
(*********************************************************************************)
open Ast ;;
open Extended_ast ;;
open Theory ;;
open Format ;;
module HashedSymb = struct
type t = Ast.symbol ;;
let equal sy1 sy2 =
match sy1.symbol_desc, sy2.symbol_desc with
| SimpleSymbol s1, SimpleSymbol s2
| QuotedSymbol s1, QuotedSymbol s2 -> String.compare s1 s2 = 0
| _, _ -> false
;;
let hash sy = Hashtbl.hash sy.symbol_desc ;;
end
;;
module SymbHash = struct
include Hashtbl.Make(HashedSymb)
let h = create 97 ;;
let n = ref (-1) ;;
let base = ref "S" ;;
let init ?keep:(keep = []) () =
List.iter (fun s -> let sy = Ast_utils.mk_symbol s in add h sy sy) keep;
;;
let get_symbol symb =
try
let sy = find h symb in
sy
with
| Not_found ->
begin
incr n;
let newsymb =
{ symb with symbol_desc = SimpleSymbol (!base ^ (string_of_int !n)) }
in
add h symb newsymb;
newsymb
end
;;
let get_htable () = h ;;
end
;;
let obfuscate_index = function
| IdxNum n -> IdxNum n
| IdxSymbol symb -> IdxSymbol (SymbHash.get_symbol symb)
;;
let obfuscate_indexes = List.map obfuscate_index ;;
let obfuscate_id id =
let id_desc =
match id.id_desc with
| IdSymbol symb -> IdSymbol (SymbHash.get_symbol symb)
| IdUnderscore (symb, indexes) ->
IdUnderscore (SymbHash.get_symbol symb, obfuscate_indexes indexes)
in { id with id_desc }
;;
let obfuscate_qid qid =
let qual_identifier_desc =
match qid.qual_identifier_desc with
| QualIdentifierIdentifier id -> QualIdentifierIdentifier (obfuscate_id id)
| QualIdentifierAs (id, sort) -> QualIdentifierAs (obfuscate_id id, sort)
in { qid with qual_identifier_desc }
;;
let rec obfuscate_sexpr sexpr =
let sexpr_desc =
match sexpr.sexpr_desc with
| SexprConstant _ | SexprKeyword _ as sdesc -> sdesc
| SexprSymbol symb -> SexprSymbol (SymbHash.get_symbol symb)
| SexprParens sexprs -> SexprParens (obfuscate_sexprs sexprs)
in { sexpr with sexpr_desc }
and obfuscate_sexprs terms = List.map obfuscate_sexpr terms ;;
let obfuscate_attr_value avalue =
let attr_value_desc =
match avalue.attr_value_desc with
| AttrValSpecConstant c -> AttrValSpecConstant c
| AttrValSymbol symb -> AttrValSymbol (SymbHash.get_symbol symb)
| AttrValSexpr sexprs -> AttrValSexpr (obfuscate_sexprs sexprs)
in { avalue with attr_value_desc }
;;
let obfuscate_attribute attr =
let attribute_desc =
match attr.attribute_desc with
| AttrKeyword kwd -> AttrKeyword kwd
| AttrKeywordValue (kwd, attr_value) ->
AttrKeywordValue (kwd, obfuscate_attr_value attr_value)
in { attr with attribute_desc }
;;
let obfuscate_attributes = List.map obfuscate_attribute ;;
let obfuscate_sorted_var svar =
let sorted_var_desc =
match svar.sorted_var_desc with
| SortedVar (symb, sort) -> SortedVar (SymbHash.get_symbol symb, sort)
in { svar with sorted_var_desc }
;;
let obfuscate_sorted_vars = List.map obfuscate_sorted_var ;;
let rec obfuscate_term term =
let term_desc =
match term.term_desc with
| TermSpecConstant _ -> term.term_desc
| TermQualIdentifier qid -> TermQualIdentifier (obfuscate_qid qid)
| TermQualIdentifierTerms (qid, terms) ->
TermQualIdentifierTerms (obfuscate_qid qid, obfuscate_terms terms)
| TermLetTerm (vbindings, term) ->
TermLetTerm (obfuscate_vbindings vbindings, obfuscate_term term)
| TermForallTerm (sortedvars, term) ->
TermForallTerm (obfuscate_sorted_vars sortedvars, obfuscate_term term)
| TermExistsTerm (sortedvars, term) ->
TermExistsTerm (obfuscate_sorted_vars sortedvars, obfuscate_term term)
| TermAnnotatedTerm (term, attrs) ->
TermAnnotatedTerm (obfuscate_term term, obfuscate_attributes attrs)
in { term with term_desc }
and obfuscate_terms terms = List.map obfuscate_term terms
and obfuscate_vbinding vbinding =
let var_binding_desc =
match vbinding.var_binding_desc with
| VarBinding (symb, term) ->
VarBinding (SymbHash.get_symbol symb, obfuscate_term term)
in { vbinding with var_binding_desc }
and obfuscate_vbindings vbindings = List.map obfuscate_vbinding vbindings
;;
let obfuscate_fun_def fdef =
let fun_def_desc =
match fdef.fun_def_desc with
| FunDef (symb, par, vars, sort, t) ->
let s = SymbHash.get_symbol symb in
FunDef (s, par, obfuscate_sorted_vars vars, sort, obfuscate_term t)
in { fdef with fun_def_desc }
;;
let obfuscate_fun_rec_def frecdec =
let fun_rec_def_desc =
match frecdec.fun_rec_def_desc with
| FunRecDef (symb, par, vars, sort, t) ->
let s = SymbHash.get_symbol symb in
FunRecDef (s, par, obfuscate_sorted_vars vars, sort, obfuscate_term t)
in { frecdec with fun_rec_def_desc }
;;
let obfuscate_opt opt =
let smt_option_desc =
match opt.smt_option_desc with
| OptionAttribute attr -> OptionAttribute (obfuscate_attribute attr)
in { opt with smt_option_desc }
;;
let obfuscate_command cmd =
let command_desc =
match cmd.command_desc with
| CmdSetLogic _
| CmdReset
| CmdResetAssertions
| CmdCheckSat
| CmdEcho _
| CmdExit
| CmdGetAssertions
| CmdGetModel
| CmdGetAssignment
| CmdGetProof
| CmdGetUnsatCore
| CmdGetUnsatAssumptions
| CmdPop _
| CmdPush _
| CmdGetOption _
| CmdGetInfo _ as c -> c
| CmdAssert term ->
CmdAssert (obfuscate_term term)
| CmdDeclareConst (symb, sort) ->
let s = SymbHash.get_symbol symb in
CmdDeclareConst (s, sort)
| CmdDeclareFun (symb, par, dom, codom) ->
let s = SymbHash.get_symbol symb in
CmdDeclareFun (s, par, dom, codom)
| CmdDefineFun fdef ->
CmdDefineFun(obfuscate_fun_def fdef)
| CmdDefineFunRec frecdeflist ->
CmdDefineFunRec (List.map obfuscate_fun_rec_def frecdeflist)
| CmdCheckSatAssuming symbs ->
CmdCheckSatAssuming (List.map SymbHash.get_symbol symbs)
| CmdDeclareSort (symb, num) ->
(* Should we obfuscate declared sort symbols ? *)
CmdDeclareSort (symb, num)
| CmdDefineSort (symb, symbs, sort) ->
(* Should we obfuscate declared sort symbols ? *)
CmdDefineSort(symb, symbs, sort)
| CmdGetValue terms ->
CmdGetValue (obfuscate_terms terms)
| CmdMetaInfo attr ->
CmdMetaInfo (obfuscate_attribute attr)
| CmdSetInfo attr ->
CmdSetInfo (obfuscate_attribute attr)
| CmdSetOption opt -> CmdSetOption (obfuscate_opt opt)
in { cmd with command_desc }
;;
let obfuscate_commands cmds = List.map obfuscate_command cmds ;;
let apply (script : Extended_ast.ext_script) =
let theory_keeps = List.map fst script.ext_script_theory.theory_symbols in
let keep = (Config.get_keep_symbols ()) @ theory_keeps in
Init hash table with symbols that should be kept
let ext_script_commands = obfuscate_commands script.ext_script_commands in
let obfuscated_script = { script with ext_script_commands } in
printf "%a" Pp.pp_extended obfuscated_script;
if Config.get_debug () then
printf "@[<v 0>%a@ %a@]"
Utils.mk_header "Symbol table"
(fun fmt h ->
SymbHash.iter
(fun k v ->
Format.fprintf fmt "%a -> %a@ " Pp.pp_symbol k Pp.pp_symbol v)
h) (SymbHash.get_htable ());
;;
| null | https://raw.githubusercontent.com/rbonichon/smtpp/57eb74bccbb0f30293ee058ded4b01baa1067756/src/obfuscator.ml | ocaml | *******************************************************************************
Permission to use, copy, modify, and distribute this software for any
purpose with or without fee is hereby granted, provided that the above
copyright notice and this permission notice appear in all copies.
WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*******************************************************************************
Should we obfuscate declared sort symbols ?
Should we obfuscate declared sort symbols ? | Copyright ( c ) 2015 , INRIA , Universite de Nancy 2 and Universidade Federal
do Rio Grande do Norte .
THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
open Ast ;;
open Extended_ast ;;
open Theory ;;
open Format ;;
module HashedSymb = struct
type t = Ast.symbol ;;
let equal sy1 sy2 =
match sy1.symbol_desc, sy2.symbol_desc with
| SimpleSymbol s1, SimpleSymbol s2
| QuotedSymbol s1, QuotedSymbol s2 -> String.compare s1 s2 = 0
| _, _ -> false
;;
let hash sy = Hashtbl.hash sy.symbol_desc ;;
end
;;
module SymbHash = struct
include Hashtbl.Make(HashedSymb)
let h = create 97 ;;
let n = ref (-1) ;;
let base = ref "S" ;;
let init ?keep:(keep = []) () =
List.iter (fun s -> let sy = Ast_utils.mk_symbol s in add h sy sy) keep;
;;
let get_symbol symb =
try
let sy = find h symb in
sy
with
| Not_found ->
begin
incr n;
let newsymb =
{ symb with symbol_desc = SimpleSymbol (!base ^ (string_of_int !n)) }
in
add h symb newsymb;
newsymb
end
;;
let get_htable () = h ;;
end
;;
let obfuscate_index = function
| IdxNum n -> IdxNum n
| IdxSymbol symb -> IdxSymbol (SymbHash.get_symbol symb)
;;
let obfuscate_indexes = List.map obfuscate_index ;;
let obfuscate_id id =
let id_desc =
match id.id_desc with
| IdSymbol symb -> IdSymbol (SymbHash.get_symbol symb)
| IdUnderscore (symb, indexes) ->
IdUnderscore (SymbHash.get_symbol symb, obfuscate_indexes indexes)
in { id with id_desc }
;;
let obfuscate_qid qid =
let qual_identifier_desc =
match qid.qual_identifier_desc with
| QualIdentifierIdentifier id -> QualIdentifierIdentifier (obfuscate_id id)
| QualIdentifierAs (id, sort) -> QualIdentifierAs (obfuscate_id id, sort)
in { qid with qual_identifier_desc }
;;
let rec obfuscate_sexpr sexpr =
let sexpr_desc =
match sexpr.sexpr_desc with
| SexprConstant _ | SexprKeyword _ as sdesc -> sdesc
| SexprSymbol symb -> SexprSymbol (SymbHash.get_symbol symb)
| SexprParens sexprs -> SexprParens (obfuscate_sexprs sexprs)
in { sexpr with sexpr_desc }
and obfuscate_sexprs terms = List.map obfuscate_sexpr terms ;;
let obfuscate_attr_value avalue =
let attr_value_desc =
match avalue.attr_value_desc with
| AttrValSpecConstant c -> AttrValSpecConstant c
| AttrValSymbol symb -> AttrValSymbol (SymbHash.get_symbol symb)
| AttrValSexpr sexprs -> AttrValSexpr (obfuscate_sexprs sexprs)
in { avalue with attr_value_desc }
;;
let obfuscate_attribute attr =
let attribute_desc =
match attr.attribute_desc with
| AttrKeyword kwd -> AttrKeyword kwd
| AttrKeywordValue (kwd, attr_value) ->
AttrKeywordValue (kwd, obfuscate_attr_value attr_value)
in { attr with attribute_desc }
;;
let obfuscate_attributes = List.map obfuscate_attribute ;;
let obfuscate_sorted_var svar =
let sorted_var_desc =
match svar.sorted_var_desc with
| SortedVar (symb, sort) -> SortedVar (SymbHash.get_symbol symb, sort)
in { svar with sorted_var_desc }
;;
let obfuscate_sorted_vars = List.map obfuscate_sorted_var ;;
let rec obfuscate_term term =
let term_desc =
match term.term_desc with
| TermSpecConstant _ -> term.term_desc
| TermQualIdentifier qid -> TermQualIdentifier (obfuscate_qid qid)
| TermQualIdentifierTerms (qid, terms) ->
TermQualIdentifierTerms (obfuscate_qid qid, obfuscate_terms terms)
| TermLetTerm (vbindings, term) ->
TermLetTerm (obfuscate_vbindings vbindings, obfuscate_term term)
| TermForallTerm (sortedvars, term) ->
TermForallTerm (obfuscate_sorted_vars sortedvars, obfuscate_term term)
| TermExistsTerm (sortedvars, term) ->
TermExistsTerm (obfuscate_sorted_vars sortedvars, obfuscate_term term)
| TermAnnotatedTerm (term, attrs) ->
TermAnnotatedTerm (obfuscate_term term, obfuscate_attributes attrs)
in { term with term_desc }
and obfuscate_terms terms = List.map obfuscate_term terms
and obfuscate_vbinding vbinding =
let var_binding_desc =
match vbinding.var_binding_desc with
| VarBinding (symb, term) ->
VarBinding (SymbHash.get_symbol symb, obfuscate_term term)
in { vbinding with var_binding_desc }
and obfuscate_vbindings vbindings = List.map obfuscate_vbinding vbindings
;;
let obfuscate_fun_def fdef =
let fun_def_desc =
match fdef.fun_def_desc with
| FunDef (symb, par, vars, sort, t) ->
let s = SymbHash.get_symbol symb in
FunDef (s, par, obfuscate_sorted_vars vars, sort, obfuscate_term t)
in { fdef with fun_def_desc }
;;
let obfuscate_fun_rec_def frecdec =
let fun_rec_def_desc =
match frecdec.fun_rec_def_desc with
| FunRecDef (symb, par, vars, sort, t) ->
let s = SymbHash.get_symbol symb in
FunRecDef (s, par, obfuscate_sorted_vars vars, sort, obfuscate_term t)
in { frecdec with fun_rec_def_desc }
;;
let obfuscate_opt opt =
let smt_option_desc =
match opt.smt_option_desc with
| OptionAttribute attr -> OptionAttribute (obfuscate_attribute attr)
in { opt with smt_option_desc }
;;
let obfuscate_command cmd =
let command_desc =
match cmd.command_desc with
| CmdSetLogic _
| CmdReset
| CmdResetAssertions
| CmdCheckSat
| CmdEcho _
| CmdExit
| CmdGetAssertions
| CmdGetModel
| CmdGetAssignment
| CmdGetProof
| CmdGetUnsatCore
| CmdGetUnsatAssumptions
| CmdPop _
| CmdPush _
| CmdGetOption _
| CmdGetInfo _ as c -> c
| CmdAssert term ->
CmdAssert (obfuscate_term term)
| CmdDeclareConst (symb, sort) ->
let s = SymbHash.get_symbol symb in
CmdDeclareConst (s, sort)
| CmdDeclareFun (symb, par, dom, codom) ->
let s = SymbHash.get_symbol symb in
CmdDeclareFun (s, par, dom, codom)
| CmdDefineFun fdef ->
CmdDefineFun(obfuscate_fun_def fdef)
| CmdDefineFunRec frecdeflist ->
CmdDefineFunRec (List.map obfuscate_fun_rec_def frecdeflist)
| CmdCheckSatAssuming symbs ->
CmdCheckSatAssuming (List.map SymbHash.get_symbol symbs)
| CmdDeclareSort (symb, num) ->
CmdDeclareSort (symb, num)
| CmdDefineSort (symb, symbs, sort) ->
CmdDefineSort(symb, symbs, sort)
| CmdGetValue terms ->
CmdGetValue (obfuscate_terms terms)
| CmdMetaInfo attr ->
CmdMetaInfo (obfuscate_attribute attr)
| CmdSetInfo attr ->
CmdSetInfo (obfuscate_attribute attr)
| CmdSetOption opt -> CmdSetOption (obfuscate_opt opt)
in { cmd with command_desc }
;;
let obfuscate_commands cmds = List.map obfuscate_command cmds ;;
let apply (script : Extended_ast.ext_script) =
let theory_keeps = List.map fst script.ext_script_theory.theory_symbols in
let keep = (Config.get_keep_symbols ()) @ theory_keeps in
Init hash table with symbols that should be kept
let ext_script_commands = obfuscate_commands script.ext_script_commands in
let obfuscated_script = { script with ext_script_commands } in
printf "%a" Pp.pp_extended obfuscated_script;
if Config.get_debug () then
printf "@[<v 0>%a@ %a@]"
Utils.mk_header "Symbol table"
(fun fmt h ->
SymbHash.iter
(fun k v ->
Format.fprintf fmt "%a -> %a@ " Pp.pp_symbol k Pp.pp_symbol v)
h) (SymbHash.get_htable ());
;;
|
0b055bec8386ecc584b067ad133030353c49fedae0601ba28cf0ff2db055cd74 | remyzorg/pendulum | test_typing.ml | [@@@warning "-27"]
[@@@warning "-32"]
[@@@warning "-33"]
open Pendulum
open Program
open Signal
let p =
let%sync react_obj a =
loop begin
!(Format.printf "%d\n" !!a)
; pause
end in react_obj#create 0
let () = p#a 10; ignore @@ p#react
(* 'a -> < react : Pendulum.Program.state; x : 'a -> unit > *)
let%sync p2 = loop pause
let _ : < react : unit; status : state> = p2#create
let _ : < react : unit; status : state> = p2#create_run
let%sync p2' s = loop begin
run p2
; pause
end
let _ : 'a -> < react : unit; status : state; s : 'a -> unit > = p2'#create
let _ : ('a, 'b) signal -> < react : unit; status : state; s : 'a -> unit > = p2'#create_run
let%sync p3 s = loop begin run p2' !("test" ^ !!s); pause end
let _ : string -> < react : unit; status : state; s : string -> unit > = p2'#create
let _ : (string, string) signal -> < react : unit; status : state; s : string -> unit > = p2'#create_run
let%sync p_out = input s; output o;
loop begin
emit o
; pause
end
let _ : 'a -> unit * ('b -> unit) -> < react : unit; status : state; s : 'a -> unit > = p_out#create
let _ :
('a, 'c) signal ->
(unit, unit) signal * (unit -> unit) ->
< react : unit; status : state; s : 'a -> unit >
= p_out#create_run
let%sync mouse =
input i;
output write (^);
loop begin
present i
(emit write "")
; pause
end
| null | https://raw.githubusercontent.com/remyzorg/pendulum/a532681c6f99d77129e31fbe27cc56c396a7c63c/tests/ppx/test_typing.ml | ocaml | 'a -> < react : Pendulum.Program.state; x : 'a -> unit > | [@@@warning "-27"]
[@@@warning "-32"]
[@@@warning "-33"]
open Pendulum
open Program
open Signal
let p =
let%sync react_obj a =
loop begin
!(Format.printf "%d\n" !!a)
; pause
end in react_obj#create 0
let () = p#a 10; ignore @@ p#react
let%sync p2 = loop pause
let _ : < react : unit; status : state> = p2#create
let _ : < react : unit; status : state> = p2#create_run
let%sync p2' s = loop begin
run p2
; pause
end
let _ : 'a -> < react : unit; status : state; s : 'a -> unit > = p2'#create
let _ : ('a, 'b) signal -> < react : unit; status : state; s : 'a -> unit > = p2'#create_run
let%sync p3 s = loop begin run p2' !("test" ^ !!s); pause end
let _ : string -> < react : unit; status : state; s : string -> unit > = p2'#create
let _ : (string, string) signal -> < react : unit; status : state; s : string -> unit > = p2'#create_run
let%sync p_out = input s; output o;
loop begin
emit o
; pause
end
let _ : 'a -> unit * ('b -> unit) -> < react : unit; status : state; s : 'a -> unit > = p_out#create
let _ :
('a, 'c) signal ->
(unit, unit) signal * (unit -> unit) ->
< react : unit; status : state; s : 'a -> unit >
= p_out#create_run
let%sync mouse =
input i;
output write (^);
loop begin
present i
(emit write "")
; pause
end
|
531aec5847ed63298b3d91a84a580e949e5402c6f3cb27d0f0b5e36025feb0e1 | dgiot/dgiot | emqx_connection.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2018 - 2021 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
%% MQTT/TCP|TLS Connection
-module(emqx_connection).
-include("emqx.hrl").
-include("emqx_mqtt.hrl").
-include("logger.hrl").
-include("types.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-logger_header("[MQTT]").
-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
-endif.
-elvis([{elvis_style, invalid_dynamic_call, #{ignore => [emqx_connection]}}]).
%% API
-export([ start_link/3
, stop/1
]).
-export([ info/1
, stats/1
]).
-export([ async_set_keepalive/3
, async_set_keepalive/4
, async_set_socket_options/2
]).
-export([ call/2
, call/3
, cast/2
]).
%% Callback
-export([init/4]).
Sys callbacks
-export([ system_continue/3
, system_terminate/4
, system_code_change/4
, system_get_state/1
]).
%% Internal callback
-export([wakeup_from_hib/2, recvloop/2, get_state/1]).
%% Export for CT
-export([set_field/3]).
-import(emqx_misc,
[ maybe_apply/2
, start_timer/2
]).
-record(state, {
TCP / TLS Transport
transport :: esockd:transport(),
%% TCP/TLS Socket
socket :: esockd:socket(),
%% Peername of the connection
peername :: emqx_types:peername(),
of the connection
sockname :: emqx_types:peername(),
%% Sock State
sockstate :: emqx_types:sockstate(),
%% The {active, N} option
active_n :: pos_integer(),
Limiter
limiter :: maybe(emqx_limiter:limiter()),
%% Limit Timer
limit_timer :: maybe(reference()),
Parse State
parse_state :: emqx_frame:parse_state(),
Serialize options
serialize :: emqx_frame:serialize_opts(),
Channel State
channel :: emqx_channel:channel(),
GC State
gc_state :: maybe(emqx_gc:gc_state()),
%% Stats Timer
stats_timer :: disabled | maybe(reference()),
%% Idle Timeout
idle_timeout :: integer(),
%% Idle Timer
idle_timer :: maybe(reference())
}).
-type(state() :: #state{}).
-define(ACTIVE_N, 100).
-define(INFO_KEYS, [socktype, peername, sockname, sockstate, active_n]).
-define(CONN_STATS, [recv_pkt, recv_msg, send_pkt, send_msg]).
-define(SOCK_STATS, [recv_oct, recv_cnt, send_oct, send_cnt, send_pend]).
-define(ENABLED(X), (X =/= undefined)).
-define(ALARM_TCP_CONGEST(Channel),
list_to_binary(io_lib:format("mqtt_conn/congested/~s/~s",
[emqx_channel:info(clientid, Channel),
emqx_channel:info(username, Channel)]))).
-define(ALARM_CONN_INFO_KEYS, [
socktype, sockname, peername,
clientid, username, proto_name, proto_ver, connected_at
]).
-define(ALARM_SOCK_STATS_KEYS, [send_pend, recv_cnt, recv_oct, send_cnt, send_oct]).
-define(ALARM_SOCK_OPTS_KEYS, [high_watermark, high_msgq_watermark, sndbuf, recbuf, buffer]).
-dialyzer({no_match, [info/2]}).
-dialyzer({nowarn_function, [ init/4
, init_state/3
, run_loop/2
, system_terminate/4
, system_code_change/4
]}).
-spec(start_link(esockd:transport(), esockd:socket(), proplists:proplist())
-> {ok, pid()}).
start_link(Transport, Socket, Options) ->
Args = [self(), Transport, Socket, Options],
CPid = proc_lib:spawn_link(?MODULE, init, Args),
{ok, CPid}.
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
%% @doc Get infos of the connection/channel.
-spec(info(pid()|state()) -> emqx_types:infos()).
info(CPid) when is_pid(CPid) ->
call(CPid, info);
info(State = #state{channel = Channel}) ->
ChanInfo = emqx_channel:info(Channel),
SockInfo = maps:from_list(
info(?INFO_KEYS, State)),
ChanInfo#{sockinfo => SockInfo}.
info(Keys, State) when is_list(Keys) ->
[{Key, info(Key, State)} || Key <- Keys];
info(socktype, #state{transport = Transport, socket = Socket}) ->
Transport:type(Socket);
info(peername, #state{peername = Peername}) ->
Peername;
info(sockname, #state{sockname = Sockname}) ->
Sockname;
info(sockstate, #state{sockstate = SockSt}) ->
SockSt;
info(active_n, #state{active_n = ActiveN}) ->
ActiveN;
info(stats_timer, #state{stats_timer = StatsTimer}) ->
StatsTimer;
info(limit_timer, #state{limit_timer = LimitTimer}) ->
LimitTimer;
info(limiter, #state{limiter = Limiter}) ->
maybe_apply(fun emqx_limiter:info/1, Limiter).
%% @doc Get stats of the connection/channel.
-spec(stats(pid()|state()) -> emqx_types:stats()).
stats(CPid) when is_pid(CPid) ->
call(CPid, stats);
stats(#state{transport = Transport,
socket = Socket,
channel = Channel}) ->
SockStats = case Transport:getstat(Socket, ?SOCK_STATS) of
{ok, Ss} -> Ss;
{error, _} -> []
end,
ConnStats = emqx_pd:get_counters(?CONN_STATS),
ChanStats = emqx_channel:stats(Channel),
ProcStats = emqx_misc:proc_stats(),
lists:append([SockStats, ConnStats, ChanStats, ProcStats]).
%% @doc Set TCP keepalive socket options to override system defaults.
Idle : The number of seconds a connection needs to be idle before
TCP begins sending out keep - alive probes ( Linux default 7200 ) .
%% Interval: The number of seconds between TCP keep-alive probes
( Linux default 75 ) .
%% Probes: The maximum number of TCP keep-alive probes to send before
%% giving up and killing the connection if no response is
obtained from the other end ( Linux default 9 ) .
%%
%% NOTE: This API sets TCP socket options, which has nothing to do with
%% the MQTT layer's keepalive (PINGREQ and PINGRESP).
async_set_keepalive(Idle, Interval, Probes) ->
async_set_keepalive(self(), Idle, Interval, Probes).
async_set_keepalive(Pid, Idle, Interval, Probes) ->
Options = [ {keepalive, true}
, {raw, 6, 4, <<Idle:32/native>>}
, {raw, 6, 5, <<Interval:32/native>>}
, {raw, 6, 6, <<Probes:32/native>>}
],
async_set_socket_options(Pid, Options).
%% @doc Set custom socket options.
%% This API is made async because the call might be originated from
%% a hookpoint callback (otherwise deadlock).
%% If failed to set, the error message is logged.
async_set_socket_options(Pid, Options) ->
cast(Pid, {async_set_socket_options, Options}).
cast(Pid, Req) ->
gen_server:cast(Pid, Req).
call(Pid, Req) ->
call(Pid, Req, infinity).
call(Pid, Req, Timeout) ->
gen_server:call(Pid, Req, Timeout).
stop(Pid) ->
gen_server:stop(Pid).
%%--------------------------------------------------------------------
%% callbacks
%%--------------------------------------------------------------------
init(Parent, Transport, RawSocket, Options) ->
case Transport:wait(RawSocket) of
{ok, Socket} ->
run_loop(Parent, init_state(Transport, Socket, Options));
{error, Reason} ->
ok = Transport:fast_close(RawSocket),
exit_on_sock_error(Reason)
end.
init_state(Transport, Socket, Options) ->
{ok, Peername} = Transport:ensure_ok_or_exit(peername, [Socket]),
{ok, Sockname} = Transport:ensure_ok_or_exit(sockname, [Socket]),
Peercert = Transport:ensure_ok_or_exit(peercert, [Socket]),
ConnInfo = #{socktype => Transport:type(Socket),
peername => Peername,
sockname => Sockname,
peercert => Peercert,
conn_mod => ?MODULE
},
Zone = proplists:get_value(zone, Options),
ActiveN = proplists:get_value(active_n, Options, ?ACTIVE_N),
PubLimit = emqx_zone:publish_limit(Zone),
BytesIn = proplists:get_value(rate_limit, Options),
RateLimit = emqx_zone:ratelimit(Zone),
Limiter = emqx_limiter:init(Zone, PubLimit, BytesIn, RateLimit),
FrameOpts = emqx_zone:mqtt_frame_options(Zone),
ParseState = emqx_frame:initial_parse_state(FrameOpts),
Serialize = emqx_frame:serialize_opts(),
Channel = emqx_channel:init(ConnInfo, Options),
GcState = emqx_zone:init_gc_state(Zone),
StatsTimer = emqx_zone:stats_timer(Zone),
IdleTimeout = emqx_zone:idle_timeout(Zone),
IdleTimer = start_timer(IdleTimeout, idle_timeout),
#state{transport = Transport,
socket = Socket,
peername = Peername,
sockname = Sockname,
sockstate = idle,
active_n = ActiveN,
limiter = Limiter,
parse_state = ParseState,
serialize = Serialize,
channel = Channel,
gc_state = GcState,
stats_timer = StatsTimer,
idle_timeout = IdleTimeout,
idle_timer = IdleTimer
}.
run_loop(Parent, State = #state{transport = Transport,
socket = Socket,
peername = Peername,
channel = Channel}) ->
emqx_logger:set_metadata_peername(esockd:format(Peername)),
emqx_misc:tune_heap_size(emqx_zone:oom_policy(
emqx_channel:info(zone, Channel))),
case activate_socket(State) of
{ok, NState} -> hibernate(Parent, NState);
{error, Reason} ->
ok = Transport:fast_close(Socket),
exit_on_sock_error(Reason)
end.
-spec exit_on_sock_error(any()) -> no_return().
exit_on_sock_error(Reason) when Reason =:= einval;
Reason =:= enotconn;
Reason =:= closed ->
erlang:exit(normal);
exit_on_sock_error(timeout) ->
erlang:exit({shutdown, ssl_upgrade_timeout});
exit_on_sock_error(Reason) ->
erlang:exit({shutdown, Reason}).
%%--------------------------------------------------------------------
Recv Loop
recvloop(Parent, State = #state{idle_timeout = IdleTimeout}) ->
receive
Msg ->
handle_recv(Msg, Parent, State)
after
IdleTimeout + 100 ->
hibernate(Parent, cancel_stats_timer(State))
end.
handle_recv({system, From, Request}, Parent, State) ->
sys:handle_system_msg(Request, From, Parent, ?MODULE, [], State);
handle_recv({'EXIT', Parent, Reason}, Parent, State) ->
FIXME : it 's not trapping exit , should never receive an EXIT
terminate(Reason, State);
handle_recv(Msg, Parent, State = #state{idle_timeout = IdleTimeout}) ->
case process_msg([Msg], ensure_stats_timer(IdleTimeout, State)) of
{ok, NewState} ->
?MODULE:recvloop(Parent, NewState);
{stop, Reason, NewSate} ->
terminate(Reason, NewSate)
end.
hibernate(Parent, State) ->
proc_lib:hibernate(?MODULE, wakeup_from_hib, [Parent, State]).
%% Maybe do something here later.
wakeup_from_hib(Parent, State) ->
?MODULE:recvloop(Parent, State).
%%--------------------------------------------------------------------
%% Ensure/cancel stats timer
-compile({inline, [ensure_stats_timer/2]}).
ensure_stats_timer(Timeout, State = #state{stats_timer = undefined}) ->
State#state{stats_timer = start_timer(Timeout, emit_stats)};
ensure_stats_timer(_Timeout, State) -> State.
-compile({inline, [cancel_stats_timer/1]}).
cancel_stats_timer(State = #state{stats_timer = TRef}) when is_reference(TRef) ->
?tp(debug, cancel_stats_timer, #{}),
ok = emqx_misc:cancel_timer(TRef),
State#state{stats_timer = undefined};
cancel_stats_timer(State) -> State.
%%--------------------------------------------------------------------
Process next Msg
process_msg([], State) ->
{ok, State};
process_msg([Msg|More], State) ->
try
case handle_msg(Msg, State) of
ok ->
process_msg(More, State);
{ok, NState} ->
process_msg(More, NState);
{ok, Msgs, NState} ->
process_msg(append_msg(More, Msgs), NState);
{stop, Reason, NState} ->
{stop, Reason, NState}
end
catch
exit : normal ->
{stop, normal, State};
exit : shutdown ->
{stop, shutdown, State};
exit : {shutdown, _} = Shutdown ->
{stop, Shutdown, State};
Exception : Context : Stack ->
{stop, #{exception => Exception,
context => Context,
stacktrace => Stack}, State}
end.
-compile({inline, [append_msg/2]}).
append_msg([], Msgs) when is_list(Msgs) ->
Msgs;
append_msg([], Msg) -> [Msg];
append_msg(Q, Msgs) when is_list(Msgs) ->
lists:append(Q, Msgs);
append_msg(Q, Msg) ->
lists:append(Q, [Msg]).
%%--------------------------------------------------------------------
%% Handle a Msg

%% Dispatch one message taken from the connection process mailbox.
%% Returns ok | {ok, NState} | {ok, Msgs, NState} | {stop, Reason, NState},
%% as consumed by process_msg/2.

%% Synchronous request made via gen_server:call/2,3.
handle_msg({'$gen_call', From, Req}, State) ->
    case handle_call(From, Req, State) of
        {reply, Reply, NState} ->
            gen_server:reply(From, Reply),
            {ok, NState};
        {stop, Reason, Reply, NState} ->
            %% Reply before stopping so the caller is not left hanging.
            gen_server:reply(From, Reply),
            stop(Reason, NState)
    end;
%% Asynchronous request made via gen_server:cast/2.
handle_msg({'$gen_cast', Req}, State) ->
    NewState = handle_cast(Req, State),
    {ok, NewState};
%% Raw bytes delivered by the active TCP/TLS socket.
handle_msg({Inet, _Sock, Data}, State) when Inet == tcp; Inet == ssl ->
    ?LOG(debug, "RECV ~0p", [Data]),
    Oct = iolist_size(Data),
    inc_counter(incoming_bytes, Oct),
    ok = emqx_metrics:inc('bytes.received', Oct),
    parse_incoming(Data, State);
%% First CONNECT packet: cancel the idle timer and fix the serializer
%% options derived from the CONNECT (e.g. protocol version).
handle_msg({incoming, Packet = ?CONNECT_PACKET(ConnPkt)},
           State = #state{idle_timer = IdleTimer}) ->
    ok = emqx_misc:cancel_timer(IdleTimer),
    Serialize = emqx_frame:serialize_opts(ConnPkt),
    NState = State#state{serialize = Serialize,
                         idle_timer = undefined
                        },
    handle_incoming(Packet, NState);
handle_msg({incoming, Packet}, State) ->
    handle_incoming(Packet, State);
handle_msg({outgoing, Packets}, State) ->
    handle_outgoing(Packets, State);
%% Socket error reported by the transport.
handle_msg({Error, _Sock, Reason}, State)
  when Error == tcp_error; Error == ssl_error ->
    handle_info({sock_error, Reason}, State);
%% Socket closed by the peer.
handle_msg({Closed, _Sock}, State)
  when Closed == tcp_closed; Closed == ssl_closed ->
    handle_info({sock_closed, Closed}, close_socket(State));
%% The {active, N} budget is used up and the socket turned passive:
%% account the incoming traffic, apply rate limiting / GC / OOM checks,
%% then try to re-activate the socket.
handle_msg({Passive, _Sock}, State)
  when Passive == tcp_passive; Passive == ssl_passive ->
    %% In Stats
    Pubs = emqx_pd:reset_counter(incoming_pubs),
    Bytes = emqx_pd:reset_counter(incoming_bytes),
    InStats = #{cnt => Pubs, oct => Bytes},
    %% Ensure Rate Limit
    NState = ensure_rate_limit(InStats, State),
    %% Run GC and Check OOM
    NState1 = check_oom(run_gc(InStats, NState)),
    handle_info(activate_socket, NState1);
%% A message delivery: drain up to ActiveN more queued deliveries and
%% hand the whole batch to the channel in one call.
handle_msg(Deliver = {deliver, _Topic, _Msg},
           #state{active_n = ActiveN} = State) ->
    Delivers = [Deliver|emqx_misc:drain_deliver(ActiveN)],
    with_channel(handle_deliver, [Delivers], State);
%% Something sent
handle_msg({inet_reply, _Sock, ok}, State = #state{active_n = ActiveN}) ->
    case emqx_pd:get_counter(outgoing_pubs) > ActiveN of
        true ->
            %% Enough outgoing publishes accumulated: account them and
            %% run GC / OOM checks on the outgoing side.
            Pubs = emqx_pd:reset_counter(outgoing_pubs),
            Bytes = emqx_pd:reset_counter(outgoing_bytes),
            OutStats = #{cnt => Pubs, oct => Bytes},
            {ok, check_oom(run_gc(OutStats, State))};
        false -> ok
    end;
%% An async send failed; treat it like a socket error.
handle_msg({inet_reply, _Sock, {error, Reason}}, State) ->
    handle_info({sock_error, Reason}, State);
handle_msg({connack, ConnAck}, State) ->
    handle_outgoing(ConnAck, State);
handle_msg({close, Reason}, State) ->
    ?LOG(debug, "Force to close the socket due to ~p", [Reason]),
    handle_info({sock_closed, Reason}, close_socket(State));
%% Channel lifecycle events: keep the connection registry up to date.
handle_msg({event, connected}, State = #state{channel = Channel}) ->
    ClientId = emqx_channel:info(clientid, Channel),
    emqx_cm:insert_channel_info(ClientId, info(State), stats(State));
handle_msg({event, disconnected}, State = #state{channel = Channel}) ->
    ClientId = emqx_channel:info(clientid, Channel),
    emqx_cm:set_chan_info(ClientId, info(State)),
    emqx_cm:connection_closed(ClientId),
    {ok, State};
handle_msg({event, _Other}, State = #state{channel = Channel}) ->
    ClientId = emqx_channel:info(clientid, Channel),
    emqx_cm:set_chan_info(ClientId, info(State)),
    emqx_cm:set_chan_stats(ClientId, stats(State)),
    {ok, State};
handle_msg({timeout, TRef, TMsg}, State) ->
    handle_timeout(TRef, TMsg, State);
handle_msg(Shutdown = {shutdown, _Reason}, State) ->
    stop(Shutdown, State);
%% Fallback: everything else goes to the channel via handle_info.
handle_msg(Msg, State) ->
    handle_info(Msg, State).
%%--------------------------------------------------------------------
%% Terminate

%% Tear the connection down: mark the channel disconnected, clear any
%% congestion alarms, run the channel's terminate hooks and close the
%% socket. Cleanup exceptions are traced instead of masking the original
%% termination reason.
-spec terminate(any(), state()) -> no_return().
terminate(Reason, State = #state{channel = Channel, transport = Transport,
                                 socket = Socket}) ->
    try
        Channel1 = emqx_channel:set_conn_state(disconnected, Channel),
        emqx_congestion:cancel_alarms(Socket, Transport, Channel1),
        emqx_channel:terminate(Reason, Channel1),
        close_socket_ok(State)
    catch
        E : C : S ->
            ?tp(warning, unclean_terminate, #{exception => E, context => C, stacktrace => S})
    end,
    ?tp(info, terminate, #{reason => Reason}),
    %% Exit with the original reason (or re-raise the original exception).
    maybe_raise_excption(Reason).
%% close socket, discard new state, always return ok.
%% Used from terminate/2 where the state is about to be dropped anyway.
close_socket_ok(State) ->
    _ = close_socket(State),
    ok.
%% tell truth about the original exception
%% If terminate/2 was reached because process_msg/2 caught an exception,
%% re-raise it with its original class and stacktrace; otherwise exit
%% with the plain reason.
%% NOTE(review): the name is misspelled ("excption") but is kept as-is
%% because terminate/2 calls it by this exact name.
maybe_raise_excption(#{exception := Exception,
                       context := Context,
                       stacktrace := Stacktrace
                      }) ->
    erlang:raise(Exception, Context, Stacktrace);
maybe_raise_excption(Reason) ->
    exit(Reason).
%%--------------------------------------------------------------------
%% Sys callbacks

%% Resume the receive loop after a sys suspend/resume cycle.
system_continue(Parent, _Debug, State) ->
    ?MODULE:recvloop(Parent, State).

%% Terminate on request from the system (e.g. supervisor shutdown).
system_terminate(Reason, _Parent, _Debug, State) ->
    terminate(Reason, State).

%% Hot code upgrade: the state record is carried over unchanged.
system_code_change(State, _Mod, _OldVsn, _Extra) ->
    {ok, State}.

%% Expose the internal state to sys:get_state/1.
system_get_state(State) -> {ok, State}.
%%--------------------------------------------------------------------
%% Handle call

%% Handle a synchronous request. Note the unusual argument order
%% handle_call(From, Req, State), as invoked from handle_msg/2.
handle_call(_From, info, State) ->
    {reply, info(State), State};
handle_call(_From, stats, State) ->
    {reply, stats(State), State};
%% Replace the rate limiter at runtime.
handle_call(_From, {ratelimit, Policy}, State = #state{channel = Channel}) ->
    Zone = emqx_channel:info(zone, Channel),
    Limiter = emqx_limiter:init(Zone, Policy),
    {reply, ok, State#state{limiter = Limiter}};
%% All other requests are delegated to the channel.
handle_call(_From, Req, State = #state{channel = Channel}) ->
    case emqx_channel:handle_call(Req, Channel) of
        {reply, Reply, NChannel} ->
            {reply, Reply, State#state{channel = NChannel}};
        {shutdown, Reason, Reply, NChannel} ->
            shutdown(Reason, Reply, State#state{channel = NChannel});
        {shutdown, Reason, Reply, OutPacket, NChannel} ->
            %% Flush the final packet (e.g. DISCONNECT) before stopping.
            NState = State#state{channel = NChannel},
            ok = handle_outgoing(OutPacket, NState),
            shutdown(Reason, Reply, NState)
    end.
%%--------------------------------------------------------------------
%% Handle timeout

%% The idle timer fired before any CONNECT packet arrived: give up.
handle_timeout(_TRef, idle_timeout, State) ->
    shutdown(idle_timeout, State);
%% Rate-limit pause expired: mark the socket idle and re-activate it.
handle_timeout(_TRef, limit_timeout, State) ->
    NState = State#state{sockstate = idle,
                         limit_timer = undefined
                        },
    handle_info(activate_socket, NState);
%% Periodic stats emission; also checks for outbound congestion.
handle_timeout(_TRef, emit_stats, State = #state{channel = Channel, transport = Transport,
                                                 socket = Socket}) ->
    emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel),
    ClientId = emqx_channel:info(clientid, Channel),
    emqx_cm:set_chan_stats(ClientId, stats(State)),
    {ok, State#state{stats_timer = undefined}};
%% Keepalive timer: sample the socket's received-byte counter and
%% re-dispatch as {keepalive, RecvOct} so the channel can judge liveness.
handle_timeout(TRef, keepalive, State = #state{transport = Transport,
                                               socket = Socket,
                                               channel = Channel})->
    case emqx_channel:info(conn_state, Channel) of
        %% Skip the check while the channel is disconnected.
        disconnected -> {ok, State};
        _ ->
            case Transport:getstat(Socket, [recv_oct]) of
                {ok, [{recv_oct, RecvOct}]} ->
                    handle_timeout(TRef, {keepalive, RecvOct}, State);
                {error, Reason} ->
                    handle_info({sock_error, Reason}, State)
            end
    end;
%% All other timers belong to the channel.
handle_timeout(TRef, Msg, State) ->
    with_channel(handle_timeout, [TRef, Msg], State).
%%--------------------------------------------------------------------
%% Parse incoming data

-compile({inline, [parse_incoming/2]}).
%% Parse a chunk of raw bytes into MQTT packets; any partial frame is
%% kept in the parse state for the next chunk.
parse_incoming(Data, State) ->
    {Packets, NState} = parse_incoming(Data, [], State),
    {ok, next_incoming_msgs(Packets), NState}.

%% Accumulates packets in reverse order; next_incoming_msgs/1 restores
%% the arrival order.
parse_incoming(<<>>, Packets, State) ->
    {Packets, State};
parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) ->
    try emqx_frame:parse(Data, ParseState) of
        {more, NParseState} ->
            {Packets, State#state{parse_state = NParseState}};
        {ok, Packet, Rest, NParseState} ->
            NState = State#state{parse_state = NParseState},
            parse_incoming(Rest, [Packet|Packets], NState)
    catch
        error:Reason:Stk ->
            ?LOG(error, "~nParse failed for ~0p~n~0p~nFrame data:~0p",
                 [Reason, Stk, Data]),
            %% Surface the failure as a frame_error pseudo-packet so the
            %% channel can close the connection with a proper reason code.
            {[{frame_error, Reason}|Packets], State}
    end.
-compile({inline, [next_incoming_msgs/1]}).
%% Wrap parsed packets as {incoming, Packet} messages. The accumulator
%% from parse_incoming/3 holds packets in reverse arrival order; a
%% single packet skips the list allocation entirely.
next_incoming_msgs([Packet]) ->
    {incoming, Packet};
next_incoming_msgs(Packets) ->
    %% Folding left while prepending restores arrival order.
    lists:foldl(fun(Packet, Acc) -> [{incoming, Packet} | Acc] end, [], Packets).
%%--------------------------------------------------------------------
%% Handle incoming packet

%% A well-formed MQTT packet: count it, log it, hand it to the channel.
handle_incoming(Packet, State) when is_record(Packet, mqtt_packet) ->
    ok = inc_incoming_stats(Packet),
    ?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]),
    with_channel(handle_in, [Packet], State);
%% Anything else is a {frame_error, Reason} tuple from the parser.
handle_incoming(FrameError, State) ->
    with_channel(handle_in, [FrameError], State).
%%--------------------------------------------------------------------
%% With Channel

%% Apply emqx_channel:Fun(Args..., Channel) and translate the channel's
%% result into this process's {ok, ...} / {stop, ...} convention,
%% threading the updated channel back into the state.
with_channel(Fun, Args, State = #state{channel = Channel}) ->
    case erlang:apply(emqx_channel, Fun, Args ++ [Channel]) of
        ok -> {ok, State};
        {ok, NChannel} ->
            {ok, State#state{channel = NChannel}};
        {ok, Replies, NChannel} ->
            {ok, next_msgs(Replies), State#state{channel = NChannel}};
        {shutdown, Reason, NChannel} ->
            shutdown(Reason, State#state{channel = NChannel});
        {shutdown, Reason, Packet, NChannel} ->
            %% Send the final packet before shutting down.
            NState = State#state{channel = NChannel},
            ok = handle_outgoing(Packet, NState),
            shutdown(Reason, NState)
    end.
%%--------------------------------------------------------------------
%% Handle outgoing packets

%% Serialize one packet, or a batch of packets, and pass the resulting
%% iodata to send/2. Outgoing stats are bumped per packet during
%% serialization.
handle_outgoing(Packets, State) when is_list(Packets) ->
    SerializeFun = serialize_and_inc_stats_fun(State),
    send([SerializeFun(Packet) || Packet <- Packets], State);
handle_outgoing(Packet, State) ->
    SerializeFun = serialize_and_inc_stats_fun(State),
    send(SerializeFun(Packet), State).
%% Return a closure that serializes one packet with the connection's
%% negotiated serialize options and updates outgoing statistics.
%% A packet that exceeds the negotiated maximum serializes to <<>>;
%% it is dropped and counted under 'delivery.dropped.too_large'.
serialize_and_inc_stats_fun(#state{serialize = Serialize}) ->
    fun(Packet) ->
        case emqx_frame:serialize_pkt(Packet, Serialize) of
            <<>> -> ?LOG(warning, "~s is discarded due to the frame is too large!",
                         [emqx_packet:format(Packet)]),
                    ok = emqx_metrics:inc('delivery.dropped.too_large'),
                    ok = emqx_metrics:inc('delivery.dropped'),
                    <<>>;
            Data -> ?LOG(debug, "SEND ~s", [emqx_packet:format(Packet)]),
                    ok = inc_outgoing_stats(Packet),
                    Data
        end
    end.
%%--------------------------------------------------------------------
%% Send data

-spec(send(iodata(), state()) -> ok).
%% Ship serialized bytes to the socket asynchronously. A send error is
%% re-injected into our own mailbox as an inet_reply message so it is
%% handled by the normal message loop instead of here.
send(IoData, #state{transport = Transport, socket = Socket, channel = Channel}) ->
    Oct = iolist_size(IoData),
    ok = emqx_metrics:inc('bytes.sent', Oct),
    inc_counter(outgoing_bytes, Oct),
    emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel),
    case Transport:async_send(Socket, IoData, []) of
        ok -> ok;
        Error = {error, _Reason} ->
            %% Send an inet_reply to postpone handling the error
            self() ! {inet_reply, Socket, Error},
            ok
    end.
%%--------------------------------------------------------------------
%% Handle Info

%% Re-activate the socket; emit a sockstate event only when the state
%% actually changed.
handle_info(activate_socket, State = #state{sockstate = OldSst}) ->
    case activate_socket(State) of
        {ok, NState = #state{sockstate = NewSst}} ->
            case OldSst =/= NewSst of
                true -> {ok, {event, NewSst}, NState};
                false -> {ok, NState}
            end;
        {error, Reason} ->
            handle_info({sock_error, Reason}, State)
    end;
handle_info({sock_error, Reason}, State) ->
    %% closed/einval are routine disconnects; don't log them as warnings.
    case Reason =/= closed andalso Reason =/= einval of
        true -> ?LOG(warning, "socket_error: ~p", [Reason]);
        false -> ok
    end,
    handle_info({sock_closed, Reason}, close_socket(State));
%% Everything else is delegated to the channel.
handle_info(Info, State) ->
    with_channel(handle_info, [Info], State).
%%--------------------------------------------------------------------
%% Handle cast

%% Handle an asynchronous request sent via cast/2.
%% Returns the (possibly unchanged) connection state.
handle_cast({async_set_socket_options, Opts},
            State = #state{transport = Transport,
                           socket = Socket
                          }) ->
    case Transport:setopts(Socket, Opts) of
        ok -> ?tp(info, "custom_socket_options_successfully", #{opts => Opts});
        %% NOTE(review): fixed trace-message typo ("optionn" -> "option");
        %% confirm no test matches on the old misspelled kind.
        Err -> ?tp(error, "failed_to_set_custom_socket_option", #{reason => Err})
    end,
    State;
handle_cast(Req, State) ->
    ?tp(error, "received_unknown_cast", #{cast => Req}),
    State.
%%--------------------------------------------------------------------
%% Ensure rate limit

%% Check the incoming publish/byte counters against the limiter. When a
%% limit is hit, mark the socket blocked and start a timer that will
%% unblock it (see handle_timeout limit_timeout).
ensure_rate_limit(Stats, State = #state{limiter = Limiter}) ->
    case ?ENABLED(Limiter) andalso emqx_limiter:check(Stats, Limiter) of
        false -> State;
        {ok, Limiter1} ->
            State#state{limiter = Limiter1};
        {pause, Time, Limiter1} ->
            ?LOG(warning, "Pause ~pms due to rate limit", [Time]),
            TRef = start_timer(Time, limit_timeout),
            State#state{sockstate = blocked,
                        limiter = Limiter1,
                        limit_timer = TRef
                       }
    end.
%%--------------------------------------------------------------------
%% Run GC and Check OOM

%% Feed message/byte stats to the GC state machine, which may trigger a
%% garbage collection of this process.
run_gc(Stats, State = #state{gc_state = GcSt}) ->
    case ?ENABLED(GcSt) andalso emqx_gc:run(Stats, GcSt) of
        false -> State;
        {_IsGC, GcSt1} ->
            State#state{gc_state = GcSt1}
    end.
%% Enforce the zone's out-of-memory policy; exits the process (thereby
%% running terminate/2) when the policy demands a shutdown.
check_oom(State = #state{channel = Channel}) ->
    Zone = emqx_channel:info(zone, Channel),
    OomPolicy = emqx_zone:oom_policy(Zone),
    ?tp(debug, check_oom, #{policy => OomPolicy}),
    case ?ENABLED(OomPolicy) andalso emqx_misc:check_oom(OomPolicy) of
        {shutdown, Reason} ->
            %% triggers terminate/2 callback immediately
            erlang:exit({shutdown, Reason});
        _Other ->
            ok
    end,
    State.
%%--------------------------------------------------------------------
%% Activate Socket

-compile({inline, [activate_socket/1]}).
%% Put the socket back into {active, N} mode, unless it is closed or
%% deliberately blocked by the rate limiter.
activate_socket(State = #state{sockstate = closed}) ->
    {ok, State};
activate_socket(State = #state{sockstate = blocked}) ->
    {ok, State};
activate_socket(State = #state{transport = Transport,
                               socket = Socket,
                               active_n = N}) ->
    case Transport:setopts(Socket, [{active, N}]) of
        ok -> {ok, State#state{sockstate = running}};
        Error -> Error
    end.
%%--------------------------------------------------------------------
%% Close Socket

%% Idempotent close: a no-op when the socket is already closed.
close_socket(State = #state{sockstate = closed}) -> State;
close_socket(State = #state{transport = Transport, socket = Socket}) ->
    ok = Transport:fast_close(Socket),
    State#state{sockstate = closed}.
%%--------------------------------------------------------------------
%% Inc incoming/outgoing stats

-compile({inline, [inc_incoming_stats/1]}).
%% Bump per-process and global counters for one received packet;
%% PUBLISH packets additionally count as received messages.
inc_incoming_stats(Packet = ?PACKET(Type)) ->
    inc_counter(recv_pkt, 1),
    case Type =:= ?PUBLISH of
        true ->
            inc_counter(recv_msg, 1),
            inc_counter(incoming_pubs, 1);
        false ->
            ok
    end,
    emqx_metrics:inc_recv(Packet).
-compile({inline, [inc_outgoing_stats/1]}).
%% Bump per-process and global counters for one sent packet;
%% PUBLISH packets additionally count as sent messages.
inc_outgoing_stats(Packet = ?PACKET(Type)) ->
    inc_counter(send_pkt, 1),
    case Type =:= ?PUBLISH of
        true ->
            inc_counter(send_msg, 1),
            inc_counter(outgoing_pubs, 1);
        false ->
            ok
    end,
    emqx_metrics:inc_sent(Packet).
%%--------------------------------------------------------------------
%% Helper functions

-compile({inline, [next_msgs/1]}).
%% Normalize channel replies into mailbox-style messages: a single
%% packet becomes {outgoing, Packet}; event tuples and lists of replies
%% pass through unchanged.
next_msgs(Packet) when is_record(Packet, mqtt_packet) ->
    {outgoing, Packet};
next_msgs(Event) when is_tuple(Event) ->
    Event;
next_msgs(More) when is_list(More) ->
    More.
-compile({inline, [shutdown/2, shutdown/3]}).
%% Stop the process with a {shutdown, Reason} exit reason, optionally
%% carrying a Reply for a pending caller.
shutdown(Reason, State) ->
    stop({shutdown, Reason}, State).

shutdown(Reason, Reply, State) ->
    stop({shutdown, Reason}, Reply, State).
-compile({inline, [stop/2, stop/3]}).
%% Build the stop tuples consumed by process_msg/2 and handle_msg/2.
stop(Reason, State) -> {stop, Reason, State}.
stop(Reason, Reply, State) -> {stop, Reason, Reply, State}.
%% Bump a process-dictionary counter, discarding the returned value.
inc_counter(Key, Inc) ->
    _ = emqx_pd:inc_counter(Key, Inc),
    ok.
%%--------------------------------------------------------------------
%% For CT tests
%%--------------------------------------------------------------------

%% Replace a named #state{} field via positional record surgery
%% (test helper only; Pos+1 skips the record tag).
set_field(Name, Value, State) ->
    Pos = emqx_misc:index_of(Name, record_info(fields, state)),
    setelement(Pos+1, State, Value).
%% Fetch a connection process's #state{} record via sys:get_state/1 and
%% render it as a map keyed by field name (test helper only).
get_state(Pid) ->
    State = sys:get_state(Pid),
    maps:from_list(lists:zip(record_info(fields, state),
                             tl(tuple_to_list(State)))).
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
MQTT/TCP|TLS Connection
API
Callback
Internal callback
Export for CT
TCP/TLS Socket
Peername of the connection
Sock State
The {active, N} option
Limit Timer
Stats Timer
Idle Timeout
Idle Timer
--------------------------------------------------------------------
API
--------------------------------------------------------------------
@doc Get infos of the connection/channel.
@doc Get stats of the connection/channel.
@doc Set TCP keepalive socket options to override system defaults.
Interval: The number of seconds between TCP keep-alive probes
Probes: The maximum number of TCP keep-alive probes to send before
giving up and killing the connection if no response is
NOTE: This API sets TCP socket options, which has nothing to do with
the MQTT layer's keepalive (PINGREQ and PINGRESP).
@doc Set custom socket options.
This API is made async because the call might be originated from
a hookpoint callback (otherwise deadlock).
If failed to set, the error message is logged.
--------------------------------------------------------------------
callbacks
--------------------------------------------------------------------
--------------------------------------------------------------------
Maybe do something here later.
--------------------------------------------------------------------
Ensure/cancel stats timer
--------------------------------------------------------------------
--------------------------------------------------------------------
Handle a Msg
In Stats
Ensure Rate Limit
Something sent
--------------------------------------------------------------------
Terminate
close socket, discard new state, always return ok.
tell truth about the original exception
--------------------------------------------------------------------
--------------------------------------------------------------------
Handle call
--------------------------------------------------------------------
Handle timeout
--------------------------------------------------------------------
--------------------------------------------------------------------
Handle incoming packet
--------------------------------------------------------------------
With Channel
--------------------------------------------------------------------
Handle outgoing packets
--------------------------------------------------------------------
Send data
Send an inet_reply to postpone handling the error
--------------------------------------------------------------------
Handle Info
--------------------------------------------------------------------
Handle Info
--------------------------------------------------------------------
Ensure rate limit
--------------------------------------------------------------------
triggers terminate/2 callback immediately
--------------------------------------------------------------------
--------------------------------------------------------------------
Close Socket
--------------------------------------------------------------------
Inc incoming/outgoing stats
--------------------------------------------------------------------
Helper functions
--------------------------------------------------------------------
For CT tests
-------------------------------------------------------------------- | Copyright ( c ) 2018 - 2021 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_connection).
-include("emqx.hrl").
-include("emqx_mqtt.hrl").
-include("logger.hrl").
-include("types.hrl").
-include_lib("snabbkaffe/include/snabbkaffe.hrl").
-logger_header("[MQTT]").
-ifdef(TEST).
-compile(export_all).
-compile(nowarn_export_all).
-endif.
-elvis([{elvis_style, invalid_dynamic_call, #{ignore => [emqx_connection]}}]).
-export([ start_link/3
, stop/1
]).
-export([ info/1
, stats/1
]).
-export([ async_set_keepalive/3
, async_set_keepalive/4
, async_set_socket_options/2
]).
-export([ call/2
, call/3
, cast/2
]).
-export([init/4]).
Sys callbacks
-export([ system_continue/3
, system_terminate/4
, system_code_change/4
, system_get_state/1
]).
-export([wakeup_from_hib/2, recvloop/2, get_state/1]).
-export([set_field/3]).
-import(emqx_misc,
[ maybe_apply/2
, start_timer/2
]).
-record(state, {
TCP / TLS Transport
transport :: esockd:transport(),
socket :: esockd:socket(),
peername :: emqx_types:peername(),
of the connection
sockname :: emqx_types:peername(),
sockstate :: emqx_types:sockstate(),
active_n :: pos_integer(),
Limiter
limiter :: maybe(emqx_limiter:limiter()),
limit_timer :: maybe(reference()),
Parse State
parse_state :: emqx_frame:parse_state(),
Serialize options
serialize :: emqx_frame:serialize_opts(),
Channel State
channel :: emqx_channel:channel(),
GC State
gc_state :: maybe(emqx_gc:gc_state()),
stats_timer :: disabled | maybe(reference()),
idle_timeout :: integer(),
idle_timer :: maybe(reference())
}).
-type(state() :: #state{}).
-define(ACTIVE_N, 100).
-define(INFO_KEYS, [socktype, peername, sockname, sockstate, active_n]).
-define(CONN_STATS, [recv_pkt, recv_msg, send_pkt, send_msg]).
-define(SOCK_STATS, [recv_oct, recv_cnt, send_oct, send_cnt, send_pend]).
-define(ENABLED(X), (X =/= undefined)).
-define(ALARM_TCP_CONGEST(Channel),
list_to_binary(io_lib:format("mqtt_conn/congested/~s/~s",
[emqx_channel:info(clientid, Channel),
emqx_channel:info(username, Channel)]))).
-define(ALARM_CONN_INFO_KEYS, [
socktype, sockname, peername,
clientid, username, proto_name, proto_ver, connected_at
]).
-define(ALARM_SOCK_STATS_KEYS, [send_pend, recv_cnt, recv_oct, send_cnt, send_oct]).
-define(ALARM_SOCK_OPTS_KEYS, [high_watermark, high_msgq_watermark, sndbuf, recbuf, buffer]).
-dialyzer({no_match, [info/2]}).
-dialyzer({nowarn_function, [ init/4
, init_state/3
, run_loop/2
, system_terminate/4
, system_code_change/4
]}).
-spec(start_link(esockd:transport(), esockd:socket(), proplists:proplist())
-> {ok, pid()}).
start_link(Transport, Socket, Options) ->
Args = [self(), Transport, Socket, Options],
CPid = proc_lib:spawn_link(?MODULE, init, Args),
{ok, CPid}.
-spec(info(pid()|state()) -> emqx_types:infos()).
info(CPid) when is_pid(CPid) ->
call(CPid, info);
info(State = #state{channel = Channel}) ->
ChanInfo = emqx_channel:info(Channel),
SockInfo = maps:from_list(
info(?INFO_KEYS, State)),
ChanInfo#{sockinfo => SockInfo}.
info(Keys, State) when is_list(Keys) ->
[{Key, info(Key, State)} || Key <- Keys];
info(socktype, #state{transport = Transport, socket = Socket}) ->
Transport:type(Socket);
info(peername, #state{peername = Peername}) ->
Peername;
info(sockname, #state{sockname = Sockname}) ->
Sockname;
info(sockstate, #state{sockstate = SockSt}) ->
SockSt;
info(active_n, #state{active_n = ActiveN}) ->
ActiveN;
info(stats_timer, #state{stats_timer = StatsTimer}) ->
StatsTimer;
info(limit_timer, #state{limit_timer = LimitTimer}) ->
LimitTimer;
info(limiter, #state{limiter = Limiter}) ->
maybe_apply(fun emqx_limiter:info/1, Limiter).
-spec(stats(pid()|state()) -> emqx_types:stats()).
stats(CPid) when is_pid(CPid) ->
call(CPid, stats);
stats(#state{transport = Transport,
socket = Socket,
channel = Channel}) ->
SockStats = case Transport:getstat(Socket, ?SOCK_STATS) of
{ok, Ss} -> Ss;
{error, _} -> []
end,
ConnStats = emqx_pd:get_counters(?CONN_STATS),
ChanStats = emqx_channel:stats(Channel),
ProcStats = emqx_misc:proc_stats(),
lists:append([SockStats, ConnStats, ChanStats, ProcStats]).
Idle : The number of seconds a connection needs to be idle before
TCP begins sending out keep - alive probes ( Linux default 7200 ) .
( Linux default 75 ) .
obtained from the other end ( Linux default 9 ) .
async_set_keepalive(Idle, Interval, Probes) ->
async_set_keepalive(self(), Idle, Interval, Probes).
async_set_keepalive(Pid, Idle, Interval, Probes) ->
Options = [ {keepalive, true}
, {raw, 6, 4, <<Idle:32/native>>}
, {raw, 6, 5, <<Interval:32/native>>}
, {raw, 6, 6, <<Probes:32/native>>}
],
async_set_socket_options(Pid, Options).
async_set_socket_options(Pid, Options) ->
cast(Pid, {async_set_socket_options, Options}).
cast(Pid, Req) ->
gen_server:cast(Pid, Req).
call(Pid, Req) ->
call(Pid, Req, infinity).
call(Pid, Req, Timeout) ->
gen_server:call(Pid, Req, Timeout).
stop(Pid) ->
gen_server:stop(Pid).
init(Parent, Transport, RawSocket, Options) ->
case Transport:wait(RawSocket) of
{ok, Socket} ->
run_loop(Parent, init_state(Transport, Socket, Options));
{error, Reason} ->
ok = Transport:fast_close(RawSocket),
exit_on_sock_error(Reason)
end.
init_state(Transport, Socket, Options) ->
{ok, Peername} = Transport:ensure_ok_or_exit(peername, [Socket]),
{ok, Sockname} = Transport:ensure_ok_or_exit(sockname, [Socket]),
Peercert = Transport:ensure_ok_or_exit(peercert, [Socket]),
ConnInfo = #{socktype => Transport:type(Socket),
peername => Peername,
sockname => Sockname,
peercert => Peercert,
conn_mod => ?MODULE
},
Zone = proplists:get_value(zone, Options),
ActiveN = proplists:get_value(active_n, Options, ?ACTIVE_N),
PubLimit = emqx_zone:publish_limit(Zone),
BytesIn = proplists:get_value(rate_limit, Options),
RateLimit = emqx_zone:ratelimit(Zone),
Limiter = emqx_limiter:init(Zone, PubLimit, BytesIn, RateLimit),
FrameOpts = emqx_zone:mqtt_frame_options(Zone),
ParseState = emqx_frame:initial_parse_state(FrameOpts),
Serialize = emqx_frame:serialize_opts(),
Channel = emqx_channel:init(ConnInfo, Options),
GcState = emqx_zone:init_gc_state(Zone),
StatsTimer = emqx_zone:stats_timer(Zone),
IdleTimeout = emqx_zone:idle_timeout(Zone),
IdleTimer = start_timer(IdleTimeout, idle_timeout),
#state{transport = Transport,
socket = Socket,
peername = Peername,
sockname = Sockname,
sockstate = idle,
active_n = ActiveN,
limiter = Limiter,
parse_state = ParseState,
serialize = Serialize,
channel = Channel,
gc_state = GcState,
stats_timer = StatsTimer,
idle_timeout = IdleTimeout,
idle_timer = IdleTimer
}.
run_loop(Parent, State = #state{transport = Transport,
socket = Socket,
peername = Peername,
channel = Channel}) ->
emqx_logger:set_metadata_peername(esockd:format(Peername)),
emqx_misc:tune_heap_size(emqx_zone:oom_policy(
emqx_channel:info(zone, Channel))),
case activate_socket(State) of
{ok, NState} -> hibernate(Parent, NState);
{error, Reason} ->
ok = Transport:fast_close(Socket),
exit_on_sock_error(Reason)
end.
-spec exit_on_sock_error(any()) -> no_return().
exit_on_sock_error(Reason) when Reason =:= einval;
Reason =:= enotconn;
Reason =:= closed ->
erlang:exit(normal);
exit_on_sock_error(timeout) ->
erlang:exit({shutdown, ssl_upgrade_timeout});
exit_on_sock_error(Reason) ->
erlang:exit({shutdown, Reason}).
Recv Loop
recvloop(Parent, State = #state{idle_timeout = IdleTimeout}) ->
receive
Msg ->
handle_recv(Msg, Parent, State)
after
IdleTimeout + 100 ->
hibernate(Parent, cancel_stats_timer(State))
end.
handle_recv({system, From, Request}, Parent, State) ->
sys:handle_system_msg(Request, From, Parent, ?MODULE, [], State);
handle_recv({'EXIT', Parent, Reason}, Parent, State) ->
FIXME : it 's not trapping exit , should never receive an EXIT
terminate(Reason, State);
handle_recv(Msg, Parent, State = #state{idle_timeout = IdleTimeout}) ->
case process_msg([Msg], ensure_stats_timer(IdleTimeout, State)) of
{ok, NewState} ->
?MODULE:recvloop(Parent, NewState);
{stop, Reason, NewSate} ->
terminate(Reason, NewSate)
end.
hibernate(Parent, State) ->
proc_lib:hibernate(?MODULE, wakeup_from_hib, [Parent, State]).
wakeup_from_hib(Parent, State) ->
?MODULE:recvloop(Parent, State).
-compile({inline, [ensure_stats_timer/2]}).
ensure_stats_timer(Timeout, State = #state{stats_timer = undefined}) ->
State#state{stats_timer = start_timer(Timeout, emit_stats)};
ensure_stats_timer(_Timeout, State) -> State.
-compile({inline, [cancel_stats_timer/1]}).
cancel_stats_timer(State = #state{stats_timer = TRef}) when is_reference(TRef) ->
?tp(debug, cancel_stats_timer, #{}),
ok = emqx_misc:cancel_timer(TRef),
State#state{stats_timer = undefined};
cancel_stats_timer(State) -> State.
Process next Msg
process_msg([], State) ->
{ok, State};
process_msg([Msg|More], State) ->
try
case handle_msg(Msg, State) of
ok ->
process_msg(More, State);
{ok, NState} ->
process_msg(More, NState);
{ok, Msgs, NState} ->
process_msg(append_msg(More, Msgs), NState);
{stop, Reason, NState} ->
{stop, Reason, NState}
end
catch
exit : normal ->
{stop, normal, State};
exit : shutdown ->
{stop, shutdown, State};
exit : {shutdown, _} = Shutdown ->
{stop, Shutdown, State};
Exception : Context : Stack ->
{stop, #{exception => Exception,
context => Context,
stacktrace => Stack}, State}
end.
-compile({inline, [append_msg/2]}).
append_msg([], Msgs) when is_list(Msgs) ->
Msgs;
append_msg([], Msg) -> [Msg];
append_msg(Q, Msgs) when is_list(Msgs) ->
lists:append(Q, Msgs);
append_msg(Q, Msg) ->
lists:append(Q, [Msg]).
%% Dispatch one mailbox message.
%% Returns ok | {ok, NState} | {ok, NextMsgs, NState} | {stop, Reason, NState},
%% as consumed by the surrounding receive loop.
handle_msg({'$gen_call', From, Req}, State) ->
    case handle_call(From, Req, State) of
        {reply, Reply, NState} ->
            gen_server:reply(From, Reply),
            {ok, NState};
        {stop, Reason, Reply, NState} ->
            gen_server:reply(From, Reply),
            stop(Reason, NState)
    end;
handle_msg({'$gen_cast', Req}, State) ->
    NewState = handle_cast(Req, State),
    {ok, NewState};
%% Raw bytes from the TCP/TLS socket: account for them, then parse frames.
handle_msg({Inet, _Sock, Data}, State) when Inet == tcp; Inet == ssl ->
    ?LOG(debug, "RECV ~0p", [Data]),
    Oct = iolist_size(Data),
    inc_counter(incoming_bytes, Oct),
    ok = emqx_metrics:inc('bytes.received', Oct),
    parse_incoming(Data, State);
%% First CONNECT packet: cancel the idle timer and fix serializer options
%% (protocol version / max packet size) for the rest of the session.
handle_msg({incoming, Packet = ?CONNECT_PACKET(ConnPkt)},
           State = #state{idle_timer = IdleTimer}) ->
    ok = emqx_misc:cancel_timer(IdleTimer),
    Serialize = emqx_frame:serialize_opts(ConnPkt),
    NState = State#state{serialize = Serialize,
                         idle_timer = undefined
                        },
    handle_incoming(Packet, NState);
handle_msg({incoming, Packet}, State) ->
    handle_incoming(Packet, State);
handle_msg({outgoing, Packets}, State) ->
    handle_outgoing(Packets, State);
handle_msg({Error, _Sock, Reason}, State)
  when Error == tcp_error; Error == ssl_error ->
    handle_info({sock_error, Reason}, State);
handle_msg({Closed, _Sock}, State)
  when Closed == tcp_closed; Closed == ssl_closed ->
    handle_info({sock_closed, Closed}, close_socket(State));
%% Socket went passive ({active, N} exhausted): flush incoming counters,
%% apply rate limiting, then re-activate the socket.
handle_msg({Passive, _Sock}, State)
  when Passive == tcp_passive; Passive == ssl_passive ->
    Pubs = emqx_pd:reset_counter(incoming_pubs),
    Bytes = emqx_pd:reset_counter(incoming_bytes),
    InStats = #{cnt => Pubs, oct => Bytes},
    NState = ensure_rate_limit(InStats, State),
    %% Run GC and Check OOM
    NState1 = check_oom(run_gc(InStats, NState)),
    handle_info(activate_socket, NState1);
%% Batch up to active_n queued deliveries so the channel handles them at once.
handle_msg(Deliver = {deliver, _Topic, _Msg},
           #state{active_n = ActiveN} = State) ->
    Delivers = [Deliver|emqx_misc:drain_deliver(ActiveN)],
    with_channel(handle_deliver, [Delivers], State);
%% An async send completed; once more than active_n publishes have gone out,
%% flush the outgoing counters and run GC/OOM checks.
handle_msg({inet_reply, _Sock, ok}, State = #state{active_n = ActiveN}) ->
    case emqx_pd:get_counter(outgoing_pubs) > ActiveN of
        true ->
            Pubs = emqx_pd:reset_counter(outgoing_pubs),
            Bytes = emqx_pd:reset_counter(outgoing_bytes),
            OutStats = #{cnt => Pubs, oct => Bytes},
            {ok, check_oom(run_gc(OutStats, State))};
        false -> ok
    end;
handle_msg({inet_reply, _Sock, {error, Reason}}, State) ->
    handle_info({sock_error, Reason}, State);
handle_msg({connack, ConnAck}, State) ->
    handle_outgoing(ConnAck, State);
handle_msg({close, Reason}, State) ->
    ?LOG(debug, "Force to close the socket due to ~p", [Reason]),
    handle_info({sock_closed, Reason}, close_socket(State));
%% Channel lifecycle events: keep the connection-manager registry in sync.
handle_msg({event, connected}, State = #state{channel = Channel}) ->
    ClientId = emqx_channel:info(clientid, Channel),
    emqx_cm:insert_channel_info(ClientId, info(State), stats(State));
handle_msg({event, disconnected}, State = #state{channel = Channel}) ->
    ClientId = emqx_channel:info(clientid, Channel),
    emqx_cm:set_chan_info(ClientId, info(State)),
    emqx_cm:connection_closed(ClientId),
    {ok, State};
handle_msg({event, _Other}, State = #state{channel = Channel}) ->
    ClientId = emqx_channel:info(clientid, Channel),
    emqx_cm:set_chan_info(ClientId, info(State)),
    emqx_cm:set_chan_stats(ClientId, stats(State)),
    {ok, State};
handle_msg({timeout, TRef, TMsg}, State) ->
    handle_timeout(TRef, TMsg, State);
handle_msg(Shutdown = {shutdown, _Reason}, State) ->
    stop(Shutdown, State);
%% Fallthrough: hand anything else to handle_info/2 (and the channel).
handle_msg(Msg, State) ->
    handle_info(Msg, State).
-spec terminate(any(), state()) -> no_return().
%% Tear down the channel and socket, then exit with the original reason
%% (re-raised as an exception when Reason carries a captured one).
terminate(Reason, State = #state{channel = Channel, transport = Transport,
                                 socket = Socket}) ->
    try
        Channel1 = emqx_channel:set_conn_state(disconnected, Channel),
        emqx_congestion:cancel_alarms(Socket, Transport, Channel1),
        emqx_channel:terminate(Reason, Channel1),
        close_socket_ok(State)
    catch
        %% Cleanup is best-effort: never let a cleanup failure mask Reason.
        E : C : S ->
            ?tp(warning, unclean_terminate, #{exception => E, context => C, stacktrace => S})
    end,
    ?tp(info, terminate, #{reason => Reason}),
    maybe_raise_excption(Reason).
%% Close the socket and discard the result; terminate/2 only needs `ok'.
close_socket_ok(State) ->
    _Ignored = close_socket(State),
    ok.
%% If Reason carries a captured exception, re-raise it with its original
%% class and stacktrace; otherwise exit with Reason.
%% NOTE(review): the name is misspelled ("excption") but is kept as-is
%% because terminate/2 calls it by this exact name.
maybe_raise_excption(#{exception := Exception,
                       context := Context,
                       stacktrace := Stacktrace
                      }) ->
    erlang:raise(Exception, Context, Stacktrace);
maybe_raise_excption(Reason) ->
    exit(Reason).
%% Sys callbacks
%% Callbacks invoked via sys:handle_system_msg/6 when this (special)
%% process is suspended, resumed, code-changed or inspected.
system_continue(Parent, _Debug, State) ->
    ?MODULE:recvloop(Parent, State).

system_terminate(Reason, _Parent, _Debug, State) ->
    terminate(Reason, State).

system_code_change(State, _Mod, _OldVsn, _Extra) ->
    {ok, State}.

system_get_state(State) -> {ok, State}.
%% Synchronous calls: info/stats/ratelimit are answered locally, anything
%% else is forwarded to the channel.
handle_call(_From, info, State) ->
    {reply, info(State), State};
handle_call(_From, stats, State) ->
    {reply, stats(State), State};
handle_call(_From, {ratelimit, Policy}, State = #state{channel = Channel}) ->
    Zone = emqx_channel:info(zone, Channel),
    Limiter = emqx_limiter:init(Zone, Policy),
    {reply, ok, State#state{limiter = Limiter}};
handle_call(_From, Req, State = #state{channel = Channel}) ->
    case emqx_channel:handle_call(Req, Channel) of
        {reply, Reply, NChannel} ->
            {reply, Reply, State#state{channel = NChannel}};
        {shutdown, Reason, Reply, NChannel} ->
            shutdown(Reason, Reply, State#state{channel = NChannel});
        {shutdown, Reason, Reply, OutPacket, NChannel} ->
            %% Flush the final outgoing packet before shutting down.
            NState = State#state{channel = NChannel},
            ok = handle_outgoing(OutPacket, NState),
            shutdown(Reason, Reply, NState)
    end.
%% Timer expirations.
handle_timeout(_TRef, idle_timeout, State) ->
    %% No CONNECT arrived in time: give up on this connection.
    shutdown(idle_timeout, State);
handle_timeout(_TRef, limit_timeout, State) ->
    %% Rate-limit pause is over: mark the socket idle and re-activate it.
    NState = State#state{sockstate = idle,
                         limit_timer = undefined
                        },
    handle_info(activate_socket, NState);
handle_timeout(_TRef, emit_stats, State = #state{channel = Channel, transport = Transport,
                                                 socket = Socket}) ->
    emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel),
    ClientId = emqx_channel:info(clientid, Channel),
    emqx_cm:set_chan_stats(ClientId, stats(State)),
    {ok, State#state{stats_timer = undefined}};
handle_timeout(TRef, keepalive, State = #state{transport = Transport,
                                               socket = Socket,
                                               channel = Channel})->
    case emqx_channel:info(conn_state, Channel) of
        disconnected -> {ok, State};
        _ ->
            %% Feed the current received-byte count into the keepalive check.
            case Transport:getstat(Socket, [recv_oct]) of
                {ok, [{recv_oct, RecvOct}]} ->
                    handle_timeout(TRef, {keepalive, RecvOct}, State);
                {error, Reason} ->
                    handle_info({sock_error, Reason}, State)
            end
    end;
%% Anything else is a channel-owned timer.
handle_timeout(TRef, Msg, State) ->
    with_channel(handle_timeout, [TRef, Msg], State).
%% Parse incoming data
-compile({inline, [parse_incoming/2]}).
%% Parse a chunk of bytes into packets; frame-parsing state is carried
%% across chunks in #state.parse_state.
parse_incoming(Data, State) ->
    {Packets, NState} = parse_incoming(Data, [], State),
    {ok, next_incoming_msgs(Packets), NState}.

%% Accumulates parsed packets in reverse order of arrival.
parse_incoming(<<>>, Packets, State) ->
    {Packets, State};
parse_incoming(Data, Packets, State = #state{parse_state = ParseState}) ->
    try emqx_frame:parse(Data, ParseState) of
        {more, NParseState} ->
            {Packets, State#state{parse_state = NParseState}};
        {ok, Packet, Rest, NParseState} ->
            NState = State#state{parse_state = NParseState},
            parse_incoming(Rest, [Packet|Packets], NState)
    catch
        %% A malformed frame becomes a {frame_error, _} pseudo-packet so the
        %% channel can react; the remaining bytes of the chunk are dropped.
        error:Reason:Stk ->
            ?LOG(error, "~nParse failed for ~0p~n~0p~nFrame data:~0p",
                 [Reason, Stk, Data]),
            {[{frame_error, Reason}|Packets], State}
    end.
-compile({inline, [next_incoming_msgs/1]}).
%% Packets arrive accumulated newest-first; emit them oldest-first.
%% A single packet is unwrapped to avoid a one-element list.
next_incoming_msgs([OnlyOne]) ->
    {incoming, OnlyOne};
next_incoming_msgs(Reversed) ->
    lists:foldl(fun(Packet, Acc) -> [{incoming, Packet} | Acc] end, [], Reversed).
%% Hand a parsed packet — or a {frame_error, _} pseudo-packet — to the channel.
handle_incoming(Packet, State) when is_record(Packet, mqtt_packet) ->
    ok = inc_incoming_stats(Packet),
    ?LOG(debug, "RECV ~s", [emqx_packet:format(Packet)]),
    with_channel(handle_in, [Packet], State);
handle_incoming(FrameError, State) ->
    with_channel(handle_in, [FrameError], State).
%% Apply emqx_channel:Fun(Args..., Channel) and translate its result into
%% this process's {ok, ...} / shutdown conventions.
with_channel(Fun, Args, State = #state{channel = Channel}) ->
    case erlang:apply(emqx_channel, Fun, Args ++ [Channel]) of
        ok -> {ok, State};
        {ok, NChannel} ->
            {ok, State#state{channel = NChannel}};
        {ok, Replies, NChannel} ->
            {ok, next_msgs(Replies), State#state{channel = NChannel}};
        {shutdown, Reason, NChannel} ->
            shutdown(Reason, State#state{channel = NChannel});
        {shutdown, Reason, Packet, NChannel} ->
            %% Send the channel's final packet before shutting down.
            NState = State#state{channel = NChannel},
            ok = handle_outgoing(Packet, NState),
            shutdown(Reason, NState)
    end.
%% Serialize and send a single packet, or a batch of packets.
handle_outgoing(Packets, State) when is_list(Packets) ->
    SerializeFun = serialize_and_inc_stats_fun(State),
    send([SerializeFun(Packet) || Packet <- Packets], State);
handle_outgoing(Packet, State) ->
    SerializeFun = serialize_and_inc_stats_fun(State),
    send(SerializeFun(Packet), State).
%% Returns a closure that serializes one packet and bumps outgoing stats.
%% An oversized packet serializes to <<>> and is dropped (and counted).
serialize_and_inc_stats_fun(#state{serialize = Serialize}) ->
    fun(Packet) ->
        case emqx_frame:serialize_pkt(Packet, Serialize) of
            <<>> -> ?LOG(warning, "~s is discarded due to the frame is too large!",
                         [emqx_packet:format(Packet)]),
                    ok = emqx_metrics:inc('delivery.dropped.too_large'),
                    ok = emqx_metrics:inc('delivery.dropped'),
                    <<>>;
            Data -> ?LOG(debug, "SEND ~s", [emqx_packet:format(Packet)]),
                    ok = inc_outgoing_stats(Packet),
                    Data
        end
    end.
-spec(send(iodata(), state()) -> ok).
%% Asynchronously write IoData to the socket. A synchronous send error is
%% re-injected as an {inet_reply, _, Error} message so that it is handled
%% on the normal handle_msg/2 path.
send(IoData, #state{transport = Transport, socket = Socket, channel = Channel}) ->
    Oct = iolist_size(IoData),
    ok = emqx_metrics:inc('bytes.sent', Oct),
    inc_counter(outgoing_bytes, Oct),
    emqx_congestion:maybe_alarm_conn_congestion(Socket, Transport, Channel),
    case Transport:async_send(Socket, IoData, []) of
        ok -> ok;
        Error = {error, _Reason} ->
            self() ! {inet_reply, Socket, Error},
            ok
    end.
%% Internal info messages.
handle_info(activate_socket, State = #state{sockstate = OldSst}) ->
    case activate_socket(State) of
        {ok, NState = #state{sockstate = NewSst}} ->
            %% Emit a sockstate-change event only on an actual transition.
            case OldSst =/= NewSst of
                true -> {ok, {event, NewSst}, NState};
                false -> {ok, NState}
            end;
        {error, Reason} ->
            handle_info({sock_error, Reason}, State)
    end;
handle_info({sock_error, Reason}, State) ->
    %% closed/einval are expected on normal peer close; don't warn on them.
    case Reason =/= closed andalso Reason =/= einval of
        true -> ?LOG(warning, "socket_error: ~p", [Reason]);
        false -> ok
    end,
    handle_info({sock_closed, Reason}, close_socket(State));
%% Everything else goes to the channel.
handle_info(Info, State) ->
    with_channel(handle_info, [Info], State).
%% Asynchronous requests. Returns the (possibly unchanged) state directly;
%% the caller (handle_msg/2) wraps it into {ok, State}.
handle_cast({async_set_socket_options, Opts},
            State = #state{transport = Transport,
                           socket = Socket
                          }) ->
    case Transport:setopts(Socket, Opts) of
        ok -> ?tp(info, "custom_socket_options_successfully", #{opts => Opts});
        %% Fixed trace label: was "failed_to_set_custom_socket_optionn" (typo).
        Err -> ?tp(error, "failed_to_set_custom_socket_option", #{reason => Err})
    end,
    State;
handle_cast(Req, State) ->
    ?tp(error, "received_unknown_cast", #{cast => Req}),
    State.
%% Check the flushed in-counters against the limiter; on overflow, block
%% the socket and arm a timer that re-activates it after the pause.
ensure_rate_limit(Stats, State = #state{limiter = Limiter}) ->
    case ?ENABLED(Limiter) andalso emqx_limiter:check(Stats, Limiter) of
        false -> State;
        {ok, Limiter1} ->
            State#state{limiter = Limiter1};
        {pause, Time, Limiter1} ->
            ?LOG(warning, "Pause ~pms due to rate limit", [Time]),
            TRef = start_timer(Time, limit_timeout),
            State#state{sockstate = blocked,
                        limiter = Limiter1,
                        limit_timer = TRef
                       }
    end.
%% Run GC and Check OOM
%% Feed the flushed counters into the GC state; may trigger a forced GC.
run_gc(Stats, State = #state{gc_state = GcSt}) ->
    case ?ENABLED(GcSt) andalso emqx_gc:run(Stats, GcSt) of
        false -> State;
        {_IsGC, GcSt1} ->
            State#state{gc_state = GcSt1}
    end.
%% Exit this connection process when the zone's OOM policy is breached;
%% otherwise return the state unchanged.
check_oom(State = #state{channel = Channel}) ->
    Zone = emqx_channel:info(zone, Channel),
    OomPolicy = emqx_zone:oom_policy(Zone),
    ?tp(debug, check_oom, #{policy => OomPolicy}),
    case ?ENABLED(OomPolicy) andalso emqx_misc:check_oom(OomPolicy) of
        {shutdown, Reason} ->
            erlang:exit({shutdown, Reason});
        %% false (policy disabled) and ok both fall through here.
        _Other ->
            ok
    end,
    State.
%% Activate Socket
-compile({inline, [activate_socket/1]}).
%% Put the socket back into {active, N} mode unless it is closed or
%% deliberately blocked by the rate limiter.
activate_socket(State = #state{sockstate = closed}) ->
    {ok, State};
activate_socket(State = #state{sockstate = blocked}) ->
    {ok, State};
activate_socket(State = #state{transport = Transport,
                               socket = Socket,
                               active_n = N}) ->
    case Transport:setopts(Socket, [{active, N}]) of
        ok -> {ok, State#state{sockstate = running}};
        Error -> Error
    end.
%% Close the socket once; calling again on a closed socket is a no-op.
close_socket(#state{sockstate = closed} = State) ->
    State;
close_socket(#state{transport = Transport, socket = Socket} = State) ->
    ok = Transport:fast_close(Socket),
    State#state{sockstate = closed}.
-compile({inline, [inc_incoming_stats/1]}).
%% Account one received packet (and, for PUBLISH, one received message).
inc_incoming_stats(Packet = ?PACKET(Type)) ->
    inc_counter(recv_pkt, 1),
    if
        Type =:= ?PUBLISH ->
            inc_counter(recv_msg, 1),
            inc_counter(incoming_pubs, 1);
        true ->
            ok
    end,
    emqx_metrics:inc_recv(Packet).
-compile({inline, [inc_outgoing_stats/1]}).
%% Account one sent packet (and, for PUBLISH, one sent message).
inc_outgoing_stats(Packet = ?PACKET(Type)) ->
    inc_counter(send_pkt, 1),
    if
        Type =:= ?PUBLISH ->
            inc_counter(send_msg, 1),
            inc_counter(outgoing_pubs, 1);
        true ->
            ok
    end,
    emqx_metrics:inc_sent(Packet).
-compile({inline, [next_msgs/1]}).
%% Normalize a channel reply into the message(s) to feed back into the
%% loop: a single packet becomes {outgoing, Packet}; tuples and lists
%% pass through unchanged.
next_msgs(Packet) when is_record(Packet, mqtt_packet) ->
    {outgoing, Packet};
next_msgs(Event) when is_tuple(Event) ->
    Event;
next_msgs(More) when is_list(More) ->
    More.
-compile({inline, [shutdown/2, shutdown/3]}).
%% Wrap Reason so terminate/2 receives {shutdown, Reason}.
shutdown(Reason, State) ->
    stop({shutdown, Reason}, State).

shutdown(Reason, Reply, State) ->
    stop({shutdown, Reason}, Reply, State).

-compile({inline, [stop/2, stop/3]}).
%% Build the stop tuples consumed by the surrounding loop / handle_msg.
stop(Reason, State) ->
    {stop, Reason, State}.

stop(Reason, Reply, State) ->
    {stop, Reason, Reply, State}.
%% Bump a process-dictionary counter, discarding the returned new value.
inc_counter(Key, Inc) ->
    _ = emqx_pd:inc_counter(Key, Inc),
    ok.
%% Set a #state{} field by name.
set_field(Name, Value, State) ->
    Pos = emqx_misc:index_of(Name, record_info(fields, state)),
    %% +1 because element 1 of a record tuple is the record tag.
    setelement(Pos+1, State, Value).
%% Fetch a connection process's #state{} as a map (debug/introspection
%% helper; tl/1 drops the record tag).
get_state(Pid) ->
    State = sys:get_state(Pid),
    maps:from_list(lists:zip(record_info(fields, state),
                             tl(tuple_to_list(State)))).
|
bc9a88f08b8fd227ef8a15834eb725e5e8519d18a018632959705987929c24e6 | machinedgod/helda | TileMap.hs | # LANGUAGE UnicodeSyntax #
{-# LANGUAGE UnicodeSyntax #-}
{-# LANGUAGE TemplateHaskell #-}
-- | A rectangular tile map loaded from a text file, stored as a flat
--   vector in row-major order.
--   NOTE(review): the LANGUAGE pragma delimiters ({-# … #-}) were lost in
--   transit and are restored here; TemplateHaskell is required by
--   makeLenses and UnicodeSyntax by the ∷/⇒/→ signatures below.
module TileMap
( Width
, Height
, TileMap(TileMap)
, name
, width
, height
, mapData
, loadFromFile
, linear2Coord
, coord2Linear
) where

import Control.Lens           (makeLenses)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Linear                 (V2(V2))

import qualified Data.Vector as V (Vector, fromList)
--------------------------------------------------------------------------------

-- Map dimensions, measured in tiles.
type Width = Word
type Height = Height
-- | Load a map from a text file, converting each character to a tile via
--   @ef@. The first line determines the width; rows are assumed to be of
--   equal length (not checked).
loadFromFile ∷ (MonadIO m) ⇒ FilePath → (Char → a) → m (TileMap a)
loadFromFile f ef = liftIO (stringToMap <$> readFile f)
  where
    -- 'lines' drops the terminators, so a trailing newline no longer
    -- inflates the row count: the previous  (+1) . length . filter (=='\n')
    -- over-reported the height by one for newline-terminated files.
    stringToMap s =
      let rows = lines s
          w    = fromIntegral (if null rows then 0 else length (head rows))
          h    = fromIntegral (length rows)
          d    = V.fromList (fmap ef (concat rows))
      in TileMap f w h d
-- | Convert a row-major linear index into an (x, y) coordinate for a map
--   of the given width.
linear2Coord ∷ Word → Width → V2 Word
linear2Coord i w = let (y, x) = i `divMod` w
                   in V2 x y
-- | Convert an (x, y) coordinate into a row-major linear index.
--   NOTE(review): only the final product is clamped at 0, so a negative x
--   with a positive y still lands inside the previous row rather than
--   being clamped per axis — confirm callers rely on this behaviour.
coord2Linear ∷ V2 Int → Width → Word
coord2Linear (V2 x y) w = fromIntegral (max 0 (y * fromIntegral w + x))
------------------------------------------------------------------------------ | # LANGUAGE UnicodeSyntax #
# LANGUAGE TemplateHaskell #
module TileMap
( Width
, Height
, TileMap(TileMap)
, name
, width
, height
, mapData
, loadFromFile
, linear2Coord
, coord2Linear
) where
import Control.Lens (makeLenses)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Linear (V2(V2))
import qualified Data.Vector as V (Vector, fromList)
type Width = Word
type Height = Word
data TileMap a = TileMap {
_name ∷ String
, _width ∷ Width
, _height ∷ Height
, _mapData ∷ V.Vector a
}
makeLenses ''TileMap
loadFromFile ∷ (MonadIO m) ⇒ FilePath → (Char → a) → m (TileMap a)
loadFromFile f ef = liftIO (stringToMap <$> readFile f)
where
stringToMap = let w = fromIntegral . length . takeWhile (/=('\n'))
h = fromIntegral . (+1) . length . filter (==('\n'))
d = V.fromList . fmap ef . filter (/=('\n'))
in TileMap f <$> w <*> h <*> d
linear2Coord ∷ Word → Width → V2 Word
linear2Coord i w = let x = i `mod` w
y = i `div` w
in V2 x y
coord2Linear ∷ V2 Int → Width → Word
coord2Linear (V2 x y) w = fromIntegral (max 0 (y * fromIntegral w + x))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.