_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
c98fd8debe0fd48e7e7eb12b9fdb505dca40a90f11106d37b342938463146e78 | tezos/tezos-mirror | builtin_models.mli | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2022 Nomadic Labs , < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
val ns : Namespace.cons
val fv : string -> Free_variable.t
val timer_model : unit Model.t
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/cdc7a4382ef6dfcd6c73f86d9a29b829b33d18d4/src/lib_benchmark/builtin_models.mli | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
*************************************************************************** | Copyright ( c ) 2022 Nomadic Labs , < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
val ns : Namespace.cons
val fv : string -> Free_variable.t
val timer_model : unit Model.t
|
fa283b5309f2ad609cf4be153d7a5859889e28c52ec8014fa83bff378c31b83b | jackfirth/resyntax | info.rkt | #lang info
(define collection "resyntax")
(define scribblings
(list (list "main.scrbl"
(list 'multi-page)
(list 'library)
"resyntax")))
(define deps
(list "base"
"br-parser-tools-lib"
"brag-lib"
"fancy-app"
"gui-lib"
"rackunit-lib"
"rebellion"
"uri-old"))
(define build-deps
(list "racket-doc"
"rackunit-lib"
"scribble-lib"))
(define racket-launcher-names
(list "resyntax"))
(define racket-launcher-libraries
(list "cli.rkt"))
| null | https://raw.githubusercontent.com/jackfirth/resyntax/fd05e555affc61b62c8b4e5e65ade1c10941d555/info.rkt | racket | #lang info
(define collection "resyntax")
(define scribblings
(list (list "main.scrbl"
(list 'multi-page)
(list 'library)
"resyntax")))
(define deps
(list "base"
"br-parser-tools-lib"
"brag-lib"
"fancy-app"
"gui-lib"
"rackunit-lib"
"rebellion"
"uri-old"))
(define build-deps
(list "racket-doc"
"rackunit-lib"
"scribble-lib"))
(define racket-launcher-names
(list "resyntax"))
(define racket-launcher-libraries
(list "cli.rkt"))
| |
9013d0c751bede4074177e0bb54c75267d7165a35a3726804a4ae368b93a2860 | sixohsix/tak | RunLoop.hs | module Tak.RunLoop (doMainLoop, confirm, query) where
import Tak.Types
import Tak.Display
import Tak.Editor
import Tak.GlobalState
import Data.Monoid (mappend)
import Control.Lens (set, over, view)
import Control.Monad (when, guard)
renderAndRefresh :: GlobalState -> IO ()
renderAndRefresh gst = do
(y, x) <- getScreenSize
renderEditor (Box (y - 1) 0 1 x) (view infoLine gst)
renderEditor (Box 0 0 (y - 1) x) (view editor gst)
refresh
renderAndWaitEvent :: GlobalState -> IO Event
renderAndWaitEvent gst = do
when (view needsRepaint gst) (renderAndRefresh gst)
waitEvent
doMainLoop :: (Event -> GlobalState -> IO a) -> GlobalState -> IO a
doMainLoop handle globalState = do
evt <- renderAndWaitEvent globalState
handle evt (updateRepaint globalState)
confirm :: String -> GlobalState -> IO Bool
confirm msg gst =
let gst' = updateInfoLine (" " ++ msg ++ " [y/n] ") gst
loopConfirm = doMainLoop confirmHandler
confirmHandler evt gst = case evt of
KeyEvent (KeyChar 'y') -> return True
KeyEvent (KeyChar 'n') -> return False
otherwise -> loopConfirm (preventRepaint gst)
in loopConfirm gst'
query :: String -> GlobalState -> IO (Maybe String)
query msg =
let updateIL s = updateInfoLine (" " ++ msg ++ " " ++ s)
loopQuery s gst = (doMainLoop $ queryHandler s) (updateIL s gst)
queryHandler s evt gst =
case evt of
KeyEvent (KeyChar c) -> loopQuery (mappend s [c]) gst
KeyEvent KeyDel -> loopQuery (reverse $ drop 1 $ reverse s) gst
KeyEvent KeyEnter -> return $ Just s
KeyEvent (KeyCtrlChar 'G') -> return Nothing
otherwise -> loopQuery s gst
in loopQuery ""
| null | https://raw.githubusercontent.com/sixohsix/tak/6310d19faa683156933dde38666c11dc087d79ea/src/Tak/RunLoop.hs | haskell | module Tak.RunLoop (doMainLoop, confirm, query) where
import Tak.Types
import Tak.Display
import Tak.Editor
import Tak.GlobalState
import Data.Monoid (mappend)
import Control.Lens (set, over, view)
import Control.Monad (when, guard)
renderAndRefresh :: GlobalState -> IO ()
renderAndRefresh gst = do
(y, x) <- getScreenSize
renderEditor (Box (y - 1) 0 1 x) (view infoLine gst)
renderEditor (Box 0 0 (y - 1) x) (view editor gst)
refresh
renderAndWaitEvent :: GlobalState -> IO Event
renderAndWaitEvent gst = do
when (view needsRepaint gst) (renderAndRefresh gst)
waitEvent
doMainLoop :: (Event -> GlobalState -> IO a) -> GlobalState -> IO a
doMainLoop handle globalState = do
evt <- renderAndWaitEvent globalState
handle evt (updateRepaint globalState)
confirm :: String -> GlobalState -> IO Bool
confirm msg gst =
let gst' = updateInfoLine (" " ++ msg ++ " [y/n] ") gst
loopConfirm = doMainLoop confirmHandler
confirmHandler evt gst = case evt of
KeyEvent (KeyChar 'y') -> return True
KeyEvent (KeyChar 'n') -> return False
otherwise -> loopConfirm (preventRepaint gst)
in loopConfirm gst'
query :: String -> GlobalState -> IO (Maybe String)
query msg =
let updateIL s = updateInfoLine (" " ++ msg ++ " " ++ s)
loopQuery s gst = (doMainLoop $ queryHandler s) (updateIL s gst)
queryHandler s evt gst =
case evt of
KeyEvent (KeyChar c) -> loopQuery (mappend s [c]) gst
KeyEvent KeyDel -> loopQuery (reverse $ drop 1 $ reverse s) gst
KeyEvent KeyEnter -> return $ Just s
KeyEvent (KeyCtrlChar 'G') -> return Nothing
otherwise -> loopQuery s gst
in loopQuery ""
| |
d9c3e26369042a665bea6f58504c4d4ad256ea8f646897146212a09a5fe60944 | ogaml/ogaml | window.ml | open OgamlCore
open OgamlUtils
open OgamlMath
type t = {
context : Context.t;
internal : LL.Window.t;
settings : ContextSettings.t;
mutable min_spf : float;
clock : Clock.t
}
let create ?width:(width=800) ?height:(height=600) ?title:(title="")
?settings:(settings=OgamlCore.ContextSettings.create ()) () =
let internal = LL.Window.create ~width ~height ~title ~settings in
let context = Context.LL.create () in
let min_spf =
match ContextSettings.framerate_limit settings with
| None -> 0.
| Some i -> 1. /. (float_of_int i)
in
Context.LL.set_viewport context OgamlMath.IntRect.({x = 0; y = 0; width; height});
{
context;
internal;
settings;
min_spf;
clock = Clock.create ()
}
let set_title win title = LL.Window.set_title win.internal title
let set_framerate_limit win i =
match i with
| None -> win.min_spf <- 0.
| Some i -> win.min_spf <- 1. /. (float_of_int i)
let settings win = win.settings
let close win = LL.Window.close win.internal
let rect win = LL.Window.rect win.internal
let destroy win = LL.Window.destroy win.internal
let resize win size = LL.Window.resize win.internal size
let toggle_fullscreen win = LL.Window.toggle_fullscreen win.internal
let is_open win = LL.Window.is_open win.internal
let has_focus win = LL.Window.has_focus win.internal
let size win = LL.Window.size win.internal
let poll_event win = LL.Window.poll_event win.internal
let display win =
RenderTarget.bind_fbo win.context 0 None;
LL.Window.display win.internal;
if win.min_spf <> 0. then begin
let dt = win.min_spf -. (Clock.time win.clock) in
if dt > 0. then Thread.delay dt;
Clock.restart win.clock
end
let clear ?color:(color=Some (`RGB Color.RGB.black))
?depth:(depth=true)
?stencil:(stencil=true) win =
let depth = (ContextSettings.depth_bits win.settings > 0) && depth in
let stencil = (ContextSettings.stencil_bits win.settings > 0) && stencil in
if depth && not (Context.LL.depth_writing win.context) then begin
Context.LL.set_depth_writing win.context true;
GL.Pervasives.depth_mask true
end;
RenderTarget.bind_fbo win.context 0 None;
RenderTarget.clear ?color ~depth ~stencil win.context
let show_cursor win b = LL.Window.show_cursor win.internal b
let context win = win.context
let bind win params =
RenderTarget.bind_fbo win.context 0 None;
RenderTarget.bind_draw_parameters win.context (size win)
(ContextSettings.aa_level win.settings) params
let internal win = win.internal
let screenshot win =
let size = size win in
RenderTarget.bind_fbo win.context 0 None;
let data =
GL.Pervasives.read_pixels (0,0) (size.Vector2i.x, size.Vector2i.y) GLTypes.PixelFormat.RGBA
in
let rev_data =
Bytes.create (Bytes.length data)
in
for i = 0 to size.Vector2i.y - 1 do
Bytes.blit data (i * size.Vector2i.x * 4) rev_data ((size.Vector2i.y - 1 - i) * size.Vector2i.x * 4) (size.Vector2i.x * 4)
done;
Image.create (`Data (size, rev_data))
| null | https://raw.githubusercontent.com/ogaml/ogaml/5e74597521abf7ba2833a9247e55780eabfbab78/src/graphics/window/window.ml | ocaml | open OgamlCore
open OgamlUtils
open OgamlMath
type t = {
context : Context.t;
internal : LL.Window.t;
settings : ContextSettings.t;
mutable min_spf : float;
clock : Clock.t
}
let create ?width:(width=800) ?height:(height=600) ?title:(title="")
?settings:(settings=OgamlCore.ContextSettings.create ()) () =
let internal = LL.Window.create ~width ~height ~title ~settings in
let context = Context.LL.create () in
let min_spf =
match ContextSettings.framerate_limit settings with
| None -> 0.
| Some i -> 1. /. (float_of_int i)
in
Context.LL.set_viewport context OgamlMath.IntRect.({x = 0; y = 0; width; height});
{
context;
internal;
settings;
min_spf;
clock = Clock.create ()
}
let set_title win title = LL.Window.set_title win.internal title
let set_framerate_limit win i =
match i with
| None -> win.min_spf <- 0.
| Some i -> win.min_spf <- 1. /. (float_of_int i)
let settings win = win.settings
let close win = LL.Window.close win.internal
let rect win = LL.Window.rect win.internal
let destroy win = LL.Window.destroy win.internal
let resize win size = LL.Window.resize win.internal size
let toggle_fullscreen win = LL.Window.toggle_fullscreen win.internal
let is_open win = LL.Window.is_open win.internal
let has_focus win = LL.Window.has_focus win.internal
let size win = LL.Window.size win.internal
let poll_event win = LL.Window.poll_event win.internal
let display win =
RenderTarget.bind_fbo win.context 0 None;
LL.Window.display win.internal;
if win.min_spf <> 0. then begin
let dt = win.min_spf -. (Clock.time win.clock) in
if dt > 0. then Thread.delay dt;
Clock.restart win.clock
end
let clear ?color:(color=Some (`RGB Color.RGB.black))
?depth:(depth=true)
?stencil:(stencil=true) win =
let depth = (ContextSettings.depth_bits win.settings > 0) && depth in
let stencil = (ContextSettings.stencil_bits win.settings > 0) && stencil in
if depth && not (Context.LL.depth_writing win.context) then begin
Context.LL.set_depth_writing win.context true;
GL.Pervasives.depth_mask true
end;
RenderTarget.bind_fbo win.context 0 None;
RenderTarget.clear ?color ~depth ~stencil win.context
let show_cursor win b = LL.Window.show_cursor win.internal b
let context win = win.context
let bind win params =
RenderTarget.bind_fbo win.context 0 None;
RenderTarget.bind_draw_parameters win.context (size win)
(ContextSettings.aa_level win.settings) params
let internal win = win.internal
let screenshot win =
let size = size win in
RenderTarget.bind_fbo win.context 0 None;
let data =
GL.Pervasives.read_pixels (0,0) (size.Vector2i.x, size.Vector2i.y) GLTypes.PixelFormat.RGBA
in
let rev_data =
Bytes.create (Bytes.length data)
in
for i = 0 to size.Vector2i.y - 1 do
Bytes.blit data (i * size.Vector2i.x * 4) rev_data ((size.Vector2i.y - 1 - i) * size.Vector2i.x * 4) (size.Vector2i.x * 4)
done;
Image.create (`Data (size, rev_data))
| |
c501aae02c3817e9d0c451220a0e2bb4135ce878763f8ffafbe2b751465b7b36 | metametadata/carry | project.clj | (defproject
carry-history "0.7.0"
:description "Carry middleware which simplifies working with browser history."
:url ""
:license {:name "MIT" :url ""}
:dependencies [[org.clojure/clojure "1.8.0" :scope "provided"]
[org.clojure/clojurescript "1.9.229" :scope "provided"]
[org.clojure/core.match "0.3.0-alpha4"]]
:plugins [[lein-codox "0.10.0"]]
:pedantic? :abort
:source-paths ["src"]
:repositories {"clojars" {:sign-releases false}}
:codox {:source-uri "/{filepath}#L{line}"
:language :clojurescript
:source-paths ["src"]
:output-path "api"
:metadata {:doc/format :markdown}})
| null | https://raw.githubusercontent.com/metametadata/carry/fa5c7cd0d8f1b71edca70330acc97c6245638efb/contrib/history/project.clj | clojure | (defproject
carry-history "0.7.0"
:description "Carry middleware which simplifies working with browser history."
:url ""
:license {:name "MIT" :url ""}
:dependencies [[org.clojure/clojure "1.8.0" :scope "provided"]
[org.clojure/clojurescript "1.9.229" :scope "provided"]
[org.clojure/core.match "0.3.0-alpha4"]]
:plugins [[lein-codox "0.10.0"]]
:pedantic? :abort
:source-paths ["src"]
:repositories {"clojars" {:sign-releases false}}
:codox {:source-uri "/{filepath}#L{line}"
:language :clojurescript
:source-paths ["src"]
:output-path "api"
:metadata {:doc/format :markdown}})
| |
b0fe3886b52787018080c8d78eeb9aed32538e434773f3cd8a4868db8596c058 | awslabs/s2n-bignum | bignum_copy.ml |
* Copyright Amazon.com , Inc. or its affiliates . All Rights Reserved .
* SPDX - License - Identifier : Apache-2.0 OR ISC
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0 OR ISC
*)
(* ========================================================================= *)
(* Copying (with truncation or extension) bignums *)
(* ========================================================================= *)
(**** print_literal_from_elf "x86/generic/bignum_copy.o";;
****)
let bignum_copy_mc =
define_assert_from_elf "bignum_copy_mc" "x86/generic/bignum_copy.o"
[
CMP ( % rdi ) ( % rdx )
CMOVB ( % rdx ) ( % rdi )
0x4d; 0x31; 0xc0; (* XOR (% r8) (% r8) *)
0x48; 0x85; 0xd2; (* TEST (% rdx) (% rdx) *)
JE ( Imm8 ( word 16 ) )
MOV ( % rax ) ( ( % % % ( rcx,3,r8 ) ) )
MOV ( ( % % % ( rsi,3,r8 ) ) ) ( % rax )
0x49; 0xff; 0xc0; (* INC (% r8) *)
CMP ( % r8 ) ( % rdx )
JB ( Imm8 ( word 240 ) )
CMP ( % r8 ) ( % rdi )
JAE ( Imm8 ( word 15 ) )
0x48; 0x31; 0xc0; (* XOR (% rax) (% rax) *)
MOV ( ( % % % ( rsi,3,r8 ) ) ) ( % rax )
0x49; 0xff; 0xc0; (* INC (% r8) *)
CMP ( % r8 ) ( % rdi )
JB ( Imm8 ( word 244 ) )
RET
];;
let BIGNUM_COPY_EXEC = X86_MK_CORE_EXEC_RULE bignum_copy_mc;;
(* ------------------------------------------------------------------------- *)
(* Correctness proof. *)
(* ------------------------------------------------------------------------- *)
let BIGNUM_COPY_CORRECT = prove
(`!k z n x a pc.
nonoverlapping (word pc,0x34) (z,8 * val k) /\
(x = z \/ nonoverlapping (x,8 * MIN (val n) (val k)) (z,8 * val k))
==> ensures x86
(\s. bytes_loaded s (word pc) (BUTLAST bignum_copy_mc) /\
read RIP s = word pc /\
C_ARGUMENTS [k; z; n; x] s /\
bignum_from_memory (x,val n) s = a)
(\s. read RIP s = word (pc + 0x33) /\
bignum_from_memory (z,val k) s = lowdigits a (val k))
(MAYCHANGE [RIP; RAX; RDX; R8] ,, MAYCHANGE SOME_FLAGS ,,
MAYCHANGE [memory :> bignum(z,val k)])`,
REWRITE_TAC[NONOVERLAPPING_CLAUSES] THEN
REWRITE_TAC[C_ARGUMENTS; C_RETURN; SOME_FLAGS; BIGNUM_COPY_EXEC] THEN
W64_GEN_TAC `k:num` THEN X_GEN_TAC `z:int64` THEN
W64_GEN_TAC `n:num` THEN X_GEN_TAC `x:int64` THEN
MAP_EVERY X_GEN_TAC [`a:num`; `pc:num`] THEN
DISCH_THEN(REPEAT_TCL CONJUNCTS_THEN ASSUME_TAC) THEN
* * Simulate the initial computation of min(n , k ) and then
* * * recast the problem with n ' = min(n , k ) so we can assume
* * * hereafter that n < = makes life a bit easier since
* * * otherwise n can actually be any number < 2 ^ 64 without
* * * violating the preconditions .
* *
*** recast the problem with n' = min(n,k) so we can assume
*** hereafter that n <= k. This makes life a bit easier since
*** otherwise n can actually be any number < 2^64 without
*** violating the preconditions.
***)
ENSURES_SEQUENCE_TAC `pc + 0xa`
`\s. read RDI s = word k /\
read RSI s = z /\
read RDX s = word(MIN n k) /\
read RCX s = x /\
read R8 s = word 0 /\
bignum_from_memory (x,MIN n k) s = lowdigits a k` THEN
CONJ_TAC THENL
[REWRITE_TAC[GSYM LOWDIGITS_BIGNUM_FROM_MEMORY] THEN
X86_SIM_TAC BIGNUM_COPY_EXEC (1--3) THEN
REWRITE_TAC[ARITH_RULE `MIN n k = if k < n then k else n`] THEN
MESON_TAC[];
REPEAT(FIRST_X_ASSUM(MP_TAC o check (free_in `k:num` o concl))) THEN
POP_ASSUM_LIST(K ALL_TAC) THEN MP_TAC(ARITH_RULE `MIN n k <= k`) THEN
SPEC_TAC(`lowdigits a k`,`a:num`) THEN SPEC_TAC(`MIN n k`,`n:num`) THEN
REPEAT GEN_TAC THEN REPEAT DISCH_TAC THEN
VAL_INT64_TAC `n:num` THEN BIGNUM_RANGE_TAC "n" "a"] THEN
(*** Break at the start of the padding stage ***)
ENSURES_SEQUENCE_TAC `pc + 0x1f`
`\s. read RDI s = word k /\
read RSI s = z /\
read R8 s = word n /\
bignum_from_memory(z,n) s = a` THEN
CONJ_TAC THENL
[ASM_CASES_TAC `n = 0` THENL
[ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_TRIVIAL] THEN
REWRITE_TAC[MESON[] `0 = a <=> a = 0`] THEN
X86_SIM_TAC BIGNUM_COPY_EXEC (1--2);
ALL_TAC] THEN
FIRST_ASSUM(MP_TAC o MATCH_MP (ONCE_REWRITE_RULE[IMP_CONJ]
NONOVERLAPPING_IMP_SMALL_2)) THEN
ANTS_TAC THENL [SIMPLE_ARITH_TAC; DISCH_TAC] THEN
(*** The main copying loop, in the case when n is nonzero ***)
ENSURES_WHILE_UP_TAC `n:num` `pc + 0xf` `pc + 0x1a`
`\i s. read RDI s = word k /\
read RSI s = z /\
read RDX s = word n /\
read RCX s = x /\
read R8 s = word i /\
bignum_from_memory(z,i) s = lowdigits a i /\
bignum_from_memory(word_add x (word(8 * i)),n - i) s =
highdigits a i` THEN
ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL
[X86_SIM_TAC BIGNUM_COPY_EXEC (1--2) THEN
REWRITE_TAC[SUB_0; GSYM BIGNUM_FROM_MEMORY_BYTES; HIGHDIGITS_0] THEN
REWRITE_TAC[BIGNUM_FROM_MEMORY_TRIVIAL; MULT_CLAUSES; WORD_ADD_0] THEN
ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES; LOWDIGITS_0];
X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN
GEN_REWRITE_TAC (RATOR_CONV o LAND_CONV o ONCE_DEPTH_CONV)
[BIGNUM_FROM_MEMORY_OFFSET_EQ_HIGHDIGITS] THEN
ASM_REWRITE_TAC[SUB_EQ_0; GSYM NOT_LT] THEN
REWRITE_TAC[ARITH_RULE `k - i - 1 = k - (i + 1)`] THEN
REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN
X86_SIM_TAC BIGNUM_COPY_EXEC (1--3) THEN
ASM_REWRITE_TAC[GSYM WORD_ADD; VAL_WORD_BIGDIGIT] THEN
REWRITE_TAC[LOWDIGITS_CLAUSES] THEN ARITH_TAC;
X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN
X86_SIM_TAC BIGNUM_COPY_EXEC (1--2);
X86_SIM_TAC BIGNUM_COPY_EXEC (1--2) THEN
ASM_SIMP_TAC[LOWDIGITS_SELF]];
ALL_TAC] THEN
(*** Degenerate case of no padding (initial k <= n) ***)
FIRST_X_ASSUM(DISJ_CASES_THEN2 SUBST_ALL_TAC ASSUME_TAC o
MATCH_MP (ARITH_RULE `n:num <= k ==> n = k \/ n < k`))
THENL [X86_SIM_TAC BIGNUM_COPY_EXEC (1--2); ALL_TAC] THEN
FIRST_ASSUM(MP_TAC o MATCH_MP (ONCE_REWRITE_RULE[IMP_CONJ]
NONOVERLAPPING_IMP_SMALL_2)) THEN
ANTS_TAC THENL [SIMPLE_ARITH_TAC; DISCH_TAC] THEN
(*** Main padding loop ***)
ENSURES_WHILE_AUP_TAC `n:num` `k:num` `pc + 0x27` `pc + 0x2e`
`\i s. read RDI s = word k /\
read RSI s = z /\
read R8 s = word i /\
read RAX s = word 0 /\
bignum_from_memory(z,i) s = a` THEN
ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL
[X86_SIM_TAC BIGNUM_COPY_EXEC (1--3);
X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN
REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN
X86_SIM_TAC BIGNUM_COPY_EXEC (1--2) THEN
REWRITE_TAC[VAL_WORD_0; MULT_CLAUSES; ADD_CLAUSES; WORD_ADD];
X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN
X86_SIM_TAC BIGNUM_COPY_EXEC (1--2);
X86_SIM_TAC BIGNUM_COPY_EXEC (1--2)]);;
let BIGNUM_COPY_SUBROUTINE_CORRECT = prove
(`!k z n x a pc stackpointer returnaddress.
nonoverlapping (word pc,0x34) (z,8 * val k) /\
nonoverlapping(z,8 * val k) (stackpointer,8) /\
(x = z \/ nonoverlapping(x,8 * MIN (val n) (val k)) (z,8 * val k))
==> ensures x86
(\s. bytes_loaded s (word pc) bignum_copy_mc /\
read RIP s = word pc /\
read RSP s = stackpointer /\
read (memory :> bytes64 stackpointer) s = returnaddress /\
C_ARGUMENTS [k; z; n; x] s /\
bignum_from_memory (x,val n) s = a)
(\s. read RIP s = returnaddress /\
read RSP s = word_add stackpointer (word 8) /\
bignum_from_memory (z,val k) s = lowdigits a (val k))
(MAYCHANGE [RIP; RSP; RAX; RDX; R8] ,, MAYCHANGE SOME_FLAGS ,,
MAYCHANGE [memory :> bignum(z,val k)])`,
X86_PROMOTE_RETURN_NOSTACK_TAC bignum_copy_mc BIGNUM_COPY_CORRECT);;
(* ------------------------------------------------------------------------- *)
(* Correctness of Windows ABI version. *)
(* ------------------------------------------------------------------------- *)
let windows_bignum_copy_mc = define_from_elf
"windows_bignum_copy_mc" "x86/generic/bignum_copy.obj";;
let WINDOWS_BIGNUM_COPY_SUBROUTINE_CORRECT = prove
(`!k z n x a pc stackpointer returnaddress.
ALL (nonoverlapping (word_sub stackpointer (word 16),16))
[(word pc,0x44); (x,8 * val n)] /\
nonoverlapping (word pc,0x44) (z,8 * val k) /\
nonoverlapping(z,8 * val k) (word_sub stackpointer (word 16),24) /\
(x = z \/ nonoverlapping(x,8 * MIN (val n) (val k)) (z,8 * val k))
==> ensures x86
(\s. bytes_loaded s (word pc) windows_bignum_copy_mc /\
read RIP s = word pc /\
read RSP s = stackpointer /\
read (memory :> bytes64 stackpointer) s = returnaddress /\
WINDOWS_C_ARGUMENTS [k; z; n; x] s /\
bignum_from_memory (x,val n) s = a)
(\s. read RIP s = returnaddress /\
read RSP s = word_add stackpointer (word 8) /\
bignum_from_memory (z,val k) s = lowdigits a (val k))
(MAYCHANGE [RIP; RSP; RCX; RAX; RDX; R8] ,, MAYCHANGE SOME_FLAGS ,,
MAYCHANGE [memory :> bignum(z,val k);
memory :> bytes(word_sub stackpointer (word 16),16)])`,
WINDOWS_X86_WRAP_NOSTACK_TAC windows_bignum_copy_mc bignum_copy_mc
BIGNUM_COPY_CORRECT);;
| null | https://raw.githubusercontent.com/awslabs/s2n-bignum/824c15f908d7a343af1b2f378cfedd36e880bdde/x86/proofs/bignum_copy.ml | ocaml | =========================================================================
Copying (with truncation or extension) bignums
=========================================================================
*** print_literal_from_elf "x86/generic/bignum_copy.o";;
***
XOR (% r8) (% r8)
TEST (% rdx) (% rdx)
INC (% r8)
XOR (% rax) (% rax)
INC (% r8)
-------------------------------------------------------------------------
Correctness proof.
-------------------------------------------------------------------------
** Break at the start of the padding stage **
** The main copying loop, in the case when n is nonzero **
** Degenerate case of no padding (initial k <= n) **
** Main padding loop **
-------------------------------------------------------------------------
Correctness of Windows ABI version.
------------------------------------------------------------------------- |
* Copyright Amazon.com , Inc. or its affiliates . All Rights Reserved .
* SPDX - License - Identifier : Apache-2.0 OR ISC
* Copyright Amazon.com, Inc. or its affiliates. All Rights Reserved.
* SPDX-License-Identifier: Apache-2.0 OR ISC
*)
let bignum_copy_mc =
define_assert_from_elf "bignum_copy_mc" "x86/generic/bignum_copy.o"
[
CMP ( % rdi ) ( % rdx )
CMOVB ( % rdx ) ( % rdi )
JE ( Imm8 ( word 16 ) )
MOV ( % rax ) ( ( % % % ( rcx,3,r8 ) ) )
MOV ( ( % % % ( rsi,3,r8 ) ) ) ( % rax )
CMP ( % r8 ) ( % rdx )
JB ( Imm8 ( word 240 ) )
CMP ( % r8 ) ( % rdi )
JAE ( Imm8 ( word 15 ) )
MOV ( ( % % % ( rsi,3,r8 ) ) ) ( % rax )
CMP ( % r8 ) ( % rdi )
JB ( Imm8 ( word 244 ) )
RET
];;
let BIGNUM_COPY_EXEC = X86_MK_CORE_EXEC_RULE bignum_copy_mc;;
let BIGNUM_COPY_CORRECT = prove
(`!k z n x a pc.
nonoverlapping (word pc,0x34) (z,8 * val k) /\
(x = z \/ nonoverlapping (x,8 * MIN (val n) (val k)) (z,8 * val k))
==> ensures x86
(\s. bytes_loaded s (word pc) (BUTLAST bignum_copy_mc) /\
read RIP s = word pc /\
C_ARGUMENTS [k; z; n; x] s /\
bignum_from_memory (x,val n) s = a)
(\s. read RIP s = word (pc + 0x33) /\
bignum_from_memory (z,val k) s = lowdigits a (val k))
(MAYCHANGE [RIP; RAX; RDX; R8] ,, MAYCHANGE SOME_FLAGS ,,
MAYCHANGE [memory :> bignum(z,val k)])`,
REWRITE_TAC[NONOVERLAPPING_CLAUSES] THEN
REWRITE_TAC[C_ARGUMENTS; C_RETURN; SOME_FLAGS; BIGNUM_COPY_EXEC] THEN
W64_GEN_TAC `k:num` THEN X_GEN_TAC `z:int64` THEN
W64_GEN_TAC `n:num` THEN X_GEN_TAC `x:int64` THEN
MAP_EVERY X_GEN_TAC [`a:num`; `pc:num`] THEN
DISCH_THEN(REPEAT_TCL CONJUNCTS_THEN ASSUME_TAC) THEN
* * Simulate the initial computation of min(n , k ) and then
* * * recast the problem with n ' = min(n , k ) so we can assume
* * * hereafter that n < = makes life a bit easier since
* * * otherwise n can actually be any number < 2 ^ 64 without
* * * violating the preconditions .
* *
*** recast the problem with n' = min(n,k) so we can assume
*** hereafter that n <= k. This makes life a bit easier since
*** otherwise n can actually be any number < 2^64 without
*** violating the preconditions.
***)
ENSURES_SEQUENCE_TAC `pc + 0xa`
`\s. read RDI s = word k /\
read RSI s = z /\
read RDX s = word(MIN n k) /\
read RCX s = x /\
read R8 s = word 0 /\
bignum_from_memory (x,MIN n k) s = lowdigits a k` THEN
CONJ_TAC THENL
[REWRITE_TAC[GSYM LOWDIGITS_BIGNUM_FROM_MEMORY] THEN
X86_SIM_TAC BIGNUM_COPY_EXEC (1--3) THEN
REWRITE_TAC[ARITH_RULE `MIN n k = if k < n then k else n`] THEN
MESON_TAC[];
REPEAT(FIRST_X_ASSUM(MP_TAC o check (free_in `k:num` o concl))) THEN
POP_ASSUM_LIST(K ALL_TAC) THEN MP_TAC(ARITH_RULE `MIN n k <= k`) THEN
SPEC_TAC(`lowdigits a k`,`a:num`) THEN SPEC_TAC(`MIN n k`,`n:num`) THEN
REPEAT GEN_TAC THEN REPEAT DISCH_TAC THEN
VAL_INT64_TAC `n:num` THEN BIGNUM_RANGE_TAC "n" "a"] THEN
ENSURES_SEQUENCE_TAC `pc + 0x1f`
`\s. read RDI s = word k /\
read RSI s = z /\
read R8 s = word n /\
bignum_from_memory(z,n) s = a` THEN
CONJ_TAC THENL
[ASM_CASES_TAC `n = 0` THENL
[ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_TRIVIAL] THEN
REWRITE_TAC[MESON[] `0 = a <=> a = 0`] THEN
X86_SIM_TAC BIGNUM_COPY_EXEC (1--2);
ALL_TAC] THEN
FIRST_ASSUM(MP_TAC o MATCH_MP (ONCE_REWRITE_RULE[IMP_CONJ]
NONOVERLAPPING_IMP_SMALL_2)) THEN
ANTS_TAC THENL [SIMPLE_ARITH_TAC; DISCH_TAC] THEN
ENSURES_WHILE_UP_TAC `n:num` `pc + 0xf` `pc + 0x1a`
`\i s. read RDI s = word k /\
read RSI s = z /\
read RDX s = word n /\
read RCX s = x /\
read R8 s = word i /\
bignum_from_memory(z,i) s = lowdigits a i /\
bignum_from_memory(word_add x (word(8 * i)),n - i) s =
highdigits a i` THEN
ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL
[X86_SIM_TAC BIGNUM_COPY_EXEC (1--2) THEN
REWRITE_TAC[SUB_0; GSYM BIGNUM_FROM_MEMORY_BYTES; HIGHDIGITS_0] THEN
REWRITE_TAC[BIGNUM_FROM_MEMORY_TRIVIAL; MULT_CLAUSES; WORD_ADD_0] THEN
ASM_REWRITE_TAC[BIGNUM_FROM_MEMORY_BYTES; LOWDIGITS_0];
X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN
GEN_REWRITE_TAC (RATOR_CONV o LAND_CONV o ONCE_DEPTH_CONV)
[BIGNUM_FROM_MEMORY_OFFSET_EQ_HIGHDIGITS] THEN
ASM_REWRITE_TAC[SUB_EQ_0; GSYM NOT_LT] THEN
REWRITE_TAC[ARITH_RULE `k - i - 1 = k - (i + 1)`] THEN
REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN
X86_SIM_TAC BIGNUM_COPY_EXEC (1--3) THEN
ASM_REWRITE_TAC[GSYM WORD_ADD; VAL_WORD_BIGDIGIT] THEN
REWRITE_TAC[LOWDIGITS_CLAUSES] THEN ARITH_TAC;
X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN
X86_SIM_TAC BIGNUM_COPY_EXEC (1--2);
X86_SIM_TAC BIGNUM_COPY_EXEC (1--2) THEN
ASM_SIMP_TAC[LOWDIGITS_SELF]];
ALL_TAC] THEN
FIRST_X_ASSUM(DISJ_CASES_THEN2 SUBST_ALL_TAC ASSUME_TAC o
MATCH_MP (ARITH_RULE `n:num <= k ==> n = k \/ n < k`))
THENL [X86_SIM_TAC BIGNUM_COPY_EXEC (1--2); ALL_TAC] THEN
FIRST_ASSUM(MP_TAC o MATCH_MP (ONCE_REWRITE_RULE[IMP_CONJ]
NONOVERLAPPING_IMP_SMALL_2)) THEN
ANTS_TAC THENL [SIMPLE_ARITH_TAC; DISCH_TAC] THEN
ENSURES_WHILE_AUP_TAC `n:num` `k:num` `pc + 0x27` `pc + 0x2e`
`\i s. read RDI s = word k /\
read RSI s = z /\
read R8 s = word i /\
read RAX s = word 0 /\
bignum_from_memory(z,i) s = a` THEN
ASM_REWRITE_TAC[] THEN REPEAT CONJ_TAC THENL
[X86_SIM_TAC BIGNUM_COPY_EXEC (1--3);
X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN
REWRITE_TAC[BIGNUM_FROM_MEMORY_STEP] THEN
X86_SIM_TAC BIGNUM_COPY_EXEC (1--2) THEN
REWRITE_TAC[VAL_WORD_0; MULT_CLAUSES; ADD_CLAUSES; WORD_ADD];
X_GEN_TAC `i:num` THEN STRIP_TAC THEN VAL_INT64_TAC `i:num` THEN
X86_SIM_TAC BIGNUM_COPY_EXEC (1--2);
X86_SIM_TAC BIGNUM_COPY_EXEC (1--2)]);;
let BIGNUM_COPY_SUBROUTINE_CORRECT = prove
(`!k z n x a pc stackpointer returnaddress.
nonoverlapping (word pc,0x34) (z,8 * val k) /\
nonoverlapping(z,8 * val k) (stackpointer,8) /\
(x = z \/ nonoverlapping(x,8 * MIN (val n) (val k)) (z,8 * val k))
==> ensures x86
(\s. bytes_loaded s (word pc) bignum_copy_mc /\
read RIP s = word pc /\
read RSP s = stackpointer /\
read (memory :> bytes64 stackpointer) s = returnaddress /\
C_ARGUMENTS [k; z; n; x] s /\
bignum_from_memory (x,val n) s = a)
(\s. read RIP s = returnaddress /\
read RSP s = word_add stackpointer (word 8) /\
bignum_from_memory (z,val k) s = lowdigits a (val k))
(MAYCHANGE [RIP; RSP; RAX; RDX; R8] ,, MAYCHANGE SOME_FLAGS ,,
MAYCHANGE [memory :> bignum(z,val k)])`,
X86_PROMOTE_RETURN_NOSTACK_TAC bignum_copy_mc BIGNUM_COPY_CORRECT);;
let windows_bignum_copy_mc = define_from_elf
"windows_bignum_copy_mc" "x86/generic/bignum_copy.obj";;
let WINDOWS_BIGNUM_COPY_SUBROUTINE_CORRECT = prove
(`!k z n x a pc stackpointer returnaddress.
ALL (nonoverlapping (word_sub stackpointer (word 16),16))
[(word pc,0x44); (x,8 * val n)] /\
nonoverlapping (word pc,0x44) (z,8 * val k) /\
nonoverlapping(z,8 * val k) (word_sub stackpointer (word 16),24) /\
(x = z \/ nonoverlapping(x,8 * MIN (val n) (val k)) (z,8 * val k))
==> ensures x86
(\s. bytes_loaded s (word pc) windows_bignum_copy_mc /\
read RIP s = word pc /\
read RSP s = stackpointer /\
read (memory :> bytes64 stackpointer) s = returnaddress /\
WINDOWS_C_ARGUMENTS [k; z; n; x] s /\
bignum_from_memory (x,val n) s = a)
(\s. read RIP s = returnaddress /\
read RSP s = word_add stackpointer (word 8) /\
bignum_from_memory (z,val k) s = lowdigits a (val k))
(MAYCHANGE [RIP; RSP; RCX; RAX; RDX; R8] ,, MAYCHANGE SOME_FLAGS ,,
MAYCHANGE [memory :> bignum(z,val k);
memory :> bytes(word_sub stackpointer (word 16),16)])`,
WINDOWS_X86_WRAP_NOSTACK_TAC windows_bignum_copy_mc bignum_copy_mc
BIGNUM_COPY_CORRECT);;
|
1b8cff668ac1f76e9e920a1e46ace8f287d45388b4b67771151a446e35428e75 | riemann/riemann | druid.clj | (ns riemann.druid
"Forwards events to Druid"
(:require
[clj-http.client :as http]
[cheshire.core :refer [generate-string]]
[riemann.common :refer [unix-to-iso8601]]))
(defn post-datapoint
"Post the riemann metrics as datapoints."
[host port dataset json-data http-opts]
(let [scheme "http://"
endpoint "/v1/post/"
url (str scheme host ":" port endpoint dataset)
http-options (assoc (merge {:conn-timeout 5000
:socket-timeout 5000
:throw-entire-message? true}
http-opts)
:body json-data
:content-type :json)]
(http/post url http-options)))
(defn generate-event [event]
{:host (:host event)
:service (:service event)
:state (:state event)
:timestamp (unix-to-iso8601 (:time event))
:tags (:tags event)
:description (:description event)
:value (:metric event)})
(defn druid
"Returns a function which accepts single events or batches of
events in a vector and sends them to the Druid Tranquility Server.
Usage:
(druid {:host \"druid.example.com\"})
Options:
- `:host` Hostname of Druid Tranquility server. (default: `\"localhost\"`)
- `:port` Port at which Druid Tranquility is listening (default: `8200`)
- `:dataset` Dataset name to be given (default: `\"riemann\"`)
Example:
```clojure
(def druid-async
(batch 100 1/10
(async-queue!
:druid-async ; A name for the forwarder
10,000 events max
:core-pool-size 5 ; Minimum 5 threads
Maximum 100 threads
(druid {:host \"localhost\"}))))
```"
[opts]
(let [opts (merge {:host "localhost"
:port 8200
:dataset "riemann"}
opts)]
(fn [event]
(let [events (if (sequential? event)
event
[event])
post-data (mapv generate-event events)
json-data (generate-string post-data)]
(post-datapoint (:host opts) (:port opts) (:dataset opts) json-data (get opts :http-options {}))))))
| null | https://raw.githubusercontent.com/riemann/riemann/1649687c0bd913c378701ee0b964a9863bde7c7c/src/riemann/druid.clj | clojure | A name for the forwarder
Minimum 5 threads | (ns riemann.druid
"Forwards events to Druid"
(:require
[clj-http.client :as http]
[cheshire.core :refer [generate-string]]
[riemann.common :refer [unix-to-iso8601]]))
(defn post-datapoint
"Post the riemann metrics as datapoints."
[host port dataset json-data http-opts]
(let [scheme "http://"
endpoint "/v1/post/"
url (str scheme host ":" port endpoint dataset)
http-options (assoc (merge {:conn-timeout 5000
:socket-timeout 5000
:throw-entire-message? true}
http-opts)
:body json-data
:content-type :json)]
(http/post url http-options)))
(defn generate-event [event]
{:host (:host event)
:service (:service event)
:state (:state event)
:timestamp (unix-to-iso8601 (:time event))
:tags (:tags event)
:description (:description event)
:value (:metric event)})
(defn druid
"Returns a function which accepts single events or batches of
events in a vector and sends them to the Druid Tranquility Server.
Usage:
(druid {:host \"druid.example.com\"})
Options:
- `:host` Hostname of Druid Tranquility server. (default: `\"localhost\"`)
- `:port` Port at which Druid Tranquility is listening (default: `8200`)
- `:dataset` Dataset name to be given (default: `\"riemann\"`)
Example:
```clojure
(def druid-async
(batch 100 1/10
(async-queue!
10,000 events max
Maximum 100 threads
(druid {:host \"localhost\"}))))
```"
[opts]
(let [opts (merge {:host "localhost"
:port 8200
:dataset "riemann"}
opts)]
(fn [event]
(let [events (if (sequential? event)
event
[event])
post-data (mapv generate-event events)
json-data (generate-string post-data)]
(post-datapoint (:host opts) (:port opts) (:dataset opts) json-data (get opts :http-options {}))))))
|
4f3f12f43cfa9ada12b7d61410f3d834736f154a418777c605a34cb44fd94e3b | runtimeverification/haskell-backend | InfoUserLog.hs | # LANGUAGE NoStrict #
# LANGUAGE NoStrictData #
|
Copyright : ( c ) Runtime Verification , 2020 - 2021
License : BSD-3 - Clause
Copyright : (c) Runtime Verification, 2020-2021
License : BSD-3-Clause
-}
module Kore.Log.InfoUserLog (
InfoUserLog (..),
infoUserLog,
) where
import Data.Text
import Debug
import Log
import Prelude.Kore
import Pretty (
Pretty,
)
import Pretty qualified
newtype InfoUserLog = InfoUserLog {getUserLog :: Text}
deriving stock (Eq, Ord, Show)
instance Debug InfoUserLog where
debugPrec w = \_ -> Pretty.pretty . show $ w
instance Diff InfoUserLog where
diffPrec = diffPrecEq
instance Pretty InfoUserLog where
pretty (InfoUserLog userLog) =
Pretty.pretty userLog
instance Entry InfoUserLog where
entrySeverity _ = Info
oneLineDoc (InfoUserLog userLog) = Pretty.pretty userLog
helpDoc _ = "user-specified log message"
infoUserLog :: MonadLog log => Text -> log ()
infoUserLog = logEntry . InfoUserLog
| null | https://raw.githubusercontent.com/runtimeverification/haskell-backend/76027ab6c805c1b2523da94ea48368f0f4303edc/kore/src/Kore/Log/InfoUserLog.hs | haskell | # LANGUAGE NoStrict #
# LANGUAGE NoStrictData #
|
Copyright : ( c ) Runtime Verification , 2020 - 2021
License : BSD-3 - Clause
Copyright : (c) Runtime Verification, 2020-2021
License : BSD-3-Clause
-}
module Kore.Log.InfoUserLog (
InfoUserLog (..),
infoUserLog,
) where
import Data.Text
import Debug
import Log
import Prelude.Kore
import Pretty (
Pretty,
)
import Pretty qualified
newtype InfoUserLog = InfoUserLog {getUserLog :: Text}
deriving stock (Eq, Ord, Show)
instance Debug InfoUserLog where
debugPrec w = \_ -> Pretty.pretty . show $ w
instance Diff InfoUserLog where
diffPrec = diffPrecEq
instance Pretty InfoUserLog where
pretty (InfoUserLog userLog) =
Pretty.pretty userLog
instance Entry InfoUserLog where
entrySeverity _ = Info
oneLineDoc (InfoUserLog userLog) = Pretty.pretty userLog
helpDoc _ = "user-specified log message"
infoUserLog :: MonadLog log => Text -> log ()
infoUserLog = logEntry . InfoUserLog
| |
e192df260fa7b5ba0195f1d2c37eb43b62e50d566bb09b925d6c4b00a03ad1d1 | bobby/redisconf19-demo | events.cljs | (ns redis-streams-clj.storefront.ui.events
(:require [clojure.set :as set]
[re-frame.core :as re-frame]
[day8.re-frame.tracing :refer-macros [fn-traced]]
[re-graph.core :as re-graph]
redis-streams-clj.storefront.ui.effects
[redis-streams-clj.storefront.ui.config :as config]
[redis-streams-clj.storefront.ui.core :as api]
[redis-streams-clj.storefront.ui.routes :as routes]))
(defn index-by-id
[m]
(reduce (fn [agg e]
(assoc agg (:id e) e))
{}
m))
(defn- dissoc-all
[m ks]
(apply dissoc m ks))
(re-frame/reg-event-fx
:app/initialize
(fn-traced [fx _]
{:db {:menu {}
:notifications {}}
;; TODO: make these urls configurable
:dispatch [::re-graph/init {:ws-url config/ws-url
:http-url config/http-url}]}))
(re-frame/reg-event-db
:routes/home
(fn-traced [db _]
(assoc db :page :home)))
(re-frame/reg-event-fx
:routes/menu
(fn-traced [{:keys [db]} [_ query-params]]
{:db (assoc db :page :menu)
:dispatch [::re-graph/query
"query menu{menu{id,title,description,price,photo_url}}"
{}
[:query/menu]]}))
(re-frame/reg-event-db
:query/menu
(fn-traced [db [_ {:keys [data errors] :as result}]]
(if (seq errors)
(update db :notifications #(reduce (fn [agg error]
(let [id (random-uuid)]
(assoc agg id error)))
%
(map api/format-error errors)))
(update db :menu merge (reduce (fn [agg a]
(assoc agg (:id a) a))
{}
(:menu data))))))
(re-frame/reg-event-db
:routes/basket
(fn-traced [db _]
(assoc db :page :basket)))
(re-frame/reg-event-db
:routes/orders
(fn-traced [db _]
(assoc db :page :orders)))
(re-frame/reg-event-fx
:command/upsert-customer!
(fn-traced [{:keys [db] :as fx} [_ params]]
{:dispatch [::re-graph/mutate
"mutation upsertCustomer($name:String,$email:String){upsertCustomer(name:$name,email:$email){id,name,email,basket{id,menu_item_id,customization,quantity},orders{id,items{id,menu_item_id,customization,quantity,status},status}}}"
(select-keys params [:name :email])
[:event/customer-upserted]]
:db (let [id (random-uuid)]
(update db :notifications assoc id {:color :info :message "Signed in!"}))
:navigate (routes/menu)}))
(re-frame/reg-event-fx
:event/customer-upserted
(fn-traced [{:keys [db]} [_ {:keys [data errors] :as result}]]
(let [customer (:upsertCustomer data)]
{:db (if (seq errors)
(update db :notifications #(reduce (fn [agg error]
(let [id (random-uuid)]
(assoc agg id error)))
%
(map api/format-error errors)))
(assoc db :customer (-> customer
(update :basket index-by-id)
(update :orders index-by-id))))
:dispatch [::re-graph/subscribe
(:email customer)
"subscription customerByEmail($email:String){customerByEmail(email:$email){id,name,email,basket{id,menu_item_id,customization,quantity},orders{id,items{id,menu_item_id,customization,quantity,status},status}}}"
{:email (:email customer)}
[:customer/updated]]})))
;; TODO: handle errors
(re-frame/reg-event-db
:customer/updated
(fn-traced [db [_ result]]
(let [customer (-> result :data :customerByEmail)]
(assoc db :customer (-> customer
(update :basket index-by-id)
(update :orders index-by-id))))))
(re-frame/reg-event-fx
:command/add-items-to-basket!
(fn-traced [{:keys [db] :as fx} [_ items]]
{:dispatch [::re-graph/mutate
"mutation addItemsToBasket($customer_email:String,$items:[BasketItem]){addItemsToBasket(customer_email:$customer_email,items:$items){id,menu_item_id,customization,quantity}}"
{:customer_email (get-in db [:customer :email])
:items items}
[:event/items-added-to-basket]]}))
;; TODO: handle :errors
(re-frame/reg-event-db
:event/items-added-to-basket
(fn-traced [db [_ {:keys [data] :as result}]]
(-> db
(assoc-in [:notifications (random-uuid)] {:color :success :message "Item added to basket!"})
(update-in [:customer :basket] merge (-> data :addItemsToBasket index-by-id)))))
(re-frame/reg-event-fx
:command/remove-items-from-basket!
(fn-traced [{:keys [db] :as fx} [_ items]]
{:dispatch [::re-graph/mutate
"mutation removeItemsFromBasket($customer_email:String,$items:[ID]){removeItemsFromBasket(customer_email:$customer_email,items:$items)}"
{:customer_email (get-in db [:customer :email])
:items items}
[:event/items-removed-from-basket]]}))
;; TODO: handle :errors
(re-frame/reg-event-db
:event/items-removed-from-basket
(fn-traced [db [_ {:keys [data]}]]
(-> db
(assoc-in [:notifications (random-uuid)] {:color :success :message "Item removed from basket!"})
(update-in [:customer :basket] dissoc-all (:removeItemsFromBasket data)))))
(re-frame/reg-event-fx
:command/place-order!
(fn-traced [{:keys [db] :as fx} [_]]
(let [items (-> db :customer :basket vals)]
{:dispatch [::re-graph/mutate
"mutation placeOrder($customer_email:String,$items:[BasketItem]){placeOrder(customer_email:$customer_email,items:$items){id,items{id,menu_item_id,customization,quantity,status},status}}"
{:customer_email (get-in db [:customer :email])
:items items}
[:event/order-placed]]})))
;; TODO: handle :errors
(re-frame/reg-event-fx
:event/order-placed
(fn-traced [{:keys [db] :as fx} [_ {:keys [data] :as result}]]
(let [order (:placeOrder data)]
{:db (-> db
(assoc-in [:customer :orders (:id order)] order)
(update :notifications assoc
(random-uuid) {:color :info
:message (str "Order #" (:id order) " Placed!")}))
:navigate (routes/orders)})))
(re-frame/reg-event-fx
:command/pay-order!
(fn-traced [{:keys [db] :as fx} [_ id]]
{:dispatch [::re-graph/mutate
"mutation payOrder($id:ID){payOrder(id:$id){id,items,status}}"
{:id id}
[:event/order-paid]]}))
;; TODO: handle :errors
(re-frame/reg-event-db
:event/order-paid
(fn-traced [db [_ order]]
(assoc-in db [:customer :orders (:id order)] order)))
(re-frame/reg-event-db
:command/dismiss-notification
(fn-traced [db [_ id]]
(update db :notifications dissoc id)))
| null | https://raw.githubusercontent.com/bobby/redisconf19-demo/0191ed1cf6a7cdf47037de3e4dd187a5c6a33a36/storefront/src/redis_streams_clj/storefront/ui/events.cljs | clojure | TODO: make these urls configurable
TODO: handle errors
TODO: handle :errors
TODO: handle :errors
TODO: handle :errors
TODO: handle :errors | (ns redis-streams-clj.storefront.ui.events
(:require [clojure.set :as set]
[re-frame.core :as re-frame]
[day8.re-frame.tracing :refer-macros [fn-traced]]
[re-graph.core :as re-graph]
redis-streams-clj.storefront.ui.effects
[redis-streams-clj.storefront.ui.config :as config]
[redis-streams-clj.storefront.ui.core :as api]
[redis-streams-clj.storefront.ui.routes :as routes]))
(defn index-by-id
[m]
(reduce (fn [agg e]
(assoc agg (:id e) e))
{}
m))
(defn- dissoc-all
[m ks]
(apply dissoc m ks))
(re-frame/reg-event-fx
:app/initialize
(fn-traced [fx _]
{:db {:menu {}
:notifications {}}
:dispatch [::re-graph/init {:ws-url config/ws-url
:http-url config/http-url}]}))
(re-frame/reg-event-db
:routes/home
(fn-traced [db _]
(assoc db :page :home)))
(re-frame/reg-event-fx
:routes/menu
(fn-traced [{:keys [db]} [_ query-params]]
{:db (assoc db :page :menu)
:dispatch [::re-graph/query
"query menu{menu{id,title,description,price,photo_url}}"
{}
[:query/menu]]}))
(re-frame/reg-event-db
:query/menu
(fn-traced [db [_ {:keys [data errors] :as result}]]
(if (seq errors)
(update db :notifications #(reduce (fn [agg error]
(let [id (random-uuid)]
(assoc agg id error)))
%
(map api/format-error errors)))
(update db :menu merge (reduce (fn [agg a]
(assoc agg (:id a) a))
{}
(:menu data))))))
(re-frame/reg-event-db
:routes/basket
(fn-traced [db _]
(assoc db :page :basket)))
(re-frame/reg-event-db
:routes/orders
(fn-traced [db _]
(assoc db :page :orders)))
(re-frame/reg-event-fx
:command/upsert-customer!
(fn-traced [{:keys [db] :as fx} [_ params]]
{:dispatch [::re-graph/mutate
"mutation upsertCustomer($name:String,$email:String){upsertCustomer(name:$name,email:$email){id,name,email,basket{id,menu_item_id,customization,quantity},orders{id,items{id,menu_item_id,customization,quantity,status},status}}}"
(select-keys params [:name :email])
[:event/customer-upserted]]
:db (let [id (random-uuid)]
(update db :notifications assoc id {:color :info :message "Signed in!"}))
:navigate (routes/menu)}))
(re-frame/reg-event-fx
:event/customer-upserted
(fn-traced [{:keys [db]} [_ {:keys [data errors] :as result}]]
(let [customer (:upsertCustomer data)]
{:db (if (seq errors)
(update db :notifications #(reduce (fn [agg error]
(let [id (random-uuid)]
(assoc agg id error)))
%
(map api/format-error errors)))
(assoc db :customer (-> customer
(update :basket index-by-id)
(update :orders index-by-id))))
:dispatch [::re-graph/subscribe
(:email customer)
"subscription customerByEmail($email:String){customerByEmail(email:$email){id,name,email,basket{id,menu_item_id,customization,quantity},orders{id,items{id,menu_item_id,customization,quantity,status},status}}}"
{:email (:email customer)}
[:customer/updated]]})))
(re-frame/reg-event-db
:customer/updated
(fn-traced [db [_ result]]
(let [customer (-> result :data :customerByEmail)]
(assoc db :customer (-> customer
(update :basket index-by-id)
(update :orders index-by-id))))))
(re-frame/reg-event-fx
:command/add-items-to-basket!
(fn-traced [{:keys [db] :as fx} [_ items]]
{:dispatch [::re-graph/mutate
"mutation addItemsToBasket($customer_email:String,$items:[BasketItem]){addItemsToBasket(customer_email:$customer_email,items:$items){id,menu_item_id,customization,quantity}}"
{:customer_email (get-in db [:customer :email])
:items items}
[:event/items-added-to-basket]]}))
(re-frame/reg-event-db
:event/items-added-to-basket
(fn-traced [db [_ {:keys [data] :as result}]]
(-> db
(assoc-in [:notifications (random-uuid)] {:color :success :message "Item added to basket!"})
(update-in [:customer :basket] merge (-> data :addItemsToBasket index-by-id)))))
(re-frame/reg-event-fx
:command/remove-items-from-basket!
(fn-traced [{:keys [db] :as fx} [_ items]]
{:dispatch [::re-graph/mutate
"mutation removeItemsFromBasket($customer_email:String,$items:[ID]){removeItemsFromBasket(customer_email:$customer_email,items:$items)}"
{:customer_email (get-in db [:customer :email])
:items items}
[:event/items-removed-from-basket]]}))
(re-frame/reg-event-db
:event/items-removed-from-basket
(fn-traced [db [_ {:keys [data]}]]
(-> db
(assoc-in [:notifications (random-uuid)] {:color :success :message "Item removed from basket!"})
(update-in [:customer :basket] dissoc-all (:removeItemsFromBasket data)))))
(re-frame/reg-event-fx
:command/place-order!
(fn-traced [{:keys [db] :as fx} [_]]
(let [items (-> db :customer :basket vals)]
{:dispatch [::re-graph/mutate
"mutation placeOrder($customer_email:String,$items:[BasketItem]){placeOrder(customer_email:$customer_email,items:$items){id,items{id,menu_item_id,customization,quantity,status},status}}"
{:customer_email (get-in db [:customer :email])
:items items}
[:event/order-placed]]})))
(re-frame/reg-event-fx
:event/order-placed
(fn-traced [{:keys [db] :as fx} [_ {:keys [data] :as result}]]
(let [order (:placeOrder data)]
{:db (-> db
(assoc-in [:customer :orders (:id order)] order)
(update :notifications assoc
(random-uuid) {:color :info
:message (str "Order #" (:id order) " Placed!")}))
:navigate (routes/orders)})))
(re-frame/reg-event-fx
:command/pay-order!
(fn-traced [{:keys [db] :as fx} [_ id]]
{:dispatch [::re-graph/mutate
"mutation payOrder($id:ID){payOrder(id:$id){id,items,status}}"
{:id id}
[:event/order-paid]]}))
(re-frame/reg-event-db
:event/order-paid
(fn-traced [db [_ order]]
(assoc-in db [:customer :orders (:id order)] order)))
(re-frame/reg-event-db
:command/dismiss-notification
(fn-traced [db [_ id]]
(update db :notifications dissoc id)))
|
cc38330713a011025c6f9887e2a6812b4b981aa1d3209ae801091243c489440f | genmeblog/genuary | day18.clj | ;; VHS.
;; full process:
(ns genuary.2022.day18
(:require [clojure2d.core :as c2d]
[clojure2d.extra.utils :as utils]
[clojure2d.pixels :as p]
[clojure2d.color :as c]
[fastmath.signal :as sig]
[clojure2d.extra.signal :as sig2d]
[clojure2d.extra.overlays :as o]
[fastmath.core :as m]
[fastmath.random :as r]))
(def w 672)
(def h 504)
(def low-pass-effect (sig/effect :simple-lowpass {:rate 10000 :cutoff 2000}))
(def low-pass-effect2 (sig/effect :simple-lowpass {:rate 10000 :cutoff 1000}))
(def high-pass-effect (sig/effect :simple-highpass {:rate 100000 :cutoff 3}))
(def low-pass (sig2d/effects-filter low-pass-effect w))
(def low-pass3 (sig2d/effects-filter (sig/compose-effects low-pass-effect2 low-pass-effect2 low-pass-effect2) w))
(def high-pass (sig2d/effects-filter (sig/compose-effects high-pass-effect high-pass-effect) w))
(def delay-i (sig2d/effects-filter (sig/effect :echo {:rate w :delay 0.03 :decay 0.6}) w))
(def delay-q (sig2d/effects-filter (sig/effect :echo {:rate w :delay 0.04 :decay 0.6}) w))
(def n (o/noise-overlay w h {:alpha 60}))
(defn make-stripes
[canvas]
(c2d/push-matrix canvas)
(c2d/translate canvas (/ w 2) (+ 20 (/ h 2)))
(doseq [angle (range -0.555 (+ 0.5432 m/PI) 0.09)]
(c2d/push-matrix canvas)
(c2d/rotate canvas angle)
(c2d/line canvas 0 0 w 0)
(c2d/pop-matrix canvas))
(c2d/pop-matrix canvas))
(def g (c/gradient [:cyan :lightblue :white :indigo :black]))
(defn add-errors
[canvas]
(doseq [x (range w)
y (range h)
:let [n (r/noise (/ x 800.0) (/ y 15.0))]]
(when (< (r/drand) (m/pow n 8.0))
(c2d/set-color canvas (g (r/drand)) 200)
(c2d/ellipse canvas x y (r/drand 0.5 3) (r/drand 0.5 3))))
canvas)
(defn unsharp-luma
[pixels]
(let [p (p/clone-pixels pixels)]
(p/set-channel! p 1 (p/get-channel p 0))
(p/set-channel! p 2 (p/get-channel p 0))
(let [res (p/to-pixels (c2d/convolve p [0 0 0 -2 5 -2 0 0 0]))] ;; convolve horizontally only
(p/set-channel! pixels 0 (p/get-channel res 0)))))
(c2d/with-canvas-> (c2d/canvas w h :highest)
(c2d/set-background :black)
(c2d/set-color :darkred)
(c2d/set-stroke 2.3)
(make-stripes)
(c2d/set-color :chocolate)
(c2d/push-matrix)
(c2d/translate 0 350)
(c2d/flip-y)
(make-stripes)
(c2d/pop-matrix)
(c2d/ellipse (/ w 2) 70 50 50)
(c2d/gradient-mode (/ w 2) 300 (c/darken :indigo) (/ w 2) 480 :white)
(c2d/triangle (* 0.2 w) 330 (* 0.8 w) 330 (* 0.5 w) 450)
(c2d/gradient-mode (/ w 2) 300 :white (/ w 2) 480 :indigo)
(c2d/triangle (* 0.2 w) 480 (* 0.8 w) 480 (* 0.5 w) 300)
(c2d/set-font "Heavitas")
(c2d/set-font-attributes 100 :bold-italic)
(c2d/set-color :magenta)
(c2d/text "GENUARY" 35 205)
(c2d/set-color [240 220 240])
(c2d/gradient-mode (/ w 2) 150 [255 235 240] (/ w 2) 200 :darkcyan)
(c2d/text "GENUARY" 25 195)
(c2d/set-font-attributes 50)
(c2d/set-color :magenta)
(c2d/text "2022" 505 255)
(c2d/set-color [240 220 240])
(c2d/text "2022" 495 245)
(c2d/image n 0 0 w h)
(add-errors)
(p/to-pixels)
(->> (p/filter-colors c/to-YIQ*)
(p/filter-channels high-pass low-pass3 low-pass3 nil)
(unsharp-luma)
(p/filter-channels nil delay-i delay-q nil)
(p/filter-channels low-pass low-pass3 low-pass3 nil)
(p/filter-colors c/from-YIQ*)
(p/filter-channels p/normalize))
(c2d/get-image)
(o/render-rgb-scanlines {:scale 1.1})
#_(c2d/save "results/2022/day18.jpg")
(utils/show-image))
| null | https://raw.githubusercontent.com/genmeblog/genuary/c8d5d23d5bc3d91b90a894461c9af27f9a15ad65/src/genuary/2022/day18.clj | clojure | VHS.
full process:
convolve horizontally only |
(ns genuary.2022.day18
(:require [clojure2d.core :as c2d]
[clojure2d.extra.utils :as utils]
[clojure2d.pixels :as p]
[clojure2d.color :as c]
[fastmath.signal :as sig]
[clojure2d.extra.signal :as sig2d]
[clojure2d.extra.overlays :as o]
[fastmath.core :as m]
[fastmath.random :as r]))
(def w 672)
(def h 504)
(def low-pass-effect (sig/effect :simple-lowpass {:rate 10000 :cutoff 2000}))
(def low-pass-effect2 (sig/effect :simple-lowpass {:rate 10000 :cutoff 1000}))
(def high-pass-effect (sig/effect :simple-highpass {:rate 100000 :cutoff 3}))
(def low-pass (sig2d/effects-filter low-pass-effect w))
(def low-pass3 (sig2d/effects-filter (sig/compose-effects low-pass-effect2 low-pass-effect2 low-pass-effect2) w))
(def high-pass (sig2d/effects-filter (sig/compose-effects high-pass-effect high-pass-effect) w))
(def delay-i (sig2d/effects-filter (sig/effect :echo {:rate w :delay 0.03 :decay 0.6}) w))
(def delay-q (sig2d/effects-filter (sig/effect :echo {:rate w :delay 0.04 :decay 0.6}) w))
(def n (o/noise-overlay w h {:alpha 60}))
(defn make-stripes
[canvas]
(c2d/push-matrix canvas)
(c2d/translate canvas (/ w 2) (+ 20 (/ h 2)))
(doseq [angle (range -0.555 (+ 0.5432 m/PI) 0.09)]
(c2d/push-matrix canvas)
(c2d/rotate canvas angle)
(c2d/line canvas 0 0 w 0)
(c2d/pop-matrix canvas))
(c2d/pop-matrix canvas))
(def g (c/gradient [:cyan :lightblue :white :indigo :black]))
(defn add-errors
[canvas]
(doseq [x (range w)
y (range h)
:let [n (r/noise (/ x 800.0) (/ y 15.0))]]
(when (< (r/drand) (m/pow n 8.0))
(c2d/set-color canvas (g (r/drand)) 200)
(c2d/ellipse canvas x y (r/drand 0.5 3) (r/drand 0.5 3))))
canvas)
(defn unsharp-luma
[pixels]
(let [p (p/clone-pixels pixels)]
(p/set-channel! p 1 (p/get-channel p 0))
(p/set-channel! p 2 (p/get-channel p 0))
(p/set-channel! pixels 0 (p/get-channel res 0)))))
(c2d/with-canvas-> (c2d/canvas w h :highest)
(c2d/set-background :black)
(c2d/set-color :darkred)
(c2d/set-stroke 2.3)
(make-stripes)
(c2d/set-color :chocolate)
(c2d/push-matrix)
(c2d/translate 0 350)
(c2d/flip-y)
(make-stripes)
(c2d/pop-matrix)
(c2d/ellipse (/ w 2) 70 50 50)
(c2d/gradient-mode (/ w 2) 300 (c/darken :indigo) (/ w 2) 480 :white)
(c2d/triangle (* 0.2 w) 330 (* 0.8 w) 330 (* 0.5 w) 450)
(c2d/gradient-mode (/ w 2) 300 :white (/ w 2) 480 :indigo)
(c2d/triangle (* 0.2 w) 480 (* 0.8 w) 480 (* 0.5 w) 300)
(c2d/set-font "Heavitas")
(c2d/set-font-attributes 100 :bold-italic)
(c2d/set-color :magenta)
(c2d/text "GENUARY" 35 205)
(c2d/set-color [240 220 240])
(c2d/gradient-mode (/ w 2) 150 [255 235 240] (/ w 2) 200 :darkcyan)
(c2d/text "GENUARY" 25 195)
(c2d/set-font-attributes 50)
(c2d/set-color :magenta)
(c2d/text "2022" 505 255)
(c2d/set-color [240 220 240])
(c2d/text "2022" 495 245)
(c2d/image n 0 0 w h)
(add-errors)
(p/to-pixels)
(->> (p/filter-colors c/to-YIQ*)
(p/filter-channels high-pass low-pass3 low-pass3 nil)
(unsharp-luma)
(p/filter-channels nil delay-i delay-q nil)
(p/filter-channels low-pass low-pass3 low-pass3 nil)
(p/filter-colors c/from-YIQ*)
(p/filter-channels p/normalize))
(c2d/get-image)
(o/render-rgb-scanlines {:scale 1.1})
#_(c2d/save "results/2022/day18.jpg")
(utils/show-image))
|
bcdfe0cc6a7d7564f7325047ecf9c07e660f9dcb60fcea1c0d3786daeb97e2ef | Dexterminator/imperimetric | server.clj | (ns imperimetric.server
(:require [imperimetric.handler :refer [handler dev-handler]]
[config.core :refer [env]]
[imperimetric.frinj-setup :refer [frinj-setup!]]
[ring.adapter.jetty :refer [run-jetty]])
(:gen-class))
(defn -main [& args]
(frinj-setup!)
(let [port (Integer/parseInt (or (env :port) "3000"))]
(run-jetty handler {:port port :join? false})))
(defn dev-main []
(frinj-setup!)
(run-jetty dev-handler {:port 4000 :join? false}))
| null | https://raw.githubusercontent.com/Dexterminator/imperimetric/57e975c470490724f69cc43c2f5d0fa2359745d0/src/clj/imperimetric/server.clj | clojure | (ns imperimetric.server
(:require [imperimetric.handler :refer [handler dev-handler]]
[config.core :refer [env]]
[imperimetric.frinj-setup :refer [frinj-setup!]]
[ring.adapter.jetty :refer [run-jetty]])
(:gen-class))
(defn -main [& args]
(frinj-setup!)
(let [port (Integer/parseInt (or (env :port) "3000"))]
(run-jetty handler {:port port :join? false})))
(defn dev-main []
(frinj-setup!)
(run-jetty dev-handler {:port 4000 :join? false}))
| |
bac39a0c3b894d07e30c10ce02226bb65a07517bbe323c4ea5cd6a483d3b6298 | semerdzhiev/fp-2020-21 | 07-composition.rkt | #lang racket
(require rackunit)
(require rackunit/text-ui)
(define (accumulate op null-value start end term next)
(if (> start end)
null-value
(op (term start) (accumulate op null-value (next start) end term next))))
(define (compose f g)
(lambda (x) (f (g x))))
Искаме да изразим функция n пъти чрез accumulate .
Как изглеждаше това на упражнение ?
(define (repeated f n)
(if (= n 1) f
(compose f (repeated f (- n 1)))))
(define (repeat f n)
(void)
;(accumulate ??? ??? ??? ??? ??? ???)
)
(define tests
(test-suite "Repeat tests"
(check-equal? ((repeat (lambda (x) (+ x 1)) 3) 5) 8)
Искаме да проверим дали нашата accumulate , на упражнение
(let ((f (lambda (x) (expt x 2)))
(arg 2))
(check-equal? ((repeat f 2) arg) ((repeated f 2) arg)))
)
)
(run-tests tests 'verbose) | null | https://raw.githubusercontent.com/semerdzhiev/fp-2020-21/64fa00c4f940f75a28cc5980275b124ca21244bc/group-b/exercises/03.higher-order-functions/07-composition.rkt | racket | (accumulate ??? ??? ??? ??? ??? ???) | #lang racket
(require rackunit)
(require rackunit/text-ui)
(define (accumulate op null-value start end term next)
(if (> start end)
null-value
(op (term start) (accumulate op null-value (next start) end term next))))
(define (compose f g)
(lambda (x) (f (g x))))
Искаме да изразим функция n пъти чрез accumulate .
Как изглеждаше това на упражнение ?
(define (repeated f n)
(if (= n 1) f
(compose f (repeated f (- n 1)))))
(define (repeat f n)
(void)
)
(define tests
(test-suite "Repeat tests"
(check-equal? ((repeat (lambda (x) (+ x 1)) 3) 5) 8)
Искаме да проверим дали нашата accumulate , на упражнение
(let ((f (lambda (x) (expt x 2)))
(arg 2))
(check-equal? ((repeat f 2) arg) ((repeated f 2) arg)))
)
)
(run-tests tests 'verbose) |
a9443d8484a7b4f83e0b18489f720719704b927c07f313c8189eed21819c07f8 | reflex-frp/patch | MapWithMove.hs | # LANGUAGE CPP #
{-# LANGUAGE DeriveTraversable #-}
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PatternGuards #
# LANGUAGE PatternSynonyms #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
|
Description : An intermediate ' Patch ' on ' Map '
Patches of this type can insert , delete , and also move values from one key to
another .
Description: An intermediate 'Patch' on 'Map'
Patches of this type can insert, delete, and also move values from one key to
another.
-}
module Data.Patch.MapWithMove
( PatchMapWithMove
( PatchMapWithMove
, unPatchMapWithMove
, ..
)
, patchMapWithMove
, patchMapWithMoveInsertAll
, insertMapKey
, moveMapKey
, swapMapKey
, deleteMapKey
, unsafePatchMapWithMove
, patchMapWithMoveNewElements
, patchMapWithMoveNewElementsMap
, patchThatSortsMapWith
, patchThatChangesAndSortsMapWith
, patchThatChangesMap
-- * Node Info
, NodeInfo
( NodeInfo
, _nodeInfo_from
, _nodeInfo_to
, ..
)
, bitraverseNodeInfo
, nodeInfoMapFrom
, nodeInfoMapMFrom
, nodeInfoSetTo
-- * From
, From
( From_Insert
, From_Delete
, From_Move
, ..
)
, bitraverseFrom
-- * To
, To
) where
import Data.Coerce
import Data.Kind (Type)
import Data.Patch.Class
import Data.Patch.MapWithPatchingMove (PatchMapWithPatchingMove(..), To)
import qualified Data.Patch.MapWithPatchingMove as PM -- already a transparent synonym
import Control.Lens hiding (FunctorWithIndex, FoldableWithIndex, TraversableWithIndex)
#if !MIN_VERSION_lens(5,0,0)
import qualified Control.Lens as L
#endif
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Proxy
#if !MIN_VERSION_base(4,11,0)
import Data.Semigroup (Semigroup (..))
#endif
import Data.Traversable (foldMapDefault)
import Data.Functor.WithIndex
import Data.Foldable.WithIndex
import Data.Traversable.WithIndex
| Patch a Map with additions , deletions , and moves . Invariant : If key @k1@
is coming from @From_Move k2@ , then key @k2@ should be going to @Just ,
-- and vice versa. There should never be any unpaired From/To keys.
newtype PatchMapWithMove k (v :: Type) = PatchMapWithMove'
{ -- | Extract the underlying 'PatchMapWithPatchingMove k (Proxy v)'
unPatchMapWithMove' :: PatchMapWithPatchingMove k (Proxy v)
}
deriving ( Show, Read, Eq, Ord
can not handle documentation here before GHC 8.6
,
#if __GLASGOW_HASKELL__ >= 806
-- | Compose patches having the same effect as applying the
-- patches in turn: @'applyAlways' (p <> q) == 'applyAlways' p .
-- 'applyAlways' q@
#endif
Semigroup
, Monoid
)
pattern Coerce :: Coercible a b => a -> b
pattern Coerce x <- (coerce -> x)
where Coerce x = coerce x
{-# COMPLETE PatchMapWithMove #-}
pattern PatchMapWithMove :: Map k (NodeInfo k v) -> PatchMapWithMove k v
-- | Extract the representation of the t'PatchMapWithMove' as a map of
-- t'NodeInfo'.
unPatchMapWithMove :: PatchMapWithMove k v -> Map k (NodeInfo k v)
pattern PatchMapWithMove { unPatchMapWithMove } = PatchMapWithMove' (PatchMapWithPatchingMove (Coerce unPatchMapWithMove))
_PatchMapWithMove
:: Iso
(PatchMapWithMove k0 v0)
(PatchMapWithMove k1 v1)
(Map k0 (NodeInfo k0 v0))
(Map k1 (NodeInfo k1 v1))
_PatchMapWithMove = iso unPatchMapWithMove PatchMapWithMove
instance Functor (PatchMapWithMove k) where
fmap f = runIdentity . traverse (Identity . f)
instance Foldable (PatchMapWithMove k) where
foldMap = foldMapDefault
instance Traversable (PatchMapWithMove k) where
traverse =
_PatchMapWithMove .
traverse .
traverse
instance FunctorWithIndex k (PatchMapWithMove k)
instance FoldableWithIndex k (PatchMapWithMove k)
instance TraversableWithIndex k (PatchMapWithMove k) where
itraverse = (_PatchMapWithMove .> itraversed <. traverse) . Indexed
#if !MIN_VERSION_lens(5,0,0)
instance L.FunctorWithIndex k (PatchMapWithMove k) where imap = Data.Functor.WithIndex.imap
instance L.FoldableWithIndex k (PatchMapWithMove k) where ifoldMap = Data.Foldable.WithIndex.ifoldMap
instance L.TraversableWithIndex k (PatchMapWithMove k) where itraverse = Data.Traversable.WithIndex.itraverse
#endif
-- | Create a t'PatchMapWithMove', validating it
patchMapWithMove :: Ord k => Map k (NodeInfo k v) -> Maybe (PatchMapWithMove k v)
patchMapWithMove = fmap PatchMapWithMove' . PM.patchMapWithPatchingMove . coerce
-- | Create a t'PatchMapWithMove' that inserts everything in the given 'Map'
patchMapWithMoveInsertAll :: Map k v -> PatchMapWithMove k v
patchMapWithMoveInsertAll = PatchMapWithMove' . PM.patchMapWithPatchingMoveInsertAll
| Make a @t'PatchMapWithMove ' k v@ which has the effect of inserting or updating a value @v@ to the given key @k@ , like ' Map.insert ' .
insertMapKey :: k -> v -> PatchMapWithMove k v
insertMapKey k v = PatchMapWithMove' $ PM.insertMapKey k v
|Make a @t'PatchMapWithMove ' k v@ which has the effect of moving the value from the first key @k@ to the second key @k@ , equivalent to :
--
-- @
-- 'Map.delete' src (maybe map ('Map.insert' dst) (Map.lookup src map))
-- @
moveMapKey :: Ord k => k -> k -> PatchMapWithMove k v
moveMapKey src dst = PatchMapWithMove' $ PM.moveMapKey src dst
|Make a @t'PatchMapWithMove ' k v@ which has the effect of swapping two keys in the mapping , equivalent to :
--
-- @
let = Map.lookup a map
= Map.lookup b map
in maybe i d ( Map.insert a ) ( bMay < > )
. maybe i d ( Map.insert b ) ( < > )
-- . Map.delete a . Map.delete b $ map
-- @
swapMapKey :: Ord k => k -> k -> PatchMapWithMove k v
swapMapKey src dst = PatchMapWithMove' $ PM.swapMapKey src dst
-- |Make a @t'PatchMapWithMove' k v@ which has the effect of deleting a key in the mapping, equivalent to 'Map.delete'.
deleteMapKey :: k -> PatchMapWithMove k v
deleteMapKey = PatchMapWithMove' . PM.deleteMapKey
| Wrap a @'Map ' k ( NodeInfo k v)@ representing patch changes into a @t'PatchMapWithMove ' k v@ , without checking any invariants .
--
-- __Warning:__ when using this function, you must ensure that the invariants of t'PatchMapWithMove' are preserved; they will not be checked.
unsafePatchMapWithMove :: Map k (NodeInfo k v) -> PatchMapWithMove k v
unsafePatchMapWithMove = coerce PM.unsafePatchMapWithPatchingMove
-- | Apply the insertions, deletions, and moves to a given 'Map'
instance Ord k => Patch (PatchMapWithMove k v) where
type PatchTarget (PatchMapWithMove k v) = Map k v
apply (PatchMapWithMove' p) = apply p
-- | Returns all the new elements that will be added to the 'Map'.
patchMapWithMoveNewElements :: PatchMapWithMove k v -> [v]
patchMapWithMoveNewElements = PM.patchMapWithPatchingMoveNewElements . unPatchMapWithMove'
-- | Return a @'Map' k v@ with all the inserts/updates from the given @t'PatchMapWithMove' k v@.
patchMapWithMoveNewElementsMap :: PatchMapWithMove k v -> Map k v
patchMapWithMoveNewElementsMap = PM.patchMapWithPatchingMoveNewElementsMap . unPatchMapWithMove'
-- | Create a t'PatchMapWithMove' that, if applied to the given 'Map', will sort
-- its values using the given ordering function. The set keys of the 'Map' is
-- not changed.
patchThatSortsMapWith :: Ord k => (v -> v -> Ordering) -> Map k v -> PatchMapWithMove k v
patchThatSortsMapWith cmp = PatchMapWithMove' . PM.patchThatSortsMapWith cmp
| Create a t'PatchMapWithMove ' that , if applied to the first ' Map ' provided ,
will produce a ' Map ' with the same values as the second ' Map ' but with the
-- values sorted with the given ordering function.
patchThatChangesAndSortsMapWith :: (Ord k, Ord v) => (v -> v -> Ordering) -> Map k v -> Map k v -> PatchMapWithMove k v
patchThatChangesAndSortsMapWith cmp oldByIndex newByIndexUnsorted = patchThatChangesMap oldByIndex newByIndex
where newList = Map.toList newByIndexUnsorted
newByIndex = Map.fromList $ zip (fst <$> newList) $ sortBy cmp $ snd <$> newList
| Create a t'PatchMapWithMove ' that , if applied to the first ' Map ' provided ,
will produce the second ' Map ' .
patchThatChangesMap :: (Ord k, Ord v) => Map k v -> Map k v -> PatchMapWithMove k v
patchThatChangesMap oldByIndex newByIndex = PatchMapWithMove' $
PM.patchThatChangesMap oldByIndex newByIndex
--
NodeInfo
--
-- | Holds the information about each key: where its new value should come from,
-- and where its old value should go to
newtype NodeInfo k (v :: Type) = NodeInfo' { unNodeInfo' :: PM.NodeInfo k (Proxy v) }
deriving instance (Show k, Show p) => Show (NodeInfo k p)
deriving instance (Read k, Read p) => Read (NodeInfo k p)
deriving instance (Eq k, Eq p) => Eq (NodeInfo k p)
deriving instance (Ord k, Ord p) => Ord (NodeInfo k p)
{-# COMPLETE NodeInfo #-}
pattern NodeInfo :: From k v -> To k -> NodeInfo k v
_nodeInfo_from :: NodeInfo k v -> From k v
_nodeInfo_to :: NodeInfo k v -> To k
pattern NodeInfo { _nodeInfo_from, _nodeInfo_to } = NodeInfo'
PM.NodeInfo
{ PM._nodeInfo_from = Coerce _nodeInfo_from
, PM._nodeInfo_to = _nodeInfo_to
}
_NodeInfo
:: Iso
(NodeInfo k0 v0)
(NodeInfo k1 v1)
(PM.NodeInfo k0 (Proxy v0))
(PM.NodeInfo k1 (Proxy v1))
_NodeInfo = iso unNodeInfo' NodeInfo'
instance Functor (NodeInfo k) where
fmap f = runIdentity . traverse (Identity . f)
instance Foldable (NodeInfo k) where
foldMap = foldMapDefault
instance Traversable (NodeInfo k) where
traverse = bitraverseNodeInfo pure
-- | Like 'Data.Bitraversable.bitraverse'
bitraverseNodeInfo
:: Applicative f
=> (k0 -> f k1)
-> (v0 -> f v1)
-> NodeInfo k0 v0 -> f (NodeInfo k1 v1)
bitraverseNodeInfo fk fv = fmap NodeInfo'
. PM.bitraverseNodeInfo fk (\ ~Proxy -> pure Proxy) fv
. coerce
-- | Change the 'From' value of a t'NodeInfo'
nodeInfoMapFrom :: (From k v -> From k v) -> NodeInfo k v -> NodeInfo k v
nodeInfoMapFrom f = coerce $ PM.nodeInfoMapFrom (unFrom' . f . From')
-- | Change the 'From' value of a t'NodeInfo', using a 'Functor' (or
-- 'Applicative', 'Monad', etc.) action to get the new value
nodeInfoMapMFrom
:: Functor f
=> (From k v -> f (From k v))
-> NodeInfo k v -> f (NodeInfo k v)
nodeInfoMapMFrom f = fmap NodeInfo'
. PM.nodeInfoMapMFrom (fmap unFrom' . f . From')
. coerce
-- | Set the 'To' field of a t'NodeInfo'
nodeInfoSetTo :: To k -> NodeInfo k v -> NodeInfo k v
nodeInfoSetTo = coerce . PM.nodeInfoSetTo
--
-- From
--
-- | Describe how a key's new value should be produced
newtype From k (v :: Type) = From' { unFrom' :: PM.From k (Proxy v) }
# , From_Delete , From_Move #
-- | Insert the given value here
pattern From_Insert :: v -> From k v
pattern From_Insert v = From' (PM.From_Insert v)
-- | Delete the existing value, if any, from here
pattern From_Delete :: From k v
pattern From_Delete = From' PM.From_Delete
-- | Move the value here from the given key
pattern From_Move :: k -> From k v
pattern From_Move k = From' (PM.From_Move k Proxy)
-- | Like 'Data.Bitraversable.bitraverse'
bitraverseFrom
:: Applicative f
=> (k0 -> f k1)
-> (v0 -> f v1)
-> From k0 v0 -> f (From k1 v1)
bitraverseFrom fk fv = fmap From'
. PM.bitraverseFrom fk (\ ~Proxy -> pure Proxy) fv
. coerce
makeWrapped ''PatchMapWithMove
makeWrapped ''NodeInfo
makeWrapped ''From
| null | https://raw.githubusercontent.com/reflex-frp/patch/b4f8f06441c0af33d1917530d5aa84f915bdbf2e/src/Data/Patch/MapWithMove.hs | haskell | # LANGUAGE DeriveTraversable #
* Node Info
* From
* To
already a transparent synonym
and vice versa. There should never be any unpaired From/To keys.
| Extract the underlying 'PatchMapWithPatchingMove k (Proxy v)'
| Compose patches having the same effect as applying the
patches in turn: @'applyAlways' (p <> q) == 'applyAlways' p .
'applyAlways' q@
# COMPLETE PatchMapWithMove #
| Extract the representation of the t'PatchMapWithMove' as a map of
t'NodeInfo'.
| Create a t'PatchMapWithMove', validating it
| Create a t'PatchMapWithMove' that inserts everything in the given 'Map'
@
'Map.delete' src (maybe map ('Map.insert' dst) (Map.lookup src map))
@
@
. Map.delete a . Map.delete b $ map
@
|Make a @t'PatchMapWithMove' k v@ which has the effect of deleting a key in the mapping, equivalent to 'Map.delete'.
__Warning:__ when using this function, you must ensure that the invariants of t'PatchMapWithMove' are preserved; they will not be checked.
| Apply the insertions, deletions, and moves to a given 'Map'
| Returns all the new elements that will be added to the 'Map'.
| Return a @'Map' k v@ with all the inserts/updates from the given @t'PatchMapWithMove' k v@.
| Create a t'PatchMapWithMove' that, if applied to the given 'Map', will sort
its values using the given ordering function. The set keys of the 'Map' is
not changed.
values sorted with the given ordering function.
| Holds the information about each key: where its new value should come from,
and where its old value should go to
# COMPLETE NodeInfo #
| Like 'Data.Bitraversable.bitraverse'
| Change the 'From' value of a t'NodeInfo'
| Change the 'From' value of a t'NodeInfo', using a 'Functor' (or
'Applicative', 'Monad', etc.) action to get the new value
| Set the 'To' field of a t'NodeInfo'
From
| Describe how a key's new value should be produced
| Insert the given value here
| Delete the existing value, if any, from here
| Move the value here from the given key
| Like 'Data.Bitraversable.bitraverse' | # LANGUAGE CPP #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PatternGuards #
# LANGUAGE PatternSynonyms #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE StandaloneDeriving #
# LANGUAGE TemplateHaskell #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
|
Description : An intermediate ' Patch ' on ' Map '
Patches of this type can insert , delete , and also move values from one key to
another .
Description: An intermediate 'Patch' on 'Map'
Patches of this type can insert, delete, and also move values from one key to
another.
-}
module Data.Patch.MapWithMove
( PatchMapWithMove
( PatchMapWithMove
, unPatchMapWithMove
, ..
)
, patchMapWithMove
, patchMapWithMoveInsertAll
, insertMapKey
, moveMapKey
, swapMapKey
, deleteMapKey
, unsafePatchMapWithMove
, patchMapWithMoveNewElements
, patchMapWithMoveNewElementsMap
, patchThatSortsMapWith
, patchThatChangesAndSortsMapWith
, patchThatChangesMap
, NodeInfo
( NodeInfo
, _nodeInfo_from
, _nodeInfo_to
, ..
)
, bitraverseNodeInfo
, nodeInfoMapFrom
, nodeInfoMapMFrom
, nodeInfoSetTo
, From
( From_Insert
, From_Delete
, From_Move
, ..
)
, bitraverseFrom
, To
) where
import Data.Coerce
import Data.Kind (Type)
import Data.Patch.Class
import Data.Patch.MapWithPatchingMove (PatchMapWithPatchingMove(..), To)
import Control.Lens hiding (FunctorWithIndex, FoldableWithIndex, TraversableWithIndex)
#if !MIN_VERSION_lens(5,0,0)
import qualified Control.Lens as L
#endif
import Data.List
import Data.Map (Map)
import qualified Data.Map as Map
import Data.Proxy
#if !MIN_VERSION_base(4,11,0)
import Data.Semigroup (Semigroup (..))
#endif
import Data.Traversable (foldMapDefault)
import Data.Functor.WithIndex
import Data.Foldable.WithIndex
import Data.Traversable.WithIndex
| Patch a Map with additions , deletions , and moves . Invariant : If key @k1@
is coming from @From_Move k2@ , then key @k2@ should be going to @Just ,
newtype PatchMapWithMove k (v :: Type) = PatchMapWithMove'
unPatchMapWithMove' :: PatchMapWithPatchingMove k (Proxy v)
}
deriving ( Show, Read, Eq, Ord
can not handle documentation here before GHC 8.6
,
#if __GLASGOW_HASKELL__ >= 806
#endif
Semigroup
, Monoid
)
pattern Coerce :: Coercible a b => a -> b
pattern Coerce x <- (coerce -> x)
where Coerce x = coerce x
pattern PatchMapWithMove :: Map k (NodeInfo k v) -> PatchMapWithMove k v
unPatchMapWithMove :: PatchMapWithMove k v -> Map k (NodeInfo k v)
pattern PatchMapWithMove { unPatchMapWithMove } = PatchMapWithMove' (PatchMapWithPatchingMove (Coerce unPatchMapWithMove))
_PatchMapWithMove
:: Iso
(PatchMapWithMove k0 v0)
(PatchMapWithMove k1 v1)
(Map k0 (NodeInfo k0 v0))
(Map k1 (NodeInfo k1 v1))
_PatchMapWithMove = iso unPatchMapWithMove PatchMapWithMove
instance Functor (PatchMapWithMove k) where
fmap f = runIdentity . traverse (Identity . f)
instance Foldable (PatchMapWithMove k) where
foldMap = foldMapDefault
instance Traversable (PatchMapWithMove k) where
traverse =
_PatchMapWithMove .
traverse .
traverse
instance FunctorWithIndex k (PatchMapWithMove k)
instance FoldableWithIndex k (PatchMapWithMove k)
instance TraversableWithIndex k (PatchMapWithMove k) where
itraverse = (_PatchMapWithMove .> itraversed <. traverse) . Indexed
#if !MIN_VERSION_lens(5,0,0)
instance L.FunctorWithIndex k (PatchMapWithMove k) where imap = Data.Functor.WithIndex.imap
instance L.FoldableWithIndex k (PatchMapWithMove k) where ifoldMap = Data.Foldable.WithIndex.ifoldMap
instance L.TraversableWithIndex k (PatchMapWithMove k) where itraverse = Data.Traversable.WithIndex.itraverse
#endif
patchMapWithMove :: Ord k => Map k (NodeInfo k v) -> Maybe (PatchMapWithMove k v)
patchMapWithMove = fmap PatchMapWithMove' . PM.patchMapWithPatchingMove . coerce
patchMapWithMoveInsertAll :: Map k v -> PatchMapWithMove k v
patchMapWithMoveInsertAll = PatchMapWithMove' . PM.patchMapWithPatchingMoveInsertAll
| Make a @t'PatchMapWithMove ' k v@ which has the effect of inserting or updating a value @v@ to the given key @k@ , like ' Map.insert ' .
insertMapKey :: k -> v -> PatchMapWithMove k v
insertMapKey k v = PatchMapWithMove' $ PM.insertMapKey k v
|Make a @t'PatchMapWithMove ' k v@ which has the effect of moving the value from the first key @k@ to the second key @k@ , equivalent to :
moveMapKey :: Ord k => k -> k -> PatchMapWithMove k v
moveMapKey src dst = PatchMapWithMove' $ PM.moveMapKey src dst
|Make a @t'PatchMapWithMove ' k v@ which has the effect of swapping two keys in the mapping , equivalent to :
let = Map.lookup a map
= Map.lookup b map
in maybe i d ( Map.insert a ) ( bMay < > )
. maybe i d ( Map.insert b ) ( < > )
swapMapKey :: Ord k => k -> k -> PatchMapWithMove k v
swapMapKey src dst = PatchMapWithMove' $ PM.swapMapKey src dst
deleteMapKey :: k -> PatchMapWithMove k v
deleteMapKey = PatchMapWithMove' . PM.deleteMapKey
| Wrap a @'Map ' k ( NodeInfo k v)@ representing patch changes into a @t'PatchMapWithMove ' k v@ , without checking any invariants .
unsafePatchMapWithMove :: Map k (NodeInfo k v) -> PatchMapWithMove k v
unsafePatchMapWithMove = coerce PM.unsafePatchMapWithPatchingMove
instance Ord k => Patch (PatchMapWithMove k v) where
type PatchTarget (PatchMapWithMove k v) = Map k v
apply (PatchMapWithMove' p) = apply p
patchMapWithMoveNewElements :: PatchMapWithMove k v -> [v]
patchMapWithMoveNewElements = PM.patchMapWithPatchingMoveNewElements . unPatchMapWithMove'
patchMapWithMoveNewElementsMap :: PatchMapWithMove k v -> Map k v
patchMapWithMoveNewElementsMap = PM.patchMapWithPatchingMoveNewElementsMap . unPatchMapWithMove'
patchThatSortsMapWith :: Ord k => (v -> v -> Ordering) -> Map k v -> PatchMapWithMove k v
patchThatSortsMapWith cmp = PatchMapWithMove' . PM.patchThatSortsMapWith cmp
| Create a t'PatchMapWithMove ' that , if applied to the first ' Map ' provided ,
will produce a ' Map ' with the same values as the second ' Map ' but with the
patchThatChangesAndSortsMapWith :: (Ord k, Ord v) => (v -> v -> Ordering) -> Map k v -> Map k v -> PatchMapWithMove k v
patchThatChangesAndSortsMapWith cmp oldByIndex newByIndexUnsorted = patchThatChangesMap oldByIndex newByIndex
where newList = Map.toList newByIndexUnsorted
newByIndex = Map.fromList $ zip (fst <$> newList) $ sortBy cmp $ snd <$> newList
| Create a t'PatchMapWithMove ' that , if applied to the first ' Map ' provided ,
will produce the second ' Map ' .
patchThatChangesMap :: (Ord k, Ord v) => Map k v -> Map k v -> PatchMapWithMove k v
patchThatChangesMap oldByIndex newByIndex = PatchMapWithMove' $
PM.patchThatChangesMap oldByIndex newByIndex
NodeInfo
newtype NodeInfo k (v :: Type) = NodeInfo' { unNodeInfo' :: PM.NodeInfo k (Proxy v) }
deriving instance (Show k, Show p) => Show (NodeInfo k p)
deriving instance (Read k, Read p) => Read (NodeInfo k p)
deriving instance (Eq k, Eq p) => Eq (NodeInfo k p)
deriving instance (Ord k, Ord p) => Ord (NodeInfo k p)
pattern NodeInfo :: From k v -> To k -> NodeInfo k v
_nodeInfo_from :: NodeInfo k v -> From k v
_nodeInfo_to :: NodeInfo k v -> To k
pattern NodeInfo { _nodeInfo_from, _nodeInfo_to } = NodeInfo'
PM.NodeInfo
{ PM._nodeInfo_from = Coerce _nodeInfo_from
, PM._nodeInfo_to = _nodeInfo_to
}
_NodeInfo
:: Iso
(NodeInfo k0 v0)
(NodeInfo k1 v1)
(PM.NodeInfo k0 (Proxy v0))
(PM.NodeInfo k1 (Proxy v1))
_NodeInfo = iso unNodeInfo' NodeInfo'
instance Functor (NodeInfo k) where
fmap f = runIdentity . traverse (Identity . f)
instance Foldable (NodeInfo k) where
foldMap = foldMapDefault
instance Traversable (NodeInfo k) where
traverse = bitraverseNodeInfo pure
bitraverseNodeInfo
:: Applicative f
=> (k0 -> f k1)
-> (v0 -> f v1)
-> NodeInfo k0 v0 -> f (NodeInfo k1 v1)
bitraverseNodeInfo fk fv = fmap NodeInfo'
. PM.bitraverseNodeInfo fk (\ ~Proxy -> pure Proxy) fv
. coerce
nodeInfoMapFrom :: (From k v -> From k v) -> NodeInfo k v -> NodeInfo k v
nodeInfoMapFrom f = coerce $ PM.nodeInfoMapFrom (unFrom' . f . From')
nodeInfoMapMFrom
:: Functor f
=> (From k v -> f (From k v))
-> NodeInfo k v -> f (NodeInfo k v)
nodeInfoMapMFrom f = fmap NodeInfo'
. PM.nodeInfoMapMFrom (fmap unFrom' . f . From')
. coerce
nodeInfoSetTo :: To k -> NodeInfo k v -> NodeInfo k v
nodeInfoSetTo = coerce . PM.nodeInfoSetTo
newtype From k (v :: Type) = From' { unFrom' :: PM.From k (Proxy v) }
# , From_Delete , From_Move #
pattern From_Insert :: v -> From k v
pattern From_Insert v = From' (PM.From_Insert v)
pattern From_Delete :: From k v
pattern From_Delete = From' PM.From_Delete
pattern From_Move :: k -> From k v
pattern From_Move k = From' (PM.From_Move k Proxy)
bitraverseFrom
:: Applicative f
=> (k0 -> f k1)
-> (v0 -> f v1)
-> From k0 v0 -> f (From k1 v1)
bitraverseFrom fk fv = fmap From'
. PM.bitraverseFrom fk (\ ~Proxy -> pure Proxy) fv
. coerce
makeWrapped ''PatchMapWithMove
makeWrapped ''NodeInfo
makeWrapped ''From
|
455d78054c410f2cfc0579fb7145046a05ba1fbb0215b152dac40e1857e215cd | matsen/pplacer | test_util.ml | open Mass_map
open Ppatteries
open OUnit
(* Assume the test runner is running in the project root. We can't do much
better than this. *)
let tests_dir = "./tests/"
(* *** convenience funs for getting things *** *)
let placeruns_of_dir which =
get_dir_contents
~pred:(flip Filename.check_suffix "jplace")
(tests_dir ^ "data/" ^ which)
|> List.of_enum
|> List.sort compare
|> List.map Placerun_io.of_any_file
let placerun_of_dir dir which =
placeruns_of_dir dir
|> List.find (Placerun.get_name |- (=) which)
let pres_of_dir weighting criterion which =
let tbl = Hashtbl.create 10 in
List.iter
(fun pr ->
let pre = Pre.normalize_mass (Pre.of_placerun weighting criterion pr) in
Hashtbl.add tbl pr.Placerun.name (pr, pre))
(placeruns_of_dir which);
tbl
For white space separated vectors and matrices .
* These are n't very smart-- leading and trailing whitespace will annoy them .
*
* These aren't very smart-- leading and trailing whitespace will annoy them.
* *)
let farr_of_string s =
Array.of_list (List.map float_of_string (Str.split (Str.regexp "[ ]+") s))
let farrarr_of_string s =
Array.of_list (List.map farr_of_string (Str.split (Str.regexp "\n") s))
let vec_of_string s = Gsl_vector.of_array (farr_of_string s)
let mat_of_string s = Gsl_matrix.of_arrays (farrarr_of_string s)
(* *** equalities *** *)
let gtree_equal g1 g2 =
g1.Gtree.stree = g2.Gtree.stree
&& IntMap.equal (fun b1 b2 -> (Newick_bark.compare b1 b2) = 0) g1.Gtree.bark_map g2.Gtree.bark_map
let placement_equal p1 p2 =
let open Placement in
p1.location = p2.location
&& (p1.ml_ratio =~ p2.ml_ratio)
&& (p1.log_like =~ p2.log_like)
&& Option.eq ~eq:(=~) p1.post_prob p2.post_prob
&& Option.eq ~eq:(=~) p1.marginal_prob p2.marginal_prob
&& (p1.distal_bl =~ p2.distal_bl)
&& (p1.pendant_bl =~ p2.pendant_bl)
&& (p1.classif = p2.classif)
&& Option.eq ~eq:(fun (a1, b1) (a2, b2) -> a1 =~ a2 && b1 = b2) p1.map_identity p2.map_identity
let pquery_equal pq1 pq2 =
let open Pquery in
List.for_all2
(fun (n1, m1) (n2, m2) -> n1 = n2 && m1 =~ m2)
pq1.namlom
pq2.namlom
&& List.for_all2 placement_equal pq1.place_list pq2.place_list
let placerun_equal pr1 pr2 =
let open Placerun in
gtree_equal pr1.ref_tree pr2.ref_tree
&& Option.eq ~eq:(IntMap.equal (=)) pr1.transm pr2.transm
&& List.for_all2 pquery_equal pr1.pqueries pr2.pqueries
(* *** approximate equalities *** *)
let vec_approx_equal ?(epsilon = 1e-5) v1 v2 =
let dim = Gsl_vector.length v1 in
try
assert(dim = Gsl_vector.length v2);
for i=0 to dim-1 do
if not (approx_equal ~epsilon v1.{i} v2.{i}) then raise Exit
done;
true
with
| Exit -> false
let mat_approx_equal ?(epsilon = 1e-5) m1 m2 =
let (rows,cols) as dim1 = Gsl_matrix.dims m1 in
try
assert(dim1 = Gsl_matrix.dims m2);
for i=0 to rows-1 do
for j=0 to cols-1 do
if not (approx_equal ~epsilon m1.{i,j} m2.{i,j}) then raise Exit
done
done;
true
with
| Exit -> false
let array_f_equal f a1 a2 =
try
ArrayFuns.iter2 (fun x y -> if not (f x y) then raise Exit) a1 a2;
true
with Exit -> false
let farr_approx_equal ?(epsilon = 1e-5) fa1 fa2 =
array_f_equal (approx_equal ~epsilon) fa1 fa2
let farrarr_approx_equal ?(epsilon = 1e-5) faa1 faa2 =
array_f_equal (farr_approx_equal ~epsilon) faa1 faa2
let vecarr_approx_equal ?(epsilon = 1e-5) va1 va2 =
array_f_equal (vec_approx_equal ~epsilon) va1 va2
exception Unequal of Jsontype.jsontype * Jsontype.jsontype
let rec json_equal ?(epsilon = 1e-5) j1 j2 =
if begin match j1, j2 with
| Jsontype.Bool b1, Jsontype.Bool b2 -> b1 = b2
| Jsontype.String s1, Jsontype.String s2 -> s1 = s2
| Jsontype.Int i1, Jsontype.Int i2 -> i1 = i2
| Jsontype.Float f1, Jsontype.Float f2 -> approx_equal ~epsilon f1 f2
| Jsontype.Int i, Jsontype.Float f
| Jsontype.Float f, Jsontype.Int i ->
approx_equal ~epsilon f (float_of_int i)
| Jsontype.Object o1, Jsontype.Object o2 ->
(Hashtbl.length o1) = (Hashtbl.length o2) && begin
Hashtbl.iter
(fun k v ->
if not (Hashtbl.mem o2 k) then raise (Unequal (j1, j2));
json_equal ~epsilon v (Hashtbl.find o2 k))
o1;
true
end
| Jsontype.Array a1, Jsontype.Array a2 ->
(List.length a1) = (List.length a2) && begin
List.iter2
(json_equal ~epsilon)
a1
a2;
true
end
| Jsontype.Null, Jsontype.Null -> true
| _, _ -> false
end then () else raise (Unequal (j1, j2))
(* *** infixes for equalities *** *)
let ( =| ) = vec_approx_equal
let ( =|| ) = mat_approx_equal
let ( =@ ) = farr_approx_equal
let ( =@@ ) = farrarr_approx_equal
let ( =|@ ) = vecarr_approx_equal
let check_map_approx_equal message = Enum.iter2
(fun (k1, v1) (k2, v2) ->
(Printf.sprintf message k1 v1 k2 v2)
@? (k1 = k2 && approx_equal v1 v2))
(* *** random stuff *** *)
let rand_symmetric n =
let m = Gsl_matrix.create n n in
for i=0 to n-1 do
for j=i to n-1 do
m.{i,j} <- 1. -. Random.float 2.;
m.{j,i} <- m.{i,j};
done;
done;
m;;
let make_rng seed =
let rng = Gsl_rng.make Gsl_rng.KNUTHRAN2002 in
Gsl_rng.set rng (Nativeint.of_int seed);
rng
let colorset_of_strings = List.map Tax_id.of_string |- Convex.ColorSet.of_list
let simple_refpkg tree_string =
Refpkg.of_path
~ref_tree:(Newick_gtree.of_string tree_string)
(tests_dir ^ "data/simple.refpkg")
| null | https://raw.githubusercontent.com/matsen/pplacer/f40a363e962cca7131f1f2d372262e0081ff1190/tests/test_util.ml | ocaml | Assume the test runner is running in the project root. We can't do much
better than this.
*** convenience funs for getting things ***
*** equalities ***
*** approximate equalities ***
*** infixes for equalities ***
*** random stuff *** | open Mass_map
open Ppatteries
open OUnit
let tests_dir = "./tests/"
let placeruns_of_dir which =
get_dir_contents
~pred:(flip Filename.check_suffix "jplace")
(tests_dir ^ "data/" ^ which)
|> List.of_enum
|> List.sort compare
|> List.map Placerun_io.of_any_file
let placerun_of_dir dir which =
placeruns_of_dir dir
|> List.find (Placerun.get_name |- (=) which)
let pres_of_dir weighting criterion which =
let tbl = Hashtbl.create 10 in
List.iter
(fun pr ->
let pre = Pre.normalize_mass (Pre.of_placerun weighting criterion pr) in
Hashtbl.add tbl pr.Placerun.name (pr, pre))
(placeruns_of_dir which);
tbl
For white space separated vectors and matrices .
* These are n't very smart-- leading and trailing whitespace will annoy them .
*
* These aren't very smart-- leading and trailing whitespace will annoy them.
* *)
let farr_of_string s =
Array.of_list (List.map float_of_string (Str.split (Str.regexp "[ ]+") s))
let farrarr_of_string s =
Array.of_list (List.map farr_of_string (Str.split (Str.regexp "\n") s))
let vec_of_string s = Gsl_vector.of_array (farr_of_string s)
let mat_of_string s = Gsl_matrix.of_arrays (farrarr_of_string s)
let gtree_equal g1 g2 =
g1.Gtree.stree = g2.Gtree.stree
&& IntMap.equal (fun b1 b2 -> (Newick_bark.compare b1 b2) = 0) g1.Gtree.bark_map g2.Gtree.bark_map
let placement_equal p1 p2 =
let open Placement in
p1.location = p2.location
&& (p1.ml_ratio =~ p2.ml_ratio)
&& (p1.log_like =~ p2.log_like)
&& Option.eq ~eq:(=~) p1.post_prob p2.post_prob
&& Option.eq ~eq:(=~) p1.marginal_prob p2.marginal_prob
&& (p1.distal_bl =~ p2.distal_bl)
&& (p1.pendant_bl =~ p2.pendant_bl)
&& (p1.classif = p2.classif)
&& Option.eq ~eq:(fun (a1, b1) (a2, b2) -> a1 =~ a2 && b1 = b2) p1.map_identity p2.map_identity
let pquery_equal pq1 pq2 =
let open Pquery in
List.for_all2
(fun (n1, m1) (n2, m2) -> n1 = n2 && m1 =~ m2)
pq1.namlom
pq2.namlom
&& List.for_all2 placement_equal pq1.place_list pq2.place_list
let placerun_equal pr1 pr2 =
let open Placerun in
gtree_equal pr1.ref_tree pr2.ref_tree
&& Option.eq ~eq:(IntMap.equal (=)) pr1.transm pr2.transm
&& List.for_all2 pquery_equal pr1.pqueries pr2.pqueries
let vec_approx_equal ?(epsilon = 1e-5) v1 v2 =
let dim = Gsl_vector.length v1 in
try
assert(dim = Gsl_vector.length v2);
for i=0 to dim-1 do
if not (approx_equal ~epsilon v1.{i} v2.{i}) then raise Exit
done;
true
with
| Exit -> false
let mat_approx_equal ?(epsilon = 1e-5) m1 m2 =
let (rows,cols) as dim1 = Gsl_matrix.dims m1 in
try
assert(dim1 = Gsl_matrix.dims m2);
for i=0 to rows-1 do
for j=0 to cols-1 do
if not (approx_equal ~epsilon m1.{i,j} m2.{i,j}) then raise Exit
done
done;
true
with
| Exit -> false
let array_f_equal f a1 a2 =
try
ArrayFuns.iter2 (fun x y -> if not (f x y) then raise Exit) a1 a2;
true
with Exit -> false
let farr_approx_equal ?(epsilon = 1e-5) fa1 fa2 =
array_f_equal (approx_equal ~epsilon) fa1 fa2
let farrarr_approx_equal ?(epsilon = 1e-5) faa1 faa2 =
array_f_equal (farr_approx_equal ~epsilon) faa1 faa2
let vecarr_approx_equal ?(epsilon = 1e-5) va1 va2 =
array_f_equal (vec_approx_equal ~epsilon) va1 va2
exception Unequal of Jsontype.jsontype * Jsontype.jsontype
let rec json_equal ?(epsilon = 1e-5) j1 j2 =
if begin match j1, j2 with
| Jsontype.Bool b1, Jsontype.Bool b2 -> b1 = b2
| Jsontype.String s1, Jsontype.String s2 -> s1 = s2
| Jsontype.Int i1, Jsontype.Int i2 -> i1 = i2
| Jsontype.Float f1, Jsontype.Float f2 -> approx_equal ~epsilon f1 f2
| Jsontype.Int i, Jsontype.Float f
| Jsontype.Float f, Jsontype.Int i ->
approx_equal ~epsilon f (float_of_int i)
| Jsontype.Object o1, Jsontype.Object o2 ->
(Hashtbl.length o1) = (Hashtbl.length o2) && begin
Hashtbl.iter
(fun k v ->
if not (Hashtbl.mem o2 k) then raise (Unequal (j1, j2));
json_equal ~epsilon v (Hashtbl.find o2 k))
o1;
true
end
| Jsontype.Array a1, Jsontype.Array a2 ->
(List.length a1) = (List.length a2) && begin
List.iter2
(json_equal ~epsilon)
a1
a2;
true
end
| Jsontype.Null, Jsontype.Null -> true
| _, _ -> false
end then () else raise (Unequal (j1, j2))
let ( =| ) = vec_approx_equal
let ( =|| ) = mat_approx_equal
let ( =@ ) = farr_approx_equal
let ( =@@ ) = farrarr_approx_equal
let ( =|@ ) = vecarr_approx_equal
let check_map_approx_equal message = Enum.iter2
(fun (k1, v1) (k2, v2) ->
(Printf.sprintf message k1 v1 k2 v2)
@? (k1 = k2 && approx_equal v1 v2))
let rand_symmetric n =
let m = Gsl_matrix.create n n in
for i=0 to n-1 do
for j=i to n-1 do
m.{i,j} <- 1. -. Random.float 2.;
m.{j,i} <- m.{i,j};
done;
done;
m;;
let make_rng seed =
let rng = Gsl_rng.make Gsl_rng.KNUTHRAN2002 in
Gsl_rng.set rng (Nativeint.of_int seed);
rng
let colorset_of_strings = List.map Tax_id.of_string |- Convex.ColorSet.of_list
let simple_refpkg tree_string =
Refpkg.of_path
~ref_tree:(Newick_gtree.of_string tree_string)
(tests_dir ^ "data/simple.refpkg")
|
8f7bf0961b81dfcabc13b8bbb4cc0a4c22010dca7aa6e3520d471cccd7ea967d | LexiFi/menhir | SortInference.mli | (******************************************************************************)
(* *)
(* *)
, Paris
, PPS , Université Paris Diderot
(* *)
. All rights reserved . This file is distributed under the
terms of the GNU General Public License version 2 , as described in the
(* file LICENSE. *)
(* *)
(******************************************************************************)
open Syntax
open GroundSort
[ ] performs sort inference for the grammar [ g ] ,
rejecting the grammar if it is ill - sorted . It returns a map of
( terminal and nonterminal ) symbols to ground sorts .
rejecting the grammar if it is ill-sorted. It returns a map of
(terminal and nonterminal) symbols to ground sorts. *)
type sorts = sort StringMap.t
val infer: grammar -> sorts
| null | https://raw.githubusercontent.com/LexiFi/menhir/794e64e7997d4d3f91d36dd49aaecc942ea858b7/src/SortInference.mli | ocaml | ****************************************************************************
file LICENSE.
**************************************************************************** |
, Paris
, PPS , Université Paris Diderot
. All rights reserved . This file is distributed under the
terms of the GNU General Public License version 2 , as described in the
open Syntax
open GroundSort
[ ] performs sort inference for the grammar [ g ] ,
rejecting the grammar if it is ill - sorted . It returns a map of
( terminal and nonterminal ) symbols to ground sorts .
rejecting the grammar if it is ill-sorted. It returns a map of
(terminal and nonterminal) symbols to ground sorts. *)
type sorts = sort StringMap.t
val infer: grammar -> sorts
|
77fec2dad7d3f149e24ee4a938fe54be5faf41ca82ccca63d213252dee664271 | erlang-ls/erlang_ls | els_uri.erl | %%==============================================================================
%% Library to parse RFC-3986 URIs
%%==============================================================================
%% For details, see:
%%==============================================================================
-module(els_uri).
%%==============================================================================
%% Exports
%%==============================================================================
-export([
module/1,
path/1,
uri/1
]).
%%==============================================================================
%% Types
%%==============================================================================
-type path() :: binary().
-export_type([path/0]).
%%==============================================================================
%% Includes
%%==============================================================================
-include("els_core.hrl").
-spec module(uri()) -> atom().
module(Uri) ->
binary_to_atom(filename:basename(path(Uri), <<".erl">>), utf8).
-spec path(uri()) -> path().
path(Uri) ->
path(Uri, els_utils:is_windows()).
-spec path(uri(), boolean()) -> path().
path(Uri, IsWindows) ->
#{
host := Host,
path := Path0,
scheme := <<"file">>
} = uri_string:normalize(Uri, [return_map]),
Path = percent_decode(Path0),
case {IsWindows, Host} of
{true, <<>>} ->
Windows drive letter , have to strip the initial slash
re:replace(
Path, "^/([a-zA-Z]:)(.*)", "\\1\\2", [{return, binary}]
);
{true, _} ->
<<"//", Host/binary, Path/binary>>;
{false, <<>>} ->
Path;
{false, _} ->
error(badarg)
end.
-spec uri(path()) -> uri().
uri(Path) ->
[Head | Tail] = filename:split(Path),
{Host, Path1} =
case {els_utils:is_windows(), Head} of
{false, <<"/">>} ->
{<<>>, uri_join(Tail)};
{true, X} when X =:= <<"//">> orelse X =:= <<"\\\\">> ->
[H | T] = Tail,
{H, uri_join(T)};
{true, _} ->
Strip the trailing slash from the first component
H1 = string:slice(Head, 0, 2),
{<<>>, uri_join([H1 | Tail])}
end,
els_utils:to_binary(
uri_string:recompose(#{
scheme => <<"file">>,
host => Host,
path => [$/, Path1]
})
).
-spec uri_join([path()]) -> iolist().
uri_join(List) ->
lists:join(<<"/">>, List).
-if(?OTP_RELEASE >= 23).
-spec percent_decode(binary()) -> binary().
percent_decode(Str) ->
uri_string:percent_decode(Str).
-else.
-spec percent_decode(binary()) -> binary().
percent_decode(Str) ->
http_uri:decode(Str).
-endif.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
path_uri_test_() ->
[
?_assertEqual(
<<"/foo/bar.erl">>,
path(<<"file">>)
),
?_assertEqual(
<<"/foo/bar baz.erl">>,
path(<<"file">>)
),
?_assertEqual(
<<"/foo/bar.erl">>,
path(uri(path(<<"file">>)))
),
?_assertEqual(
<<"/foo/bar baz.erl">>,
path(uri(<<"/foo/bar baz.erl">>))
),
?_assertEqual(
<<"file">>,
uri(<<"/foo/bar baz.erl">>)
)
].
path_windows_test() ->
?assertEqual(
<<"C:/foo/bar.erl">>,
path(<<"file">>, true)
).
-endif.
| null | https://raw.githubusercontent.com/erlang-ls/erlang_ls/2dfb48aca3879e5b44f6fd676f8349525262779f/apps/els_core/src/els_uri.erl | erlang | ==============================================================================
Library to parse RFC-3986 URIs
==============================================================================
For details, see:
==============================================================================
==============================================================================
Exports
==============================================================================
==============================================================================
Types
==============================================================================
==============================================================================
Includes
============================================================================== | -module(els_uri).
-export([
module/1,
path/1,
uri/1
]).
-type path() :: binary().
-export_type([path/0]).
-include("els_core.hrl").
-spec module(uri()) -> atom().
module(Uri) ->
binary_to_atom(filename:basename(path(Uri), <<".erl">>), utf8).
-spec path(uri()) -> path().
path(Uri) ->
path(Uri, els_utils:is_windows()).
-spec path(uri(), boolean()) -> path().
path(Uri, IsWindows) ->
#{
host := Host,
path := Path0,
scheme := <<"file">>
} = uri_string:normalize(Uri, [return_map]),
Path = percent_decode(Path0),
case {IsWindows, Host} of
{true, <<>>} ->
Windows drive letter , have to strip the initial slash
re:replace(
Path, "^/([a-zA-Z]:)(.*)", "\\1\\2", [{return, binary}]
);
{true, _} ->
<<"//", Host/binary, Path/binary>>;
{false, <<>>} ->
Path;
{false, _} ->
error(badarg)
end.
-spec uri(path()) -> uri().
uri(Path) ->
[Head | Tail] = filename:split(Path),
{Host, Path1} =
case {els_utils:is_windows(), Head} of
{false, <<"/">>} ->
{<<>>, uri_join(Tail)};
{true, X} when X =:= <<"//">> orelse X =:= <<"\\\\">> ->
[H | T] = Tail,
{H, uri_join(T)};
{true, _} ->
Strip the trailing slash from the first component
H1 = string:slice(Head, 0, 2),
{<<>>, uri_join([H1 | Tail])}
end,
els_utils:to_binary(
uri_string:recompose(#{
scheme => <<"file">>,
host => Host,
path => [$/, Path1]
})
).
-spec uri_join([path()]) -> iolist().
uri_join(List) ->
lists:join(<<"/">>, List).
-if(?OTP_RELEASE >= 23).
-spec percent_decode(binary()) -> binary().
percent_decode(Str) ->
uri_string:percent_decode(Str).
-else.
-spec percent_decode(binary()) -> binary().
percent_decode(Str) ->
http_uri:decode(Str).
-endif.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
path_uri_test_() ->
[
?_assertEqual(
<<"/foo/bar.erl">>,
path(<<"file">>)
),
?_assertEqual(
<<"/foo/bar baz.erl">>,
path(<<"file">>)
),
?_assertEqual(
<<"/foo/bar.erl">>,
path(uri(path(<<"file">>)))
),
?_assertEqual(
<<"/foo/bar baz.erl">>,
path(uri(<<"/foo/bar baz.erl">>))
),
?_assertEqual(
<<"file">>,
uri(<<"/foo/bar baz.erl">>)
)
].
path_windows_test() ->
?assertEqual(
<<"C:/foo/bar.erl">>,
path(<<"file">>, true)
).
-endif.
|
fa10b50c73c98d62e1eaaab9d9e536dc7054f2a05387ba3ad32fecee4523c8b4 | commercialhaskell/stack | StackTest.hs | module StackTest
( run'
, run
, runShell
, runWithCwd
, stackExe
, stackSrc
, testDir
, stack'
, stack
, stackCleanFull
, stackIgnoreException
, stackErr
, Repl
, ReplConnection (..)
, nextPrompt
, replCommand
, replGetChar
, replGetLine
, runRepl
, repl
, stackStderr
, stackCheckStderr
, stackErrStderr
, runEx
, runEx'
, stackCheckStdout
, doesNotExist
, doesExist
, doesFileOrDirExist
, copy
, fileContentsMatch
, logInfo
, showProcessArgDebug
, exeExt
, isWindows
, isLinux
, getIsAlpine
, isARM
, isMacOSX
, defaultResolverArg
, removeFileIgnore
, removeDirIgnore
, withCwd
, withSourceDirectory
, superslow
) where
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Reader
import Control.Concurrent
import Control.Exception
import Data.Maybe (fromMaybe)
import System.Environment
import System.Directory
import System.IO
import System.IO.Error
import System.Process
import System.Exit
import System.Info (arch, os)
import GHC.Stack (HasCallStack)
run' :: HasCallStack => FilePath -> [String] -> IO ExitCode
run' cmd args = do
logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args)
(Nothing, Nothing, Nothing, ph) <- createProcess (proc cmd args)
waitForProcess ph
run :: HasCallStack => FilePath -> [String] -> IO ()
run cmd args = do
ec <- run' cmd args
unless (ec == ExitSuccess) $
error $ "Exited with exit code: " ++ displayException ec
runShell :: HasCallStack => String -> IO ()
runShell cmd = do
logInfo $ "Running: " ++ cmd
(Nothing, Nothing, Nothing, ph) <- createProcess (shell cmd)
ec <- waitForProcess ph
unless (ec == ExitSuccess) $
error $ "Exited with exit code: " ++ displayException ec
runWithCwd :: HasCallStack => FilePath -> String -> [String] -> IO String
runWithCwd cwdPath cmd args = do
logInfo $ "Running: " ++ cmd
let cp = proc cmd args
(ec, stdoutStr, _) <- readCreateProcessWithExitCode (cp { cwd = Just cwdPath }) ""
unless (ec == ExitSuccess) $
error $ "Exited with exit code: " ++ displayException ec
pure stdoutStr
stackExe :: IO String
stackExe = getEnv "STACK_EXE"
stackSrc :: IO String
stackSrc = getEnv "SRC_DIR"
testDir :: IO String
testDir = getEnv "TEST_DIR"
stack' :: HasCallStack => [String] -> IO ExitCode
stack' args = do
stackEnv <- stackExe
run' stackEnv args
stack :: HasCallStack => [String] -> IO ()
stack args = do
ec <- stack' args
unless (ec == ExitSuccess) $
error $ "Exited with exit code: " ++ displayException ec
Temporary workaround for Windows to ignore exceptions arising out
of Windows when we do stack clean . More info here :
stackCleanFull :: HasCallStack => IO ()
stackCleanFull = stackIgnoreException ["clean", "--full"]
Temporary workaround for Windows to ignore exceptions arising out
of Windows when we do stack clean . More info here :
stackIgnoreException :: HasCallStack => [String] -> IO ()
stackIgnoreException args = if isWindows
then void (stack' args) `catch` (\(_e :: IOException) -> pure ())
else stack args
stackErr :: HasCallStack => [String] -> IO ()
stackErr args = do
ec <- stack' args
when (ec == ExitSuccess) $ error "stack was supposed to fail, but didn't"
type Repl = ReaderT ReplConnection IO
data ReplConnection
= ReplConnection
{ replStdin :: Handle
, replStdout :: Handle
}
nextPrompt :: Repl ()
nextPrompt = do
(ReplConnection _ inputHandle) <- ask
c <- liftIO $ hGetChar inputHandle
if c == '>'
then do _ <- liftIO $ hGetChar inputHandle
pure ()
else nextPrompt
replCommand :: String -> Repl ()
replCommand cmd = do
(ReplConnection input _) <- ask
liftIO $ hPutStrLn input cmd
replGetLine :: Repl String
replGetLine = ask >>= liftIO . hGetLine . replStdout
replGetChar :: Repl Char
replGetChar = ask >>= liftIO . hGetChar . replStdout
runRepl :: HasCallStack => FilePath -> [String] -> ReaderT ReplConnection IO () -> IO ExitCode
runRepl cmd args actions = do
logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args)
(Just rStdin, Just rStdout, Just rStderr, ph) <-
createProcess (proc cmd args)
{ std_in = CreatePipe
, std_out = CreatePipe
, std_err = CreatePipe
}
hSetBuffering rStdin NoBuffering
hSetBuffering rStdout NoBuffering
hSetBuffering rStderr NoBuffering
tempDir <- if isWindows
then fromMaybe "" <$> lookupEnv "TEMP"
else pure "/tmp"
let tempFP = tempDir ++ "/stderr"
_ <- forkIO $ withFile tempFP WriteMode
$ \err -> do
hSetBuffering err NoBuffering
forever $ catch (hGetChar rStderr >>= hPutChar err)
$ \e -> unless (isEOFError e) $ throw e
runReaderT (nextPrompt >> actions) (ReplConnection rStdin rStdout)
waitForProcess ph
repl :: HasCallStack => [String] -> Repl () -> IO ()
repl args action = do
stackExe' <- stackExe
ec <- runRepl stackExe' ("repl":args) action
unless (ec == ExitSuccess) $ pure ()
TODO : Understand why the exit code is 1 despite running GHCi tests
-- successfully.
else error $ " Exited with exit code : " + + show ec
stackStderr :: HasCallStack => [String] -> IO (ExitCode, String)
stackStderr args = do
stackExe' <- stackExe
logInfo $ "Running: " ++ stackExe' ++ " " ++ unwords (map showProcessArgDebug args)
(ec, _, err) <- readProcessWithExitCode stackExe' args ""
hPutStr stderr err
pure (ec, err)
-- | Run stack with arguments and apply a check to the resulting
-- stderr output if the process succeeded.
stackCheckStderr :: HasCallStack => [String] -> (String -> IO ()) -> IO ()
stackCheckStderr args check = do
(ec, err) <- stackStderr args
if ec /= ExitSuccess
then error $ "Exited with exit code: " ++ displayException ec
else check err
| Same as ' stackCheckStderr ' , but ensures that the Stack process
-- fails.
stackErrStderr :: HasCallStack => [String] -> (String -> IO ()) -> IO ()
stackErrStderr args check = do
(ec, err) <- stackStderr args
if ec == ExitSuccess
then error "Stack process succeeded, but it shouldn't"
else check err
runEx :: HasCallStack => FilePath -> String -> IO (ExitCode, String, String)
runEx cmd args = runEx' cmd $ words args
runEx' :: HasCallStack => FilePath -> [String] -> IO (ExitCode, String, String)
runEx' cmd args = do
logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args)
(ec, out, err) <- readProcessWithExitCode cmd args ""
putStr out
hPutStr stderr err
pure (ec, out, err)
-- | Run stack with arguments and apply a check to the resulting
-- stdout output if the process succeeded.
--
-- Take care with newlines; if the output includes a newline character that
-- should not be there, use 'Data.List.Extra.trimEnd' to remove it.
stackCheckStdout :: HasCallStack => [String] -> (String -> IO ()) -> IO ()
stackCheckStdout args check = do
stackExe' <- stackExe
(ec, out, _) <- runEx' stackExe' args
if ec /= ExitSuccess
then error $ "Exited with exit code: " ++ displayException ec
else check out
doesNotExist :: HasCallStack => FilePath -> IO ()
doesNotExist fp = do
logInfo $ "doesNotExist " ++ fp
exists <- doesFileOrDirExist fp
case exists of
(Right msg) -> error msg
(Left _) -> pure ()
doesExist :: HasCallStack => FilePath -> IO ()
doesExist fp = do
logInfo $ "doesExist " ++ fp
exists <- doesFileOrDirExist fp
case exists of
(Right _) -> pure ()
(Left _) -> error "No file or directory exists"
doesFileOrDirExist :: HasCallStack => FilePath -> IO (Either () String)
doesFileOrDirExist fp = do
isFile <- doesFileExist fp
if isFile
then pure (Right ("File exists: " ++ fp))
else do
isDir <- doesDirectoryExist fp
if isDir
then pure (Right ("Directory exists: " ++ fp))
else pure (Left ())
copy :: HasCallStack => FilePath -> FilePath -> IO ()
copy src dest = do
logInfo ("Copy " ++ show src ++ " to " ++ show dest)
System.Directory.copyFile src dest
fileContentsMatch :: HasCallStack => FilePath -> FilePath -> IO ()
fileContentsMatch f1 f2 = do
doesExist f1
doesExist f2
f1Contents <- readFile f1
f2Contents <- readFile f2
unless (f1Contents == f2Contents) $
error ("contents do not match for " ++ show f1 ++ " " ++ show f2)
logInfo :: String -> IO ()
logInfo = hPutStrLn stderr
TODO : use Stack 's process running utilities ? ( better logging )
for now just copy+modifying this one from System . Process . Log
| Show a process arg including when necessary . Just for
-- debugging purposes, not functionally important.
showProcessArgDebug :: String -> String
showProcessArgDebug x
| any special x = show x
| otherwise = x
where
special '"' = True
special ' ' = True
special _ = False
-- | Extension of executables
exeExt :: String
exeExt = if isWindows then ".exe" else ""
-- | Is the OS Windows?
isWindows :: Bool
isWindows = os == "mingw32"
isLinux :: Bool
isLinux = os == "linux"
| Is the OS Alpine Linux ?
getIsAlpine :: IO Bool
getIsAlpine = doesFileExist "/etc/alpine-release"
-- | Is the architecture ARM?
isARM :: Bool
isARM = arch == "arm"
-- | Is the OS Mac OS X?
isMacOSX :: Bool
isMacOSX = os == "darwin"
| To avoid problems with GHC version mismatch when a new LTS major
version is released , pass this argument to @stack@ when running in
a global context . The LTS major version here should match that of
-- the main @stack.yaml@.
--
defaultResolverArg :: String
defaultResolverArg = "--resolver=nightly-2022-11-14"
-- | Remove a file and ignore any warnings about missing files.
removeFileIgnore :: HasCallStack => FilePath -> IO ()
removeFileIgnore fp = removeFile fp `catch` \e ->
if isDoesNotExistError e
then pure ()
else throwIO e
-- | Remove a directory and ignore any warnings about missing files.
removeDirIgnore :: HasCallStack => FilePath -> IO ()
removeDirIgnore fp = removeDirectoryRecursive fp `catch` \e ->
if isDoesNotExistError e
then pure ()
else throwIO e
-- | Changes to the specified working directory.
withCwd :: HasCallStack => FilePath -> IO () -> IO ()
withCwd dir action = do
currentDirectory <- getCurrentDirectory
let enterDir = setCurrentDirectory dir
exitDir = setCurrentDirectory currentDirectory
bracket_ enterDir exitDir action
| Changes working directory to Stack source directory .
withSourceDirectory :: HasCallStack => IO () -> IO ()
withSourceDirectory action = do
dir <- stackSrc
withCwd dir action
-- | Mark a test as superslow, only to be run when explicitly requested.
superslow :: HasCallStack => IO () -> IO ()
superslow inner = do
mres <- lookupEnv "STACK_TEST_SPEED"
case mres of
Just "NORMAL" -> logInfo "Skipping superslow test"
Just "SUPERSLOW" -> do
logInfo "Running superslow test, hold on to your butts"
inner
Nothing -> do
logInfo "No STACK_TEST_SPEED specified. Executing superslow test, hold on to your butts"
inner
Just x -> error $ "Invalid value for STACK_TEST_SPEED env var: " ++ show x
| null | https://raw.githubusercontent.com/commercialhaskell/stack/80429690da92c634cb129f99f1507dbc47a70d45/test/integration/lib/StackTest.hs | haskell | successfully.
| Run stack with arguments and apply a check to the resulting
stderr output if the process succeeded.
fails.
| Run stack with arguments and apply a check to the resulting
stdout output if the process succeeded.
Take care with newlines; if the output includes a newline character that
should not be there, use 'Data.List.Extra.trimEnd' to remove it.
debugging purposes, not functionally important.
| Extension of executables
| Is the OS Windows?
| Is the architecture ARM?
| Is the OS Mac OS X?
the main @stack.yaml@.
| Remove a file and ignore any warnings about missing files.
| Remove a directory and ignore any warnings about missing files.
| Changes to the specified working directory.
| Mark a test as superslow, only to be run when explicitly requested. | module StackTest
( run'
, run
, runShell
, runWithCwd
, stackExe
, stackSrc
, testDir
, stack'
, stack
, stackCleanFull
, stackIgnoreException
, stackErr
, Repl
, ReplConnection (..)
, nextPrompt
, replCommand
, replGetChar
, replGetLine
, runRepl
, repl
, stackStderr
, stackCheckStderr
, stackErrStderr
, runEx
, runEx'
, stackCheckStdout
, doesNotExist
, doesExist
, doesFileOrDirExist
, copy
, fileContentsMatch
, logInfo
, showProcessArgDebug
, exeExt
, isWindows
, isLinux
, getIsAlpine
, isARM
, isMacOSX
, defaultResolverArg
, removeFileIgnore
, removeDirIgnore
, withCwd
, withSourceDirectory
, superslow
) where
import Control.Monad
import Control.Monad.IO.Class
import Control.Monad.Trans.Reader
import Control.Concurrent
import Control.Exception
import Data.Maybe (fromMaybe)
import System.Environment
import System.Directory
import System.IO
import System.IO.Error
import System.Process
import System.Exit
import System.Info (arch, os)
import GHC.Stack (HasCallStack)
run' :: HasCallStack => FilePath -> [String] -> IO ExitCode
run' cmd args = do
logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args)
(Nothing, Nothing, Nothing, ph) <- createProcess (proc cmd args)
waitForProcess ph
run :: HasCallStack => FilePath -> [String] -> IO ()
run cmd args = do
ec <- run' cmd args
unless (ec == ExitSuccess) $
error $ "Exited with exit code: " ++ displayException ec
runShell :: HasCallStack => String -> IO ()
runShell cmd = do
logInfo $ "Running: " ++ cmd
(Nothing, Nothing, Nothing, ph) <- createProcess (shell cmd)
ec <- waitForProcess ph
unless (ec == ExitSuccess) $
error $ "Exited with exit code: " ++ displayException ec
runWithCwd :: HasCallStack => FilePath -> String -> [String] -> IO String
runWithCwd cwdPath cmd args = do
logInfo $ "Running: " ++ cmd
let cp = proc cmd args
(ec, stdoutStr, _) <- readCreateProcessWithExitCode (cp { cwd = Just cwdPath }) ""
unless (ec == ExitSuccess) $
error $ "Exited with exit code: " ++ displayException ec
pure stdoutStr
stackExe :: IO String
stackExe = getEnv "STACK_EXE"
stackSrc :: IO String
stackSrc = getEnv "SRC_DIR"
testDir :: IO String
testDir = getEnv "TEST_DIR"
stack' :: HasCallStack => [String] -> IO ExitCode
stack' args = do
stackEnv <- stackExe
run' stackEnv args
stack :: HasCallStack => [String] -> IO ()
stack args = do
ec <- stack' args
unless (ec == ExitSuccess) $
error $ "Exited with exit code: " ++ displayException ec
Temporary workaround for Windows to ignore exceptions arising out
of Windows when we do stack clean . More info here :
stackCleanFull :: HasCallStack => IO ()
stackCleanFull = stackIgnoreException ["clean", "--full"]
Temporary workaround for Windows to ignore exceptions arising out
of Windows when we do stack clean . More info here :
stackIgnoreException :: HasCallStack => [String] -> IO ()
stackIgnoreException args = if isWindows
then void (stack' args) `catch` (\(_e :: IOException) -> pure ())
else stack args
stackErr :: HasCallStack => [String] -> IO ()
stackErr args = do
ec <- stack' args
when (ec == ExitSuccess) $ error "stack was supposed to fail, but didn't"
type Repl = ReaderT ReplConnection IO
data ReplConnection
= ReplConnection
{ replStdin :: Handle
, replStdout :: Handle
}
nextPrompt :: Repl ()
nextPrompt = do
(ReplConnection _ inputHandle) <- ask
c <- liftIO $ hGetChar inputHandle
if c == '>'
then do _ <- liftIO $ hGetChar inputHandle
pure ()
else nextPrompt
replCommand :: String -> Repl ()
replCommand cmd = do
(ReplConnection input _) <- ask
liftIO $ hPutStrLn input cmd
replGetLine :: Repl String
replGetLine = ask >>= liftIO . hGetLine . replStdout
replGetChar :: Repl Char
replGetChar = ask >>= liftIO . hGetChar . replStdout
runRepl :: HasCallStack => FilePath -> [String] -> ReaderT ReplConnection IO () -> IO ExitCode
runRepl cmd args actions = do
logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args)
(Just rStdin, Just rStdout, Just rStderr, ph) <-
createProcess (proc cmd args)
{ std_in = CreatePipe
, std_out = CreatePipe
, std_err = CreatePipe
}
hSetBuffering rStdin NoBuffering
hSetBuffering rStdout NoBuffering
hSetBuffering rStderr NoBuffering
tempDir <- if isWindows
then fromMaybe "" <$> lookupEnv "TEMP"
else pure "/tmp"
let tempFP = tempDir ++ "/stderr"
_ <- forkIO $ withFile tempFP WriteMode
$ \err -> do
hSetBuffering err NoBuffering
forever $ catch (hGetChar rStderr >>= hPutChar err)
$ \e -> unless (isEOFError e) $ throw e
runReaderT (nextPrompt >> actions) (ReplConnection rStdin rStdout)
waitForProcess ph
repl :: HasCallStack => [String] -> Repl () -> IO ()
repl args action = do
stackExe' <- stackExe
ec <- runRepl stackExe' ("repl":args) action
unless (ec == ExitSuccess) $ pure ()
TODO : Understand why the exit code is 1 despite running GHCi tests
else error $ " Exited with exit code : " + + show ec
stackStderr :: HasCallStack => [String] -> IO (ExitCode, String)
stackStderr args = do
stackExe' <- stackExe
logInfo $ "Running: " ++ stackExe' ++ " " ++ unwords (map showProcessArgDebug args)
(ec, _, err) <- readProcessWithExitCode stackExe' args ""
hPutStr stderr err
pure (ec, err)
stackCheckStderr :: HasCallStack => [String] -> (String -> IO ()) -> IO ()
stackCheckStderr args check = do
(ec, err) <- stackStderr args
if ec /= ExitSuccess
then error $ "Exited with exit code: " ++ displayException ec
else check err
| Same as ' stackCheckStderr ' , but ensures that the Stack process
stackErrStderr :: HasCallStack => [String] -> (String -> IO ()) -> IO ()
stackErrStderr args check = do
(ec, err) <- stackStderr args
if ec == ExitSuccess
then error "Stack process succeeded, but it shouldn't"
else check err
runEx :: HasCallStack => FilePath -> String -> IO (ExitCode, String, String)
runEx cmd args = runEx' cmd $ words args
runEx' :: HasCallStack => FilePath -> [String] -> IO (ExitCode, String, String)
runEx' cmd args = do
logInfo $ "Running: " ++ cmd ++ " " ++ unwords (map showProcessArgDebug args)
(ec, out, err) <- readProcessWithExitCode cmd args ""
putStr out
hPutStr stderr err
pure (ec, out, err)
stackCheckStdout :: HasCallStack => [String] -> (String -> IO ()) -> IO ()
stackCheckStdout args check = do
stackExe' <- stackExe
(ec, out, _) <- runEx' stackExe' args
if ec /= ExitSuccess
then error $ "Exited with exit code: " ++ displayException ec
else check out
doesNotExist :: HasCallStack => FilePath -> IO ()
doesNotExist fp = do
logInfo $ "doesNotExist " ++ fp
exists <- doesFileOrDirExist fp
case exists of
(Right msg) -> error msg
(Left _) -> pure ()
doesExist :: HasCallStack => FilePath -> IO ()
doesExist fp = do
logInfo $ "doesExist " ++ fp
exists <- doesFileOrDirExist fp
case exists of
(Right _) -> pure ()
(Left _) -> error "No file or directory exists"
doesFileOrDirExist :: HasCallStack => FilePath -> IO (Either () String)
doesFileOrDirExist fp = do
isFile <- doesFileExist fp
if isFile
then pure (Right ("File exists: " ++ fp))
else do
isDir <- doesDirectoryExist fp
if isDir
then pure (Right ("Directory exists: " ++ fp))
else pure (Left ())
copy :: HasCallStack => FilePath -> FilePath -> IO ()
copy src dest = do
logInfo ("Copy " ++ show src ++ " to " ++ show dest)
System.Directory.copyFile src dest
fileContentsMatch :: HasCallStack => FilePath -> FilePath -> IO ()
fileContentsMatch f1 f2 = do
doesExist f1
doesExist f2
f1Contents <- readFile f1
f2Contents <- readFile f2
unless (f1Contents == f2Contents) $
error ("contents do not match for " ++ show f1 ++ " " ++ show f2)
logInfo :: String -> IO ()
logInfo = hPutStrLn stderr
TODO : use Stack 's process running utilities ? ( better logging )
for now just copy+modifying this one from System . Process . Log
| Show a process arg including when necessary . Just for
showProcessArgDebug :: String -> String
showProcessArgDebug x
| any special x = show x
| otherwise = x
where
special '"' = True
special ' ' = True
special _ = False
exeExt :: String
exeExt = if isWindows then ".exe" else ""
isWindows :: Bool
isWindows = os == "mingw32"
isLinux :: Bool
isLinux = os == "linux"
| Is the OS Alpine Linux ?
getIsAlpine :: IO Bool
getIsAlpine = doesFileExist "/etc/alpine-release"
isARM :: Bool
isARM = arch == "arm"
isMacOSX :: Bool
isMacOSX = os == "darwin"
| To avoid problems with GHC version mismatch when a new LTS major
version is released , pass this argument to @stack@ when running in
a global context . The LTS major version here should match that of
defaultResolverArg :: String
defaultResolverArg = "--resolver=nightly-2022-11-14"
removeFileIgnore :: HasCallStack => FilePath -> IO ()
removeFileIgnore fp = removeFile fp `catch` \e ->
if isDoesNotExistError e
then pure ()
else throwIO e
removeDirIgnore :: HasCallStack => FilePath -> IO ()
removeDirIgnore fp = removeDirectoryRecursive fp `catch` \e ->
if isDoesNotExistError e
then pure ()
else throwIO e
withCwd :: HasCallStack => FilePath -> IO () -> IO ()
withCwd dir action = do
currentDirectory <- getCurrentDirectory
let enterDir = setCurrentDirectory dir
exitDir = setCurrentDirectory currentDirectory
bracket_ enterDir exitDir action
| Changes working directory to Stack source directory .
withSourceDirectory :: HasCallStack => IO () -> IO ()
withSourceDirectory action = do
dir <- stackSrc
withCwd dir action
superslow :: HasCallStack => IO () -> IO ()
superslow inner = do
mres <- lookupEnv "STACK_TEST_SPEED"
case mres of
Just "NORMAL" -> logInfo "Skipping superslow test"
Just "SUPERSLOW" -> do
logInfo "Running superslow test, hold on to your butts"
inner
Nothing -> do
logInfo "No STACK_TEST_SPEED specified. Executing superslow test, hold on to your butts"
inner
Just x -> error $ "Invalid value for STACK_TEST_SPEED env var: " ++ show x
|
52960bbc69d838d3a2cf3cf7521d6d7a83c7da3e0bdd34f9d61489d55602522e | jrh13/hol-light | basics.ml | (* ========================================================================= *)
(* More syntax constructors, and prelogical utilities like matching. *)
(* *)
, University of Cambridge Computer Laboratory
(* *)
( c ) Copyright , University of Cambridge 1998
( c ) Copyright , 1998 - 2007
( c ) Copyright , , 2017 - 2018
(* ========================================================================= *)
needs "fusion.ml";;
(* ------------------------------------------------------------------------- *)
(* Create probably-fresh variable *)
(* ------------------------------------------------------------------------- *)
let genvar =
let gcounter = ref 0 in
fun ty -> let count = !gcounter in
(gcounter := count + 1;
mk_var("_"^(string_of_int count),ty));;
(* ------------------------------------------------------------------------- *)
(* Convenient functions for manipulating types. *)
(* ------------------------------------------------------------------------- *)
let dest_fun_ty ty =
match ty with
Tyapp("fun",[ty1;ty2]) -> (ty1,ty2)
| _ -> failwith "dest_fun_ty";;
let rec occurs_in ty bigty =
bigty = ty ||
is_type bigty && exists (occurs_in ty) (snd(dest_type bigty));;
let rec tysubst alist ty =
try rev_assoc ty alist with Failure _ ->
if is_vartype ty then ty else
let tycon,tyvars = dest_type ty in
mk_type(tycon,map (tysubst alist) tyvars);;
(* ------------------------------------------------------------------------- *)
(* A bit more syntax. *)
(* ------------------------------------------------------------------------- *)
let bndvar tm =
try fst(dest_abs tm)
with Failure _ -> failwith "bndvar: Not an abstraction";;
let body tm =
try snd(dest_abs tm)
with Failure _ -> failwith "body: Not an abstraction";;
let list_mk_comb(h,t) = rev_itlist (C (curry mk_comb)) t h;;
let list_mk_abs(vs,bod) = itlist (curry mk_abs) vs bod;;
let strip_comb = rev_splitlist dest_comb;;
let strip_abs = splitlist dest_abs;;
(* ------------------------------------------------------------------------- *)
Generic syntax to deal with some binary operators .
(* *)
(* Note that "mk_binary" only works for monomorphic functions. *)
(* ------------------------------------------------------------------------- *)
let is_binary s tm =
match tm with
Comb(Comb(Const(s',_),_),_) -> s' = s
| _ -> false;;
let dest_binary s tm =
match tm with
Comb(Comb(Const(s',_),l),r) when s' = s -> (l,r)
| _ -> failwith "dest_binary";;
let mk_binary s =
let c = mk_const(s,[]) in
fun (l,r) -> try mk_comb(mk_comb(c,l),r)
with Failure _ -> failwith "mk_binary";;
(* ------------------------------------------------------------------------- *)
(* Produces a sequence of variants, considering previous inventions. *)
(* ------------------------------------------------------------------------- *)
let rec variants av vs =
if vs = [] then [] else
let vh = variant av (hd vs) in vh::(variants (vh::av) (tl vs));;
(* ------------------------------------------------------------------------- *)
(* Gets all variables (free and/or bound) in a term. *)
(* ------------------------------------------------------------------------- *)
let variables =
let rec vars(acc,tm) =
if is_var tm then insert tm acc
else if is_const tm then acc
else if is_abs tm then
let v,bod = dest_abs tm in
vars(insert v acc,bod)
else
let l,r = dest_comb tm in
vars(vars(acc,l),r) in
fun tm -> vars([],tm);;
(* ------------------------------------------------------------------------- *)
(* General substitution (for any free expression). *)
(* ------------------------------------------------------------------------- *)
let subst =
let rec ssubst ilist tm =
if ilist = [] then tm else
try fst (find ((aconv tm) o snd) ilist) with Failure _ ->
match tm with
Comb(f,x) -> let f' = ssubst ilist f and x' = ssubst ilist x in
if f' == f && x' == x then tm else mk_comb(f',x')
| Abs(v,bod) ->
let ilist' = filter (not o (vfree_in v) o snd) ilist in
mk_abs(v,ssubst ilist' bod)
| _ -> tm in
fun ilist ->
let theta = filter (fun (s,t) -> Pervasives.compare s t <> 0) ilist in
if theta = [] then (fun tm -> tm) else
let ts,xs = unzip theta in
fun tm ->
let gs = variants (variables tm) (map (genvar o type_of) xs) in
let tm' = ssubst (zip gs xs) tm in
if tm' == tm then tm else vsubst (zip ts gs) tm';;
(* ------------------------------------------------------------------------- *)
Alpha conversion term operation .
(* ------------------------------------------------------------------------- *)
let alpha v tm =
let v0,bod = try dest_abs tm
with Failure _ -> failwith "alpha: Not an abstraction"in
if v = v0 then tm else
if type_of v = type_of v0 && not (vfree_in v bod) then
mk_abs(v,vsubst[v,v0]bod)
else failwith "alpha: Invalid new variable";;
(* ------------------------------------------------------------------------- *)
(* Type matching. *)
(* ------------------------------------------------------------------------- *)
(* Match the "generic" type vty against the "concrete" type cty, adding  *)
(* to the instantiation [sofar] (a list of concrete-type / type-variable *)
(* pairs).  Fails if the type constructors differ or a type variable is  *)
(* already bound to a different concrete type.  NB: rev_assoc signals a  *)
(* miss by raising Failure "find", which is caught here; the deliberate  *)
(* Failure "type_match" on a conflicting binding is NOT caught.          *)
let rec type_match vty cty sofar =
if is_vartype vty then
try if rev_assoc vty sofar = cty then sofar else failwith "type_match"
with Failure "find" -> (cty,vty)::sofar
else
let vop,vargs = dest_type vty and cop,cargs = dest_type cty in
if vop = cop then itlist2 type_match vargs cargs sofar
else failwith "type_match";;
(* ------------------------------------------------------------------------- *)
(* Conventional matching version of mk_const (but with a sanity test). *)
(* ------------------------------------------------------------------------- *)
(* Conventional matching version of mk_const: instantiate the constant's *)
(* generic type so as to match the desired type ty, with a sanity test   *)
(* that the resulting constant really does have type ty.                 *)
let mk_mconst(c,ty) =
try let uty = get_const_type c in
let mat = type_match uty ty [] in
let con = mk_const(c,mat) in
if type_of con = ty then con else fail()
with Failure _ -> failwith "mk_const: generic type cannot be instantiated";;
(* ------------------------------------------------------------------------- *)
(* Like mk_comb, but instantiates type variables in rator if necessary. *)
(* ------------------------------------------------------------------------- *)
(* Like mk_comb, but instantiates type variables in the rator tm1 if     *)
(* necessary so that it can be applied to tm2.  Fails with Failure       *)
(* (consistently with the rest of this file) when tm1 does not have a    *)
(* function type; the previous partial pattern match                     *)
(* (let "fun",[ty;_] = ...) raised Match_failure instead, which escapes  *)
(* the "with Failure _" handlers used throughout, and also triggered a   *)
(* non-exhaustive-binding compiler warning.                              *)
let mk_icomb(tm1,tm2) =
  let ty =
    match dest_type (type_of tm1) with
      "fun",[ty;_] -> ty
    | _ -> failwith "mk_icomb: rator does not have function type" in
  let tyins = type_match ty (type_of tm2) [] in
  mk_comb(inst tyins tm1,tm2);;
(* ------------------------------------------------------------------------- *)
(* Instantiates types for constant c and iteratively makes combination. *)
(* ------------------------------------------------------------------------- *)
(* Instantiate types for constant cname so that it can be applied to the *)
(* given arguments, and build the iterated combination.  The constant's  *)
(* generic type must expose one function arrow per argument (nsplit      *)
(* peels off exactly (length args) domain types).                        *)
let list_mk_icomb cname args =
let atys,_ = nsplit dest_fun_ty args (get_const_type cname) in
let tyin = itlist2 (fun g a -> type_match g (type_of a)) atys args [] in
list_mk_comb(mk_const(cname,tyin),args);;
(* ------------------------------------------------------------------------- *)
(* Free variables in assumption list and conclusion of a theorem. *)
(* ------------------------------------------------------------------------- *)
(* Free variables of a theorem: the union of the free variables of all   *)
(* hypotheses and of the conclusion.                                     *)
let thm_frees th =
  let hyps,concl = dest_thm th in
  itlist (fun asm acc -> union (frees asm) acc) hyps (frees concl);;
(* ------------------------------------------------------------------------- *)
Is one term free in another ?
(* ------------------------------------------------------------------------- *)
(* Is tm1 free in tm2?  True when some subterm of tm2, alpha-equivalent  *)
(* to tm1, occurs outside the scope of any binder capturing a free       *)
(* variable of tm1.                                                      *)
let rec free_in tm1 tm2 =
  aconv tm1 tm2 ||
  (is_comb tm2 &&
   (let rat,rnd = dest_comb tm2 in free_in tm1 rat || free_in tm1 rnd)) ||
  (is_abs tm2 &&
   (let bv,bod = dest_abs tm2 in
    not (vfree_in bv tm1) && free_in tm1 bod));;
(* ------------------------------------------------------------------------- *)
(* Searching for terms. *)
(* ------------------------------------------------------------------------- *)
(* Return the first subterm satisfying predicate p, searching outermost  *)
(* first and rator before rand; the bound variable of an abstraction is  *)
(* not itself tested, only the body.  Fails if no subterm satisfies p.   *)
let rec find_term p tm =
if p tm then tm else
if is_abs tm then find_term p (body tm) else
if is_comb tm then
let l,r = dest_comb tm in
try find_term p l with Failure _ -> find_term p r
else failwith "find_term";;
(* Return a duplicate-free list (via insert) of all subterms satisfying  *)
(* p.  Same traversal convention as find_term: abstraction bodies are    *)
(* searched but the bound variables themselves are not tested.           *)
let find_terms =
let rec accum tl p tm =
let tl' = if p tm then insert tm tl else tl in
if is_abs tm then
accum tl' p (body tm)
else if is_comb tm then
accum (accum tl' p (rator tm)) p (rand tm)
else tl' in
accum [];;
(* ------------------------------------------------------------------------- *)
(* General syntax for binders. *)
(* *)
NB ! The " mk_binder " function expects polytype " A " , which is the domain .
(* ------------------------------------------------------------------------- *)
(* General syntax for binders: a binder application is an application of *)
(* a constant named s to an abstraction.                                 *)
(* NB! mk_binder expects the binder constant's generic type to use the   *)
(* polytype "A" (aty) as the domain of the bound variable.               *)
let is_binder s tm =
match tm with
Comb(Const(s',_),Abs(_,_)) -> s' = s
| _ -> false;;
let dest_binder s tm =
match tm with
Comb(Const(s',_),Abs(x,t)) when s' = s -> (x,t)
| _ -> failwith "dest_binder";;
(* The generic constant is built once; each application instantiates aty *)
(* to the type of the bound variable.                                    *)
let mk_binder op =
let c = mk_const(op,[]) in
fun (v,tm) -> mk_comb(inst [type_of v,aty] c,mk_abs(v,tm));;
(* ------------------------------------------------------------------------- *)
(* Syntax for binary operators. *)
(* ------------------------------------------------------------------------- *)
(* Syntax for binary operators: an application Comb(Comb(op',l),r) where *)
(* the whole operator term op' (not just a name) equals op.              *)
let is_binop op tm =
match tm with
Comb(Comb(op',_),_) -> op' = op
| _ -> false;;
let dest_binop op tm =
match tm with
Comb(Comb(op',l),r) when op' = op -> (l,r)
| _ -> failwith "dest_binop";;
(* Partial application precomputes mk_comb(op,tm1), sharing it across    *)
(* all right-hand arguments.                                             *)
let mk_binop op tm1 =
let f = mk_comb(op,tm1) in
fun tm2 -> mk_comb(f,tm2);;
(* Right-associated iterated application over a nonempty list.           *)
let list_mk_binop op = end_itlist (mk_binop op);;
(* Flatten a right-associated application back into its operand list.    *)
let binops op = striplist (dest_binop op);;
(* ------------------------------------------------------------------------- *)
(* Some common special cases *)
(* ------------------------------------------------------------------------- *)
(* Some common special cases of the generic binary/binder syntax.        *)
(* Conjunction. *)
let is_conj = is_binary "/\\";;
let dest_conj = dest_binary "/\\";;
let conjuncts = striplist dest_conj;;
(* Implication. *)
let is_imp = is_binary "==>";;
let dest_imp = dest_binary "==>";;
(* Universal quantification. *)
let is_forall = is_binder "!";;
let dest_forall = dest_binder "!";;
let strip_forall = splitlist dest_forall;;
(* Existential quantification. *)
let is_exists = is_binder "?";;
let dest_exists = dest_binder "?";;
let strip_exists = splitlist dest_exists;;
(* Disjunction. *)
let is_disj = is_binary "\\/";;
let dest_disj = dest_binary "\\/";;
let disjuncts = striplist dest_disj;;
(* Negation: an application of the constant "~". *)
let is_neg tm =
try fst(dest_const(rator tm)) = "~"
with Failure _ -> false;;
let dest_neg tm =
try let n,p = dest_comb tm in
if fst(dest_const n) = "~" then p else fail()
with Failure _ -> failwith "dest_neg";;
(* Unique existence. *)
let is_uexists = is_binder "?!";;
let dest_uexists = dest_binder "?!";;
(* Lists built from CONS, terminated by NIL. *)
let dest_cons = dest_binary "CONS";;
let is_cons = is_binary "CONS";;
let dest_list tm =
try let tms,nil = splitlist dest_cons tm in
if fst(dest_const nil) = "NIL" then tms else fail()
with Failure _ -> failwith "dest_list";;
let is_list = can dest_list;;
(* ------------------------------------------------------------------------- *)
(* Syntax for numerals. *)
(* ------------------------------------------------------------------------- *)
(* Convert a raw numeral term to an unlimited-precision num.  The        *)
(* encoding is NUMERAL applied to a binary representation built from     *)
(* _0 (zero), BIT0 t (denoting 2*t) and BIT1 t (denoting 2*t+1);         *)
(* anything else fails.                                                  *)
let dest_numeral =
let rec dest_num tm =
if try fst(dest_const tm) = "_0" with Failure _ -> false then num_0 else
let l,r = dest_comb tm in
let n = num_2 */ dest_num r in
let cn = fst(dest_const l) in
if cn = "BIT0" then n
else if cn = "BIT1" then n +/ num_1
else fail() in
fun tm -> try let l,r = dest_comb tm in
if fst(dest_const l) = "NUMERAL" then dest_num r else fail()
with Failure _ -> failwith "dest_numeral";;
(* ------------------------------------------------------------------------- *)
(* Syntax for generalized abstractions. *)
(* *)
These are here because they are used by the preterm->term translator ;
(* preterms regard generalized abstractions as an atomic notion. This is *)
(* slightly unclean --- for example we need locally some operations on *)
(* universal quantifiers --- but probably simplest. It has to go somewhere! *)
(* ------------------------------------------------------------------------- *)
(* Syntax for generalized abstractions "\pattern. body".                 *)
(* A varstruct abstraction is encoded as GABS(\f. !vars. GEQ (f pat) bod)*)
(* so the pattern is recovered as the rand of the GEQ left-hand side.    *)
(* A plain abstraction is destructed directly.                           *)
let dest_gabs =
let dest_geq = dest_binary "GEQ" in
fun tm ->
try if is_abs tm then dest_abs tm else
let l,r = dest_comb tm in
if not (fst(dest_const l) = "GABS") then fail() else
let ltm,rtm = dest_geq(snd(strip_forall(body r))) in
rand ltm,rtm
with Failure _ -> failwith "dest_gabs: Not a generalized abstraction";;
let is_gabs = can dest_gabs;;
(* Constructor: for a plain variable this is just mk_abs; otherwise the  *)
(* GABS encoding above is built, with the local helpers constructing     *)
(* suitably type-instantiated "!" and "GEQ" constants, and f chosen as a *)
(* variant avoiding the free variables of both pattern and body.         *)
let mk_gabs =
let mk_forall(v,t) =
let cop = mk_const("!",[type_of v,aty]) in
mk_comb(cop,mk_abs(v,t)) in
let list_mk_forall(vars,bod) = itlist (curry mk_forall) vars bod in
let mk_geq(t1,t2) =
let p = mk_const("GEQ",[type_of t1,aty]) in
mk_comb(mk_comb(p,t1),t2) in
fun (tm1,tm2) ->
if is_var tm1 then mk_abs(tm1,tm2) else
let fvs = frees tm1 in
let fty = mk_fun_ty (type_of tm1) (type_of tm2) in
let f = variant (frees tm1 @ frees tm2) (mk_var("f",fty)) in
let bod = mk_abs(f,list_mk_forall(fvs,mk_geq(mk_comb(f,tm1),tm2))) in
mk_comb(mk_const("GABS",[fty,aty]),bod);;
(* Iterated generalized abstraction over a list of patterns.             *)
let list_mk_gabs(vs,bod) = itlist (curry mk_gabs) vs bod;;
let strip_gabs = splitlist dest_gabs;;
(* ------------------------------------------------------------------------- *)
(* Syntax for let terms. *)
(* ------------------------------------------------------------------------- *)
(* Syntax for let-terms: LET (\p1 ... pn. LET_END body) t1 ... tn,       *)
(* destructed into the bindings [p1,t1; ...; pn,tn] and the body.        *)
let dest_let tm =
try let l,aargs = strip_comb tm in
if fst(dest_const l) <> "LET" then fail() else
(* hd aargs is the (generalized) abstraction binding the patterns;
   the remaining arguments are the bound right-hand sides. *)
let vars,lebod = strip_gabs (hd aargs) in
let eqs = zip vars (tl aargs) in
let le,bod = dest_comb lebod in
if fst(dest_const le) = "LET_END" then eqs,bod else fail()
with Failure _ -> failwith "dest_let: not a let-term";;
let is_let = can dest_let;;
(* Constructor: wrap the body in LET_END, abstract over the patterns,    *)
(* then apply a suitably type-instantiated LET to the abstraction and    *)
(* all the right-hand sides.                                             *)
let mk_let(assigs,bod) =
let lefts,rights = unzip assigs in
let lend = mk_comb(mk_const("LET_END",[type_of bod,aty]),bod) in
let lbod = list_mk_gabs(lefts,lend) in
let ty1,ty2 = dest_fun_ty(type_of lbod) in
let ltm = mk_const("LET",[ty1,aty; ty2,bty]) in
list_mk_comb(ltm,lbod::rights);;
(* ------------------------------------------------------------------------- *)
(* Constructors and destructors for finite types. *)
(* ------------------------------------------------------------------------- *)
(* Constructors and destructors for the standard finite types: "1" has   *)
(* size 1, "tybit0 t" has twice the size encoded by t and "tybit1 t"     *)
(* twice-plus-one, so every positive integer size is representable.      *)
let mk_finty:num->hol_type =
let rec finty n =
if n =/ num_1 then mk_type("1",[]) else
mk_type((if Num.mod_num n num_2 =/ num_0 then "tybit0" else "tybit1"),
[finty(Num.quo_num n num_2)]) in
fun n ->
(* Only defined for integral n >= 1. *)
if not(is_integer_num n) || n </ num_1 then failwith "mk_finty" else
finty n;;
let rec dest_finty:hol_type->num =
function
Tyapp("1",_) -> num_1
| Tyapp("tybit0",[ty]) -> dest_finty ty */ num_2
| Tyapp("tybit1",[ty]) -> succ_num (dest_finty ty */ num_2)
| _ -> failwith "dest_finty";;
(* ------------------------------------------------------------------------- *)
(* Useful function to create stylized arguments using numbers. *)
(* ------------------------------------------------------------------------- *)
(* Create stylized argument variables s0,s1,... of the given types,      *)
(* using variant to avoid clashes with [avoid] and with earlier          *)
(* inventions; when there is exactly one type the unnumbered name s is   *)
(* used instead.                                                         *)
let make_args =
let rec margs n s avoid tys =
if tys = [] then [] else
let v = variant avoid (mk_var(s^(string_of_int n),hd tys)) in
v::(margs (n + 1) s (v::avoid) (tl tys)) in
fun s avoid tys ->
if length tys = 1 then
[variant avoid (mk_var(s,hd tys))]
else
margs 0 s avoid tys;;
(* ------------------------------------------------------------------------- *)
(* Director strings down a term. *)
(* ------------------------------------------------------------------------- *)
(* Compute a "director string" locating the first subterm satisfying p:  *)
(* "b" descends into an abstraction body, "r" into the rand and "l" into *)
(* the rator (for a combination the rand is tried first).                *)
let find_path =
let rec find_path p tm =
if p tm then [] else
if is_abs tm then "b"::(find_path p (body tm)) else
try "r"::(find_path p (rand tm))
with Failure _ -> "l"::(find_path p (rator tm)) in
fun p tm -> implode(find_path p tm);;
(* Follow such a director string down a term; any character other than   *)
(* "l" or "r" is treated as a descent into an abstraction body.          *)
let follow_path =
let rec follow_path s tm =
match s with
[] -> tm
| "l"::t -> follow_path t (rator tm)
| "r"::t -> follow_path t (rand tm)
| _::t -> follow_path t (body tm) in
fun s tm -> follow_path (explode s) tm;;
| null | https://raw.githubusercontent.com/jrh13/hol-light/d125b0ae73e546a63ed458a7891f4e14ae0409e2/basics.ml | ocaml | =========================================================================
More syntax constructors, and prelogical utilities like matching.
=========================================================================
-------------------------------------------------------------------------
Create probably-fresh variable
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Convenient functions for manipulating types.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
A bit more syntax.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Note that "mk_binary" only works for monomorphic functions.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Produces a sequence of variants, considering previous inventions.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Gets all variables (free and/or bound) in a term.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
General substitution (for any free expression).
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Type matching.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Conventional matching version of mk_const (but with a sanity test).
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Like mk_comb, but instantiates type variables in rator if necessary.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Instantiates types for constant c and iteratively makes combination.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Free variables in assumption list and conclusion of a theorem.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Searching for terms.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
General syntax for binders.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Syntax for binary operators.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Some common special cases
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Syntax for numerals.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Syntax for generalized abstractions.
preterms regard generalized abstractions as an atomic notion. This is
slightly unclean --- for example we need locally some operations on
universal quantifiers --- but probably simplest. It has to go somewhere!
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Syntax for let terms.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Constructors and destructors for finite types.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Useful function to create stylized arguments using numbers.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Director strings down a term.
------------------------------------------------------------------------- | , University of Cambridge Computer Laboratory
( c ) Copyright , University of Cambridge 1998
( c ) Copyright , 1998 - 2007
( c ) Copyright , , 2017 - 2018
needs "fusion.ml";;
let genvar =
let gcounter = ref 0 in
fun ty -> let count = !gcounter in
(gcounter := count + 1;
mk_var("_"^(string_of_int count),ty));;
let dest_fun_ty ty =
match ty with
Tyapp("fun",[ty1;ty2]) -> (ty1,ty2)
| _ -> failwith "dest_fun_ty";;
let rec occurs_in ty bigty =
bigty = ty ||
is_type bigty && exists (occurs_in ty) (snd(dest_type bigty));;
let rec tysubst alist ty =
try rev_assoc ty alist with Failure _ ->
if is_vartype ty then ty else
let tycon,tyvars = dest_type ty in
mk_type(tycon,map (tysubst alist) tyvars);;
let bndvar tm =
try fst(dest_abs tm)
with Failure _ -> failwith "bndvar: Not an abstraction";;
let body tm =
try snd(dest_abs tm)
with Failure _ -> failwith "body: Not an abstraction";;
let list_mk_comb(h,t) = rev_itlist (C (curry mk_comb)) t h;;
let list_mk_abs(vs,bod) = itlist (curry mk_abs) vs bod;;
let strip_comb = rev_splitlist dest_comb;;
let strip_abs = splitlist dest_abs;;
Generic syntax to deal with some binary operators .
let is_binary s tm =
match tm with
Comb(Comb(Const(s',_),_),_) -> s' = s
| _ -> false;;
let dest_binary s tm =
match tm with
Comb(Comb(Const(s',_),l),r) when s' = s -> (l,r)
| _ -> failwith "dest_binary";;
let mk_binary s =
let c = mk_const(s,[]) in
fun (l,r) -> try mk_comb(mk_comb(c,l),r)
with Failure _ -> failwith "mk_binary";;
let rec variants av vs =
if vs = [] then [] else
let vh = variant av (hd vs) in vh::(variants (vh::av) (tl vs));;
let variables =
let rec vars(acc,tm) =
if is_var tm then insert tm acc
else if is_const tm then acc
else if is_abs tm then
let v,bod = dest_abs tm in
vars(insert v acc,bod)
else
let l,r = dest_comb tm in
vars(vars(acc,l),r) in
fun tm -> vars([],tm);;
let subst =
let rec ssubst ilist tm =
if ilist = [] then tm else
try fst (find ((aconv tm) o snd) ilist) with Failure _ ->
match tm with
Comb(f,x) -> let f' = ssubst ilist f and x' = ssubst ilist x in
if f' == f && x' == x then tm else mk_comb(f',x')
| Abs(v,bod) ->
let ilist' = filter (not o (vfree_in v) o snd) ilist in
mk_abs(v,ssubst ilist' bod)
| _ -> tm in
fun ilist ->
let theta = filter (fun (s,t) -> Pervasives.compare s t <> 0) ilist in
if theta = [] then (fun tm -> tm) else
let ts,xs = unzip theta in
fun tm ->
let gs = variants (variables tm) (map (genvar o type_of) xs) in
let tm' = ssubst (zip gs xs) tm in
if tm' == tm then tm else vsubst (zip ts gs) tm';;
Alpha conversion term operation .
let alpha v tm =
let v0,bod = try dest_abs tm
with Failure _ -> failwith "alpha: Not an abstraction"in
if v = v0 then tm else
if type_of v = type_of v0 && not (vfree_in v bod) then
mk_abs(v,vsubst[v,v0]bod)
else failwith "alpha: Invalid new variable";;
let rec type_match vty cty sofar =
if is_vartype vty then
try if rev_assoc vty sofar = cty then sofar else failwith "type_match"
with Failure "find" -> (cty,vty)::sofar
else
let vop,vargs = dest_type vty and cop,cargs = dest_type cty in
if vop = cop then itlist2 type_match vargs cargs sofar
else failwith "type_match";;
let mk_mconst(c,ty) =
try let uty = get_const_type c in
let mat = type_match uty ty [] in
let con = mk_const(c,mat) in
if type_of con = ty then con else fail()
with Failure _ -> failwith "mk_const: generic type cannot be instantiated";;
let mk_icomb(tm1,tm2) =
let "fun",[ty;_] = dest_type (type_of tm1) in
let tyins = type_match ty (type_of tm2) [] in
mk_comb(inst tyins tm1,tm2);;
let list_mk_icomb cname args =
let atys,_ = nsplit dest_fun_ty args (get_const_type cname) in
let tyin = itlist2 (fun g a -> type_match g (type_of a)) atys args [] in
list_mk_comb(mk_const(cname,tyin),args);;
let thm_frees th =
let asl,c = dest_thm th in
itlist (union o frees) asl (frees c);;
Is one term free in another ?
let rec free_in tm1 tm2 =
if aconv tm1 tm2 then true
else if is_comb tm2 then
let l,r = dest_comb tm2 in free_in tm1 l || free_in tm1 r
else if is_abs tm2 then
let bv,bod = dest_abs tm2 in
not (vfree_in bv tm1) && free_in tm1 bod
else false;;
let rec find_term p tm =
if p tm then tm else
if is_abs tm then find_term p (body tm) else
if is_comb tm then
let l,r = dest_comb tm in
try find_term p l with Failure _ -> find_term p r
else failwith "find_term";;
let find_terms =
let rec accum tl p tm =
let tl' = if p tm then insert tm tl else tl in
if is_abs tm then
accum tl' p (body tm)
else if is_comb tm then
accum (accum tl' p (rator tm)) p (rand tm)
else tl' in
accum [];;
NB ! The " mk_binder " function expects polytype " A " , which is the domain .
let is_binder s tm =
match tm with
Comb(Const(s',_),Abs(_,_)) -> s' = s
| _ -> false;;
let dest_binder s tm =
match tm with
Comb(Const(s',_),Abs(x,t)) when s' = s -> (x,t)
| _ -> failwith "dest_binder";;
let mk_binder op =
let c = mk_const(op,[]) in
fun (v,tm) -> mk_comb(inst [type_of v,aty] c,mk_abs(v,tm));;
let is_binop op tm =
match tm with
Comb(Comb(op',_),_) -> op' = op
| _ -> false;;
let dest_binop op tm =
match tm with
Comb(Comb(op',l),r) when op' = op -> (l,r)
| _ -> failwith "dest_binop";;
let mk_binop op tm1 =
let f = mk_comb(op,tm1) in
fun tm2 -> mk_comb(f,tm2);;
let list_mk_binop op = end_itlist (mk_binop op);;
let binops op = striplist (dest_binop op);;
let is_conj = is_binary "/\\";;
let dest_conj = dest_binary "/\\";;
let conjuncts = striplist dest_conj;;
let is_imp = is_binary "==>";;
let dest_imp = dest_binary "==>";;
let is_forall = is_binder "!";;
let dest_forall = dest_binder "!";;
let strip_forall = splitlist dest_forall;;
let is_exists = is_binder "?";;
let dest_exists = dest_binder "?";;
let strip_exists = splitlist dest_exists;;
let is_disj = is_binary "\\/";;
let dest_disj = dest_binary "\\/";;
let disjuncts = striplist dest_disj;;
let is_neg tm =
try fst(dest_const(rator tm)) = "~"
with Failure _ -> false;;
let dest_neg tm =
try let n,p = dest_comb tm in
if fst(dest_const n) = "~" then p else fail()
with Failure _ -> failwith "dest_neg";;
let is_uexists = is_binder "?!";;
let dest_uexists = dest_binder "?!";;
let dest_cons = dest_binary "CONS";;
let is_cons = is_binary "CONS";;
let dest_list tm =
try let tms,nil = splitlist dest_cons tm in
if fst(dest_const nil) = "NIL" then tms else fail()
with Failure _ -> failwith "dest_list";;
let is_list = can dest_list;;
let dest_numeral =
let rec dest_num tm =
if try fst(dest_const tm) = "_0" with Failure _ -> false then num_0 else
let l,r = dest_comb tm in
let n = num_2 */ dest_num r in
let cn = fst(dest_const l) in
if cn = "BIT0" then n
else if cn = "BIT1" then n +/ num_1
else fail() in
fun tm -> try let l,r = dest_comb tm in
if fst(dest_const l) = "NUMERAL" then dest_num r else fail()
with Failure _ -> failwith "dest_numeral";;
These are here because they are used by the preterm->term translator ;
let dest_gabs =
let dest_geq = dest_binary "GEQ" in
fun tm ->
try if is_abs tm then dest_abs tm else
let l,r = dest_comb tm in
if not (fst(dest_const l) = "GABS") then fail() else
let ltm,rtm = dest_geq(snd(strip_forall(body r))) in
rand ltm,rtm
with Failure _ -> failwith "dest_gabs: Not a generalized abstraction";;
let is_gabs = can dest_gabs;;
let mk_gabs =
let mk_forall(v,t) =
let cop = mk_const("!",[type_of v,aty]) in
mk_comb(cop,mk_abs(v,t)) in
let list_mk_forall(vars,bod) = itlist (curry mk_forall) vars bod in
let mk_geq(t1,t2) =
let p = mk_const("GEQ",[type_of t1,aty]) in
mk_comb(mk_comb(p,t1),t2) in
fun (tm1,tm2) ->
if is_var tm1 then mk_abs(tm1,tm2) else
let fvs = frees tm1 in
let fty = mk_fun_ty (type_of tm1) (type_of tm2) in
let f = variant (frees tm1 @ frees tm2) (mk_var("f",fty)) in
let bod = mk_abs(f,list_mk_forall(fvs,mk_geq(mk_comb(f,tm1),tm2))) in
mk_comb(mk_const("GABS",[fty,aty]),bod);;
let list_mk_gabs(vs,bod) = itlist (curry mk_gabs) vs bod;;
let strip_gabs = splitlist dest_gabs;;
let dest_let tm =
try let l,aargs = strip_comb tm in
if fst(dest_const l) <> "LET" then fail() else
let vars,lebod = strip_gabs (hd aargs) in
let eqs = zip vars (tl aargs) in
let le,bod = dest_comb lebod in
if fst(dest_const le) = "LET_END" then eqs,bod else fail()
with Failure _ -> failwith "dest_let: not a let-term";;
let is_let = can dest_let;;
let mk_let(assigs,bod) =
let lefts,rights = unzip assigs in
let lend = mk_comb(mk_const("LET_END",[type_of bod,aty]),bod) in
let lbod = list_mk_gabs(lefts,lend) in
let ty1,ty2 = dest_fun_ty(type_of lbod) in
let ltm = mk_const("LET",[ty1,aty; ty2,bty]) in
list_mk_comb(ltm,lbod::rights);;
let mk_finty:num->hol_type =
let rec finty n =
if n =/ num_1 then mk_type("1",[]) else
mk_type((if Num.mod_num n num_2 =/ num_0 then "tybit0" else "tybit1"),
[finty(Num.quo_num n num_2)]) in
fun n ->
if not(is_integer_num n) || n </ num_1 then failwith "mk_finty" else
finty n;;
let rec dest_finty:hol_type->num =
function
Tyapp("1",_) -> num_1
| Tyapp("tybit0",[ty]) -> dest_finty ty */ num_2
| Tyapp("tybit1",[ty]) -> succ_num (dest_finty ty */ num_2)
| _ -> failwith "dest_finty";;
let make_args =
let rec margs n s avoid tys =
if tys = [] then [] else
let v = variant avoid (mk_var(s^(string_of_int n),hd tys)) in
v::(margs (n + 1) s (v::avoid) (tl tys)) in
fun s avoid tys ->
if length tys = 1 then
[variant avoid (mk_var(s,hd tys))]
else
margs 0 s avoid tys;;
let find_path =
let rec find_path p tm =
if p tm then [] else
if is_abs tm then "b"::(find_path p (body tm)) else
try "r"::(find_path p (rand tm))
with Failure _ -> "l"::(find_path p (rator tm)) in
fun p tm -> implode(find_path p tm);;
let follow_path =
let rec follow_path s tm =
match s with
[] -> tm
| "l"::t -> follow_path t (rator tm)
| "r"::t -> follow_path t (rand tm)
| _::t -> follow_path t (body tm) in
fun s tm -> follow_path (explode s) tm;;
|
2fd5c3df9cf45bd25c343bd80a8beef8d8fefc78abafdfd181ba16af2caf9e51 | BranchTaken/Hemlock | test_sink.ml | open Basis
let () =
File.Fmt.sink |> Fmt.fmt "sink\n" |> Fmt.flush |> ignore;
File.Fmt.stdout |> Fmt.fmt "stdout\n" |> Fmt.flush |> ignore
| null | https://raw.githubusercontent.com/BranchTaken/Hemlock/a21b462fe7f70475591d2ffae185c91552bf6372/bootstrap/test/basis/file/test_sink.ml | ocaml | open Basis
let () =
File.Fmt.sink |> Fmt.fmt "sink\n" |> Fmt.flush |> ignore;
File.Fmt.stdout |> Fmt.fmt "stdout\n" |> Fmt.flush |> ignore
| |
c55e80219ff9648d624549073f386fa3d9b66e284802a1f5ca13bfd7ef06a53d | Frama-C/Frama-C-snapshot | menu_manager.mli | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
(** Handle the menubar and the toolbar.
@since Boron-20100401 *)
(** Where to put a new entry.
@since Boron-20100401 *)
type where =
| Toolbar of GtkStock.id * string * string (** Label then tooltip *)
| Menubar of
GtkStock.id option (** Stock used for the icon *) * string (** Label *)
| ToolMenubar of GtkStock.id * string * string (** Label then tooltip *)
* Callback for the buttons that can be in the menus . Standard buttons / menus
have a callback with no argument . Buttons / menus with states are displayed
with checkboxes in menus , or as toggle buttons in toolbars . They receive the
after - click state as argument . The state of the button with the second
argument of [ Bool_callback ] . Currently checks menus can not have images in
Gtk , or the [ GtkStock.id ] fields of [ where ] are ignored .
@since
have a callback with no argument. Buttons/menus with states are displayed
with checkboxes in menus, or as toggle buttons in toolbars. They receive the
after-click state as argument. The state of the button with the second
argument of [Bool_callback]. Currently checks menus cannot have images in
Gtk, or the [GtkStock.id] fields of [where] are ignored.
@since Nitrogen-20111001 *)
type callback_state =
| Unit_callback of (unit -> unit)
| Bool_callback of (bool -> unit) * (unit -> bool)
* @since Boron-20100401
@modify Nitrogen-20111001 *)
type entry = private {
e_where: where;
e_callback: callback_state (** callback called when the button is clicked *);
e_sensitive: unit -> bool (** should the button be activated when the gui
is refreshed *);
}
* { 2 Smart constructors for menu entries . }
If not supplied , the [ sensitive ] parameter is the function that always
returns [ true ] .
@since
If not supplied, the [sensitive] parameter is the function that always
returns [true].
@since Nitrogen-20111001 *)
val toolbar:
?sensitive:(unit -> bool) ->
icon:GtkStock.id ->
label:string ->
?tooltip:string ->
callback_state ->
entry
val menubar:
?sensitive:(unit -> bool) -> ?icon:GtkStock.id -> string -> callback_state ->
entry
val toolmenubar:
?sensitive:(unit -> bool) ->
icon:GtkStock.id ->
label:string ->
?tooltip:string ->
callback_state ->
entry
(** The item type corresponding to an entry.
@since Boron-20100401 *)
class type item = object
method menu_item: GMenu.menu_item option
(** @since Boron-20100401 *)
method check_menu_item: GMenu.check_menu_item option
* @since
method menu_item_skel: GMenu.menu_item_skel option
* @since
method menu: GMenu.menu option
* Return the menu in which the item has been inserted , if meaningful
@since
@since Nitrogen-20111001 *)
method add_accelerator: Gdk.Tags.modifier -> char -> unit
* Add an accelerator iff there is a menu item .
@since
@since Boron-20100401 *)
method tool_button: GButton.tool_button option
(** @since Boron-20100401 *)
method toggle_tool_button: GButton.toggle_tool_button option
* @since
method tool_button_skel: GButton.tool_button_skel option
* @since
end
(** How to handle a Frama-C menu.
@since Boron-20100401 *)
class menu_manager: ?packing:(GObj.widget -> unit) -> host:Gtk_helper.host ->
object
* { 2 API for plug - ins }
method add_plugin: ?title:string -> entry list -> item array
* Add entries dedicated to a plug - in .
If [ title ] is specified , then the entries are added in a dedicated
sub - menu of name [ title ] .
The elements of the returned array are in the same order that the ones
in the input list .
@since
If [title] is specified, then the entries are added in a dedicated
sub-menu of name [title].
The elements of the returned array are in the same order that the ones
in the input list.
@since Boron-20100401 *)
method add_debug:
?title:string -> ?show:(unit -> bool) -> entry list -> item array
* Add entries to the menu dedicated to debugging tools .
If [ title ] is specified , then the entries are added in a dedicated
sub - menu of name [ title ] .
If [ show ] is specified , then the entries are only shown when this
function returns [ true ] ( it returns [ true ] by default ) .
The elements of the returned array are in the same order that the ones
in the input list .
@since
If [title] is specified, then the entries are added in a dedicated
sub-menu of name [title].
If [show] is specified, then the entries are only shown when this
function returns [true] (it returns [true] by default).
The elements of the returned array are in the same order that the ones
in the input list.
@since Boron-20100401 *)
* { 2 High - level API }
method add_menu: ?pos:int -> string -> GMenu.menu_item * GMenu.menu
(** @since Boron-20100401 *)
method add_entries:
?title:string -> ?pos:int -> GMenu.menu -> entry list -> item array
* Add entries in the given menu . If [ title ] is specified , then the
entries are added in a dedicated sub - menu of name [ title ] .
The elements of the returned array are in the same order that the ones
in the input list .
@since
entries are added in a dedicated sub-menu of name [title].
The elements of the returned array are in the same order that the ones
in the input list.
@since Boron-20100401 *)
method set_sensitive: bool -> unit
* Set the sensitive property of all the entries .
@since
@since Boron-20100401 *)
* { 2 Low - level API }
method factory: GMenu.menu_shell GMenu.factory
(** @since Boron-20100401 *)
method menubar: GMenu.menu_shell
(** @since Boron-20100401 *)
method toolbar: GButton.toolbar
(** @since Boron-20100401 *)
method refresh: unit -> unit
* Reset the activation state of the buttons
@since
@since Nitrogen-20111001 *)
end
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/gui/menu_manager.mli | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
* Handle the menubar and the toolbar.
@since Boron-20100401
* Where to put a new entry.
@since Boron-20100401
* Label then tooltip
* Stock used for the icon
* Label
* Label then tooltip
* callback called when the button is clicked
* should the button be activated when the gui
is refreshed
* The item type corresponding to an entry.
@since Boron-20100401
* @since Boron-20100401
* @since Boron-20100401
* How to handle a Frama-C menu.
@since Boron-20100401
* @since Boron-20100401
* @since Boron-20100401
* @since Boron-20100401
* @since Boron-20100401
Local Variables:
compile-command: "make -C ../../.."
End:
| This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
type where =
| Menubar of
* Callback for the buttons that can be in the menus . Standard buttons / menus
have a callback with no argument . Buttons / menus with states are displayed
with checkboxes in menus , or as toggle buttons in toolbars . They receive the
after - click state as argument . The state of the button with the second
argument of [ Bool_callback ] . Currently checks menus can not have images in
Gtk , or the [ GtkStock.id ] fields of [ where ] are ignored .
@since
have a callback with no argument. Buttons/menus with states are displayed
with checkboxes in menus, or as toggle buttons in toolbars. They receive the
after-click state as argument. The state of the button with the second
argument of [Bool_callback]. Currently checks menus cannot have images in
Gtk, or the [GtkStock.id] fields of [where] are ignored.
@since Nitrogen-20111001 *)
type callback_state =
| Unit_callback of (unit -> unit)
| Bool_callback of (bool -> unit) * (unit -> bool)
* @since Boron-20100401
@modify Nitrogen-20111001 *)
type entry = private {
e_where: where;
}
* { 2 Smart constructors for menu entries . }
If not supplied , the [ sensitive ] parameter is the function that always
returns [ true ] .
@since
If not supplied, the [sensitive] parameter is the function that always
returns [true].
@since Nitrogen-20111001 *)
val toolbar:
?sensitive:(unit -> bool) ->
icon:GtkStock.id ->
label:string ->
?tooltip:string ->
callback_state ->
entry
val menubar:
?sensitive:(unit -> bool) -> ?icon:GtkStock.id -> string -> callback_state ->
entry
val toolmenubar:
?sensitive:(unit -> bool) ->
icon:GtkStock.id ->
label:string ->
?tooltip:string ->
callback_state ->
entry
class type item = object
method menu_item: GMenu.menu_item option
method check_menu_item: GMenu.check_menu_item option
* @since
method menu_item_skel: GMenu.menu_item_skel option
* @since
method menu: GMenu.menu option
* Return the menu in which the item has been inserted , if meaningful
@since
@since Nitrogen-20111001 *)
method add_accelerator: Gdk.Tags.modifier -> char -> unit
* Add an accelerator iff there is a menu item .
@since
@since Boron-20100401 *)
method tool_button: GButton.tool_button option
method toggle_tool_button: GButton.toggle_tool_button option
* @since
method tool_button_skel: GButton.tool_button_skel option
* @since
end
class menu_manager: ?packing:(GObj.widget -> unit) -> host:Gtk_helper.host ->
object
* { 2 API for plug - ins }
method add_plugin: ?title:string -> entry list -> item array
* Add entries dedicated to a plug - in .
If [ title ] is specified , then the entries are added in a dedicated
sub - menu of name [ title ] .
The elements of the returned array are in the same order that the ones
in the input list .
@since
If [title] is specified, then the entries are added in a dedicated
sub-menu of name [title].
The elements of the returned array are in the same order that the ones
in the input list.
@since Boron-20100401 *)
method add_debug:
?title:string -> ?show:(unit -> bool) -> entry list -> item array
* Add entries to the menu dedicated to debugging tools .
If [ title ] is specified , then the entries are added in a dedicated
sub - menu of name [ title ] .
If [ show ] is specified , then the entries are only shown when this
function returns [ true ] ( it returns [ true ] by default ) .
The elements of the returned array are in the same order that the ones
in the input list .
@since
If [title] is specified, then the entries are added in a dedicated
sub-menu of name [title].
If [show] is specified, then the entries are only shown when this
function returns [true] (it returns [true] by default).
The elements of the returned array are in the same order that the ones
in the input list.
@since Boron-20100401 *)
* { 2 High - level API }
method add_menu: ?pos:int -> string -> GMenu.menu_item * GMenu.menu
method add_entries:
?title:string -> ?pos:int -> GMenu.menu -> entry list -> item array
* Add entries in the given menu . If [ title ] is specified , then the
entries are added in a dedicated sub - menu of name [ title ] .
The elements of the returned array are in the same order that the ones
in the input list .
@since
entries are added in a dedicated sub-menu of name [title].
The elements of the returned array are in the same order that the ones
in the input list.
@since Boron-20100401 *)
method set_sensitive: bool -> unit
* Set the sensitive property of all the entries .
@since
@since Boron-20100401 *)
* { 2 Low - level API }
method factory: GMenu.menu_shell GMenu.factory
method menubar: GMenu.menu_shell
method toolbar: GButton.toolbar
method refresh: unit -> unit
* Reset the activation state of the buttons
@since
@since Nitrogen-20111001 *)
end
|
69c18342b98035ecdbd62f3c04d078a2c590843f5492a5c4be7b9ba33bf3e814 | IndiscriminateCoding/clarity | align.mli | (** Zip datatypes with non-uniform shapes using These.t *)
module type Basic = sig
type 'a t
val align_as : ('a -> 'b -> 'c) -> ('a -> 'c) -> ('b -> 'c) -> 'a t -> 'b t -> 'c t
end
module type Basic2 = sig
type ('p, 'a) t
val align_as :
('a -> 'b -> 'c) -> ('a -> 'c) -> ('b -> 'c) -> ('p, 'a) t -> ('p, 'b) t -> ('p, 'c) t
end
module type Basic3 = sig
type ('p, 'q, 'a) t
val align_as : ('a -> 'b -> 'c) -> ('a -> 'c) -> ('b -> 'c) ->
('p, 'q, 'a) t -> ('p, 'q, 'b) t -> ('p, 'q, 'c) t
end
module type S = sig
include Basic
val align_with : (('a, 'b) These.t -> 'c) -> 'a t -> 'b t -> 'c t
val align : 'a t -> 'b t -> ('a, 'b) These.t t
val falign : ('a -> 'a -> 'a) -> 'a t -> 'a t -> 'a t
val pad_zip_with : ('a option -> 'b option -> 'c) -> 'a t -> 'b t -> 'c t
val pad_zip : 'a t -> 'b t -> ('a option * 'b option) t
end
module type S2 = sig
include Basic2
val align_with : (('a, 'b) These.t -> 'c) -> ('p, 'a) t -> ('p, 'b) t -> ('p, 'c) t
val align : ('p, 'a) t -> ('p, 'b) t -> ('p, ('a, 'b) These.t) t
val falign : ('a -> 'a -> 'a) -> ('p, 'a) t -> ('p, 'a) t -> ('p, 'a) t
val pad_zip_with : ('a option -> 'b option -> 'c) -> ('p, 'a) t -> ('p, 'b) t -> ('p, 'c) t
val pad_zip : ('p, 'a) t -> ('p, 'b) t -> ('p, 'a option * 'b option) t
end
module type S3 = sig
include Basic3
val align_with : (('a, 'b) These.t -> 'c) -> ('p, 'q, 'a) t -> ('p, 'q, 'b) t -> ('p, 'q, 'c) t
val align : ('p, 'q, 'a) t -> ('p, 'q, 'b) t -> ('p, 'q, ('a, 'b) These.t) t
val falign : ('a -> 'a -> 'a) -> ('p, 'q, 'a) t -> ('p, 'q, 'a) t -> ('p, 'q, 'a) t
val pad_zip_with :
('a option -> 'b option -> 'c) -> ('p, 'q, 'a) t -> ('p, 'q, 'b) t -> ('p, 'q, 'c) t
val pad_zip : ('p, 'q, 'a) t -> ('p, 'q, 'b) t -> ('p, 'q, 'a option * 'b option) t
end
module Make (A : Basic) : S with type 'a t := 'a A.t
module Make2 (A : Basic2) : S2 with type ('p, 'a) t := ('p, 'a) A.t
module Make3 (A : Basic3) : S3 with type ('p, 'q, 'a) t := ('p, 'q, 'a) A.t
| null | https://raw.githubusercontent.com/IndiscriminateCoding/clarity/163c16249cb3f01c4244b80be39e9aad0b1ca325/lib/classes/align.mli | ocaml | * Zip datatypes with non-uniform shapes using These.t |
module type Basic = sig
type 'a t
val align_as : ('a -> 'b -> 'c) -> ('a -> 'c) -> ('b -> 'c) -> 'a t -> 'b t -> 'c t
end
module type Basic2 = sig
type ('p, 'a) t
val align_as :
('a -> 'b -> 'c) -> ('a -> 'c) -> ('b -> 'c) -> ('p, 'a) t -> ('p, 'b) t -> ('p, 'c) t
end
module type Basic3 = sig
type ('p, 'q, 'a) t
val align_as : ('a -> 'b -> 'c) -> ('a -> 'c) -> ('b -> 'c) ->
('p, 'q, 'a) t -> ('p, 'q, 'b) t -> ('p, 'q, 'c) t
end
module type S = sig
include Basic
val align_with : (('a, 'b) These.t -> 'c) -> 'a t -> 'b t -> 'c t
val align : 'a t -> 'b t -> ('a, 'b) These.t t
val falign : ('a -> 'a -> 'a) -> 'a t -> 'a t -> 'a t
val pad_zip_with : ('a option -> 'b option -> 'c) -> 'a t -> 'b t -> 'c t
val pad_zip : 'a t -> 'b t -> ('a option * 'b option) t
end
module type S2 = sig
include Basic2
val align_with : (('a, 'b) These.t -> 'c) -> ('p, 'a) t -> ('p, 'b) t -> ('p, 'c) t
val align : ('p, 'a) t -> ('p, 'b) t -> ('p, ('a, 'b) These.t) t
val falign : ('a -> 'a -> 'a) -> ('p, 'a) t -> ('p, 'a) t -> ('p, 'a) t
val pad_zip_with : ('a option -> 'b option -> 'c) -> ('p, 'a) t -> ('p, 'b) t -> ('p, 'c) t
val pad_zip : ('p, 'a) t -> ('p, 'b) t -> ('p, 'a option * 'b option) t
end
module type S3 = sig
include Basic3
val align_with : (('a, 'b) These.t -> 'c) -> ('p, 'q, 'a) t -> ('p, 'q, 'b) t -> ('p, 'q, 'c) t
val align : ('p, 'q, 'a) t -> ('p, 'q, 'b) t -> ('p, 'q, ('a, 'b) These.t) t
val falign : ('a -> 'a -> 'a) -> ('p, 'q, 'a) t -> ('p, 'q, 'a) t -> ('p, 'q, 'a) t
val pad_zip_with :
('a option -> 'b option -> 'c) -> ('p, 'q, 'a) t -> ('p, 'q, 'b) t -> ('p, 'q, 'c) t
val pad_zip : ('p, 'q, 'a) t -> ('p, 'q, 'b) t -> ('p, 'q, 'a option * 'b option) t
end
module Make (A : Basic) : S with type 'a t := 'a A.t
module Make2 (A : Basic2) : S2 with type ('p, 'a) t := ('p, 'a) A.t
module Make3 (A : Basic3) : S3 with type ('p, 'q, 'a) t := ('p, 'q, 'a) A.t
|
318ead0a1cb1185b76ec372dbe2a1ebd1579d2b83612b9533bc2328c449b97db | naproche/naproche | Bash.hs | {- generated by Isabelle -}
Title : Isabelle / Bash.hs
Author : Makarius
LICENSE : BSD 3 - clause ( Isabelle )
Support for GNU bash .
See " $ ISABELLE_HOME / src / Pure / System / bash . ML "
Author: Makarius
LICENSE: BSD 3-clause (Isabelle)
Support for GNU bash.
See "$ISABELLE_HOME/src/Pure/System/bash.ML"
-}
{-# LANGUAGE OverloadedStrings #-}
module Isabelle.Bash (
string, strings,
Params,
get_script, get_input, get_cwd, get_putenv, get_redirect,
get_timeout, get_description,
script, input, cwd, putenv, redirect, timeout, description,
server_run, server_kill,
server_uuid, server_interrupt, server_failure, server_result
)
where
import Text.Printf (printf)
import qualified Isabelle.Symbol as Symbol
import qualified Isabelle.Bytes as Bytes
import Isabelle.Bytes (Bytes)
import qualified Isabelle.Time as Time
import Isabelle.Time (Time)
import Isabelle.Library
{- concrete syntax -}
string :: Bytes -> Bytes
string str =
if Bytes.null str then "\"\""
else str |> Bytes.unpack |> map trans |> Bytes.concat
where
trans b =
case Bytes.char b of
'\t' -> "$'\\t'"
'\n' -> "$'\\n'"
'\f' -> "$'\\f'"
'\r' -> "$'\\r'"
c ->
if Symbol.is_ascii_letter c || Symbol.is_ascii_digit c || c `elem` ("+,-./:_" :: String)
then Bytes.singleton b
else if b < 32 || b >= 127 then make_bytes (printf "$'\\x%02x'" b :: String)
else "\\" <> Bytes.singleton b
strings :: [Bytes] -> Bytes
strings = space_implode " " . map string
{- server parameters -}
data Params = Params {
_script :: Bytes,
_input :: Bytes,
_cwd :: Maybe Bytes,
_putenv :: [(Bytes, Bytes)],
_redirect :: Bool,
_timeout :: Time,
_description :: Bytes}
deriving (Show, Eq)
get_script :: Params -> Bytes
get_script = _script
get_input :: Params -> Bytes
get_input = _input
get_cwd :: Params -> Maybe Bytes
get_cwd = _cwd
get_putenv :: Params -> [(Bytes, Bytes)]
get_putenv = _putenv
get_redirect :: Params -> Bool
get_redirect = _redirect
get_timeout :: Params -> Time
get_timeout = _timeout
get_description :: Params -> Bytes
get_description = _description
script :: Bytes -> Params
script script = Params script "" Nothing [] False Time.zero ""
input :: Bytes -> Params -> Params
input input params = params { _input = input }
cwd :: Bytes -> Params -> Params
cwd cwd params = params { _cwd = Just cwd }
putenv :: [(Bytes, Bytes)] -> Params -> Params
putenv putenv params = params { _putenv = putenv }
redirect :: Params -> Params
redirect params = params { _redirect = True }
timeout :: Time -> Params -> Params
timeout timeout params = params { _timeout = timeout }
description :: Bytes -> Params -> Params
description description params = params { _description = description }
{- server messages -}
server_run, server_kill :: Bytes
server_run = "run";
server_kill = "kill";
server_uuid, server_interrupt, server_failure, server_result :: Bytes
server_uuid = "uuid";
server_interrupt = "interrupt";
server_failure = "failure";
server_result = "result";
| null | https://raw.githubusercontent.com/naproche/naproche/804f94c07b4cd1efde8c466d8e73b1ef96861e12/Isabelle/src/Isabelle/Bash.hs | haskell | generated by Isabelle
# LANGUAGE OverloadedStrings #
concrete syntax
server parameters
server messages |
Title : Isabelle / Bash.hs
Author : Makarius
LICENSE : BSD 3 - clause ( Isabelle )
Support for GNU bash .
See " $ ISABELLE_HOME / src / Pure / System / bash . ML "
Author: Makarius
LICENSE: BSD 3-clause (Isabelle)
Support for GNU bash.
See "$ISABELLE_HOME/src/Pure/System/bash.ML"
-}
module Isabelle.Bash (
string, strings,
Params,
get_script, get_input, get_cwd, get_putenv, get_redirect,
get_timeout, get_description,
script, input, cwd, putenv, redirect, timeout, description,
server_run, server_kill,
server_uuid, server_interrupt, server_failure, server_result
)
where
import Text.Printf (printf)
import qualified Isabelle.Symbol as Symbol
import qualified Isabelle.Bytes as Bytes
import Isabelle.Bytes (Bytes)
import qualified Isabelle.Time as Time
import Isabelle.Time (Time)
import Isabelle.Library
string :: Bytes -> Bytes
string str =
if Bytes.null str then "\"\""
else str |> Bytes.unpack |> map trans |> Bytes.concat
where
trans b =
case Bytes.char b of
'\t' -> "$'\\t'"
'\n' -> "$'\\n'"
'\f' -> "$'\\f'"
'\r' -> "$'\\r'"
c ->
if Symbol.is_ascii_letter c || Symbol.is_ascii_digit c || c `elem` ("+,-./:_" :: String)
then Bytes.singleton b
else if b < 32 || b >= 127 then make_bytes (printf "$'\\x%02x'" b :: String)
else "\\" <> Bytes.singleton b
strings :: [Bytes] -> Bytes
strings = space_implode " " . map string
data Params = Params {
_script :: Bytes,
_input :: Bytes,
_cwd :: Maybe Bytes,
_putenv :: [(Bytes, Bytes)],
_redirect :: Bool,
_timeout :: Time,
_description :: Bytes}
deriving (Show, Eq)
get_script :: Params -> Bytes
get_script = _script
get_input :: Params -> Bytes
get_input = _input
get_cwd :: Params -> Maybe Bytes
get_cwd = _cwd
get_putenv :: Params -> [(Bytes, Bytes)]
get_putenv = _putenv
get_redirect :: Params -> Bool
get_redirect = _redirect
get_timeout :: Params -> Time
get_timeout = _timeout
get_description :: Params -> Bytes
get_description = _description
script :: Bytes -> Params
script script = Params script "" Nothing [] False Time.zero ""
input :: Bytes -> Params -> Params
input input params = params { _input = input }
cwd :: Bytes -> Params -> Params
cwd cwd params = params { _cwd = Just cwd }
putenv :: [(Bytes, Bytes)] -> Params -> Params
putenv putenv params = params { _putenv = putenv }
redirect :: Params -> Params
redirect params = params { _redirect = True }
timeout :: Time -> Params -> Params
timeout timeout params = params { _timeout = timeout }
description :: Bytes -> Params -> Params
description description params = params { _description = description }
server_run, server_kill :: Bytes
server_run = "run";
server_kill = "kill";
server_uuid, server_interrupt, server_failure, server_result :: Bytes
server_uuid = "uuid";
server_interrupt = "interrupt";
server_failure = "failure";
server_result = "result";
|
fbea82dc70afc6119bdac35e4a8795b015a1f2b35eebac1940578e53762eec40 | haskell-foundation/foundation | Mappable.hs | -- |
-- Module : Basement.Mappable
-- License : BSD-style
Maintainer : < >
-- Stability : experimental
-- Portability : portable
--
-- Class of collection that can be traversed from left to right,
-- performing an action on each element.
--
module Foundation.Collection.Mappable
( Mappable(..)
, sequence_
, traverse_
, mapM_
, forM
, forM_
) where
import Basement.Compat.Base
import qualified Data.Traversable
import Basement.BoxedArray (Array)
-- | Functors representing data structures that can be traversed from
-- left to right.
--
Mostly like base 's ` ` but applied to collections only .
--
class Functor collection => Mappable collection where
# MINIMAL traverse | sequenceA #
-- | Map each element of a structure to an action, evaluate these actions
-- from left to right, and collect the results. For a version that ignores
-- the results see 'Foundation.Collection.traverse_'.
traverse :: Applicative f => (a -> f b)
-> collection a
-> f (collection b)
traverse f = sequenceA . fmap f
-- | Evaluate each actions of the given collections, from left to right,
-- and collect the results. For a version that ignores the results, see
-- `Foundation.Collection.sequenceA_`
sequenceA :: Applicative f => collection (f a)
-> f (collection a)
sequenceA = traverse id
-- | Map each element of the collection to an action, evaluate these actions
-- from left to right, and collect the results. For a version that ignores
-- the results see 'Foundation.Collection.mapM_'.
mapM :: (Applicative m, Monad m) => (a -> m b) -> collection a -> m (collection b)
mapM = traverse
-- | Evaluate each actions of the given collections, from left to right,
-- and collect the results. For a version that ignores the results, see
` Foundation.Collection.sequence _ `
sequence :: (Applicative m, Monad m) => collection (m a) -> m (collection a)
sequence = sequenceA
-- | Map each element of a collection to an action, evaluate these
-- actions from left to right, and ignore the results. For a version
-- that doesn't ignore the results see 'Foundation.Collection.traverse`
traverse_ :: (Mappable col, Applicative f) => (a -> f b) -> col a -> f ()
traverse_ f col = traverse f col *> pure ()
-- | Evaluate each action in the collection from left to right, and
-- ignore the results. For a version that doesn't ignore the results
-- see 'Foundation.Collection.sequenceA'.
--sequenceA_ :: (Mappable col, Applicative f) => col (f a) -> f ()
--sequenceA_ col = sequenceA col *> pure ()
-- | Map each element of a collection to a monadic action, evaluate
-- these actions from left to right, and ignore the results. For a
-- version that doesn't ignore the results see
-- 'Foundation.Collection.mapM'.
mapM_ :: (Mappable col, Applicative m, Monad m) => (a -> m b) -> col a -> m ()
mapM_ f c = mapM f c *> return ()
-- | Evaluate each monadic action in the collection from left to right,
-- and ignore the results. For a version that doesn't ignore the
-- results see 'Foundation.Collection.sequence'.
sequence_ :: (Mappable col, Applicative m, Monad m) => col (m a) -> m ()
sequence_ c = sequence c *> return ()
-- | 'forM' is 'mapM' with its arguments flipped. For a version that
-- ignores the results see 'Foundation.Collection.forM_'.
forM :: (Mappable col, Applicative m, Monad m) => col a -> (a -> m b) -> m (col b)
forM = flip mapM
-- | 'forM_' is 'mapM_' with its arguments flipped. For a version that
does n't ignore the results see ' Foundation . ' .
forM_ :: (Mappable col, Applicative m, Monad m) => col a -> (a -> m b) -> m ()
forM_ = flip mapM_
----------------------------
-- Foldable instances
----------------------------
instance Mappable [] where
{-# INLINE traverse #-}
traverse = Data.Traversable.traverse
instance Mappable Array where
-- | TODO: to optimise
traverse f arr = fromList <$> traverse f (toList arr)
| null | https://raw.githubusercontent.com/haskell-foundation/foundation/58568e9f5368170d272000ecf16ef64fb91d0732/foundation/Foundation/Collection/Mappable.hs | haskell | |
Module : Basement.Mappable
License : BSD-style
Stability : experimental
Portability : portable
Class of collection that can be traversed from left to right,
performing an action on each element.
| Functors representing data structures that can be traversed from
left to right.
| Map each element of a structure to an action, evaluate these actions
from left to right, and collect the results. For a version that ignores
the results see 'Foundation.Collection.traverse_'.
| Evaluate each actions of the given collections, from left to right,
and collect the results. For a version that ignores the results, see
`Foundation.Collection.sequenceA_`
| Map each element of the collection to an action, evaluate these actions
from left to right, and collect the results. For a version that ignores
the results see 'Foundation.Collection.mapM_'.
| Evaluate each actions of the given collections, from left to right,
and collect the results. For a version that ignores the results, see
| Map each element of a collection to an action, evaluate these
actions from left to right, and ignore the results. For a version
that doesn't ignore the results see 'Foundation.Collection.traverse`
| Evaluate each action in the collection from left to right, and
ignore the results. For a version that doesn't ignore the results
see 'Foundation.Collection.sequenceA'.
sequenceA_ :: (Mappable col, Applicative f) => col (f a) -> f ()
sequenceA_ col = sequenceA col *> pure ()
| Map each element of a collection to a monadic action, evaluate
these actions from left to right, and ignore the results. For a
version that doesn't ignore the results see
'Foundation.Collection.mapM'.
| Evaluate each monadic action in the collection from left to right,
and ignore the results. For a version that doesn't ignore the
results see 'Foundation.Collection.sequence'.
| 'forM' is 'mapM' with its arguments flipped. For a version that
ignores the results see 'Foundation.Collection.forM_'.
| 'forM_' is 'mapM_' with its arguments flipped. For a version that
--------------------------
Foldable instances
--------------------------
# INLINE traverse #
| TODO: to optimise | Maintainer : < >
module Foundation.Collection.Mappable
( Mappable(..)
, sequence_
, traverse_
, mapM_
, forM
, forM_
) where
import Basement.Compat.Base
import qualified Data.Traversable
import Basement.BoxedArray (Array)
Mostly like base 's ` ` but applied to collections only .
class Functor collection => Mappable collection where
# MINIMAL traverse | sequenceA #
traverse :: Applicative f => (a -> f b)
-> collection a
-> f (collection b)
traverse f = sequenceA . fmap f
sequenceA :: Applicative f => collection (f a)
-> f (collection a)
sequenceA = traverse id
mapM :: (Applicative m, Monad m) => (a -> m b) -> collection a -> m (collection b)
mapM = traverse
` Foundation.Collection.sequence _ `
sequence :: (Applicative m, Monad m) => collection (m a) -> m (collection a)
sequence = sequenceA
traverse_ :: (Mappable col, Applicative f) => (a -> f b) -> col a -> f ()
traverse_ f col = traverse f col *> pure ()
mapM_ :: (Mappable col, Applicative m, Monad m) => (a -> m b) -> col a -> m ()
mapM_ f c = mapM f c *> return ()
sequence_ :: (Mappable col, Applicative m, Monad m) => col (m a) -> m ()
sequence_ c = sequence c *> return ()
forM :: (Mappable col, Applicative m, Monad m) => col a -> (a -> m b) -> m (col b)
forM = flip mapM
does n't ignore the results see ' Foundation . ' .
forM_ :: (Mappable col, Applicative m, Monad m) => col a -> (a -> m b) -> m ()
forM_ = flip mapM_
instance Mappable [] where
traverse = Data.Traversable.traverse
instance Mappable Array where
traverse f arr = fromList <$> traverse f (toList arr)
|
c56a0e7cbfbcec4658b05434dd6e4e41a2c9033a79bd3ff73959d716f2aa36cd | chiroptical/optics-by-example | Main.hs | module Main where
main :: IO ()
main = putStrLn "Chapter 5"
| null | https://raw.githubusercontent.com/chiroptical/optics-by-example/3ee33546ee18c3a6f5510eec17a69d34e750198e/chapter5/app/Main.hs | haskell | module Main where
main :: IO ()
main = putStrLn "Chapter 5"
| |
288254499b86baa62ce6d71969fedd4d538da33a034e13a4b5cd18c507991ef8 | incoherentsoftware/defect-process | ZIndex.hs | module Menu.ZIndex
( menuOverExpandedZIndex
, menuOverZIndex
, menuZIndex
) where
import Window.Graphics
menuOverExpandedZIndex = ZIndex 1 :: ZIndex
menuOverZIndex = ZIndex 2 :: ZIndex
menuZIndex = ZIndex 3 :: ZIndex
| null | https://raw.githubusercontent.com/incoherentsoftware/defect-process/15f2569e7d0e481c2e28c0ca3a5e72d2c049b667/src/Menu/ZIndex.hs | haskell | module Menu.ZIndex
( menuOverExpandedZIndex
, menuOverZIndex
, menuZIndex
) where
import Window.Graphics
menuOverExpandedZIndex = ZIndex 1 :: ZIndex
menuOverZIndex = ZIndex 2 :: ZIndex
menuZIndex = ZIndex 3 :: ZIndex
| |
b63e51eafdc4725b91501b08c07889bd1596a35481a692574b9ec5fb0ec1e0ac | telekons/one-more-re-nightmare | package.lisp | (defpackage :one-more-re-nightmare-tests
(:use :cl)
(:export #:run-tests #:regrind))
| null | https://raw.githubusercontent.com/telekons/one-more-re-nightmare/31c030888cb909d4563fa09e010ccdfcd73be2b8/Tests/package.lisp | lisp | (defpackage :one-more-re-nightmare-tests
(:use :cl)
(:export #:run-tests #:regrind))
| |
801c759a15a1b10826fc8e728ef9d475bb2b26e5aa0bd6ad01ecf068934cd120 | cirodrig/triolet | Control.hs | {-| Control flow analysis of parsed code.
-}
# LANGUAGE FlexibleContexts , UndecidableInstances #
module Parser.Control where
import Compiler.Hoopl
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Language.Python.Common.Pretty as Python
import qualified Language.Python.Common.PrettyAST as Python
import Text.PrettyPrint.HughesPJ
import Common.SourcePos
import Parser.ParserSyntax hiding(Stmt(..))
-- | A control flow source.
-- A source consists of a block label and an outgoing path.
data Source = Source !Label !FlowPath deriving(Eq, Show)
data FlowPath = JumpPath | TruePath | FalsePath deriving(Eq, Show)
-- | An outgoing control flow path.
--
SSA annotates a control flow path with variable IDs .
data Flow id = Flow
{ flowLabel :: !Label
, flowSSA :: !(Maybe [Var id])
}
noSSAFlow :: Label -> Flow id
noSSAFlow l = Flow l Nothing
-- | A control flow node. Performs an action, has inputs and outputs.
data Stmt id e x where
Assign LHS the value of RHS
Assign :: Parameter id -> LExpr id -> Stmt id O O
-- A group of function definitions.
-- Definition groups are annotated with their live-in variables
-- during live variable analysis. The live-in variables is the
-- union of the functions' live-ins, minus the functions themselves.
DefGroup :: [LCFunc id] -> !MLiveness -> Stmt id O O
Assert that some propositions hold
Assert :: [LExpr id] -> Stmt id O O
-- Type annotation
Require :: Var id -> LExpr id -> Stmt id O O
-- A control flow target, for incoming control flow.
--
-- Targets are annotated with parameters during SSA analysis.
Target :: Label -> !(Maybe [Var id]) -> Stmt id C O
-- Conditional branch
If :: LExpr id -> Flow id -> Flow id -> Stmt id O C
-- Direct jump
Jump :: Flow id -> Stmt id O C
-- Return from function
Return :: LExpr id -> Stmt id O C
instance NonLocal (Stmt id) where
entryLabel (Target l _) = l
successors (If _ t f) = [flowLabel t, flowLabel f]
successors (Jump l) = [flowLabel l]
successors (Return _) = []
newtype LStmt id e x = LStmt (Loc (Stmt id e x))
instance NonLocal (LStmt id) where
entryLabel x = entryLabel $ unLStmt x
successors x = successors $ unLStmt x
unLStmt :: LStmt id e x -> Stmt id e x
unLStmt (LStmt (Loc _ s)) = s
lStmt :: SourcePos -> Stmt id e x -> LStmt id e x
lStmt pos s = LStmt (Loc pos s)
type family FuncBody id
type instance FuncBody AST = CFG AST C C
-- | A control flow based function definition
data CFunc id =
CFunc
{ cfSignature :: !(FunSig id)
, cfLivenesses :: !MLivenesses
, cfEntry :: !Label
, cfBody :: FuncBody id
}
type LCFunc id = Loc (CFunc id)
type CFG id e x = Graph (LStmt id) e x
-- | Get the outgoing edges of a block
blockOutEdges :: Block (LStmt id) C C -> [(Source, Label)]
blockOutEdges block = let
!block_label = entryLabel block
!(_, _, JustC last) = blockToNodeList block
paths = case unLStmt last
of If _ t f -> [(TruePath, flowLabel t), (FalsePath, flowLabel f)]
Jump l -> [(JumpPath, flowLabel l)]
Return _ -> []
in [(Source block_label path, succ_label) | (path, succ_label) <- paths]
-------------------------------------------------------------------------------
-- Printing
class Ppr a where
ppr :: a -> Doc
-- | Locations are not shown when pretty-printing
instance Ppr a => Ppr (Loc a) where ppr (Loc _ x) = ppr x
instance Ppr Literal where
ppr (IntLit n) = text (show n)
ppr (FloatLit d) = text (show d)
ppr (ImaginaryLit d) = text (show d ++ "j")
ppr (BoolLit True) = text "True"
ppr (BoolLit False) = text "False"
ppr NoneLit = text "None"
instance Ppr (Var AST) where
ppr v = text (varName v ++ '\'' : show (varID v))
pprCommaList xs = punctuate comma $ map ppr xs
instance Ppr (Var a) => Ppr (Parameter a) where
ppr (Parameter v Nothing) = ppr v
ppr (Parameter v (Just e)) = ppr v <+> colon <+> ppr e
ppr (TupleParam ps) = parens (fsep $ pprCommaList ps)
instance Ppr (Var a) => Ppr (Expr a) where
ppr (Variable v) = ppr v
ppr (Literal l) = ppr l
ppr (Tuple es) = parens $ fsep $ pprCommaList es
ppr (List es) = brackets $ fsep $ pprCommaList es
ppr (Unary op e) = parens $ Python.pretty op <> ppr e
ppr (Binary op e1 e2) = parens $ ppr e1 <+> Python.pretty op <+> ppr e2
ppr (Subscript e es) = ppr e <> brackets (fsep $ pprCommaList es)
ppr (Slicing e ss) = ppr e <> brackets (fsep $ pprCommaList ss)
ppr (ListComp iter) = brackets $ ppr iter
ppr (Generator iter) = parens $ ppr iter
ppr (Call e es) = ppr e <> parens (fsep $ pprCommaList es)
ppr (Cond c t f) = parens $
ppr t <+> text "if" <+> ppr c <+> text "else" <+> ppr f
ppr (Lambda ps e) = text "lambda" <+> sep (pprCommaList ps) <> colon <+>
ppr e
ppr (Let p e b) = text "let" <+> ppr p <+> equals <+> ppr e <+>
text "in" <+> ppr b
instance Ppr (Var a) => Ppr (Slice a) where
ppr (SliceSlice _ l u s) =
let l_doc = maybe empty ppr l
u_doc = maybe empty ppr u
in case s
of Nothing -> l_doc <> colon <> u_doc
Just s1 -> l_doc <> colon <> u_doc <> colon <> maybe empty ppr s1
ppr (ExprSlice e) = ppr e
instance Ppr (Var a) => Ppr (IterFor a) where
ppr iter = let (b, i) = pprIterFor iter in b <+> sep i
pprIterFor (IterFor _ ps e c) =
let !(b, i) = pprComp c
clause = text "for" <+> hsep (pprCommaList ps) <+> text "in" <+> ppr e
in (b, clause : i)
pprIterIf (IterIf _ e c) =
let !(b, i) = pprComp c
clause = text "if" <+> ppr e
in (b, clause : i)
pprIterLet (IterLet _ p e c) =
let !(b, i) = pprComp c
clause = text "let" <+> ppr p <+> equals <+> ppr e
in (b, clause : i)
pprComp (CompFor i) = pprIterFor i
pprComp (CompIf i) = pprIterIf i
pprComp (CompLet i) = pprIterLet i
pprComp (CompBody e) = (ppr e, [])
instance Ppr (Var a) => Ppr (FunSig a) where
ppr (FunSig name ann pragma params r_ann) = let
annotation = case ann
of Nothing -> empty
Just a -> text "<forall annotation>"
r_annotation = case r_ann
of Nothing -> empty
Just a -> text "->" <+> ppr a
parameters = parens (sep $ pprCommaList params) <+> r_annotation
in annotation $$ text "def" <+> ppr name <> parameters
instance (Ppr (Var a), Ppr (FuncBody a)) => Ppr (CFunc a) where
ppr func = let
signature = ppr (cfSignature func) <> colon
entry_point = text "goto" <+> ppr (cfEntry func)
body = ppr (cfBody func)
in signature <+> entry_point $$ body
instance (Ppr (Var a), Ppr (FuncBody a)) => Ppr (Graph' Block (LStmt a) C C) where
ppr (GMany NothingO blocks NothingO) =
vcat [ppr l $$ nest 2 (ppr b) | (l, b) <- mapToList blocks]
instance Ppr (Var a) => Ppr (Flow a) where
ppr (Flow l Nothing) = ppr l
ppr (Flow l (Just vs)) = ppr l <+> text "with" <+> hsep (pprCommaList vs)
instance Ppr Source where ppr s = text (show s)
instance Ppr Label where ppr l = text (show l)
-- | Blocks are pretty-printable.
-- The statements in a block are listed vertically.
instance (Ppr (Var a), Ppr (FuncBody a)) => Ppr (Block (LStmt a) C C) where
ppr b = foldBlockNodesB prepend_node b empty
where
prepend_node :: forall e x. LStmt a e x -> Doc -> Doc
prepend_node n d = ppr n $$ d
pprLiveness s = hang (text "liveness:") 4 $ fsep [ppr v | v <- Set.toList s]
instance (Ppr (Var a), Ppr (FuncBody a)) => Ppr (LStmt a e x) where
ppr stmt =
case unLStmt stmt
of Assign p e -> hang (ppr p <+> equals) 4 (ppr e)
DefGroup fs l -> let lv_doc = maybe empty pprLiveness l
fs_doc = vcat $ map ppr fs
in hang (text "defgroup:") 4 (lv_doc $$ fs_doc)
Assert es -> text "assert" <+> sep (pprCommaList es)
Require v t -> text "require" <+> ppr v <+> colon <+> ppr t
Target l ps -> let ps_doc = case ps
of Nothing -> empty
Just vs -> text "with" <+>
sep (pprCommaList vs)
in text "<target>" <+> ppr l <+> ps_doc
If c t f -> text "if" <+> ppr c $$
text "then" <+> ppr t $$
text "else" <+> ppr f
Jump l -> text "goto" <+> ppr l
Return e -> text "return" <+> ppr e
| null | https://raw.githubusercontent.com/cirodrig/triolet/e515a1dc0d6b3e546320eac7b71fb36cea5b53d0/src/program/Parser/Control.hs | haskell | | Control flow analysis of parsed code.
| A control flow source.
A source consists of a block label and an outgoing path.
| An outgoing control flow path.
| A control flow node. Performs an action, has inputs and outputs.
A group of function definitions.
Definition groups are annotated with their live-in variables
during live variable analysis. The live-in variables is the
union of the functions' live-ins, minus the functions themselves.
Type annotation
A control flow target, for incoming control flow.
Targets are annotated with parameters during SSA analysis.
Conditional branch
Direct jump
Return from function
| A control flow based function definition
| Get the outgoing edges of a block
-----------------------------------------------------------------------------
Printing
| Locations are not shown when pretty-printing
| Blocks are pretty-printable.
The statements in a block are listed vertically. |
# LANGUAGE FlexibleContexts , UndecidableInstances #
module Parser.Control where
import Compiler.Hoopl
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified Language.Python.Common.Pretty as Python
import qualified Language.Python.Common.PrettyAST as Python
import Text.PrettyPrint.HughesPJ
import Common.SourcePos
import Parser.ParserSyntax hiding(Stmt(..))
data Source = Source !Label !FlowPath deriving(Eq, Show)
data FlowPath = JumpPath | TruePath | FalsePath deriving(Eq, Show)
SSA annotates a control flow path with variable IDs .
data Flow id = Flow
{ flowLabel :: !Label
, flowSSA :: !(Maybe [Var id])
}
noSSAFlow :: Label -> Flow id
noSSAFlow l = Flow l Nothing
data Stmt id e x where
Assign LHS the value of RHS
Assign :: Parameter id -> LExpr id -> Stmt id O O
DefGroup :: [LCFunc id] -> !MLiveness -> Stmt id O O
Assert that some propositions hold
Assert :: [LExpr id] -> Stmt id O O
Require :: Var id -> LExpr id -> Stmt id O O
Target :: Label -> !(Maybe [Var id]) -> Stmt id C O
If :: LExpr id -> Flow id -> Flow id -> Stmt id O C
Jump :: Flow id -> Stmt id O C
Return :: LExpr id -> Stmt id O C
instance NonLocal (Stmt id) where
entryLabel (Target l _) = l
successors (If _ t f) = [flowLabel t, flowLabel f]
successors (Jump l) = [flowLabel l]
successors (Return _) = []
newtype LStmt id e x = LStmt (Loc (Stmt id e x))
instance NonLocal (LStmt id) where
entryLabel x = entryLabel $ unLStmt x
successors x = successors $ unLStmt x
unLStmt :: LStmt id e x -> Stmt id e x
unLStmt (LStmt (Loc _ s)) = s
lStmt :: SourcePos -> Stmt id e x -> LStmt id e x
lStmt pos s = LStmt (Loc pos s)
type family FuncBody id
type instance FuncBody AST = CFG AST C C
data CFunc id =
CFunc
{ cfSignature :: !(FunSig id)
, cfLivenesses :: !MLivenesses
, cfEntry :: !Label
, cfBody :: FuncBody id
}
type LCFunc id = Loc (CFunc id)
type CFG id e x = Graph (LStmt id) e x
blockOutEdges :: Block (LStmt id) C C -> [(Source, Label)]
blockOutEdges block = let
!block_label = entryLabel block
!(_, _, JustC last) = blockToNodeList block
paths = case unLStmt last
of If _ t f -> [(TruePath, flowLabel t), (FalsePath, flowLabel f)]
Jump l -> [(JumpPath, flowLabel l)]
Return _ -> []
in [(Source block_label path, succ_label) | (path, succ_label) <- paths]
class Ppr a where
ppr :: a -> Doc
instance Ppr a => Ppr (Loc a) where ppr (Loc _ x) = ppr x
instance Ppr Literal where
ppr (IntLit n) = text (show n)
ppr (FloatLit d) = text (show d)
ppr (ImaginaryLit d) = text (show d ++ "j")
ppr (BoolLit True) = text "True"
ppr (BoolLit False) = text "False"
ppr NoneLit = text "None"
instance Ppr (Var AST) where
ppr v = text (varName v ++ '\'' : show (varID v))
pprCommaList xs = punctuate comma $ map ppr xs
instance Ppr (Var a) => Ppr (Parameter a) where
ppr (Parameter v Nothing) = ppr v
ppr (Parameter v (Just e)) = ppr v <+> colon <+> ppr e
ppr (TupleParam ps) = parens (fsep $ pprCommaList ps)
instance Ppr (Var a) => Ppr (Expr a) where
ppr (Variable v) = ppr v
ppr (Literal l) = ppr l
ppr (Tuple es) = parens $ fsep $ pprCommaList es
ppr (List es) = brackets $ fsep $ pprCommaList es
ppr (Unary op e) = parens $ Python.pretty op <> ppr e
ppr (Binary op e1 e2) = parens $ ppr e1 <+> Python.pretty op <+> ppr e2
ppr (Subscript e es) = ppr e <> brackets (fsep $ pprCommaList es)
ppr (Slicing e ss) = ppr e <> brackets (fsep $ pprCommaList ss)
ppr (ListComp iter) = brackets $ ppr iter
ppr (Generator iter) = parens $ ppr iter
ppr (Call e es) = ppr e <> parens (fsep $ pprCommaList es)
ppr (Cond c t f) = parens $
ppr t <+> text "if" <+> ppr c <+> text "else" <+> ppr f
ppr (Lambda ps e) = text "lambda" <+> sep (pprCommaList ps) <> colon <+>
ppr e
ppr (Let p e b) = text "let" <+> ppr p <+> equals <+> ppr e <+>
text "in" <+> ppr b
instance Ppr (Var a) => Ppr (Slice a) where
ppr (SliceSlice _ l u s) =
let l_doc = maybe empty ppr l
u_doc = maybe empty ppr u
in case s
of Nothing -> l_doc <> colon <> u_doc
Just s1 -> l_doc <> colon <> u_doc <> colon <> maybe empty ppr s1
ppr (ExprSlice e) = ppr e
instance Ppr (Var a) => Ppr (IterFor a) where
ppr iter = let (b, i) = pprIterFor iter in b <+> sep i
pprIterFor (IterFor _ ps e c) =
let !(b, i) = pprComp c
clause = text "for" <+> hsep (pprCommaList ps) <+> text "in" <+> ppr e
in (b, clause : i)
pprIterIf (IterIf _ e c) =
let !(b, i) = pprComp c
clause = text "if" <+> ppr e
in (b, clause : i)
pprIterLet (IterLet _ p e c) =
let !(b, i) = pprComp c
clause = text "let" <+> ppr p <+> equals <+> ppr e
in (b, clause : i)
pprComp (CompFor i) = pprIterFor i
pprComp (CompIf i) = pprIterIf i
pprComp (CompLet i) = pprIterLet i
pprComp (CompBody e) = (ppr e, [])
instance Ppr (Var a) => Ppr (FunSig a) where
ppr (FunSig name ann pragma params r_ann) = let
annotation = case ann
of Nothing -> empty
Just a -> text "<forall annotation>"
r_annotation = case r_ann
of Nothing -> empty
Just a -> text "->" <+> ppr a
parameters = parens (sep $ pprCommaList params) <+> r_annotation
in annotation $$ text "def" <+> ppr name <> parameters
instance (Ppr (Var a), Ppr (FuncBody a)) => Ppr (CFunc a) where
ppr func = let
signature = ppr (cfSignature func) <> colon
entry_point = text "goto" <+> ppr (cfEntry func)
body = ppr (cfBody func)
in signature <+> entry_point $$ body
instance (Ppr (Var a), Ppr (FuncBody a)) => Ppr (Graph' Block (LStmt a) C C) where
ppr (GMany NothingO blocks NothingO) =
vcat [ppr l $$ nest 2 (ppr b) | (l, b) <- mapToList blocks]
instance Ppr (Var a) => Ppr (Flow a) where
ppr (Flow l Nothing) = ppr l
ppr (Flow l (Just vs)) = ppr l <+> text "with" <+> hsep (pprCommaList vs)
instance Ppr Source where ppr s = text (show s)
instance Ppr Label where ppr l = text (show l)
instance (Ppr (Var a), Ppr (FuncBody a)) => Ppr (Block (LStmt a) C C) where
ppr b = foldBlockNodesB prepend_node b empty
where
prepend_node :: forall e x. LStmt a e x -> Doc -> Doc
prepend_node n d = ppr n $$ d
pprLiveness s = hang (text "liveness:") 4 $ fsep [ppr v | v <- Set.toList s]
instance (Ppr (Var a), Ppr (FuncBody a)) => Ppr (LStmt a e x) where
ppr stmt =
case unLStmt stmt
of Assign p e -> hang (ppr p <+> equals) 4 (ppr e)
DefGroup fs l -> let lv_doc = maybe empty pprLiveness l
fs_doc = vcat $ map ppr fs
in hang (text "defgroup:") 4 (lv_doc $$ fs_doc)
Assert es -> text "assert" <+> sep (pprCommaList es)
Require v t -> text "require" <+> ppr v <+> colon <+> ppr t
Target l ps -> let ps_doc = case ps
of Nothing -> empty
Just vs -> text "with" <+>
sep (pprCommaList vs)
in text "<target>" <+> ppr l <+> ps_doc
If c t f -> text "if" <+> ppr c $$
text "then" <+> ppr t $$
text "else" <+> ppr f
Jump l -> text "goto" <+> ppr l
Return e -> text "return" <+> ppr e
|
ca6c9e5f5827b23ee155ae0151b3e4925310c0bfee929df5cb44c15f9cc41e7d | plumatic/grab-bag | empirical_gradient.clj | (ns flop.empirical-gradient
"An empirical test to verify the consistency of the objective value and gradient
Computes an empirical approximation to the gradient at point x:
empirical-grad(x) = (f(x + dx) - f(x)) / dx
and reports the differences between the empirical gradient and
the gradient defined in the function (grad f(x))."
(:use plumbing.core)
(:require
[schema.core :as s]
[hiphip.double :as dbl]
[plumbing.logging :as log]
[flop.optimize :as optimize]))
(s/defschema DimensionDiff
"Represents the gradient and empirical gradient along a single dimension"
[(s/one s/Num "gradient") (s/one s/Num "empirical gradient")])
(s/defn report
"Displays a report of the difference between the gradient and empirical gradient"
[diffs :- [DimensionDiff]]
(doseq [diff diffs]
(let [[dimension [grad emp-grad]] (indexed diff)]
(log/infof "GRADIENT-TEST: dim %03d . %s vs %s . diff = %s\n"
dimension grad emp-grad (- grad emp-grad)))))
(defnk empirical-gradient :- [DimensionDiff]
"Computes the gradient and empirical gradient of `f` at point `xs`
by independentally varying each of the `num-dims` dimensions of
initial point `xs` by infinitesimal amount `dx`."
[num-dims :- Long
f :- (s/=> optimize/ValueGradientPair doubles)
{dx 1e-4}
{xs (double-array (repeatedly num-dims #(rand)))}]
(let [empirical-grad (dbl/amake [i num-dims] 0.0)
[obj computed-grad] (f xs)]
(doseq [i (range num-dims)]
(dbl/ainc xs i dx)
(let [[new-obj _] (f xs)]
(dbl/aset empirical-grad i (/ (- new-obj obj) dx)))
(dbl/ainc xs i (- dx)))
(for [i (range num-dims)]
[(dbl/aget computed-grad i) (dbl/aget empirical-grad i)])))
| null | https://raw.githubusercontent.com/plumatic/grab-bag/a15e943322fbbf6f00790ce5614ba6f90de1a9b5/lib/flop/src/flop/empirical_gradient.clj | clojure | (ns flop.empirical-gradient
"An empirical test to verify the consistency of the objective value and gradient
Computes an empirical approximation to the gradient at point x:
empirical-grad(x) = (f(x + dx) - f(x)) / dx
and reports the differences between the empirical gradient and
the gradient defined in the function (grad f(x))."
(:use plumbing.core)
(:require
[schema.core :as s]
[hiphip.double :as dbl]
[plumbing.logging :as log]
[flop.optimize :as optimize]))
(s/defschema DimensionDiff
"Represents the gradient and empirical gradient along a single dimension"
[(s/one s/Num "gradient") (s/one s/Num "empirical gradient")])
(s/defn report
"Displays a report of the difference between the gradient and empirical gradient"
[diffs :- [DimensionDiff]]
(doseq [diff diffs]
(let [[dimension [grad emp-grad]] (indexed diff)]
(log/infof "GRADIENT-TEST: dim %03d . %s vs %s . diff = %s\n"
dimension grad emp-grad (- grad emp-grad)))))
(defnk empirical-gradient :- [DimensionDiff]
"Computes the gradient and empirical gradient of `f` at point `xs`
by independentally varying each of the `num-dims` dimensions of
initial point `xs` by infinitesimal amount `dx`."
[num-dims :- Long
f :- (s/=> optimize/ValueGradientPair doubles)
{dx 1e-4}
{xs (double-array (repeatedly num-dims #(rand)))}]
(let [empirical-grad (dbl/amake [i num-dims] 0.0)
[obj computed-grad] (f xs)]
(doseq [i (range num-dims)]
(dbl/ainc xs i dx)
(let [[new-obj _] (f xs)]
(dbl/aset empirical-grad i (/ (- new-obj obj) dx)))
(dbl/ainc xs i (- dx)))
(for [i (range num-dims)]
[(dbl/aget computed-grad i) (dbl/aget empirical-grad i)])))
| |
334e3865d1f406d62c082acec432e6551392e1fc01c67b50ff4f8e0ef0fc7876 | ralt/lxc-wrapper | ip.lisp | (in-package #:lxc-wrapper-test)
(5am:in-suite ip)
(5am:test test-line-matches-ip
(5am:is-true (lxc-wrapper::line-matches-ip "10.0.3.4 foo"))
(5am:is-false (lxc-wrapper::line-matches-ip "10.10.3.4 bar"))
(5am:is-false (lxc-wrapper::line-matches-ip "foo bar")))
(5am:test test-vector-to-list
(5am:is-true (eq (type-of (lxc-wrapper::vector-to-list #(1 2 3))) 'cons)))
(5am:test test-line-get-ip
(5am:is-true (equal '(10 0 3 1) (lxc-wrapper::line-get-ip "10.0.3.1 foobar"))))
(5am:test test-generate-next-ip
(5am:is-true (equal '(10 0 3 3) (lxc-wrapper::generate-next-ip
'((10 0 3 1) (10 0 3 2))))))
(5am:test test-new-ip
(5am:is-true (equal '(10 0 3 3) (lxc-wrapper::new-ip '(10 0 3 2) 3 3))))
(5am:test test-path-lxc-interfaces
(5am:is-true (equal #p"/var/lib/lxc/foo/rootfs/etc/network/interfaces"
(lxc-wrapper::path-lxc-interfaces "foo"))))
| null | https://raw.githubusercontent.com/ralt/lxc-wrapper/16f34432e2cfa8dcb7fa1a5383930143085dc1e0/test/ip.lisp | lisp | (in-package #:lxc-wrapper-test)
(5am:in-suite ip)
(5am:test test-line-matches-ip
(5am:is-true (lxc-wrapper::line-matches-ip "10.0.3.4 foo"))
(5am:is-false (lxc-wrapper::line-matches-ip "10.10.3.4 bar"))
(5am:is-false (lxc-wrapper::line-matches-ip "foo bar")))
(5am:test test-vector-to-list
(5am:is-true (eq (type-of (lxc-wrapper::vector-to-list #(1 2 3))) 'cons)))
(5am:test test-line-get-ip
(5am:is-true (equal '(10 0 3 1) (lxc-wrapper::line-get-ip "10.0.3.1 foobar"))))
(5am:test test-generate-next-ip
(5am:is-true (equal '(10 0 3 3) (lxc-wrapper::generate-next-ip
'((10 0 3 1) (10 0 3 2))))))
(5am:test test-new-ip
(5am:is-true (equal '(10 0 3 3) (lxc-wrapper::new-ip '(10 0 3 2) 3 3))))
(5am:test test-path-lxc-interfaces
(5am:is-true (equal #p"/var/lib/lxc/foo/rootfs/etc/network/interfaces"
(lxc-wrapper::path-lxc-interfaces "foo"))))
| |
084a0c4c31afb6a95b0b7c8c1fd652dd672bd7fec23f5ed8defd86e7d755e922 | mzp/coq-ide-for-ios | decl_proof_instr.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
$ I d : decl_proof_instr.mli 13323 2010 - 07 - 24 15:57:30Z herbelin $
open Refiner
open Names
open Term
open Tacmach
open Decl_mode
val go_to_proof_mode: unit -> unit
val return_from_tactic_mode: unit -> unit
val register_automation_tac: tactic -> unit
val automation_tac : tactic
val daimon_subtree: pftreestate -> pftreestate
val concl_refiner:
Termops.meta_type_map -> constr -> Proof_type.goal sigma -> constr
val do_instr: Decl_expr.raw_proof_instr -> pftreestate -> pftreestate
val proof_instr: Decl_expr.raw_proof_instr -> unit
val tcl_change_info : Decl_mode.pm_info -> tactic
val mark_proof_tree_as_done : Proof_type.proof_tree -> Proof_type.proof_tree
val mark_as_done : pftreestate -> pftreestate
val execute_cases :
Names.name ->
Decl_mode.per_info ->
(Term.constr -> Proof_type.tactic) ->
(Names.Idset.elt * (Term.constr option * Term.constr list) list) list ->
Term.constr list -> int -> Decl_mode.split_tree -> Proof_type.tactic
val tree_of_pats :
identifier * (int * int) -> (Rawterm.cases_pattern*recpath) list list ->
split_tree
val add_branch :
identifier * (int * int) -> (Rawterm.cases_pattern*recpath) list list ->
split_tree -> split_tree
val append_branch :
identifier *(int * int) -> int -> (Rawterm.cases_pattern*recpath) list list ->
(Names.Idset.t * Decl_mode.split_tree) option ->
(Names.Idset.t * Decl_mode.split_tree) option
val append_tree :
identifier * (int * int) -> int -> (Rawterm.cases_pattern*recpath) list list ->
split_tree -> split_tree
val build_dep_clause : Term.types Decl_expr.statement list ->
Decl_expr.proof_pattern ->
Decl_mode.per_info ->
(Term.types Decl_expr.statement, Term.types Decl_expr.or_thesis)
Decl_expr.hyp list -> Proof_type.goal Tacmach.sigma -> Term.types
val register_dep_subcase :
Names.identifier * (int * int) ->
Environ.env ->
Decl_mode.per_info ->
Rawterm.cases_pattern -> Decl_mode.elim_kind -> Decl_mode.elim_kind
val thesis_for : Term.constr ->
Term.constr -> Decl_mode.per_info -> Environ.env -> Term.constr
val close_previous_case : pftreestate -> pftreestate
val pop_stacks :
(Names.identifier *
(Term.constr option * Term.constr list) list) list ->
(Names.identifier *
(Term.constr option * Term.constr list) list) list
val push_head : Term.constr ->
Names.Idset.t ->
(Names.identifier *
(Term.constr option * Term.constr list) list) list ->
(Names.identifier *
(Term.constr option * Term.constr list) list) list
val push_arg : Term.constr ->
(Names.identifier *
(Term.constr option * Term.constr list) list) list ->
(Names.identifier *
(Term.constr option * Term.constr list) list) list
val hrec_for:
Names.identifier ->
Decl_mode.per_info -> Proof_type.goal Tacmach.sigma ->
Names.identifier -> Term.constr
val consider_match :
bool ->
(Names.Idset.elt*bool) list ->
Names.Idset.elt list ->
(Term.types Decl_expr.statement, Term.types) Decl_expr.hyp list ->
Proof_type.tactic
val init_tree:
Names.Idset.t ->
Names.inductive ->
int option * Declarations.wf_paths ->
(int ->
(int option * Declarations.recarg Rtree.t) array ->
(Names.Idset.t * Decl_mode.split_tree) option) ->
Decl_mode.split_tree
val set_refine : (Evd.open_constr -> Proof_type.tactic) -> unit
| null | https://raw.githubusercontent.com/mzp/coq-ide-for-ios/4cdb389bbecd7cdd114666a8450ecf5b5f0391d3/coqlib/tactics/decl_proof_instr.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
********************************************************************** | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * INRIA - CNRS - LIX - LRI - PPS - Copyright 1999 - 2010
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
$ I d : decl_proof_instr.mli 13323 2010 - 07 - 24 15:57:30Z herbelin $
open Refiner
open Names
open Term
open Tacmach
open Decl_mode
val go_to_proof_mode: unit -> unit
val return_from_tactic_mode: unit -> unit
val register_automation_tac: tactic -> unit
val automation_tac : tactic
val daimon_subtree: pftreestate -> pftreestate
val concl_refiner:
Termops.meta_type_map -> constr -> Proof_type.goal sigma -> constr
val do_instr: Decl_expr.raw_proof_instr -> pftreestate -> pftreestate
val proof_instr: Decl_expr.raw_proof_instr -> unit
val tcl_change_info : Decl_mode.pm_info -> tactic
val mark_proof_tree_as_done : Proof_type.proof_tree -> Proof_type.proof_tree
val mark_as_done : pftreestate -> pftreestate
val execute_cases :
Names.name ->
Decl_mode.per_info ->
(Term.constr -> Proof_type.tactic) ->
(Names.Idset.elt * (Term.constr option * Term.constr list) list) list ->
Term.constr list -> int -> Decl_mode.split_tree -> Proof_type.tactic
val tree_of_pats :
identifier * (int * int) -> (Rawterm.cases_pattern*recpath) list list ->
split_tree
val add_branch :
identifier * (int * int) -> (Rawterm.cases_pattern*recpath) list list ->
split_tree -> split_tree
val append_branch :
identifier *(int * int) -> int -> (Rawterm.cases_pattern*recpath) list list ->
(Names.Idset.t * Decl_mode.split_tree) option ->
(Names.Idset.t * Decl_mode.split_tree) option
val append_tree :
identifier * (int * int) -> int -> (Rawterm.cases_pattern*recpath) list list ->
split_tree -> split_tree
val build_dep_clause : Term.types Decl_expr.statement list ->
Decl_expr.proof_pattern ->
Decl_mode.per_info ->
(Term.types Decl_expr.statement, Term.types Decl_expr.or_thesis)
Decl_expr.hyp list -> Proof_type.goal Tacmach.sigma -> Term.types
val register_dep_subcase :
Names.identifier * (int * int) ->
Environ.env ->
Decl_mode.per_info ->
Rawterm.cases_pattern -> Decl_mode.elim_kind -> Decl_mode.elim_kind
val thesis_for : Term.constr ->
Term.constr -> Decl_mode.per_info -> Environ.env -> Term.constr
val close_previous_case : pftreestate -> pftreestate
val pop_stacks :
(Names.identifier *
(Term.constr option * Term.constr list) list) list ->
(Names.identifier *
(Term.constr option * Term.constr list) list) list
val push_head : Term.constr ->
Names.Idset.t ->
(Names.identifier *
(Term.constr option * Term.constr list) list) list ->
(Names.identifier *
(Term.constr option * Term.constr list) list) list
val push_arg : Term.constr ->
(Names.identifier *
(Term.constr option * Term.constr list) list) list ->
(Names.identifier *
(Term.constr option * Term.constr list) list) list
val hrec_for:
Names.identifier ->
Decl_mode.per_info -> Proof_type.goal Tacmach.sigma ->
Names.identifier -> Term.constr
val consider_match :
bool ->
(Names.Idset.elt*bool) list ->
Names.Idset.elt list ->
(Term.types Decl_expr.statement, Term.types) Decl_expr.hyp list ->
Proof_type.tactic
val init_tree:
Names.Idset.t ->
Names.inductive ->
int option * Declarations.wf_paths ->
(int ->
(int option * Declarations.recarg Rtree.t) array ->
(Names.Idset.t * Decl_mode.split_tree) option) ->
Decl_mode.split_tree
val set_refine : (Evd.open_constr -> Proof_type.tactic) -> unit
|
6dfb3f15f885308942bd46d5dc5e558e38bd0155ff7c3550bcec80164048863a | exercism/erlang | strain_tests.erl | -module(strain_tests).
-include_lib("erl_exercism/include/exercism.hrl").
-include_lib("eunit/include/eunit.hrl").
empty_keep_test() ->
?assertEqual([], strain:keep(fun(X) -> X < 10 end, [])).
keep_everything_test() ->
?assertEqual([1, 2, 3], strain:keep(fun(X) -> X < 10 end, [1,2,3])).
keep_first_last_test() ->
?assertEqual([1, 3], strain:keep(fun(X) -> odd(X) end, [1,2,3])).
keep_nothin_test() ->
?assertEqual([], strain:keep(fun(X) -> even(X) end, [1,3,5,7])).
keep_neither_first_nor_last_test() ->
?assertEqual([2], strain:keep(fun(X) -> even(X) end, [1,2,3])).
keep_strings_test() ->
Str = ["apple", "zebra", "banana", "zombies", "cherimoya", "zealot"],
?assertEqual(
["zebra", "zombies", "zealot"],
strain:keep(fun(S) -> string:sub_string(S, 1,1) =:= "z" end, Str)).
empty_discard_test() ->
?assertEqual([], strain:discard(fun(X) -> X < 10 end, [])).
discard_everything_test() ->
?assertEqual([], strain:discard(fun(X) -> X < 10 end, [1,2,3])).
discard_first_and_last_test() ->
?assertEqual([2], strain:discard(fun(X) -> odd(X) end, [1,2,3])).
discard_nothing_test() ->
?assertEqual([1, 3, 5, 7], strain:discard(fun(X) -> even(X) end, [1,3,5,7])).
discard_neither_first_nor_last_test() ->
?assertEqual([1, 3], strain:discard(fun(X) -> even(X) end, [1,2,3])).
discard_strings_test() ->
Str = ["apple", "zebra", "banana", "zombies", "cherimoya", "zealot"],
?assertEqual(
["apple", "banana", "cherimoya"],
strain:discard(fun(S) -> string:sub_string(S, 1,1) =:= "z" end, Str)).
odd(N) -> N rem 2 > 0.
even(N) -> N rem 2 =:= 0.
| null | https://raw.githubusercontent.com/exercism/erlang/57ac2707dae643682950715e74eb271f732e2100/exercises/practice/strain/test/strain_tests.erl | erlang | -module(strain_tests).
-include_lib("erl_exercism/include/exercism.hrl").
-include_lib("eunit/include/eunit.hrl").
empty_keep_test() ->
?assertEqual([], strain:keep(fun(X) -> X < 10 end, [])).
keep_everything_test() ->
?assertEqual([1, 2, 3], strain:keep(fun(X) -> X < 10 end, [1,2,3])).
keep_first_last_test() ->
?assertEqual([1, 3], strain:keep(fun(X) -> odd(X) end, [1,2,3])).
keep_nothin_test() ->
?assertEqual([], strain:keep(fun(X) -> even(X) end, [1,3,5,7])).
keep_neither_first_nor_last_test() ->
?assertEqual([2], strain:keep(fun(X) -> even(X) end, [1,2,3])).
keep_strings_test() ->
Str = ["apple", "zebra", "banana", "zombies", "cherimoya", "zealot"],
?assertEqual(
["zebra", "zombies", "zealot"],
strain:keep(fun(S) -> string:sub_string(S, 1,1) =:= "z" end, Str)).
empty_discard_test() ->
?assertEqual([], strain:discard(fun(X) -> X < 10 end, [])).
discard_everything_test() ->
?assertEqual([], strain:discard(fun(X) -> X < 10 end, [1,2,3])).
discard_first_and_last_test() ->
?assertEqual([2], strain:discard(fun(X) -> odd(X) end, [1,2,3])).
discard_nothing_test() ->
?assertEqual([1, 3, 5, 7], strain:discard(fun(X) -> even(X) end, [1,3,5,7])).
discard_neither_first_nor_last_test() ->
?assertEqual([1, 3], strain:discard(fun(X) -> even(X) end, [1,2,3])).
discard_strings_test() ->
Str = ["apple", "zebra", "banana", "zombies", "cherimoya", "zealot"],
?assertEqual(
["apple", "banana", "cherimoya"],
strain:discard(fun(S) -> string:sub_string(S, 1,1) =:= "z" end, Str)).
odd(N) -> N rem 2 > 0.
even(N) -> N rem 2 =:= 0.
| |
1d6cf723330b02f1007dbb092c7f8af31ca6ba224bcdfb3b44e3335d8540f986 | ovotech/ring-jwt | project.clj | (defproject ovotech/ring-jwt "2.3.0"
:description "JWT middleware for Ring"
:url "-jwt"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[cheshire "5.10.0"]
[commons-codec "1.15"]
[org.clojure/clojure "1.10.1"]
[com.auth0/java-jwt "3.12.0"]
[com.auth0/jwks-rsa "0.15.0"]]
:profiles {:dev {:dependencies [[org.clojure/test.check "1.1.0"]
[kelveden/clj-wiremock "1.5.7"]
[org.slf4j/slf4j-simple "1.7.30"]]
:eftest {:multithread? false}
:plugins [[lein-eftest "0.4.3"]]}})
| null | https://raw.githubusercontent.com/ovotech/ring-jwt/6977423b57a16bcf67761531dabea1edfd3b3371/project.clj | clojure | (defproject ovotech/ring-jwt "2.3.0"
:description "JWT middleware for Ring"
:url "-jwt"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[cheshire "5.10.0"]
[commons-codec "1.15"]
[org.clojure/clojure "1.10.1"]
[com.auth0/java-jwt "3.12.0"]
[com.auth0/jwks-rsa "0.15.0"]]
:profiles {:dev {:dependencies [[org.clojure/test.check "1.1.0"]
[kelveden/clj-wiremock "1.5.7"]
[org.slf4j/slf4j-simple "1.7.30"]]
:eftest {:multithread? false}
:plugins [[lein-eftest "0.4.3"]]}})
| |
3f87323fabf8f4dc740f3b86de022c2735ea3f1e4acf165aa662a1c3ac58f4cb | greglook/clj-arrangement | project.clj | (defproject mvxcvi/arrangement "2.1.0"
:description "Total-order comparator for Clojure(Script)."
:url "-arrangement"
:license {:name "Public Domain"
:url "/"}
:deploy-branches ["main"]
:aliases
{"kaocha" ["with-profile" "+kaocha" "run" "-m" "kaocha.runner"]
"coverage" ["with-profile" "+test,+coverage" "cloverage"]}
:profiles
{:dev
{:dependencies
[[org.clojure/clojure "1.11.1"]
[org.clojure/clojurescript "1.11.60"]
[org.clojure/test.check "1.1.1"]
[criterium "0.4.6"]]}
:kaocha
{:dependencies
[[lambdaisland/kaocha "1.71.1119"]
[lambdaisland/kaocha-cljs "1.4.130"]]}
:coverage
{:plugins [[lein-cloverage "1.2.4"]]
:dependencies [[org.clojure/tools.reader "1.3.6"]]}})
| null | https://raw.githubusercontent.com/greglook/clj-arrangement/4428958f130e93a170735c4b792a801041fdf23e/project.clj | clojure | (defproject mvxcvi/arrangement "2.1.0"
:description "Total-order comparator for Clojure(Script)."
:url "-arrangement"
:license {:name "Public Domain"
:url "/"}
:deploy-branches ["main"]
:aliases
{"kaocha" ["with-profile" "+kaocha" "run" "-m" "kaocha.runner"]
"coverage" ["with-profile" "+test,+coverage" "cloverage"]}
:profiles
{:dev
{:dependencies
[[org.clojure/clojure "1.11.1"]
[org.clojure/clojurescript "1.11.60"]
[org.clojure/test.check "1.1.1"]
[criterium "0.4.6"]]}
:kaocha
{:dependencies
[[lambdaisland/kaocha "1.71.1119"]
[lambdaisland/kaocha-cljs "1.4.130"]]}
:coverage
{:plugins [[lein-cloverage "1.2.4"]]
:dependencies [[org.clojure/tools.reader "1.3.6"]]}})
| |
49bca90529792044e5396bee771e3be364cadce730b2354cc4b91e3a44f1079b | OpenBookStore/openbookstore | bookshops.lisp | (in-package :bookshops)
(defun init ()
"Init i18n, connect to the DB,..."
(bookshops.models:connect)
;; Disabled until we ship the translation files into the binary release.
;; (i18n-load)
(log:config :error))
(defun handle-parser-error (c)
(format t "Argument error: ~a~&" (opts:option c))
;; XXX: probably don't quit.
(uiop:quit 1))
(defparameter +version+
(let ((version (asdf/component:component-version (asdf:find-system :bookshops)))
(directory (asdf:system-source-directory :bookshops)))
(or (ignore-errors
(uiop:with-current-directory (directory)
(multiple-value-bind (current-commit)
(uiop:run-program (list "git" "describe" "--always")
:output '(:string :stripped t))
(concatenate 'string
version
(format nil "-~a" current-commit)))))
version))
"The version number as in the asd appended with the current commit id.")
(defun search-books (query)
"Search on datasources, get a list of hash-tables, transform them to book objects,
and check if some already exist in our DB. In that case, update them."
(let ((res (books query)))
(loop for bk in res
collect (find-existing (make-book
:title (access bk :title)
:isbn (access bk :isbn)
:authors (access bk :authors)
:details-url (access bk :details-url)
:cover-url (access bk :cover-url)
:publisher (access bk :publisher)
:date-publication (access bk :date-publication)
:price (access bk :price)
:datasource (access bk :datasource))
:update t))))
(defun print-system-info (&optional (stream t))
;; see also -info
(format stream "~&OS: ~a ~a~&" (software-type) (software-version))
(format stream "~&Lisp: ~a ~a~&" (lisp-implementation-type) (lisp-implementation-version))
#+asdf
(format stream "~&ASDF: ~a~&" (asdf:asdf-version))
#-asdf
(format stream "NO ASDF!")
# + quicklisp
;; (format stream "~&Quicklisp: ~a~&" (ql-dist:all-dists)) ;; not for release?
#-quicklisp
(format stream "!! Quicklisp is not installed !!"))
(defun main ()
(unless (uiop:file-exists-p (bookshops.models::db-name))
(uiop:format! t "Creating the database into ~a...~&" (bookshops.models::db-name))
(bookshops.models::initialize-database))
(opts:define-opts
(:name :help
:description "print this help and exit."
:short #\h
:long "help")
(:name :version
:description "print the version number and exit."
:short #\v
:long "version")
(:name :verbose
:description "print debug info."
:short #\V
:long "verbose")
(:name :interactive
:description "enter the interactive prompt."
:short #\i
:long "interactive")
(:name :web
:description "run the web application."
:short #\w
:long "web")
(:name :port
:arg-parser #'parse-integer
:description "set the port for the web server. You can also use the OBS_PORT environment variable."
:short #\p
:long "port")
(:name :manage
:arg-parser #'identity
:description "Run a management command, such as createsuperuser"
:long "manage"))
(multiple-value-bind (options free-args)
(handler-bind ((error #'handle-parser-error))
(opts:get-opts))
(format t "OpenBookStore version ~a~&" +version+)
(when (getf options :version)
(print-system-info)
(uiop:quit))
(when (getf options :help)
(opts:describe)
(uiop:quit))
(when (getf options :verbose)
(print-system-info))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Management commands.
;; Create a superuser with admin rights.
(when (getf options :manage)
(let ((command (getf options :manage)))
(when (equal "createsuperuser" (str:downcase (str:trim command)))
(format t "Initializing...~&")
Connect to the DB .
(init)
(uiop:format! t "Running management command ~a…~&" command)
;; XXX: avoid circular dependencies:
;; we now want to call bookshops.manager, but this package relies on models,
;; we can't load it before. Fix.
(eval (read-from-string "(bookshops.manager::add-superuser)")))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Run the interactive terminal application.
(when (getf options :interactive)
(format t "Initializing...~&")
(init)
(setf replic:*prompt* (cl-ansi-text:green "bookshops > "))
(setf replic:*prompt-prefix* (format nil "(~a) " (name (default-place))))
;; create commands from the exported functions and variables.
(replic.completion:functions-to-commands :replic.base)
(setf replic:*help-preamble* "With cl-bookshops you can search for books by keywords or isbn, add some to your stock and explore it.")
(replic.completion:functions-to-commands :bookshops.commands)
(replic.completion:functions-to-commands :bookshops.manager)
;; define completions.
;; (push '("add" . *results*) replic:*args-completions*)
(replic:repl)
(handler-case
(when free-args
(search-books (str:join " " free-args)))
(error (c)
(progn
(format *error-output* "~a~&" c)
(uiop:quit 1)))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Run the web app.
(when (getf options :web)
(handler-case
(progn
(bookshops/web::start-app :port (or (getf options :port)
(ignore-errors (parse-integer (uiop:getenv "OBS_PORT")))
bookshops/web::*port*))
;; Without this, the binary exits immediately after having
;; run the web server in its thread.
(bt:join-thread
(find-if (lambda (th)
(search "hunchentoot" (bt:thread-name th)))
(bt:all-threads))))
(usocket:address-in-use-error ()
(format *error-output* "This port is already taken. You can use the --port option or the OBS_PORT environment variable to specify a new port.~&"))
#+sbcl
(sb-sys:interactive-interrupt ()
(format *error-output* "~&Bye!~&")
(uiop:quit))
(error (c)
(format *error-output* "~&An error occured: ~a~&" c)
;; XXX: quit also kills the current lisp process, which is
;; annoying when developing with a REPL.
( uiop : quit 1 )
)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Search on data sources, print results and exit.
(when free-args
(handler-case
(progn
(init)
(bookshops.models::pprint-books (search-books (str:join " " free-args))))
(error (c)
(progn
(format *error-output* "~a~&" c)
(uiop:quit 1)))))
))
(defun run ()
"Call main, print a backtrace if an error occurs."
(handler-bind ((error (lambda (c)
(format *error-output* "~&An error occured: ~a~&" c)
(format *error-output* "~&Backtrace:~&")
(trivial-backtrace:print-backtrace c))))
(main)))
| null | https://raw.githubusercontent.com/OpenBookStore/openbookstore/30b46dc5b89acb36e334b63467ace61ee527ac17/src/bookshops.lisp | lisp | Disabled until we ship the translation files into the binary release.
(i18n-load)
XXX: probably don't quit.
see also -info
(format stream "~&Quicklisp: ~a~&" (ql-dist:all-dists)) ;; not for release?
Management commands.
Create a superuser with admin rights.
XXX: avoid circular dependencies:
we now want to call bookshops.manager, but this package relies on models,
we can't load it before. Fix.
Run the interactive terminal application.
create commands from the exported functions and variables.
define completions.
(push '("add" . *results*) replic:*args-completions*)
Run the web app.
Without this, the binary exits immediately after having
run the web server in its thread.
XXX: quit also kills the current lisp process, which is
annoying when developing with a REPL.
Search on data sources, print results and exit. | (in-package :bookshops)
(defun init ()
"Init i18n, connect to the DB,..."
(bookshops.models:connect)
(log:config :error))
(defun handle-parser-error (c)
(format t "Argument error: ~a~&" (opts:option c))
(uiop:quit 1))
(defparameter +version+
(let ((version (asdf/component:component-version (asdf:find-system :bookshops)))
(directory (asdf:system-source-directory :bookshops)))
(or (ignore-errors
(uiop:with-current-directory (directory)
(multiple-value-bind (current-commit)
(uiop:run-program (list "git" "describe" "--always")
:output '(:string :stripped t))
(concatenate 'string
version
(format nil "-~a" current-commit)))))
version))
"The version number as in the asd appended with the current commit id.")
(defun search-books (query)
"Search on datasources, get a list of hash-tables, transform them to book objects,
and check if some already exist in our DB. In that case, update them."
(let ((res (books query)))
(loop for bk in res
collect (find-existing (make-book
:title (access bk :title)
:isbn (access bk :isbn)
:authors (access bk :authors)
:details-url (access bk :details-url)
:cover-url (access bk :cover-url)
:publisher (access bk :publisher)
:date-publication (access bk :date-publication)
:price (access bk :price)
:datasource (access bk :datasource))
:update t))))
(defun print-system-info (&optional (stream t))
(format stream "~&OS: ~a ~a~&" (software-type) (software-version))
(format stream "~&Lisp: ~a ~a~&" (lisp-implementation-type) (lisp-implementation-version))
#+asdf
(format stream "~&ASDF: ~a~&" (asdf:asdf-version))
#-asdf
(format stream "NO ASDF!")
# + quicklisp
#-quicklisp
(format stream "!! Quicklisp is not installed !!"))
(defun main ()
(unless (uiop:file-exists-p (bookshops.models::db-name))
(uiop:format! t "Creating the database into ~a...~&" (bookshops.models::db-name))
(bookshops.models::initialize-database))
(opts:define-opts
(:name :help
:description "print this help and exit."
:short #\h
:long "help")
(:name :version
:description "print the version number and exit."
:short #\v
:long "version")
(:name :verbose
:description "print debug info."
:short #\V
:long "verbose")
(:name :interactive
:description "enter the interactive prompt."
:short #\i
:long "interactive")
(:name :web
:description "run the web application."
:short #\w
:long "web")
(:name :port
:arg-parser #'parse-integer
:description "set the port for the web server. You can also use the OBS_PORT environment variable."
:short #\p
:long "port")
(:name :manage
:arg-parser #'identity
:description "Run a management command, such as createsuperuser"
:long "manage"))
(multiple-value-bind (options free-args)
(handler-bind ((error #'handle-parser-error))
(opts:get-opts))
(format t "OpenBookStore version ~a~&" +version+)
(when (getf options :version)
(print-system-info)
(uiop:quit))
(when (getf options :help)
(opts:describe)
(uiop:quit))
(when (getf options :verbose)
(print-system-info))
(when (getf options :manage)
(let ((command (getf options :manage)))
(when (equal "createsuperuser" (str:downcase (str:trim command)))
(format t "Initializing...~&")
Connect to the DB .
(init)
(uiop:format! t "Running management command ~a…~&" command)
(eval (read-from-string "(bookshops.manager::add-superuser)")))))
(when (getf options :interactive)
(format t "Initializing...~&")
(init)
(setf replic:*prompt* (cl-ansi-text:green "bookshops > "))
(setf replic:*prompt-prefix* (format nil "(~a) " (name (default-place))))
(replic.completion:functions-to-commands :replic.base)
(setf replic:*help-preamble* "With cl-bookshops you can search for books by keywords or isbn, add some to your stock and explore it.")
(replic.completion:functions-to-commands :bookshops.commands)
(replic.completion:functions-to-commands :bookshops.manager)
(replic:repl)
(handler-case
(when free-args
(search-books (str:join " " free-args)))
(error (c)
(progn
(format *error-output* "~a~&" c)
(uiop:quit 1)))))
(when (getf options :web)
(handler-case
(progn
(bookshops/web::start-app :port (or (getf options :port)
(ignore-errors (parse-integer (uiop:getenv "OBS_PORT")))
bookshops/web::*port*))
(bt:join-thread
(find-if (lambda (th)
(search "hunchentoot" (bt:thread-name th)))
(bt:all-threads))))
(usocket:address-in-use-error ()
(format *error-output* "This port is already taken. You can use the --port option or the OBS_PORT environment variable to specify a new port.~&"))
#+sbcl
(sb-sys:interactive-interrupt ()
(format *error-output* "~&Bye!~&")
(uiop:quit))
(error (c)
(format *error-output* "~&An error occured: ~a~&" c)
( uiop : quit 1 )
)))
(when free-args
(handler-case
(progn
(init)
(bookshops.models::pprint-books (search-books (str:join " " free-args))))
(error (c)
(progn
(format *error-output* "~a~&" c)
(uiop:quit 1)))))
))
(defun run ()
"Call main, print a backtrace if an error occurs."
(handler-bind ((error (lambda (c)
(format *error-output* "~&An error occured: ~a~&" c)
(format *error-output* "~&Backtrace:~&")
(trivial-backtrace:print-backtrace c))))
(main)))
|
f2dab11b6b4fdbf3213619b165c992e5fb8b031c10298587a1c5a98cc7579af5 | spurious/sagittarius-scheme-mirror | complex.scm | -*- mode : scheme ; coding : utf-8 ; -*-
#!core
(library (scheme complex)
(export angle imag-part magnitude make-polar make-rectangular real-part)
(import (rnrs)))
| null | https://raw.githubusercontent.com/spurious/sagittarius-scheme-mirror/53f104188934109227c01b1e9a9af5312f9ce997/sitelib/scheme/complex.scm | scheme | coding : utf-8 ; -*- | #!core
(library (scheme complex)
(export angle imag-part magnitude make-polar make-rectangular real-part)
(import (rnrs)))
|
dfedad847415fcc2809f6feaa650a4bf3f6a5862cf2738a7d1c3b155dacb5f93 | lambdaclass/holiday_pinger | channel_test_handler.erl | -module(channel_test_handler).
-export([init/3,
rest_init/2,
allowed_methods/2,
content_types_accepted/2,
is_authorized/2,
from_json/2]).
init(_Transport, _Req, []) ->
{upgrade, protocol, cowboy_rest}.
rest_init(Req, _Opts) ->
{Name, Req2} = cowboy_req:binding(name, Req),
State = #{name => Name},
{ok, Req2, State}.
is_authorized(Req, State) ->
req_utils:is_authorized(bearer, Req, State).
allowed_methods(Req, State) ->
{[<<"POST">>, <<"HEAD">>, <<"OPTIONS">>], Req, State}.
content_types_accepted(Req, State) ->
{[{<<"application/json">>, from_json}], Req, State}.
from_json(Req, State = #{user := User, email := Email, name := Name}) ->
case db_channel:get(Email, Name) of
{ok, Channel} ->
remind_router:send_test(User, Channel, erlang:date()),
{true, Req, State};
_ ->
req_utils:error_response(404, <<"Channel not found.">>, Req)
end.
| null | https://raw.githubusercontent.com/lambdaclass/holiday_pinger/a8a6d1e28de57cf24d6205d275981a718305b351/src/handlers/channel_test_handler.erl | erlang | -module(channel_test_handler).
-export([init/3,
rest_init/2,
allowed_methods/2,
content_types_accepted/2,
is_authorized/2,
from_json/2]).
init(_Transport, _Req, []) ->
{upgrade, protocol, cowboy_rest}.
rest_init(Req, _Opts) ->
{Name, Req2} = cowboy_req:binding(name, Req),
State = #{name => Name},
{ok, Req2, State}.
is_authorized(Req, State) ->
req_utils:is_authorized(bearer, Req, State).
allowed_methods(Req, State) ->
{[<<"POST">>, <<"HEAD">>, <<"OPTIONS">>], Req, State}.
content_types_accepted(Req, State) ->
{[{<<"application/json">>, from_json}], Req, State}.
from_json(Req, State = #{user := User, email := Email, name := Name}) ->
case db_channel:get(Email, Name) of
{ok, Channel} ->
remind_router:send_test(User, Channel, erlang:date()),
{true, Req, State};
_ ->
req_utils:error_response(404, <<"Channel not found.">>, Req)
end.
| |
2be99f1b949e74f5abe497b08085ae733ee3c585feeb840cf232a53e0c026cf7 | theodormoroianu/SecondYearCourses | HaskellChurchMonad_20210415161458.hs | module HaskellChurchMonad where
A boolean is any way to choose between two alternatives
newtype CBool t = CBool {cIf :: t -> t -> t}
toBool :: CBool Bool -> Bool
toBool b = cIf b True False
The boolean constant true always chooses the first alternative
cTrue :: CBool t
cTrue = CBool $ \t f -> t
The boolean constant false always chooses the second alternative
cFalse :: CBool t
cFalse = CBool $ \t f -> f
--The boolean negation switches the alternatives
cNot :: CBool t -> CBool t
cNot b = CBool $ \t f -> cIf b f t
--The boolean conjunction can be built as a conditional
(&&:) :: CBool t -> CBool t -> CBool t
b1 &&: b2 = CBool $ \t f -> cIf b1 (cIf b2 t f) f
infixr 3 &&:
--The boolean disjunction can be built as a conditional
(||:) :: CBool t -> CBool t -> CBool t
b1 ||: b2 = CBool $ \t f -> cIf b1 t (cIf b2 t f)
infixr 2 ||:
-- a pair is a way to compute something based on the values
-- contained within the pair.
newtype CPair a b t = CPair { cOn :: (a -> b -> t) -> t }
toPair :: CPair a b (a,b) -> (a,b)
toPair p = cOn p (,)
builds a pair out of two values as an object which , when given
--a function to be applied on the values, it will apply it on them.
cPair :: a -> b -> CPair a b t
cPair a b = CPair $ \f -> f a b
first projection uses the function selecting first component on a pair
cFst :: CPair a b a -> a
cFst p = cOn p (\f s -> f)
second projection
cSnd :: CPair a b b -> b
cSnd p = cOn p (\f s -> s)
-- A natural number is any way to iterate a function s a number of times
-- over an initial value z
newtype CNat t = CNat { cFor :: (t -> t) -> t -> t }
-- An instance to show CNats as regular natural numbers
toNat :: CNat Integer -> Integer
toNat n = cFor n (1 +) 0
--0 will iterate the function s 0 times over z, producing z
c0 :: CNat t
c0 = CNat $ \s z -> z
1 is the the function s iterated 1 times over z , that is , z
c1 :: CNat t
c1 = CNat $ \s z -> s z
--Successor n either
- applies s one more time in addition to what n does
-- - iterates s n times over (s z)
cS :: CNat t -> CNat t
cS n = CNat $ \s z -> s (cFor n s z)
--Addition of m and n is done by iterating s n times over m
(+:) :: CNat t -> CNat t -> CNat t
m +: n = CNat $ \s -> cFor n s . cFor m s
infixl 6 +:
--Multiplication of m and n can be done by composing n and m
(*:) :: CNat t -> CNat t -> CNat t
m *: n = CNat $ cFor n . cFor m
infixl 7 *:
m *: n = CNat $ \s z -> (cFor n (cFor m s)) z
--Exponentiation of m and n can be done by applying n to m
(^:) :: CNat t -> CNat (CNat t) -> CNat t
m ^: n = \ s z -> cFor n (\t -> m *:) (s z)
infixr 8 ^:
--Testing whether a value is 0 can be done through iteration
-- using a function constantly false and an initial value true
cIs0 : : CNat - > CBool
cIs0 = \n - > cFor n ( \ _ - > cFalse ) cTrue
--Predecessor ( evaluating to 0 for 0 ) can be defined iterating
--over pairs , starting from an initial value ( 0 , 0 )
cPred : : CNat - > CNat
cPred = undefined
--substraction from m n ( evaluating to 0 if m < n ) is repeated application
-- of the predeccesor function
(-: ) : : CNat - > CNat - > CNat
(-: ) = \m n - > cFor n cPred m
-- Transform a value into a CNat ( should yield c0 for nums < = 0 )
cNat : : ( Ord p , ) = > p - > CNat
cNat n = undefined
-- We can define an instance Num CNat which will allow us to see any
-- integer constant as a CNat ( e.g. 12 : : CNat ) and also use regular
-- arithmetic
instance Num CNat where
( + ) = ( + :)
( * ) = ( * :)
( - ) = (-: )
abs = i d
signum n = ( cIs0 n ) 0 1
fromInteger = cNat
-- m is less than ( or equal to ) n if when substracting n from m we get 0
( < = :) : : CNat - > CNat - > CBool
( < = :) = undefined
infix 4 < = :
( > = :) : : CNat - > CNat - > CBool
( > = :) = \m n - > n < = : m
infix 4 > = :
( < :) : : CNat - > CNat - > CBool
( < :) = \m n - > cNot ( m > = : n )
infix 4 < :
( > :) : : CNat - > CNat - > CBool
( > :) = \m n - > n < : m
infix 4 > :
-- equality on naturals can be defined my means of comparisons
(= = :) : : CNat - > CNat - > CBool
(= = :) = undefined
--Testing whether a value is 0 can be done through iteration
-- using a function constantly false and an initial value true
cIs0 :: CNat -> CBool
cIs0 = \n -> cFor n (\_ -> cFalse) cTrue
--Predecessor (evaluating to 0 for 0) can be defined iterating
--over pairs, starting from an initial value (0, 0)
cPred :: CNat -> CNat
cPred = undefined
--substraction from m n (evaluating to 0 if m < n) is repeated application
-- of the predeccesor function
(-:) :: CNat -> CNat -> CNat
(-:) = \m n -> cFor n cPred m
-- Transform a Num value into a CNat (should yield c0 for nums <= 0)
cNat :: (Ord p, Num p) => p -> CNat
cNat n = undefined
-- We can define an instance Num CNat which will allow us to see any
-- integer constant as a CNat (e.g. 12 :: CNat ) and also use regular
-- arithmetic
instance Num CNat where
(+) = (+:)
(*) = (*:)
(-) = (-:)
abs = id
signum n = cIf (cIs0 n) 0 1
fromInteger = cNat
-- m is less than (or equal to) n if when substracting n from m we get 0
(<=:) :: CNat -> CNat -> CBool
(<=:) = undefined
infix 4 <=:
(>=:) :: CNat -> CNat -> CBool
(>=:) = \m n -> n <=: m
infix 4 >=:
(<:) :: CNat -> CNat -> CBool
(<:) = \m n -> cNot (m >=: n)
infix 4 <:
(>:) :: CNat -> CNat -> CBool
(>:) = \m n -> n <: m
infix 4 >:
-- equality on naturals can be defined my means of comparisons
(==:) :: CNat -> CNat -> CBool
(==:) = undefined
-} | null | https://raw.githubusercontent.com/theodormoroianu/SecondYearCourses/5e359e6a7cf588a527d27209bf53b4ce6b8d5e83/FLP/Laboratoare/Lab%209/.history/HaskellChurchMonad_20210415161458.hs | haskell | The boolean negation switches the alternatives
The boolean conjunction can be built as a conditional
The boolean disjunction can be built as a conditional
a pair is a way to compute something based on the values
contained within the pair.
a function to be applied on the values, it will apply it on them.
A natural number is any way to iterate a function s a number of times
over an initial value z
An instance to show CNats as regular natural numbers
0 will iterate the function s 0 times over z, producing z
Successor n either
- iterates s n times over (s z)
Addition of m and n is done by iterating s n times over m
Multiplication of m and n can be done by composing n and m
Exponentiation of m and n can be done by applying n to m
Testing whether a value is 0 can be done through iteration
using a function constantly false and an initial value true
Predecessor ( evaluating to 0 for 0 ) can be defined iterating
over pairs , starting from an initial value ( 0 , 0 )
substraction from m n ( evaluating to 0 if m < n ) is repeated application
of the predeccesor function
Transform a value into a CNat ( should yield c0 for nums < = 0 )
We can define an instance Num CNat which will allow us to see any
integer constant as a CNat ( e.g. 12 : : CNat ) and also use regular
arithmetic
m is less than ( or equal to ) n if when substracting n from m we get 0
equality on naturals can be defined my means of comparisons
Testing whether a value is 0 can be done through iteration
using a function constantly false and an initial value true
Predecessor (evaluating to 0 for 0) can be defined iterating
over pairs, starting from an initial value (0, 0)
substraction from m n (evaluating to 0 if m < n) is repeated application
of the predeccesor function
Transform a Num value into a CNat (should yield c0 for nums <= 0)
We can define an instance Num CNat which will allow us to see any
integer constant as a CNat (e.g. 12 :: CNat ) and also use regular
arithmetic
m is less than (or equal to) n if when substracting n from m we get 0
equality on naturals can be defined my means of comparisons | module HaskellChurchMonad where
A boolean is any way to choose between two alternatives
newtype CBool t = CBool {cIf :: t -> t -> t}
toBool :: CBool Bool -> Bool
toBool b = cIf b True False
The boolean constant true always chooses the first alternative
cTrue :: CBool t
cTrue = CBool $ \t f -> t
The boolean constant false always chooses the second alternative
cFalse :: CBool t
cFalse = CBool $ \t f -> f
cNot :: CBool t -> CBool t
cNot b = CBool $ \t f -> cIf b f t
(&&:) :: CBool t -> CBool t -> CBool t
b1 &&: b2 = CBool $ \t f -> cIf b1 (cIf b2 t f) f
infixr 3 &&:
(||:) :: CBool t -> CBool t -> CBool t
b1 ||: b2 = CBool $ \t f -> cIf b1 t (cIf b2 t f)
infixr 2 ||:
newtype CPair a b t = CPair { cOn :: (a -> b -> t) -> t }
toPair :: CPair a b (a,b) -> (a,b)
toPair p = cOn p (,)
builds a pair out of two values as an object which , when given
cPair :: a -> b -> CPair a b t
cPair a b = CPair $ \f -> f a b
first projection uses the function selecting first component on a pair
cFst :: CPair a b a -> a
cFst p = cOn p (\f s -> f)
second projection
cSnd :: CPair a b b -> b
cSnd p = cOn p (\f s -> s)
newtype CNat t = CNat { cFor :: (t -> t) -> t -> t }
toNat :: CNat Integer -> Integer
toNat n = cFor n (1 +) 0
c0 :: CNat t
c0 = CNat $ \s z -> z
1 is the the function s iterated 1 times over z , that is , z
c1 :: CNat t
c1 = CNat $ \s z -> s z
- applies s one more time in addition to what n does
cS :: CNat t -> CNat t
cS n = CNat $ \s z -> s (cFor n s z)
(+:) :: CNat t -> CNat t -> CNat t
m +: n = CNat $ \s -> cFor n s . cFor m s
infixl 6 +:
(*:) :: CNat t -> CNat t -> CNat t
m *: n = CNat $ cFor n . cFor m
infixl 7 *:
m *: n = CNat $ \s z -> (cFor n (cFor m s)) z
(^:) :: CNat t -> CNat (CNat t) -> CNat t
m ^: n = \ s z -> cFor n (\t -> m *:) (s z)
infixr 8 ^:
cIs0 : : CNat - > CBool
cIs0 = \n - > cFor n ( \ _ - > cFalse ) cTrue
cPred : : CNat - > CNat
cPred = undefined
(-: ) : : CNat - > CNat - > CNat
(-: ) = \m n - > cFor n cPred m
cNat : : ( Ord p , ) = > p - > CNat
cNat n = undefined
instance Num CNat where
( + ) = ( + :)
( * ) = ( * :)
( - ) = (-: )
abs = i d
signum n = ( cIs0 n ) 0 1
fromInteger = cNat
( < = :) : : CNat - > CNat - > CBool
( < = :) = undefined
infix 4 < = :
( > = :) : : CNat - > CNat - > CBool
( > = :) = \m n - > n < = : m
infix 4 > = :
( < :) : : CNat - > CNat - > CBool
( < :) = \m n - > cNot ( m > = : n )
infix 4 < :
( > :) : : CNat - > CNat - > CBool
( > :) = \m n - > n < : m
infix 4 > :
(= = :) : : CNat - > CNat - > CBool
(= = :) = undefined
cIs0 :: CNat -> CBool
cIs0 = \n -> cFor n (\_ -> cFalse) cTrue
cPred :: CNat -> CNat
cPred = undefined
(-:) :: CNat -> CNat -> CNat
(-:) = \m n -> cFor n cPred m
cNat :: (Ord p, Num p) => p -> CNat
cNat n = undefined
instance Num CNat where
(+) = (+:)
(*) = (*:)
(-) = (-:)
abs = id
signum n = cIf (cIs0 n) 0 1
fromInteger = cNat
(<=:) :: CNat -> CNat -> CBool
(<=:) = undefined
infix 4 <=:
(>=:) :: CNat -> CNat -> CBool
(>=:) = \m n -> n <=: m
infix 4 >=:
(<:) :: CNat -> CNat -> CBool
(<:) = \m n -> cNot (m >=: n)
infix 4 <:
(>:) :: CNat -> CNat -> CBool
(>:) = \m n -> n <: m
infix 4 >:
(==:) :: CNat -> CNat -> CBool
(==:) = undefined
-} |
34b69bed930dd91983acf475df7b292366af58bc386cdffb14f5583d171ff9ce | charlieg/Sparser | html-actions.lisp | ;;; -*- Mode:LISP; Syntax:Common-Lisp; Package:(SPARSER LISP) -*-
copyright ( c ) 1995 -- all rights reserved
;;;
;;; File: "html actions"
;;; Module: "grammar;rules:SGML:"
Version : September 1995
initiated 9/20/95
(in-package :sparser)
(defun do-html-tag (tag-edge
pos-before-open pos-after-close
pos-after-open pos-before-close
layout )
;; called from Do-paired-punctuation-interior via
the : angle - brackets hook . This case handles both the first of
;; a paired tag and empty tags.
(let ((category (edge-referent tag-edge)))
(let ((i (define-or-find-individual category
:start-index (pos-token-index (pos-edge-starts-at tag-edge)))))
(bind-variable 'start-index (pos-token-index pos-before-open) i)
(bind-variable 'interior-start (pos-token-index pos-after-close) i)
(unless (eq layout :single-span)
(break "Stub: there are attributes to be collected"))
(when (value-of 'start-action i)
(break "Stub: run start-action of html tag"))
(let ((edge (make-edge-over-long-span
pos-before-open
pos-after-close
category
:rule :html-tag
:form (edge-form tag-edge) ;; section-marker
:referent i )))
(when (itypep i 'paired-html-tag)
(push-on-pending-left-opener i edge))
edge ))))
(defun do-html-tag/end (close-edge
pos-before-open pos-after-close
pos-after-open pos-before-close
layout )
;; called from Do-paired-punctuation-interior via
;; the :angle-brackets hook. This case handles both the end case
;; of a paired tag.
(let ((category (edge-referent close-edge)))
(multiple-value-bind (i open-edge)
(pop-off-top-pending-left-opener)
(unless (and (typep i 'individual)
(itypep i category))
(break "Unbalanced document? The most recent open tag~
~%is a ~A, but we've just closed~
~%a ~A tag.~%" category i)
(return-from Do-html-tag/end nil))
(bind-variable 'end-index (pos-token-index pos-after-close) i)
(bind-variable 'interior-end (pos-token-index pos-before-open) i)
(when (value-of 'end-action i)
(break "Stub: run end-action of html tag"))
)))
| null | https://raw.githubusercontent.com/charlieg/Sparser/b9bb7d01d2e40f783f3214fc104062db3d15e608/Sparser/code/s/grammar/rules/SGML/html-actions.lisp | lisp | -*- Mode:LISP; Syntax:Common-Lisp; Package:(SPARSER LISP) -*-
File: "html actions"
Module: "grammar;rules:SGML:"
called from Do-paired-punctuation-interior via
a paired tag and empty tags.
section-marker
called from Do-paired-punctuation-interior via
the :angle-brackets hook. This case handles both the end case
of a paired tag. | copyright ( c ) 1995 -- all rights reserved
Version : September 1995
initiated 9/20/95
(in-package :sparser)
(defun do-html-tag (tag-edge
pos-before-open pos-after-close
pos-after-open pos-before-close
layout )
the : angle - brackets hook . This case handles both the first of
(let ((category (edge-referent tag-edge)))
(let ((i (define-or-find-individual category
:start-index (pos-token-index (pos-edge-starts-at tag-edge)))))
(bind-variable 'start-index (pos-token-index pos-before-open) i)
(bind-variable 'interior-start (pos-token-index pos-after-close) i)
(unless (eq layout :single-span)
(break "Stub: there are attributes to be collected"))
(when (value-of 'start-action i)
(break "Stub: run start-action of html tag"))
(let ((edge (make-edge-over-long-span
pos-before-open
pos-after-close
category
:rule :html-tag
:referent i )))
(when (itypep i 'paired-html-tag)
(push-on-pending-left-opener i edge))
edge ))))
(defun do-html-tag/end (close-edge
pos-before-open pos-after-close
pos-after-open pos-before-close
layout )
(let ((category (edge-referent close-edge)))
(multiple-value-bind (i open-edge)
(pop-off-top-pending-left-opener)
(unless (and (typep i 'individual)
(itypep i category))
(break "Unbalanced document? The most recent open tag~
~%is a ~A, but we've just closed~
~%a ~A tag.~%" category i)
(return-from Do-html-tag/end nil))
(bind-variable 'end-index (pos-token-index pos-after-close) i)
(bind-variable 'interior-end (pos-token-index pos-before-open) i)
(when (value-of 'end-action i)
(break "Stub: run end-action of html tag"))
)))
|
9344240a841581028695750b1261b1f4822c34fc42519de75b0a02ab157d08d0 | zellige/zellige | Config.hs | {-# LANGUAGE CPP #-}
# LANGUAGE DataKinds #
# LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NoMonomorphismRestriction #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE TypeFamilies #-}
{-# LANGUAGE TypeOperators #-}
# OPTIONS_GHC -fno - warn - orphans #
module Data.Geometry.Types.Config where
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Lazy as ByteStringLazy
import qualified Data.Char as Char
import qualified Data.Monoid as Monoid
import qualified Data.Semigroup as Semigroup
import qualified Data.String as String
import qualified Data.Text as Text
import qualified Data.Text.Encoding as TextEncoding
import qualified Data.Word as Word
import qualified Data.Geometry.Types.Geography as TypesGeography
data Config = Config
{ _name :: ByteStringLazy.ByteString
, _gtc :: TypesGeography.GoogleTileCoordsInt
, _buffer :: Word.Word
, _extents :: Int
, _quantizePixels :: Int
, _simplify :: SimplificationAlgorithm
, _version :: Word.Word
} deriving (Show, Eq)
mkConfig :: Text.Text -> TypesGeography.Pixels -> (TypesGeography.Pixels, TypesGeography.Pixels) -> TypesGeography.Pixels -> TypesGeography.Pixels -> TypesGeography.Pixels -> SimplificationAlgorithm -> Config
mkConfig name z (x, y) buffer extents quantizePixels simplify = Config ((ByteStringLazy.fromStrict . TextEncoding.encodeUtf8) name) (TypesGeography.mkGoogleTileCoordsInt z x y)
(fromIntegral buffer) (fromIntegral extents) (TypesGeography.toInt quantizePixels) simplify TypesGeography.defaultVersion
-- Zoom Config
data ZoomConfig = ZoomConfig
{ _zcExtents :: Int
, _zcQuantize :: Int
, _zcBBox :: TypesGeography.BoundingBox
, _zcSimplify :: SimplificationAlgorithm
} deriving (Eq, Show)
-- Simplification
data SimplificationAlgorithm = NoAlgorithm
| Visvalingam
| DouglasPeucker
deriving (Eq, Show)
instance String.IsString SimplificationAlgorithm where
fromString s =
case Char.toLower <$> s of
"visvalingam" -> Visvalingam
"douglas-peucker" -> DouglasPeucker
_ -> NoAlgorithm
instance Aeson.ToJSON SimplificationAlgorithm where
toJSON algo =
Aeson.String $ case algo of
NoAlgorithm -> "none"
Visvalingam -> "visvalingam"
DouglasPeucker -> "douglas-peucker"
instance Aeson.FromJSON SimplificationAlgorithm where
parseJSON = Aeson.withText "SimplificationAlgorithm" $ \case
"none" -> pure NoAlgorithm
"visvalingam" -> pure Visvalingam
"douglas-peucker" -> pure DouglasPeucker
_ -> fail "Unknown algorithm"
-- Options
data Options = Options
{ oVersion :: Monoid.Last Int
, oName :: Monoid.Last String
, oExtent :: Monoid.Last Int
} deriving (Show, Eq)
instance Semigroup.Semigroup Options where
(<>) x y = Options
{ oVersion = oVersion x Monoid.<> oVersion y
, oName = oName x Monoid.<> oName y
, oExtent = oExtent x Monoid.<> oExtent y
}
instance Monoid Options where
mempty = Options mempty mempty mempty
#if !(MIN_VERSION_base(4,11,0))
-- this is redundant starting with base-4.11 / GHC 8.4
if you want to avoid CPP , you can define ` mappend = ( < > ) ` unconditionally
mappend = (<>)
#endif
| null | https://raw.githubusercontent.com/zellige/zellige/87e6dab11ac4c1843009043580f14422a1d83ebf/src/Data/Geometry/Types/Config.hs | haskell | # LANGUAGE CPP #
# LANGUAGE OverloadedStrings #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
Zoom Config
Simplification
Options
this is redundant starting with base-4.11 / GHC 8.4 | # LANGUAGE DataKinds #
# LANGUAGE FlexibleInstances #
# LANGUAGE LambdaCase #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NoMonomorphismRestriction #
# OPTIONS_GHC -fno - warn - orphans #
module Data.Geometry.Types.Config where
import qualified Data.Aeson as Aeson
import qualified Data.ByteString.Lazy as ByteStringLazy
import qualified Data.Char as Char
import qualified Data.Monoid as Monoid
import qualified Data.Semigroup as Semigroup
import qualified Data.String as String
import qualified Data.Text as Text
import qualified Data.Text.Encoding as TextEncoding
import qualified Data.Word as Word
import qualified Data.Geometry.Types.Geography as TypesGeography
data Config = Config
{ _name :: ByteStringLazy.ByteString
, _gtc :: TypesGeography.GoogleTileCoordsInt
, _buffer :: Word.Word
, _extents :: Int
, _quantizePixels :: Int
, _simplify :: SimplificationAlgorithm
, _version :: Word.Word
} deriving (Show, Eq)
mkConfig :: Text.Text -> TypesGeography.Pixels -> (TypesGeography.Pixels, TypesGeography.Pixels) -> TypesGeography.Pixels -> TypesGeography.Pixels -> TypesGeography.Pixels -> SimplificationAlgorithm -> Config
mkConfig name z (x, y) buffer extents quantizePixels simplify = Config ((ByteStringLazy.fromStrict . TextEncoding.encodeUtf8) name) (TypesGeography.mkGoogleTileCoordsInt z x y)
(fromIntegral buffer) (fromIntegral extents) (TypesGeography.toInt quantizePixels) simplify TypesGeography.defaultVersion
data ZoomConfig = ZoomConfig
{ _zcExtents :: Int
, _zcQuantize :: Int
, _zcBBox :: TypesGeography.BoundingBox
, _zcSimplify :: SimplificationAlgorithm
} deriving (Eq, Show)
data SimplificationAlgorithm = NoAlgorithm
| Visvalingam
| DouglasPeucker
deriving (Eq, Show)
instance String.IsString SimplificationAlgorithm where
fromString s =
case Char.toLower <$> s of
"visvalingam" -> Visvalingam
"douglas-peucker" -> DouglasPeucker
_ -> NoAlgorithm
instance Aeson.ToJSON SimplificationAlgorithm where
toJSON algo =
Aeson.String $ case algo of
NoAlgorithm -> "none"
Visvalingam -> "visvalingam"
DouglasPeucker -> "douglas-peucker"
instance Aeson.FromJSON SimplificationAlgorithm where
parseJSON = Aeson.withText "SimplificationAlgorithm" $ \case
"none" -> pure NoAlgorithm
"visvalingam" -> pure Visvalingam
"douglas-peucker" -> pure DouglasPeucker
_ -> fail "Unknown algorithm"
data Options = Options
{ oVersion :: Monoid.Last Int
, oName :: Monoid.Last String
, oExtent :: Monoid.Last Int
} deriving (Show, Eq)
instance Semigroup.Semigroup Options where
(<>) x y = Options
{ oVersion = oVersion x Monoid.<> oVersion y
, oName = oName x Monoid.<> oName y
, oExtent = oExtent x Monoid.<> oExtent y
}
instance Monoid Options where
mempty = Options mempty mempty mempty
#if !(MIN_VERSION_base(4,11,0))
if you want to avoid CPP , you can define ` mappend = ( < > ) ` unconditionally
mappend = (<>)
#endif
|
dddb619818fa298779218e8998263ec009ce2ad197c95af0a3fba027d154b54e | elh/advent-2022 | day_14.clj | (ns advent-2022.day-14
(:require [clojure.string :as str]
[clojure.set :as set]))
(defn read-input [file-name]
(as-> (slurp file-name) v
(str/split v #"\n")
(mapv #(str/split % #" -> ") v)
(mapv (fn [x] (mapv #(str/split % #",") x)) v)
(mapv (fn [x] (mapv (fn [y] (mapv #(Integer/parseInt %) y)) x)) v)))
(defn draw-line [line]
(loop [acc #{} l line]
(if (<= (count l) 1)
acc
(let [xs (sort [(first (first l)) (first (second l))])
ys (sort [(second (first l)) (second (second l))])
locs (set (for [x (vec (range (first xs) (inc (second xs))))
y (vec (range (first ys) (inc (second ys))))]
[x y]))]
(recur (set/union acc locs) (rest l))))))
(defn make-occupancy [input]
(reduce (fn [acc line]
(set/union acc (draw-line line)))
#{} input))
(defn pour-sand [occupancy]
(let [lowest (last (sort (map #(second %) occupancy)))]
(loop [occupancy occupancy sand-loc [500 0]]
(if (>= (second sand-loc) lowest)
occupancy
(cond
(not (contains? occupancy (map + sand-loc [0 1]))) (recur occupancy (map + sand-loc [0 1]))
(not (contains? occupancy (map + sand-loc [-1 1]))) (recur occupancy (map + sand-loc [-1 1]))
(not (contains? occupancy (map + sand-loc [1 1]))) (recur occupancy (map + sand-loc [1 1]))
:else (recur (conj occupancy sand-loc) [500 0]))))))
(defn pour-sand-p2 [occupancy]
(let [lowest (last (sort (map #(second %) occupancy)))
floor (+ lowest 2)]
(letfn [(open? [o l]
(if (>= (second l) floor)
false
(not (contains? o l))))]
(loop [occupancy occupancy sand-loc [500 0]]
(cond
(open? occupancy (map + sand-loc [0 1])) (recur occupancy (map + sand-loc [0 1]))
(open? occupancy (map + sand-loc [-1 1])) (recur occupancy (map + sand-loc [-1 1]))
(open? occupancy (map + sand-loc [1 1])) (recur occupancy (map + sand-loc [1 1]))
:else (if (= sand-loc [500 0])
(conj occupancy sand-loc)
(recur (conj occupancy sand-loc) [500 0])))))))
(defn -main [& args]
(when (not= (count args) 1)
(throw (Exception. (format "FAIL: expects input file as cmdline arg. got %d args" (count args)))))
(let [input (read-input (first args))
occupancy (make-occupancy input)]
(println "part 1:" (time (- (count (pour-sand occupancy)) (count occupancy))))
(println "part 2:" (time (- (count (pour-sand-p2 occupancy)) (count occupancy))))))
| null | https://raw.githubusercontent.com/elh/advent-2022/f217702c37723a680b272c64ec09117e826739bc/src/advent_2022/day_14.clj | clojure | (ns advent-2022.day-14
(:require [clojure.string :as str]
[clojure.set :as set]))
(defn read-input [file-name]
(as-> (slurp file-name) v
(str/split v #"\n")
(mapv #(str/split % #" -> ") v)
(mapv (fn [x] (mapv #(str/split % #",") x)) v)
(mapv (fn [x] (mapv (fn [y] (mapv #(Integer/parseInt %) y)) x)) v)))
(defn draw-line [line]
(loop [acc #{} l line]
(if (<= (count l) 1)
acc
(let [xs (sort [(first (first l)) (first (second l))])
ys (sort [(second (first l)) (second (second l))])
locs (set (for [x (vec (range (first xs) (inc (second xs))))
y (vec (range (first ys) (inc (second ys))))]
[x y]))]
(recur (set/union acc locs) (rest l))))))
(defn make-occupancy [input]
(reduce (fn [acc line]
(set/union acc (draw-line line)))
#{} input))
(defn pour-sand [occupancy]
(let [lowest (last (sort (map #(second %) occupancy)))]
(loop [occupancy occupancy sand-loc [500 0]]
(if (>= (second sand-loc) lowest)
occupancy
(cond
(not (contains? occupancy (map + sand-loc [0 1]))) (recur occupancy (map + sand-loc [0 1]))
(not (contains? occupancy (map + sand-loc [-1 1]))) (recur occupancy (map + sand-loc [-1 1]))
(not (contains? occupancy (map + sand-loc [1 1]))) (recur occupancy (map + sand-loc [1 1]))
:else (recur (conj occupancy sand-loc) [500 0]))))))
(defn pour-sand-p2 [occupancy]
(let [lowest (last (sort (map #(second %) occupancy)))
floor (+ lowest 2)]
(letfn [(open? [o l]
(if (>= (second l) floor)
false
(not (contains? o l))))]
(loop [occupancy occupancy sand-loc [500 0]]
(cond
(open? occupancy (map + sand-loc [0 1])) (recur occupancy (map + sand-loc [0 1]))
(open? occupancy (map + sand-loc [-1 1])) (recur occupancy (map + sand-loc [-1 1]))
(open? occupancy (map + sand-loc [1 1])) (recur occupancy (map + sand-loc [1 1]))
:else (if (= sand-loc [500 0])
(conj occupancy sand-loc)
(recur (conj occupancy sand-loc) [500 0])))))))
(defn -main [& args]
(when (not= (count args) 1)
(throw (Exception. (format "FAIL: expects input file as cmdline arg. got %d args" (count args)))))
(let [input (read-input (first args))
occupancy (make-occupancy input)]
(println "part 1:" (time (- (count (pour-sand occupancy)) (count occupancy))))
(println "part 2:" (time (- (count (pour-sand-p2 occupancy)) (count occupancy))))))
| |
a6f06fe83099e102ac86b1ece4f0dfb113499b94b27dd5bbeab654d6bce0595d | micmarsh/re-frame-youtube-fx | project.clj | (defproject basic-player "0.1.0-SNAPSHOT"
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/clojurescript "1.9.229"]
[reagent "0.6.0"]
[binaryage/devtools "0.8.2"]
[re-frame "0.8.0"]]
:plugins [[lein-cljsbuild "1.1.4"]]
:min-lein-version "2.5.3"
:clean-targets ^{:protect false} ["resources/public/js/compiled" "target"]
:figwheel {:css-dirs ["resources/public/css"]}
:profiles
{:dev
{:dependencies [[camel-snake-kebab "0.4.0"]]
:plugins [[lein-figwheel "0.5.7"]]}}
:cljsbuild
{:builds
[{:id "dev"
:source-paths ["src/cljs" "../../src"]
:figwheel {:on-jsload "basic-player.core/mount-root"}
:compiler {:main basic-player.core
:output-to "resources/public/js/compiled/app.js"
:output-dir "resources/public/js/compiled/out"
:asset-path "js/compiled/out"
:source-map-timestamp true}}
{:id "min"
:source-paths ["src/cljs" "../../src"]
:compiler {:main basic-player.core
:output-to "resources/public/js/compiled/app.js"
:optimizations :advanced
:closure-defines {goog.DEBUG false}
:pretty-print false
:externs ["resources/public/externs.js"]}}
]}
)
| null | https://raw.githubusercontent.com/micmarsh/re-frame-youtube-fx/dfbf2790930da2810da2ea8e21a1e72abf82f0d7/examples/basic-player/project.clj | clojure | (defproject basic-player "0.1.0-SNAPSHOT"
:dependencies [[org.clojure/clojure "1.8.0"]
[org.clojure/clojurescript "1.9.229"]
[reagent "0.6.0"]
[binaryage/devtools "0.8.2"]
[re-frame "0.8.0"]]
:plugins [[lein-cljsbuild "1.1.4"]]
:min-lein-version "2.5.3"
:clean-targets ^{:protect false} ["resources/public/js/compiled" "target"]
:figwheel {:css-dirs ["resources/public/css"]}
:profiles
{:dev
{:dependencies [[camel-snake-kebab "0.4.0"]]
:plugins [[lein-figwheel "0.5.7"]]}}
:cljsbuild
{:builds
[{:id "dev"
:source-paths ["src/cljs" "../../src"]
:figwheel {:on-jsload "basic-player.core/mount-root"}
:compiler {:main basic-player.core
:output-to "resources/public/js/compiled/app.js"
:output-dir "resources/public/js/compiled/out"
:asset-path "js/compiled/out"
:source-map-timestamp true}}
{:id "min"
:source-paths ["src/cljs" "../../src"]
:compiler {:main basic-player.core
:output-to "resources/public/js/compiled/app.js"
:optimizations :advanced
:closure-defines {goog.DEBUG false}
:pretty-print false
:externs ["resources/public/externs.js"]}}
]}
)
| |
ab7090beac7dd4bf791042e5c2bbf9d478fdf2d17b616a9a7e89d67213968470 | DalekBaldwin/check-it | destructive-tests.lisp | (in-package :check-it-test)
(in-root-suite)
(in-suite randomized-tests)
;;;; Test pattern: copy generated value, mutate value in cache, regenerate, and compare
(deftest test-list-generator-mutation ()
(let ((g (generator (list (integer) :min-length 2)))
(filler (generator (list (integer) :min-length 3))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-list test-value)))
(setf (car test-value) :derp)
(setf (cdr test-value) (generate filler))
(is (equal (regenerate g) copied))))))
(deftest test-tuple-generator-mutation ()
(let ((g (generator (tuple (integer) (integer))))
(filler (generator (tuple (integer) (integer)))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-list test-value))
(fill-value (generate filler)))
(setf (first test-value) (first fill-value)
(cdr test-value) fill-value)
(is (equal (regenerate g) copied))))))
(deftest test-string-generator-mutation ()
(let ((g (generator (string :min-length 2))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-seq test-value)))
(setf (aref test-value 1) #\x)
(is (equal (regenerate g) copied))))))
(deftest test-struct-generator-mutation ()
(let ((g (generator (struct a-struct
:a-slot (integer)
:another-slot (real)))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-structure test-value)))
(setf (slot-value test-value 'a-slot) :derp
(slot-value test-value 'another-slot) :herp)
(is (equalp (regenerate g) copied)))))
(let ((g (generator (struct a-struct
:a-slot (list (integer) :min-length 2)
:another-slot (list (real) :min-length 2)))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied
(make-a-struct
:a-slot
(copy-list (slot-value test-value 'a-slot))
:another-slot
(copy-list (slot-value test-value 'another-slot)))))
(setf (car (slot-value test-value 'a-slot)) :derp
(car (slot-value test-value 'another-slot)) :herp)
(is (equalp (regenerate g) copied))))))
(deftest test-mapped-generator-mutation ()
(let ((g (generator
(map (lambda (x y) (list y x y x))
(list (integer) :min-length 2)
(list (integer) :min-length 3)))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-tree test-value)))
(setf (caar test-value) :derp
(cddr test-value) (list 1 2 3 :herp))
(is (equal (regenerate g) copied))))))
(deftest test-chained-generator-mutation ()
(let ((g (generator
(chain ((x (integer 10 20))
(y (integer 21 30)))
(generator (list (integer x y) :min-length x :max-length y))))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-list test-value)))
(setf (car test-value) :derp
(cddr test-value) (list 1 2 3 :herp))
(is (equal (regenerate g) copied))))))
(deftest test-guard-generator-mutation ()
(let ((g (generator
(guard (lambda (x) (> (length x) 4))
(list (integer))))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-list test-value)))
(setf (car test-value) :derp
(cddr test-value) (list 1 2 3 :herp))
(is (equal (regenerate g) copied))))))
(deftest test-or-generator-mutation ()
(let ((g (generator
(or (list (integer 5) :min-length 2)
(tuple (integer) (real))))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-tree test-value)))
(setf (car test-value) :derp
(cdr test-value) (list 1 2 3 :herp))
(is (equal (regenerate g) copied))))))
(deftest test-custom-generator-mutation ()
(let ((g (generator (big-custom-generator))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (if (listp test-value)
(copy-tree test-value)
(make-a-struct
:a-slot
(slot-value test-value 'a-slot)
:another-slot
(slot-value test-value 'another-slot)))))
(if (listp test-value)
(setf (car test-value) :derp
(cdr test-value) (list 1 2 3 :herp))
(setf (slot-value test-value 'a-slot) :herp
(slot-value test-value 'another-slot) :derp))
(is (equalp (regenerate g) copied))))))
| null | https://raw.githubusercontent.com/DalekBaldwin/check-it/b79c9103665be3976915b56b570038f03486e62f/test/destructive-tests.lisp | lisp | Test pattern: copy generated value, mutate value in cache, regenerate, and compare | (in-package :check-it-test)
(in-root-suite)
(in-suite randomized-tests)
(deftest test-list-generator-mutation ()
(let ((g (generator (list (integer) :min-length 2)))
(filler (generator (list (integer) :min-length 3))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-list test-value)))
(setf (car test-value) :derp)
(setf (cdr test-value) (generate filler))
(is (equal (regenerate g) copied))))))
(deftest test-tuple-generator-mutation ()
(let ((g (generator (tuple (integer) (integer))))
(filler (generator (tuple (integer) (integer)))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-list test-value))
(fill-value (generate filler)))
(setf (first test-value) (first fill-value)
(cdr test-value) fill-value)
(is (equal (regenerate g) copied))))))
(deftest test-string-generator-mutation ()
(let ((g (generator (string :min-length 2))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-seq test-value)))
(setf (aref test-value 1) #\x)
(is (equal (regenerate g) copied))))))
(deftest test-struct-generator-mutation ()
(let ((g (generator (struct a-struct
:a-slot (integer)
:another-slot (real)))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-structure test-value)))
(setf (slot-value test-value 'a-slot) :derp
(slot-value test-value 'another-slot) :herp)
(is (equalp (regenerate g) copied)))))
(let ((g (generator (struct a-struct
:a-slot (list (integer) :min-length 2)
:another-slot (list (real) :min-length 2)))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied
(make-a-struct
:a-slot
(copy-list (slot-value test-value 'a-slot))
:another-slot
(copy-list (slot-value test-value 'another-slot)))))
(setf (car (slot-value test-value 'a-slot)) :derp
(car (slot-value test-value 'another-slot)) :herp)
(is (equalp (regenerate g) copied))))))
(deftest test-mapped-generator-mutation ()
(let ((g (generator
(map (lambda (x y) (list y x y x))
(list (integer) :min-length 2)
(list (integer) :min-length 3)))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-tree test-value)))
(setf (caar test-value) :derp
(cddr test-value) (list 1 2 3 :herp))
(is (equal (regenerate g) copied))))))
(deftest test-chained-generator-mutation ()
(let ((g (generator
(chain ((x (integer 10 20))
(y (integer 21 30)))
(generator (list (integer x y) :min-length x :max-length y))))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-list test-value)))
(setf (car test-value) :derp
(cddr test-value) (list 1 2 3 :herp))
(is (equal (regenerate g) copied))))))
(deftest test-guard-generator-mutation ()
(let ((g (generator
(guard (lambda (x) (> (length x) 4))
(list (integer))))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-list test-value)))
(setf (car test-value) :derp
(cddr test-value) (list 1 2 3 :herp))
(is (equal (regenerate g) copied))))))
(deftest test-or-generator-mutation ()
(let ((g (generator
(or (list (integer 5) :min-length 2)
(tuple (integer) (real))))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (copy-tree test-value)))
(setf (car test-value) :derp
(cdr test-value) (list 1 2 3 :herp))
(is (equal (regenerate g) copied))))))
(deftest test-custom-generator-mutation ()
(let ((g (generator (big-custom-generator))))
(loop repeat 10
do
(let* ((test-value (generate g))
(copied (if (listp test-value)
(copy-tree test-value)
(make-a-struct
:a-slot
(slot-value test-value 'a-slot)
:another-slot
(slot-value test-value 'another-slot)))))
(if (listp test-value)
(setf (car test-value) :derp
(cdr test-value) (list 1 2 3 :herp))
(setf (slot-value test-value 'a-slot) :herp
(slot-value test-value 'another-slot) :derp))
(is (equalp (regenerate g) copied))))))
|
468caea2eb9b36790490dff588224d89a8a9d100b612798e67f536b3051a1189 | softwarelanguageslab/maf | R5RS_gambit_tail-1.scm | ; Changes:
* removed : 0
* added : 1
* swaps : 0
; * negated predicates: 0
; * swapped branches: 0
* calls to i d fun : 1
(letrec ((inport #f)
(outport #f)
(readline (lambda (port line-so-far)
(let ((x (read-char port)))
(if (eof-object? x)
x
(if (char=? x #\
)
(list->string (reverse (cons x line-so-far)))
(readline port (cons x line-so-far)))))))
(tail-r-aux (lambda (port file-so-far)
(<change>
()
(display display))
(let ((x (readline port ())))
(if (eof-object? x)
(begin
(<change>
(display file-so-far outport)
((lambda (x) x) (display file-so-far outport)))
(close-output-port outport))
(tail-r-aux port (cons x file-so-far))))))
(tail-r (lambda (port)
(tail-r-aux port ())))
(go (lambda ()
(set! inport (open-input-file "input.txt"))
(set! outport (open-output-file "output.txt"))
(tail-r inport)
(close-input-port inport))))
(go)) | null | https://raw.githubusercontent.com/softwarelanguageslab/maf/11acedf56b9bf0c8e55ddb6aea754b6766d8bb40/test/changes/scheme/generated/R5RS_gambit_tail-1.scm | scheme | Changes:
* negated predicates: 0
* swapped branches: 0 | * removed : 0
* added : 1
* swaps : 0
* calls to i d fun : 1
(letrec ((inport #f)
(outport #f)
(readline (lambda (port line-so-far)
(let ((x (read-char port)))
(if (eof-object? x)
x
(if (char=? x #\
)
(list->string (reverse (cons x line-so-far)))
(readline port (cons x line-so-far)))))))
(tail-r-aux (lambda (port file-so-far)
(<change>
()
(display display))
(let ((x (readline port ())))
(if (eof-object? x)
(begin
(<change>
(display file-so-far outport)
((lambda (x) x) (display file-so-far outport)))
(close-output-port outport))
(tail-r-aux port (cons x file-so-far))))))
(tail-r (lambda (port)
(tail-r-aux port ())))
(go (lambda ()
(set! inport (open-input-file "input.txt"))
(set! outport (open-output-file "output.txt"))
(tail-r inport)
(close-input-port inport))))
(go)) |
3d5979a2c3bdb82a36c21bb2ebd6c506fe5906e07fa7b40d4982e1bfa0dd42af | reborg/fluorine | project.clj | (defproject net.reborg/fluorine "0.0.9"
:description "Distributed configuration for Clojure"
:url ""
:license {:name "Eclipse Public License - v 1.0"
:url "-v10.html"
:distribution :repo
:comments "same as Clojure"}
:dependencies [[org.clojure/clojure "1.8.0"]
;; logging
[ch.qos.logback/logback-classic "1.1.3" :exclusions [org.slf4j/slf4j-api]]
[ch.qos.logback/logback-access "1.1.3"]
[ch.qos.logback/logback-core "1.1.3"]
[org.slf4j/slf4j-api "1.7.12"]
[org.clojure/tools.logging "0.3.1"]
;; components
[com.stuartsierra/component "0.3.0"]
[org.clojure/tools.nrepl "0.2.11"]
[org.clojure/tools.namespace "0.2.11"]
;; aleph
[aleph "0.4.1"]
[org.clojure/core.async "0.1.346.0-17112a-alpha"]
[manifold "0.1.1"]
[gloss "0.2.5"]
[compojure "1.4.0"]
;; other
[clojure-watch "0.1.11"]
[cheshire "5.2.0"]
]
:uberjar-name "fluorine.jar"
:repl-options {:init-ns user
:init (do (require 'midje.repl) (midje.repl/autotest))}
:profiles {:uberjar {:main net.reborg.fluorine.system
:aot :all}
:dev {:plugins [[lein-midje "3.1.3"]]
:dependencies [[midje "1.6.3"]]
:source-paths ["dev"]}}
:jvm-opts ~(vec (map (fn [[p v]] (str "-D" (name p) "=" v))
{:java.awt.headless "true"
:log.dir "logs"})))
| null | https://raw.githubusercontent.com/reborg/fluorine/58d533646adce8537ca5c57692a0a998f06e1d25/project.clj | clojure | logging
components
aleph
other | (defproject net.reborg/fluorine "0.0.9"
:description "Distributed configuration for Clojure"
:url ""
:license {:name "Eclipse Public License - v 1.0"
:url "-v10.html"
:distribution :repo
:comments "same as Clojure"}
:dependencies [[org.clojure/clojure "1.8.0"]
[ch.qos.logback/logback-classic "1.1.3" :exclusions [org.slf4j/slf4j-api]]
[ch.qos.logback/logback-access "1.1.3"]
[ch.qos.logback/logback-core "1.1.3"]
[org.slf4j/slf4j-api "1.7.12"]
[org.clojure/tools.logging "0.3.1"]
[com.stuartsierra/component "0.3.0"]
[org.clojure/tools.nrepl "0.2.11"]
[org.clojure/tools.namespace "0.2.11"]
[aleph "0.4.1"]
[org.clojure/core.async "0.1.346.0-17112a-alpha"]
[manifold "0.1.1"]
[gloss "0.2.5"]
[compojure "1.4.0"]
[clojure-watch "0.1.11"]
[cheshire "5.2.0"]
]
:uberjar-name "fluorine.jar"
:repl-options {:init-ns user
:init (do (require 'midje.repl) (midje.repl/autotest))}
:profiles {:uberjar {:main net.reborg.fluorine.system
:aot :all}
:dev {:plugins [[lein-midje "3.1.3"]]
:dependencies [[midje "1.6.3"]]
:source-paths ["dev"]}}
:jvm-opts ~(vec (map (fn [[p v]] (str "-D" (name p) "=" v))
{:java.awt.headless "true"
:log.dir "logs"})))
|
6cc4509b9fb0451a5cf7b4c6c8e47bee711e64e616e5c03e5daa1aa52cab51d2 | coq/coq | dyn.ml | (************************************************************************)
(* * The Coq Proof Assistant / The Coq Development Team *)
v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
* GNU Lesser General Public License Version 2.1
(* * (see LICENSE file for the text of the license) *)
(************************************************************************)
module type ValueS =
sig
type 'a t
end
module type MapS =
sig
type t
type 'a key
type 'a value
val empty : t
val add : 'a key -> 'a value -> t -> t
val remove : 'a key -> t -> t
val find : 'a key -> t -> 'a value
val mem : 'a key -> t -> bool
val modify : 'a key -> ('a value -> 'a value) -> t -> t
type map = { map : 'a. 'a key -> 'a value -> 'a value }
val map : map -> t -> t
type any = Any : 'a key * 'a value -> any
val iter : (any -> unit) -> t -> unit
val fold : (any -> 'r -> 'r) -> t -> 'r -> 'r
type filter = { filter : 'a. 'a key -> 'a value -> bool }
val filter : filter -> t -> t
end
module type PreS =
sig
type 'a tag
type t = Dyn : 'a tag * 'a -> t
val create : string -> 'a tag
val anonymous : int -> 'a tag
val eq : 'a tag -> 'b tag -> ('a, 'b) CSig.eq option
val repr : 'a tag -> string
val dump : unit -> (int * string) list
type any = Any : 'a tag -> any
val name : string -> any option
module Map(Value : ValueS) :
MapS with type 'a key = 'a tag and type 'a value = 'a Value.t
module HMap (V1 : ValueS)(V2 : ValueS) :
sig
type map = { map : 'a. 'a tag -> 'a V1.t -> 'a V2.t }
val map : map -> Map(V1).t -> Map(V2).t
type filter = { filter : 'a. 'a tag -> 'a V1.t -> bool }
val filter : filter -> Map(V1).t -> Map(V1).t
end
end
module type S =
sig
include PreS
module Easy : sig
val make_dyn_tag : string -> ('a -> t) * (t -> 'a) * 'a tag
val make_dyn : string -> ('a -> t) * (t -> 'a)
val inj : 'a -> 'a tag -> t
val prj : t -> 'a tag -> 'a option
end
end
module Make () = struct
module Self : PreS = struct
(* Dynamics, programmed with DANGER !!! *)
type 'a tag = int
type t = Dyn : 'a tag * 'a -> t
type any = Any : 'a tag -> any
let dyntab = ref (Int.Map.empty : string Int.Map.t)
(** Instead of working with tags as strings, which are costly, we use their
hash. We ensure unicity of the hash in the [create] function. If ever a
collision occurs, which is unlikely, it is sufficient to tweak the offending
dynamic tag. *)
let create (s : string) =
let hash = Hashtbl.hash s in
if Int.Map.mem hash !dyntab then begin
let old = Int.Map.find hash !dyntab in
Printf.eprintf "Dynamic tag collision: %s vs. %s\n%!" s old;
assert false
end;
dyntab := Int.Map.add hash s !dyntab;
hash
let anonymous n =
if Int.Map.mem n !dyntab then begin
Printf.eprintf "Dynamic tag collision: %d\n%!" n;
assert false
end;
dyntab := Int.Map.add n "<anonymous>" !dyntab;
n
let eq : 'a 'b. 'a tag -> 'b tag -> ('a, 'b) CSig.eq option =
fun h1 h2 -> if Int.equal h1 h2 then Some (Obj.magic CSig.Refl) else None
let repr s =
try Int.Map.find s !dyntab
with Not_found ->
let () = Printf.eprintf "Unknown dynamic tag %i\n%!" s in
assert false
let name s =
let hash = Hashtbl.hash s in
if Int.Map.mem hash !dyntab then Some (Any hash) else None
let dump () = Int.Map.bindings !dyntab
module Map(Value: ValueS) =
struct
type t = Obj.t Value.t Int.Map.t
type 'a key = 'a tag
type 'a value = 'a Value.t
let cast : 'a value -> 'b value = Obj.magic
let empty = Int.Map.empty
let add tag v m = Int.Map.add tag (cast v) m
let remove tag m = Int.Map.remove tag m
let find tag m = cast (Int.Map.find tag m)
let mem = Int.Map.mem
let modify tag f m = Int.Map.modify tag (fun _ v -> cast (f (cast v))) m
type map = { map : 'a. 'a tag -> 'a value -> 'a value }
let map f m = Int.Map.mapi f.map m
type any = Any : 'a tag * 'a value -> any
let iter f m = Int.Map.iter (fun k v -> f (Any (k, v))) m
let fold f m accu = Int.Map.fold (fun k v accu -> f (Any (k, v)) accu) m accu
type filter = { filter : 'a. 'a tag -> 'a value -> bool }
let filter f m = Int.Map.filter f.filter m
end
module HMap (V1 : ValueS) (V2 : ValueS) =
struct
type map = { map : 'a. 'a tag -> 'a V1.t -> 'a V2.t }
let map (f : map) (m : Map(V1).t) : Map(V2).t =
Int.Map.mapi f.map m
type filter = { filter : 'a. 'a tag -> 'a V1.t -> bool }
let filter (f : filter) (m : Map(V1).t) : Map(V1).t =
Int.Map.filter f.filter m
end
end
include Self
module Easy = struct
(* now tags are opaque, we can do the trick *)
let make_dyn_tag (s : string) =
(fun (type a) (tag : a tag) ->
let infun : (a -> t) = fun x -> Dyn (tag, x) in
let outfun : (t -> a) = fun (Dyn (t, x)) ->
match eq tag t with
| None -> assert false
| Some CSig.Refl -> x
in
infun, outfun, tag)
(create s)
let make_dyn (s : string) =
let inf, outf, _ = make_dyn_tag s in inf, outf
let inj x tag = Dyn(tag,x)
let prj : type a. t -> a tag -> a option =
fun (Dyn(tag',x)) tag ->
match eq tag tag' with
| None -> None
| Some CSig.Refl -> Some x
end
end
| null | https://raw.githubusercontent.com/coq/coq/686e9097959307e382f8889aa63fe8638ad76fc1/clib/dyn.ml | ocaml | **********************************************************************
* The Coq Proof Assistant / The Coq Development Team
// * This file is distributed under the terms of the
* (see LICENSE file for the text of the license)
**********************************************************************
Dynamics, programmed with DANGER !!!
* Instead of working with tags as strings, which are costly, we use their
hash. We ensure unicity of the hash in the [create] function. If ever a
collision occurs, which is unlikely, it is sufficient to tweak the offending
dynamic tag.
now tags are opaque, we can do the trick | v * Copyright INRIA , CNRS and contributors
< O _ _ _ , , * ( see version control and CREDITS file for authors & dates )
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* GNU Lesser General Public License Version 2.1
module type ValueS =
sig
type 'a t
end
module type MapS =
sig
type t
type 'a key
type 'a value
val empty : t
val add : 'a key -> 'a value -> t -> t
val remove : 'a key -> t -> t
val find : 'a key -> t -> 'a value
val mem : 'a key -> t -> bool
val modify : 'a key -> ('a value -> 'a value) -> t -> t
type map = { map : 'a. 'a key -> 'a value -> 'a value }
val map : map -> t -> t
type any = Any : 'a key * 'a value -> any
val iter : (any -> unit) -> t -> unit
val fold : (any -> 'r -> 'r) -> t -> 'r -> 'r
type filter = { filter : 'a. 'a key -> 'a value -> bool }
val filter : filter -> t -> t
end
module type PreS =
sig
type 'a tag
type t = Dyn : 'a tag * 'a -> t
val create : string -> 'a tag
val anonymous : int -> 'a tag
val eq : 'a tag -> 'b tag -> ('a, 'b) CSig.eq option
val repr : 'a tag -> string
val dump : unit -> (int * string) list
type any = Any : 'a tag -> any
val name : string -> any option
module Map(Value : ValueS) :
MapS with type 'a key = 'a tag and type 'a value = 'a Value.t
module HMap (V1 : ValueS)(V2 : ValueS) :
sig
type map = { map : 'a. 'a tag -> 'a V1.t -> 'a V2.t }
val map : map -> Map(V1).t -> Map(V2).t
type filter = { filter : 'a. 'a tag -> 'a V1.t -> bool }
val filter : filter -> Map(V1).t -> Map(V1).t
end
end
module type S =
sig
include PreS
module Easy : sig
val make_dyn_tag : string -> ('a -> t) * (t -> 'a) * 'a tag
val make_dyn : string -> ('a -> t) * (t -> 'a)
val inj : 'a -> 'a tag -> t
val prj : t -> 'a tag -> 'a option
end
end
module Make () = struct
module Self : PreS = struct
type 'a tag = int
type t = Dyn : 'a tag * 'a -> t
type any = Any : 'a tag -> any
let dyntab = ref (Int.Map.empty : string Int.Map.t)
let create (s : string) =
let hash = Hashtbl.hash s in
if Int.Map.mem hash !dyntab then begin
let old = Int.Map.find hash !dyntab in
Printf.eprintf "Dynamic tag collision: %s vs. %s\n%!" s old;
assert false
end;
dyntab := Int.Map.add hash s !dyntab;
hash
let anonymous n =
if Int.Map.mem n !dyntab then begin
Printf.eprintf "Dynamic tag collision: %d\n%!" n;
assert false
end;
dyntab := Int.Map.add n "<anonymous>" !dyntab;
n
let eq : 'a 'b. 'a tag -> 'b tag -> ('a, 'b) CSig.eq option =
fun h1 h2 -> if Int.equal h1 h2 then Some (Obj.magic CSig.Refl) else None
let repr s =
try Int.Map.find s !dyntab
with Not_found ->
let () = Printf.eprintf "Unknown dynamic tag %i\n%!" s in
assert false
let name s =
let hash = Hashtbl.hash s in
if Int.Map.mem hash !dyntab then Some (Any hash) else None
let dump () = Int.Map.bindings !dyntab
module Map(Value: ValueS) =
struct
type t = Obj.t Value.t Int.Map.t
type 'a key = 'a tag
type 'a value = 'a Value.t
let cast : 'a value -> 'b value = Obj.magic
let empty = Int.Map.empty
let add tag v m = Int.Map.add tag (cast v) m
let remove tag m = Int.Map.remove tag m
let find tag m = cast (Int.Map.find tag m)
let mem = Int.Map.mem
let modify tag f m = Int.Map.modify tag (fun _ v -> cast (f (cast v))) m
type map = { map : 'a. 'a tag -> 'a value -> 'a value }
let map f m = Int.Map.mapi f.map m
type any = Any : 'a tag * 'a value -> any
let iter f m = Int.Map.iter (fun k v -> f (Any (k, v))) m
let fold f m accu = Int.Map.fold (fun k v accu -> f (Any (k, v)) accu) m accu
type filter = { filter : 'a. 'a tag -> 'a value -> bool }
let filter f m = Int.Map.filter f.filter m
end
module HMap (V1 : ValueS) (V2 : ValueS) =
struct
type map = { map : 'a. 'a tag -> 'a V1.t -> 'a V2.t }
let map (f : map) (m : Map(V1).t) : Map(V2).t =
Int.Map.mapi f.map m
type filter = { filter : 'a. 'a tag -> 'a V1.t -> bool }
let filter (f : filter) (m : Map(V1).t) : Map(V1).t =
Int.Map.filter f.filter m
end
end
include Self
module Easy = struct
let make_dyn_tag (s : string) =
(fun (type a) (tag : a tag) ->
let infun : (a -> t) = fun x -> Dyn (tag, x) in
let outfun : (t -> a) = fun (Dyn (t, x)) ->
match eq tag t with
| None -> assert false
| Some CSig.Refl -> x
in
infun, outfun, tag)
(create s)
let make_dyn (s : string) =
let inf, outf, _ = make_dyn_tag s in inf, outf
let inj x tag = Dyn(tag,x)
let prj : type a. t -> a tag -> a option =
fun (Dyn(tag',x)) tag ->
match eq tag tag' with
| None -> None
| Some CSig.Refl -> Some x
end
end
|
7b66ccf4ff0b3d23907fd3a6db1c22db6ec0d1658e4f31ef848425b73261ecfc | zeniuseducation/poly-euler | one.clj | (defn )
| null | https://raw.githubusercontent.com/zeniuseducation/poly-euler/734fdcf1ddd096a8730600b684bf7398d071d499/julia/one.clj | clojure | (defn )
| |
46bc873ffd5bd5d61ebd51794f574d1f1ac79fe9e89beff7f969fd296e8a7596 | sirherrbatka/statistical-learning | types.lisp | (cl:in-package #:statistical-learning.tree-protocol)
(defclass split-result ()
((%split-vector :initarg :split-vector
:reader split-vector)
(%split-point :initarg :split-point
:reader split-point)
(%left-length :initarg :left-length
:reader left-length)
(%right-length :initarg :right-length
:reader right-length)
(%left-score :initarg :left-score
:reader left-score)
(%right-score :initarg :right-score
:reader right-score)))
(defclass fundamental-splitter (sl.common:proxy-enabled)
())
(defclass data-point-oriented-splitter (fundamental-splitter)
())
(defclass random-splitter (sl.common:lifting-proxy)
((%trials-count :initarg :trials-count
:reader trials-count)))
(defclass random-attribute-splitter (data-point-oriented-splitter)
())
(defclass hyperplane-splitter (data-point-oriented-splitter)
())
(defclass distance-splitter (data-point-oriented-splitter)
((%distance-function :initarg :distance-function
:reader distance-function)
(%repeats :initarg :repeats
:reader repeats)
(%iterations :initarg :iterations
:reader iterations))
(:default-initargs :iterations 2))
(defclass set-splitter (data-point-oriented-splitter)
()
(:documentation "Splitter which can be used for unordered sets of tuples."))
(defclass fundamental-node ()
())
(defclass fundamental-tree-node (fundamental-node)
((%left-node :initarg :left-node
:accessor left-node)
(%right-node :initarg :right-node
:accessor right-node)
(%point :initarg :point
:accessor point)))
(defclass fundamental-leaf-node (fundamental-node)
())
(defclass standard-leaf-node (fundamental-leaf-node fundamental-node)
((%predictions :initarg :predictions
:accessor predictions))
(:default-initargs :predictions nil))
(defclass fundamental-tree-training-parameters
(statistical-learning.mp:fundamental-model-parameters)
())
(defclass basic-tree-training-parameters
(fundamental-tree-training-parameters)
())
(defclass standard-tree-training-parameters (basic-tree-training-parameters)
((%maximal-depth :initarg :maximal-depth
:reader maximal-depth)
(%minimal-difference :initarg :minimal-difference
:reader minimal-difference)
(%minimal-size :initarg :minimal-size
:reader minimal-size)
(%parallel :initarg :parallel
:reader parallel)
(%splitter :initarg :splitter
:reader splitter))
(:default-initargs
:splitter (sl.common:lift (make 'random-attribute-splitter)
'random-splitter
:trials-count 20)))
(defclass tree-training-state (sl.mp:fundamental-training-state)
((%attribute-indexes :initarg :attributes
:accessor attribute-indexes)
(%data-points :initarg :data-points
:accessor sl.mp:data-points)
(%depth :initarg :depth
:reader depth)
(%loss :initarg :loss
:reader loss)
(%target-data :initarg :target-data
:reader sl.mp:target-data)
(%weights :initarg :weights
:reader sl.mp:weights)
(%split-point :initarg :split-point
:accessor split-point)
(%train-data :initarg :train-data
:reader sl.mp:train-data)
(%spritter-state :initarg :splitter-state
:accessor splitter-state)
(%parent-state :initarg :parent-state
:reader parent-state))
(:default-initargs :depth 0
:attributes nil
:split-point nil
:splitter-state nil
:weights nil
:parent-state nil
:data-points nil))
(defclass tree-model (statistical-learning.mp:supervised-model)
((%root :initarg :root
:writer write-root
:reader root)
(%attribute-indexes :initarg :attribute-indexes
:reader attribute-indexes)
(%forced :initarg :forced
:accessor forced)
(%weight :initarg :weight
:accessor weight))
(:default-initargs
:forced nil
:weight 1.0))
(defclass contributed-predictions ()
((%training-parameters :initarg :training-parameters
:reader sl.mp:training-parameters)
(%predictions-lock :initarg :predictions-lock
:reader predictions-lock)
(%contributions-count :initarg :contributions-count
:accessor contributions-count)
(%indexes :initarg :indexes
:reader indexes)
(%sums :initarg :sums
:accessor sums))
(:default-initargs
:contributions-count 0.0
:predictions-lock (bt:make-lock)))
| null | https://raw.githubusercontent.com/sirherrbatka/statistical-learning/40b529beff5820c08cb92b1fa92c4f688a1989bb/source/tree-protocol/types.lisp | lisp | (cl:in-package #:statistical-learning.tree-protocol)
(defclass split-result ()
((%split-vector :initarg :split-vector
:reader split-vector)
(%split-point :initarg :split-point
:reader split-point)
(%left-length :initarg :left-length
:reader left-length)
(%right-length :initarg :right-length
:reader right-length)
(%left-score :initarg :left-score
:reader left-score)
(%right-score :initarg :right-score
:reader right-score)))
(defclass fundamental-splitter (sl.common:proxy-enabled)
())
(defclass data-point-oriented-splitter (fundamental-splitter)
())
(defclass random-splitter (sl.common:lifting-proxy)
((%trials-count :initarg :trials-count
:reader trials-count)))
(defclass random-attribute-splitter (data-point-oriented-splitter)
())
(defclass hyperplane-splitter (data-point-oriented-splitter)
())
(defclass distance-splitter (data-point-oriented-splitter)
((%distance-function :initarg :distance-function
:reader distance-function)
(%repeats :initarg :repeats
:reader repeats)
(%iterations :initarg :iterations
:reader iterations))
(:default-initargs :iterations 2))
(defclass set-splitter (data-point-oriented-splitter)
()
(:documentation "Splitter which can be used for unordered sets of tuples."))
(defclass fundamental-node ()
())
(defclass fundamental-tree-node (fundamental-node)
((%left-node :initarg :left-node
:accessor left-node)
(%right-node :initarg :right-node
:accessor right-node)
(%point :initarg :point
:accessor point)))
(defclass fundamental-leaf-node (fundamental-node)
())
(defclass standard-leaf-node (fundamental-leaf-node fundamental-node)
((%predictions :initarg :predictions
:accessor predictions))
(:default-initargs :predictions nil))
(defclass fundamental-tree-training-parameters
(statistical-learning.mp:fundamental-model-parameters)
())
(defclass basic-tree-training-parameters
(fundamental-tree-training-parameters)
())
(defclass standard-tree-training-parameters (basic-tree-training-parameters)
((%maximal-depth :initarg :maximal-depth
:reader maximal-depth)
(%minimal-difference :initarg :minimal-difference
:reader minimal-difference)
(%minimal-size :initarg :minimal-size
:reader minimal-size)
(%parallel :initarg :parallel
:reader parallel)
(%splitter :initarg :splitter
:reader splitter))
(:default-initargs
:splitter (sl.common:lift (make 'random-attribute-splitter)
'random-splitter
:trials-count 20)))
(defclass tree-training-state (sl.mp:fundamental-training-state)
((%attribute-indexes :initarg :attributes
:accessor attribute-indexes)
(%data-points :initarg :data-points
:accessor sl.mp:data-points)
(%depth :initarg :depth
:reader depth)
(%loss :initarg :loss
:reader loss)
(%target-data :initarg :target-data
:reader sl.mp:target-data)
(%weights :initarg :weights
:reader sl.mp:weights)
(%split-point :initarg :split-point
:accessor split-point)
(%train-data :initarg :train-data
:reader sl.mp:train-data)
(%spritter-state :initarg :splitter-state
:accessor splitter-state)
(%parent-state :initarg :parent-state
:reader parent-state))
(:default-initargs :depth 0
:attributes nil
:split-point nil
:splitter-state nil
:weights nil
:parent-state nil
:data-points nil))
(defclass tree-model (statistical-learning.mp:supervised-model)
((%root :initarg :root
:writer write-root
:reader root)
(%attribute-indexes :initarg :attribute-indexes
:reader attribute-indexes)
(%forced :initarg :forced
:accessor forced)
(%weight :initarg :weight
:accessor weight))
(:default-initargs
:forced nil
:weight 1.0))
(defclass contributed-predictions ()
((%training-parameters :initarg :training-parameters
:reader sl.mp:training-parameters)
(%predictions-lock :initarg :predictions-lock
:reader predictions-lock)
(%contributions-count :initarg :contributions-count
:accessor contributions-count)
(%indexes :initarg :indexes
:reader indexes)
(%sums :initarg :sums
:accessor sums))
(:default-initargs
:contributions-count 0.0
:predictions-lock (bt:make-lock)))
| |
07a88cba50acd79d0510db9c4a94a889ccbd2d16e47c307519ee4aeb1614feab | sdanzan/erlang-systools | systools_app.erl | %%% --------------------------------------------------------------------------
%%% @doc Dummy application module for the systool library.
%%% --------------------------------------------------------------------------
-module(systools_app).
-behaviour(application).
%% Application callbacks
-export([start/2, stop/1]).
%% ===================================================================
%% Application callbacks
%% ===================================================================
start(_StartType, _StartArgs) ->
systools_sup:start_link().
stop(_State) ->
ok.
| null | https://raw.githubusercontent.com/sdanzan/erlang-systools/ced3faf1c807d36c528e53cbb366d69f464ff4e5/src/systools_app.erl | erlang | --------------------------------------------------------------------------
@doc Dummy application module for the systool library.
--------------------------------------------------------------------------
Application callbacks
===================================================================
Application callbacks
=================================================================== |
-module(systools_app).
-behaviour(application).
-export([start/2, stop/1]).
start(_StartType, _StartArgs) ->
systools_sup:start_link().
stop(_State) ->
ok.
|
076e3ebae07fc3cfd13127e88254e84febaf07442b5b94c85bf9bee8250118df | johnwhitington/camlpdf | pdfgenlex.mli | (** A very fast lexer for very basic tokens *)
(** To avoid too much storage allocation (and hence garbage collection), we use
the same data type for this very basic lexing module as for the main lexing in
[Pdfread]. Eventually, we may unify this with the parsing type too. *)
type t =
| LexNull
| LexBool of bool
| LexInt of int
| LexReal of float
| LexString of string
| LexName of string
| LexLeftSquare
| LexRightSquare
| LexLeftDict
| LexRightDict
| LexStream of Pdf.stream
| LexEndStream
| LexObj
| LexEndObj
| LexR
| LexComment
| StopLexing
| LexNone
(** For debug only. *)
val string_of_token : t -> string
(** For debug only. *)
val string_of_tokens : t list -> string
* a single token from a [ Pdfio.input ] .
val lex_single : (Pdfio.input -> t)
(** Lex all the token in a [Pdfio.input]. *)
val lex : (Pdfio.input -> t list)
* all the tokens from a string .
val lex_string : string -> t list
| null | https://raw.githubusercontent.com/johnwhitington/camlpdf/88d781d4395f0b84cae384bf8365e5989f5d586f/pdfgenlex.mli | ocaml | * A very fast lexer for very basic tokens
* To avoid too much storage allocation (and hence garbage collection), we use
the same data type for this very basic lexing module as for the main lexing in
[Pdfread]. Eventually, we may unify this with the parsing type too.
* For debug only.
* For debug only.
* Lex all the token in a [Pdfio.input]. |
type t =
| LexNull
| LexBool of bool
| LexInt of int
| LexReal of float
| LexString of string
| LexName of string
| LexLeftSquare
| LexRightSquare
| LexLeftDict
| LexRightDict
| LexStream of Pdf.stream
| LexEndStream
| LexObj
| LexEndObj
| LexR
| LexComment
| StopLexing
| LexNone
val string_of_token : t -> string
val string_of_tokens : t list -> string
* a single token from a [ Pdfio.input ] .
val lex_single : (Pdfio.input -> t)
val lex : (Pdfio.input -> t list)
* all the tokens from a string .
val lex_string : string -> t list
|
e246e5128d9ef7ce43b5a56466e0718b1213a88a96e5700f62ed16ac8b34b6df | xapi-project/xenvm | host.mli | val create : string -> unit Lwt.t
val connect : Config.Xenvmd.t -> string -> unit Lwt.t
val disconnect : cooperative:bool -> string -> unit Lwt.t
val destroy : string -> unit Lwt.t
val all : unit -> Xenvm_interface.host list Lwt.t
val reconnect_all : Config.Xenvmd.t -> unit Lwt.t
val flush_all : unit -> unit Lwt.t
val shutdown : unit -> unit Lwt.t
| null | https://raw.githubusercontent.com/xapi-project/xenvm/401754dfb05376b5fc78c9290453b006f6f38aa1/xenvmd/host.mli | ocaml | val create : string -> unit Lwt.t
val connect : Config.Xenvmd.t -> string -> unit Lwt.t
val disconnect : cooperative:bool -> string -> unit Lwt.t
val destroy : string -> unit Lwt.t
val all : unit -> Xenvm_interface.host list Lwt.t
val reconnect_all : Config.Xenvmd.t -> unit Lwt.t
val flush_all : unit -> unit Lwt.t
val shutdown : unit -> unit Lwt.t
| |
4ca739b1b824e270ecd8823f4125edda7e4007be9d564037f4ccf8099e0addf2 | static-analysis-engineering/codehawk | jCHSignatureBindings.mli | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHLanguage
(* jchlib *)
open JCHBasicTypesAPI
class type signature_bindings_int =
object
(* accessors *)
method get_this_variable: variable_t
method get_indexed_ref_arguments: (int * variable_t) list
method get_indexed_string_arguments: (int * variable_t) list
method get_ref_argument_types : (variable_t * value_type_t) list
(* predicates *)
method has_this_variable: bool
method is_static : bool
end
val get_signature_bindings:
method_signature_int -> bindings_t -> bool -> signature_bindings_int
| null | https://raw.githubusercontent.com/static-analysis-engineering/codehawk/98ced4d5e6d7989575092df232759afc2cb851f6/CodeHawk/CHJ/jchpre/jCHSignatureBindings.mli | ocaml | jchlib
accessors
predicates | = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author : ------------------------------------------------------------------------------
The MIT License ( MIT )
Copyright ( c ) 2005 - 2020 Kestrel Technology LLC
Permission is hereby granted , free of charge , to any person obtaining a copy
of this software and associated documentation files ( the " Software " ) , to deal
in the Software without restriction , including without limitation the rights
to use , copy , modify , merge , publish , distribute , sublicense , and/or sell
copies of the Software , and to permit persons to whom the Software is
furnished to do so , subject to the following conditions :
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY ,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT . IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM , DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE .
= = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = = =
CodeHawk Java Analyzer
Author: Henny Sipma
------------------------------------------------------------------------------
The MIT License (MIT)
Copyright (c) 2005-2020 Kestrel Technology LLC
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
============================================================================= *)
chlib
open CHLanguage
open JCHBasicTypesAPI
class type signature_bindings_int =
object
method get_this_variable: variable_t
method get_indexed_ref_arguments: (int * variable_t) list
method get_indexed_string_arguments: (int * variable_t) list
method get_ref_argument_types : (variable_t * value_type_t) list
method has_this_variable: bool
method is_static : bool
end
val get_signature_bindings:
method_signature_int -> bindings_t -> bool -> signature_bindings_int
|
d5df07410fb769cf87fa700182d581152e1ee066b501b004295b8665f17c8b21 | basho/riak_core | riak_core_handoff_cli.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2014 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(riak_core_handoff_cli).
-behavior(clique_handler).
-export([register_cli/0]).
-spec register_cli() -> ok.
register_cli() ->
register_cli_usage(),
register_cli_cfg(),
register_cli_cmds(),
register_config_whitelist(),
ok.
register_cli_cmds() ->
register_enable_disable_commands(),
ok = clique:register_command(["riak-admin", "handoff", "summary"], [], [],
fun riak_core_handoff_status:handoff_summary/3),
ok = clique:register_command(["riak-admin", "handoff", "details"], [],
node_and_all_flags(), fun riak_core_handoff_status:handoff_details/3),
ok = clique:register_command(["riak-admin", "handoff", "config"], [],
node_and_all_flags(), fun handoff_config/3).
node_and_all_flags() ->
[{node, [{shortname, "n"}, {longname, "node"},
{typecast, fun clique_typecast:to_node/1}]},
{all, [{shortname, "a"}, {longname, "all"}]}].
register_enable_disable_commands() ->
CmdList = [handoff_cmd_spec(EnOrDis, Dir) ||
EnOrDis <- [enable, disable],
Dir <- [inbound, outbound, both]],
lists:foreach(fun(Args) -> apply(clique, register_command, Args) end,
CmdList).
register_cli_cfg() ->
lists:foreach(fun(K) ->
clique:register_config(K, fun handoff_cfg_change_callback/2)
end, [["handoff", "inbound"], ["handoff", "outbound"]]),
clique:register_config(["transfer_limit"], fun set_transfer_limit/2).
register_config_whitelist() ->
ok = clique:register_config_whitelist(["transfer_limit",
"handoff.outbound",
"handoff.inbound"]).
register_cli_usage() ->
clique:register_usage(["riak-admin", "handoff"], handoff_usage()),
clique:register_usage(["riak-admin", "handoff", "enable"], handoff_enable_disable_usage()),
clique:register_usage(["riak-admin", "handoff", "disable"], handoff_enable_disable_usage()),
clique:register_usage(["riak-admin", "handoff", "summary"], summary_usage()),
clique:register_usage(["riak-admin", "handoff", "details"], details_usage()),
clique:register_usage(["riak-admin", "handoff", "config"], config_usage()).
handoff_usage() ->
[
"riak-admin handoff <sub-command>\n\n",
" Display handoff-related status and settings.\n\n",
" Sub-commands:\n",
" enable Enable handoffs for the specified node(s)\n",
" disable Disable handoffs for the specified node(s)\n"
" summary Show cluster-wide handoff summary\n",
" details Show details of all active transfers (per-node or cluster wide)\n",
" config Show all configuration for handoff subsystem\n\n",
" Use --help after a sub-command for more details.\n"
].
config_usage() ->
["riak-admin handoff config\n\n",
" Display handoff related configuration variables\n\n",
"Options\n",
" -n <node>, --node <node>\n",
" Show the settings on the specified node.\n",
" This flag can currently take only one node and be used once\n"
" -a, --all\n",
" Show the settings on every node in the cluster\n"
].
handoff_enable_disable_usage() ->
["riak-admin handoff <enable|disable> <inbound|outbound|both> ",
"[-n <node>|--all]\n\n",
" Enable or disable handoffs on the local or specified node(s).\n",
" If handoffs are disabled in a direction, any currently\n",
" running handoffs in that direction will be terminated.\n\n"
"Options\n",
" -n <node>, --node <node>\n",
" Modify the setting on the specified node.\n",
" This flag can currently take only one node and be used once\n"
" -a, --all\n",
" Modify the setting on every node in the cluster\n"
].
handoff_cmd_spec(EnOrDis, Direction) ->
Cmd = ["riak-admin", "handoff", atom_to_list(EnOrDis), atom_to_list(Direction)],
Callback = fun(_, [], Flags) ->
handoff_change_enabled_setting(EnOrDis, Direction, Flags)
end,
[
Cmd,
KeySpecs
[{all, [{shortname, "a"},
{longname, "all"}]},
{node, [{shortname, "n"},
FlagSpecs
Callback
].
summary_usage() ->
[
"riak-admin handoff summary\n\n",
" Display a summarized view of handoffs.\n"
].
details_usage() ->
[
"riak-admin handoff details [--node <node>|--all]\n\n",
" Display a detailed list of handoffs. Defaults to local node.\n\n"
"Options\n",
" -n <node>, --node <node>\n",
" Display the handoffs on the specified node.\n",
" This flag can currently take only one node and be used once\n"
" -a, --all\n",
" Display the handoffs on every node in the cluster\n"
].
handoff_config(_CmdBase, _Args, Flags) when length(Flags) > 1 ->
[clique_status:text("Can't specify both --all and --node flags")];
handoff_config(_CmdBase, _Args, []) ->
clique_config:show(config_vars(), []);
handoff_config(_CmdBase, _Args, [{all, Val}]) ->
clique_config:show(config_vars(), [{all, Val}]);
handoff_config(_CmdBase, _Args, [{node, Node}]) ->
clique_config:show(config_vars(), [{node, Node}]).
config_vars() ->
["transfer_limit", "handoff.outbound", "handoff.inbound", "handoff.port"].
handoff_change_enabled_setting(_EnOrDis, _Direction, Flags) when length(Flags) > 1 ->
[clique_status:text("Can't specify both --all and --node flags")];
handoff_change_enabled_setting(EnOrDis, Direction, [{all, _}]) ->
Nodes = clique_nodes:nodes(),
{_, Down} = rpc:multicall(Nodes,
riak_core_handoff_manager,
handoff_change_enabled_setting,
[EnOrDis, Direction],
60000),
case Down of
[] ->
[clique_status:text("All nodes successfully updated")];
_ ->
Output = io_lib:format("Handoff ~s failed on nodes: ~p", [EnOrDis, Down]),
[clique_status:alert([clique_status:text(Output)])]
end;
handoff_change_enabled_setting(EnOrDis, Direction, [{node, NodeStr}]) ->
Node = clique_typecast:to_node(NodeStr),
Result = clique_nodes:safe_rpc(Node,
riak_core_handoff_manager, handoff_change_enabled_setting,
[EnOrDis, Direction]),
case Result of
{badrpc, Reason} ->
Output = io_lib:format("Failed to update handoff settings on node ~p. Reason: ~p",
[Node, Reason]),
[clique_status:alert([clique_status:text(Output)])];
_ ->
[clique_status:text("Handoff setting successfully updated")]
end;
handoff_change_enabled_setting(EnOrDis, Direction, []) ->
riak_core_handoff_manager:handoff_change_enabled_setting(EnOrDis, Direction),
[clique_status:text("Handoff setting successfully updated")].
handoff_cfg_change_callback(["handoff", Cmd], "off") ->
case Cmd of
"inbound" ->
riak_core_handoff_manager:kill_handoffs_in_direction(inbound),
"Inbound handoffs terminated";
"outbound" ->
riak_core_handoff_manager:kill_handoffs_in_direction(outbound),
"Outbound handoffs terminated"
end;
handoff_cfg_change_callback(_, _) ->
"".
set_transfer_limit(["transfer_limit"], LimitStr) ->
Limit = list_to_integer(LimitStr),
riak_core_handoff_manager:set_concurrency(Limit),
"".
| null | https://raw.githubusercontent.com/basho/riak_core/762ec81ae9af9a278e853f1feca418b9dcf748a3/src/riak_core_handoff_cli.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
------------------------------------------------------------------- | Copyright ( c ) 2014 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(riak_core_handoff_cli).
-behavior(clique_handler).
-export([register_cli/0]).
-spec register_cli() -> ok.
register_cli() ->
register_cli_usage(),
register_cli_cfg(),
register_cli_cmds(),
register_config_whitelist(),
ok.
register_cli_cmds() ->
register_enable_disable_commands(),
ok = clique:register_command(["riak-admin", "handoff", "summary"], [], [],
fun riak_core_handoff_status:handoff_summary/3),
ok = clique:register_command(["riak-admin", "handoff", "details"], [],
node_and_all_flags(), fun riak_core_handoff_status:handoff_details/3),
ok = clique:register_command(["riak-admin", "handoff", "config"], [],
node_and_all_flags(), fun handoff_config/3).
node_and_all_flags() ->
[{node, [{shortname, "n"}, {longname, "node"},
{typecast, fun clique_typecast:to_node/1}]},
{all, [{shortname, "a"}, {longname, "all"}]}].
register_enable_disable_commands() ->
CmdList = [handoff_cmd_spec(EnOrDis, Dir) ||
EnOrDis <- [enable, disable],
Dir <- [inbound, outbound, both]],
lists:foreach(fun(Args) -> apply(clique, register_command, Args) end,
CmdList).
register_cli_cfg() ->
lists:foreach(fun(K) ->
clique:register_config(K, fun handoff_cfg_change_callback/2)
end, [["handoff", "inbound"], ["handoff", "outbound"]]),
clique:register_config(["transfer_limit"], fun set_transfer_limit/2).
register_config_whitelist() ->
ok = clique:register_config_whitelist(["transfer_limit",
"handoff.outbound",
"handoff.inbound"]).
register_cli_usage() ->
clique:register_usage(["riak-admin", "handoff"], handoff_usage()),
clique:register_usage(["riak-admin", "handoff", "enable"], handoff_enable_disable_usage()),
clique:register_usage(["riak-admin", "handoff", "disable"], handoff_enable_disable_usage()),
clique:register_usage(["riak-admin", "handoff", "summary"], summary_usage()),
clique:register_usage(["riak-admin", "handoff", "details"], details_usage()),
clique:register_usage(["riak-admin", "handoff", "config"], config_usage()).
handoff_usage() ->
[
"riak-admin handoff <sub-command>\n\n",
" Display handoff-related status and settings.\n\n",
" Sub-commands:\n",
" enable Enable handoffs for the specified node(s)\n",
" disable Disable handoffs for the specified node(s)\n"
" summary Show cluster-wide handoff summary\n",
" details Show details of all active transfers (per-node or cluster wide)\n",
" config Show all configuration for handoff subsystem\n\n",
" Use --help after a sub-command for more details.\n"
].
config_usage() ->
["riak-admin handoff config\n\n",
" Display handoff related configuration variables\n\n",
"Options\n",
" -n <node>, --node <node>\n",
" Show the settings on the specified node.\n",
" This flag can currently take only one node and be used once\n"
" -a, --all\n",
" Show the settings on every node in the cluster\n"
].
handoff_enable_disable_usage() ->
["riak-admin handoff <enable|disable> <inbound|outbound|both> ",
"[-n <node>|--all]\n\n",
" Enable or disable handoffs on the local or specified node(s).\n",
" If handoffs are disabled in a direction, any currently\n",
" running handoffs in that direction will be terminated.\n\n"
"Options\n",
" -n <node>, --node <node>\n",
" Modify the setting on the specified node.\n",
" This flag can currently take only one node and be used once\n"
" -a, --all\n",
" Modify the setting on every node in the cluster\n"
].
handoff_cmd_spec(EnOrDis, Direction) ->
Cmd = ["riak-admin", "handoff", atom_to_list(EnOrDis), atom_to_list(Direction)],
Callback = fun(_, [], Flags) ->
handoff_change_enabled_setting(EnOrDis, Direction, Flags)
end,
[
Cmd,
KeySpecs
[{all, [{shortname, "a"},
{longname, "all"}]},
{node, [{shortname, "n"},
FlagSpecs
Callback
].
summary_usage() ->
[
"riak-admin handoff summary\n\n",
" Display a summarized view of handoffs.\n"
].
details_usage() ->
[
"riak-admin handoff details [--node <node>|--all]\n\n",
" Display a detailed list of handoffs. Defaults to local node.\n\n"
"Options\n",
" -n <node>, --node <node>\n",
" Display the handoffs on the specified node.\n",
" This flag can currently take only one node and be used once\n"
" -a, --all\n",
" Display the handoffs on every node in the cluster\n"
].
handoff_config(_CmdBase, _Args, Flags) when length(Flags) > 1 ->
[clique_status:text("Can't specify both --all and --node flags")];
handoff_config(_CmdBase, _Args, []) ->
clique_config:show(config_vars(), []);
handoff_config(_CmdBase, _Args, [{all, Val}]) ->
clique_config:show(config_vars(), [{all, Val}]);
handoff_config(_CmdBase, _Args, [{node, Node}]) ->
clique_config:show(config_vars(), [{node, Node}]).
config_vars() ->
["transfer_limit", "handoff.outbound", "handoff.inbound", "handoff.port"].
handoff_change_enabled_setting(_EnOrDis, _Direction, Flags) when length(Flags) > 1 ->
[clique_status:text("Can't specify both --all and --node flags")];
handoff_change_enabled_setting(EnOrDis, Direction, [{all, _}]) ->
Nodes = clique_nodes:nodes(),
{_, Down} = rpc:multicall(Nodes,
riak_core_handoff_manager,
handoff_change_enabled_setting,
[EnOrDis, Direction],
60000),
case Down of
[] ->
[clique_status:text("All nodes successfully updated")];
_ ->
Output = io_lib:format("Handoff ~s failed on nodes: ~p", [EnOrDis, Down]),
[clique_status:alert([clique_status:text(Output)])]
end;
handoff_change_enabled_setting(EnOrDis, Direction, [{node, NodeStr}]) ->
Node = clique_typecast:to_node(NodeStr),
Result = clique_nodes:safe_rpc(Node,
riak_core_handoff_manager, handoff_change_enabled_setting,
[EnOrDis, Direction]),
case Result of
{badrpc, Reason} ->
Output = io_lib:format("Failed to update handoff settings on node ~p. Reason: ~p",
[Node, Reason]),
[clique_status:alert([clique_status:text(Output)])];
_ ->
[clique_status:text("Handoff setting successfully updated")]
end;
handoff_change_enabled_setting(EnOrDis, Direction, []) ->
riak_core_handoff_manager:handoff_change_enabled_setting(EnOrDis, Direction),
[clique_status:text("Handoff setting successfully updated")].
handoff_cfg_change_callback(["handoff", Cmd], "off") ->
case Cmd of
"inbound" ->
riak_core_handoff_manager:kill_handoffs_in_direction(inbound),
"Inbound handoffs terminated";
"outbound" ->
riak_core_handoff_manager:kill_handoffs_in_direction(outbound),
"Outbound handoffs terminated"
end;
handoff_cfg_change_callback(_, _) ->
"".
set_transfer_limit(["transfer_limit"], LimitStr) ->
Limit = list_to_integer(LimitStr),
riak_core_handoff_manager:set_concurrency(Limit),
"".
|
f8fd5c17e13dfa4468ac6eed3c96a905cd5c27a307df1a5b54da7b9cc29df454 | moby/datakit | datakit_github_conv.ml | open Lwt.Infix
open Datakit_client.Path.Infix
open Datakit_github
open Result
open Datakit_client
let src = Logs.Src.create "dkt-github" ~doc:"Github to Git bridge"
module Log = (val Logs.src_log src : Logs.LOG)
let ( >>*= ) x f = x >>= function Ok x -> f x | Error _ as e -> Lwt.return e
let pp_path = Fmt.(list ~sep:(unit "/") string)
let mapo f = function None -> None | Some x -> Some (f x)
let failf fmt = Fmt.kstrf failwith fmt
module Make (DK : S) = struct
type tree = DK.Tree.t
conversion between GitHub and DataKit states .
let path s = Path.of_steps_exn s
TODO : Lots of these functions used to ignore errors silently . This can lead
to bugs in the users of the library ( e.g. we lost our 9p connection but
we think instead that the file we wanted does n't exist ) . For now , I 've
converted it to log errors in these cases but continue with the old
behaviour . Assuming we do n't see these errors being logged , we can
change the code to raise exceptions instead .
to bugs in the users of the library (e.g. we lost our 9p connection but
we think instead that the file we wanted doesn't exist). For now, I've
converted it to log errors in these cases but continue with the old
behaviour. Assuming we don't see these errors being logged, we can
change the code to raise exceptions instead. *)
let remove_if_exists t path =
DK.Transaction.remove t path >|= function
| Error `Does_not_exist | Ok () -> ()
| Error e -> failf "remove_if_exists(%a): %a" Path.pp path DK.pp_error e
let read_dir_if_exists t dir =
DK.Tree.read_dir t dir >|= function
| Ok dirs -> dirs
| Error (`Does_not_exist | `Not_dir) -> []
| Error e -> failf "safe_read_dir(%a): %a" Path.pp dir DK.pp_error e
let exists_dir t dir =
DK.Tree.exists_dir t dir >|= function
| Ok b -> b
| Error `Not_dir ->
false (* Some parent doesn't exist or isn't a directory *)
| Error e -> failf "exists_dir(%a): %a" Path.pp dir DK.pp_error e
let exists_file t file =
DK.Tree.exists_file t file >|= function
| Ok b -> b
| Error `Not_dir ->
false (* Some parent doesn't exist or isn't a directory *)
| Error e -> failf "exists_file(%a): %a" Path.pp file DK.pp_error e
let read_file_if_exists ?(trim = true) t file =
DK.Tree.read_file t file >|= function
| Ok b ->
let b = Cstruct.to_string b in
Some (if trim then String.trim b else b)
| Error (`Does_not_exist | `Not_dir) -> None
| Error e -> failf "read_file(%a): %a" Path.pp file DK.pp_error e
let create_file tr file contents =
match Path.basename file with
| None -> failf "%a is not a file" Path.pp file
| Some _ -> (
let dir = Path.dirname file in
( DK.Transaction.make_dirs tr dir >>*= fun () ->
DK.Transaction.create_or_replace_file tr file contents )
>|= function
| Ok () -> ()
| Error e ->
failf "Got %a while creating %a" DK.pp_error e Path.pp file )
let tr_diff tr c =
DK.Transaction.diff tr c >|= function
| Ok d -> d
| Error e -> failf "tr_diff: %a" DK.pp_error e
let lift_errors name f =
f >>= function
| Error e -> Lwt.fail_with @@ Fmt.strf "%s: %a" name DK.pp_error e
| Ok x -> Lwt.return x
let path_of_diff = function
| `Added f | `Removed f | `Updated f -> Path.unwrap f
let safe_tree c =
DK.Commit.tree c >>= function
| Error e -> Fmt.kstrf Lwt.fail_with "%a" DK.pp_error e
| Ok tree -> Lwt.return tree
type dirty = Elt.IdSet.t
let changes diff =
let rdecons l =
match List.rev l with [] -> assert false | h :: t -> (h, List.rev t)
in
List.fold_left
(fun (acc, dirty) d ->
let path = path_of_diff d in
let added = match d with `Removed _ -> false | _ -> true in
let t =
match path with
| [] | [ _ ] -> None
| user :: repo :: path -> (
let user = User.v user in
let repo = Repo.v ~user ~repo in
let pr repo id = `PR (repo, int_of_string id) in
match path with
| [] | [ ".monitor" ] -> Some (`Repo repo)
| [ ".dirty" ] when added -> Some (`Dirty (`Repo repo))
| [ "pr"; id; ".dirty" ] when added -> Some (`Dirty (pr repo id))
| "pr" :: id :: _ -> Some (pr repo id)
| [ "commit"; id ] -> Some (`Commit (Commit.v repo id))
| "commit" :: id :: "status" :: (_ :: _ :: _ as tl) ->
let _, last = rdecons tl in
Some (`Status (Commit.v repo id, last))
| "ref" :: (_ :: _ :: _ as tl) ->
let f, last = rdecons tl in
let r = `Ref (repo, last) in
if f = ".dirty" then Some (`Dirty r) else Some r
| _ -> None )
in
match t with
| None -> (acc, dirty)
| Some (`Dirty d) -> (acc, Elt.IdSet.add d dirty)
| Some (#Elt.id as e) -> (Elt.IdSet.add e acc, dirty))
(Elt.IdSet.empty, Elt.IdSet.empty)
diff
let safe_diff x y =
DK.Commit.diff x y >|= function
| Error e ->
Log.err (fun f -> f "safe_diff: %a" DK.pp_error e);
(Elt.IdSet.empty, Elt.IdSet.empty)
| Ok d -> changes d
let walk (type elt t) (module Set : SET with type elt = elt and type t = t)
tree root (file, fn) =
let rec aux acc = function
| [] -> Lwt.return acc
| context :: todo -> (
match Path.of_steps context with
| Error e ->
Log.err (fun l -> l "%s" e);
aux acc todo
| Ok ctx -> (
let dir = root /@ ctx in
read_dir_if_exists tree dir >>= fun childs ->
let todo = List.map (fun c -> context @ [ c ]) childs @ todo in
exists_file tree (dir / file) >>= function
| false -> aux acc todo
| true -> (
fn (Path.unwrap ctx) >>= function
| None -> aux acc todo
| Some e -> aux (Set.add e acc) todo ) ) )
in
aux Set.empty [ [] ]
let empty = Path.empty
let root r = empty / User.name r.Repo.user / r.Repo.repo
(* Repos *)
let repo tree repo =
read_file_if_exists tree (root repo / ".monitor") >|= function
| None ->
Log.debug (fun l -> l "repo %a -> false" Repo.pp repo);
None
| Some _ ->
Log.debug (fun l -> l "repo %a -> true" Repo.pp repo);
Some repo
let reduce_repos = List.fold_left Repo.Set.union Repo.Set.empty
let repos tree =
let root = Path.empty in
read_dir_if_exists tree root >>= fun users ->
Lwt_list.map_p
(fun user ->
read_dir_if_exists tree (root / user) >>= fun repos ->
Lwt_list.map_p
(fun repo ->
read_file_if_exists tree (root / user / repo / ".monitor")
>|= function
| None -> Repo.Set.empty
| Some _ ->
let user = User.v user in
let repo = Repo.v ~user ~repo in
Repo.Set.singleton repo)
repos
>|= reduce_repos)
users
>|= fun repos ->
let repos = reduce_repos repos in
Log.debug (fun l -> l "repos -> @;@[<2>%a@]" Repo.Set.pp repos);
repos
let update_repo_aux tr s r =
let dir = root r in
match s with
| `Ignored -> remove_if_exists tr (root r / ".monitor")
| `Monitored ->
let remove =
DK.Transaction.make_dirs tr dir >>*= fun () ->
let empty = Cstruct.of_string "" in
DK.Transaction.create_or_replace_file tr (dir / ".monitor") empty
in
lift_errors "update_repo" remove
let update_repo tr r = update_repo_aux tr `Monitored r
let remove_repo tr r = update_repo_aux tr `Ignored r
let update_commit tr c =
let dir = root (Commit.repo c) / "commit" in
lift_errors "update_commit" @@ DK.Transaction.make_dirs tr dir
(* PRs *)
let update_pr t pr =
let dir = root (PR.repo pr) / "pr" / string_of_int pr.PR.number in
Log.debug (fun l -> l "update_pr %s" @@ Path.to_hum dir);
let update =
DK.Transaction.make_dirs t dir >>*= fun () ->
let write ?prefix ?(newline = true) k v =
let v = Cstruct.of_string (if newline then v ^ "\n" else v) in
let dir = match prefix with None -> dir | Some p -> dir /@ p in
DK.Transaction.create_or_replace_file t (dir / k) v
in
write "head" (PR.commit_hash pr) >>*= fun () ->
write "state" (PR.string_of_state pr.PR.state) >>*= fun () ->
write "title" pr.PR.title >>*= fun () ->
write "owner" (User.name pr.PR.owner) >>*= fun () ->
write "base" pr.PR.base >>*= fun () ->
remove_if_exists t (dir / "comments") >>= fun () ->
Lwt_list.mapi_p
(fun id c ->
let prefix = Path.empty / "comments" / string_of_int id in
DK.Transaction.make_dirs t (dir /@ prefix) >>*= fun () ->
let user = User.name c.Comment.user in
write ~prefix "id" (string_of_int c.Comment.id) >>*= fun () ->
write ~prefix "user" user >>*= fun () ->
write ~newline:false ~prefix "body" c.Comment.body)
(Array.to_list pr.PR.comments)
>>= fun l ->
List.fold_left
(fun acc x ->
acc >>*= fun () ->
Lwt.return x)
(Lwt.return (Ok ())) l
in
lift_errors "update_pr" update
let remove_pr t (repo, num) =
let dir = root repo / "pr" / string_of_int num in
Log.debug (fun l -> l "remove_pr %s" @@ Path.to_hum dir);
remove_if_exists t dir
let comments tree dir =
read_dir_if_exists tree dir >>= fun ids ->
Lwt_list.map_p
(fun n ->
read_file_if_exists tree (dir / n / "id") >>= fun rid ->
read_file_if_exists tree (dir / n / "user") >>= fun user ->
read_file_if_exists ~trim:false tree (dir / n / "body") >|= fun body ->
let body = match body with None -> "" | Some b -> b in
let id =
match rid with
| None -> None
| Some id -> ( try Some (int_of_string id) with Failure _ -> None )
in
match (id, user) with
| Some id, Some name ->
let user = User.v name in
Some (Comment.v ~id ~user ~body)
| Some id, None ->
Log.debug (fun l ->
l "error: %a/comments/%d/author does not exist" Path.pp dir id);
None
| _ ->
Log.debug (fun l ->
l "error: %a/comments: %s is not a valid id" Path.pp dir n);
None)
ids
>|= fun comments ->
List.fold_left
(fun acc -> function None -> acc | Some x -> x :: acc)
[] (List.rev comments)
|> Array.of_list
let pr tree (repo, number) =
let dir = root repo / "pr" / string_of_int number in
Log.debug (fun l -> l "pr %a" Path.pp dir);
read_file_if_exists tree (dir / "head") >>= fun head ->
read_file_if_exists tree (dir / "state") >>= fun state ->
read_file_if_exists tree (dir / "title") >>= fun title ->
read_file_if_exists tree (dir / "owner") >>= fun owner ->
comments tree (dir / "comments") >>= fun comments ->
read_file_if_exists tree (dir / "base") >|= fun base ->
match (head, state, owner) with
| None, _, _ ->
Log.debug (fun l ->
l "error: %a/pr/%d/head does not exist" Repo.pp repo number);
None
| _, None, _ ->
Log.debug (fun l ->
l "error: %a/pr/%d/state does not exist" Repo.pp repo number);
None
| _, _, None ->
Log.debug (fun l ->
l "error: %a/pr/%d/owner does not exist" Repo.pp repo number);
None
| Some id, Some state, Some owner ->
let base =
match base with
| Some b -> b
| None ->
Log.debug (fun l ->
l
"error: %a/pr/%d/base does not exist, using 'master' \
instead"
Repo.pp repo number);
"master"
in
let owner = User.v owner in
let head = Commit.v repo id in
let title = match title with None -> "" | Some t -> t in
let state =
match PR.state_of_string state with
| Some s -> s
| None ->
Log.err (fun l ->
l "%s is not a valid PR state, picking `Closed instead" state);
`Closed
in
Some (PR.v ~state ~title ~base ~owner ~comments head number)
let reduce_prs = List.fold_left PR.Set.union PR.Set.empty
let prs_of_repo tree repo =
let dir = root repo / "pr" in
read_dir_if_exists tree dir >>= fun nums ->
Lwt_list.map_p
(fun n ->
pr tree (repo, int_of_string n) >|= function
| None -> PR.Set.empty
| Some p -> PR.Set.singleton p)
nums
>|= fun prs ->
let prs = reduce_prs prs in
Log.debug (fun l ->
l "prs_of_repo %a -> @;@[<2>%a@]" Repo.pp repo PR.Set.pp prs);
prs
let maybe_repos tree = function
| None -> repos tree
| Some rs -> Lwt.return rs
let prs ?repos:rs tree =
maybe_repos tree rs >>= fun repos ->
Lwt_list.map_p (prs_of_repo tree) (Repo.Set.elements repos) >|= fun prs ->
let prs = reduce_prs prs in
Log.debug (fun l -> l "prs -> @;@[<2>%a@]" PR.Set.pp prs);
prs
(* Commits *)
let commit tree { Commit.repo; hash } =
let dir = root repo / "commit" / hash in
exists_dir tree dir >|= function
| false ->
Log.debug (fun l -> l "commit {%a %s} -> false" Repo.pp repo hash);
None
| true ->
Log.debug (fun l -> l "commit {%a %s} -> true" Repo.pp repo hash);
Some (Commit.v repo hash)
let commits_of_repo tree repo =
let dir = root repo / "commit" in
read_dir_if_exists tree dir >|= fun commits ->
List.fold_left
(fun s id -> Commit.Set.add (Commit.v repo id) s)
Commit.Set.empty commits
|> fun cs ->
Log.debug (fun l ->
l "commits_of_repo %a -> @;@[<2>%a@]" Repo.pp repo Commit.Set.pp cs);
cs
let reduce_commits = List.fold_left Commit.Set.union Commit.Set.empty
let commits ?repos:rs tree =
maybe_repos tree rs >>= fun repos ->
Lwt_list.map_p (commits_of_repo tree) (Repo.Set.elements repos)
>|= fun cs ->
let cs = reduce_commits cs in
Log.debug (fun l -> l "commits -> @;@[<2>%a@]" Commit.Set.pp cs);
cs
(* Status *)
let update_status t s =
let dir =
root (Status.repo s)
/ "commit" / Status.commit_hash s / "status"
/@ path (Status.context s)
in
Log.debug (fun l -> l "update_status %a" Path.pp dir);
lift_errors "update_status" (DK.Transaction.make_dirs t dir) >>= fun () ->
let description =
match Status.description s with
| None -> None
| Some d -> Some (String.trim d)
in
let kvs =
[ ("description", description);
("state", Some (Status_state.to_string @@ Status.state s));
("target_url", mapo Uri.to_string (Status.url s))
]
in
Lwt_list.iter_p
(fun (k, v) ->
match v with
| None -> remove_if_exists t (dir / k)
| Some v ->
let v = Cstruct.of_string (v ^ "\n") in
lift_errors "update_status"
@@ DK.Transaction.create_or_replace_file t (dir / k) v)
kvs
let status tree (commit, context) =
let context = Path.of_steps_exn context in
let dir =
root (Commit.repo commit)
/ "commit" / Commit.hash commit / "status" /@ context
in
read_file_if_exists tree (dir / "state") >>= fun state ->
match state with
| None ->
Log.debug (fun l -> l "status %a -> None" Path.pp dir);
Lwt.return_none
| Some str ->
let state =
match Status_state.of_string str with
| Some s -> s
| None ->
Log.err (fun l ->
l "%s: invalid state, using `Failure instead" str);
`Failure
in
Log.debug (fun l ->
l "status %a -> %a" Path.pp context Status_state.pp state);
read_file_if_exists tree (dir / "description") >>= fun description ->
read_file_if_exists tree (dir / "target_url") >|= fun url ->
let context = Path.unwrap context in
let url = mapo Uri.of_string url in
Some (Status.v ?description ?url commit context state)
let reduce_status = List.fold_left Status.Set.union Status.Set.empty
let statuses_of_commits tree commits =
Lwt_list.map_p
(fun commit ->
let dir = root (Commit.repo commit) / "commit" in
let dir = dir / Commit.hash commit / "status" in
walk
(module Status.Set)
tree dir
("state", fun c -> status tree (commit, c)))
(Commit.Set.elements commits)
>|= fun status ->
let status = reduce_status status in
Log.debug (fun l ->
l "statuses_of_commits %a -> @;@[<2>%a@]" Commit.Set.pp commits
Status.Set.pp status);
status
let maybe_commits tree = function
| None -> commits tree
| Some c -> Lwt.return c
let statuses ?commits:cs tree =
maybe_commits tree cs >>= fun commits ->
statuses_of_commits tree commits >|= fun status ->
Log.debug (fun l -> l "statuses -> @;@[<2>%a@]" Status.Set.pp status);
status
(* Refs *)
let ref tree (repo, name) =
let path = Path.of_steps_exn name in
let head = root repo / "ref" /@ path / "head" in
read_file_if_exists tree head >|= function
| None ->
Log.debug (fun l -> l "ref_ %a:%a -> None" Repo.pp repo pp_path name);
None
| Some id ->
Log.debug (fun l -> l "ref_ %a:%a -> %s" Repo.pp repo pp_path name id);
let head = Commit.v repo id in
Some (Ref.v head name)
let refs_of_repo tree repo =
let dir = root repo / "ref" in
walk (module Ref.Set) tree dir ("head", fun n -> ref tree (repo, n))
>|= fun refs ->
Log.debug (fun l ->
l "refs_of_repo %a -> @;@[<2>%a@]" Repo.pp repo Ref.Set.pp refs);
refs
let reduce_refs = List.fold_left Ref.Set.union Ref.Set.empty
let refs ?repos:rs tree =
maybe_repos tree rs >>= fun repos ->
Lwt_list.map_p (refs_of_repo tree) (Repo.Set.elements repos)
>|= fun refs ->
let refs = reduce_refs refs in
Log.debug (fun l -> l "refs -> @;@[<2>%a@]" Ref.Set.pp refs);
refs
let update_ref tr r =
let path = Path.of_steps_exn (Ref.name r) in
let dir = root (Ref.repo r) / "ref" /@ path in
Log.debug (fun l -> l "update_ref %a" Path.pp dir);
let update =
DK.Transaction.make_dirs tr dir >>*= fun () ->
let head = Cstruct.of_string (Ref.commit_hash r ^ "\n") in
DK.Transaction.create_or_replace_file tr (dir / "head") head
in
lift_errors "update_ref" update
let remove_ref tr (repo, name) =
let path = Path.of_steps_exn name in
let dir = root repo / "ref" /@ path in
Log.debug (fun l -> l "remove_ref %a" Path.pp dir);
remove_if_exists tr dir
let update_event t = function
| Event.Repo (s, r) -> update_repo_aux t s r
| Event.PR pr -> update_pr t pr
| Event.Status s -> update_status t s
| Event.Ref (`Removed r) -> remove_ref t r
| Event.Ref (`Created r | `Updated r) -> update_ref t r
| Event.Other o ->
Log.debug (fun l -> l "ignoring event: %s" @@ snd o);
Lwt.return_unit
Snapshot
let snapshot_of_repos tree repos =
commits ~repos tree >>= fun commits ->
prs ~repos tree >>= fun prs ->
statuses ~commits tree >>= fun status ->
refs ~repos tree >|= fun refs ->
Snapshot.v ~repos ~status ~prs ~refs ~commits
let snapshot_of_commit c =
safe_tree c >>= fun tree ->
repos tree >>= fun repos ->
snapshot_of_repos tree repos
(* Dirty *)
let reduce_elts = List.fold_left Elt.IdSet.union Elt.IdSet.empty
let dirty_repos tree =
let root = Path.empty in
read_dir_if_exists tree root >>= fun users ->
Lwt_list.map_p
(fun user ->
read_dir_if_exists tree (root / user) >>= fun repos ->
Lwt_list.map_p
(fun repo ->
exists_file tree (root / user / repo / ".dirty") >|= function
| false -> Elt.IdSet.empty
| true ->
let user = User.v user in
let repo = Repo.v ~user ~repo in
Elt.IdSet.singleton (`Repo repo))
repos
>|= reduce_elts)
users
>|= reduce_elts
let dirty_prs tree repo =
let dir = root repo / "pr" in
read_dir_if_exists tree dir >>= fun nums ->
Lwt_list.map_p
(fun n ->
let d = dir / n / ".dirty" in
exists_file tree d >|= function
| false -> Elt.IdSet.empty
| true -> (
try Elt.IdSet.singleton (`PR (repo, int_of_string n))
with Failure _ -> Elt.IdSet.empty ))
nums
>|= reduce_elts
let dirty_refs tree repo =
let dir = root repo / "ref" in
let r name = Lwt.return (Some (`Ref (repo, name))) in
walk (module Elt.IdSet) tree dir (".dirty", r)
let dirty_of_commit c : dirty Lwt.t =
safe_tree c >>= fun t ->
let ( ++ ) = Elt.IdSet.union in
(* we handle dirty repo even if not monitored *)
dirty_repos t >>= fun dirty_repos ->
repos t >>= fun repos ->
(* we only check for dirty prs/refs for monitored repos only *)
Lwt_list.map_p
(fun r ->
dirty_prs t r >>= fun prs ->
dirty_refs t r >|= fun refs ->
prs ++ refs)
(Repo.Set.elements repos)
>|= fun more ->
dirty_repos ++ reduce_elts more
let dirty_file : Elt.id -> Path.t = function
| `Repo r -> root r / ".dirty"
| `PR (r, id) -> root r / "pr" / string_of_int id / ".dirty"
| `Ref (r, n) -> root r / "ref" /@ Path.of_steps_exn n / ".dirty"
| _ -> failwith "TODO"
let clean tr dirty =
Lwt_list.iter_p (fun d -> remove_if_exists tr (dirty_file d))
@@ Elt.IdSet.elements dirty
let empty = Cstruct.of_string ""
let stain tr dirty =
Lwt_list.iter_p (fun d -> create_file tr (dirty_file d) empty)
@@ Elt.IdSet.elements dirty
(* Elements *)
let find t (id : Elt.id) =
match id with
| `Repo id -> repo t id >|= mapo (fun r -> `Repo r)
| `Commit id -> commit t id >|= mapo (fun c -> `Commit c)
| `PR id -> pr t id >|= mapo (fun p -> `PR p)
| `Ref id -> ref t id >|= mapo (fun r -> `Ref r)
| `Status id -> status t id >|= mapo (fun s -> `Status s)
(* Diffs *)
let combine_repo t tree r =
repo tree r >>= function
| None -> Lwt.return (Diff.with_remove (`Repo r) t)
| Some r ->
snapshot_of_repos tree (Repo.Set.singleton r) >|= fun s ->
Elt.Set.fold Diff.with_update (Snapshot.elts s) t
let combine_commit t tree c =
commit tree c >|= function
| None -> Diff.with_remove (`Commit c) t
| Some c -> Diff.with_update (`Commit c) t
let combine_pr t tree id =
pr tree id >|= function
| Some pr -> Diff.with_update (`PR pr) t
| None -> Diff.with_remove (`PR id) t
let combine_status t tree id =
status tree id >|= function
| None -> Diff.with_remove (`Status id) t
| Some s -> Diff.with_update (`Status s) t
let combine_ref t tree id =
ref tree id >|= function
| None -> Diff.with_remove (`Ref id) t
| Some r -> Diff.with_update (`Ref r) t
let apply_on_commit diff head =
Log.debug (fun l -> l "apply");
safe_tree head >>= fun tree ->
if Elt.IdSet.is_empty diff then Lwt.return Diff.empty
else
Lwt_list.fold_left_s
(fun acc -> function `Repo repo -> combine_repo acc tree repo
| `PR id -> combine_pr acc tree id
| `Ref id -> combine_ref acc tree id
| `Commit id -> combine_commit acc tree id
| `Status id ->
combine_status acc tree id >>= fun acc ->
combine_commit acc tree (fst id))
Diff.empty (Elt.IdSet.elements diff)
>|= fun r ->
Log.debug (fun l ->
l "apply @[<2>%a@]@;@[<2>->%a@]" Elt.IdSet.pp diff Diff.pp r);
r
type t = { head : DK.Commit.t; snapshot : Snapshot.t; dirty : dirty }
let snapshot t = t.snapshot
let head t = t.head
let dirty t = t.dirty
let pp ppf s =
Fmt.pf ppf "@[%a:@;@[<2>%a@]@]" DK.Commit.pp s.head Snapshot.pp s.snapshot
let diff x y =
safe_diff x y >>= fun (diff, dirty) ->
apply_on_commit diff x >|= fun s ->
(s, dirty)
let tr_head tr =
DK.Transaction.parents tr >>= function
| Error e ->
Log.err (fun l -> l "tr_head: %a" DK.pp_error e);
Lwt.fail_with "tr_head"
| Ok [] -> Lwt.fail_with "no parents!"
| Ok [ p ] -> Lwt.return p
| Ok _ -> Lwt.fail_with "too many parents!"
let of_branch ~debug ?old branch =
DK.Branch.transaction branch >>= function
| Error e ->
Log.err (fun l ->
l "snpshot %s: %a" (DK.Branch.name branch) DK.pp_error e);
Lwt.fail_with "snapshot"
| Ok tr -> (
Log.debug (fun l ->
let c =
match old with None -> "*" | Some t -> DK.Commit.id t.head
in
l "snapshot %s old=%s" debug c);
tr_head tr >>= fun head ->
match old with
| None ->
snapshot_of_commit head >>= fun snapshot ->
dirty_of_commit head >|= fun dirty ->
(tr, { head; snapshot; dirty })
| Some old ->
diff head old.head >|= fun (diff, dirty) ->
let snapshot = Diff.apply diff old.snapshot in
(tr, { head; snapshot; dirty }) )
let of_commit ~debug ?old head =
Log.debug (fun l ->
let c = match old with None -> "*" | Some t -> DK.Commit.id t.head in
l "snapshot %s old=%s" debug c);
match old with
| None ->
snapshot_of_commit head >>= fun snapshot ->
dirty_of_commit head >|= fun dirty ->
{ head; snapshot; dirty }
| Some old ->
diff head old.head >|= fun (diff, dirty) ->
let snapshot = Diff.apply diff old.snapshot in
{ head; snapshot; dirty }
let remove_elt tr = function
| `Repo repo -> remove_repo tr repo
| `PR pr -> remove_pr tr pr
| `Ref r -> remove_ref tr r
| `Status (h, c) ->
let dir =
root (Commit.repo h) / "commit" / Commit.hash h / "status" /@ path c
in
remove_if_exists tr dir
| `Commit c ->
let dir = root (Commit.repo c) / "commit" / c.Commit.hash in
remove_if_exists tr dir
let update_elt tr = function
| `Repo r -> update_repo tr r
| `Commit c -> update_commit tr c
| `PR pr -> update_pr tr pr
| `Ref r -> update_ref tr r
| `Status s -> update_status tr s
let remove ~debug t =
if Elt.IdSet.is_empty t then None
else
let f tr =
Log.debug (fun l ->
l "remove_snapshot (from %s):@;%a" debug Elt.IdSet.pp t);
Lwt_list.iter_p (remove_elt tr) (Elt.IdSet.elements t)
in
Some f
let update ~debug t =
if Elt.Set.is_empty t then None
else
let f tr =
Log.debug (fun l ->
l "update_snapshot (from %s):@;%a" debug Elt.Set.pp t);
Lwt_list.iter_p (update_elt tr) (Elt.Set.elements t)
in
Some f
let apply ~debug diff tr =
let clean () =
match remove ~debug (Diff.remove diff) with
| None -> Lwt.return_unit
| Some f -> f tr
in
let update () =
match update ~debug (Diff.update diff) with
| None -> Lwt.return_unit
| Some f -> f tr
in
tr_head tr >>= fun head ->
clean () >>= fun () ->
update () >>= fun () ->
tr_diff tr head >|= fun diff ->
diff <> []
end
| null | https://raw.githubusercontent.com/moby/datakit/e047e55a2dfa3aaec02398d7d7699f4f7afd2b47/src/datakit-github/datakit_github_conv.ml | ocaml | Some parent doesn't exist or isn't a directory
Some parent doesn't exist or isn't a directory
Repos
PRs
Commits
Status
Refs
Dirty
we handle dirty repo even if not monitored
we only check for dirty prs/refs for monitored repos only
Elements
Diffs | open Lwt.Infix
open Datakit_client.Path.Infix
open Datakit_github
open Result
open Datakit_client
let src = Logs.Src.create "dkt-github" ~doc:"Github to Git bridge"
module Log = (val Logs.src_log src : Logs.LOG)
let ( >>*= ) x f = x >>= function Ok x -> f x | Error _ as e -> Lwt.return e
let pp_path = Fmt.(list ~sep:(unit "/") string)
let mapo f = function None -> None | Some x -> Some (f x)
let failf fmt = Fmt.kstrf failwith fmt
module Make (DK : S) = struct
type tree = DK.Tree.t
conversion between GitHub and DataKit states .
let path s = Path.of_steps_exn s
TODO : Lots of these functions used to ignore errors silently . This can lead
to bugs in the users of the library ( e.g. we lost our 9p connection but
we think instead that the file we wanted does n't exist ) . For now , I 've
converted it to log errors in these cases but continue with the old
behaviour . Assuming we do n't see these errors being logged , we can
change the code to raise exceptions instead .
to bugs in the users of the library (e.g. we lost our 9p connection but
we think instead that the file we wanted doesn't exist). For now, I've
converted it to log errors in these cases but continue with the old
behaviour. Assuming we don't see these errors being logged, we can
change the code to raise exceptions instead. *)
let remove_if_exists t path =
DK.Transaction.remove t path >|= function
| Error `Does_not_exist | Ok () -> ()
| Error e -> failf "remove_if_exists(%a): %a" Path.pp path DK.pp_error e
let read_dir_if_exists t dir =
DK.Tree.read_dir t dir >|= function
| Ok dirs -> dirs
| Error (`Does_not_exist | `Not_dir) -> []
| Error e -> failf "safe_read_dir(%a): %a" Path.pp dir DK.pp_error e
let exists_dir t dir =
DK.Tree.exists_dir t dir >|= function
| Ok b -> b
| Error `Not_dir ->
| Error e -> failf "exists_dir(%a): %a" Path.pp dir DK.pp_error e
let exists_file t file =
DK.Tree.exists_file t file >|= function
| Ok b -> b
| Error `Not_dir ->
| Error e -> failf "exists_file(%a): %a" Path.pp file DK.pp_error e
let read_file_if_exists ?(trim = true) t file =
DK.Tree.read_file t file >|= function
| Ok b ->
let b = Cstruct.to_string b in
Some (if trim then String.trim b else b)
| Error (`Does_not_exist | `Not_dir) -> None
| Error e -> failf "read_file(%a): %a" Path.pp file DK.pp_error e
let create_file tr file contents =
match Path.basename file with
| None -> failf "%a is not a file" Path.pp file
| Some _ -> (
let dir = Path.dirname file in
( DK.Transaction.make_dirs tr dir >>*= fun () ->
DK.Transaction.create_or_replace_file tr file contents )
>|= function
| Ok () -> ()
| Error e ->
failf "Got %a while creating %a" DK.pp_error e Path.pp file )
let tr_diff tr c =
DK.Transaction.diff tr c >|= function
| Ok d -> d
| Error e -> failf "tr_diff: %a" DK.pp_error e
let lift_errors name f =
f >>= function
| Error e -> Lwt.fail_with @@ Fmt.strf "%s: %a" name DK.pp_error e
| Ok x -> Lwt.return x
let path_of_diff = function
| `Added f | `Removed f | `Updated f -> Path.unwrap f
let safe_tree c =
DK.Commit.tree c >>= function
| Error e -> Fmt.kstrf Lwt.fail_with "%a" DK.pp_error e
| Ok tree -> Lwt.return tree
type dirty = Elt.IdSet.t
let changes diff =
let rdecons l =
match List.rev l with [] -> assert false | h :: t -> (h, List.rev t)
in
List.fold_left
(fun (acc, dirty) d ->
let path = path_of_diff d in
let added = match d with `Removed _ -> false | _ -> true in
let t =
match path with
| [] | [ _ ] -> None
| user :: repo :: path -> (
let user = User.v user in
let repo = Repo.v ~user ~repo in
let pr repo id = `PR (repo, int_of_string id) in
match path with
| [] | [ ".monitor" ] -> Some (`Repo repo)
| [ ".dirty" ] when added -> Some (`Dirty (`Repo repo))
| [ "pr"; id; ".dirty" ] when added -> Some (`Dirty (pr repo id))
| "pr" :: id :: _ -> Some (pr repo id)
| [ "commit"; id ] -> Some (`Commit (Commit.v repo id))
| "commit" :: id :: "status" :: (_ :: _ :: _ as tl) ->
let _, last = rdecons tl in
Some (`Status (Commit.v repo id, last))
| "ref" :: (_ :: _ :: _ as tl) ->
let f, last = rdecons tl in
let r = `Ref (repo, last) in
if f = ".dirty" then Some (`Dirty r) else Some r
| _ -> None )
in
match t with
| None -> (acc, dirty)
| Some (`Dirty d) -> (acc, Elt.IdSet.add d dirty)
| Some (#Elt.id as e) -> (Elt.IdSet.add e acc, dirty))
(Elt.IdSet.empty, Elt.IdSet.empty)
diff
let safe_diff x y =
DK.Commit.diff x y >|= function
| Error e ->
Log.err (fun f -> f "safe_diff: %a" DK.pp_error e);
(Elt.IdSet.empty, Elt.IdSet.empty)
| Ok d -> changes d
let walk (type elt t) (module Set : SET with type elt = elt and type t = t)
tree root (file, fn) =
let rec aux acc = function
| [] -> Lwt.return acc
| context :: todo -> (
match Path.of_steps context with
| Error e ->
Log.err (fun l -> l "%s" e);
aux acc todo
| Ok ctx -> (
let dir = root /@ ctx in
read_dir_if_exists tree dir >>= fun childs ->
let todo = List.map (fun c -> context @ [ c ]) childs @ todo in
exists_file tree (dir / file) >>= function
| false -> aux acc todo
| true -> (
fn (Path.unwrap ctx) >>= function
| None -> aux acc todo
| Some e -> aux (Set.add e acc) todo ) ) )
in
aux Set.empty [ [] ]
let empty = Path.empty
let root r = empty / User.name r.Repo.user / r.Repo.repo
let repo tree repo =
read_file_if_exists tree (root repo / ".monitor") >|= function
| None ->
Log.debug (fun l -> l "repo %a -> false" Repo.pp repo);
None
| Some _ ->
Log.debug (fun l -> l "repo %a -> true" Repo.pp repo);
Some repo
let reduce_repos = List.fold_left Repo.Set.union Repo.Set.empty
let repos tree =
let root = Path.empty in
read_dir_if_exists tree root >>= fun users ->
Lwt_list.map_p
(fun user ->
read_dir_if_exists tree (root / user) >>= fun repos ->
Lwt_list.map_p
(fun repo ->
read_file_if_exists tree (root / user / repo / ".monitor")
>|= function
| None -> Repo.Set.empty
| Some _ ->
let user = User.v user in
let repo = Repo.v ~user ~repo in
Repo.Set.singleton repo)
repos
>|= reduce_repos)
users
>|= fun repos ->
let repos = reduce_repos repos in
Log.debug (fun l -> l "repos -> @;@[<2>%a@]" Repo.Set.pp repos);
repos
let update_repo_aux tr s r =
let dir = root r in
match s with
| `Ignored -> remove_if_exists tr (root r / ".monitor")
| `Monitored ->
let remove =
DK.Transaction.make_dirs tr dir >>*= fun () ->
let empty = Cstruct.of_string "" in
DK.Transaction.create_or_replace_file tr (dir / ".monitor") empty
in
lift_errors "update_repo" remove
let update_repo tr r = update_repo_aux tr `Monitored r
let remove_repo tr r = update_repo_aux tr `Ignored r
let update_commit tr c =
let dir = root (Commit.repo c) / "commit" in
lift_errors "update_commit" @@ DK.Transaction.make_dirs tr dir
let update_pr t pr =
let dir = root (PR.repo pr) / "pr" / string_of_int pr.PR.number in
Log.debug (fun l -> l "update_pr %s" @@ Path.to_hum dir);
let update =
DK.Transaction.make_dirs t dir >>*= fun () ->
let write ?prefix ?(newline = true) k v =
let v = Cstruct.of_string (if newline then v ^ "\n" else v) in
let dir = match prefix with None -> dir | Some p -> dir /@ p in
DK.Transaction.create_or_replace_file t (dir / k) v
in
write "head" (PR.commit_hash pr) >>*= fun () ->
write "state" (PR.string_of_state pr.PR.state) >>*= fun () ->
write "title" pr.PR.title >>*= fun () ->
write "owner" (User.name pr.PR.owner) >>*= fun () ->
write "base" pr.PR.base >>*= fun () ->
remove_if_exists t (dir / "comments") >>= fun () ->
Lwt_list.mapi_p
(fun id c ->
let prefix = Path.empty / "comments" / string_of_int id in
DK.Transaction.make_dirs t (dir /@ prefix) >>*= fun () ->
let user = User.name c.Comment.user in
write ~prefix "id" (string_of_int c.Comment.id) >>*= fun () ->
write ~prefix "user" user >>*= fun () ->
write ~newline:false ~prefix "body" c.Comment.body)
(Array.to_list pr.PR.comments)
>>= fun l ->
List.fold_left
(fun acc x ->
acc >>*= fun () ->
Lwt.return x)
(Lwt.return (Ok ())) l
in
lift_errors "update_pr" update
let remove_pr t (repo, num) =
let dir = root repo / "pr" / string_of_int num in
Log.debug (fun l -> l "remove_pr %s" @@ Path.to_hum dir);
remove_if_exists t dir
let comments tree dir =
read_dir_if_exists tree dir >>= fun ids ->
Lwt_list.map_p
(fun n ->
read_file_if_exists tree (dir / n / "id") >>= fun rid ->
read_file_if_exists tree (dir / n / "user") >>= fun user ->
read_file_if_exists ~trim:false tree (dir / n / "body") >|= fun body ->
let body = match body with None -> "" | Some b -> b in
let id =
match rid with
| None -> None
| Some id -> ( try Some (int_of_string id) with Failure _ -> None )
in
match (id, user) with
| Some id, Some name ->
let user = User.v name in
Some (Comment.v ~id ~user ~body)
| Some id, None ->
Log.debug (fun l ->
l "error: %a/comments/%d/author does not exist" Path.pp dir id);
None
| _ ->
Log.debug (fun l ->
l "error: %a/comments: %s is not a valid id" Path.pp dir n);
None)
ids
>|= fun comments ->
List.fold_left
(fun acc -> function None -> acc | Some x -> x :: acc)
[] (List.rev comments)
|> Array.of_list
let pr tree (repo, number) =
let dir = root repo / "pr" / string_of_int number in
Log.debug (fun l -> l "pr %a" Path.pp dir);
read_file_if_exists tree (dir / "head") >>= fun head ->
read_file_if_exists tree (dir / "state") >>= fun state ->
read_file_if_exists tree (dir / "title") >>= fun title ->
read_file_if_exists tree (dir / "owner") >>= fun owner ->
comments tree (dir / "comments") >>= fun comments ->
read_file_if_exists tree (dir / "base") >|= fun base ->
match (head, state, owner) with
| None, _, _ ->
Log.debug (fun l ->
l "error: %a/pr/%d/head does not exist" Repo.pp repo number);
None
| _, None, _ ->
Log.debug (fun l ->
l "error: %a/pr/%d/state does not exist" Repo.pp repo number);
None
| _, _, None ->
Log.debug (fun l ->
l "error: %a/pr/%d/owner does not exist" Repo.pp repo number);
None
| Some id, Some state, Some owner ->
let base =
match base with
| Some b -> b
| None ->
Log.debug (fun l ->
l
"error: %a/pr/%d/base does not exist, using 'master' \
instead"
Repo.pp repo number);
"master"
in
let owner = User.v owner in
let head = Commit.v repo id in
let title = match title with None -> "" | Some t -> t in
let state =
match PR.state_of_string state with
| Some s -> s
| None ->
Log.err (fun l ->
l "%s is not a valid PR state, picking `Closed instead" state);
`Closed
in
Some (PR.v ~state ~title ~base ~owner ~comments head number)
let reduce_prs = List.fold_left PR.Set.union PR.Set.empty
let prs_of_repo tree repo =
let dir = root repo / "pr" in
read_dir_if_exists tree dir >>= fun nums ->
Lwt_list.map_p
(fun n ->
pr tree (repo, int_of_string n) >|= function
| None -> PR.Set.empty
| Some p -> PR.Set.singleton p)
nums
>|= fun prs ->
let prs = reduce_prs prs in
Log.debug (fun l ->
l "prs_of_repo %a -> @;@[<2>%a@]" Repo.pp repo PR.Set.pp prs);
prs
let maybe_repos tree = function
| None -> repos tree
| Some rs -> Lwt.return rs
let prs ?repos:rs tree =
maybe_repos tree rs >>= fun repos ->
Lwt_list.map_p (prs_of_repo tree) (Repo.Set.elements repos) >|= fun prs ->
let prs = reduce_prs prs in
Log.debug (fun l -> l "prs -> @;@[<2>%a@]" PR.Set.pp prs);
prs
let commit tree { Commit.repo; hash } =
let dir = root repo / "commit" / hash in
exists_dir tree dir >|= function
| false ->
Log.debug (fun l -> l "commit {%a %s} -> false" Repo.pp repo hash);
None
| true ->
Log.debug (fun l -> l "commit {%a %s} -> true" Repo.pp repo hash);
Some (Commit.v repo hash)
let commits_of_repo tree repo =
let dir = root repo / "commit" in
read_dir_if_exists tree dir >|= fun commits ->
List.fold_left
(fun s id -> Commit.Set.add (Commit.v repo id) s)
Commit.Set.empty commits
|> fun cs ->
Log.debug (fun l ->
l "commits_of_repo %a -> @;@[<2>%a@]" Repo.pp repo Commit.Set.pp cs);
cs
let reduce_commits = List.fold_left Commit.Set.union Commit.Set.empty
let commits ?repos:rs tree =
maybe_repos tree rs >>= fun repos ->
Lwt_list.map_p (commits_of_repo tree) (Repo.Set.elements repos)
>|= fun cs ->
let cs = reduce_commits cs in
Log.debug (fun l -> l "commits -> @;@[<2>%a@]" Commit.Set.pp cs);
cs
let update_status t s =
let dir =
root (Status.repo s)
/ "commit" / Status.commit_hash s / "status"
/@ path (Status.context s)
in
Log.debug (fun l -> l "update_status %a" Path.pp dir);
lift_errors "update_status" (DK.Transaction.make_dirs t dir) >>= fun () ->
let description =
match Status.description s with
| None -> None
| Some d -> Some (String.trim d)
in
let kvs =
[ ("description", description);
("state", Some (Status_state.to_string @@ Status.state s));
("target_url", mapo Uri.to_string (Status.url s))
]
in
Lwt_list.iter_p
(fun (k, v) ->
match v with
| None -> remove_if_exists t (dir / k)
| Some v ->
let v = Cstruct.of_string (v ^ "\n") in
lift_errors "update_status"
@@ DK.Transaction.create_or_replace_file t (dir / k) v)
kvs
let status tree (commit, context) =
let context = Path.of_steps_exn context in
let dir =
root (Commit.repo commit)
/ "commit" / Commit.hash commit / "status" /@ context
in
read_file_if_exists tree (dir / "state") >>= fun state ->
match state with
| None ->
Log.debug (fun l -> l "status %a -> None" Path.pp dir);
Lwt.return_none
| Some str ->
let state =
match Status_state.of_string str with
| Some s -> s
| None ->
Log.err (fun l ->
l "%s: invalid state, using `Failure instead" str);
`Failure
in
Log.debug (fun l ->
l "status %a -> %a" Path.pp context Status_state.pp state);
read_file_if_exists tree (dir / "description") >>= fun description ->
read_file_if_exists tree (dir / "target_url") >|= fun url ->
let context = Path.unwrap context in
let url = mapo Uri.of_string url in
Some (Status.v ?description ?url commit context state)
let reduce_status = List.fold_left Status.Set.union Status.Set.empty
let statuses_of_commits tree commits =
Lwt_list.map_p
(fun commit ->
let dir = root (Commit.repo commit) / "commit" in
let dir = dir / Commit.hash commit / "status" in
walk
(module Status.Set)
tree dir
("state", fun c -> status tree (commit, c)))
(Commit.Set.elements commits)
>|= fun status ->
let status = reduce_status status in
Log.debug (fun l ->
l "statuses_of_commits %a -> @;@[<2>%a@]" Commit.Set.pp commits
Status.Set.pp status);
status
let maybe_commits tree = function
| None -> commits tree
| Some c -> Lwt.return c
let statuses ?commits:cs tree =
maybe_commits tree cs >>= fun commits ->
statuses_of_commits tree commits >|= fun status ->
Log.debug (fun l -> l "statuses -> @;@[<2>%a@]" Status.Set.pp status);
status
let ref tree (repo, name) =
let path = Path.of_steps_exn name in
let head = root repo / "ref" /@ path / "head" in
read_file_if_exists tree head >|= function
| None ->
Log.debug (fun l -> l "ref_ %a:%a -> None" Repo.pp repo pp_path name);
None
| Some id ->
Log.debug (fun l -> l "ref_ %a:%a -> %s" Repo.pp repo pp_path name id);
let head = Commit.v repo id in
Some (Ref.v head name)
let refs_of_repo tree repo =
let dir = root repo / "ref" in
walk (module Ref.Set) tree dir ("head", fun n -> ref tree (repo, n))
>|= fun refs ->
Log.debug (fun l ->
l "refs_of_repo %a -> @;@[<2>%a@]" Repo.pp repo Ref.Set.pp refs);
refs
let reduce_refs = List.fold_left Ref.Set.union Ref.Set.empty
let refs ?repos:rs tree =
maybe_repos tree rs >>= fun repos ->
Lwt_list.map_p (refs_of_repo tree) (Repo.Set.elements repos)
>|= fun refs ->
let refs = reduce_refs refs in
Log.debug (fun l -> l "refs -> @;@[<2>%a@]" Ref.Set.pp refs);
refs
let update_ref tr r =
let path = Path.of_steps_exn (Ref.name r) in
let dir = root (Ref.repo r) / "ref" /@ path in
Log.debug (fun l -> l "update_ref %a" Path.pp dir);
let update =
DK.Transaction.make_dirs tr dir >>*= fun () ->
let head = Cstruct.of_string (Ref.commit_hash r ^ "\n") in
DK.Transaction.create_or_replace_file tr (dir / "head") head
in
lift_errors "update_ref" update
let remove_ref tr (repo, name) =
let path = Path.of_steps_exn name in
let dir = root repo / "ref" /@ path in
Log.debug (fun l -> l "remove_ref %a" Path.pp dir);
remove_if_exists tr dir
let update_event t = function
| Event.Repo (s, r) -> update_repo_aux t s r
| Event.PR pr -> update_pr t pr
| Event.Status s -> update_status t s
| Event.Ref (`Removed r) -> remove_ref t r
| Event.Ref (`Created r | `Updated r) -> update_ref t r
| Event.Other o ->
Log.debug (fun l -> l "ignoring event: %s" @@ snd o);
Lwt.return_unit
Snapshot
let snapshot_of_repos tree repos =
commits ~repos tree >>= fun commits ->
prs ~repos tree >>= fun prs ->
statuses ~commits tree >>= fun status ->
refs ~repos tree >|= fun refs ->
Snapshot.v ~repos ~status ~prs ~refs ~commits
let snapshot_of_commit c =
safe_tree c >>= fun tree ->
repos tree >>= fun repos ->
snapshot_of_repos tree repos
let reduce_elts = List.fold_left Elt.IdSet.union Elt.IdSet.empty
let dirty_repos tree =
let root = Path.empty in
read_dir_if_exists tree root >>= fun users ->
Lwt_list.map_p
(fun user ->
read_dir_if_exists tree (root / user) >>= fun repos ->
Lwt_list.map_p
(fun repo ->
exists_file tree (root / user / repo / ".dirty") >|= function
| false -> Elt.IdSet.empty
| true ->
let user = User.v user in
let repo = Repo.v ~user ~repo in
Elt.IdSet.singleton (`Repo repo))
repos
>|= reduce_elts)
users
>|= reduce_elts
let dirty_prs tree repo =
let dir = root repo / "pr" in
read_dir_if_exists tree dir >>= fun nums ->
Lwt_list.map_p
(fun n ->
let d = dir / n / ".dirty" in
exists_file tree d >|= function
| false -> Elt.IdSet.empty
| true -> (
try Elt.IdSet.singleton (`PR (repo, int_of_string n))
with Failure _ -> Elt.IdSet.empty ))
nums
>|= reduce_elts
let dirty_refs tree repo =
let dir = root repo / "ref" in
let r name = Lwt.return (Some (`Ref (repo, name))) in
walk (module Elt.IdSet) tree dir (".dirty", r)
let dirty_of_commit c : dirty Lwt.t =
safe_tree c >>= fun t ->
let ( ++ ) = Elt.IdSet.union in
dirty_repos t >>= fun dirty_repos ->
repos t >>= fun repos ->
Lwt_list.map_p
(fun r ->
dirty_prs t r >>= fun prs ->
dirty_refs t r >|= fun refs ->
prs ++ refs)
(Repo.Set.elements repos)
>|= fun more ->
dirty_repos ++ reduce_elts more
let dirty_file : Elt.id -> Path.t = function
| `Repo r -> root r / ".dirty"
| `PR (r, id) -> root r / "pr" / string_of_int id / ".dirty"
| `Ref (r, n) -> root r / "ref" /@ Path.of_steps_exn n / ".dirty"
| _ -> failwith "TODO"
let clean tr dirty =
Lwt_list.iter_p (fun d -> remove_if_exists tr (dirty_file d))
@@ Elt.IdSet.elements dirty
let empty = Cstruct.of_string ""
let stain tr dirty =
Lwt_list.iter_p (fun d -> create_file tr (dirty_file d) empty)
@@ Elt.IdSet.elements dirty
let find t (id : Elt.id) =
match id with
| `Repo id -> repo t id >|= mapo (fun r -> `Repo r)
| `Commit id -> commit t id >|= mapo (fun c -> `Commit c)
| `PR id -> pr t id >|= mapo (fun p -> `PR p)
| `Ref id -> ref t id >|= mapo (fun r -> `Ref r)
| `Status id -> status t id >|= mapo (fun s -> `Status s)
let combine_repo t tree r =
repo tree r >>= function
| None -> Lwt.return (Diff.with_remove (`Repo r) t)
| Some r ->
snapshot_of_repos tree (Repo.Set.singleton r) >|= fun s ->
Elt.Set.fold Diff.with_update (Snapshot.elts s) t
let combine_commit t tree c =
commit tree c >|= function
| None -> Diff.with_remove (`Commit c) t
| Some c -> Diff.with_update (`Commit c) t
let combine_pr t tree id =
pr tree id >|= function
| Some pr -> Diff.with_update (`PR pr) t
| None -> Diff.with_remove (`PR id) t
let combine_status t tree id =
status tree id >|= function
| None -> Diff.with_remove (`Status id) t
| Some s -> Diff.with_update (`Status s) t
let combine_ref t tree id =
ref tree id >|= function
| None -> Diff.with_remove (`Ref id) t
| Some r -> Diff.with_update (`Ref r) t
let apply_on_commit diff head =
Log.debug (fun l -> l "apply");
safe_tree head >>= fun tree ->
if Elt.IdSet.is_empty diff then Lwt.return Diff.empty
else
Lwt_list.fold_left_s
(fun acc -> function `Repo repo -> combine_repo acc tree repo
| `PR id -> combine_pr acc tree id
| `Ref id -> combine_ref acc tree id
| `Commit id -> combine_commit acc tree id
| `Status id ->
combine_status acc tree id >>= fun acc ->
combine_commit acc tree (fst id))
Diff.empty (Elt.IdSet.elements diff)
>|= fun r ->
Log.debug (fun l ->
l "apply @[<2>%a@]@;@[<2>->%a@]" Elt.IdSet.pp diff Diff.pp r);
r
type t = { head : DK.Commit.t; snapshot : Snapshot.t; dirty : dirty }
let snapshot t = t.snapshot
let head t = t.head
let dirty t = t.dirty
let pp ppf s =
Fmt.pf ppf "@[%a:@;@[<2>%a@]@]" DK.Commit.pp s.head Snapshot.pp s.snapshot
let diff x y =
safe_diff x y >>= fun (diff, dirty) ->
apply_on_commit diff x >|= fun s ->
(s, dirty)
let tr_head tr =
DK.Transaction.parents tr >>= function
| Error e ->
Log.err (fun l -> l "tr_head: %a" DK.pp_error e);
Lwt.fail_with "tr_head"
| Ok [] -> Lwt.fail_with "no parents!"
| Ok [ p ] -> Lwt.return p
| Ok _ -> Lwt.fail_with "too many parents!"
let of_branch ~debug ?old branch =
DK.Branch.transaction branch >>= function
| Error e ->
Log.err (fun l ->
l "snpshot %s: %a" (DK.Branch.name branch) DK.pp_error e);
Lwt.fail_with "snapshot"
| Ok tr -> (
Log.debug (fun l ->
let c =
match old with None -> "*" | Some t -> DK.Commit.id t.head
in
l "snapshot %s old=%s" debug c);
tr_head tr >>= fun head ->
match old with
| None ->
snapshot_of_commit head >>= fun snapshot ->
dirty_of_commit head >|= fun dirty ->
(tr, { head; snapshot; dirty })
| Some old ->
diff head old.head >|= fun (diff, dirty) ->
let snapshot = Diff.apply diff old.snapshot in
(tr, { head; snapshot; dirty }) )
let of_commit ~debug ?old head =
Log.debug (fun l ->
let c = match old with None -> "*" | Some t -> DK.Commit.id t.head in
l "snapshot %s old=%s" debug c);
match old with
| None ->
snapshot_of_commit head >>= fun snapshot ->
dirty_of_commit head >|= fun dirty ->
{ head; snapshot; dirty }
| Some old ->
diff head old.head >|= fun (diff, dirty) ->
let snapshot = Diff.apply diff old.snapshot in
{ head; snapshot; dirty }
let remove_elt tr = function
| `Repo repo -> remove_repo tr repo
| `PR pr -> remove_pr tr pr
| `Ref r -> remove_ref tr r
| `Status (h, c) ->
let dir =
root (Commit.repo h) / "commit" / Commit.hash h / "status" /@ path c
in
remove_if_exists tr dir
| `Commit c ->
let dir = root (Commit.repo c) / "commit" / c.Commit.hash in
remove_if_exists tr dir
let update_elt tr = function
| `Repo r -> update_repo tr r
| `Commit c -> update_commit tr c
| `PR pr -> update_pr tr pr
| `Ref r -> update_ref tr r
| `Status s -> update_status tr s
let remove ~debug t =
if Elt.IdSet.is_empty t then None
else
let f tr =
Log.debug (fun l ->
l "remove_snapshot (from %s):@;%a" debug Elt.IdSet.pp t);
Lwt_list.iter_p (remove_elt tr) (Elt.IdSet.elements t)
in
Some f
let update ~debug t =
if Elt.Set.is_empty t then None
else
let f tr =
Log.debug (fun l ->
l "update_snapshot (from %s):@;%a" debug Elt.Set.pp t);
Lwt_list.iter_p (update_elt tr) (Elt.Set.elements t)
in
Some f
let apply ~debug diff tr =
let clean () =
match remove ~debug (Diff.remove diff) with
| None -> Lwt.return_unit
| Some f -> f tr
in
let update () =
match update ~debug (Diff.update diff) with
| None -> Lwt.return_unit
| Some f -> f tr
in
tr_head tr >>= fun head ->
clean () >>= fun () ->
update () >>= fun () ->
tr_diff tr head >|= fun diff ->
diff <> []
end
|
b8507090e13ca8de414e6101c3ae355e89b3841517609ec20817e725c45243d9 | jiangpengnju/htdp2e | designing_with_itemization.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname designing_with_itemization) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
Demonstrating the six design steps for problems concerning functions that
consume itemizations , including enumerations and intervals .
Sample Problem : The state of Tax Land has created a three - stage sales tax
; to cope with its budget defict.
Inexpensive items , those costing less than $ 1,000 , are not taxed .
Luxury items , with a price of more than $ 10,000 , are taxed at the rate of
eight percent ( 8.00 % ) .
Everything in between comes with a five percent ( 5 % ) mark up .
; Design a function for a cash register that given the price of an item,
; computes the sales tax.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
1 . When the problem statement distinguishes different classes of input information ,
; you need carefully formulated data definitions.
A Price falls into one of three intervals :
- 0 through 1000 ;
- 1000 through 10000 ;
; - 10000 and above.
; interpretation: the price of an item
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
2 . As far as the signature , purpose statement , and function header are concerned ,
; you proceed as before.
; Price -> Number
; computes the amount of tax charge for price p
(define (sales-tax p) 0)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
3 . For functional examples , however , it is imperative that you pick at least
one example from each subclass in the data definition .
; Also, if a subclass is a finite range, be sure to pick examples from the
; boundaries of the range and from its interior.
Since our sample data definition involves three distinct intervals , let us pick
all boundary examples and one price from inside each interval and determine the
amount of tax for each : 0 , 537 , 1000 , 1282 , 10000 , 12017 .
; turn examples into test cases:
(check-expect (sales-tax 0) 0)
(check-expect (sales-tax 537) 0)
(check-expect (sales-tax 1000) (* 0.05 1000))
(check-expect (sales-tax 1282) (* 0.05 1282))
(check-expect (sales-tax 10000) (* 0.08 10000))
(check-expect (sales-tax 12017) (* 0.08 12017))
;; Instead of just writing down the expected result, we write down
;; HOW to compute the expected result.
;; This makes it easier later to formulate the function definition.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
4 . Conditional template .
; "the template mirrors the organization of subclasses with a cond."
; Price -> Number
; computes the amount of tax charge for price p
(define (sales-tax p)
(cond
[(and (<= 0 p) (< p 1000)) ...]
[(and (<= 1000 p) (< p 10000)) ...]
[(>= p 10000) ...]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
5 . Define the function
; Price -> Number
; computes the amount of tax charged for price p
(define (sales-tax p)
(cond
[(and (<= 0 p) (< p 1000)) 0]
[(and (<= 1000 p) (< p 10000)) (* 0.05 p)]
[(>= p 10000) (* 0.08 p)]))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
6 . Run tests and make sure that the tests cover all cond clauses . | null | https://raw.githubusercontent.com/jiangpengnju/htdp2e/d41555519fbb378330f75c88141f72b00a9ab1d3/fixed-size-data/intervals-enumerations-itemizations/designing_with_itemization.rkt | racket | about the language level of this file in a form that our tools can easily process.
to cope with its budget defict.
Design a function for a cash register that given the price of an item,
computes the sales tax.
you need carefully formulated data definitions.
- 10000 and above.
interpretation: the price of an item
you proceed as before.
Price -> Number
computes the amount of tax charge for price p
Also, if a subclass is a finite range, be sure to pick examples from the
boundaries of the range and from its interior.
turn examples into test cases:
Instead of just writing down the expected result, we write down
HOW to compute the expected result.
This makes it easier later to formulate the function definition.
"the template mirrors the organization of subclasses with a cond."
Price -> Number
computes the amount of tax charge for price p
Price -> Number
computes the amount of tax charged for price p
| The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname designing_with_itemization) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
Demonstrating the six design steps for problems concerning functions that
consume itemizations , including enumerations and intervals .
Sample Problem : The state of Tax Land has created a three - stage sales tax
Inexpensive items , those costing less than $ 1,000 , are not taxed .
Luxury items , with a price of more than $ 10,000 , are taxed at the rate of
eight percent ( 8.00 % ) .
Everything in between comes with a five percent ( 5 % ) mark up .
1 . When the problem statement distinguishes different classes of input information ,
A Price falls into one of three intervals :
2 . As far as the signature , purpose statement , and function header are concerned ,
(define (sales-tax p) 0)
3 . For functional examples , however , it is imperative that you pick at least
one example from each subclass in the data definition .
Since our sample data definition involves three distinct intervals , let us pick
all boundary examples and one price from inside each interval and determine the
amount of tax for each : 0 , 537 , 1000 , 1282 , 10000 , 12017 .
(check-expect (sales-tax 0) 0)
(check-expect (sales-tax 537) 0)
(check-expect (sales-tax 1000) (* 0.05 1000))
(check-expect (sales-tax 1282) (* 0.05 1282))
(check-expect (sales-tax 10000) (* 0.08 10000))
(check-expect (sales-tax 12017) (* 0.08 12017))
4 . Conditional template .
(define (sales-tax p)
(cond
[(and (<= 0 p) (< p 1000)) ...]
[(and (<= 1000 p) (< p 10000)) ...]
[(>= p 10000) ...]))
5 . Define the function
(define (sales-tax p)
(cond
[(and (<= 0 p) (< p 1000)) 0]
[(and (<= 1000 p) (< p 10000)) (* 0.05 p)]
[(>= p 10000) (* 0.08 p)]))
6 . Run tests and make sure that the tests cover all cond clauses . |
ed612451628fea75464d651ee44a7dbd9d4f9c1aa152e8816ea4f8cd9dbf56f9 | jumarko/web-development-with-clojure | project.clj | (defproject db-examples "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]
[com.layerware/hugsql "0.4.7"]
[org.clojure/java.jdbc "0.6.1"]
[org.postgresql/postgresql "9.4-1201-jdbc41"]])
| null | https://raw.githubusercontent.com/jumarko/web-development-with-clojure/dfff6e40c76b64e9fcd440d80c7aa29809601b6b/examples/db-examples/project.clj | clojure | (defproject db-examples "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.8.0"]
[com.layerware/hugsql "0.4.7"]
[org.clojure/java.jdbc "0.6.1"]
[org.postgresql/postgresql "9.4-1201-jdbc41"]])
| |
51b7f9251bba736342174ac5c0edcbae36a2d29483500ceee94ef210e179f390 | eccentric-j/cljs-tui-template | subs.cljs | (ns {{main-ns}}.subs
"Re-frame app db subscriptions. Essentially maps a keyword describing a
result to a function that retrieves the current value from the app db."
(:require [re-frame.core :as rf]))
(rf/reg-sub
:db
(fn [db _]
db))
(rf/reg-sub
:view
(fn [db _]
(:router/view db)))
(rf/reg-sub
:size
(fn [db _]
(:terminal/size db)))
| null | https://raw.githubusercontent.com/eccentric-j/cljs-tui-template/6ad22eb0d069666a072c58709fc82e6f1a2ca8c3/resources/leiningen/new/cljs_tui/src/subs.cljs | clojure | (ns {{main-ns}}.subs
"Re-frame app db subscriptions. Essentially maps a keyword describing a
result to a function that retrieves the current value from the app db."
(:require [re-frame.core :as rf]))
(rf/reg-sub
:db
(fn [db _]
db))
(rf/reg-sub
:view
(fn [db _]
(:router/view db)))
(rf/reg-sub
:size
(fn [db _]
(:terminal/size db)))
| |
520e3b18392e63ef88c948973ea8d4a03d7e513389799fc6ef2b9851d34adbee | JHU-PL-Lab/jaylang | tests.ml | open! Core
module R = Dbmc.Rstack
let x = Dbmc.Id.Ident "x"
let f = Dbmc.Id.Ident "f"
let r0 = R.empty
let xf : R.frame = (x, f)
let r1 = R.push r0 xf
let%expect_test _ =
Fmt.pr "%a" R.pp r1 ;
[%expect {| +(x,f); |}] ;
Fmt.pr "%s" (R.to_string r1) ;
[%expect {| 5775445702 |}]
(* String.equal printed showed *)
| null | https://raw.githubusercontent.com/JHU-PL-Lab/jaylang/81abf9ff185758a2aaefd90478da4c7bb53f4384/src-test/dbmc/inline-expect/tests.ml | ocaml | String.equal printed showed | open! Core
module R = Dbmc.Rstack
let x = Dbmc.Id.Ident "x"
let f = Dbmc.Id.Ident "f"
let r0 = R.empty
let xf : R.frame = (x, f)
let r1 = R.push r0 xf
let%expect_test _ =
Fmt.pr "%a" R.pp r1 ;
[%expect {| +(x,f); |}] ;
Fmt.pr "%s" (R.to_string r1) ;
[%expect {| 5775445702 |}]
|
7a5d8b88e2e4605618a79417f28bcf31756ff8adc69dafd791c04b47d9be4e5a | patricoferris/ocaml-multicore-monorepo | test_sql_async.ml | Copyright ( C ) 2014 - -2021 Petter A. Urkedal < >
*
* This library is free software ; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version , with the OCaml static compilation exception .
*
* This library is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library . If not , see < / > .
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version, with the OCaml static compilation exception.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library. If not, see </>.
*)
open Async_kernel
open Async_unix
open Core_kernel
module Sys = struct
type 'a future = 'a Deferred.t
let return = return
let or_fail = function
| Ok x -> return x
| Error (#Caqti_error.t as err) ->
Error.raise (Error.of_exn (Caqti_error.Exn err))
module Infix = struct
let (>>=) = (>>=)
let (>|=) = (>>|)
end
end
module Test = Test_sql.Make (Sys) (Caqti_async)
let main uris () =
let open Deferred in
let rec loop = function
| [] -> return ()
| uri :: uris ->
Caqti_async.connect uri >>= Sys.or_fail >>= Test.run >>= fun () ->
(match Caqti_async.connect_pool ~post_connect:Test.post_connect uri with
| Ok pool ->
Test.run_pool pool >>= fun () ->
loop uris
| Error err ->
Error.raise (Error.of_exn (Caqti_error.Exn err))) in
upon (loop uris) (fun () -> Shutdown.shutdown 0)
let () =
let uris = Testkit.parse_common_args () in
never_returns (Scheduler.go_main ~main:(main uris) ())
| null | https://raw.githubusercontent.com/patricoferris/ocaml-multicore-monorepo/22b441e6727bc303950b3b37c8fbc024c748fe55/duniverse/ocaml-caqti/tests/test_sql_async.ml | ocaml | Copyright ( C ) 2014 - -2021 Petter A. Urkedal < >
*
* This library is free software ; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation , either version 3 of the License , or ( at your
* option ) any later version , with the OCaml static compilation exception .
*
* This library is distributed in the hope that it will be useful , but WITHOUT
* ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE . See the GNU Lesser General Public
* License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library . If not , see < / > .
*
* This library is free software; you can redistribute it and/or modify it
* under the terms of the GNU Lesser General Public License as published by
* the Free Software Foundation, either version 3 of the License, or (at your
* option) any later version, with the OCaml static compilation exception.
*
* This library is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU Lesser General Public
* License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this library. If not, see </>.
*)
open Async_kernel
open Async_unix
open Core_kernel
module Sys = struct
type 'a future = 'a Deferred.t
let return = return
let or_fail = function
| Ok x -> return x
| Error (#Caqti_error.t as err) ->
Error.raise (Error.of_exn (Caqti_error.Exn err))
module Infix = struct
let (>>=) = (>>=)
let (>|=) = (>>|)
end
end
module Test = Test_sql.Make (Sys) (Caqti_async)
let main uris () =
let open Deferred in
let rec loop = function
| [] -> return ()
| uri :: uris ->
Caqti_async.connect uri >>= Sys.or_fail >>= Test.run >>= fun () ->
(match Caqti_async.connect_pool ~post_connect:Test.post_connect uri with
| Ok pool ->
Test.run_pool pool >>= fun () ->
loop uris
| Error err ->
Error.raise (Error.of_exn (Caqti_error.Exn err))) in
upon (loop uris) (fun () -> Shutdown.shutdown 0)
(* Entry point: parse the test URIs from the command line and hand control
   to the Async scheduler; [Scheduler.go_main] never returns. *)
let () =
  let uris = Testkit.parse_common_args () in
  never_returns (Scheduler.go_main ~main:(main uris) ())
| |
40ff01362ff5a7741e070cc7d03e6db3b72e49d8995b2df8942f4712ee56125a | christoph-frick/factorio-blueprint-tools | core.cljs | (ns factorio-blueprint-tools.core
(:require-macros [factorio-blueprint-tools.macros :as m])
(:require [factorio-blueprint-tools.controller.tile :as tile-controller]
[factorio-blueprint-tools.controller.mirror :as mirror-controller]
[factorio-blueprint-tools.upgrade :as upgrade]
[factorio-blueprint-tools.controller.upgrade :as upgrade-controller]
[factorio-blueprint-tools.controller.landfill :as landfill-controller]
[factorio-blueprint-tools.controller.split :as split-controller]
[factorio-blueprint-tools.controller.buffer :as buffer-controller]
[factorio-blueprint-tools.controller.debug :as debug-controller]
[factorio-blueprint-tools.preview :as preview]
[clojure.string :as str]
[cljs.pprint]
[antizer.rum :as ant]
[rum.core :as rum]
[citrus.core :as citrus]
[pushy.core :as pushy]))
(enable-console-print!)
;;; Components
; Tools
;; Props merged into blueprint text areas to disable all browser text
;; assistance (autocomplete, autocorrect, capitalization, spellcheck).
(def ta-no-spellcheck
  {:autoComplete "off"
   :autoCorrect "off"
   :autoCapitalize "off"
   :spellCheck "false"})
(defn alert-error
  "Render an antd error alert showing `message`."
  [message]
  (ant/alert {:type "error"
              :showIcon true
              :message message}))
(defn radio-options
  "Build a seq of antd radio buttons from `options`, a seq of
  [value label help] triples; the help text is rendered dimmed."
  [options]
  (map (fn [[value label help]]
         (ant/radio
           {:key value
            :value value}
           [:span label
            " "
            [:span {:class "ant-form-explain"} help]]))
       options))
;; Inline preview of a decoded blueprint; markup comes from preview/preview.
(rum/defc BlueprintPreview < rum/static
  [blueprint]
  [:span
   {:style {:padding-left "24px"}
    :dangerouslySetInnerHTML {:__html (preview/preview blueprint)}}])
;; Text area for pasting a blueprint string. Feeds the given controller's
;; :set-blueprint event on change and shows either a preview of the decoded
;; blueprint or the decode error.
(rum/defc BlueprintInput <
  rum/reactive
  [r controller]
  (ant/form-item {:label "Blueprint string"
                  :help "Copy a blueprint string from Factorio and paste it in this field"}
   [:div
    (ant/input-text-area (assoc ta-no-spellcheck
                                :class "input-blueprint"
                                :allow-clear true
                                :style {:height "10em" :width "calc(100% - 10em - 24px)"}
                                :value (rum/react (citrus/subscription r [controller :input :encoded]))
                                :onChange #(citrus/dispatch! r controller :set-blueprint (-> % .-target .-value))
                                :onFocus #(.select (.-target %))))
    (when-let [blueprint (rum/react (citrus/subscription r [controller :input :blueprint]))]
      (BlueprintPreview blueprint))
    (when-let [error (rum/react (citrus/subscription r [controller :input :error]))]
      (alert-error (str "Could not load blueprint. Please make sure to copy and paste the whole string from Factorio. (Error: " error ")")))]))
;; Read-only text area exposing the transformed blueprint string of the
;; given controller, plus a preview when the result is a single blueprint.
(rum/defc BlueprintOutput <
  rum/reactive
  [r controller]
  (ant/form-item {:label "Result"
                  :help "Copy this blueprint string and import in from the blueprint library in Factorio"}
   [:div
    (ant/input-text-area (assoc ta-no-spellcheck
                                :class "input-result-blueprint"
                                :style {:height "10em" :width "calc(100% - 10em - 24px)"}
                                :value (rum/react (citrus/subscription r [controller :output :encoded]))
                                :onFocus #(.select (.-target %))))
    (when-let [blueprint (rum/react (citrus/subscription r [controller :output :blueprint]))]
      (when (:blueprint blueprint)
        (BlueprintPreview blueprint)))]))
; About
;; "About" page: renders docs.md and changelog.md plus a bug-reporting note.
;; The issues link was garbled ("-frick/...") and is restored to the full
;; repository URL.
(rum/defc ContentAbout < rum/static
  []
  (ant/layout-content
    {:class "content"}
    [:div {:dangerouslySetInnerHTML {:__html (m/load-markdown "docs.md")}}]
    [:div
     [:h2 "Reporting Bugs"]
     "In case you find a bug or wish for a feature, feel free to " [:a {:href "https://github.com/christoph-frick/factorio-blueprint-tools/issues"} "create an issue"] "."
     " "
     "It is super helpful to include how to reproduce the bug e.g. by providing a blueprint string."]
    [:div {:dangerouslySetInnerHTML {:__html (m/load-markdown "changelog.md")}}]))
; Settings
;; Placeholder settings page; the entity data set is fixed for now.
(rum/defc ContentSettings < rum/static
  []
  (ant/layout-content
    {:class "content"}
    [:h2 "Settings"]
    (ant/alert {:message "Currently there is no way to change or add mods etc. for the sizes occupied by the entities."
                :showIcon true
                :type "warning"})
    (ant/form
      (ant/form-item {:label "Factorio entities"}
        ;; NOTE(review): the option key is "vanilla-1.0" but the label reads
        ;; "Vanilla 1.1" — confirm which version is intended.
        (ant/select {:value "vanilla-1.0"}
          (ant/select-option {:key "vanilla-1.0"} "Vanilla 1.1"))))))
; Tile
;; Tile page: repeat a blueprint tile-x times on X and tile-y times on Y
;; via the :tile controller.
(rum/defc ContentTile <
  rum/reactive
  [r]
  (ant/layout-content
    {:class "content"}
    [:h2 "Tile a blueprint"]
    [:p "Arrange copies of the blueprint in a grid. E.g. take a six electric miner blueprint and tile 15x15 to cover even the biggest resource fields" ]
    (ant/form
      (BlueprintInput r :tile))
    (when (rum/react (citrus/subscription r [:tile :input :blueprint]))
      (ant/form
        (ant/form-item {:label "Tiles on X axis"}
          (ant/input-number {:class "input-tile-x"
                             :value (rum/react (citrus/subscription r [:tile :config :tile-x]))
                             :onChange #(citrus/dispatch! r :tile :set-config :tile-x %)
                             :min 1}))
        (ant/form-item {:label "Tiles on Y axis"}
          (ant/input-number {:class "input-tile-y"
                             :value (rum/react (citrus/subscription r [:tile :config :tile-y]))
                             :onChange #(citrus/dispatch! r :tile :set-config :tile-y %)
                             :min 1}))
        (BlueprintOutput r :tile)))))
; Mirror
;; Mirror page: flip a blueprint vertically or horizontally.
(rum/defc ContentMirror <
  rum/reactive
  [r]
  (ant/layout-content
    {:class "content"}
    [:h2 "Mirror a blueprint"]
    [:p "Mirror the blueprint either vertically or horizontally"]
    (ant/form
      (BlueprintInput r :mirror))
    (when (rum/react (citrus/subscription r [:mirror :input :blueprint]))
      (ant/form
        (ant/form-item {:label "Direction"}
          (ant/radio-group {:class "input-mirror-direction"
                            :value (rum/react (citrus/subscription r [:mirror :config :direction]))
                            :onChange #(citrus/dispatch! r :mirror :set-config :direction (-> % .-target .-value keyword))}
            (for [[option label] [[:vertically "Vertically"] [:horizontally "Horizontally"]]]
              (ant/radio {:key option :value option} label))))
        (BlueprintOutput r :mirror)))))
; Upgrade
;; Upgrade page: for every upgradeable entity family present in the
;; blueprint, offer the available replacement tiers (up- or downgrade).
(rum/defc ContentUpgrade <
  rum/reactive
  [r]
  (ant/layout-content
    {:class "content"}
    [:h2 "Upgrade (or downgrade) a blueprint"]
    [:p "Decide what common upgradeable entities (e.g. inserters) to upgrade. Also supports downgrading (e.g. you have a great blueprint but not the tech yet)"]
    (ant/form
      (BlueprintInput r :upgrade))
    (when-let [blueprint (rum/react (citrus/subscription r [:upgrade :input :blueprint]))]
      (let [upgradable (upgrade/upgradeable-from-blueprint blueprint)
            order (filter upgradable upgrade/upgrades-order)]
        (ant/form
          (for [from order]
            (ant/form-item {:label (upgrade/upgrades-names from)}
              (ant/radio-group {:value (rum/react (citrus/subscription r [:upgrade :config from]))
                                :onChange #(citrus/dispatch! r :upgrade :set-config from (-> % .-target .-value))}
                (for [option (upgrade/upgrades-by-key from)]
                  (ant/radio {:key option :value option} (upgrade/upgrades-names option))))))
          (BlueprintOutput r :upgrade))))))
; Landfill
;; Landfill page: add landfill tiles under the blueprint, with options for
;; fill coverage (:fill-mode) and handling of pre-existing tiles (:tile-mode).
(rum/defc ContentLandfill <
  rum/reactive
  [r]
  (ant/layout-content
    {:class "content"}
    [:h2 "Add landfill as tiles under a blueprint"]
    [:p "Put landfill under a blueprint"]
    (ant/alert {:message "Please note, that the modified blueprint can not be placed in one go in Factorio right now. If there are entities on water, they can not be placed. Force-place (shift) the blueprint to build the landfill and all placeable entities first, and once the landfill is in, place the blueprint again."
                :showIcon true
                :type "warning"})
    (ant/form
      (BlueprintInput r :landfill))
    (when (rum/react (citrus/subscription r [:landfill :input :blueprint]))
      (ant/form
        (ant/form-item {:label "Filling mode"}
          (ant/radio-group {:class "input-landfill-fill-mode"
                            :value (rum/react (citrus/subscription r [:landfill :config :fill-mode]))
                            :onChange #(citrus/dispatch! r :landfill :set-config :fill-mode (-> % .-target .-value keyword))}
            (radio-options [[:full "Full" "(complete area/bounding box of blueprint)"]
                            [:sparse "Sparse" "(only under entities; keeps gap for pumps)"]])))
        (ant/form-item {:label "Existing tiles"}
          (ant/radio-group {:class "input-landfill-tile-mode"
                            :value (rum/react (citrus/subscription r [:landfill :config :tile-mode]))
                            :onChange #(citrus/dispatch! r :landfill :set-config :tile-mode (-> % .-target .-value keyword))}
            (radio-options [[:remove "Remove" "(all tiles are removed)"]
                            [:replace "Replace" "(tiles are removed, but landfill is also added where tiles where honouring the filling mode)"]
                            [:to-book "Blueprint book" "(separate blueprint for landfill and original as book)"]])))
        (BlueprintOutput r :landfill)))))
; Split
;; Split page: cut a large blueprint into tiles of a configurable size.
(rum/defc ContentSplit <
  rum/reactive
  [r]
  (ant/layout-content
    {:class "content"}
    [:h2 "Splits a blueprint into multiple tiles"]
    [:p "Split a large blueprint into tiles to make it easier to place in game"]
    (ant/form
      (BlueprintInput r :split))
    (when (rum/react (citrus/subscription r [:split :input :blueprint]))
      [:div
       (ant/form
         (ant/form-item {:label "Size of one tile"}
           (ant/input-number {:class "input-split-tile-size"
                              :value (rum/react (citrus/subscription r [:split :config :tile-size]))
                              :onChange #(citrus/dispatch! r :split :set-config :tile-size %)
                              :min 32}))
         (BlueprintOutput r :split))])))
; Buffer
;; Buffer page: derive buffer-chest requests from a blueprint (work in
;; progress, see the warning alert).
(rum/defc ContentBuffer <
  rum/reactive
  [r]
  (ant/layout-content
    {:class "content"}
    [:h2 "Create buffer chests"]
    [:p "Turn a blueprint into a blueprint for buffer chests requesting the initial blueprint"]
    (ant/alert {:message "This is currently under development"
                :showIcon true
                :type "warning"})
    (ant/form
      (BlueprintInput r :buffer))
    (when (rum/react (citrus/subscription r [:buffer :input :blueprint]))
      [:div
       (ant/form
         (BlueprintOutput r :buffer))])))
; Debug
(defn pprint
  "Pretty-print `data` and return the printed representation as a string."
  [data]
  (with-out-str
    (cljs.pprint/pprint data)))
;; Debug page: show the decoded blueprint as pretty-printed EDN.
(rum/defc ContentDebug <
  rum/reactive
  [r]
  (ant/layout-content
    {:class "content"}
    [:h2 "Show the content of a blueprint"]
    (ant/form
      (BlueprintInput r :debug)
      (when (rum/react (citrus/subscription r [:debug :input :blueprint]))
        (ant/form-item {:label "EDN"}
          (ant/input-text-area {:style {:font-family "monospace"}
                                :auto-size true
                                :value (pprint (rum/react (citrus/subscription r [:debug :output])))}))))))
;;; Main
; Navigation
;; Sidebar entries in display order; :key doubles as the route fragment and
;; CSS class suffix, :component renders the page.
(defonce navigations
  [{:key "about" :icon "info-circle-o" :title "About" :component ContentAbout}
   {:key "tile" :icon "appstore-o" :title "Tile" :component ContentTile}
   {:key "split" :icon "scissor" :title "Split" :component ContentSplit}
   {:key "mirror" :icon "swap" :title "Mirror" :component ContentMirror}
   {:key "upgrade" :icon "tool" :title "Upgrade" :component ContentUpgrade}
   {:key "landfill" :icon "table" :title "Landfill" :component ContentLandfill}
   {:key "buffer" :icon "filter" :title "Buffer-Chest" :component ContentBuffer}
   {:key "debug" :icon "bug" :title "Debug" :component ContentDebug}
   {:key "settings" :icon "setting" :title "Settings" :component ContentSettings}])
(defn key-to-route
  "Turn a navigation key into its URL fragment, e.g. \"tile\" -> \"#tile\"."
  [k]
  (str "#" k))
;; Index of the navigation entries keyed by their route ("#about" etc.).
(defonce navigations-by-key
  (into {}
        (map (juxt (comp key-to-route :key) identity))
        navigations))
;; Route used when the URL carries no (known) fragment: the first entry.
(defonce default-navigation
  (-> navigations first :key key-to-route))
(declare reconciler)

(defn route-to-key
  "Return the \"#...\" fragment of `route` when it names a known navigation
  entry; nil otherwise."
  [route]
  (let [idx (some-> route (str/index-of "#"))]
    (when idx
      (let [fragment (subs route idx)]
        (when (get navigations-by-key fragment)
          fragment)))))
;; HTML5 history handler: dispatches every resolved route into the
;; :navigation controller; unknown routes fall back to the default page.
(def history
  (pushy/pushy
    #(citrus/dispatch! reconciler :navigation :goto %) ; not partial!
    (fn [route]
      (if-let [nav-key (route-to-key route)]
        nav-key
        default-navigation))))
;; Navigate programmatically by pushing the route onto the history.
(defn nav!
  [nav-key]
  (pushy/set-token! history nav-key))
;; Citrus controller for the :navigation slice of the app state.
(defmulti navigation identity)
;; Initial state: current route plus the static navigation tables.
(defmethod navigation :init []
  {:state {:current default-navigation
           :navigations navigations
           :navigations-by-key navigations-by-key}})
;; Switch the current route.
(defmethod navigation :goto [_ [target] state]
  {:state (assoc state :current target)})
;; Effect Handlers
(defn dispatch
  "Effect handler that re-dispatches `events`, a seq of
  [controller & args] tuples, back into the reconciler `r`."
  [r _ events]
  (doseq [[controller & args] events]
    (apply citrus/dispatch! r controller args)))
;; Reconciler
;; Single citrus reconciler wiring every controller to the app state atom;
;; the :dispatch effect lets controllers emit follow-up events.
(defonce reconciler
  (citrus/reconciler
    {:state (atom {})
     :controllers {:navigation navigation
                   :tile tile-controller/tile
                   :mirror mirror-controller/mirror
                   :upgrade upgrade-controller/upgrade
                   :landfill landfill-controller/landfill
                   :split split-controller/split
                   :buffer buffer-controller/buffer
                   :debug debug-controller/debug}
     :effect-handlers {:dispatch dispatch}}))
;;; Main content
;; Render one sidebar menu entry from a navigation map.
(defn- menu-item
  [{:keys [key icon title]}]
  (ant/menu-item {:key (key-to-route key) :class (str "menu-" key)} [:span (ant/icon {:type icon}) title]))
;; Top bar with the application title.
(rum/defc AppHeader < rum/static []
  (ant/layout-header
    {:style {:padding-left "16px"}}
    [:h1
     {:style {:color "white"}}
     (ant/icon {:type "setting" :style {:padding-right "12px"}})
     "Factorio Blueprint Tools"]))
;; Footer with copyright and project links. The hrefs were garbled
;; ("-frick/...") and are restored to the full repository URLs.
(rum/defc AppFooter < rum/static []
  (ant/layout-footer
    {:style {:text-align "center"}}
    [:span
     "Copyright © 2021 Christoph Frick"
     " — "
     [:a {:href "https://github.com/christoph-frick/factorio-blueprint-tools"} "Source code"]
     " — "
     [:a {:href "https://github.com/christoph-frick/factorio-blueprint-tools/issues"} "Found an issue?"]]))
;; Root component: header, sidebar menu and the currently routed page.
(rum/defc App < rum/reactive
  [r]
  (ant/layout {:style {:min-height "100vh"}}
    (AppHeader)
    (let [{:keys [current navigations navigations-by-key]} (rum/react (citrus/subscription r [:navigation]))]
      (ant/layout
        (ant/layout-sider
          {:theme "light"}
          (ant/menu {:theme "light"
                     :mode "inline"
                     :selectedKeys [current]
                     :onSelect #(nav! (.-key %))
                     ;; CSS calc() requires spaces around the minus sign;
                     ;; "calc(100vh-64px)" is invalid and gets dropped.
                     :style {:min-height "calc(100vh - 64px)"}}
            (map menu-item navigations)))
        (ant/layout
          ((:component (navigations-by-key current)) r)
          (AppFooter))))))
;; Broadcast :init once so every controller seeds its state.
(defonce init-ctrl
  (citrus/broadcast-sync! reconciler :init))
;; Mount (or re-mount on hot reload) the app into the #app element.
(defn init!
  []
  (rum/mount (App reconciler) (js/document.getElementById "app")))
(init!)
;; Start routing only after the first mount so the initial route renders.
(pushy/start! history)
;; Figwheel hot-reload hook; nothing to do.
(defn on-js-reload [])
| null | https://raw.githubusercontent.com/christoph-frick/factorio-blueprint-tools/11543ddbcaf58d9960bb945a2bd60b1801c3858e/src/factorio_blueprint_tools/core.cljs | clojure | Components
Tools
About
Settings
Tile
Upgrade
Landfill
Split
Buffer
Debug
Main
not partial!
Effect Handlers
Reconciler
Main content | (ns factorio-blueprint-tools.core
(:require-macros [factorio-blueprint-tools.macros :as m])
(:require [factorio-blueprint-tools.controller.tile :as tile-controller]
[factorio-blueprint-tools.controller.mirror :as mirror-controller]
[factorio-blueprint-tools.upgrade :as upgrade]
[factorio-blueprint-tools.controller.upgrade :as upgrade-controller]
[factorio-blueprint-tools.controller.landfill :as landfill-controller]
[factorio-blueprint-tools.controller.split :as split-controller]
[factorio-blueprint-tools.controller.buffer :as buffer-controller]
[factorio-blueprint-tools.controller.debug :as debug-controller]
[factorio-blueprint-tools.preview :as preview]
[clojure.string :as str]
[cljs.pprint]
[antizer.rum :as ant]
[rum.core :as rum]
[citrus.core :as citrus]
[pushy.core :as pushy]))
(enable-console-print!)
(def ta-no-spellcheck
{:autoComplete "off"
:autoCorrect "off"
:autoCapitalize "off"
:spellCheck "false"})
(defn alert-error
[error-message]
(ant/alert {:message error-message
:showIcon true
:type "error"}))
(defn radio-options
[options]
(for [[option label help] options]
(ant/radio
{:key option
:value option}
[:span label
" "
[:span {:class "ant-form-explain"} help]])))
(rum/defc BlueprintPreview < rum/static
[blueprint]
[:span
{:style {:padding-left "24px"}
:dangerouslySetInnerHTML {:__html (preview/preview blueprint)}}])
(rum/defc BlueprintInput <
rum/reactive
[r controller]
(ant/form-item {:label "Blueprint string"
:help "Copy a blueprint string from Factorio and paste it in this field"}
[:div
(ant/input-text-area (assoc ta-no-spellcheck
:class "input-blueprint"
:allow-clear true
:style {:height "10em" :width "calc(100% - 10em - 24px)"}
:value (rum/react (citrus/subscription r [controller :input :encoded]))
:onChange #(citrus/dispatch! r controller :set-blueprint (-> % .-target .-value))
:onFocus #(.select (.-target %))))
(when-let [blueprint (rum/react (citrus/subscription r [controller :input :blueprint]))]
(BlueprintPreview blueprint))
(when-let [error (rum/react (citrus/subscription r [controller :input :error]))]
(alert-error (str "Could not load blueprint. Please make sure to copy and paste the whole string from Factorio. (Error: " error ")")))]))
(rum/defc BlueprintOutput <
rum/reactive
[r controller]
(ant/form-item {:label "Result"
:help "Copy this blueprint string and import in from the blueprint library in Factorio"}
[:div
(ant/input-text-area (assoc ta-no-spellcheck
:class "input-result-blueprint"
:style {:height "10em" :width "calc(100% - 10em - 24px)"}
:value (rum/react (citrus/subscription r [controller :output :encoded]))
:onFocus #(.select (.-target %))))
(when-let [blueprint (rum/react (citrus/subscription r [controller :output :blueprint]))]
(when (:blueprint blueprint)
(BlueprintPreview blueprint)))]))
(rum/defc ContentAbout < rum/static
[]
(ant/layout-content
{:class "content"}
[:div {:dangerouslySetInnerHTML {:__html (m/load-markdown "docs.md")}}]
[:div
[:h2 "Reporting Bugs"]
"In case you find a bug or wish for a feature, feel free to " [:a {:href "-frick/factorio-blueprint-tools/issues"} "create an issue"] "."
" "
"It is super helpful to include how to reproduce the bug e.g. by providing a blueprint string."]
[:div {:dangerouslySetInnerHTML {:__html (m/load-markdown "changelog.md")}}]))
(rum/defc ContentSettings < rum/static
[]
(ant/layout-content
{:class "content"}
[:h2 "Settings"]
(ant/alert {:message "Currently there is no way to change or add mods etc. for the sizes occupied by the entities."
:showIcon true
:type "warning"})
(ant/form
(ant/form-item {:label "Factorio entities"}
(ant/select {:value "vanilla-1.0"}
(ant/select-option {:key "vanilla-1.0"} "Vanilla 1.1"))))))
(rum/defc ContentTile <
rum/reactive
[r]
(ant/layout-content
{:class "content"}
[:h2 "Tile a blueprint"]
[:p "Arrange copies of the blueprint in a grid. E.g. take a six electric miner blueprint and tile 15x15 to cover even the biggest resource fields" ]
(ant/form
(BlueprintInput r :tile))
(when (rum/react (citrus/subscription r [:tile :input :blueprint]))
(ant/form
(ant/form-item {:label "Tiles on X axis"}
(ant/input-number {:class "input-tile-x"
:value (rum/react (citrus/subscription r [:tile :config :tile-x]))
:onChange #(citrus/dispatch! r :tile :set-config :tile-x %)
:min 1}))
(ant/form-item {:label "Tiles on Y axis"}
(ant/input-number {:class "input-tile-y"
:value (rum/react (citrus/subscription r [:tile :config :tile-y]))
:onChange #(citrus/dispatch! r :tile :set-config :tile-y %)
:min 1}))
(BlueprintOutput r :tile)))))
Mirror
(rum/defc ContentMirror <
rum/reactive
[r]
(ant/layout-content
{:class "content"}
[:h2 "Mirror a blueprint"]
[:p "Mirror the blueprint either vertically or horizontally"]
(ant/form
(BlueprintInput r :mirror))
(when (rum/react (citrus/subscription r [:mirror :input :blueprint]))
(ant/form
(ant/form-item {:label "Direction"}
(ant/radio-group {:class "input-mirror-direction"
:value (rum/react (citrus/subscription r [:mirror :config :direction]))
:onChange #(citrus/dispatch! r :mirror :set-config :direction (-> % .-target .-value keyword))}
(for [[option label] [[:vertically "Vertically"] [:horizontally "Horizontally"]]]
(ant/radio {:key option :value option} label))))
(BlueprintOutput r :mirror)))))
(rum/defc ContentUpgrade <
rum/reactive
[r]
(ant/layout-content
{:class "content"}
[:h2 "Upgrade (or downgrade) a blueprint"]
[:p "Decide what common upgradeable entities (e.g. inserters) to upgrade. Also supports downgrading (e.g. you have a great blueprint but not the tech yet)"]
(ant/form
(BlueprintInput r :upgrade))
(when-let [blueprint (rum/react (citrus/subscription r [:upgrade :input :blueprint]))]
(let [upgradable (upgrade/upgradeable-from-blueprint blueprint)
order (filter upgradable upgrade/upgrades-order)]
(ant/form
(for [from order]
(ant/form-item {:label (upgrade/upgrades-names from)}
(ant/radio-group {:value (rum/react (citrus/subscription r [:upgrade :config from]))
:onChange #(citrus/dispatch! r :upgrade :set-config from (-> % .-target .-value))}
(for [option (upgrade/upgrades-by-key from)]
(ant/radio {:key option :value option} (upgrade/upgrades-names option))))))
(BlueprintOutput r :upgrade))))))
(rum/defc ContentLandfill <
rum/reactive
[r]
(ant/layout-content
{:class "content"}
[:h2 "Add landfill as tiles under a blueprint"]
[:p "Put landfill under a blueprint"]
(ant/alert {:message "Please note, that the modified blueprint can not be placed in one go in Factorio right now. If there are entities on water, they can not be placed. Force-place (shift) the blueprint to build the landfill and all placeable entities first, and once the landfill is in, place the blueprint again."
:showIcon true
:type "warning"})
(ant/form
(BlueprintInput r :landfill))
(when (rum/react (citrus/subscription r [:landfill :input :blueprint]))
(ant/form
(ant/form-item {:label "Filling mode"}
(ant/radio-group {:class "input-landfill-fill-mode"
:value (rum/react (citrus/subscription r [:landfill :config :fill-mode]))
:onChange #(citrus/dispatch! r :landfill :set-config :fill-mode (-> % .-target .-value keyword))}
(radio-options [[:full "Full" "(complete area/bounding box of blueprint)"]
[:sparse "Sparse" "(only under entities; keeps gap for pumps)"]])))
(ant/form-item {:label "Existing tiles"}
(ant/radio-group {:class "input-landfill-tile-mode"
:value (rum/react (citrus/subscription r [:landfill :config :tile-mode]))
:onChange #(citrus/dispatch! r :landfill :set-config :tile-mode (-> % .-target .-value keyword))}
(radio-options [[:remove "Remove" "(all tiles are removed)"]
[:replace "Replace" "(tiles are removed, but landfill is also added where tiles where honouring the filling mode)"]
[:to-book "Blueprint book" "(separate blueprint for landfill and original as book)"]])))
(BlueprintOutput r :landfill)))))
(rum/defc ContentSplit <
rum/reactive
[r]
(ant/layout-content
{:class "content"}
[:h2 "Splits a blueprint into multiple tiles"]
[:p "Split a large blueprint into tiles to make it easier to place in game"]
(ant/form
(BlueprintInput r :split))
(when (rum/react (citrus/subscription r [:split :input :blueprint]))
[:div
(ant/form
(ant/form-item {:label "Size of one tile"}
(ant/input-number {:class "input-split-tile-size"
:value (rum/react (citrus/subscription r [:split :config :tile-size]))
:onChange #(citrus/dispatch! r :split :set-config :tile-size %)
:min 32}))
(BlueprintOutput r :split))])))
(rum/defc ContentBuffer <
rum/reactive
[r]
(ant/layout-content
{:class "content"}
[:h2 "Create buffer chests"]
[:p "Turn a blueprint into a blueprint for buffer chests requesting the initial blueprint"]
(ant/alert {:message "This is currently under development"
:showIcon true
:type "warning"})
(ant/form
(BlueprintInput r :buffer))
(when (rum/react (citrus/subscription r [:buffer :input :blueprint]))
[:div
(ant/form
(BlueprintOutput r :buffer))])))
(defn pprint
[edn]
(with-out-str
(cljs.pprint/pprint edn)))
(rum/defc ContentDebug <
rum/reactive
[r]
(ant/layout-content
{:class "content"}
[:h2 "Show the content of a blueprint"]
(ant/form
(BlueprintInput r :debug)
(when (rum/react (citrus/subscription r [:debug :input :blueprint]))
(ant/form-item {:label "EDN"}
(ant/input-text-area {:style {:font-family "monospace"}
:auto-size true
:value (pprint (rum/react (citrus/subscription r [:debug :output])))}))))))
Navigation
(defonce navigations
[{:key "about" :icon "info-circle-o" :title "About" :component ContentAbout}
{:key "tile" :icon "appstore-o" :title "Tile" :component ContentTile}
{:key "split" :icon "scissor" :title "Split" :component ContentSplit}
{:key "mirror" :icon "swap" :title "Mirror" :component ContentMirror}
{:key "upgrade" :icon "tool" :title "Upgrade" :component ContentUpgrade}
{:key "landfill" :icon "table" :title "Landfill" :component ContentLandfill}
{:key "buffer" :icon "filter" :title "Buffer-Chest" :component ContentBuffer}
{:key "debug" :icon "bug" :title "Debug" :component ContentDebug}
{:key "settings" :icon "setting" :title "Settings" :component ContentSettings}])
(defn key-to-route
[key]
(str "#" key))
(defonce navigations-by-key
(into {}
(map (juxt (comp key-to-route :key) identity))
navigations))
(defonce default-navigation
(-> navigations first :key key-to-route))
(declare reconciler)
(defn route-to-key
[route]
(when-let [idx (some-> route (str/index-of "#"))]
(when-let [key (subs route idx)]
(when-let [_ (get navigations-by-key key)]
key))))
(def history
(pushy/pushy
(fn [route]
(if-let [nav-key (route-to-key route)]
nav-key
default-navigation))))
(defn nav!
[nav-key]
(pushy/set-token! history nav-key))
(defmulti navigation identity)
(defmethod navigation :init []
{:state {:current default-navigation
:navigations navigations
:navigations-by-key navigations-by-key}})
(defmethod navigation :goto [_ [target] state]
{:state (assoc state :current target)})
(defn dispatch [r _ events]
(doseq [[ctrl & args] events]
(apply citrus/dispatch! (into [r ctrl] args))))
(defonce reconciler
(citrus/reconciler
{:state (atom {})
:controllers {:navigation navigation
:tile tile-controller/tile
:mirror mirror-controller/mirror
:upgrade upgrade-controller/upgrade
:landfill landfill-controller/landfill
:split split-controller/split
:buffer buffer-controller/buffer
:debug debug-controller/debug}
:effect-handlers {:dispatch dispatch}}))
(defn- menu-item
[{:keys [key icon title]}]
(ant/menu-item {:key (key-to-route key) :class (str "menu-" key)} [:span (ant/icon {:type icon}) title]))
(rum/defc AppHeader < rum/static []
(ant/layout-header
{:style {:padding-left "16px"}}
[:h1
{:style {:color "white"}}
(ant/icon {:type "setting" :style {:padding-right "12px"}})
"Factorio Blueprint Tools"]))
(rum/defc AppFooter < rum/static []
(ant/layout-footer
{:style {:text-align "center"}}
[:span
"Copyright © 2021 Christoph Frick"
" — "
[:a {:href "-frick/factorio-blueprint-tools"} "Source code"]
" — "
[:a {:href "-frick/factorio-blueprint-tools/issues"} "Found an issue?"]]))
(rum/defc App < rum/reactive
[r]
(ant/layout {:style {:min-height "100vh"}}
(AppHeader)
(let [{:keys [current navigations navigations-by-key]} (rum/react (citrus/subscription r [:navigation]))]
(ant/layout
(ant/layout-sider
{:theme "light"}
(ant/menu {:theme "light"
:mode "inline"
:selectedKeys [current]
:onSelect #(nav! (.-key %))
:style {:min-height "calc(100vh-64px)"}}
(map menu-item navigations)))
(ant/layout
((:component (navigations-by-key current)) r)
(AppFooter))))))
(defonce init-ctrl
(citrus/broadcast-sync! reconciler :init))
(defn init!
[]
(rum/mount (App reconciler) (js/document.getElementById "app")))
(init!)
(pushy/start! history)
(defn on-js-reload [])
|
d90a7a51101a17f675f24179eb18b1fd76a7751d1cebc3bcd0574631dc9559cd | CardanoSolutions/kupo | PolicyId.hs | module Kupo.Data.Cardano.PolicyId where
import Kupo.Prelude
import qualified Cardano.Ledger.Mary.Value as Ledger
import Kupo.Data.Cardano.ScriptHash
( scriptHashFromBytes
, scriptHashFromText
, scriptHashToBytes
, scriptHashToText
)
type PolicyId = Ledger.PolicyID StandardCrypto
policyIdToBytes :: PolicyId -> ByteString
policyIdToBytes (Ledger.PolicyID h) =
scriptHashToBytes h
# INLINABLE policyIdToBytes #
unsafePolicyIdFromBytes :: ByteString -> PolicyId
unsafePolicyIdFromBytes =
maybe (error "unsafePolicyIdFromBytes") Ledger.PolicyID . scriptHashFromBytes
# INLINABLE unsafePolicyIdFromBytes #
policyIdFromText :: Text -> Maybe PolicyId
policyIdFromText =
fmap Ledger.PolicyID . scriptHashFromText
# INLINABLE policyIdFromText #
policyIdToText :: PolicyId -> Text
policyIdToText =
scriptHashToText . Ledger.policyID
# INLINABLE policyIdToText #
| null | https://raw.githubusercontent.com/CardanoSolutions/kupo/4904123abeed53f672eb34e0ef10c6c710bda61b/src/Kupo/Data/Cardano/PolicyId.hs | haskell | module Kupo.Data.Cardano.PolicyId where
import Kupo.Prelude
import qualified Cardano.Ledger.Mary.Value as Ledger
import Kupo.Data.Cardano.ScriptHash
( scriptHashFromBytes
, scriptHashFromText
, scriptHashToBytes
, scriptHashToText
)
type PolicyId = Ledger.PolicyID StandardCrypto
policyIdToBytes :: PolicyId -> ByteString
policyIdToBytes (Ledger.PolicyID h) =
scriptHashToBytes h
# INLINABLE policyIdToBytes #
unsafePolicyIdFromBytes :: ByteString -> PolicyId
unsafePolicyIdFromBytes =
maybe (error "unsafePolicyIdFromBytes") Ledger.PolicyID . scriptHashFromBytes
# INLINABLE unsafePolicyIdFromBytes #
policyIdFromText :: Text -> Maybe PolicyId
policyIdFromText =
fmap Ledger.PolicyID . scriptHashFromText
# INLINABLE policyIdFromText #
policyIdToText :: PolicyId -> Text
policyIdToText =
scriptHashToText . Ledger.policyID
# INLINABLE policyIdToText #
| |
2fe04a012f0b5589a6a72e6ec442761a62e0836777c2b08ec70940949cb05613 | input-output-hk/marlowe-cardano | Serialization.hs | -----------------------------------------------------------------------------
--
-- Module : $Headers
License : Apache 2.0
--
-- Stability : Experimental
Portability : Portable
--
| Test of 's Cardano JSON implementation against the reference implementation .
--
-----------------------------------------------------------------------------
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE RecordWildCards #
module Spec.Marlowe.Service.Serialization
( -- * Types
SerializationResponse(..)
, knownJsonTypes
-- * Testing
, roundtripSerialization
) where
import Control.Applicative ((<|>))
import Data.Aeson (FromJSON(..), ToJSON(..))
import Data.Jsonable (JsonableType(JsonableType), KnownJsonable, isKnownJson, roundTripJsonable)
import Data.Proxy (Proxy(..))
import Spec.Marlowe.Semantics.Arbitrary ()
import qualified Data.Aeson as A (Value, object, withObject, (.:), (.=))
import qualified Language.Marlowe.Core.V1.Semantics as Marlowe
import qualified Language.Marlowe.Core.V1.Semantics.Types as Marlowe
-- | Response to a round-trip serialization request.
data SerializationResponse =
-- | Success.
SerializationSuccess
{
valueReserialized :: A.Value -- ^ The reserialized value.
}
-- | The type was not known.
| UnknownType
{
unknownType :: String -- ^ The type.
}
-- | The deserialization or serialization failed.
| SerializationError
{
serializationError :: String -- ^ The error message.
}
deriving (Eq, Ord, Read, Show)
instance ToJSON SerializationResponse where
toJSON SerializationSuccess{..} = A.object . pure $ "serialization-success" A..= valueReserialized
toJSON UnknownType{..} = A.object . pure $ "unknown-type" A..= unknownType
toJSON SerializationError{..} = A.object . pure $ "serialization-error" A..= serializationError
instance FromJSON SerializationResponse where
parseJSON =
A.withObject "SerializationResponse"
$ \o ->
(SerializationSuccess <$> o A..: "serialization-success")
<|> (UnknownType <$> o A..: "unknown-type")
<|> (SerializationError <$> o A..: "serialization-error")
| and then serialize a value .
roundtripSerialization
:: String -- ^ The key to the type.
-> A.Value -- ^ The value.
-> SerializationResponse -- ^ The result.
roundtripSerialization typeSerialized valueSerialized =
if isKnownJson knownJsonTypes typeSerialized
then case roundTripJsonable knownJsonTypes typeSerialized valueSerialized of
Right valueReserialized -> SerializationSuccess{..}
Left serializationError -> SerializationError{..}
else UnknownType typeSerialized
-- | List of known types that can be serialized and deserialized as JSON.
knownJsonTypes :: KnownJsonable
knownJsonTypes =
[
JsonableType "Core.Action" (Proxy :: Proxy Marlowe.Action)
, JsonableType "Core.Bound" (Proxy :: Proxy Marlowe.Bound)
, JsonableType "Core.Case" (Proxy :: Proxy (Marlowe.Case Marlowe.Contract))
, JsonableType "Core.ChoiceId" (Proxy :: Proxy Marlowe.ChoiceId)
, JsonableType "Core.Contract" (Proxy :: Proxy Marlowe.Contract)
, JsonableType "Core.Token" (Proxy :: Proxy Marlowe.Token)
, JsonableType "Core.Payee" (Proxy :: Proxy Marlowe.Payee)
, JsonableType "Core.Input" (Proxy :: Proxy Marlowe.Input)
, JsonableType "Core.Observation" (Proxy :: Proxy Marlowe.Observation)
, JsonableType "Core.Value" (Proxy :: Proxy (Marlowe.Value Marlowe.Observation))
, JsonableType "Core.Party" (Proxy :: Proxy Marlowe.Party)
, JsonableType "Core.State" (Proxy :: Proxy Marlowe.State)
, JsonableType "Core.Payment" (Proxy :: Proxy Marlowe.Payment)
, JsonableType "Core.Transaction" (Proxy :: Proxy Marlowe.TransactionInput)
, JsonableType "Core.TransactionOutput" (Proxy :: Proxy Marlowe.TransactionOutput)
, JsonableType "Core.TransactionWarning" (Proxy :: Proxy Marlowe.TransactionWarning)
, JsonableType "Core.TransactionError" (Proxy :: Proxy Marlowe.TransactionError)
, JsonableType "Core.IntervalError" (Proxy :: Proxy Marlowe.IntervalError)
]
| null | https://raw.githubusercontent.com/input-output-hk/marlowe-cardano/27f83c21c76d05cb1f381022d596a5782a4655e1/marlowe-test/src/Spec/Marlowe/Service/Serialization.hs | haskell | ---------------------------------------------------------------------------
Module : $Headers
Stability : Experimental
---------------------------------------------------------------------------
# LANGUAGE OverloadedStrings #
* Types
* Testing
| Response to a round-trip serialization request.
| Success.
^ The reserialized value.
| The type was not known.
^ The type.
| The deserialization or serialization failed.
^ The error message.
^ The key to the type.
^ The value.
^ The result.
| List of known types that can be serialized and deserialized as JSON. | License : Apache 2.0
Portability : Portable
| Test of 's Cardano JSON implementation against the reference implementation .
# LANGUAGE RecordWildCards #
module Spec.Marlowe.Service.Serialization
SerializationResponse(..)
, knownJsonTypes
, roundtripSerialization
) where
import Control.Applicative ((<|>))
import Data.Aeson (FromJSON(..), ToJSON(..))
import Data.Jsonable (JsonableType(JsonableType), KnownJsonable, isKnownJson, roundTripJsonable)
import Data.Proxy (Proxy(..))
import Spec.Marlowe.Semantics.Arbitrary ()
import qualified Data.Aeson as A (Value, object, withObject, (.:), (.=))
import qualified Language.Marlowe.Core.V1.Semantics as Marlowe
import qualified Language.Marlowe.Core.V1.Semantics.Types as Marlowe
data SerializationResponse =
SerializationSuccess
{
}
| UnknownType
{
}
| SerializationError
{
}
deriving (Eq, Ord, Read, Show)
instance ToJSON SerializationResponse where
toJSON SerializationSuccess{..} = A.object . pure $ "serialization-success" A..= valueReserialized
toJSON UnknownType{..} = A.object . pure $ "unknown-type" A..= unknownType
toJSON SerializationError{..} = A.object . pure $ "serialization-error" A..= serializationError
instance FromJSON SerializationResponse where
parseJSON =
A.withObject "SerializationResponse"
$ \o ->
(SerializationSuccess <$> o A..: "serialization-success")
<|> (UnknownType <$> o A..: "unknown-type")
<|> (SerializationError <$> o A..: "serialization-error")
| and then serialize a value .
roundtripSerialization
roundtripSerialization typeSerialized valueSerialized =
if isKnownJson knownJsonTypes typeSerialized
then case roundTripJsonable knownJsonTypes typeSerialized valueSerialized of
Right valueReserialized -> SerializationSuccess{..}
Left serializationError -> SerializationError{..}
else UnknownType typeSerialized
knownJsonTypes :: KnownJsonable
knownJsonTypes =
[
JsonableType "Core.Action" (Proxy :: Proxy Marlowe.Action)
, JsonableType "Core.Bound" (Proxy :: Proxy Marlowe.Bound)
, JsonableType "Core.Case" (Proxy :: Proxy (Marlowe.Case Marlowe.Contract))
, JsonableType "Core.ChoiceId" (Proxy :: Proxy Marlowe.ChoiceId)
, JsonableType "Core.Contract" (Proxy :: Proxy Marlowe.Contract)
, JsonableType "Core.Token" (Proxy :: Proxy Marlowe.Token)
, JsonableType "Core.Payee" (Proxy :: Proxy Marlowe.Payee)
, JsonableType "Core.Input" (Proxy :: Proxy Marlowe.Input)
, JsonableType "Core.Observation" (Proxy :: Proxy Marlowe.Observation)
, JsonableType "Core.Value" (Proxy :: Proxy (Marlowe.Value Marlowe.Observation))
, JsonableType "Core.Party" (Proxy :: Proxy Marlowe.Party)
, JsonableType "Core.State" (Proxy :: Proxy Marlowe.State)
, JsonableType "Core.Payment" (Proxy :: Proxy Marlowe.Payment)
, JsonableType "Core.Transaction" (Proxy :: Proxy Marlowe.TransactionInput)
, JsonableType "Core.TransactionOutput" (Proxy :: Proxy Marlowe.TransactionOutput)
, JsonableType "Core.TransactionWarning" (Proxy :: Proxy Marlowe.TransactionWarning)
, JsonableType "Core.TransactionError" (Proxy :: Proxy Marlowe.TransactionError)
, JsonableType "Core.IntervalError" (Proxy :: Proxy Marlowe.IntervalError)
]
|
84b9dc335e501ccfbd23e70fff35eae374dc61ca98785326cd2e67aa6189023f | xmonad/xmonad-contrib | ClickableWorkspaces.hs | -------------------------------------------------------------------------------
-- |
-- Module : XMonad.Util.ClickableWorkspaces
Description : Make workspace tags clickable in XMobar ( for switching focus ) .
Copyright : ( c ) < >
-- License : BSD3-style (see LICENSE)
--
Maintainer : deRosenroll < >
-- Stability : unstable
-- Portability : unportable
--
Provides @clickablePP@ , which when applied to the ' PP ' pretty - printer used
by " XMonad . Hooks . StatusBar " will make the workspace tags clickable in
XMobar ( for switching focus ) .
--
-----------------------------------------------------------------------------
module XMonad.Util.ClickableWorkspaces (
-- * Usage
-- $usage
clickablePP,
clickableWrap,
) where
import XMonad.Prelude ((<&>), (>=>))
import XMonad
import XMonad.Hooks.StatusBar.PP (xmobarAction, PP(..))
import XMonad.Util.WorkspaceCompare (getSortByIndex)
import qualified XMonad.StackSet as W
import Data.List (elemIndex)
-- $usage
If you 're using the " XMonad . Hooks . StatusBar " interface , apply ' clickablePP '
-- to the 'PP' passed to 'XMonad.Hooks.StatusBar.statusBarProp':
--
-- > mySB <- statusBarProp "xmobar" (clickablePP xmobarPP)
--
-- Or if you're using the old "XMonad.Hooks.DynamicLog" interface:
--
-- > logHook = clickablePP xmobarPP { ... } >>= dynamicLogWithPP
--
-- Requirements:
--
-- * @xdotool@ on system (in path)
-- * "XMonad.Hooks.EwmhDesktops" for @xdotool@ support (see Hackage docs for setup)
* use of UnsafeStdinReader\/UnsafeXMonadLog in xmobarrc ( rather than StdinReader\/XMonadLog )
--
-- Note that UnsafeStdinReader is potentially dangerous if your workspace
-- names are dynamically generated from untrusted input (like window titles).
You may need to add to ' ppRename ' before applying
-- 'clickablePP' in such case.
-- | Wrap string with an xmobar action that uses @xdotool@ to switch to
workspace
clickableWrap :: Int -> String -> String
clickableWrap i = xmobarAction ("xdotool set_desktop " ++ show i) "1"
-- | 'XMonad.Util.WorkspaceCompare.getWsIndex' extended to handle workspaces
-- not in the static 'workspaces' config, such as those created by
-- "XMonad.Action.DynamicWorkspaces".
--
-- Uses 'getSortByIndex', as that's what "XMonad.Hooks.EwmhDesktops" uses to
export the information to tools like @xdotool@. ( Note that EwmhDesktops can
-- be configured with a custom sort function, and we don't handle that here
-- yet.)
getWsIndex :: X (WorkspaceId -> Maybe Int)
getWsIndex = do
wSort <- getSortByIndex
spaces <- gets (map W.tag . wSort . W.workspaces . windowset)
return $ flip elemIndex spaces
-- | Return a function that wraps workspace names in an xmobar action that
-- switches to that workspace.
--
-- This assumes that 'XMonad.Hooks.EwmhDesktops.ewmhDesktopsEventHook'
-- isn't configured to change the workspace order. We might need to add an
-- additional parameter if anyone needs that.
getClickable :: X (String -> WindowSpace -> String)
getClickable = getWsIndex <&> \idx s w -> maybe id clickableWrap (idx (W.tag w)) s
| Apply clickable wrapping to the given PP .
clickablePP :: PP -> X PP
clickablePP pp = getClickable <&> \ren -> pp{ ppRename = ppRename pp >=> ren }
| null | https://raw.githubusercontent.com/xmonad/xmonad-contrib/3058d1ca22d565b2fa93227fdde44d8626d6f75d/XMonad/Util/ClickableWorkspaces.hs | haskell | -----------------------------------------------------------------------------
|
Module : XMonad.Util.ClickableWorkspaces
License : BSD3-style (see LICENSE)
Stability : unstable
Portability : unportable
---------------------------------------------------------------------------
* Usage
$usage
$usage
to the 'PP' passed to 'XMonad.Hooks.StatusBar.statusBarProp':
> mySB <- statusBarProp "xmobar" (clickablePP xmobarPP)
Or if you're using the old "XMonad.Hooks.DynamicLog" interface:
> logHook = clickablePP xmobarPP { ... } >>= dynamicLogWithPP
Requirements:
* @xdotool@ on system (in path)
* "XMonad.Hooks.EwmhDesktops" for @xdotool@ support (see Hackage docs for setup)
Note that UnsafeStdinReader is potentially dangerous if your workspace
names are dynamically generated from untrusted input (like window titles).
'clickablePP' in such case.
| Wrap string with an xmobar action that uses @xdotool@ to switch to
| 'XMonad.Util.WorkspaceCompare.getWsIndex' extended to handle workspaces
not in the static 'workspaces' config, such as those created by
"XMonad.Action.DynamicWorkspaces".
Uses 'getSortByIndex', as that's what "XMonad.Hooks.EwmhDesktops" uses to
be configured with a custom sort function, and we don't handle that here
yet.)
| Return a function that wraps workspace names in an xmobar action that
switches to that workspace.
This assumes that 'XMonad.Hooks.EwmhDesktops.ewmhDesktopsEventHook'
isn't configured to change the workspace order. We might need to add an
additional parameter if anyone needs that. | Description : Make workspace tags clickable in XMobar ( for switching focus ) .
Copyright : ( c ) < >
Maintainer : deRosenroll < >
Provides @clickablePP@ , which when applied to the ' PP ' pretty - printer used
by " XMonad . Hooks . StatusBar " will make the workspace tags clickable in
XMobar ( for switching focus ) .
module XMonad.Util.ClickableWorkspaces (
clickablePP,
clickableWrap,
) where
import XMonad.Prelude ((<&>), (>=>))
import XMonad
import XMonad.Hooks.StatusBar.PP (xmobarAction, PP(..))
import XMonad.Util.WorkspaceCompare (getSortByIndex)
import qualified XMonad.StackSet as W
import Data.List (elemIndex)
If you 're using the " XMonad . Hooks . StatusBar " interface , apply ' clickablePP '
* use of UnsafeStdinReader\/UnsafeXMonadLog in xmobarrc ( rather than StdinReader\/XMonadLog )
You may need to add to ' ppRename ' before applying
workspace
clickableWrap :: Int -> String -> String
clickableWrap i = xmobarAction ("xdotool set_desktop " ++ show i) "1"
export the information to tools like @xdotool@. ( Note that EwmhDesktops can
getWsIndex :: X (WorkspaceId -> Maybe Int)
getWsIndex = do
wSort <- getSortByIndex
spaces <- gets (map W.tag . wSort . W.workspaces . windowset)
return $ flip elemIndex spaces
getClickable :: X (String -> WindowSpace -> String)
getClickable = getWsIndex <&> \idx s w -> maybe id clickableWrap (idx (W.tag w)) s
| Apply clickable wrapping to the given PP .
clickablePP :: PP -> X PP
clickablePP pp = getClickable <&> \ren -> pp{ ppRename = ppRename pp >=> ren }
|
cb8690b7ba73fbcab3b454879e1155674982a47ae18474c4ee6b57b2fbb8972f | xu-hao/QueryArrow | ElasticSearchUtils.hs | module QueryArrow.HTTP.ElasticSearchUtils where
-- -a-rest-client-in-haskell/swizec/6152
import Network.HTTP.Conduit hiding (host, port)
import Control.Monad.IO.Class
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as B8
import Data.Aeson
get::(MonadIO m) => String -> m BL.ByteString
get url = simpleHttp url
-way-of-sending-http-post-in-haskell-using-http-conduit
buildPostRequest :: String -> RequestBody -> IO Request
buildPostRequest url body = do
nakedRequest <- parseUrl url
return (nakedRequest { method = B8.pack "POST", requestBody = body })
post :: String -> RequestBody -> IO BL.ByteString
post url s = do
manager <- newManager tlsManagerSettings
request <- buildPostRequest url s
response <- httpLbs request manager
return (responseBody response)
postJSON :: ToJSON a => String -> a -> IO BL.ByteString
postJSON url rec =
post url (RequestBodyLBS (encode rec))
buildPutRequest :: String -> RequestBody -> IO Request
buildPutRequest url body = do
nakedRequest <- parseUrl url
return (nakedRequest { method = B8.pack "PUT", requestBody = body })
put :: String -> RequestBody -> IO BL.ByteString
put url s = do
manager <- newManager tlsManagerSettings
request <- buildPutRequest url s
response <- httpLbs request manager
return (responseBody response)
putJSON :: ToJSON a => String -> a -> IO BL.ByteString
putJSON url rec =
put url (RequestBodyLBS (encode rec))
buildDeleteRequest :: String -> String -> IO Request
buildDeleteRequest url esid = do
nakedRequest <- parseUrl (url ++ "/" ++ esid)
return (nakedRequest { method = B8.pack "DELETE" })
delete :: String -> String -> IO BL.ByteString
delete url esid = do
manager <- newManager tlsManagerSettings
request <- buildDeleteRequest url esid
response <- httpLbs request manager
return (responseBody response)
| null | https://raw.githubusercontent.com/xu-hao/QueryArrow/4dd5b8a22c8ed2d24818de5b8bcaa9abc456ef0d/QueryArrow-db-elastic/src/QueryArrow/HTTP/ElasticSearchUtils.hs | haskell | -a-rest-client-in-haskell/swizec/6152 | module QueryArrow.HTTP.ElasticSearchUtils where
import Network.HTTP.Conduit hiding (host, port)
import Control.Monad.IO.Class
import qualified Data.ByteString.Lazy as BL
import qualified Data.ByteString.Char8 as B8
import Data.Aeson
get::(MonadIO m) => String -> m BL.ByteString
get url = simpleHttp url
-way-of-sending-http-post-in-haskell-using-http-conduit
buildPostRequest :: String -> RequestBody -> IO Request
buildPostRequest url body = do
nakedRequest <- parseUrl url
return (nakedRequest { method = B8.pack "POST", requestBody = body })
post :: String -> RequestBody -> IO BL.ByteString
post url s = do
manager <- newManager tlsManagerSettings
request <- buildPostRequest url s
response <- httpLbs request manager
return (responseBody response)
postJSON :: ToJSON a => String -> a -> IO BL.ByteString
postJSON url rec =
post url (RequestBodyLBS (encode rec))
buildPutRequest :: String -> RequestBody -> IO Request
buildPutRequest url body = do
nakedRequest <- parseUrl url
return (nakedRequest { method = B8.pack "PUT", requestBody = body })
put :: String -> RequestBody -> IO BL.ByteString
put url s = do
manager <- newManager tlsManagerSettings
request <- buildPutRequest url s
response <- httpLbs request manager
return (responseBody response)
putJSON :: ToJSON a => String -> a -> IO BL.ByteString
putJSON url rec =
put url (RequestBodyLBS (encode rec))
buildDeleteRequest :: String -> String -> IO Request
buildDeleteRequest url esid = do
nakedRequest <- parseUrl (url ++ "/" ++ esid)
return (nakedRequest { method = B8.pack "DELETE" })
delete :: String -> String -> IO BL.ByteString
delete url esid = do
manager <- newManager tlsManagerSettings
request <- buildDeleteRequest url esid
response <- httpLbs request manager
return (responseBody response)
|
8a629dc46169a6c431b9484b3af380c158db6bdacf21e1f900d03f9a58c5c4f7 | toyokumo/kintone-client | test_helper.cljc | (ns kintone-client.test-helper
(:require #?@(:clj [[clojure.core.async :refer [<!! chan put!]]]
:cljs [[cljs.core.async :refer [<! chan put!] :refer-macros [go]]])
[kintone-client.authentication :as auth]
[kintone-client.connection :as conn]
[kintone-client.types :as t]
[kintone-client.protocols :as pt]))
(def ^:private auth
(auth/new-auth {:api-token "MyToken"}))
(def ^:private conn
(conn/new-connection {:auth auth
:domain "test.kintone.com"}))
(defn- fake-url [path]
(pt/-url conn path))
(defn- fake-user-api-url [path]
(pt/-user-api-url conn path))
(def fake-conn
(reify pt/IRequest
(-path [_ path]
(str "/k" path))
(-url [_ path]
(fake-url path))
(-user-api-url [_ path]
(fake-user-api-url path))
(-get [_ url req]
(let [c (chan)]
(put! c (t/->KintoneResponse {:url url :req req} nil))
c))
(-post [_ url req]
(let [c (chan)]
(put! c (t/->KintoneResponse {:url url :req req} nil))
c))
(-put [_ url req]
(let [c (chan)]
(put! c (t/->KintoneResponse {:url url :req req} nil))
c))
(-delete [_ url req]
(let [c (chan)]
(put! c (t/->KintoneResponse {:url url :req req} nil))
c))
(-get-blob [_ url req]
(let [c (chan)]
(put! c (t/->KintoneResponse {:url url :req req} nil))
c))
(-multipart-post [_ url req]
(let [c (chan)]
(put! c (t/->KintoneResponse {:url url :req req} nil))
c))))
| null | https://raw.githubusercontent.com/toyokumo/kintone-client/32cf5abec1efa6d7ed5f33cf889535863358875b/test/kintone_client/test_helper.cljc | clojure | (ns kintone-client.test-helper
(:require #?@(:clj [[clojure.core.async :refer [<!! chan put!]]]
:cljs [[cljs.core.async :refer [<! chan put!] :refer-macros [go]]])
[kintone-client.authentication :as auth]
[kintone-client.connection :as conn]
[kintone-client.types :as t]
[kintone-client.protocols :as pt]))
(def ^:private auth
(auth/new-auth {:api-token "MyToken"}))
(def ^:private conn
(conn/new-connection {:auth auth
:domain "test.kintone.com"}))
(defn- fake-url [path]
(pt/-url conn path))
(defn- fake-user-api-url [path]
(pt/-user-api-url conn path))
(def fake-conn
(reify pt/IRequest
(-path [_ path]
(str "/k" path))
(-url [_ path]
(fake-url path))
(-user-api-url [_ path]
(fake-user-api-url path))
(-get [_ url req]
(let [c (chan)]
(put! c (t/->KintoneResponse {:url url :req req} nil))
c))
(-post [_ url req]
(let [c (chan)]
(put! c (t/->KintoneResponse {:url url :req req} nil))
c))
(-put [_ url req]
(let [c (chan)]
(put! c (t/->KintoneResponse {:url url :req req} nil))
c))
(-delete [_ url req]
(let [c (chan)]
(put! c (t/->KintoneResponse {:url url :req req} nil))
c))
(-get-blob [_ url req]
(let [c (chan)]
(put! c (t/->KintoneResponse {:url url :req req} nil))
c))
(-multipart-post [_ url req]
(let [c (chan)]
(put! c (t/->KintoneResponse {:url url :req req} nil))
c))))
| |
069331dede5505f051bf359a878be9f957d7230b1707297d49bed9407db859f7 | ocaml/ocaml | includemod.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Inclusion checks for the module language *)
open Typedtree
open Types
(** Type describing which arguments of an inclusion to consider as used
for the usage warnings. [Mark_both] is the default. *)
type mark =
| Mark_both
* definitions used from both arguments
| Mark_positive
* definitions used from the positive ( first ) argument
| Mark_negative
* definitions used from the negative ( second ) argument
| Mark_neither
(** Do not mark definitions used from either argument *)
module Error: sig
type ('elt,'explanation) diff = {
got:'elt;
expected:'elt;
symptom:'explanation
}
type 'elt core_diff =('elt,unit) diff
type functor_arg_descr =
| Anonymous
| Named of Path.t
| Unit
| Empty_struct
(** For backward compatibility's sake, an empty struct can be implicitly
converted to an unit module. *)
type core_sigitem_symptom =
| Value_descriptions of
(Types.value_description, Includecore.value_mismatch) diff
| Type_declarations of
(Types.type_declaration, Includecore.type_mismatch) diff
| Extension_constructors of
(Types.extension_constructor,
Includecore.extension_constructor_mismatch) diff
| Class_type_declarations of
(Types.class_type_declaration, Ctype.class_match_failure list) diff
| Class_declarations of
(Types.class_declaration, Ctype.class_match_failure list) diff
type core_module_type_symptom =
| Not_an_alias
| Not_an_identifier
| Incompatible_aliases
| Abstract_module_type
| Unbound_module_path of Path.t
type module_type_symptom =
| Mt_core of core_module_type_symptom
| Signature of signature_symptom
| Functor of functor_symptom
| Invalid_module_alias of Path.t
| After_alias_expansion of module_type_diff
and module_type_diff = (Types.module_type, module_type_symptom) diff
and functor_symptom =
| Params of functor_params_diff
| Result of module_type_diff
and ('arg,'path) functor_param_symptom =
| Incompatible_params of 'arg * Types.functor_parameter
| Mismatch of module_type_diff
and arg_functor_param_symptom =
(Types.functor_parameter, Ident.t) functor_param_symptom
and functor_params_diff =
(Types.functor_parameter list * Types.module_type) core_diff
and signature_symptom = {
env: Env.t;
missings: Types.signature_item list;
incompatibles: (Ident.t * sigitem_symptom) list;
oks: (int * Typedtree.module_coercion) list;
leftovers: ((Types.signature_item as 'it) * 'it * int) list
(** signature items that could not be compared due to type divergence *)
}
and sigitem_symptom =
| Core of core_sigitem_symptom
| Module_type_declaration of
(Types.modtype_declaration, module_type_declaration_symptom) diff
| Module_type of module_type_diff
and module_type_declaration_symptom =
| Illegal_permutation of Typedtree.module_coercion
| Not_greater_than of module_type_diff
| Not_less_than of module_type_diff
| Incomparable of
{less_than:module_type_diff; greater_than: module_type_diff}
type all =
| In_Compilation_unit of (string, signature_symptom) diff
| In_Signature of signature_symptom
| In_Module_type of module_type_diff
| In_Module_type_substitution of
Ident.t * (Types.module_type,module_type_declaration_symptom) diff
| In_Type_declaration of Ident.t * core_sigitem_symptom
| In_Expansion of core_module_type_symptom
end
type explanation = Env.t * Error.all
(* Extract name, kind and ident from a signature item *)
type field_kind =
| Field_value
| Field_type
| Field_exception
| Field_typext
| Field_module
| Field_modtype
| Field_class
| Field_classtype
type field_desc = { name: string; kind: field_kind }
val kind_of_field_desc: field_desc -> string
val field_desc: field_kind -> Ident.t -> field_desc
(** Map indexed by both field types and names.
This avoids name clashes between different sorts of fields
such as values and types. *)
module FieldMap: Map.S with type key = field_desc
val item_ident_name: Types.signature_item -> Ident.t * Location.t * field_desc
val is_runtime_component: Types.signature_item -> bool
(* Typechecking *)
val modtypes:
loc:Location.t -> Env.t -> mark:mark ->
module_type -> module_type -> module_coercion
val modtypes_with_shape:
shape:Shape.t -> loc:Location.t -> Env.t -> mark:mark ->
module_type -> module_type -> module_coercion * Shape.t
val strengthened_module_decl:
loc:Location.t -> aliasable:bool -> Env.t -> mark:mark ->
module_declaration -> Path.t -> module_declaration -> module_coercion
val check_modtype_inclusion :
loc:Location.t -> Env.t -> Types.module_type -> Path.t -> Types.module_type ->
explanation option
* [ check_modtype_inclusion ~loc env mty1 path1 mty2 ] checks that the
functor application F(M ) is well typed , where mty2 is the type of
the argument of F and path1 / mty1 is the path / unstrenghened type of M.
functor application F(M) is well typed, where mty2 is the type of
the argument of F and path1/mty1 is the path/unstrenghened type of M. *)
val check_modtype_equiv:
loc:Location.t -> Env.t -> Ident.t -> module_type -> module_type -> unit
val signatures: Env.t -> mark:mark ->
signature -> signature -> module_coercion
val compunit:
Env.t -> mark:mark -> string -> signature ->
string -> signature -> Shape.t -> module_coercion * Shape.t
val type_declarations:
loc:Location.t -> Env.t -> mark:mark ->
Ident.t -> type_declaration -> type_declaration -> unit
val print_coercion: Format.formatter -> module_coercion -> unit
type symptom =
Missing_field of Ident.t * Location.t * string (* kind *)
| Value_descriptions of
Ident.t * value_description * value_description
* Includecore.value_mismatch
| Type_declarations of Ident.t * type_declaration
* type_declaration * Includecore.type_mismatch
| Extension_constructors of Ident.t * extension_constructor
* extension_constructor * Includecore.extension_constructor_mismatch
| Module_types of module_type * module_type
| Modtype_infos of Ident.t * modtype_declaration * modtype_declaration
| Modtype_permutation of Types.module_type * Typedtree.module_coercion
| Interface_mismatch of string * string
| Class_type_declarations of
Ident.t * class_type_declaration * class_type_declaration *
Ctype.class_match_failure list
| Class_declarations of
Ident.t * class_declaration * class_declaration *
Ctype.class_match_failure list
| Unbound_module_path of Path.t
| Invalid_module_alias of Path.t
type pos =
| Module of Ident.t
| Modtype of Ident.t
| Arg of functor_parameter
| Body of functor_parameter
exception Error of explanation
exception Apply_error of {
loc : Location.t ;
env : Env.t ;
lid_app : Longident.t option ;
mty_f : module_type ;
args : (Error.functor_arg_descr * Types.module_type) list ;
}
val expand_module_alias: strengthen:bool -> Env.t -> Path.t -> Types.module_type
module Functor_inclusion_diff: sig
module Defs: sig
type left = Types.functor_parameter
type right = left
type eq = Typedtree.module_coercion
type diff = (Types.functor_parameter, unit) Error.functor_param_symptom
type state
end
val diff: Env.t ->
Types.functor_parameter list * Types.module_type ->
Types.functor_parameter list * Types.module_type ->
Diffing.Define(Defs).patch
end
module Functor_app_diff: sig
module Defs: sig
type left = Error.functor_arg_descr * Types.module_type
type right = Types.functor_parameter
type eq = Typedtree.module_coercion
type diff = (Error.functor_arg_descr, unit) Error.functor_param_symptom
type state
end
val diff:
Env.t ->
f:Types.module_type ->
args:(Error.functor_arg_descr * Types.module_type) list ->
Diffing.Define(Defs).patch
end
| null | https://raw.githubusercontent.com/ocaml/ocaml/3490eaa060cd1e2b4143bf5df42fdbeb121f0c4d/typing/includemod.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Inclusion checks for the module language
* Type describing which arguments of an inclusion to consider as used
for the usage warnings. [Mark_both] is the default.
* Do not mark definitions used from either argument
* For backward compatibility's sake, an empty struct can be implicitly
converted to an unit module.
* signature items that could not be compared due to type divergence
Extract name, kind and ident from a signature item
* Map indexed by both field types and names.
This avoids name clashes between different sorts of fields
such as values and types.
Typechecking
kind | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Typedtree
open Types
type mark =
| Mark_both
* definitions used from both arguments
| Mark_positive
* definitions used from the positive ( first ) argument
| Mark_negative
* definitions used from the negative ( second ) argument
| Mark_neither
module Error: sig
type ('elt,'explanation) diff = {
got:'elt;
expected:'elt;
symptom:'explanation
}
type 'elt core_diff =('elt,unit) diff
type functor_arg_descr =
| Anonymous
| Named of Path.t
| Unit
| Empty_struct
type core_sigitem_symptom =
| Value_descriptions of
(Types.value_description, Includecore.value_mismatch) diff
| Type_declarations of
(Types.type_declaration, Includecore.type_mismatch) diff
| Extension_constructors of
(Types.extension_constructor,
Includecore.extension_constructor_mismatch) diff
| Class_type_declarations of
(Types.class_type_declaration, Ctype.class_match_failure list) diff
| Class_declarations of
(Types.class_declaration, Ctype.class_match_failure list) diff
type core_module_type_symptom =
| Not_an_alias
| Not_an_identifier
| Incompatible_aliases
| Abstract_module_type
| Unbound_module_path of Path.t
type module_type_symptom =
| Mt_core of core_module_type_symptom
| Signature of signature_symptom
| Functor of functor_symptom
| Invalid_module_alias of Path.t
| After_alias_expansion of module_type_diff
and module_type_diff = (Types.module_type, module_type_symptom) diff
and functor_symptom =
| Params of functor_params_diff
| Result of module_type_diff
and ('arg,'path) functor_param_symptom =
| Incompatible_params of 'arg * Types.functor_parameter
| Mismatch of module_type_diff
and arg_functor_param_symptom =
(Types.functor_parameter, Ident.t) functor_param_symptom
and functor_params_diff =
(Types.functor_parameter list * Types.module_type) core_diff
and signature_symptom = {
env: Env.t;
missings: Types.signature_item list;
incompatibles: (Ident.t * sigitem_symptom) list;
oks: (int * Typedtree.module_coercion) list;
leftovers: ((Types.signature_item as 'it) * 'it * int) list
}
and sigitem_symptom =
| Core of core_sigitem_symptom
| Module_type_declaration of
(Types.modtype_declaration, module_type_declaration_symptom) diff
| Module_type of module_type_diff
and module_type_declaration_symptom =
| Illegal_permutation of Typedtree.module_coercion
| Not_greater_than of module_type_diff
| Not_less_than of module_type_diff
| Incomparable of
{less_than:module_type_diff; greater_than: module_type_diff}
type all =
| In_Compilation_unit of (string, signature_symptom) diff
| In_Signature of signature_symptom
| In_Module_type of module_type_diff
| In_Module_type_substitution of
Ident.t * (Types.module_type,module_type_declaration_symptom) diff
| In_Type_declaration of Ident.t * core_sigitem_symptom
| In_Expansion of core_module_type_symptom
end
type explanation = Env.t * Error.all
type field_kind =
| Field_value
| Field_type
| Field_exception
| Field_typext
| Field_module
| Field_modtype
| Field_class
| Field_classtype
type field_desc = { name: string; kind: field_kind }
val kind_of_field_desc: field_desc -> string
val field_desc: field_kind -> Ident.t -> field_desc
module FieldMap: Map.S with type key = field_desc
val item_ident_name: Types.signature_item -> Ident.t * Location.t * field_desc
val is_runtime_component: Types.signature_item -> bool
val modtypes:
loc:Location.t -> Env.t -> mark:mark ->
module_type -> module_type -> module_coercion
val modtypes_with_shape:
shape:Shape.t -> loc:Location.t -> Env.t -> mark:mark ->
module_type -> module_type -> module_coercion * Shape.t
val strengthened_module_decl:
loc:Location.t -> aliasable:bool -> Env.t -> mark:mark ->
module_declaration -> Path.t -> module_declaration -> module_coercion
val check_modtype_inclusion :
loc:Location.t -> Env.t -> Types.module_type -> Path.t -> Types.module_type ->
explanation option
* [ check_modtype_inclusion ~loc env mty1 path1 mty2 ] checks that the
functor application F(M ) is well typed , where mty2 is the type of
the argument of F and path1 / mty1 is the path / unstrenghened type of M.
functor application F(M) is well typed, where mty2 is the type of
the argument of F and path1/mty1 is the path/unstrenghened type of M. *)
val check_modtype_equiv:
loc:Location.t -> Env.t -> Ident.t -> module_type -> module_type -> unit
val signatures: Env.t -> mark:mark ->
signature -> signature -> module_coercion
val compunit:
Env.t -> mark:mark -> string -> signature ->
string -> signature -> Shape.t -> module_coercion * Shape.t
val type_declarations:
loc:Location.t -> Env.t -> mark:mark ->
Ident.t -> type_declaration -> type_declaration -> unit
val print_coercion: Format.formatter -> module_coercion -> unit
type symptom =
| Value_descriptions of
Ident.t * value_description * value_description
* Includecore.value_mismatch
| Type_declarations of Ident.t * type_declaration
* type_declaration * Includecore.type_mismatch
| Extension_constructors of Ident.t * extension_constructor
* extension_constructor * Includecore.extension_constructor_mismatch
| Module_types of module_type * module_type
| Modtype_infos of Ident.t * modtype_declaration * modtype_declaration
| Modtype_permutation of Types.module_type * Typedtree.module_coercion
| Interface_mismatch of string * string
| Class_type_declarations of
Ident.t * class_type_declaration * class_type_declaration *
Ctype.class_match_failure list
| Class_declarations of
Ident.t * class_declaration * class_declaration *
Ctype.class_match_failure list
| Unbound_module_path of Path.t
| Invalid_module_alias of Path.t
type pos =
| Module of Ident.t
| Modtype of Ident.t
| Arg of functor_parameter
| Body of functor_parameter
exception Error of explanation
exception Apply_error of {
loc : Location.t ;
env : Env.t ;
lid_app : Longident.t option ;
mty_f : module_type ;
args : (Error.functor_arg_descr * Types.module_type) list ;
}
val expand_module_alias: strengthen:bool -> Env.t -> Path.t -> Types.module_type
module Functor_inclusion_diff: sig
module Defs: sig
type left = Types.functor_parameter
type right = left
type eq = Typedtree.module_coercion
type diff = (Types.functor_parameter, unit) Error.functor_param_symptom
type state
end
val diff: Env.t ->
Types.functor_parameter list * Types.module_type ->
Types.functor_parameter list * Types.module_type ->
Diffing.Define(Defs).patch
end
(** Diffing of functor *applications*: matches the supplied arguments
    ([args]) against the parameters of the functor type [f], producing a
    [Diffing] patch used for error reporting. *)
module Functor_app_diff: sig
  module Defs: sig
    type left = Error.functor_arg_descr * Types.module_type
    type right = Types.functor_parameter
    type eq = Typedtree.module_coercion
    type diff = (Error.functor_arg_descr, unit) Error.functor_param_symptom
    type state
  end
  val diff:
    Env.t ->
    f:Types.module_type ->
    args:(Error.functor_arg_descr * Types.module_type) list ->
    Diffing.Define(Defs).patch
end
|
a7feba5e122b2da29ab9850646a1ce315d3c26865a62fced722f3f23b208e19d | marick/structural-typing | type_repo.clj | (ns structural-typing.assist.type-repo
"The `TypeRepo` structure and its functions."
(:use structural-typing.clojure.core)
(:require [structural-typing.guts.type-descriptions :as type-descriptions]
[structural-typing.guts.compile.compile :as compile]
[structural-typing.assist.defaults :as defaults])
(:refer-clojure :exclude [any?]))
(def valid-type-signifier?
"A valid type signifier is either a keyword or a string."
(some-fn keyword? string?))
(defprotocol TypeRepoLike
(canonicalize [type-repo condensed-type-descriptions])
(hold-type [type-repo type-signifier type-descriptions])
(get-compiled-type [type-repo type-signifier])
(replace-success-handler [type-repo handler]
"For this `type-repo`, handle candidates that typecheck successfully by
passing them to `handler` as the last step in [[built-like]].
Thus, `built-like` will return the handler's result.")
(replace-error-handler [type-repo handler]
"For this `type-repo`, pass [[oopsies]] generated by type failures to
`handler` as the last step in [[built-like]]. Thus, `built-like` will
return the handler's result.")
(the-success-handler [type-repo])
(the-error-handler [type-repo]))
(defrecord TypeRepo [success-handler error-handler]
TypeRepoLike
(canonicalize [type-repo condensed-type-descriptions]
(type-descriptions/canonicalize condensed-type-descriptions
(:canonicalized-type-descriptions type-repo)))
(hold-type [type-repo type-signifier condensed-type-descriptions]
(let [canonicalized (canonicalize type-repo condensed-type-descriptions)
compiled (compile/compile-type canonicalized)]
(-> type-repo
(assoc-in [:original-type-descriptions type-signifier] condensed-type-descriptions)
(assoc-in [:canonicalized-type-descriptions type-signifier] canonicalized)
(assoc-in [:compiled-types type-signifier] compiled))))
(get-compiled-type [type-repo type-signifier]
(or (get-in type-repo [:compiled-types type-signifier])
(boom! "There is no type `%s`" type-signifier)))
(replace-error-handler [type-repo f]
(assoc type-repo :error-handler f))
(replace-success-handler [type-repo f]
(assoc type-repo :success-handler f))
(the-error-handler [type-repo] (:error-handler type-repo))
(the-success-handler [type-repo] (:success-handler type-repo)))
(defmethod clojure.core/print-method TypeRepo [o, ^java.io.Writer w]
(.write w "#TypeRepo[")
(.write w (->> o :original-type-descriptions keys (str-join ", ")))
(.write w "]"))
(def empty-type-repo
"A type repo that contains no types and uses the default success and error handlers."
(->TypeRepo defaults/default-success-handler defaults/default-error-handler))
(defn origin
"Returns the original description of the `type-signifier` (a sequence of vectors and maps)"
[type-repo type-signifier]
(get-in type-repo [:original-type-descriptions type-signifier]))
(defn description
"Returns the canonical (expanded) description of the `type-signifier`."
[type-repo type-signifier]
(get-in type-repo [:canonicalized-type-descriptions type-signifier]))
| null | https://raw.githubusercontent.com/marick/structural-typing/9b44c303dcfd4a72c5b75ec7a1114687c809fba1/src/structural_typing/assist/type_repo.clj | clojure | (ns structural-typing.assist.type-repo
"The `TypeRepo` structure and its functions."
(:use structural-typing.clojure.core)
(:require [structural-typing.guts.type-descriptions :as type-descriptions]
[structural-typing.guts.compile.compile :as compile]
[structural-typing.assist.defaults :as defaults])
(:refer-clojure :exclude [any?]))
;; Predicate built with `some-fn`: truthy when the value satisfies either
;; `keyword?` or `string?`.
(def valid-type-signifier?
  "A valid type signifier is either a keyword or a string."
  (some-fn keyword? string?))
;; Behaviour shared by type repositories (implemented by `TypeRepo` below).
;; Undocumented methods: `canonicalize` expands a condensed description,
;; `hold-type` registers a type under a signifier, `get-compiled-type`
;; fetches the compiled checker, and `the-*-handler` read back the handlers.
(defprotocol TypeRepoLike
  (canonicalize [type-repo condensed-type-descriptions])
  (hold-type [type-repo type-signifier type-descriptions])
  (get-compiled-type [type-repo type-signifier])
  (replace-success-handler [type-repo handler]
    "For this `type-repo`, handle candidates that typecheck successfully by
passing them to `handler` as the last step in [[built-like]].
Thus, `built-like` will return the handler's result.")
  (replace-error-handler [type-repo handler]
    "For this `type-repo`, pass [[oopsies]] generated by type failures to
`handler` as the last step in [[built-like]]. Thus, `built-like` will
return the handler's result.")
  (the-success-handler [type-repo])
  (the-error-handler [type-repo]))
;; The concrete repository. Besides the two handler fields, instances grow
;; three maps keyed by type signifier (via `hold-type`):
;;   :original-type-descriptions      - the raw (condensed) input
;;   :canonicalized-type-descriptions - the expanded, canonical form
;;   :compiled-types                  - the compiled (executable) checker
(defrecord TypeRepo [success-handler error-handler]
  TypeRepoLike
  (canonicalize [type-repo condensed-type-descriptions]
    ;; Expansion may reference previously-registered types, hence the
    ;; stored canonicalized descriptions are passed along.
    (type-descriptions/canonicalize condensed-type-descriptions
                                    (:canonicalized-type-descriptions type-repo)))
  (hold-type [type-repo type-signifier condensed-type-descriptions]
    (let [canonicalized (canonicalize type-repo condensed-type-descriptions)
          compiled (compile/compile-type canonicalized)]
      (-> type-repo
          (assoc-in [:original-type-descriptions type-signifier] condensed-type-descriptions)
          (assoc-in [:canonicalized-type-descriptions type-signifier] canonicalized)
          (assoc-in [:compiled-types type-signifier] compiled))))
  (get-compiled-type [type-repo type-signifier]
    ;; Unknown signifiers are a caller error -> `boom!` raises.
    (or (get-in type-repo [:compiled-types type-signifier])
        (boom! "There is no type `%s`" type-signifier)))
  (replace-error-handler [type-repo f]
    (assoc type-repo :error-handler f))
  (replace-success-handler [type-repo f]
    (assoc type-repo :success-handler f))
  (the-error-handler [type-repo] (:error-handler type-repo))
  (the-success-handler [type-repo] (:success-handler type-repo)))
;; Render a repo as #TypeRepo[sig1, sig2, ...] listing the registered
;; type signifiers (from the original descriptions).
(defmethod clojure.core/print-method TypeRepo [o, ^java.io.Writer w]
  (let [signifiers (->> o :original-type-descriptions keys (str-join ", "))]
    (.write w (str "#TypeRepo[" signifiers "]"))))
;; Starting point for building repositories: no registered types, default
;; handlers taken from `structural-typing.assist.defaults`.
(def empty-type-repo
  "A type repo that contains no types and uses the default success and error handlers."
  (->TypeRepo defaults/default-success-handler defaults/default-error-handler))
(defn origin
  "Returns the original description of the `type-signifier` (a sequence of vectors and maps)"
  [type-repo type-signifier]
  ;; Look up the raw description exactly as it was registered.
  (-> type-repo
      :original-type-descriptions
      (get type-signifier)))
(defn description
  "Returns the canonical (expanded) description of the `type-signifier`."
  [type-repo type-signifier]
  ;; Same lookup as `origin`, but against the canonicalized map.
  (-> type-repo
      :canonicalized-type-descriptions
      (get type-signifier)))
| |
456bec8a7a7bd5d38096f5c341014248e7973dc5e82a95c5526d48b1a14ee3ff | leo-project/leo_gateway | leo_gateway_s3_api.erl | %%======================================================================
%%
%% Leo S3 Handler
%%
%% Copyright (c) 2012-2015 Rakuten, Inc.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License.  You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied.  See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% ---------------------------------------------------------------------
%% Leo Gateway S3-API
%% @doc
%% @end
%%======================================================================
-module(leo_gateway_s3_api).
-behaviour(leo_gateway_http_behaviour).
-export([start/2, stop/0,
init/3, handle/2, terminate/3]).
-export([onrequest/1, onresponse/1]).
-export([get_bucket/3, put_bucket/3, delete_bucket/3, head_bucket/3,
get_object/3, put_object/3, delete_object/3, head_object/3,
get_object_with_cache/4, range_object/3
]).
-include("leo_gateway.hrl").
-include("leo_http.hrl").
-include_lib("leo_commons/include/leo_commons.hrl").
-include_lib("leo_logger/include/leo_logger.hrl").
-include_lib("leo_object_storage/include/leo_object_storage.hrl").
-include_lib("leo_redundant_manager/include/leo_redundant_manager.hrl").
-include_lib("leo_s3_libs/include/leo_s3_auth.hrl").
-include_lib("leo_s3_libs/include/leo_s3_bucket.hrl").
-include_lib("leo_s3_libs/include/leo_s3_endpoint.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("xmerl/include/xmerl.hrl").
-compile({inline, [handle/2, handle_1/4, handle_2/6,
handle_multi_upload_1/8,
handle_multi_upload_2/6,
handle_multi_upload_3/3,
gen_upload_key/1, gen_upload_initiate_xml/3, gen_upload_completion_xml/4,
resp_copy_obj_xml/2, request_params/2, auth/5, auth/7, auth_1/7,
get_bucket_1/6, put_bucket_1/3, delete_bucket_1/2, head_bucket_1/2
]}).
%%--------------------------------------------------------------------
%% API
%%--------------------------------------------------------------------
%% @doc Start cowboy's listeners with the supplied HTTP options
-spec(start(Sup, HttpOptions) ->
             ok | {error, Cause} when Sup::module(),
                                      HttpOptions::[{atom(), any()}],
                                      Cause::any()).
start(SupRef, Options) ->
    %% Listener setup is shared with the other protocol handlers
    leo_gateway_http_commons:start(SupRef, Options).
%% @doc Stop both the plain and the SSL cowboy listeners
-spec(stop() ->
             ok).
stop() ->
    SslListenerId = list_to_atom(?MODULE_STRING ++ "_ssl"),
    cowboy:stop_listener(?MODULE),
    cowboy:stop_listener(SslListenerId),
    ok.
%% @doc Initializer callback for cowboy; keeps the handler options untouched
init({_Transport, http}, Req, Opts) ->
    {ok, Req, Opts}.
%% @doc Handle a request
%% @callback
%% Top-level cowboy entry point: refuses requests while the watchdog
%% reports unsafe items, requires a Host header, runs the request
%% validators, then dispatches to handle_1/4 with the resolved bucket/path.
-spec(handle(Req, State) ->
             {ok, Req, State} when Req::cowboy_req:req(),
                                   State::term()).
handle(Req, State) ->
    case leo_watchdog_state:find_not_safe_items() of
        not_found ->
            {Host, _} = cowboy_req:host(Req),
            %% Host header must be included even if a request with HTTP/1.0
            case Host of
                <<>> ->
                    {ok, Req2} = ?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_InvalidArgument,
                                                    ?XML_ERROR_MSG_InvalidArgument, <<>>, <<>>, Req),
                    {ok, Req2, State};
                _ ->
                    case check_request(Req) of
                        ok ->
                            {Bucket, Path} = get_bucket_and_path(Req),
                            handle_1(Req, State, Bucket, Path);
                        {error, Req2} ->
                            %% a validator already produced the error response
                            {ok, Req2, State}
                    end
            end;
        {ok, ErrorItems} ->
            %% watchdog flagged the node as unsafe/overloaded -> 503
            ?debug("handle/2", "error-items:~p", [ErrorItems]),
            {ok, Req2} = ?reply_service_unavailable_error([?SERVER_HEADER], <<>>, <<>>, Req),
            {ok, Req2, State}
    end.
%% @doc Termination callback for cowboy; nothing to clean up
terminate(_Reason, _Req, _State) ->
    ok.
%% @doc Check whether a request is valid or not
%% @private
-spec(check_request(Req) ->
             ok | {error, Cause} when Req::cowboy_req:req(),
                                      Cause::any()).
check_request(Req) ->
    %% Validators run in order; the first failure wins (see check_request/2)
    Validators = [fun check_bad_date/1],
    check_request(Req, Validators).
%% @private
%% Apply each validator in turn; turn the first {error, Status, Code, Msg}
%% into an HTTP error response and stop.
check_request(_Req, []) ->
    ok;
check_request(Req, [Validate|Remaining]) ->
    case Validate(Req) of
        {error, 400, Code, Msg} ->
            {ok, ErrorReq} = ?reply_bad_request([?SERVER_HEADER], Code, Msg, <<>>, <<>>, Req),
            {error, ErrorReq};
        {error, 403, Code, Msg} ->
            {ok, ErrorReq} = ?reply_forbidden([?SERVER_HEADER], Code, Msg, <<>>, <<>>, Req),
            {error, ErrorReq};
        _Passed ->
            check_request(Req, Remaining)
    end.
%% @private
%% Only signed requests (those carrying an Authorization header) are
%% required to carry a date header.
check_bad_date(Req) ->
    case cowboy_req:header(?HTTP_HEAD_AUTHORIZATION, Req) of
        {undefined, _} ->
            %% anonymous request: no date header needed
            ok;
        _Authorization ->
            check_bad_date_1(Req)
    end.
%% @private
%% A signed request must carry either a "Date" or an "x-amz-date" header;
%% "Date" is preferred, "x-amz-date" is the fallback.
check_bad_date_1(Req) ->
    PickedHeader = case cowboy_req:header(?HTTP_HEAD_DATE, Req) of
                       {undefined, _} ->
                           cowboy_req:header(?HTTP_HRAD_X_AMZ_DATE, Req);
                       Found ->
                           Found
                   end,
    case PickedHeader of
        {undefined, _} ->
            %% neither header present -> access denied
            {error, 403, ?XML_ERROR_CODE_AccessDenied, ?XML_ERROR_MSG_AccessDenied};
        {Date, _} ->
            check_bad_date_invalid(Date)
    end.
%% @private
%% Validate the date header value. Unparsable dates yield AccessDenied;
%% a year outside the open interval (2010, 2030) is treated as clock skew.
check_bad_date_invalid(Date) ->
    case catch cowboy_date:parse_date(Date) of
        {error, badarg} ->
            {error, 403, ?XML_ERROR_CODE_AccessDenied, ?XML_ERROR_MSG_AccessDenied};
        {'EXIT', _} ->
            %% parse_date may crash on malformed input; treated like badarg
            {error, 403, ?XML_ERROR_CODE_AccessDenied, ?XML_ERROR_MSG_AccessDenied};
        {{Y,_,_},_} ->
            %% NOTE(review): the accepted year window (2011..2029) is
            %% hard-coded -- confirm whether it should track the real clock
            case (Y =< 2010 orelse 2030 =< Y) of
                true ->
                    {error, 403, ?XML_ERROR_CODE_RequestTimeTooSkewed,
                     ?XML_ERROR_MSG_RequestTimeTooSkewed};
                _ ->
                    ok
            end
    end.
%%--------------------------------------------------------------------
%% Callbacks from Cowboy
%%--------------------------------------------------------------------
%% @doc 'onrequest' hook
%% Delegates request-phase cache handling to the commons module, passing
%% the key-generator used for cache lookups.
-spec(onrequest(CacheCondition) ->
             Ret when CacheCondition::#cache_condition{},
                      Ret::any()).
onrequest(CacheCond) ->
    leo_gateway_http_commons:onrequest(CacheCond, fun get_bucket_and_path/1).
%% @doc 'onresponse' hook
%% Delegates response-phase cache handling to the commons module, passing
%% the key-generator used for cache lookups.
-spec(onresponse(CacheCondition) ->
             Ret when CacheCondition::#cache_condition{},
                      Ret::any()).
onresponse(CacheCond) ->
    leo_gateway_http_commons:onresponse(CacheCond, fun get_bucket_and_path/1).
%% ---------------------------------------------------------------------
%% Callbacks from HTTP-Handler
%%
%% For BUCKET-OPERATION
%% ---------------------------------------------------------------------
%% @doc GET buckets and dirs
%% Clause 1 (is_acl = false): "GET Bucket (List Objects)".
%% Clause 2 (is_acl = true) : "GET Bucket acl".
-spec(get_bucket(Req, Key, ReqParams) ->
             {ok, Req} when Req::cowboy_req:req(),
                            Key::binary(),
                            ReqParams::#req_params{}).
get_bucket(Req, Key, #req_params{access_key_id = AccessKeyId,
                                 is_acl = false,
                                 qs_prefix = Prefix}) ->
    BeginTime = leo_date:clock(),
    NormalizedMarker = case cowboy_req:qs_val(?HTTP_QS_BIN_MARKER, Req) of
                           {undefined,_} ->
                               <<>>;
                           {Marker,_} ->
                               %% Normalize Marker
                               %% Append "$BucketName/" at the beginning of Marker as necessary
                               KeySize = size(Key),
                               case binary:match(Marker, Key) of
                                   {0, KeySize} ->
                                       Marker;
                                   _Other ->
                                       << Key/binary, Marker/binary >>
                               end
                       end,
    %% 'max-keys' is clamped to ?HTTP_MAXKEYS_LIMIT; an unparsable value
    %% silently falls back to the default
    MaxKeys = case cowboy_req:qs_val(?HTTP_QS_BIN_MAXKEYS, Req) of
                  {undefined, _} ->
                      ?DEF_S3API_MAX_KEYS;
                  {Val_2, _} ->
                      try
                          MaxKeys1 = binary_to_integer(Val_2),
                          erlang:min(MaxKeys1, ?HTTP_MAXKEYS_LIMIT)
                      catch _:_ ->
                              ?DEF_S3API_MAX_KEYS
                      end
              end,
    Delimiter = case cowboy_req:qs_val(?HTTP_QS_BIN_DELIMITER, Req) of
                    {undefined, _} -> none;
                    {Val, _} ->
                        Val
                end,
    %% PrefixBin is only used for access-logging below
    PrefixBin = case Prefix of
                    none ->
                        <<>>;
                    _ ->
                        Prefix
                end,
    case get_bucket_1(AccessKeyId, Key, Delimiter, NormalizedMarker, MaxKeys, Prefix) of
        {ok, XMLRet} ->
            ?access_log_bucket_get(Key, PrefixBin, ?HTTP_ST_OK, BeginTime),
            Header = [?SERVER_HEADER,
                      {?HTTP_HEAD_RESP_CONTENT_TYPE, ?HTTP_CTYPE_XML}],
            ?reply_ok(Header, XMLRet, Req);
        {error, badarg} ->
            ?access_log_bucket_get(Key, PrefixBin, ?HTTP_ST_BAD_REQ, BeginTime),
            ?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_InvalidArgument,
                               ?XML_ERROR_MSG_InvalidArgument, Key, <<>>, Req);
        {error, not_found} ->
            ?access_log_bucket_get(Key, PrefixBin, ?HTTP_ST_NOT_FOUND, BeginTime),
            ?reply_not_found([?SERVER_HEADER], Key, <<>>, Req);
        {error, unavailable} ->
            ?access_log_bucket_get(Key, PrefixBin, ?HTTP_ST_SERVICE_UNAVAILABLE, BeginTime),
            ?reply_service_unavailable_error([?SERVER_HEADER], Key, <<>>, Req);
        {error, ?ERR_TYPE_INTERNAL_ERROR} ->
            ?access_log_bucket_get(Key, PrefixBin, ?HTTP_ST_INTERNAL_ERROR, BeginTime),
            ?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req);
        {error, timeout} ->
            ?access_log_bucket_get(Key, PrefixBin, ?HTTP_ST_SERVICE_UNAVAILABLE, BeginTime),
            ?reply_timeout([?SERVER_HEADER], Key, <<>>, Req)
    end;
get_bucket(Req, Bucket, #req_params{access_key_id = _AccessKeyId,
                                    is_acl = true}) ->
    Bucket_2 = formalize_bucket(Bucket),
    case leo_s3_bucket:find_bucket_by_name(Bucket_2) of
        {ok, BucketInfo} ->
            XML = generate_acl_xml(BucketInfo),
            Header = [?SERVER_HEADER,
                      {?HTTP_HEAD_RESP_CONTENT_TYPE, ?HTTP_CTYPE_XML}],
            ?reply_ok(Header, XML, Req);
        not_found ->
            ?reply_not_found([?SERVER_HEADER], Bucket_2, <<>>, Req);
        {error, _Cause} ->
            ?reply_internal_error([?SERVER_HEADER], Bucket_2, <<>>, Req)
    end.
%% @doc Put a bucket
%% Clause 1 (is_acl = false): "PUT Bucket" -- create the bucket.
%% Clause 2 (is_acl = true) : "PUT Bucket acl" -- set a canned ACL.
-spec(put_bucket(Req, Key, ReqParams) ->
             {ok, Req} when Req::cowboy_req:req(),
                            Key::binary(),
                            ReqParams::#req_params{}).
put_bucket(Req, Key, #req_params{access_key_id = AccessKeyId,
                                 is_acl = false}) ->
    BeginTime = leo_date:clock(),
    Bucket = formalize_bucket(Key),
    %% canned ACL from the "x-amz-acl" header, lower-cased for comparison
    CannedACL = string:to_lower(binary_to_list(?http_header(Req, ?HTTP_HEAD_X_AMZ_ACL))),
    %% Consume CreateBucketConfiguration
    %% (body is read and discarded so the connection stays usable)
    Req_1 = case cowboy_req:has_body(Req) of
                false ->
                    Req;
                true ->
                    {ok, _Bin_2, Req_2} = cowboy_req:body(Req),
                    Req_2
            end,
    case put_bucket_1(CannedACL, AccessKeyId, Bucket) of
        ok ->
            ?access_log_bucket_put(Bucket, ?HTTP_ST_OK, BeginTime),
            ?reply_ok([?SERVER_HEADER], Req_1);
        {error, ?ERR_TYPE_INTERNAL_ERROR} ->
            ?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req_1);
        {error, invalid_bucket_format} ->
            ?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_InvalidBucketName,
                               ?XML_ERROR_MSG_InvalidBucketName, Key, <<>>, Req_1);
        {error, invalid_access} ->
            %% NOTE(review): this branch replies on 'Req' while its siblings
            %% use 'Req_1' (post body-consumption) -- confirm intended
            ?reply_forbidden([?SERVER_HEADER], ?XML_ERROR_CODE_AccessDenied,
                             ?XML_ERROR_MSG_AccessDenied, Key, <<>>, Req);
        {error, already_exists} ->
            ?reply_conflict([?SERVER_HEADER], ?XML_ERROR_CODE_BucketAlreadyExists,
                            ?XML_ERROR_MSG_BucketAlreadyExists, Key, <<>>, Req_1);
        {error, already_yours} ->
            ?reply_conflict([?SERVER_HEADER], ?XML_ERROR_CODE_BucketAlreadyOwnedByYou,
                            ?XML_ERROR_MSG_BucketAlreadyOwnedByYou, Key, <<>>, Req_1);
        {error, timeout} ->
            ?reply_timeout([?SERVER_HEADER], Key, <<>>, Req_1)
    end;
put_bucket(Req, Key, #req_params{access_key_id = AccessKeyId,
                                 is_acl = true}) ->
    Bucket = formalize_bucket(Key),
    CannedACL = string:to_lower(binary_to_list(?http_header(Req, ?HTTP_HEAD_X_AMZ_ACL))),
    case put_bucket_acl_1(CannedACL, AccessKeyId, Bucket) of
        ok ->
            ?reply_ok([?SERVER_HEADER], Req);
        {error, not_supported} ->
            ?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_InvalidArgument,
                               ?XML_ERROR_MSG_InvalidArgument, Key, <<>>, Req);
        {error, invalid_access} ->
            %% NOTE(review): replies 400 here whereas the non-ACL clause maps
            %% invalid_access to 403 -- confirm the asymmetry is intentional
            ?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_AccessDenied,
                               ?XML_ERROR_MSG_AccessDenied, Key, <<>>, Req);
        {error, _} ->
            ?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req)
    end.
%% @doc Remove a bucket ("DELETE Bucket")
-spec(delete_bucket(Req, Key, ReqParams) ->
             {ok, Req} when Req::cowboy_req:req(),
                            Key::binary(),
                            ReqParams::#req_params{}).
delete_bucket(Req, Key, #req_params{access_key_id = AccessKeyId}) ->
    BeginTime = leo_date:clock(),
    %% 'Bucket' (formalized name) is used for access logging only; the raw
    %% 'Key' is what is deleted and echoed in error bodies
    Bucket = formalize_bucket(Key),
    case delete_bucket_1(AccessKeyId, Key) of
        ok ->
            ?access_log_bucket_delete(Bucket, ?HTTP_ST_NO_CONTENT, BeginTime),
            ?reply_no_content([?SERVER_HEADER], Req);
        not_found ->
            ?access_log_bucket_delete(Bucket, ?HTTP_ST_NOT_FOUND, BeginTime),
            ?reply_not_found([?SERVER_HEADER], Key, <<>>, Req);
        {error, timeout} ->
            ?access_log_bucket_delete(Bucket, ?HTTP_ST_SERVICE_UNAVAILABLE, BeginTime),
            ?reply_timeout_without_body([?SERVER_HEADER], Req);
        {error, _} ->
            ?access_log_bucket_delete(Bucket, ?HTTP_ST_INTERNAL_ERROR, BeginTime),
            ?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req)
    end.
%% @doc Retrieve a bucket-info ("HEAD Bucket")
%% HEAD responses never carry a body, hence the *_without_body reply macros.
-spec(head_bucket(Req, Key, ReqParams) ->
             {ok, Req} when Req::cowboy_req:req(),
                            Key::binary(),
                            ReqParams::#req_params{}).
head_bucket(Req, Key, #req_params{access_key_id = AccessKeyId}) ->
    BeginTime = leo_date:clock(),
    Bucket = formalize_bucket(Key),
    case head_bucket_1(AccessKeyId, Bucket) of
        ok ->
            ?access_log_bucket_head(Bucket, ?HTTP_ST_OK, BeginTime),
            ?reply_ok([?SERVER_HEADER], Req);
        not_found ->
            ?access_log_bucket_head(Bucket, ?HTTP_ST_NOT_FOUND, BeginTime),
            ?reply_not_found_without_body([?SERVER_HEADER], Req);
        {error, timeout} ->
            ?access_log_bucket_head(Bucket, ?HTTP_ST_SERVICE_UNAVAILABLE, BeginTime),
            ?reply_timeout_without_body([?SERVER_HEADER], Req);
        {error, _} ->
            %% BUGFIX: previously logged via ?access_log_bucket_delete; a HEAD
            %% request must use the "head" access-log macro like its siblings
            ?access_log_bucket_head(Bucket, ?HTTP_ST_INTERNAL_ERROR, BeginTime),
            ?reply_internal_error_without_body([?SERVER_HEADER], Req)
    end.
%% ---------------------------------------------------------------------
%% For OBJECT-OPERATION
%% ---------------------------------------------------------------------
%% @doc GET operation on Objects
-spec(get_object(Req, Key, ReqParams) ->
             {ok, Req} when Req::cowboy_req:req(),
                            Key::binary(),
                            ReqParams::#req_params{}).
get_object(Req, Key, ReqParams) ->
    %% shared implementation lives in leo_gateway_http_commons
    leo_gateway_http_commons:get_object(Req, Key, ReqParams).
%% @doc GET operation on Objects (cache-aware variant)
-spec(get_object_with_cache(Req, Key, CacheObj, ReqParams) ->
             {ok, Req} when Req::cowboy_req:req(),
                            Key::binary(),
                            CacheObj::#cache{},
                            ReqParams::#req_params{}).
get_object_with_cache(Req, Key, CachedObj, ReqParams) ->
    leo_gateway_http_commons:get_object_with_cache(Req, Key, CachedObj, ReqParams).
%% @doc Utility for reading the "x-amz-metadata-directive" header correctly
-spec(get_x_amz_meta_directive(Req) ->
             Ret when Req::cowboy_req:req(),
                      Ret::binary()).
get_x_amz_meta_directive(Req) ->
    %% an absent header becomes ?BIN_EMPTY and is resolved by the 2-arity clause
    get_x_amz_meta_directive(Req, ?http_header(Req, ?HTTP_HEAD_X_AMZ_META_DIRECTIVE)).
%% @private
%% No explicit directive: infer one. A present "x-amz-copy-source" header
%% marks a server-side copy, whose default directive is COPY.
get_x_amz_meta_directive(Req, ?BIN_EMPTY) ->
    case ?http_header(Req, ?HTTP_HEAD_X_AMZ_COPY_SOURCE) of
        ?BIN_EMPTY ->
            ?BIN_EMPTY;
        _CopySource ->
            %% return default - 'copy'
            ?HTTP_HEAD_X_AMZ_META_DIRECTIVE_COPY
    end;
get_x_amz_meta_directive(_Req, Directive) ->
    Directive.
%% @doc POST/PUT operation on Objects
-spec(put_object(Req, Key, ReqParams) ->
             {ok, Req} when Req::cowboy_req:req(),
                            Key::binary(),
                            ReqParams::#req_params{}).
put_object(Req, Key, Params) ->
    %% route through the 4-arity clauses keyed on the metadata directive
    Directive = get_x_amz_meta_directive(Req),
    put_object(Directive, Req, Key, Params).
%% @doc POST/PUT operation on Objects (also handles "Delete Multiple Objects")
%% Dispatch on the metadata directive (1st argument):
%%   <<>> + is_multi_delete -- body is a multi-object-delete XML document
%%   <<>>                   -- ordinary upload (small or large object)
%%   COPY/REPLACE           -- server-side copy via "x-amz-copy-source"
-spec(put_object(Directive, Req, Key, ReqParams) ->
             {ok, Req} when Directive::binary(),
                            Req::cowboy_req:req(),
                            Key::binary(),
                            ReqParams::#req_params{}).
put_object(?BIN_EMPTY, Req, _Key, #req_params{is_multi_delete = true,
                                              timeout_for_body = Timeout4Body,
                                              transfer_decode_fun = TransferDecodeFun,
                                              transfer_decode_state = TransferDecodeState} = Params) ->
    %% aws-chunked payloads need a transfer-decode callback while reading
    BodyOpts = case TransferDecodeFun of
                   undefined ->
                       [{read_timeout, Timeout4Body}];
                   _ ->
                       [{read_timeout, Timeout4Body},
                        {transfer_decode, TransferDecodeFun, TransferDecodeState}]
               end,
    case cowboy_req:body(Req, BodyOpts) of
        {ok, Body, Req1} ->
            %% Check Content-MD5 with body
            ContentMD5 = ?http_header(Req, ?HTTP_HEAD_CONTENT_MD5),
            CalculatedMD5 = base64:encode(crypto:hash(md5, Body)),
            delete_multi_objects_2(Req1, Body, ContentMD5, CalculatedMD5, Params);
        {error, _Cause} ->
            ?reply_malformed_xml([?SERVER_HEADER], Req)
    end;
put_object(?BIN_EMPTY, Req, Key, Params) ->
    case catch cowboy_req:body_length(Req) of
        {'EXIT', _} ->
            ?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_InvalidArgument,
                               ?XML_ERROR_MSG_InvalidArgument, Key, <<>>, Req);
        {BodySize, _} ->
            %% for aws-chunked payloads the real object size is carried by
            %% "x-amz-decoded-content-length", not by the body length
            Size = case cowboy_req:header(?HTTP_HEAD_X_AMZ_DECODED_CONTENT_LENGTH, Req) of
                       {undefined,_} ->
                           BodySize;
                       {Val,_} ->
                           binary_to_integer(Val)
                   end,
            case (Size >= Params#req_params.threshold_of_chunk_len) of
                true when Size >= Params#req_params.max_len_of_obj ->
                    %% too large even for the large-object (chunked) path
                    ?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_EntityTooLarge,
                                       ?XML_ERROR_MSG_EntityTooLarge, Key, <<>>, Req);
                true when Params#req_params.is_upload == false ->
                    leo_gateway_http_commons:put_large_object(Req, Key, Size, Params);
                false ->
                    Ret = case cowboy_req:has_body(Req) of
                              true ->
                                  TransferDecodeFun = Params#req_params.transfer_decode_fun,
                                  TransferDecodeState = Params#req_params.transfer_decode_state,
                                  Timeout4Body = Params#req_params.timeout_for_body,
                                  BodyOpts = case TransferDecodeFun of
                                                 undefined ->
                                                     [{read_timeout, Timeout4Body}];
                                                 _ ->
                                                     [{read_timeout, Timeout4Body},
                                                      {transfer_decode, TransferDecodeFun, TransferDecodeState}]
                                             end,
                                  case cowboy_req:body(Req, BodyOpts) of
                                      {ok, Bin, Req1} ->
                                          {ok, {Size, Bin, Req1}};
                                      {error, Cause} ->
                                          {error, Cause}
                                  end;
                              false ->
                                  %% zero-length object
                                  {ok, {0, ?BIN_EMPTY, Req}}
                          end,
                    leo_gateway_http_commons:put_small_object(Ret, Key, Params)
            end
    end;
%% @doc POST/PUT operation on Objects. COPY/REPLACE
%% @private
put_object(Directive, Req, Key, #req_params{handler = ?PROTO_HANDLER_S3,
                                            custom_metadata = CMetaBin1} = Params) ->
    CS = cow_qs:urldecode(?http_header(Req, ?HTTP_HEAD_X_AMZ_COPY_SOURCE)),
    %% need to trim head '/' when cooperating with s3fs(-c)
    CS2 = case binary:part(CS, {0, 1}) of
              ?BIN_SLASH ->
                  binary:part(CS, {1, byte_size(CS) -1});
              _ ->
                  CS
          end,
    case (Key =:= CS2) of
        true ->
            %% 400: copying an object onto itself is rejected
            ?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_InvalidRequest,
                               ?XML_ERROR_MSG_InvalidRequest, Key, <<>>, Req);
        false ->
            case leo_gateway_rpc_handler:get(CS2) of
                {ok, Meta, RespObject} ->
                    %% COPY keeps the source's custom metadata; anything else
                    %% (REPLACE) takes the metadata supplied with this request
                    CMetaBin = case Directive of
                                   ?HTTP_HEAD_X_AMZ_META_DIRECTIVE_COPY ->
                                       Meta#?METADATA.meta;
                                   _ ->
                                       CMetaBin1
                               end,
                    %% cnumber == 0 -> single (small) object; otherwise chunked
                    case Meta#?METADATA.cnumber of
                        0 ->
                            put_object_1(Directive, Req, Key, Meta, RespObject, Params#req_params{custom_metadata = CMetaBin});
                        _TotalChunkedObjs ->
                            put_large_object_1(Directive, Req, Key, Meta, Params#req_params{custom_metadata = CMetaBin})
                    end;
                {error, not_found} ->
                    ?reply_not_found([?SERVER_HEADER], Key, <<>>, Req);
                {error, unavailable} ->
                    ?reply_service_unavailable_error([?SERVER_HEADER], Key, <<>>, Req);
                {error, ?ERR_TYPE_INTERNAL_ERROR} ->
                    ?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req);
                {error, timeout} ->
                    ?reply_timeout([?SERVER_HEADER], Key, <<>>, Req)
            end
    end.
%% @doc POST/PUT operation on Objects. COPY
%% @private
%% Writes the copied (small) object under the destination 'Key'. For COPY
%% the response goes out immediately; for REPLACE the now-stale source
%% object still has to be removed (put_object_2/4).
put_object_1(Directive, Req, Key, Meta, Bin, #req_params{bucket_name = BucketName,
                                                         bucket_info = BucketInfo,
                                                         custom_metadata = CMetaBin} = Params) ->
    BeginTime = leo_date:clock(),
    Size = size(Bin),
    case leo_gateway_rpc_handler:put(#put_req_params{path = Key,
                                                     body = Bin,
                                                     meta = CMetaBin,
                                                     dsize = Size,
                                                     msize = byte_size(CMetaBin),
                                                     bucket_info = BucketInfo}) of
        {ok, _ETag} when Directive == ?HTTP_HEAD_X_AMZ_META_DIRECTIVE_COPY ->
            ?access_log_put(BucketName, Key, Size, ?HTTP_ST_OK, BeginTime),
            resp_copy_obj_xml(Req, Meta);
        {ok, _ETag} when Directive == ?HTTP_HEAD_X_AMZ_META_DIRECTIVE_REPLACE ->
            put_object_2(Req, Key, Meta, Params);
        {error, unavailable} ->
            ?reply_service_unavailable_error([?SERVER_HEADER], Key, <<>>, Req);
        {error, ?ERR_TYPE_INTERNAL_ERROR} ->
            ?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req);
        {error, timeout} ->
            ?reply_timeout([?SERVER_HEADER], Key, <<>>, Req)
    end.
%% @doc POST/PUT operation on Objects. REPLACE
%% @private
%% When source and destination keys coincide the REPLACE happened in-place;
%% otherwise the stale source object must still be deleted.
put_object_2(Req, Key, Meta, Params) ->
    IsSameKey = (Key == Meta#?METADATA.key),
    case IsSameKey of
        true  -> resp_copy_obj_xml(Req, Meta);
        false -> put_object_3(Req, Meta, Params)
    end.
%% @private
%% REPLACE clean-up: delete the source object after a successful copy.
%% 'not_found' is acceptable (already gone) and still yields the success XML.
put_object_3(Req, #?METADATA{key = Key, dsize = Size} = Meta, #req_params{bucket_name = BucketName}) ->
    BeginTime = leo_date:clock(),
    case leo_gateway_rpc_handler:delete(Meta#?METADATA.key) of
        ok ->
            ?access_log_delete(BucketName, Key, Size, ?HTTP_ST_NO_CONTENT, BeginTime),
            resp_copy_obj_xml(Req, Meta);
        {error, not_found} ->
            resp_copy_obj_xml(Req, Meta);
        {error, unavailable} ->
            ?reply_service_unavailable_error([?SERVER_HEADER], Meta#?METADATA.key, <<>>, Req);
        {error, ?ERR_TYPE_INTERNAL_ERROR} ->
            ?reply_internal_error([?SERVER_HEADER], Meta#?METADATA.key, <<>>, Req);
        {error, timeout} ->
            ?reply_timeout([?SERVER_HEADER], Meta#?METADATA.key, <<>>, Req)
    end.
%% @doc POST/PUT operation on `Large` Objects. COPY
%% @private
%% Large objects are moved chunk-by-chunk via move_large_object/3.
%% Note: an 'ok' result with a directive other than COPY/REPLACE would raise
%% a case_clause; callers only pass those two values.
put_large_object_1(Directive, Req, Key, Meta, Params) ->
    case leo_gateway_http_commons:move_large_object(Meta, Key, Params) of
        ok when Directive == ?HTTP_HEAD_X_AMZ_META_DIRECTIVE_COPY ->
            resp_copy_obj_xml(Req, Meta);
        ok when Directive == ?HTTP_HEAD_X_AMZ_META_DIRECTIVE_REPLACE ->
            put_large_object_2(Req, Key, Meta);
        {error, timeout} ->
            ?reply_timeout([?SERVER_HEADER], Key, <<>>, Req);
        {error, _Other} ->
            ?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req)
    end.
%% @doc POST/PUT operation on Objects. REPLACE (large-object variant)
%% @private
%% Identical key means the replacement happened in-place; otherwise the
%% stale chunked source must be removed.
put_large_object_2(Req, Key, Meta) ->
    IsSameKey = (Key == Meta#?METADATA.key),
    case IsSameKey of
        true  -> resp_copy_obj_xml(Req, Meta);
        false -> put_large_object_3(Req, Meta)
    end.
%% @private
%% Drop both the chunked parts and the parent object of the moved source
%% (delete failures are ignored), then answer with the copy-object XML.
put_large_object_3(Req, Meta) ->
    SrcKey = Meta#?METADATA.key,
    leo_large_object_commons:delete_chunked_objects(SrcKey),
    catch leo_gateway_rpc_handler:delete(SrcKey),
    resp_copy_obj_xml(Req, Meta).
%% @doc DELETE operation on Objects
-spec(delete_object(cowboy_req:req(), binary(), #req_params{}) ->
             {ok, cowboy_req:req()}).
delete_object(Req, Key, ReqParams) ->
    %% shared implementation lives in leo_gateway_http_commons
    leo_gateway_http_commons:delete_object(Req, Key, ReqParams).
%% @doc HEAD operation on Objects
-spec(head_object(cowboy_req:req(), binary(), #req_params{}) ->
             {ok, cowboy_req:req()}).
head_object(Req, Key, ReqParams) ->
    leo_gateway_http_commons:head_object(Req, Key, ReqParams).
%% @doc RANGE-Query operation on Objects
-spec(range_object(cowboy_req:req(), binary(), #req_params{}) ->
             {ok, cowboy_req:req()}).
range_object(Req, Key, ReqParams) ->
    leo_gateway_http_commons:range_object(Req, Key, ReqParams).
%% ---------------------------------------------------------------------
%% Inner Functions
%% ---------------------------------------------------------------------
%% @doc Create a {bucket, path} key from the requested URL path
%% @private
-spec(get_bucket_and_path(Req) ->
             {ok, Ret} when Req::cowboy_req:req(),
                            Ret::{binary(), binary()}).
get_bucket_and_path(Req) ->
    {EncodedPath, _} = cowboy_req:path(Req),
    %% the path arrives percent-encoded
    get_bucket_and_path(Req, cow_qs:urldecode(EncodedPath)).
%% @private
%% Resolve the request against the registered endpoints so that
%% path-style and virtual-hosted-style bucket addressing both work.
get_bucket_and_path(Req, Path) ->
    Endpoints = case leo_s3_endpoint:get_endpoints() of
                    {ok, Retrieved} ->
                        [E || #endpoint{endpoint = E} <- Retrieved];
                    _Error ->
                        []
                end,
    {HostName,_} = cowboy_req:host(Req),
    leo_http:key(Endpoints, HostName, Path).
%% @doc Handle an http-request
%% @private
%% Builds a #req_params{} from the URL/query-string plus the gateway
%% options, authenticates the request, and forwards to handle_2/6.
-spec(handle_1(Req, State, BucketName, Path) ->
             {ok, Req, State} when Req::cowboy_req:req(),
                                   State::[any()],
                                   BucketName::binary(),
                                   Path::binary()).
handle_1(Req, [{NumOfMinLayers, NumOfMaxLayers},
               HasInnerCache, CustomHeaderSettings, Props] = State, BucketName, Path) ->
    BinPart = binary:part(Path, {byte_size(Path)-1, 1}),
    TokenLen = length(binary:split(Path, [?BIN_SLASH], [global, trim])),
    HTTPMethod = cowboy_req:get(method, Req),
    %% A request is a directory listing when the path is the bucket itself,
    %% ends with '/', or carries a 'prefix' query parameter
    {Prefix, IsDir, Path_1, Req_2} =
        case cowboy_req:qs_val(?HTTP_HEAD_PREFIX, Req) of
            {undefined, Req_1} ->
                {none, (TokenLen == 1 orelse ?BIN_SLASH == BinPart), Path, Req_1};
            {BinParam, Req_1} ->
                NewPath = case BinPart of
                              ?BIN_SLASH ->
                                  Path;
                              _ ->
                                  << Path/binary, ?BIN_SLASH/binary >>
                          end,
                {BinParam, true, NewPath, Req_1}
        end,
    %% '?acl' sub-resource flag from the query string
    IsACL = case cowboy_req:qs_val(?HTTP_QS_BIN_ACL, Req_2) of
                {undefined, _} ->
                    false;
                _ ->
                    true
            end,
    ReqParams = request_params(Req_2,
                               #req_params{
                                  handler = ?MODULE,
                                  path = Path_1,
                                  bucket_name = BucketName,
                                  token_length = TokenLen,
                                  min_layers = NumOfMinLayers,
                                  max_layers = NumOfMaxLayers,
                                  qs_prefix = Prefix,
                                  has_inner_cache = HasInnerCache,
                                  is_cached = true,
                                  is_dir = IsDir,
                                  is_acl = IsACL,
                                  max_chunked_objs = Props#http_options.max_chunked_objs,
                                  max_len_of_obj = Props#http_options.max_len_of_obj,
                                  chunked_obj_len = Props#http_options.chunked_obj_len,
                                  custom_header_settings = CustomHeaderSettings,
                                  timeout_for_header = Props#http_options.timeout_for_header,
                                  timeout_for_body = Props#http_options.timeout_for_body,
                                  sending_chunked_obj_len = Props#http_options.sending_chunked_obj_len,
                                  reading_chunked_obj_len = Props#http_options.reading_chunked_obj_len,
                                  threshold_of_chunk_len = Props#http_options.threshold_of_chunk_len}),
    case ReqParams of
        {error, metadata_too_large} ->
            {ok, Req_3} = ?reply_metadata_too_large([?SERVER_HEADER], Path_1, <<>>, Req_2),
            {ok, Req_3, State};
        _ ->
            AuthRet = auth(Req_2, HTTPMethod, Path_1, TokenLen, ReqParams),
            %% handle_2/6 only needs the access-key part of the auth result
            AuthRet_2 = case AuthRet of
                            {error, Reason} ->
                                {error, Reason};
                            {ok, AccessKeyId, _} ->
                                {ok, AccessKeyId}
                        end,
            %% For aws-chunked payloads, wire up the chunk-signature decoder
            %% so the body reader can verify each chunk while streaming
            ReqParams_2 = case ReqParams#req_params.is_aws_chunked of
                              true ->
                                  case AuthRet of
                                      {ok, _, SignParams} ->
                                          {Signature, SignHead, SignKey} =
                                              case SignParams of
                                                  undefined ->
                                                      {undefined, undefined, undefined};
                                                  _ ->
                                                      SignParams
                                              end,
                                          AWSChunkSignParams = #aws_chunk_sign_params{
                                                                  sign_head = SignHead,
                                                                  sign_key = SignKey,
                                                                  prev_sign = Signature,
                                                                  chunk_sign = <<>>},
                                          AWSChunkDecState = #aws_chunk_decode_state{
                                                                buffer = <<>>,
                                                                dec_state = wait_size,
                                                                chunk_offset = 0,
                                                                sign_params = AWSChunkSignParams,
                                                                total_len = 0},
                                          ReqParams#req_params{
                                            transfer_decode_fun = fun aws_chunk_decode/2,
                                            transfer_decode_state = AWSChunkDecState};
                                      _ ->
                                          ReqParams
                                  end;
                              _ ->
                                  ReqParams
                          end,
            handle_2(AuthRet_2, Req_2, HTTPMethod, Path_1, ReqParams_2, State)
    end.
%% @doc Handle a request (sub)
%% @private
-spec(handle_2(Ret, Req, HttpVerb, Path, ReqParams, State) ->
             {ok, Req, State} when Ret::{ok, AccessKeyId} | {error, Cause},
                                   AccessKeyId::binary(),
                                   Cause::any(),
                                   Req::cowboy_req:req(),
                                   HttpVerb::binary(),
                                   Path::binary(),
                                   ReqParams::#req_params{},
                                   State::[any()]).
%% @doc Dispatch a request after authentication.
%%      Error tuples from auth become the matching S3 error responses;
%%      multipart-upload requests are recognized via upload-id/part-number
%%      in #req_params{}; everything else is forwarded to the generic
%%      HTTP request handler.
handle_2({error, unmatch}, Req,_HttpVerb, Key,_ReqParams, State) ->
    %% signature mismatch -> 403 SignatureDoesNotMatch
    {ok, Req_2} = ?reply_forbidden([?SERVER_HEADER],
                                   ?XML_ERROR_CODE_SignatureDoesNotMatch,
                                   ?XML_ERROR_MSG_SignatureDoesNotMatch, Key, <<>>, Req),
    {ok, Req_2, State};
handle_2({error, not_found}, Req,_HttpVerb, Key,_ReqParams, State) ->
    {ok, Req_2} = ?reply_not_found([?SERVER_HEADER], Key, <<>>, Req),
    {ok, Req_2, State};
handle_2({error, already_yours}, Req,_HttpVerb, Key,_ReqParams, State) ->
    %% bucket creation race -> 409 BucketAlreadyOwnedByYou
    {ok, Req_2} = ?reply_conflict([?SERVER_HEADER], ?XML_ERROR_CODE_BucketAlreadyOwnedByYou,
                                  ?XML_ERROR_MSG_BucketAlreadyOwnedByYou, Key, <<>>, Req),
    {ok, Req_2, State};
handle_2({error, _Cause}, Req,_HttpVerb, Key,_ReqParams,State) ->
    %% any other auth failure -> 403 AccessDenied
    {ok, Req_2} = ?reply_forbidden([?SERVER_HEADER],
                                   ?XML_ERROR_CODE_AccessDenied,
                                   ?XML_ERROR_MSG_AccessDenied, Key, <<>>, Req),
    {ok, Req_2, State};
%% For Multipart Upload - Initiation
handle_2({ok,_AccessKeyId}, Req, ?HTTP_POST,_Key, #req_params{bucket_info = BucketInfo,
                                                              custom_metadata = CMetaBin,
                                                              path = Path,
                                                              is_upload = true}, State) ->
    %% remove a registered object with 'touch-command'
    %% from the cache
    catch leo_cache_api:delete(Path),
    %% Insert a metadata into the storage-cluster.
    %% Upload-id = MD5(Path ++ now), stored under "Path\nUploadId"
    NowBin = list_to_binary(integer_to_list(leo_date:now())),
    UploadId = leo_hex:binary_to_hex(
                 crypto:hash(md5, << Path/binary, NowBin/binary >>)),
    UploadIdBin = list_to_binary(UploadId),
    UploadKey = << Path/binary, ?STR_NEWLINE, UploadIdBin/binary >>,
    {ok, Req_2} =
        case leo_gateway_rpc_handler:put(#put_req_params{path = UploadKey,
                                                         body = ?BIN_EMPTY,
                                                         meta = CMetaBin,
                                                         dsize = 0,
                                                         msize = byte_size(CMetaBin),
                                                         bucket_info = BucketInfo}) of
            {ok, _ETag} ->
                %% Response xml to a client
                [BucketName|Path_1] = leo_misc:binary_tokens(Path, ?BIN_SLASH),
                XML = gen_upload_initiate_xml(BucketName, Path_1, UploadId),
                ?reply_ok([?SERVER_HEADER], XML, Req);
            {error, unavailable} ->
                ?reply_service_unavailable_error([?SERVER_HEADER], Path, <<>>, Req);
            {error, timeout} ->
                ?reply_timeout([?SERVER_HEADER], Path, <<>>, Req);
            {error, Cause} ->
                ?error("handle_2/6", [{key, binary_to_list(Path)}, {cause, Cause}]),
                ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req)
        end,
    {ok, Req_2, State};
%% For Multipart Upload - Upload a part of an object
%% @private
handle_2({ok,_AccessKeyId}, Req, ?HTTP_PUT, Key,
         #req_params{upload_id = UploadId,
                     upload_part_num = PartNum,
                     max_chunked_objs = MaxChunkedObjs}, State) when UploadId /= <<>>,
                                                                     PartNum > MaxChunkedObjs ->
    %% part number exceeds the configured maximum -> 400 EntityTooLarge
    {ok, Req_2} = ?reply_bad_request([?SERVER_HEADER],
                                     ?XML_ERROR_CODE_EntityTooLarge,
                                     ?XML_ERROR_MSG_EntityTooLarge,
                                     Key, <<>>, Req),
    {ok, Req_2, State};
handle_2({ok,_AccessKeyId}, Req, ?HTTP_PUT,_Key,
         #req_params{path = Path,
                     is_upload = false,
                     upload_id = UploadId,
                     upload_part_num = PartNum1} = Params, State) when UploadId /= <<>>,
                                                                      PartNum1 /= 0 ->
    PartNum2 = list_to_binary(integer_to_list(PartNum1)),
    %% for confirmation
    Key1 = << Path/binary, ?STR_NEWLINE, UploadId/binary >>,
    %% for put a part of an object
    Key2 = << Path/binary, ?STR_NEWLINE, PartNum2/binary >>,
    {ok, Req_2} =
        case leo_gateway_rpc_handler:head(Key1) of
            {ok, _Metadata} ->
                %% upload session exists -> store this part
                put_object(?BIN_EMPTY, Req, Key2, Params);
            {error, not_found} ->
                ?reply_not_found([?SERVER_HEADER], Path, <<>>, Req);
            {error, unavailable} ->
                ?reply_service_unavailable_error(
                   [?SERVER_HEADER], Path, <<>>, Req);
            {error, timeout} ->
                ?reply_timeout([?SERVER_HEADER], Path, <<>>, Req);
            {error, ?ERR_TYPE_INTERNAL_ERROR} ->
                ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req)
        end,
    {ok, Req_2, State};
handle_2({ok,_AccessKeyId}, Req, ?HTTP_DELETE,_Key,
         #req_params{bucket_info = BucketInfo,
                     path = Path,
                     upload_id = UploadId}, State) when UploadId /= <<>> ->
    %% Abort multipart upload: overwrite with an empty object, then
    %% delete the object and its session marker; results ignored
    %% (best-effort), always answers 204
    _ = leo_gateway_rpc_handler:put(#put_req_params{path = Path,
                                                    body = ?BIN_EMPTY,
                                                    dsize = 0,
                                                    bucket_info = BucketInfo}),
    _ = leo_gateway_rpc_handler:delete(Path),
    _ = leo_gateway_rpc_handler:delete(<< Path/binary, ?STR_NEWLINE >>),
    {ok, Req_2} = ?reply_no_content([?SERVER_HEADER], Req),
    {ok, Req_2, State};
%% For Multipart Upload - Completion
handle_2({ok,_AccessKeyId}, Req, ?HTTP_POST,_Key,
         #req_params{bucket_info = BucketInfo,
                     path = Path,
                     chunked_obj_len = ChunkedLen,
                     is_upload = false,
                     upload_id = UploadId,
                     upload_part_num = PartNum,
                     transfer_decode_fun = TransferDecodeFun,
                     transfer_decode_state = TransferDecodeState}, State) when UploadId /= <<>>,
                                                                              PartNum == 0 ->
    Res = cowboy_req:has_body(Req),
    {ok, Req_2} = handle_multi_upload_1(
                    Res, Req, Path, UploadId,
                    ChunkedLen, TransferDecodeFun, TransferDecodeState, BucketInfo),
    {ok, Req_2, State};
%% For Regular cases
handle_2({ok, AccessKeyId}, Req, ?HTTP_POST, Path, Params, State) ->
    %% plain POST is treated as PUT
    handle_2({ok, AccessKeyId}, Req, ?HTTP_PUT, Path, Params, State);
handle_2({ok, AccessKeyId}, Req, HTTPMethod, Path, Params, State) ->
    %% 'catch' maps crashes from the generic handler to HTTP errors;
    %% an aws-chunked decode failure is reported as AccessDenied
    case catch leo_gateway_http_req_handler:handle(
                 HTTPMethod, Req,
                 Path, Params#req_params{access_key_id = AccessKeyId}) of
        {'EXIT', {"aws-chunked decode failed", _} = Cause} ->
            ?error("handle_2/6", [{key, binary_to_list(Path)},
                                  {cause, Cause}]),
            {ok, Req_2} = ?reply_forbidden(
                             [?SERVER_HEADER], ?XML_ERROR_CODE_AccessDenied,
                             ?XML_ERROR_MSG_AccessDenied, Path, <<>>, Req),
            {ok, Req_2, State};
        {'EXIT', Cause} ->
            ?error("handle_2/6", [{key, binary_to_list(Path)},
                                  {cause, Cause}]),
            {ok, Req_2} = ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req),
            {ok, Req_2, State};
        {ok, Req_2} ->
            Req_3 = cowboy_req:compact(Req_2),
            {ok, Req_3, State}
    end.
%% @private
-spec(aws_chunk_decode(Bin, State) ->
             {more|done, Acc, State} when Bin::binary(),
                                          State::#aws_chunk_decode_state{},
                                          Acc::binary()).
%% @doc Decode one incoming buffer of an aws-chunked request body.
%%      Prepends the leftover buffer, runs the 5-arity state machine,
%%      and re-packages its result. A malformed stream raises
%%      "aws-chunked decode failed" so handle_2/6 can answer 403.
%%      NOTE(review): the 'done' branch actually returns a 4-tuple
%%      {done, Acc, TotalLen, Rest} — the spec above under-describes it.
aws_chunk_decode(Bin, State) ->
    Buffer = State#aws_chunk_decode_state.buffer,
    DecState = State#aws_chunk_decode_state.dec_state,
    Offset = State#aws_chunk_decode_state.chunk_offset,
    SignParams = State#aws_chunk_decode_state.sign_params,
    TotalLen = State#aws_chunk_decode_state.total_len,
    Ret = aws_chunk_decode({ok, <<>>}, << Buffer/binary, Bin/binary >>,
                           DecState, Offset, SignParams),
    case Ret of
        {{error, Reason2}, {_, _, _, _}} ->
            ?error("aws_chunk_decode/2", [{simple_cause, "parsing error"},
                                          {cause, Reason2}]),
            erlang:error("aws-chunked decode failed");
        {{ok, Acc}, {Buffer_2, DecState_2, Offset_2, SignParams_2}} ->
            %% more chunks expected; carry parser state forward
            {more, Acc, #aws_chunk_decode_state{buffer = Buffer_2,
                                                dec_state = DecState_2,
                                                chunk_offset = Offset_2,
                                                sign_params = SignParams_2,
                                                total_len = TotalLen + byte_size(Acc)}};
        {{done, Acc}, {Rest, _, _, _}} ->
            %% zero-length terminator chunk seen
            {done, Acc, TotalLen + byte_size(Acc), Rest}
    end.
%% @private
%% @doc State machine for AWS Signature-v4 "aws-chunked" payloads.
%%      States: wait_size (parse "<hex-size>;") -> wait_head (parse
%%      "chunk-signature=<64 hex>\r\n") -> read_chunk (consume payload,
%%      optionally verifying each chunk signature) -> wait_size again,
%%      or done after the zero-length terminator chunk.
%%      Returns {{ok|done, Acc}, {RestBuffer, NextState, Offset, SignParams}}
%%      or {{error, Reason}, {...}} on a malformed stream / bad signature.
%% @private
aws_chunk_decode({ok, Acc}, Buffer, wait_size, 0,
                 #aws_chunk_sign_params{sign_head = SignHead} = SignParams) ->
    case byte_size(Buffer) of
        Len when Len > 10 ->
            %% only search the first 10 bytes for the ';' size terminator
            << Bin:10/binary, _/binary >> = Buffer,
            case binary:match(Bin, <<";">>) of
                nomatch ->
                    {{error, incorrect}, {Buffer, error, 0, SignParams}};
                {Start, _} ->
                    << SizeHexBin:Start/binary, ";", Rest/binary >> = Buffer,
                    SizeHex = binary_to_list(SizeHexBin),
                    Size = leo_hex:hex_to_integer(SizeHex),
                    %% with a signature head present, open a fresh SHA-256
                    %% context to hash this chunk's payload
                    SignParams_2 =
                        case SignHead of
                            undefined ->
                                SignParams#aws_chunk_sign_params{chunk_size = Size};
                            _ ->
                                Context = crypto:hash_init(sha256),
                                SignParams#aws_chunk_sign_params{chunk_size = Size,
                                                                 hash_context = Context}
                        end,
                    aws_chunk_decode({ok, Acc}, Rest, wait_head, 0, SignParams_2)
            end;
        _ ->
            %% not enough data buffered yet; stay in wait_size
            {{ok, Acc}, {Buffer, wait_size, 0, SignParams}}
    end;
aws_chunk_decode({ok, Acc}, Buffer, wait_head, 0, SignParams) ->
    case byte_size(Buffer) of
        Len when Len > 80 + 2 ->
            %% "chunk-signature=" ++ 64 hex chars ++ CRLF
            << "chunk-signature=", ChunkSign:64/binary,
               "\r\n", Rest/binary >> = Buffer,
            aws_chunk_decode({ok, Acc}, Rest, read_chunk, 0,
                             SignParams#aws_chunk_sign_params{chunk_sign = ChunkSign});
        _ ->
            {{ok, Acc}, {Buffer, wait_head, 0, SignParams}}
    end;
aws_chunk_decode({ok, Acc}, Buffer, read_chunk, Offset,
                 #aws_chunk_sign_params{sign_head = SignHead,
                                        sign_key = SignKey,
                                        prev_sign = PrevSign,
                                        chunk_sign = ChunkSign,
                                        chunk_size = ChunkSize,
                                        hash_context = Context} = SignParams) ->
    ChunkRemainSize = ChunkSize - Offset,
    case byte_size(Buffer) of
        Len when Len >= ChunkRemainSize + 2 ->
            %% whole remainder of the chunk plus its trailing CRLF buffered
            << ChunkPart:ChunkRemainSize/binary,
               "\r\n", Rest/binary >> = Buffer,
            case SignHead of
                undefined ->
                    %% no per-chunk signature verification requested
                    ?debug("aws_chunk_decode/4", "Output Chunk Size: ~p, No Sign", [ChunkSize]),
                    case ChunkSize of
                        0 ->
                            %% zero-length chunk terminates the stream
                            {{done, Acc}, {Rest, done, 0, #aws_chunk_sign_params{}}};
                        _ ->
                            aws_chunk_decode({ok, << Acc/binary, ChunkPart/binary >>},
                                             Rest, wait_size, 0, SignParams)
                    end;
                _ ->
                    %% verify the chunk signature: HMAC over the string-to-sign
                    %% built from the previous signature and the chunk hash
                    Context_2 = crypto:hash_update(Context, ChunkPart),
                    ChunkHash = crypto:hash_final(Context_2),
                    ChunkHashBin = leo_hex:binary_to_hexbin(ChunkHash),
                    BinToSign = << ?AWS_SIGNATURE_V4_SHA256_KEY/binary,
                                   "\n",
                                   SignHead/binary,
                                   PrevSign/binary,
                                   "\n",
                                   ?AWS_SIGNATURE_V4_SHA256_HASH/binary,
                                   "\n",
                                   ChunkHashBin/binary >>,
                    case (leo_hex:binary_to_hexbin(
                            crypto:hmac(sha256, SignKey, BinToSign))) of
                        ChunkSign ->
                            case (ChunkSize == 0) of
                                true ->
                                    {{done, Acc}, {Rest, done, 0, #aws_chunk_sign_params{}}};
                                false ->
                                    ?debug("aws_chunk_decode/4",
                                           "Output Chunk Size: ~p, Sign: ~p", [ChunkSize, ChunkSign]),
                                    %% this signature becomes prev_sign for the next chunk
                                    aws_chunk_decode({ok, << Acc/binary, ChunkPart/binary >>},
                                                     Rest, wait_size, 0,
                                                     SignParams#aws_chunk_sign_params{prev_sign = ChunkSign,
                                                                                      chunk_sign = <<>>})
                            end;
                        WrongSign ->
                            ?error("aws_chunk_decode/4",
                                   [{cause, "Chunk Signature Not Match"},
                                    {wrong_sign, WrongSign},
                                    {chunk_sign, ChunkSign},
                                    {sign, binary_to_list(BinToSign)}]),
                            {{error, unmatch}, {Buffer, error, Offset, SignParams}}
                    end
            end;
        Len when ChunkRemainSize >= Len ->
            %% chunk larger than what is buffered: consume everything,
            %% keep hashing, and stay in read_chunk at an advanced offset
            SignParams_2 = case SignHead of
                               undefined ->
                                   SignParams;
                               _ ->
                                   Context_2 = crypto:hash_update(Context, Buffer),
                                   SignParams#aws_chunk_sign_params{hash_context = Context_2}
                           end,
            {{ok, << Acc/binary, Buffer/binary >>},
             {<<>>, read_chunk, Offset + Len, SignParams_2}};
        _ ->
            {{ok, Acc},
             {Buffer, read_chunk, Offset ,SignParams}}
    end.
%% @doc Handle multi-upload processing
%% @private
-spec(handle_multi_upload_1(IsHandling, Req, Path, UploadId,
                            ChunkedLen, TransferDecodeFun, TransferDecodeState, BucketInfo) ->
             {ok, Req} when IsHandling::boolean(),
                            Req::cowboy_req:req(),
                            Path::binary(),
                            UploadId::binary(),
                            ChunkedLen::non_neg_integer(),
                            TransferDecodeFun::function(),
                            TransferDecodeState::term(),
                            BucketInfo::#?BUCKET{}).
%% @doc Multipart completion, step 1: confirm the upload session object
%%      ("Path\nUploadId") exists, pick up its custom metadata, then read
%%      the completion XML from the request body.
handle_multi_upload_1(true, Req, Path, UploadId,
                      ChunkedLen, TransferDecodeFun, TransferDecodeState, BucketInfo) ->
    Path4Conf = << Path/binary, ?STR_NEWLINE, UploadId/binary >>,
    case leo_gateway_rpc_handler:get(Path4Conf) of
        {ok, #?METADATA{meta = CMetaBin}, _} ->
            %% session confirmed; drop the session marker (best-effort)
            _ = leo_gateway_rpc_handler:delete(Path4Conf),
            BodyOpts = case TransferDecodeFun of
                           undefined ->
                               [];
                           _ ->
                               [{transfer_decode, TransferDecodeFun, TransferDecodeState}]
                       end,
            Ret = cowboy_req:body(Req, BodyOpts),
            handle_multi_upload_2(Ret, Req, Path, ChunkedLen, BucketInfo, CMetaBin);
        {error, unavailable} ->
            ?reply_service_unavailable_error([?SERVER_HEADER], Path, <<>>, Req);
        _ ->
            %% unknown upload-id (or other error) -> 403 AccessDenied
            ?reply_forbidden([?SERVER_HEADER], ?XML_ERROR_CODE_AccessDenied,
                             ?XML_ERROR_MSG_AccessDenied, Path, <<>>, Req)
    end;
handle_multi_upload_1(false, Req, Path,_UploadId,_ChunkedLen,_,_,_) ->
    %% completion request without a body -> 403 AccessDenied
    ?reply_forbidden([?SERVER_HEADER], ?XML_ERROR_CODE_AccessDenied,
                     ?XML_ERROR_MSG_AccessDenied, Path, <<>>, Req).
%% @private
-spec(handle_multi_upload_2({ok, Bin, Req}|{error, Cause}, Req, Path, ChunkedLen, BucketInfo, CMetaBin) ->
             {ok, Req} when Bin::binary(),
                            Req::cowboy_req:req(),
                            Cause::any(),
                            Path::binary(),
                            ChunkedLen::non_neg_integer(),
                            BucketInfo::#?BUCKET{},
                            CMetaBin::binary()).
%% @doc Multipart completion, step 2: parse the CompleteMultipartUpload
%%      XML, fold the parts' sizes/checksums, then write the parent
%%      object's metadata and answer with the completion XML.
%%      The part count is taken as the number of child elements in the
%%      parsed XML.
handle_multi_upload_2({ok, Bin, Req}, _Req, Path,_ChunkedLen, BucketInfo, CMetaBin) ->
    %% trim spaces
    Acc = fun(#xmlText{value = " ",
                       pos = P}, Acc, S) ->
                  {Acc, P, S};
             (X, Acc, S) ->
                  {[X|Acc], S}
          end,
    {#xmlElement{content = Content},_} = xmerl_scan:string(
                                           binary_to_list(Bin),
                                           [{space,normalize}, {acc_fun, Acc}]),
    TotalUploadedObjs = length(Content),
    case handle_multi_upload_3(TotalUploadedObjs, Path, []) of
        {ok, {Len, ETag_1}} ->
            %% Retrieve the child object's metadata
            %% to set the actual chunked length
            IndexBin = list_to_binary(integer_to_list(1)),
            ChildKey = << Path/binary, ?DEF_SEPARATOR/binary, IndexBin/binary >>,
            case leo_gateway_rpc_handler:head(ChildKey) of
                {ok, #?METADATA{del = 0,
                                dsize = ChildObjSize}} ->
                    %% write the parent (zero-body) object that stitches
                    %% the chunks together
                    case leo_gateway_rpc_handler:put(#put_req_params{path = Path,
                                                                     body = ?BIN_EMPTY,
                                                                     meta = CMetaBin,
                                                                     dsize = Len,
                                                                     msize = byte_size(CMetaBin),
                                                                     csize = ChildObjSize,
                                                                     total_chunks = TotalUploadedObjs,
                                                                     digest = ETag_1,
                                                                     bucket_info = BucketInfo}) of
                        {ok,_} ->
                            [BucketName|Path_1] = leo_misc:binary_tokens(Path, ?BIN_SLASH),
                            ETag2 = leo_hex:integer_to_hex(ETag_1, 32),
                            XML = gen_upload_completion_xml(
                                    BucketName, Path_1, ETag2, TotalUploadedObjs),
                            ?reply_ok([?SERVER_HEADER], XML, Req);
                        {error, unavailable} ->
                            ?reply_service_unavailable_error([?SERVER_HEADER], Path, <<>>, Req);
                        {error, Cause} ->
                            ?error("handle_multi_upload_2/5",
                                   [{key, binary_to_list(Path)}, {cause, Cause}]),
                            ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req)
                    end;
                _ ->
                    ?error("handle_multi_upload_2/5",
                           [{key, binary_to_list(Path)}, {cause, invalid_metadata}]),
                    ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req)
            end;
        {error, unavailable} ->
            ?reply_service_unavailable_error([?SERVER_HEADER], Path, <<>>, Req);
        {error, Cause} ->
            ?error("handle_multi_upload_2/5", [{key, binary_to_list(Path)}, {cause, Cause}]),
            ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req)
    end;
handle_multi_upload_2({error, Cause}, Req, Path,_ChunkedLen,_BucketInfo, _CMetaBin) ->
    %% reading the request body failed
    ?error("handle_multi_upload_2/5", [{key, binary_to_list(Path)}, {cause, Cause}]),
    ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req).
%% @doc Retrieve Metadatas for uploaded objects (Multipart)
%% @private
-spec(handle_multi_upload_3(PartNum, Path, Acc) ->
             {ok, Ret} | {error, Cause} when PartNum::non_neg_integer(),
                                             Path::binary(),
                                             Acc::term(),
                                             Ret::{Len, ETag},
                                             Len::non_neg_integer(),
                                             ETag::binary(),
                                             Cause::any()).
%% @doc Collect metadata of the uploaded parts (PartNum down to 1) and
%%      fold them into {TotalLen, MultipartETag}, where the ETag is the
%%      MD5 over the concatenated raw part checksums.
handle_multi_upload_3(0,_Path, Acc) ->
    {Len, ETag} = lists:foldl(
                    fun({_, {DSize, Checksum}}, {Sum, ETagBin_1}) ->
                            ETagBin_2 = leo_hex:integer_to_raw_binary(Checksum),
                            {Sum + DSize, << ETagBin_1/binary, ETagBin_2/binary >>}
                    end, {0, <<>>}, lists:sort(
                                      lists:reverse(Acc))),
    ETag_1 = leo_hex:hex_to_integer(leo_hex:binary_to_hex(crypto:hash(md5, ETag))),
    {ok, {Len, ETag_1}};
handle_multi_upload_3(PartNum, Path, Acc) ->
    PartNumBin = list_to_binary(integer_to_list(PartNum)),
    Key = << Path/binary, ?STR_NEWLINE, PartNumBin/binary >>,
    case leo_gateway_rpc_handler:head(Key) of
        {ok, #?METADATA{dsize = Len,
                        checksum = Checksum}} ->
            handle_multi_upload_3(PartNum - 1, Path, [{PartNum, {Len, Checksum}} | Acc]);
        Error ->
            %% propagate the first head failure as-is
            Error
    end.
%% @doc Generate an upload-key
%% @private
%% @doc Generate an upload-key: join the path segments with a slash.
%%      e.g. [<<"photo">>, <<"img">>] -> "photo/img"
%% @private
%% NOTE: the spec previously declared Path::binary(), but the body folds
%% over a LIST of binaries (callers pass leo_misc:binary_tokens/2 output).
-spec(gen_upload_key(Path) ->
             Key when Path::[binary()],
                      Key::string()).
gen_upload_key(Path) ->
    Key = lists:foldl(fun(I, []) ->
                              %% first segment: no leading separator
                              binary_to_list(I);
                         (I, Acc) ->
                              Acc ++ ?STR_SLASH ++ binary_to_list(I)
                      end, [], Path),
    Key.
%% @doc Generate an update-initiate xml
%% @private
-spec(gen_upload_initiate_xml(BucketNameBin, Path, UploadId) ->
             Ret when BucketNameBin::binary(),
                      Path::[binary()],
                      UploadId::binary(),
                      Ret::string()).
%% @doc Render the InitiateMultipartUploadResult XML for a client response.
%% @private
gen_upload_initiate_xml(BucketNameBin, Path, UploadId) ->
    io_lib:format(?XML_UPLOAD_INITIATION,
                  [binary_to_list(BucketNameBin),
                   gen_upload_key(Path),
                   UploadId]).
%% @doc Generate an update-completion xml
%% @private
-spec(gen_upload_completion_xml(BucketNameBin, Path, ETag, Total) ->
             Ret when BucketNameBin::binary(),
                      Path::[binary()],
                      ETag::binary(),
                      Total::non_neg_integer(),
                      Ret::string()).
%% @doc Render the CompleteMultipartUploadResult XML for a client response.
%% @private
gen_upload_completion_xml(BucketNameBin, Path, ETag, Total) ->
    io_lib:format(?XML_UPLOAD_COMPLETION,
                  [binary_to_list(BucketNameBin),
                   gen_upload_key(Path),
                   ETag,
                   integer_to_list(Total)]).
%% @doc Generate copy-obj's xml
%% @private
-spec(resp_copy_obj_xml(Req, Meta) ->
             {ok, Req} when Req::cowboy_req:req(),
                            Meta::#?METADATA{}).
%% @doc Reply 200 with the CopyObjectResult XML (last-modified + ETag)
%%      built from the copied object's metadata.
resp_copy_obj_xml(Req, Meta) ->
    LastModified = leo_http:web_date(Meta#?METADATA.timestamp),
    ETagHex = leo_hex:integer_to_hex(Meta#?METADATA.checksum, 32),
    Body = io_lib:format(?XML_COPY_OBJ_RESULT, [LastModified, ETagHex]),
    ?reply_ok([?SERVER_HEADER,
               {?HTTP_HEAD_RESP_CONTENT_TYPE, ?HTTP_CTYPE_XML}
              ], Body, Req).
%% @doc Retrieve header values from a request
%% Set request params
%% @private
-spec(request_params(Req, ReqParams) ->
             ReqParams when Req::cowboy_req:req(),
                            ReqParams::#req_params{}).
%% @doc Fill #req_params{} from the request's query string and headers:
%%      multi-delete / multipart-upload markers, Range, aws-chunked flag,
%%      and the custom ("x-amz-meta-*") metadata blob. Returns
%%      {error, metadata_too_large} when the metadata exceeds the limit.
request_params(Req, Params) ->
    IsMultiDelete = case cowboy_req:qs_val(?HTTP_QS_BIN_MULTI_DELETE, Req) of
                        {undefined,_} ->
                            false;
                        _ ->
                            true
                    end,
    IsUpload = case cowboy_req:qs_val(?HTTP_QS_BIN_UPLOADS, Req) of
                   {undefined,_} ->
                       false;
                   _ ->
                       true
               end,
    UploadId = case cowboy_req:qs_val(?HTTP_QS_BIN_UPLOAD_ID, Req) of
                   {undefined,_} ->
                       <<>>;
                   {Val_1,_} ->
                       Val_1
               end,
    PartNum = case cowboy_req:qs_val(?HTTP_QS_BIN_PART_NUMBER, Req) of
                  {undefined,_} ->
                      0;
                  {Val_2,_} ->
                      list_to_integer(binary_to_list(Val_2))
              end,
    Range = element(1, cowboy_req:header(?HTTP_HEAD_RANGE, Req)),
    %% aws-chunked transfer is signalled via x-amz-content-sha256
    IsAwsChunked = case cowboy_req:header(?HTTP_HEAD_X_AMZ_CONTENT_SHA256, Req) of
                       {?HTTP_HEAD_X_VAL_AWS4_SHA256,_} ->
                           true;
                       _ ->
                           false
                   end,
    %% ?debug("request_params/2", "Is AWS Chunked: ~p", [IsAwsChunked]),
    {Headers, _} = cowboy_req:headers(Req),
    {ok, CMetaBin} = parse_headers_to_cmeta(Headers),
    case byte_size(CMetaBin) of
        MSize when MSize >= ?HTTP_METADATA_LIMIT ->
            {error, metadata_too_large};
        _ ->
            Params#req_params{is_multi_delete = IsMultiDelete,
                              is_upload = IsUpload,
                              is_aws_chunked = IsAwsChunked,
                              upload_id = UploadId,
                              upload_part_num = PartNum,
                              custom_metadata = CMetaBin,
                              range_header = Range}
    end.
%% @doc check if bucket is public-read
%% @private
-spec(is_public_read(BucketAclInfoL) ->
             Ret when BucketAclInfoL::[#bucket_acl_info{}],
                      Ret::boolean()).
%% @doc True when any ACL entry grants the all-users grantee read access
%%      ([read] or [read, write]).
is_public_read(BucketAclInfoL) ->
    lists:any(fun(#bucket_acl_info{user_id = UserId,
                                   permissions = Permissions}) ->
                      UserId == ?GRANTEE_ALL_USER
                          andalso (Permissions == [read] orelse
                                   Permissions == [read, write])
              end, BucketAclInfoL).
%% @private
-spec(is_public_read_write(BucketAclInfoL) ->
             Ret when BucketAclInfoL::[#bucket_acl_info{}],
                      Ret::boolean()).
%% @doc True when any ACL entry grants the all-users grantee [read, write].
is_public_read_write(BucketAclInfoL) ->
    lists:any(fun(#bucket_acl_info{user_id = UserId,
                                   permissions = Permissions}) ->
                      UserId == ?GRANTEE_ALL_USER
                          andalso (Permissions == [read, write])
              end, BucketAclInfoL).
%% @doc Authentication
%% @private
-spec(auth(Req, HTTPMethod, Path, TokenLen, ReqParams) ->
             {ok, AccessKeyId, {Signature, SignHead, SignKey}|undefined} |
             {error, Cause} when Req::cowboy_req:req(),
                                 HTTPMethod::binary(),
                                 Path::binary(),
                                 TokenLen::non_neg_integer(),
                                 ReqParams::#req_params{},
                                 AccessKeyId::binary(),
                                 Signature::binary(),
                                 SignHead::binary(),
                                 SignKey::binary(),
                                 Cause::any()).
%% @doc Authentication entry point: resolve the bucket (first path token)
%%      and its ACLs, then delegate to auth/7. An unknown bucket is
%%      treated as having no ACLs.
auth(Req, HTTPMethod, Path, TokenLen, ReqParams) ->
    BucketName = case (TokenLen >= 1) of
                     true ->
                         erlang:hd(leo_misc:binary_tokens(Path, ?BIN_SLASH));
                     false ->
                         ?BIN_EMPTY
                 end,
    case leo_s3_bucket:get_latest_bucket(BucketName) of
        {ok, #?BUCKET{acls = ACLs} = Bucket} ->
            auth(Req, HTTPMethod, Path, TokenLen,
                 BucketName, ACLs, ReqParams#req_params{bucket_info = Bucket});
        not_found ->
            auth(Req, HTTPMethod, Path, TokenLen, BucketName, [], ReqParams);
        {error, Cause} ->
            {error, Cause}
    end.
%% @private
-spec(auth(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams) ->
             {ok, AccessKeyId, {Signature, SignHead, SignKey}|undefined} |
             {error, Cause} when Req::cowboy_req:req(),
                                 HTTPMethod::binary(),
                                 Path::binary(),
                                 TokenLen::non_neg_integer(),
                                 BucketName::binary(),
                                 ACLs::[binary()],
                                 ReqParams::#req_params{},
                                 AccessKeyId::binary(),
                                 Signature::binary(),
                                 SignHead::binary(),
                                 SignKey::binary(),
                                 Cause::any()).
%% @doc ACL shortcut layer: public-read-write buckets allow writes
%%      (and multi-delete) without a signature, public-read buckets
%%      allow anonymous reads on objects; otherwise fall through to
%%      signature verification in auth_1/7. An anonymous grant returns
%%      an empty access-key and no sign params.
auth(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs,
     #req_params{is_multi_delete = true} = ReqParams) when TokenLen =< 1 ->
    case is_public_read_write(ACLs) of
        true ->
            {ok, <<>>, undefined};
        false ->
            auth_1(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams)
    end;
auth(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams) when TokenLen =< 1 ->
    %% bucket-level operation: always verify the signature
    auth_1(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams);
auth(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams) when TokenLen > 1,
                                                                        (HTTPMethod == ?HTTP_POST orelse
                                                                         HTTPMethod == ?HTTP_PUT orelse
                                                                         HTTPMethod == ?HTTP_DELETE) ->
    %% object write/delete: anonymous only with public read-write
    case is_public_read_write(ACLs) of
        true ->
            {ok, <<>>, undefined};
        false ->
            auth_1(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams)
    end;
auth(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams) when TokenLen > 1 ->
    %% object read: anonymous with public read
    case is_public_read(ACLs) of
        true ->
            {ok, <<>>, undefined};
        false ->
            auth_1(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams)
    end.
%% @private
%% @doc Verify the Authorization header (AWS Signature v2 or v4):
%%      canonicalize the URI and query string, assemble #sign_params{},
%%      and delegate to leo_s3_auth:authenticate/3.
%% @private
auth_1(Req, HTTPMethod, Path, TokenLen, BucketName, _ACLs, #req_params{is_acl = IsACL}) ->
    case cowboy_req:header(?HTTP_HEAD_AUTHORIZATION, Req) of
        {undefined, _} ->
            {error, undefined};
        {AuthorizationBin, _} ->
            case AuthorizationBin of
                << Head:4/binary,
                   _Rest/binary >> when Head =:= ?HTTP_HEAD_X_AWS_SIGNATURE_V2;
                                        Head =:= ?HTTP_HEAD_X_AWS_SIGNATURE_V4 ->
                    %% a PUT on a 1-token path (the bucket itself, not
                    %% an ACL update) is a create-bucket operation
                    IsCreateBucketOp = (TokenLen == 1 andalso
                                        HTTPMethod == ?HTTP_PUT andalso
                                        not IsACL),
                    {RawURI,_} = cowboy_req:path(Req),
                    {QStr,_} = cowboy_req:qs(Req),
                    {Headers,_} = cowboy_req:headers(Req),
                    %% NOTE:
                    %% - from s3cmd, dragondisk and others:
                    %%   - Path:   <<"photo/img">>
                    %%   - RawURI: <<"/img">>
                    %%
                    %% - from ruby-client, other AWS-clients:
                    %%   - Path:   <<"photo/img">>
                    %%   - RawURI: <<"/photo/img">>
                    %%
                    %% -> Adjust URI:
                    %%    #sign_params{ requested_uri = << "/photo/img" >>
                    %%                  raw_uri       = RawURI
                    %%    }
                    %% * the hash-value is calculated by "raw_uri"
                    %%
                    Token_1 = leo_misc:binary_tokens(Path, << ?STR_SLASH >>),
                    Token_2 = leo_misc:binary_tokens(RawURI, << ?STR_SLASH >>),
                    Path_1 = case (length(Token_1) /= length(Token_2)) of
                                 true ->
                                     << ?STR_SLASH, BucketName/binary, RawURI/binary >>;
                                 false ->
                                     case RawURI of
                                         << ?STR_SLASH, _/binary >> ->
                                             RawURI;
                                         _ ->
                                             << ?STR_SLASH, RawURI/binary >>
                                     end
                             end,
                    %% drop a trailing '=' from the query string
                    Len = byte_size(QStr),
                    QStr_2 = case (Len > 0 andalso binary:last(QStr) == $=) of
                                 true ->
                                     binary:part(QStr, 0, (Len - 1));
                                 false ->
                                     QStr
                             end,
                    %% sort multi-parameter query strings to obtain a
                    %% canonical form for signing
                    QStr_3 = case binary:match(QStr_2, << "&" >>) of
                                 nomatch ->
                                     QStr_2;
                                 _ ->
                                     Ret = lists:foldl(
                                             fun(Q, []) ->
                                                     Q;
                                                (Q, Acc) ->
                                                     lists:append([Acc, "&", Q])
                                             end, [],
                                             lists:sort(string:tokens(binary_to_list(QStr_2), "&"))),
                                     list_to_binary(Ret)
                             end,
                    SignVer = case (Head =:= ?HTTP_HEAD_X_AWS_SIGNATURE_V4) of
                                  true ->
                                      v4;
                                  false ->
                                      v2
                              end,
                    SignParams = #sign_params{http_verb = HTTPMethod,
                                              content_md5 = ?http_header(Req, ?HTTP_HEAD_CONTENT_MD5),
                                              content_type = ?http_header(Req, ?HTTP_HEAD_CONTENT_TYPE),
                                              date = ?http_header(Req, ?HTTP_HEAD_DATE),
                                              bucket = BucketName,
                                              raw_uri = RawURI,
                                              requested_uri = Path_1,
                                              query_str = QStr_3,
                                              sign_ver = SignVer,
                                              headers = Headers,
                                              amz_headers = leo_http:get_amz_headers4cow(Headers)},
                    leo_s3_auth:authenticate(AuthorizationBin, SignParams, IsCreateBucketOp);
                _->
                    %% unrecognized Authorization scheme
                    {error, nomatch}
            end
    end.
%% @doc Get bucket list
%% @private
%% @see
-spec(get_bucket_1(AccessKeyId, Key, Delimiter, Marker, MaxKeys, Prefix) ->
             {ok, XMLRet} | {error, Cause} when AccessKeyId::binary(),
                                                Key::binary(),
                                                Delimiter::binary(),
                                                Marker::binary(),
                                                MaxKeys::non_neg_integer(),
                                                Prefix::binary()|none,
                                                XMLRet::binary(),
                                                Cause::any()).
%% @doc List buckets or bucket contents:
%%      - Key "/" with no prefix -> service-level bucket list
%%      - MaxKeys == 0 -> empty object-list XML
%%      - Delimiter == none -> streamed listing (returns a body fun)
%%      - otherwise -> one-shot directory listing rendered to XML
get_bucket_1(AccessKeyId, <<>>, Delimiter, Marker, MaxKeys, none) ->
    get_bucket_1(AccessKeyId, ?BIN_SLASH, Delimiter, Marker, MaxKeys, none);
get_bucket_1(AccessKeyId, ?BIN_SLASH, _Delimiter, _Marker, _MaxKeys, none) ->
    %% service-level GET: list the caller's buckets
    case leo_s3_bucket:find_buckets_by_id(AccessKeyId) of
        not_found ->
            {ok, generate_bucket_xml([])};
        {ok, []} ->
            {ok, generate_bucket_xml([])};
        {ok, MetadataL} ->
            {ok, generate_bucket_xml(MetadataL)};
        Error ->
            Error
    end;
get_bucket_1(_AccessKeyId, BucketName, _Delimiter, _Marker, 0, Prefix) ->
    %% max-keys == 0: answer immediately with an empty listing
    Prefix_1 = case Prefix of
                   none ->
                       <<>>;
                   _ ->
                       Prefix
               end,
    Path = << BucketName/binary, Prefix_1/binary >>,
    {ok, generate_bucket_xml(Path, Prefix_1, [], 0)};
get_bucket_1(_AccessKeyId, BucketName, none, Marker, MaxKeys, Prefix) ->
    %% no delimiter: stream the (flat) listing back to the client
    ?debug("get_bucket_1/6", "BucketName: ~p, Marker: ~p, MaxKeys: ~p",
           [BucketName, Marker, MaxKeys]),
    Prefix_1 = case Prefix of
                   none ->
                       <<>>;
                   _ ->
                       Prefix
               end,
    {ok, #redundancies{nodes = Redundancies}} =
        leo_redundant_manager_api:get_redundancies_by_key(get, BucketName),
    Key = << BucketName/binary, Prefix_1/binary >>,
    case leo_gateway_rpc_handler:invoke(Redundancies,
                                        leo_storage_handler_directory,
                                        find_by_parent_dir,
                                        [Key, ?BIN_SLASH, Marker, MaxKeys],
                                        []) of
        {ok, Metadata} when is_list(Metadata) =:= true ->
            %% a body fun is handed to cowboy, which pushes head,
            %% entries, and foot through the socket incrementally
            BodyFunc = fun(Socket, Transport) ->
                               BucketName_1 = erlang:hd(leo_misc:binary_tokens(BucketName, <<"/">>)),
                               HeadBin = generate_list_head_xml(BucketName_1, Prefix_1, MaxKeys, <<>>),
                               ok = Transport:send(Socket, HeadBin),
                               {ok, IsTruncated, NextMarker} =
                                   recursive_find(BucketName, Redundancies, Metadata,
                                                  Marker, MaxKeys, Transport, Socket),
                               FootBin = generate_list_foot_xml(IsTruncated, NextMarker),
                               ok = Transport:send(Socket, FootBin)
                       end,
            {ok, BodyFunc};
        {ok, _} ->
            {error, invalid_format};
        Error ->
            Error
    end;
get_bucket_1(_AccessKeyId, BucketName, Delimiter, Marker, MaxKeys, Prefix) ->
    %% delimiter given: render one page of results as a single XML body
    ?debug("get_bucket_1/6", "BucketName: ~p, Delimiter: ~p, Marker: ~p, MaxKeys: ~p",
           [BucketName, Delimiter, Marker, MaxKeys]),
    Prefix_1 = case Prefix of
                   none ->
                       <<>>;
                   _ ->
                       Prefix
               end,
    {ok, #redundancies{nodes = Redundancies}} =
        leo_redundant_manager_api:get_redundancies_by_key(get, BucketName),
    Path = << BucketName/binary, Prefix_1/binary >>,
    case leo_gateway_rpc_handler:invoke(Redundancies,
                                        leo_storage_handler_directory,
                                        find_by_parent_dir,
                                        [Path, Delimiter, Marker, MaxKeys],
                                        []) of
        not_found ->
            {ok, generate_bucket_xml(Path, Prefix_1, [], MaxKeys)};
        {ok, []} ->
            {ok, generate_bucket_xml(Path, Prefix_1, [], MaxKeys)};
        {ok, MetadataL} ->
            {ok, generate_bucket_xml(Path, Prefix_1, MetadataL, MaxKeys)};
        Error ->
            Error
    end.
%% @doc Put a bucket
%% @private
%% @see
-spec(put_bucket_1(CannedACL, AccessKeyId, BucketName) ->
             ok | {error, Cause} when CannedACL::string(),
                                      AccessKeyId::binary(),
                                      BucketName::binary(),
                                      Cause::any()).
%% @doc Register a bucket, optionally with a canned ACL.
put_bucket_1(CannedACL, AccessKeyId, BucketName) ->
    case CannedACL of
        [] ->
            leo_s3_bucket:put(AccessKeyId, BucketName);
        _ ->
            leo_s3_bucket:put(AccessKeyId, BucketName, CannedACL)
    end.
%% @doc Put a bucket ACL
%% @private
%% @see
-spec(put_bucket_acl_1(CannedACL, AccessKeyId, BucketName) ->
             ok | {error, Cause} when CannedACL::string(),
                                      AccessKeyId::binary(),
                                      BucketName::binary(),
                                      Cause::any()).
%% @doc Apply a canned ACL to a bucket; only private / public-read /
%%      public-read-write are supported.
put_bucket_acl_1(CannedACL, AccessKeyId, BucketName) ->
    case CannedACL of
        ?CANNED_ACL_PRIVATE ->
            leo_s3_bucket:update_acls2private(AccessKeyId, BucketName);
        ?CANNED_ACL_PUBLIC_READ ->
            leo_s3_bucket:update_acls2public_read(AccessKeyId, BucketName);
        ?CANNED_ACL_PUBLIC_READ_WRITE ->
            leo_s3_bucket:update_acls2public_read_write(AccessKeyId, BucketName);
        _ ->
            {error, not_supported}
    end.
%% @doc Delete a bucket
%% @private
%% @see
-spec(delete_bucket_1(AccessKeyId, BucketName) ->
             ok | not_found | {error, Cause} when AccessKeyId::binary(),
                                                  BucketName::binary()|none,
                                                  Cause::any()).
%% @doc Delete a bucket: normalize its name, then ask the manager nodes
%%      one by one (delete_bucket_2/3) until one answers.
delete_bucket_1(AccessKeyId, BucketName) ->
    FormalizedName = formalize_bucket(BucketName),
    Managers = ?env_manager_nodes(leo_gateway),
    delete_bucket_2(Managers, AccessKeyId, FormalizedName).
%% @private
-spec(delete_bucket_2(NodeL, AccessKeyId, BucketName) ->
             ok | not_found | {error, Cause} when NodeL::[atom()],
                                                  AccessKeyId::binary(),
                                                  BucketName::binary()|none,
                                                  Cause::any()).
%% @doc Try each manager node in turn via RPC; fall back to the next
%%      node on any failure (incl. badrpc), error out when all failed.
delete_bucket_2([],_,_) ->
    {error, ?ERR_TYPE_INTERNAL_ERROR};
delete_bucket_2([Node|Rest], AccessKeyId, BucketName) ->
    %% node names may be configured as strings
    Node_1 = case is_list(Node) of
                 true ->
                     list_to_atom(Node);
                 false ->
                     Node
             end,
    case rpc:call(Node_1, leo_manager_api, delete_bucket,
                  [AccessKeyId, BucketName], ?DEF_TIMEOUT) of
        ok ->
            ok;
        {error, not_found} ->
            not_found;
        {_, Cause} ->
            %% catches both {error, _} and {badrpc, _}
            ?warn("delete_bucket_2/3", [{cause, Cause}]),
            delete_bucket_2(Rest, AccessKeyId, BucketName)
    end.
%% @doc Head a bucket
%% @private
%% @see
-spec(head_bucket_1(AccessKeyId, BucketName) ->
             ok | not_found | {error, Cause} when AccessKeyId::binary(),
                                                  BucketName::binary(),
                                                  Cause::any()).
%% @doc Thin delegation to leo_s3_bucket:head/2.
head_bucket_1(AccessKeyId, BucketName) ->
    leo_s3_bucket:head(AccessKeyId, BucketName).
%% @doc Generate XML from matadata-list
%% @private
%% @doc Render a bucket listing (metadata list) to the object-list XML.
%%      The metadata is split into slices that are rendered by worker
%%      processes (generate_bucket_xml_1/7) and re-assembled in order
%%      by generate_bucket_xml_loop/4.
%% NOTE: the spec previously declared MaxKeys::binary(), but MaxKeys is
%% an integer — it is compared with '=/= 0', matched against length/1,
%% and passed to integer_to_list/1 below.
-spec(generate_bucket_xml(PathBin, PrefixBin, MetadataL, MaxKeys) ->
             XMLRet when PathBin::binary(),
                         PrefixBin::binary(),
                         MetadataL::[#?METADATA{}],
                         MaxKeys::non_neg_integer(),
                         XMLRet::string()).
generate_bucket_xml(PathBin, PrefixBin, MetadataL, MaxKeys) ->
    Bucket = erlang:hd(leo_misc:binary_tokens(PathBin, <<"/">>)),
    PathLen = byte_size(PathBin),
    Path = binary_to_list(PathBin),
    Prefix = binary_to_list(PrefixBin),
    Ref = make_ref(),
    %% fan the metadata out to per-slice worker processes
    ok = generate_bucket_xml_1(MetadataL, 1, Ref, PathLen, Path, Prefix, MaxKeys),
    TotalDivs = leo_math:ceiling(length(MetadataL) / ?DEF_MAX_NUM_OF_METADATAS),
    CallbackFun = fun(XMLList, NextMarker) ->
                          %% truncated when exactly MaxKeys entries came back
                          TruncatedStr = atom_to_list(length(MetadataL) =:= MaxKeys andalso MaxKeys =/= 0),
                          io_lib:format(?XML_OBJ_LIST,
                                        [xmerl_lib:export_text(Bucket),
                                         xmerl_lib:export_text(Prefix),
                                         integer_to_list(MaxKeys),
                                         XMLList,
                                         TruncatedStr,
                                         xmerl_lib:export_text(NextMarker)])
                  end,
    generate_bucket_xml_loop(Ref, TotalDivs, CallbackFun, []).
%% @private
%% @doc Render the service-level bucket-list XML, skipping the root
%%      ("/") bucket entry.
%% NOTE: the spec previously declared MetadataL::[#?METADATA{}], but the
%% fold destructures #?BUCKET{} records — callers pass the result of
%% leo_s3_bucket:find_buckets_by_id/1.
-spec(generate_bucket_xml(MetadataL) ->
             XMLRet when MetadataL::[#?BUCKET{}],
                         XMLRet::string()).
generate_bucket_xml(MetadataL) ->
    Fun = fun(#?BUCKET{name = BucketNameBin,
                       created_at = CreatedAt} , Acc) ->
                  BucketName = binary_to_list(BucketNameBin),
                  case string:equal(?STR_SLASH, BucketName) of
                      true ->
                          %% the root bucket is not listed
                          Acc;
                      false ->
                          lists:append([Acc,
                                        io_lib:format(?XML_BUCKET,
                                                      [xmerl_lib:export_text(BucketName),
                                                       leo_http:web_date(CreatedAt)])])
                  end
          end,
    io_lib:format(?XML_BUCKET_LIST, [lists:foldl(Fun, [], MetadataL)]).
%% @private
%% @doc Split the metadata list into slices of ?DEF_MAX_NUM_OF_METADATAS
%%      and spawn one worker per slice; each worker renders its entries
%%      to XML and sends {append, Ref, {Index, XMLList, NextMarker}}
%%      back to the caller (collected by generate_bucket_xml_loop/4).
%% @private
generate_bucket_xml_1([],_Index,_Ref,_PathLen,_Path,_Prefix,_MaxKeys) ->
    ok;
generate_bucket_xml_1(MetadataL, Index, Ref, PathLen, Path, Prefix, MaxKeys) ->
    {MetadataL_1, Rest} =
        case (length(MetadataL) >= ?DEF_MAX_NUM_OF_METADATAS) of
            true ->
                lists:split(?DEF_MAX_NUM_OF_METADATAS, MetadataL);
            false ->
                {MetadataL, []}
        end,
    PId = self(),
    spawn(fun() ->
                  %% NOTE(review): the fold fun only matches 'del = 0'
                  %% metadata — a deleted entry in the slice would crash
                  %% this worker; presumably callers never pass one.
                  Fun = fun(#?METADATA{key = EntryKeyBin,
                                       dsize = DSize,
                                       timestamp = Timestamp,
                                       checksum = Checksum,
                                       del = 0}, {Acc,_NextMarker}) ->
                                EntryKey = binary_to_list(EntryKeyBin),
                                case string:equal(Path, EntryKey) of
                                    true ->
                                        %% skip the listed directory itself
                                        {Acc,_NextMarker};
                                    false ->
                                        Entry = string:sub_string(EntryKey, PathLen + 1),
                                        case (DSize == -1) of
                                            %% directory
                                            true ->
                                                {lists:append(
                                                   [Acc,
                                                    io_lib:format(?XML_DIR_PREFIX,
                                                                  [xmerl_lib:export_text(Prefix),
                                                                   xmerl_lib:export_text(Entry)])]),
                                                 EntryKeyBin};
                                            %% object
                                            false ->
                                                {lists:append(
                                                   [Acc,
                                                    io_lib:format(?XML_OBJ_LIST_FILE_2,
                                                                  [xmerl_lib:export_text(Prefix),
                                                                   xmerl_lib:export_text(Entry),
                                                                   leo_http:web_date(Timestamp),
                                                                   leo_hex:integer_to_hex(Checksum, 32),
                                                                   integer_to_list(DSize)])]),
                                                 EntryKeyBin}
                                        end
                                end
                        end,
                  {XMLList, NextMarker} = lists:foldl(Fun, {[], <<>>}, MetadataL_1),
                  erlang:send(PId, {append, Ref, {Index, XMLList, NextMarker}})
          end),
    generate_bucket_xml_1(Rest, Index + 1, Ref, PathLen, Path, Prefix, MaxKeys).
%% @private
%% @doc Collect the workers' {append, Ref, ...} messages; once all
%%      TotalDivs slices arrived, sort them by index, concatenate the
%%      XML fragments (keeping the last slice's marker), and hand the
%%      result to CallbackFun. Times out with {error, timeout}.
%% @private
generate_bucket_xml_loop(_Ref, 0, CallbackFun, Acc) ->
    {XMLList_1, NextMarker_1} =
        lists:foldl(fun({_Index, XMLList, NextMarker}, {SoFar,_}) ->
                            {lists:append([SoFar, XMLList]), NextMarker}
                    end, {[], []}, lists:sort(Acc)),
    CallbackFun(XMLList_1, NextMarker_1);
generate_bucket_xml_loop(Ref, TotalDivs, CallbackFun, Acc) ->
    receive
        {append, Ref, {Index, XMLList, NextMarker}} ->
            generate_bucket_xml_loop(Ref, TotalDivs - 1,
                                     CallbackFun, [{Index, XMLList, NextMarker}|Acc]);
        _ ->
            %% unrelated message: ignore and keep waiting
            generate_bucket_xml_loop(Ref, TotalDivs, CallbackFun, Acc)
    after
        ?DEF_REQ_TIMEOUT ->
            {error, timeout}
    end.
%% @doc Generate XML from ACL
%% @private
-spec(generate_acl_xml(BucketInfo) ->
             XMLRet when BucketInfo::#?BUCKET{},
                         XMLRet::string()).
%% @doc Render the AccessControlPolicy XML for a bucket: each 'read'
%%      grant expands to READ + READ_ACP, each 'write' to WRITE +
%%      WRITE_ACP, plus FULL_CONTROL as-is.
generate_acl_xml(#?BUCKET{access_key_id = ID, acls = ACLs}) ->
    Fun = fun(#bucket_acl_info{user_id = URI,
                               permissions = Permissions} , Acc) ->
                  lists:foldl(
                    fun(read, Acc_1) ->
                            lists:flatten(
                              lists:append(
                                [Acc_1,
                                 io_lib:format(?XML_ACL_GRANT, [URI, ?acl_read]),
                                 io_lib:format(?XML_ACL_GRANT, [URI, ?acl_read_acp])
                                ]));
                       (write, Acc_1) ->
                            lists:flatten(
                              lists:append(
                                [Acc_1,
                                 io_lib:format(?XML_ACL_GRANT, [URI, ?acl_write]),
                                 io_lib:format(?XML_ACL_GRANT, [URI, ?acl_write_acp])
                                ]));
                       (full_control, Acc_1) ->
                            lists:append(
                              [Acc_1,
                               io_lib:format(?XML_ACL_GRANT, [URI, ?acl_full_control])])
                    end, Acc, Permissions)
          end,
    %% owner id is used both as Owner and as the grants' subject
    io_lib:format(?XML_ACL_POLICY, [ID, ID, lists:foldl(Fun, [], ACLs)]).
%% @doc Build the response XML for a Multiple-Object-Delete request.
%%      In quiet mode the per-key error elements are suppressed.
%% @private
-spec(generate_delete_multi_xml(IsQuiet, DeletedKeys, ErrorKeys) ->
             XMLRet when IsQuiet::boolean(),
                         DeletedKeys::[binary()],
                         ErrorKeys::[binary()],
                         XMLRet::string()).
generate_delete_multi_xml(IsQuiet, DeletedKeys, ErrorKeys) ->
    %% Error elements first: they are omitted entirely when quiet
    FailureXML = case IsQuiet of
                     true ->
                         [];
                     false ->
                         generate_delete_multi_xml_error_elem(ErrorKeys, [])
                 end,
    SuccessXML = generate_delete_multi_xml_deleted_elem(DeletedKeys, []),
    io_lib:format(?XML_MULTIPLE_DELETE, [SuccessXML, FailureXML]).
%% @doc Append one success element to the accumulator for every deleted key.
%% @private
generate_delete_multi_xml_deleted_elem(DeletedKeys, Acc) ->
    %% Fold instead of explicit recursion; '++' is identical to
    %% lists:append/1 on two lists, so the resulting iolist is unchanged.
    lists:foldl(fun(Key, SoFar) ->
                        SoFar ++ io_lib:format(?XML_MULTIPLE_DELETE_SUCCESS_ELEM, [Key])
                end, Acc, DeletedKeys).
%% @doc Append one error element to the accumulator for every failed key.
%%      BUGFIX: the non-empty clause previously recursed into
%%      generate_delete_multi_xml_deleted_elem/2, so every error key after
%%      the first was rendered with the success (<Deleted>) template.
%% @private
generate_delete_multi_xml_error_elem([], Acc) ->
    Acc;
generate_delete_multi_xml_error_elem([ErrorKey|Rest], Acc) ->
    %% Recurse into *this* function so all keys use the error template
    generate_delete_multi_xml_error_elem(
      Rest, lists:append([Acc,
                          io_lib:format(?XML_MULTIPLE_DELETE_ERROR_ELEM, [ErrorKey])])).
%% @doc Delete multiple objects, then parse request XML
%% @private
-spec(delete_multi_objects_2(Req, Body, MD5, MD5, Params) ->
             {ok, Req} when Req::cowboy_req:req(),
                            Body::binary(),
                            MD5::binary(),
                            Params::#req_params{}).
%% NOTE: the first clause only matches when the Content-MD5 header value and
%%       the MD5 computed from the request body are equal (both bound to the
%%       same variable 'MD5'); otherwise the second clause answers BadDigest.
delete_multi_objects_2(Req, Body, MD5, MD5, Params) ->
    %% xmerl accumulator: the first clause drops whitespace-only text nodes
    %% (the {Acc, Pos, State} 3-tuple return form of acc_fun); the second
    %% keeps every other node ({Acc, State} 2-tuple form).
    Acc = fun(#xmlText{value = " ", pos = P}, Acc, S) ->
                  {Acc, P, S};
             (X, Acc, S) ->
                  {[X|Acc], S}
          end,
    try
        {#xmlElement{content = Content},_} =
            xmerl_scan:string(binary_to_list(Body),
                              [{space,normalize}, {acc_fun, Acc}]),
        delete_multi_objects_3(Req, Content, false, [], Params)
    catch _:Cause ->
            %% Malformed XML or an unexpected document shape
            ?error("delete_multi_objects_2/5", [{req, Req}, {cause, Cause}]),
            ?reply_malformed_xml([?SERVER_HEADER], Req)
    end;
delete_multi_objects_2(Req, _Body, _MD5, _, _Params) ->
    %% Content-MD5 header did not match the body digest
    ?reply_bad_digest([?SERVER_HEADER], <<>>, <<>>, Req).
%% @doc Walk the parsed XML elements and collect every <Object>/<Key> value,
%%      ignoring version elements; a <Quiet> element switches quiet mode on.
%%      Hands the collected keys to the delete phase when the list is done.
%% @private
delete_multi_objects_3(Req, [], IsQuiet, KeyAcc, Params) ->
    delete_multi_objects_4(Req, IsQuiet, KeyAcc, [], [], Params);
delete_multi_objects_3(Req, [#xmlElement{name = 'Quiet'}|Remaining], _IsQuiet, KeyAcc, Params) ->
    %% Presence of <Quiet> forces quiet mode for the response
    delete_multi_objects_3(Req, Remaining, true, KeyAcc, Params);
delete_multi_objects_3(Req, [#xmlElement{name = 'Object', content = Children}|Remaining], IsQuiet, KeyAcc, Params) ->
    %% The first child element's first text node carries the object key
    [#xmlElement{content = TextNodes}|_] = Children,
    [#xmlText{value = Key}|_] = TextNodes,
    delete_multi_objects_3(Req, Remaining, IsQuiet, [Key|KeyAcc], Params);
delete_multi_objects_3(Req, [_|Remaining], IsQuiet, KeyAcc, Params) ->
    %% Any other element (e.g. versioning info) is skipped
    delete_multi_objects_3(Req, Remaining, IsQuiet, KeyAcc, Params).
%% @doc Issue delete requests for all keys by using leo_gateway_rpc_handler:delete
%%      Keys that delete cleanly (or are already gone) go to DeletedKeys;
%%      any HEAD/DELETE failure puts the key into ErrorKeys instead.
%% @private
delete_multi_objects_4(Req, IsQuiet, [], DeletedKeys, ErrorKeys, Params) ->
    %% Every key processed: build and send the response XML
    delete_multi_objects_5(Req, IsQuiet, DeletedKeys, ErrorKeys, Params);
delete_multi_objects_4(Req, IsQuiet, [Key|Rest], DeletedKeys, ErrorKeys,
                       #req_params{bucket_name = BucketName} = Params) ->
    %% Full storage path is "<bucket>/<key>"
    BinKey = list_to_binary(Key),
    Path = << BucketName/binary, <<"/">>/binary, BinKey/binary >>,
    case leo_gateway_rpc_handler:head(Path) of
        {ok, Meta} ->
            BeginTime = leo_date:clock(),
            case leo_gateway_rpc_handler:delete(Path) of
                ok ->
                    ?access_log_delete(BucketName, Path, Meta#?METADATA.dsize, ?HTTP_ST_NO_CONTENT, BeginTime),
                    delete_multi_objects_4(Req, IsQuiet, Rest,
                                           [Key|DeletedKeys], ErrorKeys, Params);
                {error, not_found} ->
                    %% Object vanished between HEAD and DELETE:
                    %% still counted as deleted
                    delete_multi_objects_4(Req, IsQuiet, Rest,
                                           [Key|DeletedKeys], ErrorKeys, Params);
                {error, _} ->
                    delete_multi_objects_4(Req, IsQuiet, Rest,
                                           DeletedKeys, [Key|ErrorKeys], Params)
            end;
        _ ->
            %% HEAD failed (missing object or RPC error):
            %% report the key in the error list
            delete_multi_objects_4(Req, IsQuiet, Rest,
                                   DeletedKeys, [Key|ErrorKeys], Params)
    end.
%% @doc Make the response XML from the outcome of the delete requests
%%      (version-related elements are not emitted) and reply 200 with it.
%% @private
delete_multi_objects_5(Req, IsQuiet, DeletedKeys, ErrorKeys, _Params) ->
    ResponseXML = generate_delete_multi_xml(IsQuiet, DeletedKeys, ErrorKeys),
    ResponseHeaders = [?SERVER_HEADER,
                       {?HTTP_HEAD_RESP_CONTENT_TYPE, ?HTTP_CTYPE_XML}],
    ?reply_ok(ResponseHeaders, ResponseXML, Req).
%% @doc Strip a single trailing slash from a bucket name, if present.
%%      (Crashes on an empty binary, exactly like binary:last/1 would.)
%% @private
-spec(formalize_bucket(BucketName) ->
             BucketName when BucketName::binary()).
formalize_bucket(BucketName) ->
    case binary:last(BucketName) of
        $/ ->
            %% Drop the final byte via a binary pattern match
            Len = byte_size(BucketName) - 1,
            << Stripped:Len/binary, _/binary >> = BucketName,
            Stripped;
        _ ->
            BucketName
    end.
%% @doc Render the opening part of an object-list XML document.
%% @private
generate_list_head_xml(BucketName, Prefix, MaxKeys, Delimiter) ->
    %% An empty delimiter falls back to the configured default
    ActualDelimiter = case Delimiter of
                          <<>> ->
                              ?DEF_DELIMITER;
                          _Other ->
                              Delimiter
                      end,
    FormatArgs = [xmerl_lib:export_text(BucketName),
                  xmerl_lib:export_text(Prefix),
                  integer_to_list(MaxKeys),
                  xmerl_lib:export_text(ActualDelimiter)],
    io_lib:format(?XML_OBJ_LIST_HEAD, FormatArgs).
%% @doc Render the closing part of an object-list XML document.
%% @private
generate_list_foot_xml(IsTruncated, NextMarker) ->
    %% S3 expects the literal strings "true"/"false" for IsTruncated
    Truncated = case IsTruncated of
                    true ->
                        <<"true">>;
                    false ->
                        <<"false">>
                end,
    FormatArgs = [Truncated,
                  xmerl_lib:export_text(NextMarker)],
    io_lib:format(?XML_OBJ_LIST_FOOT, FormatArgs).
%% @doc Render one list entry for a live (del == 0) object.
%%      Returns the atom 'error' for deleted metadata so callers can skip it.
%% @private
generate_list_file_xml(BucketName, #?METADATA{key = Key,
                                              dsize = DataSize,
                                              timestamp = Timestamp,
                                              checksum = Checksum,
                                              del = 0}) ->
    %% Strip the leading "<bucket>" portion from the stored key
    PrefixLen = byte_size(BucketName),
    << _:PrefixLen/binary, RelativeKey/binary >> = Key,
    io_lib:format(?XML_OBJ_LIST_FILE_1,
                  [xmerl_lib:export_text(RelativeKey),
                   leo_http:web_date(Timestamp),
                   leo_hex:integer_to_hex(Checksum, 32),
                   integer_to_list(DataSize)]);
generate_list_file_xml(_,_) ->
    error.
%% @doc Recursively find a key in the bucket
%%      Walks the metadata tree depth-first, streaming one XML entry per
%%      live object over the client socket until MaxKeys entries were sent
%%      or the tree is exhausted.
%% @private
-spec(recursive_find(BucketName, Redundancies, MetadataList,
                     Marker, MaxKeys, Transport, Socket) ->
             {ok, CanFindKey, LastKey} | {error, any()} when BucketName::binary(),
                                                             Redundancies::[#redundancies{}],
                                                             MetadataList::[#?METADATA{}],
                                                             Marker::binary(),
                                                             MaxKeys::non_neg_integer(),
                                                             Transport::atom(),
                                                             Socket::port(),
                                                             CanFindKey::boolean(),
                                                             LastKey::binary()).
recursive_find(BucketName, Redundancies, MetadataList,
               Marker, MaxKeys, Transport, Socket) ->
    recursive_find(BucketName, Redundancies, [], MetadataList,
                   Marker, MaxKeys, <<>>, Transport, Socket).

%% Worker clauses. Acc stacks the metadata lists still pending traversal
%% (the "rest" of each parent directory); LastKey is the key of the entry
%% most recently streamed to the socket.
%% 1) Budget exhausted: more entries may exist -> truncated
recursive_find(_BucketName, _Redundancies,_,_,_, 0, LastKey,_,_) ->
    {ok, true, LastKey};
%% 2) Current list and the pending stack are both empty -> done
recursive_find(_BucketName, _Redundancies,[],[],_,_,_,_,_) ->
    {ok, false, <<>>};
%% 3) Current list finished: pop the next pending list off the stack
recursive_find(BucketName, Redundancies, [Head|Rest], [],
               Marker, MaxKeys, LastKey, Transport, Socket) ->
    recursive_find(BucketName, Redundancies, Rest, Head,
                   Marker, MaxKeys, LastKey, Transport, Socket);
%% 4) Directory entry (dsize == -1): fetch its children from storage and
%%    descend into them, pushing the remainder of this level onto the stack
recursive_find(BucketName, Redundancies, Acc,
               [#?METADATA{dsize = -1, key = Key}|Rest],
               Marker, MaxKeys, LastKey, Transport, Socket) ->
    case leo_gateway_rpc_handler:invoke(Redundancies,
                                        leo_storage_handler_directory,
                                        find_by_parent_dir,
                                        [Key, ?BIN_SLASH, Marker, MaxKeys],
                                        []) of
        {ok, Metadata} when is_list(Metadata) ->
            recursive_find(BucketName, Redundancies, [Rest | Acc], Metadata,
                           Marker, MaxKeys, LastKey, Transport, Socket);
        {ok,_} ->
            {error, invalid_format};
        Error ->
            Error
    end;
%% 5) Regular object: stream its XML entry to the client socket
recursive_find(BucketName, Redundancies, Acc,
               [#?METADATA{key = Key} = Head|Rest],
               Marker, MaxKeys, LastKey, Transport, Socket) ->
    case generate_list_file_xml(BucketName, Head) of
        error ->
            %% Deleted object: skip it without consuming the MaxKeys budget.
            %% BUGFIX: this call used to pass `MaxKeys, MaxKeys', clobbering
            %% the binary Marker argument with the integer key budget.
            recursive_find(BucketName, Redundancies, Acc, Rest,
                           Marker, MaxKeys, LastKey, Transport, Socket);
        Bin ->
            case Transport:send(Socket, Bin) of
                ok ->
                    recursive_find(BucketName, Redundancies, Acc, Rest,
                                   Marker, MaxKeys - 1, Key, Transport, Socket);
                Error ->
                    Error
            end
    end.
%% @doc Parse custom metadata ("x-amz-meta-*") out of the request headers.
-spec(parse_headers_to_cmeta(Headers) ->
             {ok, Bin} | {error, Cause} when Headers::list(),
                                             Bin::binary(),
                                             Cause::any()).
parse_headers_to_cmeta(Headers) when is_list(Headers) ->
    %% Keep only user-defined metadata header pairs; the list is reversed
    %% afterwards to reproduce the prepend order of the original fold.
    Matched = [KV || {<<"x-amz-meta-", _/binary>>, _} = KV <- Headers],
    case Matched of
        [] ->
            %% No custom metadata present
            {ok, <<>>};
        _ ->
            {ok, term_to_binary(lists:reverse(Matched))}
    end;
parse_headers_to_cmeta(_) ->
    {error, badarg}.
| null | https://raw.githubusercontent.com/leo-project/leo_gateway/7bac8912b762688f0ef237b31fee17e30e3888a3/src/leo_gateway_s3_api.erl | erlang | ======================================================================
Leo S3 Handler
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
---------------------------------------------------------------------
@doc
@end
======================================================================
--------------------------------------------------------------------
API
--------------------------------------------------------------------
@doc Start cowboy's listeners
@doc Stop cowboy's listeners
@doc Initializer
@doc Handle a request
@callback
@doc Terminater
@doc Check whether request is valid or not
no date header needed
--------------------------------------------------------------------
Callbacks from Cowboy
--------------------------------------------------------------------
@doc Handle request
@doc Handle response
---------------------------------------------------------------------
Callbacks from HTTP-Handler
For BUCKET-OPERATION
---------------------------------------------------------------------
Normalize Marker
@doc Put a bucket
Consume CreateBucketConfiguration
@doc Remove a bucket
@doc Retrieve a bucket-info
---------------------------------------------------------------------
For OBJECT-OPERATION
---------------------------------------------------------------------
@doc GET operation on Objects
@doc GET operation on Objects
@doc utility func for getting x-amz-meta-directive correctly
return default - 'copy'
@doc POST/PUT operation on Objects
@doc handle MULTIPLE DELETE request
Check Content-MD5 with body
@doc POST/PUT operation on Objects. COPY/REPLACE
need to trim head '/' when cooperating with s3fs(-c)
@doc POST/PUT operation on Objects. COPY
@doc POST/PUT operation on Objects. REPLACE
@doc POST/PUT operation on `Large` Objects. COPY
@doc POST/PUT operation on Objects. REPLACE
@doc DELETE operation on Objects
@doc HEAD operation on Objects
@doc RANGE-Query operation on Objects
---------------------------------------------------------------------
Inner Functions
---------------------------------------------------------------------
@doc Create a key
@doc Handle an http-request
@doc Handle a request (sub)
For Multipart Upload - Initiation
remove a registered object with 'touch-command'
from the cache
Insert a metadata into the storage-cluster
Response xml to a client
for confirmation
for put a part of an object
For Regular cases
@doc Handle multi-upload processing
trim spaces
Retrieve the child object's metadata
to set the actual chunked length
@doc Retrieve Metadatas for uploaded objects (Multipart)
@doc Generate an upload-key
@doc Generate an update-initiate xml
@doc Generate an update-completion xml
@doc Generate copy-obj's xml
@doc Retrieve header values from a request
Set request params
?debug("request_params/2", "Is AWS Chunked: ~p", [IsAwsChunked]),
@doc check if bucket is public-read
@doc Authentication
NOTE:
- from s3cmd, dragondisk and others:
- Path: <<"photo/img">>
- RawURI: <<"/img">>
- from ruby-client, other AWS-clients:
- Path: <<"photo/img">>
- RawURI: <<"/photo/img">>
-> Adjust URI:
#sign_params{ requested_uri = << "/photo/img" >>
}
* the hash-value is calculated by "raw_uri"
@doc Get bucket list
@see
@doc Put a bucket
@doc Put a bucket ACL
@doc Delete a bucket
@doc Head a bucket
@doc Generate XML from matadata-list
directory
object
@doc Generate XML from ACL
@doc Delete multiple objects, then parse request XML
@doc Retrieve every keys (ignore version element)
@doc Issue delete requests for all keys by using leo_gateway_rpc_handler:delete
@doc Make response XML based on the result of delete requests (ignore version related elements)
@doc Recursively find a key in the bucket | Copyright ( c ) 2012 - 2015 Rakuten , Inc.
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
Leo Gateway S3 - API
-module(leo_gateway_s3_api).
-behaviour(leo_gateway_http_behaviour).
-export([start/2, stop/0,
init/3, handle/2, terminate/3]).
-export([onrequest/1, onresponse/1]).
-export([get_bucket/3, put_bucket/3, delete_bucket/3, head_bucket/3,
get_object/3, put_object/3, delete_object/3, head_object/3,
get_object_with_cache/4, range_object/3
]).
-include("leo_gateway.hrl").
-include("leo_http.hrl").
-include_lib("leo_commons/include/leo_commons.hrl").
-include_lib("leo_logger/include/leo_logger.hrl").
-include_lib("leo_object_storage/include/leo_object_storage.hrl").
-include_lib("leo_redundant_manager/include/leo_redundant_manager.hrl").
-include_lib("leo_s3_libs/include/leo_s3_auth.hrl").
-include_lib("leo_s3_libs/include/leo_s3_bucket.hrl").
-include_lib("leo_s3_libs/include/leo_s3_endpoint.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("xmerl/include/xmerl.hrl").
-compile({inline, [handle/2, handle_1/4, handle_2/6,
handle_multi_upload_1/8,
handle_multi_upload_2/6,
handle_multi_upload_3/3,
gen_upload_key/1, gen_upload_initiate_xml/3, gen_upload_completion_xml/4,
resp_copy_obj_xml/2, request_params/2, auth/5, auth/7, auth_1/7,
get_bucket_1/6, put_bucket_1/3, delete_bucket_1/2, head_bucket_1/2
]}).
-spec(start(Sup, HttpOptions) ->
ok | {error, Cause} when Sup::module(),
HttpOptions::[{atom(), any()}],
Cause::any()).
start(Sup, HttpOptions) ->
leo_gateway_http_commons:start(Sup, HttpOptions).
-spec(stop() ->
ok).
stop() ->
cowboy:stop_listener(?MODULE),
cowboy:stop_listener(list_to_atom(lists:append([?MODULE_STRING, "_ssl"]))),
ok.
init({_Any, http}, Req, Opts) ->
{ok, Req, Opts}.
-spec(handle(Req, State) ->
{ok, Req, State} when Req::cowboy_req:req(),
State::term()).
handle(Req, State) ->
case leo_watchdog_state:find_not_safe_items() of
not_found ->
{Host, _} = cowboy_req:host(Req),
Host header must be included even if a request with HTTP/1.0
case Host of
<<>> ->
{ok, Req2} = ?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_InvalidArgument,
?XML_ERROR_MSG_InvalidArgument, <<>>, <<>>, Req),
{ok, Req2, State};
_ ->
case check_request(Req) of
ok ->
{Bucket, Path} = get_bucket_and_path(Req),
handle_1(Req, State, Bucket, Path);
{error, Req2} ->
{ok, Req2, State}
end
end;
{ok, ErrorItems} ->
?debug("handle/2", "error-items:~p", [ErrorItems]),
{ok, Req2} = ?reply_service_unavailable_error([?SERVER_HEADER], <<>>, <<>>, Req),
{ok, Req2, State}
end.
terminate(_Reason, _Req, _State) ->
ok.
@private
-spec(check_request(Req) ->
ok | {error, Cause} when Req::cowboy_req:req(),
Cause::any()).
check_request(Req) ->
CheckList = [
fun check_bad_date/1
],
check_request(Req, CheckList).
@private
check_request(_Req, []) ->
ok;
check_request(Req, [CheckFun|Rest]) ->
case CheckFun(Req) of
{error, 400, Code, Msg} ->
{ok, Req2} = ?reply_bad_request([?SERVER_HEADER], Code, Msg, <<>>, <<>>, Req),
{error, Req2};
{error, 403, Code, Msg} ->
{ok, Req2} = ?reply_forbidden([?SERVER_HEADER], Code, Msg, <<>>, <<>>, Req),
{error, Req2};
_ ->
check_request(Req, Rest)
end.
@private
check_bad_date(Req) ->
case cowboy_req:header(?HTTP_HEAD_AUTHORIZATION, Req) of
{undefined, _} ->
ok;
_ ->
check_bad_date_1(Req)
end.
@private
check_bad_date_1(Req) ->
case cowboy_req:header(?HTTP_HEAD_DATE, Req) of
{undefined, _} ->
case cowboy_req:header(?HTTP_HRAD_X_AMZ_DATE, Req) of
{undefined, _} ->
{error, 403, ?XML_ERROR_CODE_AccessDenied, ?XML_ERROR_MSG_AccessDenied};
{Date, _} ->
check_bad_date_invalid(Date)
end;
{Date, _} ->
check_bad_date_invalid(Date)
end.
@private
check_bad_date_invalid(Date) ->
case catch cowboy_date:parse_date(Date) of
{error, badarg} ->
{error, 403, ?XML_ERROR_CODE_AccessDenied, ?XML_ERROR_MSG_AccessDenied};
{'EXIT', _} ->
{error, 403, ?XML_ERROR_CODE_AccessDenied, ?XML_ERROR_MSG_AccessDenied};
{{Y,_,_},_} ->
case (Y =< 2010 orelse 2030 =< Y) of
true ->
{error, 403, ?XML_ERROR_CODE_RequestTimeTooSkewed,
?XML_ERROR_MSG_RequestTimeTooSkewed};
_ ->
ok
end
end.
-spec(onrequest(CacheCondition) ->
Ret when CacheCondition::#cache_condition{},
Ret::any()).
onrequest(CacheCondition) ->
leo_gateway_http_commons:onrequest(CacheCondition, fun get_bucket_and_path/1).
-spec(onresponse(CacheCondition) ->
Ret when CacheCondition::#cache_condition{},
Ret::any()).
onresponse(CacheCondition) ->
leo_gateway_http_commons:onresponse(CacheCondition, fun get_bucket_and_path/1).
@doc GET buckets and dirs
-spec(get_bucket(Req, Key, ReqParams) ->
{ok, Req} when Req::cowboy_req:req(),
Key::binary(),
ReqParams::#req_params{}).
get_bucket(Req, Key, #req_params{access_key_id = AccessKeyId,
is_acl = false,
qs_prefix = Prefix}) ->
BeginTime = leo_date:clock(),
NormalizedMarker = case cowboy_req:qs_val(?HTTP_QS_BIN_MARKER, Req) of
{undefined,_} ->
<<>>;
{Marker,_} ->
Append $ BucketName/ at the beginning of as necessary
KeySize = size(Key),
case binary:match(Marker, Key) of
{0, KeySize} ->
Marker;
_Other ->
<< Key/binary, Marker/binary >>
end
end,
MaxKeys = case cowboy_req:qs_val(?HTTP_QS_BIN_MAXKEYS, Req) of
{undefined, _} ->
?DEF_S3API_MAX_KEYS;
{Val_2, _} ->
try
MaxKeys1 = binary_to_integer(Val_2),
erlang:min(MaxKeys1, ?HTTP_MAXKEYS_LIMIT)
catch _:_ ->
?DEF_S3API_MAX_KEYS
end
end,
Delimiter = case cowboy_req:qs_val(?HTTP_QS_BIN_DELIMITER, Req) of
{undefined, _} -> none;
{Val, _} ->
Val
end,
PrefixBin = case Prefix of
none ->
<<>>;
_ ->
Prefix
end,
case get_bucket_1(AccessKeyId, Key, Delimiter, NormalizedMarker, MaxKeys, Prefix) of
{ok, XMLRet} ->
?access_log_bucket_get(Key, PrefixBin, ?HTTP_ST_OK, BeginTime),
Header = [?SERVER_HEADER,
{?HTTP_HEAD_RESP_CONTENT_TYPE, ?HTTP_CTYPE_XML}],
?reply_ok(Header, XMLRet, Req);
{error, badarg} ->
?access_log_bucket_get(Key, PrefixBin, ?HTTP_ST_BAD_REQ, BeginTime),
?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_InvalidArgument,
?XML_ERROR_MSG_InvalidArgument, Key, <<>>, Req);
{error, not_found} ->
?access_log_bucket_get(Key, PrefixBin, ?HTTP_ST_NOT_FOUND, BeginTime),
?reply_not_found([?SERVER_HEADER], Key, <<>>, Req);
{error, unavailable} ->
?access_log_bucket_get(Key, PrefixBin, ?HTTP_ST_SERVICE_UNAVAILABLE, BeginTime),
?reply_service_unavailable_error([?SERVER_HEADER], Key, <<>>, Req);
{error, ?ERR_TYPE_INTERNAL_ERROR} ->
?access_log_bucket_get(Key, PrefixBin, ?HTTP_ST_INTERNAL_ERROR, BeginTime),
?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req);
{error, timeout} ->
?access_log_bucket_get(Key, PrefixBin, ?HTTP_ST_SERVICE_UNAVAILABLE, BeginTime),
?reply_timeout([?SERVER_HEADER], Key, <<>>, Req)
end;
get_bucket(Req, Bucket, #req_params{access_key_id = _AccessKeyId,
is_acl = true}) ->
Bucket_2 = formalize_bucket(Bucket),
case leo_s3_bucket:find_bucket_by_name(Bucket_2) of
{ok, BucketInfo} ->
XML = generate_acl_xml(BucketInfo),
Header = [?SERVER_HEADER,
{?HTTP_HEAD_RESP_CONTENT_TYPE, ?HTTP_CTYPE_XML}],
?reply_ok(Header, XML, Req);
not_found ->
?reply_not_found([?SERVER_HEADER], Bucket_2, <<>>, Req);
{error, _Cause} ->
?reply_internal_error([?SERVER_HEADER], Bucket_2, <<>>, Req)
end.
-spec(put_bucket(Req, Key, ReqParams) ->
{ok, Req} when Req::cowboy_req:req(),
Key::binary(),
ReqParams::#req_params{}).
put_bucket(Req, Key, #req_params{access_key_id = AccessKeyId,
is_acl = false}) ->
BeginTime = leo_date:clock(),
Bucket = formalize_bucket(Key),
CannedACL = string:to_lower(binary_to_list(?http_header(Req, ?HTTP_HEAD_X_AMZ_ACL))),
Req_1 = case cowboy_req:has_body(Req) of
false ->
Req;
true ->
{ok, _Bin_2, Req_2} = cowboy_req:body(Req),
Req_2
end,
case put_bucket_1(CannedACL, AccessKeyId, Bucket) of
ok ->
?access_log_bucket_put(Bucket, ?HTTP_ST_OK, BeginTime),
?reply_ok([?SERVER_HEADER], Req_1);
{error, ?ERR_TYPE_INTERNAL_ERROR} ->
?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req_1);
{error, invalid_bucket_format} ->
?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_InvalidBucketName,
?XML_ERROR_MSG_InvalidBucketName, Key, <<>>, Req_1);
{error, invalid_access} ->
?reply_forbidden([?SERVER_HEADER], ?XML_ERROR_CODE_AccessDenied,
?XML_ERROR_MSG_AccessDenied, Key, <<>>, Req);
{error, already_exists} ->
?reply_conflict([?SERVER_HEADER], ?XML_ERROR_CODE_BucketAlreadyExists,
?XML_ERROR_MSG_BucketAlreadyExists, Key, <<>>, Req_1);
{error, already_yours} ->
?reply_conflict([?SERVER_HEADER], ?XML_ERROR_CODE_BucketAlreadyOwnedByYou,
?XML_ERROR_MSG_BucketAlreadyOwnedByYou, Key, <<>>, Req_1);
{error, timeout} ->
?reply_timeout([?SERVER_HEADER], Key, <<>>, Req_1)
end;
put_bucket(Req, Key, #req_params{access_key_id = AccessKeyId,
is_acl = true}) ->
Bucket = formalize_bucket(Key),
CannedACL = string:to_lower(binary_to_list(?http_header(Req, ?HTTP_HEAD_X_AMZ_ACL))),
case put_bucket_acl_1(CannedACL, AccessKeyId, Bucket) of
ok ->
?reply_ok([?SERVER_HEADER], Req);
{error, not_supported} ->
?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_InvalidArgument,
?XML_ERROR_MSG_InvalidArgument, Key, <<>>, Req);
{error, invalid_access} ->
?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_AccessDenied,
?XML_ERROR_MSG_AccessDenied, Key, <<>>, Req);
{error, _} ->
?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req)
end.
-spec(delete_bucket(Req, Key, ReqParams) ->
{ok, Req} when Req::cowboy_req:req(),
Key::binary(),
ReqParams::#req_params{}).
delete_bucket(Req, Key, #req_params{access_key_id = AccessKeyId}) ->
BeginTime = leo_date:clock(),
Bucket = formalize_bucket(Key),
case delete_bucket_1(AccessKeyId, Key) of
ok ->
?access_log_bucket_delete(Bucket, ?HTTP_ST_NO_CONTENT, BeginTime),
?reply_no_content([?SERVER_HEADER], Req);
not_found ->
?access_log_bucket_delete(Bucket, ?HTTP_ST_NOT_FOUND, BeginTime),
?reply_not_found([?SERVER_HEADER], Key, <<>>, Req);
{error, timeout} ->
?access_log_bucket_delete(Bucket, ?HTTP_ST_SERVICE_UNAVAILABLE, BeginTime),
?reply_timeout_without_body([?SERVER_HEADER], Req);
{error, _} ->
?access_log_bucket_delete(Bucket, ?HTTP_ST_INTERNAL_ERROR, BeginTime),
?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req)
end.
-spec(head_bucket(Req, Key, ReqParams) ->
{ok, Req} when Req::cowboy_req:req(),
Key::binary(),
ReqParams::#req_params{}).
head_bucket(Req, Key, #req_params{access_key_id = AccessKeyId}) ->
BeginTime = leo_date:clock(),
Bucket = formalize_bucket(Key),
case head_bucket_1(AccessKeyId, Bucket) of
ok ->
?access_log_bucket_head(Bucket, ?HTTP_ST_OK, BeginTime),
?reply_ok([?SERVER_HEADER], Req);
not_found ->
?access_log_bucket_head(Bucket, ?HTTP_ST_NOT_FOUND, BeginTime),
?reply_not_found_without_body([?SERVER_HEADER], Req);
{error, timeout} ->
?access_log_bucket_head(Bucket, ?HTTP_ST_SERVICE_UNAVAILABLE, BeginTime),
?reply_timeout_without_body([?SERVER_HEADER], Req);
{error, _} ->
?access_log_bucket_delete(Bucket, ?HTTP_ST_INTERNAL_ERROR, BeginTime),
?reply_internal_error_without_body([?SERVER_HEADER], Req)
end.
-spec(get_object(Req, Key, ReqParams) ->
{ok, Req} when Req::cowboy_req:req(),
Key::binary(),
ReqParams::#req_params{}).
get_object(Req, Key, Params) ->
leo_gateway_http_commons:get_object(Req, Key, Params).
-spec(get_object_with_cache(Req, Key, CacheObj, ReqParams) ->
{ok, Req} when Req::cowboy_req:req(),
Key::binary(),
CacheObj::#cache{},
ReqParams::#req_params{}).
get_object_with_cache(Req, Key, CacheObj, Params) ->
leo_gateway_http_commons:get_object_with_cache(Req, Key, CacheObj, Params).
-spec(get_x_amz_meta_directive(Req) ->
Ret when Req::cowboy_req:req(),
Ret::binary()).
get_x_amz_meta_directive(Req) ->
Directive = ?http_header(Req, ?HTTP_HEAD_X_AMZ_META_DIRECTIVE),
get_x_amz_meta_directive(Req, Directive).
@private
get_x_amz_meta_directive(Req, ?BIN_EMPTY) ->
CS = ?http_header(Req, ?HTTP_HEAD_X_AMZ_COPY_SOURCE),
case CS of
?BIN_EMPTY ->
?BIN_EMPTY;
_ ->
?HTTP_HEAD_X_AMZ_META_DIRECTIVE_COPY
end;
get_x_amz_meta_directive(_Req, Other) ->
Other.
-spec(put_object(Req, Key, ReqParams) ->
{ok, Req} when Req::cowboy_req:req(),
Key::binary(),
ReqParams::#req_params{}).
put_object(Req, Key, Params) ->
put_object(get_x_amz_meta_directive(Req), Req, Key, Params).
-spec(put_object(Directive, Req, Key, ReqParams) ->
{ok, Req} when Directive::binary(),
Req::cowboy_req:req(),
Key::binary(),
ReqParams::#req_params{}).
put_object(?BIN_EMPTY, Req, _Key, #req_params{is_multi_delete = true,
timeout_for_body = Timeout4Body,
transfer_decode_fun = TransferDecodeFun,
transfer_decode_state = TransferDecodeState} = Params) ->
BodyOpts = case TransferDecodeFun of
undefined ->
[{read_timeout, Timeout4Body}];
_ ->
[{read_timeout, Timeout4Body},
{transfer_decode, TransferDecodeFun, TransferDecodeState}]
end,
case cowboy_req:body(Req, BodyOpts) of
{ok, Body, Req1} ->
ContentMD5 = ?http_header(Req, ?HTTP_HEAD_CONTENT_MD5),
CalculatedMD5 = base64:encode(crypto:hash(md5, Body)),
delete_multi_objects_2(Req1, Body, ContentMD5, CalculatedMD5, Params);
{error, _Cause} ->
?reply_malformed_xml([?SERVER_HEADER], Req)
end;
put_object(?BIN_EMPTY, Req, Key, Params) ->
case catch cowboy_req:body_length(Req) of
{'EXIT', _} ->
?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_InvalidArgument,
?XML_ERROR_MSG_InvalidArgument, Key, <<>>, Req);
{BodySize, _} ->
Size = case cowboy_req:header(?HTTP_HEAD_X_AMZ_DECODED_CONTENT_LENGTH, Req) of
{undefined,_} ->
BodySize;
{Val,_} ->
binary_to_integer(Val)
end,
case (Size >= Params#req_params.threshold_of_chunk_len) of
true when Size >= Params#req_params.max_len_of_obj ->
?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_EntityTooLarge,
?XML_ERROR_MSG_EntityTooLarge, Key, <<>>, Req);
true when Params#req_params.is_upload == false ->
leo_gateway_http_commons:put_large_object(Req, Key, Size, Params);
false ->
Ret = case cowboy_req:has_body(Req) of
true ->
TransferDecodeFun = Params#req_params.transfer_decode_fun,
TransferDecodeState = Params#req_params.transfer_decode_state,
Timeout4Body = Params#req_params.timeout_for_body,
BodyOpts = case TransferDecodeFun of
undefined ->
[{read_timeout, Timeout4Body}];
_ ->
[{read_timeout, Timeout4Body},
{transfer_decode, TransferDecodeFun, TransferDecodeState}]
end,
case cowboy_req:body(Req, BodyOpts) of
{ok, Bin, Req1} ->
{ok, {Size, Bin, Req1}};
{error, Cause} ->
{error, Cause}
end;
false ->
{ok, {0, ?BIN_EMPTY, Req}}
end,
leo_gateway_http_commons:put_small_object(Ret, Key, Params)
end
end;
@private
put_object(Directive, Req, Key, #req_params{handler = ?PROTO_HANDLER_S3,
custom_metadata = CMetaBin1} = Params) ->
CS = cow_qs:urldecode(?http_header(Req, ?HTTP_HEAD_X_AMZ_COPY_SOURCE)),
CS2 = case binary:part(CS, {0, 1}) of
?BIN_SLASH ->
binary:part(CS, {1, byte_size(CS) -1});
_ ->
CS
end,
case (Key =:= CS2) of
true ->
400
?reply_bad_request([?SERVER_HEADER], ?XML_ERROR_CODE_InvalidRequest,
?XML_ERROR_MSG_InvalidRequest, Key, <<>>, Req);
false ->
case leo_gateway_rpc_handler:get(CS2) of
{ok, Meta, RespObject} ->
CMetaBin = case Directive of
?HTTP_HEAD_X_AMZ_META_DIRECTIVE_COPY ->
Meta#?METADATA.meta;
_ ->
CMetaBin1
end,
case Meta#?METADATA.cnumber of
0 ->
put_object_1(Directive, Req, Key, Meta, RespObject, Params#req_params{custom_metadata = CMetaBin});
_TotalChunkedObjs ->
put_large_object_1(Directive, Req, Key, Meta, Params#req_params{custom_metadata = CMetaBin})
end;
{error, not_found} ->
?reply_not_found([?SERVER_HEADER], Key, <<>>, Req);
{error, unavailable} ->
?reply_service_unavailable_error([?SERVER_HEADER], Key, <<>>, Req);
{error, ?ERR_TYPE_INTERNAL_ERROR} ->
?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req);
{error, timeout} ->
?reply_timeout([?SERVER_HEADER], Key, <<>>, Req)
end
end.
@private
put_object_1(Directive, Req, Key, Meta, Bin, #req_params{bucket_name = BucketName,
bucket_info = BucketInfo,
custom_metadata = CMetaBin} = Params) ->
BeginTime = leo_date:clock(),
Size = size(Bin),
case leo_gateway_rpc_handler:put(#put_req_params{path = Key,
body = Bin,
meta = CMetaBin,
dsize = Size,
msize = byte_size(CMetaBin),
bucket_info = BucketInfo}) of
{ok, _ETag} when Directive == ?HTTP_HEAD_X_AMZ_META_DIRECTIVE_COPY ->
?access_log_put(BucketName, Key, Size, ?HTTP_ST_OK, BeginTime),
resp_copy_obj_xml(Req, Meta);
{ok, _ETag} when Directive == ?HTTP_HEAD_X_AMZ_META_DIRECTIVE_REPLACE ->
put_object_2(Req, Key, Meta, Params);
{error, unavailable} ->
?reply_service_unavailable_error([?SERVER_HEADER], Key, <<>>, Req);
{error, ?ERR_TYPE_INTERNAL_ERROR} ->
?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req);
{error, timeout} ->
?reply_timeout([?SERVER_HEADER], Key, <<>>, Req)
end.
@private
put_object_2(Req, Key, Meta, Params) ->
case Key == Meta#?METADATA.key of
true ->
resp_copy_obj_xml(Req, Meta);
false ->
put_object_3(Req, Meta, Params)
end.
@private
put_object_3(Req, #?METADATA{key = Key, dsize = Size} = Meta, #req_params{bucket_name = BucketName}) ->
BeginTime = leo_date:clock(),
case leo_gateway_rpc_handler:delete(Meta#?METADATA.key) of
ok ->
?access_log_delete(BucketName, Key, Size, ?HTTP_ST_NO_CONTENT, BeginTime),
resp_copy_obj_xml(Req, Meta);
{error, not_found} ->
resp_copy_obj_xml(Req, Meta);
{error, unavailable} ->
?reply_service_unavailable_error([?SERVER_HEADER], Meta#?METADATA.key, <<>>, Req);
{error, ?ERR_TYPE_INTERNAL_ERROR} ->
?reply_internal_error([?SERVER_HEADER], Meta#?METADATA.key, <<>>, Req);
{error, timeout} ->
?reply_timeout([?SERVER_HEADER], Meta#?METADATA.key, <<>>, Req)
end.
@private
put_large_object_1(Directive, Req, Key, Meta, Params) ->
case leo_gateway_http_commons:move_large_object(Meta, Key, Params) of
ok when Directive == ?HTTP_HEAD_X_AMZ_META_DIRECTIVE_COPY ->
resp_copy_obj_xml(Req, Meta);
ok when Directive == ?HTTP_HEAD_X_AMZ_META_DIRECTIVE_REPLACE ->
put_large_object_2(Req, Key, Meta);
{error, timeout} ->
?reply_timeout([?SERVER_HEADER], Key, <<>>, Req);
{error, _Other} ->
?reply_internal_error([?SERVER_HEADER], Key, <<>>, Req)
end.
@private
put_large_object_2(Req, Key, Meta) ->
case Key == Meta#?METADATA.key of
true ->
resp_copy_obj_xml(Req, Meta);
false ->
put_large_object_3(Req, Meta)
end.
@private
put_large_object_3(Req, Meta) ->
leo_large_object_commons:delete_chunked_objects(Meta#?METADATA.key),
catch leo_gateway_rpc_handler:delete(Meta#?METADATA.key),
resp_copy_obj_xml(Req, Meta).
-spec(delete_object(cowboy_req:req(), binary(), #req_params{}) ->
{ok, cowboy_req:req()}).
delete_object(Req, Key, Params) ->
leo_gateway_http_commons:delete_object(Req, Key, Params).
-spec(head_object(cowboy_req:req(), binary(), #req_params{}) ->
{ok, cowboy_req:req()}).
head_object(Req, Key, Params) ->
leo_gateway_http_commons:head_object(Req, Key, Params).
-spec(range_object(cowboy_req:req(), binary(), #req_params{}) ->
{ok, cowboy_req:req()}).
range_object(Req, Key, Params) ->
leo_gateway_http_commons:range_object(Req, Key, Params).
@private
-spec(get_bucket_and_path(Req) ->
{ok, Ret} when Req::cowboy_req:req(),
Ret::{binary(), binary()}).
get_bucket_and_path(Req) ->
{RawPath, _} = cowboy_req:path(Req),
Path = cow_qs:urldecode(RawPath),
get_bucket_and_path(Req, Path).
@private
get_bucket_and_path(Req, Path) ->
EndPoints_2 = case leo_s3_endpoint:get_endpoints() of
{ok, EndPoints_1} ->
[Ep || #endpoint{endpoint = Ep} <- EndPoints_1];
_ ->
[]
end,
{Host,_} = cowboy_req:host(Req),
leo_http:key(EndPoints_2, Host, Path).
%% @doc First-stage request handler: derives per-request flags (directory
%%      semantics, ACL sub-resource, prefix query), builds #req_params{},
%%      authenticates the request, optionally installs the aws-chunked
%%      transfer decoder, and dispatches to handle_2/6.
%% @private
-spec(handle_1(Req, State, BucketName, Path) ->
             {ok, Req, State} when Req::cowboy_req:req(),
                                   State::[any()],
                                   BucketName::binary(),
                                   Path::binary()).
handle_1(Req, [{NumOfMinLayers, NumOfMaxLayers},
               HasInnerCache, CustomHeaderSettings, Props] = State, BucketName, Path) ->
    %% Last byte of the path; a trailing "/" marks a directory-style key
    BinPart = binary:part(Path, {byte_size(Path)-1, 1}),
    TokenLen = length(binary:split(Path, [?BIN_SLASH], [global, trim])),
    HTTPMethod = cowboy_req:get(method, Req),

    %% A "prefix" query parameter forces directory semantics and appends a
    %% trailing slash to the path if one is not already present
    {Prefix, IsDir, Path_1, Req_2} =
        case cowboy_req:qs_val(?HTTP_HEAD_PREFIX, Req) of
            {undefined, Req_1} ->
                {none, (TokenLen == 1 orelse ?BIN_SLASH == BinPart), Path, Req_1};
            {BinParam, Req_1} ->
                NewPath = case BinPart of
                              ?BIN_SLASH ->
                                  Path;
                              _ ->
                                  << Path/binary, ?BIN_SLASH/binary >>
                          end,
                {BinParam, true, NewPath, Req_1}
        end,

    %% Presence of "?acl" in the query string marks an ACL operation
    IsACL = case cowboy_req:qs_val(?HTTP_QS_BIN_ACL, Req_2) of
                {undefined, _} ->
                    false;
                _ ->
                    true
            end,

    ReqParams = request_params(Req_2,
                               #req_params{
                                  handler = ?MODULE,
                                  path = Path_1,
                                  bucket_name = BucketName,
                                  token_length = TokenLen,
                                  min_layers = NumOfMinLayers,
                                  max_layers = NumOfMaxLayers,
                                  qs_prefix = Prefix,
                                  has_inner_cache = HasInnerCache,
                                  is_cached = true,
                                  is_dir = IsDir,
                                  is_acl = IsACL,
                                  max_chunked_objs = Props#http_options.max_chunked_objs,
                                  max_len_of_obj = Props#http_options.max_len_of_obj,
                                  chunked_obj_len = Props#http_options.chunked_obj_len,
                                  custom_header_settings = CustomHeaderSettings,
                                  timeout_for_header = Props#http_options.timeout_for_header,
                                  timeout_for_body = Props#http_options.timeout_for_body,
                                  sending_chunked_obj_len = Props#http_options.sending_chunked_obj_len,
                                  reading_chunked_obj_len = Props#http_options.reading_chunked_obj_len,
                                  threshold_of_chunk_len = Props#http_options.threshold_of_chunk_len}),
    case ReqParams of
        {error, metadata_too_large} ->
            %% Custom metadata from the request headers exceeded the limit
            {ok, Req_3} = ?reply_metadata_too_large([?SERVER_HEADER], Path_1, <<>>, Req_2),
            {ok, Req_3, State};
        _ ->
            AuthRet = auth(Req_2, HTTPMethod, Path_1, TokenLen, ReqParams),
            %% handle_2/6 only needs the access-key id; the signature
            %% parameters are consumed below for aws-chunked decoding
            AuthRet_2 = case AuthRet of
                            {error, Reason} ->
                                {error, Reason};
                            {ok, AccessKeyId, _} ->
                                {ok, AccessKeyId}
                        end,
            %% For aws-chunked uploads (SigV4 streaming), install the
            %% transfer decoder seeded with the seed signature so each
            %% chunk's signature can be verified while streaming
            ReqParams_2 = case ReqParams#req_params.is_aws_chunked of
                              true ->
                                  case AuthRet of
                                      {ok, _, SignParams} ->
                                          {Signature, SignHead, SignKey} =
                                              case SignParams of
                                                  undefined ->
                                                      {undefined, undefined, undefined};
                                                  _ ->
                                                      SignParams
                                              end,
                                          AWSChunkSignParams = #aws_chunk_sign_params{
                                                                  sign_head = SignHead,
                                                                  sign_key = SignKey,
                                                                  prev_sign = Signature,
                                                                  chunk_sign = <<>>},
                                          AWSChunkDecState = #aws_chunk_decode_state{
                                                                buffer = <<>>,
                                                                dec_state = wait_size,
                                                                chunk_offset = 0,
                                                                sign_params = AWSChunkSignParams,
                                                                total_len = 0},
                                          ReqParams#req_params{
                                            transfer_decode_fun = fun aws_chunk_decode/2,
                                            transfer_decode_state = AWSChunkDecState};
                                      _ ->
                                          ReqParams
                                  end;
                              _ ->
                                  ReqParams
                          end,
            handle_2(AuthRet_2, Req_2, HTTPMethod, Path_1, ReqParams_2, State)
    end.
%% @doc Second-stage dispatcher. Error tuples from authentication are
%%      mapped to S3 error responses; authenticated requests are either
%%      handled here (multipart-upload control flow) or forwarded to
%%      leo_gateway_http_req_handler. Clause order is significant: the
%%      multipart clauses must be tried before the generic fall-through.
%% @private
-spec(handle_2(Ret, Req, HttpVerb, Path, ReqParams, State) ->
             {ok, Req, State} when Ret::{ok, AccessKeyId} | {error, Cause},
                                   AccessKeyId::binary(),
                                   Cause::any(),
                                   Req::cowboy_req:req(),
                                   HttpVerb::binary(),
                                   Path::binary(),
                                   ReqParams::#req_params{},
                                   State::[any()]).
%% Signature mismatch -> 403 SignatureDoesNotMatch
handle_2({error, unmatch}, Req,_HttpVerb, Key,_ReqParams, State) ->
    {ok, Req_2} = ?reply_forbidden([?SERVER_HEADER],
                                   ?XML_ERROR_CODE_SignatureDoesNotMatch,
                                   ?XML_ERROR_MSG_SignatureDoesNotMatch, Key, <<>>, Req),
    {ok, Req_2, State};
%% Unknown access key / bucket -> 404
handle_2({error, not_found}, Req,_HttpVerb, Key,_ReqParams, State) ->
    {ok, Req_2} = ?reply_not_found([?SERVER_HEADER], Key, <<>>, Req),
    {ok, Req_2, State};
%% Re-creating one's own bucket -> 409 BucketAlreadyOwnedByYou
handle_2({error, already_yours}, Req,_HttpVerb, Key,_ReqParams, State) ->
    {ok, Req_2} = ?reply_conflict([?SERVER_HEADER], ?XML_ERROR_CODE_BucketAlreadyOwnedByYou,
                                  ?XML_ERROR_MSG_BucketAlreadyOwnedByYou, Key, <<>>, Req),
    {ok, Req_2, State};
%% Any other auth failure -> 403 AccessDenied
handle_2({error, _Cause}, Req,_HttpVerb, Key,_ReqParams,State) ->
    {ok, Req_2} = ?reply_forbidden([?SERVER_HEADER],
                                   ?XML_ERROR_CODE_AccessDenied,
                                   ?XML_ERROR_MSG_AccessDenied, Key, <<>>, Req),
    {ok, Req_2, State};

%% Multipart upload - initiation (POST ...?uploads): store a marker object
%% "<path>\n<upload-id>" carrying the custom metadata, and return the
%% InitiateMultipartUpload XML
handle_2({ok,_AccessKeyId}, Req, ?HTTP_POST,_Key, #req_params{bucket_info = BucketInfo,
                                                              custom_metadata = CMetaBin,
                                                              path = Path,
                                                              is_upload = true}, State) ->
    %% Best-effort cache invalidation for the target path
    catch leo_cache_api:delete(Path),
    %% Upload-id is the MD5 of (path ++ current time), hex-encoded
    NowBin = list_to_binary(integer_to_list(leo_date:now())),
    UploadId = leo_hex:binary_to_hex(
                 crypto:hash(md5, << Path/binary, NowBin/binary >>)),
    UploadIdBin = list_to_binary(UploadId),
    UploadKey = << Path/binary, ?STR_NEWLINE, UploadIdBin/binary >>,

    {ok, Req_2} =
        case leo_gateway_rpc_handler:put(#put_req_params{path = UploadKey,
                                                         body = ?BIN_EMPTY,
                                                         meta = CMetaBin,
                                                         dsize = 0,
                                                         msize = byte_size(CMetaBin),
                                                         bucket_info = BucketInfo}) of
            {ok, _ETag} ->
                [BucketName|Path_1] = leo_misc:binary_tokens(Path, ?BIN_SLASH),
                XML = gen_upload_initiate_xml(BucketName, Path_1, UploadId),
                ?reply_ok([?SERVER_HEADER], XML, Req);
            {error, unavailable} ->
                ?reply_service_unavailable_error([?SERVER_HEADER], Path, <<>>, Req);
            {error, timeout} ->
                ?reply_timeout([?SERVER_HEADER], Path, <<>>, Req);
            {error, Cause} ->
                ?error("handle_2/6", [{key, binary_to_list(Path)}, {cause, Cause}]),
                ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req)
        end,
    {ok, Req_2, State};

For Multipart Upload - Upload a part of an object
@private
%% Part number beyond the configured maximum -> 400 EntityTooLarge
handle_2({ok,_AccessKeyId}, Req, ?HTTP_PUT, Key,
         #req_params{upload_id = UploadId,
                     upload_part_num = PartNum,
                     max_chunked_objs = MaxChunkedObjs}, State) when UploadId /= <<>>,
                                                                    PartNum > MaxChunkedObjs ->
    {ok, Req_2} = ?reply_bad_request([?SERVER_HEADER],
                                     ?XML_ERROR_CODE_EntityTooLarge,
                                     ?XML_ERROR_MSG_EntityTooLarge,
                                     Key, <<>>, Req),
    {ok, Req_2, State};

%% Upload one part: verify the upload marker "<path>\n<upload-id>" exists,
%% then store the part under "<path>\n<part-num>"
handle_2({ok,_AccessKeyId}, Req, ?HTTP_PUT,_Key,
         #req_params{path = Path,
                     is_upload = false,
                     upload_id = UploadId,
                     upload_part_num = PartNum1} = Params, State) when UploadId /= <<>>,
                                                                      PartNum1 /= 0 ->
    PartNum2 = list_to_binary(integer_to_list(PartNum1)),
    Key1 = << Path/binary, ?STR_NEWLINE, UploadId/binary >>,
    Key2 = << Path/binary, ?STR_NEWLINE, PartNum2/binary >>,

    {ok, Req_2} =
        case leo_gateway_rpc_handler:head(Key1) of
            {ok, _Metadata} ->
                put_object(?BIN_EMPTY, Req, Key2, Params);
            {error, not_found} ->
                ?reply_not_found([?SERVER_HEADER], Path, <<>>, Req);
            {error, unavailable} ->
                ?reply_service_unavailable_error(
                   [?SERVER_HEADER], Path, <<>>, Req);
            {error, timeout} ->
                ?reply_timeout([?SERVER_HEADER], Path, <<>>, Req);
            {error, ?ERR_TYPE_INTERNAL_ERROR} ->
                ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req)
        end,
    {ok, Req_2, State};

%% Abort multipart upload (DELETE with upload-id): overwrite the object
%% with an empty one, then delete it and its part namespace
handle_2({ok,_AccessKeyId}, Req, ?HTTP_DELETE,_Key,
         #req_params{bucket_info = BucketInfo,
                     path = Path,
                     upload_id = UploadId}, State) when UploadId /= <<>> ->
    _ = leo_gateway_rpc_handler:put(#put_req_params{path = Path,
                                                    body = ?BIN_EMPTY,
                                                    dsize = 0,
                                                    bucket_info = BucketInfo}),
    _ = leo_gateway_rpc_handler:delete(Path),
    _ = leo_gateway_rpc_handler:delete(<< Path/binary, ?STR_NEWLINE >>),
    {ok, Req_2} = ?reply_no_content([?SERVER_HEADER], Req),
    {ok, Req_2, State};

For Multipart Upload - Completion
%% Complete multipart upload (POST with upload-id, no part number)
handle_2({ok,_AccessKeyId}, Req, ?HTTP_POST,_Key,
         #req_params{bucket_info = BucketInfo,
                     path = Path,
                     chunked_obj_len = ChunkedLen,
                     is_upload = false,
                     upload_id = UploadId,
                     upload_part_num = PartNum,
                     transfer_decode_fun = TransferDecodeFun,
                     transfer_decode_state = TransferDecodeState}, State) when UploadId /= <<>>,
                                                                              PartNum == 0 ->
    Res = cowboy_req:has_body(Req),
    {ok, Req_2} = handle_multi_upload_1(
                    Res, Req, Path, UploadId,
                    ChunkedLen, TransferDecodeFun, TransferDecodeState, BucketInfo),
    {ok, Req_2, State};

%% Any other POST is treated as PUT (browser-form uploads)
handle_2({ok, AccessKeyId}, Req, ?HTTP_POST, Path, Params, State) ->
    handle_2({ok, AccessKeyId}, Req, ?HTTP_PUT, Path, Params, State);

%% Generic case: forward to the common request handler; decode failures
%% from the aws-chunked transfer decoder surface as exits and map to 403
handle_2({ok, AccessKeyId}, Req, HTTPMethod, Path, Params, State) ->
    case catch leo_gateway_http_req_handler:handle(
                 HTTPMethod, Req,
                 Path, Params#req_params{access_key_id = AccessKeyId}) of
        {'EXIT', {"aws-chunked decode failed", _} = Cause} ->
            ?error("handle_2/6", [{key, binary_to_list(Path)},
                                  {cause, Cause}]),
            {ok, Req_2} = ?reply_forbidden(
                             [?SERVER_HEADER], ?XML_ERROR_CODE_AccessDenied,
                             ?XML_ERROR_MSG_AccessDenied, Path, <<>>, Req),
            {ok, Req_2, State};
        {'EXIT', Cause} ->
            ?error("handle_2/6", [{key, binary_to_list(Path)},
                                  {cause, Cause}]),
            {ok, Req_2} = ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req),
            {ok, Req_2, State};
        {ok, Req_2} ->
            Req_3 = cowboy_req:compact(Req_2),
            {ok, Req_3, State}
    end.
%% @doc Cowboy transfer-decode callback for AWS SigV4 "aws-chunked"
%%      request bodies. Prepends any buffered bytes to the new input,
%%      runs the chunk state machine (aws_chunk_decode/5), and threads
%%      the decoder state between invocations. A parse or signature
%%      failure raises "aws-chunked decode failed", which handle_2/6
%%      converts into a 403 response.
%% @private
-spec(aws_chunk_decode(Bin, State) ->
             {more|done, Acc, State} when Bin::binary(),
                                          State::#aws_chunk_decode_state{},
                                          Acc::binary()).
aws_chunk_decode(Bin, State) ->
    Buffer = State#aws_chunk_decode_state.buffer,
    DecState = State#aws_chunk_decode_state.dec_state,
    Offset = State#aws_chunk_decode_state.chunk_offset,
    SignParams = State#aws_chunk_decode_state.sign_params,
    TotalLen = State#aws_chunk_decode_state.total_len,
    Ret = aws_chunk_decode({ok, <<>>}, << Buffer/binary, Bin/binary >>,
                           DecState, Offset, SignParams),
    case Ret of
        {{error, Reason2}, {_, _, _, _}} ->
            ?error("aws_chunk_decode/2", [{simple_cause, "parsing error"},
                                          {cause, Reason2}]),
            erlang:error("aws-chunked decode failed");
        {{ok, Acc}, {Buffer_2, DecState_2, Offset_2, SignParams_2}} ->
            %% Need more input; remember undecoded bytes and decoder state
            {more, Acc, #aws_chunk_decode_state{buffer = Buffer_2,
                                                dec_state = DecState_2,
                                                chunk_offset = Offset_2,
                                                sign_params = SignParams_2,
                                                total_len = TotalLen + byte_size(Acc)}};
        {{done, Acc}, {Rest, _, _, _}} ->
            %% Final (zero-length) chunk seen; Rest is trailing data
            {done, Acc, TotalLen + byte_size(Acc), Rest}
    end.
%% @doc aws-chunked state machine. Each chunk on the wire looks like
%%      "<size-hex>;chunk-signature=<64-hex>\r\n<payload>\r\n"; a chunk of
%%      size 0 terminates the stream. States:
%%        wait_size  - parse the hex size up to ';'
%%        wait_head  - parse the fixed-width chunk-signature header
%%        read_chunk - consume the payload (verifying its signature when
%%                     SignHead is available)
%%      Each clause either consumes input and recurses, or returns
%%      {{ok|done|error, Acc}, {Buffer, NextState, Offset, SignParams}}.
%% @private
aws_chunk_decode({ok, Acc}, Buffer, wait_size, 0,
                 #aws_chunk_sign_params{sign_head = SignHead} = SignParams) ->
    case byte_size(Buffer) of
        Len when Len > 10 ->
            %% The size field fits in the first 10 bytes; a missing ';'
            %% there means the stream is malformed
            << Bin:10/binary, _/binary >> = Buffer,
            case binary:match(Bin, <<";">>) of
                nomatch ->
                    {{error, incorrect}, {Buffer, error, 0, SignParams}};
                {Start, _} ->
                    << SizeHexBin:Start/binary, ";", Rest/binary >> = Buffer,
                    SizeHex = binary_to_list(SizeHexBin),
                    Size = leo_hex:hex_to_integer(SizeHex),
                    %% Start a fresh SHA-256 context per chunk when the
                    %% request is actually signed
                    SignParams_2 =
                        case SignHead of
                            undefined ->
                                SignParams#aws_chunk_sign_params{chunk_size = Size};
                            _ ->
                                Context = crypto:hash_init(sha256),
                                SignParams#aws_chunk_sign_params{chunk_size = Size,
                                                                 hash_context = Context}
                        end,
                    aws_chunk_decode({ok, Acc}, Rest, wait_head, 0, SignParams_2)
            end;
        _ ->
            %% Not enough bytes to parse the size yet
            {{ok, Acc}, {Buffer, wait_size, 0, SignParams}}
    end;
aws_chunk_decode({ok, Acc}, Buffer, wait_head, 0, SignParams) ->
    case byte_size(Buffer) of
        Len when Len > 80 + 2 ->
            %% Header is fixed-width: "chunk-signature=" ++ 64 hex ++ CRLF
            << "chunk-signature=", ChunkSign:64/binary,
               "\r\n", Rest/binary >> = Buffer,
            aws_chunk_decode({ok, Acc}, Rest, read_chunk, 0,
                             SignParams#aws_chunk_sign_params{chunk_sign = ChunkSign});
        _ ->
            {{ok, Acc}, {Buffer, wait_head, 0, SignParams}}
    end;
aws_chunk_decode({ok, Acc}, Buffer, read_chunk, Offset,
                 #aws_chunk_sign_params{sign_head = SignHead,
                                        sign_key = SignKey,
                                        prev_sign = PrevSign,
                                        chunk_sign = ChunkSign,
                                        chunk_size = ChunkSize,
                                        hash_context = Context} = SignParams) ->
    ChunkRemainSize = ChunkSize - Offset,
    case byte_size(Buffer) of
        Len when Len >= ChunkRemainSize + 2 ->
            %% Whole remainder of the chunk (plus trailing CRLF) is here
            << ChunkPart:ChunkRemainSize/binary,
               "\r\n", Rest/binary >> = Buffer,
            case SignHead of
                undefined ->
                    %% Unsigned stream: emit the payload; a 0-size chunk
                    %% terminates the body
                    ?debug("aws_chunk_decode/4", "Output Chunk Size: ~p, No Sign", [ChunkSize]),
                    case ChunkSize of
                        0 ->
                            {{done, Acc}, {Rest, done, 0, #aws_chunk_sign_params{}}};
                        _ ->
                            aws_chunk_decode({ok, << Acc/binary, ChunkPart/binary >>},
                                             Rest, wait_size, 0, SignParams)
                    end;
                _ ->
                    %% Verify the chunk signature: HMAC-SHA256 over the
                    %% string-to-sign built from the previous signature
                    %% and this chunk's payload hash (SigV4 streaming)
                    Context_2 = crypto:hash_update(Context, ChunkPart),
                    ChunkHash = crypto:hash_final(Context_2),
                    ChunkHashBin = leo_hex:binary_to_hexbin(ChunkHash),
                    BinToSign = << ?AWS_SIGNATURE_V4_SHA256_KEY/binary,
                                   "\n",
                                   SignHead/binary,
                                   PrevSign/binary,
                                   "\n",
                                   ?AWS_SIGNATURE_V4_SHA256_HASH/binary,
                                   "\n",
                                   ChunkHashBin/binary >>,
                    case (leo_hex:binary_to_hexbin(
                            crypto:hmac(sha256, SignKey, BinToSign))) of
                        ChunkSign ->
                            case (ChunkSize == 0) of
                                true ->
                                    {{done, Acc}, {Rest, done, 0, #aws_chunk_sign_params{}}};
                                false ->
                                    ?debug("aws_chunk_decode/4",
                                           "Output Chunk Size: ~p, Sign: ~p", [ChunkSize, ChunkSign]),
                                    %% This chunk's signature becomes the
                                    %% previous signature for the next one
                                    aws_chunk_decode({ok, << Acc/binary, ChunkPart/binary >>},
                                                     Rest, wait_size, 0,
                                                     SignParams#aws_chunk_sign_params{prev_sign = ChunkSign,
                                                                                      chunk_sign = <<>>})
                            end;
                        WrongSign ->
                            ?error("aws_chunk_decode/4",
                                   [{cause, "Chunk Signature Not Match"},
                                    {wrong_sign, WrongSign},
                                    {chunk_sign, ChunkSign},
                                    {sign, binary_to_list(BinToSign)}]),
                            {{error, unmatch}, {Buffer, error, Offset, SignParams}}
                    end
            end;
        Len when ChunkRemainSize >= Len ->
            %% Partial chunk: emit what we have, fold it into the running
            %% hash (when signing), and remember the offset
            SignParams_2 = case SignHead of
                               undefined ->
                                   SignParams;
                               _ ->
                                   Context_2 = crypto:hash_update(Context, Buffer),
                                   SignParams#aws_chunk_sign_params{hash_context = Context_2}
                           end,
            {{ok, << Acc/binary, Buffer/binary >>},
             {<<>>, read_chunk, Offset + Len, SignParams_2}};
        _ ->
            %% Remainder + CRLF not fully buffered yet
            {{ok, Acc},
             {Buffer, read_chunk, Offset ,SignParams}}
    end.
%% @doc Complete-multipart-upload, step 1: require a request body, verify
%%      the upload marker "<path>\n<upload-id>" exists (retrieving the
%%      custom metadata stored at initiation), delete the marker, then
%%      read the CompleteMultipartUpload XML body and continue in
%%      handle_multi_upload_2/6. Without a body or marker -> 403.
%% @private
-spec(handle_multi_upload_1(IsHandling, Req, Path, UploadId,
                            ChunkedLen, TransferDecodeFun, TransferDecodeState, BucketInfo) ->
             {ok, Req} when IsHandling::boolean(),
                            Req::cowboy_req:req(),
                            Path::binary(),
                            UploadId::binary(),
                            ChunkedLen::non_neg_integer(),
                            TransferDecodeFun::function(),
                            TransferDecodeState::term(),
                            BucketInfo::#?BUCKET{}).
handle_multi_upload_1(true, Req, Path, UploadId,
                      ChunkedLen, TransferDecodeFun, TransferDecodeState, BucketInfo) ->
    Path4Conf = << Path/binary, ?STR_NEWLINE, UploadId/binary >>,

    case leo_gateway_rpc_handler:get(Path4Conf) of
        {ok, #?METADATA{meta = CMetaBin}, _} ->
            %% Marker is no longer needed once completion starts
            _ = leo_gateway_rpc_handler:delete(Path4Conf),

            BodyOpts = case TransferDecodeFun of
                           undefined ->
                               [];
                           _ ->
                               [{transfer_decode, TransferDecodeFun, TransferDecodeState}]
                       end,
            Ret = cowboy_req:body(Req, BodyOpts),
            handle_multi_upload_2(Ret, Req, Path, ChunkedLen, BucketInfo, CMetaBin);
        {error, unavailable} ->
            ?reply_service_unavailable_error([?SERVER_HEADER], Path, <<>>, Req);
        _ ->
            ?reply_forbidden([?SERVER_HEADER], ?XML_ERROR_CODE_AccessDenied,
                             ?XML_ERROR_MSG_AccessDenied, Path, <<>>, Req)
    end;
handle_multi_upload_1(false, Req, Path,_UploadId,_ChunkedLen,_,_,_) ->
    ?reply_forbidden([?SERVER_HEADER], ?XML_ERROR_CODE_AccessDenied,
                     ?XML_ERROR_MSG_AccessDenied, Path, <<>>, Req).
%% @doc Complete-multipart-upload, step 2: parse the request XML to count
%%      the uploaded parts, gather each part's length/checksum
%%      (handle_multi_upload_3/3), then write the parent object's
%%      metadata entry and return the CompleteMultipartUpload XML.
%%      NOTE(review): the number of parts is taken from the XML element
%%      count, assuming one element per <Part> — confirm against the
%%      acc_fun filtering below.
%% @private
-spec(handle_multi_upload_2({ok, Bin, Req}|{error, Cause}, Req, Path, ChunkedLen, BucketInfo, CMetaBin) ->
             {ok, Req} when Bin::binary(),
                            Req::cowboy_req:req(),
                            Cause::any(),
                            Path::binary(),
                            ChunkedLen::non_neg_integer(),
                            BucketInfo::#?BUCKET{},
                            CMetaBin::binary()).
%% The Req inside the first tuple is the one updated by cowboy_req:body/2;
%% the second argument is the stale Req and is ignored
handle_multi_upload_2({ok, Bin, Req}, _Req, Path,_ChunkedLen, BucketInfo, CMetaBin) ->
    %% xmerl acc_fun: drop whitespace-only text nodes (returning the
    %% 3-tuple {Acc, Pos, GlobalState} form) and keep everything else
    Acc = fun(#xmlText{value = " ",
                       pos = P}, Acc, S) ->
                  {Acc, P, S};
             (X, Acc, S) ->
                  {[X|Acc], S}
          end,
    {#xmlElement{content = Content},_} = xmerl_scan:string(
                                           binary_to_list(Bin),
                                           [{space,normalize}, {acc_fun, Acc}]),
    TotalUploadedObjs = length(Content),

    case handle_multi_upload_3(TotalUploadedObjs, Path, []) of
        {ok, {Len, ETag_1}} ->
            %% Child-object size is taken from part #1 (used as the
            %% nominal chunk size of the assembled object)
            IndexBin = list_to_binary(integer_to_list(1)),
            ChildKey = << Path/binary, ?DEF_SEPARATOR/binary, IndexBin/binary >>,
            case leo_gateway_rpc_handler:head(ChildKey) of
                {ok, #?METADATA{del = 0,
                                dsize = ChildObjSize}} ->
                    case leo_gateway_rpc_handler:put(#put_req_params{path = Path,
                                                                     body = ?BIN_EMPTY,
                                                                     meta = CMetaBin,
                                                                     dsize = Len,
                                                                     msize = byte_size(CMetaBin),
                                                                     csize = ChildObjSize,
                                                                     total_chunks = TotalUploadedObjs,
                                                                     digest = ETag_1,
                                                                     bucket_info = BucketInfo}) of
                        {ok,_} ->
                            [BucketName|Path_1] = leo_misc:binary_tokens(Path, ?BIN_SLASH),
                            ETag2 = leo_hex:integer_to_hex(ETag_1, 32),
                            XML = gen_upload_completion_xml(
                                    BucketName, Path_1, ETag2, TotalUploadedObjs),
                            ?reply_ok([?SERVER_HEADER], XML, Req);
                        {error, unavailable} ->
                            ?reply_service_unavailable_error([?SERVER_HEADER], Path, <<>>, Req);
                        {error, Cause} ->
                            ?error("handle_multi_upload_2/5",
                                   [{key, binary_to_list(Path)}, {cause, Cause}]),
                            ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req)
                    end;
                _ ->
                    ?error("handle_multi_upload_2/5",
                           [{key, binary_to_list(Path)}, {cause, invalid_metadata}]),
                    ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req)
            end;
        {error, unavailable} ->
            ?reply_service_unavailable_error([?SERVER_HEADER], Path, <<>>, Req);
        {error, Cause} ->
            ?error("handle_multi_upload_2/5", [{key, binary_to_list(Path)}, {cause, Cause}]),
            ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req)
    end;
handle_multi_upload_2({error, Cause}, Req, Path,_ChunkedLen,_BucketInfo, _CMetaBin) ->
    ?error("handle_multi_upload_2/5", [{key, binary_to_list(Path)}, {cause, Cause}]),
    ?reply_internal_error([?SERVER_HEADER], Path, <<>>, Req).
%% @doc Walk the parts from PartNum down to 1, HEAD-ing each
%%      "<path>\n<part-num>" entry to collect its {size, checksum}; at 0,
%%      reduce the collected pairs into the total object length and the
%%      multipart ETag (MD5 over the concatenated raw part digests).
%% @private
-spec(handle_multi_upload_3(PartNum, Path, Acc) ->
             {ok, Ret} | {error, Cause} when PartNum::non_neg_integer(),
                                             Path::binary(),
                                             Acc::term(),
                                             Ret::{Len, ETag},
                                             Len::non_neg_integer(),
                                             ETag::binary(),
                                             Cause::any()).
handle_multi_upload_3(0,_Path, Acc) ->
    Reduce = fun({_PartIdx, {DSize, Checksum}}, {SumLen, DigestsBin}) ->
                     RawDigest = leo_hex:integer_to_raw_binary(Checksum),
                     {SumLen + DSize, << DigestsBin/binary, RawDigest/binary >>}
             end,
    {TotalLen, Concatenated} =
        lists:foldl(Reduce, {0, <<>>},
                    lists:sort(lists:reverse(Acc))),
    FinalETag = leo_hex:hex_to_integer(
                  leo_hex:binary_to_hex(crypto:hash(md5, Concatenated))),
    {ok, {TotalLen, FinalETag}};
handle_multi_upload_3(PartNum, Path, Acc) ->
    PartNumBin = list_to_binary(integer_to_list(PartNum)),
    PartKey = << Path/binary, ?STR_NEWLINE, PartNumBin/binary >>,
    case leo_gateway_rpc_handler:head(PartKey) of
        {ok, #?METADATA{dsize = Len,
                        checksum = Checksum}} ->
            handle_multi_upload_3(PartNum - 1, Path,
                                  [{PartNum, {Len, Checksum}}|Acc]);
        Error ->
            Error
    end.
%% @doc Join path segments (binaries) into a "/"-separated string key;
%%      an empty segment list yields the empty string.
%% @private
-spec(gen_upload_key(Path) ->
             Key when Path::binary(),
                      Key::string()).
gen_upload_key(Path) ->
    lists:foldl(fun(Segment, []) ->
                        binary_to_list(Segment);
                   (Segment, Joined) ->
                        Joined ++ ?STR_SLASH ++ binary_to_list(Segment)
                end, [], Path).
%% @doc Render the InitiateMultipartUpload response XML for the given
%%      bucket, key segments, and upload-id.
%% @private
-spec(gen_upload_initiate_xml(BucketNameBin, Path, UploadId) ->
             Ret when BucketNameBin::binary(),
                      Path::[binary()],
                      UploadId::binary(),
                      Ret::string()).
gen_upload_initiate_xml(BucketNameBin, Path, UploadId) ->
    io_lib:format(?XML_UPLOAD_INITIATION,
                  [binary_to_list(BucketNameBin),
                   gen_upload_key(Path),
                   UploadId]).
%% @doc Render the CompleteMultipartUpload response XML for the given
%%      bucket, key segments, ETag, and total number of parts.
%% @private
-spec(gen_upload_completion_xml(BucketNameBin, Path, ETag, Total) ->
             Ret when BucketNameBin::binary(),
                      Path::[binary()],
                      ETag::binary(),
                      Total::non_neg_integer(),
                      Ret::string()).
gen_upload_completion_xml(BucketNameBin, Path, ETag, Total) ->
    io_lib:format(?XML_UPLOAD_COMPLETION,
                  [binary_to_list(BucketNameBin),
                   gen_upload_key(Path),
                   ETag,
                   integer_to_list(Total)]).
%% @doc Reply 200 with a CopyObjectResult XML body built from the copied
%%      object's timestamp and checksum.
%% @private
-spec(resp_copy_obj_xml(Req, Meta) ->
             {ok, Req} when Req::cowboy_req:req(),
                            Meta::#?METADATA{}).
resp_copy_obj_xml(Req, Meta) ->
    #?METADATA{timestamp = Timestamp,
               checksum = Checksum} = Meta,
    Body = io_lib:format(?XML_COPY_OBJ_RESULT,
                         [leo_http:web_date(Timestamp),
                          leo_hex:integer_to_hex(Checksum, 32)]),
    ?reply_ok([?SERVER_HEADER,
               {?HTTP_HEAD_RESP_CONTENT_TYPE, ?HTTP_CTYPE_XML}
              ], Body, Req).
%% @doc Read request-scoped flags and values from the query string and
%%      headers, and fill them into the given #req_params{}. Returns
%%      {error, metadata_too_large} when the custom metadata extracted
%%      from the headers meets or exceeds ?HTTP_METADATA_LIMIT.
%% @private
-spec(request_params(Req, ReqParams) ->
             ReqParams when Req::cowboy_req:req(),
                            ReqParams::#req_params{}).
request_params(Req, Params) ->
    %% true when the named query-string parameter is present at all
    HasQSParam = fun(Name) ->
                         case cowboy_req:qs_val(Name, Req) of
                             {undefined,_} ->
                                 false;
                             _ ->
                                 true
                         end
                 end,
    IsMultiDelete = HasQSParam(?HTTP_QS_BIN_MULTI_DELETE),
    IsUpload = HasQSParam(?HTTP_QS_BIN_UPLOADS),
    UploadId = case cowboy_req:qs_val(?HTTP_QS_BIN_UPLOAD_ID, Req) of
                   {undefined,_} ->
                       <<>>;
                   {IdBin,_} ->
                       IdBin
               end,
    PartNum = case cowboy_req:qs_val(?HTTP_QS_BIN_PART_NUMBER, Req) of
                  {undefined,_} ->
                      0;
                  {NumBin,_} ->
                      list_to_integer(binary_to_list(NumBin))
              end,
    Range = element(1, cowboy_req:header(?HTTP_HEAD_RANGE, Req)),
    %% SigV4 streaming uploads advertise themselves via this header value
    IsAwsChunked = case cowboy_req:header(?HTTP_HEAD_X_AMZ_CONTENT_SHA256, Req) of
                       {?HTTP_HEAD_X_VAL_AWS4_SHA256,_} ->
                           true;
                       _ ->
                           false
                   end,
    {Headers, _} = cowboy_req:headers(Req),
    {ok, CMetaBin} = parse_headers_to_cmeta(Headers),
    case byte_size(CMetaBin) of
        MSize when MSize >= ?HTTP_METADATA_LIMIT ->
            {error, metadata_too_large};
        _ ->
            Params#req_params{is_multi_delete = IsMultiDelete,
                              is_upload = IsUpload,
                              is_aws_chunked = IsAwsChunked,
                              upload_id = UploadId,
                              upload_part_num = PartNum,
                              custom_metadata = CMetaBin,
                              range_header = Range}
    end.
%% @doc true when any ACL entry grants the all-users grantee read access
%%      (either read-only or read+write).
%% @private
-spec(is_public_read(BucketAclInfoL) ->
             Ret when BucketAclInfoL::[#bucket_acl_info{}],
                      Ret::boolean()).
is_public_read(BucketAclInfoL) ->
    lists:any(fun(#bucket_acl_info{user_id = UserId,
                                   permissions = Permissions}) ->
                      UserId == ?GRANTEE_ALL_USER
                          andalso (Permissions == [read] orelse
                                   Permissions == [read, write])
              end, BucketAclInfoL).
%% @doc true when any ACL entry grants the all-users grantee both read
%%      and write access.
%% @private
-spec(is_public_read_write(BucketAclInfoL) ->
             Ret when BucketAclInfoL::[#bucket_acl_info{}],
                      Ret::boolean()).
is_public_read_write(BucketAclInfoL) ->
    lists:any(fun(#bucket_acl_info{user_id = UserId,
                                   permissions = Permissions}) ->
                      UserId == ?GRANTEE_ALL_USER
                          andalso (Permissions == [read, write])
              end, BucketAclInfoL).
%% @doc Authentication entry point: derive the bucket name from the path
%%      (first token, when any), look up the bucket and its ACLs, and
%%      continue in auth/7 with the bucket info attached to ReqParams.
%% @private
-spec(auth(Req, HTTPMethod, Path, TokenLen, ReqParams) ->
             {ok, AccessKeyId, {Signature, SignHead, SignKey}|undefined} |
             {error, Cause} when Req::cowboy_req:req(),
                                 HTTPMethod::binary(),
                                 Path::binary(),
                                 TokenLen::non_neg_integer(),
                                 ReqParams::#req_params{},
                                 AccessKeyId::binary(),
                                 Signature::binary(),
                                 SignHead::binary(),
                                 SignKey::binary(),
                                 Cause::any()).
auth(Req, HTTPMethod, Path, TokenLen, ReqParams) ->
    BucketName =
        if TokenLen >= 1 ->
                erlang:hd(leo_misc:binary_tokens(Path, ?BIN_SLASH));
           true ->
                ?BIN_EMPTY
        end,
    case leo_s3_bucket:get_latest_bucket(BucketName) of
        {ok, #?BUCKET{acls = ACLs} = Bucket} ->
            auth(Req, HTTPMethod, Path, TokenLen,
                 BucketName, ACLs, ReqParams#req_params{bucket_info = Bucket});
        not_found ->
            %% Unknown bucket: authenticate with an empty ACL list
            auth(Req, HTTPMethod, Path, TokenLen, BucketName, [], ReqParams);
        {error, Cause} ->
            {error, Cause}
    end.
%% @doc ACL-aware authentication gate. Requests that are covered by a
%%      public ACL skip signature verification ({ok, <<>>, undefined});
%%      everything else goes through auth_1/7. Clause order matters:
%%      multi-delete, then bucket-level (TokenLen =< 1), then write
%%      verbs on objects, then reads on objects.
%% @private
-spec(auth(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams) ->
             {ok, AccessKeyId, {Signature, SignHead, SignKey}|undefined} |
             {error, Cause} when Req::cowboy_req:req(),
                                 HTTPMethod::binary(),
                                 Path::binary(),
                                 TokenLen::non_neg_integer(),
                                 BucketName::binary(),
                                 ACLs::[binary()],
                                 ReqParams::#req_params{},
                                 AccessKeyId::binary(),
                                 Signature::binary(),
                                 SignHead::binary(),
                                 SignKey::binary(),
                                 Cause::any()).
%% Multi-object delete on a bucket: allowed unsigned only when the bucket
%% is public read-write
auth(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs,
     #req_params{is_multi_delete = true} = ReqParams) when TokenLen =< 1 ->
    case is_public_read_write(ACLs) of
        true ->
            {ok, <<>>, undefined};
        false ->
            auth_1(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams)
    end;
%% Other bucket-level operations always require a signature
auth(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams) when TokenLen =< 1 ->
    auth_1(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams);
%% Object writes: allowed unsigned only on public read-write buckets
auth(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams) when TokenLen > 1,
                                                                        (HTTPMethod == ?HTTP_POST orelse
                                                                         HTTPMethod == ?HTTP_PUT orelse
                                                                         HTTPMethod == ?HTTP_DELETE) ->
    case is_public_read_write(ACLs) of
        true ->
            {ok, <<>>, undefined};
        false ->
            auth_1(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams)
    end;
%% Object reads: allowed unsigned on public-read buckets
auth(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams) when TokenLen > 1 ->
    case is_public_read(ACLs) of
        true ->
            {ok, <<>>, undefined};
        false ->
            auth_1(Req, HTTPMethod, Path, TokenLen, BucketName, ACLs, ReqParams)
    end.
%% @doc Signature verification: recognize an AWS v2/v4 Authorization
%%      header, rebuild the canonical request (normalized path, sorted
%%      query string), and hand it to leo_s3_auth:authenticate/3.
%%      Returns {error, undefined} when no Authorization header exists
%%      and {error, nomatch} on an unrecognized scheme.
%% @private
auth_1(Req, HTTPMethod, Path, TokenLen, BucketName, _ACLs, #req_params{is_acl = IsACL}) ->
    case cowboy_req:header(?HTTP_HEAD_AUTHORIZATION, Req) of
        {undefined, _} ->
            {error, undefined};
        {AuthorizationBin, _} ->
            case AuthorizationBin of
                %% The first 4 bytes identify the scheme ("AWS " or "AWS4")
                << Head:4/binary,
                   _Rest/binary >> when Head =:= ?HTTP_HEAD_X_AWS_SIGNATURE_V2;
                                        Head =:= ?HTTP_HEAD_X_AWS_SIGNATURE_V4 ->
                    %% PUT on a bucket (not its ACL) is a create-bucket op,
                    %% which leo_s3_auth treats specially
                    IsCreateBucketOp = (TokenLen == 1 andalso
                                        HTTPMethod == ?HTTP_PUT andalso
                                        not IsACL),
                    {RawURI,_} = cowboy_req:path(Req),
                    {QStr,_} = cowboy_req:qs(Req),
                    {Headers,_} = cowboy_req:headers(Req),

                    raw_uri = RawURI
                    %% For virtual-host style requests the bucket is not in
                    %% the raw URI; prepend it so the signed resource matches
                    Token_1 = leo_misc:binary_tokens(Path, << ?STR_SLASH >>),
                    Token_2 = leo_misc:binary_tokens(RawURI, << ?STR_SLASH >>),
                    Path_1 = case (length(Token_1) /= length(Token_2)) of
                                 true ->
                                     << ?STR_SLASH, BucketName/binary, RawURI/binary >>;
                                 false ->
                                     case RawURI of
                                         << ?STR_SLASH, _/binary >> ->
                                             RawURI;
                                         _ ->
                                             << ?STR_SLASH, RawURI/binary >>
                                     end
                             end,

                    %% Normalize the query string: drop a trailing '=' and
                    %% sort '&'-separated parameters (canonical form)
                    Len = byte_size(QStr),
                    QStr_2 = case (Len > 0 andalso binary:last(QStr) == $=) of
                                 true ->
                                     binary:part(QStr, 0, (Len - 1));
                                 false ->
                                     QStr
                             end,
                    QStr_3 = case binary:match(QStr_2, << "&" >>) of
                                 nomatch ->
                                     QStr_2;
                                 _ ->
                                     Ret = lists:foldl(
                                             fun(Q, []) ->
                                                     Q;
                                                (Q, Acc) ->
                                                     lists:append([Acc, "&", Q])
                                             end, [],
                                             lists:sort(string:tokens(binary_to_list(QStr_2), "&"))),
                                     list_to_binary(Ret)
                             end,

                    SignVer = case (Head =:= ?HTTP_HEAD_X_AWS_SIGNATURE_V4) of
                                  true ->
                                      v4;
                                  false ->
                                      v2
                              end,
                    SignParams = #sign_params{http_verb = HTTPMethod,
                                              content_md5 = ?http_header(Req, ?HTTP_HEAD_CONTENT_MD5),
                                              content_type = ?http_header(Req, ?HTTP_HEAD_CONTENT_TYPE),
                                              date = ?http_header(Req, ?HTTP_HEAD_DATE),
                                              bucket = BucketName,
                                              raw_uri = RawURI,
                                              requested_uri = Path_1,
                                              query_str = QStr_3,
                                              sign_ver = SignVer,
                                              headers = Headers,
                                              amz_headers = leo_http:get_amz_headers4cow(Headers)},
                    leo_s3_auth:authenticate(AuthorizationBin, SignParams, IsCreateBucketOp);
                _->
                    {error, nomatch}
            end
    end.
%% @doc GET Bucket (list objects). Clauses, in order:
%%        1) empty key       -> treated as "/" (list all buckets)
%%        2) "/"             -> list the caller's buckets
%%        3) MaxKeys == 0    -> empty listing
%%        4) no delimiter    -> streamed listing (body callback fun)
%%        5) with delimiter  -> one-shot listing rendered to XML
%% @private
-spec(get_bucket_1(AccessKeyId, Key, Delimiter, Marker, MaxKeys, Prefix) ->
             {ok, XMLRet} | {error, Cause} when AccessKeyId::binary(),
                                                Key::binary(),
                                                Delimiter::binary(),
                                                Marker::binary(),
                                                MaxKeys::non_neg_integer(),
                                                Prefix::binary()|none,
                                                XMLRet::binary(),
                                                Cause::any()).
get_bucket_1(AccessKeyId, <<>>, Delimiter, Marker, MaxKeys, none) ->
    get_bucket_1(AccessKeyId, ?BIN_SLASH, Delimiter, Marker, MaxKeys, none);
get_bucket_1(AccessKeyId, ?BIN_SLASH, _Delimiter, _Marker, _MaxKeys, none) ->
    case leo_s3_bucket:find_buckets_by_id(AccessKeyId) of
        not_found ->
            {ok, generate_bucket_xml([])};
        {ok, []} ->
            {ok, generate_bucket_xml([])};
        {ok, MetadataL} ->
            {ok, generate_bucket_xml(MetadataL)};
        Error ->
            Error
    end;
%% MaxKeys of 0: answer immediately with an empty listing
get_bucket_1(_AccessKeyId, BucketName, _Delimiter, _Marker, 0, Prefix) ->
    Prefix_1 = case Prefix of
                   none ->
                       <<>>;
                   _ ->
                       Prefix
               end,
    Path = << BucketName/binary, Prefix_1/binary >>,
    {ok, generate_bucket_xml(Path, Prefix_1, [], 0)};
%% No delimiter: list recursively, streaming XML straight to the socket
get_bucket_1(_AccessKeyId, BucketName, none, Marker, MaxKeys, Prefix) ->
    ?debug("get_bucket_1/6", "BucketName: ~p, Marker: ~p, MaxKeys: ~p",
           [BucketName, Marker, MaxKeys]),
    Prefix_1 = case Prefix of
                   none ->
                       <<>>;
                   _ ->
                       Prefix
               end,
    {ok, #redundancies{nodes = Redundancies}} =
        leo_redundant_manager_api:get_redundancies_by_key(get, BucketName),
    Key = << BucketName/binary, Prefix_1/binary >>,
    case leo_gateway_rpc_handler:invoke(Redundancies,
                                        leo_storage_handler_directory,
                                        find_by_parent_dir,
                                        [Key, ?BIN_SLASH, Marker, MaxKeys],
                                        []) of
        {ok, Metadata} when is_list(Metadata) =:= true ->
            %% Return a body callback; cowboy calls it with the socket so
            %% the (possibly long) listing is streamed, not accumulated
            BodyFunc = fun(Socket, Transport) ->
                               BucketName_1 = erlang:hd(leo_misc:binary_tokens(BucketName, <<"/">>)),
                               HeadBin = generate_list_head_xml(BucketName_1, Prefix_1, MaxKeys, <<>>),
                               ok = Transport:send(Socket, HeadBin),
                               {ok, IsTruncated, NextMarker} =
                                   recursive_find(BucketName, Redundancies, Metadata,
                                                  Marker, MaxKeys, Transport, Socket),
                               FootBin = generate_list_foot_xml(IsTruncated, NextMarker),
                               ok = Transport:send(Socket, FootBin)
                       end,
            {ok, BodyFunc};
        {ok, _} ->
            {error, invalid_format};
        Error ->
            Error
    end;
%% With a delimiter: single directory level, rendered in one shot
get_bucket_1(_AccessKeyId, BucketName, Delimiter, Marker, MaxKeys, Prefix) ->
    ?debug("get_bucket_1/6", "BucketName: ~p, Delimiter: ~p, Marker: ~p, MaxKeys: ~p",
           [BucketName, Delimiter, Marker, MaxKeys]),
    Prefix_1 = case Prefix of
                   none ->
                       <<>>;
                   _ ->
                       Prefix
               end,
    {ok, #redundancies{nodes = Redundancies}} =
        leo_redundant_manager_api:get_redundancies_by_key(get, BucketName),
    Path = << BucketName/binary, Prefix_1/binary >>,
    case leo_gateway_rpc_handler:invoke(Redundancies,
                                        leo_storage_handler_directory,
                                        find_by_parent_dir,
                                        [Path, Delimiter, Marker, MaxKeys],
                                        []) of
        not_found ->
            {ok, generate_bucket_xml(Path, Prefix_1, [], MaxKeys)};
        {ok, []} ->
            {ok, generate_bucket_xml(Path, Prefix_1, [], MaxKeys)};
        {ok, MetadataL} ->
            {ok, generate_bucket_xml(Path, Prefix_1, MetadataL, MaxKeys)};
        Error ->
            Error
    end.
%% @doc Create a bucket, forwarding the canned ACL when one was supplied
%%      in the request.
%% @private
-spec(put_bucket_1(CannedACL, AccessKeyId, BucketName) ->
             ok | {error, Cause} when CannedACL::string(),
                                      AccessKeyId::binary(),
                                      BucketName::binary(),
                                      Cause::any()).
put_bucket_1(CannedACL, AccessKeyId, BucketName) ->
    case CannedACL of
        [] ->
            leo_s3_bucket:put(AccessKeyId, BucketName);
        _ ->
            leo_s3_bucket:put(AccessKeyId, BucketName, CannedACL)
    end.
%% @doc Apply a canned ACL to a bucket. Only private, public-read and
%%      public-read-write are supported; anything else is rejected.
%% @private
-spec(put_bucket_acl_1(CannedACL, AccessKeyId, BucketName) ->
             ok | {error, Cause} when CannedACL::string(),
                                      AccessKeyId::binary(),
                                      BucketName::binary(),
                                      Cause::any()).
put_bucket_acl_1(CannedACL, AccessKeyId, BucketName) ->
    case CannedACL of
        ?CANNED_ACL_PRIVATE ->
            leo_s3_bucket:update_acls2private(AccessKeyId, BucketName);
        ?CANNED_ACL_PUBLIC_READ ->
            leo_s3_bucket:update_acls2public_read(AccessKeyId, BucketName);
        ?CANNED_ACL_PUBLIC_READ_WRITE ->
            leo_s3_bucket:update_acls2public_read_write(AccessKeyId, BucketName);
        _ ->
            {error, not_supported}
    end.
%% @doc Delete a bucket: normalize its name, then issue the delete via
%%      the configured manager nodes.
%% @private
-spec(delete_bucket_1(AccessKeyId, BucketName) ->
             ok | not_found | {error, Cause} when AccessKeyId::binary(),
                                                  BucketName::binary()|none,
                                                  Cause::any()).
delete_bucket_1(AccessKeyId, BucketName) ->
    Formalized = formalize_bucket(BucketName),
    ManagerNodes = ?env_manager_nodes(leo_gateway),
    delete_bucket_2(ManagerNodes, AccessKeyId, Formalized).
%% @doc Try each manager node in turn until one acknowledges the bucket
%%      deletion (or reports not_found); failures are logged and the next
%%      node is tried. All nodes exhausted -> internal error.
%% @private
-spec(delete_bucket_2(NodeL, AccessKeyId, BucketName) ->
             ok | not_found | {error, Cause} when NodeL::[atom()],
                                                  AccessKeyId::binary(),
                                                  BucketName::binary()|none,
                                                  Cause::any()).
delete_bucket_2([],_,_) ->
    {error, ?ERR_TYPE_INTERNAL_ERROR};
delete_bucket_2([Node|Rest], AccessKeyId, BucketName) ->
    %% Node names from configuration may still be strings
    TargetNode = case is_list(Node) of
                     true ->
                         list_to_atom(Node);
                     false ->
                         Node
                 end,
    case rpc:call(TargetNode, leo_manager_api, delete_bucket,
                  [AccessKeyId, BucketName], ?DEF_TIMEOUT) of
        ok ->
            ok;
        {error, not_found} ->
            not_found;
        {_, Cause} ->
            ?warn("delete_bucket_2/3", [{cause, Cause}]),
            delete_bucket_2(Rest, AccessKeyId, BucketName)
    end.
%% @doc Check a bucket's existence/ownership. Thin delegation to
%%      leo_s3_bucket:head/2.
%% @private
-spec(head_bucket_1(AccessKeyId, BucketName) ->
             ok | not_found | {error, Cause} when AccessKeyId::binary(),
                                                  BucketName::binary(),
                                                  Cause::any()).
head_bucket_1(KeyId, Bucket) ->
    leo_s3_bucket:head(KeyId, Bucket).
%% @doc Render a ListObjects response for one directory level. The
%%      metadata list is split into batches that are rendered by spawned
%%      workers (generate_bucket_xml_1/7) and re-assembled in order by
%%      generate_bucket_xml_loop/4.
%%
%%      Fix: the spec previously declared MaxKeys::binary(), but MaxKeys
%%      is an integer throughout — it is compared with length(MetadataL)
%%      and passed to integer_to_list/1 below.
%% @private
-spec(generate_bucket_xml(PathBin, PrefixBin, MetadataL, MaxKeys) ->
             XMLRet when PathBin::binary(),
                         PrefixBin::binary(),
                         MetadataL::[#?METADATA{}],
                         MaxKeys::non_neg_integer(),
                         XMLRet::string()).
generate_bucket_xml(PathBin, PrefixBin, MetadataL, MaxKeys) ->
    Bucket = erlang:hd(leo_misc:binary_tokens(PathBin, <<"/">>)),
    PathLen = byte_size(PathBin),
    Path = binary_to_list(PathBin),
    Prefix = binary_to_list(PrefixBin),

    %% Unique ref tags the messages sent back by the rendering workers
    Ref = make_ref(),
    ok = generate_bucket_xml_1(MetadataL, 1, Ref, PathLen, Path, Prefix, MaxKeys),
    TotalDivs = leo_math:ceiling(length(MetadataL) / ?DEF_MAX_NUM_OF_METADATAS),

    CallbackFun = fun(XMLList, NextMarker) ->
                          %% IsTruncated: a full page was returned (and
                          %% MaxKeys wasn't the degenerate 0)
                          TruncatedStr = atom_to_list(length(MetadataL) =:= MaxKeys andalso MaxKeys =/= 0),
                          io_lib:format(?XML_OBJ_LIST,
                                        [xmerl_lib:export_text(Bucket),
                                         xmerl_lib:export_text(Prefix),
                                         integer_to_list(MaxKeys),
                                         XMLList,
                                         TruncatedStr,
                                         xmerl_lib:export_text(NextMarker)])
                  end,
    generate_bucket_xml_loop(Ref, TotalDivs, CallbackFun, []).
%% @doc Render the ListAllMyBuckets XML from bucket records, skipping the
%%      root ("/") pseudo-bucket.
%% @private
-spec(generate_bucket_xml(MetadataL) ->
             XMLRet when MetadataL::[#?BUCKET{}],
                         XMLRet::string()).
generate_bucket_xml(MetadataL) ->
    Render = fun(#?BUCKET{name = BucketNameBin,
                          created_at = CreatedAt}) ->
                     BucketName = binary_to_list(BucketNameBin),
                     case string:equal(?STR_SLASH, BucketName) of
                         true ->
                             [];
                         false ->
                             io_lib:format(?XML_BUCKET,
                                           [xmerl_lib:export_text(BucketName),
                                            leo_http:web_date(CreatedAt)])
                     end
             end,
    Body = lists:append([Render(Bucket) || Bucket <- MetadataL]),
    io_lib:format(?XML_BUCKET_LIST, [Body]).
%% @doc Fan out XML rendering: split the metadata into batches of
%%      ?DEF_MAX_NUM_OF_METADATAS, and for each batch spawn a worker that
%%      renders the entries and sends {append, Ref, {Index, XML, Marker}}
%%      back to this process. Entries with dsize == -1 render as
%%      directory prefixes, others as object entries.
%%      NOTE(review): the worker fun only matches metadata with del = 0;
%%      a deleted entry in the batch would crash the worker and leave
%%      generate_bucket_xml_loop/4 to time out — confirm callers filter
%%      deleted entries beforehand.
%% @private
generate_bucket_xml_1([],_Index,_Ref,_PathLen,_Path,_Prefix,_MaxKeys) ->
    ok;
generate_bucket_xml_1(MetadataL, Index, Ref, PathLen, Path, Prefix, MaxKeys) ->
    {MetadataL_1, Rest} =
        case (length(MetadataL) >= ?DEF_MAX_NUM_OF_METADATAS) of
            true ->
                lists:split(?DEF_MAX_NUM_OF_METADATAS, MetadataL);
            false ->
                {MetadataL, []}
        end,
    PId = self(),
    spawn(fun() ->
                  Fun = fun(#?METADATA{key = EntryKeyBin,
                                       dsize = DSize,
                                       timestamp = Timestamp,
                                       checksum = Checksum,
                                       del = 0}, {Acc,_NextMarker}) ->
                                EntryKey = binary_to_list(EntryKeyBin),
                                %% Skip the listed directory itself
                                case string:equal(Path, EntryKey) of
                                    true ->
                                        {Acc,_NextMarker};
                                    false ->
                                        %% Entry name relative to the listed path
                                        Entry = string:sub_string(EntryKey, PathLen + 1),
                                        case (DSize == -1) of
                                            true ->
                                                %% dsize of -1 marks a sub-directory
                                                {lists:append(
                                                   [Acc,
                                                    io_lib:format(?XML_DIR_PREFIX,
                                                                  [xmerl_lib:export_text(Prefix),
                                                                   xmerl_lib:export_text(Entry)])]),
                                                 EntryKeyBin};
                                            false ->
                                                {lists:append(
                                                   [Acc,
                                                    io_lib:format(?XML_OBJ_LIST_FILE_2,
                                                                  [xmerl_lib:export_text(Prefix),
                                                                   xmerl_lib:export_text(Entry),
                                                                   leo_http:web_date(Timestamp),
                                                                   leo_hex:integer_to_hex(Checksum, 32),
                                                                   integer_to_list(DSize)])]),
                                                 EntryKeyBin}
                                        end
                                end
                        end,
                  {XMLList, NextMarker} = lists:foldl(Fun, {[], <<>>}, MetadataL_1),
                  erlang:send(PId, {append, Ref, {Index, XMLList, NextMarker}})
          end),
    generate_bucket_xml_1(Rest, Index + 1, Ref, PathLen, Path, Prefix, MaxKeys).
%% @doc Collect the {Index, XML, NextMarker} messages produced by the
%%      workers spawned in generate_bucket_xml_1/7. Once all TotalDivs
%%      batches have arrived they are sorted by index so the XML fragments
%%      are concatenated in the original order; the NextMarker of the
%%      last batch wins. Stray messages are ignored; a stalled worker
%%      results in {error, timeout} after ?DEF_REQ_TIMEOUT.
%% @private
generate_bucket_xml_loop(_Ref, 0, CallbackFun, Acc) ->
    {XMLList_1, NextMarker_1} =
        lists:foldl(fun({_Index, XMLList, NextMarker}, {SoFar,_}) ->
                            {lists:append([SoFar, XMLList]), NextMarker}
                    end, {[], []}, lists:sort(Acc)),
    CallbackFun(XMLList_1, NextMarker_1);
generate_bucket_xml_loop(Ref, TotalDivs, CallbackFun, Acc) ->
    receive
        {append, Ref, {Index, XMLList, NextMarker}} ->
            generate_bucket_xml_loop(Ref, TotalDivs - 1,
                                     CallbackFun, [{Index, XMLList, NextMarker}|Acc]);
        _ ->
            %% Unrelated message; keep waiting for our workers
            generate_bucket_xml_loop(Ref, TotalDivs, CallbackFun, Acc)
    after
        ?DEF_REQ_TIMEOUT ->
            {error, timeout}
    end.
@private
-spec(generate_acl_xml(BucketInfo) ->
XMLRet when BucketInfo::#?BUCKET{},
XMLRet::string()).
generate_acl_xml(#?BUCKET{access_key_id = ID, acls = ACLs}) ->
Fun = fun(#bucket_acl_info{user_id = URI,
permissions = Permissions} , Acc) ->
lists:foldl(
fun(read, Acc_1) ->
lists:flatten(
lists:append(
[Acc_1,
io_lib:format(?XML_ACL_GRANT, [URI, ?acl_read]),
io_lib:format(?XML_ACL_GRANT, [URI, ?acl_read_acp])
]));
(write, Acc_1) ->
lists:flatten(
lists:append(
[Acc_1,
io_lib:format(?XML_ACL_GRANT, [URI, ?acl_write]),
io_lib:format(?XML_ACL_GRANT, [URI, ?acl_write_acp])
]));
(full_control, Acc_1) ->
lists:append(
[Acc_1,
io_lib:format(?XML_ACL_GRANT, [URI, ?acl_full_control])])
end, Acc, Permissions)
end,
io_lib:format(?XML_ACL_POLICY, [ID, ID, lists:foldl(Fun, [], ACLs)]).
@private
-spec(generate_delete_multi_xml(IsQuiet, DeletedKeys, ErrorKeys) ->
XMLRet when IsQuiet::boolean(),
DeletedKeys::[binary()],
ErrorKeys::[binary()],
XMLRet::string()).
generate_delete_multi_xml(IsQuiet, DeletedKeys, ErrorKeys) ->
DeletedElems = generate_delete_multi_xml_deleted_elem(DeletedKeys, []),
ErrorElems = case IsQuiet of
true ->
[];
false ->
generate_delete_multi_xml_error_elem(ErrorKeys, [])
end,
io_lib:format(?XML_MULTIPLE_DELETE, [DeletedElems, ErrorElems]).
@private
generate_delete_multi_xml_deleted_elem([], Acc) ->
Acc;
generate_delete_multi_xml_deleted_elem([DeletedKey|Rest], Acc) ->
generate_delete_multi_xml_deleted_elem(
Rest, lists:append([Acc,
io_lib:format(?XML_MULTIPLE_DELETE_SUCCESS_ELEM, [DeletedKey])])).
@private
generate_delete_multi_xml_error_elem([], Acc) ->
Acc;
generate_delete_multi_xml_error_elem([ErrorKey|Rest], Acc) ->
generate_delete_multi_xml_deleted_elem(
Rest, lists:append([Acc,
io_lib:format(?XML_MULTIPLE_DELETE_ERROR_ELEM, [ErrorKey])])).
@private
-spec(delete_multi_objects_2(Req, Body, MD5, MD5, Params) ->
{ok, Req} when Req::cowboy_req:req(),
Body::binary(),
MD5::binary(),
Params::#req_params{}).
delete_multi_objects_2(Req, Body, MD5, MD5, Params) ->
Acc = fun(#xmlText{value = " ", pos = P}, Acc, S) ->
{Acc, P, S};
(X, Acc, S) ->
{[X|Acc], S}
end,
try
{#xmlElement{content = Content},_} =
xmerl_scan:string(binary_to_list(Body),
[{space,normalize}, {acc_fun, Acc}]),
delete_multi_objects_3(Req, Content, false, [], Params)
catch _:Cause ->
?error("delete_multi_objects_2/5", [{req, Req}, {cause, Cause}]),
?reply_malformed_xml([?SERVER_HEADER], Req)
end;
delete_multi_objects_2(Req, _Body, _MD5, _, _Params) ->
?reply_bad_digest([?SERVER_HEADER], <<>>, <<>>, Req).
@private
delete_multi_objects_3(Req, [], IsQuiet, Keys, Params) ->
delete_multi_objects_4(Req, IsQuiet, Keys, [], [], Params);
delete_multi_objects_3(Req, [#xmlElement{name = 'Quiet'}|Rest], _IsQuiet, Keys, Params) ->
delete_multi_objects_3(Req, Rest, true, Keys, Params);
delete_multi_objects_3(Req, [#xmlElement{name = 'Object', content = KeyElem}|Rest], IsQuiet, Keys, Params) ->
[#xmlElement{content = TextElem}|_] = KeyElem,
[#xmlText{value = Key}|_] = TextElem,
delete_multi_objects_3(Req, Rest, IsQuiet, [Key|Keys], Params);
delete_multi_objects_3(Req, [_|Rest], IsQuiet, Keys, Params) ->
delete_multi_objects_3(Req, Rest, IsQuiet, Keys, Params).
@private
delete_multi_objects_4(Req, IsQuiet, [], DeletedKeys, ErrorKeys, Params) ->
delete_multi_objects_5(Req, IsQuiet, DeletedKeys, ErrorKeys, Params);
delete_multi_objects_4(Req, IsQuiet, [Key|Rest], DeletedKeys, ErrorKeys,
#req_params{bucket_name = BucketName} = Params) ->
BinKey = list_to_binary(Key),
Path = << BucketName/binary, <<"/">>/binary, BinKey/binary >>,
case leo_gateway_rpc_handler:head(Path) of
{ok, Meta} ->
BeginTime = leo_date:clock(),
case leo_gateway_rpc_handler:delete(Path) of
ok ->
?access_log_delete(BucketName, Path, Meta#?METADATA.dsize, ?HTTP_ST_NO_CONTENT, BeginTime),
delete_multi_objects_4(Req, IsQuiet, Rest,
[Key|DeletedKeys], ErrorKeys, Params);
{error, not_found} ->
delete_multi_objects_4(Req, IsQuiet, Rest,
[Key|DeletedKeys], ErrorKeys, Params);
{error, _} ->
delete_multi_objects_4(Req, IsQuiet, Rest,
DeletedKeys, [Key|ErrorKeys], Params)
end;
_ ->
delete_multi_objects_4(Req, IsQuiet, Rest,
DeletedKeys, [Key|ErrorKeys], Params)
end.
@private
delete_multi_objects_5(Req, IsQuiet, DeletedKeys, ErrorKeys, _Params) ->
XML = generate_delete_multi_xml(IsQuiet, DeletedKeys, ErrorKeys),
6 . Respond the response XML
?reply_ok([?SERVER_HEADER,
{?HTTP_HEAD_RESP_CONTENT_TYPE, ?HTTP_CTYPE_XML}
], XML, Req).
@private
-spec(formalize_bucket(BucketName) ->
BucketName when BucketName::binary()).
formalize_bucket(BucketName) ->
case (binary:last(BucketName) == $/) of
true ->
binary:part(BucketName, {0, byte_size(BucketName) - 1});
false ->
BucketName
end.
generate_list_head_xml(BucketName, Prefix, MaxKeys, Delimiter) ->
Delimiter_1 = case Delimiter of
<<>> ->
?DEF_DELIMITER;
_ ->
Delimiter
end,
io_lib:format(?XML_OBJ_LIST_HEAD,
[xmerl_lib:export_text(BucketName),
xmerl_lib:export_text(Prefix),
integer_to_list(MaxKeys),
xmerl_lib:export_text(Delimiter_1)]).
generate_list_foot_xml(IsTruncated, NextMarker) ->
TruncatedStr = case IsTruncated of
true ->
<< "true" >>;
false ->
<< "false" >>
end,
io_lib:format(?XML_OBJ_LIST_FOOT,
[TruncatedStr,
xmerl_lib:export_text(NextMarker)]).
generate_list_file_xml(BucketName, #?METADATA{key = Key,
dsize = Length,
timestamp = TS,
checksum = CS,
del = 0}) ->
BucketNameLen = byte_size(BucketName),
<< _:BucketNameLen/binary, Key_1/binary >> = Key,
io_lib:format(?XML_OBJ_LIST_FILE_1,
[xmerl_lib:export_text(Key_1),
leo_http:web_date(TS),
leo_hex:integer_to_hex(CS, 32),
integer_to_list(Length)]);
generate_list_file_xml(_,_) ->
error.
@private
-spec(recursive_find(BucketName, Redundancies, MetadataList,
Marker, MaxKeys, Transport, Socket) ->
{ok, CanFindKey, LastKey} | {error, any()} when BucketName::binary(),
Redundancies::[#redundancies{}],
MetadataList::[#?METADATA{}],
Marker::binary(),
MaxKeys::non_neg_integer(),
Transport::atom(),
Socket::port(),
CanFindKey::boolean(),
LastKey::binary()).
recursive_find(BucketName, Redundancies, MetadataList,
Marker, MaxKeys, Transport, Socket) ->
recursive_find(BucketName, Redundancies, [], MetadataList,
Marker, MaxKeys, <<>>, Transport, Socket).
recursive_find(_BucketName, _Redundancies,_,_,_, 0, LastKey,_,_) ->
{ok, true, LastKey};
recursive_find(_BucketName, _Redundancies,[],[],_,_,_,_,_) ->
{ok, false, <<>>};
recursive_find(BucketName, Redundancies, [Head|Rest], [],
Marker, MaxKeys, LastKey, Transport, Socket) ->
recursive_find(BucketName, Redundancies, Rest, Head,
Marker, MaxKeys, LastKey, Transport, Socket);
recursive_find(BucketName, Redundancies, Acc,
[#?METADATA{dsize = -1, key = Key}|Rest],
Marker, MaxKeys, LastKey, Transport, Socket) ->
case leo_gateway_rpc_handler:invoke(Redundancies,
leo_storage_handler_directory,
find_by_parent_dir,
[Key, ?BIN_SLASH, Marker, MaxKeys],
[]) of
{ok, Metadata} when is_list(Metadata) ->
recursive_find(BucketName, Redundancies, [Rest | Acc], Metadata,
Marker, MaxKeys, LastKey, Transport, Socket);
{ok,_} ->
{error, invalid_format};
Error ->
Error
end;
recursive_find(BucketName, Redundancies, Acc,
[#?METADATA{key = Key} = Head|Rest],
Marker, MaxKeys, LastKey, Transport, Socket) ->
case generate_list_file_xml(BucketName, Head) of
error ->
recursive_find(BucketName, Redundancies, Acc, Rest,
MaxKeys, MaxKeys, LastKey, Transport, Socket);
Bin ->
case Transport:send(Socket, Bin) of
ok ->
recursive_find(BucketName, Redundancies, Acc, Rest,
Marker, MaxKeys - 1, Key, Transport, Socket);
Error ->
Error
end
end.
@doc parse Custom Meta from Headers
-spec(parse_headers_to_cmeta(Headers) ->
{ok, Bin} | {error, Cause} when Headers::list(),
Bin::binary(),
Cause::any()).
parse_headers_to_cmeta(Headers) when is_list(Headers) ->
MetaList = lists:foldl(fun(Ele, Acc) ->
case Ele of
{<<"x-amz-meta-", _/binary>>, _} ->
[Ele | Acc];
_ ->
Acc
end
end, [], Headers),
case MetaList of
[] ->
{ok, <<>>};
_ ->
{ok, term_to_binary(MetaList)}
end;
parse_headers_to_cmeta(_) ->
{error, badarg}.
|
08d0468dcecde7c9e2c4c7a94e5d1b0a3ec3c74dde1e6c6b5899e8a474e54c48 | synrc/mad | mad_peg.erl | -module(mad_peg).
-compile(export_all).
peg_to_erl(F) ->
filename:join(filename:dirname(F),filename:basename(F, ".peg")) ++ ".erl".
compile(File,Inc,Bin,Opt,Deps) ->
ErlFile = peg_to_erl(File),
Compiled = mad_compile:is_compiled(ErlFile,File),
if Compiled == false ->
neotoma:file(File),
mad_erl:compile(ErlFile,Inc,Bin,Opt,Deps);
true -> false end.
| null | https://raw.githubusercontent.com/synrc/mad/b8811aebd662c2a281c9f87642b5cab9321cf543/src/compile/mad_peg.erl | erlang | -module(mad_peg).
-compile(export_all).
peg_to_erl(F) ->
filename:join(filename:dirname(F),filename:basename(F, ".peg")) ++ ".erl".
compile(File,Inc,Bin,Opt,Deps) ->
ErlFile = peg_to_erl(File),
Compiled = mad_compile:is_compiled(ErlFile,File),
if Compiled == false ->
neotoma:file(File),
mad_erl:compile(ErlFile,Inc,Bin,Opt,Deps);
true -> false end.
| |
f6489b48372f704875db469acc2515200075906aae60aff38ca9f48e43d31549 | NorfairKing/the-notes | AffineSpaces.hs | module Geometry.AffineSpaces where
import Notes
import Functions.Application.Macro
import Functions.Basics.Macro
import Functions.Basics.Terms
import Functions.Distances.Macro
import Functions.Distances.Terms
import LinearAlgebra.VectorSpaces.Terms
import Logic.FirstOrderLogic.Macro
import Logic.PropositionalLogic.Macro
import Sets.Basics.Terms
import Geometry.AffineSpaces.Macro
import Geometry.AffineSpaces.Terms
affineSpaces :: Note
affineSpaces = section "Affine Spaces" $ do
pointDefinition
affineSpaceDefinition
affineSetDefinition
convexSetDefinition
convexProjection
convexFunctionDefinition
concaveFunctionDefinition
strictlyConvexFunctionDefinition
affineSubspaceSS
pointDefinition :: Note
pointDefinition = de $ do
lab pointDefinitionLabel
let n = "n"
s ["An ", m n, "-dimensional ", point', " is an ", m n, "-tuple"]
affineSpaceDefinition :: Note
affineSpaceDefinition = de $ do
lab affineSpaceDefinitionLabel
let n = "n"
s ["An ", m n, "-dimensional ", affineSpace', " ", m $ aspace n, " is the set of all ", m n, "-dimensional ", point, "s"]
affineSetDefinition :: Note
affineSetDefinition = de $ do
lab affineSetDefinitionLabel
let a = "A"
s ["An ", affineSet', " ", m a, " is a set that contains the line through any two distinct points in the ", set]
ma $ do
let (x, y) = ("x", "y")
fa (x ∈ a) $ fa (y ∈ a) $ fa (theta ∈ reals) $ x ≠ y ⇒ theta * x + (pars $ 1 - theta) * y ∈ a
convexSetDefinition :: Note
convexSetDefinition = de $ do
lab convexSetDefinitionLabel
let a = "A"
s ["A ", convexSet', " ", m a, " is a set that contains the line segment through any two distinct points in the ", set]
ma $ do
let (x, y) = ("x", "y")
fa (x ∈ a) $ fa (y ∈ a) $ fa (theta ∈ ccint 0 1) $ x ≠ y ⇒ theta * x + (pars $ 1 - theta) * y ∈ a
convexProjection :: Note
convexProjection = de $ do
let a = "A"
as = mathbb "A"
x = "x"
y = "y"
s ["Let", m a, "be a", closed, ", ", convexSet, "and a", subset, "of", m as, "and let", m metr_, "be a", metric, "on", m as]
s ["The projection onto", m a, "is defined as follows"]
ma $ func ("Proj" !: a) as a x $ proj a x =: argmin (y ∈ a) (metrapp_ x y)
todo "defined closed"
convexFunctionDefinition :: Note
convexFunctionDefinition = de $ do
lab convexFunctionDefinitionLabel
let f = fun_
n = "n"
rn = reals ^ n
s ["A ", function, " ", m $ fun f rn reals, " is called a ", convexFunction', " if ", m $ dom f, " is a ", convexSet, " and the following property holds"]
ma $ do
let (x, y) = ("x", "y")
fa (x ∈ rn) $ fa (y ∈ rn) $ fn f (theta * x + (pars $ 1 - theta) * y) <= theta * fn f x + (pars $ 1 - theta) * fn f y
todo "Are we sure that this is the right place to put this definition?"
concaveFunctionDefinition :: Note
concaveFunctionDefinition = de $ do
lab concaveFunctionDefinitionLabel
s ["A ", function, " ", m fun_, " is called a ", concaveFunction', " if ", m $ - fun_, " is a ", convexFunction]
strictlyConvexFunctionDefinition :: Note
strictlyConvexFunctionDefinition = de $ do
lab strictlyConvexFunctionDefinitionLabel
let f = fun_
n = "n"
rn = reals ^ n
s ["A ", function, " ", m $ fun f rn reals, " is called a ", strictlyConvexFunction', " if ", m $ dom f, " is a ", convexSet, " and the following property holds"]
ma $ do
let (x, y) = ("x", "y")
fa (x ∈ rn) $ fa (y ∈ rn) $ fn f (theta * x + (pars $ 1 - theta) * y) < theta * fn f x + (pars $ 1 - theta) * fn f y
affineSubspaceSS :: Note
affineSubspaceSS = subsection "Affine subspaces" $ do
affineSubspaceDefinition
hyperplaneDefinition
affineSubspaceDefinition :: Note
affineSubspaceDefinition = de $ do
lab affineSubspaceDefinitionLabel
let (k, n, p) = ("k", "p", "n")
aspace_ = aspace n
s ["Let ", m p, " be a point in an ", affineSpace, " ", m aspace_, " and let ", m laset, " be a ", m k, "-dimensional ", linearSubspace_, " of ", m $ realVectorsSpace n]
s [m $ daspace p laset, " is called the ", affineSubspace', " of ", m aspace_, " with ", direction', " ", m laset, " through ", m p]
hyperplaneDefinition :: Note
hyperplaneDefinition = de $ do
lab hyperplaneDefinitionLabel
let n = "n"
aspace_ = aspace n
s ["Let ", m aspace_, " be an ", m n, "-dimensional ", affineSpace]
s ["Any ", m (pars $ n - 1), "-dimensional ", affineSubspace, " of ", m aspace_, " is called a ", hyperplane']
| null | https://raw.githubusercontent.com/NorfairKing/the-notes/ff9551b05ec3432d21dd56d43536251bf337be04/src/Geometry/AffineSpaces.hs | haskell | module Geometry.AffineSpaces where
import Notes
import Functions.Application.Macro
import Functions.Basics.Macro
import Functions.Basics.Terms
import Functions.Distances.Macro
import Functions.Distances.Terms
import LinearAlgebra.VectorSpaces.Terms
import Logic.FirstOrderLogic.Macro
import Logic.PropositionalLogic.Macro
import Sets.Basics.Terms
import Geometry.AffineSpaces.Macro
import Geometry.AffineSpaces.Terms
affineSpaces :: Note
affineSpaces = section "Affine Spaces" $ do
pointDefinition
affineSpaceDefinition
affineSetDefinition
convexSetDefinition
convexProjection
convexFunctionDefinition
concaveFunctionDefinition
strictlyConvexFunctionDefinition
affineSubspaceSS
pointDefinition :: Note
pointDefinition = de $ do
lab pointDefinitionLabel
let n = "n"
s ["An ", m n, "-dimensional ", point', " is an ", m n, "-tuple"]
affineSpaceDefinition :: Note
affineSpaceDefinition = de $ do
lab affineSpaceDefinitionLabel
let n = "n"
s ["An ", m n, "-dimensional ", affineSpace', " ", m $ aspace n, " is the set of all ", m n, "-dimensional ", point, "s"]
affineSetDefinition :: Note
affineSetDefinition = de $ do
lab affineSetDefinitionLabel
let a = "A"
s ["An ", affineSet', " ", m a, " is a set that contains the line through any two distinct points in the ", set]
ma $ do
let (x, y) = ("x", "y")
fa (x ∈ a) $ fa (y ∈ a) $ fa (theta ∈ reals) $ x ≠ y ⇒ theta * x + (pars $ 1 - theta) * y ∈ a
convexSetDefinition :: Note
convexSetDefinition = de $ do
lab convexSetDefinitionLabel
let a = "A"
s ["A ", convexSet', " ", m a, " is a set that contains the line segment through any two distinct points in the ", set]
ma $ do
let (x, y) = ("x", "y")
fa (x ∈ a) $ fa (y ∈ a) $ fa (theta ∈ ccint 0 1) $ x ≠ y ⇒ theta * x + (pars $ 1 - theta) * y ∈ a
convexProjection :: Note
convexProjection = de $ do
let a = "A"
as = mathbb "A"
x = "x"
y = "y"
s ["Let", m a, "be a", closed, ", ", convexSet, "and a", subset, "of", m as, "and let", m metr_, "be a", metric, "on", m as]
s ["The projection onto", m a, "is defined as follows"]
ma $ func ("Proj" !: a) as a x $ proj a x =: argmin (y ∈ a) (metrapp_ x y)
todo "defined closed"
convexFunctionDefinition :: Note
convexFunctionDefinition = de $ do
lab convexFunctionDefinitionLabel
let f = fun_
n = "n"
rn = reals ^ n
s ["A ", function, " ", m $ fun f rn reals, " is called a ", convexFunction', " if ", m $ dom f, " is a ", convexSet, " and the following property holds"]
ma $ do
let (x, y) = ("x", "y")
fa (x ∈ rn) $ fa (y ∈ rn) $ fn f (theta * x + (pars $ 1 - theta) * y) <= theta * fn f x + (pars $ 1 - theta) * fn f y
todo "Are we sure that this is the right place to put this definition?"
concaveFunctionDefinition :: Note
concaveFunctionDefinition = de $ do
lab concaveFunctionDefinitionLabel
s ["A ", function, " ", m fun_, " is called a ", concaveFunction', " if ", m $ - fun_, " is a ", convexFunction]
strictlyConvexFunctionDefinition :: Note
strictlyConvexFunctionDefinition = de $ do
lab strictlyConvexFunctionDefinitionLabel
let f = fun_
n = "n"
rn = reals ^ n
s ["A ", function, " ", m $ fun f rn reals, " is called a ", strictlyConvexFunction', " if ", m $ dom f, " is a ", convexSet, " and the following property holds"]
ma $ do
let (x, y) = ("x", "y")
fa (x ∈ rn) $ fa (y ∈ rn) $ fn f (theta * x + (pars $ 1 - theta) * y) < theta * fn f x + (pars $ 1 - theta) * fn f y
affineSubspaceSS :: Note
affineSubspaceSS = subsection "Affine subspaces" $ do
affineSubspaceDefinition
hyperplaneDefinition
affineSubspaceDefinition :: Note
affineSubspaceDefinition = de $ do
lab affineSubspaceDefinitionLabel
let (k, n, p) = ("k", "p", "n")
aspace_ = aspace n
s ["Let ", m p, " be a point in an ", affineSpace, " ", m aspace_, " and let ", m laset, " be a ", m k, "-dimensional ", linearSubspace_, " of ", m $ realVectorsSpace n]
s [m $ daspace p laset, " is called the ", affineSubspace', " of ", m aspace_, " with ", direction', " ", m laset, " through ", m p]
hyperplaneDefinition :: Note
hyperplaneDefinition = de $ do
lab hyperplaneDefinitionLabel
let n = "n"
aspace_ = aspace n
s ["Let ", m aspace_, " be an ", m n, "-dimensional ", affineSpace]
s ["Any ", m (pars $ n - 1), "-dimensional ", affineSubspace, " of ", m aspace_, " is called a ", hyperplane']
| |
73c307d6a70fdece2a23d7572a8d0a161e0dc455d067ca2ea5e3aafe7543e18f | unisonweb/unison | Array.hs | # LANGUAGE CPP #
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE StandaloneKindSignatures #
-- This module wraps the operations in the primitive package so that
-- bounds checks can be toggled on during the build for debugging
purposes . It exports the entire API for the three array types
-- needed, and adds wrappers for the operations that are unchecked in
-- the base library.
--
-- Checking is toggled using the `arraychecks` flag.
module Unison.Runtime.Array
( module EPA,
readArray,
writeArray,
copyArray,
copyMutableArray,
cloneMutableArray,
readByteArray,
writeByteArray,
indexByteArray,
copyByteArray,
copyMutableByteArray,
moveByteArray,
readPrimArray,
writePrimArray,
indexPrimArray,
)
where
import Control.Monad.Primitive
import Data.Kind (Constraint)
import Data.Primitive.Array as EPA hiding
( cloneMutableArray,
copyArray,
copyMutableArray,
readArray,
writeArray,
)
import qualified Data.Primitive.Array as PA
import Data.Primitive.ByteArray as EPA hiding
( copyByteArray,
copyMutableByteArray,
indexByteArray,
moveByteArray,
readByteArray,
writeByteArray,
)
import qualified Data.Primitive.ByteArray as PA
import Data.Primitive.PrimArray as EPA hiding
( indexPrimArray,
readPrimArray,
writePrimArray,
)
import qualified Data.Primitive.PrimArray as PA
import Data.Primitive.Types
#ifdef ARRAY_CHECK
import GHC.Stack
type CheckCtx :: Constraint
type CheckCtx = HasCallStack
type MA = MutableArray
type MBA = MutableByteArray
type A = Array
type BA = ByteArray
-- check index mutable array
checkIMArray
:: CheckCtx
=> String
-> (MA s a -> Int -> r)
-> MA s a -> Int -> r
checkIMArray name f arr i
| i < 0 || sizeofMutableArray arr <= i
= error $ name ++ " unsafe check out of bounds: " ++ show i
| otherwise = f arr i
# inline checkIMArray #
-- check copy array
checkCArray
:: CheckCtx
=> String
-> (MA s a -> Int -> A a -> Int -> Int -> r)
-> MA s a -> Int -> A a -> Int -> Int -> r
checkCArray name f dst d src s l
| d < 0
|| s < 0
|| sizeofMutableArray dst < d + l
|| sizeofArray src < s + l
= error $ name ++ " unsafe check out of bounds: " ++ show (d, s, l)
| otherwise = f dst d src s l
# inline checkCArray #
-- check copy mutable array
checkCMArray
:: CheckCtx
=> String
-> (MA s a -> Int -> MA s a -> Int -> Int -> r)
-> MA s a -> Int -> MA s a -> Int -> Int -> r
checkCMArray name f dst d src s l
| d < 0
|| s < 0
|| sizeofMutableArray dst < d + l
|| sizeofMutableArray src < s + l
= error $ name ++ " unsafe check out of bounds: " ++ show (d, s, l)
| otherwise = f dst d src s l
{-# inline checkCMArray #-}
-- check range mutable array
checkRMArray
:: CheckCtx
=> String
-> (MA s a -> Int -> Int -> r)
-> MA s a -> Int -> Int -> r
checkRMArray name f arr o l
| o < 0 || sizeofMutableArray arr < o+l
= error $ name ++ "unsafe check out of bounds: " ++ show (o, l)
| otherwise = f arr o l
# inline checkRMArray #
-- check index byte array
checkIBArray
:: CheckCtx
=> Prim a
=> String
-> a
-> (ByteArray -> Int -> r)
-> ByteArray -> Int -> r
checkIBArray name a f arr i
| i < 0 || sizeofByteArray arr `quot` sizeOf a <= i
= error $ name ++ " unsafe check out of bounds: " ++ show i
| otherwise = f arr i
# inline checkIBArray #
-- check index mutable byte array
checkIMBArray
:: CheckCtx
=> Prim a
=> String
-> a
-> (MutableByteArray s -> Int -> r)
-> MutableByteArray s -> Int -> r
checkIMBArray name a f arr i
| i < 0 || sizeofMutableByteArray arr `quot` sizeOf a <= i
= error $ name ++ " unsafe check out of bounds: " ++ show i
| otherwise = f arr i
# inline checkIMBArray #
-- check copy byte array
checkCBArray
:: CheckCtx
=> String
-> (MBA s -> Int -> BA -> Int -> Int -> r)
-> MBA s -> Int -> BA -> Int -> Int -> r
checkCBArray name f dst d src s l
| d < 0
|| s < 0
|| sizeofMutableByteArray dst < d + l
|| sizeofByteArray src < s + l
= error $ name ++ " unsafe check out of bounds: " ++ show (d, s, l)
| otherwise = f dst d src s l
# inline checkCBArray #
-- check copy mutable byte array
checkCMBArray
:: CheckCtx
=> String
-> (MBA s -> Int -> MBA s -> Int -> Int -> r)
-> MBA s -> Int -> MBA s -> Int -> Int -> r
checkCMBArray name f dst d src s l
| d < 0
|| s < 0
|| sizeofMutableByteArray dst < d + l
|| sizeofMutableByteArray src < s + l
= error $ name ++ " unsafe check out of bounds: " ++ show (d, s, l)
| otherwise = f dst d src s l
# inline checkCMBArray #
-- check index prim array
checkIPArray
:: CheckCtx
=> Prim a
=> String
-> (PrimArray a -> Int -> r)
-> PrimArray a -> Int -> r
checkIPArray name f arr i
| i < 0 || sizeofPrimArray arr <= i
= error $ name ++ " unsafe check out of bounds: " ++ show i
| otherwise = f arr i
# inline checkIPArray #
-- check index mutable prim array
checkIMPArray
:: CheckCtx
=> Prim a
=> String
-> (MutablePrimArray s a -> Int -> r)
-> MutablePrimArray s a -> Int -> r
checkIMPArray name f arr i
| i < 0 || sizeofMutablePrimArray arr <= i
= error $ name ++ " unsafe check out of bounds: " ++ show i
| otherwise = f arr i
# inline checkIMPArray #
#else
type CheckCtx :: Constraint
type CheckCtx = ()
checkIMArray, checkIMPArray, checkIPArray :: String -> r -> r
checkCArray, checkCMArray, checkRMArray :: String -> r -> r
checkIMArray _ = id
checkIMPArray _ = id
checkCArray _ = id
checkCMArray _ = id
checkRMArray _ = id
checkIPArray _ = id
checkIBArray, checkIMBArray :: String -> a -> r -> r
checkCBArray, checkCMBArray :: String -> r -> r
checkIBArray _ _ = id
checkIMBArray _ _ = id
checkCBArray _ = id
checkCMBArray _ = id
#endif
readArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableArray (PrimState m) a ->
Int ->
m a
readArray = checkIMArray "readArray" PA.readArray
# INLINE readArray #
writeArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableArray (PrimState m) a ->
Int ->
a ->
m ()
writeArray = checkIMArray "writeArray" PA.writeArray
# INLINE writeArray #
copyArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableArray (PrimState m) a ->
Int ->
Array a ->
Int ->
Int ->
m ()
copyArray = checkCArray "copyArray" PA.copyArray
# INLINE copyArray #
cloneMutableArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableArray (PrimState m) a ->
Int ->
Int ->
m (MutableArray (PrimState m) a)
cloneMutableArray = checkRMArray "cloneMutableArray" PA.cloneMutableArray
# INLINE cloneMutableArray #
copyMutableArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableArray (PrimState m) a ->
Int ->
MutableArray (PrimState m) a ->
Int ->
Int ->
m ()
copyMutableArray = checkCMArray "copyMutableArray" PA.copyMutableArray
# INLINE copyMutableArray #
readByteArray ::
forall a m.
(CheckCtx) =>
(PrimMonad m) =>
(Prim a) =>
MutableByteArray (PrimState m) ->
Int ->
m a
readByteArray = checkIMBArray @a "readByteArray" undefined PA.readByteArray
# INLINE readByteArray #
writeByteArray ::
forall a m.
(CheckCtx) =>
(PrimMonad m) =>
(Prim a) =>
MutableByteArray (PrimState m) ->
Int ->
a ->
m ()
writeByteArray = checkIMBArray @a "writeByteArray" undefined PA.writeByteArray
{-# INLINE writeByteArray #-}
indexByteArray ::
forall a.
(CheckCtx) =>
(Prim a) =>
ByteArray ->
Int ->
a
indexByteArray = checkIBArray @a "indexByteArray" undefined PA.indexByteArray
# INLINE indexByteArray #
copyByteArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableByteArray (PrimState m) ->
Int ->
ByteArray ->
Int ->
Int ->
m ()
copyByteArray = checkCBArray "copyByteArray" PA.copyByteArray
# INLINE copyByteArray #
copyMutableByteArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableByteArray (PrimState m) ->
Int ->
MutableByteArray (PrimState m) ->
Int ->
Int ->
m ()
copyMutableByteArray = checkCMBArray "copyMutableByteArray" PA.copyMutableByteArray
# INLINE copyMutableByteArray #
moveByteArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableByteArray (PrimState m) ->
Int ->
MutableByteArray (PrimState m) ->
Int ->
Int ->
m ()
moveByteArray = checkCMBArray "moveByteArray" PA.moveByteArray
# INLINE moveByteArray #
readPrimArray ::
(CheckCtx) =>
(PrimMonad m) =>
(Prim a) =>
MutablePrimArray (PrimState m) a ->
Int ->
m a
readPrimArray = checkIMPArray "readPrimArray" PA.readPrimArray
# INLINE readPrimArray #
writePrimArray ::
(CheckCtx) =>
(PrimMonad m) =>
(Prim a) =>
MutablePrimArray (PrimState m) a ->
Int ->
a ->
m ()
writePrimArray = checkIMPArray "writePrimArray" PA.writePrimArray
# INLINE writePrimArray #
indexPrimArray ::
(CheckCtx) =>
(Prim a) =>
PrimArray a ->
Int ->
a
indexPrimArray = checkIPArray "indexPrimArray" PA.indexPrimArray
# INLINE indexPrimArray #
| null | https://raw.githubusercontent.com/unisonweb/unison/cf278f9fb66ccb9436bf8a2eb4ab03fc7a92021d/parser-typechecker/src/Unison/Runtime/Array.hs | haskell | # LANGUAGE ConstraintKinds #
This module wraps the operations in the primitive package so that
bounds checks can be toggled on during the build for debugging
needed, and adds wrappers for the operations that are unchecked in
the base library.
Checking is toggled using the `arraychecks` flag.
check index mutable array
check copy array
check copy mutable array
# inline checkCMArray #
check range mutable array
check index byte array
check index mutable byte array
check copy byte array
check copy mutable byte array
check index prim array
check index mutable prim array
# INLINE writeByteArray # | # LANGUAGE CPP #
# LANGUAGE StandaloneKindSignatures #
purposes . It exports the entire API for the three array types
module Unison.Runtime.Array
( module EPA,
readArray,
writeArray,
copyArray,
copyMutableArray,
cloneMutableArray,
readByteArray,
writeByteArray,
indexByteArray,
copyByteArray,
copyMutableByteArray,
moveByteArray,
readPrimArray,
writePrimArray,
indexPrimArray,
)
where
import Control.Monad.Primitive
import Data.Kind (Constraint)
import Data.Primitive.Array as EPA hiding
( cloneMutableArray,
copyArray,
copyMutableArray,
readArray,
writeArray,
)
import qualified Data.Primitive.Array as PA
import Data.Primitive.ByteArray as EPA hiding
( copyByteArray,
copyMutableByteArray,
indexByteArray,
moveByteArray,
readByteArray,
writeByteArray,
)
import qualified Data.Primitive.ByteArray as PA
import Data.Primitive.PrimArray as EPA hiding
( indexPrimArray,
readPrimArray,
writePrimArray,
)
import qualified Data.Primitive.PrimArray as PA
import Data.Primitive.Types
#ifdef ARRAY_CHECK
import GHC.Stack
type CheckCtx :: Constraint
type CheckCtx = HasCallStack
type MA = MutableArray
type MBA = MutableByteArray
type A = Array
type BA = ByteArray
checkIMArray
:: CheckCtx
=> String
-> (MA s a -> Int -> r)
-> MA s a -> Int -> r
checkIMArray name f arr i
| i < 0 || sizeofMutableArray arr <= i
= error $ name ++ " unsafe check out of bounds: " ++ show i
| otherwise = f arr i
# inline checkIMArray #
checkCArray
:: CheckCtx
=> String
-> (MA s a -> Int -> A a -> Int -> Int -> r)
-> MA s a -> Int -> A a -> Int -> Int -> r
checkCArray name f dst d src s l
| d < 0
|| s < 0
|| sizeofMutableArray dst < d + l
|| sizeofArray src < s + l
= error $ name ++ " unsafe check out of bounds: " ++ show (d, s, l)
| otherwise = f dst d src s l
# inline checkCArray #
checkCMArray
:: CheckCtx
=> String
-> (MA s a -> Int -> MA s a -> Int -> Int -> r)
-> MA s a -> Int -> MA s a -> Int -> Int -> r
checkCMArray name f dst d src s l
| d < 0
|| s < 0
|| sizeofMutableArray dst < d + l
|| sizeofMutableArray src < s + l
= error $ name ++ " unsafe check out of bounds: " ++ show (d, s, l)
| otherwise = f dst d src s l
checkRMArray
:: CheckCtx
=> String
-> (MA s a -> Int -> Int -> r)
-> MA s a -> Int -> Int -> r
checkRMArray name f arr o l
| o < 0 || sizeofMutableArray arr < o+l
= error $ name ++ "unsafe check out of bounds: " ++ show (o, l)
| otherwise = f arr o l
# inline checkRMArray #
checkIBArray
:: CheckCtx
=> Prim a
=> String
-> a
-> (ByteArray -> Int -> r)
-> ByteArray -> Int -> r
checkIBArray name a f arr i
| i < 0 || sizeofByteArray arr `quot` sizeOf a <= i
= error $ name ++ " unsafe check out of bounds: " ++ show i
| otherwise = f arr i
# inline checkIBArray #
checkIMBArray
:: CheckCtx
=> Prim a
=> String
-> a
-> (MutableByteArray s -> Int -> r)
-> MutableByteArray s -> Int -> r
checkIMBArray name a f arr i
| i < 0 || sizeofMutableByteArray arr `quot` sizeOf a <= i
= error $ name ++ " unsafe check out of bounds: " ++ show i
| otherwise = f arr i
# inline checkIMBArray #
checkCBArray
:: CheckCtx
=> String
-> (MBA s -> Int -> BA -> Int -> Int -> r)
-> MBA s -> Int -> BA -> Int -> Int -> r
checkCBArray name f dst d src s l
| d < 0
|| s < 0
|| sizeofMutableByteArray dst < d + l
|| sizeofByteArray src < s + l
= error $ name ++ " unsafe check out of bounds: " ++ show (d, s, l)
| otherwise = f dst d src s l
# inline checkCBArray #
checkCMBArray
:: CheckCtx
=> String
-> (MBA s -> Int -> MBA s -> Int -> Int -> r)
-> MBA s -> Int -> MBA s -> Int -> Int -> r
checkCMBArray name f dst d src s l
| d < 0
|| s < 0
|| sizeofMutableByteArray dst < d + l
|| sizeofMutableByteArray src < s + l
= error $ name ++ " unsafe check out of bounds: " ++ show (d, s, l)
| otherwise = f dst d src s l
# inline checkCMBArray #
checkIPArray
:: CheckCtx
=> Prim a
=> String
-> (PrimArray a -> Int -> r)
-> PrimArray a -> Int -> r
checkIPArray name f arr i
| i < 0 || sizeofPrimArray arr <= i
= error $ name ++ " unsafe check out of bounds: " ++ show i
| otherwise = f arr i
# inline checkIPArray #
checkIMPArray
:: CheckCtx
=> Prim a
=> String
-> (MutablePrimArray s a -> Int -> r)
-> MutablePrimArray s a -> Int -> r
checkIMPArray name f arr i
| i < 0 || sizeofMutablePrimArray arr <= i
= error $ name ++ " unsafe check out of bounds: " ++ show i
| otherwise = f arr i
# inline checkIMPArray #
#else
type CheckCtx :: Constraint
type CheckCtx = ()
checkIMArray, checkIMPArray, checkIPArray :: String -> r -> r
checkCArray, checkCMArray, checkRMArray :: String -> r -> r
checkIMArray _ = id
checkIMPArray _ = id
checkCArray _ = id
checkCMArray _ = id
checkRMArray _ = id
checkIPArray _ = id
checkIBArray, checkIMBArray :: String -> a -> r -> r
checkCBArray, checkCMBArray :: String -> r -> r
checkIBArray _ _ = id
checkIMBArray _ _ = id
checkCBArray _ = id
checkCMBArray _ = id
#endif
readArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableArray (PrimState m) a ->
Int ->
m a
readArray = checkIMArray "readArray" PA.readArray
# INLINE readArray #
writeArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableArray (PrimState m) a ->
Int ->
a ->
m ()
writeArray = checkIMArray "writeArray" PA.writeArray
# INLINE writeArray #
copyArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableArray (PrimState m) a ->
Int ->
Array a ->
Int ->
Int ->
m ()
copyArray = checkCArray "copyArray" PA.copyArray
# INLINE copyArray #
cloneMutableArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableArray (PrimState m) a ->
Int ->
Int ->
m (MutableArray (PrimState m) a)
cloneMutableArray = checkRMArray "cloneMutableArray" PA.cloneMutableArray
# INLINE cloneMutableArray #
copyMutableArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableArray (PrimState m) a ->
Int ->
MutableArray (PrimState m) a ->
Int ->
Int ->
m ()
copyMutableArray = checkCMArray "copyMutableArray" PA.copyMutableArray
# INLINE copyMutableArray #
readByteArray ::
forall a m.
(CheckCtx) =>
(PrimMonad m) =>
(Prim a) =>
MutableByteArray (PrimState m) ->
Int ->
m a
readByteArray = checkIMBArray @a "readByteArray" undefined PA.readByteArray
# INLINE readByteArray #
writeByteArray ::
forall a m.
(CheckCtx) =>
(PrimMonad m) =>
(Prim a) =>
MutableByteArray (PrimState m) ->
Int ->
a ->
m ()
writeByteArray = checkIMBArray @a "writeByteArray" undefined PA.writeByteArray
indexByteArray ::
forall a.
(CheckCtx) =>
(Prim a) =>
ByteArray ->
Int ->
a
indexByteArray = checkIBArray @a "indexByteArray" undefined PA.indexByteArray
# INLINE indexByteArray #
copyByteArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableByteArray (PrimState m) ->
Int ->
ByteArray ->
Int ->
Int ->
m ()
copyByteArray = checkCBArray "copyByteArray" PA.copyByteArray
# INLINE copyByteArray #
copyMutableByteArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableByteArray (PrimState m) ->
Int ->
MutableByteArray (PrimState m) ->
Int ->
Int ->
m ()
copyMutableByteArray = checkCMBArray "copyMutableByteArray" PA.copyMutableByteArray
# INLINE copyMutableByteArray #
moveByteArray ::
(CheckCtx) =>
(PrimMonad m) =>
MutableByteArray (PrimState m) ->
Int ->
MutableByteArray (PrimState m) ->
Int ->
Int ->
m ()
moveByteArray = checkCMBArray "moveByteArray" PA.moveByteArray
# INLINE moveByteArray #
readPrimArray ::
(CheckCtx) =>
(PrimMonad m) =>
(Prim a) =>
MutablePrimArray (PrimState m) a ->
Int ->
m a
readPrimArray = checkIMPArray "readPrimArray" PA.readPrimArray
# INLINE readPrimArray #
writePrimArray ::
(CheckCtx) =>
(PrimMonad m) =>
(Prim a) =>
MutablePrimArray (PrimState m) a ->
Int ->
a ->
m ()
writePrimArray = checkIMPArray "writePrimArray" PA.writePrimArray
# INLINE writePrimArray #
indexPrimArray ::
(CheckCtx) =>
(Prim a) =>
PrimArray a ->
Int ->
a
indexPrimArray = checkIPArray "indexPrimArray" PA.indexPrimArray
# INLINE indexPrimArray #
|
9f44fe574f743600e37248993e76d07ba7dc68ce2ac5ab88d14fe8bd3ff2bebb | atlas-engineer/nyxt | panel.lisp | SPDX - FileCopyrightText : Atlas Engineer LLC
SPDX - License - Identifier : BSD-3 - Clause
(in-package :nyxt)
;; TODO: Quite some code could be factored with `internal-page'.
(define-class panel-buffer-source (prompter:source)
((prompter:name "Panel buffers")
(window :accessor window :initarg :window)
(prompter:enable-marks-p t)
(prompter:constructor (lambda (source)
(panel-buffers (window source))))))
(define-command-global delete-panel-buffer (&key (window (current-window))
(panels (prompt
:prompt "Delete a panel buffer"
:sources (make-instance 'panel-buffer-source
:window window))))
"Prompt for panel buffer(s) to be deleted.
When provided, PANELS are deleted instead."
(mapc (curry #'window-delete-panel-buffer window) (uiop:ensure-list panels)))
(define-command-global delete-all-panel-buffers (&key (window (current-window)))
"Delete all the open panel buffers in WINDOW."
(delete-panel-buffer :panels (panel-buffers window)))
(define-class panel-page (internal-page)
((side
:left
:type (member :left :right)
:documentation "The side of the window where the panel is displayed."))
(:metaclass closer-mop:funcallable-standard-class)
(:accessor-name-transformer (class*:make-name-transformer name))
(:export-class-name-p t)
(:export-accessor-names-p t)
(:documentation "Internal page for `panel-buffers'.
The main difference is that their command toggles the panel."))
(defun find-panel-buffer (name)
"Return first panel buffer which URL is a NAME `panel-page'."
(find name (panel-buffers (current-window))
:key (alex:compose #'internal-page-name #'url)))
(defmethod set-internal-page-method ((page panel-page) form)
(when form
(let* ((arglist (second form))
(keywords (nth-value 3 (alex:parse-ordinary-lambda-list arglist)))
(body (cddr form))
(documentation (nth-value 2 (alex:parse-body body :documentation t))))
(closer-mop:ensure-method
page
`(lambda (,@arglist)
,@(when documentation (list documentation))
(declare (ignorable ,@(mappend #'cdar keywords)))
(alex:if-let ((panel-buffer (find-panel-buffer (name ,page))))
(window-delete-panel-buffer (current-window) panel-buffer)
(window-add-panel-buffer
(current-window)
(buffer-load (nyxt-url (name ,page) ,@(mappend #'first keywords))
:buffer (make-instance 'panel-buffer))
(side ,page))))))))
;; TODO: Add define-panel?
(export-always 'define-panel-command)
(defmacro define-panel-command (name (&rest arglist)
(buffer-var title &optional (side :left))
&body body)
"Define a panel buffer and:
- A local command called NAME, creating this panel-buffer or closing it if it's shown already.
- A nyxt:NAME URL for the content of this panel buffer.
Should end with a form returning HTML as a string.
BUFFER-VAR is the variable the created panel will be bound to in the BODY. SIDE
is either :LEFT (default) or :RIGHT.
ARGLIST is arguments for the command and for the underlying page-generating
function. Any argument from it is safe to use in the body of this macro.
Beware: the ARGLIST should have nothing but keyword arguments because it's
mapped to query parameters."
(multiple-value-bind (stripped-body declarations documentation)
(alex:parse-body body :documentation t)
`(progn
(export-always ',name (symbol-package ',name))
(sera:lret ((gf (defgeneric ,name (,@(generalize-lambda-list arglist))
,@(when documentation
`((:documentation ,documentation)))
(:generic-function-class panel-page))))
(let ((wrapped-body '(lambda (,@arglist)
,@(when documentation (list documentation))
,@declarations
(let ((,buffer-var (find-panel-buffer ',name)))
(declare (ignorable ,buffer-var))
,@stripped-body))))
(set-internal-page-method gf wrapped-body)
(setf (slot-value #',name 'visibility) :mode)
(setf (slot-value #',name 'dynamic-title)
,(if (stringp title)
title
(let ((keywords (nth-value 3 (alex:parse-ordinary-lambda-list arglist))))
`(lambda (,@arglist)
(declare (ignorable ,@(mappend #'cdar keywords)))
,title))))
(setf (slot-value #',name 'side) ,side)
(setf (form gf) wrapped-body))))))
(export-always 'define-panel-command-global)
(defmacro define-panel-command-global (name (&rest arglist)
(buffer-var title &optional (side :left))
&body body)
"Define a panel buffer with a global command showing it.
See `define-panel-command' for the description of the arguments."
`(prog1 (define-panel-command ,name (,@arglist) (,buffer-var ,title ,side) ,@body)
(setf (slot-value #',name 'visibility) :global)))
| null | https://raw.githubusercontent.com/atlas-engineer/nyxt/cdcb32c396b6b2907a81204c334e2300f054eb56/source/panel.lisp | lisp | TODO: Quite some code could be factored with `internal-page'.
TODO: Add define-panel? | SPDX - FileCopyrightText : Atlas Engineer LLC
SPDX - License - Identifier : BSD-3 - Clause
(in-package :nyxt)
(define-class panel-buffer-source (prompter:source)
((prompter:name "Panel buffers")
(window :accessor window :initarg :window)
(prompter:enable-marks-p t)
(prompter:constructor (lambda (source)
(panel-buffers (window source))))))
(define-command-global delete-panel-buffer (&key (window (current-window))
(panels (prompt
:prompt "Delete a panel buffer"
:sources (make-instance 'panel-buffer-source
:window window))))
"Prompt for panel buffer(s) to be deleted.
When provided, PANELS are deleted instead."
(mapc (curry #'window-delete-panel-buffer window) (uiop:ensure-list panels)))
(define-command-global delete-all-panel-buffers (&key (window (current-window)))
"Delete all the open panel buffers in WINDOW."
(delete-panel-buffer :panels (panel-buffers window)))
(define-class panel-page (internal-page)
((side
:left
:type (member :left :right)
:documentation "The side of the window where the panel is displayed."))
(:metaclass closer-mop:funcallable-standard-class)
(:accessor-name-transformer (class*:make-name-transformer name))
(:export-class-name-p t)
(:export-accessor-names-p t)
(:documentation "Internal page for `panel-buffers'.
The main difference is that their command toggles the panel."))
(defun find-panel-buffer (name)
"Return first panel buffer which URL is a NAME `panel-page'."
(find name (panel-buffers (current-window))
:key (alex:compose #'internal-page-name #'url)))
(defmethod set-internal-page-method ((page panel-page) form)
(when form
(let* ((arglist (second form))
(keywords (nth-value 3 (alex:parse-ordinary-lambda-list arglist)))
(body (cddr form))
(documentation (nth-value 2 (alex:parse-body body :documentation t))))
(closer-mop:ensure-method
page
`(lambda (,@arglist)
,@(when documentation (list documentation))
(declare (ignorable ,@(mappend #'cdar keywords)))
(alex:if-let ((panel-buffer (find-panel-buffer (name ,page))))
(window-delete-panel-buffer (current-window) panel-buffer)
(window-add-panel-buffer
(current-window)
(buffer-load (nyxt-url (name ,page) ,@(mappend #'first keywords))
:buffer (make-instance 'panel-buffer))
(side ,page))))))))
(export-always 'define-panel-command)
(defmacro define-panel-command (name (&rest arglist)
(buffer-var title &optional (side :left))
&body body)
"Define a panel buffer and:
- A local command called NAME, creating this panel-buffer or closing it if it's shown already.
- A nyxt:NAME URL for the content of this panel buffer.
Should end with a form returning HTML as a string.
BUFFER-VAR is the variable the created panel will be bound to in the BODY. SIDE
is either :LEFT (default) or :RIGHT.
ARGLIST is arguments for the command and for the underlying page-generating
function. Any argument from it is safe to use in the body of this macro.
Beware: the ARGLIST should have nothing but keyword arguments because it's
mapped to query parameters."
(multiple-value-bind (stripped-body declarations documentation)
(alex:parse-body body :documentation t)
`(progn
(export-always ',name (symbol-package ',name))
(sera:lret ((gf (defgeneric ,name (,@(generalize-lambda-list arglist))
,@(when documentation
`((:documentation ,documentation)))
(:generic-function-class panel-page))))
(let ((wrapped-body '(lambda (,@arglist)
,@(when documentation (list documentation))
,@declarations
(let ((,buffer-var (find-panel-buffer ',name)))
(declare (ignorable ,buffer-var))
,@stripped-body))))
(set-internal-page-method gf wrapped-body)
(setf (slot-value #',name 'visibility) :mode)
(setf (slot-value #',name 'dynamic-title)
,(if (stringp title)
title
(let ((keywords (nth-value 3 (alex:parse-ordinary-lambda-list arglist))))
`(lambda (,@arglist)
(declare (ignorable ,@(mappend #'cdar keywords)))
,title))))
(setf (slot-value #',name 'side) ,side)
(setf (form gf) wrapped-body))))))
(export-always 'define-panel-command-global)
(defmacro define-panel-command-global (name (&rest arglist)
(buffer-var title &optional (side :left))
&body body)
"Define a panel buffer with a global command showing it.
See `define-panel-command' for the description of the arguments."
`(prog1 (define-panel-command ,name (,@arglist) (,buffer-var ,title ,side) ,@body)
(setf (slot-value #',name 'visibility) :global)))
|
eb2bd66dea304aeabbda66a03080fb119267137beec9cec6f4e46c9ac17f640b | basho/riak_pipe | riak_pipe_sup.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2011 Basho Technologies , Inc.
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% @doc Supervisor for the `riak_pipe' application.
-module(riak_pipe_sup).
-behaviour(supervisor).
%% API
-export([start_link/0]).
%% Supervisor callbacks
-export([init/1]).
%% ===================================================================
%% API functions
%% ===================================================================
%% @doc Start the supervisor. It will be registered under the atom
%% `riak_pipe_sup'.
-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
%% ===================================================================
%% Supervisor callbacks
%% ===================================================================
%% @doc Initialize the supervisor, and start children.
%%
Three children are started immediately :
< ol><li >
%% The vnode master for riak_pipe vnodes (registered under
%% `riak_pipe_vnode_master').
%%</li><li>
%% The pipe builder supervisor (registered under
%% `riak_pipe_builder_sup').
%%</li><li>
%% The pipe fitting supervisor (registred under
%% `riak_pipe_fitting_sup').
%%</li></ol>.
-spec init([]) -> {ok, {{supervisor:strategy(),
pos_integer(),
pos_integer()},
[ supervisor:child_spec() ]}}.
init([]) ->
%% ordsets = enabled traces are represented as ordsets in fitting_details
%% sets = '' sets ''
riak_core_capability:register(
{riak_pipe, trace_format}, [ordsets, sets], sets),
VMaster = {riak_pipe_vnode_master,
{riak_core_vnode_master, start_link, [riak_pipe_vnode]},
permanent, 5000, worker, [riak_core_vnode_master]},
BSup = {riak_pipe_builder_sup,
{riak_pipe_builder_sup, start_link, []},
permanent, 5000, supervisor, [riak_pipe_builder_sup]},
FSup = {riak_pipe_fitting_sup,
{riak_pipe_fitting_sup, start_link, []},
permanent, 5000, supervisor, [riak_pipe_fitting_sup]},
CSup = {riak_pipe_qcover_sup,
{riak_pipe_qcover_sup, start_link, []},
permanent, 5000, supervisor, [riak_pipe_qcover_sup]},
{ok, { {one_for_one, 5, 10}, [VMaster, BSup, FSup, CSup]} }.
| null | https://raw.githubusercontent.com/basho/riak_pipe/a341b653408bd1517ccdd0f54ec27be1005dbeba/src/riak_pipe_sup.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
@doc Supervisor for the `riak_pipe' application.
API
Supervisor callbacks
===================================================================
API functions
===================================================================
@doc Start the supervisor. It will be registered under the atom
`riak_pipe_sup'.
===================================================================
Supervisor callbacks
===================================================================
@doc Initialize the supervisor, and start children.
The vnode master for riak_pipe vnodes (registered under
`riak_pipe_vnode_master').
</li><li>
The pipe builder supervisor (registered under
`riak_pipe_builder_sup').
</li><li>
The pipe fitting supervisor (registred under
`riak_pipe_fitting_sup').
</li></ol>.
ordsets = enabled traces are represented as ordsets in fitting_details
sets = '' sets '' | Copyright ( c ) 2011 Basho Technologies , Inc.
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(riak_pipe_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
-spec start_link() -> {ok, pid()} | ignore | {error, term()}.
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
Three children are started immediately :
< ol><li >
-spec init([]) -> {ok, {{supervisor:strategy(),
pos_integer(),
pos_integer()},
[ supervisor:child_spec() ]}}.
init([]) ->
riak_core_capability:register(
{riak_pipe, trace_format}, [ordsets, sets], sets),
VMaster = {riak_pipe_vnode_master,
{riak_core_vnode_master, start_link, [riak_pipe_vnode]},
permanent, 5000, worker, [riak_core_vnode_master]},
BSup = {riak_pipe_builder_sup,
{riak_pipe_builder_sup, start_link, []},
permanent, 5000, supervisor, [riak_pipe_builder_sup]},
FSup = {riak_pipe_fitting_sup,
{riak_pipe_fitting_sup, start_link, []},
permanent, 5000, supervisor, [riak_pipe_fitting_sup]},
CSup = {riak_pipe_qcover_sup,
{riak_pipe_qcover_sup, start_link, []},
permanent, 5000, supervisor, [riak_pipe_qcover_sup]},
{ok, { {one_for_one, 5, 10}, [VMaster, BSup, FSup, CSup]} }.
|
6918613e564989459ea266e168559b19a872d69f6960bb48648ee6d0ed432233 | ChrisPenner/rasa | Rasa.hs | # language ExistentialQuantification , Rank2Types , ScopedTypeVariables #
module Rasa (rasa) where
import Eve
import Rasa.Internal.Listeners
import Control.Monad
-- | The main function to run rasa.
--
@rasa eventProviders extensions@
--
-- This should be imported by a user-config with and called with an 'Action'
-- containing any extensions which have event listeners.
--
-- > rasa $ do
-- > cursor
-- > vim
-- > slate
rasa :: App () -> IO ()
rasa initialization = void $ eve (initialization >> hooks)
where hooks = beforeEvent_ $ do
dispatchBeforeRender
dispatchOnRender
dispatchAfterRender
| null | https://raw.githubusercontent.com/ChrisPenner/rasa/a2680324849088ee92f063fab091de21c4c2c086/rasa/src/Rasa.hs | haskell | | The main function to run rasa.
This should be imported by a user-config with and called with an 'Action'
containing any extensions which have event listeners.
> rasa $ do
> cursor
> vim
> slate | # language ExistentialQuantification , Rank2Types , ScopedTypeVariables #
module Rasa (rasa) where
import Eve
import Rasa.Internal.Listeners
import Control.Monad
@rasa eventProviders extensions@
rasa :: App () -> IO ()
rasa initialization = void $ eve (initialization >> hooks)
where hooks = beforeEvent_ $ do
dispatchBeforeRender
dispatchOnRender
dispatchAfterRender
|
1b1d16e9de47e083cccbda517fd4b86cfef935ebe5c6bf007d69fcd08022ef40 | dyoo/whalesong | rain-world-program.rkt | #lang whalesong
(require whalesong/world
whalesong/image
whalesong/js)
;; Occupy the whole screen.
(void (call-method body "css" "margin" 0))
(void (call-method body "css" "padding" 0))
(void (call-method body "css" "overflow" "hidden"))
;; Rain falls down the screen.
(define GRAVITY-FACTOR 1)
(define-struct posn (x y))
;; A drop particle describes where it is on screen, what color it is, and
;; how large it is.
(define-struct drop (posn velocity color size))
;; random-drop-particle: drop
;; Generates a random particle.
(define (random-drop)
(make-drop (make-posn (random (viewport-width)) 0)
(+ 5 (random 10)) ;; Get it falling at some random velocity
(random-choice (list "gray" "darkgray"
"white" "blue"
"lightblue"
"darkblue"))
(random 10) ;; with some random size
))
random - choice : ( listof X ) - > X
;; Picks a random element of elts.
(define (random-choice elts)
(list-ref elts (random (length elts))))
;; The world consists of all of the drops in the sky.
listof drop
))
(define (my-filter f l)
(cond
[(null? l)
'()]
[(f (car l))
(cons (car l)
(my-filter f (cdr l)))]
[else
(my-filter f (cdr l))]))
;; tick: world -> world
(define (tick w)
(make-world
(my-filter not-on-floor?
(map drop-descend (cons (random-drop)
(cons (random-drop)
(world-sky w)))))))
;; drop-descend: drop -> drop
;; Makes the drops descend.
(define (drop-descend a-drop)
(cond
[(> (posn-y (drop-posn a-drop)) (viewport-height))
a-drop]
[else
(make-drop (posn-descend (drop-posn a-drop) (drop-velocity a-drop))
(+ GRAVITY-FACTOR (drop-velocity a-drop))
(drop-color a-drop)
(drop-size a-drop))]))
;; posn-descend: posn number -> posn
(define (posn-descend a-posn n)
(make-posn (posn-x a-posn)
(+ n (posn-y a-posn))))
;; on-floor?: drop -> boolean
;; Produces true if the drop has fallen to the floor.
(define (on-floor? a-drop)
(> (posn-y (drop-posn a-drop))
(viewport-height)))
(define (not-on-floor? a-drop) (not (on-floor? a-drop)))
;; make-drop-image: color number -> drop
;; Creates an image of the drop particle.
(define (make-drop-image color size)
(circle size "solid" color))
;; place-drop: drop scene -> scene
(define (place-drop a-drop a-scene)
(place-image (make-drop-image (drop-color a-drop)
(drop-size a-drop))
(posn-x (drop-posn a-drop))
(posn-y (drop-posn a-drop))
a-scene))
(define (my-foldl f acc lst)
(cond
[(null? lst)
acc]
[else
(my-foldl f
(f (car lst) acc)
(cdr lst))]))
;; draw: world -> scene
(define (draw w)
(my-foldl place-drop (empty-scene (viewport-width) (viewport-height)) (world-sky w)))
(big-bang (make-world '())
(to-draw draw)
(on-tick tick))
| null | https://raw.githubusercontent.com/dyoo/whalesong/636e0b4e399e4523136ab45ef4cd1f5a84e88cdc/whalesong/examples/rain-world-program.rkt | racket | Occupy the whole screen.
Rain falls down the screen.
A drop particle describes where it is on screen, what color it is, and
how large it is.
random-drop-particle: drop
Generates a random particle.
Get it falling at some random velocity
with some random size
Picks a random element of elts.
The world consists of all of the drops in the sky.
tick: world -> world
drop-descend: drop -> drop
Makes the drops descend.
posn-descend: posn number -> posn
on-floor?: drop -> boolean
Produces true if the drop has fallen to the floor.
make-drop-image: color number -> drop
Creates an image of the drop particle.
place-drop: drop scene -> scene
draw: world -> scene | #lang whalesong
(require whalesong/world
whalesong/image
whalesong/js)
(void (call-method body "css" "margin" 0))
(void (call-method body "css" "padding" 0))
(void (call-method body "css" "overflow" "hidden"))
(define GRAVITY-FACTOR 1)
(define-struct posn (x y))
(define-struct drop (posn velocity color size))
(define (random-drop)
(make-drop (make-posn (random (viewport-width)) 0)
(random-choice (list "gray" "darkgray"
"white" "blue"
"lightblue"
"darkblue"))
))
random - choice : ( listof X ) - > X
(define (random-choice elts)
(list-ref elts (random (length elts))))
listof drop
))
(define (my-filter f l)
(cond
[(null? l)
'()]
[(f (car l))
(cons (car l)
(my-filter f (cdr l)))]
[else
(my-filter f (cdr l))]))
(define (tick w)
(make-world
(my-filter not-on-floor?
(map drop-descend (cons (random-drop)
(cons (random-drop)
(world-sky w)))))))
(define (drop-descend a-drop)
(cond
[(> (posn-y (drop-posn a-drop)) (viewport-height))
a-drop]
[else
(make-drop (posn-descend (drop-posn a-drop) (drop-velocity a-drop))
(+ GRAVITY-FACTOR (drop-velocity a-drop))
(drop-color a-drop)
(drop-size a-drop))]))
(define (posn-descend a-posn n)
(make-posn (posn-x a-posn)
(+ n (posn-y a-posn))))
(define (on-floor? a-drop)
(> (posn-y (drop-posn a-drop))
(viewport-height)))
(define (not-on-floor? a-drop) (not (on-floor? a-drop)))
(define (make-drop-image color size)
(circle size "solid" color))
(define (place-drop a-drop a-scene)
(place-image (make-drop-image (drop-color a-drop)
(drop-size a-drop))
(posn-x (drop-posn a-drop))
(posn-y (drop-posn a-drop))
a-scene))
(define (my-foldl f acc lst)
(cond
[(null? lst)
acc]
[else
(my-foldl f
(f (car lst) acc)
(cdr lst))]))
(define (draw w)
(my-foldl place-drop (empty-scene (viewport-width) (viewport-height)) (world-sky w)))
(big-bang (make-world '())
(to-draw draw)
(on-tick tick))
|
f0fb9c99b515c5efb544cc1dd1130606415565292c818f2e8c32fe442fff2288 | lokedhs/containers | atomic.lisp | (in-package :receptacle)
(defclass atomic-variable ()
((value :type t
:initarg :value
:initform (error "~s not supplied when creating ~s" :value 'atomic-variable)
:accessor atomic-variable/value)
(lock :type t
:initform (bordeaux-threads:make-lock "Atomic variable lock")
:reader atomic-variable/lock))
(:documentation "Class that holds a single value. The value can be
get or set atomically."))
(defmethod print-object ((obj atomic-variable) stream)
(print-unreadable-object (obj stream :type t :identity nil)
(format stream "~s" (if (slot-boundp obj 'value)
(atomic-variable/value obj)
:not-bound))))
(defun make-atomic-variable (value)
(make-instance 'atomic-variable :value value))
(defgeneric (setf atomic/value) (value variable))
(defmethod (setf atomic/value) (value (variable atomic-variable))
(bordeaux-threads:with-lock-held ((atomic-variable/lock variable))
(setf (atomic-variable/value variable) value)))
(defgeneric atomic/value (variable))
(defmethod atomic/value ((variable atomic-variable))
(bordeaux-threads:with-lock-held ((atomic-variable/lock variable))
(atomic-variable/value variable)))
(defmacro with-atomic-variable ((sym variable) &body body)
(alexandria:with-gensyms (variable-sym)
`(let ((,variable-sym ,variable))
(symbol-macrolet ((,sym (atomic-variable/value ,variable-sym)))
(bordeaux-threads:with-lock-held ((atomic-variable/lock ,variable-sym))
(with-disabled-interrupts
,@body))))))
;;;
CAS implementation
;;;
(defclass cas-wrapper ()
((value :type t
:initarg :value)
#-sbcl
(lock :type t
:initform (bordeaux-threads:make-lock))))
(defmethod print-object ((obj cas-wrapper) stream)
(print-unreadable-object (obj stream :type t :identity t)
(format stream "VALUE ~s"
(if (slot-boundp obj 'value)
(slot-value obj 'value)
:NOT-BOUND))))
(defun make-cas-wrapper (value)
(make-instance 'cas-wrapper :value value))
(defun cas (wrapper old-value new-value)
#+sbcl
(sb-ext:cas (slot-value wrapper 'value) old-value new-value)
#-sbcl
(bordeaux-threads:with-lock-held ((slot-value wrapper 'lock))
(let ((v (slot-value wrapper 'value)))
(when (eq v old-value)
(setf (slot-value wrapper 'value) new-value))
v)))
(defun cas-wrapper/value (wrapper)
(slot-value wrapper 'value))
(defun call-with-cas (wrapper fn)
(loop
for old = (cas-wrapper/value wrapper)
for v = (funcall fn old)
for result = (cas wrapper old v)
when (eq old result)
return v))
(defmacro with-cas-update ((sym wrapper) &body body)
(let ((value (gensym "WRAPPER-VALUE-")))
`(call-with-cas ,wrapper (lambda (,value)
(let ((,sym ,value))
,@body)))))
| null | https://raw.githubusercontent.com/lokedhs/containers/5d4ce688bddd51ee34a4259e37b698b84f650bdf/src/atomic.lisp | lisp | (in-package :receptacle)
(defclass atomic-variable ()
((value :type t
:initarg :value
:initform (error "~s not supplied when creating ~s" :value 'atomic-variable)
:accessor atomic-variable/value)
(lock :type t
:initform (bordeaux-threads:make-lock "Atomic variable lock")
:reader atomic-variable/lock))
(:documentation "Class that holds a single value. The value can be
get or set atomically."))
(defmethod print-object ((obj atomic-variable) stream)
(print-unreadable-object (obj stream :type t :identity nil)
(format stream "~s" (if (slot-boundp obj 'value)
(atomic-variable/value obj)
:not-bound))))
(defun make-atomic-variable (value)
(make-instance 'atomic-variable :value value))
(defgeneric (setf atomic/value) (value variable))
(defmethod (setf atomic/value) (value (variable atomic-variable))
(bordeaux-threads:with-lock-held ((atomic-variable/lock variable))
(setf (atomic-variable/value variable) value)))
(defgeneric atomic/value (variable))
(defmethod atomic/value ((variable atomic-variable))
(bordeaux-threads:with-lock-held ((atomic-variable/lock variable))
(atomic-variable/value variable)))
(defmacro with-atomic-variable ((sym variable) &body body)
(alexandria:with-gensyms (variable-sym)
`(let ((,variable-sym ,variable))
(symbol-macrolet ((,sym (atomic-variable/value ,variable-sym)))
(bordeaux-threads:with-lock-held ((atomic-variable/lock ,variable-sym))
(with-disabled-interrupts
,@body))))))
CAS implementation
(defclass cas-wrapper ()
((value :type t
:initarg :value)
#-sbcl
(lock :type t
:initform (bordeaux-threads:make-lock))))
(defmethod print-object ((obj cas-wrapper) stream)
(print-unreadable-object (obj stream :type t :identity t)
(format stream "VALUE ~s"
(if (slot-boundp obj 'value)
(slot-value obj 'value)
:NOT-BOUND))))
(defun make-cas-wrapper (value)
(make-instance 'cas-wrapper :value value))
(defun cas (wrapper old-value new-value)
#+sbcl
(sb-ext:cas (slot-value wrapper 'value) old-value new-value)
#-sbcl
(bordeaux-threads:with-lock-held ((slot-value wrapper 'lock))
(let ((v (slot-value wrapper 'value)))
(when (eq v old-value)
(setf (slot-value wrapper 'value) new-value))
v)))
(defun cas-wrapper/value (wrapper)
(slot-value wrapper 'value))
(defun call-with-cas (wrapper fn)
(loop
for old = (cas-wrapper/value wrapper)
for v = (funcall fn old)
for result = (cas wrapper old v)
when (eq old result)
return v))
(defmacro with-cas-update ((sym wrapper) &body body)
(let ((value (gensym "WRAPPER-VALUE-")))
`(call-with-cas ,wrapper (lambda (,value)
(let ((,sym ,value))
,@body)))))
| |
d178ffa3f34bb1632aa7850766a1d260d27761bd6541e82a1b440a4f63b131c4 | haskell-hvr/regex-tdfa | Engine_FA.hs | # LANGUAGE CPP #
#if __GLASGOW_HASKELL__ >= 902
# OPTIONS_GHC -Wno - incomplete - uni - patterns #
#endif
-- | This is the code for the main engine. This captures the posix
-- subexpressions. There is also a non-capturing engine, and a
-- testing engine.
--
It is polymorphic over the internal Uncons type class , and
-- specialized to produce the needed variants.
module Text.Regex.TDFA.NewDFA.Engine_FA(execMatch) where
import Data.Array.Base(unsafeRead,unsafeWrite,STUArray(..))
import GHC.Arr(STArray(..))
import GHC.ST(ST(..))
import GHC.Exts(MutableByteArray#,RealWorld,Int#,sizeofMutableByteArray#,unsafeCoerce#)
import Prelude hiding ((!!))
import Control.Monad(when,unless,forM,forM_,liftM2,foldM)
import Data.Array.MArray(MArray(..))
import Data.Array.Unsafe(unsafeFreeze)
import Data.Array.IArray(Array,bounds,assocs,Ix(range))
import qualified Data.IntMap.CharMap2 as CMap(findWithDefault)
import Data.IntMap(IntMap)
import qualified Data.IntMap as IMap(null,toList,lookup,insert)
import Data.Maybe(catMaybes)
import Data.Monoid as Mon(Monoid(..))
import Data.IntSet(IntSet)
import qualified Data.IntSet as ISet(toAscList,null)
import Data.Array.IArray((!))
import Data.List(sortBy,groupBy)
import Data.STRef(STRef,newSTRef,readSTRef,writeSTRef)
import qualified Control.Monad.ST.Strict as S(ST,runST)
import Data.Sequence(Seq,ViewL(..),viewl)
import qualified Data.Sequence as Seq(null)
import qualified Data.ByteString.Char8 as SBS(ByteString)
import qualified Data.ByteString.Lazy.Char8 as LBS(ByteString)
import Text.Regex.Base(MatchArray,MatchOffset,MatchLength)
import Text.Regex.TDFA.Common hiding (indent)
import Text.Regex.TDFA.NewDFA.Uncons(Uncons(uncons))
import Text.Regex.TDFA.NewDFA.MakeTest(test_singleline,test_multiline)
--import Debug.Trace
-- trace :: String -> a -> a
-- trace _ a = a
-- | Raise an internal-invariant failure, tagged with this module's name
-- so the report pinpoints the engine variant.
err :: String -> a
err s = common_error "Text.Regex.TDFA.NewDFA.Engine_FA" s
{-# INLINE (!!) #-}
-- | Unchecked mutable-array read.  Shadows the Prelude list '(!!)'
-- (hidden in the import list); no bounds check is performed.
(!!) :: (MArray a e (S.ST s),Ix i) => a i e -> Int -> S.ST s e
(!!) = unsafeRead
{-# INLINE set #-}
-- | Unchecked mutable-array write; write-side partner of '(!!)'.
set :: (MArray a e (S.ST s),Ix i) => a i e -> Int -> e -> S.ST s ()
set = unsafeWrite
-- | Sentinel "no chosen source" entry for the per-destination 'which'
-- array.  The (-1) index marks it; the other two fields are errors and
-- must never be forced (callers test the index first).
noSource :: ((Index, Instructions),STUArray s Tag Position,OrbitLog)
noSource = ((-1,err "noSource"),err "noSource",err "noSource")
# SPECIALIZE execMatch : : Regex - > Position - > Char - > ( [ ] ) - > [ MatchArray ] #
# SPECIALIZE execMatch : : Regex - > Position - > Char - > ( Seq Char ) - > [ MatchArray ] #
# SPECIALIZE execMatch : : Regex - > Position - > Char - > SBS.ByteString - > [ MatchArray ] #
# SPECIALIZE execMatch : : Regex - > Position - > Char - > LBS.ByteString - > [ MatchArray ] #
-- | Run the compiled DFA over the input starting at @offsetIn@ (with
-- @prevIn@ as the preceding character for anchor/word tests) and return
-- the captured POSIX match, if any, as a list of 'MatchArray'.
-- The whole search runs inside 'S.runST' over mutable scratch buffers.
execMatch :: forall text. Uncons text => Regex -> Position -> Char -> text -> [MatchArray]
execMatch (Regex { regex_dfa = DFA {d_id=didIn,d_dt=dtIn}
                 , regex_init = startState
                 , regex_b_index = b_index
                 , regex_b_tags = b_tags_all
                 , regex_tags = aTags
                 , regex_groups = aGroups
                 , regex_compOptions = CompOption { multiline = newline } } )
          offsetIn prevIn inputIn = S.runST goNext where

  -- Tag-array bounds, forced strictly once up front.
  b_tags :: (Tag,Tag)
  !b_tags = b_tags_all

  -- Tags whose operation is Orbit; only these need orbit-log upkeep.
  orbitTags :: [Tag]
  !orbitTags = map fst . filter ((Orbit==).snd) . assocs $ aTags

  -- Positional test (anchors etc.), specialized for multiline mode.
  test :: WhichTest -> Index -> Char -> text -> Bool
  !test = mkTest newline

  -- Comparator closure that picks the POSIX-preferred of two states.
  comp :: C s
  comp = {-# SCC "matchHere.comp" #-} ditzyComp'3 aTags

  goNext :: forall s. ST s [MatchArray]
  goNext = {-# SCC "goNext" #-} do
    (SScratch s1In s2In (winQ,blank,which)) <- newScratch b_index b_tags
    spawnAt b_tags blank startState s1In offsetIn
    let -- Main driver: walk the decision tree at the current offset,
        -- record any winners, then consume one character and recurse
        -- with the two scratch buffers (s1/s2) swapped.
        next s1 s2 did dt offset prev input = {-# SCC "goNext.next" #-}
          case dt of
            Testing' {dt_test=wt,dt_a=a,dt_b=b} ->
              if test wt offset prev input
                then next s1 s2 did a offset prev input
                else next s1 s2 did b offset prev input
            Simple' {dt_win=w,dt_trans=t,dt_other=o} -> do
              unless (IMap.null w) $
                processWinner s1 w offset
              case uncons input of
                Nothing -> finalizeWinner
                Just (c,input') ->
                  case CMap.findWithDefault o c t of
                    Transition {trans_single=DFA {d_id=did',d_dt=dt'},trans_how=dtrans}
                      | ISet.null did' -> finalizeWinner
                      | otherwise -> findTrans s1 s2 did did' dt' dtrans offset c input'

        -- compressOrbits gets all the current Tag-0 start information from
        -- the NFA states; then it loops through all the Orbit tags with
        -- compressOrbit.
        --
        -- compressOrbit on such a Tag loops through all the NFS states'
        -- m_orbit record, discarding ones that are Nothing and discarding
        -- ones that are too new to care about (after the cutoff value).
        --
        -- compressOrbit then groups the Orbits records by the Tag-0 start
        -- position and the basePos position.  Entries in different groups
        -- will never be comparable in the future so they can be processed
        -- separately.  Groups could probably be even more finely
        -- distinguished, as a further optimization, but the justification will
        -- be tricky.
        --
        -- Current Tag-0 values are at most offset and all newly spawned
        -- groups will have Tag-0 of at least (succ offset) so the current
        -- groups are closed to those spawned in the future.  The basePos may
        -- be as large as offset and may be overwritten later with values of
        -- offset or larger (and this will also involve deleting the Orbits
        -- record).  Thus there could be a future collision between a current
        -- group with basePos==offset and an updated record that acquires
        -- basePos==offset.  By excluding groups with basePos before the
        -- current offset the collision between existing and future records
        -- is avoided.
        --
        -- An entry in a group can only collide with that group's
        -- descendants.  compressOrbit sends each group to the compressGroup
        -- command.
        --
        -- compressGroup on a single record checks whether it's Seq can be
        -- cleared and if so it will clear it (and set ordinal to Nothing but
        -- this this not particularly important).
        --
        -- compressGroup on many records sorts and groups the members and zips
        -- the groups with their new ordinal value.  The comparison is based
        -- on the old ordinal value, then the inOrbit value, and then the (Seq
        -- Position) data.
        --
        -- The old ordinals of the group will all be Nothing or all be Just,
        -- but this condition is neither checked nor violations detected.
        -- This comparison is justified because once records get different
        -- ordinals assigned they will never change places.
        --
        -- The inOrbit Bool is only different if one of them has set the stop
        -- position to at most (succ offset).  They will obly be compared if
        -- the other one leaves, an its stop position will be at least offset.
        -- The previous sentence is justified by inspection of the "assemble"
        -- function in the TNFA module: there is no (PostUpdate
        -- LeaveOrbitTask) so the largest possible value for the stop Tag is
        -- (pred offset).  Thus the record with inOrbit==False would beat (be
        -- GT than) the record with inOrbit==True.
        --
        -- The Seq comparison is safe because the largest existing Position
        -- value is (pred offset) and the smallest future Position value is
        -- offset.  The previous sentence is justified by inspection of the
        -- "assemble" function in the TNFA module: there is no (PostUpdate
        -- EnterOrbitTags) so the largest possible value in the Seq is (pred
        -- offset).
        --
        -- The updated Orbits get the new ordinal value and an empty (Seq
        -- Position).
        compressOrbits :: MScratch s -> IntSet -> Position -> ST s ()
        compressOrbits s1 did offset = do
          let getStart state = do start <- maybe (err "compressOrbit,1") (!! 0) =<< m_pos s1 !! state
                                  return (state,start)
              -- Require: cutoff <= offset, MAGIC TUNABLE CONSTANT 50
              -- NOTE(review): the binding of 'cutoff' (used below in
              -- compressOrbit) appears to have been lost from this copy
              -- of the file -- TODO restore it here.
          ss <- mapM getStart (ISet.toAscList did)
          let compressOrbit tag = do
                mos <- forM ss ( \ p@(state,_start) -> do
                         mo <- fmap (IMap.lookup tag) (m_orbit s1 !! state)
                         case mo of
                           Just orbits | basePos orbits < cutoff -> return (Just (p,orbits))
                                       | otherwise -> return Nothing
                           _ -> return Nothing )
                let compressGroup [((state,_),orbit)] | Seq.null (getOrbits orbit) = return ()
                                                      | otherwise =
                      set (m_orbit s1) state
                      . (IMap.insert tag $! (orbit { ordinal = Nothing, getOrbits = mempty}))
                      =<< m_orbit s1 !! state
                    compressGroup gs = do
                      let sortPos (_,b1) (_,b2) = compare (ordinal b1) (ordinal b2) `mappend`
                                                  compare (inOrbit b2) (inOrbit b1) `mappend`
                                                  comparePos (viewl (getOrbits b1)) (viewl (getOrbits b2))
                          groupPos (_,b1) (_,b2) = ordinal b1 == ordinal b2 && getOrbits b1 == getOrbits b2
                          gs' = zip [(1::Int)..] (groupBy groupPos . sortBy sortPos $ gs)
                      forM_ gs' $ \ (!n,eqs) -> do
                        forM_ eqs $ \ ((state,_),orbit) ->
                          set (m_orbit s1) state
                          . (IMap.insert tag $! (orbit { ordinal = Just n, getOrbits = mempty }))
                          =<< m_orbit s1 !! state
                let sorter ((_,a1),b1) ((_,a2),b2) = compare a1 a2 `mappend` compare (basePos b1) (basePos b2)
                    grouper ((_,a1),b1) ((_,a2),b2) = a1==a2 && basePos b1 == basePos b2
                    orbitGroups = groupBy grouper . sortBy sorter . catMaybes $ mos
                mapM_ compressGroup orbitGroups
          mapM_ compressOrbit orbitTags

        -- findTrans has to (part 1) decide, for each destination, "which" of
        -- zero or more source NFA states will be the chosen source.  Then it
        -- has to (part 2) perform the transition or spawn.  It keeps track of
        -- the starting index while doing so, and compares the earliest start
        -- with the stored winners.  (part 3) If some winners are ready to be
        -- released then the future continuation of the search is placed in
        -- "storeNext".  If no winners are ready to be released then the
        -- computation continues immediately.
        findTrans
          :: MScratch s
          -> MScratch s
          -> IntSet
          -> SetIndex
          -> DT
          -> DTrans
          -> Index
          -> Char
          -> text
          -> ST s [MatchArray]
        findTrans s1 s2 did did' dt' dtrans offset prev' input' = {-# SCC "goNext.findTrans" #-} do
          -- findTrans part 0
          -- MAGIC TUNABLE CONSTANT 100 (and 100-1).  TODO: (offset .&. 127 == 127) instead?
          when (not (null orbitTags) && (offset `rem` 100 == 99)) (compressOrbits s1 did offset)
          -- findTrans part 1
          let findTransTo (destIndex,sources)
                | IMap.null sources =
                    set which destIndex noSource
                | otherwise = do
                    let prep (sourceIndex,(_dopa,instructions)) = {-# SCC "goNext.findTrans.prep" #-} do
                          pos <- maybe (err $ "findTrans,1 : "++show (sourceIndex,destIndex,did')) return
                                 =<< m_pos s1 !! sourceIndex
                          orbit <- m_orbit s1 !! sourceIndex
                          let orbit' = maybe orbit (\ f -> f offset orbit) (newOrbits instructions)
                          return ((sourceIndex,instructions),pos,orbit')
                        challenge x1@((_si1,ins1),_p1,_o1) x2@((_si2,ins2),_p2,_o2) = {-# SCC "goNext.findTrans.challenge" #-} do
                          check <- comp offset x1 (newPos ins1) x2 (newPos ins2)
                          if check==LT then return x2 else return x1
                    first_rest <- mapM prep (IMap.toList sources)
                    let first:rest = first_rest
                    set which destIndex =<< foldM challenge first rest
          let dl = IMap.toList dtrans
          mapM_ findTransTo dl
          -- findTrans part 2
          -- NOTE(review): the line "let performTransTo destIndex =
          -- {-# SCC "goNext.findTrans.performTransTo" #-} do" appears to
          -- have been truncated from this copy; only its body remains.
                x@((sourceIndex,_instructions),_pos,_orbit') <- which !! destIndex
                unless (sourceIndex == (-1)) $
                  (updateCopy x offset s2 destIndex)
          mapM_ performTransTo dl
          -- findTrans part 3
          let offset' = succ offset in seq offset' $ next s2 s1 did' dt' offset' prev' input'

        -- The "newWinnerThenProceed" can find both a new non-empty winner and
        -- a new empty winner.  A new non-empty winner can cause some of the
        -- NFA states that comprise the DFA state to be eliminated, and if the
        -- startState is eliminated then it must then be respawned.  And
        -- imperative flag setting and resetting style is used.
        --
        -- A non-empty winner from the startState might obscure a potential
        -- empty winner (form the startState at the current offset).  This
        -- winEmpty possibility is also checked for. (unit test pattern ".*")
        -- (further test "(.+|.+.)*" on "aa\n")
        {-# INLINE processWinner #-}
        processWinner :: MScratch s -> IntMap Instructions -> Position -> ST s ()
        processWinner s1 w offset = {-# SCC "goNext.newWinnerThenProceed" #-} do
          let prep x@(sourceIndex,instructions) = {-# SCC "goNext.newWinnerThenProceed.prep" #-} do
                pos <- maybe (err "newWinnerThenProceed,1") return =<< m_pos s1 !! sourceIndex
                startPos <- pos !! 0
                orbit <- m_orbit s1 !! sourceIndex
                let orbit' = maybe orbit (\ f -> f offset orbit) (newOrbits instructions)
                return (startPos,(x,pos,orbit'))
              challenge x1@((_si1,ins1),_p1,_o1) x2@((_si2,ins2),_p2,_o2) = {-# SCC "goNext.newWinnerThenProceed.challenge" #-} do
                check <- comp offset x1 (newPos ins1) x2 (newPos ins2)
                if check==LT then return x2 else return x1
          prep'd <- mapM prep (IMap.toList w)
          case map snd prep'd of
            [] -> return ()
            (first:rest) -> newWinner offset =<< foldM challenge first rest

        -- Freeze the chosen winner: copy its tag array, apply the final
        -- tag actions at preTag, and store it in the winner queue.
        newWinner :: Position -> ((a, Instructions), STUArray s Tag Position, c) -> ST s ()
        newWinner preTag ((_sourceIndex,winInstructions),oldPos,_newOrbit) = {-# SCC "goNext.newWinner" #-} do
          newerPos <- newA_ b_tags
          copySTU oldPos newerPos
          doActions preTag newerPos (newPos winInstructions)
          putMQ (WScratch newerPos) winQ

        -- Convert the stored winner (if any) into the result list.
        finalizeWinner :: ST s [MatchArray]
        finalizeWinner = do
          mWinner <- readSTRef (mq_mWin winQ)
          case mWinner of
            Nothing -> return []
            Just winner -> resetMQ winQ >> mapM (tagsToGroupsST aGroups) [winner]
    -- goNext then ends with the next statement
    next s1In s2In didIn dtIn offsetIn prevIn inputIn
{-# INLINE doActions #-}
-- | Execute a transition's pending tag actions against a tag-position
-- array.  @preTag@ is the position before the consumed character:
-- 'SetPre' stores it, 'SetPost' stores its successor, 'SetVal' stores
-- the literal value carried by the action.
doActions :: Position -> STUArray s Tag Position -> [(Tag, Action)] -> ST s ()
doActions preTag pos ins = forM_ ins $ \(tag, act) ->
  case act of
    SetPre   -> set pos tag preTag
    SetPost  -> set pos tag (succ preTag)
    SetVal v -> set pos tag v
----
{-# INLINE mkTest #-}
-- | Pick the positional-test implementation: 'test_multiline' when the
-- multiline flag is set, otherwise 'test_singleline'.
mkTest :: Uncons text => Bool -> WhichTest -> Index -> Char -> text -> Bool
mkTest True  = test_multiline
mkTest False = test_singleline
----
{- MUTABLE WINNER QUEUE -}
newtype MQ s = MQ { mq_mWin :: STRef s (Maybe (WScratch s)) }
-- | Allocate an empty winner queue.
newMQ :: S.ST s (MQ s)
newMQ = fmap MQ (newSTRef Nothing)
-- | Discard any stored winner, returning the queue to its empty state.
resetMQ :: MQ s -> S.ST s ()
resetMQ (MQ mWin) = writeSTRef mWin Nothing
-- | Record @ws@ as the current winner, replacing any previous one.
putMQ :: WScratch s -> MQ s -> S.ST s ()
putMQ ws (MQ mWin) = writeSTRef mWin (Just ws)
{- MUTABLE SCRATCH DATA STRUCTURES -}
-- | Top-level scratch state for one search: two 'MScratch' buffers
-- (swapped on every character step) plus the winner queue, a blank
-- tag-position array used to wipe entries, and the per-destination
-- "which source won" array filled in by findTrans.
data SScratch s = SScratch { _s_1 :: !(MScratch s)
                           , _s_2 :: !(MScratch s)
                           , _s_rest :: !( MQ s
                                         , BlankScratch s
                                         , STArray s Index ((Index,Instructions),STUArray s Tag Position,OrbitLog)
                                         )
                           }
-- | Per-NFA-state mutable scratch: lazily-allocated tag-position arrays
-- and the per-state orbit logs.
data MScratch s = MScratch { m_pos :: !(STArray s Index (Maybe (STUArray s Tag Position)))
                           , m_orbit :: !(STArray s Index OrbitLog)
                           }
-- | A pristine tag-position array (filled with -1 by 'newScratch'),
-- copied over an entry to reset it.
newtype BlankScratch s = BlankScratch { _blank_pos :: (STUArray s Tag Position)
                                      }
-- | A captured winner: its final tag positions, later converted to a
-- MatchArray by 'tagsToGroupsST'.
newtype WScratch s = WScratch { w_pos :: (STUArray s Tag Position)
                              }
{- DEBUGGING HELPERS -}
{-
indent : : String - > String
indent xs = ' ' : ' ' : xs
showMS : : MScratch s - > Index - > ST s String
showMS s i = do
ma < - m_pos s ! ! i
mc < - m_orbit s ! ! i
a < - case ma of
Nothing - > return " No pos "
Just pos - > fmap show ( getAssocs pos )
let c = show return $ unlines [ " MScratch , index = " + + show i
, indent a
, indent c ]
showWS : : ST s String
showWS ( ) = do
a < - getAssocs pos
return $ unlines [ " WScratch "
, indent ( show a ) ]
indent :: String -> String
indent xs = ' ':' ':xs
showMS :: MScratch s -> Index -> ST s String
showMS s i = do
ma <- m_pos s !! i
mc <- m_orbit s !! i
a <- case ma of
Nothing -> return "No pos"
Just pos -> fmap show (getAssocs pos)
let c = show mc
return $ unlines [ "MScratch, index = "++show i
, indent a
, indent c]
showWS :: WScratch s -> ST s String
showWS (WScratch pos) = do
a <- getAssocs pos
return $ unlines [ "WScratch"
, indent (show a)]
-}
{- CREATING INITIAL MUTABLE SCRATCH DATA STRUCTURES -}
{-# INLINE newA #-}
-- | Allocate a tag-indexed unboxed array with every slot set to @initial@.
newA :: (MArray (STUArray s) e (ST s)) => (Tag,Tag) -> e -> S.ST s (STUArray s Tag e)
newA = newArray
{-# INLINE newA_ #-}
-- | Allocate a tag-indexed unboxed array with unspecified contents.
newA_ :: (MArray (STUArray s) e (ST s)) => (Tag,Tag) -> S.ST s (STUArray s Tag e)
newA_ = newArray_
-- | Build all scratch state for a fresh search: two per-state buffers,
-- an empty winner queue, a blank tag array initialized to -1, and the
-- "which" array pre-filled with poisoned entries (index -1 plus error
-- thunks that must never be forced before being overwritten).
newScratch :: (Index,Index) -> (Tag,Tag) -> S.ST s (SScratch s)
newScratch b_index b_tags = do
  scratch1 <- newMScratch b_index
  scratch2 <- newMScratch b_index
  queue    <- newMQ
  blankPos <- newA b_tags (-1)
  chosen   <- newArray b_index ( (-1, err "newScratch which 1")
                               , err "newScratch which 2"
                               , err "newScratch which 3" )
  return (SScratch scratch1 scratch2 (queue, BlankScratch blankPos, chosen))
-- | Allocate per-state scratch: no tag arrays yet (allocated on first
-- use) and empty orbit logs.
newMScratch :: (Index,Index) -> S.ST s (MScratch s)
newMScratch b_index =
  liftM2 MScratch (newArray b_index Nothing)
                  (newArray b_index Mon.mempty)
{- COMPOSE A FUNCTION CLOSURE TO COMPARE TAG VALUES -}
-- | One comparator in the per-tag chain; it receives the rest of the
-- chain so it can delegate when its own tag compares equal.
newtype F s = F ([F s] -> C s)
-- | Comparison closure: given the current offset, two candidate states
-- (each with its pending action list), decide which is preferred.
type C s = Position
        -> ((Int, Instructions), STUArray s Tag Position, IntMap Orbits)
        -> [(Int, Action)]
        -> ((Int, Instructions), STUArray s Tag Position, IntMap Orbits)
        -> [(Int, Action)]
        -> ST s Ordering
{-# INLINE orderOf #-}
-- | Compare two pending tag actions of the same tag:
-- SetPre < SetPost, equal constructors are EQ, and two SetVal payloads
-- compare by value.  Mixing SetVal with SetPre/SetPost is an internal
-- error (kept from the original bestTrans error text).
orderOf :: Action -> Action -> Ordering
orderOf post1 post2 =
  case (post1,post2) of
    (SetPre,SetPre) -> EQ
    (SetPost,SetPost) -> EQ
    (SetPre,SetPost) -> LT
    (SetPost,SetPre) -> GT
    (SetVal v1,SetVal v2) -> compare v1 v2
    _ -> err $ "bestTrans.compareWith.choose sees incomparable "++show (post1,post2)
-- | Build the comparator closure for a tag-operation table: tag 0 is
-- compared first (reversed, since tag 0 is minimized -- earlier start
-- wins), then a chain of per-tag comparators for tags 1..top, one per
-- 'OP', ending in a comparator that always returns EQ.
ditzyComp'3 :: forall s. Array Tag OP -> C s
ditzyComp'3 aTagOP = comp0 where
  -- Head of the chain for tag 1 onward; built once and shared.
  (F comp1:compsRest) = allcomps 1

  comp0 :: C s
  comp0 preTag x1@(_state1,pos1,_orbit1') np1 x2@(_state2,pos2,_orbit2') np2 = do
    c <- liftM2 compare (pos2!!0) (pos1!!0) -- reversed since Minimize
    case c of
      EQ -> comp1 compsRest preTag x1 np1 x2 np2
      answer -> return answer

  -- One comparator per tag, selected by that tag's operation.
  allcomps :: Tag -> [F s]
  allcomps tag | tag > top = [F (\ _ _ _ _ _ _ -> return EQ)]
               | otherwise =
    case aTagOP ! tag of
      Orbit    -> F (challenge_Orb tag)    : allcomps (succ tag)
      Maximize -> F (challenge_Max tag)    : allcomps (succ tag)
      Ignore   -> F (challenge_Ignore tag) : allcomps (succ tag)
      -- Only tag 0 is minimized, and comp0 handles it directly.
      Minimize -> err "allcomps Minimize"
    where top = snd (bounds aTagOP)
-- | Comparator for an 'Ignore' tag: consume any pending action for this
-- tag on either side without comparing anything, then continue with the
-- next comparator in the chain.
challenge_Ignore
  :: Int
  -> [F s1]
  -> Position
  -> ((Int, Instructions), STUArray s1 Tag Position, IntMap Orbits)
  -> [(Int, Action)]
  -> ((Int, Instructions), STUArray s1 Tag Position, IntMap Orbits)
  -> [(Int, Action)]
  -> ST s1 Ordering
challenge_Ignore !tag (F next:comps) preTag x1 np1 x2 np2 =
  case np1 of
    ((t1,_):rest1) | t1==tag ->
      case np2 of
        ((t2,_):rest2) | t2==tag -> next comps preTag x1 rest1 x2 rest2
        _ -> next comps preTag x1 rest1 x2 np2
    _ -> do
      case np2 of
        ((t2,_):rest2) | t2==tag -> next comps preTag x1 np1 x2 rest2
        _ -> next comps preTag x1 np1 x2 np2
challenge_Ignore _ [] _ _ _ _ _ = err "impossible 2347867"
-- | Comparator for a 'Maximize' tag.  A pending action at the head of a
-- side's action list overrides that side's stored position (SetPre ->
-- preTag, SetPost -> succ preTag, SetVal v -> v); otherwise the stored
-- array value is read.  Unequal positions decide the order via
-- 'compare' (larger wins for Maximize); equal positions fall through to
-- the next comparator.  Two pending actions are ordered by 'orderOf'.
challenge_Max
  :: Int
  -> [F s1]
  -> Position
  -> ((Int, Instructions), STUArray s1 Tag Position, IntMap Orbits)
  -> [(Int, Action)]
  -> ((Int, Instructions), STUArray s1 Tag Position, IntMap Orbits)
  -> [(Int, Action)]
  -> ST s1 Ordering
challenge_Max !tag (F next:comps) preTag x1@(_state1,pos1,_orbit1') np1 x2@(_state2,pos2,_orbit2') np2 =
  case np1 of
    ((t1,b1):rest1) | t1==tag ->
      case np2 of
        ((t2,b2):rest2) | t2==tag ->
          if b1==b2 then next comps preTag x1 rest1 x2 rest2
            else return (orderOf b1 b2)
        _ -> do
          p2 <- pos2 !! tag
          let p1 = case b1 of SetPre -> preTag
                              SetPost -> succ preTag
                              SetVal v -> v
          if p1==p2 then next comps preTag x1 rest1 x2 np2
            else return (compare p1 p2)
    _ -> do
      p1 <- pos1 !! tag
      case np2 of
        ((t2,b2):rest2) | t2==tag -> do
          let p2 = case b2 of SetPre -> preTag
                              SetPost -> succ preTag
                              SetVal v -> v
          if p1==p2 then next comps preTag x1 np1 x2 rest2
            else return (compare p1 p2)
        _ -> do
          p2 <- pos2 !! tag
          if p1==p2 then next comps preTag x1 np1 x2 np2
            else return (compare p1 p2)
challenge_Max _ [] _ _ _ _ _ = err "impossible 9384324"
-- | Comparator for an 'Orbit' tag: look up both orbit logs; when both
-- are absent, fall through.  When both are present with the same
-- inOrbit flag, compare ordinals and then the recorded position
-- sequences; EQ falls through.  Any other combination (mismatched
-- presence or inOrbit flags) is an internal error.
challenge_Orb
  :: Int
  -> [F s1]
  -> Position
  -> ((Int, Instructions), STUArray s1 Tag Position, IntMap Orbits)
  -> [(Int, Action)]
  -> ((Int, Instructions), STUArray s1 Tag Position, IntMap Orbits)
  -> [(Int, Action)]
  -> ST s1 Ordering
challenge_Orb !tag (F next:comps) preTag x1@(_state1,_pos1,orbit1') np1 x2@(_state2,_pos2,orbit2') np2 =
  let s1 = IMap.lookup tag orbit1'
      s2 = IMap.lookup tag orbit2'
  in case (s1,s2) of
       (Nothing,Nothing) -> next comps preTag x1 np1 x2 np2
       (Just o1,Just o2) | inOrbit o1 == inOrbit o2 ->
         case compare (ordinal o1) (ordinal o2) `mappend`
              comparePos (viewl (getOrbits o1)) (viewl (getOrbits o2)) of
           EQ -> next comps preTag x1 np1 x2 np2
           answer -> return answer
       _ -> err $ unlines [ "challenge_Orb is too stupid to handle mismatched orbit data :"
                          , show(tag,preTag,np1,np2)
                          , show s1
                          , show s2
                          ]
challenge_Orb _ [] _ _ _ _ _ = err "impossible 0298347"
-- | Lexicographic comparison of two orbit position sequences, viewed
-- element by element.  Note the asymmetry: an exhausted sequence
-- compares GT against a non-exhausted one (and LT the other way round),
-- so a shorter recorded history is the "greater" of the two.
comparePos :: (ViewL Position) -> (ViewL Position) -> Ordering
comparePos l r = case (l, r) of
  (EmptyL   , EmptyL   ) -> EQ
  (EmptyL   , _        ) -> GT
  (_        , EmptyL   ) -> LT
  (p1 :< ps1, p2 :< ps2) ->
    compare p1 p2 `mappend` comparePos (viewl ps1) (viewl ps2)
{- CONVERT WINNERS TO MATCHARRAY -}
-- | Convert a winner's final tag positions into a 'MatchArray':
-- slot 0 holds the whole match (offset, length) taken from tags 0 and
-- 1; slots 1..b_max hold the capture groups.  A group is recorded only
-- if its flag tag fired (/= -1) and its span lies inside its parent's
-- recorded span; each group index keeps the first of its GroupInfo
-- alternatives that qualifies.
tagsToGroupsST :: forall s. Array GroupIndex [GroupInfo] -> WScratch s -> S.ST s MatchArray
tagsToGroupsST aGroups (WScratch {w_pos=pos})= do
  let b_max = snd (bounds (aGroups))
  ma <- newArray (0,b_max) (-1,0) :: ST s (STArray s Int (MatchOffset,MatchLength))
  -- Whole-match extent: tag 0 is the start, tag 1 the stop.
  startPos0 <- pos !! 0
  stopPos0 <- pos !! 1
  set ma 0 (startPos0,stopPos0-startPos0)
  let act _this_index [] = return ()
      act this_index ((GroupInfo _ parent start stop flagtag):gs) = do
        flagVal <- pos !! flagtag
        if (-1) == flagVal then act this_index gs
          else do
            startPos <- pos !! start
            stopPos <- pos !! stop
            (startParent,lengthParent) <- ma !! parent
            -- The group only counts if the parent was captured and the
            -- child's span is contained in the parent's span.
            let ok = (0 <= startParent &&
                      0 <= lengthParent &&
                      startParent <= startPos &&
                      stopPos <= startPos + lengthParent)
            if not ok then act this_index gs
              else set ma this_index (startPos,stopPos-startPos)
  forM_ (range (1,b_max)) $ (\i -> act i (aGroups!i))
  unsafeFreeze ma
{- MUTABLE TAGGED TRANSITION (returning Tag-0 value) -}
{-# INLINE spawnAt #-}
-- | (Re)initialize scratch entry @i@: ensure it has a tag-position
-- array (allocating one on first use), wipe it with the blank array,
-- clear the orbit log, and record @thisPos@ as the tag-0 start.
spawnAt :: (Tag,Tag) -> BlankScratch s -> Index -> MScratch s -> Position -> S.ST s ()
spawnAt b_tags (BlankScratch blankPos) i s1 thisPos = do
  existing <- m_pos s1 !! i
  pos <- case existing of
    Just old -> return old
    Nothing  -> do
      fresh <- newA_ b_tags
      set (m_pos s1) i (Just fresh)
      return fresh
  _ <- copySTU blankPos pos
  set (m_orbit s1) i $! mempty
  set pos 0 thisPos
{-# INLINE updateCopy #-}
-- | Carry the chosen source state across a transition: copy its tag
-- positions into destination slot @i2@ of scratch @s2@ (allocating the
-- slot on first use), apply the transition's tag actions at @preTag@,
-- and store the new orbit log (forced to WHNF).
updateCopy :: ((Index, Instructions), STUArray s Tag Position, OrbitLog)
           -> Index
           -> MScratch s
           -> Int
           -> ST s ()
updateCopy ((_i1,instructions),oldPos,newOrbit) preTag s2 i2 = do
  b_tags <- getBounds oldPos
  mSlot <- m_pos s2 !! i2
  newerPos <- case mSlot of
    Just slot -> return slot
    Nothing   -> do
      fresh <- newA_ b_tags
      set (m_pos s2) i2 (Just fresh)
      return fresh
  _ <- copySTU oldPos newerPos
  doActions preTag newerPos (newPos instructions)
  set (m_orbit s2) i2 $! newOrbit
{- USING memcpy TO COPY STUARRAY DATA -}
-- #ifdef __GLASGOW_HASKELL__
-- Raw C memcpy over the MutableByteArray# payloads of two STUArrays;
-- used by 'copySTU'.  The destination must be at least as large as the
-- copied byte count -- no checking happens at this level.
foreign import ccall unsafe "memcpy"
    memcpy :: MutableByteArray# RealWorld -> MutableByteArray# RealWorld -> Int# -> IO ()
{-
Prelude Data . Array . Base > : i STUArray
data STUArray s i e
= STUArray ! i ! i ! Int ( GHC.Prim . MutableByteArray # s )
-- Defined in Data . Array . Base
Prelude Data.Array.Base> :i STUArray
data STUArray s i e
= STUArray !i !i !Int (GHC.Prim.MutableByteArray# s)
-- Defined in Data.Array.Base
-}
This has been updated for ghc 6.8.3 and still works with ghc 6.10.1
{-# INLINE copySTU #-}
-- | Bulk-copy one STUArray's bytes onto another via C memcpy, copying
-- sizeofMutableByteArray# of the source.  unsafeCoerce# reinterprets
-- the IO-typed foreign import at the local state token -- safe only
-- because memcpy's effect is exactly the write it performs.
-- NOTE(review): the full type signature appears truncated in this copy
-- of the file; only the result fragment below remains.  Equal array
-- sizes are assumed (the bounds check is commented out) -- TODO confirm.
( STUArray s i e )
copySTU _source@(STUArray _ _ _ msource) _destination@(STUArray _ _ _ mdest) =
-- do b1 <- getBounds s1
--    b2 <- getBounds s2
--    when (b1/=b2) (error ("\n\nWTF copySTU: " ++ show (b1,b2)))
  ST $ \s1# ->
    case sizeofMutableByteArray# msource of { n# ->
    case unsafeCoerce# memcpy mdest msource n# s1# of { (# s2#, () #) ->
    (# s2#, () #) }}
-- # else / * ! _ _ GLASGOW_HASKELL _ _ * /
{-
copySTU : : ( MArray ( STUArray s ) e ( S.ST s))= > STUArray s Tag e - > STUArray s Tag e - > S.ST s ( STUArray s i e )
copySTU source destination = do
b@(start , stop ) < - getBounds source
b ' < - getBounds destination
-- traceCopy ( " > copySTArray " + + show b ) $ do
when ( b/=b ' ) ( fail $ " Text . Regex . TDFA.RunMutState copySTUArray bounds mismatch"++show ( b , b ' ) )
forM _ ( range b ) $ \index - >
set destination index = < < source ! ! index
return destination
-- # endif / * ! _ _ GLASGOW_HASKELL _ _ * /
-- #else /* !__GLASGOW_HASKELL__ */
copySTU :: (MArray (STUArray s) e (S.ST s))=> STUArray s Tag e -> STUArray s Tag e -> S.ST s (STUArray s i e)
copySTU source destination = do
b@(start,stop) <- getBounds source
b' <- getBounds destination
-- traceCopy ("> copySTArray "++show b) $ do
when (b/=b') (fail $ "Text.Regex.TDFA.RunMutState copySTUArray bounds mismatch"++show (b,b'))
forM_ (range b) $ \index ->
set destination index =<< source !! index
return destination
-- #endif /* !__GLASGOW_HASKELL__ */
-}
| null | https://raw.githubusercontent.com/haskell-hvr/regex-tdfa/9a84354663cd80ebdf45d4cb2a57346ef1f636c1/lib/Text/Regex/TDFA/NewDFA/Engine_FA.hs | haskell | | This is the code for the main engine. This captures the posix
subexpressions. There is also a non-capturing engine, and a
testing engine.
specialized to produce the needed variants.
import Debug.Trace
trace :: String -> a -> a
trace _ a = a
# INLINE (!!) #
# INLINE set #
# SCC "matchHere.comp" #
# SCC "goNext" #
# SCC "goNext.next" #
compressOrbits gets all the current Tag-0 start information from
compressOrbit.
m_orbit record, discarding ones that are Nothing and discarding
ones that are too new to care about (after the cutoff value).
will never be comparable in the future so they can be processed
distinguished, as a further optimization, but the justification will
be tricky.
Current Tag-0 values are at most offset and all newly spawned
groups will have Tag-0 of at least (succ offset) so the current
groups are closed to those spawned in the future. The basePos may
be as large as offset and may be overwritten later with values of
record). Thus there could be a future collision between a current
group with basePos==offset and an updated record that acquires
basePos==offset. By excluding groups with basePos before the
current offset the collision between existing and future records
is avoided.
An entry in a group can only collide with that group's
descendants. compressOrbit sends each group to the compressGroup
command.
cleared and if so it will clear it (and set ordinal to Nothing but
this this not particularly important).
compressGroup on many records sorts and groups the members and zips
the groups with their new ordinal value. The comparison is based
on the old ordinal value, then the inOrbit value, and then the (Seq
Position) data.
The old ordinals of the group will all be Nothing or all be Just,
but this condition is neither checked nor violations detected.
This comparison is justified because once records get different
ordinals assigned they will never change places.
position to at most (succ offset). They will obly be compared if
the other one leaves, an its stop position will be at least offset.
The previous sentence is justified by inspection of the "assemble"
LeaveOrbitTask) so the largest possible value for the stop Tag is
(pred offset). Thus the record with inOrbit==False would beat (be
value is (pred offset) and the smallest future Position value is
offset. The previous sentence is justified by inspection of the
offset).
Position).
the starting index while doing so, and compares the earliest start
released then the future continuation of the search is placed in
"storeNext". If no winners are ready to be released then the
computation continues immediately.
# SCC "goNext.findTrans" #
# SCC "goNext.findTrans.prep" #
# SCC "goNext.findTrans.challenge" #
The "newWinnerThenProceed" can find both a new non-empty winner and
a new empty winner. A new non-empty winner can cause some of the
startState is eliminated then it must then be respawned. And
imperative flag setting and resetting style is used.
A non-empty winner from the startState might obscure a potential
empty winner (form the startState at the current offset). This
winEmpty possibility is also checked for. (unit test pattern ".*")
# SCC "goNext.newWinnerThenProceed" #
# SCC "goNext.newWinnerThenProceed.prep" #
# SCC "goNext.newWinnerThenProceed.challenge" #
# SCC "goNext.newWinner" #
goNext then ends with the next statement
--
--
MUTABLE WINNER QUEUE
MUTABLE SCRATCH DATA STRUCTURES
DEBUGGING HELPERS
CREATING INITIAL MUTABLE SCRATCH DATA STRUCTURES
# INLINE newA_ #
COMPOSE A FUNCTION CLOSURE TO COMPARE TAG VALUES
reversed since Minimize
CONVERT WINNERS TO MATCHARRAY
MUTABLE TAGGED TRANSITION (returning Tag-0 value)
Reset the entry at "Index", or allocate such an entry.
set tag 0 to the "Position"
USING memcpy TO COPY STUARRAY DATA
#ifdef __GLASGOW_HASKELL__
Defined in Data . Array . Base
Defined in Data.Array.Base
do b1 <- getBounds s1
b2 <- getBounds s2
# else / * ! _ _ GLASGOW_HASKELL _ _ * /
traceCopy ( " > copySTArray " + + show b ) $ do
# endif / * ! _ _ GLASGOW_HASKELL _ _ * /
#else /* !__GLASGOW_HASKELL__ */
traceCopy ("> copySTArray "++show b) $ do
#endif /* !__GLASGOW_HASKELL__ */ | # LANGUAGE CPP #
#if __GLASGOW_HASKELL__ >= 902
# OPTIONS_GHC -Wno - incomplete - uni - patterns #
#endif
It is polymorphic over the internal Uncons type class , and
module Text.Regex.TDFA.NewDFA.Engine_FA(execMatch) where
import Data.Array.Base(unsafeRead,unsafeWrite,STUArray(..))
import GHC.Arr(STArray(..))
import GHC.ST(ST(..))
import GHC.Exts(MutableByteArray#,RealWorld,Int#,sizeofMutableByteArray#,unsafeCoerce#)
import Prelude hiding ((!!))
import Control.Monad(when,unless,forM,forM_,liftM2,foldM)
import Data.Array.MArray(MArray(..))
import Data.Array.Unsafe(unsafeFreeze)
import Data.Array.IArray(Array,bounds,assocs,Ix(range))
import qualified Data.IntMap.CharMap2 as CMap(findWithDefault)
import Data.IntMap(IntMap)
import qualified Data.IntMap as IMap(null,toList,lookup,insert)
import Data.Maybe(catMaybes)
import Data.Monoid as Mon(Monoid(..))
import Data.IntSet(IntSet)
import qualified Data.IntSet as ISet(toAscList,null)
import Data.Array.IArray((!))
import Data.List(sortBy,groupBy)
import Data.STRef(STRef,newSTRef,readSTRef,writeSTRef)
import qualified Control.Monad.ST.Strict as S(ST,runST)
import Data.Sequence(Seq,ViewL(..),viewl)
import qualified Data.Sequence as Seq(null)
import qualified Data.ByteString.Char8 as SBS(ByteString)
import qualified Data.ByteString.Lazy.Char8 as LBS(ByteString)
import Text.Regex.Base(MatchArray,MatchOffset,MatchLength)
import Text.Regex.TDFA.Common hiding (indent)
import Text.Regex.TDFA.NewDFA.Uncons(Uncons(uncons))
import Text.Regex.TDFA.NewDFA.MakeTest(test_singleline,test_multiline)
err :: String -> a
err s = common_error "Text.Regex.TDFA.NewDFA.Engine_FA" s
(!!) :: (MArray a e (S.ST s),Ix i) => a i e -> Int -> S.ST s e
(!!) = unsafeRead
set :: (MArray a e (S.ST s),Ix i) => a i e -> Int -> e -> S.ST s ()
set = unsafeWrite
noSource :: ((Index, Instructions),STUArray s Tag Position,OrbitLog)
noSource = ((-1,err "noSource"),err "noSource",err "noSource")
# SPECIALIZE execMatch : : Regex - > Position - > Char - > ( [ ] ) - > [ MatchArray ] #
# SPECIALIZE execMatch : : Regex - > Position - > Char - > ( Seq Char ) - > [ MatchArray ] #
# SPECIALIZE execMatch : : Regex - > Position - > Char - > SBS.ByteString - > [ MatchArray ] #
# SPECIALIZE execMatch : : Regex - > Position - > Char - > LBS.ByteString - > [ MatchArray ] #
execMatch :: forall text. Uncons text => Regex -> Position -> Char -> text -> [MatchArray]
execMatch (Regex { regex_dfa = DFA {d_id=didIn,d_dt=dtIn}
, regex_init = startState
, regex_b_index = b_index
, regex_b_tags = b_tags_all
, regex_tags = aTags
, regex_groups = aGroups
, regex_compOptions = CompOption { multiline = newline } } )
offsetIn prevIn inputIn = S.runST goNext where
b_tags :: (Tag,Tag)
!b_tags = b_tags_all
orbitTags :: [Tag]
!orbitTags = map fst . filter ((Orbit==).snd) . assocs $ aTags
test :: WhichTest -> Index -> Char -> text -> Bool
!test = mkTest newline
comp :: C s
goNext :: forall s. ST s [MatchArray]
(SScratch s1In s2In (winQ,blank,which)) <- newScratch b_index b_tags
spawnAt b_tags blank startState s1In offsetIn
case dt of
Testing' {dt_test=wt,dt_a=a,dt_b=b} ->
if test wt offset prev input
then next s1 s2 did a offset prev input
else next s1 s2 did b offset prev input
Simple' {dt_win=w,dt_trans=t,dt_other=o} -> do
unless (IMap.null w) $
processWinner s1 w offset
case uncons input of
Nothing -> finalizeWinner
Just (c,input') ->
case CMap.findWithDefault o c t of
Transition {trans_single=DFA {d_id=did',d_dt=dt'},trans_how=dtrans}
| ISet.null did' -> finalizeWinner
| otherwise -> findTrans s1 s2 did did' dt' dtrans offset c input'
the NFA states ; then it loops through all the Orbit tags with
compressOrbit on such a Tag loops through all the NFS states '
compressOrbit then groups the Orbits records by the Tag-0 start
position and the basePos position . Entries in different groups
separately . Groups could probably be even more finely
offset or larger ( and this will also involve deleting the Orbits
compressGroup on a single record checks whether it 's Seq can be
The inOrbit Bool is only different if one of them has set the stop
function in the module : there is no ( PostUpdate
GT than ) the record with inOrbit==True .
The Seq comparison is safe because the largest existing Position
" assemble " function in the module : there is no ( PostUpdate
EnterOrbitTags ) so the largest possible value in the Seq is ( pred
The updated Orbits get the new ordinal value and an empty ( Seq
-- | Periodically shrink the per-state Orbit logs so they cannot grow without
-- bound on long inputs.  For every orbit tag it collects the live Orbits
-- records of the given NFA states, groups them, and replaces each group's
-- position queue with a small ordinal rank (keeping only relative order).
--
-- NOTE(review): this excerpt was mangled by comment-marker stripping.  The
-- "Require : cutoff <= offset , MAGIC TUNABLE CONSTANT 50" line below was a
-- comment, and the binding that defines 'cutoff' (presumably
-- @let cutoff = offset - 50@ or similar) is missing -- 'cutoff' is used
-- below but never bound here.  'orbitTags' on the last line is also free,
-- so this is a where-binding of an enclosing function whose head lies
-- outside this excerpt.  Confirm against upstream regex-tdfa before editing.
compressOrbits :: MScratch s -> IntSet -> Position -> ST s ()
compressOrbits s1 did offset = do
-- getStart pairs each state with its tag-0 (match start) position.
let getStart state = do start <- maybe (err "compressOrbit,1") (!! 0) =<< m_pos s1 !! state
return (state,start)
Require : cutoff < = offset , MAGIC TUNABLE CONSTANT 50
ss <- mapM getStart (ISet.toAscList did)
let compressOrbit tag = do
-- Only records whose basePos is older than the cutoff are compressed.
mos <- forM ss ( \ p@(state,_start) -> do
mo <- fmap (IMap.lookup tag) (m_orbit s1 !! state)
case mo of
Just orbits | basePos orbits < cutoff -> return (Just (p,orbits))
| otherwise -> return Nothing
_ -> return Nothing )
-- A singleton group needs no ordinal; just drop its queue (if any).
let compressGroup [((state,_),orbit)] | Seq.null (getOrbits orbit) = return ()
| otherwise =
set (m_orbit s1) state
. (IMap.insert tag $! (orbit { ordinal = Nothing, getOrbits = mempty}))
=<< m_orbit s1 !! state
-- Larger groups: sort by (ordinal, inOrbit, queue), collapse equal
-- records, and store the 1-based rank as the new ordinal.
compressGroup gs = do
let sortPos (_,b1) (_,b2) = compare (ordinal b1) (ordinal b2) `mappend`
compare (inOrbit b2) (inOrbit b1) `mappend`
comparePos (viewl (getOrbits b1)) (viewl (getOrbits b2))
groupPos (_,b1) (_,b2) = ordinal b1 == ordinal b2 && getOrbits b1 == getOrbits b2
gs' = zip [(1::Int)..] (groupBy groupPos . sortBy sortPos $ gs)
forM_ gs' $ \ (!n,eqs) -> do
forM_ eqs $ \ ((state,_),orbit) ->
set (m_orbit s1) state
. (IMap.insert tag $! (orbit { ordinal = Just n, getOrbits = mempty }))
=<< m_orbit s1 !! state
-- Records are grouped by (tag-0 start, basePos); groups are independent.
let sorter ((_,a1),b1) ((_,a2),b2) = compare a1 a2 `mappend` compare (basePos b1) (basePos b2)
grouper ((_,a1),b1) ((_,a2),b2) = a1==a2 && basePos b1 == basePos b2
orbitGroups = groupBy grouper . sortBy sorter . catMaybes $ mos
mapM_ compressGroup orbitGroups
mapM_ compressOrbit orbitTags
findTrans has to ( part 1 ) decide , for each destination , " which " of
zero or more source NFA states will be the chosen source . Then it
has to ( part 2 ) perform the transition or spawn . It keeps track of
with the stored winners . ( part 3 ) If some winners are ready to be
-- | One DFA step: for each destination NFA state pick the winning source
-- ("part 1"), perform the copy/update into the next scratch bank ("part 2"),
-- and loop with the banks swapped ("part 3").
--
-- NOTE(review): comment-marker stripping damaged this excerpt.  The lines
-- "findTrans part 0/1/2/3" and the MAGIC CONSTANT line were comments; the
-- defining equation (@findTrans s1 s2 did did' dt' dtrans offset c input' = do@)
-- and the heads of the local helpers 'prep', 'challenge' and
-- 'performTransTo' are missing, which is why 'sourceIndex', 'instructions',
-- 'x1'/'x2' and 'destIndex' appear unbound below.  'which', 'orbitTags',
-- 'noSource', 'comp', 'prev'' and 'next' come from the enclosing where-scope.
findTrans
:: MScratch s
-> MScratch s
-> IntSet
-> SetIndex
-> DT
-> DTrans
-> Index
-> Char
-> text
-> ST s [MatchArray]
findTrans part 0
MAGIC TUNABLE CONSTANT 100 ( and 100 - 1 ) . TODO : ( offset . & . 127 = = 127 ) instead ?
-- Every 100th offset, compact the orbit logs (only if orbit tags exist).
when (not (null orbitTags) && (offset `rem` 100 == 99)) (compressOrbits s1 did offset)
findTrans part 1
-- For each destination: no sources means "spawn fresh" (noSource marker);
-- otherwise prepare every candidate source and fold 'challenge' to keep
-- the best one in the 'which' table.
let findTransTo (destIndex,sources) | IMap.null sources =
set which destIndex noSource
| otherwise = do
pos <- maybe (err $ "findTrans,1 : "++show (sourceIndex,destIndex,did')) return
=<< m_pos s1 !! sourceIndex
orbit <- m_orbit s1 !! sourceIndex
let orbit' = maybe orbit (\ f -> f offset orbit) (newOrbits instructions)
return ((sourceIndex,instructions),pos,orbit')
check <- comp offset x1 (newPos ins1) x2 (newPos ins2)
if check==LT then return x2 else return x1
first_rest <- mapM prep (IMap.toList sources)
let first:rest = first_rest
set which destIndex =<< foldM challenge first rest
let dl = IMap.toList dtrans
mapM_ findTransTo dl
findTrans part 2
# SCC " goNext.findTrans.performTransTo " #
-- sourceIndex == -1 marks the noSource sentinel: nothing to copy.
x@((sourceIndex,_instructions),_pos,_orbit') <- which !! destIndex
unless (sourceIndex == (-1)) $
(updateCopy x offset s2 destIndex)
mapM_ performTransTo dl
findTrans part 3
-- Advance strictly and recurse with the scratch banks swapped (s2 first).
let offset' = succ offset in seq offset' $ next s2 s1 did' dt' offset' prev' input'
NFA states that comprise the DFA state to be eliminated , and if the
( further test " ( .+|.+ . ) * " on " aa\n " )
# INLINE processWinner #
-- | Fold the winning NFA states of the current step down to a single best
-- match and store it in the winner queue.
--
-- NOTE(review): the defining equation of 'processWinner' and the heads of
-- its local 'prep' / 'challenge' helpers, as well as the equation of
-- 'newWinner', were lost to comment stripping -- 'sourceIndex',
-- 'instructions', 'x1'/'x2', 'oldPos', 'winInstructions', 'preTag',
-- 'b_tags', 'winQ' and 'aGroups' are free below (enclosing where-scope).
processWinner :: MScratch s -> IntMap Instructions -> Position -> ST s ()
pos <- maybe (err "newWinnerThenProceed,1") return =<< m_pos s1 !! sourceIndex
startPos <- pos !! 0
orbit <- m_orbit s1 !! sourceIndex
let orbit' = maybe orbit (\ f -> f offset orbit) (newOrbits instructions)
return (startPos,(x,pos,orbit'))
check <- comp offset x1 (newPos ins1) x2 (newPos ins2)
if check==LT then return x2 else return x1
prep'd <- mapM prep (IMap.toList w)
case map snd prep'd of
[] -> return ()
(first:rest) -> newWinner offset =<< foldM challenge first rest
-- newWinner: snapshot the winning tag array, apply its pending
-- instructions, and stash the copy in the winner queue.
newWinner :: Position -> ((a, Instructions), STUArray s Tag Position, c) -> ST s ()
newerPos <- newA_ b_tags
copySTU oldPos newerPos
doActions preTag newerPos (newPos winInstructions)
putMQ (WScratch newerPos) winQ
-- finalizeWinner: convert the stored winner (if any) to MatchArrays and
-- reset the queue for the next search.
finalizeWinner :: ST s [MatchArray]
finalizeWinner = do
mWinner <- readSTRef (mq_mWin winQ)
case mWinner of
Nothing -> return []
Just winner -> resetMQ winQ >> mapM (tagsToGroupsST aGroups) [winner]
next s1In s2In didIn dtIn offsetIn prevIn inputIn
# INLINE doActions #
-- | Apply a list of tag-update instructions to the mutable position array.
-- 'SetPre' stores the position before the current character, 'SetPost' the
-- position just after it, and 'SetVal' an explicit position.
doActions :: Position -> STUArray s Tag Position -> [(Tag, Action)] -> ST s ()
doActions preTag pos ins = forM_ ins $ \(tag, act) ->
  set pos tag $ case act of
    SetPre   -> preTag
    SetPost  -> succ preTag
    SetVal v -> v
# INLINE mkTest #
-- | Pick the character-class test routine for the requested line mode:
-- multiline semantics treat newline specially, single-line semantics do not.
mkTest :: Uncons text => Bool -> WhichTest -> Index -> Char -> text -> Bool
mkTest True  = test_multiline
mkTest False = test_singleline
-- | Mutable holder for the best match seen so far (if any).
newtype MQ s = MQ { mq_mWin :: STRef s (Maybe (WScratch s)) }

-- | Allocate an empty winner queue.
newMQ :: S.ST s (MQ s)
newMQ = fmap MQ (newSTRef Nothing)

-- | Forget any stored winner.
resetMQ :: MQ s -> S.ST s ()
resetMQ mq = writeSTRef (mq_mWin mq) Nothing

-- | Record @ws@ as the current winner, replacing any previous one.
putMQ :: WScratch s -> MQ s -> S.ST s ()
putMQ ws mq = writeSTRef (mq_mWin mq) (Just ws)
-- | Full scratch state for the matcher: two per-generation MScratch banks
-- (the step loop swaps them), plus the winner queue, a blank tag array used
-- to reset freshly spawned states, and the per-destination "which source
-- won" table.
data SScratch s = SScratch { _s_1 :: !(MScratch s)
, _s_2 :: !(MScratch s)
, _s_rest :: !( MQ s
, BlankScratch s
, STArray s Index ((Index,Instructions),STUArray s Tag Position,OrbitLog)
)
}
-- | Per-NFA-state mutable data: the tag position array (allocated lazily,
-- hence Maybe) and the orbit log for Orbit-typed tags.
data MScratch s = MScratch { m_pos :: !(STArray s Index (Maybe (STUArray s Tag Position)))
, m_orbit :: !(STArray s Index OrbitLog)
}
-- | The all-(-1) reset pattern copied into newly spawned states ('spawnAt').
newtype BlankScratch s = BlankScratch { _blank_pos :: (STUArray s Tag Position)
}
-- | Tag positions of a stored winner.
newtype WScratch s = WScratch { w_pos :: (STUArray s Tag Position)
}
indent : : String - > String
indent xs = ' ' : ' ' : xs
showMS : : MScratch s - > Index - > ST s String
showMS s i = do
ma < - m_pos s ! ! i
mc < - m_orbit s ! ! i
a < - case ma of
Nothing - > return " No pos "
Just pos - > fmap show ( getAssocs pos )
let c = show return $ unlines [ " MScratch , index = " + + show i
, indent a
, indent c ]
showWS : : ST s String
showWS ( ) = do
a < - getAssocs pos
return $ unlines [ " WScratch "
, indent ( show a ) ]
indent :: String -> String
indent xs = ' ':' ':xs
showMS :: MScratch s -> Index -> ST s String
showMS s i = do
ma <- m_pos s !! i
mc <- m_orbit s !! i
a <- case ma of
Nothing -> return "No pos"
Just pos -> fmap show (getAssocs pos)
let c = show mc
return $ unlines [ "MScratch, index = "++show i
, indent a
, indent c]
showWS :: WScratch s -> ST s String
showWS (WScratch pos) = do
a <- getAssocs pos
return $ unlines [ "WScratch"
, indent (show a)]
-}
# INLINE newA #
-- | Allocate a tag-indexed array with every slot set to the given initial
-- value.  Monomorphised wrapper around 'newArray'.
newA :: (MArray (STUArray s) e (ST s)) => (Tag,Tag) -> e -> S.ST s (STUArray s Tag e)
newA = newArray

-- | Allocate a tag-indexed array without initialising its slots.
newA_ :: (MArray (STUArray s) e (ST s)) => (Tag,Tag) -> S.ST s (STUArray s Tag e)
newA_ = newArray_
-- | Allocate a complete SScratch for the given NFA-state index range and tag
-- range.  The 'which' table starts out filled with error thunks; they are
-- overwritten by findTrans before ever being forced.
newScratch :: (Index,Index) -> (Tag,Tag) -> S.ST s (SScratch s)
newScratch b_index b_tags = do
  bankA    <- newMScratch b_index
  bankB    <- newMScratch b_index
  winners  <- newMQ
  blankPos <- newA b_tags (-1)
  whichTab <- newArray b_index ((-1,err "newScratch which 1"),err "newScratch which 2",err "newScratch which 3")
  return (SScratch bankA bankB (winners, BlankScratch blankPos, whichTab))
-- | Allocate one scratch bank: no tag arrays yet (all Nothing) and empty
-- orbit logs for every NFA state index.
newMScratch :: (Index,Index) -> S.ST s (MScratch s)
newMScratch b_index = do
  posArr   <- newArray b_index Nothing
  orbitArr <- newArray b_index Mon.mempty
  return (MScratch posArr orbitArr)
-- | One comparator in the tag-challenge chain.  It receives the remaining
-- comparators so the chain can short-circuit or continue without being
-- rebuilt.
newtype F s = F ([F s] -> C s)
-- | A comparison action: given the position before the current character and
-- two candidates (each a (state,instructions) pair with its tag array and
-- orbit map, plus its list of pending tag updates), decide their ordering.
type C s = Position
-> ((Int, Instructions), STUArray s Tag Position, IntMap Orbits)
-> [(Int, Action)]
-> ((Int, Instructions), STUArray s Tag Position, IntMap Orbits)
-> [(Int, Action)]
-> ST s Ordering
# INLINE orderOf #
-- | Order two pending tag actions of the same kind-compatible shape:
-- identical constructors are EQ, SetPre sorts below SetPost, and explicit
-- values compare numerically.  Any other mix is a logic error upstream.
orderOf :: Action -> Action -> Ordering
orderOf SetPre      SetPre      = EQ
orderOf SetPost     SetPost     = EQ
orderOf SetPre      SetPost     = LT
orderOf SetPost     SetPre      = GT
orderOf (SetVal v1) (SetVal v2) = compare v1 v2
orderOf post1 post2 =
  err $ "bestTrans.compareWith.choose sees incomparable "++show (post1,post2)
-- | Build the candidate-comparison chain from the per-tag OP table: one
-- challenge function per tag ('Orbit' / 'Maximize' / 'Ignore'), terminated
-- by an always-EQ comparator past the last tag.  'Minimize' is rejected --
-- apparently never generated by this matcher.
--
-- NOTE(review): 'c', scrutinised in 'comp0', is never bound in this excerpt;
-- a line (likely comparing the two candidates' tag-0 start positions) seems
-- to have been lost when comments were stripped.  Confirm against upstream
-- regex-tdfa before changing anything here.
ditzyComp'3 :: forall s. Array Tag OP -> C s
ditzyComp'3 aTagOP = comp0 where
(F comp1:compsRest) = allcomps 1
comp0 :: C s
comp0 preTag x1@(_state1,pos1,_orbit1') np1 x2@(_state2,pos2,_orbit2') np2 = do
case c of
EQ -> comp1 compsRest preTag x1 np1 x2 np2
answer -> return answer
allcomps :: Tag -> [F s]
allcomps tag | tag > top = [F (\ _ _ _ _ _ _ -> return EQ)]
| otherwise =
case aTagOP ! tag of
Orbit -> F (challenge_Orb tag) : allcomps (succ tag)
Maximize -> F (challenge_Max tag) : allcomps (succ tag)
Ignore -> F (challenge_Ignore tag) : allcomps (succ tag)
Minimize -> err "allcomps Minimize"
where top = snd (bounds aTagOP)
-- | Comparator step for an Ignore tag: consume any pending update for this
-- tag from either candidate's instruction list without letting it influence
-- the ordering, then continue with the next comparator in the chain.  The
-- instruction lists are assumed sorted by tag (only the heads are checked).
challenge_Ignore
:: Int
-> [F s1]
-> Position
-> ((Int, Instructions), STUArray s1 Tag Position, IntMap Orbits)
-> [(Int, Action)]
-> ((Int, Instructions), STUArray s1 Tag Position, IntMap Orbits)
-> [(Int, Action)]
-> ST s1 Ordering
challenge_Ignore !tag (F next:comps) preTag x1 np1 x2 np2 =
case np1 of
((t1,_):rest1) | t1==tag ->
case np2 of
((t2,_):rest2) | t2==tag -> next comps preTag x1 rest1 x2 rest2
_ -> next comps preTag x1 rest1 x2 np2
_ -> do
case np2 of
((t2,_):rest2) | t2==tag -> next comps preTag x1 np1 x2 rest2
_ -> next comps preTag x1 np1 x2 np2
-- The chain is terminated by an always-EQ element, so it never runs dry.
challenge_Ignore _ [] _ _ _ _ _ = err "impossible 2347867"
-- | Comparator step for a Maximize tag.  Each candidate's effective position
-- for the tag is either its pending update (head of the instruction list,
-- resolved via preTag / succ preTag / explicit value) or the value stored in
-- its tag array.  Equal positions defer to the next comparator; otherwise
-- the positions' ordering decides.  Equal pending constructors are compared
-- structurally via 'orderOf' without touching the arrays.
challenge_Max
:: Int
-> [F s1]
-> Position
-> ((Int, Instructions), STUArray s1 Tag Position, IntMap Orbits)
-> [(Int, Action)]
-> ((Int, Instructions), STUArray s1 Tag Position, IntMap Orbits)
-> [(Int, Action)]
-> ST s1 Ordering
challenge_Max !tag (F next:comps) preTag x1@(_state1,pos1,_orbit1') np1 x2@(_state2,pos2,_orbit2') np2 =
case np1 of
((t1,b1):rest1) | t1==tag ->
case np2 of
((t2,b2):rest2) | t2==tag ->
if b1==b2 then next comps preTag x1 rest1 x2 rest2
else return (orderOf b1 b2)
-- Only candidate 1 has a pending update; resolve it to a position
-- and compare against candidate 2's stored value.
_ -> do
p2 <- pos2 !! tag
let p1 = case b1 of SetPre -> preTag
SetPost -> succ preTag
SetVal v -> v
if p1==p2 then next comps preTag x1 rest1 x2 np2
else return (compare p1 p2)
_ -> do
p1 <- pos1 !! tag
case np2 of
-- Mirror image: only candidate 2 has a pending update.
((t2,b2):rest2) | t2==tag -> do
let p2 = case b2 of SetPre -> preTag
SetPost -> succ preTag
SetVal v -> v
if p1==p2 then next comps preTag x1 np1 x2 rest2
else return (compare p1 p2)
-- Neither has a pending update: compare the stored positions.
_ -> do
p2 <- pos2 !! tag
if p1==p2 then next comps preTag x1 np1 x2 np2
else return (compare p1 p2)
challenge_Max _ [] _ _ _ _ _ = err "impossible 9384324"
-- | Comparator step for an Orbit tag.  If neither candidate has orbit data
-- for the tag, continue down the chain.  If both do and agree on the
-- inOrbit flag, compare their ordinal ranks and then their position queues
-- ('comparePos'); EQ defers to the next comparator.  Any other combination
-- (one-sided data, or mismatched inOrbit flags) is treated as a logic error.
challenge_Orb
:: Int
-> [F s1]
-> Position
-> ((Int, Instructions), STUArray s1 Tag Position, IntMap Orbits)
-> [(Int, Action)]
-> ((Int, Instructions), STUArray s1 Tag Position, IntMap Orbits)
-> [(Int, Action)]
-> ST s1 Ordering
challenge_Orb !tag (F next:comps) preTag x1@(_state1,_pos1,orbit1') np1 x2@(_state2,_pos2,orbit2') np2 =
let s1 = IMap.lookup tag orbit1'
s2 = IMap.lookup tag orbit2'
in case (s1,s2) of
(Nothing,Nothing) -> next comps preTag x1 np1 x2 np2
(Just o1,Just o2) | inOrbit o1 == inOrbit o2 ->
case compare (ordinal o1) (ordinal o2) `mappend`
comparePos (viewl (getOrbits o1)) (viewl (getOrbits o2)) of
EQ -> next comps preTag x1 np1 x2 np2
answer -> return answer
_ -> err $ unlines [ "challenge_Orb is too stupid to handle mismatched orbit data :"
, show(tag,preTag,np1,np2)
, show s1
, show s2
]
challenge_Orb _ [] _ _ _ _ _ = err "impossible 0298347"
-- | Lexicographic comparison of two orbit position sequences (as views).
-- Note the asymmetry: an exhausted sequence compares *greater* than a
-- non-empty one -- presumably preferring the candidate whose orbit log is
-- shorter; confirm against the orbit semantics before changing.
comparePos :: (ViewL Position) -> (ViewL Position) -> Ordering
comparePos l r = case (l, r) of
  (EmptyL,     EmptyL)     -> EQ
  (EmptyL,     _)          -> GT
  (_,          EmptyL)     -> LT
  (p1 :< ps1,  p2 :< ps2)  ->
    compare p1 p2 `mappend` comparePos (viewl ps1) (viewl ps2)
-- | Convert the final tag positions of a winning WScratch into a MatchArray.
-- Slot 0 is the whole match (tag 0 = start, tag 1 = stop, stored as
-- (offset,length)); each capture group index is then filled from its
-- GroupInfo candidates.  A candidate counts only when its flag tag fired
-- (/= -1) and its span lies inside its parent's already-recorded extent.
-- Unmatched slots keep the (-1,0) initial value.
tagsToGroupsST :: forall s. Array GroupIndex [GroupInfo] -> WScratch s -> S.ST s MatchArray
tagsToGroupsST aGroups (WScratch {w_pos=pos})= do
let b_max = snd (bounds (aGroups))
ma <- newArray (0,b_max) (-1,0) :: ST s (STArray s Int (MatchOffset,MatchLength))
startPos0 <- pos !! 0
stopPos0 <- pos !! 1
set ma 0 (startPos0,stopPos0-startPos0)
-- act walks a group's GroupInfo list and keeps the first valid candidate.
let act _this_index [] = return ()
act this_index ((GroupInfo _ parent start stop flagtag):gs) = do
flagVal <- pos !! flagtag
if (-1) == flagVal then act this_index gs
else do
startPos <- pos !! start
stopPos <- pos !! stop
(startParent,lengthParent) <- ma !! parent
-- Containment check against the parent group's extent;
-- a parent that never matched ((-1,0)) rejects all children.
let ok = (0 <= startParent &&
0 <= lengthParent &&
startParent <= startPos &&
stopPos <= startPos + lengthParent)
if not ok then act this_index gs
else set ma this_index (startPos,stopPos-startPos)
-- Groups are processed in index order, so (assuming parents have lower
-- indices -- TODO confirm) parents are written before their children read.
forM_ (range (1,b_max)) $ (\i -> act i (aGroups!i))
unsafeFreeze ma
# INLINE spawnAt #
-- | (Re)initialise NFA state @i@ as a fresh match starting at @thisPos@:
-- reuse the state's tag array if it already exists (allocating otherwise),
-- overwrite it with the blank pattern, clear the orbit log, and record the
-- start position in tag 0.
spawnAt :: (Tag,Tag) -> BlankScratch s -> Index -> MScratch s -> Position -> S.ST s ()
spawnAt b_tags (BlankScratch blankPos) i s1 thisPos = do
  existing <- m_pos s1 !! i
  target <- case existing of
    Just arr -> return arr
    Nothing  -> do
      fresh <- newA_ b_tags
      set (m_pos s1) i (Just fresh)
      return fresh
  copySTU blankPos target
  set (m_orbit s1) i $! mempty
  set target 0 thisPos
# INLINE updateCopy #
-- | Carry the chosen source's state over to destination index @i2@ in the
-- next scratch bank: copy its tag array (allocating the destination array on
-- first use), apply the transition's pending tag updates at @preTag@, and
-- install the pre-computed orbit log.
updateCopy :: ((Index, Instructions), STUArray s Tag Position, OrbitLog)
           -> Index
           -> MScratch s
           -> Int
           -> ST s ()
updateCopy ((_i1,instructions),oldPos,newOrbit) preTag s2 i2 = do
  b_tags <- getBounds oldPos
  mDest  <- m_pos s2 !! i2
  newerPos <- case mDest of
    Just dest -> return dest
    Nothing   -> do
      fresh <- newA_ b_tags
      set (m_pos s2) i2 (Just fresh)
      return fresh
  copySTU oldPos newerPos
  doActions preTag newerPos (newPos instructions)
  set (m_orbit s2) i2 $! newOrbit
-- | Raw C memcpy over GHC mutable byte arrays: copies the given number of
-- bytes from the second array into the first, with no bounds checking.
-- Used only by 'copySTU' below.
foreign import ccall unsafe "memcpy"
memcpy :: MutableByteArray# RealWorld -> MutableByteArray# RealWorld -> Int# -> IO ()
Prelude Data . Array . Base > : i STUArray
data STUArray s i e
= STUArray ! i ! i ! Int ( GHC.Prim . MutableByteArray # s )
Prelude Data.Array.Base> :i STUArray
data STUArray s i e
= STUArray !i !i !Int (GHC.Prim.MutableByteArray# s)
-}
This has been updated for ghc 6.8.3 and still works with ghc 6.10.1
# INLINE copySTU #
( STUArray s i e )
-- | Bulk-copy one STUArray into another with a single memcpy of the source's
-- underlying byte array, bypassing element-wise reads/writes.
-- NOTE(review): the type signature of this binding is missing from this
-- excerpt (only the stray fragment "( STUArray s i e )" above survives), and
-- the "when ( b1/=b2 ) ..." line below is a stripped-out comment showing a
-- disabled bounds check.  The unsafeCoerce# around 'memcpy' assumes the
-- destination byte array is at least as large as the source -- TODO confirm
-- all callers pass identically-sized arrays.
copySTU _source@(STUArray _ _ _ msource) _destination@(STUArray _ _ _ mdest) =
when ( b1/=b2 ) ( error ( " \n\nWTF copySTU : " + + show ( ) ) )
ST $ \s1# ->
case sizeofMutableByteArray# msource of { n# ->
case unsafeCoerce# memcpy mdest msource n# s1# of { (# s2#, () #) ->
(# s2#, () #) }}
copySTU : : ( MArray ( STUArray s ) e ( S.ST s))= > STUArray s Tag e - > STUArray s Tag e - > S.ST s ( STUArray s i e )
copySTU source destination = do
b@(start , stop ) < - getBounds source
b ' < - getBounds destination
when ( b/=b ' ) ( fail $ " Text . Regex . TDFA.RunMutState copySTUArray bounds mismatch"++show ( b , b ' ) )
forM _ ( range b ) $ \index - >
set destination index = < < source ! ! index
return destination
copySTU :: (MArray (STUArray s) e (S.ST s))=> STUArray s Tag e -> STUArray s Tag e -> S.ST s (STUArray s i e)
copySTU source destination = do
b@(start,stop) <- getBounds source
b' <- getBounds destination
when (b/=b') (fail $ "Text.Regex.TDFA.RunMutState copySTUArray bounds mismatch"++show (b,b'))
forM_ (range b) $ \index ->
set destination index =<< source !! index
return destination
-}
|
fa029e2c95bdd1cb1c83bcc56a7e54e63b4b88b8361628c0a75e8067f1447003 | Shirakumo/kandria | walkntalk.lisp | (in-package #:org.shirakumo.fraf.kandria)
;; Text widget for the walk-and-talk overlay: an alloy label with an extra
;; MARKUP slot, drawn as white wrapped text over a dimmed black backdrop.
(defclass walk-textbox (alloy:label)
((markup :initarg :markup :initform () :accessor markup)))
;; Static rendering: translucent black background plus the label text with a
;; 20/20/40/20 inset, top-left aligned, in the configured display font.
(presentations:define-realization (ui walk-textbox)
((:bg simple:rectangle)
(alloy:margins)
:pattern (colored:color 0 0 0 0.8))
((:label simple:text)
(alloy:margins 20 20 40 20)
alloy:text
:valign :top
:halign :left
:wrap T
:font (setting :display :font)
:size (alloy:un 25)
:pattern colors:white))
;; Keep the rendered markup in sync with the widget's MARKUP slot.
(presentations:define-update (ui walk-textbox)
(:label
:markup (markup alloy:renderable)))
;; Dimmed backdrop square drawn behind the speaker's profile picture.
(defclass profile-background (alloy:layout-element alloy:renderable) ())
;; Translucent black fill, pushed behind other elements via negative z-index.
(presentations:define-realization (ui profile-background)
((:bg simple:rectangle)
(alloy:margins)
:pattern (colored:color 0 0 0 0.8)
:z-index -10))
;; NOTE: this sucks.
;; Constraint layout that keeps a backreference to its owning WALKNTALK panel
;; (used by the render method at the bottom of this file).
(defclass walkntalk-layout (org.shirakumo.alloy.layouts.constraint:layout)
((walkntalk :initarg :walkntalk)))
;; The walk-and-talk panel itself.  INTERACTION is the normally running
;; dialogue, INTERRUPT a temporary override, and INTERRUPT-IP the saved
;; instruction pointer used to resume the interaction afterwards.
(defclass walkntalk (panel textbox entity)
((name :initform 'walkntalk)
(interaction :initform NIL :accessor interaction)
(interrupt :initform NIL :accessor interrupt)
(interrupt-ip :initform 0 :accessor interrupt-ip)))
;; Build the panel's static UI tree: dimmed background square, speaker
;; profile, the textbox, and a nametag, all placed with absolute pixel
;; constraints.
(defmethod initialize-instance :after ((walkntalk walkntalk) &key)
(let ((layout (make-instance 'walkntalk-layout :walkntalk walkntalk))
(textbox (alloy:represent (slot-value walkntalk 'text) 'walk-textbox))
(nametag (alloy:represent (slot-value walkntalk 'source) 'nametag))
(background (make-instance 'profile-background)))
(setf (textbox walkntalk) textbox)
(alloy:enter background layout :constraints `((:left 60) (:top 60) (:width 150) (:height 150)))
(alloy:enter (profile walkntalk) layout :constraints `((:left 90) (:top 0) (:width 200) (:height 200)))
(alloy:enter textbox layout :constraints `((:align :bottom ,background) (:right-of ,background 0) (:height 120) (:right 60)))
;; NOTE(review): the next line is a stripped-out source comment explaining
;; why the nametag uses absolute rather than relative constraints.
: for whatever fucking reason trying to use teh relative constraints here results in unsolvable expressions .
(alloy:enter nametag layout :constraints `((:top 60) (:left 210) (:height 30) (:right 60)))
(alloy:finish-structure walkntalk layout (choices walkntalk))))
;; Clear the text before showing so stale dialogue never flashes up.
(defmethod show :before ((textbox walkntalk) &key)
(setf (text textbox) (clear-text-string)))
;; Stop the text-scroll blip when the panel goes away.
(defmethod hide :after ((textbox walkntalk))
(harmony:stop (// 'sound 'ui-scroll-dialogue)))
;; Expose the active interaction (if any) as a one-element list.
(defmethod interactions ((textbox walkntalk))
(when (interaction textbox)
(list (interaction textbox))))
;; NOTE(review): dead code -- a second HANDLE method with the exact same
;; specializers ((ev tick) (textbox walkntalk)) appears near the end of this
;; file and replaces this definition when the file is loaded.  One of the
;; two should be deleted.
(defmethod handle ((ev tick) (textbox walkntalk))
(when (shown-p textbox)
(call-next-method)))
;; Reacting to a change of the active interaction: NIL hides the panel; if an
;; interrupt is running, just reset the resume pointer; otherwise start the
;; new interaction's dialogue on the textbox VM, publish it as the current
;; task/interaction, and show the panel unless a full dialog panel is open.
(defmethod (setf interaction) :after (value (textbox walkntalk))
(cond ((null value)
(when (shown-p textbox)
(hide textbox)))
((interrupt textbox)
(setf (interrupt-ip textbox) 0))
(T
(reset textbox)
(setf *current-task* (quest:task value))
(setf *current-interaction* value)
(dialogue:run (quest:dialogue value) (vm textbox))
(when (and (not (shown-p textbox))
(not (find-panel 'dialog)))
(show textbox)))))
;; Installing an interrupt saves the running interaction's instruction
;; pointer (only on the first interrupt) and starts the interrupt's dialogue;
;; clearing it restores the saved ip and resumes the original interaction,
;; or hides the panel when there is nothing to resume.
;; NOTE(review): the install branch duplicates most of (setf interaction)
;; above -- a shared helper would keep the two start-dialogue paths in sync.
(defmethod (setf interrupt) :before (value (textbox walkntalk))
(cond (value
(when (null (interrupt textbox))
(setf (interrupt-ip textbox) (ip textbox)))
(reset textbox)
(setf *current-task* (quest:task value))
(setf *current-interaction* value)
(dialogue:run (quest:dialogue value) (vm textbox))
(when (and (not (shown-p textbox))
(not (find-panel 'dialog)))
(show textbox)))
((and (interaction textbox) (interrupt-ip textbox))
;; Restore the saved ip and null it out in one step.
(shiftf (ip textbox) (interrupt-ip textbox) NIL)
(setf *current-task* (quest:task (interaction textbox)))
(setf *current-interaction* (interaction textbox))
(dialogue:run (quest:dialogue (interaction textbox)) (vm textbox)))
((shown-p textbox)
(hide textbox))))
;; Arm the auto-advance countdown whenever a prompt becomes active.
(defmethod (setf prompt) :after (value (textbox walkntalk))
(when value
(setf (pause-timer textbox) (setting :gameplay :auto-advance-after))))
;; Advance past the current interaction.  An active interrupt blocks
;; completion (first COND clause returns its truthy value); otherwise the
;; interaction is marked complete and cleared.
(defmethod next-interaction ((textbox walkntalk))
(cond ((interrupt textbox))
(T
(when (interaction textbox)
(quest:complete (interaction textbox)))
(setf (interaction textbox) NIL))))
;; Effective per-tick handler (this definition replaces the earlier HANDLE
;; method with the same specializers, defined above): while a prompt is up,
;; count down the auto-advance timer and clear it on expiry; otherwise only
;; tick the textbox machinery when an interrupt or interaction is active.
(defmethod handle ((ev tick) (textbox walkntalk))
(cond ((prompt textbox)
(decf (pause-timer textbox) (dt ev))
(when (<= (pause-timer textbox) 0)
(setf (text textbox) (clear-text-string))
(setf (prompt textbox) NIL)))
((or (interrupt textbox) (interaction textbox))
(call-next-method))))
;; Public API, operating on the global WALKNTALK unit.  Strings (and conses,
;; as :source) are wrapped in stub interactions; NIL clears.
;; NOTE(review): the line after the defmethod head below is a stripped-out
;; comment; the code only skips installation when *any* interrupt is active,
;; not specifically when the same string is already displayed -- confirm
;; which behaviour is intended.
(defmethod interrupt-walk-n-talk ((string string))
: only avoid recaching if we 're already displaying the same string .
(unless (interrupt (unit 'walkntalk +world+))
(setf (interrupt (unit 'walkntalk +world+))
(make-instance 'stub-interaction :dialogue string))))
;; Passing NIL clears the active interrupt.
(defmethod interrupt-walk-n-talk ((null null))
(setf (interrupt (unit 'walkntalk +world+)) null))
(defmethod walk-n-talk ((string string))
(walk-n-talk (make-instance 'stub-interaction :dialogue string)))
(defmethod walk-n-talk ((cons cons))
(walk-n-talk (make-instance 'stub-interaction :source cons)))
(defmethod walk-n-talk ((interaction interaction))
(setf (interaction (unit 'walkntalk +world+)) interaction))
;; Passing NIL clears the active interaction.
(defmethod walk-n-talk ((null null))
(setf (interaction (unit 'walkntalk +world+)) null))
;; Skip rendering the whole overlay while there is no text to show.
(defmethod alloy:render :around ((ui ui) (textbox walkntalk-layout))
(when (< 0 (length (text (slot-value textbox 'walkntalk))))
(call-next-method)))
| null | https://raw.githubusercontent.com/Shirakumo/kandria/94fd727bd93e302c6a28fae33815043d486d794b/ui/walkntalk.lisp | lisp | (in-package #:org.shirakumo.fraf.kandria)
(defclass walk-textbox (alloy:label)
((markup :initarg :markup :initform () :accessor markup)))
(presentations:define-realization (ui walk-textbox)
((:bg simple:rectangle)
(alloy:margins)
:pattern (colored:color 0 0 0 0.8))
((:label simple:text)
(alloy:margins 20 20 40 20)
alloy:text
:valign :top
:halign :left
:wrap T
:font (setting :display :font)
:size (alloy:un 25)
:pattern colors:white))
(presentations:define-update (ui walk-textbox)
(:label
:markup (markup alloy:renderable)))
(defclass profile-background (alloy:layout-element alloy:renderable) ())
(presentations:define-realization (ui profile-background)
((:bg simple:rectangle)
(alloy:margins)
:pattern (colored:color 0 0 0 0.8)
:z-index -10))
: this sucks .
(defclass walkntalk-layout (org.shirakumo.alloy.layouts.constraint:layout)
((walkntalk :initarg :walkntalk)))
(defclass walkntalk (panel textbox entity)
((name :initform 'walkntalk)
(interaction :initform NIL :accessor interaction)
(interrupt :initform NIL :accessor interrupt)
(interrupt-ip :initform 0 :accessor interrupt-ip)))
(defmethod initialize-instance :after ((walkntalk walkntalk) &key)
(let ((layout (make-instance 'walkntalk-layout :walkntalk walkntalk))
(textbox (alloy:represent (slot-value walkntalk 'text) 'walk-textbox))
(nametag (alloy:represent (slot-value walkntalk 'source) 'nametag))
(background (make-instance 'profile-background)))
(setf (textbox walkntalk) textbox)
(alloy:enter background layout :constraints `((:left 60) (:top 60) (:width 150) (:height 150)))
(alloy:enter (profile walkntalk) layout :constraints `((:left 90) (:top 0) (:width 200) (:height 200)))
(alloy:enter textbox layout :constraints `((:align :bottom ,background) (:right-of ,background 0) (:height 120) (:right 60)))
: for whatever fucking reason trying to use teh relative constraints here results in unsolvable expressions .
(alloy:enter nametag layout :constraints `((:top 60) (:left 210) (:height 30) (:right 60)))
(alloy:finish-structure walkntalk layout (choices walkntalk))))
(defmethod show :before ((textbox walkntalk) &key)
(setf (text textbox) (clear-text-string)))
(defmethod hide :after ((textbox walkntalk))
(harmony:stop (// 'sound 'ui-scroll-dialogue)))
(defmethod interactions ((textbox walkntalk))
(when (interaction textbox)
(list (interaction textbox))))
(defmethod handle ((ev tick) (textbox walkntalk))
(when (shown-p textbox)
(call-next-method)))
(defmethod (setf interaction) :after (value (textbox walkntalk))
(cond ((null value)
(when (shown-p textbox)
(hide textbox)))
((interrupt textbox)
(setf (interrupt-ip textbox) 0))
(T
(reset textbox)
(setf *current-task* (quest:task value))
(setf *current-interaction* value)
(dialogue:run (quest:dialogue value) (vm textbox))
(when (and (not (shown-p textbox))
(not (find-panel 'dialog)))
(show textbox)))))
(defmethod (setf interrupt) :before (value (textbox walkntalk))
(cond (value
(when (null (interrupt textbox))
(setf (interrupt-ip textbox) (ip textbox)))
(reset textbox)
(setf *current-task* (quest:task value))
(setf *current-interaction* value)
(dialogue:run (quest:dialogue value) (vm textbox))
(when (and (not (shown-p textbox))
(not (find-panel 'dialog)))
(show textbox)))
((and (interaction textbox) (interrupt-ip textbox))
(shiftf (ip textbox) (interrupt-ip textbox) NIL)
(setf *current-task* (quest:task (interaction textbox)))
(setf *current-interaction* (interaction textbox))
(dialogue:run (quest:dialogue (interaction textbox)) (vm textbox)))
((shown-p textbox)
(hide textbox))))
(defmethod (setf prompt) :after (value (textbox walkntalk))
(when value
(setf (pause-timer textbox) (setting :gameplay :auto-advance-after))))
(defmethod next-interaction ((textbox walkntalk))
(cond ((interrupt textbox))
(T
(when (interaction textbox)
(quest:complete (interaction textbox)))
(setf (interaction textbox) NIL))))
(defmethod handle ((ev tick) (textbox walkntalk))
(cond ((prompt textbox)
(decf (pause-timer textbox) (dt ev))
(when (<= (pause-timer textbox) 0)
(setf (text textbox) (clear-text-string))
(setf (prompt textbox) NIL)))
((or (interrupt textbox) (interaction textbox))
(call-next-method))))
(defmethod interrupt-walk-n-talk ((string string))
: only avoid recaching if we 're already displaying the same string .
(unless (interrupt (unit 'walkntalk +world+))
(setf (interrupt (unit 'walkntalk +world+))
(make-instance 'stub-interaction :dialogue string))))
(defmethod interrupt-walk-n-talk ((null null))
(setf (interrupt (unit 'walkntalk +world+)) null))
(defmethod walk-n-talk ((string string))
(walk-n-talk (make-instance 'stub-interaction :dialogue string)))
(defmethod walk-n-talk ((cons cons))
(walk-n-talk (make-instance 'stub-interaction :source cons)))
(defmethod walk-n-talk ((interaction interaction))
(setf (interaction (unit 'walkntalk +world+)) interaction))
(defmethod walk-n-talk ((null null))
(setf (interaction (unit 'walkntalk +world+)) null))
(defmethod alloy:render :around ((ui ui) (textbox walkntalk-layout))
(when (< 0 (length (text (slot-value textbox 'walkntalk))))
(call-next-method)))
| |
c2dff446a893d2793b9cf7fea973feb81f5263cf6c7ec0e60184988ae6778739 | Dimercel/listopia | indexing-lists.lisp | (defpackage listopia-bench.indexing-lists
(:use :cl
:prove
:listopia-bench.utils)
(:import-from :listopia
:elem-index
:find-index
:find-indices
:elem-indices))
(in-package :listopia-bench.indexing-lists)
(plan nil)
(ok (bench "elem-index" (elem-index 2 '(1 2 3))))
(ok (bench "find-index" (find-index #'keywordp '(1 :foo 3))))
(ok (bench "find-indices" (find-indices #'keywordp '(1 :foo 3 :bar))))
(ok (bench "elem-indices" (elem-indices 42 '(1 42 3 42))))
(finalize)
| null | https://raw.githubusercontent.com/Dimercel/listopia/2d2a1a3c35580252ca0085e15ebf625f73230d60/bench/indexing-lists.lisp | lisp | (defpackage listopia-bench.indexing-lists
(:use :cl
:prove
:listopia-bench.utils)
(:import-from :listopia
:elem-index
:find-index
:find-indices
:elem-indices))
(in-package :listopia-bench.indexing-lists)
(plan nil)
(ok (bench "elem-index" (elem-index 2 '(1 2 3))))
(ok (bench "find-index" (find-index #'keywordp '(1 :foo 3))))
(ok (bench "find-indices" (find-indices #'keywordp '(1 :foo 3 :bar))))
(ok (bench "elem-indices" (elem-indices 42 '(1 42 3 42))))
(finalize)
| |
ab727ac665d9b5992f99bb0d1ee57be4c2f553d2afb5d21d4c3b7bfb77a202d6 | jakemcc/sicp-study | test_ex3_19.clj | (ns test_ex3_19
(:use clojure.test
mypair
ex3_19))
(def has-cycle-odd
(let [a (make-pair :a)
b (make-pair :b)
c (make-pair :c)]
(set-cdr! a b)
(set-cdr! b c)
(set-cdr! c a)
a))
(def has-cycle-even
(let [a (make-pair :a)
b (make-pair :b)
d (make-pair :d)
c (make-pair :c)]
(set-cdr! a b)
(set-cdr! b c)
(set-cdr! c d)
(set-cdr! d a)
a))
(deftest should-detect-cycle
(is (= true (cycles? has-cycle-odd)))
(is (= true (cycles? has-cycle-even))))
(deftest should-not-detect-cycle
(is (= false (cycles? (my-list :a :b :c))))
(is (= false (cycles? (my-list :a :b :c :d)))))
| null | https://raw.githubusercontent.com/jakemcc/sicp-study/3b9e3d6c8cc30ad92b0d9bbcbbbfe36a8413f89d/clojure/section3.3/test/test_ex3_19.clj | clojure | (ns test_ex3_19
(:use clojure.test
mypair
ex3_19))
(def has-cycle-odd
(let [a (make-pair :a)
b (make-pair :b)
c (make-pair :c)]
(set-cdr! a b)
(set-cdr! b c)
(set-cdr! c a)
a))
(def has-cycle-even
(let [a (make-pair :a)
b (make-pair :b)
d (make-pair :d)
c (make-pair :c)]
(set-cdr! a b)
(set-cdr! b c)
(set-cdr! c d)
(set-cdr! d a)
a))
(deftest should-detect-cycle
(is (= true (cycles? has-cycle-odd)))
(is (= true (cycles? has-cycle-even))))
(deftest should-not-detect-cycle
(is (= false (cycles? (my-list :a :b :c))))
(is (= false (cycles? (my-list :a :b :c :d)))))
| |
b4dca2e02f1d14717939a20cc558b7b25e40367476d94bba75e3e4985b0ec33e | erlyaws/yaws | cache_appmod_test.erl | -module(cache_appmod_test).
-export([out/1]).
-include("yaws_api.hrl").
out(Arg) ->
{abs_path, Path} = (Arg#arg.req)#http_request.path,
Opts0 = case yaws_api:queryvar(Arg, "no-cache") of
{ok, "1"} -> [{disable_cache, true}];
_ -> []
end,
Opts1 = [{header, {"X-Appmod", "cache_appmod_test"}}|Opts0],
{page, {Opts1, Path}}.
| null | https://raw.githubusercontent.com/erlyaws/yaws/da198c828e9d95ca2137da7884cddadd73941d13/testsuite/main_SUITE_data/cache_appmod_test.erl | erlang | -module(cache_appmod_test).
-export([out/1]).
-include("yaws_api.hrl").
out(Arg) ->
{abs_path, Path} = (Arg#arg.req)#http_request.path,
Opts0 = case yaws_api:queryvar(Arg, "no-cache") of
{ok, "1"} -> [{disable_cache, true}];
_ -> []
end,
Opts1 = [{header, {"X-Appmod", "cache_appmod_test"}}|Opts0],
{page, {Opts1, Path}}.
| |
32e5eccc15711847fe0cfdcd93f097c222db3131d2af770727d21d2a04b288e1 | Happstack/happstack-server | Types.hs | module Happstack.Server.Types
(Request(..), Response(..), RqBody(..), Input(..), HeaderPair(..),
takeRequestBody, readInputsBody,
rqURL, mkHeaders,
getHeader, getHeaderBS, getHeaderUnsafe,
hasHeader, hasHeaderBS, hasHeaderUnsafe,
setHeader, setHeaderBS, setHeaderUnsafe,
addHeader, addHeaderBS, addHeaderUnsafe,
setCookie , setCookies ,
LogAccess, logMAccess, Conf(..), nullConf, result, resultBS,
redirect, -- redirect_, redirect', redirect'_,
isHTTP1_0, isHTTP1_1,
RsFlags(..), nullRsFlags, contentLength, chunked, noContentLength,
HttpVersion(..), Length(..), Method(..), Headers, continueHTTP,
Host, ContentType(..),
readDec', fromReadS, FromReqURI(..)
) where
import Happstack.Server.Internal.Types
| null | https://raw.githubusercontent.com/Happstack/happstack-server/d9cdb9af5635b1ebd9db0801b66dbf4e58aba66b/src/Happstack/Server/Types.hs | haskell | redirect_, redirect', redirect'_, | module Happstack.Server.Types
(Request(..), Response(..), RqBody(..), Input(..), HeaderPair(..),
takeRequestBody, readInputsBody,
rqURL, mkHeaders,
getHeader, getHeaderBS, getHeaderUnsafe,
hasHeader, hasHeaderBS, hasHeaderUnsafe,
setHeader, setHeaderBS, setHeaderUnsafe,
addHeader, addHeaderBS, addHeaderUnsafe,
setCookie , setCookies ,
LogAccess, logMAccess, Conf(..), nullConf, result, resultBS,
isHTTP1_0, isHTTP1_1,
RsFlags(..), nullRsFlags, contentLength, chunked, noContentLength,
HttpVersion(..), Length(..), Method(..), Headers, continueHTTP,
Host, ContentType(..),
readDec', fromReadS, FromReqURI(..)
) where
import Happstack.Server.Internal.Types
|
9fa3dc1eb4ac69052dea6a49bfa04585113b76181e9beaaef16dd6adc7a9b948 | mejgun/haskell-tdlib | GetInstalledStickerSets.hs | {-# LANGUAGE OverloadedStrings #-}
-- |
module TD.Query.GetInstalledStickerSets where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified TD.Data.StickerType as StickerType
import qualified Utils as U
-- |
Returns a list of installed sticker sets @sticker_type Type of the sticker sets to return
data GetInstalledStickerSets = GetInstalledStickerSets
{ -- |
sticker_type :: Maybe StickerType.StickerType
}
deriving (Eq)
instance Show GetInstalledStickerSets where
show
GetInstalledStickerSets
{ sticker_type = sticker_type_
} =
"GetInstalledStickerSets"
++ U.cc
[ U.p "sticker_type" sticker_type_
]
instance T.ToJSON GetInstalledStickerSets where
toJSON
GetInstalledStickerSets
{ sticker_type = sticker_type_
} =
A.object
[ "@type" A..= T.String "getInstalledStickerSets",
"sticker_type" A..= sticker_type_
]
| null | https://raw.githubusercontent.com/mejgun/haskell-tdlib/cf563ece2c2270b2079e233c73cbc7dfd2f70281/src/TD/Query/GetInstalledStickerSets.hs | haskell | # LANGUAGE OverloadedStrings #
|
|
| |
module TD.Query.GetInstalledStickerSets where
import qualified Data.Aeson as A
import qualified Data.Aeson.Types as T
import qualified TD.Data.StickerType as StickerType
import qualified Utils as U
Returns a list of installed sticker sets @sticker_type Type of the sticker sets to return
data GetInstalledStickerSets = GetInstalledStickerSets
sticker_type :: Maybe StickerType.StickerType
}
deriving (Eq)
instance Show GetInstalledStickerSets where
show
GetInstalledStickerSets
{ sticker_type = sticker_type_
} =
"GetInstalledStickerSets"
++ U.cc
[ U.p "sticker_type" sticker_type_
]
instance T.ToJSON GetInstalledStickerSets where
toJSON
GetInstalledStickerSets
{ sticker_type = sticker_type_
} =
A.object
[ "@type" A..= T.String "getInstalledStickerSets",
"sticker_type" A..= sticker_type_
]
|
ed9d1732de6cd2c6b2a61c004ba8daba11794c7ced88b01878e6e8d3fb687b5f | MinaProtocol/mina | priced_proof.ml | open Core_kernel
open Mina_base
[%%versioned
module Stable = struct
[@@@no_toplevel_latest_type]
module V1 = struct
type 'proof t = 'proof Mina_wire_types.Network_pool.Priced_proof.V1.t =
{ proof : 'proof; fee : Fee_with_prover.Stable.V1.t }
[@@deriving compare, fields, sexp, yojson, hash]
end
end]
type 'proof t = 'proof Stable.Latest.t =
{ proof : 'proof; fee : Fee_with_prover.t }
[@@deriving compare, fields, sexp, yojson, hash]
let map t ~f = { t with proof = f t.proof }
| null | https://raw.githubusercontent.com/MinaProtocol/mina/7a380064e215dc6aa152b76a7c3254949e383b1f/src/lib/network_pool/priced_proof.ml | ocaml | open Core_kernel
open Mina_base
[%%versioned
module Stable = struct
[@@@no_toplevel_latest_type]
module V1 = struct
type 'proof t = 'proof Mina_wire_types.Network_pool.Priced_proof.V1.t =
{ proof : 'proof; fee : Fee_with_prover.Stable.V1.t }
[@@deriving compare, fields, sexp, yojson, hash]
end
end]
type 'proof t = 'proof Stable.Latest.t =
{ proof : 'proof; fee : Fee_with_prover.t }
[@@deriving compare, fields, sexp, yojson, hash]
let map t ~f = { t with proof = f t.proof }
| |
038fd5a52cdc012e7a8ade73d2b6cac54f07130a7cecf2c2d78b62ed3e206ea4 | mirage/irmin | types.ml |
* Copyright ( c ) 2018 - 2022 Tarides < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2018-2022 Tarides <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
open Ctypes
include Types_intf
module Struct = struct
type config = unit
type repo = unit
type store = unit
type ty = unit
type value = unit
type metadata = unit
type contents = unit
type path = unit
type tree = unit
type commit = unit
type hash = unit
type info = unit
type irmin_string = unit
type path_array = unit
type commit_array = unit
type branch_array = unit
type commit_key = unit
type kinded_key = unit
type remote = unit
end
let config : Struct.config ptr typ = ptr (typedef void "IrminConfig")
let repo : Struct.repo ptr typ = ptr (typedef void "IrminRepo")
let store : Struct.store ptr typ = ptr (typedef void "Irmin")
let ty : Struct.ty ptr typ = ptr (typedef void "IrminType")
let value : Struct.value ptr typ = ptr (typedef void "IrminValue")
let metadata : Struct.metadata ptr typ = ptr (typedef void "IrminMetadata")
let contents : Struct.metadata ptr typ = ptr (typedef void "IrminContents")
let path : Struct.path ptr typ = ptr (typedef void "IrminPath")
let tree : Struct.tree ptr typ = ptr (typedef void "IrminTree")
let commit : Struct.commit ptr typ = ptr (typedef void "IrminCommit")
let hash : Struct.hash ptr typ = ptr (typedef void "IrminHash")
let info : Struct.info ptr typ = ptr (typedef void "IrminInfo")
let remote : Struct.remote ptr typ = ptr (typedef void "IrminRemote")
let irmin_string : Struct.irmin_string ptr typ =
ptr (typedef void "IrminString")
let path_array : Struct.path_array ptr typ = ptr (typedef void "IrminPathArray")
let commit_array : Struct.commit_array ptr typ =
ptr (typedef void "IrminCommitArray")
let branch_array : Struct.branch_array ptr typ =
ptr (typedef void "IrminBranchArray")
let commit_key : Struct.commit_key ptr typ = ptr (typedef void "IrminCommitKey")
let kinded_key : Struct.kinded_key ptr typ = ptr (typedef void "IrminKindedKey")
| null | https://raw.githubusercontent.com/mirage/irmin/abeee121a6db7b085b3c68af50ef24a8d8f9ed05/src/libirmin/types.ml | ocaml |
* Copyright ( c ) 2018 - 2022 Tarides < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2018-2022 Tarides <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
open Ctypes
include Types_intf
module Struct = struct
type config = unit
type repo = unit
type store = unit
type ty = unit
type value = unit
type metadata = unit
type contents = unit
type path = unit
type tree = unit
type commit = unit
type hash = unit
type info = unit
type irmin_string = unit
type path_array = unit
type commit_array = unit
type branch_array = unit
type commit_key = unit
type kinded_key = unit
type remote = unit
end
let config : Struct.config ptr typ = ptr (typedef void "IrminConfig")
let repo : Struct.repo ptr typ = ptr (typedef void "IrminRepo")
let store : Struct.store ptr typ = ptr (typedef void "Irmin")
let ty : Struct.ty ptr typ = ptr (typedef void "IrminType")
let value : Struct.value ptr typ = ptr (typedef void "IrminValue")
let metadata : Struct.metadata ptr typ = ptr (typedef void "IrminMetadata")
let contents : Struct.metadata ptr typ = ptr (typedef void "IrminContents")
let path : Struct.path ptr typ = ptr (typedef void "IrminPath")
let tree : Struct.tree ptr typ = ptr (typedef void "IrminTree")
let commit : Struct.commit ptr typ = ptr (typedef void "IrminCommit")
let hash : Struct.hash ptr typ = ptr (typedef void "IrminHash")
let info : Struct.info ptr typ = ptr (typedef void "IrminInfo")
let remote : Struct.remote ptr typ = ptr (typedef void "IrminRemote")
let irmin_string : Struct.irmin_string ptr typ =
ptr (typedef void "IrminString")
let path_array : Struct.path_array ptr typ = ptr (typedef void "IrminPathArray")
let commit_array : Struct.commit_array ptr typ =
ptr (typedef void "IrminCommitArray")
let branch_array : Struct.branch_array ptr typ =
ptr (typedef void "IrminBranchArray")
let commit_key : Struct.commit_key ptr typ = ptr (typedef void "IrminCommitKey")
let kinded_key : Struct.kinded_key ptr typ = ptr (typedef void "IrminKindedKey")
| |
8d714733a08bad260b1822da01f8e16812e5e233955c6a910c3ee26bf51abd43 | ds-wizard/engine-backend | Detail_PUT.hs | module Wizard.Api.Handler.DocumentTemplateDraft.File.Detail_PUT where
import qualified Data.UUID as U
import Servant
import Shared.Api.Handler.Common
import Shared.Model.Context.TransactionState
import Shared.Model.DocumentTemplate.DocumentTemplate
import Shared.Model.DocumentTemplate.DocumentTemplateJM ()
import Wizard.Api.Handler.Common
import Wizard.Api.Resource.DocumentTemplate.File.DocumentTemplateFileChangeDTO
import Wizard.Api.Resource.DocumentTemplate.File.DocumentTemplateFileChangeJM ()
import Wizard.Model.Context.BaseContext
import Wizard.Service.DocumentTemplate.File.DocumentTemplateFileService
type Detail_PUT =
Header "Authorization" String
:> Header "Host" String
:> ReqBody '[SafeJSON] DocumentTemplateFileChangeDTO
:> "document-template-drafts"
:> Capture "documentTemplateId" String
:> "files"
:> Capture "fileUuid" U.UUID
:> Put '[SafeJSON] (Headers '[Header "x-trace-uuid" String] DocumentTemplateFile)
detail_PUT
:: Maybe String
-> Maybe String
-> DocumentTemplateFileChangeDTO
-> String
-> U.UUID
-> BaseContextM (Headers '[Header "x-trace-uuid" String] DocumentTemplateFile)
detail_PUT mTokenHeader mServerUrl reqDto tmlId fileUuid =
getAuthServiceExecutor mTokenHeader mServerUrl $ \runInAuthService ->
runInAuthService Transactional $ addTraceUuidHeader =<< modifyFile fileUuid reqDto
| null | https://raw.githubusercontent.com/ds-wizard/engine-backend/d392b751192a646064305d3534c57becaa229f28/engine-wizard/src/Wizard/Api/Handler/DocumentTemplateDraft/File/Detail_PUT.hs | haskell | module Wizard.Api.Handler.DocumentTemplateDraft.File.Detail_PUT where
import qualified Data.UUID as U
import Servant
import Shared.Api.Handler.Common
import Shared.Model.Context.TransactionState
import Shared.Model.DocumentTemplate.DocumentTemplate
import Shared.Model.DocumentTemplate.DocumentTemplateJM ()
import Wizard.Api.Handler.Common
import Wizard.Api.Resource.DocumentTemplate.File.DocumentTemplateFileChangeDTO
import Wizard.Api.Resource.DocumentTemplate.File.DocumentTemplateFileChangeJM ()
import Wizard.Model.Context.BaseContext
import Wizard.Service.DocumentTemplate.File.DocumentTemplateFileService
type Detail_PUT =
Header "Authorization" String
:> Header "Host" String
:> ReqBody '[SafeJSON] DocumentTemplateFileChangeDTO
:> "document-template-drafts"
:> Capture "documentTemplateId" String
:> "files"
:> Capture "fileUuid" U.UUID
:> Put '[SafeJSON] (Headers '[Header "x-trace-uuid" String] DocumentTemplateFile)
detail_PUT
:: Maybe String
-> Maybe String
-> DocumentTemplateFileChangeDTO
-> String
-> U.UUID
-> BaseContextM (Headers '[Header "x-trace-uuid" String] DocumentTemplateFile)
detail_PUT mTokenHeader mServerUrl reqDto tmlId fileUuid =
getAuthServiceExecutor mTokenHeader mServerUrl $ \runInAuthService ->
runInAuthService Transactional $ addTraceUuidHeader =<< modifyFile fileUuid reqDto
| |
e2e4eb2bba0e7cd84226783aa7e01dcff6fc3239cef9d89a2df7a0e36852742e | suvash/one-time | qrgen.clj | (ns one-time.qrgen
(:require [one-time.uri :as uri])
(:import net.glxn.qrgen.core.image.ImageType
net.glxn.qrgen.javase.QRCode))
(def ^:private image-types
{:JPG ImageType/JPG
:GIF ImageType/GIF
:PNG ImageType/PNG
:BMP ImageType/BMP})
(defn totp-stream
"Returns a java.io.ByteArrayOutputStream with the totp qrcode"
[{:keys [image-type image-size label user secret]
:or {image-type :JPG image-size 125}}]
{:pre [(not-any? nil? [label user secret])
(image-types image-type)]}
(-> (^String uri/totp-uri {:label label
:secret secret
:user user})
(QRCode/from)
(.to (image-types image-type))
(.withSize image-size image-size)
(.stream)))
(defn totp-file
"Returns a java.io.File with the totp qrcode"
[{:keys [image-type image-size label user secret]
:or {image-type :JPG image-size 125}}]
{:pre [(not-any? nil? [label user secret])
(image-types image-type)]}
(-> (^String uri/totp-uri {:label label
:secret secret
:user user})
(QRCode/from)
(.to (image-types image-type))
(.withSize image-size image-size)
(.file)))
(defn hotp-stream
"Returns a java.io.ByteArrayOutputStream with the hotp qrcode"
[{:keys [image-type image-size label user secret counter]
:or {image-type :JPG image-size 125}}]
{:pre [(not-any? nil? [label user secret counter])
(image-types image-type)]}
(-> (^String uri/hotp-uri {:label label
:secret secret
:user user
:counter counter})
(QRCode/from)
(.to (image-types image-type))
(.withSize image-size image-size)
(.stream)))
(defn hotp-file
"Returns a java.io.File with the hotp qrcode"
[{:keys [image-type image-size label user secret counter]
:or {image-type :JPG image-size 125}}]
{:pre [(not-any? nil? [label user secret counter])
(image-types image-type)]}
(-> (^String uri/hotp-uri {:label label
:secret secret
:user user
:counter counter})
(QRCode/from)
(.to (image-types image-type))
(.withSize image-size image-size)
(.file)))
| null | https://raw.githubusercontent.com/suvash/one-time/63981bbe1a27eaac80a2bda1b1887c4262c2a61f/src/one_time/qrgen.clj | clojure | (ns one-time.qrgen
(:require [one-time.uri :as uri])
(:import net.glxn.qrgen.core.image.ImageType
net.glxn.qrgen.javase.QRCode))
(def ^:private image-types
{:JPG ImageType/JPG
:GIF ImageType/GIF
:PNG ImageType/PNG
:BMP ImageType/BMP})
(defn totp-stream
"Returns a java.io.ByteArrayOutputStream with the totp qrcode"
[{:keys [image-type image-size label user secret]
:or {image-type :JPG image-size 125}}]
{:pre [(not-any? nil? [label user secret])
(image-types image-type)]}
(-> (^String uri/totp-uri {:label label
:secret secret
:user user})
(QRCode/from)
(.to (image-types image-type))
(.withSize image-size image-size)
(.stream)))
(defn totp-file
"Returns a java.io.File with the totp qrcode"
[{:keys [image-type image-size label user secret]
:or {image-type :JPG image-size 125}}]
{:pre [(not-any? nil? [label user secret])
(image-types image-type)]}
(-> (^String uri/totp-uri {:label label
:secret secret
:user user})
(QRCode/from)
(.to (image-types image-type))
(.withSize image-size image-size)
(.file)))
(defn hotp-stream
"Returns a java.io.ByteArrayOutputStream with the hotp qrcode"
[{:keys [image-type image-size label user secret counter]
:or {image-type :JPG image-size 125}}]
{:pre [(not-any? nil? [label user secret counter])
(image-types image-type)]}
(-> (^String uri/hotp-uri {:label label
:secret secret
:user user
:counter counter})
(QRCode/from)
(.to (image-types image-type))
(.withSize image-size image-size)
(.stream)))
(defn hotp-file
"Returns a java.io.File with the hotp qrcode"
[{:keys [image-type image-size label user secret counter]
:or {image-type :JPG image-size 125}}]
{:pre [(not-any? nil? [label user secret counter])
(image-types image-type)]}
(-> (^String uri/hotp-uri {:label label
:secret secret
:user user
:counter counter})
(QRCode/from)
(.to (image-types image-type))
(.withSize image-size image-size)
(.file)))
| |
216db0e0cca50e80ef8d519893b857251d5256df7943e754197d0898b002e294 | ocaml-multicore/tezos | generators.ml | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2022 Nomadic Labs , < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
let string_gen = QCheck2.Gen.small_string ?gen:None
let public_key_hash_gen :
(Signature.public_key_hash * Signature.public_key * Signature.secret_key)
QCheck2.Gen.t =
let open QCheck2.Gen in
let+ seed = string_size (32 -- 64) in
let seed = Bytes.of_string seed in
Signature.generate_key ~seed ()
(* TODO: /-/issues/2407
move this function to an helper file? *)
let operation_hash_gen : Operation_hash.t QCheck2.Gen.t =
let open QCheck2.Gen in
let+ s = QCheck2.Gen.string_size (return 32) in
Operation_hash.of_string_exn s
let dummy_manager_op_info oph =
{
Plugin.Mempool.operation_hash = oph;
gas_limit = Alpha_context.Gas.Arith.zero;
fee = Alpha_context.Tez.zero;
weight = Q.zero;
}
let dummy_manager_op_info_with_key_gen :
(Plugin.Mempool.manager_op_info * Signature.public_key_hash) QCheck2.Gen.t =
let open QCheck2.Gen in
let+ (oph, (pkh, _, _)) = pair operation_hash_gen public_key_hash_gen in
(dummy_manager_op_info oph, pkh)
let filter_state_gen : Plugin.Mempool.state QCheck2.Gen.t =
let open QCheck2.Gen in
let open Plugin.Mempool in
let+ inputs = small_list (pair operation_hash_gen public_key_hash_gen) in
List.fold_left
(fun state (oph, (pkh, _, _)) ->
match Operation_hash.Map.find oph state.operation_hash_to_manager with
| Some _ -> state
| None ->
let info = dummy_manager_op_info oph in
let prechecked_operations_count =
if Operation_hash.Map.mem oph state.operation_hash_to_manager then
state.prechecked_operations_count
else state.prechecked_operations_count + 1
in
let op_weight = op_weight_of_info info in
let min_prechecked_op_weight =
match state.min_prechecked_op_weight with
| Some mini when Q.(mini.weight < info.weight) -> Some mini
| Some _ | None -> Some op_weight
in
{
state with
op_prechecked_managers =
Signature.Public_key_hash.Map.add
pkh
info
state.op_prechecked_managers;
operation_hash_to_manager =
Operation_hash.Map.add oph pkh state.operation_hash_to_manager;
ops_prechecked =
ManagerOpWeightSet.add op_weight state.ops_prechecked;
prechecked_operations_count;
min_prechecked_op_weight;
})
Plugin.Mempool.empty
inputs
let with_filter_state_operation_gen :
Plugin.Mempool.state ->
(Plugin.Mempool.manager_op_info * Signature.public_key_hash) QCheck2.Gen.t =
fun state ->
let open QCheck2.Gen in
let* use_fresh = bool in
let to_ops map =
Operation_hash.Map.bindings map
|> List.map (fun (oph, pkh) -> (dummy_manager_op_info oph, pkh))
in
if use_fresh || Operation_hash.Map.is_empty state.operation_hash_to_manager
then dummy_manager_op_info_with_key_gen
else oneofl (to_ops state.operation_hash_to_manager)
let filter_state_with_operation_gen :
(Plugin.Mempool.state
* (Plugin.Mempool.manager_op_info * Signature.public_key_hash))
QCheck2.Gen.t =
let open QCheck2.Gen in
filter_state_gen >>= fun state ->
pair (return state) (with_filter_state_operation_gen state)
| null | https://raw.githubusercontent.com/ocaml-multicore/tezos/e4fd21a1cb02d194b3162ab42d512b7c985ee8a9/src/proto_alpha/lib_plugin/test/generators.ml | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
TODO: /-/issues/2407
move this function to an helper file? | Copyright ( c ) 2022 Nomadic Labs , < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
let string_gen = QCheck2.Gen.small_string ?gen:None
let public_key_hash_gen :
(Signature.public_key_hash * Signature.public_key * Signature.secret_key)
QCheck2.Gen.t =
let open QCheck2.Gen in
let+ seed = string_size (32 -- 64) in
let seed = Bytes.of_string seed in
Signature.generate_key ~seed ()
let operation_hash_gen : Operation_hash.t QCheck2.Gen.t =
let open QCheck2.Gen in
let+ s = QCheck2.Gen.string_size (return 32) in
Operation_hash.of_string_exn s
let dummy_manager_op_info oph =
{
Plugin.Mempool.operation_hash = oph;
gas_limit = Alpha_context.Gas.Arith.zero;
fee = Alpha_context.Tez.zero;
weight = Q.zero;
}
let dummy_manager_op_info_with_key_gen :
(Plugin.Mempool.manager_op_info * Signature.public_key_hash) QCheck2.Gen.t =
let open QCheck2.Gen in
let+ (oph, (pkh, _, _)) = pair operation_hash_gen public_key_hash_gen in
(dummy_manager_op_info oph, pkh)
let filter_state_gen : Plugin.Mempool.state QCheck2.Gen.t =
let open QCheck2.Gen in
let open Plugin.Mempool in
let+ inputs = small_list (pair operation_hash_gen public_key_hash_gen) in
List.fold_left
(fun state (oph, (pkh, _, _)) ->
match Operation_hash.Map.find oph state.operation_hash_to_manager with
| Some _ -> state
| None ->
let info = dummy_manager_op_info oph in
let prechecked_operations_count =
if Operation_hash.Map.mem oph state.operation_hash_to_manager then
state.prechecked_operations_count
else state.prechecked_operations_count + 1
in
let op_weight = op_weight_of_info info in
let min_prechecked_op_weight =
match state.min_prechecked_op_weight with
| Some mini when Q.(mini.weight < info.weight) -> Some mini
| Some _ | None -> Some op_weight
in
{
state with
op_prechecked_managers =
Signature.Public_key_hash.Map.add
pkh
info
state.op_prechecked_managers;
operation_hash_to_manager =
Operation_hash.Map.add oph pkh state.operation_hash_to_manager;
ops_prechecked =
ManagerOpWeightSet.add op_weight state.ops_prechecked;
prechecked_operations_count;
min_prechecked_op_weight;
})
Plugin.Mempool.empty
inputs
let with_filter_state_operation_gen :
Plugin.Mempool.state ->
(Plugin.Mempool.manager_op_info * Signature.public_key_hash) QCheck2.Gen.t =
fun state ->
let open QCheck2.Gen in
let* use_fresh = bool in
let to_ops map =
Operation_hash.Map.bindings map
|> List.map (fun (oph, pkh) -> (dummy_manager_op_info oph, pkh))
in
if use_fresh || Operation_hash.Map.is_empty state.operation_hash_to_manager
then dummy_manager_op_info_with_key_gen
else oneofl (to_ops state.operation_hash_to_manager)
let filter_state_with_operation_gen :
(Plugin.Mempool.state
* (Plugin.Mempool.manager_op_info * Signature.public_key_hash))
QCheck2.Gen.t =
let open QCheck2.Gen in
filter_state_gen >>= fun state ->
pair (return state) (with_filter_state_operation_gen state)
|
185c8a3a697c65cfd6f5861153f2987bfa36b689e0334972e9e240309a7fe9b2 | basvandijk/scientific | Internal.hs | # LANGUAGE CPP #
{-# LANGUAGE DeriveDataTypeable #-}
{-# LANGUAGE BangPatterns #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE UnboxedTuples #
# LANGUAGE PatternGuards #
module Data.Scientific.Internal
( Scientific
-- * Construction
, scientific
, unsafeScientificFromNormalized
, unsafeScientificFromNonNormalized
-- * Projections
, coefficient
, base10Exponent
-- * Predicates
, isFloating
, isInteger
-- * Conversions
-- ** Rational
, unsafeFromRational
, fromRationalRepetend
, fromRationalRepetendLimited
, fromRationalRepetendUnlimited
, toRationalRepetend
-- ** Floating & integer
, floatingOrInteger
, toRealFloat
, toBoundedRealFloat
, toBoundedInteger
, toUnboundedInteger
, fromFloatDigits
-- * Parsing
, scientificP
-- * Pretty printing
, formatScientific
, FPFormat(..)
, toDecimalDigits
-- * Normalization
, normalize
) where
----------------------------------------------------------------------
-- Imports
----------------------------------------------------------------------
import Control.Exception (throw, ArithException(DivideByZero))
import Control.Monad (mplus)
import Control.Monad.ST (runST)
import Control.DeepSeq (NFData, rnf)
import Data.Binary (Binary, get, put)
import Data.Char (intToDigit, ord)
import Data.Data (Data)
import Data.Hashable (Hashable(..))
import Data.Int (Int8, Int16, Int32, Int64)
import qualified Data.Map as M (Map, empty, insert, lookup)
import Data.Ratio ((%), numerator, denominator)
import Data.Typeable (Typeable)
import qualified Data.Primitive.Array as Primitive
import Data.Word (Word8, Word16, Word32, Word64)
import Math.NumberTheory.Logarithms (integerLog10')
import qualified Numeric (floatToDigits)
import qualified Text.Read as Read
import Text.Read (readPrec)
import qualified Text.ParserCombinators.ReadPrec as ReadPrec
import qualified Text.ParserCombinators.ReadP as ReadP
import Text.ParserCombinators.ReadP ( ReadP )
import Data.Text.Lazy.Builder.RealFloat (FPFormat(..))
#if !MIN_VERSION_base(4,9,0)
import Control.Applicative ((*>))
#endif
#if !MIN_VERSION_base(4,8,0)
import Data.Functor ((<$>))
import Data.Word (Word)
import Control.Applicative ((<*>))
#endif
#if MIN_VERSION_base(4,5,0)
import Data.Bits (unsafeShiftR)
#else
import Data.Bits (shiftR)
#endif
import GHC.Integer (quotRemInteger, quotInteger)
import GHC.Integer.Compat (divInteger)
import Utils (roundTo)
----------------------------------------------------------------------
-- Type
----------------------------------------------------------------------
-- | An arbitrary-precision number represented using
-- < scientific notation>.
--
This type describes the set of all which have a finite
-- decimal expansion.
--
A scientific number with ' coefficient ' @c@ and ' base10Exponent '
corresponds to the ' Fractional ' number : @'fromInteger ' c * 10 ' ^^ ' e@
data Scientific = Scientific
{ coefficient :: !Integer
-- ^ The coefficient of a scientific number.
, base10Exponent :: {-# UNPACK #-} !Int
^ The base-10 exponent of a scientific number .
} deriving (Typeable, Data)
-- | @scientific c e@ constructs a scientific number which corresponds
to the ' Fractional ' number : @'fromInteger ' c * 10 ' ^^ ' e@.
scientific
:: Integer -- ^ coefficient
-> Int -- ^ base-10 exponent
-> Scientific
scientific c e = normalize (Scientific c e)
-- | Unsafe but efficient way to construct a 'Scientific' from an
already normalized ' coefficient ' , i.e. it has no trailing 0s .
unsafeScientificFromNormalized
:: Integer -- ^ coefficient which should be normalized
-> Int -- ^ base-10 exponent
-> Scientific
unsafeScientificFromNormalized = Scientific
-- | Unsafe but efficient way to construct a 'Scientific' from a
-- 'coefficient' which does not have to be normalized (i.e. it may
contain trailing 0s ) . You should supply the number of trailing 0s
in the ' coefficient ' as the second argument .
--
-- This function is useful when parsing a 'Scientific'. The parser
can count the number of trailing 0s and supply that to this
-- function. This will be more efficient than calling 'scientific'
-- because no expensive normalization has to be performed.
unsafeScientificFromNonNormalized
:: Integer -- ^ coefficient
^ number of trailing 0s in the coefficient . This should be positive !
-> Int -- ^ base-10 exponent
-> Scientific
unsafeScientificFromNonNormalized 0 _ _ = Scientific 0 0
unsafeScientificFromNonNormalized c 0 e = Scientific c e
unsafeScientificFromNonNormalized c z e = Scientific (c `quotInteger` magnitude z) (e + z)
----------------------------------------------------------------------
-- Instances
----------------------------------------------------------------------
instance NFData Scientific where
rnf (Scientific _ _) = ()
-- | A hash can be safely calculated from a @Scientific@. No magnitude @10^e@ is
-- calculated so there's no risk of a blowup in space or time when hashing
-- scientific numbers coming from untrusted sources.
instance Hashable Scientific where
hashWithSalt salt (Scientific c e) = salt `hashWithSalt` c `hashWithSalt` e
-- | Note that in the future I intend to change the type of the 'base10Exponent'
from @Int@ to @Integer@. To be forward compatible the @Binary@ instance
already encodes the exponent as ' Integer ' .
instance Binary Scientific where
put (Scientific c e) = put c *> put (toInteger e)
get = Scientific <$> get <*> (fromInteger <$> get)
-- | Scientific numbers can be safely compared for equality. No magnitude @10^e@
-- is calculated so there's no risk of a blowup in space or time when comparing
-- scientific numbers coming from untrusted sources.
instance Eq Scientific where
Scientific c1 e1 == Scientific c2 e2 = c1 == c2 && e1 == e2
-- | Scientific numbers can be safely compared for ordering. No magnitude @10^e@
-- is calculated so there's no risk of a blowup in space or time when comparing
-- scientific numbers coming from untrusted sources.
instance Ord Scientific where
compare (Scientific c1 e1) (Scientific c2 e2)
| c1 == c2 && e1 == e2 = EQ
| c1 < 0 = if c2 < 0 then cmp (-c2) e2 (-c1) e1 else LT
| c1 > 0 = if c2 > 0 then cmp c1 e1 c2 e2 else GT
| otherwise = if c2 > 0 then LT else GT
where
cmp cx ex cy ey
| log10sx < log10sy = LT
| log10sx > log10sy = GT
| d < 0 = if cx <= (cy `quotInteger` magnitude (-d)) then LT else GT
| d > 0 = if cy > (cx `quotInteger` magnitude d) then LT else GT
| otherwise = if cx < cy then LT else GT
where
log10sx = log10cx + ex
log10sy = log10cy + ey
log10cx = integerLog10' cx
log10cy = integerLog10' cy
d = log10cx - log10cy
| /WARNING:/ ' + ' and ' - ' compute the ' Integer ' magnitude : @10^e@ where is
-- the difference between the @'base10Exponent's@ of the arguments. If these
-- methods are applied to arguments which have huge exponents this could fill up
-- all space and crash your program! So don't apply these methods to scientific
-- numbers coming from untrusted sources. The other methods can be used safely.
instance Num Scientific where
Scientific c1 e1 + Scientific c2 e2
| e1 < e2 = scientific (c1 + c2*l) e1
| otherwise = scientific (c1*r + c2 ) e2
where
l = magnitude (e2 - e1)
r = magnitude (e1 - e2)
# INLINABLE ( + ) #
Scientific c1 e1 - Scientific c2 e2
| e1 < e2 = scientific (c1 - c2*l) e1
| otherwise = scientific (c1*r - c2 ) e2
where
l = magnitude (e2 - e1)
r = magnitude (e1 - e2)
# INLINABLE ( - ) #
Scientific c1 e1 * Scientific c2 e2 =
scientific (c1 * c2) (e1 + e2)
{-# INLINABLE (*) #-}
abs (Scientific c e) = Scientific (abs c) e
# INLINABLE abs #
negate (Scientific c e) = Scientific (negate c) e
# INLINABLE negate #
signum (Scientific c _) = Scientific (signum c) 0
# INLINABLE signum #
fromInteger i = scientific i 0
# INLINABLE fromInteger #
| /WARNING:/ ' toRational ' needs to compute the ' Integer ' magnitude :
-- @10^e@. If applied to a huge exponent this could fill up all space
-- and crash your program!
--
-- Avoid applying 'toRational' (or 'realToFrac') to scientific numbers
-- coming from an untrusted source and use 'toRealFloat' instead. The
-- latter guards against excessive space usage.
instance Real Scientific where
toRational (Scientific c e)
| e < 0 = c % magnitude (-e)
| otherwise = (c * magnitude e) % 1
# INLINABLE toRational #
{-# RULES
"realToFrac_toRealFloat_Double"
realToFrac = toRealFloat :: Scientific -> Double #-}
{-# RULES
"realToFrac_toRealFloat_Float"
realToFrac = toRealFloat :: Scientific -> Float #-}
| /WARNING:/ ' recip ' and ' / ' will throw an error when their outputs are
-- < repeating decimals>.
--
These methods also compute ' Integer ' magnitudes ( @10^e@ ) . If these methods
-- are applied to arguments which have huge exponents this could fill up all
-- space and crash your program! So don't apply these methods to scientific
-- numbers coming from untrusted sources.
--
-- 'fromRational' will throw an error when the input 'Rational' is a repeating
-- decimal. Consider using 'fromRationalRepetend' for these rationals which
-- will detect the repetition and indicate where it starts.
instance Fractional Scientific where
recip = fromRational . recip . toRational
Scientific c1 e1 / Scientific c2 e2
| d < 0 = fromRational (x / (fromInteger (magnitude (-d))))
| otherwise = fromRational (x * fromInteger (magnitude d))
where
d = e1 - e2
x = c1 % c2
fromRational rational =
case mbRepetendIx of
Nothing -> s
Just _ix -> error $
"fromRational has been applied to a repeating decimal " ++
"which can't be represented as a Scientific! " ++
"It's better to avoid performing fractional operations on Scientifics " ++
"and convert them to other fractional types like Double as early as possible."
where
(s, mbRepetendIx) = fromRationalRepetendUnlimited rational
-- | Although 'fromRational' is unsafe because it will throw errors on
-- < repeating decimals>,
-- @unsafeFromRational@ is even more unsafe because it will diverge instead (i.e
-- loop and consume all space). Though it will be more efficient because it
-- doesn't need to consume space linear in the number of digits in the resulting
-- scientific to detect the repetition.
--
-- Consider using 'fromRationalRepetend' for these rationals which will detect
-- the repetition and indicate where it starts.
unsafeFromRational :: Rational -> Scientific
unsafeFromRational rational
| d == 0 = throw DivideByZero
| otherwise = positivize (longDiv 0 0) (numerator rational)
where
-- Divide the numerator by the denominator using long division.
longDiv :: Integer -> Int -> (Integer -> Scientific)
longDiv !c !e 0 = scientific c e
longDiv !c !e !n
-- TODO: Use a logarithm here!
| n < d = longDiv (c * 10) (e - 1) (n * 10)
| otherwise = case n `quotRemInteger` d of
(#q, r#) -> longDiv (c + q) e r
d = denominator rational
-- | Like 'fromRational' and 'unsafeFromRational', this function converts a
-- `Rational` to a `Scientific` but instead of failing or diverging (i.e
-- looping and consuming all space) on repeating decimals
-- it detects the repeating part, the /repetend/, and returns where it starts.
--
-- To detect the repetition this function consumes space linear in the number of
-- digits in the resulting scientific. In order to bound the space usage an
-- optional limit can be specified. If the number of digits reaches this limit
-- @Left (s, r)@ will be returned. Here @s@ is the 'Scientific' constructed so
-- far and @r@ is the remaining 'Rational'. @toRational s + r@ yields the
-- original 'Rational'.
--
-- If the limit is not reached or no limit was specified @Right (s,
-- mbRepetendIx)@ will be returned. Here @s@ is the 'Scientific' without any
-- repetition and @mbRepetendIx@ specifies if and where in the fractional part
-- the repetend begins.
--
-- For example:
--
-- @fromRationalRepetend Nothing (1 % 28) == Right (3.571428e-2, Just 2)@
--
-- This represents the repeating decimal @0.03571428571428571428...@
-- which is sometimes also unambiguously denoted as @0.03(571428)@.
-- Here the repetend is enclosed in parentheses and starts at the 3rd digit
-- (index 2) in the fractional part. Specifying a limit results in the
-- following:
--
-- @fromRationalRepetend (Just 4) (1 % 28) == Left (3.5e-2, 1 % 1400)@
--
-- You can expect the following property to hold.
--
-- @ forall (mbLimit :: Maybe Int) (r :: Rational).
-- r == (case 'fromRationalRepetend' mbLimit r of
--         Left (s, r') -> toRational s + r'
--         Right (s, mbRepetendIx) ->
--           case mbRepetendIx of
--             Nothing -> toRational s
--             Just repetendIx -> 'toRationalRepetend' s repetendIx)
-- @
fromRationalRepetend
    :: Maybe Int -- ^ Optional limit
    -> Rational
    -> Either (Scientific, Rational)
              (Scientific, Maybe Int)
fromRationalRepetend mbLimit rational =
    -- Dispatch on the optional limit: unlimited detection when absent,
    -- space-bounded detection when present.
    maybe (Right (fromRationalRepetendUnlimited rational))
          (`fromRationalRepetendLimited` rational)
          mbLimit
-- | Like 'fromRationalRepetend' but always accepts a limit.
fromRationalRepetendLimited
    :: Int -- ^ limit
    -> Rational
    -> Either (Scientific, Rational)
              (Scientific, Maybe Int)
fromRationalRepetendLimited l rational
    | d == 0    = throw DivideByZero
    -- Compute on the absolute value, then restore the sign on both the
    -- scientific and (for the Left case) the remaining rational.
    | num < 0   = case longDiv (-num) of
                    Left  (s, r)  -> Left  (-s, -r)
                    Right (s, mb) -> Right (-s, mb)
    | otherwise = longDiv num
  where
    num = numerator rational

    longDiv :: Integer -> Either (Scientific, Rational) (Scientific, Maybe Int)
    longDiv = longDivWithLimit 0 0 M.empty

    -- Long division which remembers, for every remainder seen so far, the
    -- exponent at which it occurred.  Seeing the same remainder again means
    -- the digits repeat from that exponent onwards.
    longDivWithLimit
        :: Integer
        -> Int
        -> M.Map Integer Int
        -> (Integer -> Either (Scientific, Rational)
                              (Scientific, Maybe Int))
    longDivWithLimit !c !e _ns 0 = Right (Scientific c e, Nothing)
    longDivWithLimit !c !e ns !n
        -- Remainder seen before: repetend starts at fractional index (-e').
        | Just e' <- M.lookup n ns = Right (scientific c e, Just (-e'))
        -- Digit budget exhausted: stop and hand back the unconsumed rest.
        | e <= (-l) = Left (scientific c e, n % (d * magnitude (-e)))
        | n < d = let !ns' = M.insert n e ns
                  in longDivWithLimit (c * 10) (e - 1) ns' (n * 10)
        | otherwise = case n `quotRemInteger` d of
                        (#q, r#) -> longDivWithLimit (c + q) e ns r

    d = denominator rational
-- | Like 'fromRationalRepetend' but doesn't accept a limit.
fromRationalRepetendUnlimited :: Rational -> (Scientific, Maybe Int)
fromRationalRepetendUnlimited rational
    | d == 0    = throw DivideByZero
    -- Work on the absolute value and restore the sign afterwards.
    | num < 0   = case longDiv (-num) of
                    (s, mb) -> (-s, mb)
    | otherwise = longDiv num
  where
    num = numerator rational

    longDiv :: Integer -> (Scientific, Maybe Int)
    longDiv = longDivNoLimit 0 0 M.empty

    -- Long division remembering every remainder and the exponent at which it
    -- occurred; a repeated remainder marks the start of the repetend.
    -- Space usage is linear in the number of digits produced.
    longDivNoLimit :: Integer
                   -> Int
                   -> M.Map Integer Int
                   -> (Integer -> (Scientific, Maybe Int))
    longDivNoLimit !c !e _ns 0 = (scientific c e, Nothing)
    longDivNoLimit !c !e ns !n
        -- Remainder seen before: repetend starts at fractional index (-e').
        | Just e' <- M.lookup n ns = (scientific c e, Just (-e'))
        | n < d = let !ns' = M.insert n e ns
                  in longDivNoLimit (c * 10) (e - 1) ns' (n * 10)
        | otherwise = case n `quotRemInteger` d of
                        (#q, r#) -> longDivNoLimit (c + q) e ns r

    d = denominator rational
-- |
-- Converts a `Scientific` with a /repetend/ (a repeating part in the fraction),
-- which starts at the given index, into its corresponding 'Rational'.
--
-- For example to convert the repeating decimal @0.03(571428)@ you would use:
-- @toRationalRepetend 0.03571428 2 == 1 % 28@
--
-- Preconditions for @toRationalRepetend s r@:
--
-- * @r >= 0@
--
-- * @r < -(base10Exponent s)@
--
-- /WARNING:/ this function needs to compute the 'Integer' magnitude
-- @10^^n@ where @n@ is based on the 'base10Exponent' of the scientific. If
-- applied to a huge exponent this could fill up all space and crash your
-- program! So don't apply this function to untrusted input.
--
-- The formula to convert the @Scientific@ @s@
-- with a repetend starting at index @r@ is described in the paper:
-- turning_repeating_decimals_into_fractions.pdf
-- and is defined as follows:
--
-- @
--   (fromInteger nonRepetend + repetend % nines) /
--   fromInteger (10^^r)
-- where
--   c = coefficient s
--   e = base10Exponent s
--
--   -- Size of the fractional part.
--   f = (-e)
--
--   -- Size of the repetend.
--   n = f - r
--
--   m = 10^^n
--
--   (nonRepetend, repetend) = c \`quotRem\` m
--
--   nines = m - 1
-- @
-- Also see: 'fromRationalRepetend'.
toRationalRepetend
    :: Scientific
    -> Int -- ^ Repetend index
    -> Rational
toRationalRepetend s r
    | r < 0  = error "toRationalRepetend: Negative repetend index!"
    | r >= f = error "toRationalRepetend: Repetend index >= than number of digits in the fractional part!"
    | otherwise = (fromInteger nonRepetend + repetend % nines) /
                  fromInteger (magnitude r)
  where
    c = coefficient s
    e = base10Exponent s

    -- Size of the fractional part.
    f = (-e)

    -- Size of the repetend.
    n = f - r

    m = magnitude n

    -- Split the coefficient into the digits before and inside the repetend.
    (#nonRepetend, repetend#) = c `quotRemInteger` m

    -- Denominator 99...9 (n nines) representing the repeating block.
    nines = m - 1
-- | /WARNING:/ the methods of the @RealFrac@ instance need to compute the
-- magnitude @10^e@. If applied to a huge exponent this could take a long
-- time. Even worse, when the destination type is unbounded (i.e. 'Integer') it
-- could fill up all space and crash your program!
instance RealFrac Scientific where
    -- | The function 'properFraction' takes a Scientific number @s@
    -- and returns a pair @(n, f)@ such that @s = n+f@, and:
    --
    -- * @n@ is an integral number with the same sign as @s@; and
    --
    -- * @f@ is a fraction with the same type and sign as @s@,
    --   and with absolute value less than @1@.
    properFraction s@(Scientific c e)
        | e < 0     = if dangerouslySmall c e
                      -- |s| < 0.1, so the integral part is 0; avoids
                      -- computing a huge magnitude (-e).
                      then (0, s)
                      else case c `quotRemInteger` magnitude (-e) of
                             (#q, r#) -> (fromInteger q, Scientific r e)
        | otherwise = (toIntegral c e, 0)
    {-# INLINABLE properFraction #-}

    -- | @'truncate' s@ returns the integer nearest @s@
    -- between zero and @s@
    truncate = whenFloating $ \c e ->
                 if dangerouslySmall c e
                 then 0
                 else fromInteger $ c `quotInteger` magnitude (-e)
    {-# INLINABLE truncate #-}

    -- | @'round' s@ returns the nearest integer to @s@;
    -- the even integer if @s@ is equidistant between two integers
    round = whenFloating $ \c e ->
              if dangerouslySmall c e
              then 0
              else let (#q, r#) = c `quotRemInteger` magnitude (-e)
                       n = fromInteger q
                       -- Candidate when rounding away from n: one step
                       -- towards the sign of the remainder.
                       m | r < 0     = n - 1
                         | otherwise = n + 1
                       -- Fractional part; compared against 0.5 below.
                       f = Scientific r e
                   in case signum $ coefficient $ abs f - 0.5 of
                        -1 -> n                          -- |f| < 0.5
                        0  -> if even n then n else m    -- tie: round to even
                        1  -> m                          -- |f| > 0.5
                        _  -> error "round default defn: Bad value"
    {-# INLINABLE round #-}

    -- | @'ceiling' s@ returns the least integer not less than @s@
    ceiling = whenFloating $ \c e ->
                if dangerouslySmall c e
                -- |s| < 0.1: ceiling is 0 for non-positive, 1 for positive.
                then if c <= 0
                     then 0
                     else 1
                else case c `quotRemInteger` magnitude (-e) of
                       (#q, r#) | r <= 0    -> fromInteger q
                                | otherwise -> fromInteger (q + 1)
    {-# INLINABLE ceiling #-}

    -- | @'floor' s@ returns the greatest integer not greater than @s@
    floor = whenFloating $ \c e ->
              if dangerouslySmall c e
              -- |s| < 0.1: floor is -1 for negative, 0 otherwise.
              then if c < 0
                   then -1
                   else 0
              else fromInteger (c `divInteger` magnitude (-e))
    {-# INLINABLE floor #-}
----------------------------------------------------------------------
-- Internal utilities
----------------------------------------------------------------------

-- | This function is used in the 'RealFrac' methods to guard against
-- computing a huge magnitude (-e) which could take up all space.
--
-- Think about parsing a scientific number from an untrusted
-- string. An attacker could supply 1e-1000000000. Lets say we want to
-- 'floor' that number to an 'Int'. When we naively try to floor it
-- using:
--
-- @
-- floor = whenFloating $ \c e ->
--           fromInteger (c `div` magnitude (-e))
-- @
--
-- We will compute the huge Integer: @magnitude 1000000000@. This
-- computation will quickly fill up all space and crash the program.
--
-- Note that for large /positive/ exponents there is no risk of a
-- space-leak since 'whenFloating' will compute:
--
-- @fromInteger c * magnitude e :: a@
--
-- where @a@ is the target type (Int in this example). So here the
-- space usage is bounded by the target type.
--
-- For large negative exponents we check if the exponent is smaller
-- than some limit (currently -324). In that case we know that the
-- scientific number is really small (unless the coefficient has many
-- digits) so we can immediately return -1 for negative scientific
-- numbers or 0 for positive numbers.
--
-- More precisely if @dangerouslySmall c e@ returns 'True' the
-- scientific number @s@ is guaranteed to be between:
-- @-0.1 < s < 0.1@.
--
-- Note that we avoid computing the number of decimal digits in c
-- (log10 c) if the exponent is not below the limit.
dangerouslySmall :: Integer -> Int -> Bool
dangerouslySmall c e = e < (-limit) && e < (-integerLog10' (abs c)) - 1
{-# INLINE dangerouslySmall #-}
-- | Exponent threshold used by 'dangerouslySmall' and the conversion
-- functions; reuses 'maxExpt' so it coincides with the 'expts10' cache size.
limit :: Int
limit = maxExpt
-- | Run @f@ on the absolute value of @x@ and negate the result when @x@
-- was negative.  Lets the wrapped algorithm assume a non-negative input.
positivize :: (Ord a, Num a, Num b) => (a -> b) -> (a -> b)
positivize f x =
    if x < 0
      then negate (f (negate x))
      else f x
{-# INLINE positivize #-}
-- | Dispatch on whether the scientific carries a fractional part: apply
-- @f@ to the coefficient and negative exponent when it does, otherwise
-- convert the integral value directly into the target type.
whenFloating :: (Num a) => (Integer -> Int -> a) -> Scientific -> a
whenFloating f (Scientific c e) =
    if e < 0
      then f c e
      else toIntegral c e
{-# INLINE whenFloating #-}
-- | Precondition: the scientific needs to be an integer: @e >= 0@
--
-- Computes @fromInteger c * 10^e@ entirely in the target type, so the
-- space used is bounded by that type (important for bounded integrals).
toIntegral :: (Num a) => Integer -> Int -> a
toIntegral c e = fromInteger c * magnitude e
{-# INLINE toIntegral #-}
----------------------------------------------------------------------
-- Exponentiation with a cache for the most common numbers.
----------------------------------------------------------------------

-- | The same limit as in GHC.Float.
-- Also the size of the 'expts10' power-of-ten cache.
maxExpt :: Int
maxExpt = 324
-- | Cache of the powers of ten @10^0 .. 10^(maxExpt-1)@ as 'Integer's.
-- Filled two slots at a time using the squaring identity
-- @10^(2k) = (10^k)^2@; the recursive read of 'expts10' inside its own
-- definition is safe because laziness only forces already-written slots.
expts10 :: Primitive.Array Integer
expts10 = runST $ do
    ma <- Primitive.newArray maxExpt uninitialised
    Primitive.writeArray ma 0  1
    Primitive.writeArray ma 1 10
    let go !ix
          | ix == maxExpt = Primitive.unsafeFreezeArray ma
          | otherwise = do
              Primitive.writeArray ma  ix     xx
              Primitive.writeArray ma (ix+1) (10*xx)
              go (ix+2)
          where
            xx = x * x
            x = Primitive.indexArray expts10 half
#if MIN_VERSION_base(4,5,0)
            !half = ix `unsafeShiftR` 1
#else
            !half = ix `shiftR` 1
#endif
    go 2
-- | Placeholder for not-yet-written 'expts10' slots.  Its type is the type
-- /variable/ @error@, so it unifies with any element type; forcing it
-- indicates a bug in the cache-filling loop.
uninitialised :: error
uninitialised = error "Data.Scientific: uninitialised element"
-- | @magnitude e == 10 ^ e@
--
-- Exponents below 'maxExpt' are served from the 'expts10' cache; larger
-- ones cost one cached lookup plus a single exponentiation of the rest.
magnitude :: Num a => Int -> a
magnitude e | e < maxExpt = cachedPow10 e
            | otherwise   = cachedPow10 hi * 10 ^ (e - hi)
    where
      cachedPow10 = fromInteger . Primitive.indexArray expts10

      hi = maxExpt - 1
----------------------------------------------------------------------
-- Conversions
----------------------------------------------------------------------

-- | Convert a 'RealFloat' (like a 'Double' or 'Float') into a 'Scientific'
-- number.
--
-- Note that this function uses 'Numeric.floatToDigits' to compute the digits
-- and exponent of the 'RealFloat' number. Be aware that the algorithm used in
-- 'Numeric.floatToDigits' doesn't work as expected for some numbers, e.g. as
-- the 'Double' @1e23@ is converted to @9.9999999999999991611392e22@, and that
-- value is shown as @9.999999999999999e22@ rather than the shorter @1e23@; the
-- algorithm doesn't take the rounding direction for values exactly half-way
-- between two adjacent representable values into account, so if you have a
-- value with a short decimal representation exactly half-way between two
-- adjacent representable values, like @5^23 * 2^e@ for e close to 23, the
-- algorithm doesn't know in which direction the short decimal representation
-- would be rounded and computes more digits
fromFloatDigits :: (RealFloat a) => a -> Scientific
fromFloatDigits 0  = 0
fromFloatDigits rf = positivize fromPositiveRealFloat rf
    where
      fromPositiveRealFloat r = go digits 0 0
        where
          -- Decimal digits and exponent as produced by floatToDigits.
          (digits, e) = Numeric.floatToDigits 10 r

          -- Fold the digit list into an Integer coefficient, counting the
          -- consumed digits in 'n' to derive the final exponent.
          go :: [Int] -> Integer -> Int -> Scientific
          go []     !c !n = Scientific c (e - n)
          go (d:ds) !c !n = go ds (c * 10 + toInteger d) (n + 1)
{-# INLINABLE fromFloatDigits #-}

{-# SPECIALIZE fromFloatDigits :: Double -> Scientific #-}
{-# SPECIALIZE fromFloatDigits :: Float  -> Scientific #-}
-- | Safely convert a 'Scientific' number into a 'RealFloat' (like a 'Double'
-- or a 'Float').
--
-- Note that this function uses 'realToFrac' (@'fromRational' . 'toRational'@)
-- internally but it guards against computing huge Integer magnitudes (@10^e@)
-- that could fill up all space and crash your program. If the 'base10Exponent'
-- of the given 'Scientific' is too big or too small to be represented in the
-- target type, Infinity or 0 will be returned respectively. Use
-- 'toBoundedRealFloat' which explicitly handles this case by returning 'Left'.
--
-- Always prefer 'toRealFloat' over 'realToFrac' when converting from scientific
-- numbers coming from an untrusted source.
toRealFloat :: (RealFloat a) => Scientific -> a
toRealFloat = either id id . toBoundedRealFloat

{-# INLINABLE toRealFloat #-}
{-# INLINABLE toBoundedRealFloat #-}

{-# SPECIALIZE toRealFloat        :: Scientific -> Double #-}
{-# SPECIALIZE toRealFloat        :: Scientific -> Float #-}
{-# SPECIALIZE toBoundedRealFloat :: Scientific -> Either Double Double #-}
{-# SPECIALIZE toBoundedRealFloat :: Scientific -> Either Float  Float #-}
-- | Preciser version of `toRealFloat`. If the 'base10Exponent' of the given
-- 'Scientific' is too big or too small to be represented in the target type,
-- Infinity or 0 will be returned as 'Left'.
toBoundedRealFloat :: forall a. (RealFloat a) => Scientific -> Either a a
toBoundedRealFloat s@(Scientific c e)
    | c == 0     = Right 0
      -- Exponent far above 'limit': certain overflow to signed Infinity
      -- once it also exceeds the target's decimal-exponent ceiling.
    | e >  limit = if e > hiLimit then Left $ sign (1/0) -- Infinity
                   else Right $ fromRational ((c * magnitude e) % 1)
      -- Exponent far below: underflow to signed 0 unless the coefficient's
      -- digit count 'd' lifts the value back into range.
    | e < -limit = if e < loLimit && e + d < loLimit then Left $ sign 0
                   else Right $ fromRational (c % magnitude (-e))
    | otherwise = Right $ fromRational (toRational s)
                  -- We can't use realToFrac here
                  -- because that will cause an infinite loop
                  -- when the function is specialized for Double and Float
                  -- caused by the realToFrac_toRealFloat_Double/Float rewrite RULEs.
  where
    -- Decimal-exponent bounds derived from the binary float range of 'a'.
    hiLimit, loLimit :: Int
    hiLimit = ceiling (fromIntegral hi * log10Radix)
    loLimit = floor (fromIntegral lo * log10Radix) -
              ceiling (fromIntegral digits * log10Radix)

    log10Radix :: Double
    log10Radix = logBase 10 $ fromInteger radix

    radix  = floatRadix  (undefined :: a)
    digits = floatDigits (undefined :: a)

    (lo, hi) = floatRange (undefined :: a)

    -- Number of decimal digits in the coefficient.
    d = integerLog10' (abs c)

    -- Transfer the coefficient's sign onto the overflow/underflow result.
    sign x | c < 0     = -x
           | otherwise =  x
-- | Convert a `Scientific` to a bounded integer.
--
-- If the given `Scientific` is not an integer or doesn't fit in the
-- target representation, it will return `Nothing`.
--
-- This function also guards against computing huge Integer magnitudes (@10^e@)
-- that could fill up all space and crash your program.
toBoundedInteger :: forall i. (Integral i, Bounded i) => Scientific -> Maybe i
toBoundedInteger s@(Scientific c e)
    -- Order matters: 'dangerouslyBig' must be checked before 'n' is forced,
    -- since computing 'n' for a huge exponent would exhaust memory.
    | isFloating s || dangerouslyBig || outsideBounds n = Nothing
    | otherwise = Just $ fromInteger n
  where
    -- True when 10^e certainly exceeds the target's bounds, judged from the
    -- bounds' digit counts without materialising the magnitude.
    dangerouslyBig = e > limit &&
                     e > integerLog10' (max (abs iMinBound) (abs iMaxBound))

    outsideBounds i = i < iMinBound || i > iMaxBound

    iMinBound = toInteger (minBound :: i)
    iMaxBound = toInteger (maxBound :: i)

    -- This should not be evaluated if the given Scientific is dangerouslyBig
    -- since it could consume all space and crash the process:
    n :: Integer
    n = toIntegral c e

{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int8 #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int16 #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int32 #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int64 #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word8 #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word16 #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word32 #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word64 #-}
-- | Convert a `Scientific` to an 'Integer'; yields 'Nothing' when the
-- number has a fractional part.
--
-- /WARNING:/ the conversion computes the magnitude @10^e@.  Applied to a
-- huge exponent this can fill up all space and crash your program, so do
-- not apply it to untrusted input.
toUnboundedInteger :: Scientific -> Maybe Integer
toUnboundedInteger s@(Scientific c e) =
    if isInteger s
      then Just (toIntegral c e)
      else Nothing
-- | @floatingOrInteger@ determines if the scientific is floating point or
-- integer.
--
-- In case it's floating-point the scientific is converted to the desired
-- 'RealFloat' using 'toRealFloat' and wrapped in 'Left'.
--
-- In case it's integer the scientific is converted to the desired 'Integral'
-- and wrapped in 'Right'.
--
-- /WARNING:/ To convert the scientific to an integral the magnitude @10^e@
-- needs to be computed. If applied to a huge exponent this could take a long
-- time. Even worse, when the destination type is unbounded (i.e. 'Integer') it
-- could fill up all space and crash your program! So don't apply this function
-- to untrusted input or use 'toBoundedInteger' instead.
--
-- Also see: 'isFloating' or 'isInteger'.
floatingOrInteger :: (RealFloat r, Integral i) => Scientific -> Either r i
floatingOrInteger s@(Scientific c e) =
    if isInteger s
      then Right (toIntegral c e)
      else Left (toRealFloat s)
{-# INLINABLE floatingOrInteger #-}
----------------------------------------------------------------------
-- Predicates
----------------------------------------------------------------------

-- | 'True' iff the scientific has a fractional part.
--
-- Also see: 'floatingOrInteger'.
isFloating :: Scientific -> Bool
isFloating s = not (isInteger s)
-- | 'True' iff the scientific denotes a whole number, i.e. its
-- base-10 exponent is non-negative.
--
-- Also see: 'floatingOrInteger'.
isInteger :: Scientific -> Bool
isInteger = (>= 0) . base10Exponent
----------------------------------------------------------------------
-- Parsing
----------------------------------------------------------------------

-- | Supports the skipping of parentheses and whitespaces. Example:
--
-- > > read " ( ((  -1.0e+3 ) ))" :: Scientific
-- > -1000.0
--
-- (Note: This @Read@ instance makes internal use of
-- 'scientificP' to parse the floating-point number.)
instance Read Scientific where
    -- Read.parens handles any nesting of parentheses; skipSpaces eats
    -- leading whitespace before the number proper.
    readPrec = Read.parens $ ReadPrec.lift (ReadP.skipSpaces >> scientificP)
-- A strict pair: partial coefficient plus a digit counter.  The Int
-- appears to count trailing zero digits seen so far, consumed later by
-- unsafeScientificFromNonNormalized -- TODO confirm against that function.
data S2 = S2 !Integer {-# UNPACK #-}!Int
-- A strict triple: coefficient, the same trailing-zero counter, and the
-- running (negative) exponent of the fractional part.
data S3 = S3 !Integer {-# UNPACK #-}!Int {-# UNPACK #-}!Int
-- | A parser for parsing a floating-point
-- number into a 'Scientific' value. Example:
--
-- > > import Text.ParserCombinators.ReadP (readP_to_S)
-- > > readP_to_S scientificP "3"
-- > [(3.0,"")]
-- > > readP_to_S scientificP "3.0e2"
-- > [(3.0,"e2"),(300.0,"")]
-- > > readP_to_S scientificP "+3.0e+2"
-- > [(3.0,"e+2"),(300.0,"")]
-- > > readP_to_S scientificP "-3.0e-2"
-- > [(-3.0,"e-2"),(-3.0e-2,"")]
--
-- Note: This parser only parses the number itself; it does
-- not parse any surrounding parentheses or whitespaces.
scientificP :: ReadP Scientific
scientificP = do
  -- Optional leading sign; defaults to positive.
  pos <- positive

  -- Integral part: coefficient plus trailing-zero count.
  S2 n z1 <- foldDigits stepC (S2 0 0)

  -- Optional fractional part after a '.'; <++ prefers the longer parse.
  let s = S3 n z1 0
  S3 coeff z expnt <- (ReadP.satisfy (== '.') >> foldDigits stepF s)
                      ReadP.<++ return s

  let signedCoeff | pos       = coeff
                  | otherwise = (-coeff)

  -- Optional exponent part; mplus keeps the exponent-less alternative too.
  (ReadP.satisfy isE >>
       ((unsafeScientificFromNonNormalized signedCoeff z . (expnt +)) <$> eP)) `mplus`
     return (unsafeScientificFromNonNormalized signedCoeff z expnt)
  where
    positive :: ReadP Bool
    positive = (('+' ==) <$> ReadP.satisfy isSign) `mplus` return True

    -- Digit step for the integral part: zeros bump the zero counter,
    -- any other digit resets it.
    stepC :: S2 -> Int -> S2
    stepC (S2 c z)  0 = S2 (c * 10) (z + 1)
    stepC (S2 c _z) d = S2 (c * 10 + toInteger d) 0

    -- Digit step for the fractional part: also decrements the exponent.
    stepF :: S3 -> Int -> S3
    stepF (S3 c z e)  0 = S3 (c * 10) (z + 1) (e - 1)
    stepF (S3 c _z e) d = S3 (c * 10 + toInteger d) 0 (e - 1)

    -- Digit step for the exponent value itself.
    stepE :: Int -> Int -> Int
    stepE e d = e * 10 + d

    -- Signed exponent after the 'e'/'E'.
    eP :: ReadP Int
    eP = do posE <- positive
            e    <- foldDigits stepE 0
            if posE
              then return   e
              else return (-e)
-- | Fold over a non-empty run of decimal digits, feeding each digit's
-- value (0-9) into the step function.  Uses 'ReadP.look' to peek ahead
-- so only actual digits are consumed from the input.
foldDigits :: (a -> Int -> a) -> a -> ReadP a
foldDigits f z = do
    -- At least one digit is required.
    c <- ReadP.satisfy isDecimal
    let digit = ord c - 48   -- 48 == ord '0'
        a = f z digit

    ReadP.look >>= go a
  where
    go !a [] = return a
    go !a (c:cs)
        | isDecimal c = do
            -- Peeked character is a digit: commit to consuming it.
            _ <- ReadP.get
            let digit = ord c - 48
            go (f a digit) cs
        | otherwise = return a
-- | ASCII decimal-digit test used by the number parser.
isDecimal :: Char -> Bool
isDecimal c = '0' <= c && c <= '9'
{-# INLINE isDecimal #-}
-- | Leading sign character of a number or of an exponent.
isSign :: Char -> Bool
isSign c = c `elem` "-+"
{-# INLINE isSign #-}
-- | Exponent separator, either case.
isE :: Char -> Bool
isE c = c `elem` "eE"
{-# INLINE isE #-}
----------------------------------------------------------------------
-- Pretty Printing
----------------------------------------------------------------------

-- | See 'formatScientific' if you need more control over the rendering.
instance Show Scientific where
    showsPrec d s
        -- Negative numbers render as '-' applied to the positive value;
        -- parenthesised when the context binds tighter than unary minus.
        | coefficient s < 0 = showParen (d > prefixMinusPrec) $
               showChar '-' . showPositive (-s)
        | otherwise         = showPositive s
      where
        -- Precedence of prefix minus (one above infix (-) at 6... the
        -- value 6 matches GHC's convention for negative literals).
        prefixMinusPrec :: Int
        prefixMinusPrec = 6

        showPositive :: Scientific -> ShowS
        showPositive = showString . fmtAsGeneric . toDecimalDigits

        -- Choose fixed notation for "human sized" exponents (0..7),
        -- exponent notation otherwise.
        fmtAsGeneric :: ([Int], Int) -> String
        fmtAsGeneric x@(_is, e)
            | e < 0 || e > 7 = fmtAsExponent x
            | otherwise      = fmtAsFixed    x
-- | Render digits/exponent (as produced by 'toDecimalDigits') in
-- exponent notation, e.g. @([3,2],1)@ -> @"3.2e0"@.
fmtAsExponent :: ([Int], Int) -> String
fmtAsExponent (is, e) =
    case ds of
      "0"     -> "0.0e0"
      -- A single digit still gets a ".0" fractional part.
      [d]     -> d : '.' :'0' : 'e' : show_e'
      (d:ds') -> d : '.' : ds' ++ ('e' : show_e')
      []      -> error "formatScientific/doFmt/FFExponent: []"
  where
    -- toDecimalDigits yields 0.d1..dn * 10^e; one digit moves before the
    -- point, so the printed exponent is e-1.
    show_e' = show (e-1)

    ds = map intToDigit is
-- | Render digits/exponent (as produced by 'toDecimalDigits') in
-- plain fixed notation, e.g. @([3,2],1)@ -> @"3.2"@.
fmtAsFixed :: ([Int], Int) -> String
fmtAsFixed (is, e)
    -- Value < 1: leading "0." plus (-e) padding zeros before the digits.
    | e <= 0    = '0':'.':(replicate (-e) '0' ++ ds)
    | otherwise =
        let
           -- Walk e digits into the integral part, zero-padding when the
           -- digit list runs out before the point position.
           f 0 s    rs  = mk0 (reverse s) ++ '.':mk0 rs
           f n s    ""  = f (n-1) ('0':s) ""
           f n s (r:rs) = f (n-1) (r:s) rs
        in
           f e "" ds
  where
    -- Empty side of the point prints as a single "0".
    mk0 "" = "0"
    mk0 ls = ls

    ds = map intToDigit is
-- | Like 'show' but provides rendering options.
formatScientific :: FPFormat
                 -> Maybe Int  -- ^ Number of decimal places to render.
                 -> Scientific
                 -> String
formatScientific format mbDecs s
    | coefficient s < 0 = '-':formatPositiveScientific (-s)
    | otherwise         =     formatPositiveScientific   s
  where
    formatPositiveScientific :: Scientific -> String
    formatPositiveScientific s' = case format of
        Generic  -> fmtAsGeneric        $ toDecimalDigits s'
        Exponent -> fmtAsExponentMbDecs $ toDecimalDigits s'
        Fixed    -> fmtAsFixedMbDecs    $ toDecimalDigits s'

    -- Same notation choice as the Show instance: fixed for exponents 0..7.
    fmtAsGeneric :: ([Int], Int) -> String
    fmtAsGeneric x@(_is, e)
        | e < 0 || e > 7 = fmtAsExponentMbDecs x
        | otherwise      = fmtAsFixedMbDecs x

    fmtAsExponentMbDecs :: ([Int], Int) -> String
    fmtAsExponentMbDecs x = case mbDecs of
                              Nothing  -> fmtAsExponent x
                              Just dec -> fmtAsExponentDecs dec x

    fmtAsFixedMbDecs :: ([Int], Int) -> String
    fmtAsFixedMbDecs x = case mbDecs of
                           Nothing  -> fmtAsFixed x
                           Just dec -> fmtAsFixedDecs dec x

    -- Exponent notation with a fixed number of decimals (at least 1).
    fmtAsExponentDecs :: Int -> ([Int], Int) -> String
    fmtAsExponentDecs dec (is, e) =
        let dec' = max dec 1 in
        case is of
         [0] -> '0' :'.' : take dec' (repeat '0') ++ "e0"
         _ ->
          let
           -- 'roundTo' rounds to dec'+1 significant digits; ei == 1 signals
           -- a carry out of the leading digit (e.g. 9.99 -> 10.0).
           (ei,is') = roundTo (dec'+1) is
           (d:ds') = map intToDigit (if ei > 0 then init is' else is')
          in
          d:'.':ds' ++ 'e':show (e-1+ei)

    -- Fixed notation with a fixed number of decimals (at least 0).
    fmtAsFixedDecs :: Int -> ([Int], Int) -> String
    fmtAsFixedDecs dec (is, e) =
        let dec' = max dec 0 in
        if e >= 0 then
         let
          (ei,is') = roundTo (dec' + e) is
          -- Split rounded digits into integral and fractional parts,
          -- shifting by one when rounding carried (ei > 0).
          (ls,rs)  = splitAt (e+ei) (map intToDigit is')
         in
         mk0 ls ++ (if null rs then "" else '.':rs)
        else
         let
          -- Value < 1: prepend (-e) zeros before rounding.
          (ei,is') = roundTo dec' (replicate (-e) 0 ++ is)
          d:ds' = map intToDigit (if ei > 0 then is' else 0:is')
         in
         d : (if null ds' then "" else '.':ds')
      where
        mk0 ls = case ls of { "" -> "0" ; _ -> ls}
----------------------------------------------------------------------

-- | Similar to 'Numeric.floatToDigits', @toDecimalDigits@ takes a
-- positive 'Scientific' number, and returns a list of digits and
-- a base-10 exponent. In particular, if @x>=0@, and
--
-- > toDecimalDigits x = ([d1,d2,...,dn], e)
--
-- then
--
--   1. @n >= 1@
--   2. @x = 0.d1d2...dn * (10^^e)@
--   3. @0 <= di <= 9@
--   4. @null $ takeWhile (==0) $ reverse [d1,d2,...,dn]@
--
-- The last property means that the coefficient is normalized, i.e. doesn't
-- contain trailing zeros.
toDecimalDigits :: Scientific -> ([Int], Int)
toDecimalDigits (Scientific 0 _) = ([0], 0)
toDecimalDigits (Scientific c e) = go c 0 []
  where
    -- Peel digits off the coefficient from the least significant end,
    -- counting them in 'n' so the final exponent is n + e.
    go :: Integer -> Int -> [Int] -> ([Int], Int)
    go 0 !n ds = (ds, ne) where !ne = n + e
    go i !n ds = case i `quotRemInteger` 10 of
                   (# q, r #) -> go q (n+1) (d:ds)
        where
          !d = fromIntegral r
----------------------------------------------------------------------
-- Normalization
----------------------------------------------------------------------

{-# DEPRECATED normalize "Scientific numbers are now normalized on construction so the normalize function is no longer needed." #-}
-- | Normalize a scientific number by dividing out powers of 10 from the
-- 'coefficient' and incrementing the 'base10Exponent' each time.
normalize :: Scientific -> Scientific
normalize (Scientific c e)
    | c > 0 =   normalizePositive   c  e
    -- Negative numbers: normalize the absolute value, then negate.
    | c < 0 = -(normalizePositive (-c) e)
    -- Zero is canonically represented with exponent 0.
    | otherwise {- c == 0 -} = Scientific 0 0
-- | Strip factors of 10 from a positive coefficient, bumping the exponent
-- once per stripped factor, until the coefficient is not divisible by 10.
-- Precondition: @c > 0@ (a zero coefficient would loop forever).
normalizePositive :: Integer -> Int -> Scientific
normalizePositive !c !e = case quotRemInteger c 10 of
                            (# c', r #)
                              | r == 0    -> normalizePositive c' (e+1)
                              | otherwise -> Scientific c e
| null | https://raw.githubusercontent.com/basvandijk/scientific/7071b9b9b3c7317218dce2c4a1c613d1c21431e8/src/Data/Scientific/Internal.hs | haskell | # LANGUAGE DeriveDataTypeable #
# LANGUAGE BangPatterns #
* Construction
* Projections
* Predicates
* Conversions
** Rational
** Floating & integer
* Parsing
* Pretty printing
* Normalization
--------------------------------------------------------------------
Imports
--------------------------------------------------------------------
--------------------------------------------------------------------
Type
--------------------------------------------------------------------
| An arbitrary-precision number represented using
< scientific notation>.
decimal expansion.
^ The coefficient of a scientific number.
# UNPACK #
| @scientific c e@ constructs a scientific number which corresponds
^ coefficient
^ base-10 exponent
| Unsafe but efficient way to construct a 'Scientific' from an
^ coefficient which should be normalized
^ base-10 exponent
| Unsafe but efficient way to construct a 'Scientific' from a
'coefficient' which does not have to be normalized (i.e. it may
This function is useful when parsing a 'Scientific'. The parser
function. This will be more efficient than calling 'scientific'
because no expensive normalization has to be performed.
^ coefficient
^ base-10 exponent
--------------------------------------------------------------------
Instances
--------------------------------------------------------------------
| A hash can be safely calculated from a @Scientific@. No magnitude @10^e@ is
calculated so there's no risk of a blowup in space or time when hashing
scientific numbers coming from untrusted sources.
| Note that in the future I intend to change the type of the 'base10Exponent'
| Scientific numbers can be safely compared for equality. No magnitude @10^e@
is calculated so there's no risk of a blowup in space or time when comparing
scientific numbers coming from untrusted sources.
| Scientific numbers can be safely compared for ordering. No magnitude @10^e@
is calculated so there's no risk of a blowup in space or time when comparing
scientific numbers coming from untrusted sources.
the difference between the @'base10Exponent's@ of the arguments. If these
methods are applied to arguments which have huge exponents this could fill up
all space and crash your program! So don't apply these methods to scientific
numbers coming from untrusted sources. The other methods can be used safely.
# INLINABLE (*) #
@10^e@. If applied to a huge exponent this could fill up all space
and crash your program!
Avoid applying 'toRational' (or 'realToFrac') to scientific numbers
coming from an untrusted source and use 'toRealFloat' instead. The
latter guards against excessive space usage.
# RULES
"realToFrac_toRealFloat_Double"
realToFrac = toRealFloat :: Scientific -> Double #
# RULES
"realToFrac_toRealFloat_Float"
realToFrac = toRealFloat :: Scientific -> Float #
< repeating decimals>.
are applied to arguments which have huge exponents this could fill up all
space and crash your program! So don't apply these methods to scientific
numbers coming from untrusted sources.
'fromRational' will throw an error when the input 'Rational' is a repeating
decimal. Consider using 'fromRationalRepetend' for these rationals which
will detect the repetition and indicate where it starts.
| Although 'fromRational' is unsafe because it will throw errors on
< repeating decimals>,
@unsafeFromRational@ is even more unsafe because it will diverge instead (i.e
loop and consume all space). Though it will be more efficient because it
doesn't need to consume space linear in the number of digits in the resulting
scientific to detect the repetition.
Consider using 'fromRationalRepetend' for these rationals which will detect
the repetition and indicate where it starts.
Divide the numerator by the denominator using long division.
TODO: Use a logarithm here!
| Like 'fromRational' and 'unsafeFromRational', this function converts a
`Rational` to a `Scientific` but instead of failing or diverging (i.e loop
and consume all space) on
< repeating decimals>
it detects the repeating part, the /repetend/, and returns where it starts.
To detect the repetition this function consumes space linear in the number of
digits in the resulting scientific. In order to bound the space usage an
optional limit can be specified. If the number of digits reaches this limit
original 'Rational'
mbRepetendIx)@ will be returned. Here @s@ is the 'Scientific' without any
repetition and @mbRepetendIx@ specifies if and where in the fractional part
the repetend begins.
For example:
This represents the repeating decimal: @0.03571428571428571428...@
in the fractional part. Specifying a limit results in the following:
You can expect the following property to hold.
@ forall (mbLimit :: Maybe Int) (r :: Rational).
r == (case 'fromRationalRepetend' mbLimit r of
Left (s, r') -> toRational s + r'
case mbRepetendIx of
Nothing -> toRational s
Just repetendIx -> 'toRationalRepetend' s repetendIx)
@
^ Optional limit
| Like 'fromRationalRepetend' but always accepts a limit.
^ limit
| Like 'fromRationalRepetend' but doesn't accept a limit.
|
Converts a `Scientific` with a /repetend/ (a repeating part in the fraction),
which starts at the given index, into its corresponding 'Rational'.
Preconditions for @toRationalRepetend s r@:
* @r >= 0@
* @r < -(base10Exponent s)@
applied to a huge exponent this could fill up all space and crash your
program! So don't apply this function to untrusted input.
The formula to convert the @Scientific@ @s@
with a repetend starting at index @r@ is described in the paper:
< turning_repeating_decimals_into_fractions.pdf>
and is defined as follows:
@
(fromInteger nonRepetend + repetend % nines) /
fromInteger (10^^r)
where
c = coefficient s
e = base10Exponent s
-- Size of the fractional part.
f = (-e)
-- Size of the repetend.
n = f - r
m = 10^^n
(nonRepetend, repetend) = c \`quotRem\` m
nines = m - 1
@
Also see: 'fromRationalRepetend'.
^ Repetend index
Size of the fractional part.
Size of the repetend.
magnitude @10^e@. If applied to a huge exponent this could take a long
could fill up all space and crash your program!
| The function 'properFraction' takes a Scientific number @s@
* @f@ is a fraction with the same type and sign as @s@,
| @'truncate' s@ returns the integer nearest @s@
| @'round' s@ returns the nearest integer to @s@;
| @'ceiling' s@ returns the least integer not less than @s@
| @'floor' s@ returns the greatest integer not greater than @s@
--------------------------------------------------------------------
--------------------------------------------------------------------
computing a huge magnitude (-e) which could take up all space.
Think about parsing a scientific number from an untrusted
'floor' that number to an 'Int'. When we naively try to floor it
using:
@
fromInteger (c `div` magnitude (-e))
@
computation will quickly fill up all space and crash the program.
Note that for large /positive/ exponents there is no risk of a
space-leak since 'whenFloating' will compute:
@fromInteger c * magnitude e :: a@
where @a@ is the target type (Int in this example). So here the
space usage is bounded by the target type.
For large negative exponents we check if the exponent is smaller
than some limit (currently -324). In that case we know that the
scientific number is really small (unless the coefficient has many
digits) so we can immediately return -1 for negative scientific
numbers or 0 for positive numbers.
More precisely if @dangerouslySmall c e@ returns 'True' the
scientific number @s@ is guaranteed to be between:
Note that we avoid computing the number of decimal digits in c
(log10 c) if the exponent is not below the limit.
# INLINE dangerouslySmall #
| Precondition: the scientific needs to be an integer: @e >= 0@
# INLINE toIntegral #
--------------------------------------------------------------------
Exponentiation with a cache for the most common numbers.
--------------------------------------------------------------------
--------------------------------------------------------------------
Conversions
--------------------------------------------------------------------
number.
Note that this function uses 'Numeric.floatToDigits' to compute the digits
'Numeric.floatToDigits' doesn't work as expected for some numbers, e.g. as
the 'Double' @1e23@ is converted to @9.9999999999999991611392e22@, and that
value is shown as @9.999999999999999e22@ rather than the shorter @1e23@; the
algorithm doesn't know in which direction the short decimal representation
would be rounded and computes more digits
# SPECIALIZE fromFloatDigits :: Double -> Scientific #
# SPECIALIZE fromFloatDigits :: Float -> Scientific #
'Float').
Note that this function uses 'realToFrac' (@'fromRational' . 'toRational'@)
that could fill up all space and crash your program. If the 'base10Exponent'
of the given 'Scientific' is too big or too small to be represented in the
'toBoundedRealFloat' which explicitly handles this case by returning 'Left'.
Always prefer 'toRealFloat' over 'realToFrac' when converting from scientific
numbers coming from an untrusted source.
# SPECIALIZE toRealFloat :: Scientific -> Double #
# SPECIALIZE toRealFloat :: Scientific -> Float #
# SPECIALIZE toBoundedRealFloat :: Scientific -> Either Double Double #
# SPECIALIZE toBoundedRealFloat :: Scientific -> Either Float Float #
| A more precise version of `toRealFloat`. If the 'base10Exponent' of the given
'Scientific' is too big or too small to be represented in the target type,
We can't use realToFrac here
because that will cause an infinite loop
when the function is specialized for Double and Float
caused by the realToFrac_toRealFloat_Double/Float rewrite RULEs.
| Convert a `Scientific` to a bounded integer.
If the given `Scientific` is not an integer or doesn't fit in the
target representation, it will return `Nothing`.
that could fill up all space and crash your program.
This should not be evaluated if the given Scientific is dangerouslyBig
since it could consume all space and crash the process:
# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int #
# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int8 #
# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int16 #
# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int32 #
# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int64 #
# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word #
# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word8 #
# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word32 #
# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word64 #
| Convert a `Scientific` to an 'Integer'. Return 'Nothing' when the input is
floating-point.
/WARNING:/ To convert the @Scientific@ to an @Integer@ the magnitude @10^e@
needs to be computed. If applied to a huge exponent this could fill up all
space and crash your program! So don't apply this function to untrusted
input.
| @floatingOrInteger@ determines if the scientific is floating point or
integer.
In case it's floating-point the scientific is converted to the desired
In case it's integer to scientific is converted to the desired 'Integral' and
wrapped in 'Right'.
/WARNING:/ To convert the scientific to an integral the magnitude @10^e@
needs to be computed. If applied to a huge exponent this could take a long
could fill up all space and crash your program! So don't apply this function
to untrusted input or use 'toBoundedInteger' instead.
Also see: 'isFloating' or 'isInteger'.
# INLINABLE floatingOrInteger #
--------------------------------------------------------------------
Predicates
--------------------------------------------------------------------
| Return 'True' if the scientific is a floating point, 'False' otherwise.
Also see: 'floatingOrInteger'.
| Return 'True' if the scientific is an integer, 'False' otherwise.
Also see: 'floatingOrInteger'.
--------------------------------------------------------------------
Parsing
--------------------------------------------------------------------
| Supports the skipping of parentheses and whitespaces. Example:
> > read " ( (( -1.0e+3 ) ))" :: Scientific
> -1000.0
(Note: This @Read@ instance makes internal use of
A strict pair
# UNPACK #
# UNPACK #
# UNPACK #
| A parser for parsing a floating-point
number into a 'Scientific' value. Example:
> [(3.0,"")]
> [(3.0,"e2"),(300.0,"")]
> [(3.0,"e+2"),(300.0,"")]
> [(-3.0,"e-2"),(-3.0e-2,"")]
Note: This parser only parses the number itself; it does
not parse any surrounding parentheses or whitespaces.
--------------------------------------------------------------------
Pretty Printing
--------------------------------------------------------------------
| See 'formatScientific' if you need more control over the rendering.
| Like 'show' but provides rendering options.
^ Number of decimal places to render.
--------------------------------------------------------------------
| Similar to 'Numeric.floatToDigits', @toDecimalDigits@ takes a
positive 'Scientific' number, and returns a list of digits and
> toDecimalDigits x = ([d1,d2,...,dn], e)
then
The last property means that the coefficient is normalized, i.e. doesn't
--------------------------------------------------------------------
Normalization
--------------------------------------------------------------------
# DEPRECATED normalize "Scientific numbers are now normalized on construction so the normalize function is no longer needed." #
'coefficient' and incrementing the 'base10Exponent' each time.
c == 0 | # LANGUAGE CPP #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE UnboxedTuples #
# LANGUAGE PatternGuards #
module Data.Scientific.Internal
( Scientific
, scientific
, unsafeScientificFromNormalized
, unsafeScientificFromNonNormalized
, coefficient
, base10Exponent
, isFloating
, isInteger
, unsafeFromRational
, fromRationalRepetend
, fromRationalRepetendLimited
, fromRationalRepetendUnlimited
, toRationalRepetend
, floatingOrInteger
, toRealFloat
, toBoundedRealFloat
, toBoundedInteger
, toUnboundedInteger
, fromFloatDigits
, scientificP
, formatScientific
, FPFormat(..)
, toDecimalDigits
, normalize
) where
import Control.Exception (throw, ArithException(DivideByZero))
import Control.Monad (mplus)
import Control.Monad.ST (runST)
import Control.DeepSeq (NFData, rnf)
import Data.Binary (Binary, get, put)
import Data.Char (intToDigit, ord)
import Data.Data (Data)
import Data.Hashable (Hashable(..))
import Data.Int (Int8, Int16, Int32, Int64)
import qualified Data.Map as M (Map, empty, insert, lookup)
import Data.Ratio ((%), numerator, denominator)
import Data.Typeable (Typeable)
import qualified Data.Primitive.Array as Primitive
import Data.Word (Word8, Word16, Word32, Word64)
import Math.NumberTheory.Logarithms (integerLog10')
import qualified Numeric (floatToDigits)
import qualified Text.Read as Read
import Text.Read (readPrec)
import qualified Text.ParserCombinators.ReadPrec as ReadPrec
import qualified Text.ParserCombinators.ReadP as ReadP
import Text.ParserCombinators.ReadP ( ReadP )
import Data.Text.Lazy.Builder.RealFloat (FPFormat(..))
#if !MIN_VERSION_base(4,9,0)
import Control.Applicative ((*>))
#endif
#if !MIN_VERSION_base(4,8,0)
import Data.Functor ((<$>))
import Data.Word (Word)
import Control.Applicative ((<*>))
#endif
#if MIN_VERSION_base(4,5,0)
import Data.Bits (unsafeShiftR)
#else
import Data.Bits (shiftR)
#endif
import GHC.Integer (quotRemInteger, quotInteger)
import GHC.Integer.Compat (divInteger)
import Utils (roundTo)
This type describes the set of all which have a finite
A scientific number with ' coefficient ' @c@ and ' base10Exponent '
corresponds to the ' Fractional ' number : @'fromInteger ' c * 10 ' ^^ ' e@
data Scientific = Scientific
{ coefficient :: !Integer
^ The base-10 exponent of a scientific number .
} deriving (Typeable, Data)
to the ' Fractional ' number : @'fromInteger ' c * 10 ' ^^ ' e@.
scientific
-> Scientific
scientific c e = normalize (Scientific c e)
already normalized ' coefficient ' , i.e. it has no trailing 0s .
unsafeScientificFromNormalized
-> Scientific
unsafeScientificFromNormalized = Scientific
contain trailing 0s ) . You should supply the number of trailing 0s
in the ' coefficient ' as the second argument .
can count the number of trailing 0s and supply that to this
unsafeScientificFromNonNormalized
^ number of trailing 0s in the coefficient . This should be positive !
-> Scientific
unsafeScientificFromNonNormalized 0 _ _ = Scientific 0 0
unsafeScientificFromNonNormalized c 0 e = Scientific c e
unsafeScientificFromNonNormalized c z e = Scientific (c `quotInteger` magnitude z) (e + z)
-- | A 'Scientific' is fully evaluated once in WHNF (both fields are
-- strict), so 'rnf' only needs to force the outer constructor.
instance NFData Scientific where
    rnf (Scientific _ _) = ()

-- | Hashes both the (normalized) coefficient and the exponent, so equal
-- numbers hash equally.
instance Hashable Scientific where
    hashWithSalt salt (Scientific c e) = salt `hashWithSalt` c `hashWithSalt` e

-- | Note that in the future the exponent field may be widened
-- from @Int@ to @Integer@. To be forward compatible the @Binary@ instance
-- already encodes the exponent as 'Integer'.
instance Binary Scientific where
    put (Scientific c e) = put c *> put (toInteger e)
    get = Scientific <$> get <*> (fromInteger <$> get)

-- | Structural equality is sound here because scientifics are normalized
-- on construction: equal numbers have identical representations.
instance Eq Scientific where
    Scientific c1 e1 == Scientific c2 e2 = c1 == c2 && e1 == e2
-- | Compares without materializing the (possibly huge) 'Rational's.
-- Signs are compared first; for same-sign operands the orders of
-- magnitude (decimal digit count plus exponent) decide, and only when
-- those tie are the coefficients compared after aligning them with an
-- integer division by the appropriate power of 10.
instance Ord Scientific where
    compare (Scientific c1 e1) (Scientific c2 e2)
        | c1 == c2 && e1 == e2 = EQ
        | c1 < 0    = if c2 < 0 then cmp (-c2) e2 (-c1) e1 else LT
        | c1 > 0    = if c2 > 0 then cmp   c1  e1   c2  e2 else GT
        | otherwise = if c2 > 0 then LT else GT
      where
        -- Precondition: cx > 0 && cy > 0.
        cmp cx ex cy ey
            | log10sx < log10sy = LT
            | log10sx > log10sy = GT
            | d < 0     = if cx <= (cy `quotInteger` magnitude (-d)) then LT else GT
            | d > 0     = if cy >  (cx `quotInteger` magnitude   d)  then LT else GT
            | otherwise = if cx < cy then LT else GT
          where
            -- Order of magnitude of each operand.
            log10sx = log10cx + ex
            log10sy = log10cy + ey

            log10cx = integerLog10' cx
            log10cy = integerLog10' cy

            d = log10cx - log10cy
-- | /WARNING:/ '+' and '-' compute the 'Integer' magnitude: @10^e@ where
-- @e@ is the difference between the exponents of the operands. If applied
-- to operands with hugely different exponents this could fill up all
-- space and crash your program.
instance Num Scientific where
    Scientific c1 e1 + Scientific c2 e2
       | e1 < e2   = scientific (c1   + c2*l) e1
       | otherwise = scientific (c1*r + c2  ) e2
         where
           l = magnitude (e2 - e1)
           r = magnitude (e1 - e2)
    {-# INLINABLE (+) #-}

    Scientific c1 e1 - Scientific c2 e2
       | e1 < e2   = scientific (c1   - c2*l) e1
       | otherwise = scientific (c1*r - c2  ) e2
         where
           l = magnitude (e2 - e1)
           r = magnitude (e1 - e2)
    {-# INLINABLE (-) #-}

    -- Multiplication never needs a magnitude: exponents simply add.
    Scientific c1 e1 * Scientific c2 e2 =
        scientific (c1 * c2) (e1 + e2)

    abs (Scientific c e) = Scientific (abs c) e
    {-# INLINABLE abs #-}

    negate (Scientific c e) = Scientific (negate c) e
    {-# INLINABLE negate #-}

    signum (Scientific c _) = Scientific (signum c) 0
    {-# INLINABLE signum #-}

    fromInteger i = scientific i 0
    {-# INLINABLE fromInteger #-}
-- | /WARNING:/ 'toRational' needs to compute the 'Integer' magnitude:
-- @10^e@. If applied to a huge exponent this could fill up all space and
-- crash your program.
instance Real Scientific where
    toRational (Scientific c e)
      | e < 0     =  c % magnitude (-e)
      | otherwise = (c * magnitude e) % 1
    {-# INLINABLE toRational #-}
-- | /WARNING:/ 'recip' and '/' will throw an error when their outputs are
-- repeating decimals, since those cannot be represented as a 'Scientific'.
--
-- These methods also compute 'Integer' magnitudes (@10^e@). If these
-- methods are applied to arguments with huge exponents this could fill
-- up all space and crash your program.
instance Fractional Scientific where
    recip = fromRational . recip . toRational

    Scientific c1 e1 / Scientific c2 e2
        | d < 0     = fromRational (x / (fromInteger (magnitude (-d))))
        | otherwise = fromRational (x *  fromInteger (magnitude   d))
      where
        d = e1 - e2
        x = c1 % c2

    fromRational rational =
        case mbRepetendIx of
          Nothing  -> s
          -- A repetend means the decimal expansion never terminates, so
          -- no finite Scientific exists for this Rational.
          Just _ix -> error $
            "fromRational has been applied to a repeating decimal " ++
            "which can't be represented as a Scientific! " ++
            "It's better to avoid performing fractional operations on Scientifics " ++
            "and convert them to other fractional types like Double as early as possible."
      where
        (s, mbRepetendIx) = fromRationalRepetendUnlimited rational
-- | Convert a 'Rational' to a 'Scientific' by long division, without any
-- repetend detection.
--
-- /WARNING:/ diverges (loops forever) on inputs whose decimal expansion
-- repeats, e.g. @unsafeFromRational (1 % 3)@. Throws 'DivideByZero' when
-- the denominator is 0.
unsafeFromRational :: Rational -> Scientific
unsafeFromRational rational
    | d == 0    = throw DivideByZero
    | otherwise = positivize (longDiv 0 0) (numerator rational)
  where
    -- Accumulate decimal digits into the coefficient @c@, decrementing
    -- the exponent @e@ for each fractional digit, until remainder is 0.
    longDiv :: Integer -> Int -> (Integer -> Scientific)
    longDiv !c !e  0 = scientific c e
    longDiv !c !e !n
        | n < d     = longDiv (c * 10) (e - 1) (n * 10)
        | otherwise = case n `quotRemInteger` d of
                        (#q, r#) -> longDiv (c + q) e r

    d = denominator rational
@Left ( s , will be returned . Here @s@ is the ' Scientific ' constructed so
far and @r@ is the remaining ' Rational ' . @toRational s + r@ yields the
If the limit is not reached or no limit was specified ( s ,
@fromRationalRepetend Nothing ( 1 % 28 ) = = Right ( 3.571428e-2 , Just 2)@
which is sometimes also unambiguously denoted as @0.03(571428)@.
Here the repetend is enclosed in parentheses and starts at the 3rd digit ( index 2 )
@fromRationalRepetend ( Just 4 ) ( 1 % 28 ) = = Left ( 3.5e-2 , 1 % 1400)@
Right ( s , ) - >
fromRationalRepetend
-> Rational
-> Either (Scientific, Rational)
(Scientific, Maybe Int)
fromRationalRepetend mbLimit rational =
case mbLimit of
Nothing -> Right $ fromRationalRepetendUnlimited rational
Just l -> fromRationalRepetendLimited l rational
fromRationalRepetendLimited
-> Rational
-> Either (Scientific, Rational)
(Scientific, Maybe Int)
fromRationalRepetendLimited l rational
| d == 0 = throw DivideByZero
| num < 0 = case longDiv (-num) of
Left (s, r) -> Left (-s, -r)
Right (s, mb) -> Right (-s, mb)
| otherwise = longDiv num
where
num = numerator rational
longDiv :: Integer -> Either (Scientific, Rational) (Scientific, Maybe Int)
longDiv = longDivWithLimit 0 0 M.empty
longDivWithLimit
:: Integer
-> Int
-> M.Map Integer Int
-> (Integer -> Either (Scientific, Rational)
(Scientific, Maybe Int))
longDivWithLimit !c !e _ns 0 = Right (Scientific c e, Nothing)
longDivWithLimit !c !e ns !n
| Just e' <- M.lookup n ns = Right (scientific c e, Just (-e'))
| e <= (-l) = Left (scientific c e, n % (d * magnitude (-e)))
| n < d = let !ns' = M.insert n e ns
in longDivWithLimit (c * 10) (e - 1) ns' (n * 10)
| otherwise = case n `quotRemInteger` d of
(#q, r#) -> longDivWithLimit (c + q) e ns r
d = denominator rational
-- | Like 'fromRationalRepetend' but doesn't accept a limit.
--
-- Performs long division while remembering every remainder seen; when a
-- remainder recurs, the digits emitted since its first occurrence form
-- the repetend, whose start index is returned in the 'Maybe'. Throws
-- 'DivideByZero' on a zero denominator.
fromRationalRepetendUnlimited :: Rational -> (Scientific, Maybe Int)
fromRationalRepetendUnlimited rational
        | d == 0    = throw DivideByZero
        | num < 0   = case longDiv (-num) of
                        (s, mb) -> (-s, mb)
        | otherwise = longDiv num
      where
        num = numerator rational

        longDiv :: Integer -> (Scientific, Maybe Int)
        longDiv = longDivNoLimit 0 0 M.empty

        -- @ns@ maps each remainder to the exponent at which it was first
        -- seen, so a repeated remainder pinpoints the repetend index.
        longDivNoLimit :: Integer
                       -> Int
                       -> M.Map Integer Int
                       -> (Integer -> (Scientific, Maybe Int))
        longDivNoLimit !c !e _ns 0 = (scientific c e, Nothing)
        longDivNoLimit !c !e  ns !n
            | Just e' <- M.lookup n ns = (scientific c e, Just (-e'))
            | n < d     = let !ns' = M.insert n e ns
                          in longDivNoLimit (c * 10) (e - 1) ns' (n * 10)
            | otherwise = case n `quotRemInteger` d of
                            (#q, r#) -> longDivNoLimit (c + q) e ns r

        d = denominator rational
For example to convert the repeating decimal @0.03(571428)@ you would use :
@toRationalRepetend 0.03571428 2 = = 1 % 28@
/WARNING:/ needs to compute the ' Integer ' magnitude :
@10^^n@. Where @n@ is based on the ' base10Exponent ` of the scientific . If
toRationalRepetend
:: Scientific
-> Rational
toRationalRepetend s r
| r < 0 = error "toRationalRepetend: Negative repetend index!"
| r >= f = error "toRationalRepetend: Repetend index >= than number of digits in the fractional part!"
| otherwise = (fromInteger nonRepetend + repetend % nines) /
fromInteger (magnitude r)
where
c = coefficient s
e = base10Exponent s
f = (-e)
n = f - r
m = magnitude n
(#nonRepetend, repetend#) = c `quotRemInteger` m
nines = m - 1
-- | /WARNING:/ the methods of the @RealFrac@ instance need to compute
-- the magnitude @10^e@. If applied to a huge exponent this could take a
-- long time. Even worse, when the destination type is unbounded
-- (i.e. 'Integer') it could fill up all space and crash your program.
instance RealFrac Scientific where
    -- The function 'properFraction' takes a Scientific number @s@
    -- and returns a pair @(n, f)@ such that @s = n+f@, and:
    --
    -- * @n@ is an integral number with the same sign as @s@; and
    -- * @f@ is a fraction with the same type and sign as @s@,
    --   and with absolute value less than @1@.
    properFraction s@(Scientific c e)
        | e < 0     = if dangerouslySmall c e
                      then (0, s)
                      else case c `quotRemInteger` magnitude (-e) of
                             (#q, r#) -> (fromInteger q, Scientific r e)
        | otherwise = (toIntegral c e, 0)
    {-# INLINABLE properFraction #-}

    -- @'truncate' s@ returns the integer nearest @s@
    -- between zero and @s@.
    truncate = whenFloating $ \c e ->
                 if dangerouslySmall c e
                 then 0
                 else fromInteger $ c `quotInteger` magnitude (-e)
    {-# INLINABLE truncate #-}

    -- @'round' s@ returns the nearest integer to @s@;
    -- the even integer if @s@ is equidistant between two integers.
    round = whenFloating $ \c e ->
              if dangerouslySmall c e
              then 0
              else let (#q, r#) = c `quotRemInteger` magnitude (-e)
                       n = fromInteger q
                       m | r < 0     = n - 1
                         | otherwise = n + 1
                       f = Scientific r e
                   in case signum $ coefficient $ abs f - 0.5 of
                        -1 -> n
                        0  -> if even n then n else m
                        1  -> m
                        _  -> error "round default defn: Bad value"
    {-# INLINABLE round #-}

    -- @'ceiling' s@ returns the least integer not less than @s@.
    ceiling = whenFloating $ \c e ->
                if dangerouslySmall c e
                then if c <= 0
                     then 0
                     else 1
                else case c `quotRemInteger` magnitude (-e) of
                       (#q, r#) | r <= 0    -> fromInteger q
                                | otherwise -> fromInteger (q + 1)
    {-# INLINABLE ceiling #-}

    -- @'floor' s@ returns the greatest integer not greater than @s@.
    floor = whenFloating $ \c e ->
              if dangerouslySmall c e
              then if c < 0
                   then -1
                   else 0
              else fromInteger (c `divInteger` magnitude (-e))
    {-# INLINABLE floor #-}
----------------------------------------------------------------------
-- Internal utilities
----------------------------------------------------------------------

-- | Used in the 'RealFrac' methods to guard against computing a huge
-- magnitude (@10^(-e)@) which could take up all space.
--
-- Returns 'True' when the scientific number @c * 10^e@ is guaranteed to
-- lie strictly between -0.1 and 0.1, so callers can short-circuit
-- without materializing the magnitude. The number of decimal digits of
-- @c@ ('integerLog10'') is only computed once the exponent is already
-- below the limit.
dangerouslySmall :: Integer -> Int -> Bool
dangerouslySmall c e = e < (-limit) && e < (-integerLog10' (abs c)) - 1
{-# INLINE dangerouslySmall #-}

limit :: Int
limit = maxExpt
-- | Apply @f@ to the absolute value of @x@ and negate the result when
-- @x@ was negative, letting helpers assume non-negative input.
positivize :: (Ord a, Num a, Num b) => (a -> b) -> (a -> b)
positivize f x | x < 0     = -(f (-x))
               | otherwise =   f x
{-# INLINE positivize #-}

-- | Dispatch on the sign of the exponent: apply @f@ for fractional
-- scientifics (negative exponent), otherwise convert the integral value.
whenFloating :: (Num a) => (Integer -> Int -> a) -> Scientific -> a
whenFloating f (Scientific c e)
    | e < 0     = f c e
    | otherwise = toIntegral c e
{-# INLINE whenFloating #-}

-- | Precondition: the scientific needs to be an integer: @e >= 0@
toIntegral :: (Num a) => Integer -> Int -> a
toIntegral c e = fromInteger c * magnitude e
{-# INLINE toIntegral #-}
----------------------------------------------------------------------
-- Exponentiation with a cache for the most common numbers.
----------------------------------------------------------------------

-- | The same limit as in GHC.Float.
maxExpt :: Int
maxExpt = 324

-- | Cache of powers of 10 below 'maxExpt': @expts10 ! i == 10^i@.
-- Built by repeated squaring: entry @2k@ is the square of entry @k@ and
-- entry @2k+1@ is ten times that; referencing 'expts10' itself inside
-- the builder is safe because only already-written slots are read.
expts10 :: Primitive.Array Integer
expts10 = runST $ do
    ma <- Primitive.newArray maxExpt uninitialised
    Primitive.writeArray ma 0  1
    Primitive.writeArray ma 1 10
    let go !ix
          | ix == maxExpt = Primitive.unsafeFreezeArray ma
          | otherwise = do
              Primitive.writeArray ma  ix     xx
              Primitive.writeArray ma (ix+1) (10*xx)
              go (ix+2)
          where
            xx = x * x
            x  = Primitive.indexArray expts10 half
#if MIN_VERSION_base(4,5,0)
            !half = ix `unsafeShiftR` 1
#else
            !half = ix `shiftR` 1
#endif
    go 2

-- | Placeholder for array slots that are written before they are read.
uninitialised :: error
uninitialised = error "Data.Scientific: uninitialised element"
-- | @magnitude e == 10 ^ e@
--
-- Uses the 'expts10' cache for exponents below 'maxExpt'; larger
-- exponents fall back to multiplying out from the largest cached power.
magnitude :: Num a => Int -> a
magnitude e | e < maxExpt = cachedPow10 e
            | otherwise   = cachedPow10 hi * 10 ^ (e - hi)
  where
    cachedPow10 = fromInteger . Primitive.indexArray expts10

    hi = maxExpt - 1
-- | Convert a 'RealFloat' (like a 'Double' or 'Float') into a
-- 'Scientific' number.
--
-- Note that this function uses 'Numeric.floatToDigits' to compute the
-- digits and exponent of the 'RealFloat' number. Be aware that the
-- algorithm used in 'Numeric.floatToDigits' doesn't work as expected for
-- some numbers, e.g. the 'Double' @1e23@ is converted to
-- @9.9999999999999991611392e22@: the algorithm doesn't know in which
-- direction the short decimal representation would be rounded and
-- computes more digits.
fromFloatDigits :: (RealFloat a) => a -> Scientific
fromFloatDigits 0  = 0
fromFloatDigits rf = positivize fromPositiveRealFloat rf
    where
      fromPositiveRealFloat r = go digits 0 0
        where
          (digits, e) = Numeric.floatToDigits 10 r

          -- Fold the digit list into the coefficient while counting the
          -- digits, so the exponent can be adjusted at the end.
          go :: [Int] -> Integer -> Int -> Scientific
          go []     !c !n = Scientific c (e - n)
          go (d:ds) !c !n = go ds (c * 10 + toInteger d) (n + 1)

{-# INLINABLE fromFloatDigits #-}

{-# SPECIALIZE fromFloatDigits :: Double -> Scientific #-}
{-# SPECIALIZE fromFloatDigits :: Float  -> Scientific #-}
-- | Safely convert a 'Scientific' number into a 'RealFloat' (like a
-- 'Double' or a 'Float').
--
-- Note that this function uses 'realToFrac'
-- (@'fromRational' . 'toRational'@) internally but it guards against
-- computing huge Integer magnitudes (@10^e@) that could fill up all
-- space and crash your program. If the 'base10Exponent' of the given
-- 'Scientific' is too big or too small to be represented in the target
-- type, Infinity or 0 will be returned respectively. Use
-- 'toBoundedRealFloat' which explicitly handles this case by returning
-- 'Left'.
--
-- Always prefer 'toRealFloat' over 'realToFrac' when converting from
-- scientific numbers coming from an untrusted source.
toRealFloat :: (RealFloat a) => Scientific -> a
toRealFloat = either id id . toBoundedRealFloat

{-# INLINABLE toRealFloat #-}
{-# SPECIALIZE toRealFloat :: Scientific -> Double #-}
{-# SPECIALIZE toRealFloat :: Scientific -> Float  #-}
-- | A more precise version of 'toRealFloat'. If the 'base10Exponent' of
-- the given 'Scientific' is too big or too small to be represented in
-- the target type, Infinity or 0 will be returned as 'Left'.
--
-- NOTE(review): the @e > limit@ guard below was reconstructed; it
-- mirrors the @e < -limit@ underflow branch — confirm against upstream.
toBoundedRealFloat :: forall a. (RealFloat a) => Scientific -> Either a a
toBoundedRealFloat s@(Scientific c e)
    | c == 0     = Right 0
    | e >  limit = if e > hiLimit then Left $ sign (1/0) -- Infinity
                   else Right $ fromRational ((c * magnitude e) % 1)
    | e < -limit = if e < loLimit && e + d < loLimit then Left $ sign 0
                   else Right $ fromRational (c % magnitude (-e))
    | otherwise = Right $ fromRational (toRational s)
                  -- We can't use realToFrac here
                  -- because that will cause an infinite loop
                  -- when the function is specialized for Double and Float
                  -- caused by the realToFrac_toRealFloat_Double/Float rewrite RULEs.
  where
    hiLimit, loLimit :: Int
    hiLimit = ceiling (fromIntegral hi * log10Radix)
    loLimit = floor   (fromIntegral lo * log10Radix) -
              ceiling (fromIntegral digits * log10Radix)

    log10Radix :: Double
    log10Radix = logBase 10 $ fromInteger radix

    radix    = floatRadix  (undefined :: a)
    digits   = floatDigits (undefined :: a)
    (lo, hi) = floatRange  (undefined :: a)

    d = integerLog10' (abs c)

    sign x | c < 0     = -x
           | otherwise =  x

{-# INLINABLE toBoundedRealFloat #-}
{-# SPECIALIZE toBoundedRealFloat :: Scientific -> Either Double Double #-}
{-# SPECIALIZE toBoundedRealFloat :: Scientific -> Either Float  Float  #-}
-- | Convert a 'Scientific' to a bounded integer.
--
-- If the given 'Scientific' is not an integer or doesn't fit in the
-- target representation, it will return 'Nothing'.
--
-- This function also guards against computing huge Integer magnitudes
-- (@10^e@) that could fill up all space and crash your program.
toBoundedInteger :: forall i. (Integral i, Bounded i) => Scientific -> Maybe i
toBoundedInteger s@(Scientific c e)
    | isFloating s || dangerouslyBig || outsideBounds n = Nothing
    | otherwise = Just $ fromInteger n
  where
    -- This should not be evaluated if the given Scientific is
    -- dangerouslyBig since it could consume all space and crash the
    -- process:
    dangerouslyBig = e > limit &&
                     e > integerLog10' (max (abs iMinBound) (abs iMaxBound))

    outsideBounds i = i < iMinBound || i > iMaxBound

    iMinBound = toInteger (minBound :: i)
    iMaxBound = toInteger (maxBound :: i)

    n :: Integer
    n = toIntegral c e

{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int    #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int8   #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int16  #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int32  #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Int64  #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word   #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word8  #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word16 #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word32 #-}
{-# SPECIALIZE toBoundedInteger :: Scientific -> Maybe Word64 #-}
-- | Convert a 'Scientific' to an 'Integer'. Return 'Nothing' when the
-- input is floating-point.
--
-- /WARNING:/ To convert the @Scientific@ to an @Integer@ the magnitude
-- @10^e@ needs to be computed. If applied to a huge exponent this could
-- fill up all space and crash your program! So don't apply this function
-- to untrusted input.
toUnboundedInteger :: Scientific -> Maybe Integer
toUnboundedInteger s@(Scientific c e)
    | isInteger s = Just (toIntegral c e)
    | otherwise   = Nothing

-- | @floatingOrInteger@ determines if the scientific is floating point
-- or integer.
--
-- In case it's floating-point the scientific is converted to the desired
-- 'RealFloat' using 'toRealFloat' and wrapped in 'Left'.
--
-- In case it's integer the scientific is converted to the desired
-- 'Integral' and wrapped in 'Right'.
--
-- /WARNING:/ To convert the scientific to an integral the magnitude
-- @10^e@ needs to be computed. If applied to a huge exponent this could
-- take a long time. Even worse, when the destination type is unbounded
-- (i.e. 'Integer') it could fill up all space and crash your program! So
-- don't apply this function to untrusted input or use 'toBoundedInteger'
-- instead.
--
-- Also see: 'isFloating' or 'isInteger'.
floatingOrInteger :: (RealFloat r, Integral i) => Scientific -> Either r i
floatingOrInteger s@(Scientific c e)
    | isInteger s = Right (toIntegral c e)
    | otherwise   = Left  (toRealFloat s)
{-# INLINABLE floatingOrInteger #-}
-- | Return 'True' if the scientific is a floating point, 'False'
-- otherwise.
--
-- Also see: 'floatingOrInteger'.
isFloating :: Scientific -> Bool
isFloating = not . isInteger

-- | Return 'True' if the scientific is an integer, 'False' otherwise.
-- A (normalized) scientific is integral exactly when its exponent is
-- non-negative.
--
-- Also see: 'floatingOrInteger'.
isInteger :: Scientific -> Bool
isInteger s = base10Exponent s >= 0
-- | Supports the skipping of parentheses and whitespaces. Example:
--
-- > read " (( -1.0e+3 )) " :: Scientific
-- > -1000.0
--
-- (Note: This @Read@ instance makes internal use of 'scientificP' to
-- parse the floating-point number.)
instance Read Scientific where
    readPrec = Read.parens $ ReadPrec.lift (ReadP.skipSpaces >> scientificP)
-- | A parser for parsing a floating-point
-- number into a 'Scientific' value. Example:
--
-- > readP_to_S scientificP "3"     == [(3.0,"")]
-- > readP_to_S scientificP "3.0e2" == [(3.0,"e2"),(300.0,"")]
--
-- Note: This parser only parses the number itself; it does
-- not parse any surrounding parentheses or whitespaces.
scientificP :: ReadP Scientific
scientificP = do
  pos <- positive

  -- Integral part; trailing zeros of the coefficient are counted
  -- separately (z1) so the result can be built in non-normalized form.
  S2 n z1 <- foldDigits stepC (S2 0 0)

  let s = S3 n z1 0

  -- Optional fractional part after a '.'.
  S3 coeff z expnt <- (ReadP.satisfy (== '.') >> foldDigits stepF s)
                      ReadP.<++ return s

  let signedCoeff | pos       = coeff
                  | otherwise = (-coeff)

  -- Optional exponent part; the parsed exponent is added to the one
  -- accumulated while reading the fractional digits.
  (ReadP.satisfy isE >>
    ((unsafeScientificFromNonNormalized signedCoeff z . (expnt +)) <$> eP)) `mplus`
      return (unsafeScientificFromNonNormalized signedCoeff z expnt)
  where
    -- Optional leading sign; defaults to positive.
    positive :: ReadP Bool
    positive = (('+' ==) <$> ReadP.satisfy isSign) `mplus` return True

    -- Step for integral digits: count runs of trailing zeros.
    stepC :: S2 -> Int -> S2
    stepC (S2 c  z) 0 = S2 (c * 10)               (z + 1)
    stepC (S2 c _z) d = S2 (c * 10 + toInteger d)  0

    -- Step for fractional digits: also decrement the exponent.
    stepF :: S3 -> Int -> S3
    stepF (S3 c  z e) 0 = S3 (c * 10)               (z + 1) (e - 1)
    stepF (S3 c _z e) d = S3 (c * 10 + toInteger d)  0      (e - 1)

    -- Step for exponent digits.
    stepE :: Int -> Int -> Int
    stepE e d = e * 10 + d

    -- Exponent with optional sign.
    eP :: ReadP Int
    eP = do posE <- positive
            e    <- foldDigits stepE 0
            if posE
               then return   e
               else return (-e)
-- | Fold a step function over one or more decimal digits. The first
-- digit is consumed via the monadic parser; the remaining digits are
-- inspected through 'ReadP.look' and consumed with 'ReadP.get', which
-- avoids per-digit backtracking.
foldDigits :: (a -> Int -> a) -> a -> ReadP a
foldDigits f z = do
    c <- ReadP.satisfy isDecimal
    let digit = ord c - 48
        a = f z digit

    ReadP.look >>= go a
  where
    go !a [] = return a
    go !a (c:cs)
        | isDecimal c = do
            _ <- ReadP.get
            let digit = ord c - 48
            go (f a digit) cs
        | otherwise = return a
-- | Is the character an ASCII decimal digit (@'0'@..@'9'@)?
isDecimal :: Char -> Bool
isDecimal c = c >= '0' && c <= '9'
{-# INLINE isDecimal #-}

-- | Is the character a leading sign (@'+'@ or @'-'@)?
isSign :: Char -> Bool
isSign c = c == '-' || c == '+'
{-# INLINE isSign #-}

-- | Is the character an exponent marker (@'e'@ or @'E'@)?
isE :: Char -> Bool
isE c = c == 'e' || c == 'E'
{-# INLINE isE #-}
-- | Renders in fixed notation for decimal exponents in @[0..7]@ and in
-- exponent notation otherwise; negative numbers are parenthesized at
-- precedences above prefix minus.
instance Show Scientific where
    showsPrec d s
        | coefficient s < 0 = showParen (d > prefixMinusPrec) $
               showChar '-' . showPositive (-s)
        | otherwise         = showPositive s
      where
        prefixMinusPrec :: Int
        prefixMinusPrec = 6

        showPositive :: Scientific -> ShowS
        showPositive = showString . fmtAsGeneric . toDecimalDigits

        -- Choose between fixed and exponent notation based on the
        -- decimal exponent.
        fmtAsGeneric :: ([Int], Int) -> String
        fmtAsGeneric x@(_is, e)
            | e < 0 || e > 7 = fmtAsExponent x
            | otherwise      = fmtAsFixed    x
-- | Render a digits/exponent pair in exponent notation,
-- e.g. @([1,2],1) -> \"1.2e0\"@. The shown exponent is @e-1@ because the
-- digit list is interpreted as @0.d1d2...dn * 10^e@.
fmtAsExponent :: ([Int], Int) -> String
fmtAsExponent (is, e) =
    case ds of
      "0"     -> "0.0e0"
      [d]     -> d : '.' :'0' : 'e' : show_e'
      (d:ds') -> d : '.' : ds' ++ ('e' : show_e')
      []      -> error "formatScientific/doFmt/FFExponent: []"
  where
    show_e' = show (e-1)

    ds = map intToDigit is

-- | Render a digits/exponent pair in fixed notation,
-- e.g. @([1,2],1) -> \"1.2\"@; non-positive exponents produce leading
-- zeros after the decimal point.
fmtAsFixed :: ([Int], Int) -> String
fmtAsFixed (is, e)
    | e <= 0    = '0':'.':(replicate (-e) '0' ++ ds)
    | otherwise =
        let
           f 0 s    rs  = mk0 (reverse s) ++ '.':mk0 rs
           f n s    ""  = f (n-1) ('0':s) ""
           f n s (r:rs) = f (n-1) (r:s) rs
        in
           f e "" ds
  where
    mk0 "" = "0"
    mk0 ls = ls

    ds = map intToDigit is
-- | Like 'show' but provides rendering options.
--
-- NOTE(review): the @Maybe Int@ argument line was reconstructed from the
-- body's use of @mbDecs@ — confirm the signature against upstream.
formatScientific
    :: FPFormat
    -> Maybe Int  -- ^ Number of decimal places to render.
    -> Scientific
    -> String
formatScientific format mbDecs s
    | coefficient s < 0 = '-':formatPositiveScientific (-s)
    | otherwise         =     formatPositiveScientific   s
  where
    formatPositiveScientific :: Scientific -> String
    formatPositiveScientific s' = case format of
        Generic  -> fmtAsGeneric        $ toDecimalDigits s'
        Exponent -> fmtAsExponentMbDecs $ toDecimalDigits s'
        Fixed    -> fmtAsFixedMbDecs    $ toDecimalDigits s'

    -- Same notation choice as the 'Show' instance.
    fmtAsGeneric :: ([Int], Int) -> String
    fmtAsGeneric x@(_is, e)
        | e < 0 || e > 7 = fmtAsExponentMbDecs x
        | otherwise      = fmtAsFixedMbDecs    x

    fmtAsExponentMbDecs :: ([Int], Int) -> String
    fmtAsExponentMbDecs x = case mbDecs of
                              Nothing  -> fmtAsExponent x
                              Just dec -> fmtAsExponentDecs dec x

    fmtAsFixedMbDecs :: ([Int], Int) -> String
    fmtAsFixedMbDecs x = case mbDecs of
                           Nothing  -> fmtAsFixed x
                           Just dec -> fmtAsFixedDecs dec x

    -- Exponent notation rounded to @dec@ decimal places (at least 1).
    fmtAsExponentDecs :: Int -> ([Int], Int) -> String
    fmtAsExponentDecs dec (is, e) =
      let dec' = max dec 1 in
        case is of
         [0] -> '0' :'.' : take dec' (repeat '0') ++ "e0"
         _ ->
          let
           (ei,is') = roundTo (dec'+1) is
           (d:ds') = map intToDigit (if ei > 0 then init is' else is')
          in
          d:'.':ds' ++ 'e':show (e-1+ei)

    -- Fixed notation rounded to @dec@ decimal places (at least 0).
    fmtAsFixedDecs :: Int -> ([Int], Int) -> String
    fmtAsFixedDecs dec (is, e) =
      let dec' = max dec 0 in
      if e >= 0 then
         let
          (ei,is') = roundTo (dec' + e) is
          (ls,rs)  = splitAt (e+ei) (map intToDigit is')
         in
         mk0 ls ++ (if null rs then "" else '.':rs)
      else
         let
          (ei,is') = roundTo dec' (replicate (-e) 0 ++ is)
          d:ds' = map intToDigit (if ei > 0 then is' else 0:is')
         in
         d : (if null ds' then "" else '.':ds')
      where
        mk0 ls = case ls of { "" -> "0" ; _ -> ls}
-- | Similar to 'Numeric.floatToDigits', @toDecimalDigits@ takes a
-- positive 'Scientific' number, and returns a list of digits and
-- a base-10 exponent. In particular, if @x>=0@, and
--
-- > toDecimalDigits x = ([d1,d2,...,dn], e)
--
-- then
--
--      1. @n >= 1@
--      2. @x = 0.d1d2...dn * (10^^e)@
--      3. @0 <= di <= 9@
--      4. @null $ takeWhile (==0) $ reverse [d1,d2,...,dn]@
--
-- The last property means that the coefficient is normalized, i.e.
-- doesn't contain trailing zeros.
toDecimalDigits :: Scientific -> ([Int], Int)
toDecimalDigits (Scientific 0 _) = ([0], 0)
toDecimalDigits (Scientific c e) = go c 0 []
  where
    -- Peel digits off the coefficient least-significant first, counting
    -- them so the final exponent can be adjusted.
    go :: Integer -> Int -> [Int] -> ([Int], Int)
    go 0 !n ds = (ds, ne) where !ne = n + e
    go i !n ds = case i `quotRemInteger` 10 of
                   (# q, r #) -> go q (n+1) (d:ds)
                     where
                       !d = fromIntegral r
-- | Normalize a scientific number by dividing out powers of 10 from the
-- 'coefficient' and incrementing the 'base10Exponent' each time.
--
-- NOTE(review): the zero case was reconstructed (the guard line carried
-- an inline comment and was lost); without it 'normalize' would be
-- partial on a zero coefficient.
normalize :: Scientific -> Scientific
normalize (Scientific c e)
    | c > 0 =   normalizePositive   c  e
    | c < 0 = -(normalizePositive (-c) e)
    | otherwise {- c == 0 -} = Scientific 0 0
{-# DEPRECATED normalize "Scientific numbers are now normalized on construction so the normalize function is no longer needed." #-}

-- | Precondition: @c > 0@. Strips factors of 10 from the coefficient,
-- bumping the exponent for each one removed.
normalizePositive :: Integer -> Int -> Scientific
normalizePositive !c !e = case quotRemInteger c 10 of
                            (# c', r #)
                              | r == 0    -> normalizePositive c' (e+1)
                              | otherwise -> Scientific c e
|
e85bd124f9b42de0656a8dd37f631e0db1c725ba8dcac3c2bedaa7600cfaf979 | archaelus/rabbitmq-erlang-client | amqp_channel.erl | The contents of this file are subject to the Mozilla Public License
Version 1.1 ( the " License " ) ; you may not use this file except in
%% compliance with the License. You may obtain a copy of the License at
%% /
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
%% License for the specific language governing rights and limitations
%% under the License.
%%
The Original Code is the RabbitMQ Erlang Client .
%%
The Initial Developers of the Original Code are LShift Ltd. ,
Cohesive Financial Technologies LLC . , and Rabbit Technologies Ltd.
%%
Portions created by LShift Ltd. , Cohesive Financial
Technologies LLC . , and Rabbit Technologies Ltd. are Copyright ( C )
2007 LShift Ltd. , Cohesive Financial Technologies LLC . , and Rabbit
Technologies Ltd. ;
%%
%% All Rights Reserved.
%%
Contributor(s ): < > .
%%
%% @doc This module encapsulates the client's view of an AMQP channel. Each
%% server side channel is represented by an amqp_channel process on the client
%% side. Channel processes are created using the {@link amqp_connection}
%% module, but channels are responsible for closing themselves. Channel
%% processes are linked to the connection process from which they were
%% created.
-module(amqp_channel).
-include("amqp_client.hrl").
-behaviour(gen_server).
-export([init/1, terminate/2, code_change/3, handle_call/3, handle_cast/2,
handle_info/2]).
-export([call/2, call/3, cast/2, cast/3]).
-export([subscribe/3]).
-export([close/1, close/3]).
-export([register_return_handler/2]).
-export([register_flow_handler/2]).
-define(TIMEOUT_FLUSH, 60000).
-define(TIMEOUT_CLOSE_OK, 3000).
-record(c_state, {number,
parent_connection,
reader_pid,
writer_pid,
driver,
rpc_requests = queue:new(),
anon_sub_requests = queue:new(),
tagged_sub_requests = dict:new(),
closing = false,
return_handler_pid = none,
flow_control = false,
flow_handler_pid = none,
consumers = dict:new()}).
%% This diagram shows the interaction between the different component
%% processes in an AMQP client scenario.
%%
%% message* / reply* +-------+
%% +---------------------- | queue |
%% | +-------+
%% |
%% | +-----+
%% v | |
%% request reply* | v
%% +------+ -------+ +--------------+ <------+ +----------------+
%% | User | | | amqp_channel | | | direct_channel |
%% +------+ <------+ +--------------+ -------+ +----------------+
%% response / | request
%% cast/call / |
%% / | message
%% / v
%% +-------------+/ +----------+
%% | Pending RPC | | Consumer |
%% +-------------+ +----------+
%% |
%% [consumer tag --> consumer pid]
%%
%% These notifications are processed asynchronously via
%% handle_info/2 callbacks
%%---------------------------------------------------------------------------
%% Type Definitions
%%---------------------------------------------------------------------------
%% @type amqp_command().
%% This abstract datatype represents the set of commands that comprise the
%% AMQP execution model. As indicated in the overview, the attributes of each
%% commands in the execution model are described in the protocol
documentation . The Erlang record definitions are autogenerated from a
parseable version of the specification .
%% @type content() = #'basic.publish'{} |
%% #'basic.deliver'{} |
%% #'basic.return'{}.
These are the content bearing commands .
%%---------------------------------------------------------------------------
AMQP Channel API methods
%%---------------------------------------------------------------------------
%% @spec (Channel, amqp_command()) -> amqp_command()
%% where
%% Channel = pid()
%% @doc This is a generic RPC mechanism that sends an AMQP command and
%% receives an AMQP command as a response. This function blocks until the
%% response is returned.
%% Sends an AMQP method to the channel and blocks until the broker's
%% reply is returned to the caller.
call(Channel, Method) ->
    Request = {call, Method, none},
    gen_server:call(Channel, Request, infinity).
%% @spec (Channel, amqp_command(), content()) -> ok | blocked | closing
%% where
%% Channel = pid()
%% @doc This sends an AMQP command with content and waits for a synchronous
%% response. Generally this is used with the #basic.publish{} command.
%% This will return a blocked atom if either the server has throttled the
%% client for flow control reasons or if the channel is shutting down due to a
%% broker initiated close.
%% It will return a closing atom if the channel is in the process of shutting
%% down.
%% Note that the synchronicity only means that the client has transmitted the
%% command to the broker. It does not imply that the broker has accepted
%% responsibility for the message. To acheive guaranteed delivery, this
%% function would have to be called within the context of a transaction.
%% Sends an AMQP method together with content (e.g. a basic.publish
%% payload) and blocks until the channel replies; may return 'blocked'
%% or a closing atom instead of a method reply.
call(Channel, Method, Content) ->
    Request = {call, Method, Content},
    gen_server:call(Channel, Request, infinity).
%% @spec (Channel, amqp_command()) -> ok
%% @doc Asynchronous variant of {@link call/2}
%% Asynchronous variant of call/2: fire-and-forget method send.
cast(Channel, Method) ->
    Notification = {cast, Method, none},
    gen_server:cast(Channel, Notification).
%% @spec (Channel, amqp_command(), content()) -> ok
%% @doc Asynchronous variant of {@link call/3}
%% Asynchronous variant of call/3: fire-and-forget method-with-content.
cast(Channel, Method, Content) ->
    Notification = {cast, Method, Content},
    gen_server:cast(Channel, Notification).
%% @spec (Channel) -> ok
%% where
%% Channel = pid()
%% @doc Closes the channel, invokes close(Channel, 200, <<"Goodbye">>).
%% Closes the channel with the default "all went fine" reply code and
%% text, equivalent to close(Channel, 200, <<"Goodbye">>).
close(Channel) ->
    DefaultCode = 200,
    DefaultText = <<"Goodbye">>,
    close(Channel, DefaultCode, DefaultText).
%% @spec (Channel, Code, Text) -> ok
%% where
%% Channel = pid()
%% Code = integer()
%% Text = binary()
%% @doc Closes the channel, allowing the caller to supply a reply code and
%% text.
%% Closes the channel with a caller-supplied reply code and text.
%% Blocks until the broker confirms with channel.close_ok.
close(Channel, Code, Text) ->
    CloseMethod = #'channel.close'{class_id   = 0,
                                   method_id  = 0,
                                   reply_code = Code,
                                   reply_text = Text},
    #'channel.close_ok'{} = call(Channel, CloseMethod),
    ok.
%%---------------------------------------------------------------------------
%% Consumer registration (API)
%%---------------------------------------------------------------------------
%% @type consume() = #'basic.consume'{}.
The command that is used to subscribe a consumer to a queue .
%% @spec (Channel, consume(), Consumer) -> amqp_command()
%% where
%% Channel = pid()
%% Consumer = pid()
%% @doc Creates a subscription to a queue. This subscribes a consumer pid to
the queue defined in the # ' basic.consume ' { } command record . Note that both
%% both the process invoking this method and the supplied consumer process
%% receive an acknowledgement of the subscription. The calling process will
%% receive the acknowledgement as the return value of this function, whereas
%% the consumer process will receive the notification asynchronously.
%% Subscribes the Consumer pid to the queue named in the basic.consume
%% record; blocks until the broker acknowledges the subscription. The
%% consumer process also receives the acknowledgement asynchronously.
subscribe(Channel, BasicConsume = #'basic.consume'{}, Consumer) ->
    Request = {subscribe, BasicConsume, Consumer},
    gen_server:call(Channel, Request, infinity).
%% @spec (Channel, ReturnHandler) -> ok
%% where
%%      Channel = pid()
%%      ReturnHandler = pid()
%% @doc This registers a handler to deal with returned messages. The
%% registered process will receive #basic.return{} commands.
%% Registers a process to receive #'basic.return'{} notifications
%% for returned (unroutable) messages.
register_return_handler(Channel, ReturnHandler) ->
    Registration = {register_return_handler, ReturnHandler},
    gen_server:cast(Channel, Registration).
%% @spec (Channel, FlowHandler) -> ok
%% where
%%      Channel = pid()
%%      FlowHandler = pid()
%% @doc This registers a handler to deal with channel flow notifications.
%% The registered process will receive #'channel.flow'{} commands.
%% Registers a process to receive #'channel.flow'{} notifications when
%% the broker throttles or unthrottles the channel.
register_flow_handler(Channel, FlowHandler) ->
    Registration = {register_flow_handler, FlowHandler},
    gen_server:cast(Channel, Registration).
%%---------------------------------------------------------------------------
%% RPC mechanism
%%---------------------------------------------------------------------------
%% Enqueues an incoming RPC request so that RPCs are dispatched to the
%% broker one at a time. If the queue was empty the new request is
%% dispatched immediately; otherwise it waits until rpc_bottom_half/2
%% dequeues its predecessor.
rpc_top_half(Method, Content, From,
             State0 = #c_state{rpc_requests = RequestQueue}) ->
    WasEmpty = queue:is_empty(RequestQueue),
    NewQueue = queue:in({From, Method, Content}, RequestQueue),
    State1 = State0#c_state{rpc_requests = NewQueue},
    case WasEmpty of
        true  -> do_rpc(State1);
        false -> State1
    end.
%% Completes the RPC at the head of the queue: replies to the waiting
%% caller, removes the request, and dispatches the next pending RPC.
%% Exits if no RPC was outstanding.
rpc_bottom_half(Reply, State = #c_state{rpc_requests = RequestQueue}) ->
    case queue:out(RequestQueue) of
        {{value, {From, _Method, _Content}}, RemainingQueue} ->
            gen_server:reply(From, Reply),
            do_rpc(State#c_state{rpc_requests = RemainingQueue});
        {empty, _} ->
            exit(empty_rpc_bottom_half)
    end.
%% Sends the request at the head of the RPC queue to the broker, if any.
%% The request stays in the queue until its reply arrives (see
%% rpc_bottom_half/2), which is what serializes RPCs on this channel.
do_rpc(State = #c_state{rpc_requests = RequestQueue,
                        closing = Closing}) ->
    case queue:peek(RequestQueue) of
        %% A channel.close we initiated is going out: mark the channel
        %% as closing so further traffic is rejected/dropped.
        {value, {_From, Method = #'channel.close'{}, Content}} ->
            do(Method, Content, State),
            State#c_state{closing = just_channel};
        {value, {_From, Method, Content}} ->
            do(Method, Content, State),
            State;
        empty ->
            %% Queue drained while the connection is closing: all pending
            %% RPCs are answered, so trigger our own shutdown now.
            case Closing of
                {connection, Reason} -> self() ! {shutdown, Reason};
                _ -> ok
            end,
            State
    end.
%%---------------------------------------------------------------------------
%% Internal plumbing
%%---------------------------------------------------------------------------
%% Forwards a method (and optional content) to the broker through this
%% channel's writer process, via the driver-specific helper.
do(Method, Content, #c_state{driver = Driver, writer_pid = Writer}) ->
    amqp_channel_util:do(Driver, Writer, Method, Content).
%% Looks up the consumer pid registered under ConsumerTag.
%% Fix: the previous first clause matched the consumers field against [],
%% which can never succeed because the field always holds a dict (see the
%% c_state record default), so the no_consumers_registered exit was
%% unreachable. Check emptiness explicitly instead, preserving both
%% intended behaviours: exit when nothing is registered, dict:fetch
%% (which badargs on an unknown tag) otherwise.
resolve_consumer(ConsumerTag, #c_state{consumers = Consumers}) ->
    case dict:size(Consumers) of
        0 -> exit(no_consumers_registered);
        _ -> dict:fetch(ConsumerTag, Consumers)
    end.
%% Records ConsumerTag -> Consumer in the channel state so deliveries
%% can later be routed to the consumer process.
register_consumer(ConsumerTag, Consumer,
                  State = #c_state{consumers = CurrentConsumers}) ->
    UpdatedConsumers = dict:store(ConsumerTag, Consumer, CurrentConsumers),
    State#c_state{consumers = UpdatedConsumers}.
%% Removes the ConsumerTag entry from the consumer registry.
unregister_consumer(ConsumerTag,
                    State = #c_state{consumers = CurrentConsumers}) ->
    UpdatedConsumers = dict:erase(ConsumerTag, CurrentConsumers),
    State#c_state{consumers = UpdatedConsumers}.
%% Converts wire-format content into an #amqp_msg{} record;
%% passes 'none' through unchanged.
amqp_msg(none) ->
    none;
amqp_msg(Content) ->
    {Properties, Body} = rabbit_basic:from_content(Content),
    #amqp_msg{payload = Body, props = Properties}.
%% Inverse of amqp_msg/1: converts an #amqp_msg{} record into
%% wire-format content; passes 'none' through unchanged.
build_content(none) ->
    none;
build_content(#amqp_msg{payload = Body, props = Properties}) ->
    rabbit_basic:build_content(Properties, Body).
%% Decides whether an outbound method may be sent right now.
%% Clause order matters: closing states take precedence; content-less
%% methods are then always allowed (so pure methods are never flow
%% blocked); only content-bearing methods are subject to flow control.
check_block(_Method, _AmqpMsg, #c_state{closing = just_channel}) ->
    channel_closing;
check_block(_Method, _AmqpMsg, #c_state{closing = {connection, _}}) ->
    connection_closing;
check_block(_Method, none, #c_state{}) ->
    ok;
check_block(_Method, _AmqpMsg, #c_state{flow_control = true}) ->
    blocked;
check_block(_Method, _AmqpMsg, #c_state{}) ->
    ok.
%% Maps a close reason onto a gen_server stop tuple: reply code 200
%% means a clean shutdown ('normal'); anything else propagates the
%% reason as the stop reason.
shutdown_with_reason(Reason, State) ->
    case Reason of
        {_, 200, _} -> {stop, normal, State};
        _           -> {stop, Reason, State}
    end.
%%---------------------------------------------------------------------------
%% Handling of methods from the server
%%---------------------------------------------------------------------------
%% Dispatches a method arriving from the broker. channel.close and
%% channel.close_ok are always handled; anything else is dropped while
%% this channel is closing, otherwise passed to handle_regular_method/3
%% with the content decoded into an #amqp_msg{}.
handle_method(Method, Content, #c_state{closing = Closing} = State) ->
    case {Method, Content} of
        %% Handle 'channel.close': send 'channel.close_ok' and stop channel
        {#'channel.close'{reply_code = ReplyCode,
                          reply_text = ReplyText}, none} ->
            do(#'channel.close_ok'{}, none, State),
            {stop, {server_initiated_close, ReplyCode, ReplyText}, State};
        %% Handle 'channel.close_ok': stop channel
        {CloseOk = #'channel.close_ok'{}, none} ->
            {stop, normal, rpc_bottom_half(CloseOk, State)};
        _ ->
            case Closing of
                %% Drop all incomming traffic except 'channel.close' and
                %% 'channel.close_ok' when channel is closing (has sent
                %% 'channel.close')
                just_channel ->
                    ?LOG_INFO("Channel (~p): dropping method ~p from server "
                              "because channel is closing~n",
                              [self(), {Method, Content}]),
                    {noreply, State};
                %% Standard handling of incoming method
                _ ->
                    handle_regular_method(Method, amqp_msg(Content), State)
            end
    end.
%% Handles broker methods on a non-closing channel. Clause order matters:
%% specific methods first, then the catch-all clauses that treat any
%% other method as the reply to the pending RPC.
%%
%% basic.consume_ok: completes a pending subscribe request. The matching
%% request is found either in the tagged map (client chose the tag) or
%% at the head of the anonymous queue (broker chose the tag); the
%% consumer is notified, registered, and the RPC caller is replied to.
handle_regular_method(
    #'basic.consume_ok'{consumer_tag = ConsumerTag} = ConsumeOk, none,
    #c_state{tagged_sub_requests = Tagged,
             anon_sub_requests = Anon} = State) ->
    {_From, Consumer, State0} =
        case dict:find(ConsumerTag, Tagged) of
            {ok, {F, C}} ->
                NewTagged = dict:erase(ConsumerTag,Tagged),
                {F, C, State#c_state{tagged_sub_requests = NewTagged}};
            error ->
                case queue:out(Anon) of
                    {empty, _} ->
                        exit({anonymous_queue_empty, ConsumerTag});
                    {{value, {F, C}}, NewAnon} ->
                        {F, C, State#c_state{anon_sub_requests = NewAnon}}
                end
        end,
    Consumer ! ConsumeOk,
    State1 = register_consumer(ConsumerTag, Consumer, State0),
    {noreply, rpc_bottom_half(ConsumeOk, State1)};
%% basic.cancel_ok: notify and unregister the consumer, complete the RPC.
handle_regular_method(
    #'basic.cancel_ok'{consumer_tag = ConsumerTag} = CancelOk, none,
    #c_state{} = State) ->
    Consumer = resolve_consumer(ConsumerTag, State),
    Consumer ! CancelOk,
    NewState = unregister_consumer(ConsumerTag, State),
    {noreply, rpc_bottom_half(CancelOk, NewState)};
%% Handle 'channel.flow'
%% If flow_control flag is defined, it informs the flow control handler to
%% suspend submitting any content bearing methods
handle_regular_method(#'channel.flow'{active = Active} = Flow, none,
                      #c_state{flow_handler_pid = FlowHandler} = State) ->
    case FlowHandler of
        none -> ok;
        _ -> FlowHandler ! Flow
    end,
    do(#'channel.flow_ok'{active = Active}, none, State),
    {noreply, State#c_state{flow_control = not(Active)}};
%% basic.deliver: route the message to the registered consumer process.
handle_regular_method(#'basic.deliver'{consumer_tag = ConsumerTag} = Deliver,
                      AmqpMsg, State) ->
    Consumer = resolve_consumer(ConsumerTag, State),
    Consumer ! {Deliver, AmqpMsg},
    {noreply, State};
%% basic.return: forward to the return handler, or warn if none is set.
handle_regular_method(
    #'basic.return'{} = BasicReturn, AmqpMsg,
    #c_state{return_handler_pid = ReturnHandler} = State) ->
    case ReturnHandler of
        none -> ?LOG_WARN("Channel (~p): received {~p, ~p} but there is no "
                          "return handler registered~n",
                          [self(), BasicReturn, AmqpMsg]);
        _ -> ReturnHandler ! {BasicReturn, AmqpMsg}
    end,
    {noreply, State};
%% Any other method is the reply to the RPC currently in flight.
handle_regular_method(Method, none, State) ->
    {noreply, rpc_bottom_half(Method, State)};
handle_regular_method(Method, Content, State) ->
    {noreply, rpc_bottom_half({Method, Content}, State)}.
%%---------------------------------------------------------------------------
%% gen_server callbacks
%%---------------------------------------------------------------------------
%% @private
%% gen_server init callback: traps exits so that linked process
%% failures arrive as 'EXIT' messages in handle_info/2, then starts the
%% reader/writer infrastructure for this channel.
init({ParentConnection, ChannelNumber, Driver, StartArgs}) ->
    process_flag(trap_exit, true),
    {ReaderPid, WriterPid} =
        amqp_channel_util:start_channel_infrastructure(Driver, ChannelNumber,
                                                       StartArgs),
    {ok, #c_state{parent_connection = ParentConnection,
                  number            = ChannelNumber,
                  driver            = Driver,
                  reader_pid        = ReaderPid,
                  writer_pid        = WriterPid}}.
%% Standard implementation of the call/{2,3} command
%% @private
%% Standard implementation of the call/{2,3} command. Synchronous
%% methods are queued for an RPC round-trip (reply deferred until the
%% broker answers); asynchronous ones are written out and acknowledged
%% immediately. Blocked/closing states are reported to the caller.
%% @private
handle_call({call, Method, AmqpMsg}, From, State) ->
    case check_block(Method, AmqpMsg, State) of
        ok -> Content = build_content(AmqpMsg),
              case rabbit_framing:is_method_synchronous(Method) of
                  true ->
                      {noreply, rpc_top_half(Method, Content, From,
                                             State)};
                  false ->
                      do(Method, Content, State),
                      {reply, ok, State}
              end;
        BlockReply -> {reply, BlockReply, State}
    end;
%% Standard implementation of the subscribe/3 command
%% @private
handle_call({subscribe, #'basic.consume'{consumer_tag = Tag} = Method, Consumer},
            From, #c_state{tagged_sub_requests = Tagged,
                           anon_sub_requests = Anon} = State) ->
    case check_block(Method, none, State) of
        ok ->
            {NewMethod, NewState} =
                %% An absent/empty tag lets the broker pick one: track the
                %% request in the anonymous FIFO. A client-supplied binary
                %% tag is tracked in the tagged map instead.
                if Tag =:= undefined orelse size(Tag) == 0 ->
                       NewAnon = queue:in({From,Consumer}, Anon),
                       {Method#'basic.consume'{consumer_tag = <<"">>},
                        State#c_state{anon_sub_requests = NewAnon}};
                   is_binary(Tag) ->
                       %% TODO test whether this tag already exists, either in
                       %% the pending tagged request map or in general as
                       %% already subscribed consumer
                       NewTagged = dict:store(Tag,{From,Consumer}, Tagged),
                       {Method,
                        State#c_state{tagged_sub_requests = NewTagged}}
                end,
            {noreply, rpc_top_half(NewMethod, none, From, NewState)};
        BlockReply ->
            {reply, BlockReply, State}
    end.
%% Standard implementation of the cast/{2,3} command
%% @private
%% Standard implementation of the cast/{2,3} command: best-effort send,
%% silently discarded (with a log line) when blocked or closing.
handle_cast({cast, Method, AmqpMsg} = Cast, State) ->
    case check_block(Method, AmqpMsg, State) of
        ok -> do(Method, build_content(AmqpMsg), State);
        BlockReply -> ?LOG_INFO("Channel (~p): discarding method in cast ~p."
                                "Reason: ~p~n", [self(), Cast, BlockReply])
    end,
    {noreply, State};
%% Registers a handler to process return messages
%% @private
handle_cast({register_return_handler, ReturnHandler}, State) ->
    link(ReturnHandler),
    {noreply, State#c_state{return_handler_pid = ReturnHandler}};
%% Registers a handler to process flow control messages
%% @private
handle_cast({register_flow_handler, FlowHandler}, State) ->
    link(FlowHandler),
    {noreply, State#c_state{flow_handler_pid = FlowHandler}};
%% @private
handle_cast({notify_sent, _Peer}, State) ->
    {noreply, State};
%% This callback is invoked when a network channel sends messages
%% to this gen_server instance
%% @private
handle_cast({method, Method, Content}, State) ->
    handle_method(Method, Content, State).
%% These callbacks are invoked when a direct channel sends messages
%% to this gen_server instance
%% @private
%% Direct-channel delivery of a method without content.
handle_info({send_command, Method}, State) ->
    handle_method(Method, none, State);
%% @private
handle_info({send_command, Method, Content}, State) ->
    handle_method(Method, Content, State);
%% Handles the delivery of a message from a direct channel
%% @private
handle_info({send_command_and_notify, Q, ChPid, Method, Content}, State) ->
    handle_method(Method, Content, State),
    rabbit_amqqueue:notify_sent(Q, ChPid),
    {noreply, State};
%% @private
handle_info(shutdown, State) ->
    {stop, normal, State};
%% @private
handle_info({shutdown, Reason}, State) ->
    shutdown_with_reason(Reason, State);
%% Timed-out shutdown during connection close (scheduled via
%% erlang:send_after below); the case only covers the two timeout
%% reasons that are ever scheduled.
%% @private
handle_info({shutdown, FailShutdownReason, InitialReason},
            #c_state{number = Number} = State) ->
    case FailShutdownReason of
        {connection_closing, timed_out_flushing_channel} ->
            ?LOG_WARN("Channel ~p closing: timed out flushing while connection "
                      "closing~n", [Number]);
        {connection_closing, timed_out_waiting_close_ok} ->
            ?LOG_WARN("Channel ~p closing: timed out waiting for "
                      "channel.close_ok while connection closing~n", [Number])
    end,
    {stop, {FailShutdownReason, InitialReason}, State};
%% Handles the situation when the connection closes without closing the channel
%% beforehand. The channel must block all further RPCs,
%% flush the RPC queue (optional), and terminate
%% @private
handle_info({connection_closing, CloseType, Reason},
            #c_state{rpc_requests = RpcQueue,
                     closing = Closing} = State) ->
    case {CloseType, Closing, queue:is_empty(RpcQueue)} of
        {flush, false, false} ->
            erlang:send_after(?TIMEOUT_FLUSH, self(),
                              {shutdown,
                               {connection_closing, timed_out_flushing_channel},
                               Reason}),
            {noreply, State#c_state{closing = {connection, Reason}}};
        {flush, just_channel, false} ->
            erlang:send_after(?TIMEOUT_CLOSE_OK, self(),
                              {shutdown,
                               {connection_closing, timed_out_waiting_close_ok},
                               Reason}),
            {noreply, State};
        _ ->
            shutdown_with_reason(Reason, State)
    end;
%% This is for a channel exception that is sent by the direct
%% rabbit_channel process
%% @private
handle_info({channel_exit, _Channel, #amqp_error{name = ErrorName,
                                                 explanation = Expl} = Error},
            State = #c_state{number = Number}) ->
    ?LOG_WARN("Channel ~p closing: server sent error ~p~n", [Number, Error]),
    {_, Code, _} = rabbit_framing:lookup_amqp_exception(ErrorName),
    {stop, {server_initiated_close, Code, Expl}, State};
%%---------------------------------------------------------------------------
%% Trap exits
%% Note: the 'EXIT' clauses below are order-sensitive only in that each
%% binds the exiting pid to a specific state field; an exit from an
%% unknown pid falls through to the final catch-all clause.
%%---------------------------------------------------------------------------
%% Handle parent connection exit
%% @private
handle_info({'EXIT', ConnectionPid, Reason},
            State = #c_state{number = ChannelNumber,
                             parent_connection = ConnectionPid}) ->
    ?LOG_WARN("Channel ~p closing: parent connection died. Reason: ~p~n",
              [ChannelNumber, Reason]),
    {stop, {parent_connection_died, ConnectionPid, Reason}, State};
%% Handle writer exit
%% @private
handle_info({'EXIT', WriterPid, Reason},
            State = #c_state{number = ChannelNumber,
                             writer_pid = WriterPid}) ->
    ?LOG_WARN("Channel ~p closing: received exit signal from writer. "
              "Reason: ~p~n", [ChannelNumber, Reason]),
    {stop, {writer_died, WriterPid, Reason}, State};
%% Handle reader exit
%% @private
handle_info({'EXIT', ReaderPid, Reason},
            State = #c_state{number = ChannelNumber,
                             reader_pid = ReaderPid}) ->
    ?LOG_WARN("Channel ~p closing: received exit signal from reader. "
              "Reason: ~p~n", [ChannelNumber, Reason]),
    {stop, {reader_died, ReaderPid, Reason}, State};
%% Handle flow handler exit
%% @private
handle_info({'EXIT', FlowHandler, Reason},
            State = #c_state{number = ChannelNumber,
                             flow_handler_pid = FlowHandler}) ->
    ?LOG_INFO("Channel ~p: unregistering flow handler because it is "
              "closing: ~p~n", [ChannelNumber, Reason]),
    {noreply, State#c_state{flow_handler_pid = none}};
%% Handle return handler exit
%% @private
handle_info({'EXIT', ReturnHandler, Reason},
            State = #c_state{number = ChannelNumber,
                             return_handler_pid = ReturnHandler}) ->
    ?LOG_INFO("Channel ~p: unregistering return handler because it is "
              "closing: ~p~n", [ChannelNumber, Reason]),
    {noreply, State#c_state{return_handler_pid = none}};
%% Handle other exit
%% @private
handle_info({'EXIT', Pid, Reason}, State = #c_state{number = ChannelNumber}) ->
    ?LOG_WARN("Channel ~p closing: received unexpected exit signal from (~p). "
              "Reason: ~p~n", [ChannelNumber, Pid, Reason]),
    {stop, {unexpected_exit_signal, Pid, Reason}, State}.
%%---------------------------------------------------------------------------
%% Rest of the gen_server callbacks
%%---------------------------------------------------------------------------
%% @private
%% gen_server terminate callback: tears down the reader/writer
%% processes that were started for this channel in init/1.
terminate(_Reason, #c_state{driver = Driver, reader_pid = ReaderPid,
                            writer_pid = WriterPid}) ->
    ChannelInfrastructure = {ReaderPid, WriterPid},
    amqp_channel_util:terminate_channel_infrastructure(Driver,
                                                       ChannelInfrastructure).
@private
%% gen_server code_change callback.
%% Fix: the OTP contract requires returning {ok, NewState}; the previous
%% implementation returned the bare state, which would make a hot code
%% upgrade fail with a bad callback return value.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
| null | https://raw.githubusercontent.com/archaelus/rabbitmq-erlang-client/e2f507c4ab8b7f57c2c700dbabc8079861f5a0a4/src/amqp_channel.erl | erlang | compliance with the License. You may obtain a copy of the License at
/
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See the
License for the specific language governing rights and limitations
under the License.
All Rights Reserved.
@doc This module encapsulates the client's view of an AMQP channel. Each
server side channel is represented by an amqp_channel process on the client
side. Channel processes are created using the {@link amqp_connection}
module, but channels are respsonsible for closing themselves. Channel
processes are linked to the connnection process from which they were
created.
This diagram shows the interaction between the different component
processes in an AMQP client scenario.
message* / reply* +-------+
+---------------------- | queue |
| +-------+
|
| +-----+
v | |
request reply* | v
+------+ -------+ +--------------+ <------+ +----------------+
| User | | | amqp_channel | | | direct_channel |
+------+ <------+ +--------------+ -------+ +----------------+
response / | request
cast/call / |
/ | message
/ v
+-------------+/ +----------+
| Pending RPC | | Consumer |
+-------------+ +----------+
|
[consumer tag --> consumer pid]
These notifications are processed asynchronously via
handle_info/2 callbacks
---------------------------------------------------------------------------
Type Definitions
---------------------------------------------------------------------------
@type amqp_command().
This abstract datatype represents the set of commands that comprise the
AMQP execution model. As indicated in the overview, the attributes of each
commands in the execution model are described in the protocol
@type content() = #'basic.publish'{} |
#'basic.deliver'{} |
#'basic.return'{}.
---------------------------------------------------------------------------
---------------------------------------------------------------------------
@spec (Channel, amqp_command()) -> amqp_command()
where
Channel = pid()
receives an AMQP command as a response. This function blocks until the
response is returned.
@spec (Channel, amqp_command(), content()) -> ok | blocked | closing
where
Channel = pid()
@doc This sends an AMQP command with content and waits for a synchronous
response. Generally this is used with the #basic.publish{} command.
This will return a blocked atom if either the server has throttled the
client for flow control reasons or if the channel is shutting down due to a
broker initiated close.
It will return a closing atom if the channel is in the process of shutting
down.
Note that the synchronicity only means that the client has transmitted the
command to the broker. It does not imply that the broker has accepted
responsibility for the message. To acheive guaranteed delivery, this
function would have to be called within the context of a transaction.
@spec (Channel, amqp_command()) -> ok
@doc Asynchronous variant of {@link call/2}
@spec (Channel, amqp_command(), content()) -> ok
@doc Asynchronous variant of {@link call/3}
@spec (Channel) -> ok
where
Channel = pid()
@spec (Channel, Code, Text) -> ok
where
Channel = pid()
Code = integer()
Text = binary()
@doc Closes the channel, allowing the caller to supply a reply code and
text.
---------------------------------------------------------------------------
Consumer registration (API)
---------------------------------------------------------------------------
@type consume() = #'basic.consume'{}.
@spec (Channel, consume(), Consumer) -> amqp_command()
where
Channel = pid()
Consumer = pid()
@doc Creates a subscription to a queue. This subscribes a consumer pid to
both the process invoking this method and the supplied consumer process
receive an acknowledgement of the subscription. The calling process will
receive the acknowledgement as the return value of this function, whereas
the consumer process will receive the notification asynchronously.
where
Channel = pid()
@doc This registers a handler to deal with returned messages. The
registered process will receive #basic.return{} commands.
where
Channel = pid()
@doc This registers a handler to deal with channel flow notifications.
---------------------------------------------------------------------------
RPC mechanism
---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
---------------------------------------------------------------------------
Handling of methods from the server
---------------------------------------------------------------------------
Handle 'channel.close': send 'channel.close_ok' and stop channel
Handle 'channel.close_ok': stop channel
Drop all incomming traffic except 'channel.close' and
'channel.close_ok' when channel is closing (has sent
'channel.close')
Standard handling of incoming method
Handle 'channel.flow'
If flow_control flag is defined, it informs the flow control handler to
suspend submitting any content bearing methods
---------------------------------------------------------------------------
gen_server callbacks
---------------------------------------------------------------------------
Standard implementation of the call/{2,3} command
Standard implementation of the subscribe/3 command
the pending tagged request map or in general as
already subscribed consumer
Standard implementation of the cast/{2,3} command
Registers a handler to process return messages
Registers a handler to process flow control messages
This callback is invoked when a network channel sends messages
to this gen_server instance
These callbacks are invoked when a direct channel sends messages
to this gen_server instance
Handles the delivery of a message from a direct channel
Handles the situation when the connection closes without closing the channel
beforehand. The channel must block all further RPCs,
flush the RPC queue (optional), and terminate
This is for a channel exception that is sent by the direct
rabbit_channel process
---------------------------------------------------------------------------
Trap exits
---------------------------------------------------------------------------
Handle parent connection exit
Handle writer exit
Handle reader exit
Handle flow handler exit
Handle return handler exit
Handle other exit
---------------------------------------------------------------------------
Rest of the gen_server callbacks
--------------------------------------------------------------------------- | The contents of this file are subject to the Mozilla Public License
Version 1.1 ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
The Original Code is the RabbitMQ Erlang Client .
The Initial Developers of the Original Code are LShift Ltd. ,
Cohesive Financial Technologies LLC . , and Rabbit Technologies Ltd.
Portions created by LShift Ltd. , Cohesive Financial
Technologies LLC . , and Rabbit Technologies Ltd. are Copyright ( C )
2007 LShift Ltd. , Cohesive Financial Technologies LLC . , and Rabbit
Technologies Ltd. ;
Contributor(s ): < > .
-module(amqp_channel).
-include("amqp_client.hrl").
-behaviour(gen_server).
-export([init/1, terminate/2, code_change/3, handle_call/3, handle_cast/2,
handle_info/2]).
-export([call/2, call/3, cast/2, cast/3]).
-export([subscribe/3]).
-export([close/1, close/3]).
-export([register_return_handler/2]).
-export([register_flow_handler/2]).
-define(TIMEOUT_FLUSH, 60000).
-define(TIMEOUT_CLOSE_OK, 3000).
%% Internal state of an amqp_channel gen_server process.
-record(c_state, {number,              % channel number within the connection
                  parent_connection,   % pid of the owning connection process
                  reader_pid,          % channel frame reader process
                  writer_pid,          % channel frame writer process
                  driver,              % driver tag passed to amqp_channel_util
                                       % (presumably network|direct -- confirm)
                  rpc_requests = queue:new(),       % pending RPCs, FIFO-serialized
                  anon_sub_requests = queue:new(),  % pending subscribes, no tag
                  tagged_sub_requests = dict:new(), % pending subscribes by tag
                  closing = false,     % false | just_channel | {connection, Reason}
                  return_handler_pid = none,        % receiver of basic.return
                  flow_control = false,             % true while broker throttles us
                  flow_handler_pid = none,          % receiver of channel.flow
                  consumers = dict:new()}).         % consumer tag -> consumer pid
documentation . The Erlang record definitions are autogenerated from a
parseable version of the specification .
These are the content bearing commands .
AMQP Channel API methods
@doc This is a generic RPC mechanism that sends an AMQP command and
%% Blocking RPC: sends an AMQP method and waits for the broker's reply.
call(Channel, Method) ->
    gen_server:call(Channel, {call, Method, none}, infinity).
%% Blocking RPC variant carrying content (e.g. a basic.publish payload).
%% May return 'blocked' or a closing atom instead of a method reply.
call(Channel, Method, Content) ->
    gen_server:call(Channel, {call, Method, Content}, infinity).
%% Asynchronous variant of call/2.
cast(Channel, Method) ->
    gen_server:cast(Channel, {cast, Method, none}).
%% Asynchronous variant of call/3.
cast(Channel, Method, Content) ->
    gen_server:cast(Channel, {cast, Method, Content}).
%% @doc Closes the channel, invokes close(Channel, 200, <<"Goodbye">>).
close(Channel) ->
    close(Channel, 200, <<"Goodbye">>).
%% Closes the channel with a caller-supplied reply code and text;
%% blocks until the broker confirms with channel.close_ok.
close(Channel, Code, Text) ->
    Close = #'channel.close'{reply_text = Text,
                             reply_code = Code,
                             class_id = 0,
                             method_id = 0},
    #'channel.close_ok'{} = call(Channel, Close),
    ok.
The command that is used to subscribe a consumer to a queue .
the queue defined in the # ' basic.consume ' { } command record . Note that both
%% Subscribes the Consumer pid to the queue named in the basic.consume
%% record; blocks until the broker acknowledges the subscription. The
%% consumer process also receives the acknowledgement asynchronously.
subscribe(Channel, BasicConsume = #'basic.consume'{}, Consumer) ->
    gen_server:call(Channel, {subscribe, BasicConsume, Consumer}, infinity).
%% @spec (Channel, ReturnHandler) -> ok
%% where
%%      Channel = pid()
%%      ReturnHandler = pid()
%% @doc Registers a handler for returned messages; it will receive
%% #'basic.return'{} commands.
register_return_handler(Channel, ReturnHandler) ->
    gen_server:cast(Channel, {register_return_handler, ReturnHandler} ).
%% @spec (Channel, FlowHandler) -> ok
%% where
%%      Channel = pid()
%%      FlowHandler = pid()
%% @doc Registers a handler for flow control notifications; it will
%% receive #'channel.flow'{} commands.
register_flow_handler(Channel, FlowHandler) ->
    gen_server:cast(Channel, {register_flow_handler, FlowHandler} ).
%% Enqueues an incoming RPC request; if the queue was empty beforehand
%% the request is dispatched immediately, otherwise it waits for
%% rpc_bottom_half/2 to dequeue its predecessor.
rpc_top_half(Method, Content, From,
             State0 = #c_state{rpc_requests = RequestQueue}) ->
    %% Enqueue the incoming RPC request to serialize RPC dispatching
    State1 = State0#c_state{
        rpc_requests = queue:in({From, Method, Content}, RequestQueue)},
    IsFirstElement = queue:is_empty(RequestQueue),
    if IsFirstElement -> do_rpc(State1);
       true -> State1
    end.
%% Completes the RPC at the head of the queue: replies to the waiting
%% caller, removes the request, and dispatches the next pending RPC.
rpc_bottom_half(Reply, State = #c_state{rpc_requests = RequestQueue}) ->
    case queue:out(RequestQueue) of
        {empty, _} ->
            exit(empty_rpc_bottom_half);
        {{value, {From, _Method, _Content}}, NewRequestQueue} ->
            gen_server:reply(From, Reply),
            do_rpc(State#c_state{rpc_requests = NewRequestQueue})
    end.
%% Sends the request at the head of the RPC queue to the broker, if any.
%% The request stays queued until its reply arrives, serializing RPCs.
do_rpc(State = #c_state{rpc_requests = RequestQueue,
                        closing = Closing}) ->
    case queue:peek(RequestQueue) of
        %% Our own channel.close going out: mark the channel as closing.
        {value, {_From, Method = #'channel.close'{}, Content}} ->
            do(Method, Content, State),
            State#c_state{closing = just_channel};
        {value, {_From, Method, Content}} ->
            do(Method, Content, State),
            State;
        empty ->
            %% Queue drained while the connection is closing: all pending
            %% RPCs are answered, so trigger our own shutdown now.
            case Closing of
                {connection, Reason} -> self() ! {shutdown, Reason};
                _ -> ok
            end,
            State
    end.
Internal plumbing
%% Forwards a method (and optional content) to the broker through this
%% channel's writer process, via the driver-specific helper.
do(Method, Content, #c_state{writer_pid = Writer,
                             driver = Driver}) ->
    amqp_channel_util:do(Driver, Writer, Method, Content).
%% Looks up the consumer pid registered under ConsumerTag.
%% Fix: the previous first clause matched the consumers field against [],
%% which can never succeed because the field always holds a dict (see the
%% c_state record default), so the no_consumers_registered exit was
%% unreachable. Check emptiness explicitly instead, preserving both
%% intended behaviours: exit when nothing is registered, dict:fetch
%% (which badargs on an unknown tag) otherwise.
resolve_consumer(ConsumerTag, #c_state{consumers = Consumers}) ->
    case dict:size(Consumers) of
        0 -> exit(no_consumers_registered);
        _ -> dict:fetch(ConsumerTag, Consumers)
    end.
%% Records ConsumerTag -> Consumer so deliveries can be routed later.
register_consumer(ConsumerTag, Consumer,
                  State = #c_state{consumers = Consumers0}) ->
    Consumers1 = dict:store(ConsumerTag, Consumer, Consumers0),
    State#c_state{consumers = Consumers1}.
%% Removes the ConsumerTag entry from the consumer registry.
unregister_consumer(ConsumerTag,
                    State = #c_state{consumers = Consumers0}) ->
    Consumers1 = dict:erase(ConsumerTag, Consumers0),
    State#c_state{consumers = Consumers1}.
%% Converts wire-format content into an #amqp_msg{}; 'none' passes through.
amqp_msg(none) ->
    none;
amqp_msg(Content) ->
    {Props, Payload} = rabbit_basic:from_content(Content),
    #amqp_msg{props = Props, payload = Payload}.
%% Inverse of amqp_msg/1; 'none' passes through.
build_content(none) ->
    none;
build_content(#amqp_msg{props = Props, payload = Payload}) ->
    rabbit_basic:build_content(Props, Payload).
%% Decides whether an outbound method may be sent right now. Clause
%% order matters: closing states first, then content-less methods are
%% always allowed, and only content-bearing methods are flow-blocked.
check_block(_Method, _AmqpMsg, #c_state{closing = just_channel}) ->
    channel_closing;
check_block(_Method, _AmqpMsg, #c_state{closing = {connection, _}}) ->
    connection_closing;
check_block(_Method, none, #c_state{}) ->
    ok;
check_block(_Method, _AmqpMsg, #c_state{flow_control = true}) ->
    blocked;
check_block(_Method, _AmqpMsg, #c_state{}) ->
    ok.
%% Maps a close reason onto a stop tuple: code 200 is a clean shutdown.
shutdown_with_reason({_, 200, _}, State) ->
    {stop, normal, State};
shutdown_with_reason(Reason, State) ->
    {stop, Reason, State}.
%% Dispatches a method arriving from the broker. channel.close and
%% channel.close_ok are always handled; anything else is dropped while
%% this channel is closing, otherwise passed to handle_regular_method/3
%% with the content decoded into an #amqp_msg{}.
handle_method(Method, Content, #c_state{closing = Closing} = State) ->
    case {Method, Content} of
        %% Server-initiated close: acknowledge and stop.
        {#'channel.close'{reply_code = ReplyCode,
                          reply_text = ReplyText}, none} ->
            do(#'channel.close_ok'{}, none, State),
            {stop, {server_initiated_close, ReplyCode, ReplyText}, State};
        %% Confirmation of our own close: complete the RPC and stop.
        {CloseOk = #'channel.close_ok'{}, none} ->
            {stop, normal, rpc_bottom_half(CloseOk, State)};
        _ ->
            case Closing of
                %% Drop all other traffic while the channel is closing.
                just_channel ->
                    ?LOG_INFO("Channel (~p): dropping method ~p from server "
                              "because channel is closing~n",
                              [self(), {Method, Content}]),
                    {noreply, State};
                _ ->
                    handle_regular_method(Method, amqp_msg(Content), State)
            end
    end.
%% Handles broker methods on a non-closing channel. Clause order
%% matters: specific methods first, then catch-alls that treat any
%% other method as the reply to the pending RPC.
%%
%% basic.consume_ok: completes a pending subscribe, found either in the
%% tagged map (client chose the tag) or at the head of the anonymous
%% queue (broker chose the tag).
handle_regular_method(
    #'basic.consume_ok'{consumer_tag = ConsumerTag} = ConsumeOk, none,
    #c_state{tagged_sub_requests = Tagged,
             anon_sub_requests = Anon} = State) ->
    {_From, Consumer, State0} =
        case dict:find(ConsumerTag, Tagged) of
            {ok, {F, C}} ->
                NewTagged = dict:erase(ConsumerTag,Tagged),
                {F, C, State#c_state{tagged_sub_requests = NewTagged}};
            error ->
                case queue:out(Anon) of
                    {empty, _} ->
                        exit({anonymous_queue_empty, ConsumerTag});
                    {{value, {F, C}}, NewAnon} ->
                        {F, C, State#c_state{anon_sub_requests = NewAnon}}
                end
        end,
    Consumer ! ConsumeOk,
    State1 = register_consumer(ConsumerTag, Consumer, State0),
    {noreply, rpc_bottom_half(ConsumeOk, State1)};
%% basic.cancel_ok: notify and unregister the consumer, complete the RPC.
handle_regular_method(
    #'basic.cancel_ok'{consumer_tag = ConsumerTag} = CancelOk, none,
    #c_state{} = State) ->
    Consumer = resolve_consumer(ConsumerTag, State),
    Consumer ! CancelOk,
    NewState = unregister_consumer(ConsumerTag, State),
    {noreply, rpc_bottom_half(CancelOk, NewState)};
%% channel.flow: notify the flow handler (if any), acknowledge with
%% flow_ok, and record the throttling state.
handle_regular_method(#'channel.flow'{active = Active} = Flow, none,
                      #c_state{flow_handler_pid = FlowHandler} = State) ->
    case FlowHandler of
        none -> ok;
        _ -> FlowHandler ! Flow
    end,
    do(#'channel.flow_ok'{active = Active}, none, State),
    {noreply, State#c_state{flow_control = not(Active)}};
%% basic.deliver: route the message to the registered consumer process.
handle_regular_method(#'basic.deliver'{consumer_tag = ConsumerTag} = Deliver,
                      AmqpMsg, State) ->
    Consumer = resolve_consumer(ConsumerTag, State),
    Consumer ! {Deliver, AmqpMsg},
    {noreply, State};
%% basic.return: forward to the return handler, or warn if none is set.
handle_regular_method(
    #'basic.return'{} = BasicReturn, AmqpMsg,
    #c_state{return_handler_pid = ReturnHandler} = State) ->
    case ReturnHandler of
        none -> ?LOG_WARN("Channel (~p): received {~p, ~p} but there is no "
                          "return handler registered~n",
                          [self(), BasicReturn, AmqpMsg]);
        _ -> ReturnHandler ! {BasicReturn, AmqpMsg}
    end,
    {noreply, State};
%% Any other method is the reply to the RPC currently in flight.
handle_regular_method(Method, none, State) ->
    {noreply, rpc_bottom_half(Method, State)};
handle_regular_method(Method, Content, State) ->
    {noreply, rpc_bottom_half({Method, Content}, State)}.
@private
%% @private
%% gen_server init callback.  Traps exits (so 'EXIT' messages from the
%% linked reader/writer/handlers arrive in handle_info), starts the
%% driver-specific channel infrastructure and seeds the channel state.
init({ParentConnection, ChannelNumber, Driver, StartArgs}) ->
    process_flag(trap_exit, true),
    {ReaderPid, WriterPid} =
        amqp_channel_util:start_channel_infrastructure(Driver, ChannelNumber,
                                                       StartArgs),
    {ok, #c_state{parent_connection = ParentConnection,
                  number            = ChannelNumber,
                  driver            = Driver,
                  reader_pid        = ReaderPid,
                  writer_pid        = WriterPid}}.
@private
%% Synchronous API entry points.  Both clauses first run check_block/3,
%% which refuses new work while the channel is closing or flow-blocked.
%%
%% {call, ...}: synchronous AMQP methods are queued as an RPC (the caller
%% is replied to later from rpc_bottom_half); asynchronous ones are sent
%% immediately and acknowledged with ok.
handle_call({call, Method, AmqpMsg}, From, State) ->
    case check_block(Method, AmqpMsg, State) of
        ok -> Content = build_content(AmqpMsg),
              case rabbit_framing:is_method_synchronous(Method) of
                  true ->
                      {noreply, rpc_top_half(Method, Content, From,
                                             State)};
                  false ->
                      do(Method, Content, State),
                      {reply, ok, State}
              end;
        BlockReply -> {reply, BlockReply, State}
    end;
@private
%% {subscribe, ...}: register a basic.consume request.  An empty or
%% undefined tag means the server will pick one, so the request is queued
%% in anon_sub_requests (answered in order); an explicit binary tag is
%% stored in tagged_sub_requests keyed by that tag.
handle_call({subscribe, #'basic.consume'{consumer_tag = Tag} = Method, Consumer},
            From, #c_state{tagged_sub_requests = Tagged,
                           anon_sub_requests = Anon} = State) ->
    case check_block(Method, none, State) of
        ok ->
            {NewMethod, NewState} =
                if Tag =:= undefined orelse size(Tag) == 0 ->
                       NewAnon = queue:in({From,Consumer}, Anon),
                       {Method#'basic.consume'{consumer_tag = <<"">>},
                        State#c_state{anon_sub_requests = NewAnon}};
                   is_binary(Tag) ->
TODO test whether this tag already exists , either in
                       NewTagged = dict:store(Tag,{From,Consumer}, Tagged),
                       {Method,
                        State#c_state{tagged_sub_requests = NewTagged}}
                end,
            {noreply, rpc_top_half(NewMethod, none, From, NewState)};
        BlockReply ->
            {reply, BlockReply, State}
    end.
@private
%% Asynchronous API entry points.
%%
%% {cast, ...}: fire-and-forget method send; silently (except for a log
%% line) discarded when the channel is closing or flow-blocked.
handle_cast({cast, Method, AmqpMsg} = Cast, State) ->
    case check_block(Method, AmqpMsg, State) of
        ok -> do(Method, build_content(AmqpMsg), State);
        BlockReply -> ?LOG_INFO("Channel (~p): discarding method in cast ~p."
                                "Reason: ~p~n", [self(), Cast, BlockReply])
    end,
    {noreply, State};
@private
%% Register (and link to) the process that receives basic.return messages.
handle_cast({register_return_handler, ReturnHandler}, State) ->
    link(ReturnHandler),
    {noreply, State#c_state{return_handler_pid = ReturnHandler}};
@private
%% Register (and link to) the process that receives channel.flow events.
handle_cast({register_flow_handler, FlowHandler}, State) ->
    link(FlowHandler),
    {noreply, State#c_state{flow_handler_pid = FlowHandler}};
@private
%% Flow-control notification from a queue process; nothing to do here.
handle_cast({notify_sent, _Peer}, State) ->
    {noreply, State};
@private
%% A method arriving from the channel infrastructure (direct driver).
handle_cast({method, Method, Content}, State) ->
    handle_method(Method, Content, State).
@private
%% Out-of-band messages.  Clause order is significant: the 'EXIT' clauses
%% discriminate the sender by matching the pid against specific #c_state
%% fields, with a final catch-all for unexpected exits.
%%
%% Commands forwarded from the reader (network driver).
handle_info({send_command, Method}, State) ->
    handle_method(Method, none, State);
@private
handle_info({send_command, Method, Content}, State) ->
    handle_method(Method, Content, State);
@private
%% Delivery that must additionally credit the queue's flow control.
handle_info({send_command_and_notify, Q, ChPid, Method, Content}, State) ->
    handle_method(Method, Content, State),
    rabbit_amqqueue:notify_sent(Q, ChPid),
    {noreply, State};
@private
%% Orderly shutdown requested.
handle_info(shutdown, State) ->
    {stop, normal, State};
@private
handle_info({shutdown, Reason}, State) ->
    shutdown_with_reason(Reason, State);
@private
%% A connection-close grace timer fired before the channel finished
%% flushing / received channel.close_ok: give up and stop.
handle_info({shutdown, FailShutdownReason, InitialReason},
            #c_state{number = Number} = State) ->
    case FailShutdownReason of
        {connection_closing, timed_out_flushing_channel} ->
            ?LOG_WARN("Channel ~p closing: timed out flushing while connection "
                      "closing~n", [Number]);
        {connection_closing, timed_out_waiting_close_ok} ->
            ?LOG_WARN("Channel ~p closing: timed out waiting for "
                      "channel.close_ok while connection closing~n", [Number])
    end,
    {stop, {FailShutdownReason, InitialReason}, State};
@private
%% The parent connection is closing.  With CloseType = flush and pending
%% RPCs we keep running under a timeout so outstanding requests can
%% complete; otherwise shut down immediately.
handle_info({connection_closing, CloseType, Reason},
            #c_state{rpc_requests = RpcQueue,
                     closing = Closing} = State) ->
    case {CloseType, Closing, queue:is_empty(RpcQueue)} of
        {flush, false, false} ->
            erlang:send_after(?TIMEOUT_FLUSH, self(),
                              {shutdown,
                               {connection_closing, timed_out_flushing_channel},
                               Reason}),
            {noreply, State#c_state{closing = {connection, Reason}}};
        {flush, just_channel, false} ->
            erlang:send_after(?TIMEOUT_CLOSE_OK, self(),
                              {shutdown,
                               {connection_closing, timed_out_waiting_close_ok},
                               Reason}),
            {noreply, State};
        _ ->
            shutdown_with_reason(Reason, State)
    end;
@private
%% The server reported a channel-level error: translate it to a
%% server_initiated_close stop reason.
handle_info({channel_exit, _Channel, #amqp_error{name = ErrorName,
                                                explanation = Expl} = Error},
            State = #c_state{number = Number}) ->
    ?LOG_WARN("Channel ~p closing: server sent error ~p~n", [Number, Error]),
    {_, Code, _} = rabbit_framing:lookup_amqp_exception(ErrorName),
    {stop, {server_initiated_close, Code, Expl}, State};
@private
%% Exit signals, discriminated by which #c_state field the pid matches.
handle_info({'EXIT', ConnectionPid, Reason},
            State = #c_state{number = ChannelNumber,
                             parent_connection = ConnectionPid}) ->
    ?LOG_WARN("Channel ~p closing: parent connection died. Reason: ~p~n",
              [ChannelNumber, Reason]),
    {stop, {parent_connection_died, ConnectionPid, Reason}, State};
@private
handle_info({'EXIT', WriterPid, Reason},
            State = #c_state{number = ChannelNumber,
                             writer_pid = WriterPid}) ->
    ?LOG_WARN("Channel ~p closing: received exit signal from writer. "
              "Reason: ~p~n", [ChannelNumber, Reason]),
    {stop, {writer_died, WriterPid, Reason}, State};
@private
handle_info({'EXIT', ReaderPid, Reason},
            State = #c_state{number = ChannelNumber,
                             reader_pid = ReaderPid}) ->
    ?LOG_WARN("Channel ~p closing: received exit signal from reader. "
              "Reason: ~p~n", [ChannelNumber, Reason]),
    {stop, {reader_died, ReaderPid, Reason}, State};
@private
%% Handler processes dying merely unregister themselves.
handle_info({'EXIT', FlowHandler, Reason},
            State = #c_state{number = ChannelNumber,
                             flow_handler_pid = FlowHandler}) ->
    ?LOG_INFO("Channel ~p: unregistering flow handler because it is "
              "closing: ~p~n", [ChannelNumber, Reason]),
    {noreply, State#c_state{flow_handler_pid = none}};
@private
handle_info({'EXIT', ReturnHandler, Reason},
            State = #c_state{number = ChannelNumber,
                             return_handler_pid = ReturnHandler}) ->
    ?LOG_INFO("Channel ~p: unregistering return handler because it is "
              "closing: ~p~n", [ChannelNumber, Reason]),
    {noreply, State#c_state{return_handler_pid = none}};
@private
%% Catch-all: an exit from anything else is fatal.
handle_info({'EXIT', Pid, Reason}, State = #c_state{number = ChannelNumber}) ->
    ?LOG_WARN("Channel ~p closing: received unexpected exit signal from (~p). "
              "Reason: ~p~n", [ChannelNumber, Pid, Reason]),
    {stop, {unexpected_exit_signal, Pid, Reason}, State}.
@private
%% @private
%% gen_server terminate callback: tear down the reader/writer channel
%% infrastructure that init/1 started for this channel.
terminate(_Reason, #c_state{driver = Driver,
                            reader_pid = RPid,
                            writer_pid = WPid}) ->
    amqp_channel_util:terminate_channel_infrastructure(Driver, {RPid, WPid}).
@private
%% @private
%% gen_server code_change callback.  The gen_server contract requires this
%% to return {ok, NewState}; the previous version returned the bare State,
%% which would make a hot code upgrade (sys:change_code/4) fail.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
|
e755c173d84c0c8f61f4ddde0f287fd685ac5885fe607d549d286e8e9209df83 | jwiegley/notes | Types.hs | mergeUpdateMaps :: (Maybe a -> Maybe a -> Maybe a)
                -> Map Prelude.FilePath a
                -> Map Prelude.FilePath a
                -> Map Prelude.FilePath a
-- Union the two maps, letting the combiner decide the value for keys
-- present on both sides (it always receives Justs here), then drop every
-- key the combiner mapped to Nothing.
-- NOTE(review): the lambda's 'x' shadows the parameter 'x' above.
mergeUpdateMaps f x y =
    M.foldlWithKey'
        (\m k mx -> case mx of
              Nothing -> m
              Just x  -> M.insert k x m)
        M.empty
        $ M.unionWith f (M.map Just x) (M.map Just y)
-- | Merge two maps of per-file source updates.  For keys present on both
-- sides the right-hand (newer) entry decides: a pending update survives,
-- while a delete (or anything else) removes the key from the result.
mergeSourceUpdateMaps :: Map Prelude.FilePath UpdateData
                      -> Map Prelude.FilePath UpdateData
                      -> Map Prelude.FilePath UpdateData
mergeSourceUpdateMaps = mergeUpdateMaps combine
  where
    combine _older newer =
      case newer of
        Just (UpdateSource _) -> newer
        _                     -> Nothing
| null | https://raw.githubusercontent.com/jwiegley/notes/24574b02bfd869845faa1521854f90e4e8bf5e9a/gists/f719a3d41696d48f6005/gists/6260766/Types.hs | haskell | mergeUpdateMaps :: (Maybe a -> Maybe a -> Maybe a)
-> Map Prelude.FilePath a
-> Map Prelude.FilePath a
-> Map Prelude.FilePath a
mergeUpdateMaps f x y =
M.foldlWithKey'
(\m k mx -> case mx of
Nothing -> m
Just x -> M.insert k x m)
M.empty
$ M.unionWith f (M.map Just x) (M.map Just y)
mergeSourceUpdateMaps :: Map Prelude.FilePath UpdateData
-> Map Prelude.FilePath UpdateData
-> Map Prelude.FilePath UpdateData
mergeSourceUpdateMaps = mergeUpdateMaps f
where
f _ x@(Just (UpdateSource _)) = x
f _ (Just DeleteSource) = Nothing
f _ _ = Nothing
| |
288f7a7ff27cfb8be39c357417b98ccb820124f33cad0dac5d35b62f0f634258 | sixohsix/tak | Editor.hs | # LANGUAGE NoImplicitPrelude #
module Tak.Editor where
import Prelude as P
import qualified Data.Text as DT
import qualified Data.Text.IO as DTIO
import System.Directory (doesFileExist)
import System.IO
import Control.Arrow ( (>>>) )
import Control.Monad ( (>=>) )
import Control.Lens
import Data.Map as Map
import Tak.Types as TT
import Tak.Range
import Tak.Text
import Tak.Buffer
import Tak.Display
import Tak.Editor.Cursor
import Tak.Editor.Undo
import Tak.Editor.Edit
import Tak.Editor.Selection
import Tak.Editor.Replace
import Tak.Config
-- | Rendering for 'SimpleEditor': show the buffer from the current scroll
-- offset down, highlight the selection (shifted into screen coordinates)
-- and place the cursor.
instance Editor SimpleEditor where
  render editor height width = do
    let lScroll = lineScroll editor
        displayedBuffer = bufferDropLines lScroll (buffer editor)
        -- Shift the selection up by the scroll amount so it lines up with
        -- displayedBuffer.  'fmap' replaces the original
        -- 'maybe Nothing (Just . f)' (hlint: that is exactly 'fmap f').
        mRange = fmap (`shiftRange` Pos (-lScroll) 0) (currentSelection editor)
    renderBuffer Crop displayedBuffer mRange height width
    setCursor (screenPos editor)
-- | Lift a pure editor transformation into an event handler over the whole
-- 'GlobalState'; the transformation is applied through the 'editor' lens.
ignoreEvt :: (SimpleEditor -> SimpleEditor) -> GlobalState -> IO GlobalState
ignoreEvt f evt = return $ over editor f evt
-- Short alias used throughout the key-binding tables below.
ie = ignoreEvt
-- | Fallback handler: insert the typed character into the buffer, and
-- leave the state untouched for any non-character event.
insertIfChar :: Event -> GlobalState -> IO GlobalState
insertIfChar evt gst = case evt of
  KeyEvent (KeyChar c) -> (ignoreEvt $ insertChar c) gst
  -- The original matched the pattern variable 'otherwise', silently
  -- shadowing Prelude.otherwise; '_' states the wildcard intent directly.
  _ -> return gst
-- | Key bindings that only move the cursor or manipulate the selection;
-- none of them modify the buffer contents.
readOnlyCommands = Map.fromList
  [ -- plain cursor movement
    (KeyEvent KeyUp,       ie cursorUp)
  , (KeyEvent KeyDown,     ie cursorDown)
  , (KeyEvent KeyLeft,     ie cursorLeft)
  , (KeyEvent KeyRight,    ie cursorRight)
  , (KeyEvent KeyPageDown, ie cursorPageDown)
  , (KeyEvent KeyPageUp,   ie cursorPageUp)
    -- line start / end (emacs-style control keys and dedicated keys)
  , (KeyEvent $ KeyCtrlChar 'A', ie cursorBeginningOfLine)
  , (KeyEvent KeyHome,           ie cursorBeginningOfLine)
  , (KeyEvent $ KeyCtrlChar 'E', ie cursorEndOfLine)
  , (KeyEvent KeyEnd,            ie cursorEndOfLine)
    -- selection handling
  , (KeyEvent $ KeyCtrlChar '@', ie startSelecting)
  , (KeyEvent $ KeyCtrlChar 'C', (return . copySelection) >=> (ie cancelSelecting))
  , (KeyEvent $ KeyCtrlChar 'G', ie cancelSelecting)
    -- word / paragraph movement, with escaped-arrow fallbacks for
    -- terminals that do not report the Ctrl-arrow chords
  , (KeyEvent KeyCtrlUp,    ie cursorPrevPara)
  , (KeyEvent KeyCtrlDown,  ie cursorNextPara)
  , (KeyEvent KeyCtrlRight, ie cursorNextWord)
  , (KeyEvent KeyCtrlLeft,  ie cursorPrevWord)
  , (KeyEvent $ KeyEscaped $ KeyUp,    ie cursorPrevPara)
  , (KeyEvent $ KeyEscaped $ KeyDown,  ie cursorNextPara)
  , (KeyEvent $ KeyEscaped $ KeyRight, ie cursorNextWord)
  , (KeyEvent $ KeyEscaped $ KeyLeft,  ie cursorPrevWord)
    -- whole-buffer movement
  , (KeyEvent KeyCtrlHome, ie cursorFirstPos)
  , (KeyEvent KeyCtrlEnd,  ie cursorLastPos)
  ]
-- | Key bindings that modify the buffer.
editCommands = Map.fromList
  [ (KeyEvent KeyEnter,          ie insertLinebreak)
  , (KeyEvent KeyDel,            ie deleteChar)
  , (KeyEvent $ KeyCtrlChar 'I', ie insertTab)
  , (KeyEvent $ KeyCtrlChar 'Z', ie undo)
  , (KeyEvent $ KeyCtrlChar 'K', ie killLine)
    -- cut: copy the selection, then delete it (eta-reduced pipeline)
  , (KeyEvent $ KeyCtrlChar 'X', copySelection >>> ie deleteSelection)
  , (KeyEvent $ KeyCtrlChar 'V', return . pasteAtInsertPos)
    -- NOTE(review): "echo hello" looks like a placeholder command --
    -- confirm the intended shell filter for Ctrl-P.
  , (KeyEvent $ KeyCtrlChar 'P', replaceRegionWithShellCmd "echo hello")
  ]
-- | Dispatch an event by looking it up in the binding tables (edit
-- bindings win over read-only ones via the left-biased 'Map.union'),
-- falling back to character insertion for unbound events.
defaultModeHandler :: Event -> GlobalState -> IO GlobalState
defaultModeHandler evt = findWithDefault (insertIfChar evt) evt (editCommands `Map.union` readOnlyCommands)
-- The editor mode installed by default.
defaultEditorMode = Mode defaultModeHandler
-- | Build an editor for the given file.  A missing file yields an empty
-- buffer.  The file is read with a BOM-tolerant UTF-8 decoder and
-- universal newline translation; 'Data.Text.IO.hGetContents' is strict,
-- so the whole file is consumed before the handle is released.
simpleEditorFromFile :: String -> IO (SimpleEditor)
simpleEditorFromFile filename = do
  fileExists <- doesFileExist filename
  s <- if fileExists
       -- withFile guarantees the handle is closed even if decoding
       -- throws; the original openFile/hClose pair leaked it on error.
       then withFile filename ReadMode $ \h -> do
              hSetEncoding h utf8_bom
              hSetNewlineMode h universalNewlineMode
              DTIO.hGetContents h
       else return DT.empty
  let buf = textToBuffer s
  pos <- getInitialPosition filename
  return $ fixScroll $ defaultSimpleEditor {
    buffer = buf,
    fileName = filename,
    cursorPos = pos
  }
| null | https://raw.githubusercontent.com/sixohsix/tak/6310d19faa683156933dde38666c11dc087d79ea/src/Tak/Editor.hs | haskell | # LANGUAGE NoImplicitPrelude #
module Tak.Editor where
import Prelude as P
import qualified Data.Text as DT
import qualified Data.Text.IO as DTIO
import System.Directory (doesFileExist)
import System.IO
import Control.Arrow ( (>>>) )
import Control.Monad ( (>=>) )
import Control.Lens
import Data.Map as Map
import Tak.Types as TT
import Tak.Range
import Tak.Text
import Tak.Buffer
import Tak.Display
import Tak.Editor.Cursor
import Tak.Editor.Undo
import Tak.Editor.Edit
import Tak.Editor.Selection
import Tak.Editor.Replace
import Tak.Config
instance Editor SimpleEditor where
render editor height width = do
let lScroll = lineScroll editor
displayedBuffer = bufferDropLines (lineScroll editor) (buffer editor)
mRange = maybe Nothing (\r -> Just $ r `shiftRange` (Pos (-lScroll) 0)) (currentSelection editor)
renderBuffer Crop displayedBuffer mRange height width
setCursor (screenPos editor)
ignoreEvt :: (SimpleEditor -> SimpleEditor) -> GlobalState -> IO GlobalState
ignoreEvt f evt = return $ over editor f evt
ie = ignoreEvt
insertIfChar :: Event -> GlobalState -> IO GlobalState
insertIfChar evt gst = case evt of
KeyEvent (KeyChar c) -> (ignoreEvt $ insertChar c) gst
otherwise -> return gst
readOnlyCommands = Map.fromList [
(KeyEvent KeyUp, ie cursorUp),
(KeyEvent KeyDown, ie cursorDown),
(KeyEvent KeyLeft, ie cursorLeft),
(KeyEvent KeyRight, ie cursorRight),
(KeyEvent KeyPageDown, ie cursorPageDown),
(KeyEvent KeyPageUp, ie cursorPageUp),
(KeyEvent $ KeyCtrlChar 'A', ie cursorBeginningOfLine),
(KeyEvent KeyHome, ie cursorBeginningOfLine),
(KeyEvent $ KeyCtrlChar 'E', ie cursorEndOfLine),
(KeyEvent KeyEnd, ie cursorEndOfLine),
(KeyEvent $ KeyCtrlChar '@', ie startSelecting),
(KeyEvent $ KeyCtrlChar 'C', (return . copySelection) >=> (ie cancelSelecting)),
(KeyEvent $ KeyCtrlChar 'G', ie cancelSelecting),
(KeyEvent KeyCtrlUp, ie cursorPrevPara),
(KeyEvent KeyCtrlDown, ie cursorNextPara),
(KeyEvent KeyCtrlRight, ie cursorNextWord),
(KeyEvent KeyCtrlLeft, ie cursorPrevWord),
(KeyEvent $ KeyEscaped $ KeyUp, ie cursorPrevPara),
(KeyEvent $ KeyEscaped $ KeyDown, ie cursorNextPara),
(KeyEvent $ KeyEscaped $ KeyRight, ie cursorNextWord),
(KeyEvent $ KeyEscaped $ KeyLeft, ie cursorPrevWord),
(KeyEvent KeyCtrlHome, ie cursorFirstPos),
(KeyEvent KeyCtrlEnd, ie cursorLastPos)
]
editCommands = Map.fromList [
(KeyEvent KeyEnter, ie insertLinebreak),
(KeyEvent KeyDel, ie deleteChar),
(KeyEvent $ KeyCtrlChar 'I', ie insertTab),
(KeyEvent $ KeyCtrlChar 'Z', ie undo),
(KeyEvent $ KeyCtrlChar 'K', ie killLine),
(KeyEvent $ KeyCtrlChar 'X', \gst -> ((copySelection >>> (ie deleteSelection)) gst)),
(KeyEvent $ KeyCtrlChar 'V', return . pasteAtInsertPos),
(KeyEvent $ KeyCtrlChar 'P', replaceRegionWithShellCmd "echo hello")
]
defaultModeHandler :: Event -> GlobalState -> IO GlobalState
defaultModeHandler evt = findWithDefault (insertIfChar evt) evt (editCommands `Map.union` readOnlyCommands)
defaultEditorMode = Mode defaultModeHandler
simpleEditorFromFile :: String -> IO (SimpleEditor)
simpleEditorFromFile filename = do
fileExists <- doesFileExist filename
s <- if fileExists
then do
h <- openFile filename ReadMode
hSetEncoding h utf8_bom
hSetNewlineMode h universalNewlineMode
contents <- DTIO.hGetContents h
hClose h
return contents
else return DT.empty
let buf = textToBuffer s
pos <- getInitialPosition filename
return $ fixScroll $ defaultSimpleEditor {
buffer = buf,
fileName = filename,
cursorPos = pos
}
| |
0ef6e5dba66577ee273dfaccd48646fa771fd13615fe0f9c095dd9d4a0f5d766 | danielecapo/sfont | parametric.rkt | #lang racket
;; Facade module: re-export the parametric font-writer and path DSLs so
;; that clients only need a single (require sfont/parametric).
(require "private/parametric/fontwriter.rkt"
         "private/parametric/path.rkt")
(provide (all-from-out "private/parametric/fontwriter.rkt")
         (all-from-out "private/parametric/path.rkt"))
(require "private/parametric/fontwriter.rkt"
"private/parametric/path.rkt")
(provide (all-from-out "private/parametric/fontwriter.rkt")
(all-from-out "private/parametric/path.rkt")) | |
2b78b271c72429e7422e0425acf93bf547109f0be880566eb3a6b6a81ee86da6 | slyrus/abcl | print.lisp | ;;; print.lisp
;;;
Copyright ( C ) 2004 - 2006
$ Id$
;;;
;;; This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation ; either version 2
of the License , or ( at your option ) any later version .
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
You should have received a copy of the GNU General Public License
;;; along with this program; if not, write to the Free Software
Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
;;;
;;; As a special exception, the copyright holders of this library give you
;;; permission to link this library with independent modules to produce an
;;; executable, regardless of the license terms of these independent
;;; modules, and to copy and distribute the resulting executable under
;;; terms of your choice, provided that you also meet, for each linked
;;; independent module, the terms and conditions of the license of that
;;; module. An independent module is a module which is not derived from
;;; or based on this library. If you modify this library, you may extend
;;; this exception to your version of the library, but you are not
;;; obligated to do so. If you do not wish to do so, delete this
;;; exception statement from your version.
Adapted from SBCL .
(in-package #:system)
;;; Can this object contain other objects?
;;; True when OBJECT may contain references to other Lisp objects and can
;;; therefore participate in circular / shared structure: conses, CLOS and
;;; structure instances, and general (element-type T) arrays.
(defun compound-object-p (object)
  (or (consp object)
      (typep object 'structure-object)
      (typep object 'standard-object)
      (typep object '(array t *))))
;;; Punt if INDEX is equal or larger then *PRINT-LENGTH* (and
;;; *PRINT-READABLY* is NIL) by outputting \"...\" and returning from
the block named NIL .
;; NOTE(review): this expands into a bare (RETURN), so it may only be used
;; inside a block named NIL (e.g. the LOOP/DOTIMES bodies in this file).
(defmacro punt-print-if-too-long (index stream)
  `(when (and (not *print-readably*)
              *print-length*
              (>= ,index *print-length*))
     (write-string "..." ,stream)
     (return)))
;; Print INTEGER to STREAM, special-casing pretty-printer (XP) streams,
;; which need the digits rendered to a string first.
(defun output-integer (integer stream)
  ;; (%output-object integer stream))
  (if (xp::xp-structure-p stream)
      (let ((s (sys::%write-to-string integer)))
        (xp::write-string++ s stream 0 (length s)))
      (%output-object integer stream)))
;; Print a (possibly dotted) list, honoring *PRINT-LEVEL* and
;; *PRINT-LENGTH*, and switching to dotted notation when the tail is an
;; atom or is itself a shared/circular reference (which must be printed
;; as a separate #n# object).  Returns the list.
(defun output-list (list stream)
  (cond ((and (null *print-readably*)
              *print-level*
              (>= *current-print-level* *print-level*))
         ;; Depth limit reached: abbreviate the whole list as "#".
         (write-char #\# stream))
        (t
         (let ((*current-print-level* (1+ *current-print-level*)))
           (write-char #\( stream)
           (let ((*current-print-length* 0)
                 (list list))
             (loop
               (punt-print-if-too-long *current-print-length* stream)
               (output-object (pop list) stream)
               (unless list
                 (return))
               (when (or (atom list)
                         (check-for-circularity list))
                 (write-string " . " stream)
                 (output-object list stream)
                 (return))
               (write-char #\space stream)
               (incf *current-print-length*)))
           (write-char #\) stream))))
  list)
;;; Output the abbreviated #< form of an array.
(defun output-terse-array (array stream)
  ;; Disable the depth/length limits so the #<...> form itself is never
  ;; truncated.
  (let ((*print-level* nil)
        (*print-length* nil))
    (print-unreadable-object (array stream :type t :identity t))))
;; True when ARRAY can be printed readably with array syntax: the element
;; type must be T, and any zero-length dimension must come after the last
;; non-zero one (otherwise the dimensions could not be reconstructed from
;; the printed elements).
(defun array-readably-printable-p (array)
  (and (eq (array-element-type array) t)
       (let ((zero (position 0 (array-dimensions array)))
             (number (position 0 (array-dimensions array)
                              :test (complement #'eql)
                              :from-end t)))
         ;; zero = first 0 dimension; number = last non-zero dimension.
         (or (null zero) (null number) (> zero number)))))
;; Print a general vector with #(...) syntax, honoring *PRINT-ARRAY*,
;; *PRINT-READABLY*, *PRINT-LEVEL* and *PRINT-LENGTH*.  Strings and
;; bit-vectors are expected to be handled before this function is called;
;; the (assert nil) branches guard those presumed-unreachable paths.
(defun output-vector (vector stream)
  (declare (vector vector))
  (cond ((stringp vector)
         (assert nil)
         (sys::%output-object vector stream))
        ((not (or *print-array* *print-readably*))
         ;; Array printing disabled: abbreviate as #<...>.
         (output-terse-array vector stream))
        ((bit-vector-p vector)
         (assert nil)
         (sys::%output-object vector stream))
        (t
         (when (and *print-readably*
                    (not (array-readably-printable-p vector)))
           (error 'print-not-readable :object vector))
         (cond ((and (null *print-readably*)
                     *print-level*
                     (>= *current-print-level* *print-level*))
                ;; Depth limit reached: abbreviate as "#".
                (write-char #\# stream))
               (t
                (let ((*current-print-level* (1+ *current-print-level*)))
                  (write-string "#(" stream)
                  (dotimes (i (length vector))
                    (unless (zerop i)
                      (write-char #\space stream))
                    (punt-print-if-too-long i stream)
                    (output-object (aref vector i) stream))
                  (write-string ")" stream))))))
  vector)
;; Print OBJECT without pretty-printing, dispatching on its type; CLOS
;; objects, structures, Java objects and functions go through
;; PRINT-OBJECT, everything else through the dedicated printers.
(defun output-ugly-object (object stream)
  (cond ((consp object)
         (output-list object stream))
        ((and (vectorp object)
              (not (stringp object))
              (not (bit-vector-p object)))
         (output-vector object stream))
        ((structure-object-p object)
         (cond
           ((and (null *print-readably*)
                 *print-level*
                 (>= *current-print-level* *print-level*))
            ;; Depth limit reached: abbreviate the structure as "#".
            (write-char #\# stream))
           (t
            (print-object object stream))))
        ((standard-object-p object)
         (print-object object stream))
        ((java::java-object-p object)
         (print-object object stream))
        ((xp::xp-structure-p stream)
         ;; Pretty-printer stream: render to a string first, then emit.
         (let ((s (sys::%write-to-string object)))
           (xp::write-string++ s stream 0 (length s))))
        ((functionp object)
         (print-object object stream))
        (t
         ;; Atoms (numbers, characters, symbols, strings, ...) are
         ;; handled by the primitive printer.
         (%output-object object stream))))
;;;; circularity detection stuff
;;; When *PRINT-CIRCLE* is T, this gets bound to a hash table that
;;; (eventually) ends up with entries for every object printed. When
;;; we are initially looking for circularities, we enter a T when we
find an object for the first time , and a 0 when we encounter an
object a second time around . When we are actually printing , the 0
entries get changed to the actual marker value when they are first
;;; printed.
(defvar *circularity-hash-table* nil)
When NIL , we are just looking for circularities . After we have
;;; found them all, this gets bound to 0. Then whenever we need a new
;;; marker, it is incremented.
(defvar *circularity-counter* nil)
;;; Check to see whether OBJECT is a circular reference, and return
;;; something non-NIL if it is. If ASSIGN is T, then the number to use
in the # n= and # n # noise is assigned at this time .
;;; If ASSIGN is true, reference bookkeeping will only be done for
;;; existing entries, no new references will be recorded!
;;;
;;; Note: CHECK-FOR-CIRCULARITY must be called *exactly* once with
;;; ASSIGN true, or the circularity detection noise will get confused
about when to use # n= and when to use # n # . If this returns non - NIL
when is true , then you must call HANDLE - CIRCULARITY on it .
If CHECK - FOR - CIRCULARITY returns : INITIATE as the second value ,
;;; you need to initiate the circularity detection noise, e.g. bind
;;; *CIRCULARITY-HASH-TABLE* and *CIRCULARITY-COUNTER* to suitable values
;;; (see #'OUTPUT-OBJECT for an example).
;;; Decide whether OBJECT is a shared / circular reference.  During the
;;; scanning pass (*CIRCULARITY-COUNTER* is NIL) this merely records how
;;; often each object is seen; during the printing pass it hands out the
;;; positive #n= labels (when ASSIGN is true) and negative #n# references.
;;; Returns :INITIATE as a secondary value when detection has not been
;;; set up yet.
(defun check-for-circularity (object &optional assign)
  (cond ((null *print-circle*)
         ;; Circle detection disabled: nothing to do.
         nil)
        ((null *circularity-hash-table*)
         ;; Not initiated yet; tell the caller to bind the tables.
         (values nil :initiate))
        ((null *circularity-counter*)
         ;; Scanning pass: count sightings of OBJECT.
         (ecase (gethash object *circularity-hash-table*)
           ((nil)
            ;; First sighting: remember it and keep scanning.
            (setf (gethash object *circularity-hash-table*) t)
            nil)
           ((t)
            ;; Second sighting: mark the object as shared/circular.
            (setf (gethash object *circularity-hash-table*) 0)
            t)
           (0
            ;; Already known to be shared/circular.
            t)))
        (t
         ;; Printing pass.
         (let ((entry (gethash object *circularity-hash-table*)))
           (case entry
             ((nil t)
              ;; Either a fresh object (e.g. a list consed by FORMAT while
              ;; printing) or one seen exactly once: print it normally.
              nil)
             (0
              (if assign
                  (let ((label (incf *circularity-counter*)))
                    ;; First printed occurrence: assign its #n= label.
                    (setf (gethash object *circularity-hash-table*) label)
                    label)
                  t))
             (t
              ;; Later occurrence: the negation signals "emit #n#".
              (- entry)))))))
;;; Handle the results of CHECK-FOR-CIRCULARITY. If this returns T then
you should go ahead and print the object . If it returns NIL , then
;;; you should blow it off.
;;; Act on a non-NIL result of CHECK-FOR-CIRCULARITY: emit any #n= / #n#
;;; notation and return true when the caller should go on to print the
;;; object itself, false when it should be skipped.
(defun handle-circularity (marker stream)
  (case marker
    (:initiate
     ;; The caller used CHECK-FOR-CIRCULARITY without first binding the
     ;; circularity tables.
     (let ((*print-circle* nil))
       (error "trying to use CHECK-FOR-CIRCULARITY when ~
               circularity checking isn't initiated")))
    ((t)
     ;; Still in the scanning pass: nothing to emit, nothing to print.
     nil)
    (t
     (cond ((minusp marker)
            ;; Later occurrence: emit the back-reference and skip the
            ;; object itself.
            (print-reference marker stream)
            nil)
           (t
            ;; First occurrence: emit the label, then print the object.
            (print-label marker stream)
            t)))))
;; Emit the "#n=" label for the first printed occurrence of a shared
;; object.  Base/radix are pinned so the digits are always plain decimal.
(defun print-label (marker stream)
  (write-char #\# stream)
  (let ((*print-base* 10)
        (*print-radix* nil))
    (output-integer marker stream))
  (write-char #\= stream))
;; Emit the "#n#" back-reference for a later occurrence of a shared
;; object.  MARKER is negative here (see CHECK-FOR-CIRCULARITY), hence
;; the negation before printing.
(defun print-reference (marker stream)
  (write-char #\# stream)
  (let ((*print-base* 10)
        (*print-radix* nil))
    (output-integer (- marker) stream))
  (write-char #\# stream))
;;;; OUTPUT-OBJECT -- the main entry point
Objects whose print representation identifies them EQLly do n't need to be
;; checked for circularity.
;; True when X's printed representation identifies it EQLly, so it never
;; needs circularity bookkeeping: numbers, characters and interned
;; symbols.  (Uninterned symbols fail the SYMBOL-PACKAGE test.)
(defun uniquely-identified-by-print-p (x)
  (cond ((numberp x) t)
        ((characterp x) t)
        ((symbolp x) (symbol-package x))
        (t nil)))
;; Print OBJECT through the pretty printer or the plain printer,
;; according to *PRINT-PRETTY*.
(defun %print-object (object stream)
  (if *print-pretty*
      (xp::output-pretty-object object stream)
      (output-ugly-object object stream)))
;; Print OBJECT with circularity bookkeeping.  On the very first call the
;; detection tables are not bound yet, so we bind them, run a silent
;; scanning pass into a broadcast stream, and then recurse for the real
;; printing pass with the counter bound to 0.
(defun %check-object (object stream)
  (multiple-value-bind (marker initiate)
      (check-for-circularity object t)
    (if (eq initiate :initiate)
        ;; Initialize circularity detection.
        (let ((*circularity-hash-table* (make-hash-table :test 'eq)))
          (%check-object object (make-broadcast-stream))
          (let ((*circularity-counter* 0))
            (%check-object object stream)))
        ;; Otherwise...
        (if marker
            (when (handle-circularity marker stream)
              (%print-object object stream))
            (%print-object object stream)))))
;;; Output OBJECT to STREAM observing all printer control variables.
;; Main entry point: print OBJECT to STREAM observing all printer control
;; variables.  Circularity bookkeeping is skipped for objects whose
;; printed form identifies them EQLly.  Returns OBJECT.
(defun output-object (object stream)
  (cond ((or (not *print-circle*)
             (uniquely-identified-by-print-p object))
         (%print-object object stream))
        ;; If we have already started circularity detection, this object might
        ;; be a shared reference. If we have not, then if it is a compound
        ;; object, it might contain a circular reference to itself or multiple
        ;; shared references.
        ((or *circularity-hash-table*
             (compound-object-p object))
         (%check-object object stream))
        (t
         (%print-object object stream)))
  object)
(provide "PRINT")
| null | https://raw.githubusercontent.com/slyrus/abcl/881f733fdbf4b722865318a7d2abe2ff8fdad96e/src/org/armedbear/lisp/print.lisp | lisp | print.lisp
This program is free software; you can redistribute it and/or
either version 2
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program; if not, write to the Free Software
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from
or based on this library. If you modify this library, you may extend
this exception to your version of the library, but you are not
obligated to do so. If you do not wish to do so, delete this
exception statement from your version.
Can this object contain other objects?
Punt if INDEX is equal or larger then *PRINT-LENGTH* (and
*PRINT-READABLY* is NIL) by outputting \"...\" and returning from
(%output-object integer stream))
Output the abbreviated #< form of an array.
circularity detection stuff
When *PRINT-CIRCLE* is T, this gets bound to a hash table that
(eventually) ends up with entries for every object printed. When
we are initially looking for circularities, we enter a T when we
printed.
found them all, this gets bound to 0. Then whenever we need a new
marker, it is incremented.
Check to see whether OBJECT is a circular reference, and return
something non-NIL if it is. If ASSIGN is T, then the number to use
If ASSIGN is true, reference bookkeeping will only be done for
existing entries, no new references will be recorded!
Note: CHECK-FOR-CIRCULARITY must be called *exactly* once with
ASSIGN true, or the circularity detection noise will get confused
you need to initiate the circularity detection noise, e.g. bind
*CIRCULARITY-HASH-TABLE* and *CIRCULARITY-COUNTER* to suitable values
(see #'OUTPUT-OBJECT for an example).
Don't bother, nobody cares.
first encounter
We need to keep looking.
It's a circular reference.
It's a circular reference.
once. Either way, just print the thing without any
special processing. Note: you might argue that
finding a new object means that something is broken,
but this can happen. If someone uses the ~@<...~:>
format directive, it conses a new list each time
though format (i.e. the &REST list), so we will have
first occurrence of this object: Set the counter.
Handle the results of CHECK-FOR-CIRCULARITY. If this returns T then
you should blow it off.
Someone forgot to initiate circularity detection.
just looking. So don't bother groveling it again.
(write-char #\# stream)
(*print-radix* nil))
(output-integer (- marker) stream)
(write-char #\# stream)
(output-integer marker stream)
OUTPUT-OBJECT -- the main entry point
checked for circularity.
Initialize circularity detection.
Otherwise...
Output OBJECT to STREAM observing all printer control variables.
If we have already started circularity detection, this object might
be a shared reference. If we have not, then if it is a compound
object, it might contain a circular reference to itself or multiple
shared references. | Copyright ( C ) 2004 - 2006
$ Id$
modify it under the terms of the GNU General Public License
of the License , or ( at your option ) any later version .
You should have received a copy of the GNU General Public License
Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
Adapted from SBCL .
(in-package #:system)
(defun compound-object-p (x)
(or (consp x)
(typep x 'structure-object)
(typep x 'standard-object)
(typep x '(array t *))))
the block named NIL .
(defmacro punt-print-if-too-long (index stream)
`(when (and (not *print-readably*)
*print-length*
(>= ,index *print-length*))
(write-string "..." ,stream)
(return)))
(defun output-integer (integer stream)
(if (xp::xp-structure-p stream)
(let ((s (sys::%write-to-string integer)))
(xp::write-string++ s stream 0 (length s)))
(%output-object integer stream)))
(defun output-list (list stream)
(cond ((and (null *print-readably*)
*print-level*
(>= *current-print-level* *print-level*))
(write-char #\# stream))
(t
(let ((*current-print-level* (1+ *current-print-level*)))
(write-char #\( stream)
(let ((*current-print-length* 0)
(list list))
(loop
(punt-print-if-too-long *current-print-length* stream)
(output-object (pop list) stream)
(unless list
(return))
(when (or (atom list)
(check-for-circularity list))
(write-string " . " stream)
(output-object list stream)
(return))
(write-char #\space stream)
(incf *current-print-length*)))
(write-char #\) stream))))
list)
(defun output-terse-array (array stream)
(let ((*print-level* nil)
(*print-length* nil))
(print-unreadable-object (array stream :type t :identity t))))
(defun array-readably-printable-p (array)
(and (eq (array-element-type array) t)
(let ((zero (position 0 (array-dimensions array)))
(number (position 0 (array-dimensions array)
:test (complement #'eql)
:from-end t)))
(or (null zero) (null number) (> zero number)))))
(defun output-vector (vector stream)
(declare (vector vector))
(cond ((stringp vector)
(assert nil)
(sys::%output-object vector stream))
((not (or *print-array* *print-readably*))
(output-terse-array vector stream))
((bit-vector-p vector)
(assert nil)
(sys::%output-object vector stream))
(t
(when (and *print-readably*
(not (array-readably-printable-p vector)))
(error 'print-not-readable :object vector))
(cond ((and (null *print-readably*)
*print-level*
(>= *current-print-level* *print-level*))
(write-char #\# stream))
(t
(let ((*current-print-level* (1+ *current-print-level*)))
(write-string "#(" stream)
(dotimes (i (length vector))
(unless (zerop i)
(write-char #\space stream))
(punt-print-if-too-long i stream)
(output-object (aref vector i) stream))
(write-string ")" stream))))))
vector)
(defun output-ugly-object (object stream)
(cond ((consp object)
(output-list object stream))
((and (vectorp object)
(not (stringp object))
(not (bit-vector-p object)))
(output-vector object stream))
((structure-object-p object)
(cond
((and (null *print-readably*)
*print-level*
(>= *current-print-level* *print-level*))
(write-char #\# stream))
(t
(print-object object stream))))
((standard-object-p object)
(print-object object stream))
((java::java-object-p object)
(print-object object stream))
((xp::xp-structure-p stream)
(let ((s (sys::%write-to-string object)))
(xp::write-string++ s stream 0 (length s))))
((functionp object)
(print-object object stream))
(t
(%output-object object stream))))
find an object for the first time , and a 0 when we encounter an
object a second time around . When we are actually printing , the 0
entries get changed to the actual marker value when they are first
(defvar *circularity-hash-table* nil)
When NIL , we are just looking for circularities . After we have
(defvar *circularity-counter* nil)
in the # n= and # n # noise is assigned at this time .
about when to use # n= and when to use # n # . If this returns non - NIL
when is true , then you must call HANDLE - CIRCULARITY on it .
If CHECK - FOR - CIRCULARITY returns : INITIATE as the second value ,
(defun check-for-circularity (object &optional assign)
(cond ((null *print-circle*)
nil)
((null *circularity-hash-table*)
(values nil :initiate))
((null *circularity-counter*)
(ecase (gethash object *circularity-hash-table*)
((nil)
(setf (gethash object *circularity-hash-table*) t)
nil)
((t)
second encounter
(setf (gethash object *circularity-hash-table*) 0)
t)
(0
t)))
(t
(let ((value (gethash object *circularity-hash-table*)))
(case value
((nil t)
If NIL , we found an object that was n't there the
first time around . If T , this object appears exactly
different .
nil)
(0
(if assign
(let ((value (incf *circularity-counter*)))
(setf (gethash object *circularity-hash-table*) value)
value)
t))
(t
second or later occurrence
(- value)))))))
you should go ahead and print the object . If it returns NIL , then
(defun handle-circularity (marker stream)
(case marker
(:initiate
(let ((*print-circle* nil))
(error "trying to use CHECK-FOR-CIRCULARITY when ~
circularity checking isn't initiated")))
((t)
It 's a second ( or later ) reference to the object while we are
nil)
(t
( let ( ( * print - base * 10 )
(cond ((minusp marker)
(print-reference marker stream)
nil)
(t
( write - char # \= stream )
(print-label marker stream)
t)))))
(defun print-label (marker stream)
(write-char #\# stream)
(let ((*print-base* 10)
(*print-radix* nil))
(output-integer marker stream))
(write-char #\= stream))
(defun print-reference (marker stream)
(write-char #\# stream)
(let ((*print-base* 10)
(*print-radix* nil))
(output-integer (- marker) stream))
(write-char #\# stream))
Objects whose print representation identifies them EQLly do n't need to be
(defun uniquely-identified-by-print-p (x)
(or (numberp x)
(characterp x)
(and (symbolp x)
(symbol-package x))))
(defun %print-object (object stream)
(if *print-pretty*
(xp::output-pretty-object object stream)
(output-ugly-object object stream)))
(defun %check-object (object stream)
(multiple-value-bind (marker initiate)
(check-for-circularity object t)
(if (eq initiate :initiate)
(let ((*circularity-hash-table* (make-hash-table :test 'eq)))
(%check-object object (make-broadcast-stream))
(let ((*circularity-counter* 0))
(%check-object object stream)))
(if marker
(when (handle-circularity marker stream)
(%print-object object stream))
(%print-object object stream)))))
(defun output-object (object stream)
(cond ((or (not *print-circle*)
(uniquely-identified-by-print-p object))
(%print-object object stream))
((or *circularity-hash-table*
(compound-object-p object))
(%check-object object stream))
(t
(%print-object object stream)))
object)
(provide "PRINT")
|
d3fe2f89feac42820d2dbbd2f6666a4f28924f93ebe00a990be48a8240649567 | jarvinet/scheme | init.scm | ; This file is read on startup of my scheme implementation
(define (fib n)
(if (< n 2)
n
(+ (fib (- n 2))
(fib (- n 1)))))
| null | https://raw.githubusercontent.com/jarvinet/scheme/47633d7fc4d82d739a62ceec75c111f6549b1650/old/try3/init.scm | scheme | This file is read on startup of my scheme implementation |
(define (fib n)
(if (< n 2)
n
(+ (fib (- n 2))
(fib (- n 1)))))
|
65ae5c59abe429c4d05897f5f3828a4da645d2973a79fbd989b70c6711c0153a | nuprl/gradual-typing-performance | world.rkt | #lang typed/racket
(require "base-types.rkt")
(require benchmark-util)
(require/typed/check "bset.rkt"
[blocks-union (-> BSet BSet BSet)]
[blocks-max-x (-> BSet Real)]
[blocks-min-x (-> BSet Real)]
[blocks-max-y (-> BSet Real)])
(require/typed/check "tetras.rkt"
[tetra-move (-> Real Real Tetra Tetra)]
[tetra-rotate-ccw (-> Tetra Tetra)]
[tetra-rotate-cw (-> Tetra Tetra)]
[tetra-overlaps-blocks? (-> Tetra BSet Boolean)]
[tetra-change-color (-> Tetra Color Tetra)])
(require/typed/check "aux.rkt"
[list-pick-random (-> (Listof Tetra) Tetra)]
[neg-1 Negative-Fixnum]
[tetras (Listof Tetra)])
(require/typed/check "elim.rkt"
[eliminate-full-rows (-> BSet BSet)])
(require/typed/check "consts.rkt"
[board-height Integer]
[board-width Integer])
(provide world-key-move
next-world
ghost-blocks)
#;
(provide/contract [world-key-move (WORLD/C string? . -> . WORLD/C)]
[next-world (WORLD/C . -> . WORLD/C)]
[ghost-blocks (WORLD/C . -> . BSET/C)])
Add the current tetra 's blocks onto the world 's block list ,
;; and create a new tetra.
(: touchdown (-> World World))
(define (touchdown w)
(world (list-pick-random tetras)
(eliminate-full-rows (blocks-union (tetra-blocks (world-tetra w))
(world-blocks w)))))
;; Take the current tetra and move it down until it lands.
(: world-jump-down (-> World World))
(define (world-jump-down w)
(cond [(landed? w) w]
[else (world-jump-down (world (tetra-move 0 1 (world-tetra w))
(world-blocks w)))]))
;; Has the current tetra landed on blocks?
I.e. , if we move the tetra down 1 , will it touch any existing blocks ?
(: landed-on-blocks? (-> World Boolean))
(define (landed-on-blocks? w)
(tetra-overlaps-blocks? (tetra-move 0 1 (world-tetra w))
(world-blocks w)))
;; Has the current tetra landed on the floor?
(: landed-on-floor? (-> World Boolean))
(define (landed-on-floor? w)
(= (blocks-max-y (tetra-blocks (world-tetra w)))
(sub1 board-height)))
;; Has the current tetra landed?
(: landed? (-> World Boolean))
(define (landed? w)
(or (landed-on-blocks? w)
(landed-on-floor? w)))
;; Step the world, either touchdown or move the tetra down on step.
(: next-world (-> World World))
(define (next-world w)
(cond [(landed? w) (touchdown w)]
[else (world (tetra-move 0 1 (world-tetra w))
(world-blocks w))]))
;; Make a world with the new tetra *IF* if doesn't lie on top of some other
;; block or lie off the board. Otherwise, no change.
(: try-new-tetra (-> World Tetra World))
(define (try-new-tetra w new-tetra)
(cond [(or (< (blocks-min-x (tetra-blocks new-tetra)) 0)
(>= (blocks-max-x (tetra-blocks new-tetra)) board-width)
(tetra-overlaps-blocks? new-tetra (world-blocks w)))
w]
[else (world new-tetra (world-blocks w))]))
Move the Tetra by the given X & Y displacement , but only if you can .
;; Otherwise stay put.
(: world-move (-> Real Real World World))
(define (world-move dx dy w)
(try-new-tetra w (tetra-move dx dy (world-tetra w))))
Rotate the Tetra 90 degrees counterclockwise , but only if you can .
;; Otherwise stay put.
(: world-rotate-ccw (-> World World))
(define (world-rotate-ccw w)
(try-new-tetra w (tetra-rotate-ccw (world-tetra w))))
Rotate the Tetra 90 degrees clockwise , but only if you can .
;; Otherwise stay put.
(: world-rotate-cw (-> World World))
(define (world-rotate-cw w)
(try-new-tetra w (tetra-rotate-cw (world-tetra w))))
;; Gray blocks representing where the current tetra would land.
(: ghost-blocks (-> World BSet))
(define (ghost-blocks w)
(tetra-blocks (tetra-change-color (world-tetra (world-jump-down w))
'gray)))
;; Move the world according to the given key event.
(: world-key-move (-> World String World))
(define (world-key-move w k)
(cond [(equal? k "left") (world-move neg-1 0 w)]
[(equal? k "right") (world-move 1 0 w)]
[(equal? k "down") (world-jump-down w)]
[(equal? k "a") (world-rotate-ccw w)]
[(equal? k "s") (world-rotate-cw w)]
[else w]))
| null | https://raw.githubusercontent.com/nuprl/gradual-typing-performance/35442b3221299a9cadba6810573007736b0d65d4/experimental/unsafe/tetris/typed/world.rkt | racket |
and create a new tetra.
Take the current tetra and move it down until it lands.
Has the current tetra landed on blocks?
Has the current tetra landed on the floor?
Has the current tetra landed?
Step the world, either touchdown or move the tetra down on step.
Make a world with the new tetra *IF* if doesn't lie on top of some other
block or lie off the board. Otherwise, no change.
Otherwise stay put.
Otherwise stay put.
Otherwise stay put.
Gray blocks representing where the current tetra would land.
Move the world according to the given key event. | #lang typed/racket
(require "base-types.rkt")
(require benchmark-util)
(require/typed/check "bset.rkt"
[blocks-union (-> BSet BSet BSet)]
[blocks-max-x (-> BSet Real)]
[blocks-min-x (-> BSet Real)]
[blocks-max-y (-> BSet Real)])
(require/typed/check "tetras.rkt"
[tetra-move (-> Real Real Tetra Tetra)]
[tetra-rotate-ccw (-> Tetra Tetra)]
[tetra-rotate-cw (-> Tetra Tetra)]
[tetra-overlaps-blocks? (-> Tetra BSet Boolean)]
[tetra-change-color (-> Tetra Color Tetra)])
(require/typed/check "aux.rkt"
[list-pick-random (-> (Listof Tetra) Tetra)]
[neg-1 Negative-Fixnum]
[tetras (Listof Tetra)])
(require/typed/check "elim.rkt"
[eliminate-full-rows (-> BSet BSet)])
(require/typed/check "consts.rkt"
[board-height Integer]
[board-width Integer])
(provide world-key-move
next-world
ghost-blocks)
(provide/contract [world-key-move (WORLD/C string? . -> . WORLD/C)]
[next-world (WORLD/C . -> . WORLD/C)]
[ghost-blocks (WORLD/C . -> . BSET/C)])
Add the current tetra 's blocks onto the world 's block list ,
(: touchdown (-> World World))
(define (touchdown w)
(world (list-pick-random tetras)
(eliminate-full-rows (blocks-union (tetra-blocks (world-tetra w))
(world-blocks w)))))
(: world-jump-down (-> World World))
(define (world-jump-down w)
(cond [(landed? w) w]
[else (world-jump-down (world (tetra-move 0 1 (world-tetra w))
(world-blocks w)))]))
I.e. , if we move the tetra down 1 , will it touch any existing blocks ?
(: landed-on-blocks? (-> World Boolean))
(define (landed-on-blocks? w)
(tetra-overlaps-blocks? (tetra-move 0 1 (world-tetra w))
(world-blocks w)))
(: landed-on-floor? (-> World Boolean))
(define (landed-on-floor? w)
(= (blocks-max-y (tetra-blocks (world-tetra w)))
(sub1 board-height)))
(: landed? (-> World Boolean))
(define (landed? w)
(or (landed-on-blocks? w)
(landed-on-floor? w)))
(: next-world (-> World World))
(define (next-world w)
(cond [(landed? w) (touchdown w)]
[else (world (tetra-move 0 1 (world-tetra w))
(world-blocks w))]))
(: try-new-tetra (-> World Tetra World))
(define (try-new-tetra w new-tetra)
(cond [(or (< (blocks-min-x (tetra-blocks new-tetra)) 0)
(>= (blocks-max-x (tetra-blocks new-tetra)) board-width)
(tetra-overlaps-blocks? new-tetra (world-blocks w)))
w]
[else (world new-tetra (world-blocks w))]))
Move the Tetra by the given X & Y displacement , but only if you can .
(: world-move (-> Real Real World World))
(define (world-move dx dy w)
(try-new-tetra w (tetra-move dx dy (world-tetra w))))
Rotate the Tetra 90 degrees counterclockwise , but only if you can .
(: world-rotate-ccw (-> World World))
(define (world-rotate-ccw w)
(try-new-tetra w (tetra-rotate-ccw (world-tetra w))))
Rotate the Tetra 90 degrees clockwise , but only if you can .
(: world-rotate-cw (-> World World))
(define (world-rotate-cw w)
(try-new-tetra w (tetra-rotate-cw (world-tetra w))))
(: ghost-blocks (-> World BSet))
(define (ghost-blocks w)
(tetra-blocks (tetra-change-color (world-tetra (world-jump-down w))
'gray)))
(: world-key-move (-> World String World))
(define (world-key-move w k)
(cond [(equal? k "left") (world-move neg-1 0 w)]
[(equal? k "right") (world-move 1 0 w)]
[(equal? k "down") (world-jump-down w)]
[(equal? k "a") (world-rotate-ccw w)]
[(equal? k "s") (world-rotate-cw w)]
[else w]))
|
f9323a4a7fab02a2e30ae1e6b4807b3b3a9256b8a8cdc78406f312bc789bff09 | Clojure2D/clojure2d-examples | mutual_attraction_2_8.clj | (ns NOC.ch02.mutual-attraction-2-8
(:require [clojure2d.color :as c]
[clojure2d.core :refer :all]
[fastmath.vector :as v]
[fastmath.core :as m]
[fastmath.random :as r])
(:import fastmath.vector.Vec2))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(def ^:const ^int w 640)
(def ^:const ^int h 360)
(def ^:const ^double g 0.4)
(def ^:const ^int movers-no 20)
(defprotocol MoverProto
(attract [m v m2])
(update-and-draw [m other canvas]))
(deftype Mover [^Vec2 position
^Vec2 velocity
^double mass
^long id]
MoverProto
(attract [m1 v m2]
(if (== id (.id ^Mover m2))
v
(let [force (v/sub (.position ^Mover m2) position)
distance (m/constrain (v/mag force) 5.0 25.0)
strength (/ (* g mass (.mass ^Mover m2)) (m/sq distance))]
(v/add v (-> force
v/normalize
(v/mult strength)
(v/div mass))))))
(update-and-draw [m other canvas]
(let [acceleration (reduce (partial attract m) (Vec2. 0.0 0.0) other)
nvelocity (v/add velocity acceleration)
^Vec2 nposition (v/add position nvelocity)
s (* 24.0 mass)]
(-> canvas
(set-color :black 100)
(ellipse (.x nposition) (.y nposition) s s)
(set-stroke 2.0)
(set-color :black)
(ellipse (.x nposition) (.y nposition) s s true))
(Mover. nposition nvelocity mass id))))
(def counter (make-counter))
(defn make-mover
""
[x y m]
(Mover. (Vec2. x y) (Vec2. 0 0) m (counter)))
(defn draw
""
[canvas window framecount state]
(let [movers (or state (repeatedly movers-no #(make-mover (r/drand w) (r/drand h) (r/drand 0.1 2))))]
(set-background canvas :white)
(mapv #(update-and-draw % movers canvas) movers)))
(def window (show-window (canvas w h) "Mutual attraction 2_8" draw))
| null | https://raw.githubusercontent.com/Clojure2D/clojure2d-examples/9de82f5ac0737b7e78e07a17cf03ac577d973817/src/NOC/ch02/mutual_attraction_2_8.clj | clojure | (ns NOC.ch02.mutual-attraction-2-8
(:require [clojure2d.color :as c]
[clojure2d.core :refer :all]
[fastmath.vector :as v]
[fastmath.core :as m]
[fastmath.random :as r])
(:import fastmath.vector.Vec2))
(set! *warn-on-reflection* true)
(set! *unchecked-math* :warn-on-boxed)
(def ^:const ^int w 640)
(def ^:const ^int h 360)
(def ^:const ^double g 0.4)
(def ^:const ^int movers-no 20)
(defprotocol MoverProto
(attract [m v m2])
(update-and-draw [m other canvas]))
(deftype Mover [^Vec2 position
^Vec2 velocity
^double mass
^long id]
MoverProto
(attract [m1 v m2]
(if (== id (.id ^Mover m2))
v
(let [force (v/sub (.position ^Mover m2) position)
distance (m/constrain (v/mag force) 5.0 25.0)
strength (/ (* g mass (.mass ^Mover m2)) (m/sq distance))]
(v/add v (-> force
v/normalize
(v/mult strength)
(v/div mass))))))
(update-and-draw [m other canvas]
(let [acceleration (reduce (partial attract m) (Vec2. 0.0 0.0) other)
nvelocity (v/add velocity acceleration)
^Vec2 nposition (v/add position nvelocity)
s (* 24.0 mass)]
(-> canvas
(set-color :black 100)
(ellipse (.x nposition) (.y nposition) s s)
(set-stroke 2.0)
(set-color :black)
(ellipse (.x nposition) (.y nposition) s s true))
(Mover. nposition nvelocity mass id))))
(def counter (make-counter))
(defn make-mover
""
[x y m]
(Mover. (Vec2. x y) (Vec2. 0 0) m (counter)))
(defn draw
""
[canvas window framecount state]
(let [movers (or state (repeatedly movers-no #(make-mover (r/drand w) (r/drand h) (r/drand 0.1 2))))]
(set-background canvas :white)
(mapv #(update-and-draw % movers canvas) movers)))
(def window (show-window (canvas w h) "Mutual attraction 2_8" draw))
| |
86d9e84cf2e75cd65e555356397cb708bd83665ee547ca8059a0c07e176f005c | namin/logically | lf1.clj | (ns logically.exp.lf1
(:refer-clojure :exclude [==])
(:use [clojure.core.logic :exclude [is] :as l]))
(defn solve-for* [clause]
(letfn [(solve [goals]
(conde
[(== goals ())]
[(fresh [g gs c a b]
(conso g gs goals)
(== g [c a])
(clause c a b)
(solve b)
(solve gs))]))]
solve))
(defn solve-for [clause]
(let [solver* (solve-for* clause)]
(fn [c a] (solver* [[c a]]))))
(def typ 'typ)
(defmacro defc [name & clauses]
(let [c (gensym "c")
a (gensym "a")
b (gensym "b")]
`(do
(declare ~(symbol (str `~name '*)))
~@(map (fn [[tag [ps & spec]]]
(let [vspec (vec spec)
tspec (vec (map (fn [s] (if (vector? s) (nth s 1) s)) vspec))
n (count tspec)
ty (nth tspec (- n 1))
ts (vec (map (fn [x s] (if (vector? s) (nth s 0) (gensym "x"))) (range 0 (- n 1)) vspec))]
`(do
(defn ~tag [~@ps ~@ts] (cons '~tag [~@ps ~@ts]))
(defn ~(symbol (str `~tag '-typechecks)) []
~(if (= ty typ)
`true
`(not
(empty?
(run 1 [q#]
(fresh [~@ps ~@ts]
(~(symbol (str `~name '*))
[[~ty ~typ] ~@(for [i (range 0 (- n 1))
:let [si (tspec i)]]
`[~si ~typ])])))))))
(defn ~(symbol (str `~tag '-clause)) [~c ~a ~b]
(fresh [~@ps ~@ts]
(== ~c (cons '~tag [~@ps ~@ts]))
(== ~a ~ty)
(== ~b [~@(for [i (range 0 (- n 1))
:let [ti (ts i)
si (tspec i)]]
`[~ti ~si])]))))))
clauses)
(defn ~(symbol (str `~name '-ok)) []
(and
~@(map (fn [[tag _]]
`(if (~(symbol (str `~tag '-typechecks))) true
(do (println (str '~tag " clause does not typecheck."))
false)))
clauses)))
(defn ~(symbol (str `~name '-clauses)) [~c ~a ~b]
(conde
~@(map (fn [[tag _]]
`[(~(symbol (str `~tag '-clause)) ~c ~a ~b)])
clauses)))
(def ~(symbol (str `~name '*)) (solve-for* ~(symbol (str `~name '-clauses))))
(def ~name (solve-for ~(symbol (str `~name '-clauses)))))))
| null | https://raw.githubusercontent.com/namin/logically/49e814e04ff0f5f20efa75122c0b869e400487ac/src/logically/exp/lf1.clj | clojure | (ns logically.exp.lf1
(:refer-clojure :exclude [==])
(:use [clojure.core.logic :exclude [is] :as l]))
(defn solve-for* [clause]
(letfn [(solve [goals]
(conde
[(== goals ())]
[(fresh [g gs c a b]
(conso g gs goals)
(== g [c a])
(clause c a b)
(solve b)
(solve gs))]))]
solve))
(defn solve-for [clause]
(let [solver* (solve-for* clause)]
(fn [c a] (solver* [[c a]]))))
(def typ 'typ)
(defmacro defc [name & clauses]
(let [c (gensym "c")
a (gensym "a")
b (gensym "b")]
`(do
(declare ~(symbol (str `~name '*)))
~@(map (fn [[tag [ps & spec]]]
(let [vspec (vec spec)
tspec (vec (map (fn [s] (if (vector? s) (nth s 1) s)) vspec))
n (count tspec)
ty (nth tspec (- n 1))
ts (vec (map (fn [x s] (if (vector? s) (nth s 0) (gensym "x"))) (range 0 (- n 1)) vspec))]
`(do
(defn ~tag [~@ps ~@ts] (cons '~tag [~@ps ~@ts]))
(defn ~(symbol (str `~tag '-typechecks)) []
~(if (= ty typ)
`true
`(not
(empty?
(run 1 [q#]
(fresh [~@ps ~@ts]
(~(symbol (str `~name '*))
[[~ty ~typ] ~@(for [i (range 0 (- n 1))
:let [si (tspec i)]]
`[~si ~typ])])))))))
(defn ~(symbol (str `~tag '-clause)) [~c ~a ~b]
(fresh [~@ps ~@ts]
(== ~c (cons '~tag [~@ps ~@ts]))
(== ~a ~ty)
(== ~b [~@(for [i (range 0 (- n 1))
:let [ti (ts i)
si (tspec i)]]
`[~ti ~si])]))))))
clauses)
(defn ~(symbol (str `~name '-ok)) []
(and
~@(map (fn [[tag _]]
`(if (~(symbol (str `~tag '-typechecks))) true
(do (println (str '~tag " clause does not typecheck."))
false)))
clauses)))
(defn ~(symbol (str `~name '-clauses)) [~c ~a ~b]
(conde
~@(map (fn [[tag _]]
`[(~(symbol (str `~tag '-clause)) ~c ~a ~b)])
clauses)))
(def ~(symbol (str `~name '*)) (solve-for* ~(symbol (str `~name '-clauses))))
(def ~name (solve-for ~(symbol (str `~name '-clauses)))))))
| |
75d30c7c529de83fbf2c19c99c5e60ac8876097c2bf98a88bd6a86616993cd7a | bhaskara/programmable-reinforcement-learning | cache.lisp |
(defpackage cache
(:documentation "Functions to do with caches.
Types
-----
<fifo-cache>
Operations
----------
add
lookup
reset
reset-stats
size")
(:use
utils
cl)
(:export
<fifo-cache>
add
size
reset-stats
reset
lookup))
(in-package cache)
(defclass <fifo-cache> ()
((v :type (simple-array * 1) :reader v :writer set-v)
(test :type function :initarg :test :initform #'equalp :reader test)
(size :initarg :size :type fixnum :reader size)
(search-ptr :type fixnum :initform 0 :accessor search-ptr)
(ptr :type fixnum :initform 0 :accessor cache-ptr))
(:documentation
"A simple implementation of FIFO caches. Create using make-instance.
Conceptually, uses a circular array to store the items and looks them up by searching through the array.
Initargs
:size - size of cache. Required.
:test - test for equality. Optional, #'equalp by default."))
(defvar *hits* 0)
(defvar *misses* 0)
(defmethod reset ((c <fifo-cache>))
(fill (v c) nil)
(setf (cache-ptr c) 0)
(values))
(defmethod initialize-instance :after ((c <fifo-cache>) &rest args
&key (size nil))
(assert (integerp size))
(set-v (make-array size) c))
(defun add (item val c)
(setf (svref (v c) (cache-ptr c)) (cons item val)
(search-ptr c) (cache-ptr c))
(if (>= (cache-ptr c) (1- (size c)))
(setf (cache-ptr c) 0)
(incf (cache-ptr c)))
(values))
(defun lookup (item c)
(let* ((v (v c))
(pos
;; search starting from search-ptr
(or (position item v :test (test c) :key #'car :start (search-ptr c))
(position item v :test (test c) :key #'car :end (search-ptr c)))))
(if pos
(let ((val (cdr (svref v pos))))
(incf *hits*)
(setf (search-ptr c) pos) ;; set the search-ptr to pos
(values val t))
(progn (incf *misses*) (values nil nil)))))
(defun reset-stats ()
(setf *hits* 0
*misses* 0)) | null | https://raw.githubusercontent.com/bhaskara/programmable-reinforcement-learning/8afc98116a8f78163b3f86076498d84b3f596217/lisp/data-struct/cache.lisp | lisp | search starting from search-ptr
set the search-ptr to pos
|
(defpackage cache
(:documentation "Functions to do with caches.
Types
-----
<fifo-cache>
Operations
----------
add
lookup
reset
reset-stats
size")
(:use
utils
cl)
(:export
<fifo-cache>
add
size
reset-stats
reset
lookup))
(in-package cache)
(defclass <fifo-cache> ()
((v :type (simple-array * 1) :reader v :writer set-v)
(test :type function :initarg :test :initform #'equalp :reader test)
(size :initarg :size :type fixnum :reader size)
(search-ptr :type fixnum :initform 0 :accessor search-ptr)
(ptr :type fixnum :initform 0 :accessor cache-ptr))
(:documentation
"A simple implementation of FIFO caches. Create using make-instance.
Conceptually, uses a circular array to store the items and looks them up by searching through the array.
Initargs
:size - size of cache. Required.
:test - test for equality. Optional, #'equalp by default."))
(defvar *hits* 0)
(defvar *misses* 0)
(defmethod reset ((c <fifo-cache>))
(fill (v c) nil)
(setf (cache-ptr c) 0)
(values))
(defmethod initialize-instance :after ((c <fifo-cache>) &rest args
&key (size nil))
(assert (integerp size))
(set-v (make-array size) c))
(defun add (item val c)
(setf (svref (v c) (cache-ptr c)) (cons item val)
(search-ptr c) (cache-ptr c))
(if (>= (cache-ptr c) (1- (size c)))
(setf (cache-ptr c) 0)
(incf (cache-ptr c)))
(values))
(defun lookup (item c)
(let* ((v (v c))
(pos
(or (position item v :test (test c) :key #'car :start (search-ptr c))
(position item v :test (test c) :key #'car :end (search-ptr c)))))
(if pos
(let ((val (cdr (svref v pos))))
(incf *hits*)
(values val t))
(progn (incf *misses*) (values nil nil)))))
(defun reset-stats ()
(setf *hits* 0
*misses* 0)) |
6e9e7cc565c8a00685771ef11631ae39b171b0dec789525a41553003504e27a1 | chaoxu/fancy-walks | A.hs | {-# OPTIONS_GHC -O2 #-}
import Data.List
import Data.Maybe
import Data.Char
import Data.Array
import Data.Int
import Data.Ratio
import Data.Bits
import Data.Function
import Data.Ord
import Control.Monad.State
import Control.Monad
import Control.Applicative
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import qualified Data.Foldable as F
import Data.Graph
parseInput = do
cas <- readInt
replicateM cas $ do
n <- readInt
a <- replicateM n (BS.unpack <$> readString)
return (n,a)
where
readInt = state $ fromJust . BS.readInt . BS.dropWhile isSpace
readInteger = state $ fromJust . BS.readInteger . BS.dropWhile isSpace
readString = state $ BS.span (not . isSpace) . BS.dropWhile isSpace
readLine = state $ BS.span (not . isEoln) . BS.dropWhile isEoln
isEoln ch = ch == '\r' || ch == '\n'
main = do
input <- evalState parseInput <$> BS.getContents
forM_ (zip [1..] input) $ \(cas, params) -> do
putStrLn $ "Case #" ++ show cas ++ ": " ++ show (solve params)
solve (n, a') = go a
where
a = map last1 a'
last1 str = if elem '1' str then 1 + last1 (tail str) else 0
go [] = 0
go xs
| hi == [] = error "impossible"
| otherwise = (length lo) + go nxs
where
(lo, hi) = span (>1) xs
nxs = map (max 0.pred) (lo ++ tail hi)
| null | https://raw.githubusercontent.com/chaoxu/fancy-walks/952fcc345883181144131f839aa61e36f488998d/code.google.com/codejam/Google%20Code%20Jam%202009/Round%202/A.hs | haskell | # OPTIONS_GHC -O2 # |
import Data.List
import Data.Maybe
import Data.Char
import Data.Array
import Data.Int
import Data.Ratio
import Data.Bits
import Data.Function
import Data.Ord
import Control.Monad.State
import Control.Monad
import Control.Applicative
import Data.ByteString.Char8 (ByteString)
import qualified Data.ByteString.Char8 as BS
import Data.Set (Set)
import qualified Data.Set as Set
import Data.Map (Map)
import qualified Data.Map as Map
import Data.IntMap (IntMap)
import qualified Data.IntMap as IntMap
import Data.Sequence (Seq)
import qualified Data.Sequence as Seq
import qualified Data.Foldable as F
import Data.Graph
parseInput = do
cas <- readInt
replicateM cas $ do
n <- readInt
a <- replicateM n (BS.unpack <$> readString)
return (n,a)
where
readInt = state $ fromJust . BS.readInt . BS.dropWhile isSpace
readInteger = state $ fromJust . BS.readInteger . BS.dropWhile isSpace
readString = state $ BS.span (not . isSpace) . BS.dropWhile isSpace
readLine = state $ BS.span (not . isEoln) . BS.dropWhile isEoln
isEoln ch = ch == '\r' || ch == '\n'
main = do
input <- evalState parseInput <$> BS.getContents
forM_ (zip [1..] input) $ \(cas, params) -> do
putStrLn $ "Case #" ++ show cas ++ ": " ++ show (solve params)
solve (n, a') = go a
where
a = map last1 a'
last1 str = if elem '1' str then 1 + last1 (tail str) else 0
go [] = 0
go xs
| hi == [] = error "impossible"
| otherwise = (length lo) + go nxs
where
(lo, hi) = span (>1) xs
nxs = map (max 0.pred) (lo ++ tail hi)
|
5129a34dc004521231844b7cfd52d24c201d7a30d3c0b16996fc96501991ab51 | INRIA/zelus | basics.ml | let stdform_of_float pref suf f =
Printf.sprintf
(Scanf.format_from_string
(Printf.sprintf "%%%d.%de" pref suf)
"%e")
f
let output_line output_item out ss =
let pr s = (output_string out "\t"; output_item out s) in
if List.length ss = 0 then ()
else (output_item out (List.hd ss); List.iter pr (List.tl ss));
output_string out "\n"
let output_strings out ss = output_line output_string out ss
let output_quoted_strings out ss =
output_line (fun oc s -> (Printf.fprintf oc "\"%s\"" s; flush oc)) out ss
let output_floats out ss =
output_line (fun oc s -> (Printf.fprintf oc "%.15e" s; flush oc)) out ss
Compare two floats for equality , see :
* -software.com/papers/comparingfloats/comparingfloats.htm
* -software.com/papers/comparingfloats/comparingfloats.htm
*)
let float_eq max_relative_error f1 f2 =
if abs_float (f1 -. f2) < min_float
absolute error check for numbers around to zero
else
let rel_error =
if abs_float f1 > abs_float f2
then abs_float ((f1 -. f2) /. f1)
else abs_float ((f1 -. f2) /. f2)
in
(rel_error <= max_relative_error)
99.9999 % accuracy
let (=~=) = float_eq 0.000001
let on x y = x && y
let bad_sgn e = if e = 0.0 then 0.0 else if e > 0.0 then 1.0 else -1.0
let exit n = exit n
| null | https://raw.githubusercontent.com/INRIA/zelus/685428574b0f9100ad5a41bbaa416cd7a2506d5e/lib/std/basics.ml | ocaml | let stdform_of_float pref suf f =
Printf.sprintf
(Scanf.format_from_string
(Printf.sprintf "%%%d.%de" pref suf)
"%e")
f
let output_line output_item out ss =
let pr s = (output_string out "\t"; output_item out s) in
if List.length ss = 0 then ()
else (output_item out (List.hd ss); List.iter pr (List.tl ss));
output_string out "\n"
let output_strings out ss = output_line output_string out ss
let output_quoted_strings out ss =
output_line (fun oc s -> (Printf.fprintf oc "\"%s\"" s; flush oc)) out ss
let output_floats out ss =
output_line (fun oc s -> (Printf.fprintf oc "%.15e" s; flush oc)) out ss
Compare two floats for equality , see :
* -software.com/papers/comparingfloats/comparingfloats.htm
* -software.com/papers/comparingfloats/comparingfloats.htm
*)
let float_eq max_relative_error f1 f2 =
if abs_float (f1 -. f2) < min_float
absolute error check for numbers around to zero
else
let rel_error =
if abs_float f1 > abs_float f2
then abs_float ((f1 -. f2) /. f1)
else abs_float ((f1 -. f2) /. f2)
in
(rel_error <= max_relative_error)
99.9999 % accuracy
let (=~=) = float_eq 0.000001
let on x y = x && y
let bad_sgn e = if e = 0.0 then 0.0 else if e > 0.0 then 1.0 else -1.0
let exit n = exit n
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.