_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
12889ceb2b1c25d6a664a3bcf8b49c6d995bb57e892cc6cf90ec5abf0c13349b | takikawa/racket-ppa | gen-match.rkt | #lang racket/base
(require "patterns.rkt" "compiler.rkt"
syntax/stx syntax/parse racket/syntax
(for-template racket/base (only-in "runtime.rkt" match:error fail syntax-srclocs)))
(provide go go/one)
;; this transforms `match'-style clauses into ones acceptable to `go'
;; go : syntax syntax syntax -> syntax
(define (go/one parse stx expr clauses)
(define-syntax-class cl
#:description "a clause with a pattern and a result"
(pattern [p . rhs]
#:with res (syntax/loc this-syntax [(p) . rhs])))
(syntax-parse clauses
#:context stx
[(c:cl ...)
(go parse stx (quasisyntax/loc expr (#,expr))
#'(c.res ...))]))
;; this parses the clauses using parse, then compiles them
;; go : syntax syntax syntax -> syntax
(define (go parse stx es clauses)
(with-disappeared-uses
(syntax-parse clauses
#:context stx
[([pats . rhs] ...)
(unless (syntax->list es)
(raise-syntax-error 'match* "expected a sequence of expressions to match" es))
(define/with-syntax form-name
(syntax-case stx ()
[(fname . _)
(identifier? #'fname)
(syntax-e #'fname)]
[_ 'match]))
(define len (length (syntax->list es)))
(define srcloc-stx (datum->syntax #f 'srcloc stx))
(define/with-syntax (xs ...) (generate-temporaries es))
(define/with-syntax (exprs ...) es)
(define/with-syntax outer-fail (generate-temporary #'fail))
(define/with-syntax orig-expr (if (= 1 len) (stx-car #'(xs ...)) #'(list xs ...)))
(define/with-syntax raise-error
(quasisyntax/loc stx (match:error orig-expr (syntax-srclocs (quote-syntax #,srcloc-stx)) 'form-name)))
(define parsed-clauses
(for/list ([clause (syntax->list clauses)]
[pats (syntax->list #'(pats ...))]
[rhs (syntax->list #'(rhs ...))])
(unless (syntax->list pats)
(raise-syntax-error 'match* "expected a sequence of patterns" pats))
(define lp (length (syntax->list pats)))
(unless (= len lp)
(raise-syntax-error
'match (format "wrong number of match clauses, expected ~a and got ~a" len lp) pats))
(define (mk unm rhs)
(make-Row (for/list ([p (syntax->list pats)]) (parse p))
(syntax-property
(quasisyntax/loc stx
(let () . #,rhs))
'feature-profile:pattern-matching 'antimark)
unm null))
(syntax-parse rhs
[()
(raise-syntax-error
'match
"expected at least one expression on the right-hand side"
clause)]
[(#:when e)
(raise-syntax-error
'match
"expected at least one expression on the right-hand side after #:when clause"
clause)]
[(#:when e rest ...) (mk #f #'((if e (let () rest ...) (fail))))]
[(((~datum =>) unm:id) . rhs) (mk #'unm #'rhs)]
[(((~datum =>) unm) . rhs)
(raise-syntax-error 'match
"expected an identifier after `=>`"
#'unm)]
[_ (mk #f rhs)])))
(define/with-syntax body
(compile* (syntax->list #'(xs ...)) parsed-clauses #'outer-fail))
(define/with-syntax (exprs* ...)
(for/list ([e (in-list (syntax->list #'(exprs ...)))])
(syntax-property e 'feature-profile:pattern-matching 'antimark)))
(syntax-property
(quasisyntax/loc stx
(let ([xs exprs*] ...)
(let ([outer-fail
#,(syntax-property
#'(λ () raise-error)
'typechecker:called-in-tail-position #t)])
body)))
'feature-profile:pattern-matching #t)])))
| null | https://raw.githubusercontent.com/takikawa/racket-ppa/5f2031309f6359c61a8dfd1fec0b77bbf9fb78df/collects/racket/match/gen-match.rkt | racket | this transforms `match'-style clauses into ones acceptable to `go'
go : syntax syntax syntax -> syntax
this parses the clauses using parse, then compiles them
go : syntax syntax syntax -> syntax | #lang racket/base
(require "patterns.rkt" "compiler.rkt"
syntax/stx syntax/parse racket/syntax
(for-template racket/base (only-in "runtime.rkt" match:error fail syntax-srclocs)))
(provide go go/one)
(define (go/one parse stx expr clauses)
(define-syntax-class cl
#:description "a clause with a pattern and a result"
(pattern [p . rhs]
#:with res (syntax/loc this-syntax [(p) . rhs])))
(syntax-parse clauses
#:context stx
[(c:cl ...)
(go parse stx (quasisyntax/loc expr (#,expr))
#'(c.res ...))]))
(define (go parse stx es clauses)
(with-disappeared-uses
(syntax-parse clauses
#:context stx
[([pats . rhs] ...)
(unless (syntax->list es)
(raise-syntax-error 'match* "expected a sequence of expressions to match" es))
(define/with-syntax form-name
(syntax-case stx ()
[(fname . _)
(identifier? #'fname)
(syntax-e #'fname)]
[_ 'match]))
(define len (length (syntax->list es)))
(define srcloc-stx (datum->syntax #f 'srcloc stx))
(define/with-syntax (xs ...) (generate-temporaries es))
(define/with-syntax (exprs ...) es)
(define/with-syntax outer-fail (generate-temporary #'fail))
(define/with-syntax orig-expr (if (= 1 len) (stx-car #'(xs ...)) #'(list xs ...)))
(define/with-syntax raise-error
(quasisyntax/loc stx (match:error orig-expr (syntax-srclocs (quote-syntax #,srcloc-stx)) 'form-name)))
(define parsed-clauses
(for/list ([clause (syntax->list clauses)]
[pats (syntax->list #'(pats ...))]
[rhs (syntax->list #'(rhs ...))])
(unless (syntax->list pats)
(raise-syntax-error 'match* "expected a sequence of patterns" pats))
(define lp (length (syntax->list pats)))
(unless (= len lp)
(raise-syntax-error
'match (format "wrong number of match clauses, expected ~a and got ~a" len lp) pats))
(define (mk unm rhs)
(make-Row (for/list ([p (syntax->list pats)]) (parse p))
(syntax-property
(quasisyntax/loc stx
(let () . #,rhs))
'feature-profile:pattern-matching 'antimark)
unm null))
(syntax-parse rhs
[()
(raise-syntax-error
'match
"expected at least one expression on the right-hand side"
clause)]
[(#:when e)
(raise-syntax-error
'match
"expected at least one expression on the right-hand side after #:when clause"
clause)]
[(#:when e rest ...) (mk #f #'((if e (let () rest ...) (fail))))]
[(((~datum =>) unm:id) . rhs) (mk #'unm #'rhs)]
[(((~datum =>) unm) . rhs)
(raise-syntax-error 'match
"expected an identifier after `=>`"
#'unm)]
[_ (mk #f rhs)])))
(define/with-syntax body
(compile* (syntax->list #'(xs ...)) parsed-clauses #'outer-fail))
(define/with-syntax (exprs* ...)
(for/list ([e (in-list (syntax->list #'(exprs ...)))])
(syntax-property e 'feature-profile:pattern-matching 'antimark)))
(syntax-property
(quasisyntax/loc stx
(let ([xs exprs*] ...)
(let ([outer-fail
#,(syntax-property
#'(λ () raise-error)
'typechecker:called-in-tail-position #t)])
body)))
'feature-profile:pattern-matching #t)])))
|
6b1544393a2352d255e7491d056dc2eaddb416ab18992617501fe44cbb8cc898 | pixlsus/registry.gimp.org_static | tapered_stroke_along_path.scm | ; tapered_stroke_along_path.scm
by
;
Version 1.0 ( 20080926 )
; Description
;
; paints a tapered stroke along the path
;
; License:
;
; This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
The GNU Public License is available at
;
(define (tapered_stroke_along_path img inLayer inPath inStartWidth inEndWidth inSpacing inCurve)
(let*
(
(width (car (gimp-image-width img)))
(height (car (gimp-image-height img)))
(brushTemp 0)
(varRadius (/ inStartWidth 2.0))
(varFirstStroke (aref (cadr (gimp-vectors-get-strokes inPath)) 0))
(varPathLength (car (gimp-vectors-stroke-get-length inPath varFirstStroke 1)))
(varCounter 0)
(*newpoint* (cons-array 2 'double))
(varTemp 0)
(varRepaint 0)
)
; it begins here
(gimp-context-push)
(gimp-image-undo-group-start img)
;logging
;(gimp-message-set-handler ERROR-CONSOLE)
;(gimp-message-set-handler CONSOLE)
;(gimp-message-set-handler MESSAGE-BOX)
or start GIMP wwith " gimp --console - messages " to spawn a console box
;then use this:
( gimp - message " foobar " )
;testing for functions defined
( if ( defined ? ' plug - in - shift ) ( gimp - message " It Exists " ) ( gimp - message " Does nt Exist " ) )
;Uses:
;gimp-vectors-stroke-get-point-at-dist
;gimp-vectors-stroke-get-length
;
;Set up Brush
(set! brushTemp (car (gimp-brush-new "TaperedStrokeBrush")))
(gimp-brush-set-shape brushTemp BRUSH-GENERATED-CIRCLE)
(gimp-brush-set-hardness brushTemp 1)
(gimp-brush-set-radius brushTemp varRadius)
(gimp-brush-set-spacing brushTemp inSpacing)
(gimp-brush-set-spikes brushTemp 2)
(gimp-brush-set-aspect-ratio brushTemp 1)
(gimp-brush-set-angle brushTemp 0)
(gimp-context-set-brush brushTemp)
;walk the path
(while (<= varCounter varPathLength)
(set! varTemp (gimp-vectors-stroke-get-point-at-dist inPath varFirstStroke varCounter 1))
(if (and (<> (list-ref varTemp 0) 0) (<> (list-ref varTemp 1) 0))
(begin
(aset *newpoint* 0 (list-ref varTemp 0)) ; set the paint array
(aset *newpoint* 1 (list-ref varTemp 1))
(gimp-paintbrush-default inLayer 2 *newpoint*) ; paint point with paintbrush
)
)
0.02 is 2/100 to turn the inSpacing to a percent
(set! varRadius (/ (+ (* (pow (/ varCounter varPathLength) inCurve) (- inEndWidth inStartWidth)) inStartWidth) 2.0))
(set! varRepaint (+ varRepaint 1))
(if (> varRepaint 25)
(begin
(set! varRepaint 0)
(gimp-displays-flush)
(gimp-progress-update (/ varCounter varPathLength))
)
)
(gimp-brush-set-radius brushTemp varRadius)
)
(gimp-brush-delete brushTemp)
;done
(gimp-image-undo-group-end img)
(gimp-progress-end)
(gimp-displays-flush)
(gimp-context-pop)
)
)
(script-fu-register "tapered_stroke_along_path"
"<Image>/Edit/Tapered Stroke Path..."
"Paint a Tapering Stroke Along the Path in the Current Colour"
"Rob Antonishen"
"Rob Antonishen"
"Sept 2008"
"RGB* GRAY*"
SF-IMAGE "image" 0
SF-DRAWABLE "drawable" 0
SF-VECTORS "Path to Stroke" -1
SF-ADJUSTMENT "Stroke Start Width" (list 20 0.2 100 1 10 1 SF-SLIDER)
SF-ADJUSTMENT "Stroke End Width" (list 1.0 0.2 200 1 10 1 SF-SLIDER)
SF-ADJUSTMENT "Brush Spacing" (list 20 0.1 200 1 10 1 SF-SLIDER)
SF-ADJUSTMENT "Taper Exponent (1=linear)" (list 5 0.2 10 1 1 1 SF-SPINNER)
)
| null | https://raw.githubusercontent.com/pixlsus/registry.gimp.org_static/ffcde7400f402728373ff6579947c6ffe87d1a5e/registry.gimp.org/files/tapered_stroke_along_path.scm | scheme | tapered_stroke_along_path.scm
Description
paints a tapered stroke along the path
License:
This program is free software; you can redistribute it and/or modify
either version 2 of the License , or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
it begins here
logging
(gimp-message-set-handler ERROR-CONSOLE)
(gimp-message-set-handler CONSOLE)
(gimp-message-set-handler MESSAGE-BOX)
then use this:
testing for functions defined
Uses:
gimp-vectors-stroke-get-point-at-dist
gimp-vectors-stroke-get-length
Set up Brush
walk the path
set the paint array
paint point with paintbrush
done
| by
Version 1.0 ( 20080926 )
it under the terms of the GNU General Public License as published by
The GNU Public License is available at
(define (tapered_stroke_along_path img inLayer inPath inStartWidth inEndWidth inSpacing inCurve)
(let*
(
(width (car (gimp-image-width img)))
(height (car (gimp-image-height img)))
(brushTemp 0)
(varRadius (/ inStartWidth 2.0))
(varFirstStroke (aref (cadr (gimp-vectors-get-strokes inPath)) 0))
(varPathLength (car (gimp-vectors-stroke-get-length inPath varFirstStroke 1)))
(varCounter 0)
(*newpoint* (cons-array 2 'double))
(varTemp 0)
(varRepaint 0)
)
(gimp-context-push)
(gimp-image-undo-group-start img)
or start GIMP wwith " gimp --console - messages " to spawn a console box
( gimp - message " foobar " )
( if ( defined ? ' plug - in - shift ) ( gimp - message " It Exists " ) ( gimp - message " Does nt Exist " ) )
(set! brushTemp (car (gimp-brush-new "TaperedStrokeBrush")))
(gimp-brush-set-shape brushTemp BRUSH-GENERATED-CIRCLE)
(gimp-brush-set-hardness brushTemp 1)
(gimp-brush-set-radius brushTemp varRadius)
(gimp-brush-set-spacing brushTemp inSpacing)
(gimp-brush-set-spikes brushTemp 2)
(gimp-brush-set-aspect-ratio brushTemp 1)
(gimp-brush-set-angle brushTemp 0)
(gimp-context-set-brush brushTemp)
(while (<= varCounter varPathLength)
(set! varTemp (gimp-vectors-stroke-get-point-at-dist inPath varFirstStroke varCounter 1))
(if (and (<> (list-ref varTemp 0) 0) (<> (list-ref varTemp 1) 0))
(begin
(aset *newpoint* 1 (list-ref varTemp 1))
)
)
0.02 is 2/100 to turn the inSpacing to a percent
(set! varRadius (/ (+ (* (pow (/ varCounter varPathLength) inCurve) (- inEndWidth inStartWidth)) inStartWidth) 2.0))
(set! varRepaint (+ varRepaint 1))
(if (> varRepaint 25)
(begin
(set! varRepaint 0)
(gimp-displays-flush)
(gimp-progress-update (/ varCounter varPathLength))
)
)
(gimp-brush-set-radius brushTemp varRadius)
)
(gimp-brush-delete brushTemp)
(gimp-image-undo-group-end img)
(gimp-progress-end)
(gimp-displays-flush)
(gimp-context-pop)
)
)
(script-fu-register "tapered_stroke_along_path"
"<Image>/Edit/Tapered Stroke Path..."
"Paint a Tapering Stroke Along the Path in the Current Colour"
"Rob Antonishen"
"Rob Antonishen"
"Sept 2008"
"RGB* GRAY*"
SF-IMAGE "image" 0
SF-DRAWABLE "drawable" 0
SF-VECTORS "Path to Stroke" -1
SF-ADJUSTMENT "Stroke Start Width" (list 20 0.2 100 1 10 1 SF-SLIDER)
SF-ADJUSTMENT "Stroke End Width" (list 1.0 0.2 200 1 10 1 SF-SLIDER)
SF-ADJUSTMENT "Brush Spacing" (list 20 0.1 200 1 10 1 SF-SLIDER)
SF-ADJUSTMENT "Taper Exponent (1=linear)" (list 5 0.2 10 1 1 1 SF-SPINNER)
)
|
6be9d3da14f88b37a1c0b3558649191e96cb225c6741a33956f2cc197b9a90d5 | prg-titech/Kani-CUDA | profile1.rkt | 0 0 0 0 0 0 0 0 0 0 3 3 9 9 3
0 0 0 1 0 0 0 0 0 0 3 3 9 9 3
0 0 0 2 0 0 0 0 0 0 3 3 9 9 3
0 0 0 3 0 0 0 0 0 0 3 3 9 9 3
0 0 0 4 0 0 0 0 0 0 3 3 9 9 3
0 0 0 5 0 0 0 0 0 0 3 3 9 9 3
0 0 0 6 0 0 0 0 0 0 3 3 9 9 3
0 0 0 7 0 0 0 0 0 0 3 3 9 9 3
0 0 0 8 0 0 0 0 0 0 3 3 9 9 3
0 0 0 N 0 0 0 0 0 0 3 3 9 9 3
1 0 1 0 1 0 1 1 0 1 3 3 9 9 3
1 0 1 1 1 0 1 1 0 1 3 3 9 9 3
1 0 1 2 1 0 1 1 0 1 3 3 9 9 3
1 0 1 3 1 0 1 1 0 1 3 3 9 9 3
1 0 1 4 1 0 1 1 0 1 3 3 9 9 3
1 0 1 5 1 0 1 1 0 1 3 3 9 9 3
1 0 1 6 1 0 1 1 0 1 3 3 9 9 3
1 0 1 7 1 0 1 1 0 1 3 3 9 9 3
1 0 1 8 1 0 1 1 0 1 3 3 9 9 3
1 0 1 N 1 0 1 1 0 1 3 3 9 9 3
2 0 2 0 2 0 2 2 0 2 3 3 9 9 3
2 0 2 1 2 0 2 2 0 2 3 3 9 9 3
2 0 2 2 2 0 2 2 0 2 3 3 9 9 3
2 0 2 3 2 0 2 2 0 2 3 3 9 9 3
2 0 2 4 2 0 2 2 0 2 3 3 9 9 3
2 0 2 5 2 0 2 2 0 2 3 3 9 9 3
2 0 2 6 2 0 2 2 0 2 3 3 9 9 3
2 0 2 7 2 0 2 2 0 2 3 3 9 9 3
2 0 2 8 2 0 2 2 0 2 3 3 9 9 3
2 0 2 N 2 0 2 2 0 2 3 3 9 9 3
3 0 9 0 0 1 9 0 1 3 3 3 9 9 3
3 0 9 1 0 1 9 0 1 3 3 3 9 9 3
3 0 9 2 0 1 9 0 1 3 3 3 9 9 3
3 0 9 3 0 1 9 0 1 3 3 3 9 9 3
3 0 9 4 0 1 9 0 1 3 3 3 9 9 3
3 0 9 5 0 1 9 0 1 3 3 3 9 9 3
3 0 9 6 0 1 9 0 1 3 3 3 9 9 3
3 0 9 7 0 1 9 0 1 3 3 3 9 9 3
3 0 9 8 0 1 9 0 1 3 3 3 9 9 3
3 0 9 N 0 1 9 0 1 3 3 3 9 9 3
4 0 10 0 1 1 10 1 1 4 3 3 9 9 3
4 0 10 1 1 1 10 1 1 4 3 3 9 9 3
4 0 10 2 1 1 10 1 1 4 3 3 9 9 3
4 0 10 3 1 1 10 1 1 4 3 3 9 9 3
4 0 10 4 1 1 10 1 1 4 3 3 9 9 3
4 0 10 5 1 1 10 1 1 4 3 3 9 9 3
4 0 10 6 1 1 10 1 1 4 3 3 9 9 3
4 0 10 7 1 1 10 1 1 4 3 3 9 9 3
4 0 10 8 1 1 10 1 1 4 3 3 9 9 3
4 0 10 N 1 1 10 1 1 4 3 3 9 9 3
5 0 11 0 2 1 11 2 1 5 3 3 9 9 3
5 0 11 1 2 1 11 2 1 5 3 3 9 9 3
5 0 11 2 2 1 11 2 1 5 3 3 9 9 3
5 0 11 3 2 1 11 2 1 5 3 3 9 9 3
5 0 11 4 2 1 11 2 1 5 3 3 9 9 3
5 0 11 5 2 1 11 2 1 5 3 3 9 9 3
5 0 11 6 2 1 11 2 1 5 3 3 9 9 3
5 0 11 7 2 1 11 2 1 5 3 3 9 9 3
5 0 11 8 2 1 11 2 1 5 3 3 9 9 3
5 0 11 N 2 1 11 2 1 5 3 3 9 9 3
6 0 18 0 0 2 18 0 2 6 3 3 9 9 3
6 0 18 1 0 2 18 0 2 6 3 3 9 9 3
6 0 18 2 0 2 18 0 2 6 3 3 9 9 3
6 0 18 3 0 2 18 0 2 6 3 3 9 9 3
6 0 18 4 0 2 18 0 2 6 3 3 9 9 3
6 0 18 5 0 2 18 0 2 6 3 3 9 9 3
6 0 18 6 0 2 18 0 2 6 3 3 9 9 3
6 0 18 7 0 2 18 0 2 6 3 3 9 9 3
6 0 18 8 0 2 18 0 2 6 3 3 9 9 3
6 0 18 N 0 2 18 0 2 6 3 3 9 9 3
7 0 19 0 1 2 19 1 2 7 3 3 9 9 3
7 0 19 1 1 2 19 1 2 7 3 3 9 9 3
7 0 19 2 1 2 19 1 2 7 3 3 9 9 3
7 0 19 3 1 2 19 1 2 7 3 3 9 9 3
7 0 19 4 1 2 19 1 2 7 3 3 9 9 3
7 0 19 5 1 2 19 1 2 7 3 3 9 9 3
7 0 19 6 1 2 19 1 2 7 3 3 9 9 3
7 0 19 7 1 2 19 1 2 7 3 3 9 9 3
7 0 19 8 1 2 19 1 2 7 3 3 9 9 3
7 0 19 N 1 2 19 1 2 7 3 3 9 9 3
8 0 20 0 2 2 20 2 2 8 3 3 9 9 3
8 0 20 1 2 2 20 2 2 8 3 3 9 9 3
8 0 20 2 2 2 20 2 2 8 3 3 9 9 3
8 0 20 3 2 2 20 2 2 8 3 3 9 9 3
8 0 20 4 2 2 20 2 2 8 3 3 9 9 3
8 0 20 5 2 2 20 2 2 8 3 3 9 9 3
8 0 20 6 2 2 20 2 2 8 3 3 9 9 3
8 0 20 7 2 2 20 2 2 8 3 3 9 9 3
8 0 20 8 2 2 20 2 2 8 3 3 9 9 3
8 0 20 N 2 2 20 2 2 8 3 3 9 9 3
0 0 81 0 0 0 81 0 0 0 3 3 9 9 3
0 0 81 1 0 0 81 0 0 0 3 3 9 9 3
0 0 81 2 0 0 81 0 0 0 3 3 9 9 3
0 0 81 3 0 0 81 0 0 0 3 3 9 9 3
0 0 81 4 0 0 81 0 0 0 3 3 9 9 3
0 0 81 5 0 0 81 0 0 0 3 3 9 9 3
0 0 81 6 0 0 81 0 0 0 3 3 9 9 3
0 0 81 7 0 0 81 0 0 0 3 3 9 9 3
0 0 81 8 0 0 81 0 0 0 3 3 9 9 3
0 0 81 N 0 0 81 0 0 0 3 3 9 9 3
1 0 82 0 1 0 82 1 0 1 3 3 9 9 3
1 0 82 1 1 0 82 1 0 1 3 3 9 9 3
1 0 82 2 1 0 82 1 0 1 3 3 9 9 3
1 0 82 3 1 0 82 1 0 1 3 3 9 9 3
1 0 82 4 1 0 82 1 0 1 3 3 9 9 3
1 0 82 5 1 0 82 1 0 1 3 3 9 9 3
1 0 82 6 1 0 82 1 0 1 3 3 9 9 3
1 0 82 7 1 0 82 1 0 1 3 3 9 9 3
1 0 82 8 1 0 82 1 0 1 3 3 9 9 3
1 0 82 N 1 0 82 1 0 1 3 3 9 9 3
2 0 83 0 2 0 83 2 0 2 3 3 9 9 3
2 0 83 1 2 0 83 2 0 2 3 3 9 9 3
2 0 83 2 2 0 83 2 0 2 3 3 9 9 3
2 0 83 3 2 0 83 2 0 2 3 3 9 9 3
2 0 83 4 2 0 83 2 0 2 3 3 9 9 3
2 0 83 5 2 0 83 2 0 2 3 3 9 9 3
2 0 83 6 2 0 83 2 0 2 3 3 9 9 3
2 0 83 7 2 0 83 2 0 2 3 3 9 9 3
2 0 83 8 2 0 83 2 0 2 3 3 9 9 3
2 0 83 N 2 0 83 2 0 2 3 3 9 9 3
3 0 90 0 0 1 90 0 1 3 3 3 9 9 3
3 0 90 1 0 1 90 0 1 3 3 3 9 9 3
3 0 90 2 0 1 90 0 1 3 3 3 9 9 3
3 0 90 3 0 1 90 0 1 3 3 3 9 9 3
3 0 90 4 0 1 90 0 1 3 3 3 9 9 3
3 0 90 5 0 1 90 0 1 3 3 3 9 9 3
3 0 90 6 0 1 90 0 1 3 3 3 9 9 3
3 0 90 7 0 1 90 0 1 3 3 3 9 9 3
3 0 90 8 0 1 90 0 1 3 3 3 9 9 3
3 0 90 N 0 1 90 0 1 3 3 3 9 9 3
4 0 91 0 1 1 91 1 1 4 3 3 9 9 3
4 0 91 1 1 1 91 1 1 4 3 3 9 9 3
4 0 91 2 1 1 91 1 1 4 3 3 9 9 3
4 0 91 3 1 1 91 1 1 4 3 3 9 9 3
4 0 91 4 1 1 91 1 1 4 3 3 9 9 3
4 0 91 5 1 1 91 1 1 4 3 3 9 9 3
4 0 91 6 1 1 91 1 1 4 3 3 9 9 3
4 0 91 7 1 1 91 1 1 4 3 3 9 9 3
4 0 91 8 1 1 91 1 1 4 3 3 9 9 3
4 0 91 N 1 1 91 1 1 4 3 3 9 9 3
5 0 92 0 2 1 92 2 1 5 3 3 9 9 3
5 0 92 1 2 1 92 2 1 5 3 3 9 9 3
5 0 92 2 2 1 92 2 1 5 3 3 9 9 3
5 0 92 3 2 1 92 2 1 5 3 3 9 9 3
5 0 92 4 2 1 92 2 1 5 3 3 9 9 3
5 0 92 5 2 1 92 2 1 5 3 3 9 9 3
5 0 92 6 2 1 92 2 1 5 3 3 9 9 3
5 0 92 7 2 1 92 2 1 5 3 3 9 9 3
5 0 92 8 2 1 92 2 1 5 3 3 9 9 3
5 0 92 N 2 1 92 2 1 5 3 3 9 9 3
6 0 99 0 0 2 99 0 2 6 3 3 9 9 3
6 0 99 1 0 2 99 0 2 6 3 3 9 9 3
6 0 99 2 0 2 99 0 2 6 3 3 9 9 3
6 0 99 3 0 2 99 0 2 6 3 3 9 9 3
6 0 99 4 0 2 99 0 2 6 3 3 9 9 3
6 0 99 5 0 2 99 0 2 6 3 3 9 9 3
6 0 99 6 0 2 99 0 2 6 3 3 9 9 3
6 0 99 7 0 2 99 0 2 6 3 3 9 9 3
6 0 99 8 0 2 99 0 2 6 3 3 9 9 3
6 0 99 N 0 2 99 0 2 6 3 3 9 9 3
7 0 100 0 1 2 100 1 2 7 3 3 9 9 3
7 0 100 1 1 2 100 1 2 7 3 3 9 9 3
7 0 100 2 1 2 100 1 2 7 3 3 9 9 3
7 0 100 3 1 2 100 1 2 7 3 3 9 9 3
7 0 100 4 1 2 100 1 2 7 3 3 9 9 3
7 0 100 5 1 2 100 1 2 7 3 3 9 9 3
7 0 100 6 1 2 100 1 2 7 3 3 9 9 3
7 0 100 7 1 2 100 1 2 7 3 3 9 9 3
7 0 100 8 1 2 100 1 2 7 3 3 9 9 3
7 0 100 N 1 2 100 1 2 7 3 3 9 9 3
8 0 101 0 2 2 101 2 2 8 3 3 9 9 3
8 0 101 1 2 2 101 2 2 8 3 3 9 9 3
8 0 101 2 2 2 101 2 2 8 3 3 9 9 3
8 0 101 3 2 2 101 2 2 8 3 3 9 9 3
8 0 101 4 2 2 101 2 2 8 3 3 9 9 3
8 0 101 5 2 2 101 2 2 8 3 3 9 9 3
8 0 101 6 2 2 101 2 2 8 3 3 9 9 3
8 0 101 7 2 2 101 2 2 8 3 3 9 9 3
8 0 101 8 2 2 101 2 2 8 3 3 9 9 3
8 0 101 N 2 2 101 2 2 8 3 3 9 9 3
0 0 162 0 0 0 162 0 0 0 3 3 9 9 3
0 0 162 1 0 0 162 0 0 0 3 3 9 9 3
0 0 162 2 0 0 162 0 0 0 3 3 9 9 3
0 0 162 3 0 0 162 0 0 0 3 3 9 9 3
0 0 162 4 0 0 162 0 0 0 3 3 9 9 3
0 0 162 5 0 0 162 0 0 0 3 3 9 9 3
0 0 162 6 0 0 162 0 0 0 3 3 9 9 3
0 0 162 7 0 0 162 0 0 0 3 3 9 9 3
0 0 162 8 0 0 162 0 0 0 3 3 9 9 3
0 0 162 N 0 0 162 0 0 0 3 3 9 9 3
1 0 163 0 1 0 163 1 0 1 3 3 9 9 3
1 0 163 1 1 0 163 1 0 1 3 3 9 9 3
1 0 163 2 1 0 163 1 0 1 3 3 9 9 3
1 0 163 3 1 0 163 1 0 1 3 3 9 9 3
1 0 163 4 1 0 163 1 0 1 3 3 9 9 3
1 0 163 5 1 0 163 1 0 1 3 3 9 9 3
1 0 163 6 1 0 163 1 0 1 3 3 9 9 3
1 0 163 7 1 0 163 1 0 1 3 3 9 9 3
1 0 163 8 1 0 163 1 0 1 3 3 9 9 3
1 0 163 N 1 0 163 1 0 1 3 3 9 9 3
2 0 164 0 2 0 164 2 0 2 3 3 9 9 3
2 0 164 1 2 0 164 2 0 2 3 3 9 9 3
2 0 164 2 2 0 164 2 0 2 3 3 9 9 3
2 0 164 3 2 0 164 2 0 2 3 3 9 9 3
2 0 164 4 2 0 164 2 0 2 3 3 9 9 3
2 0 164 5 2 0 164 2 0 2 3 3 9 9 3
2 0 164 6 2 0 164 2 0 2 3 3 9 9 3
2 0 164 7 2 0 164 2 0 2 3 3 9 9 3
2 0 164 8 2 0 164 2 0 2 3 3 9 9 3
2 0 164 N 2 0 164 2 0 2 3 3 9 9 3
3 0 171 0 0 1 171 0 1 3 3 3 9 9 3
3 0 171 1 0 1 171 0 1 3 3 3 9 9 3
3 0 171 2 0 1 171 0 1 3 3 3 9 9 3
3 0 171 3 0 1 171 0 1 3 3 3 9 9 3
3 0 171 4 0 1 171 0 1 3 3 3 9 9 3
3 0 171 5 0 1 171 0 1 3 3 3 9 9 3
3 0 171 6 0 1 171 0 1 3 3 3 9 9 3
3 0 171 7 0 1 171 0 1 3 3 3 9 9 3
3 0 171 8 0 1 171 0 1 3 3 3 9 9 3
3 0 171 N 0 1 171 0 1 3 3 3 9 9 3
4 0 172 0 1 1 172 1 1 4 3 3 9 9 3
4 0 172 1 1 1 172 1 1 4 3 3 9 9 3
4 0 172 2 1 1 172 1 1 4 3 3 9 9 3
4 0 172 3 1 1 172 1 1 4 3 3 9 9 3
4 0 172 4 1 1 172 1 1 4 3 3 9 9 3
4 0 172 5 1 1 172 1 1 4 3 3 9 9 3
4 0 172 6 1 1 172 1 1 4 3 3 9 9 3
4 0 172 7 1 1 172 1 1 4 3 3 9 9 3
4 0 172 8 1 1 172 1 1 4 3 3 9 9 3
4 0 172 N 1 1 172 1 1 4 3 3 9 9 3
5 0 173 0 2 1 173 2 1 5 3 3 9 9 3
5 0 173 1 2 1 173 2 1 5 3 3 9 9 3
5 0 173 2 2 1 173 2 1 5 3 3 9 9 3
5 0 173 3 2 1 173 2 1 5 3 3 9 9 3
5 0 173 4 2 1 173 2 1 5 3 3 9 9 3
5 0 173 5 2 1 173 2 1 5 3 3 9 9 3
5 0 173 6 2 1 173 2 1 5 3 3 9 9 3
5 0 173 7 2 1 173 2 1 5 3 3 9 9 3
5 0 173 8 2 1 173 2 1 5 3 3 9 9 3
5 0 173 N 2 1 173 2 1 5 3 3 9 9 3
6 0 180 0 0 2 180 0 2 6 3 3 9 9 3
6 0 180 1 0 2 180 0 2 6 3 3 9 9 3
6 0 180 2 0 2 180 0 2 6 3 3 9 9 3
6 0 180 3 0 2 180 0 2 6 3 3 9 9 3
6 0 180 4 0 2 180 0 2 6 3 3 9 9 3
6 0 180 5 0 2 180 0 2 6 3 3 9 9 3
6 0 180 6 0 2 180 0 2 6 3 3 9 9 3
6 0 180 7 0 2 180 0 2 6 3 3 9 9 3
6 0 180 8 0 2 180 0 2 6 3 3 9 9 3
6 0 180 N 0 2 180 0 2 6 3 3 9 9 3
7 0 181 0 1 2 181 1 2 7 3 3 9 9 3
7 0 181 1 1 2 181 1 2 7 3 3 9 9 3
7 0 181 2 1 2 181 1 2 7 3 3 9 9 3
7 0 181 3 1 2 181 1 2 7 3 3 9 9 3
7 0 181 4 1 2 181 1 2 7 3 3 9 9 3
7 0 181 5 1 2 181 1 2 7 3 3 9 9 3
7 0 181 6 1 2 181 1 2 7 3 3 9 9 3
7 0 181 7 1 2 181 1 2 7 3 3 9 9 3
7 0 181 8 1 2 181 1 2 7 3 3 9 9 3
7 0 181 N 1 2 181 1 2 7 3 3 9 9 3
8 0 182 0 2 2 182 2 2 8 3 3 9 9 3
8 0 182 1 2 2 182 2 2 8 3 3 9 9 3
8 0 182 2 2 2 182 2 2 8 3 3 9 9 3
8 0 182 3 2 2 182 2 2 8 3 3 9 9 3
8 0 182 4 2 2 182 2 2 8 3 3 9 9 3
8 0 182 5 2 2 182 2 2 8 3 3 9 9 3
8 0 182 6 2 2 182 2 2 8 3 3 9 9 3
8 0 182 7 2 2 182 2 2 8 3 3 9 9 3
8 0 182 8 2 2 182 2 2 8 3 3 9 9 3
8 0 182 N 2 2 182 2 2 8 3 3 9 9 3
0 1 3 0 3 0 3 0 0 0 3 3 9 9 3
0 1 3 1 3 0 3 0 0 0 3 3 9 9 3
0 1 3 2 3 0 3 0 0 0 3 3 9 9 3
0 1 3 3 3 0 3 0 0 0 3 3 9 9 3
0 1 3 4 3 0 3 0 0 0 3 3 9 9 3
0 1 3 5 3 0 3 0 0 0 3 3 9 9 3
0 1 3 6 3 0 3 0 0 0 3 3 9 9 3
0 1 3 7 3 0 3 0 0 0 3 3 9 9 3
0 1 3 8 3 0 3 0 0 0 3 3 9 9 3
0 1 3 N 3 0 3 0 0 0 3 3 9 9 3
1 1 4 0 4 0 4 1 0 1 3 3 9 9 3
1 1 4 1 4 0 4 1 0 1 3 3 9 9 3
1 1 4 2 4 0 4 1 0 1 3 3 9 9 3
1 1 4 3 4 0 4 1 0 1 3 3 9 9 3
1 1 4 4 4 0 4 1 0 1 3 3 9 9 3
1 1 4 5 4 0 4 1 0 1 3 3 9 9 3
1 1 4 6 4 0 4 1 0 1 3 3 9 9 3
1 1 4 7 4 0 4 1 0 1 3 3 9 9 3
1 1 4 8 4 0 4 1 0 1 3 3 9 9 3
1 1 4 N 4 0 4 1 0 1 3 3 9 9 3
2 1 5 0 5 0 5 2 0 2 3 3 9 9 3
2 1 5 1 5 0 5 2 0 2 3 3 9 9 3
2 1 5 2 5 0 5 2 0 2 3 3 9 9 3
2 1 5 3 5 0 5 2 0 2 3 3 9 9 3
2 1 5 4 5 0 5 2 0 2 3 3 9 9 3
2 1 5 5 5 0 5 2 0 2 3 3 9 9 3
2 1 5 6 5 0 5 2 0 2 3 3 9 9 3
2 1 5 7 5 0 5 2 0 2 3 3 9 9 3
2 1 5 8 5 0 5 2 0 2 3 3 9 9 3
2 1 5 N 5 0 5 2 0 2 3 3 9 9 3
3 1 12 0 3 1 12 0 1 3 3 3 9 9 3
3 1 12 1 3 1 12 0 1 3 3 3 9 9 3
3 1 12 2 3 1 12 0 1 3 3 3 9 9 3
3 1 12 3 3 1 12 0 1 3 3 3 9 9 3
3 1 12 4 3 1 12 0 1 3 3 3 9 9 3
3 1 12 5 3 1 12 0 1 3 3 3 9 9 3
3 1 12 6 3 1 12 0 1 3 3 3 9 9 3
3 1 12 7 3 1 12 0 1 3 3 3 9 9 3
3 1 12 8 3 1 12 0 1 3 3 3 9 9 3
3 1 12 N 3 1 12 0 1 3 3 3 9 9 3
4 1 13 0 4 1 13 1 1 4 3 3 9 9 3
4 1 13 1 4 1 13 1 1 4 3 3 9 9 3
4 1 13 2 4 1 13 1 1 4 3 3 9 9 3
4 1 13 3 4 1 13 1 1 4 3 3 9 9 3
4 1 13 4 4 1 13 1 1 4 3 3 9 9 3
4 1 13 5 4 1 13 1 1 4 3 3 9 9 3
4 1 13 6 4 1 13 1 1 4 3 3 9 9 3
4 1 13 7 4 1 13 1 1 4 3 3 9 9 3
4 1 13 8 4 1 13 1 1 4 3 3 9 9 3
4 1 13 N 4 1 13 1 1 4 3 3 9 9 3
5 1 14 0 5 1 14 2 1 5 3 3 9 9 3
5 1 14 1 5 1 14 2 1 5 3 3 9 9 3
5 1 14 2 5 1 14 2 1 5 3 3 9 9 3
5 1 14 3 5 1 14 2 1 5 3 3 9 9 3
5 1 14 4 5 1 14 2 1 5 3 3 9 9 3
5 1 14 5 5 1 14 2 1 5 3 3 9 9 3
5 1 14 6 5 1 14 2 1 5 3 3 9 9 3
5 1 14 7 5 1 14 2 1 5 3 3 9 9 3
5 1 14 8 5 1 14 2 1 5 3 3 9 9 3
5 1 14 N 5 1 14 2 1 5 3 3 9 9 3
6 1 21 0 3 2 21 0 2 6 3 3 9 9 3
6 1 21 1 3 2 21 0 2 6 3 3 9 9 3
6 1 21 2 3 2 21 0 2 6 3 3 9 9 3
6 1 21 3 3 2 21 0 2 6 3 3 9 9 3
6 1 21 4 3 2 21 0 2 6 3 3 9 9 3
6 1 21 5 3 2 21 0 2 6 3 3 9 9 3
6 1 21 6 3 2 21 0 2 6 3 3 9 9 3
6 1 21 7 3 2 21 0 2 6 3 3 9 9 3
6 1 21 8 3 2 21 0 2 6 3 3 9 9 3
6 1 21 N 3 2 21 0 2 6 3 3 9 9 3
7 1 22 0 4 2 22 1 2 7 3 3 9 9 3
7 1 22 1 4 2 22 1 2 7 3 3 9 9 3
7 1 22 2 4 2 22 1 2 7 3 3 9 9 3
7 1 22 3 4 2 22 1 2 7 3 3 9 9 3
7 1 22 4 4 2 22 1 2 7 3 3 9 9 3
7 1 22 5 4 2 22 1 2 7 3 3 9 9 3
7 1 22 6 4 2 22 1 2 7 3 3 9 9 3
7 1 22 7 4 2 22 1 2 7 3 3 9 9 3
7 1 22 8 4 2 22 1 2 7 3 3 9 9 3
7 1 22 N 4 2 22 1 2 7 3 3 9 9 3
8 1 23 0 5 2 23 2 2 8 3 3 9 9 3
8 1 23 1 5 2 23 2 2 8 3 3 9 9 3
8 1 23 2 5 2 23 2 2 8 3 3 9 9 3
8 1 23 3 5 2 23 2 2 8 3 3 9 9 3
8 1 23 4 5 2 23 2 2 8 3 3 9 9 3
8 1 23 5 5 2 23 2 2 8 3 3 9 9 3
8 1 23 6 5 2 23 2 2 8 3 3 9 9 3
8 1 23 7 5 2 23 2 2 8 3 3 9 9 3
8 1 23 8 5 2 23 2 2 8 3 3 9 9 3
8 1 23 N 5 2 23 2 2 8 3 3 9 9 3
0 1 84 0 3 0 84 0 0 0 3 3 9 9 3
0 1 84 1 3 0 84 0 0 0 3 3 9 9 3
0 1 84 2 3 0 84 0 0 0 3 3 9 9 3
0 1 84 3 3 0 84 0 0 0 3 3 9 9 3
0 1 84 4 3 0 84 0 0 0 3 3 9 9 3
0 1 84 5 3 0 84 0 0 0 3 3 9 9 3
0 1 84 6 3 0 84 0 0 0 3 3 9 9 3
0 1 84 7 3 0 84 0 0 0 3 3 9 9 3
0 1 84 8 3 0 84 0 0 0 3 3 9 9 3
0 1 84 N 3 0 84 0 0 0 3 3 9 9 3
1 1 85 0 4 0 85 1 0 1 3 3 9 9 3
1 1 85 1 4 0 85 1 0 1 3 3 9 9 3
1 1 85 2 4 0 85 1 0 1 3 3 9 9 3
1 1 85 3 4 0 85 1 0 1 3 3 9 9 3
1 1 85 4 4 0 85 1 0 1 3 3 9 9 3
1 1 85 5 4 0 85 1 0 1 3 3 9 9 3
1 1 85 6 4 0 85 1 0 1 3 3 9 9 3
1 1 85 7 4 0 85 1 0 1 3 3 9 9 3
1 1 85 8 4 0 85 1 0 1 3 3 9 9 3
1 1 85 N 4 0 85 1 0 1 3 3 9 9 3
2 1 86 0 5 0 86 2 0 2 3 3 9 9 3
2 1 86 1 5 0 86 2 0 2 3 3 9 9 3
2 1 86 2 5 0 86 2 0 2 3 3 9 9 3
2 1 86 3 5 0 86 2 0 2 3 3 9 9 3
2 1 86 4 5 0 86 2 0 2 3 3 9 9 3
2 1 86 5 5 0 86 2 0 2 3 3 9 9 3
2 1 86 6 5 0 86 2 0 2 3 3 9 9 3
2 1 86 7 5 0 86 2 0 2 3 3 9 9 3
2 1 86 8 5 0 86 2 0 2 3 3 9 9 3
2 1 86 N 5 0 86 2 0 2 3 3 9 9 3
3 1 93 0 3 1 93 0 1 3 3 3 9 9 3
3 1 93 1 3 1 93 0 1 3 3 3 9 9 3
3 1 93 2 3 1 93 0 1 3 3 3 9 9 3
3 1 93 3 3 1 93 0 1 3 3 3 9 9 3
3 1 93 4 3 1 93 0 1 3 3 3 9 9 3
3 1 93 5 3 1 93 0 1 3 3 3 9 9 3
3 1 93 6 3 1 93 0 1 3 3 3 9 9 3
3 1 93 7 3 1 93 0 1 3 3 3 9 9 3
3 1 93 8 3 1 93 0 1 3 3 3 9 9 3
3 1 93 N 3 1 93 0 1 3 3 3 9 9 3
4 1 94 0 4 1 94 1 1 4 3 3 9 9 3
4 1 94 1 4 1 94 1 1 4 3 3 9 9 3
4 1 94 2 4 1 94 1 1 4 3 3 9 9 3
4 1 94 3 4 1 94 1 1 4 3 3 9 9 3
4 1 94 4 4 1 94 1 1 4 3 3 9 9 3
4 1 94 5 4 1 94 1 1 4 3 3 9 9 3
4 1 94 6 4 1 94 1 1 4 3 3 9 9 3
4 1 94 7 4 1 94 1 1 4 3 3 9 9 3
4 1 94 8 4 1 94 1 1 4 3 3 9 9 3
4 1 94 N 4 1 94 1 1 4 3 3 9 9 3
5 1 95 0 5 1 95 2 1 5 3 3 9 9 3
5 1 95 1 5 1 95 2 1 5 3 3 9 9 3
5 1 95 2 5 1 95 2 1 5 3 3 9 9 3
5 1 95 3 5 1 95 2 1 5 3 3 9 9 3
5 1 95 4 5 1 95 2 1 5 3 3 9 9 3
5 1 95 5 5 1 95 2 1 5 3 3 9 9 3
5 1 95 6 5 1 95 2 1 5 3 3 9 9 3
5 1 95 7 5 1 95 2 1 5 3 3 9 9 3
5 1 95 8 5 1 95 2 1 5 3 3 9 9 3
5 1 95 N 5 1 95 2 1 5 3 3 9 9 3
6 1 102 0 3 2 102 0 2 6 3 3 9 9 3
6 1 102 1 3 2 102 0 2 6 3 3 9 9 3
6 1 102 2 3 2 102 0 2 6 3 3 9 9 3
6 1 102 3 3 2 102 0 2 6 3 3 9 9 3
6 1 102 4 3 2 102 0 2 6 3 3 9 9 3
6 1 102 5 3 2 102 0 2 6 3 3 9 9 3
6 1 102 6 3 2 102 0 2 6 3 3 9 9 3
6 1 102 7 3 2 102 0 2 6 3 3 9 9 3
6 1 102 8 3 2 102 0 2 6 3 3 9 9 3
6 1 102 N 3 2 102 0 2 6 3 3 9 9 3
7 1 103 0 4 2 103 1 2 7 3 3 9 9 3
7 1 103 1 4 2 103 1 2 7 3 3 9 9 3
7 1 103 2 4 2 103 1 2 7 3 3 9 9 3
7 1 103 3 4 2 103 1 2 7 3 3 9 9 3
7 1 103 4 4 2 103 1 2 7 3 3 9 9 3
7 1 103 5 4 2 103 1 2 7 3 3 9 9 3
7 1 103 6 4 2 103 1 2 7 3 3 9 9 3
7 1 103 7 4 2 103 1 2 7 3 3 9 9 3
7 1 103 8 4 2 103 1 2 7 3 3 9 9 3
7 1 103 N 4 2 103 1 2 7 3 3 9 9 3
8 1 104 0 5 2 104 2 2 8 3 3 9 9 3
8 1 104 1 5 2 104 2 2 8 3 3 9 9 3
8 1 104 2 5 2 104 2 2 8 3 3 9 9 3
8 1 104 3 5 2 104 2 2 8 3 3 9 9 3
8 1 104 4 5 2 104 2 2 8 3 3 9 9 3
8 1 104 5 5 2 104 2 2 8 3 3 9 9 3
8 1 104 6 5 2 104 2 2 8 3 3 9 9 3
8 1 104 7 5 2 104 2 2 8 3 3 9 9 3
8 1 104 8 5 2 104 2 2 8 3 3 9 9 3
8 1 104 N 5 2 104 2 2 8 3 3 9 9 3
0 1 165 0 3 0 165 0 0 0 3 3 9 9 3
0 1 165 1 3 0 165 0 0 0 3 3 9 9 3
0 1 165 2 3 0 165 0 0 0 3 3 9 9 3
0 1 165 3 3 0 165 0 0 0 3 3 9 9 3
0 1 165 4 3 0 165 0 0 0 3 3 9 9 3
0 1 165 5 3 0 165 0 0 0 3 3 9 9 3
0 1 165 6 3 0 165 0 0 0 3 3 9 9 3
0 1 165 7 3 0 165 0 0 0 3 3 9 9 3
0 1 165 8 3 0 165 0 0 0 3 3 9 9 3
0 1 165 N 3 0 165 0 0 0 3 3 9 9 3
1 1 166 0 4 0 166 1 0 1 3 3 9 9 3
1 1 166 1 4 0 166 1 0 1 3 3 9 9 3
1 1 166 2 4 0 166 1 0 1 3 3 9 9 3
1 1 166 3 4 0 166 1 0 1 3 3 9 9 3
1 1 166 4 4 0 166 1 0 1 3 3 9 9 3
1 1 166 5 4 0 166 1 0 1 3 3 9 9 3
1 1 166 6 4 0 166 1 0 1 3 3 9 9 3
1 1 166 7 4 0 166 1 0 1 3 3 9 9 3
1 1 166 8 4 0 166 1 0 1 3 3 9 9 3
1 1 166 N 4 0 166 1 0 1 3 3 9 9 3
2 1 167 0 5 0 167 2 0 2 3 3 9 9 3
2 1 167 1 5 0 167 2 0 2 3 3 9 9 3
2 1 167 2 5 0 167 2 0 2 3 3 9 9 3
2 1 167 3 5 0 167 2 0 2 3 3 9 9 3
2 1 167 4 5 0 167 2 0 2 3 3 9 9 3
2 1 167 5 5 0 167 2 0 2 3 3 9 9 3
2 1 167 6 5 0 167 2 0 2 3 3 9 9 3
2 1 167 7 5 0 167 2 0 2 3 3 9 9 3
2 1 167 8 5 0 167 2 0 2 3 3 9 9 3
2 1 167 N 5 0 167 2 0 2 3 3 9 9 3
3 1 174 0 3 1 174 0 1 3 3 3 9 9 3
3 1 174 1 3 1 174 0 1 3 3 3 9 9 3
3 1 174 2 3 1 174 0 1 3 3 3 9 9 3
3 1 174 3 3 1 174 0 1 3 3 3 9 9 3
3 1 174 4 3 1 174 0 1 3 3 3 9 9 3
3 1 174 5 3 1 174 0 1 3 3 3 9 9 3
3 1 174 6 3 1 174 0 1 3 3 3 9 9 3
3 1 174 7 3 1 174 0 1 3 3 3 9 9 3
3 1 174 8 3 1 174 0 1 3 3 3 9 9 3
3 1 174 N 3 1 174 0 1 3 3 3 9 9 3
4 1 175 0 4 1 175 1 1 4 3 3 9 9 3
4 1 175 1 4 1 175 1 1 4 3 3 9 9 3
4 1 175 2 4 1 175 1 1 4 3 3 9 9 3
4 1 175 3 4 1 175 1 1 4 3 3 9 9 3
4 1 175 4 4 1 175 1 1 4 3 3 9 9 3
4 1 175 5 4 1 175 1 1 4 3 3 9 9 3
4 1 175 6 4 1 175 1 1 4 3 3 9 9 3
4 1 175 7 4 1 175 1 1 4 3 3 9 9 3
4 1 175 8 4 1 175 1 1 4 3 3 9 9 3
4 1 175 N 4 1 175 1 1 4 3 3 9 9 3
5 1 176 0 5 1 176 2 1 5 3 3 9 9 3
5 1 176 1 5 1 176 2 1 5 3 3 9 9 3
5 1 176 2 5 1 176 2 1 5 3 3 9 9 3
5 1 176 3 5 1 176 2 1 5 3 3 9 9 3
5 1 176 4 5 1 176 2 1 5 3 3 9 9 3
5 1 176 5 5 1 176 2 1 5 3 3 9 9 3
5 1 176 6 5 1 176 2 1 5 3 3 9 9 3
5 1 176 7 5 1 176 2 1 5 3 3 9 9 3
5 1 176 8 5 1 176 2 1 5 3 3 9 9 3
5 1 176 N 5 1 176 2 1 5 3 3 9 9 3
6 1 183 0 3 2 183 0 2 6 3 3 9 9 3
6 1 183 1 3 2 183 0 2 6 3 3 9 9 3
6 1 183 2 3 2 183 0 2 6 3 3 9 9 3
6 1 183 3 3 2 183 0 2 6 3 3 9 9 3
6 1 183 4 3 2 183 0 2 6 3 3 9 9 3
6 1 183 5 3 2 183 0 2 6 3 3 9 9 3
6 1 183 6 3 2 183 0 2 6 3 3 9 9 3
6 1 183 7 3 2 183 0 2 6 3 3 9 9 3
6 1 183 8 3 2 183 0 2 6 3 3 9 9 3
6 1 183 N 3 2 183 0 2 6 3 3 9 9 3
7 1 184 0 4 2 184 1 2 7 3 3 9 9 3
7 1 184 1 4 2 184 1 2 7 3 3 9 9 3
7 1 184 2 4 2 184 1 2 7 3 3 9 9 3
7 1 184 3 4 2 184 1 2 7 3 3 9 9 3
7 1 184 4 4 2 184 1 2 7 3 3 9 9 3
7 1 184 5 4 2 184 1 2 7 3 3 9 9 3
7 1 184 6 4 2 184 1 2 7 3 3 9 9 3
7 1 184 7 4 2 184 1 2 7 3 3 9 9 3
7 1 184 8 4 2 184 1 2 7 3 3 9 9 3
7 1 184 N 4 2 184 1 2 7 3 3 9 9 3
8 1 185 0 5 2 185 2 2 8 3 3 9 9 3
8 1 185 1 5 2 185 2 2 8 3 3 9 9 3
8 1 185 2 5 2 185 2 2 8 3 3 9 9 3
8 1 185 3 5 2 185 2 2 8 3 3 9 9 3
8 1 185 4 5 2 185 2 2 8 3 3 9 9 3
8 1 185 5 5 2 185 2 2 8 3 3 9 9 3
8 1 185 6 5 2 185 2 2 8 3 3 9 9 3
8 1 185 7 5 2 185 2 2 8 3 3 9 9 3
8 1 185 8 5 2 185 2 2 8 3 3 9 9 3
8 1 185 N 5 2 185 2 2 8 3 3 9 9 3
0 2 6 0 6 0 6 0 0 0 3 3 9 9 3
0 2 6 1 6 0 6 0 0 0 3 3 9 9 3
0 2 6 2 6 0 6 0 0 0 3 3 9 9 3
0 2 6 3 6 0 6 0 0 0 3 3 9 9 3
0 2 6 4 6 0 6 0 0 0 3 3 9 9 3
0 2 6 5 6 0 6 0 0 0 3 3 9 9 3
0 2 6 6 6 0 6 0 0 0 3 3 9 9 3
0 2 6 7 6 0 6 0 0 0 3 3 9 9 3
0 2 6 8 6 0 6 0 0 0 3 3 9 9 3
0 2 6 N 6 0 6 0 0 0 3 3 9 9 3
1 2 7 0 7 0 7 1 0 1 3 3 9 9 3
1 2 7 1 7 0 7 1 0 1 3 3 9 9 3
1 2 7 2 7 0 7 1 0 1 3 3 9 9 3
1 2 7 3 7 0 7 1 0 1 3 3 9 9 3
1 2 7 4 7 0 7 1 0 1 3 3 9 9 3
1 2 7 5 7 0 7 1 0 1 3 3 9 9 3
1 2 7 6 7 0 7 1 0 1 3 3 9 9 3
1 2 7 7 7 0 7 1 0 1 3 3 9 9 3
1 2 7 8 7 0 7 1 0 1 3 3 9 9 3
1 2 7 N 7 0 7 1 0 1 3 3 9 9 3
2 2 8 0 8 0 8 2 0 2 3 3 9 9 3
2 2 8 1 8 0 8 2 0 2 3 3 9 9 3
2 2 8 2 8 0 8 2 0 2 3 3 9 9 3
2 2 8 3 8 0 8 2 0 2 3 3 9 9 3
2 2 8 4 8 0 8 2 0 2 3 3 9 9 3
2 2 8 5 8 0 8 2 0 2 3 3 9 9 3
2 2 8 6 8 0 8 2 0 2 3 3 9 9 3
2 2 8 7 8 0 8 2 0 2 3 3 9 9 3
2 2 8 8 8 0 8 2 0 2 3 3 9 9 3
2 2 8 N 8 0 8 2 0 2 3 3 9 9 3
3 2 15 0 6 1 15 0 1 3 3 3 9 9 3
3 2 15 1 6 1 15 0 1 3 3 3 9 9 3
3 2 15 2 6 1 15 0 1 3 3 3 9 9 3
3 2 15 3 6 1 15 0 1 3 3 3 9 9 3
3 2 15 4 6 1 15 0 1 3 3 3 9 9 3
3 2 15 5 6 1 15 0 1 3 3 3 9 9 3
3 2 15 6 6 1 15 0 1 3 3 3 9 9 3
3 2 15 7 6 1 15 0 1 3 3 3 9 9 3
3 2 15 8 6 1 15 0 1 3 3 3 9 9 3
3 2 15 N 6 1 15 0 1 3 3 3 9 9 3
4 2 16 0 7 1 16 1 1 4 3 3 9 9 3
4 2 16 1 7 1 16 1 1 4 3 3 9 9 3
4 2 16 2 7 1 16 1 1 4 3 3 9 9 3
4 2 16 3 7 1 16 1 1 4 3 3 9 9 3
4 2 16 4 7 1 16 1 1 4 3 3 9 9 3
4 2 16 5 7 1 16 1 1 4 3 3 9 9 3
4 2 16 6 7 1 16 1 1 4 3 3 9 9 3
4 2 16 7 7 1 16 1 1 4 3 3 9 9 3
4 2 16 8 7 1 16 1 1 4 3 3 9 9 3
4 2 16 N 7 1 16 1 1 4 3 3 9 9 3
5 2 17 0 8 1 17 2 1 5 3 3 9 9 3
5 2 17 1 8 1 17 2 1 5 3 3 9 9 3
5 2 17 2 8 1 17 2 1 5 3 3 9 9 3
5 2 17 3 8 1 17 2 1 5 3 3 9 9 3
5 2 17 4 8 1 17 2 1 5 3 3 9 9 3
5 2 17 5 8 1 17 2 1 5 3 3 9 9 3
5 2 17 6 8 1 17 2 1 5 3 3 9 9 3
5 2 17 7 8 1 17 2 1 5 3 3 9 9 3
5 2 17 8 8 1 17 2 1 5 3 3 9 9 3
5 2 17 N 8 1 17 2 1 5 3 3 9 9 3
6 2 24 0 6 2 24 0 2 6 3 3 9 9 3
6 2 24 1 6 2 24 0 2 6 3 3 9 9 3
6 2 24 2 6 2 24 0 2 6 3 3 9 9 3
6 2 24 3 6 2 24 0 2 6 3 3 9 9 3
6 2 24 4 6 2 24 0 2 6 3 3 9 9 3
6 2 24 5 6 2 24 0 2 6 3 3 9 9 3
6 2 24 6 6 2 24 0 2 6 3 3 9 9 3
6 2 24 7 6 2 24 0 2 6 3 3 9 9 3
6 2 24 8 6 2 24 0 2 6 3 3 9 9 3
6 2 24 N 6 2 24 0 2 6 3 3 9 9 3
7 2 25 0 7 2 25 1 2 7 3 3 9 9 3
7 2 25 1 7 2 25 1 2 7 3 3 9 9 3
7 2 25 2 7 2 25 1 2 7 3 3 9 9 3
7 2 25 3 7 2 25 1 2 7 3 3 9 9 3
7 2 25 4 7 2 25 1 2 7 3 3 9 9 3
7 2 25 5 7 2 25 1 2 7 3 3 9 9 3
7 2 25 6 7 2 25 1 2 7 3 3 9 9 3
7 2 25 7 7 2 25 1 2 7 3 3 9 9 3
7 2 25 8 7 2 25 1 2 7 3 3 9 9 3
7 2 25 N 7 2 25 1 2 7 3 3 9 9 3
8 2 26 0 8 2 26 2 2 8 3 3 9 9 3
8 2 26 1 8 2 26 2 2 8 3 3 9 9 3
8 2 26 2 8 2 26 2 2 8 3 3 9 9 3
8 2 26 3 8 2 26 2 2 8 3 3 9 9 3
8 2 26 4 8 2 26 2 2 8 3 3 9 9 3
8 2 26 5 8 2 26 2 2 8 3 3 9 9 3
8 2 26 6 8 2 26 2 2 8 3 3 9 9 3
8 2 26 7 8 2 26 2 2 8 3 3 9 9 3
8 2 26 8 8 2 26 2 2 8 3 3 9 9 3
8 2 26 N 8 2 26 2 2 8 3 3 9 9 3
0 2 87 0 6 0 87 0 0 0 3 3 9 9 3
0 2 87 1 6 0 87 0 0 0 3 3 9 9 3
0 2 87 2 6 0 87 0 0 0 3 3 9 9 3
0 2 87 3 6 0 87 0 0 0 3 3 9 9 3
0 2 87 4 6 0 87 0 0 0 3 3 9 9 3
0 2 87 5 6 0 87 0 0 0 3 3 9 9 3
0 2 87 6 6 0 87 0 0 0 3 3 9 9 3
0 2 87 7 6 0 87 0 0 0 3 3 9 9 3
0 2 87 8 6 0 87 0 0 0 3 3 9 9 3
0 2 87 N 6 0 87 0 0 0 3 3 9 9 3
1 2 88 0 7 0 88 1 0 1 3 3 9 9 3
1 2 88 1 7 0 88 1 0 1 3 3 9 9 3
1 2 88 2 7 0 88 1 0 1 3 3 9 9 3
1 2 88 3 7 0 88 1 0 1 3 3 9 9 3
1 2 88 4 7 0 88 1 0 1 3 3 9 9 3
1 2 88 5 7 0 88 1 0 1 3 3 9 9 3
1 2 88 6 7 0 88 1 0 1 3 3 9 9 3
1 2 88 7 7 0 88 1 0 1 3 3 9 9 3
1 2 88 8 7 0 88 1 0 1 3 3 9 9 3
1 2 88 N 7 0 88 1 0 1 3 3 9 9 3
2 2 89 0 8 0 89 2 0 2 3 3 9 9 3
2 2 89 1 8 0 89 2 0 2 3 3 9 9 3
2 2 89 2 8 0 89 2 0 2 3 3 9 9 3
2 2 89 3 8 0 89 2 0 2 3 3 9 9 3
2 2 89 4 8 0 89 2 0 2 3 3 9 9 3
2 2 89 5 8 0 89 2 0 2 3 3 9 9 3
2 2 89 6 8 0 89 2 0 2 3 3 9 9 3
2 2 89 7 8 0 89 2 0 2 3 3 9 9 3
2 2 89 8 8 0 89 2 0 2 3 3 9 9 3
2 2 89 N 8 0 89 2 0 2 3 3 9 9 3
3 2 96 0 6 1 96 0 1 3 3 3 9 9 3
3 2 96 1 6 1 96 0 1 3 3 3 9 9 3
3 2 96 2 6 1 96 0 1 3 3 3 9 9 3
3 2 96 3 6 1 96 0 1 3 3 3 9 9 3
3 2 96 4 6 1 96 0 1 3 3 3 9 9 3
3 2 96 5 6 1 96 0 1 3 3 3 9 9 3
3 2 96 6 6 1 96 0 1 3 3 3 9 9 3
3 2 96 7 6 1 96 0 1 3 3 3 9 9 3
3 2 96 8 6 1 96 0 1 3 3 3 9 9 3
3 2 96 N 6 1 96 0 1 3 3 3 9 9 3
4 2 97 0 7 1 97 1 1 4 3 3 9 9 3
4 2 97 1 7 1 97 1 1 4 3 3 9 9 3
4 2 97 2 7 1 97 1 1 4 3 3 9 9 3
4 2 97 3 7 1 97 1 1 4 3 3 9 9 3
4 2 97 4 7 1 97 1 1 4 3 3 9 9 3
4 2 97 5 7 1 97 1 1 4 3 3 9 9 3
4 2 97 6 7 1 97 1 1 4 3 3 9 9 3
4 2 97 7 7 1 97 1 1 4 3 3 9 9 3
4 2 97 8 7 1 97 1 1 4 3 3 9 9 3
4 2 97 N 7 1 97 1 1 4 3 3 9 9 3
5 2 98 0 8 1 98 2 1 5 3 3 9 9 3
5 2 98 1 8 1 98 2 1 5 3 3 9 9 3
5 2 98 2 8 1 98 2 1 5 3 3 9 9 3
5 2 98 3 8 1 98 2 1 5 3 3 9 9 3
5 2 98 4 8 1 98 2 1 5 3 3 9 9 3
5 2 98 5 8 1 98 2 1 5 3 3 9 9 3
5 2 98 6 8 1 98 2 1 5 3 3 9 9 3
5 2 98 7 8 1 98 2 1 5 3 3 9 9 3
5 2 98 8 8 1 98 2 1 5 3 3 9 9 3
5 2 98 N 8 1 98 2 1 5 3 3 9 9 3
6 2 105 0 6 2 105 0 2 6 3 3 9 9 3
6 2 105 1 6 2 105 0 2 6 3 3 9 9 3
6 2 105 2 6 2 105 0 2 6 3 3 9 9 3
6 2 105 3 6 2 105 0 2 6 3 3 9 9 3
6 2 105 4 6 2 105 0 2 6 3 3 9 9 3
6 2 105 5 6 2 105 0 2 6 3 3 9 9 3
6 2 105 6 6 2 105 0 2 6 3 3 9 9 3
6 2 105 7 6 2 105 0 2 6 3 3 9 9 3
6 2 105 8 6 2 105 0 2 6 3 3 9 9 3
6 2 105 N 6 2 105 0 2 6 3 3 9 9 3
7 2 106 0 7 2 106 1 2 7 3 3 9 9 3
7 2 106 1 7 2 106 1 2 7 3 3 9 9 3
7 2 106 2 7 2 106 1 2 7 3 3 9 9 3
7 2 106 3 7 2 106 1 2 7 3 3 9 9 3
7 2 106 4 7 2 106 1 2 7 3 3 9 9 3
7 2 106 5 7 2 106 1 2 7 3 3 9 9 3
7 2 106 6 7 2 106 1 2 7 3 3 9 9 3
7 2 106 7 7 2 106 1 2 7 3 3 9 9 3
7 2 106 8 7 2 106 1 2 7 3 3 9 9 3
7 2 106 N 7 2 106 1 2 7 3 3 9 9 3
8 2 107 0 8 2 107 2 2 8 3 3 9 9 3
8 2 107 1 8 2 107 2 2 8 3 3 9 9 3
8 2 107 2 8 2 107 2 2 8 3 3 9 9 3
8 2 107 3 8 2 107 2 2 8 3 3 9 9 3
8 2 107 4 8 2 107 2 2 8 3 3 9 9 3
8 2 107 5 8 2 107 2 2 8 3 3 9 9 3
8 2 107 6 8 2 107 2 2 8 3 3 9 9 3
8 2 107 7 8 2 107 2 2 8 3 3 9 9 3
8 2 107 8 8 2 107 2 2 8 3 3 9 9 3
8 2 107 N 8 2 107 2 2 8 3 3 9 9 3
0 2 168 0 6 0 168 0 0 0 3 3 9 9 3
0 2 168 1 6 0 168 0 0 0 3 3 9 9 3
0 2 168 2 6 0 168 0 0 0 3 3 9 9 3
0 2 168 3 6 0 168 0 0 0 3 3 9 9 3
0 2 168 4 6 0 168 0 0 0 3 3 9 9 3
0 2 168 5 6 0 168 0 0 0 3 3 9 9 3
0 2 168 6 6 0 168 0 0 0 3 3 9 9 3
0 2 168 7 6 0 168 0 0 0 3 3 9 9 3
0 2 168 8 6 0 168 0 0 0 3 3 9 9 3
0 2 168 N 6 0 168 0 0 0 3 3 9 9 3
1 2 169 0 7 0 169 1 0 1 3 3 9 9 3
1 2 169 1 7 0 169 1 0 1 3 3 9 9 3
1 2 169 2 7 0 169 1 0 1 3 3 9 9 3
1 2 169 3 7 0 169 1 0 1 3 3 9 9 3
1 2 169 4 7 0 169 1 0 1 3 3 9 9 3
1 2 169 5 7 0 169 1 0 1 3 3 9 9 3
1 2 169 6 7 0 169 1 0 1 3 3 9 9 3
1 2 169 7 7 0 169 1 0 1 3 3 9 9 3
1 2 169 8 7 0 169 1 0 1 3 3 9 9 3
1 2 169 N 7 0 169 1 0 1 3 3 9 9 3
2 2 170 0 8 0 170 2 0 2 3 3 9 9 3
2 2 170 1 8 0 170 2 0 2 3 3 9 9 3
2 2 170 2 8 0 170 2 0 2 3 3 9 9 3
2 2 170 3 8 0 170 2 0 2 3 3 9 9 3
2 2 170 4 8 0 170 2 0 2 3 3 9 9 3
2 2 170 5 8 0 170 2 0 2 3 3 9 9 3
2 2 170 6 8 0 170 2 0 2 3 3 9 9 3
2 2 170 7 8 0 170 2 0 2 3 3 9 9 3
2 2 170 8 8 0 170 2 0 2 3 3 9 9 3
2 2 170 N 8 0 170 2 0 2 3 3 9 9 3
3 2 177 0 6 1 177 0 1 3 3 3 9 9 3
3 2 177 1 6 1 177 0 1 3 3 3 9 9 3
3 2 177 2 6 1 177 0 1 3 3 3 9 9 3
3 2 177 3 6 1 177 0 1 3 3 3 9 9 3
3 2 177 4 6 1 177 0 1 3 3 3 9 9 3
3 2 177 5 6 1 177 0 1 3 3 3 9 9 3
3 2 177 6 6 1 177 0 1 3 3 3 9 9 3
3 2 177 7 6 1 177 0 1 3 3 3 9 9 3
3 2 177 8 6 1 177 0 1 3 3 3 9 9 3
3 2 177 N 6 1 177 0 1 3 3 3 9 9 3
4 2 178 0 7 1 178 1 1 4 3 3 9 9 3
4 2 178 1 7 1 178 1 1 4 3 3 9 9 3
4 2 178 2 7 1 178 1 1 4 3 3 9 9 3
4 2 178 3 7 1 178 1 1 4 3 3 9 9 3
4 2 178 4 7 1 178 1 1 4 3 3 9 9 3
4 2 178 5 7 1 178 1 1 4 3 3 9 9 3
4 2 178 6 7 1 178 1 1 4 3 3 9 9 3
4 2 178 7 7 1 178 1 1 4 3 3 9 9 3
4 2 178 8 7 1 178 1 1 4 3 3 9 9 3
4 2 178 N 7 1 178 1 1 4 3 3 9 9 3
5 2 179 0 8 1 179 2 1 5 3 3 9 9 3
5 2 179 1 8 1 179 2 1 5 3 3 9 9 3
5 2 179 2 8 1 179 2 1 5 3 3 9 9 3
5 2 179 3 8 1 179 2 1 5 3 3 9 9 3
5 2 179 4 8 1 179 2 1 5 3 3 9 9 3
5 2 179 5 8 1 179 2 1 5 3 3 9 9 3
5 2 179 6 8 1 179 2 1 5 3 3 9 9 3
5 2 179 7 8 1 179 2 1 5 3 3 9 9 3
5 2 179 8 8 1 179 2 1 5 3 3 9 9 3
5 2 179 N 8 1 179 2 1 5 3 3 9 9 3
6 2 186 0 6 2 186 0 2 6 3 3 9 9 3
6 2 186 1 6 2 186 0 2 6 3 3 9 9 3
6 2 186 2 6 2 186 0 2 6 3 3 9 9 3
6 2 186 3 6 2 186 0 2 6 3 3 9 9 3
6 2 186 4 6 2 186 0 2 6 3 3 9 9 3
6 2 186 5 6 2 186 0 2 6 3 3 9 9 3
6 2 186 6 6 2 186 0 2 6 3 3 9 9 3
6 2 186 7 6 2 186 0 2 6 3 3 9 9 3
6 2 186 8 6 2 186 0 2 6 3 3 9 9 3
6 2 186 N 6 2 186 0 2 6 3 3 9 9 3
7 2 187 0 7 2 187 1 2 7 3 3 9 9 3
7 2 187 1 7 2 187 1 2 7 3 3 9 9 3
7 2 187 2 7 2 187 1 2 7 3 3 9 9 3
7 2 187 3 7 2 187 1 2 7 3 3 9 9 3
7 2 187 4 7 2 187 1 2 7 3 3 9 9 3
7 2 187 5 7 2 187 1 2 7 3 3 9 9 3
7 2 187 6 7 2 187 1 2 7 3 3 9 9 3
7 2 187 7 7 2 187 1 2 7 3 3 9 9 3
7 2 187 8 7 2 187 1 2 7 3 3 9 9 3
7 2 187 N 7 2 187 1 2 7 3 3 9 9 3
8 2 188 0 8 2 188 2 2 8 3 3 9 9 3
8 2 188 1 8 2 188 2 2 8 3 3 9 9 3
8 2 188 2 8 2 188 2 2 8 3 3 9 9 3
8 2 188 3 8 2 188 2 2 8 3 3 9 9 3
8 2 188 4 8 2 188 2 2 8 3 3 9 9 3
8 2 188 5 8 2 188 2 2 8 3 3 9 9 3
8 2 188 6 8 2 188 2 2 8 3 3 9 9 3
8 2 188 7 8 2 188 2 2 8 3 3 9 9 3
8 2 188 8 8 2 188 2 2 8 3 3 9 9 3
8 2 188 N 8 2 188 2 2 8 3 3 9 9 3
0 3 27 0 0 3 27 0 0 0 3 3 9 9 3
0 3 27 1 0 3 27 0 0 0 3 3 9 9 3
0 3 27 2 0 3 27 0 0 0 3 3 9 9 3
0 3 27 3 0 3 27 0 0 0 3 3 9 9 3
0 3 27 4 0 3 27 0 0 0 3 3 9 9 3
0 3 27 5 0 3 27 0 0 0 3 3 9 9 3
0 3 27 6 0 3 27 0 0 0 3 3 9 9 3
0 3 27 7 0 3 27 0 0 0 3 3 9 9 3
0 3 27 8 0 3 27 0 0 0 3 3 9 9 3
0 3 27 N 0 3 27 0 0 0 3 3 9 9 3
1 3 28 0 1 3 28 1 0 1 3 3 9 9 3
1 3 28 1 1 3 28 1 0 1 3 3 9 9 3
1 3 28 2 1 3 28 1 0 1 3 3 9 9 3
1 3 28 3 1 3 28 1 0 1 3 3 9 9 3
1 3 28 4 1 3 28 1 0 1 3 3 9 9 3
1 3 28 5 1 3 28 1 0 1 3 3 9 9 3
1 3 28 6 1 3 28 1 0 1 3 3 9 9 3
1 3 28 7 1 3 28 1 0 1 3 3 9 9 3
1 3 28 8 1 3 28 1 0 1 3 3 9 9 3
1 3 28 N 1 3 28 1 0 1 3 3 9 9 3
2 3 29 0 2 3 29 2 0 2 3 3 9 9 3
2 3 29 1 2 3 29 2 0 2 3 3 9 9 3
2 3 29 2 2 3 29 2 0 2 3 3 9 9 3
2 3 29 3 2 3 29 2 0 2 3 3 9 9 3
2 3 29 4 2 3 29 2 0 2 3 3 9 9 3
2 3 29 5 2 3 29 2 0 2 3 3 9 9 3
2 3 29 6 2 3 29 2 0 2 3 3 9 9 3
2 3 29 7 2 3 29 2 0 2 3 3 9 9 3
2 3 29 8 2 3 29 2 0 2 3 3 9 9 3
2 3 29 N 2 3 29 2 0 2 3 3 9 9 3
3 3 36 0 0 4 36 0 1 3 3 3 9 9 3
3 3 36 1 0 4 36 0 1 3 3 3 9 9 3
3 3 36 2 0 4 36 0 1 3 3 3 9 9 3
3 3 36 3 0 4 36 0 1 3 3 3 9 9 3
3 3 36 4 0 4 36 0 1 3 3 3 9 9 3
3 3 36 5 0 4 36 0 1 3 3 3 9 9 3
3 3 36 6 0 4 36 0 1 3 3 3 9 9 3
3 3 36 7 0 4 36 0 1 3 3 3 9 9 3
3 3 36 8 0 4 36 0 1 3 3 3 9 9 3
3 3 36 N 0 4 36 0 1 3 3 3 9 9 3
4 3 37 0 1 4 37 1 1 4 3 3 9 9 3
4 3 37 1 1 4 37 1 1 4 3 3 9 9 3
4 3 37 2 1 4 37 1 1 4 3 3 9 9 3
4 3 37 3 1 4 37 1 1 4 3 3 9 9 3
4 3 37 4 1 4 37 1 1 4 3 3 9 9 3
4 3 37 5 1 4 37 1 1 4 3 3 9 9 3
4 3 37 6 1 4 37 1 1 4 3 3 9 9 3
4 3 37 7 1 4 37 1 1 4 3 3 9 9 3
4 3 37 8 1 4 37 1 1 4 3 3 9 9 3
4 3 37 N 1 4 37 1 1 4 3 3 9 9 3
5 3 38 0 2 4 38 2 1 5 3 3 9 9 3
5 3 38 1 2 4 38 2 1 5 3 3 9 9 3
5 3 38 2 2 4 38 2 1 5 3 3 9 9 3
5 3 38 3 2 4 38 2 1 5 3 3 9 9 3
5 3 38 4 2 4 38 2 1 5 3 3 9 9 3
5 3 38 5 2 4 38 2 1 5 3 3 9 9 3
5 3 38 6 2 4 38 2 1 5 3 3 9 9 3
5 3 38 7 2 4 38 2 1 5 3 3 9 9 3
5 3 38 8 2 4 38 2 1 5 3 3 9 9 3
5 3 38 N 2 4 38 2 1 5 3 3 9 9 3
6 3 45 0 0 5 45 0 2 6 3 3 9 9 3
6 3 45 1 0 5 45 0 2 6 3 3 9 9 3
6 3 45 2 0 5 45 0 2 6 3 3 9 9 3
6 3 45 3 0 5 45 0 2 6 3 3 9 9 3
6 3 45 4 0 5 45 0 2 6 3 3 9 9 3
6 3 45 5 0 5 45 0 2 6 3 3 9 9 3
6 3 45 6 0 5 45 0 2 6 3 3 9 9 3
6 3 45 7 0 5 45 0 2 6 3 3 9 9 3
6 3 45 8 0 5 45 0 2 6 3 3 9 9 3
6 3 45 N 0 5 45 0 2 6 3 3 9 9 3
7 3 46 0 1 5 46 1 2 7 3 3 9 9 3
7 3 46 1 1 5 46 1 2 7 3 3 9 9 3
7 3 46 2 1 5 46 1 2 7 3 3 9 9 3
7 3 46 3 1 5 46 1 2 7 3 3 9 9 3
7 3 46 4 1 5 46 1 2 7 3 3 9 9 3
7 3 46 5 1 5 46 1 2 7 3 3 9 9 3
7 3 46 6 1 5 46 1 2 7 3 3 9 9 3
7 3 46 7 1 5 46 1 2 7 3 3 9 9 3
7 3 46 8 1 5 46 1 2 7 3 3 9 9 3
7 3 46 N 1 5 46 1 2 7 3 3 9 9 3
8 3 47 0 2 5 47 2 2 8 3 3 9 9 3
8 3 47 1 2 5 47 2 2 8 3 3 9 9 3
8 3 47 2 2 5 47 2 2 8 3 3 9 9 3
8 3 47 3 2 5 47 2 2 8 3 3 9 9 3
8 3 47 4 2 5 47 2 2 8 3 3 9 9 3
8 3 47 5 2 5 47 2 2 8 3 3 9 9 3
8 3 47 6 2 5 47 2 2 8 3 3 9 9 3
8 3 47 7 2 5 47 2 2 8 3 3 9 9 3
8 3 47 8 2 5 47 2 2 8 3 3 9 9 3
8 3 47 N 2 5 47 2 2 8 3 3 9 9 3
0 3 108 0 0 3 108 0 0 0 3 3 9 9 3
0 3 108 1 0 3 108 0 0 0 3 3 9 9 3
0 3 108 2 0 3 108 0 0 0 3 3 9 9 3
0 3 108 3 0 3 108 0 0 0 3 3 9 9 3
0 3 108 4 0 3 108 0 0 0 3 3 9 9 3
0 3 108 5 0 3 108 0 0 0 3 3 9 9 3
0 3 108 6 0 3 108 0 0 0 3 3 9 9 3
0 3 108 7 0 3 108 0 0 0 3 3 9 9 3
0 3 108 8 0 3 108 0 0 0 3 3 9 9 3
0 3 108 N 0 3 108 0 0 0 3 3 9 9 3
1 3 109 0 1 3 109 1 0 1 3 3 9 9 3
1 3 109 1 1 3 109 1 0 1 3 3 9 9 3
1 3 109 2 1 3 109 1 0 1 3 3 9 9 3
1 3 109 3 1 3 109 1 0 1 3 3 9 9 3
1 3 109 4 1 3 109 1 0 1 3 3 9 9 3
1 3 109 5 1 3 109 1 0 1 3 3 9 9 3
1 3 109 6 1 3 109 1 0 1 3 3 9 9 3
1 3 109 7 1 3 109 1 0 1 3 3 9 9 3
1 3 109 8 1 3 109 1 0 1 3 3 9 9 3
1 3 109 N 1 3 109 1 0 1 3 3 9 9 3
2 3 110 0 2 3 110 2 0 2 3 3 9 9 3
2 3 110 1 2 3 110 2 0 2 3 3 9 9 3
2 3 110 2 2 3 110 2 0 2 3 3 9 9 3
2 3 110 3 2 3 110 2 0 2 3 3 9 9 3
2 3 110 4 2 3 110 2 0 2 3 3 9 9 3
2 3 110 5 2 3 110 2 0 2 3 3 9 9 3
2 3 110 6 2 3 110 2 0 2 3 3 9 9 3
2 3 110 7 2 3 110 2 0 2 3 3 9 9 3
2 3 110 8 2 3 110 2 0 2 3 3 9 9 3
2 3 110 N 2 3 110 2 0 2 3 3 9 9 3
3 3 117 0 0 4 117 0 1 3 3 3 9 9 3
3 3 117 1 0 4 117 0 1 3 3 3 9 9 3
3 3 117 2 0 4 117 0 1 3 3 3 9 9 3
3 3 117 3 0 4 117 0 1 3 3 3 9 9 3
3 3 117 4 0 4 117 0 1 3 3 3 9 9 3
3 3 117 5 0 4 117 0 1 3 3 3 9 9 3
3 3 117 6 0 4 117 0 1 3 3 3 9 9 3
3 3 117 7 0 4 117 0 1 3 3 3 9 9 3
3 3 117 8 0 4 117 0 1 3 3 3 9 9 3
3 3 117 N 0 4 117 0 1 3 3 3 9 9 3
4 3 118 0 1 4 118 1 1 4 3 3 9 9 3
4 3 118 1 1 4 118 1 1 4 3 3 9 9 3
4 3 118 2 1 4 118 1 1 4 3 3 9 9 3
4 3 118 3 1 4 118 1 1 4 3 3 9 9 3
4 3 118 4 1 4 118 1 1 4 3 3 9 9 3
4 3 118 5 1 4 118 1 1 4 3 3 9 9 3
4 3 118 6 1 4 118 1 1 4 3 3 9 9 3
4 3 118 7 1 4 118 1 1 4 3 3 9 9 3
4 3 118 8 1 4 118 1 1 4 3 3 9 9 3
4 3 118 N 1 4 118 1 1 4 3 3 9 9 3
5 3 119 0 2 4 119 2 1 5 3 3 9 9 3
5 3 119 1 2 4 119 2 1 5 3 3 9 9 3
5 3 119 2 2 4 119 2 1 5 3 3 9 9 3
5 3 119 3 2 4 119 2 1 5 3 3 9 9 3
5 3 119 4 2 4 119 2 1 5 3 3 9 9 3
5 3 119 5 2 4 119 2 1 5 3 3 9 9 3
5 3 119 6 2 4 119 2 1 5 3 3 9 9 3
5 3 119 7 2 4 119 2 1 5 3 3 9 9 3
5 3 119 8 2 4 119 2 1 5 3 3 9 9 3
5 3 119 N 2 4 119 2 1 5 3 3 9 9 3
6 3 126 0 0 5 126 0 2 6 3 3 9 9 3
6 3 126 1 0 5 126 0 2 6 3 3 9 9 3
6 3 126 2 0 5 126 0 2 6 3 3 9 9 3
6 3 126 3 0 5 126 0 2 6 3 3 9 9 3
6 3 126 4 0 5 126 0 2 6 3 3 9 9 3
6 3 126 5 0 5 126 0 2 6 3 3 9 9 3
6 3 126 6 0 5 126 0 2 6 3 3 9 9 3
6 3 126 7 0 5 126 0 2 6 3 3 9 9 3
6 3 126 8 0 5 126 0 2 6 3 3 9 9 3
6 3 126 N 0 5 126 0 2 6 3 3 9 9 3
7 3 127 0 1 5 127 1 2 7 3 3 9 9 3
7 3 127 1 1 5 127 1 2 7 3 3 9 9 3
7 3 127 2 1 5 127 1 2 7 3 3 9 9 3
7 3 127 3 1 5 127 1 2 7 3 3 9 9 3
7 3 127 4 1 5 127 1 2 7 3 3 9 9 3
7 3 127 5 1 5 127 1 2 7 3 3 9 9 3
7 3 127 6 1 5 127 1 2 7 3 3 9 9 3
7 3 127 7 1 5 127 1 2 7 3 3 9 9 3
7 3 127 8 1 5 127 1 2 7 3 3 9 9 3
7 3 127 N 1 5 127 1 2 7 3 3 9 9 3
8 3 128 0 2 5 128 2 2 8 3 3 9 9 3
8 3 128 1 2 5 128 2 2 8 3 3 9 9 3
8 3 128 2 2 5 128 2 2 8 3 3 9 9 3
8 3 128 3 2 5 128 2 2 8 3 3 9 9 3
8 3 128 4 2 5 128 2 2 8 3 3 9 9 3
8 3 128 5 2 5 128 2 2 8 3 3 9 9 3
8 3 128 6 2 5 128 2 2 8 3 3 9 9 3
8 3 128 7 2 5 128 2 2 8 3 3 9 9 3
8 3 128 8 2 5 128 2 2 8 3 3 9 9 3
8 3 128 N 2 5 128 2 2 8 3 3 9 9 3
0 3 189 0 0 3 189 0 0 0 3 3 9 9 3
0 3 189 1 0 3 189 0 0 0 3 3 9 9 3
0 3 189 2 0 3 189 0 0 0 3 3 9 9 3
0 3 189 3 0 3 189 0 0 0 3 3 9 9 3
0 3 189 4 0 3 189 0 0 0 3 3 9 9 3
0 3 189 5 0 3 189 0 0 0 3 3 9 9 3
0 3 189 6 0 3 189 0 0 0 3 3 9 9 3
0 3 189 7 0 3 189 0 0 0 3 3 9 9 3
0 3 189 8 0 3 189 0 0 0 3 3 9 9 3
0 3 189 N 0 3 189 0 0 0 3 3 9 9 3
1 3 190 0 1 3 190 1 0 1 3 3 9 9 3
1 3 190 1 1 3 190 1 0 1 3 3 9 9 3
1 3 190 2 1 3 190 1 0 1 3 3 9 9 3
1 3 190 3 1 3 190 1 0 1 3 3 9 9 3
1 3 190 4 1 3 190 1 0 1 3 3 9 9 3
1 3 190 5 1 3 190 1 0 1 3 3 9 9 3
1 3 190 6 1 3 190 1 0 1 3 3 9 9 3
1 3 190 7 1 3 190 1 0 1 3 3 9 9 3
1 3 190 8 1 3 190 1 0 1 3 3 9 9 3
1 3 190 N 1 3 190 1 0 1 3 3 9 9 3
2 3 191 0 2 3 191 2 0 2 3 3 9 9 3
2 3 191 1 2 3 191 2 0 2 3 3 9 9 3
2 3 191 2 2 3 191 2 0 2 3 3 9 9 3
2 3 191 3 2 3 191 2 0 2 3 3 9 9 3
2 3 191 4 2 3 191 2 0 2 3 3 9 9 3
2 3 191 5 2 3 191 2 0 2 3 3 9 9 3
2 3 191 6 2 3 191 2 0 2 3 3 9 9 3
2 3 191 7 2 3 191 2 0 2 3 3 9 9 3
2 3 191 8 2 3 191 2 0 2 3 3 9 9 3
2 3 191 N 2 3 191 2 0 2 3 3 9 9 3
3 3 198 0 0 4 198 0 1 3 3 3 9 9 3
3 3 198 1 0 4 198 0 1 3 3 3 9 9 3
3 3 198 2 0 4 198 0 1 3 3 3 9 9 3
3 3 198 3 0 4 198 0 1 3 3 3 9 9 3
3 3 198 4 0 4 198 0 1 3 3 3 9 9 3
3 3 198 5 0 4 198 0 1 3 3 3 9 9 3
3 3 198 6 0 4 198 0 1 3 3 3 9 9 3
3 3 198 7 0 4 198 0 1 3 3 3 9 9 3
3 3 198 8 0 4 198 0 1 3 3 3 9 9 3
3 3 198 N 0 4 198 0 1 3 3 3 9 9 3
4 3 199 0 1 4 199 1 1 4 3 3 9 9 3
4 3 199 1 1 4 199 1 1 4 3 3 9 9 3
4 3 199 2 1 4 199 1 1 4 3 3 9 9 3
4 3 199 3 1 4 199 1 1 4 3 3 9 9 3
4 3 199 4 1 4 199 1 1 4 3 3 9 9 3
4 3 199 5 1 4 199 1 1 4 3 3 9 9 3
4 3 199 6 1 4 199 1 1 4 3 3 9 9 3
4 3 199 7 1 4 199 1 1 4 3 3 9 9 3
4 3 199 8 1 4 199 1 1 4 3 3 9 9 3
4 3 199 N 1 4 199 1 1 4 3 3 9 9 3
5 3 200 0 2 4 200 2 1 5 3 3 9 9 3
5 3 200 1 2 4 200 2 1 5 3 3 9 9 3
5 3 200 2 2 4 200 2 1 5 3 3 9 9 3
5 3 200 3 2 4 200 2 1 5 3 3 9 9 3
5 3 200 4 2 4 200 2 1 5 3 3 9 9 3
5 3 200 5 2 4 200 2 1 5 3 3 9 9 3
5 3 200 6 2 4 200 2 1 5 3 3 9 9 3
5 3 200 7 2 4 200 2 1 5 3 3 9 9 3
5 3 200 8 2 4 200 2 1 5 3 3 9 9 3
5 3 200 N 2 4 200 2 1 5 3 3 9 9 3
6 3 207 0 0 5 207 0 2 6 3 3 9 9 3
6 3 207 1 0 5 207 0 2 6 3 3 9 9 3
6 3 207 2 0 5 207 0 2 6 3 3 9 9 3
6 3 207 3 0 5 207 0 2 6 3 3 9 9 3
6 3 207 4 0 5 207 0 2 6 3 3 9 9 3
6 3 207 5 0 5 207 0 2 6 3 3 9 9 3
6 3 207 6 0 5 207 0 2 6 3 3 9 9 3
6 3 207 7 0 5 207 0 2 6 3 3 9 9 3
6 3 207 8 0 5 207 0 2 6 3 3 9 9 3
6 3 207 N 0 5 207 0 2 6 3 3 9 9 3
7 3 208 0 1 5 208 1 2 7 3 3 9 9 3
7 3 208 1 1 5 208 1 2 7 3 3 9 9 3
7 3 208 2 1 5 208 1 2 7 3 3 9 9 3
7 3 208 3 1 5 208 1 2 7 3 3 9 9 3
7 3 208 4 1 5 208 1 2 7 3 3 9 9 3
7 3 208 5 1 5 208 1 2 7 3 3 9 9 3
7 3 208 6 1 5 208 1 2 7 3 3 9 9 3
7 3 208 7 1 5 208 1 2 7 3 3 9 9 3
7 3 208 8 1 5 208 1 2 7 3 3 9 9 3
7 3 208 N 1 5 208 1 2 7 3 3 9 9 3
8 3 209 0 2 5 209 2 2 8 3 3 9 9 3
8 3 209 1 2 5 209 2 2 8 3 3 9 9 3
8 3 209 2 2 5 209 2 2 8 3 3 9 9 3
8 3 209 3 2 5 209 2 2 8 3 3 9 9 3
8 3 209 4 2 5 209 2 2 8 3 3 9 9 3
8 3 209 5 2 5 209 2 2 8 3 3 9 9 3
8 3 209 6 2 5 209 2 2 8 3 3 9 9 3
8 3 209 7 2 5 209 2 2 8 3 3 9 9 3
8 3 209 8 2 5 209 2 2 8 3 3 9 9 3
8 3 209 N 2 5 209 2 2 8 3 3 9 9 3
0 4 30 0 3 3 30 0 0 0 3 3 9 9 3
0 4 30 1 3 3 30 0 0 0 3 3 9 9 3
0 4 30 2 3 3 30 0 0 0 3 3 9 9 3
0 4 30 3 3 3 30 0 0 0 3 3 9 9 3
0 4 30 4 3 3 30 0 0 0 3 3 9 9 3
0 4 30 5 3 3 30 0 0 0 3 3 9 9 3
0 4 30 6 3 3 30 0 0 0 3 3 9 9 3
0 4 30 7 3 3 30 0 0 0 3 3 9 9 3
0 4 30 8 3 3 30 0 0 0 3 3 9 9 3
0 4 30 N 3 3 30 0 0 0 3 3 9 9 3
1 4 31 0 4 3 31 1 0 1 3 3 9 9 3
1 4 31 1 4 3 31 1 0 1 3 3 9 9 3
1 4 31 2 4 3 31 1 0 1 3 3 9 9 3
1 4 31 3 4 3 31 1 0 1 3 3 9 9 3
1 4 31 4 4 3 31 1 0 1 3 3 9 9 3
1 4 31 5 4 3 31 1 0 1 3 3 9 9 3
1 4 31 6 4 3 31 1 0 1 3 3 9 9 3
1 4 31 7 4 3 31 1 0 1 3 3 9 9 3
1 4 31 8 4 3 31 1 0 1 3 3 9 9 3
1 4 31 N 4 3 31 1 0 1 3 3 9 9 3
2 4 32 0 5 3 32 2 0 2 3 3 9 9 3
2 4 32 1 5 3 32 2 0 2 3 3 9 9 3
2 4 32 2 5 3 32 2 0 2 3 3 9 9 3
2 4 32 3 5 3 32 2 0 2 3 3 9 9 3
2 4 32 4 5 3 32 2 0 2 3 3 9 9 3
2 4 32 5 5 3 32 2 0 2 3 3 9 9 3
2 4 32 6 5 3 32 2 0 2 3 3 9 9 3
2 4 32 7 5 3 32 2 0 2 3 3 9 9 3
2 4 32 8 5 3 32 2 0 2 3 3 9 9 3
2 4 32 N 5 3 32 2 0 2 3 3 9 9 3
3 4 39 0 3 4 39 0 1 3 3 3 9 9 3
3 4 39 1 3 4 39 0 1 3 3 3 9 9 3
3 4 39 2 3 4 39 0 1 3 3 3 9 9 3
3 4 39 3 3 4 39 0 1 3 3 3 9 9 3
3 4 39 4 3 4 39 0 1 3 3 3 9 9 3
3 4 39 5 3 4 39 0 1 3 3 3 9 9 3
3 4 39 6 3 4 39 0 1 3 3 3 9 9 3
3 4 39 7 3 4 39 0 1 3 3 3 9 9 3
3 4 39 8 3 4 39 0 1 3 3 3 9 9 3
3 4 39 N 3 4 39 0 1 3 3 3 9 9 3
4 4 40 0 4 4 40 1 1 4 3 3 9 9 3
4 4 40 1 4 4 40 1 1 4 3 3 9 9 3
4 4 40 2 4 4 40 1 1 4 3 3 9 9 3
4 4 40 3 4 4 40 1 1 4 3 3 9 9 3
4 4 40 4 4 4 40 1 1 4 3 3 9 9 3
4 4 40 5 4 4 40 1 1 4 3 3 9 9 3
4 4 40 6 4 4 40 1 1 4 3 3 9 9 3
4 4 40 7 4 4 40 1 1 4 3 3 9 9 3
4 4 40 8 4 4 40 1 1 4 3 3 9 9 3
4 4 40 N 4 4 40 1 1 4 3 3 9 9 3
5 4 41 0 5 4 41 2 1 5 3 3 9 9 3
5 4 41 1 5 4 41 2 1 5 3 3 9 9 3
5 4 41 2 5 4 41 2 1 5 3 3 9 9 3
5 4 41 3 5 4 41 2 1 5 3 3 9 9 3
5 4 41 4 5 4 41 2 1 5 3 3 9 9 3
5 4 41 5 5 4 41 2 1 5 3 3 9 9 3
5 4 41 6 5 4 41 2 1 5 3 3 9 9 3
5 4 41 7 5 4 41 2 1 5 3 3 9 9 3
5 4 41 8 5 4 41 2 1 5 3 3 9 9 3
5 4 41 N 5 4 41 2 1 5 3 3 9 9 3
6 4 48 0 3 5 48 0 2 6 3 3 9 9 3
6 4 48 1 3 5 48 0 2 6 3 3 9 9 3
6 4 48 2 3 5 48 0 2 6 3 3 9 9 3
6 4 48 3 3 5 48 0 2 6 3 3 9 9 3
6 4 48 4 3 5 48 0 2 6 3 3 9 9 3
6 4 48 5 3 5 48 0 2 6 3 3 9 9 3
6 4 48 6 3 5 48 0 2 6 3 3 9 9 3
6 4 48 7 3 5 48 0 2 6 3 3 9 9 3
6 4 48 8 3 5 48 0 2 6 3 3 9 9 3
6 4 48 N 3 5 48 0 2 6 3 3 9 9 3
7 4 49 0 4 5 49 1 2 7 3 3 9 9 3
7 4 49 1 4 5 49 1 2 7 3 3 9 9 3
7 4 49 2 4 5 49 1 2 7 3 3 9 9 3
7 4 49 3 4 5 49 1 2 7 3 3 9 9 3
7 4 49 4 4 5 49 1 2 7 3 3 9 9 3
7 4 49 5 4 5 49 1 2 7 3 3 9 9 3
7 4 49 6 4 5 49 1 2 7 3 3 9 9 3
7 4 49 7 4 5 49 1 2 7 3 3 9 9 3
7 4 49 8 4 5 49 1 2 7 3 3 9 9 3
7 4 49 N 4 5 49 1 2 7 3 3 9 9 3
8 4 50 0 5 5 50 2 2 8 3 3 9 9 3
8 4 50 1 5 5 50 2 2 8 3 3 9 9 3
8 4 50 2 5 5 50 2 2 8 3 3 9 9 3
8 4 50 3 5 5 50 2 2 8 3 3 9 9 3
8 4 50 4 5 5 50 2 2 8 3 3 9 9 3
8 4 50 5 5 5 50 2 2 8 3 3 9 9 3
8 4 50 6 5 5 50 2 2 8 3 3 9 9 3
8 4 50 7 5 5 50 2 2 8 3 3 9 9 3
8 4 50 8 5 5 50 2 2 8 3 3 9 9 3
8 4 50 N 5 5 50 2 2 8 3 3 9 9 3
0 4 111 0 3 3 111 0 0 0 3 3 9 9 3
0 4 111 1 3 3 111 0 0 0 3 3 9 9 3
0 4 111 2 3 3 111 0 0 0 3 3 9 9 3
0 4 111 3 3 3 111 0 0 0 3 3 9 9 3
0 4 111 4 3 3 111 0 0 0 3 3 9 9 3
0 4 111 5 3 3 111 0 0 0 3 3 9 9 3
0 4 111 6 3 3 111 0 0 0 3 3 9 9 3
0 4 111 7 3 3 111 0 0 0 3 3 9 9 3
0 4 111 8 3 3 111 0 0 0 3 3 9 9 3
0 4 111 N 3 3 111 0 0 0 3 3 9 9 3
1 4 112 0 4 3 112 1 0 1 3 3 9 9 3
1 4 112 1 4 3 112 1 0 1 3 3 9 9 3
1 4 112 2 4 3 112 1 0 1 3 3 9 9 3
1 4 112 3 4 3 112 1 0 1 3 3 9 9 3
1 4 112 4 4 3 112 1 0 1 3 3 9 9 3
1 4 112 5 4 3 112 1 0 1 3 3 9 9 3
1 4 112 6 4 3 112 1 0 1 3 3 9 9 3
1 4 112 7 4 3 112 1 0 1 3 3 9 9 3
1 4 112 8 4 3 112 1 0 1 3 3 9 9 3
1 4 112 N 4 3 112 1 0 1 3 3 9 9 3
2 4 113 0 5 3 113 2 0 2 3 3 9 9 3
2 4 113 1 5 3 113 2 0 2 3 3 9 9 3
2 4 113 2 5 3 113 2 0 2 3 3 9 9 3
2 4 113 3 5 3 113 2 0 2 3 3 9 9 3
2 4 113 4 5 3 113 2 0 2 3 3 9 9 3
2 4 113 5 5 3 113 2 0 2 3 3 9 9 3
2 4 113 6 5 3 113 2 0 2 3 3 9 9 3
2 4 113 7 5 3 113 2 0 2 3 3 9 9 3
2 4 113 8 5 3 113 2 0 2 3 3 9 9 3
2 4 113 N 5 3 113 2 0 2 3 3 9 9 3
3 4 120 0 3 4 120 0 1 3 3 3 9 9 3
3 4 120 1 3 4 120 0 1 3 3 3 9 9 3
3 4 120 2 3 4 120 0 1 3 3 3 9 9 3
3 4 120 3 3 4 120 0 1 3 3 3 9 9 3
3 4 120 4 3 4 120 0 1 3 3 3 9 9 3
3 4 120 5 3 4 120 0 1 3 3 3 9 9 3
3 4 120 6 3 4 120 0 1 3 3 3 9 9 3
3 4 120 7 3 4 120 0 1 3 3 3 9 9 3
3 4 120 8 3 4 120 0 1 3 3 3 9 9 3
3 4 120 N 3 4 120 0 1 3 3 3 9 9 3
4 4 121 0 4 4 121 1 1 4 3 3 9 9 3
4 4 121 1 4 4 121 1 1 4 3 3 9 9 3
4 4 121 2 4 4 121 1 1 4 3 3 9 9 3
4 4 121 3 4 4 121 1 1 4 3 3 9 9 3
4 4 121 4 4 4 121 1 1 4 3 3 9 9 3
4 4 121 5 4 4 121 1 1 4 3 3 9 9 3
4 4 121 6 4 4 121 1 1 4 3 3 9 9 3
4 4 121 7 4 4 121 1 1 4 3 3 9 9 3
4 4 121 8 4 4 121 1 1 4 3 3 9 9 3
4 4 121 N 4 4 121 1 1 4 3 3 9 9 3
5 4 122 0 5 4 122 2 1 5 3 3 9 9 3
5 4 122 1 5 4 122 2 1 5 3 3 9 9 3
5 4 122 2 5 4 122 2 1 5 3 3 9 9 3
5 4 122 3 5 4 122 2 1 5 3 3 9 9 3
5 4 122 4 5 4 122 2 1 5 3 3 9 9 3
5 4 122 5 5 4 122 2 1 5 3 3 9 9 3
5 4 122 6 5 4 122 2 1 5 3 3 9 9 3
5 4 122 7 5 4 122 2 1 5 3 3 9 9 3
5 4 122 8 5 4 122 2 1 5 3 3 9 9 3
5 4 122 N 5 4 122 2 1 5 3 3 9 9 3
6 4 129 0 3 5 129 0 2 6 3 3 9 9 3
6 4 129 1 3 5 129 0 2 6 3 3 9 9 3
6 4 129 2 3 5 129 0 2 6 3 3 9 9 3
6 4 129 3 3 5 129 0 2 6 3 3 9 9 3
6 4 129 4 3 5 129 0 2 6 3 3 9 9 3
6 4 129 5 3 5 129 0 2 6 3 3 9 9 3
6 4 129 6 3 5 129 0 2 6 3 3 9 9 3
6 4 129 7 3 5 129 0 2 6 3 3 9 9 3
6 4 129 8 3 5 129 0 2 6 3 3 9 9 3
6 4 129 N 3 5 129 0 2 6 3 3 9 9 3
7 4 130 0 4 5 130 1 2 7 3 3 9 9 3
7 4 130 1 4 5 130 1 2 7 3 3 9 9 3
7 4 130 2 4 5 130 1 2 7 3 3 9 9 3
7 4 130 3 4 5 130 1 2 7 3 3 9 9 3
7 4 130 4 4 5 130 1 2 7 3 3 9 9 3
7 4 130 5 4 5 130 1 2 7 3 3 9 9 3
7 4 130 6 4 5 130 1 2 7 3 3 9 9 3
7 4 130 7 4 5 130 1 2 7 3 3 9 9 3
7 4 130 8 4 5 130 1 2 7 3 3 9 9 3
7 4 130 N 4 5 130 1 2 7 3 3 9 9 3
8 4 131 0 5 5 131 2 2 8 3 3 9 9 3
8 4 131 1 5 5 131 2 2 8 3 3 9 9 3
8 4 131 2 5 5 131 2 2 8 3 3 9 9 3
8 4 131 3 5 5 131 2 2 8 3 3 9 9 3
8 4 131 4 5 5 131 2 2 8 3 3 9 9 3
8 4 131 5 5 5 131 2 2 8 3 3 9 9 3
8 4 131 6 5 5 131 2 2 8 3 3 9 9 3
8 4 131 7 5 5 131 2 2 8 3 3 9 9 3
8 4 131 8 5 5 131 2 2 8 3 3 9 9 3
8 4 131 N 5 5 131 2 2 8 3 3 9 9 3
0 4 192 0 3 3 192 0 0 0 3 3 9 9 3
0 4 192 1 3 3 192 0 0 0 3 3 9 9 3
0 4 192 2 3 3 192 0 0 0 3 3 9 9 3
0 4 192 3 3 3 192 0 0 0 3 3 9 9 3
0 4 192 4 3 3 192 0 0 0 3 3 9 9 3
0 4 192 5 3 3 192 0 0 0 3 3 9 9 3
0 4 192 6 3 3 192 0 0 0 3 3 9 9 3
0 4 192 7 3 3 192 0 0 0 3 3 9 9 3
0 4 192 8 3 3 192 0 0 0 3 3 9 9 3
0 4 192 N 3 3 192 0 0 0 3 3 9 9 3
1 4 193 0 4 3 193 1 0 1 3 3 9 9 3
1 4 193 1 4 3 193 1 0 1 3 3 9 9 3
1 4 193 2 4 3 193 1 0 1 3 3 9 9 3
1 4 193 3 4 3 193 1 0 1 3 3 9 9 3
1 4 193 4 4 3 193 1 0 1 3 3 9 9 3
1 4 193 5 4 3 193 1 0 1 3 3 9 9 3
1 4 193 6 4 3 193 1 0 1 3 3 9 9 3
1 4 193 7 4 3 193 1 0 1 3 3 9 9 3
1 4 193 8 4 3 193 1 0 1 3 3 9 9 3
1 4 193 N 4 3 193 1 0 1 3 3 9 9 3
2 4 194 0 5 3 194 2 0 2 3 3 9 9 3
2 4 194 1 5 3 194 2 0 2 3 3 9 9 3
2 4 194 2 5 3 194 2 0 2 3 3 9 9 3
2 4 194 3 5 3 194 2 0 2 3 3 9 9 3
2 4 194 4 5 3 194 2 0 2 3 3 9 9 3
2 4 194 5 5 3 194 2 0 2 3 3 9 9 3
2 4 194 6 5 3 194 2 0 2 3 3 9 9 3
2 4 194 7 5 3 194 2 0 2 3 3 9 9 3
2 4 194 8 5 3 194 2 0 2 3 3 9 9 3
2 4 194 N 5 3 194 2 0 2 3 3 9 9 3
3 4 201 0 3 4 201 0 1 3 3 3 9 9 3
3 4 201 1 3 4 201 0 1 3 3 3 9 9 3
3 4 201 2 3 4 201 0 1 3 3 3 9 9 3
3 4 201 3 3 4 201 0 1 3 3 3 9 9 3
3 4 201 4 3 4 201 0 1 3 3 3 9 9 3
3 4 201 5 3 4 201 0 1 3 3 3 9 9 3
3 4 201 6 3 4 201 0 1 3 3 3 9 9 3
3 4 201 7 3 4 201 0 1 3 3 3 9 9 3
3 4 201 8 3 4 201 0 1 3 3 3 9 9 3
3 4 201 N 3 4 201 0 1 3 3 3 9 9 3
4 4 202 0 4 4 202 1 1 4 3 3 9 9 3
4 4 202 1 4 4 202 1 1 4 3 3 9 9 3
4 4 202 2 4 4 202 1 1 4 3 3 9 9 3
4 4 202 3 4 4 202 1 1 4 3 3 9 9 3
4 4 202 4 4 4 202 1 1 4 3 3 9 9 3
4 4 202 5 4 4 202 1 1 4 3 3 9 9 3
4 4 202 6 4 4 202 1 1 4 3 3 9 9 3
4 4 202 7 4 4 202 1 1 4 3 3 9 9 3
4 4 202 8 4 4 202 1 1 4 3 3 9 9 3
4 4 202 N 4 4 202 1 1 4 3 3 9 9 3
5 4 203 0 5 4 203 2 1 5 3 3 9 9 3
5 4 203 1 5 4 203 2 1 5 3 3 9 9 3
5 4 203 2 5 4 203 2 1 5 3 3 9 9 3
5 4 203 3 5 4 203 2 1 5 3 3 9 9 3
5 4 203 4 5 4 203 2 1 5 3 3 9 9 3
5 4 203 5 5 4 203 2 1 5 3 3 9 9 3
5 4 203 6 5 4 203 2 1 5 3 3 9 9 3
5 4 203 7 5 4 203 2 1 5 3 3 9 9 3
5 4 203 8 5 4 203 2 1 5 3 3 9 9 3
5 4 203 N 5 4 203 2 1 5 3 3 9 9 3
6 4 210 0 3 5 210 0 2 6 3 3 9 9 3
6 4 210 1 3 5 210 0 2 6 3 3 9 9 3
6 4 210 2 3 5 210 0 2 6 3 3 9 9 3
6 4 210 3 3 5 210 0 2 6 3 3 9 9 3
6 4 210 4 3 5 210 0 2 6 3 3 9 9 3
6 4 210 5 3 5 210 0 2 6 3 3 9 9 3
6 4 210 6 3 5 210 0 2 6 3 3 9 9 3
6 4 210 7 3 5 210 0 2 6 3 3 9 9 3
6 4 210 8 3 5 210 0 2 6 3 3 9 9 3
6 4 210 N 3 5 210 0 2 6 3 3 9 9 3
7 4 211 0 4 5 211 1 2 7 3 3 9 9 3
7 4 211 1 4 5 211 1 2 7 3 3 9 9 3
7 4 211 2 4 5 211 1 2 7 3 3 9 9 3
7 4 211 3 4 5 211 1 2 7 3 3 9 9 3
7 4 211 4 4 5 211 1 2 7 3 3 9 9 3
7 4 211 5 4 5 211 1 2 7 3 3 9 9 3
7 4 211 6 4 5 211 1 2 7 3 3 9 9 3
7 4 211 7 4 5 211 1 2 7 3 3 9 9 3
7 4 211 8 4 5 211 1 2 7 3 3 9 9 3
7 4 211 N 4 5 211 1 2 7 3 3 9 9 3
8 4 212 0 5 5 212 2 2 8 3 3 9 9 3
8 4 212 1 5 5 212 2 2 8 3 3 9 9 3
8 4 212 2 5 5 212 2 2 8 3 3 9 9 3
8 4 212 3 5 5 212 2 2 8 3 3 9 9 3
8 4 212 4 5 5 212 2 2 8 3 3 9 9 3
8 4 212 5 5 5 212 2 2 8 3 3 9 9 3
8 4 212 6 5 5 212 2 2 8 3 3 9 9 3
8 4 212 7 5 5 212 2 2 8 3 3 9 9 3
8 4 212 8 5 5 212 2 2 8 3 3 9 9 3
8 4 212 N 5 5 212 2 2 8 3 3 9 9 3
0 5 33 0 6 3 33 0 0 0 3 3 9 9 3
0 5 33 1 6 3 33 0 0 0 3 3 9 9 3
0 5 33 2 6 3 33 0 0 0 3 3 9 9 3
0 5 33 3 6 3 33 0 0 0 3 3 9 9 3
0 5 33 4 6 3 33 0 0 0 3 3 9 9 3
0 5 33 5 6 3 33 0 0 0 3 3 9 9 3
0 5 33 6 6 3 33 0 0 0 3 3 9 9 3
0 5 33 7 6 3 33 0 0 0 3 3 9 9 3
0 5 33 8 6 3 33 0 0 0 3 3 9 9 3
0 5 33 N 6 3 33 0 0 0 3 3 9 9 3
1 5 34 0 7 3 34 1 0 1 3 3 9 9 3
1 5 34 1 7 3 34 1 0 1 3 3 9 9 3
1 5 34 2 7 3 34 1 0 1 3 3 9 9 3
1 5 34 3 7 3 34 1 0 1 3 3 9 9 3
1 5 34 4 7 3 34 1 0 1 3 3 9 9 3
1 5 34 5 7 3 34 1 0 1 3 3 9 9 3
1 5 34 6 7 3 34 1 0 1 3 3 9 9 3
1 5 34 7 7 3 34 1 0 1 3 3 9 9 3
1 5 34 8 7 3 34 1 0 1 3 3 9 9 3
1 5 34 N 7 3 34 1 0 1 3 3 9 9 3
2 5 35 0 8 3 35 2 0 2 3 3 9 9 3
2 5 35 1 8 3 35 2 0 2 3 3 9 9 3
2 5 35 2 8 3 35 2 0 2 3 3 9 9 3
2 5 35 3 8 3 35 2 0 2 3 3 9 9 3
2 5 35 4 8 3 35 2 0 2 3 3 9 9 3
2 5 35 5 8 3 35 2 0 2 3 3 9 9 3
2 5 35 6 8 3 35 2 0 2 3 3 9 9 3
2 5 35 7 8 3 35 2 0 2 3 3 9 9 3
2 5 35 8 8 3 35 2 0 2 3 3 9 9 3
2 5 35 N 8 3 35 2 0 2 3 3 9 9 3
3 5 42 0 6 4 42 0 1 3 3 3 9 9 3
3 5 42 1 6 4 42 0 1 3 3 3 9 9 3
3 5 42 2 6 4 42 0 1 3 3 3 9 9 3
3 5 42 3 6 4 42 0 1 3 3 3 9 9 3
3 5 42 4 6 4 42 0 1 3 3 3 9 9 3
3 5 42 5 6 4 42 0 1 3 3 3 9 9 3
3 5 42 6 6 4 42 0 1 3 3 3 9 9 3
3 5 42 7 6 4 42 0 1 3 3 3 9 9 3
3 5 42 8 6 4 42 0 1 3 3 3 9 9 3
3 5 42 N 6 4 42 0 1 3 3 3 9 9 3
4 5 43 0 7 4 43 1 1 4 3 3 9 9 3
4 5 43 1 7 4 43 1 1 4 3 3 9 9 3
4 5 43 2 7 4 43 1 1 4 3 3 9 9 3
4 5 43 3 7 4 43 1 1 4 3 3 9 9 3
4 5 43 4 7 4 43 1 1 4 3 3 9 9 3
4 5 43 5 7 4 43 1 1 4 3 3 9 9 3
4 5 43 6 7 4 43 1 1 4 3 3 9 9 3
4 5 43 7 7 4 43 1 1 4 3 3 9 9 3
4 5 43 8 7 4 43 1 1 4 3 3 9 9 3
4 5 43 N 7 4 43 1 1 4 3 3 9 9 3
5 5 44 0 8 4 44 2 1 5 3 3 9 9 3
5 5 44 1 8 4 44 2 1 5 3 3 9 9 3
5 5 44 2 8 4 44 2 1 5 3 3 9 9 3
5 5 44 3 8 4 44 2 1 5 3 3 9 9 3
5 5 44 4 8 4 44 2 1 5 3 3 9 9 3
5 5 44 5 8 4 44 2 1 5 3 3 9 9 3
5 5 44 6 8 4 44 2 1 5 3 3 9 9 3
5 5 44 7 8 4 44 2 1 5 3 3 9 9 3
5 5 44 8 8 4 44 2 1 5 3 3 9 9 3
5 5 44 N 8 4 44 2 1 5 3 3 9 9 3
6 5 51 0 6 5 51 0 2 6 3 3 9 9 3
6 5 51 1 6 5 51 0 2 6 3 3 9 9 3
6 5 51 2 6 5 51 0 2 6 3 3 9 9 3
6 5 51 3 6 5 51 0 2 6 3 3 9 9 3
6 5 51 4 6 5 51 0 2 6 3 3 9 9 3
6 5 51 5 6 5 51 0 2 6 3 3 9 9 3
6 5 51 6 6 5 51 0 2 6 3 3 9 9 3
6 5 51 7 6 5 51 0 2 6 3 3 9 9 3
6 5 51 8 6 5 51 0 2 6 3 3 9 9 3
6 5 51 N 6 5 51 0 2 6 3 3 9 9 3
7 5 52 0 7 5 52 1 2 7 3 3 9 9 3
7 5 52 1 7 5 52 1 2 7 3 3 9 9 3
7 5 52 2 7 5 52 1 2 7 3 3 9 9 3
7 5 52 3 7 5 52 1 2 7 3 3 9 9 3
7 5 52 4 7 5 52 1 2 7 3 3 9 9 3
7 5 52 5 7 5 52 1 2 7 3 3 9 9 3
7 5 52 6 7 5 52 1 2 7 3 3 9 9 3
7 5 52 7 7 5 52 1 2 7 3 3 9 9 3
7 5 52 8 7 5 52 1 2 7 3 3 9 9 3
7 5 52 N 7 5 52 1 2 7 3 3 9 9 3
8 5 53 0 8 5 53 2 2 8 3 3 9 9 3
8 5 53 1 8 5 53 2 2 8 3 3 9 9 3
8 5 53 2 8 5 53 2 2 8 3 3 9 9 3
8 5 53 3 8 5 53 2 2 8 3 3 9 9 3
8 5 53 4 8 5 53 2 2 8 3 3 9 9 3
8 5 53 5 8 5 53 2 2 8 3 3 9 9 3
8 5 53 6 8 5 53 2 2 8 3 3 9 9 3
8 5 53 7 8 5 53 2 2 8 3 3 9 9 3
8 5 53 8 8 5 53 2 2 8 3 3 9 9 3
8 5 53 N 8 5 53 2 2 8 3 3 9 9 3
0 5 114 0 6 3 114 0 0 0 3 3 9 9 3
0 5 114 1 6 3 114 0 0 0 3 3 9 9 3
0 5 114 2 6 3 114 0 0 0 3 3 9 9 3
0 5 114 3 6 3 114 0 0 0 3 3 9 9 3
0 5 114 4 6 3 114 0 0 0 3 3 9 9 3
0 5 114 5 6 3 114 0 0 0 3 3 9 9 3
0 5 114 6 6 3 114 0 0 0 3 3 9 9 3
0 5 114 7 6 3 114 0 0 0 3 3 9 9 3
0 5 114 8 6 3 114 0 0 0 3 3 9 9 3
0 5 114 N 6 3 114 0 0 0 3 3 9 9 3
1 5 115 0 7 3 115 1 0 1 3 3 9 9 3
1 5 115 1 7 3 115 1 0 1 3 3 9 9 3
1 5 115 2 7 3 115 1 0 1 3 3 9 9 3
1 5 115 3 7 3 115 1 0 1 3 3 9 9 3
1 5 115 4 7 3 115 1 0 1 3 3 9 9 3
1 5 115 5 7 3 115 1 0 1 3 3 9 9 3
1 5 115 6 7 3 115 1 0 1 3 3 9 9 3
1 5 115 7 7 3 115 1 0 1 3 3 9 9 3
1 5 115 8 7 3 115 1 0 1 3 3 9 9 3
1 5 115 N 7 3 115 1 0 1 3 3 9 9 3
2 5 116 0 8 3 116 2 0 2 3 3 9 9 3
2 5 116 1 8 3 116 2 0 2 3 3 9 9 3
2 5 116 2 8 3 116 2 0 2 3 3 9 9 3
2 5 116 3 8 3 116 2 0 2 3 3 9 9 3
2 5 116 4 8 3 116 2 0 2 3 3 9 9 3
2 5 116 5 8 3 116 2 0 2 3 3 9 9 3
2 5 116 6 8 3 116 2 0 2 3 3 9 9 3
2 5 116 7 8 3 116 2 0 2 3 3 9 9 3
2 5 116 8 8 3 116 2 0 2 3 3 9 9 3
2 5 116 N 8 3 116 2 0 2 3 3 9 9 3
3 5 123 0 6 4 123 0 1 3 3 3 9 9 3
3 5 123 1 6 4 123 0 1 3 3 3 9 9 3
3 5 123 2 6 4 123 0 1 3 3 3 9 9 3
3 5 123 3 6 4 123 0 1 3 3 3 9 9 3
3 5 123 4 6 4 123 0 1 3 3 3 9 9 3
3 5 123 5 6 4 123 0 1 3 3 3 9 9 3
3 5 123 6 6 4 123 0 1 3 3 3 9 9 3
3 5 123 7 6 4 123 0 1 3 3 3 9 9 3
3 5 123 8 6 4 123 0 1 3 3 3 9 9 3
3 5 123 N 6 4 123 0 1 3 3 3 9 9 3
4 5 124 0 7 4 124 1 1 4 3 3 9 9 3
4 5 124 1 7 4 124 1 1 4 3 3 9 9 3
4 5 124 2 7 4 124 1 1 4 3 3 9 9 3
4 5 124 3 7 4 124 1 1 4 3 3 9 9 3
4 5 124 4 7 4 124 1 1 4 3 3 9 9 3
4 5 124 5 7 4 124 1 1 4 3 3 9 9 3
4 5 124 6 7 4 124 1 1 4 3 3 9 9 3
4 5 124 7 7 4 124 1 1 4 3 3 9 9 3
4 5 124 8 7 4 124 1 1 4 3 3 9 9 3
4 5 124 N 7 4 124 1 1 4 3 3 9 9 3
5 5 125 0 8 4 125 2 1 5 3 3 9 9 3
5 5 125 1 8 4 125 2 1 5 3 3 9 9 3
5 5 125 2 8 4 125 2 1 5 3 3 9 9 3
5 5 125 3 8 4 125 2 1 5 3 3 9 9 3
5 5 125 4 8 4 125 2 1 5 3 3 9 9 3
5 5 125 5 8 4 125 2 1 5 3 3 9 9 3
5 5 125 6 8 4 125 2 1 5 3 3 9 9 3
5 5 125 7 8 4 125 2 1 5 3 3 9 9 3
5 5 125 8 8 4 125 2 1 5 3 3 9 9 3
5 5 125 N 8 4 125 2 1 5 3 3 9 9 3
6 5 132 0 6 5 132 0 2 6 3 3 9 9 3
6 5 132 1 6 5 132 0 2 6 3 3 9 9 3
6 5 132 2 6 5 132 0 2 6 3 3 9 9 3
6 5 132 3 6 5 132 0 2 6 3 3 9 9 3
6 5 132 4 6 5 132 0 2 6 3 3 9 9 3
6 5 132 5 6 5 132 0 2 6 3 3 9 9 3
6 5 132 6 6 5 132 0 2 6 3 3 9 9 3
6 5 132 7 6 5 132 0 2 6 3 3 9 9 3
6 5 132 8 6 5 132 0 2 6 3 3 9 9 3
6 5 132 N 6 5 132 0 2 6 3 3 9 9 3
7 5 133 0 7 5 133 1 2 7 3 3 9 9 3
7 5 133 1 7 5 133 1 2 7 3 3 9 9 3
7 5 133 2 7 5 133 1 2 7 3 3 9 9 3
7 5 133 3 7 5 133 1 2 7 3 3 9 9 3
7 5 133 4 7 5 133 1 2 7 3 3 9 9 3
7 5 133 5 7 5 133 1 2 7 3 3 9 9 3
7 5 133 6 7 5 133 1 2 7 3 3 9 9 3
7 5 133 7 7 5 133 1 2 7 3 3 9 9 3
7 5 133 8 7 5 133 1 2 7 3 3 9 9 3
7 5 133 N 7 5 133 1 2 7 3 3 9 9 3
8 5 134 0 8 5 134 2 2 8 3 3 9 9 3
8 5 134 1 8 5 134 2 2 8 3 3 9 9 3
8 5 134 2 8 5 134 2 2 8 3 3 9 9 3
8 5 134 3 8 5 134 2 2 8 3 3 9 9 3
8 5 134 4 8 5 134 2 2 8 3 3 9 9 3
8 5 134 5 8 5 134 2 2 8 3 3 9 9 3
8 5 134 6 8 5 134 2 2 8 3 3 9 9 3
8 5 134 7 8 5 134 2 2 8 3 3 9 9 3
8 5 134 8 8 5 134 2 2 8 3 3 9 9 3
8 5 134 N 8 5 134 2 2 8 3 3 9 9 3
0 5 195 0 6 3 195 0 0 0 3 3 9 9 3
0 5 195 1 6 3 195 0 0 0 3 3 9 9 3
0 5 195 2 6 3 195 0 0 0 3 3 9 9 3
0 5 195 3 6 3 195 0 0 0 3 3 9 9 3
0 5 195 4 6 3 195 0 0 0 3 3 9 9 3
0 5 195 5 6 3 195 0 0 0 3 3 9 9 3
0 5 195 6 6 3 195 0 0 0 3 3 9 9 3
0 5 195 7 6 3 195 0 0 0 3 3 9 9 3
0 5 195 8 6 3 195 0 0 0 3 3 9 9 3
0 5 195 N 6 3 195 0 0 0 3 3 9 9 3
1 5 196 0 7 3 196 1 0 1 3 3 9 9 3
1 5 196 1 7 3 196 1 0 1 3 3 9 9 3
1 5 196 2 7 3 196 1 0 1 3 3 9 9 3
1 5 196 3 7 3 196 1 0 1 3 3 9 9 3
1 5 196 4 7 3 196 1 0 1 3 3 9 9 3
1 5 196 5 7 3 196 1 0 1 3 3 9 9 3
1 5 196 6 7 3 196 1 0 1 3 3 9 9 3
1 5 196 7 7 3 196 1 0 1 3 3 9 9 3
1 5 196 8 7 3 196 1 0 1 3 3 9 9 3
1 5 196 N 7 3 196 1 0 1 3 3 9 9 3
2 5 197 0 8 3 197 2 0 2 3 3 9 9 3
2 5 197 1 8 3 197 2 0 2 3 3 9 9 3
2 5 197 2 8 3 197 2 0 2 3 3 9 9 3
2 5 197 3 8 3 197 2 0 2 3 3 9 9 3
2 5 197 4 8 3 197 2 0 2 3 3 9 9 3
2 5 197 5 8 3 197 2 0 2 3 3 9 9 3
2 5 197 6 8 3 197 2 0 2 3 3 9 9 3
2 5 197 7 8 3 197 2 0 2 3 3 9 9 3
2 5 197 8 8 3 197 2 0 2 3 3 9 9 3
2 5 197 N 8 3 197 2 0 2 3 3 9 9 3
3 5 204 0 6 4 204 0 1 3 3 3 9 9 3
3 5 204 1 6 4 204 0 1 3 3 3 9 9 3
3 5 204 2 6 4 204 0 1 3 3 3 9 9 3
3 5 204 3 6 4 204 0 1 3 3 3 9 9 3
3 5 204 4 6 4 204 0 1 3 3 3 9 9 3
3 5 204 5 6 4 204 0 1 3 3 3 9 9 3
3 5 204 6 6 4 204 0 1 3 3 3 9 9 3
3 5 204 7 6 4 204 0 1 3 3 3 9 9 3
3 5 204 8 6 4 204 0 1 3 3 3 9 9 3
3 5 204 N 6 4 204 0 1 3 3 3 9 9 3
4 5 205 0 7 4 205 1 1 4 3 3 9 9 3
4 5 205 1 7 4 205 1 1 4 3 3 9 9 3
4 5 205 2 7 4 205 1 1 4 3 3 9 9 3
4 5 205 3 7 4 205 1 1 4 3 3 9 9 3
4 5 205 4 7 4 205 1 1 4 3 3 9 9 3
4 5 205 5 7 4 205 1 1 4 3 3 9 9 3
4 5 205 6 7 4 205 1 1 4 3 3 9 9 3
4 5 205 7 7 4 205 1 1 4 3 3 9 9 3
4 5 205 8 7 4 205 1 1 4 3 3 9 9 3
4 5 205 N 7 4 205 1 1 4 3 3 9 9 3
5 5 206 0 8 4 206 2 1 5 3 3 9 9 3
5 5 206 1 8 4 206 2 1 5 3 3 9 9 3
5 5 206 2 8 4 206 2 1 5 3 3 9 9 3
5 5 206 3 8 4 206 2 1 5 3 3 9 9 3
5 5 206 4 8 4 206 2 1 5 3 3 9 9 3
5 5 206 5 8 4 206 2 1 5 3 3 9 9 3
5 5 206 6 8 4 206 2 1 5 3 3 9 9 3
5 5 206 7 8 4 206 2 1 5 3 3 9 9 3
5 5 206 8 8 4 206 2 1 5 3 3 9 9 3
5 5 206 N 8 4 206 2 1 5 3 3 9 9 3
6 5 213 0 6 5 213 0 2 6 3 3 9 9 3
6 5 213 1 6 5 213 0 2 6 3 3 9 9 3
6 5 213 2 6 5 213 0 2 6 3 3 9 9 3
6 5 213 3 6 5 213 0 2 6 3 3 9 9 3
6 5 213 4 6 5 213 0 2 6 3 3 9 9 3
6 5 213 5 6 5 213 0 2 6 3 3 9 9 3
6 5 213 6 6 5 213 0 2 6 3 3 9 9 3
6 5 213 7 6 5 213 0 2 6 3 3 9 9 3
6 5 213 8 6 5 213 0 2 6 3 3 9 9 3
6 5 213 N 6 5 213 0 2 6 3 3 9 9 3
7 5 214 0 7 5 214 1 2 7 3 3 9 9 3
7 5 214 1 7 5 214 1 2 7 3 3 9 9 3
7 5 214 2 7 5 214 1 2 7 3 3 9 9 3
7 5 214 3 7 5 214 1 2 7 3 3 9 9 3
7 5 214 4 7 5 214 1 2 7 3 3 9 9 3
7 5 214 5 7 5 214 1 2 7 3 3 9 9 3
7 5 214 6 7 5 214 1 2 7 3 3 9 9 3
7 5 214 7 7 5 214 1 2 7 3 3 9 9 3
7 5 214 8 7 5 214 1 2 7 3 3 9 9 3
7 5 214 N 7 5 214 1 2 7 3 3 9 9 3
8 5 215 0 8 5 215 2 2 8 3 3 9 9 3
8 5 215 1 8 5 215 2 2 8 3 3 9 9 3
8 5 215 2 8 5 215 2 2 8 3 3 9 9 3
8 5 215 3 8 5 215 2 2 8 3 3 9 9 3
8 5 215 4 8 5 215 2 2 8 3 3 9 9 3
8 5 215 5 8 5 215 2 2 8 3 3 9 9 3
8 5 215 6 8 5 215 2 2 8 3 3 9 9 3
8 5 215 7 8 5 215 2 2 8 3 3 9 9 3
8 5 215 8 8 5 215 2 2 8 3 3 9 9 3
8 5 215 N 8 5 215 2 2 8 3 3 9 9 3
0 6 54 0 0 6 54 0 0 0 3 3 9 9 3
0 6 54 1 0 6 54 0 0 0 3 3 9 9 3
0 6 54 2 0 6 54 0 0 0 3 3 9 9 3
0 6 54 3 0 6 54 0 0 0 3 3 9 9 3
0 6 54 4 0 6 54 0 0 0 3 3 9 9 3
0 6 54 5 0 6 54 0 0 0 3 3 9 9 3
0 6 54 6 0 6 54 0 0 0 3 3 9 9 3
0 6 54 7 0 6 54 0 0 0 3 3 9 9 3
0 6 54 8 0 6 54 0 0 0 3 3 9 9 3
0 6 54 N 0 6 54 0 0 0 3 3 9 9 3
1 6 55 0 1 6 55 1 0 1 3 3 9 9 3
1 6 55 1 1 6 55 1 0 1 3 3 9 9 3
1 6 55 2 1 6 55 1 0 1 3 3 9 9 3
1 6 55 3 1 6 55 1 0 1 3 3 9 9 3
1 6 55 4 1 6 55 1 0 1 3 3 9 9 3
1 6 55 5 1 6 55 1 0 1 3 3 9 9 3
1 6 55 6 1 6 55 1 0 1 3 3 9 9 3
1 6 55 7 1 6 55 1 0 1 3 3 9 9 3
1 6 55 8 1 6 55 1 0 1 3 3 9 9 3
1 6 55 N 1 6 55 1 0 1 3 3 9 9 3
2 6 56 0 2 6 56 2 0 2 3 3 9 9 3
2 6 56 1 2 6 56 2 0 2 3 3 9 9 3
2 6 56 2 2 6 56 2 0 2 3 3 9 9 3
2 6 56 3 2 6 56 2 0 2 3 3 9 9 3
2 6 56 4 2 6 56 2 0 2 3 3 9 9 3
2 6 56 5 2 6 56 2 0 2 3 3 9 9 3
2 6 56 6 2 6 56 2 0 2 3 3 9 9 3
2 6 56 7 2 6 56 2 0 2 3 3 9 9 3
2 6 56 8 2 6 56 2 0 2 3 3 9 9 3
2 6 56 N 2 6 56 2 0 2 3 3 9 9 3
3 6 63 0 0 7 63 0 1 3 3 3 9 9 3
3 6 63 1 0 7 63 0 1 3 3 3 9 9 3
3 6 63 2 0 7 63 0 1 3 3 3 9 9 3
3 6 63 3 0 7 63 0 1 3 3 3 9 9 3
3 6 63 4 0 7 63 0 1 3 3 3 9 9 3
3 6 63 5 0 7 63 0 1 3 3 3 9 9 3
3 6 63 6 0 7 63 0 1 3 3 3 9 9 3
3 6 63 7 0 7 63 0 1 3 3 3 9 9 3
3 6 63 8 0 7 63 0 1 3 3 3 9 9 3
3 6 63 N 0 7 63 0 1 3 3 3 9 9 3
4 6 64 0 1 7 64 1 1 4 3 3 9 9 3
4 6 64 1 1 7 64 1 1 4 3 3 9 9 3
4 6 64 2 1 7 64 1 1 4 3 3 9 9 3
4 6 64 3 1 7 64 1 1 4 3 3 9 9 3
4 6 64 4 1 7 64 1 1 4 3 3 9 9 3
4 6 64 5 1 7 64 1 1 4 3 3 9 9 3
4 6 64 6 1 7 64 1 1 4 3 3 9 9 3
4 6 64 7 1 7 64 1 1 4 3 3 9 9 3
4 6 64 8 1 7 64 1 1 4 3 3 9 9 3
4 6 64 N 1 7 64 1 1 4 3 3 9 9 3
5 6 65 0 2 7 65 2 1 5 3 3 9 9 3
5 6 65 1 2 7 65 2 1 5 3 3 9 9 3
5 6 65 2 2 7 65 2 1 5 3 3 9 9 3
5 6 65 3 2 7 65 2 1 5 3 3 9 9 3
5 6 65 4 2 7 65 2 1 5 3 3 9 9 3
5 6 65 5 2 7 65 2 1 5 3 3 9 9 3
5 6 65 6 2 7 65 2 1 5 3 3 9 9 3
5 6 65 7 2 7 65 2 1 5 3 3 9 9 3
5 6 65 8 2 7 65 2 1 5 3 3 9 9 3
5 6 65 N 2 7 65 2 1 5 3 3 9 9 3
6 6 72 0 0 8 72 0 2 6 3 3 9 9 3
6 6 72 1 0 8 72 0 2 6 3 3 9 9 3
6 6 72 2 0 8 72 0 2 6 3 3 9 9 3
6 6 72 3 0 8 72 0 2 6 3 3 9 9 3
6 6 72 4 0 8 72 0 2 6 3 3 9 9 3
6 6 72 5 0 8 72 0 2 6 3 3 9 9 3
6 6 72 6 0 8 72 0 2 6 3 3 9 9 3
6 6 72 7 0 8 72 0 2 6 3 3 9 9 3
6 6 72 8 0 8 72 0 2 6 3 3 9 9 3
6 6 72 N 0 8 72 0 2 6 3 3 9 9 3
7 6 73 0 1 8 73 1 2 7 3 3 9 9 3
7 6 73 1 1 8 73 1 2 7 3 3 9 9 3
7 6 73 2 1 8 73 1 2 7 3 3 9 9 3
7 6 73 3 1 8 73 1 2 7 3 3 9 9 3
7 6 73 4 1 8 73 1 2 7 3 3 9 9 3
7 6 73 5 1 8 73 1 2 7 3 3 9 9 3
7 6 73 6 1 8 73 1 2 7 3 3 9 9 3
7 6 73 7 1 8 73 1 2 7 3 3 9 9 3
7 6 73 8 1 8 73 1 2 7 3 3 9 9 3
7 6 73 N 1 8 73 1 2 7 3 3 9 9 3
8 6 74 0 2 8 74 2 2 8 3 3 9 9 3
8 6 74 1 2 8 74 2 2 8 3 3 9 9 3
8 6 74 2 2 8 74 2 2 8 3 3 9 9 3
8 6 74 3 2 8 74 2 2 8 3 3 9 9 3
8 6 74 4 2 8 74 2 2 8 3 3 9 9 3
8 6 74 5 2 8 74 2 2 8 3 3 9 9 3
8 6 74 6 2 8 74 2 2 8 3 3 9 9 3
8 6 74 7 2 8 74 2 2 8 3 3 9 9 3
8 6 74 8 2 8 74 2 2 8 3 3 9 9 3
8 6 74 N 2 8 74 2 2 8 3 3 9 9 3
0 6 135 0 0 6 135 0 0 0 3 3 9 9 3
0 6 135 1 0 6 135 0 0 0 3 3 9 9 3
0 6 135 2 0 6 135 0 0 0 3 3 9 9 3
0 6 135 3 0 6 135 0 0 0 3 3 9 9 3
0 6 135 4 0 6 135 0 0 0 3 3 9 9 3
0 6 135 5 0 6 135 0 0 0 3 3 9 9 3
0 6 135 6 0 6 135 0 0 0 3 3 9 9 3
0 6 135 7 0 6 135 0 0 0 3 3 9 9 3
0 6 135 8 0 6 135 0 0 0 3 3 9 9 3
0 6 135 N 0 6 135 0 0 0 3 3 9 9 3
1 6 136 0 1 6 136 1 0 1 3 3 9 9 3
1 6 136 1 1 6 136 1 0 1 3 3 9 9 3
1 6 136 2 1 6 136 1 0 1 3 3 9 9 3
1 6 136 3 1 6 136 1 0 1 3 3 9 9 3
1 6 136 4 1 6 136 1 0 1 3 3 9 9 3
1 6 136 5 1 6 136 1 0 1 3 3 9 9 3
1 6 136 6 1 6 136 1 0 1 3 3 9 9 3
1 6 136 7 1 6 136 1 0 1 3 3 9 9 3
1 6 136 8 1 6 136 1 0 1 3 3 9 9 3
1 6 136 N 1 6 136 1 0 1 3 3 9 9 3
2 6 137 0 2 6 137 2 0 2 3 3 9 9 3
2 6 137 1 2 6 137 2 0 2 3 3 9 9 3
2 6 137 2 2 6 137 2 0 2 3 3 9 9 3
2 6 137 3 2 6 137 2 0 2 3 3 9 9 3
2 6 137 4 2 6 137 2 0 2 3 3 9 9 3
2 6 137 5 2 6 137 2 0 2 3 3 9 9 3
2 6 137 6 2 6 137 2 0 2 3 3 9 9 3
2 6 137 7 2 6 137 2 0 2 3 3 9 9 3
2 6 137 8 2 6 137 2 0 2 3 3 9 9 3
2 6 137 N 2 6 137 2 0 2 3 3 9 9 3
3 6 144 0 0 7 144 0 1 3 3 3 9 9 3
3 6 144 1 0 7 144 0 1 3 3 3 9 9 3
3 6 144 2 0 7 144 0 1 3 3 3 9 9 3
3 6 144 3 0 7 144 0 1 3 3 3 9 9 3
3 6 144 4 0 7 144 0 1 3 3 3 9 9 3
3 6 144 5 0 7 144 0 1 3 3 3 9 9 3
3 6 144 6 0 7 144 0 1 3 3 3 9 9 3
3 6 144 7 0 7 144 0 1 3 3 3 9 9 3
3 6 144 8 0 7 144 0 1 3 3 3 9 9 3
3 6 144 N 0 7 144 0 1 3 3 3 9 9 3
4 6 145 0 1 7 145 1 1 4 3 3 9 9 3
4 6 145 1 1 7 145 1 1 4 3 3 9 9 3
4 6 145 2 1 7 145 1 1 4 3 3 9 9 3
4 6 145 3 1 7 145 1 1 4 3 3 9 9 3
4 6 145 4 1 7 145 1 1 4 3 3 9 9 3
4 6 145 5 1 7 145 1 1 4 3 3 9 9 3
4 6 145 6 1 7 145 1 1 4 3 3 9 9 3
4 6 145 7 1 7 145 1 1 4 3 3 9 9 3
4 6 145 8 1 7 145 1 1 4 3 3 9 9 3
4 6 145 N 1 7 145 1 1 4 3 3 9 9 3
5 6 146 0 2 7 146 2 1 5 3 3 9 9 3
5 6 146 1 2 7 146 2 1 5 3 3 9 9 3
5 6 146 2 2 7 146 2 1 5 3 3 9 9 3
5 6 146 3 2 7 146 2 1 5 3 3 9 9 3
5 6 146 4 2 7 146 2 1 5 3 3 9 9 3
5 6 146 5 2 7 146 2 1 5 3 3 9 9 3
5 6 146 6 2 7 146 2 1 5 3 3 9 9 3
5 6 146 7 2 7 146 2 1 5 3 3 9 9 3
5 6 146 8 2 7 146 2 1 5 3 3 9 9 3
5 6 146 N 2 7 146 2 1 5 3 3 9 9 3
6 6 153 0 0 8 153 0 2 6 3 3 9 9 3
6 6 153 1 0 8 153 0 2 6 3 3 9 9 3
6 6 153 2 0 8 153 0 2 6 3 3 9 9 3
6 6 153 3 0 8 153 0 2 6 3 3 9 9 3
6 6 153 4 0 8 153 0 2 6 3 3 9 9 3
6 6 153 5 0 8 153 0 2 6 3 3 9 9 3
6 6 153 6 0 8 153 0 2 6 3 3 9 9 3
6 6 153 7 0 8 153 0 2 6 3 3 9 9 3
6 6 153 8 0 8 153 0 2 6 3 3 9 9 3
6 6 153 N 0 8 153 0 2 6 3 3 9 9 3
7 6 154 0 1 8 154 1 2 7 3 3 9 9 3
7 6 154 1 1 8 154 1 2 7 3 3 9 9 3
7 6 154 2 1 8 154 1 2 7 3 3 9 9 3
7 6 154 3 1 8 154 1 2 7 3 3 9 9 3
7 6 154 4 1 8 154 1 2 7 3 3 9 9 3
7 6 154 5 1 8 154 1 2 7 3 3 9 9 3
7 6 154 6 1 8 154 1 2 7 3 3 9 9 3
7 6 154 7 1 8 154 1 2 7 3 3 9 9 3
7 6 154 8 1 8 154 1 2 7 3 3 9 9 3
7 6 154 N 1 8 154 1 2 7 3 3 9 9 3
8 6 155 0 2 8 155 2 2 8 3 3 9 9 3
8 6 155 1 2 8 155 2 2 8 3 3 9 9 3
8 6 155 2 2 8 155 2 2 8 3 3 9 9 3
8 6 155 3 2 8 155 2 2 8 3 3 9 9 3
8 6 155 4 2 8 155 2 2 8 3 3 9 9 3
8 6 155 5 2 8 155 2 2 8 3 3 9 9 3
8 6 155 6 2 8 155 2 2 8 3 3 9 9 3
8 6 155 7 2 8 155 2 2 8 3 3 9 9 3
8 6 155 8 2 8 155 2 2 8 3 3 9 9 3
8 6 155 N 2 8 155 2 2 8 3 3 9 9 3
0 6 216 0 0 6 216 0 0 0 3 3 9 9 3
0 6 216 1 0 6 216 0 0 0 3 3 9 9 3
0 6 216 2 0 6 216 0 0 0 3 3 9 9 3
0 6 216 3 0 6 216 0 0 0 3 3 9 9 3
0 6 216 4 0 6 216 0 0 0 3 3 9 9 3
0 6 216 5 0 6 216 0 0 0 3 3 9 9 3
0 6 216 6 0 6 216 0 0 0 3 3 9 9 3
0 6 216 7 0 6 216 0 0 0 3 3 9 9 3
0 6 216 8 0 6 216 0 0 0 3 3 9 9 3
0 6 216 N 0 6 216 0 0 0 3 3 9 9 3
1 6 217 0 1 6 217 1 0 1 3 3 9 9 3
1 6 217 1 1 6 217 1 0 1 3 3 9 9 3
1 6 217 2 1 6 217 1 0 1 3 3 9 9 3
1 6 217 3 1 6 217 1 0 1 3 3 9 9 3
1 6 217 4 1 6 217 1 0 1 3 3 9 9 3
1 6 217 5 1 6 217 1 0 1 3 3 9 9 3
1 6 217 6 1 6 217 1 0 1 3 3 9 9 3
1 6 217 7 1 6 217 1 0 1 3 3 9 9 3
1 6 217 8 1 6 217 1 0 1 3 3 9 9 3
1 6 217 N 1 6 217 1 0 1 3 3 9 9 3
2 6 218 0 2 6 218 2 0 2 3 3 9 9 3
2 6 218 1 2 6 218 2 0 2 3 3 9 9 3
2 6 218 2 2 6 218 2 0 2 3 3 9 9 3
2 6 218 3 2 6 218 2 0 2 3 3 9 9 3
2 6 218 4 2 6 218 2 0 2 3 3 9 9 3
2 6 218 5 2 6 218 2 0 2 3 3 9 9 3
2 6 218 6 2 6 218 2 0 2 3 3 9 9 3
2 6 218 7 2 6 218 2 0 2 3 3 9 9 3
2 6 218 8 2 6 218 2 0 2 3 3 9 9 3
2 6 218 N 2 6 218 2 0 2 3 3 9 9 3
3 6 225 0 0 7 225 0 1 3 3 3 9 9 3
3 6 225 1 0 7 225 0 1 3 3 3 9 9 3
3 6 225 2 0 7 225 0 1 3 3 3 9 9 3
3 6 225 3 0 7 225 0 1 3 3 3 9 9 3
3 6 225 4 0 7 225 0 1 3 3 3 9 9 3
3 6 225 5 0 7 225 0 1 3 3 3 9 9 3
3 6 225 6 0 7 225 0 1 3 3 3 9 9 3
3 6 225 7 0 7 225 0 1 3 3 3 9 9 3
3 6 225 8 0 7 225 0 1 3 3 3 9 9 3
3 6 225 N 0 7 225 0 1 3 3 3 9 9 3
4 6 226 0 1 7 226 1 1 4 3 3 9 9 3
4 6 226 1 1 7 226 1 1 4 3 3 9 9 3
4 6 226 2 1 7 226 1 1 4 3 3 9 9 3
4 6 226 3 1 7 226 1 1 4 3 3 9 9 3
4 6 226 4 1 7 226 1 1 4 3 3 9 9 3
4 6 226 5 1 7 226 1 1 4 3 3 9 9 3
4 6 226 6 1 7 226 1 1 4 3 3 9 9 3
4 6 226 7 1 7 226 1 1 4 3 3 9 9 3
4 6 226 8 1 7 226 1 1 4 3 3 9 9 3
4 6 226 N 1 7 226 1 1 4 3 3 9 9 3
5 6 227 0 2 7 227 2 1 5 3 3 9 9 3
5 6 227 1 2 7 227 2 1 5 3 3 9 9 3
5 6 227 2 2 7 227 2 1 5 3 3 9 9 3
5 6 227 3 2 7 227 2 1 5 3 3 9 9 3
5 6 227 4 2 7 227 2 1 5 3 3 9 9 3
5 6 227 5 2 7 227 2 1 5 3 3 9 9 3
5 6 227 6 2 7 227 2 1 5 3 3 9 9 3
5 6 227 7 2 7 227 2 1 5 3 3 9 9 3
5 6 227 8 2 7 227 2 1 5 3 3 9 9 3
5 6 227 N 2 7 227 2 1 5 3 3 9 9 3
6 6 234 0 0 8 234 0 2 6 3 3 9 9 3
6 6 234 1 0 8 234 0 2 6 3 3 9 9 3
6 6 234 2 0 8 234 0 2 6 3 3 9 9 3
6 6 234 3 0 8 234 0 2 6 3 3 9 9 3
6 6 234 4 0 8 234 0 2 6 3 3 9 9 3
6 6 234 5 0 8 234 0 2 6 3 3 9 9 3
6 6 234 6 0 8 234 0 2 6 3 3 9 9 3
6 6 234 7 0 8 234 0 2 6 3 3 9 9 3
6 6 234 8 0 8 234 0 2 6 3 3 9 9 3
6 6 234 N 0 8 234 0 2 6 3 3 9 9 3
7 6 235 0 1 8 235 1 2 7 3 3 9 9 3
7 6 235 1 1 8 235 1 2 7 3 3 9 9 3
7 6 235 2 1 8 235 1 2 7 3 3 9 9 3
7 6 235 3 1 8 235 1 2 7 3 3 9 9 3
7 6 235 4 1 8 235 1 2 7 3 3 9 9 3
7 6 235 5 1 8 235 1 2 7 3 3 9 9 3
7 6 235 6 1 8 235 1 2 7 3 3 9 9 3
7 6 235 7 1 8 235 1 2 7 3 3 9 9 3
7 6 235 8 1 8 235 1 2 7 3 3 9 9 3
7 6 235 N 1 8 235 1 2 7 3 3 9 9 3
8 6 236 0 2 8 236 2 2 8 3 3 9 9 3
8 6 236 1 2 8 236 2 2 8 3 3 9 9 3
8 6 236 2 2 8 236 2 2 8 3 3 9 9 3
8 6 236 3 2 8 236 2 2 8 3 3 9 9 3
8 6 236 4 2 8 236 2 2 8 3 3 9 9 3
8 6 236 5 2 8 236 2 2 8 3 3 9 9 3
8 6 236 6 2 8 236 2 2 8 3 3 9 9 3
8 6 236 7 2 8 236 2 2 8 3 3 9 9 3
8 6 236 8 2 8 236 2 2 8 3 3 9 9 3
8 6 236 N 2 8 236 2 2 8 3 3 9 9 3
0 7 57 0 3 6 57 0 0 0 3 3 9 9 3
0 7 57 1 3 6 57 0 0 0 3 3 9 9 3
0 7 57 2 3 6 57 0 0 0 3 3 9 9 3
0 7 57 3 3 6 57 0 0 0 3 3 9 9 3
0 7 57 4 3 6 57 0 0 0 3 3 9 9 3
0 7 57 5 3 6 57 0 0 0 3 3 9 9 3
0 7 57 6 3 6 57 0 0 0 3 3 9 9 3
0 7 57 7 3 6 57 0 0 0 3 3 9 9 3
0 7 57 8 3 6 57 0 0 0 3 3 9 9 3
0 7 57 N 3 6 57 0 0 0 3 3 9 9 3
1 7 58 0 4 6 58 1 0 1 3 3 9 9 3
1 7 58 1 4 6 58 1 0 1 3 3 9 9 3
1 7 58 2 4 6 58 1 0 1 3 3 9 9 3
1 7 58 3 4 6 58 1 0 1 3 3 9 9 3
1 7 58 4 4 6 58 1 0 1 3 3 9 9 3
1 7 58 5 4 6 58 1 0 1 3 3 9 9 3
1 7 58 6 4 6 58 1 0 1 3 3 9 9 3
1 7 58 7 4 6 58 1 0 1 3 3 9 9 3
1 7 58 8 4 6 58 1 0 1 3 3 9 9 3
1 7 58 N 4 6 58 1 0 1 3 3 9 9 3
2 7 59 0 5 6 59 2 0 2 3 3 9 9 3
2 7 59 1 5 6 59 2 0 2 3 3 9 9 3
2 7 59 2 5 6 59 2 0 2 3 3 9 9 3
2 7 59 3 5 6 59 2 0 2 3 3 9 9 3
2 7 59 4 5 6 59 2 0 2 3 3 9 9 3
2 7 59 5 5 6 59 2 0 2 3 3 9 9 3
2 7 59 6 5 6 59 2 0 2 3 3 9 9 3
2 7 59 7 5 6 59 2 0 2 3 3 9 9 3
2 7 59 8 5 6 59 2 0 2 3 3 9 9 3
2 7 59 N 5 6 59 2 0 2 3 3 9 9 3
3 7 66 0 3 7 66 0 1 3 3 3 9 9 3
3 7 66 1 3 7 66 0 1 3 3 3 9 9 3
3 7 66 2 3 7 66 0 1 3 3 3 9 9 3
3 7 66 3 3 7 66 0 1 3 3 3 9 9 3
3 7 66 4 3 7 66 0 1 3 3 3 9 9 3
3 7 66 5 3 7 66 0 1 3 3 3 9 9 3
3 7 66 6 3 7 66 0 1 3 3 3 9 9 3
3 7 66 7 3 7 66 0 1 3 3 3 9 9 3
3 7 66 8 3 7 66 0 1 3 3 3 9 9 3
3 7 66 N 3 7 66 0 1 3 3 3 9 9 3
4 7 67 0 4 7 67 1 1 4 3 3 9 9 3
4 7 67 1 4 7 67 1 1 4 3 3 9 9 3
4 7 67 2 4 7 67 1 1 4 3 3 9 9 3
4 7 67 3 4 7 67 1 1 4 3 3 9 9 3
4 7 67 4 4 7 67 1 1 4 3 3 9 9 3
4 7 67 5 4 7 67 1 1 4 3 3 9 9 3
4 7 67 6 4 7 67 1 1 4 3 3 9 9 3
4 7 67 7 4 7 67 1 1 4 3 3 9 9 3
4 7 67 8 4 7 67 1 1 4 3 3 9 9 3
4 7 67 N 4 7 67 1 1 4 3 3 9 9 3
5 7 68 0 5 7 68 2 1 5 3 3 9 9 3
5 7 68 1 5 7 68 2 1 5 3 3 9 9 3
5 7 68 2 5 7 68 2 1 5 3 3 9 9 3
5 7 68 3 5 7 68 2 1 5 3 3 9 9 3
5 7 68 4 5 7 68 2 1 5 3 3 9 9 3
5 7 68 5 5 7 68 2 1 5 3 3 9 9 3
5 7 68 6 5 7 68 2 1 5 3 3 9 9 3
5 7 68 7 5 7 68 2 1 5 3 3 9 9 3
5 7 68 8 5 7 68 2 1 5 3 3 9 9 3
5 7 68 N 5 7 68 2 1 5 3 3 9 9 3
6 7 75 0 3 8 75 0 2 6 3 3 9 9 3
6 7 75 1 3 8 75 0 2 6 3 3 9 9 3
6 7 75 2 3 8 75 0 2 6 3 3 9 9 3
6 7 75 3 3 8 75 0 2 6 3 3 9 9 3
6 7 75 4 3 8 75 0 2 6 3 3 9 9 3
6 7 75 5 3 8 75 0 2 6 3 3 9 9 3
6 7 75 6 3 8 75 0 2 6 3 3 9 9 3
6 7 75 7 3 8 75 0 2 6 3 3 9 9 3
6 7 75 8 3 8 75 0 2 6 3 3 9 9 3
6 7 75 N 3 8 75 0 2 6 3 3 9 9 3
7 7 76 0 4 8 76 1 2 7 3 3 9 9 3
7 7 76 1 4 8 76 1 2 7 3 3 9 9 3
7 7 76 2 4 8 76 1 2 7 3 3 9 9 3
7 7 76 3 4 8 76 1 2 7 3 3 9 9 3
7 7 76 4 4 8 76 1 2 7 3 3 9 9 3
7 7 76 5 4 8 76 1 2 7 3 3 9 9 3
7 7 76 6 4 8 76 1 2 7 3 3 9 9 3
7 7 76 7 4 8 76 1 2 7 3 3 9 9 3
7 7 76 8 4 8 76 1 2 7 3 3 9 9 3
7 7 76 N 4 8 76 1 2 7 3 3 9 9 3
8 7 77 0 5 8 77 2 2 8 3 3 9 9 3
8 7 77 1 5 8 77 2 2 8 3 3 9 9 3
8 7 77 2 5 8 77 2 2 8 3 3 9 9 3
8 7 77 3 5 8 77 2 2 8 3 3 9 9 3
8 7 77 4 5 8 77 2 2 8 3 3 9 9 3
8 7 77 5 5 8 77 2 2 8 3 3 9 9 3
8 7 77 6 5 8 77 2 2 8 3 3 9 9 3
8 7 77 7 5 8 77 2 2 8 3 3 9 9 3
8 7 77 8 5 8 77 2 2 8 3 3 9 9 3
8 7 77 N 5 8 77 2 2 8 3 3 9 9 3
0 7 138 0 3 6 138 0 0 0 3 3 9 9 3
0 7 138 1 3 6 138 0 0 0 3 3 9 9 3
0 7 138 2 3 6 138 0 0 0 3 3 9 9 3
0 7 138 3 3 6 138 0 0 0 3 3 9 9 3
0 7 138 4 3 6 138 0 0 0 3 3 9 9 3
0 7 138 5 3 6 138 0 0 0 3 3 9 9 3
0 7 138 6 3 6 138 0 0 0 3 3 9 9 3
0 7 138 7 3 6 138 0 0 0 3 3 9 9 3
0 7 138 8 3 6 138 0 0 0 3 3 9 9 3
0 7 138 N 3 6 138 0 0 0 3 3 9 9 3
1 7 139 0 4 6 139 1 0 1 3 3 9 9 3
1 7 139 1 4 6 139 1 0 1 3 3 9 9 3
1 7 139 2 4 6 139 1 0 1 3 3 9 9 3
1 7 139 3 4 6 139 1 0 1 3 3 9 9 3
1 7 139 4 4 6 139 1 0 1 3 3 9 9 3
1 7 139 5 4 6 139 1 0 1 3 3 9 9 3
1 7 139 6 4 6 139 1 0 1 3 3 9 9 3
1 7 139 7 4 6 139 1 0 1 3 3 9 9 3
1 7 139 8 4 6 139 1 0 1 3 3 9 9 3
1 7 139 N 4 6 139 1 0 1 3 3 9 9 3
2 7 140 0 5 6 140 2 0 2 3 3 9 9 3
2 7 140 1 5 6 140 2 0 2 3 3 9 9 3
2 7 140 2 5 6 140 2 0 2 3 3 9 9 3
2 7 140 3 5 6 140 2 0 2 3 3 9 9 3
2 7 140 4 5 6 140 2 0 2 3 3 9 9 3
2 7 140 5 5 6 140 2 0 2 3 3 9 9 3
2 7 140 6 5 6 140 2 0 2 3 3 9 9 3
2 7 140 7 5 6 140 2 0 2 3 3 9 9 3
2 7 140 8 5 6 140 2 0 2 3 3 9 9 3
2 7 140 N 5 6 140 2 0 2 3 3 9 9 3
3 7 147 0 3 7 147 0 1 3 3 3 9 9 3
3 7 147 1 3 7 147 0 1 3 3 3 9 9 3
3 7 147 2 3 7 147 0 1 3 3 3 9 9 3
3 7 147 3 3 7 147 0 1 3 3 3 9 9 3
3 7 147 4 3 7 147 0 1 3 3 3 9 9 3
3 7 147 5 3 7 147 0 1 3 3 3 9 9 3
3 7 147 6 3 7 147 0 1 3 3 3 9 9 3
3 7 147 7 3 7 147 0 1 3 3 3 9 9 3
3 7 147 8 3 7 147 0 1 3 3 3 9 9 3
3 7 147 N 3 7 147 0 1 3 3 3 9 9 3
4 7 148 0 4 7 148 1 1 4 3 3 9 9 3
4 7 148 1 4 7 148 1 1 4 3 3 9 9 3
4 7 148 2 4 7 148 1 1 4 3 3 9 9 3
4 7 148 3 4 7 148 1 1 4 3 3 9 9 3
4 7 148 4 4 7 148 1 1 4 3 3 9 9 3
4 7 148 5 4 7 148 1 1 4 3 3 9 9 3
4 7 148 6 4 7 148 1 1 4 3 3 9 9 3
4 7 148 7 4 7 148 1 1 4 3 3 9 9 3
4 7 148 8 4 7 148 1 1 4 3 3 9 9 3
4 7 148 N 4 7 148 1 1 4 3 3 9 9 3
5 7 149 0 5 7 149 2 1 5 3 3 9 9 3
5 7 149 1 5 7 149 2 1 5 3 3 9 9 3
5 7 149 2 5 7 149 2 1 5 3 3 9 9 3
5 7 149 3 5 7 149 2 1 5 3 3 9 9 3
5 7 149 4 5 7 149 2 1 5 3 3 9 9 3
5 7 149 5 5 7 149 2 1 5 3 3 9 9 3
5 7 149 6 5 7 149 2 1 5 3 3 9 9 3
5 7 149 7 5 7 149 2 1 5 3 3 9 9 3
5 7 149 8 5 7 149 2 1 5 3 3 9 9 3
5 7 149 N 5 7 149 2 1 5 3 3 9 9 3
6 7 156 0 3 8 156 0 2 6 3 3 9 9 3
6 7 156 1 3 8 156 0 2 6 3 3 9 9 3
6 7 156 2 3 8 156 0 2 6 3 3 9 9 3
6 7 156 3 3 8 156 0 2 6 3 3 9 9 3
6 7 156 4 3 8 156 0 2 6 3 3 9 9 3
6 7 156 5 3 8 156 0 2 6 3 3 9 9 3
6 7 156 6 3 8 156 0 2 6 3 3 9 9 3
6 7 156 7 3 8 156 0 2 6 3 3 9 9 3
6 7 156 8 3 8 156 0 2 6 3 3 9 9 3
6 7 156 N 3 8 156 0 2 6 3 3 9 9 3
7 7 157 0 4 8 157 1 2 7 3 3 9 9 3
7 7 157 1 4 8 157 1 2 7 3 3 9 9 3
7 7 157 2 4 8 157 1 2 7 3 3 9 9 3
7 7 157 3 4 8 157 1 2 7 3 3 9 9 3
7 7 157 4 4 8 157 1 2 7 3 3 9 9 3
7 7 157 5 4 8 157 1 2 7 3 3 9 9 3
7 7 157 6 4 8 157 1 2 7 3 3 9 9 3
7 7 157 7 4 8 157 1 2 7 3 3 9 9 3
7 7 157 8 4 8 157 1 2 7 3 3 9 9 3
7 7 157 N 4 8 157 1 2 7 3 3 9 9 3
8 7 158 0 5 8 158 2 2 8 3 3 9 9 3
8 7 158 1 5 8 158 2 2 8 3 3 9 9 3
8 7 158 2 5 8 158 2 2 8 3 3 9 9 3
8 7 158 3 5 8 158 2 2 8 3 3 9 9 3
8 7 158 4 5 8 158 2 2 8 3 3 9 9 3
8 7 158 5 5 8 158 2 2 8 3 3 9 9 3
8 7 158 6 5 8 158 2 2 8 3 3 9 9 3
8 7 158 7 5 8 158 2 2 8 3 3 9 9 3
8 7 158 8 5 8 158 2 2 8 3 3 9 9 3
8 7 158 N 5 8 158 2 2 8 3 3 9 9 3
0 7 219 0 3 6 219 0 0 0 3 3 9 9 3
0 7 219 1 3 6 219 0 0 0 3 3 9 9 3
0 7 219 2 3 6 219 0 0 0 3 3 9 9 3
0 7 219 3 3 6 219 0 0 0 3 3 9 9 3
0 7 219 4 3 6 219 0 0 0 3 3 9 9 3
0 7 219 5 3 6 219 0 0 0 3 3 9 9 3
0 7 219 6 3 6 219 0 0 0 3 3 9 9 3
0 7 219 7 3 6 219 0 0 0 3 3 9 9 3
0 7 219 8 3 6 219 0 0 0 3 3 9 9 3
0 7 219 N 3 6 219 0 0 0 3 3 9 9 3
1 7 220 0 4 6 220 1 0 1 3 3 9 9 3
1 7 220 1 4 6 220 1 0 1 3 3 9 9 3
1 7 220 2 4 6 220 1 0 1 3 3 9 9 3
1 7 220 3 4 6 220 1 0 1 3 3 9 9 3
1 7 220 4 4 6 220 1 0 1 3 3 9 9 3
1 7 220 5 4 6 220 1 0 1 3 3 9 9 3
1 7 220 6 4 6 220 1 0 1 3 3 9 9 3
1 7 220 7 4 6 220 1 0 1 3 3 9 9 3
1 7 220 8 4 6 220 1 0 1 3 3 9 9 3
1 7 220 N 4 6 220 1 0 1 3 3 9 9 3
2 7 221 0 5 6 221 2 0 2 3 3 9 9 3
2 7 221 1 5 6 221 2 0 2 3 3 9 9 3
2 7 221 2 5 6 221 2 0 2 3 3 9 9 3
2 7 221 3 5 6 221 2 0 2 3 3 9 9 3
2 7 221 4 5 6 221 2 0 2 3 3 9 9 3
2 7 221 5 5 6 221 2 0 2 3 3 9 9 3
2 7 221 6 5 6 221 2 0 2 3 3 9 9 3
2 7 221 7 5 6 221 2 0 2 3 3 9 9 3
2 7 221 8 5 6 221 2 0 2 3 3 9 9 3
2 7 221 N 5 6 221 2 0 2 3 3 9 9 3
3 7 228 0 3 7 228 0 1 3 3 3 9 9 3
3 7 228 1 3 7 228 0 1 3 3 3 9 9 3
3 7 228 2 3 7 228 0 1 3 3 3 9 9 3
3 7 228 3 3 7 228 0 1 3 3 3 9 9 3
3 7 228 4 3 7 228 0 1 3 3 3 9 9 3
3 7 228 5 3 7 228 0 1 3 3 3 9 9 3
3 7 228 6 3 7 228 0 1 3 3 3 9 9 3
3 7 228 7 3 7 228 0 1 3 3 3 9 9 3
3 7 228 8 3 7 228 0 1 3 3 3 9 9 3
3 7 228 N 3 7 228 0 1 3 3 3 9 9 3
4 7 229 0 4 7 229 1 1 4 3 3 9 9 3
4 7 229 1 4 7 229 1 1 4 3 3 9 9 3
4 7 229 2 4 7 229 1 1 4 3 3 9 9 3
4 7 229 3 4 7 229 1 1 4 3 3 9 9 3
4 7 229 4 4 7 229 1 1 4 3 3 9 9 3
4 7 229 5 4 7 229 1 1 4 3 3 9 9 3
4 7 229 6 4 7 229 1 1 4 3 3 9 9 3
4 7 229 7 4 7 229 1 1 4 3 3 9 9 3
4 7 229 8 4 7 229 1 1 4 3 3 9 9 3
4 7 229 N 4 7 229 1 1 4 3 3 9 9 3
5 7 230 0 5 7 230 2 1 5 3 3 9 9 3
5 7 230 1 5 7 230 2 1 5 3 3 9 9 3
5 7 230 2 5 7 230 2 1 5 3 3 9 9 3
5 7 230 3 5 7 230 2 1 5 3 3 9 9 3
5 7 230 4 5 7 230 2 1 5 3 3 9 9 3
5 7 230 5 5 7 230 2 1 5 3 3 9 9 3
5 7 230 6 5 7 230 2 1 5 3 3 9 9 3
5 7 230 7 5 7 230 2 1 5 3 3 9 9 3
5 7 230 8 5 7 230 2 1 5 3 3 9 9 3
5 7 230 N 5 7 230 2 1 5 3 3 9 9 3
6 7 237 0 3 8 237 0 2 6 3 3 9 9 3
6 7 237 1 3 8 237 0 2 6 3 3 9 9 3
6 7 237 2 3 8 237 0 2 6 3 3 9 9 3
6 7 237 3 3 8 237 0 2 6 3 3 9 9 3
6 7 237 4 3 8 237 0 2 6 3 3 9 9 3
6 7 237 5 3 8 237 0 2 6 3 3 9 9 3
6 7 237 6 3 8 237 0 2 6 3 3 9 9 3
6 7 237 7 3 8 237 0 2 6 3 3 9 9 3
6 7 237 8 3 8 237 0 2 6 3 3 9 9 3
6 7 237 N 3 8 237 0 2 6 3 3 9 9 3
7 7 238 0 4 8 238 1 2 7 3 3 9 9 3
7 7 238 1 4 8 238 1 2 7 3 3 9 9 3
7 7 238 2 4 8 238 1 2 7 3 3 9 9 3
7 7 238 3 4 8 238 1 2 7 3 3 9 9 3
7 7 238 4 4 8 238 1 2 7 3 3 9 9 3
7 7 238 5 4 8 238 1 2 7 3 3 9 9 3
7 7 238 6 4 8 238 1 2 7 3 3 9 9 3
7 7 238 7 4 8 238 1 2 7 3 3 9 9 3
7 7 238 8 4 8 238 1 2 7 3 3 9 9 3
7 7 238 N 4 8 238 1 2 7 3 3 9 9 3
8 7 239 0 5 8 239 2 2 8 3 3 9 9 3
8 7 239 1 5 8 239 2 2 8 3 3 9 9 3
8 7 239 2 5 8 239 2 2 8 3 3 9 9 3
8 7 239 3 5 8 239 2 2 8 3 3 9 9 3
8 7 239 4 5 8 239 2 2 8 3 3 9 9 3
8 7 239 5 5 8 239 2 2 8 3 3 9 9 3
8 7 239 6 5 8 239 2 2 8 3 3 9 9 3
8 7 239 7 5 8 239 2 2 8 3 3 9 9 3
8 7 239 8 5 8 239 2 2 8 3 3 9 9 3
8 7 239 N 5 8 239 2 2 8 3 3 9 9 3
0 8 60 0 6 6 60 0 0 0 3 3 9 9 3
0 8 60 1 6 6 60 0 0 0 3 3 9 9 3
0 8 60 2 6 6 60 0 0 0 3 3 9 9 3
0 8 60 3 6 6 60 0 0 0 3 3 9 9 3
0 8 60 4 6 6 60 0 0 0 3 3 9 9 3
0 8 60 5 6 6 60 0 0 0 3 3 9 9 3
0 8 60 6 6 6 60 0 0 0 3 3 9 9 3
0 8 60 7 6 6 60 0 0 0 3 3 9 9 3
0 8 60 8 6 6 60 0 0 0 3 3 9 9 3
0 8 60 N 6 6 60 0 0 0 3 3 9 9 3
1 8 61 0 7 6 61 1 0 1 3 3 9 9 3
1 8 61 1 7 6 61 1 0 1 3 3 9 9 3
1 8 61 2 7 6 61 1 0 1 3 3 9 9 3
1 8 61 3 7 6 61 1 0 1 3 3 9 9 3
1 8 61 4 7 6 61 1 0 1 3 3 9 9 3
1 8 61 5 7 6 61 1 0 1 3 3 9 9 3
1 8 61 6 7 6 61 1 0 1 3 3 9 9 3
1 8 61 7 7 6 61 1 0 1 3 3 9 9 3
1 8 61 8 7 6 61 1 0 1 3 3 9 9 3
1 8 61 N 7 6 61 1 0 1 3 3 9 9 3
2 8 62 0 8 6 62 2 0 2 3 3 9 9 3
2 8 62 1 8 6 62 2 0 2 3 3 9 9 3
2 8 62 2 8 6 62 2 0 2 3 3 9 9 3
2 8 62 3 8 6 62 2 0 2 3 3 9 9 3
2 8 62 4 8 6 62 2 0 2 3 3 9 9 3
2 8 62 5 8 6 62 2 0 2 3 3 9 9 3
2 8 62 6 8 6 62 2 0 2 3 3 9 9 3
2 8 62 7 8 6 62 2 0 2 3 3 9 9 3
2 8 62 8 8 6 62 2 0 2 3 3 9 9 3
2 8 62 N 8 6 62 2 0 2 3 3 9 9 3
3 8 69 0 6 7 69 0 1 3 3 3 9 9 3
3 8 69 1 6 7 69 0 1 3 3 3 9 9 3
3 8 69 2 6 7 69 0 1 3 3 3 9 9 3
3 8 69 3 6 7 69 0 1 3 3 3 9 9 3
3 8 69 4 6 7 69 0 1 3 3 3 9 9 3
3 8 69 5 6 7 69 0 1 3 3 3 9 9 3
3 8 69 6 6 7 69 0 1 3 3 3 9 9 3
3 8 69 7 6 7 69 0 1 3 3 3 9 9 3
3 8 69 8 6 7 69 0 1 3 3 3 9 9 3
3 8 69 N 6 7 69 0 1 3 3 3 9 9 3
4 8 70 0 7 7 70 1 1 4 3 3 9 9 3
4 8 70 1 7 7 70 1 1 4 3 3 9 9 3
4 8 70 2 7 7 70 1 1 4 3 3 9 9 3
4 8 70 3 7 7 70 1 1 4 3 3 9 9 3
4 8 70 4 7 7 70 1 1 4 3 3 9 9 3
4 8 70 5 7 7 70 1 1 4 3 3 9 9 3
4 8 70 6 7 7 70 1 1 4 3 3 9 9 3
4 8 70 7 7 7 70 1 1 4 3 3 9 9 3
4 8 70 8 7 7 70 1 1 4 3 3 9 9 3
4 8 70 N 7 7 70 1 1 4 3 3 9 9 3
5 8 71 0 8 7 71 2 1 5 3 3 9 9 3
5 8 71 1 8 7 71 2 1 5 3 3 9 9 3
5 8 71 2 8 7 71 2 1 5 3 3 9 9 3
5 8 71 3 8 7 71 2 1 5 3 3 9 9 3
5 8 71 4 8 7 71 2 1 5 3 3 9 9 3
5 8 71 5 8 7 71 2 1 5 3 3 9 9 3
5 8 71 6 8 7 71 2 1 5 3 3 9 9 3
5 8 71 7 8 7 71 2 1 5 3 3 9 9 3
5 8 71 8 8 7 71 2 1 5 3 3 9 9 3
5 8 71 N 8 7 71 2 1 5 3 3 9 9 3
6 8 78 0 6 8 78 0 2 6 3 3 9 9 3
6 8 78 1 6 8 78 0 2 6 3 3 9 9 3
6 8 78 2 6 8 78 0 2 6 3 3 9 9 3
6 8 78 3 6 8 78 0 2 6 3 3 9 9 3
6 8 78 4 6 8 78 0 2 6 3 3 9 9 3
6 8 78 5 6 8 78 0 2 6 3 3 9 9 3
6 8 78 6 6 8 78 0 2 6 3 3 9 9 3
6 8 78 7 6 8 78 0 2 6 3 3 9 9 3
6 8 78 8 6 8 78 0 2 6 3 3 9 9 3
6 8 78 N 6 8 78 0 2 6 3 3 9 9 3
7 8 79 0 7 8 79 1 2 7 3 3 9 9 3
7 8 79 1 7 8 79 1 2 7 3 3 9 9 3
7 8 79 2 7 8 79 1 2 7 3 3 9 9 3
7 8 79 3 7 8 79 1 2 7 3 3 9 9 3
7 8 79 4 7 8 79 1 2 7 3 3 9 9 3
7 8 79 5 7 8 79 1 2 7 3 3 9 9 3
7 8 79 6 7 8 79 1 2 7 3 3 9 9 3
7 8 79 7 7 8 79 1 2 7 3 3 9 9 3
7 8 79 8 7 8 79 1 2 7 3 3 9 9 3
7 8 79 N 7 8 79 1 2 7 3 3 9 9 3
8 8 80 0 8 8 80 2 2 8 3 3 9 9 3
8 8 80 1 8 8 80 2 2 8 3 3 9 9 3
8 8 80 2 8 8 80 2 2 8 3 3 9 9 3
8 8 80 3 8 8 80 2 2 8 3 3 9 9 3
8 8 80 4 8 8 80 2 2 8 3 3 9 9 3
8 8 80 5 8 8 80 2 2 8 3 3 9 9 3
8 8 80 6 8 8 80 2 2 8 3 3 9 9 3
8 8 80 7 8 8 80 2 2 8 3 3 9 9 3
8 8 80 8 8 8 80 2 2 8 3 3 9 9 3
8 8 80 N 8 8 80 2 2 8 3 3 9 9 3
0 8 141 0 6 6 141 0 0 0 3 3 9 9 3
0 8 141 1 6 6 141 0 0 0 3 3 9 9 3
0 8 141 2 6 6 141 0 0 0 3 3 9 9 3
0 8 141 3 6 6 141 0 0 0 3 3 9 9 3
0 8 141 4 6 6 141 0 0 0 3 3 9 9 3
0 8 141 5 6 6 141 0 0 0 3 3 9 9 3
0 8 141 6 6 6 141 0 0 0 3 3 9 9 3
0 8 141 7 6 6 141 0 0 0 3 3 9 9 3
0 8 141 8 6 6 141 0 0 0 3 3 9 9 3
0 8 141 N 6 6 141 0 0 0 3 3 9 9 3
1 8 142 0 7 6 142 1 0 1 3 3 9 9 3
1 8 142 1 7 6 142 1 0 1 3 3 9 9 3
1 8 142 2 7 6 142 1 0 1 3 3 9 9 3
1 8 142 3 7 6 142 1 0 1 3 3 9 9 3
1 8 142 4 7 6 142 1 0 1 3 3 9 9 3
1 8 142 5 7 6 142 1 0 1 3 3 9 9 3
1 8 142 6 7 6 142 1 0 1 3 3 9 9 3
1 8 142 7 7 6 142 1 0 1 3 3 9 9 3
1 8 142 8 7 6 142 1 0 1 3 3 9 9 3
1 8 142 N 7 6 142 1 0 1 3 3 9 9 3
2 8 143 0 8 6 143 2 0 2 3 3 9 9 3
2 8 143 1 8 6 143 2 0 2 3 3 9 9 3
2 8 143 2 8 6 143 2 0 2 3 3 9 9 3
2 8 143 3 8 6 143 2 0 2 3 3 9 9 3
2 8 143 4 8 6 143 2 0 2 3 3 9 9 3
2 8 143 5 8 6 143 2 0 2 3 3 9 9 3
2 8 143 6 8 6 143 2 0 2 3 3 9 9 3
2 8 143 7 8 6 143 2 0 2 3 3 9 9 3
2 8 143 8 8 6 143 2 0 2 3 3 9 9 3
2 8 143 N 8 6 143 2 0 2 3 3 9 9 3
3 8 150 0 6 7 150 0 1 3 3 3 9 9 3
3 8 150 1 6 7 150 0 1 3 3 3 9 9 3
3 8 150 2 6 7 150 0 1 3 3 3 9 9 3
3 8 150 3 6 7 150 0 1 3 3 3 9 9 3
3 8 150 4 6 7 150 0 1 3 3 3 9 9 3
3 8 150 5 6 7 150 0 1 3 3 3 9 9 3
3 8 150 6 6 7 150 0 1 3 3 3 9 9 3
3 8 150 7 6 7 150 0 1 3 3 3 9 9 3
3 8 150 8 6 7 150 0 1 3 3 3 9 9 3
3 8 150 N 6 7 150 0 1 3 3 3 9 9 3
4 8 151 0 7 7 151 1 1 4 3 3 9 9 3
4 8 151 1 7 7 151 1 1 4 3 3 9 9 3
4 8 151 2 7 7 151 1 1 4 3 3 9 9 3
4 8 151 3 7 7 151 1 1 4 3 3 9 9 3
4 8 151 4 7 7 151 1 1 4 3 3 9 9 3
4 8 151 5 7 7 151 1 1 4 3 3 9 9 3
4 8 151 6 7 7 151 1 1 4 3 3 9 9 3
4 8 151 7 7 7 151 1 1 4 3 3 9 9 3
4 8 151 8 7 7 151 1 1 4 3 3 9 9 3
4 8 151 N 7 7 151 1 1 4 3 3 9 9 3
5 8 152 0 8 7 152 2 1 5 3 3 9 9 3
5 8 152 1 8 7 152 2 1 5 3 3 9 9 3
5 8 152 2 8 7 152 2 1 5 3 3 9 9 3
5 8 152 3 8 7 152 2 1 5 3 3 9 9 3
5 8 152 4 8 7 152 2 1 5 3 3 9 9 3
5 8 152 5 8 7 152 2 1 5 3 3 9 9 3
5 8 152 6 8 7 152 2 1 5 3 3 9 9 3
5 8 152 7 8 7 152 2 1 5 3 3 9 9 3
5 8 152 8 8 7 152 2 1 5 3 3 9 9 3
5 8 152 N 8 7 152 2 1 5 3 3 9 9 3
6 8 159 0 6 8 159 0 2 6 3 3 9 9 3
6 8 159 1 6 8 159 0 2 6 3 3 9 9 3
6 8 159 2 6 8 159 0 2 6 3 3 9 9 3
6 8 159 3 6 8 159 0 2 6 3 3 9 9 3
6 8 159 4 6 8 159 0 2 6 3 3 9 9 3
6 8 159 5 6 8 159 0 2 6 3 3 9 9 3
6 8 159 6 6 8 159 0 2 6 3 3 9 9 3
6 8 159 7 6 8 159 0 2 6 3 3 9 9 3
6 8 159 8 6 8 159 0 2 6 3 3 9 9 3
6 8 159 N 6 8 159 0 2 6 3 3 9 9 3
7 8 160 0 7 8 160 1 2 7 3 3 9 9 3
7 8 160 1 7 8 160 1 2 7 3 3 9 9 3
7 8 160 2 7 8 160 1 2 7 3 3 9 9 3
7 8 160 3 7 8 160 1 2 7 3 3 9 9 3
7 8 160 4 7 8 160 1 2 7 3 3 9 9 3
7 8 160 5 7 8 160 1 2 7 3 3 9 9 3
7 8 160 6 7 8 160 1 2 7 3 3 9 9 3
7 8 160 7 7 8 160 1 2 7 3 3 9 9 3
7 8 160 8 7 8 160 1 2 7 3 3 9 9 3
7 8 160 N 7 8 160 1 2 7 3 3 9 9 3
8 8 161 0 8 8 161 2 2 8 3 3 9 9 3
8 8 161 1 8 8 161 2 2 8 3 3 9 9 3
8 8 161 2 8 8 161 2 2 8 3 3 9 9 3
8 8 161 3 8 8 161 2 2 8 3 3 9 9 3
8 8 161 4 8 8 161 2 2 8 3 3 9 9 3
8 8 161 5 8 8 161 2 2 8 3 3 9 9 3
8 8 161 6 8 8 161 2 2 8 3 3 9 9 3
8 8 161 7 8 8 161 2 2 8 3 3 9 9 3
8 8 161 8 8 8 161 2 2 8 3 3 9 9 3
8 8 161 N 8 8 161 2 2 8 3 3 9 9 3
0 8 222 0 6 6 222 0 0 0 3 3 9 9 3
0 8 222 1 6 6 222 0 0 0 3 3 9 9 3
0 8 222 2 6 6 222 0 0 0 3 3 9 9 3
0 8 222 3 6 6 222 0 0 0 3 3 9 9 3
0 8 222 4 6 6 222 0 0 0 3 3 9 9 3
0 8 222 5 6 6 222 0 0 0 3 3 9 9 3
0 8 222 6 6 6 222 0 0 0 3 3 9 9 3
0 8 222 7 6 6 222 0 0 0 3 3 9 9 3
0 8 222 8 6 6 222 0 0 0 3 3 9 9 3
0 8 222 N 6 6 222 0 0 0 3 3 9 9 3
1 8 223 0 7 6 223 1 0 1 3 3 9 9 3
1 8 223 1 7 6 223 1 0 1 3 3 9 9 3
1 8 223 2 7 6 223 1 0 1 3 3 9 9 3
1 8 223 3 7 6 223 1 0 1 3 3 9 9 3
1 8 223 4 7 6 223 1 0 1 3 3 9 9 3
1 8 223 5 7 6 223 1 0 1 3 3 9 9 3
1 8 223 6 7 6 223 1 0 1 3 3 9 9 3
1 8 223 7 7 6 223 1 0 1 3 3 9 9 3
1 8 223 8 7 6 223 1 0 1 3 3 9 9 3
1 8 223 N 7 6 223 1 0 1 3 3 9 9 3
2 8 224 0 8 6 224 2 0 2 3 3 9 9 3
2 8 224 1 8 6 224 2 0 2 3 3 9 9 3
2 8 224 2 8 6 224 2 0 2 3 3 9 9 3
2 8 224 3 8 6 224 2 0 2 3 3 9 9 3
2 8 224 4 8 6 224 2 0 2 3 3 9 9 3
2 8 224 5 8 6 224 2 0 2 3 3 9 9 3
2 8 224 6 8 6 224 2 0 2 3 3 9 9 3
2 8 224 7 8 6 224 2 0 2 3 3 9 9 3
2 8 224 8 8 6 224 2 0 2 3 3 9 9 3
2 8 224 N 8 6 224 2 0 2 3 3 9 9 3
3 8 231 0 6 7 231 0 1 3 3 3 9 9 3
3 8 231 1 6 7 231 0 1 3 3 3 9 9 3
3 8 231 2 6 7 231 0 1 3 3 3 9 9 3
3 8 231 3 6 7 231 0 1 3 3 3 9 9 3
3 8 231 4 6 7 231 0 1 3 3 3 9 9 3
3 8 231 5 6 7 231 0 1 3 3 3 9 9 3
3 8 231 6 6 7 231 0 1 3 3 3 9 9 3
3 8 231 7 6 7 231 0 1 3 3 3 9 9 3
3 8 231 8 6 7 231 0 1 3 3 3 9 9 3
3 8 231 N 6 7 231 0 1 3 3 3 9 9 3
4 8 232 0 7 7 232 1 1 4 3 3 9 9 3
4 8 232 1 7 7 232 1 1 4 3 3 9 9 3
4 8 232 2 7 7 232 1 1 4 3 3 9 9 3
4 8 232 3 7 7 232 1 1 4 3 3 9 9 3
4 8 232 4 7 7 232 1 1 4 3 3 9 9 3
4 8 232 5 7 7 232 1 1 4 3 3 9 9 3
4 8 232 6 7 7 232 1 1 4 3 3 9 9 3
4 8 232 7 7 7 232 1 1 4 3 3 9 9 3
4 8 232 8 7 7 232 1 1 4 3 3 9 9 3
4 8 232 N 7 7 232 1 1 4 3 3 9 9 3
5 8 233 0 8 7 233 2 1 5 3 3 9 9 3
5 8 233 1 8 7 233 2 1 5 3 3 9 9 3
5 8 233 2 8 7 233 2 1 5 3 3 9 9 3
5 8 233 3 8 7 233 2 1 5 3 3 9 9 3
5 8 233 4 8 7 233 2 1 5 3 3 9 9 3
5 8 233 5 8 7 233 2 1 5 3 3 9 9 3
5 8 233 6 8 7 233 2 1 5 3 3 9 9 3
5 8 233 7 8 7 233 2 1 5 3 3 9 9 3
5 8 233 8 8 7 233 2 1 5 3 3 9 9 3
5 8 233 N 8 7 233 2 1 5 3 3 9 9 3
6 8 240 0 6 8 240 0 2 6 3 3 9 9 3
6 8 240 1 6 8 240 0 2 6 3 3 9 9 3
6 8 240 2 6 8 240 0 2 6 3 3 9 9 3
6 8 240 3 6 8 240 0 2 6 3 3 9 9 3
6 8 240 4 6 8 240 0 2 6 3 3 9 9 3
6 8 240 5 6 8 240 0 2 6 3 3 9 9 3
6 8 240 6 6 8 240 0 2 6 3 3 9 9 3
6 8 240 7 6 8 240 0 2 6 3 3 9 9 3
6 8 240 8 6 8 240 0 2 6 3 3 9 9 3
6 8 240 N 6 8 240 0 2 6 3 3 9 9 3
7 8 241 0 7 8 241 1 2 7 3 3 9 9 3
7 8 241 1 7 8 241 1 2 7 3 3 9 9 3
7 8 241 2 7 8 241 1 2 7 3 3 9 9 3
7 8 241 3 7 8 241 1 2 7 3 3 9 9 3
7 8 241 4 7 8 241 1 2 7 3 3 9 9 3
7 8 241 5 7 8 241 1 2 7 3 3 9 9 3
7 8 241 6 7 8 241 1 2 7 3 3 9 9 3
7 8 241 7 7 8 241 1 2 7 3 3 9 9 3
7 8 241 8 7 8 241 1 2 7 3 3 9 9 3
7 8 241 N 7 8 241 1 2 7 3 3 9 9 3
8 8 242 0 8 8 242 2 2 8 3 3 9 9 3
8 8 242 1 8 8 242 2 2 8 3 3 9 9 3
8 8 242 2 8 8 242 2 2 8 3 3 9 9 3
8 8 242 3 8 8 242 2 2 8 3 3 9 9 3
8 8 242 4 8 8 242 2 2 8 3 3 9 9 3
8 8 242 5 8 8 242 2 2 8 3 3 9 9 3
8 8 242 6 8 8 242 2 2 8 3 3 9 9 3
8 8 242 7 8 8 242 2 2 8 3 3 9 9 3
8 8 242 8 8 8 242 2 2 8 3 3 9 9 3
8 8 242 N 8 8 242 2 2 8 3 3 9 9 3
| null | https://raw.githubusercontent.com/prg-titech/Kani-CUDA/e97c4bede43a5fc4031a7d2cfc32d71b01ac26c4/Emulator/Examples/Diffusion3d/profile1.rkt | racket | 0 0 0 0 0 0 0 0 0 0 3 3 9 9 3
0 0 0 1 0 0 0 0 0 0 3 3 9 9 3
0 0 0 2 0 0 0 0 0 0 3 3 9 9 3
0 0 0 3 0 0 0 0 0 0 3 3 9 9 3
0 0 0 4 0 0 0 0 0 0 3 3 9 9 3
0 0 0 5 0 0 0 0 0 0 3 3 9 9 3
0 0 0 6 0 0 0 0 0 0 3 3 9 9 3
0 0 0 7 0 0 0 0 0 0 3 3 9 9 3
0 0 0 8 0 0 0 0 0 0 3 3 9 9 3
0 0 0 N 0 0 0 0 0 0 3 3 9 9 3
1 0 1 0 1 0 1 1 0 1 3 3 9 9 3
1 0 1 1 1 0 1 1 0 1 3 3 9 9 3
1 0 1 2 1 0 1 1 0 1 3 3 9 9 3
1 0 1 3 1 0 1 1 0 1 3 3 9 9 3
1 0 1 4 1 0 1 1 0 1 3 3 9 9 3
1 0 1 5 1 0 1 1 0 1 3 3 9 9 3
1 0 1 6 1 0 1 1 0 1 3 3 9 9 3
1 0 1 7 1 0 1 1 0 1 3 3 9 9 3
1 0 1 8 1 0 1 1 0 1 3 3 9 9 3
1 0 1 N 1 0 1 1 0 1 3 3 9 9 3
2 0 2 0 2 0 2 2 0 2 3 3 9 9 3
2 0 2 1 2 0 2 2 0 2 3 3 9 9 3
2 0 2 2 2 0 2 2 0 2 3 3 9 9 3
2 0 2 3 2 0 2 2 0 2 3 3 9 9 3
2 0 2 4 2 0 2 2 0 2 3 3 9 9 3
2 0 2 5 2 0 2 2 0 2 3 3 9 9 3
2 0 2 6 2 0 2 2 0 2 3 3 9 9 3
2 0 2 7 2 0 2 2 0 2 3 3 9 9 3
2 0 2 8 2 0 2 2 0 2 3 3 9 9 3
2 0 2 N 2 0 2 2 0 2 3 3 9 9 3
3 0 9 0 0 1 9 0 1 3 3 3 9 9 3
3 0 9 1 0 1 9 0 1 3 3 3 9 9 3
3 0 9 2 0 1 9 0 1 3 3 3 9 9 3
3 0 9 3 0 1 9 0 1 3 3 3 9 9 3
3 0 9 4 0 1 9 0 1 3 3 3 9 9 3
3 0 9 5 0 1 9 0 1 3 3 3 9 9 3
3 0 9 6 0 1 9 0 1 3 3 3 9 9 3
3 0 9 7 0 1 9 0 1 3 3 3 9 9 3
3 0 9 8 0 1 9 0 1 3 3 3 9 9 3
3 0 9 N 0 1 9 0 1 3 3 3 9 9 3
4 0 10 0 1 1 10 1 1 4 3 3 9 9 3
4 0 10 1 1 1 10 1 1 4 3 3 9 9 3
4 0 10 2 1 1 10 1 1 4 3 3 9 9 3
4 0 10 3 1 1 10 1 1 4 3 3 9 9 3
4 0 10 4 1 1 10 1 1 4 3 3 9 9 3
4 0 10 5 1 1 10 1 1 4 3 3 9 9 3
4 0 10 6 1 1 10 1 1 4 3 3 9 9 3
4 0 10 7 1 1 10 1 1 4 3 3 9 9 3
4 0 10 8 1 1 10 1 1 4 3 3 9 9 3
4 0 10 N 1 1 10 1 1 4 3 3 9 9 3
5 0 11 0 2 1 11 2 1 5 3 3 9 9 3
5 0 11 1 2 1 11 2 1 5 3 3 9 9 3
5 0 11 2 2 1 11 2 1 5 3 3 9 9 3
5 0 11 3 2 1 11 2 1 5 3 3 9 9 3
5 0 11 4 2 1 11 2 1 5 3 3 9 9 3
5 0 11 5 2 1 11 2 1 5 3 3 9 9 3
5 0 11 6 2 1 11 2 1 5 3 3 9 9 3
5 0 11 7 2 1 11 2 1 5 3 3 9 9 3
5 0 11 8 2 1 11 2 1 5 3 3 9 9 3
5 0 11 N 2 1 11 2 1 5 3 3 9 9 3
6 0 18 0 0 2 18 0 2 6 3 3 9 9 3
6 0 18 1 0 2 18 0 2 6 3 3 9 9 3
6 0 18 2 0 2 18 0 2 6 3 3 9 9 3
6 0 18 3 0 2 18 0 2 6 3 3 9 9 3
6 0 18 4 0 2 18 0 2 6 3 3 9 9 3
6 0 18 5 0 2 18 0 2 6 3 3 9 9 3
6 0 18 6 0 2 18 0 2 6 3 3 9 9 3
6 0 18 7 0 2 18 0 2 6 3 3 9 9 3
6 0 18 8 0 2 18 0 2 6 3 3 9 9 3
6 0 18 N 0 2 18 0 2 6 3 3 9 9 3
7 0 19 0 1 2 19 1 2 7 3 3 9 9 3
7 0 19 1 1 2 19 1 2 7 3 3 9 9 3
7 0 19 2 1 2 19 1 2 7 3 3 9 9 3
7 0 19 3 1 2 19 1 2 7 3 3 9 9 3
7 0 19 4 1 2 19 1 2 7 3 3 9 9 3
7 0 19 5 1 2 19 1 2 7 3 3 9 9 3
7 0 19 6 1 2 19 1 2 7 3 3 9 9 3
7 0 19 7 1 2 19 1 2 7 3 3 9 9 3
7 0 19 8 1 2 19 1 2 7 3 3 9 9 3
7 0 19 N 1 2 19 1 2 7 3 3 9 9 3
8 0 20 0 2 2 20 2 2 8 3 3 9 9 3
8 0 20 1 2 2 20 2 2 8 3 3 9 9 3
8 0 20 2 2 2 20 2 2 8 3 3 9 9 3
8 0 20 3 2 2 20 2 2 8 3 3 9 9 3
8 0 20 4 2 2 20 2 2 8 3 3 9 9 3
8 0 20 5 2 2 20 2 2 8 3 3 9 9 3
8 0 20 6 2 2 20 2 2 8 3 3 9 9 3
8 0 20 7 2 2 20 2 2 8 3 3 9 9 3
8 0 20 8 2 2 20 2 2 8 3 3 9 9 3
8 0 20 N 2 2 20 2 2 8 3 3 9 9 3
0 0 81 0 0 0 81 0 0 0 3 3 9 9 3
0 0 81 1 0 0 81 0 0 0 3 3 9 9 3
0 0 81 2 0 0 81 0 0 0 3 3 9 9 3
0 0 81 3 0 0 81 0 0 0 3 3 9 9 3
0 0 81 4 0 0 81 0 0 0 3 3 9 9 3
0 0 81 5 0 0 81 0 0 0 3 3 9 9 3
0 0 81 6 0 0 81 0 0 0 3 3 9 9 3
0 0 81 7 0 0 81 0 0 0 3 3 9 9 3
0 0 81 8 0 0 81 0 0 0 3 3 9 9 3
0 0 81 N 0 0 81 0 0 0 3 3 9 9 3
1 0 82 0 1 0 82 1 0 1 3 3 9 9 3
1 0 82 1 1 0 82 1 0 1 3 3 9 9 3
1 0 82 2 1 0 82 1 0 1 3 3 9 9 3
1 0 82 3 1 0 82 1 0 1 3 3 9 9 3
1 0 82 4 1 0 82 1 0 1 3 3 9 9 3
1 0 82 5 1 0 82 1 0 1 3 3 9 9 3
1 0 82 6 1 0 82 1 0 1 3 3 9 9 3
1 0 82 7 1 0 82 1 0 1 3 3 9 9 3
1 0 82 8 1 0 82 1 0 1 3 3 9 9 3
1 0 82 N 1 0 82 1 0 1 3 3 9 9 3
2 0 83 0 2 0 83 2 0 2 3 3 9 9 3
2 0 83 1 2 0 83 2 0 2 3 3 9 9 3
2 0 83 2 2 0 83 2 0 2 3 3 9 9 3
2 0 83 3 2 0 83 2 0 2 3 3 9 9 3
2 0 83 4 2 0 83 2 0 2 3 3 9 9 3
2 0 83 5 2 0 83 2 0 2 3 3 9 9 3
2 0 83 6 2 0 83 2 0 2 3 3 9 9 3
2 0 83 7 2 0 83 2 0 2 3 3 9 9 3
2 0 83 8 2 0 83 2 0 2 3 3 9 9 3
2 0 83 N 2 0 83 2 0 2 3 3 9 9 3
3 0 90 0 0 1 90 0 1 3 3 3 9 9 3
3 0 90 1 0 1 90 0 1 3 3 3 9 9 3
3 0 90 2 0 1 90 0 1 3 3 3 9 9 3
3 0 90 3 0 1 90 0 1 3 3 3 9 9 3
3 0 90 4 0 1 90 0 1 3 3 3 9 9 3
3 0 90 5 0 1 90 0 1 3 3 3 9 9 3
3 0 90 6 0 1 90 0 1 3 3 3 9 9 3
3 0 90 7 0 1 90 0 1 3 3 3 9 9 3
3 0 90 8 0 1 90 0 1 3 3 3 9 9 3
3 0 90 N 0 1 90 0 1 3 3 3 9 9 3
4 0 91 0 1 1 91 1 1 4 3 3 9 9 3
4 0 91 1 1 1 91 1 1 4 3 3 9 9 3
4 0 91 2 1 1 91 1 1 4 3 3 9 9 3
4 0 91 3 1 1 91 1 1 4 3 3 9 9 3
4 0 91 4 1 1 91 1 1 4 3 3 9 9 3
4 0 91 5 1 1 91 1 1 4 3 3 9 9 3
4 0 91 6 1 1 91 1 1 4 3 3 9 9 3
4 0 91 7 1 1 91 1 1 4 3 3 9 9 3
4 0 91 8 1 1 91 1 1 4 3 3 9 9 3
4 0 91 N 1 1 91 1 1 4 3 3 9 9 3
5 0 92 0 2 1 92 2 1 5 3 3 9 9 3
5 0 92 1 2 1 92 2 1 5 3 3 9 9 3
5 0 92 2 2 1 92 2 1 5 3 3 9 9 3
5 0 92 3 2 1 92 2 1 5 3 3 9 9 3
5 0 92 4 2 1 92 2 1 5 3 3 9 9 3
5 0 92 5 2 1 92 2 1 5 3 3 9 9 3
5 0 92 6 2 1 92 2 1 5 3 3 9 9 3
5 0 92 7 2 1 92 2 1 5 3 3 9 9 3
5 0 92 8 2 1 92 2 1 5 3 3 9 9 3
5 0 92 N 2 1 92 2 1 5 3 3 9 9 3
6 0 99 0 0 2 99 0 2 6 3 3 9 9 3
6 0 99 1 0 2 99 0 2 6 3 3 9 9 3
6 0 99 2 0 2 99 0 2 6 3 3 9 9 3
6 0 99 3 0 2 99 0 2 6 3 3 9 9 3
6 0 99 4 0 2 99 0 2 6 3 3 9 9 3
6 0 99 5 0 2 99 0 2 6 3 3 9 9 3
6 0 99 6 0 2 99 0 2 6 3 3 9 9 3
6 0 99 7 0 2 99 0 2 6 3 3 9 9 3
6 0 99 8 0 2 99 0 2 6 3 3 9 9 3
6 0 99 N 0 2 99 0 2 6 3 3 9 9 3
7 0 100 0 1 2 100 1 2 7 3 3 9 9 3
7 0 100 1 1 2 100 1 2 7 3 3 9 9 3
7 0 100 2 1 2 100 1 2 7 3 3 9 9 3
7 0 100 3 1 2 100 1 2 7 3 3 9 9 3
7 0 100 4 1 2 100 1 2 7 3 3 9 9 3
7 0 100 5 1 2 100 1 2 7 3 3 9 9 3
7 0 100 6 1 2 100 1 2 7 3 3 9 9 3
7 0 100 7 1 2 100 1 2 7 3 3 9 9 3
7 0 100 8 1 2 100 1 2 7 3 3 9 9 3
7 0 100 N 1 2 100 1 2 7 3 3 9 9 3
8 0 101 0 2 2 101 2 2 8 3 3 9 9 3
8 0 101 1 2 2 101 2 2 8 3 3 9 9 3
8 0 101 2 2 2 101 2 2 8 3 3 9 9 3
8 0 101 3 2 2 101 2 2 8 3 3 9 9 3
8 0 101 4 2 2 101 2 2 8 3 3 9 9 3
8 0 101 5 2 2 101 2 2 8 3 3 9 9 3
8 0 101 6 2 2 101 2 2 8 3 3 9 9 3
8 0 101 7 2 2 101 2 2 8 3 3 9 9 3
8 0 101 8 2 2 101 2 2 8 3 3 9 9 3
8 0 101 N 2 2 101 2 2 8 3 3 9 9 3
0 0 162 0 0 0 162 0 0 0 3 3 9 9 3
0 0 162 1 0 0 162 0 0 0 3 3 9 9 3
0 0 162 2 0 0 162 0 0 0 3 3 9 9 3
0 0 162 3 0 0 162 0 0 0 3 3 9 9 3
0 0 162 4 0 0 162 0 0 0 3 3 9 9 3
0 0 162 5 0 0 162 0 0 0 3 3 9 9 3
0 0 162 6 0 0 162 0 0 0 3 3 9 9 3
0 0 162 7 0 0 162 0 0 0 3 3 9 9 3
0 0 162 8 0 0 162 0 0 0 3 3 9 9 3
0 0 162 N 0 0 162 0 0 0 3 3 9 9 3
1 0 163 0 1 0 163 1 0 1 3 3 9 9 3
1 0 163 1 1 0 163 1 0 1 3 3 9 9 3
1 0 163 2 1 0 163 1 0 1 3 3 9 9 3
1 0 163 3 1 0 163 1 0 1 3 3 9 9 3
1 0 163 4 1 0 163 1 0 1 3 3 9 9 3
1 0 163 5 1 0 163 1 0 1 3 3 9 9 3
1 0 163 6 1 0 163 1 0 1 3 3 9 9 3
1 0 163 7 1 0 163 1 0 1 3 3 9 9 3
1 0 163 8 1 0 163 1 0 1 3 3 9 9 3
1 0 163 N 1 0 163 1 0 1 3 3 9 9 3
2 0 164 0 2 0 164 2 0 2 3 3 9 9 3
2 0 164 1 2 0 164 2 0 2 3 3 9 9 3
2 0 164 2 2 0 164 2 0 2 3 3 9 9 3
2 0 164 3 2 0 164 2 0 2 3 3 9 9 3
2 0 164 4 2 0 164 2 0 2 3 3 9 9 3
2 0 164 5 2 0 164 2 0 2 3 3 9 9 3
2 0 164 6 2 0 164 2 0 2 3 3 9 9 3
2 0 164 7 2 0 164 2 0 2 3 3 9 9 3
2 0 164 8 2 0 164 2 0 2 3 3 9 9 3
2 0 164 N 2 0 164 2 0 2 3 3 9 9 3
3 0 171 0 0 1 171 0 1 3 3 3 9 9 3
3 0 171 1 0 1 171 0 1 3 3 3 9 9 3
3 0 171 2 0 1 171 0 1 3 3 3 9 9 3
3 0 171 3 0 1 171 0 1 3 3 3 9 9 3
3 0 171 4 0 1 171 0 1 3 3 3 9 9 3
3 0 171 5 0 1 171 0 1 3 3 3 9 9 3
3 0 171 6 0 1 171 0 1 3 3 3 9 9 3
3 0 171 7 0 1 171 0 1 3 3 3 9 9 3
3 0 171 8 0 1 171 0 1 3 3 3 9 9 3
3 0 171 N 0 1 171 0 1 3 3 3 9 9 3
4 0 172 0 1 1 172 1 1 4 3 3 9 9 3
4 0 172 1 1 1 172 1 1 4 3 3 9 9 3
4 0 172 2 1 1 172 1 1 4 3 3 9 9 3
4 0 172 3 1 1 172 1 1 4 3 3 9 9 3
4 0 172 4 1 1 172 1 1 4 3 3 9 9 3
4 0 172 5 1 1 172 1 1 4 3 3 9 9 3
4 0 172 6 1 1 172 1 1 4 3 3 9 9 3
4 0 172 7 1 1 172 1 1 4 3 3 9 9 3
4 0 172 8 1 1 172 1 1 4 3 3 9 9 3
4 0 172 N 1 1 172 1 1 4 3 3 9 9 3
5 0 173 0 2 1 173 2 1 5 3 3 9 9 3
5 0 173 1 2 1 173 2 1 5 3 3 9 9 3
5 0 173 2 2 1 173 2 1 5 3 3 9 9 3
5 0 173 3 2 1 173 2 1 5 3 3 9 9 3
5 0 173 4 2 1 173 2 1 5 3 3 9 9 3
5 0 173 5 2 1 173 2 1 5 3 3 9 9 3
5 0 173 6 2 1 173 2 1 5 3 3 9 9 3
5 0 173 7 2 1 173 2 1 5 3 3 9 9 3
5 0 173 8 2 1 173 2 1 5 3 3 9 9 3
5 0 173 N 2 1 173 2 1 5 3 3 9 9 3
6 0 180 0 0 2 180 0 2 6 3 3 9 9 3
6 0 180 1 0 2 180 0 2 6 3 3 9 9 3
6 0 180 2 0 2 180 0 2 6 3 3 9 9 3
6 0 180 3 0 2 180 0 2 6 3 3 9 9 3
6 0 180 4 0 2 180 0 2 6 3 3 9 9 3
6 0 180 5 0 2 180 0 2 6 3 3 9 9 3
6 0 180 6 0 2 180 0 2 6 3 3 9 9 3
6 0 180 7 0 2 180 0 2 6 3 3 9 9 3
6 0 180 8 0 2 180 0 2 6 3 3 9 9 3
6 0 180 N 0 2 180 0 2 6 3 3 9 9 3
7 0 181 0 1 2 181 1 2 7 3 3 9 9 3
7 0 181 1 1 2 181 1 2 7 3 3 9 9 3
7 0 181 2 1 2 181 1 2 7 3 3 9 9 3
7 0 181 3 1 2 181 1 2 7 3 3 9 9 3
7 0 181 4 1 2 181 1 2 7 3 3 9 9 3
7 0 181 5 1 2 181 1 2 7 3 3 9 9 3
7 0 181 6 1 2 181 1 2 7 3 3 9 9 3
7 0 181 7 1 2 181 1 2 7 3 3 9 9 3
7 0 181 8 1 2 181 1 2 7 3 3 9 9 3
7 0 181 N 1 2 181 1 2 7 3 3 9 9 3
8 0 182 0 2 2 182 2 2 8 3 3 9 9 3
8 0 182 1 2 2 182 2 2 8 3 3 9 9 3
8 0 182 2 2 2 182 2 2 8 3 3 9 9 3
8 0 182 3 2 2 182 2 2 8 3 3 9 9 3
8 0 182 4 2 2 182 2 2 8 3 3 9 9 3
8 0 182 5 2 2 182 2 2 8 3 3 9 9 3
8 0 182 6 2 2 182 2 2 8 3 3 9 9 3
8 0 182 7 2 2 182 2 2 8 3 3 9 9 3
8 0 182 8 2 2 182 2 2 8 3 3 9 9 3
8 0 182 N 2 2 182 2 2 8 3 3 9 9 3
0 1 3 0 3 0 3 0 0 0 3 3 9 9 3
0 1 3 1 3 0 3 0 0 0 3 3 9 9 3
0 1 3 2 3 0 3 0 0 0 3 3 9 9 3
0 1 3 3 3 0 3 0 0 0 3 3 9 9 3
0 1 3 4 3 0 3 0 0 0 3 3 9 9 3
0 1 3 5 3 0 3 0 0 0 3 3 9 9 3
0 1 3 6 3 0 3 0 0 0 3 3 9 9 3
0 1 3 7 3 0 3 0 0 0 3 3 9 9 3
0 1 3 8 3 0 3 0 0 0 3 3 9 9 3
0 1 3 N 3 0 3 0 0 0 3 3 9 9 3
1 1 4 0 4 0 4 1 0 1 3 3 9 9 3
1 1 4 1 4 0 4 1 0 1 3 3 9 9 3
1 1 4 2 4 0 4 1 0 1 3 3 9 9 3
1 1 4 3 4 0 4 1 0 1 3 3 9 9 3
1 1 4 4 4 0 4 1 0 1 3 3 9 9 3
1 1 4 5 4 0 4 1 0 1 3 3 9 9 3
1 1 4 6 4 0 4 1 0 1 3 3 9 9 3
1 1 4 7 4 0 4 1 0 1 3 3 9 9 3
1 1 4 8 4 0 4 1 0 1 3 3 9 9 3
1 1 4 N 4 0 4 1 0 1 3 3 9 9 3
2 1 5 0 5 0 5 2 0 2 3 3 9 9 3
2 1 5 1 5 0 5 2 0 2 3 3 9 9 3
2 1 5 2 5 0 5 2 0 2 3 3 9 9 3
2 1 5 3 5 0 5 2 0 2 3 3 9 9 3
2 1 5 4 5 0 5 2 0 2 3 3 9 9 3
2 1 5 5 5 0 5 2 0 2 3 3 9 9 3
2 1 5 6 5 0 5 2 0 2 3 3 9 9 3
2 1 5 7 5 0 5 2 0 2 3 3 9 9 3
2 1 5 8 5 0 5 2 0 2 3 3 9 9 3
2 1 5 N 5 0 5 2 0 2 3 3 9 9 3
3 1 12 0 3 1 12 0 1 3 3 3 9 9 3
3 1 12 1 3 1 12 0 1 3 3 3 9 9 3
3 1 12 2 3 1 12 0 1 3 3 3 9 9 3
3 1 12 3 3 1 12 0 1 3 3 3 9 9 3
3 1 12 4 3 1 12 0 1 3 3 3 9 9 3
3 1 12 5 3 1 12 0 1 3 3 3 9 9 3
3 1 12 6 3 1 12 0 1 3 3 3 9 9 3
3 1 12 7 3 1 12 0 1 3 3 3 9 9 3
3 1 12 8 3 1 12 0 1 3 3 3 9 9 3
3 1 12 N 3 1 12 0 1 3 3 3 9 9 3
4 1 13 0 4 1 13 1 1 4 3 3 9 9 3
4 1 13 1 4 1 13 1 1 4 3 3 9 9 3
4 1 13 2 4 1 13 1 1 4 3 3 9 9 3
4 1 13 3 4 1 13 1 1 4 3 3 9 9 3
4 1 13 4 4 1 13 1 1 4 3 3 9 9 3
4 1 13 5 4 1 13 1 1 4 3 3 9 9 3
4 1 13 6 4 1 13 1 1 4 3 3 9 9 3
4 1 13 7 4 1 13 1 1 4 3 3 9 9 3
4 1 13 8 4 1 13 1 1 4 3 3 9 9 3
4 1 13 N 4 1 13 1 1 4 3 3 9 9 3
5 1 14 0 5 1 14 2 1 5 3 3 9 9 3
5 1 14 1 5 1 14 2 1 5 3 3 9 9 3
5 1 14 2 5 1 14 2 1 5 3 3 9 9 3
5 1 14 3 5 1 14 2 1 5 3 3 9 9 3
5 1 14 4 5 1 14 2 1 5 3 3 9 9 3
5 1 14 5 5 1 14 2 1 5 3 3 9 9 3
5 1 14 6 5 1 14 2 1 5 3 3 9 9 3
5 1 14 7 5 1 14 2 1 5 3 3 9 9 3
5 1 14 8 5 1 14 2 1 5 3 3 9 9 3
5 1 14 N 5 1 14 2 1 5 3 3 9 9 3
6 1 21 0 3 2 21 0 2 6 3 3 9 9 3
6 1 21 1 3 2 21 0 2 6 3 3 9 9 3
6 1 21 2 3 2 21 0 2 6 3 3 9 9 3
6 1 21 3 3 2 21 0 2 6 3 3 9 9 3
6 1 21 4 3 2 21 0 2 6 3 3 9 9 3
6 1 21 5 3 2 21 0 2 6 3 3 9 9 3
6 1 21 6 3 2 21 0 2 6 3 3 9 9 3
6 1 21 7 3 2 21 0 2 6 3 3 9 9 3
6 1 21 8 3 2 21 0 2 6 3 3 9 9 3
6 1 21 N 3 2 21 0 2 6 3 3 9 9 3
7 1 22 0 4 2 22 1 2 7 3 3 9 9 3
7 1 22 1 4 2 22 1 2 7 3 3 9 9 3
7 1 22 2 4 2 22 1 2 7 3 3 9 9 3
7 1 22 3 4 2 22 1 2 7 3 3 9 9 3
7 1 22 4 4 2 22 1 2 7 3 3 9 9 3
7 1 22 5 4 2 22 1 2 7 3 3 9 9 3
7 1 22 6 4 2 22 1 2 7 3 3 9 9 3
7 1 22 7 4 2 22 1 2 7 3 3 9 9 3
7 1 22 8 4 2 22 1 2 7 3 3 9 9 3
7 1 22 N 4 2 22 1 2 7 3 3 9 9 3
8 1 23 0 5 2 23 2 2 8 3 3 9 9 3
8 1 23 1 5 2 23 2 2 8 3 3 9 9 3
8 1 23 2 5 2 23 2 2 8 3 3 9 9 3
8 1 23 3 5 2 23 2 2 8 3 3 9 9 3
8 1 23 4 5 2 23 2 2 8 3 3 9 9 3
8 1 23 5 5 2 23 2 2 8 3 3 9 9 3
8 1 23 6 5 2 23 2 2 8 3 3 9 9 3
8 1 23 7 5 2 23 2 2 8 3 3 9 9 3
8 1 23 8 5 2 23 2 2 8 3 3 9 9 3
8 1 23 N 5 2 23 2 2 8 3 3 9 9 3
0 1 84 0 3 0 84 0 0 0 3 3 9 9 3
0 1 84 1 3 0 84 0 0 0 3 3 9 9 3
0 1 84 2 3 0 84 0 0 0 3 3 9 9 3
0 1 84 3 3 0 84 0 0 0 3 3 9 9 3
0 1 84 4 3 0 84 0 0 0 3 3 9 9 3
0 1 84 5 3 0 84 0 0 0 3 3 9 9 3
0 1 84 6 3 0 84 0 0 0 3 3 9 9 3
0 1 84 7 3 0 84 0 0 0 3 3 9 9 3
0 1 84 8 3 0 84 0 0 0 3 3 9 9 3
0 1 84 N 3 0 84 0 0 0 3 3 9 9 3
1 1 85 0 4 0 85 1 0 1 3 3 9 9 3
1 1 85 1 4 0 85 1 0 1 3 3 9 9 3
1 1 85 2 4 0 85 1 0 1 3 3 9 9 3
1 1 85 3 4 0 85 1 0 1 3 3 9 9 3
1 1 85 4 4 0 85 1 0 1 3 3 9 9 3
1 1 85 5 4 0 85 1 0 1 3 3 9 9 3
1 1 85 6 4 0 85 1 0 1 3 3 9 9 3
1 1 85 7 4 0 85 1 0 1 3 3 9 9 3
1 1 85 8 4 0 85 1 0 1 3 3 9 9 3
1 1 85 N 4 0 85 1 0 1 3 3 9 9 3
2 1 86 0 5 0 86 2 0 2 3 3 9 9 3
2 1 86 1 5 0 86 2 0 2 3 3 9 9 3
2 1 86 2 5 0 86 2 0 2 3 3 9 9 3
2 1 86 3 5 0 86 2 0 2 3 3 9 9 3
2 1 86 4 5 0 86 2 0 2 3 3 9 9 3
2 1 86 5 5 0 86 2 0 2 3 3 9 9 3
2 1 86 6 5 0 86 2 0 2 3 3 9 9 3
2 1 86 7 5 0 86 2 0 2 3 3 9 9 3
2 1 86 8 5 0 86 2 0 2 3 3 9 9 3
2 1 86 N 5 0 86 2 0 2 3 3 9 9 3
3 1 93 0 3 1 93 0 1 3 3 3 9 9 3
3 1 93 1 3 1 93 0 1 3 3 3 9 9 3
3 1 93 2 3 1 93 0 1 3 3 3 9 9 3
3 1 93 3 3 1 93 0 1 3 3 3 9 9 3
3 1 93 4 3 1 93 0 1 3 3 3 9 9 3
3 1 93 5 3 1 93 0 1 3 3 3 9 9 3
3 1 93 6 3 1 93 0 1 3 3 3 9 9 3
3 1 93 7 3 1 93 0 1 3 3 3 9 9 3
3 1 93 8 3 1 93 0 1 3 3 3 9 9 3
3 1 93 N 3 1 93 0 1 3 3 3 9 9 3
4 1 94 0 4 1 94 1 1 4 3 3 9 9 3
4 1 94 1 4 1 94 1 1 4 3 3 9 9 3
4 1 94 2 4 1 94 1 1 4 3 3 9 9 3
4 1 94 3 4 1 94 1 1 4 3 3 9 9 3
4 1 94 4 4 1 94 1 1 4 3 3 9 9 3
4 1 94 5 4 1 94 1 1 4 3 3 9 9 3
4 1 94 6 4 1 94 1 1 4 3 3 9 9 3
4 1 94 7 4 1 94 1 1 4 3 3 9 9 3
4 1 94 8 4 1 94 1 1 4 3 3 9 9 3
4 1 94 N 4 1 94 1 1 4 3 3 9 9 3
5 1 95 0 5 1 95 2 1 5 3 3 9 9 3
5 1 95 1 5 1 95 2 1 5 3 3 9 9 3
5 1 95 2 5 1 95 2 1 5 3 3 9 9 3
5 1 95 3 5 1 95 2 1 5 3 3 9 9 3
5 1 95 4 5 1 95 2 1 5 3 3 9 9 3
5 1 95 5 5 1 95 2 1 5 3 3 9 9 3
5 1 95 6 5 1 95 2 1 5 3 3 9 9 3
5 1 95 7 5 1 95 2 1 5 3 3 9 9 3
5 1 95 8 5 1 95 2 1 5 3 3 9 9 3
5 1 95 N 5 1 95 2 1 5 3 3 9 9 3
6 1 102 0 3 2 102 0 2 6 3 3 9 9 3
6 1 102 1 3 2 102 0 2 6 3 3 9 9 3
6 1 102 2 3 2 102 0 2 6 3 3 9 9 3
6 1 102 3 3 2 102 0 2 6 3 3 9 9 3
6 1 102 4 3 2 102 0 2 6 3 3 9 9 3
6 1 102 5 3 2 102 0 2 6 3 3 9 9 3
6 1 102 6 3 2 102 0 2 6 3 3 9 9 3
6 1 102 7 3 2 102 0 2 6 3 3 9 9 3
6 1 102 8 3 2 102 0 2 6 3 3 9 9 3
6 1 102 N 3 2 102 0 2 6 3 3 9 9 3
7 1 103 0 4 2 103 1 2 7 3 3 9 9 3
7 1 103 1 4 2 103 1 2 7 3 3 9 9 3
7 1 103 2 4 2 103 1 2 7 3 3 9 9 3
7 1 103 3 4 2 103 1 2 7 3 3 9 9 3
7 1 103 4 4 2 103 1 2 7 3 3 9 9 3
7 1 103 5 4 2 103 1 2 7 3 3 9 9 3
7 1 103 6 4 2 103 1 2 7 3 3 9 9 3
7 1 103 7 4 2 103 1 2 7 3 3 9 9 3
7 1 103 8 4 2 103 1 2 7 3 3 9 9 3
7 1 103 N 4 2 103 1 2 7 3 3 9 9 3
8 1 104 0 5 2 104 2 2 8 3 3 9 9 3
8 1 104 1 5 2 104 2 2 8 3 3 9 9 3
8 1 104 2 5 2 104 2 2 8 3 3 9 9 3
8 1 104 3 5 2 104 2 2 8 3 3 9 9 3
8 1 104 4 5 2 104 2 2 8 3 3 9 9 3
8 1 104 5 5 2 104 2 2 8 3 3 9 9 3
8 1 104 6 5 2 104 2 2 8 3 3 9 9 3
8 1 104 7 5 2 104 2 2 8 3 3 9 9 3
8 1 104 8 5 2 104 2 2 8 3 3 9 9 3
8 1 104 N 5 2 104 2 2 8 3 3 9 9 3
0 1 165 0 3 0 165 0 0 0 3 3 9 9 3
0 1 165 1 3 0 165 0 0 0 3 3 9 9 3
0 1 165 2 3 0 165 0 0 0 3 3 9 9 3
0 1 165 3 3 0 165 0 0 0 3 3 9 9 3
0 1 165 4 3 0 165 0 0 0 3 3 9 9 3
0 1 165 5 3 0 165 0 0 0 3 3 9 9 3
0 1 165 6 3 0 165 0 0 0 3 3 9 9 3
0 1 165 7 3 0 165 0 0 0 3 3 9 9 3
0 1 165 8 3 0 165 0 0 0 3 3 9 9 3
0 1 165 N 3 0 165 0 0 0 3 3 9 9 3
1 1 166 0 4 0 166 1 0 1 3 3 9 9 3
1 1 166 1 4 0 166 1 0 1 3 3 9 9 3
1 1 166 2 4 0 166 1 0 1 3 3 9 9 3
1 1 166 3 4 0 166 1 0 1 3 3 9 9 3
1 1 166 4 4 0 166 1 0 1 3 3 9 9 3
1 1 166 5 4 0 166 1 0 1 3 3 9 9 3
1 1 166 6 4 0 166 1 0 1 3 3 9 9 3
1 1 166 7 4 0 166 1 0 1 3 3 9 9 3
1 1 166 8 4 0 166 1 0 1 3 3 9 9 3
1 1 166 N 4 0 166 1 0 1 3 3 9 9 3
2 1 167 0 5 0 167 2 0 2 3 3 9 9 3
2 1 167 1 5 0 167 2 0 2 3 3 9 9 3
2 1 167 2 5 0 167 2 0 2 3 3 9 9 3
2 1 167 3 5 0 167 2 0 2 3 3 9 9 3
2 1 167 4 5 0 167 2 0 2 3 3 9 9 3
2 1 167 5 5 0 167 2 0 2 3 3 9 9 3
2 1 167 6 5 0 167 2 0 2 3 3 9 9 3
2 1 167 7 5 0 167 2 0 2 3 3 9 9 3
2 1 167 8 5 0 167 2 0 2 3 3 9 9 3
2 1 167 N 5 0 167 2 0 2 3 3 9 9 3
3 1 174 0 3 1 174 0 1 3 3 3 9 9 3
3 1 174 1 3 1 174 0 1 3 3 3 9 9 3
3 1 174 2 3 1 174 0 1 3 3 3 9 9 3
3 1 174 3 3 1 174 0 1 3 3 3 9 9 3
3 1 174 4 3 1 174 0 1 3 3 3 9 9 3
3 1 174 5 3 1 174 0 1 3 3 3 9 9 3
3 1 174 6 3 1 174 0 1 3 3 3 9 9 3
3 1 174 7 3 1 174 0 1 3 3 3 9 9 3
3 1 174 8 3 1 174 0 1 3 3 3 9 9 3
3 1 174 N 3 1 174 0 1 3 3 3 9 9 3
4 1 175 0 4 1 175 1 1 4 3 3 9 9 3
4 1 175 1 4 1 175 1 1 4 3 3 9 9 3
4 1 175 2 4 1 175 1 1 4 3 3 9 9 3
4 1 175 3 4 1 175 1 1 4 3 3 9 9 3
4 1 175 4 4 1 175 1 1 4 3 3 9 9 3
4 1 175 5 4 1 175 1 1 4 3 3 9 9 3
4 1 175 6 4 1 175 1 1 4 3 3 9 9 3
4 1 175 7 4 1 175 1 1 4 3 3 9 9 3
4 1 175 8 4 1 175 1 1 4 3 3 9 9 3
4 1 175 N 4 1 175 1 1 4 3 3 9 9 3
5 1 176 0 5 1 176 2 1 5 3 3 9 9 3
5 1 176 1 5 1 176 2 1 5 3 3 9 9 3
5 1 176 2 5 1 176 2 1 5 3 3 9 9 3
5 1 176 3 5 1 176 2 1 5 3 3 9 9 3
5 1 176 4 5 1 176 2 1 5 3 3 9 9 3
5 1 176 5 5 1 176 2 1 5 3 3 9 9 3
5 1 176 6 5 1 176 2 1 5 3 3 9 9 3
5 1 176 7 5 1 176 2 1 5 3 3 9 9 3
5 1 176 8 5 1 176 2 1 5 3 3 9 9 3
5 1 176 N 5 1 176 2 1 5 3 3 9 9 3
6 1 183 0 3 2 183 0 2 6 3 3 9 9 3
6 1 183 1 3 2 183 0 2 6 3 3 9 9 3
6 1 183 2 3 2 183 0 2 6 3 3 9 9 3
6 1 183 3 3 2 183 0 2 6 3 3 9 9 3
6 1 183 4 3 2 183 0 2 6 3 3 9 9 3
6 1 183 5 3 2 183 0 2 6 3 3 9 9 3
6 1 183 6 3 2 183 0 2 6 3 3 9 9 3
6 1 183 7 3 2 183 0 2 6 3 3 9 9 3
6 1 183 8 3 2 183 0 2 6 3 3 9 9 3
6 1 183 N 3 2 183 0 2 6 3 3 9 9 3
7 1 184 0 4 2 184 1 2 7 3 3 9 9 3
7 1 184 1 4 2 184 1 2 7 3 3 9 9 3
7 1 184 2 4 2 184 1 2 7 3 3 9 9 3
7 1 184 3 4 2 184 1 2 7 3 3 9 9 3
7 1 184 4 4 2 184 1 2 7 3 3 9 9 3
7 1 184 5 4 2 184 1 2 7 3 3 9 9 3
7 1 184 6 4 2 184 1 2 7 3 3 9 9 3
7 1 184 7 4 2 184 1 2 7 3 3 9 9 3
7 1 184 8 4 2 184 1 2 7 3 3 9 9 3
7 1 184 N 4 2 184 1 2 7 3 3 9 9 3
8 1 185 0 5 2 185 2 2 8 3 3 9 9 3
8 1 185 1 5 2 185 2 2 8 3 3 9 9 3
8 1 185 2 5 2 185 2 2 8 3 3 9 9 3
8 1 185 3 5 2 185 2 2 8 3 3 9 9 3
8 1 185 4 5 2 185 2 2 8 3 3 9 9 3
8 1 185 5 5 2 185 2 2 8 3 3 9 9 3
8 1 185 6 5 2 185 2 2 8 3 3 9 9 3
8 1 185 7 5 2 185 2 2 8 3 3 9 9 3
8 1 185 8 5 2 185 2 2 8 3 3 9 9 3
8 1 185 N 5 2 185 2 2 8 3 3 9 9 3
0 2 6 0 6 0 6 0 0 0 3 3 9 9 3
0 2 6 1 6 0 6 0 0 0 3 3 9 9 3
0 2 6 2 6 0 6 0 0 0 3 3 9 9 3
0 2 6 3 6 0 6 0 0 0 3 3 9 9 3
0 2 6 4 6 0 6 0 0 0 3 3 9 9 3
0 2 6 5 6 0 6 0 0 0 3 3 9 9 3
0 2 6 6 6 0 6 0 0 0 3 3 9 9 3
0 2 6 7 6 0 6 0 0 0 3 3 9 9 3
0 2 6 8 6 0 6 0 0 0 3 3 9 9 3
0 2 6 N 6 0 6 0 0 0 3 3 9 9 3
1 2 7 0 7 0 7 1 0 1 3 3 9 9 3
1 2 7 1 7 0 7 1 0 1 3 3 9 9 3
1 2 7 2 7 0 7 1 0 1 3 3 9 9 3
1 2 7 3 7 0 7 1 0 1 3 3 9 9 3
1 2 7 4 7 0 7 1 0 1 3 3 9 9 3
1 2 7 5 7 0 7 1 0 1 3 3 9 9 3
1 2 7 6 7 0 7 1 0 1 3 3 9 9 3
1 2 7 7 7 0 7 1 0 1 3 3 9 9 3
1 2 7 8 7 0 7 1 0 1 3 3 9 9 3
1 2 7 N 7 0 7 1 0 1 3 3 9 9 3
2 2 8 0 8 0 8 2 0 2 3 3 9 9 3
2 2 8 1 8 0 8 2 0 2 3 3 9 9 3
2 2 8 2 8 0 8 2 0 2 3 3 9 9 3
2 2 8 3 8 0 8 2 0 2 3 3 9 9 3
2 2 8 4 8 0 8 2 0 2 3 3 9 9 3
2 2 8 5 8 0 8 2 0 2 3 3 9 9 3
2 2 8 6 8 0 8 2 0 2 3 3 9 9 3
2 2 8 7 8 0 8 2 0 2 3 3 9 9 3
2 2 8 8 8 0 8 2 0 2 3 3 9 9 3
2 2 8 N 8 0 8 2 0 2 3 3 9 9 3
3 2 15 0 6 1 15 0 1 3 3 3 9 9 3
3 2 15 1 6 1 15 0 1 3 3 3 9 9 3
3 2 15 2 6 1 15 0 1 3 3 3 9 9 3
3 2 15 3 6 1 15 0 1 3 3 3 9 9 3
3 2 15 4 6 1 15 0 1 3 3 3 9 9 3
3 2 15 5 6 1 15 0 1 3 3 3 9 9 3
3 2 15 6 6 1 15 0 1 3 3 3 9 9 3
3 2 15 7 6 1 15 0 1 3 3 3 9 9 3
3 2 15 8 6 1 15 0 1 3 3 3 9 9 3
3 2 15 N 6 1 15 0 1 3 3 3 9 9 3
4 2 16 0 7 1 16 1 1 4 3 3 9 9 3
4 2 16 1 7 1 16 1 1 4 3 3 9 9 3
4 2 16 2 7 1 16 1 1 4 3 3 9 9 3
4 2 16 3 7 1 16 1 1 4 3 3 9 9 3
4 2 16 4 7 1 16 1 1 4 3 3 9 9 3
4 2 16 5 7 1 16 1 1 4 3 3 9 9 3
4 2 16 6 7 1 16 1 1 4 3 3 9 9 3
4 2 16 7 7 1 16 1 1 4 3 3 9 9 3
4 2 16 8 7 1 16 1 1 4 3 3 9 9 3
4 2 16 N 7 1 16 1 1 4 3 3 9 9 3
5 2 17 0 8 1 17 2 1 5 3 3 9 9 3
5 2 17 1 8 1 17 2 1 5 3 3 9 9 3
5 2 17 2 8 1 17 2 1 5 3 3 9 9 3
5 2 17 3 8 1 17 2 1 5 3 3 9 9 3
5 2 17 4 8 1 17 2 1 5 3 3 9 9 3
5 2 17 5 8 1 17 2 1 5 3 3 9 9 3
5 2 17 6 8 1 17 2 1 5 3 3 9 9 3
5 2 17 7 8 1 17 2 1 5 3 3 9 9 3
5 2 17 8 8 1 17 2 1 5 3 3 9 9 3
5 2 17 N 8 1 17 2 1 5 3 3 9 9 3
6 2 24 0 6 2 24 0 2 6 3 3 9 9 3
6 2 24 1 6 2 24 0 2 6 3 3 9 9 3
6 2 24 2 6 2 24 0 2 6 3 3 9 9 3
6 2 24 3 6 2 24 0 2 6 3 3 9 9 3
6 2 24 4 6 2 24 0 2 6 3 3 9 9 3
6 2 24 5 6 2 24 0 2 6 3 3 9 9 3
6 2 24 6 6 2 24 0 2 6 3 3 9 9 3
6 2 24 7 6 2 24 0 2 6 3 3 9 9 3
6 2 24 8 6 2 24 0 2 6 3 3 9 9 3
6 2 24 N 6 2 24 0 2 6 3 3 9 9 3
7 2 25 0 7 2 25 1 2 7 3 3 9 9 3
7 2 25 1 7 2 25 1 2 7 3 3 9 9 3
7 2 25 2 7 2 25 1 2 7 3 3 9 9 3
7 2 25 3 7 2 25 1 2 7 3 3 9 9 3
7 2 25 4 7 2 25 1 2 7 3 3 9 9 3
7 2 25 5 7 2 25 1 2 7 3 3 9 9 3
7 2 25 6 7 2 25 1 2 7 3 3 9 9 3
7 2 25 7 7 2 25 1 2 7 3 3 9 9 3
7 2 25 8 7 2 25 1 2 7 3 3 9 9 3
7 2 25 N 7 2 25 1 2 7 3 3 9 9 3
8 2 26 0 8 2 26 2 2 8 3 3 9 9 3
8 2 26 1 8 2 26 2 2 8 3 3 9 9 3
8 2 26 2 8 2 26 2 2 8 3 3 9 9 3
8 2 26 3 8 2 26 2 2 8 3 3 9 9 3
8 2 26 4 8 2 26 2 2 8 3 3 9 9 3
8 2 26 5 8 2 26 2 2 8 3 3 9 9 3
8 2 26 6 8 2 26 2 2 8 3 3 9 9 3
8 2 26 7 8 2 26 2 2 8 3 3 9 9 3
8 2 26 8 8 2 26 2 2 8 3 3 9 9 3
8 2 26 N 8 2 26 2 2 8 3 3 9 9 3
0 2 87 0 6 0 87 0 0 0 3 3 9 9 3
0 2 87 1 6 0 87 0 0 0 3 3 9 9 3
0 2 87 2 6 0 87 0 0 0 3 3 9 9 3
0 2 87 3 6 0 87 0 0 0 3 3 9 9 3
0 2 87 4 6 0 87 0 0 0 3 3 9 9 3
0 2 87 5 6 0 87 0 0 0 3 3 9 9 3
0 2 87 6 6 0 87 0 0 0 3 3 9 9 3
0 2 87 7 6 0 87 0 0 0 3 3 9 9 3
0 2 87 8 6 0 87 0 0 0 3 3 9 9 3
0 2 87 N 6 0 87 0 0 0 3 3 9 9 3
1 2 88 0 7 0 88 1 0 1 3 3 9 9 3
1 2 88 1 7 0 88 1 0 1 3 3 9 9 3
1 2 88 2 7 0 88 1 0 1 3 3 9 9 3
1 2 88 3 7 0 88 1 0 1 3 3 9 9 3
1 2 88 4 7 0 88 1 0 1 3 3 9 9 3
1 2 88 5 7 0 88 1 0 1 3 3 9 9 3
1 2 88 6 7 0 88 1 0 1 3 3 9 9 3
1 2 88 7 7 0 88 1 0 1 3 3 9 9 3
1 2 88 8 7 0 88 1 0 1 3 3 9 9 3
1 2 88 N 7 0 88 1 0 1 3 3 9 9 3
2 2 89 0 8 0 89 2 0 2 3 3 9 9 3
2 2 89 1 8 0 89 2 0 2 3 3 9 9 3
2 2 89 2 8 0 89 2 0 2 3 3 9 9 3
2 2 89 3 8 0 89 2 0 2 3 3 9 9 3
2 2 89 4 8 0 89 2 0 2 3 3 9 9 3
2 2 89 5 8 0 89 2 0 2 3 3 9 9 3
2 2 89 6 8 0 89 2 0 2 3 3 9 9 3
2 2 89 7 8 0 89 2 0 2 3 3 9 9 3
2 2 89 8 8 0 89 2 0 2 3 3 9 9 3
2 2 89 N 8 0 89 2 0 2 3 3 9 9 3
3 2 96 0 6 1 96 0 1 3 3 3 9 9 3
3 2 96 1 6 1 96 0 1 3 3 3 9 9 3
3 2 96 2 6 1 96 0 1 3 3 3 9 9 3
3 2 96 3 6 1 96 0 1 3 3 3 9 9 3
3 2 96 4 6 1 96 0 1 3 3 3 9 9 3
3 2 96 5 6 1 96 0 1 3 3 3 9 9 3
3 2 96 6 6 1 96 0 1 3 3 3 9 9 3
3 2 96 7 6 1 96 0 1 3 3 3 9 9 3
3 2 96 8 6 1 96 0 1 3 3 3 9 9 3
3 2 96 N 6 1 96 0 1 3 3 3 9 9 3
4 2 97 0 7 1 97 1 1 4 3 3 9 9 3
4 2 97 1 7 1 97 1 1 4 3 3 9 9 3
4 2 97 2 7 1 97 1 1 4 3 3 9 9 3
4 2 97 3 7 1 97 1 1 4 3 3 9 9 3
4 2 97 4 7 1 97 1 1 4 3 3 9 9 3
4 2 97 5 7 1 97 1 1 4 3 3 9 9 3
4 2 97 6 7 1 97 1 1 4 3 3 9 9 3
4 2 97 7 7 1 97 1 1 4 3 3 9 9 3
4 2 97 8 7 1 97 1 1 4 3 3 9 9 3
4 2 97 N 7 1 97 1 1 4 3 3 9 9 3
5 2 98 0 8 1 98 2 1 5 3 3 9 9 3
5 2 98 1 8 1 98 2 1 5 3 3 9 9 3
5 2 98 2 8 1 98 2 1 5 3 3 9 9 3
5 2 98 3 8 1 98 2 1 5 3 3 9 9 3
5 2 98 4 8 1 98 2 1 5 3 3 9 9 3
5 2 98 5 8 1 98 2 1 5 3 3 9 9 3
5 2 98 6 8 1 98 2 1 5 3 3 9 9 3
5 2 98 7 8 1 98 2 1 5 3 3 9 9 3
5 2 98 8 8 1 98 2 1 5 3 3 9 9 3
5 2 98 N 8 1 98 2 1 5 3 3 9 9 3
6 2 105 0 6 2 105 0 2 6 3 3 9 9 3
6 2 105 1 6 2 105 0 2 6 3 3 9 9 3
6 2 105 2 6 2 105 0 2 6 3 3 9 9 3
6 2 105 3 6 2 105 0 2 6 3 3 9 9 3
6 2 105 4 6 2 105 0 2 6 3 3 9 9 3
6 2 105 5 6 2 105 0 2 6 3 3 9 9 3
6 2 105 6 6 2 105 0 2 6 3 3 9 9 3
6 2 105 7 6 2 105 0 2 6 3 3 9 9 3
6 2 105 8 6 2 105 0 2 6 3 3 9 9 3
6 2 105 N 6 2 105 0 2 6 3 3 9 9 3
7 2 106 0 7 2 106 1 2 7 3 3 9 9 3
7 2 106 1 7 2 106 1 2 7 3 3 9 9 3
7 2 106 2 7 2 106 1 2 7 3 3 9 9 3
7 2 106 3 7 2 106 1 2 7 3 3 9 9 3
7 2 106 4 7 2 106 1 2 7 3 3 9 9 3
7 2 106 5 7 2 106 1 2 7 3 3 9 9 3
7 2 106 6 7 2 106 1 2 7 3 3 9 9 3
7 2 106 7 7 2 106 1 2 7 3 3 9 9 3
7 2 106 8 7 2 106 1 2 7 3 3 9 9 3
7 2 106 N 7 2 106 1 2 7 3 3 9 9 3
8 2 107 0 8 2 107 2 2 8 3 3 9 9 3
8 2 107 1 8 2 107 2 2 8 3 3 9 9 3
8 2 107 2 8 2 107 2 2 8 3 3 9 9 3
8 2 107 3 8 2 107 2 2 8 3 3 9 9 3
8 2 107 4 8 2 107 2 2 8 3 3 9 9 3
8 2 107 5 8 2 107 2 2 8 3 3 9 9 3
8 2 107 6 8 2 107 2 2 8 3 3 9 9 3
8 2 107 7 8 2 107 2 2 8 3 3 9 9 3
8 2 107 8 8 2 107 2 2 8 3 3 9 9 3
8 2 107 N 8 2 107 2 2 8 3 3 9 9 3
0 2 168 0 6 0 168 0 0 0 3 3 9 9 3
0 2 168 1 6 0 168 0 0 0 3 3 9 9 3
0 2 168 2 6 0 168 0 0 0 3 3 9 9 3
0 2 168 3 6 0 168 0 0 0 3 3 9 9 3
0 2 168 4 6 0 168 0 0 0 3 3 9 9 3
0 2 168 5 6 0 168 0 0 0 3 3 9 9 3
0 2 168 6 6 0 168 0 0 0 3 3 9 9 3
0 2 168 7 6 0 168 0 0 0 3 3 9 9 3
0 2 168 8 6 0 168 0 0 0 3 3 9 9 3
0 2 168 N 6 0 168 0 0 0 3 3 9 9 3
1 2 169 0 7 0 169 1 0 1 3 3 9 9 3
1 2 169 1 7 0 169 1 0 1 3 3 9 9 3
1 2 169 2 7 0 169 1 0 1 3 3 9 9 3
1 2 169 3 7 0 169 1 0 1 3 3 9 9 3
1 2 169 4 7 0 169 1 0 1 3 3 9 9 3
1 2 169 5 7 0 169 1 0 1 3 3 9 9 3
1 2 169 6 7 0 169 1 0 1 3 3 9 9 3
1 2 169 7 7 0 169 1 0 1 3 3 9 9 3
1 2 169 8 7 0 169 1 0 1 3 3 9 9 3
1 2 169 N 7 0 169 1 0 1 3 3 9 9 3
2 2 170 0 8 0 170 2 0 2 3 3 9 9 3
2 2 170 1 8 0 170 2 0 2 3 3 9 9 3
2 2 170 2 8 0 170 2 0 2 3 3 9 9 3
2 2 170 3 8 0 170 2 0 2 3 3 9 9 3
2 2 170 4 8 0 170 2 0 2 3 3 9 9 3
2 2 170 5 8 0 170 2 0 2 3 3 9 9 3
2 2 170 6 8 0 170 2 0 2 3 3 9 9 3
2 2 170 7 8 0 170 2 0 2 3 3 9 9 3
2 2 170 8 8 0 170 2 0 2 3 3 9 9 3
2 2 170 N 8 0 170 2 0 2 3 3 9 9 3
3 2 177 0 6 1 177 0 1 3 3 3 9 9 3
3 2 177 1 6 1 177 0 1 3 3 3 9 9 3
3 2 177 2 6 1 177 0 1 3 3 3 9 9 3
3 2 177 3 6 1 177 0 1 3 3 3 9 9 3
3 2 177 4 6 1 177 0 1 3 3 3 9 9 3
3 2 177 5 6 1 177 0 1 3 3 3 9 9 3
3 2 177 6 6 1 177 0 1 3 3 3 9 9 3
3 2 177 7 6 1 177 0 1 3 3 3 9 9 3
3 2 177 8 6 1 177 0 1 3 3 3 9 9 3
3 2 177 N 6 1 177 0 1 3 3 3 9 9 3
4 2 178 0 7 1 178 1 1 4 3 3 9 9 3
4 2 178 1 7 1 178 1 1 4 3 3 9 9 3
4 2 178 2 7 1 178 1 1 4 3 3 9 9 3
4 2 178 3 7 1 178 1 1 4 3 3 9 9 3
4 2 178 4 7 1 178 1 1 4 3 3 9 9 3
4 2 178 5 7 1 178 1 1 4 3 3 9 9 3
4 2 178 6 7 1 178 1 1 4 3 3 9 9 3
4 2 178 7 7 1 178 1 1 4 3 3 9 9 3
4 2 178 8 7 1 178 1 1 4 3 3 9 9 3
4 2 178 N 7 1 178 1 1 4 3 3 9 9 3
5 2 179 0 8 1 179 2 1 5 3 3 9 9 3
5 2 179 1 8 1 179 2 1 5 3 3 9 9 3
5 2 179 2 8 1 179 2 1 5 3 3 9 9 3
5 2 179 3 8 1 179 2 1 5 3 3 9 9 3
5 2 179 4 8 1 179 2 1 5 3 3 9 9 3
5 2 179 5 8 1 179 2 1 5 3 3 9 9 3
5 2 179 6 8 1 179 2 1 5 3 3 9 9 3
5 2 179 7 8 1 179 2 1 5 3 3 9 9 3
5 2 179 8 8 1 179 2 1 5 3 3 9 9 3
5 2 179 N 8 1 179 2 1 5 3 3 9 9 3
6 2 186 0 6 2 186 0 2 6 3 3 9 9 3
6 2 186 1 6 2 186 0 2 6 3 3 9 9 3
6 2 186 2 6 2 186 0 2 6 3 3 9 9 3
6 2 186 3 6 2 186 0 2 6 3 3 9 9 3
6 2 186 4 6 2 186 0 2 6 3 3 9 9 3
6 2 186 5 6 2 186 0 2 6 3 3 9 9 3
6 2 186 6 6 2 186 0 2 6 3 3 9 9 3
6 2 186 7 6 2 186 0 2 6 3 3 9 9 3
6 2 186 8 6 2 186 0 2 6 3 3 9 9 3
6 2 186 N 6 2 186 0 2 6 3 3 9 9 3
7 2 187 0 7 2 187 1 2 7 3 3 9 9 3
7 2 187 1 7 2 187 1 2 7 3 3 9 9 3
7 2 187 2 7 2 187 1 2 7 3 3 9 9 3
7 2 187 3 7 2 187 1 2 7 3 3 9 9 3
7 2 187 4 7 2 187 1 2 7 3 3 9 9 3
7 2 187 5 7 2 187 1 2 7 3 3 9 9 3
7 2 187 6 7 2 187 1 2 7 3 3 9 9 3
7 2 187 7 7 2 187 1 2 7 3 3 9 9 3
7 2 187 8 7 2 187 1 2 7 3 3 9 9 3
7 2 187 N 7 2 187 1 2 7 3 3 9 9 3
8 2 188 0 8 2 188 2 2 8 3 3 9 9 3
8 2 188 1 8 2 188 2 2 8 3 3 9 9 3
8 2 188 2 8 2 188 2 2 8 3 3 9 9 3
8 2 188 3 8 2 188 2 2 8 3 3 9 9 3
8 2 188 4 8 2 188 2 2 8 3 3 9 9 3
8 2 188 5 8 2 188 2 2 8 3 3 9 9 3
8 2 188 6 8 2 188 2 2 8 3 3 9 9 3
8 2 188 7 8 2 188 2 2 8 3 3 9 9 3
8 2 188 8 8 2 188 2 2 8 3 3 9 9 3
8 2 188 N 8 2 188 2 2 8 3 3 9 9 3
0 3 27 0 0 3 27 0 0 0 3 3 9 9 3
0 3 27 1 0 3 27 0 0 0 3 3 9 9 3
0 3 27 2 0 3 27 0 0 0 3 3 9 9 3
0 3 27 3 0 3 27 0 0 0 3 3 9 9 3
0 3 27 4 0 3 27 0 0 0 3 3 9 9 3
0 3 27 5 0 3 27 0 0 0 3 3 9 9 3
0 3 27 6 0 3 27 0 0 0 3 3 9 9 3
0 3 27 7 0 3 27 0 0 0 3 3 9 9 3
0 3 27 8 0 3 27 0 0 0 3 3 9 9 3
0 3 27 N 0 3 27 0 0 0 3 3 9 9 3
1 3 28 0 1 3 28 1 0 1 3 3 9 9 3
1 3 28 1 1 3 28 1 0 1 3 3 9 9 3
1 3 28 2 1 3 28 1 0 1 3 3 9 9 3
1 3 28 3 1 3 28 1 0 1 3 3 9 9 3
1 3 28 4 1 3 28 1 0 1 3 3 9 9 3
1 3 28 5 1 3 28 1 0 1 3 3 9 9 3
1 3 28 6 1 3 28 1 0 1 3 3 9 9 3
1 3 28 7 1 3 28 1 0 1 3 3 9 9 3
1 3 28 8 1 3 28 1 0 1 3 3 9 9 3
1 3 28 N 1 3 28 1 0 1 3 3 9 9 3
2 3 29 0 2 3 29 2 0 2 3 3 9 9 3
2 3 29 1 2 3 29 2 0 2 3 3 9 9 3
2 3 29 2 2 3 29 2 0 2 3 3 9 9 3
2 3 29 3 2 3 29 2 0 2 3 3 9 9 3
2 3 29 4 2 3 29 2 0 2 3 3 9 9 3
2 3 29 5 2 3 29 2 0 2 3 3 9 9 3
2 3 29 6 2 3 29 2 0 2 3 3 9 9 3
2 3 29 7 2 3 29 2 0 2 3 3 9 9 3
2 3 29 8 2 3 29 2 0 2 3 3 9 9 3
2 3 29 N 2 3 29 2 0 2 3 3 9 9 3
3 3 36 0 0 4 36 0 1 3 3 3 9 9 3
3 3 36 1 0 4 36 0 1 3 3 3 9 9 3
3 3 36 2 0 4 36 0 1 3 3 3 9 9 3
3 3 36 3 0 4 36 0 1 3 3 3 9 9 3
3 3 36 4 0 4 36 0 1 3 3 3 9 9 3
3 3 36 5 0 4 36 0 1 3 3 3 9 9 3
3 3 36 6 0 4 36 0 1 3 3 3 9 9 3
3 3 36 7 0 4 36 0 1 3 3 3 9 9 3
3 3 36 8 0 4 36 0 1 3 3 3 9 9 3
3 3 36 N 0 4 36 0 1 3 3 3 9 9 3
4 3 37 0 1 4 37 1 1 4 3 3 9 9 3
4 3 37 1 1 4 37 1 1 4 3 3 9 9 3
4 3 37 2 1 4 37 1 1 4 3 3 9 9 3
4 3 37 3 1 4 37 1 1 4 3 3 9 9 3
4 3 37 4 1 4 37 1 1 4 3 3 9 9 3
4 3 37 5 1 4 37 1 1 4 3 3 9 9 3
4 3 37 6 1 4 37 1 1 4 3 3 9 9 3
4 3 37 7 1 4 37 1 1 4 3 3 9 9 3
4 3 37 8 1 4 37 1 1 4 3 3 9 9 3
4 3 37 N 1 4 37 1 1 4 3 3 9 9 3
5 3 38 0 2 4 38 2 1 5 3 3 9 9 3
5 3 38 1 2 4 38 2 1 5 3 3 9 9 3
5 3 38 2 2 4 38 2 1 5 3 3 9 9 3
5 3 38 3 2 4 38 2 1 5 3 3 9 9 3
5 3 38 4 2 4 38 2 1 5 3 3 9 9 3
5 3 38 5 2 4 38 2 1 5 3 3 9 9 3
5 3 38 6 2 4 38 2 1 5 3 3 9 9 3
5 3 38 7 2 4 38 2 1 5 3 3 9 9 3
5 3 38 8 2 4 38 2 1 5 3 3 9 9 3
5 3 38 N 2 4 38 2 1 5 3 3 9 9 3
6 3 45 0 0 5 45 0 2 6 3 3 9 9 3
6 3 45 1 0 5 45 0 2 6 3 3 9 9 3
6 3 45 2 0 5 45 0 2 6 3 3 9 9 3
6 3 45 3 0 5 45 0 2 6 3 3 9 9 3
6 3 45 4 0 5 45 0 2 6 3 3 9 9 3
6 3 45 5 0 5 45 0 2 6 3 3 9 9 3
6 3 45 6 0 5 45 0 2 6 3 3 9 9 3
6 3 45 7 0 5 45 0 2 6 3 3 9 9 3
6 3 45 8 0 5 45 0 2 6 3 3 9 9 3
6 3 45 N 0 5 45 0 2 6 3 3 9 9 3
7 3 46 0 1 5 46 1 2 7 3 3 9 9 3
7 3 46 1 1 5 46 1 2 7 3 3 9 9 3
7 3 46 2 1 5 46 1 2 7 3 3 9 9 3
7 3 46 3 1 5 46 1 2 7 3 3 9 9 3
7 3 46 4 1 5 46 1 2 7 3 3 9 9 3
7 3 46 5 1 5 46 1 2 7 3 3 9 9 3
7 3 46 6 1 5 46 1 2 7 3 3 9 9 3
7 3 46 7 1 5 46 1 2 7 3 3 9 9 3
7 3 46 8 1 5 46 1 2 7 3 3 9 9 3
7 3 46 N 1 5 46 1 2 7 3 3 9 9 3
8 3 47 0 2 5 47 2 2 8 3 3 9 9 3
8 3 47 1 2 5 47 2 2 8 3 3 9 9 3
8 3 47 2 2 5 47 2 2 8 3 3 9 9 3
8 3 47 3 2 5 47 2 2 8 3 3 9 9 3
8 3 47 4 2 5 47 2 2 8 3 3 9 9 3
8 3 47 5 2 5 47 2 2 8 3 3 9 9 3
8 3 47 6 2 5 47 2 2 8 3 3 9 9 3
8 3 47 7 2 5 47 2 2 8 3 3 9 9 3
8 3 47 8 2 5 47 2 2 8 3 3 9 9 3
8 3 47 N 2 5 47 2 2 8 3 3 9 9 3
0 3 108 0 0 3 108 0 0 0 3 3 9 9 3
0 3 108 1 0 3 108 0 0 0 3 3 9 9 3
0 3 108 2 0 3 108 0 0 0 3 3 9 9 3
0 3 108 3 0 3 108 0 0 0 3 3 9 9 3
0 3 108 4 0 3 108 0 0 0 3 3 9 9 3
0 3 108 5 0 3 108 0 0 0 3 3 9 9 3
0 3 108 6 0 3 108 0 0 0 3 3 9 9 3
0 3 108 7 0 3 108 0 0 0 3 3 9 9 3
0 3 108 8 0 3 108 0 0 0 3 3 9 9 3
0 3 108 N 0 3 108 0 0 0 3 3 9 9 3
1 3 109 0 1 3 109 1 0 1 3 3 9 9 3
1 3 109 1 1 3 109 1 0 1 3 3 9 9 3
1 3 109 2 1 3 109 1 0 1 3 3 9 9 3
1 3 109 3 1 3 109 1 0 1 3 3 9 9 3
1 3 109 4 1 3 109 1 0 1 3 3 9 9 3
1 3 109 5 1 3 109 1 0 1 3 3 9 9 3
1 3 109 6 1 3 109 1 0 1 3 3 9 9 3
1 3 109 7 1 3 109 1 0 1 3 3 9 9 3
1 3 109 8 1 3 109 1 0 1 3 3 9 9 3
1 3 109 N 1 3 109 1 0 1 3 3 9 9 3
2 3 110 0 2 3 110 2 0 2 3 3 9 9 3
2 3 110 1 2 3 110 2 0 2 3 3 9 9 3
2 3 110 2 2 3 110 2 0 2 3 3 9 9 3
2 3 110 3 2 3 110 2 0 2 3 3 9 9 3
2 3 110 4 2 3 110 2 0 2 3 3 9 9 3
2 3 110 5 2 3 110 2 0 2 3 3 9 9 3
2 3 110 6 2 3 110 2 0 2 3 3 9 9 3
2 3 110 7 2 3 110 2 0 2 3 3 9 9 3
2 3 110 8 2 3 110 2 0 2 3 3 9 9 3
2 3 110 N 2 3 110 2 0 2 3 3 9 9 3
3 3 117 0 0 4 117 0 1 3 3 3 9 9 3
3 3 117 1 0 4 117 0 1 3 3 3 9 9 3
3 3 117 2 0 4 117 0 1 3 3 3 9 9 3
3 3 117 3 0 4 117 0 1 3 3 3 9 9 3
3 3 117 4 0 4 117 0 1 3 3 3 9 9 3
3 3 117 5 0 4 117 0 1 3 3 3 9 9 3
3 3 117 6 0 4 117 0 1 3 3 3 9 9 3
3 3 117 7 0 4 117 0 1 3 3 3 9 9 3
3 3 117 8 0 4 117 0 1 3 3 3 9 9 3
3 3 117 N 0 4 117 0 1 3 3 3 9 9 3
4 3 118 0 1 4 118 1 1 4 3 3 9 9 3
4 3 118 1 1 4 118 1 1 4 3 3 9 9 3
4 3 118 2 1 4 118 1 1 4 3 3 9 9 3
4 3 118 3 1 4 118 1 1 4 3 3 9 9 3
4 3 118 4 1 4 118 1 1 4 3 3 9 9 3
4 3 118 5 1 4 118 1 1 4 3 3 9 9 3
4 3 118 6 1 4 118 1 1 4 3 3 9 9 3
4 3 118 7 1 4 118 1 1 4 3 3 9 9 3
4 3 118 8 1 4 118 1 1 4 3 3 9 9 3
4 3 118 N 1 4 118 1 1 4 3 3 9 9 3
5 3 119 0 2 4 119 2 1 5 3 3 9 9 3
5 3 119 1 2 4 119 2 1 5 3 3 9 9 3
5 3 119 2 2 4 119 2 1 5 3 3 9 9 3
5 3 119 3 2 4 119 2 1 5 3 3 9 9 3
5 3 119 4 2 4 119 2 1 5 3 3 9 9 3
5 3 119 5 2 4 119 2 1 5 3 3 9 9 3
5 3 119 6 2 4 119 2 1 5 3 3 9 9 3
5 3 119 7 2 4 119 2 1 5 3 3 9 9 3
5 3 119 8 2 4 119 2 1 5 3 3 9 9 3
5 3 119 N 2 4 119 2 1 5 3 3 9 9 3
6 3 126 0 0 5 126 0 2 6 3 3 9 9 3
6 3 126 1 0 5 126 0 2 6 3 3 9 9 3
6 3 126 2 0 5 126 0 2 6 3 3 9 9 3
6 3 126 3 0 5 126 0 2 6 3 3 9 9 3
6 3 126 4 0 5 126 0 2 6 3 3 9 9 3
6 3 126 5 0 5 126 0 2 6 3 3 9 9 3
6 3 126 6 0 5 126 0 2 6 3 3 9 9 3
6 3 126 7 0 5 126 0 2 6 3 3 9 9 3
6 3 126 8 0 5 126 0 2 6 3 3 9 9 3
6 3 126 N 0 5 126 0 2 6 3 3 9 9 3
7 3 127 0 1 5 127 1 2 7 3 3 9 9 3
7 3 127 1 1 5 127 1 2 7 3 3 9 9 3
7 3 127 2 1 5 127 1 2 7 3 3 9 9 3
7 3 127 3 1 5 127 1 2 7 3 3 9 9 3
7 3 127 4 1 5 127 1 2 7 3 3 9 9 3
7 3 127 5 1 5 127 1 2 7 3 3 9 9 3
7 3 127 6 1 5 127 1 2 7 3 3 9 9 3
7 3 127 7 1 5 127 1 2 7 3 3 9 9 3
7 3 127 8 1 5 127 1 2 7 3 3 9 9 3
7 3 127 N 1 5 127 1 2 7 3 3 9 9 3
8 3 128 0 2 5 128 2 2 8 3 3 9 9 3
8 3 128 1 2 5 128 2 2 8 3 3 9 9 3
8 3 128 2 2 5 128 2 2 8 3 3 9 9 3
8 3 128 3 2 5 128 2 2 8 3 3 9 9 3
8 3 128 4 2 5 128 2 2 8 3 3 9 9 3
8 3 128 5 2 5 128 2 2 8 3 3 9 9 3
8 3 128 6 2 5 128 2 2 8 3 3 9 9 3
8 3 128 7 2 5 128 2 2 8 3 3 9 9 3
8 3 128 8 2 5 128 2 2 8 3 3 9 9 3
8 3 128 N 2 5 128 2 2 8 3 3 9 9 3
0 3 189 0 0 3 189 0 0 0 3 3 9 9 3
0 3 189 1 0 3 189 0 0 0 3 3 9 9 3
0 3 189 2 0 3 189 0 0 0 3 3 9 9 3
0 3 189 3 0 3 189 0 0 0 3 3 9 9 3
0 3 189 4 0 3 189 0 0 0 3 3 9 9 3
0 3 189 5 0 3 189 0 0 0 3 3 9 9 3
0 3 189 6 0 3 189 0 0 0 3 3 9 9 3
0 3 189 7 0 3 189 0 0 0 3 3 9 9 3
0 3 189 8 0 3 189 0 0 0 3 3 9 9 3
0 3 189 N 0 3 189 0 0 0 3 3 9 9 3
1 3 190 0 1 3 190 1 0 1 3 3 9 9 3
1 3 190 1 1 3 190 1 0 1 3 3 9 9 3
1 3 190 2 1 3 190 1 0 1 3 3 9 9 3
1 3 190 3 1 3 190 1 0 1 3 3 9 9 3
1 3 190 4 1 3 190 1 0 1 3 3 9 9 3
1 3 190 5 1 3 190 1 0 1 3 3 9 9 3
1 3 190 6 1 3 190 1 0 1 3 3 9 9 3
1 3 190 7 1 3 190 1 0 1 3 3 9 9 3
1 3 190 8 1 3 190 1 0 1 3 3 9 9 3
1 3 190 N 1 3 190 1 0 1 3 3 9 9 3
2 3 191 0 2 3 191 2 0 2 3 3 9 9 3
2 3 191 1 2 3 191 2 0 2 3 3 9 9 3
2 3 191 2 2 3 191 2 0 2 3 3 9 9 3
2 3 191 3 2 3 191 2 0 2 3 3 9 9 3
2 3 191 4 2 3 191 2 0 2 3 3 9 9 3
2 3 191 5 2 3 191 2 0 2 3 3 9 9 3
2 3 191 6 2 3 191 2 0 2 3 3 9 9 3
2 3 191 7 2 3 191 2 0 2 3 3 9 9 3
2 3 191 8 2 3 191 2 0 2 3 3 9 9 3
2 3 191 N 2 3 191 2 0 2 3 3 9 9 3
3 3 198 0 0 4 198 0 1 3 3 3 9 9 3
3 3 198 1 0 4 198 0 1 3 3 3 9 9 3
3 3 198 2 0 4 198 0 1 3 3 3 9 9 3
3 3 198 3 0 4 198 0 1 3 3 3 9 9 3
3 3 198 4 0 4 198 0 1 3 3 3 9 9 3
3 3 198 5 0 4 198 0 1 3 3 3 9 9 3
3 3 198 6 0 4 198 0 1 3 3 3 9 9 3
3 3 198 7 0 4 198 0 1 3 3 3 9 9 3
3 3 198 8 0 4 198 0 1 3 3 3 9 9 3
3 3 198 N 0 4 198 0 1 3 3 3 9 9 3
4 3 199 0 1 4 199 1 1 4 3 3 9 9 3
4 3 199 1 1 4 199 1 1 4 3 3 9 9 3
4 3 199 2 1 4 199 1 1 4 3 3 9 9 3
4 3 199 3 1 4 199 1 1 4 3 3 9 9 3
4 3 199 4 1 4 199 1 1 4 3 3 9 9 3
4 3 199 5 1 4 199 1 1 4 3 3 9 9 3
4 3 199 6 1 4 199 1 1 4 3 3 9 9 3
4 3 199 7 1 4 199 1 1 4 3 3 9 9 3
4 3 199 8 1 4 199 1 1 4 3 3 9 9 3
4 3 199 N 1 4 199 1 1 4 3 3 9 9 3
5 3 200 0 2 4 200 2 1 5 3 3 9 9 3
5 3 200 1 2 4 200 2 1 5 3 3 9 9 3
5 3 200 2 2 4 200 2 1 5 3 3 9 9 3
5 3 200 3 2 4 200 2 1 5 3 3 9 9 3
5 3 200 4 2 4 200 2 1 5 3 3 9 9 3
5 3 200 5 2 4 200 2 1 5 3 3 9 9 3
5 3 200 6 2 4 200 2 1 5 3 3 9 9 3
5 3 200 7 2 4 200 2 1 5 3 3 9 9 3
5 3 200 8 2 4 200 2 1 5 3 3 9 9 3
5 3 200 N 2 4 200 2 1 5 3 3 9 9 3
6 3 207 0 0 5 207 0 2 6 3 3 9 9 3
6 3 207 1 0 5 207 0 2 6 3 3 9 9 3
6 3 207 2 0 5 207 0 2 6 3 3 9 9 3
6 3 207 3 0 5 207 0 2 6 3 3 9 9 3
6 3 207 4 0 5 207 0 2 6 3 3 9 9 3
6 3 207 5 0 5 207 0 2 6 3 3 9 9 3
6 3 207 6 0 5 207 0 2 6 3 3 9 9 3
6 3 207 7 0 5 207 0 2 6 3 3 9 9 3
6 3 207 8 0 5 207 0 2 6 3 3 9 9 3
6 3 207 N 0 5 207 0 2 6 3 3 9 9 3
7 3 208 0 1 5 208 1 2 7 3 3 9 9 3
7 3 208 1 1 5 208 1 2 7 3 3 9 9 3
7 3 208 2 1 5 208 1 2 7 3 3 9 9 3
7 3 208 3 1 5 208 1 2 7 3 3 9 9 3
7 3 208 4 1 5 208 1 2 7 3 3 9 9 3
7 3 208 5 1 5 208 1 2 7 3 3 9 9 3
7 3 208 6 1 5 208 1 2 7 3 3 9 9 3
7 3 208 7 1 5 208 1 2 7 3 3 9 9 3
7 3 208 8 1 5 208 1 2 7 3 3 9 9 3
7 3 208 N 1 5 208 1 2 7 3 3 9 9 3
8 3 209 0 2 5 209 2 2 8 3 3 9 9 3
8 3 209 1 2 5 209 2 2 8 3 3 9 9 3
8 3 209 2 2 5 209 2 2 8 3 3 9 9 3
8 3 209 3 2 5 209 2 2 8 3 3 9 9 3
8 3 209 4 2 5 209 2 2 8 3 3 9 9 3
8 3 209 5 2 5 209 2 2 8 3 3 9 9 3
8 3 209 6 2 5 209 2 2 8 3 3 9 9 3
8 3 209 7 2 5 209 2 2 8 3 3 9 9 3
8 3 209 8 2 5 209 2 2 8 3 3 9 9 3
8 3 209 N 2 5 209 2 2 8 3 3 9 9 3
0 4 30 0 3 3 30 0 0 0 3 3 9 9 3
0 4 30 1 3 3 30 0 0 0 3 3 9 9 3
0 4 30 2 3 3 30 0 0 0 3 3 9 9 3
0 4 30 3 3 3 30 0 0 0 3 3 9 9 3
0 4 30 4 3 3 30 0 0 0 3 3 9 9 3
0 4 30 5 3 3 30 0 0 0 3 3 9 9 3
0 4 30 6 3 3 30 0 0 0 3 3 9 9 3
0 4 30 7 3 3 30 0 0 0 3 3 9 9 3
0 4 30 8 3 3 30 0 0 0 3 3 9 9 3
0 4 30 N 3 3 30 0 0 0 3 3 9 9 3
1 4 31 0 4 3 31 1 0 1 3 3 9 9 3
1 4 31 1 4 3 31 1 0 1 3 3 9 9 3
1 4 31 2 4 3 31 1 0 1 3 3 9 9 3
1 4 31 3 4 3 31 1 0 1 3 3 9 9 3
1 4 31 4 4 3 31 1 0 1 3 3 9 9 3
1 4 31 5 4 3 31 1 0 1 3 3 9 9 3
1 4 31 6 4 3 31 1 0 1 3 3 9 9 3
1 4 31 7 4 3 31 1 0 1 3 3 9 9 3
1 4 31 8 4 3 31 1 0 1 3 3 9 9 3
1 4 31 N 4 3 31 1 0 1 3 3 9 9 3
2 4 32 0 5 3 32 2 0 2 3 3 9 9 3
2 4 32 1 5 3 32 2 0 2 3 3 9 9 3
2 4 32 2 5 3 32 2 0 2 3 3 9 9 3
2 4 32 3 5 3 32 2 0 2 3 3 9 9 3
2 4 32 4 5 3 32 2 0 2 3 3 9 9 3
2 4 32 5 5 3 32 2 0 2 3 3 9 9 3
2 4 32 6 5 3 32 2 0 2 3 3 9 9 3
2 4 32 7 5 3 32 2 0 2 3 3 9 9 3
2 4 32 8 5 3 32 2 0 2 3 3 9 9 3
2 4 32 N 5 3 32 2 0 2 3 3 9 9 3
3 4 39 0 3 4 39 0 1 3 3 3 9 9 3
3 4 39 1 3 4 39 0 1 3 3 3 9 9 3
3 4 39 2 3 4 39 0 1 3 3 3 9 9 3
3 4 39 3 3 4 39 0 1 3 3 3 9 9 3
3 4 39 4 3 4 39 0 1 3 3 3 9 9 3
3 4 39 5 3 4 39 0 1 3 3 3 9 9 3
3 4 39 6 3 4 39 0 1 3 3 3 9 9 3
3 4 39 7 3 4 39 0 1 3 3 3 9 9 3
3 4 39 8 3 4 39 0 1 3 3 3 9 9 3
3 4 39 N 3 4 39 0 1 3 3 3 9 9 3
4 4 40 0 4 4 40 1 1 4 3 3 9 9 3
4 4 40 1 4 4 40 1 1 4 3 3 9 9 3
4 4 40 2 4 4 40 1 1 4 3 3 9 9 3
4 4 40 3 4 4 40 1 1 4 3 3 9 9 3
4 4 40 4 4 4 40 1 1 4 3 3 9 9 3
4 4 40 5 4 4 40 1 1 4 3 3 9 9 3
4 4 40 6 4 4 40 1 1 4 3 3 9 9 3
4 4 40 7 4 4 40 1 1 4 3 3 9 9 3
4 4 40 8 4 4 40 1 1 4 3 3 9 9 3
4 4 40 N 4 4 40 1 1 4 3 3 9 9 3
5 4 41 0 5 4 41 2 1 5 3 3 9 9 3
5 4 41 1 5 4 41 2 1 5 3 3 9 9 3
5 4 41 2 5 4 41 2 1 5 3 3 9 9 3
5 4 41 3 5 4 41 2 1 5 3 3 9 9 3
5 4 41 4 5 4 41 2 1 5 3 3 9 9 3
5 4 41 5 5 4 41 2 1 5 3 3 9 9 3
5 4 41 6 5 4 41 2 1 5 3 3 9 9 3
5 4 41 7 5 4 41 2 1 5 3 3 9 9 3
5 4 41 8 5 4 41 2 1 5 3 3 9 9 3
5 4 41 N 5 4 41 2 1 5 3 3 9 9 3
6 4 48 0 3 5 48 0 2 6 3 3 9 9 3
6 4 48 1 3 5 48 0 2 6 3 3 9 9 3
6 4 48 2 3 5 48 0 2 6 3 3 9 9 3
6 4 48 3 3 5 48 0 2 6 3 3 9 9 3
6 4 48 4 3 5 48 0 2 6 3 3 9 9 3
6 4 48 5 3 5 48 0 2 6 3 3 9 9 3
6 4 48 6 3 5 48 0 2 6 3 3 9 9 3
6 4 48 7 3 5 48 0 2 6 3 3 9 9 3
6 4 48 8 3 5 48 0 2 6 3 3 9 9 3
6 4 48 N 3 5 48 0 2 6 3 3 9 9 3
7 4 49 0 4 5 49 1 2 7 3 3 9 9 3
7 4 49 1 4 5 49 1 2 7 3 3 9 9 3
7 4 49 2 4 5 49 1 2 7 3 3 9 9 3
7 4 49 3 4 5 49 1 2 7 3 3 9 9 3
7 4 49 4 4 5 49 1 2 7 3 3 9 9 3
7 4 49 5 4 5 49 1 2 7 3 3 9 9 3
7 4 49 6 4 5 49 1 2 7 3 3 9 9 3
7 4 49 7 4 5 49 1 2 7 3 3 9 9 3
7 4 49 8 4 5 49 1 2 7 3 3 9 9 3
7 4 49 N 4 5 49 1 2 7 3 3 9 9 3
8 4 50 0 5 5 50 2 2 8 3 3 9 9 3
8 4 50 1 5 5 50 2 2 8 3 3 9 9 3
8 4 50 2 5 5 50 2 2 8 3 3 9 9 3
8 4 50 3 5 5 50 2 2 8 3 3 9 9 3
8 4 50 4 5 5 50 2 2 8 3 3 9 9 3
8 4 50 5 5 5 50 2 2 8 3 3 9 9 3
8 4 50 6 5 5 50 2 2 8 3 3 9 9 3
8 4 50 7 5 5 50 2 2 8 3 3 9 9 3
8 4 50 8 5 5 50 2 2 8 3 3 9 9 3
8 4 50 N 5 5 50 2 2 8 3 3 9 9 3
0 4 111 0 3 3 111 0 0 0 3 3 9 9 3
0 4 111 1 3 3 111 0 0 0 3 3 9 9 3
0 4 111 2 3 3 111 0 0 0 3 3 9 9 3
0 4 111 3 3 3 111 0 0 0 3 3 9 9 3
0 4 111 4 3 3 111 0 0 0 3 3 9 9 3
0 4 111 5 3 3 111 0 0 0 3 3 9 9 3
0 4 111 6 3 3 111 0 0 0 3 3 9 9 3
0 4 111 7 3 3 111 0 0 0 3 3 9 9 3
0 4 111 8 3 3 111 0 0 0 3 3 9 9 3
0 4 111 N 3 3 111 0 0 0 3 3 9 9 3
1 4 112 0 4 3 112 1 0 1 3 3 9 9 3
1 4 112 1 4 3 112 1 0 1 3 3 9 9 3
1 4 112 2 4 3 112 1 0 1 3 3 9 9 3
1 4 112 3 4 3 112 1 0 1 3 3 9 9 3
1 4 112 4 4 3 112 1 0 1 3 3 9 9 3
1 4 112 5 4 3 112 1 0 1 3 3 9 9 3
1 4 112 6 4 3 112 1 0 1 3 3 9 9 3
1 4 112 7 4 3 112 1 0 1 3 3 9 9 3
1 4 112 8 4 3 112 1 0 1 3 3 9 9 3
1 4 112 N 4 3 112 1 0 1 3 3 9 9 3
2 4 113 0 5 3 113 2 0 2 3 3 9 9 3
2 4 113 1 5 3 113 2 0 2 3 3 9 9 3
2 4 113 2 5 3 113 2 0 2 3 3 9 9 3
2 4 113 3 5 3 113 2 0 2 3 3 9 9 3
2 4 113 4 5 3 113 2 0 2 3 3 9 9 3
2 4 113 5 5 3 113 2 0 2 3 3 9 9 3
2 4 113 6 5 3 113 2 0 2 3 3 9 9 3
2 4 113 7 5 3 113 2 0 2 3 3 9 9 3
2 4 113 8 5 3 113 2 0 2 3 3 9 9 3
2 4 113 N 5 3 113 2 0 2 3 3 9 9 3
3 4 120 0 3 4 120 0 1 3 3 3 9 9 3
3 4 120 1 3 4 120 0 1 3 3 3 9 9 3
3 4 120 2 3 4 120 0 1 3 3 3 9 9 3
3 4 120 3 3 4 120 0 1 3 3 3 9 9 3
3 4 120 4 3 4 120 0 1 3 3 3 9 9 3
3 4 120 5 3 4 120 0 1 3 3 3 9 9 3
3 4 120 6 3 4 120 0 1 3 3 3 9 9 3
3 4 120 7 3 4 120 0 1 3 3 3 9 9 3
3 4 120 8 3 4 120 0 1 3 3 3 9 9 3
3 4 120 N 3 4 120 0 1 3 3 3 9 9 3
4 4 121 0 4 4 121 1 1 4 3 3 9 9 3
4 4 121 1 4 4 121 1 1 4 3 3 9 9 3
4 4 121 2 4 4 121 1 1 4 3 3 9 9 3
4 4 121 3 4 4 121 1 1 4 3 3 9 9 3
4 4 121 4 4 4 121 1 1 4 3 3 9 9 3
4 4 121 5 4 4 121 1 1 4 3 3 9 9 3
4 4 121 6 4 4 121 1 1 4 3 3 9 9 3
4 4 121 7 4 4 121 1 1 4 3 3 9 9 3
4 4 121 8 4 4 121 1 1 4 3 3 9 9 3
4 4 121 N 4 4 121 1 1 4 3 3 9 9 3
5 4 122 0 5 4 122 2 1 5 3 3 9 9 3
5 4 122 1 5 4 122 2 1 5 3 3 9 9 3
5 4 122 2 5 4 122 2 1 5 3 3 9 9 3
5 4 122 3 5 4 122 2 1 5 3 3 9 9 3
5 4 122 4 5 4 122 2 1 5 3 3 9 9 3
5 4 122 5 5 4 122 2 1 5 3 3 9 9 3
5 4 122 6 5 4 122 2 1 5 3 3 9 9 3
5 4 122 7 5 4 122 2 1 5 3 3 9 9 3
5 4 122 8 5 4 122 2 1 5 3 3 9 9 3
5 4 122 N 5 4 122 2 1 5 3 3 9 9 3
6 4 129 0 3 5 129 0 2 6 3 3 9 9 3
6 4 129 1 3 5 129 0 2 6 3 3 9 9 3
6 4 129 2 3 5 129 0 2 6 3 3 9 9 3
6 4 129 3 3 5 129 0 2 6 3 3 9 9 3
6 4 129 4 3 5 129 0 2 6 3 3 9 9 3
6 4 129 5 3 5 129 0 2 6 3 3 9 9 3
6 4 129 6 3 5 129 0 2 6 3 3 9 9 3
6 4 129 7 3 5 129 0 2 6 3 3 9 9 3
6 4 129 8 3 5 129 0 2 6 3 3 9 9 3
6 4 129 N 3 5 129 0 2 6 3 3 9 9 3
7 4 130 0 4 5 130 1 2 7 3 3 9 9 3
7 4 130 1 4 5 130 1 2 7 3 3 9 9 3
7 4 130 2 4 5 130 1 2 7 3 3 9 9 3
7 4 130 3 4 5 130 1 2 7 3 3 9 9 3
7 4 130 4 4 5 130 1 2 7 3 3 9 9 3
7 4 130 5 4 5 130 1 2 7 3 3 9 9 3
7 4 130 6 4 5 130 1 2 7 3 3 9 9 3
7 4 130 7 4 5 130 1 2 7 3 3 9 9 3
7 4 130 8 4 5 130 1 2 7 3 3 9 9 3
7 4 130 N 4 5 130 1 2 7 3 3 9 9 3
8 4 131 0 5 5 131 2 2 8 3 3 9 9 3
8 4 131 1 5 5 131 2 2 8 3 3 9 9 3
8 4 131 2 5 5 131 2 2 8 3 3 9 9 3
8 4 131 3 5 5 131 2 2 8 3 3 9 9 3
8 4 131 4 5 5 131 2 2 8 3 3 9 9 3
8 4 131 5 5 5 131 2 2 8 3 3 9 9 3
8 4 131 6 5 5 131 2 2 8 3 3 9 9 3
8 4 131 7 5 5 131 2 2 8 3 3 9 9 3
8 4 131 8 5 5 131 2 2 8 3 3 9 9 3
8 4 131 N 5 5 131 2 2 8 3 3 9 9 3
0 4 192 0 3 3 192 0 0 0 3 3 9 9 3
0 4 192 1 3 3 192 0 0 0 3 3 9 9 3
0 4 192 2 3 3 192 0 0 0 3 3 9 9 3
0 4 192 3 3 3 192 0 0 0 3 3 9 9 3
0 4 192 4 3 3 192 0 0 0 3 3 9 9 3
0 4 192 5 3 3 192 0 0 0 3 3 9 9 3
0 4 192 6 3 3 192 0 0 0 3 3 9 9 3
0 4 192 7 3 3 192 0 0 0 3 3 9 9 3
0 4 192 8 3 3 192 0 0 0 3 3 9 9 3
0 4 192 N 3 3 192 0 0 0 3 3 9 9 3
1 4 193 0 4 3 193 1 0 1 3 3 9 9 3
1 4 193 1 4 3 193 1 0 1 3 3 9 9 3
1 4 193 2 4 3 193 1 0 1 3 3 9 9 3
1 4 193 3 4 3 193 1 0 1 3 3 9 9 3
1 4 193 4 4 3 193 1 0 1 3 3 9 9 3
1 4 193 5 4 3 193 1 0 1 3 3 9 9 3
1 4 193 6 4 3 193 1 0 1 3 3 9 9 3
1 4 193 7 4 3 193 1 0 1 3 3 9 9 3
1 4 193 8 4 3 193 1 0 1 3 3 9 9 3
1 4 193 N 4 3 193 1 0 1 3 3 9 9 3
2 4 194 0 5 3 194 2 0 2 3 3 9 9 3
2 4 194 1 5 3 194 2 0 2 3 3 9 9 3
2 4 194 2 5 3 194 2 0 2 3 3 9 9 3
2 4 194 3 5 3 194 2 0 2 3 3 9 9 3
2 4 194 4 5 3 194 2 0 2 3 3 9 9 3
2 4 194 5 5 3 194 2 0 2 3 3 9 9 3
2 4 194 6 5 3 194 2 0 2 3 3 9 9 3
2 4 194 7 5 3 194 2 0 2 3 3 9 9 3
2 4 194 8 5 3 194 2 0 2 3 3 9 9 3
2 4 194 N 5 3 194 2 0 2 3 3 9 9 3
3 4 201 0 3 4 201 0 1 3 3 3 9 9 3
3 4 201 1 3 4 201 0 1 3 3 3 9 9 3
3 4 201 2 3 4 201 0 1 3 3 3 9 9 3
3 4 201 3 3 4 201 0 1 3 3 3 9 9 3
3 4 201 4 3 4 201 0 1 3 3 3 9 9 3
3 4 201 5 3 4 201 0 1 3 3 3 9 9 3
3 4 201 6 3 4 201 0 1 3 3 3 9 9 3
3 4 201 7 3 4 201 0 1 3 3 3 9 9 3
3 4 201 8 3 4 201 0 1 3 3 3 9 9 3
3 4 201 N 3 4 201 0 1 3 3 3 9 9 3
4 4 202 0 4 4 202 1 1 4 3 3 9 9 3
4 4 202 1 4 4 202 1 1 4 3 3 9 9 3
4 4 202 2 4 4 202 1 1 4 3 3 9 9 3
4 4 202 3 4 4 202 1 1 4 3 3 9 9 3
4 4 202 4 4 4 202 1 1 4 3 3 9 9 3
4 4 202 5 4 4 202 1 1 4 3 3 9 9 3
4 4 202 6 4 4 202 1 1 4 3 3 9 9 3
4 4 202 7 4 4 202 1 1 4 3 3 9 9 3
4 4 202 8 4 4 202 1 1 4 3 3 9 9 3
4 4 202 N 4 4 202 1 1 4 3 3 9 9 3
5 4 203 0 5 4 203 2 1 5 3 3 9 9 3
5 4 203 1 5 4 203 2 1 5 3 3 9 9 3
5 4 203 2 5 4 203 2 1 5 3 3 9 9 3
5 4 203 3 5 4 203 2 1 5 3 3 9 9 3
5 4 203 4 5 4 203 2 1 5 3 3 9 9 3
5 4 203 5 5 4 203 2 1 5 3 3 9 9 3
5 4 203 6 5 4 203 2 1 5 3 3 9 9 3
5 4 203 7 5 4 203 2 1 5 3 3 9 9 3
5 4 203 8 5 4 203 2 1 5 3 3 9 9 3
5 4 203 N 5 4 203 2 1 5 3 3 9 9 3
6 4 210 0 3 5 210 0 2 6 3 3 9 9 3
6 4 210 1 3 5 210 0 2 6 3 3 9 9 3
6 4 210 2 3 5 210 0 2 6 3 3 9 9 3
6 4 210 3 3 5 210 0 2 6 3 3 9 9 3
6 4 210 4 3 5 210 0 2 6 3 3 9 9 3
6 4 210 5 3 5 210 0 2 6 3 3 9 9 3
6 4 210 6 3 5 210 0 2 6 3 3 9 9 3
6 4 210 7 3 5 210 0 2 6 3 3 9 9 3
6 4 210 8 3 5 210 0 2 6 3 3 9 9 3
6 4 210 N 3 5 210 0 2 6 3 3 9 9 3
7 4 211 0 4 5 211 1 2 7 3 3 9 9 3
7 4 211 1 4 5 211 1 2 7 3 3 9 9 3
7 4 211 2 4 5 211 1 2 7 3 3 9 9 3
7 4 211 3 4 5 211 1 2 7 3 3 9 9 3
7 4 211 4 4 5 211 1 2 7 3 3 9 9 3
7 4 211 5 4 5 211 1 2 7 3 3 9 9 3
7 4 211 6 4 5 211 1 2 7 3 3 9 9 3
7 4 211 7 4 5 211 1 2 7 3 3 9 9 3
7 4 211 8 4 5 211 1 2 7 3 3 9 9 3
7 4 211 N 4 5 211 1 2 7 3 3 9 9 3
8 4 212 0 5 5 212 2 2 8 3 3 9 9 3
8 4 212 1 5 5 212 2 2 8 3 3 9 9 3
8 4 212 2 5 5 212 2 2 8 3 3 9 9 3
8 4 212 3 5 5 212 2 2 8 3 3 9 9 3
8 4 212 4 5 5 212 2 2 8 3 3 9 9 3
8 4 212 5 5 5 212 2 2 8 3 3 9 9 3
8 4 212 6 5 5 212 2 2 8 3 3 9 9 3
8 4 212 7 5 5 212 2 2 8 3 3 9 9 3
8 4 212 8 5 5 212 2 2 8 3 3 9 9 3
8 4 212 N 5 5 212 2 2 8 3 3 9 9 3
0 5 33 0 6 3 33 0 0 0 3 3 9 9 3
0 5 33 1 6 3 33 0 0 0 3 3 9 9 3
0 5 33 2 6 3 33 0 0 0 3 3 9 9 3
0 5 33 3 6 3 33 0 0 0 3 3 9 9 3
0 5 33 4 6 3 33 0 0 0 3 3 9 9 3
0 5 33 5 6 3 33 0 0 0 3 3 9 9 3
0 5 33 6 6 3 33 0 0 0 3 3 9 9 3
0 5 33 7 6 3 33 0 0 0 3 3 9 9 3
0 5 33 8 6 3 33 0 0 0 3 3 9 9 3
0 5 33 N 6 3 33 0 0 0 3 3 9 9 3
1 5 34 0 7 3 34 1 0 1 3 3 9 9 3
1 5 34 1 7 3 34 1 0 1 3 3 9 9 3
1 5 34 2 7 3 34 1 0 1 3 3 9 9 3
1 5 34 3 7 3 34 1 0 1 3 3 9 9 3
1 5 34 4 7 3 34 1 0 1 3 3 9 9 3
1 5 34 5 7 3 34 1 0 1 3 3 9 9 3
1 5 34 6 7 3 34 1 0 1 3 3 9 9 3
1 5 34 7 7 3 34 1 0 1 3 3 9 9 3
1 5 34 8 7 3 34 1 0 1 3 3 9 9 3
1 5 34 N 7 3 34 1 0 1 3 3 9 9 3
2 5 35 0 8 3 35 2 0 2 3 3 9 9 3
2 5 35 1 8 3 35 2 0 2 3 3 9 9 3
2 5 35 2 8 3 35 2 0 2 3 3 9 9 3
2 5 35 3 8 3 35 2 0 2 3 3 9 9 3
2 5 35 4 8 3 35 2 0 2 3 3 9 9 3
2 5 35 5 8 3 35 2 0 2 3 3 9 9 3
2 5 35 6 8 3 35 2 0 2 3 3 9 9 3
2 5 35 7 8 3 35 2 0 2 3 3 9 9 3
2 5 35 8 8 3 35 2 0 2 3 3 9 9 3
2 5 35 N 8 3 35 2 0 2 3 3 9 9 3
3 5 42 0 6 4 42 0 1 3 3 3 9 9 3
3 5 42 1 6 4 42 0 1 3 3 3 9 9 3
3 5 42 2 6 4 42 0 1 3 3 3 9 9 3
3 5 42 3 6 4 42 0 1 3 3 3 9 9 3
3 5 42 4 6 4 42 0 1 3 3 3 9 9 3
3 5 42 5 6 4 42 0 1 3 3 3 9 9 3
3 5 42 6 6 4 42 0 1 3 3 3 9 9 3
3 5 42 7 6 4 42 0 1 3 3 3 9 9 3
3 5 42 8 6 4 42 0 1 3 3 3 9 9 3
3 5 42 N 6 4 42 0 1 3 3 3 9 9 3
4 5 43 0 7 4 43 1 1 4 3 3 9 9 3
4 5 43 1 7 4 43 1 1 4 3 3 9 9 3
4 5 43 2 7 4 43 1 1 4 3 3 9 9 3
4 5 43 3 7 4 43 1 1 4 3 3 9 9 3
4 5 43 4 7 4 43 1 1 4 3 3 9 9 3
4 5 43 5 7 4 43 1 1 4 3 3 9 9 3
4 5 43 6 7 4 43 1 1 4 3 3 9 9 3
4 5 43 7 7 4 43 1 1 4 3 3 9 9 3
4 5 43 8 7 4 43 1 1 4 3 3 9 9 3
4 5 43 N 7 4 43 1 1 4 3 3 9 9 3
5 5 44 0 8 4 44 2 1 5 3 3 9 9 3
5 5 44 1 8 4 44 2 1 5 3 3 9 9 3
5 5 44 2 8 4 44 2 1 5 3 3 9 9 3
5 5 44 3 8 4 44 2 1 5 3 3 9 9 3
5 5 44 4 8 4 44 2 1 5 3 3 9 9 3
5 5 44 5 8 4 44 2 1 5 3 3 9 9 3
5 5 44 6 8 4 44 2 1 5 3 3 9 9 3
5 5 44 7 8 4 44 2 1 5 3 3 9 9 3
5 5 44 8 8 4 44 2 1 5 3 3 9 9 3
5 5 44 N 8 4 44 2 1 5 3 3 9 9 3
6 5 51 0 6 5 51 0 2 6 3 3 9 9 3
6 5 51 1 6 5 51 0 2 6 3 3 9 9 3
6 5 51 2 6 5 51 0 2 6 3 3 9 9 3
6 5 51 3 6 5 51 0 2 6 3 3 9 9 3
6 5 51 4 6 5 51 0 2 6 3 3 9 9 3
6 5 51 5 6 5 51 0 2 6 3 3 9 9 3
6 5 51 6 6 5 51 0 2 6 3 3 9 9 3
6 5 51 7 6 5 51 0 2 6 3 3 9 9 3
6 5 51 8 6 5 51 0 2 6 3 3 9 9 3
6 5 51 N 6 5 51 0 2 6 3 3 9 9 3
7 5 52 0 7 5 52 1 2 7 3 3 9 9 3
7 5 52 1 7 5 52 1 2 7 3 3 9 9 3
7 5 52 2 7 5 52 1 2 7 3 3 9 9 3
7 5 52 3 7 5 52 1 2 7 3 3 9 9 3
7 5 52 4 7 5 52 1 2 7 3 3 9 9 3
7 5 52 5 7 5 52 1 2 7 3 3 9 9 3
7 5 52 6 7 5 52 1 2 7 3 3 9 9 3
7 5 52 7 7 5 52 1 2 7 3 3 9 9 3
7 5 52 8 7 5 52 1 2 7 3 3 9 9 3
7 5 52 N 7 5 52 1 2 7 3 3 9 9 3
8 5 53 0 8 5 53 2 2 8 3 3 9 9 3
8 5 53 1 8 5 53 2 2 8 3 3 9 9 3
8 5 53 2 8 5 53 2 2 8 3 3 9 9 3
8 5 53 3 8 5 53 2 2 8 3 3 9 9 3
8 5 53 4 8 5 53 2 2 8 3 3 9 9 3
8 5 53 5 8 5 53 2 2 8 3 3 9 9 3
8 5 53 6 8 5 53 2 2 8 3 3 9 9 3
8 5 53 7 8 5 53 2 2 8 3 3 9 9 3
8 5 53 8 8 5 53 2 2 8 3 3 9 9 3
8 5 53 N 8 5 53 2 2 8 3 3 9 9 3
0 5 114 0 6 3 114 0 0 0 3 3 9 9 3
0 5 114 1 6 3 114 0 0 0 3 3 9 9 3
0 5 114 2 6 3 114 0 0 0 3 3 9 9 3
0 5 114 3 6 3 114 0 0 0 3 3 9 9 3
0 5 114 4 6 3 114 0 0 0 3 3 9 9 3
0 5 114 5 6 3 114 0 0 0 3 3 9 9 3
0 5 114 6 6 3 114 0 0 0 3 3 9 9 3
0 5 114 7 6 3 114 0 0 0 3 3 9 9 3
0 5 114 8 6 3 114 0 0 0 3 3 9 9 3
0 5 114 N 6 3 114 0 0 0 3 3 9 9 3
1 5 115 0 7 3 115 1 0 1 3 3 9 9 3
1 5 115 1 7 3 115 1 0 1 3 3 9 9 3
1 5 115 2 7 3 115 1 0 1 3 3 9 9 3
1 5 115 3 7 3 115 1 0 1 3 3 9 9 3
1 5 115 4 7 3 115 1 0 1 3 3 9 9 3
1 5 115 5 7 3 115 1 0 1 3 3 9 9 3
1 5 115 6 7 3 115 1 0 1 3 3 9 9 3
1 5 115 7 7 3 115 1 0 1 3 3 9 9 3
1 5 115 8 7 3 115 1 0 1 3 3 9 9 3
1 5 115 N 7 3 115 1 0 1 3 3 9 9 3
2 5 116 0 8 3 116 2 0 2 3 3 9 9 3
2 5 116 1 8 3 116 2 0 2 3 3 9 9 3
2 5 116 2 8 3 116 2 0 2 3 3 9 9 3
2 5 116 3 8 3 116 2 0 2 3 3 9 9 3
2 5 116 4 8 3 116 2 0 2 3 3 9 9 3
2 5 116 5 8 3 116 2 0 2 3 3 9 9 3
2 5 116 6 8 3 116 2 0 2 3 3 9 9 3
2 5 116 7 8 3 116 2 0 2 3 3 9 9 3
2 5 116 8 8 3 116 2 0 2 3 3 9 9 3
2 5 116 N 8 3 116 2 0 2 3 3 9 9 3
3 5 123 0 6 4 123 0 1 3 3 3 9 9 3
3 5 123 1 6 4 123 0 1 3 3 3 9 9 3
3 5 123 2 6 4 123 0 1 3 3 3 9 9 3
3 5 123 3 6 4 123 0 1 3 3 3 9 9 3
3 5 123 4 6 4 123 0 1 3 3 3 9 9 3
3 5 123 5 6 4 123 0 1 3 3 3 9 9 3
3 5 123 6 6 4 123 0 1 3 3 3 9 9 3
3 5 123 7 6 4 123 0 1 3 3 3 9 9 3
3 5 123 8 6 4 123 0 1 3 3 3 9 9 3
3 5 123 N 6 4 123 0 1 3 3 3 9 9 3
4 5 124 0 7 4 124 1 1 4 3 3 9 9 3
4 5 124 1 7 4 124 1 1 4 3 3 9 9 3
4 5 124 2 7 4 124 1 1 4 3 3 9 9 3
4 5 124 3 7 4 124 1 1 4 3 3 9 9 3
4 5 124 4 7 4 124 1 1 4 3 3 9 9 3
4 5 124 5 7 4 124 1 1 4 3 3 9 9 3
4 5 124 6 7 4 124 1 1 4 3 3 9 9 3
4 5 124 7 7 4 124 1 1 4 3 3 9 9 3
4 5 124 8 7 4 124 1 1 4 3 3 9 9 3
4 5 124 N 7 4 124 1 1 4 3 3 9 9 3
5 5 125 0 8 4 125 2 1 5 3 3 9 9 3
5 5 125 1 8 4 125 2 1 5 3 3 9 9 3
5 5 125 2 8 4 125 2 1 5 3 3 9 9 3
5 5 125 3 8 4 125 2 1 5 3 3 9 9 3
5 5 125 4 8 4 125 2 1 5 3 3 9 9 3
5 5 125 5 8 4 125 2 1 5 3 3 9 9 3
5 5 125 6 8 4 125 2 1 5 3 3 9 9 3
5 5 125 7 8 4 125 2 1 5 3 3 9 9 3
5 5 125 8 8 4 125 2 1 5 3 3 9 9 3
5 5 125 N 8 4 125 2 1 5 3 3 9 9 3
6 5 132 0 6 5 132 0 2 6 3 3 9 9 3
6 5 132 1 6 5 132 0 2 6 3 3 9 9 3
6 5 132 2 6 5 132 0 2 6 3 3 9 9 3
6 5 132 3 6 5 132 0 2 6 3 3 9 9 3
6 5 132 4 6 5 132 0 2 6 3 3 9 9 3
6 5 132 5 6 5 132 0 2 6 3 3 9 9 3
6 5 132 6 6 5 132 0 2 6 3 3 9 9 3
6 5 132 7 6 5 132 0 2 6 3 3 9 9 3
6 5 132 8 6 5 132 0 2 6 3 3 9 9 3
6 5 132 N 6 5 132 0 2 6 3 3 9 9 3
7 5 133 0 7 5 133 1 2 7 3 3 9 9 3
7 5 133 1 7 5 133 1 2 7 3 3 9 9 3
7 5 133 2 7 5 133 1 2 7 3 3 9 9 3
7 5 133 3 7 5 133 1 2 7 3 3 9 9 3
7 5 133 4 7 5 133 1 2 7 3 3 9 9 3
7 5 133 5 7 5 133 1 2 7 3 3 9 9 3
7 5 133 6 7 5 133 1 2 7 3 3 9 9 3
7 5 133 7 7 5 133 1 2 7 3 3 9 9 3
7 5 133 8 7 5 133 1 2 7 3 3 9 9 3
7 5 133 N 7 5 133 1 2 7 3 3 9 9 3
8 5 134 0 8 5 134 2 2 8 3 3 9 9 3
8 5 134 1 8 5 134 2 2 8 3 3 9 9 3
8 5 134 2 8 5 134 2 2 8 3 3 9 9 3
8 5 134 3 8 5 134 2 2 8 3 3 9 9 3
8 5 134 4 8 5 134 2 2 8 3 3 9 9 3
8 5 134 5 8 5 134 2 2 8 3 3 9 9 3
8 5 134 6 8 5 134 2 2 8 3 3 9 9 3
8 5 134 7 8 5 134 2 2 8 3 3 9 9 3
8 5 134 8 8 5 134 2 2 8 3 3 9 9 3
8 5 134 N 8 5 134 2 2 8 3 3 9 9 3
0 5 195 0 6 3 195 0 0 0 3 3 9 9 3
0 5 195 1 6 3 195 0 0 0 3 3 9 9 3
0 5 195 2 6 3 195 0 0 0 3 3 9 9 3
0 5 195 3 6 3 195 0 0 0 3 3 9 9 3
0 5 195 4 6 3 195 0 0 0 3 3 9 9 3
0 5 195 5 6 3 195 0 0 0 3 3 9 9 3
0 5 195 6 6 3 195 0 0 0 3 3 9 9 3
0 5 195 7 6 3 195 0 0 0 3 3 9 9 3
0 5 195 8 6 3 195 0 0 0 3 3 9 9 3
0 5 195 N 6 3 195 0 0 0 3 3 9 9 3
1 5 196 0 7 3 196 1 0 1 3 3 9 9 3
1 5 196 1 7 3 196 1 0 1 3 3 9 9 3
1 5 196 2 7 3 196 1 0 1 3 3 9 9 3
1 5 196 3 7 3 196 1 0 1 3 3 9 9 3
1 5 196 4 7 3 196 1 0 1 3 3 9 9 3
1 5 196 5 7 3 196 1 0 1 3 3 9 9 3
1 5 196 6 7 3 196 1 0 1 3 3 9 9 3
1 5 196 7 7 3 196 1 0 1 3 3 9 9 3
1 5 196 8 7 3 196 1 0 1 3 3 9 9 3
1 5 196 N 7 3 196 1 0 1 3 3 9 9 3
2 5 197 0 8 3 197 2 0 2 3 3 9 9 3
2 5 197 1 8 3 197 2 0 2 3 3 9 9 3
2 5 197 2 8 3 197 2 0 2 3 3 9 9 3
2 5 197 3 8 3 197 2 0 2 3 3 9 9 3
2 5 197 4 8 3 197 2 0 2 3 3 9 9 3
2 5 197 5 8 3 197 2 0 2 3 3 9 9 3
2 5 197 6 8 3 197 2 0 2 3 3 9 9 3
2 5 197 7 8 3 197 2 0 2 3 3 9 9 3
2 5 197 8 8 3 197 2 0 2 3 3 9 9 3
2 5 197 N 8 3 197 2 0 2 3 3 9 9 3
3 5 204 0 6 4 204 0 1 3 3 3 9 9 3
3 5 204 1 6 4 204 0 1 3 3 3 9 9 3
3 5 204 2 6 4 204 0 1 3 3 3 9 9 3
3 5 204 3 6 4 204 0 1 3 3 3 9 9 3
3 5 204 4 6 4 204 0 1 3 3 3 9 9 3
3 5 204 5 6 4 204 0 1 3 3 3 9 9 3
3 5 204 6 6 4 204 0 1 3 3 3 9 9 3
3 5 204 7 6 4 204 0 1 3 3 3 9 9 3
3 5 204 8 6 4 204 0 1 3 3 3 9 9 3
3 5 204 N 6 4 204 0 1 3 3 3 9 9 3
4 5 205 0 7 4 205 1 1 4 3 3 9 9 3
4 5 205 1 7 4 205 1 1 4 3 3 9 9 3
4 5 205 2 7 4 205 1 1 4 3 3 9 9 3
4 5 205 3 7 4 205 1 1 4 3 3 9 9 3
4 5 205 4 7 4 205 1 1 4 3 3 9 9 3
4 5 205 5 7 4 205 1 1 4 3 3 9 9 3
4 5 205 6 7 4 205 1 1 4 3 3 9 9 3
4 5 205 7 7 4 205 1 1 4 3 3 9 9 3
4 5 205 8 7 4 205 1 1 4 3 3 9 9 3
4 5 205 N 7 4 205 1 1 4 3 3 9 9 3
5 5 206 0 8 4 206 2 1 5 3 3 9 9 3
5 5 206 1 8 4 206 2 1 5 3 3 9 9 3
5 5 206 2 8 4 206 2 1 5 3 3 9 9 3
5 5 206 3 8 4 206 2 1 5 3 3 9 9 3
5 5 206 4 8 4 206 2 1 5 3 3 9 9 3
5 5 206 5 8 4 206 2 1 5 3 3 9 9 3
5 5 206 6 8 4 206 2 1 5 3 3 9 9 3
5 5 206 7 8 4 206 2 1 5 3 3 9 9 3
5 5 206 8 8 4 206 2 1 5 3 3 9 9 3
5 5 206 N 8 4 206 2 1 5 3 3 9 9 3
6 5 213 0 6 5 213 0 2 6 3 3 9 9 3
6 5 213 1 6 5 213 0 2 6 3 3 9 9 3
6 5 213 2 6 5 213 0 2 6 3 3 9 9 3
6 5 213 3 6 5 213 0 2 6 3 3 9 9 3
6 5 213 4 6 5 213 0 2 6 3 3 9 9 3
6 5 213 5 6 5 213 0 2 6 3 3 9 9 3
6 5 213 6 6 5 213 0 2 6 3 3 9 9 3
6 5 213 7 6 5 213 0 2 6 3 3 9 9 3
6 5 213 8 6 5 213 0 2 6 3 3 9 9 3
6 5 213 N 6 5 213 0 2 6 3 3 9 9 3
7 5 214 0 7 5 214 1 2 7 3 3 9 9 3
7 5 214 1 7 5 214 1 2 7 3 3 9 9 3
7 5 214 2 7 5 214 1 2 7 3 3 9 9 3
7 5 214 3 7 5 214 1 2 7 3 3 9 9 3
7 5 214 4 7 5 214 1 2 7 3 3 9 9 3
7 5 214 5 7 5 214 1 2 7 3 3 9 9 3
7 5 214 6 7 5 214 1 2 7 3 3 9 9 3
7 5 214 7 7 5 214 1 2 7 3 3 9 9 3
7 5 214 8 7 5 214 1 2 7 3 3 9 9 3
7 5 214 N 7 5 214 1 2 7 3 3 9 9 3
8 5 215 0 8 5 215 2 2 8 3 3 9 9 3
8 5 215 1 8 5 215 2 2 8 3 3 9 9 3
8 5 215 2 8 5 215 2 2 8 3 3 9 9 3
8 5 215 3 8 5 215 2 2 8 3 3 9 9 3
8 5 215 4 8 5 215 2 2 8 3 3 9 9 3
8 5 215 5 8 5 215 2 2 8 3 3 9 9 3
8 5 215 6 8 5 215 2 2 8 3 3 9 9 3
8 5 215 7 8 5 215 2 2 8 3 3 9 9 3
8 5 215 8 8 5 215 2 2 8 3 3 9 9 3
8 5 215 N 8 5 215 2 2 8 3 3 9 9 3
0 6 54 0 0 6 54 0 0 0 3 3 9 9 3
0 6 54 1 0 6 54 0 0 0 3 3 9 9 3
0 6 54 2 0 6 54 0 0 0 3 3 9 9 3
0 6 54 3 0 6 54 0 0 0 3 3 9 9 3
0 6 54 4 0 6 54 0 0 0 3 3 9 9 3
0 6 54 5 0 6 54 0 0 0 3 3 9 9 3
0 6 54 6 0 6 54 0 0 0 3 3 9 9 3
0 6 54 7 0 6 54 0 0 0 3 3 9 9 3
0 6 54 8 0 6 54 0 0 0 3 3 9 9 3
0 6 54 N 0 6 54 0 0 0 3 3 9 9 3
1 6 55 0 1 6 55 1 0 1 3 3 9 9 3
1 6 55 1 1 6 55 1 0 1 3 3 9 9 3
1 6 55 2 1 6 55 1 0 1 3 3 9 9 3
1 6 55 3 1 6 55 1 0 1 3 3 9 9 3
1 6 55 4 1 6 55 1 0 1 3 3 9 9 3
1 6 55 5 1 6 55 1 0 1 3 3 9 9 3
1 6 55 6 1 6 55 1 0 1 3 3 9 9 3
1 6 55 7 1 6 55 1 0 1 3 3 9 9 3
1 6 55 8 1 6 55 1 0 1 3 3 9 9 3
1 6 55 N 1 6 55 1 0 1 3 3 9 9 3
2 6 56 0 2 6 56 2 0 2 3 3 9 9 3
2 6 56 1 2 6 56 2 0 2 3 3 9 9 3
2 6 56 2 2 6 56 2 0 2 3 3 9 9 3
2 6 56 3 2 6 56 2 0 2 3 3 9 9 3
2 6 56 4 2 6 56 2 0 2 3 3 9 9 3
2 6 56 5 2 6 56 2 0 2 3 3 9 9 3
2 6 56 6 2 6 56 2 0 2 3 3 9 9 3
2 6 56 7 2 6 56 2 0 2 3 3 9 9 3
2 6 56 8 2 6 56 2 0 2 3 3 9 9 3
2 6 56 N 2 6 56 2 0 2 3 3 9 9 3
3 6 63 0 0 7 63 0 1 3 3 3 9 9 3
3 6 63 1 0 7 63 0 1 3 3 3 9 9 3
3 6 63 2 0 7 63 0 1 3 3 3 9 9 3
3 6 63 3 0 7 63 0 1 3 3 3 9 9 3
3 6 63 4 0 7 63 0 1 3 3 3 9 9 3
3 6 63 5 0 7 63 0 1 3 3 3 9 9 3
3 6 63 6 0 7 63 0 1 3 3 3 9 9 3
3 6 63 7 0 7 63 0 1 3 3 3 9 9 3
3 6 63 8 0 7 63 0 1 3 3 3 9 9 3
3 6 63 N 0 7 63 0 1 3 3 3 9 9 3
4 6 64 0 1 7 64 1 1 4 3 3 9 9 3
4 6 64 1 1 7 64 1 1 4 3 3 9 9 3
4 6 64 2 1 7 64 1 1 4 3 3 9 9 3
4 6 64 3 1 7 64 1 1 4 3 3 9 9 3
4 6 64 4 1 7 64 1 1 4 3 3 9 9 3
4 6 64 5 1 7 64 1 1 4 3 3 9 9 3
4 6 64 6 1 7 64 1 1 4 3 3 9 9 3
4 6 64 7 1 7 64 1 1 4 3 3 9 9 3
4 6 64 8 1 7 64 1 1 4 3 3 9 9 3
4 6 64 N 1 7 64 1 1 4 3 3 9 9 3
5 6 65 0 2 7 65 2 1 5 3 3 9 9 3
5 6 65 1 2 7 65 2 1 5 3 3 9 9 3
5 6 65 2 2 7 65 2 1 5 3 3 9 9 3
5 6 65 3 2 7 65 2 1 5 3 3 9 9 3
5 6 65 4 2 7 65 2 1 5 3 3 9 9 3
5 6 65 5 2 7 65 2 1 5 3 3 9 9 3
5 6 65 6 2 7 65 2 1 5 3 3 9 9 3
5 6 65 7 2 7 65 2 1 5 3 3 9 9 3
5 6 65 8 2 7 65 2 1 5 3 3 9 9 3
5 6 65 N 2 7 65 2 1 5 3 3 9 9 3
6 6 72 0 0 8 72 0 2 6 3 3 9 9 3
6 6 72 1 0 8 72 0 2 6 3 3 9 9 3
6 6 72 2 0 8 72 0 2 6 3 3 9 9 3
6 6 72 3 0 8 72 0 2 6 3 3 9 9 3
6 6 72 4 0 8 72 0 2 6 3 3 9 9 3
6 6 72 5 0 8 72 0 2 6 3 3 9 9 3
6 6 72 6 0 8 72 0 2 6 3 3 9 9 3
6 6 72 7 0 8 72 0 2 6 3 3 9 9 3
6 6 72 8 0 8 72 0 2 6 3 3 9 9 3
6 6 72 N 0 8 72 0 2 6 3 3 9 9 3
7 6 73 0 1 8 73 1 2 7 3 3 9 9 3
7 6 73 1 1 8 73 1 2 7 3 3 9 9 3
7 6 73 2 1 8 73 1 2 7 3 3 9 9 3
7 6 73 3 1 8 73 1 2 7 3 3 9 9 3
7 6 73 4 1 8 73 1 2 7 3 3 9 9 3
7 6 73 5 1 8 73 1 2 7 3 3 9 9 3
7 6 73 6 1 8 73 1 2 7 3 3 9 9 3
7 6 73 7 1 8 73 1 2 7 3 3 9 9 3
7 6 73 8 1 8 73 1 2 7 3 3 9 9 3
7 6 73 N 1 8 73 1 2 7 3 3 9 9 3
8 6 74 0 2 8 74 2 2 8 3 3 9 9 3
8 6 74 1 2 8 74 2 2 8 3 3 9 9 3
8 6 74 2 2 8 74 2 2 8 3 3 9 9 3
8 6 74 3 2 8 74 2 2 8 3 3 9 9 3
8 6 74 4 2 8 74 2 2 8 3 3 9 9 3
8 6 74 5 2 8 74 2 2 8 3 3 9 9 3
8 6 74 6 2 8 74 2 2 8 3 3 9 9 3
8 6 74 7 2 8 74 2 2 8 3 3 9 9 3
8 6 74 8 2 8 74 2 2 8 3 3 9 9 3
8 6 74 N 2 8 74 2 2 8 3 3 9 9 3
0 6 135 0 0 6 135 0 0 0 3 3 9 9 3
0 6 135 1 0 6 135 0 0 0 3 3 9 9 3
0 6 135 2 0 6 135 0 0 0 3 3 9 9 3
0 6 135 3 0 6 135 0 0 0 3 3 9 9 3
0 6 135 4 0 6 135 0 0 0 3 3 9 9 3
0 6 135 5 0 6 135 0 0 0 3 3 9 9 3
0 6 135 6 0 6 135 0 0 0 3 3 9 9 3
0 6 135 7 0 6 135 0 0 0 3 3 9 9 3
0 6 135 8 0 6 135 0 0 0 3 3 9 9 3
0 6 135 N 0 6 135 0 0 0 3 3 9 9 3
1 6 136 0 1 6 136 1 0 1 3 3 9 9 3
1 6 136 1 1 6 136 1 0 1 3 3 9 9 3
1 6 136 2 1 6 136 1 0 1 3 3 9 9 3
1 6 136 3 1 6 136 1 0 1 3 3 9 9 3
1 6 136 4 1 6 136 1 0 1 3 3 9 9 3
1 6 136 5 1 6 136 1 0 1 3 3 9 9 3
1 6 136 6 1 6 136 1 0 1 3 3 9 9 3
1 6 136 7 1 6 136 1 0 1 3 3 9 9 3
1 6 136 8 1 6 136 1 0 1 3 3 9 9 3
1 6 136 N 1 6 136 1 0 1 3 3 9 9 3
2 6 137 0 2 6 137 2 0 2 3 3 9 9 3
2 6 137 1 2 6 137 2 0 2 3 3 9 9 3
2 6 137 2 2 6 137 2 0 2 3 3 9 9 3
2 6 137 3 2 6 137 2 0 2 3 3 9 9 3
2 6 137 4 2 6 137 2 0 2 3 3 9 9 3
2 6 137 5 2 6 137 2 0 2 3 3 9 9 3
2 6 137 6 2 6 137 2 0 2 3 3 9 9 3
2 6 137 7 2 6 137 2 0 2 3 3 9 9 3
2 6 137 8 2 6 137 2 0 2 3 3 9 9 3
2 6 137 N 2 6 137 2 0 2 3 3 9 9 3
3 6 144 0 0 7 144 0 1 3 3 3 9 9 3
3 6 144 1 0 7 144 0 1 3 3 3 9 9 3
3 6 144 2 0 7 144 0 1 3 3 3 9 9 3
3 6 144 3 0 7 144 0 1 3 3 3 9 9 3
3 6 144 4 0 7 144 0 1 3 3 3 9 9 3
3 6 144 5 0 7 144 0 1 3 3 3 9 9 3
3 6 144 6 0 7 144 0 1 3 3 3 9 9 3
3 6 144 7 0 7 144 0 1 3 3 3 9 9 3
3 6 144 8 0 7 144 0 1 3 3 3 9 9 3
3 6 144 N 0 7 144 0 1 3 3 3 9 9 3
4 6 145 0 1 7 145 1 1 4 3 3 9 9 3
4 6 145 1 1 7 145 1 1 4 3 3 9 9 3
4 6 145 2 1 7 145 1 1 4 3 3 9 9 3
4 6 145 3 1 7 145 1 1 4 3 3 9 9 3
4 6 145 4 1 7 145 1 1 4 3 3 9 9 3
4 6 145 5 1 7 145 1 1 4 3 3 9 9 3
4 6 145 6 1 7 145 1 1 4 3 3 9 9 3
4 6 145 7 1 7 145 1 1 4 3 3 9 9 3
4 6 145 8 1 7 145 1 1 4 3 3 9 9 3
4 6 145 N 1 7 145 1 1 4 3 3 9 9 3
5 6 146 0 2 7 146 2 1 5 3 3 9 9 3
5 6 146 1 2 7 146 2 1 5 3 3 9 9 3
5 6 146 2 2 7 146 2 1 5 3 3 9 9 3
5 6 146 3 2 7 146 2 1 5 3 3 9 9 3
5 6 146 4 2 7 146 2 1 5 3 3 9 9 3
5 6 146 5 2 7 146 2 1 5 3 3 9 9 3
5 6 146 6 2 7 146 2 1 5 3 3 9 9 3
5 6 146 7 2 7 146 2 1 5 3 3 9 9 3
5 6 146 8 2 7 146 2 1 5 3 3 9 9 3
5 6 146 N 2 7 146 2 1 5 3 3 9 9 3
6 6 153 0 0 8 153 0 2 6 3 3 9 9 3
6 6 153 1 0 8 153 0 2 6 3 3 9 9 3
6 6 153 2 0 8 153 0 2 6 3 3 9 9 3
6 6 153 3 0 8 153 0 2 6 3 3 9 9 3
6 6 153 4 0 8 153 0 2 6 3 3 9 9 3
6 6 153 5 0 8 153 0 2 6 3 3 9 9 3
6 6 153 6 0 8 153 0 2 6 3 3 9 9 3
6 6 153 7 0 8 153 0 2 6 3 3 9 9 3
6 6 153 8 0 8 153 0 2 6 3 3 9 9 3
6 6 153 N 0 8 153 0 2 6 3 3 9 9 3
7 6 154 0 1 8 154 1 2 7 3 3 9 9 3
7 6 154 1 1 8 154 1 2 7 3 3 9 9 3
7 6 154 2 1 8 154 1 2 7 3 3 9 9 3
7 6 154 3 1 8 154 1 2 7 3 3 9 9 3
7 6 154 4 1 8 154 1 2 7 3 3 9 9 3
7 6 154 5 1 8 154 1 2 7 3 3 9 9 3
7 6 154 6 1 8 154 1 2 7 3 3 9 9 3
7 6 154 7 1 8 154 1 2 7 3 3 9 9 3
7 6 154 8 1 8 154 1 2 7 3 3 9 9 3
7 6 154 N 1 8 154 1 2 7 3 3 9 9 3
8 6 155 0 2 8 155 2 2 8 3 3 9 9 3
8 6 155 1 2 8 155 2 2 8 3 3 9 9 3
8 6 155 2 2 8 155 2 2 8 3 3 9 9 3
8 6 155 3 2 8 155 2 2 8 3 3 9 9 3
8 6 155 4 2 8 155 2 2 8 3 3 9 9 3
8 6 155 5 2 8 155 2 2 8 3 3 9 9 3
8 6 155 6 2 8 155 2 2 8 3 3 9 9 3
8 6 155 7 2 8 155 2 2 8 3 3 9 9 3
8 6 155 8 2 8 155 2 2 8 3 3 9 9 3
8 6 155 N 2 8 155 2 2 8 3 3 9 9 3
0 6 216 0 0 6 216 0 0 0 3 3 9 9 3
0 6 216 1 0 6 216 0 0 0 3 3 9 9 3
0 6 216 2 0 6 216 0 0 0 3 3 9 9 3
0 6 216 3 0 6 216 0 0 0 3 3 9 9 3
0 6 216 4 0 6 216 0 0 0 3 3 9 9 3
0 6 216 5 0 6 216 0 0 0 3 3 9 9 3
0 6 216 6 0 6 216 0 0 0 3 3 9 9 3
0 6 216 7 0 6 216 0 0 0 3 3 9 9 3
0 6 216 8 0 6 216 0 0 0 3 3 9 9 3
0 6 216 N 0 6 216 0 0 0 3 3 9 9 3
1 6 217 0 1 6 217 1 0 1 3 3 9 9 3
1 6 217 1 1 6 217 1 0 1 3 3 9 9 3
1 6 217 2 1 6 217 1 0 1 3 3 9 9 3
1 6 217 3 1 6 217 1 0 1 3 3 9 9 3
1 6 217 4 1 6 217 1 0 1 3 3 9 9 3
1 6 217 5 1 6 217 1 0 1 3 3 9 9 3
1 6 217 6 1 6 217 1 0 1 3 3 9 9 3
1 6 217 7 1 6 217 1 0 1 3 3 9 9 3
1 6 217 8 1 6 217 1 0 1 3 3 9 9 3
1 6 217 N 1 6 217 1 0 1 3 3 9 9 3
2 6 218 0 2 6 218 2 0 2 3 3 9 9 3
2 6 218 1 2 6 218 2 0 2 3 3 9 9 3
2 6 218 2 2 6 218 2 0 2 3 3 9 9 3
2 6 218 3 2 6 218 2 0 2 3 3 9 9 3
2 6 218 4 2 6 218 2 0 2 3 3 9 9 3
2 6 218 5 2 6 218 2 0 2 3 3 9 9 3
2 6 218 6 2 6 218 2 0 2 3 3 9 9 3
2 6 218 7 2 6 218 2 0 2 3 3 9 9 3
2 6 218 8 2 6 218 2 0 2 3 3 9 9 3
2 6 218 N 2 6 218 2 0 2 3 3 9 9 3
3 6 225 0 0 7 225 0 1 3 3 3 9 9 3
3 6 225 1 0 7 225 0 1 3 3 3 9 9 3
3 6 225 2 0 7 225 0 1 3 3 3 9 9 3
3 6 225 3 0 7 225 0 1 3 3 3 9 9 3
3 6 225 4 0 7 225 0 1 3 3 3 9 9 3
3 6 225 5 0 7 225 0 1 3 3 3 9 9 3
3 6 225 6 0 7 225 0 1 3 3 3 9 9 3
3 6 225 7 0 7 225 0 1 3 3 3 9 9 3
3 6 225 8 0 7 225 0 1 3 3 3 9 9 3
3 6 225 N 0 7 225 0 1 3 3 3 9 9 3
4 6 226 0 1 7 226 1 1 4 3 3 9 9 3
4 6 226 1 1 7 226 1 1 4 3 3 9 9 3
4 6 226 2 1 7 226 1 1 4 3 3 9 9 3
4 6 226 3 1 7 226 1 1 4 3 3 9 9 3
4 6 226 4 1 7 226 1 1 4 3 3 9 9 3
4 6 226 5 1 7 226 1 1 4 3 3 9 9 3
4 6 226 6 1 7 226 1 1 4 3 3 9 9 3
4 6 226 7 1 7 226 1 1 4 3 3 9 9 3
4 6 226 8 1 7 226 1 1 4 3 3 9 9 3
4 6 226 N 1 7 226 1 1 4 3 3 9 9 3
5 6 227 0 2 7 227 2 1 5 3 3 9 9 3
5 6 227 1 2 7 227 2 1 5 3 3 9 9 3
5 6 227 2 2 7 227 2 1 5 3 3 9 9 3
5 6 227 3 2 7 227 2 1 5 3 3 9 9 3
5 6 227 4 2 7 227 2 1 5 3 3 9 9 3
5 6 227 5 2 7 227 2 1 5 3 3 9 9 3
5 6 227 6 2 7 227 2 1 5 3 3 9 9 3
5 6 227 7 2 7 227 2 1 5 3 3 9 9 3
5 6 227 8 2 7 227 2 1 5 3 3 9 9 3
5 6 227 N 2 7 227 2 1 5 3 3 9 9 3
6 6 234 0 0 8 234 0 2 6 3 3 9 9 3
6 6 234 1 0 8 234 0 2 6 3 3 9 9 3
6 6 234 2 0 8 234 0 2 6 3 3 9 9 3
6 6 234 3 0 8 234 0 2 6 3 3 9 9 3
6 6 234 4 0 8 234 0 2 6 3 3 9 9 3
6 6 234 5 0 8 234 0 2 6 3 3 9 9 3
6 6 234 6 0 8 234 0 2 6 3 3 9 9 3
6 6 234 7 0 8 234 0 2 6 3 3 9 9 3
6 6 234 8 0 8 234 0 2 6 3 3 9 9 3
6 6 234 N 0 8 234 0 2 6 3 3 9 9 3
7 6 235 0 1 8 235 1 2 7 3 3 9 9 3
7 6 235 1 1 8 235 1 2 7 3 3 9 9 3
7 6 235 2 1 8 235 1 2 7 3 3 9 9 3
7 6 235 3 1 8 235 1 2 7 3 3 9 9 3
7 6 235 4 1 8 235 1 2 7 3 3 9 9 3
7 6 235 5 1 8 235 1 2 7 3 3 9 9 3
7 6 235 6 1 8 235 1 2 7 3 3 9 9 3
7 6 235 7 1 8 235 1 2 7 3 3 9 9 3
7 6 235 8 1 8 235 1 2 7 3 3 9 9 3
7 6 235 N 1 8 235 1 2 7 3 3 9 9 3
8 6 236 0 2 8 236 2 2 8 3 3 9 9 3
8 6 236 1 2 8 236 2 2 8 3 3 9 9 3
8 6 236 2 2 8 236 2 2 8 3 3 9 9 3
8 6 236 3 2 8 236 2 2 8 3 3 9 9 3
8 6 236 4 2 8 236 2 2 8 3 3 9 9 3
8 6 236 5 2 8 236 2 2 8 3 3 9 9 3
8 6 236 6 2 8 236 2 2 8 3 3 9 9 3
8 6 236 7 2 8 236 2 2 8 3 3 9 9 3
8 6 236 8 2 8 236 2 2 8 3 3 9 9 3
8 6 236 N 2 8 236 2 2 8 3 3 9 9 3
0 7 57 0 3 6 57 0 0 0 3 3 9 9 3
0 7 57 1 3 6 57 0 0 0 3 3 9 9 3
0 7 57 2 3 6 57 0 0 0 3 3 9 9 3
0 7 57 3 3 6 57 0 0 0 3 3 9 9 3
0 7 57 4 3 6 57 0 0 0 3 3 9 9 3
0 7 57 5 3 6 57 0 0 0 3 3 9 9 3
0 7 57 6 3 6 57 0 0 0 3 3 9 9 3
0 7 57 7 3 6 57 0 0 0 3 3 9 9 3
0 7 57 8 3 6 57 0 0 0 3 3 9 9 3
0 7 57 N 3 6 57 0 0 0 3 3 9 9 3
1 7 58 0 4 6 58 1 0 1 3 3 9 9 3
1 7 58 1 4 6 58 1 0 1 3 3 9 9 3
1 7 58 2 4 6 58 1 0 1 3 3 9 9 3
1 7 58 3 4 6 58 1 0 1 3 3 9 9 3
1 7 58 4 4 6 58 1 0 1 3 3 9 9 3
1 7 58 5 4 6 58 1 0 1 3 3 9 9 3
1 7 58 6 4 6 58 1 0 1 3 3 9 9 3
1 7 58 7 4 6 58 1 0 1 3 3 9 9 3
1 7 58 8 4 6 58 1 0 1 3 3 9 9 3
1 7 58 N 4 6 58 1 0 1 3 3 9 9 3
2 7 59 0 5 6 59 2 0 2 3 3 9 9 3
2 7 59 1 5 6 59 2 0 2 3 3 9 9 3
2 7 59 2 5 6 59 2 0 2 3 3 9 9 3
2 7 59 3 5 6 59 2 0 2 3 3 9 9 3
2 7 59 4 5 6 59 2 0 2 3 3 9 9 3
2 7 59 5 5 6 59 2 0 2 3 3 9 9 3
2 7 59 6 5 6 59 2 0 2 3 3 9 9 3
2 7 59 7 5 6 59 2 0 2 3 3 9 9 3
2 7 59 8 5 6 59 2 0 2 3 3 9 9 3
2 7 59 N 5 6 59 2 0 2 3 3 9 9 3
3 7 66 0 3 7 66 0 1 3 3 3 9 9 3
3 7 66 1 3 7 66 0 1 3 3 3 9 9 3
3 7 66 2 3 7 66 0 1 3 3 3 9 9 3
3 7 66 3 3 7 66 0 1 3 3 3 9 9 3
3 7 66 4 3 7 66 0 1 3 3 3 9 9 3
3 7 66 5 3 7 66 0 1 3 3 3 9 9 3
3 7 66 6 3 7 66 0 1 3 3 3 9 9 3
3 7 66 7 3 7 66 0 1 3 3 3 9 9 3
3 7 66 8 3 7 66 0 1 3 3 3 9 9 3
3 7 66 N 3 7 66 0 1 3 3 3 9 9 3
4 7 67 0 4 7 67 1 1 4 3 3 9 9 3
4 7 67 1 4 7 67 1 1 4 3 3 9 9 3
4 7 67 2 4 7 67 1 1 4 3 3 9 9 3
4 7 67 3 4 7 67 1 1 4 3 3 9 9 3
4 7 67 4 4 7 67 1 1 4 3 3 9 9 3
4 7 67 5 4 7 67 1 1 4 3 3 9 9 3
4 7 67 6 4 7 67 1 1 4 3 3 9 9 3
4 7 67 7 4 7 67 1 1 4 3 3 9 9 3
4 7 67 8 4 7 67 1 1 4 3 3 9 9 3
4 7 67 N 4 7 67 1 1 4 3 3 9 9 3
5 7 68 0 5 7 68 2 1 5 3 3 9 9 3
5 7 68 1 5 7 68 2 1 5 3 3 9 9 3
5 7 68 2 5 7 68 2 1 5 3 3 9 9 3
5 7 68 3 5 7 68 2 1 5 3 3 9 9 3
5 7 68 4 5 7 68 2 1 5 3 3 9 9 3
5 7 68 5 5 7 68 2 1 5 3 3 9 9 3
5 7 68 6 5 7 68 2 1 5 3 3 9 9 3
5 7 68 7 5 7 68 2 1 5 3 3 9 9 3
5 7 68 8 5 7 68 2 1 5 3 3 9 9 3
5 7 68 N 5 7 68 2 1 5 3 3 9 9 3
6 7 75 0 3 8 75 0 2 6 3 3 9 9 3
6 7 75 1 3 8 75 0 2 6 3 3 9 9 3
6 7 75 2 3 8 75 0 2 6 3 3 9 9 3
6 7 75 3 3 8 75 0 2 6 3 3 9 9 3
6 7 75 4 3 8 75 0 2 6 3 3 9 9 3
6 7 75 5 3 8 75 0 2 6 3 3 9 9 3
6 7 75 6 3 8 75 0 2 6 3 3 9 9 3
6 7 75 7 3 8 75 0 2 6 3 3 9 9 3
6 7 75 8 3 8 75 0 2 6 3 3 9 9 3
6 7 75 N 3 8 75 0 2 6 3 3 9 9 3
7 7 76 0 4 8 76 1 2 7 3 3 9 9 3
7 7 76 1 4 8 76 1 2 7 3 3 9 9 3
7 7 76 2 4 8 76 1 2 7 3 3 9 9 3
7 7 76 3 4 8 76 1 2 7 3 3 9 9 3
7 7 76 4 4 8 76 1 2 7 3 3 9 9 3
7 7 76 5 4 8 76 1 2 7 3 3 9 9 3
7 7 76 6 4 8 76 1 2 7 3 3 9 9 3
7 7 76 7 4 8 76 1 2 7 3 3 9 9 3
7 7 76 8 4 8 76 1 2 7 3 3 9 9 3
7 7 76 N 4 8 76 1 2 7 3 3 9 9 3
8 7 77 0 5 8 77 2 2 8 3 3 9 9 3
8 7 77 1 5 8 77 2 2 8 3 3 9 9 3
8 7 77 2 5 8 77 2 2 8 3 3 9 9 3
8 7 77 3 5 8 77 2 2 8 3 3 9 9 3
8 7 77 4 5 8 77 2 2 8 3 3 9 9 3
8 7 77 5 5 8 77 2 2 8 3 3 9 9 3
8 7 77 6 5 8 77 2 2 8 3 3 9 9 3
8 7 77 7 5 8 77 2 2 8 3 3 9 9 3
8 7 77 8 5 8 77 2 2 8 3 3 9 9 3
8 7 77 N 5 8 77 2 2 8 3 3 9 9 3
0 7 138 0 3 6 138 0 0 0 3 3 9 9 3
0 7 138 1 3 6 138 0 0 0 3 3 9 9 3
0 7 138 2 3 6 138 0 0 0 3 3 9 9 3
0 7 138 3 3 6 138 0 0 0 3 3 9 9 3
0 7 138 4 3 6 138 0 0 0 3 3 9 9 3
0 7 138 5 3 6 138 0 0 0 3 3 9 9 3
0 7 138 6 3 6 138 0 0 0 3 3 9 9 3
0 7 138 7 3 6 138 0 0 0 3 3 9 9 3
0 7 138 8 3 6 138 0 0 0 3 3 9 9 3
0 7 138 N 3 6 138 0 0 0 3 3 9 9 3
1 7 139 0 4 6 139 1 0 1 3 3 9 9 3
1 7 139 1 4 6 139 1 0 1 3 3 9 9 3
1 7 139 2 4 6 139 1 0 1 3 3 9 9 3
1 7 139 3 4 6 139 1 0 1 3 3 9 9 3
1 7 139 4 4 6 139 1 0 1 3 3 9 9 3
1 7 139 5 4 6 139 1 0 1 3 3 9 9 3
1 7 139 6 4 6 139 1 0 1 3 3 9 9 3
1 7 139 7 4 6 139 1 0 1 3 3 9 9 3
1 7 139 8 4 6 139 1 0 1 3 3 9 9 3
1 7 139 N 4 6 139 1 0 1 3 3 9 9 3
2 7 140 0 5 6 140 2 0 2 3 3 9 9 3
2 7 140 1 5 6 140 2 0 2 3 3 9 9 3
2 7 140 2 5 6 140 2 0 2 3 3 9 9 3
2 7 140 3 5 6 140 2 0 2 3 3 9 9 3
2 7 140 4 5 6 140 2 0 2 3 3 9 9 3
2 7 140 5 5 6 140 2 0 2 3 3 9 9 3
2 7 140 6 5 6 140 2 0 2 3 3 9 9 3
2 7 140 7 5 6 140 2 0 2 3 3 9 9 3
2 7 140 8 5 6 140 2 0 2 3 3 9 9 3
2 7 140 N 5 6 140 2 0 2 3 3 9 9 3
3 7 147 0 3 7 147 0 1 3 3 3 9 9 3
3 7 147 1 3 7 147 0 1 3 3 3 9 9 3
3 7 147 2 3 7 147 0 1 3 3 3 9 9 3
3 7 147 3 3 7 147 0 1 3 3 3 9 9 3
3 7 147 4 3 7 147 0 1 3 3 3 9 9 3
3 7 147 5 3 7 147 0 1 3 3 3 9 9 3
3 7 147 6 3 7 147 0 1 3 3 3 9 9 3
3 7 147 7 3 7 147 0 1 3 3 3 9 9 3
3 7 147 8 3 7 147 0 1 3 3 3 9 9 3
3 7 147 N 3 7 147 0 1 3 3 3 9 9 3
4 7 148 0 4 7 148 1 1 4 3 3 9 9 3
4 7 148 1 4 7 148 1 1 4 3 3 9 9 3
4 7 148 2 4 7 148 1 1 4 3 3 9 9 3
4 7 148 3 4 7 148 1 1 4 3 3 9 9 3
4 7 148 4 4 7 148 1 1 4 3 3 9 9 3
4 7 148 5 4 7 148 1 1 4 3 3 9 9 3
4 7 148 6 4 7 148 1 1 4 3 3 9 9 3
4 7 148 7 4 7 148 1 1 4 3 3 9 9 3
4 7 148 8 4 7 148 1 1 4 3 3 9 9 3
4 7 148 N 4 7 148 1 1 4 3 3 9 9 3
5 7 149 0 5 7 149 2 1 5 3 3 9 9 3
5 7 149 1 5 7 149 2 1 5 3 3 9 9 3
5 7 149 2 5 7 149 2 1 5 3 3 9 9 3
5 7 149 3 5 7 149 2 1 5 3 3 9 9 3
5 7 149 4 5 7 149 2 1 5 3 3 9 9 3
5 7 149 5 5 7 149 2 1 5 3 3 9 9 3
5 7 149 6 5 7 149 2 1 5 3 3 9 9 3
5 7 149 7 5 7 149 2 1 5 3 3 9 9 3
5 7 149 8 5 7 149 2 1 5 3 3 9 9 3
5 7 149 N 5 7 149 2 1 5 3 3 9 9 3
6 7 156 0 3 8 156 0 2 6 3 3 9 9 3
6 7 156 1 3 8 156 0 2 6 3 3 9 9 3
6 7 156 2 3 8 156 0 2 6 3 3 9 9 3
6 7 156 3 3 8 156 0 2 6 3 3 9 9 3
6 7 156 4 3 8 156 0 2 6 3 3 9 9 3
6 7 156 5 3 8 156 0 2 6 3 3 9 9 3
6 7 156 6 3 8 156 0 2 6 3 3 9 9 3
6 7 156 7 3 8 156 0 2 6 3 3 9 9 3
6 7 156 8 3 8 156 0 2 6 3 3 9 9 3
6 7 156 N 3 8 156 0 2 6 3 3 9 9 3
7 7 157 0 4 8 157 1 2 7 3 3 9 9 3
7 7 157 1 4 8 157 1 2 7 3 3 9 9 3
7 7 157 2 4 8 157 1 2 7 3 3 9 9 3
7 7 157 3 4 8 157 1 2 7 3 3 9 9 3
7 7 157 4 4 8 157 1 2 7 3 3 9 9 3
7 7 157 5 4 8 157 1 2 7 3 3 9 9 3
7 7 157 6 4 8 157 1 2 7 3 3 9 9 3
7 7 157 7 4 8 157 1 2 7 3 3 9 9 3
7 7 157 8 4 8 157 1 2 7 3 3 9 9 3
7 7 157 N 4 8 157 1 2 7 3 3 9 9 3
8 7 158 0 5 8 158 2 2 8 3 3 9 9 3
8 7 158 1 5 8 158 2 2 8 3 3 9 9 3
8 7 158 2 5 8 158 2 2 8 3 3 9 9 3
8 7 158 3 5 8 158 2 2 8 3 3 9 9 3
8 7 158 4 5 8 158 2 2 8 3 3 9 9 3
8 7 158 5 5 8 158 2 2 8 3 3 9 9 3
8 7 158 6 5 8 158 2 2 8 3 3 9 9 3
8 7 158 7 5 8 158 2 2 8 3 3 9 9 3
8 7 158 8 5 8 158 2 2 8 3 3 9 9 3
8 7 158 N 5 8 158 2 2 8 3 3 9 9 3
0 7 219 0 3 6 219 0 0 0 3 3 9 9 3
0 7 219 1 3 6 219 0 0 0 3 3 9 9 3
0 7 219 2 3 6 219 0 0 0 3 3 9 9 3
0 7 219 3 3 6 219 0 0 0 3 3 9 9 3
0 7 219 4 3 6 219 0 0 0 3 3 9 9 3
0 7 219 5 3 6 219 0 0 0 3 3 9 9 3
0 7 219 6 3 6 219 0 0 0 3 3 9 9 3
0 7 219 7 3 6 219 0 0 0 3 3 9 9 3
0 7 219 8 3 6 219 0 0 0 3 3 9 9 3
0 7 219 N 3 6 219 0 0 0 3 3 9 9 3
1 7 220 0 4 6 220 1 0 1 3 3 9 9 3
1 7 220 1 4 6 220 1 0 1 3 3 9 9 3
1 7 220 2 4 6 220 1 0 1 3 3 9 9 3
1 7 220 3 4 6 220 1 0 1 3 3 9 9 3
1 7 220 4 4 6 220 1 0 1 3 3 9 9 3
1 7 220 5 4 6 220 1 0 1 3 3 9 9 3
1 7 220 6 4 6 220 1 0 1 3 3 9 9 3
1 7 220 7 4 6 220 1 0 1 3 3 9 9 3
1 7 220 8 4 6 220 1 0 1 3 3 9 9 3
1 7 220 N 4 6 220 1 0 1 3 3 9 9 3
2 7 221 0 5 6 221 2 0 2 3 3 9 9 3
2 7 221 1 5 6 221 2 0 2 3 3 9 9 3
2 7 221 2 5 6 221 2 0 2 3 3 9 9 3
2 7 221 3 5 6 221 2 0 2 3 3 9 9 3
2 7 221 4 5 6 221 2 0 2 3 3 9 9 3
2 7 221 5 5 6 221 2 0 2 3 3 9 9 3
2 7 221 6 5 6 221 2 0 2 3 3 9 9 3
2 7 221 7 5 6 221 2 0 2 3 3 9 9 3
2 7 221 8 5 6 221 2 0 2 3 3 9 9 3
2 7 221 N 5 6 221 2 0 2 3 3 9 9 3
3 7 228 0 3 7 228 0 1 3 3 3 9 9 3
3 7 228 1 3 7 228 0 1 3 3 3 9 9 3
3 7 228 2 3 7 228 0 1 3 3 3 9 9 3
3 7 228 3 3 7 228 0 1 3 3 3 9 9 3
3 7 228 4 3 7 228 0 1 3 3 3 9 9 3
3 7 228 5 3 7 228 0 1 3 3 3 9 9 3
3 7 228 6 3 7 228 0 1 3 3 3 9 9 3
3 7 228 7 3 7 228 0 1 3 3 3 9 9 3
3 7 228 8 3 7 228 0 1 3 3 3 9 9 3
3 7 228 N 3 7 228 0 1 3 3 3 9 9 3
4 7 229 0 4 7 229 1 1 4 3 3 9 9 3
4 7 229 1 4 7 229 1 1 4 3 3 9 9 3
4 7 229 2 4 7 229 1 1 4 3 3 9 9 3
4 7 229 3 4 7 229 1 1 4 3 3 9 9 3
4 7 229 4 4 7 229 1 1 4 3 3 9 9 3
4 7 229 5 4 7 229 1 1 4 3 3 9 9 3
4 7 229 6 4 7 229 1 1 4 3 3 9 9 3
4 7 229 7 4 7 229 1 1 4 3 3 9 9 3
4 7 229 8 4 7 229 1 1 4 3 3 9 9 3
4 7 229 N 4 7 229 1 1 4 3 3 9 9 3
5 7 230 0 5 7 230 2 1 5 3 3 9 9 3
5 7 230 1 5 7 230 2 1 5 3 3 9 9 3
5 7 230 2 5 7 230 2 1 5 3 3 9 9 3
5 7 230 3 5 7 230 2 1 5 3 3 9 9 3
5 7 230 4 5 7 230 2 1 5 3 3 9 9 3
5 7 230 5 5 7 230 2 1 5 3 3 9 9 3
5 7 230 6 5 7 230 2 1 5 3 3 9 9 3
5 7 230 7 5 7 230 2 1 5 3 3 9 9 3
5 7 230 8 5 7 230 2 1 5 3 3 9 9 3
5 7 230 N 5 7 230 2 1 5 3 3 9 9 3
6 7 237 0 3 8 237 0 2 6 3 3 9 9 3
6 7 237 1 3 8 237 0 2 6 3 3 9 9 3
6 7 237 2 3 8 237 0 2 6 3 3 9 9 3
6 7 237 3 3 8 237 0 2 6 3 3 9 9 3
6 7 237 4 3 8 237 0 2 6 3 3 9 9 3
6 7 237 5 3 8 237 0 2 6 3 3 9 9 3
6 7 237 6 3 8 237 0 2 6 3 3 9 9 3
6 7 237 7 3 8 237 0 2 6 3 3 9 9 3
6 7 237 8 3 8 237 0 2 6 3 3 9 9 3
6 7 237 N 3 8 237 0 2 6 3 3 9 9 3
7 7 238 0 4 8 238 1 2 7 3 3 9 9 3
7 7 238 1 4 8 238 1 2 7 3 3 9 9 3
7 7 238 2 4 8 238 1 2 7 3 3 9 9 3
7 7 238 3 4 8 238 1 2 7 3 3 9 9 3
7 7 238 4 4 8 238 1 2 7 3 3 9 9 3
7 7 238 5 4 8 238 1 2 7 3 3 9 9 3
7 7 238 6 4 8 238 1 2 7 3 3 9 9 3
7 7 238 7 4 8 238 1 2 7 3 3 9 9 3
7 7 238 8 4 8 238 1 2 7 3 3 9 9 3
7 7 238 N 4 8 238 1 2 7 3 3 9 9 3
8 7 239 0 5 8 239 2 2 8 3 3 9 9 3
8 7 239 1 5 8 239 2 2 8 3 3 9 9 3
8 7 239 2 5 8 239 2 2 8 3 3 9 9 3
8 7 239 3 5 8 239 2 2 8 3 3 9 9 3
8 7 239 4 5 8 239 2 2 8 3 3 9 9 3
8 7 239 5 5 8 239 2 2 8 3 3 9 9 3
8 7 239 6 5 8 239 2 2 8 3 3 9 9 3
8 7 239 7 5 8 239 2 2 8 3 3 9 9 3
8 7 239 8 5 8 239 2 2 8 3 3 9 9 3
8 7 239 N 5 8 239 2 2 8 3 3 9 9 3
0 8 60 0 6 6 60 0 0 0 3 3 9 9 3
0 8 60 1 6 6 60 0 0 0 3 3 9 9 3
0 8 60 2 6 6 60 0 0 0 3 3 9 9 3
0 8 60 3 6 6 60 0 0 0 3 3 9 9 3
0 8 60 4 6 6 60 0 0 0 3 3 9 9 3
0 8 60 5 6 6 60 0 0 0 3 3 9 9 3
0 8 60 6 6 6 60 0 0 0 3 3 9 9 3
0 8 60 7 6 6 60 0 0 0 3 3 9 9 3
0 8 60 8 6 6 60 0 0 0 3 3 9 9 3
0 8 60 N 6 6 60 0 0 0 3 3 9 9 3
1 8 61 0 7 6 61 1 0 1 3 3 9 9 3
1 8 61 1 7 6 61 1 0 1 3 3 9 9 3
1 8 61 2 7 6 61 1 0 1 3 3 9 9 3
1 8 61 3 7 6 61 1 0 1 3 3 9 9 3
1 8 61 4 7 6 61 1 0 1 3 3 9 9 3
1 8 61 5 7 6 61 1 0 1 3 3 9 9 3
1 8 61 6 7 6 61 1 0 1 3 3 9 9 3
1 8 61 7 7 6 61 1 0 1 3 3 9 9 3
1 8 61 8 7 6 61 1 0 1 3 3 9 9 3
1 8 61 N 7 6 61 1 0 1 3 3 9 9 3
2 8 62 0 8 6 62 2 0 2 3 3 9 9 3
2 8 62 1 8 6 62 2 0 2 3 3 9 9 3
2 8 62 2 8 6 62 2 0 2 3 3 9 9 3
2 8 62 3 8 6 62 2 0 2 3 3 9 9 3
2 8 62 4 8 6 62 2 0 2 3 3 9 9 3
2 8 62 5 8 6 62 2 0 2 3 3 9 9 3
2 8 62 6 8 6 62 2 0 2 3 3 9 9 3
2 8 62 7 8 6 62 2 0 2 3 3 9 9 3
2 8 62 8 8 6 62 2 0 2 3 3 9 9 3
2 8 62 N 8 6 62 2 0 2 3 3 9 9 3
3 8 69 0 6 7 69 0 1 3 3 3 9 9 3
3 8 69 1 6 7 69 0 1 3 3 3 9 9 3
3 8 69 2 6 7 69 0 1 3 3 3 9 9 3
3 8 69 3 6 7 69 0 1 3 3 3 9 9 3
3 8 69 4 6 7 69 0 1 3 3 3 9 9 3
3 8 69 5 6 7 69 0 1 3 3 3 9 9 3
3 8 69 6 6 7 69 0 1 3 3 3 9 9 3
3 8 69 7 6 7 69 0 1 3 3 3 9 9 3
3 8 69 8 6 7 69 0 1 3 3 3 9 9 3
3 8 69 N 6 7 69 0 1 3 3 3 9 9 3
4 8 70 0 7 7 70 1 1 4 3 3 9 9 3
4 8 70 1 7 7 70 1 1 4 3 3 9 9 3
4 8 70 2 7 7 70 1 1 4 3 3 9 9 3
4 8 70 3 7 7 70 1 1 4 3 3 9 9 3
4 8 70 4 7 7 70 1 1 4 3 3 9 9 3
4 8 70 5 7 7 70 1 1 4 3 3 9 9 3
4 8 70 6 7 7 70 1 1 4 3 3 9 9 3
4 8 70 7 7 7 70 1 1 4 3 3 9 9 3
4 8 70 8 7 7 70 1 1 4 3 3 9 9 3
4 8 70 N 7 7 70 1 1 4 3 3 9 9 3
5 8 71 0 8 7 71 2 1 5 3 3 9 9 3
5 8 71 1 8 7 71 2 1 5 3 3 9 9 3
5 8 71 2 8 7 71 2 1 5 3 3 9 9 3
5 8 71 3 8 7 71 2 1 5 3 3 9 9 3
5 8 71 4 8 7 71 2 1 5 3 3 9 9 3
5 8 71 5 8 7 71 2 1 5 3 3 9 9 3
5 8 71 6 8 7 71 2 1 5 3 3 9 9 3
5 8 71 7 8 7 71 2 1 5 3 3 9 9 3
5 8 71 8 8 7 71 2 1 5 3 3 9 9 3
5 8 71 N 8 7 71 2 1 5 3 3 9 9 3
6 8 78 0 6 8 78 0 2 6 3 3 9 9 3
6 8 78 1 6 8 78 0 2 6 3 3 9 9 3
6 8 78 2 6 8 78 0 2 6 3 3 9 9 3
6 8 78 3 6 8 78 0 2 6 3 3 9 9 3
6 8 78 4 6 8 78 0 2 6 3 3 9 9 3
6 8 78 5 6 8 78 0 2 6 3 3 9 9 3
6 8 78 6 6 8 78 0 2 6 3 3 9 9 3
6 8 78 7 6 8 78 0 2 6 3 3 9 9 3
6 8 78 8 6 8 78 0 2 6 3 3 9 9 3
6 8 78 N 6 8 78 0 2 6 3 3 9 9 3
7 8 79 0 7 8 79 1 2 7 3 3 9 9 3
7 8 79 1 7 8 79 1 2 7 3 3 9 9 3
7 8 79 2 7 8 79 1 2 7 3 3 9 9 3
7 8 79 3 7 8 79 1 2 7 3 3 9 9 3
7 8 79 4 7 8 79 1 2 7 3 3 9 9 3
7 8 79 5 7 8 79 1 2 7 3 3 9 9 3
7 8 79 6 7 8 79 1 2 7 3 3 9 9 3
7 8 79 7 7 8 79 1 2 7 3 3 9 9 3
7 8 79 8 7 8 79 1 2 7 3 3 9 9 3
7 8 79 N 7 8 79 1 2 7 3 3 9 9 3
8 8 80 0 8 8 80 2 2 8 3 3 9 9 3
8 8 80 1 8 8 80 2 2 8 3 3 9 9 3
8 8 80 2 8 8 80 2 2 8 3 3 9 9 3
8 8 80 3 8 8 80 2 2 8 3 3 9 9 3
8 8 80 4 8 8 80 2 2 8 3 3 9 9 3
8 8 80 5 8 8 80 2 2 8 3 3 9 9 3
8 8 80 6 8 8 80 2 2 8 3 3 9 9 3
8 8 80 7 8 8 80 2 2 8 3 3 9 9 3
8 8 80 8 8 8 80 2 2 8 3 3 9 9 3
8 8 80 N 8 8 80 2 2 8 3 3 9 9 3
0 8 141 0 6 6 141 0 0 0 3 3 9 9 3
0 8 141 1 6 6 141 0 0 0 3 3 9 9 3
0 8 141 2 6 6 141 0 0 0 3 3 9 9 3
0 8 141 3 6 6 141 0 0 0 3 3 9 9 3
0 8 141 4 6 6 141 0 0 0 3 3 9 9 3
0 8 141 5 6 6 141 0 0 0 3 3 9 9 3
0 8 141 6 6 6 141 0 0 0 3 3 9 9 3
0 8 141 7 6 6 141 0 0 0 3 3 9 9 3
0 8 141 8 6 6 141 0 0 0 3 3 9 9 3
0 8 141 N 6 6 141 0 0 0 3 3 9 9 3
1 8 142 0 7 6 142 1 0 1 3 3 9 9 3
1 8 142 1 7 6 142 1 0 1 3 3 9 9 3
1 8 142 2 7 6 142 1 0 1 3 3 9 9 3
1 8 142 3 7 6 142 1 0 1 3 3 9 9 3
1 8 142 4 7 6 142 1 0 1 3 3 9 9 3
1 8 142 5 7 6 142 1 0 1 3 3 9 9 3
1 8 142 6 7 6 142 1 0 1 3 3 9 9 3
1 8 142 7 7 6 142 1 0 1 3 3 9 9 3
1 8 142 8 7 6 142 1 0 1 3 3 9 9 3
1 8 142 N 7 6 142 1 0 1 3 3 9 9 3
2 8 143 0 8 6 143 2 0 2 3 3 9 9 3
2 8 143 1 8 6 143 2 0 2 3 3 9 9 3
2 8 143 2 8 6 143 2 0 2 3 3 9 9 3
2 8 143 3 8 6 143 2 0 2 3 3 9 9 3
2 8 143 4 8 6 143 2 0 2 3 3 9 9 3
2 8 143 5 8 6 143 2 0 2 3 3 9 9 3
2 8 143 6 8 6 143 2 0 2 3 3 9 9 3
2 8 143 7 8 6 143 2 0 2 3 3 9 9 3
2 8 143 8 8 6 143 2 0 2 3 3 9 9 3
2 8 143 N 8 6 143 2 0 2 3 3 9 9 3
3 8 150 0 6 7 150 0 1 3 3 3 9 9 3
3 8 150 1 6 7 150 0 1 3 3 3 9 9 3
3 8 150 2 6 7 150 0 1 3 3 3 9 9 3
3 8 150 3 6 7 150 0 1 3 3 3 9 9 3
3 8 150 4 6 7 150 0 1 3 3 3 9 9 3
3 8 150 5 6 7 150 0 1 3 3 3 9 9 3
3 8 150 6 6 7 150 0 1 3 3 3 9 9 3
3 8 150 7 6 7 150 0 1 3 3 3 9 9 3
3 8 150 8 6 7 150 0 1 3 3 3 9 9 3
3 8 150 N 6 7 150 0 1 3 3 3 9 9 3
4 8 151 0 7 7 151 1 1 4 3 3 9 9 3
4 8 151 1 7 7 151 1 1 4 3 3 9 9 3
4 8 151 2 7 7 151 1 1 4 3 3 9 9 3
4 8 151 3 7 7 151 1 1 4 3 3 9 9 3
4 8 151 4 7 7 151 1 1 4 3 3 9 9 3
4 8 151 5 7 7 151 1 1 4 3 3 9 9 3
4 8 151 6 7 7 151 1 1 4 3 3 9 9 3
4 8 151 7 7 7 151 1 1 4 3 3 9 9 3
4 8 151 8 7 7 151 1 1 4 3 3 9 9 3
4 8 151 N 7 7 151 1 1 4 3 3 9 9 3
5 8 152 0 8 7 152 2 1 5 3 3 9 9 3
5 8 152 1 8 7 152 2 1 5 3 3 9 9 3
5 8 152 2 8 7 152 2 1 5 3 3 9 9 3
5 8 152 3 8 7 152 2 1 5 3 3 9 9 3
5 8 152 4 8 7 152 2 1 5 3 3 9 9 3
5 8 152 5 8 7 152 2 1 5 3 3 9 9 3
5 8 152 6 8 7 152 2 1 5 3 3 9 9 3
5 8 152 7 8 7 152 2 1 5 3 3 9 9 3
5 8 152 8 8 7 152 2 1 5 3 3 9 9 3
5 8 152 N 8 7 152 2 1 5 3 3 9 9 3
6 8 159 0 6 8 159 0 2 6 3 3 9 9 3
6 8 159 1 6 8 159 0 2 6 3 3 9 9 3
6 8 159 2 6 8 159 0 2 6 3 3 9 9 3
6 8 159 3 6 8 159 0 2 6 3 3 9 9 3
6 8 159 4 6 8 159 0 2 6 3 3 9 9 3
6 8 159 5 6 8 159 0 2 6 3 3 9 9 3
6 8 159 6 6 8 159 0 2 6 3 3 9 9 3
6 8 159 7 6 8 159 0 2 6 3 3 9 9 3
6 8 159 8 6 8 159 0 2 6 3 3 9 9 3
6 8 159 N 6 8 159 0 2 6 3 3 9 9 3
7 8 160 0 7 8 160 1 2 7 3 3 9 9 3
7 8 160 1 7 8 160 1 2 7 3 3 9 9 3
7 8 160 2 7 8 160 1 2 7 3 3 9 9 3
7 8 160 3 7 8 160 1 2 7 3 3 9 9 3
7 8 160 4 7 8 160 1 2 7 3 3 9 9 3
7 8 160 5 7 8 160 1 2 7 3 3 9 9 3
7 8 160 6 7 8 160 1 2 7 3 3 9 9 3
7 8 160 7 7 8 160 1 2 7 3 3 9 9 3
7 8 160 8 7 8 160 1 2 7 3 3 9 9 3
7 8 160 N 7 8 160 1 2 7 3 3 9 9 3
8 8 161 0 8 8 161 2 2 8 3 3 9 9 3
8 8 161 1 8 8 161 2 2 8 3 3 9 9 3
8 8 161 2 8 8 161 2 2 8 3 3 9 9 3
8 8 161 3 8 8 161 2 2 8 3 3 9 9 3
8 8 161 4 8 8 161 2 2 8 3 3 9 9 3
8 8 161 5 8 8 161 2 2 8 3 3 9 9 3
8 8 161 6 8 8 161 2 2 8 3 3 9 9 3
8 8 161 7 8 8 161 2 2 8 3 3 9 9 3
8 8 161 8 8 8 161 2 2 8 3 3 9 9 3
8 8 161 N 8 8 161 2 2 8 3 3 9 9 3
0 8 222 0 6 6 222 0 0 0 3 3 9 9 3
0 8 222 1 6 6 222 0 0 0 3 3 9 9 3
0 8 222 2 6 6 222 0 0 0 3 3 9 9 3
0 8 222 3 6 6 222 0 0 0 3 3 9 9 3
0 8 222 4 6 6 222 0 0 0 3 3 9 9 3
0 8 222 5 6 6 222 0 0 0 3 3 9 9 3
0 8 222 6 6 6 222 0 0 0 3 3 9 9 3
0 8 222 7 6 6 222 0 0 0 3 3 9 9 3
0 8 222 8 6 6 222 0 0 0 3 3 9 9 3
0 8 222 N 6 6 222 0 0 0 3 3 9 9 3
1 8 223 0 7 6 223 1 0 1 3 3 9 9 3
1 8 223 1 7 6 223 1 0 1 3 3 9 9 3
1 8 223 2 7 6 223 1 0 1 3 3 9 9 3
1 8 223 3 7 6 223 1 0 1 3 3 9 9 3
1 8 223 4 7 6 223 1 0 1 3 3 9 9 3
1 8 223 5 7 6 223 1 0 1 3 3 9 9 3
1 8 223 6 7 6 223 1 0 1 3 3 9 9 3
1 8 223 7 7 6 223 1 0 1 3 3 9 9 3
1 8 223 8 7 6 223 1 0 1 3 3 9 9 3
1 8 223 N 7 6 223 1 0 1 3 3 9 9 3
2 8 224 0 8 6 224 2 0 2 3 3 9 9 3
2 8 224 1 8 6 224 2 0 2 3 3 9 9 3
2 8 224 2 8 6 224 2 0 2 3 3 9 9 3
2 8 224 3 8 6 224 2 0 2 3 3 9 9 3
2 8 224 4 8 6 224 2 0 2 3 3 9 9 3
2 8 224 5 8 6 224 2 0 2 3 3 9 9 3
2 8 224 6 8 6 224 2 0 2 3 3 9 9 3
2 8 224 7 8 6 224 2 0 2 3 3 9 9 3
2 8 224 8 8 6 224 2 0 2 3 3 9 9 3
2 8 224 N 8 6 224 2 0 2 3 3 9 9 3
3 8 231 0 6 7 231 0 1 3 3 3 9 9 3
3 8 231 1 6 7 231 0 1 3 3 3 9 9 3
3 8 231 2 6 7 231 0 1 3 3 3 9 9 3
3 8 231 3 6 7 231 0 1 3 3 3 9 9 3
3 8 231 4 6 7 231 0 1 3 3 3 9 9 3
3 8 231 5 6 7 231 0 1 3 3 3 9 9 3
3 8 231 6 6 7 231 0 1 3 3 3 9 9 3
3 8 231 7 6 7 231 0 1 3 3 3 9 9 3
3 8 231 8 6 7 231 0 1 3 3 3 9 9 3
3 8 231 N 6 7 231 0 1 3 3 3 9 9 3
4 8 232 0 7 7 232 1 1 4 3 3 9 9 3
4 8 232 1 7 7 232 1 1 4 3 3 9 9 3
4 8 232 2 7 7 232 1 1 4 3 3 9 9 3
4 8 232 3 7 7 232 1 1 4 3 3 9 9 3
4 8 232 4 7 7 232 1 1 4 3 3 9 9 3
4 8 232 5 7 7 232 1 1 4 3 3 9 9 3
4 8 232 6 7 7 232 1 1 4 3 3 9 9 3
4 8 232 7 7 7 232 1 1 4 3 3 9 9 3
4 8 232 8 7 7 232 1 1 4 3 3 9 9 3
4 8 232 N 7 7 232 1 1 4 3 3 9 9 3
5 8 233 0 8 7 233 2 1 5 3 3 9 9 3
5 8 233 1 8 7 233 2 1 5 3 3 9 9 3
5 8 233 2 8 7 233 2 1 5 3 3 9 9 3
5 8 233 3 8 7 233 2 1 5 3 3 9 9 3
5 8 233 4 8 7 233 2 1 5 3 3 9 9 3
5 8 233 5 8 7 233 2 1 5 3 3 9 9 3
5 8 233 6 8 7 233 2 1 5 3 3 9 9 3
5 8 233 7 8 7 233 2 1 5 3 3 9 9 3
5 8 233 8 8 7 233 2 1 5 3 3 9 9 3
5 8 233 N 8 7 233 2 1 5 3 3 9 9 3
6 8 240 0 6 8 240 0 2 6 3 3 9 9 3
6 8 240 1 6 8 240 0 2 6 3 3 9 9 3
6 8 240 2 6 8 240 0 2 6 3 3 9 9 3
6 8 240 3 6 8 240 0 2 6 3 3 9 9 3
6 8 240 4 6 8 240 0 2 6 3 3 9 9 3
6 8 240 5 6 8 240 0 2 6 3 3 9 9 3
6 8 240 6 6 8 240 0 2 6 3 3 9 9 3
6 8 240 7 6 8 240 0 2 6 3 3 9 9 3
6 8 240 8 6 8 240 0 2 6 3 3 9 9 3
6 8 240 N 6 8 240 0 2 6 3 3 9 9 3
7 8 241 0 7 8 241 1 2 7 3 3 9 9 3
7 8 241 1 7 8 241 1 2 7 3 3 9 9 3
7 8 241 2 7 8 241 1 2 7 3 3 9 9 3
7 8 241 3 7 8 241 1 2 7 3 3 9 9 3
7 8 241 4 7 8 241 1 2 7 3 3 9 9 3
7 8 241 5 7 8 241 1 2 7 3 3 9 9 3
7 8 241 6 7 8 241 1 2 7 3 3 9 9 3
7 8 241 7 7 8 241 1 2 7 3 3 9 9 3
7 8 241 8 7 8 241 1 2 7 3 3 9 9 3
7 8 241 N 7 8 241 1 2 7 3 3 9 9 3
8 8 242 0 8 8 242 2 2 8 3 3 9 9 3
8 8 242 1 8 8 242 2 2 8 3 3 9 9 3
8 8 242 2 8 8 242 2 2 8 3 3 9 9 3
8 8 242 3 8 8 242 2 2 8 3 3 9 9 3
8 8 242 4 8 8 242 2 2 8 3 3 9 9 3
8 8 242 5 8 8 242 2 2 8 3 3 9 9 3
8 8 242 6 8 8 242 2 2 8 3 3 9 9 3
8 8 242 7 8 8 242 2 2 8 3 3 9 9 3
8 8 242 8 8 8 242 2 2 8 3 3 9 9 3
8 8 242 N 8 8 242 2 2 8 3 3 9 9 3
| |
5bddfb42992d527a099fec7d33f932769d65f6785ad11200f7dcd6dfc1c8cc5b | tisnik/clojure-examples | project.clj | ;
( C ) Copyright 2021
;
; All rights reserved. This program and the accompanying materials
; are made available under the terms of the Eclipse Public License v1.0
; which accompanies this distribution, and is available at
-v10.html
;
; Contributors:
;
(defproject matrix2 "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[net.mikera/core.matrix "0.34.0"]]
:plugins [[lein-codox "0.10.7"]
[test2junit "1.1.0"]
[ lein - test - out " 0.3.1 " ]
[lein-cloverage "1.0.7-SNAPSHOT"]
[lein-kibit "0.1.8"]
[lein-clean-m2 "0.1.2"]
[lein-project-edn "0.3.0"]
[lein-marginalia "0.9.1"]]
:main ^:skip-aot matrix2.core
:target-path "target/%s"
:profiles {:uberjar {:aot :all}})
| null | https://raw.githubusercontent.com/tisnik/clojure-examples/cf9b3484a332ebd93bc585f051802c333ebde3df/matrix2/project.clj | clojure |
All rights reserved. This program and the accompanying materials
are made available under the terms of the Eclipse Public License v1.0
which accompanies this distribution, and is available at
Contributors:
| ( C ) Copyright 2021
-v10.html
(defproject matrix2 "0.1.0-SNAPSHOT"
:description "FIXME: write description"
:url ""
:license {:name "Eclipse Public License"
:url "-v10.html"}
:dependencies [[org.clojure/clojure "1.10.1"]
[net.mikera/core.matrix "0.34.0"]]
:plugins [[lein-codox "0.10.7"]
[test2junit "1.1.0"]
[ lein - test - out " 0.3.1 " ]
[lein-cloverage "1.0.7-SNAPSHOT"]
[lein-kibit "0.1.8"]
[lein-clean-m2 "0.1.2"]
[lein-project-edn "0.3.0"]
[lein-marginalia "0.9.1"]]
:main ^:skip-aot matrix2.core
:target-path "target/%s"
:profiles {:uberjar {:aot :all}})
|
99abefa3fa48243d2c05249b11a7fbf7460d52f0e8fd1c495ac479e4becde592 | evertedsphere/noether | Group.hs | module Noether.Algebra.Single.Group where
import Noether.Lemmata.TypeFu
import Noether.Algebra.Single.Cancellative
import Noether.Algebra.Single.Commutative
import Noether.Algebra.Single.Magma
import Noether.Algebra.Single.Monoid
type family GroupS (op :: k) (a :: Type) = (r :: GroupE)
data GroupE
= Group_Monoid_Cancellative MonoidE CancellativeE
| GroupNamed Symbol GroupE
class GroupK (op :: k) a (s :: GroupE)
instance (MonoidK op a zm, CancellativeK op a zl) =>
GroupK op a (Group_Monoid_Cancellative zm zl)
instance (KnownSymbol sym, GroupK op a s) =>
GroupK op a (GroupNamed sym s)
type GroupC op a = GroupK op a (GroupS op a)
| null | https://raw.githubusercontent.com/evertedsphere/noether/c4223f64b9df5b0dbbeec1fea726bfff7f5810f5/library/Noether/Algebra/Single/Group.hs | haskell | module Noether.Algebra.Single.Group where
import Noether.Lemmata.TypeFu
import Noether.Algebra.Single.Cancellative
import Noether.Algebra.Single.Commutative
import Noether.Algebra.Single.Magma
import Noether.Algebra.Single.Monoid
type family GroupS (op :: k) (a :: Type) = (r :: GroupE)
data GroupE
= Group_Monoid_Cancellative MonoidE CancellativeE
| GroupNamed Symbol GroupE
class GroupK (op :: k) a (s :: GroupE)
instance (MonoidK op a zm, CancellativeK op a zl) =>
GroupK op a (Group_Monoid_Cancellative zm zl)
instance (KnownSymbol sym, GroupK op a s) =>
GroupK op a (GroupNamed sym s)
type GroupC op a = GroupK op a (GroupS op a)
| |
b8d1f17e8d7601d49d1ca1817feb2e15864cde871ff084daeb296caa38f3ae9b | tlaplus/tlapm | newversion.ml |
* Copyright ( C ) 2012 INRIA and Microsoft Corporation
* Copyright (C) 2012 INRIA and Microsoft Corporation
*)
open Printf
open Scanf
Change the following two lines for a new version
let major = 1
let minor = 5
(* The rest of the file has no magic numbers *)
let same_version = ref false
let loud = ref true
let release = ref false
let rv_exists = Sys.file_exists "src/release_version.ml"
let arglist = [
"-same", Arg.Set same_version, "don't update version strings";
"-quiet", Arg.Clear loud, "do not print any messages";
"-release", Arg.Set release, "tag this version as the released version";
]
let () = Arg.parse arglist (fun _ -> ()) ""
let () =
let f = open_in "src/version.ml" in
let (oldmajor, oldminor, oldmicro) =
try
ignore (input_line f) ;
let ib = Scanning.from_channel f in
bscanf ib
"let (major,minor,micro) = (%d,%d,%d)"
(fun oldmajor oldminor oldmicro -> (oldmajor, oldminor, oldmicro))
with Scan_failure _ ->
fprintf stderr "Warning: could not parse old version number\n%!";
(0, 0, 0)
in
close_in f ;
let (major, minor, micro) =
if !same_version then
(oldmajor, oldminor, oldmicro)
else if (major <> oldmajor || minor <> oldminor) then
(major, minor, 0)
else
(major, minor, oldmicro + 1)
in
if not rv_exists then failwith "src/release_version.ml does not exist";
let f = open_in "src/release_version.ml" in
let (oldrmajor, oldrminor, oldrmicro) =
try
ignore (input_line f) ;
let ib = Scanning.from_channel f in
bscanf ib
"let (major,minor,micro) = (%d,%d,%d)"
(fun oldrmajor oldrminor oldrmicro ->
(oldrmajor, oldrminor, oldrmicro))
with Scan_failure _ ->
fprintf stderr "Warning: could not parse old release version number\n%!";
(0, 0, 0)
in
close_in f;
let version = Printf.sprintf "%d.%d.%d" major minor micro in
if !loud then printf "Next version will be: %s\n%!" version;
let release_version =
if !release
then version
else Printf.sprintf "%d.%d.%d" oldrmajor oldrminor oldrmicro
in
let f = open_out "src/version.ml" in
fprintf f "(* AUTOMATICALLY GENERATED by tools/newversion.ml \
-- DO NOT EDIT *)\n" ;
fprintf f "let (major,minor,micro) = (%d,%d,%d);;\n" major minor micro;
close_out f;
let f = open_out "src/release_version.ml" in
fprintf f "(* AUTOMATICALLY GENERATED by tools/newversion.ml \
-- DO NOT EDIT *)\n" ;
if !release then begin
fprintf f "let (major,minor,micro) = (%d,%d,%d);;\n" major minor micro
end else begin
fprintf f "let (major,minor,micro) = (%d,%d,%d);;\n"
oldrmajor oldrminor oldrmicro
end;
close_out f;
if !loud then printf "Created src/version.ml\n\
Created src/release_version.ml\n%!" ;
let ac_in = open_in "tools/configure.ac.in" in
let (acf, ac) = Filename.open_temp_file "configure" ".ac" in
at_exit (fun () -> Sys.remove acf) ;
let vreg = Str.regexp (Str.quote "@VERSION@") in
let rvreg = Str.regexp (Str.quote "@RELEASEVERSION@") in
begin try while true do
let line = input_line ac_in in
let line = Str.global_replace vreg version line in
let line = Str.global_replace rvreg release_version line in
output_string ac line ; output_string ac "\n" ;
done with End_of_file -> ()
end;
close_in ac_in ;
close_out ac ;
let ret = Sys.command ("autoconf -I tools -o configure " ^ acf) in
if ret <> 0 then failwith "calling autoconf" ;
if !loud then printf "Created configure\n%!" ;
flush Pervasives.stdout ;
if !loud then printf "Now run ./configure to rebuild the Makefile\n%!"
;;
| null | https://raw.githubusercontent.com/tlaplus/tlapm/b82e2fd049c5bc1b14508ae16890666c6928975f/tools/newversion.ml | ocaml | The rest of the file has no magic numbers
AUTOMATICALLY GENERATED by tools/newversion.ml \
-- DO NOT EDIT
AUTOMATICALLY GENERATED by tools/newversion.ml \
-- DO NOT EDIT |
* Copyright ( C ) 2012 INRIA and Microsoft Corporation
* Copyright (C) 2012 INRIA and Microsoft Corporation
*)
open Printf
open Scanf
Change the following two lines for a new version
let major = 1
let minor = 5
let same_version = ref false
let loud = ref true
let release = ref false
let rv_exists = Sys.file_exists "src/release_version.ml"
let arglist = [
"-same", Arg.Set same_version, "don't update version strings";
"-quiet", Arg.Clear loud, "do not print any messages";
"-release", Arg.Set release, "tag this version as the released version";
]
let () = Arg.parse arglist (fun _ -> ()) ""
let () =
let f = open_in "src/version.ml" in
let (oldmajor, oldminor, oldmicro) =
try
ignore (input_line f) ;
let ib = Scanning.from_channel f in
bscanf ib
"let (major,minor,micro) = (%d,%d,%d)"
(fun oldmajor oldminor oldmicro -> (oldmajor, oldminor, oldmicro))
with Scan_failure _ ->
fprintf stderr "Warning: could not parse old version number\n%!";
(0, 0, 0)
in
close_in f ;
let (major, minor, micro) =
if !same_version then
(oldmajor, oldminor, oldmicro)
else if (major <> oldmajor || minor <> oldminor) then
(major, minor, 0)
else
(major, minor, oldmicro + 1)
in
if not rv_exists then failwith "src/release_version.ml does not exist";
let f = open_in "src/release_version.ml" in
let (oldrmajor, oldrminor, oldrmicro) =
try
ignore (input_line f) ;
let ib = Scanning.from_channel f in
bscanf ib
"let (major,minor,micro) = (%d,%d,%d)"
(fun oldrmajor oldrminor oldrmicro ->
(oldrmajor, oldrminor, oldrmicro))
with Scan_failure _ ->
fprintf stderr "Warning: could not parse old release version number\n%!";
(0, 0, 0)
in
close_in f;
let version = Printf.sprintf "%d.%d.%d" major minor micro in
if !loud then printf "Next version will be: %s\n%!" version;
let release_version =
if !release
then version
else Printf.sprintf "%d.%d.%d" oldrmajor oldrminor oldrmicro
in
let f = open_out "src/version.ml" in
fprintf f "let (major,minor,micro) = (%d,%d,%d);;\n" major minor micro;
close_out f;
let f = open_out "src/release_version.ml" in
if !release then begin
fprintf f "let (major,minor,micro) = (%d,%d,%d);;\n" major minor micro
end else begin
fprintf f "let (major,minor,micro) = (%d,%d,%d);;\n"
oldrmajor oldrminor oldrmicro
end;
close_out f;
if !loud then printf "Created src/version.ml\n\
Created src/release_version.ml\n%!" ;
let ac_in = open_in "tools/configure.ac.in" in
let (acf, ac) = Filename.open_temp_file "configure" ".ac" in
at_exit (fun () -> Sys.remove acf) ;
let vreg = Str.regexp (Str.quote "@VERSION@") in
let rvreg = Str.regexp (Str.quote "@RELEASEVERSION@") in
begin try while true do
let line = input_line ac_in in
let line = Str.global_replace vreg version line in
let line = Str.global_replace rvreg release_version line in
output_string ac line ; output_string ac "\n" ;
done with End_of_file -> ()
end;
close_in ac_in ;
close_out ac ;
let ret = Sys.command ("autoconf -I tools -o configure " ^ acf) in
if ret <> 0 then failwith "calling autoconf" ;
if !loud then printf "Created configure\n%!" ;
flush Pervasives.stdout ;
if !loud then printf "Now run ./configure to rebuild the Makefile\n%!"
;;
|
a10e3703671917e8acb1b34815b3cdfeda5d3983fb0c7879fd5c0e28ab9b49a3 | ndmitchell/catch | Reduce.hs |
module Reduce(reduce, reduces, reduceWithM, reducesWithM, propMapReduceM, propMapReduce) where
import Req
import General
import DataRep
import Data.Proposition
import Control.Monad
import Control.Monad.Identity
import Yhc.Core
import Data.List
import Data.Maybe
-- DRIVERS
reduces :: Core -> Reqs -> Reqs
reduces core reqs = propMap (reduce core) reqs
reduce :: Core -> Req -> Reqs
reduce core req@(Req expr vals) = case expr of
_ | vals == valsTrue -> propTrue
| vals == valsFalse -> propFalse
CoreApp (CoreFun x) _ | not $ "." `isPrefixOf` x -> propLit req
CoreVar x -> propLit req
_ -> reduces core $ reduceOne core req
reduce core x = propLit x
-- take a function that reduces a Call
-- and reduce the entire thing
reducesWithM :: Core -> (Req -> IO Reqs) -> Reqs -> IO Reqs
reducesWithM core f reqs = propMapReduceM core (reduceWithM core f) reqs
reduceWithM :: Core -> (Req -> IO Reqs) -> Req -> IO Reqs
reduceWithM core f req@(Req expr vals) = case expr of
_ | vals == valsTrue -> return propTrue
| vals == valsFalse -> return propFalse
CoreApp (CoreFun x) _ | not $ "." `isPrefixOf` x -> f req >>= reducesWithM core f
CoreVar x -> return $ propLit req
_ -> reducesWithM core f $ reduceOne core req
reduceWithM core f x = return $ propLit x
propMapReduceM :: Monad m => Core -> (Req -> m Reqs) -> Reqs -> m Reqs
propMapReduceM core f x = propMapM (liftM (reduces core) . f) x
propMapReduce :: Core -> (Req -> Reqs) -> Reqs -> Reqs
propMapReduce core f x = runIdentity $ propMapReduceM core (return . f) x
CORE LOGIC
apply 1 step reduction to a Sel or a Make
-- this function does the real work!
reduceOne :: Core -> Req -> Reqs
reduceOne core req@(Req expr vals) = case expr of
CoreApp (CoreFun ('.':y)) [x] -> propLit $ Req x (integrate core vals y)
CoreApp (CoreCon y) xs -> propOrs $ map f $ differentiate core y vals
where
f vs = propAnds $ map (\(x,v) -> propLit $ Req x [v]) $ zip xs vs
CoreCase on alts -> propAnds $ map f alts
where
allCtrs = ctorNames core $ fromCoreCon $ fst $ head alts
seenCtrs = [x | (CoreCon x, _) <- alts]
f (CoreCon ctr, rhs) = g (delete ctr allCtrs) rhs
f (CoreVar _, rhs) = g seenCtrs rhs
g ctrs ex = propLit (Req on $ anyCtor core ctrs) `propOr` propLit (Req ex vals)
CoreApp (CorePrim "error") _ -> propTrue -- since will never return anything
CoreApp (CorePrim x) ys -> propFalse -- absolutely no idea what the result is
CorePrim x -> propFalse
c | isCoreConst c -> propFalse -- if you care, abstract before here
_ -> error $ "reduceOne: " ++ show req
| null | https://raw.githubusercontent.com/ndmitchell/catch/5d834416a27b4df3f7ce7830c4757d4505aaf96e/gadget/Reduce.hs | haskell | DRIVERS
take a function that reduces a Call
and reduce the entire thing
this function does the real work!
since will never return anything
absolutely no idea what the result is
if you care, abstract before here |
module Reduce(reduce, reduces, reduceWithM, reducesWithM, propMapReduceM, propMapReduce) where
import Req
import General
import DataRep
import Data.Proposition
import Control.Monad
import Control.Monad.Identity
import Yhc.Core
import Data.List
import Data.Maybe
reduces :: Core -> Reqs -> Reqs
reduces core reqs = propMap (reduce core) reqs
reduce :: Core -> Req -> Reqs
reduce core req@(Req expr vals) = case expr of
_ | vals == valsTrue -> propTrue
| vals == valsFalse -> propFalse
CoreApp (CoreFun x) _ | not $ "." `isPrefixOf` x -> propLit req
CoreVar x -> propLit req
_ -> reduces core $ reduceOne core req
reduce core x = propLit x
reducesWithM :: Core -> (Req -> IO Reqs) -> Reqs -> IO Reqs
reducesWithM core f reqs = propMapReduceM core (reduceWithM core f) reqs
reduceWithM :: Core -> (Req -> IO Reqs) -> Req -> IO Reqs
reduceWithM core f req@(Req expr vals) = case expr of
_ | vals == valsTrue -> return propTrue
| vals == valsFalse -> return propFalse
CoreApp (CoreFun x) _ | not $ "." `isPrefixOf` x -> f req >>= reducesWithM core f
CoreVar x -> return $ propLit req
_ -> reducesWithM core f $ reduceOne core req
reduceWithM core f x = return $ propLit x
propMapReduceM :: Monad m => Core -> (Req -> m Reqs) -> Reqs -> m Reqs
propMapReduceM core f x = propMapM (liftM (reduces core) . f) x
propMapReduce :: Core -> (Req -> Reqs) -> Reqs -> Reqs
propMapReduce core f x = runIdentity $ propMapReduceM core (return . f) x
CORE LOGIC
apply 1 step reduction to a Sel or a Make
reduceOne :: Core -> Req -> Reqs
reduceOne core req@(Req expr vals) = case expr of
CoreApp (CoreFun ('.':y)) [x] -> propLit $ Req x (integrate core vals y)
CoreApp (CoreCon y) xs -> propOrs $ map f $ differentiate core y vals
where
f vs = propAnds $ map (\(x,v) -> propLit $ Req x [v]) $ zip xs vs
CoreCase on alts -> propAnds $ map f alts
where
allCtrs = ctorNames core $ fromCoreCon $ fst $ head alts
seenCtrs = [x | (CoreCon x, _) <- alts]
f (CoreCon ctr, rhs) = g (delete ctr allCtrs) rhs
f (CoreVar _, rhs) = g seenCtrs rhs
g ctrs ex = propLit (Req on $ anyCtor core ctrs) `propOr` propLit (Req ex vals)
CorePrim x -> propFalse
_ -> error $ "reduceOne: " ++ show req
|
277d2fce56f967a049d5ef2c8f4bf840dd0cb0464ab64f0bbee0a279cba5f236 | graninas/Hydra | Meteor.hs | {-# LANGUAGE DeriveAnyClass #-}
# LANGUAGE DuplicateRecordFields #
module Astro.API.Meteor where
import Hydra.Prelude
data MeteorTemplate = MeteorTemplate
{ size :: Int32
, mass :: Int32
, azimuth :: Int32
, altitude :: Int32
}
deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON)
| null | https://raw.githubusercontent.com/graninas/Hydra/7ce16e705c2ce166f954dcca3910f258ae9b950c/app/astro/src/Astro/API/Meteor.hs | haskell | # LANGUAGE DeriveAnyClass # | # LANGUAGE DuplicateRecordFields #
module Astro.API.Meteor where
import Hydra.Prelude
data MeteorTemplate = MeteorTemplate
{ size :: Int32
, mass :: Int32
, azimuth :: Int32
, altitude :: Int32
}
deriving (Show, Eq, Ord, Generic, ToJSON, FromJSON)
|
d2fef5193e521db700ceb4b34a1e4a78c62f925a2252c35f311a68ac692c26c7 | fpco/inline-c | Exceptions.hs | # LANGUAGE PatternSynonyms #
# LANGUAGE ViewPatterns #
# DEPRECATED " Language . C.Inline . . Exceptions is deprecated in favor of Language . C.Inline . . Exception which changes the CppException data type to preserve the exception for custom error handling . " #
CppException(CppHaskellException)
, pattern Language.C.Inline.Cpp.Exceptions.CppStdException
, pattern Language.C.Inline.Cpp.Exceptions.CppOtherException
, toSomeException
, throwBlock
, tryBlock
, catchBlock
) where
import Data.ByteString (ByteString)
import qualified Data.Text.Encoding as T
import qualified Data.Text.Encoding.Error as T
import qualified Data.Text as T
import Language.C.Inline.Cpp.Exception
bsToChars :: ByteString -> String
bsToChars = T.unpack . T.decodeUtf8With T.lenientDecode
cppStdExceptionMessage :: CppException -> Maybe String
cppStdExceptionMessage (Language.C.Inline.Cpp.Exception.CppStdException _ s (Just t)) = Just $ "Exception: " <> bsToChars s <> "; type: " <> bsToChars t
cppStdExceptionMessage (Language.C.Inline.Cpp.Exception.CppStdException _ s Nothing) = Just $ "Exception: " <> bsToChars s <> "; type: not available (please use g++ or clang)"
cppStdExceptionMessage _ = Nothing
cppNonStdExceptionType :: CppException -> Maybe (Maybe String)
cppNonStdExceptionType (CppNonStdException _ mt) = Just (fmap bsToChars mt)
cppNonStdExceptionType _ = Nothing
pattern CppStdException :: String -> CppException
pattern CppStdException s <- (cppStdExceptionMessage -> Just s)
pattern CppOtherException :: Maybe String -> CppException
pattern CppOtherException mt <- (cppNonStdExceptionType -> Just mt)
| null | https://raw.githubusercontent.com/fpco/inline-c/063e8876137bf73b378797cf57819076bf78bb76/inline-c-cpp/src/Language/C/Inline/Cpp/Exceptions.hs | haskell | # LANGUAGE PatternSynonyms #
# LANGUAGE ViewPatterns #
# DEPRECATED " Language . C.Inline . . Exceptions is deprecated in favor of Language . C.Inline . . Exception which changes the CppException data type to preserve the exception for custom error handling . " #
CppException(CppHaskellException)
, pattern Language.C.Inline.Cpp.Exceptions.CppStdException
, pattern Language.C.Inline.Cpp.Exceptions.CppOtherException
, toSomeException
, throwBlock
, tryBlock
, catchBlock
) where
import Data.ByteString (ByteString)
import qualified Data.Text.Encoding as T
import qualified Data.Text.Encoding.Error as T
import qualified Data.Text as T
import Language.C.Inline.Cpp.Exception
bsToChars :: ByteString -> String
bsToChars = T.unpack . T.decodeUtf8With T.lenientDecode
cppStdExceptionMessage :: CppException -> Maybe String
cppStdExceptionMessage (Language.C.Inline.Cpp.Exception.CppStdException _ s (Just t)) = Just $ "Exception: " <> bsToChars s <> "; type: " <> bsToChars t
cppStdExceptionMessage (Language.C.Inline.Cpp.Exception.CppStdException _ s Nothing) = Just $ "Exception: " <> bsToChars s <> "; type: not available (please use g++ or clang)"
cppStdExceptionMessage _ = Nothing
cppNonStdExceptionType :: CppException -> Maybe (Maybe String)
cppNonStdExceptionType (CppNonStdException _ mt) = Just (fmap bsToChars mt)
cppNonStdExceptionType _ = Nothing
pattern CppStdException :: String -> CppException
pattern CppStdException s <- (cppStdExceptionMessage -> Just s)
pattern CppOtherException :: Maybe String -> CppException
pattern CppOtherException mt <- (cppNonStdExceptionType -> Just mt)
| |
ee401af212b2f7872bf57701d15117bfa488762d0620c88d370c43859f19940e | cyverse-archive/DiscoveryEnvironmentBackend | index.clj | (ns infosquito.index
(:require [clojure-commons.file-utils :as file]))
(defn indexable?
[index-base collection]
(let [home (file/path-join index-base "home")
trash (file/path-join index-base "trash")
home-trash (file/path-join trash "home")]
(and (not= index-base collection)
(not= home collection)
(not= trash collection)
(not= home-trash collection)
(not= home (file/dirname collection))
(not= home-trash (file/dirname collection))))) | null | https://raw.githubusercontent.com/cyverse-archive/DiscoveryEnvironmentBackend/7f6177078c1a1cb6d11e62f12cfe2e22d669635b/services/Infosquito/src/infosquito/index.clj | clojure | (ns infosquito.index
(:require [clojure-commons.file-utils :as file]))
(defn indexable?
[index-base collection]
(let [home (file/path-join index-base "home")
trash (file/path-join index-base "trash")
home-trash (file/path-join trash "home")]
(and (not= index-base collection)
(not= home collection)
(not= trash collection)
(not= home-trash collection)
(not= home (file/dirname collection))
(not= home-trash (file/dirname collection))))) | |
b26b077fa75090a1b92049408c232c9dd5f23dff79765c4df5d9baa02c17d5cc | 2600hz-archive/whistle | t_evtsub.erl | -module(t_evtsub).
-include("crossbar.hrl").
%% Test the full API using ibrowse to make calls to the rest endpoint
%% -export([start_full_test/0]).
%% start_full_test() ->
%% ibrowse:start(),
%% logger:start(),
%% UrlBase = ":8000/v1/accounts",
Headers = [ { " X - Auth - Token " , " - test " }
, { " Content - Type " , " application / json " }
%% ,{"Accept", "application/json"}
%% ],
%% EmptyEvtSubResp = [{[<<"data">>, <<"streams">>], []}
%% ,{[<<"data">>, <<"events">>], ?EMPTY_JSON_OBJECT}
%% ],
MaxEvents = 5 ,
PutJSON = get_put_json(<<"directory.authn_req " > > , MaxEvents ) ,
%% DeleteJSON = get_delete_json(false),
logger : format_log(info , " GET ~s ~ n " , [ UrlBase ] ) ,
{ ok , " 200 " , _ , JSON } = ibrowse : send_req(UrlBase , Headers , get ) ,
%% AcctJObj = mochijson2:decode(JSON),
AcctId = wh_json : get_value([<<"data " > > , 1 , < < " i d " > > ] , AcctJObj ) ,
%% UrlEvtBase = lists:flatten([UrlBase, "/", wh_util:to_list(AcctId), "/evtsub/"]),
%% try
logger : format_log(info , " DELETE ~s ~s ~ n " , [ UrlEvtBase , DeleteJSON ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , delete , DeleteJSON ) , " 200 " , EmptyEvtSubResp ) ,
logger : format_log(info , " GET ~s ~ n " , [ UrlEvtBase ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , get ) , " 200 " , EmptyEvtSubResp ) ,
logger : format_log(info , " PUT ~s ~s ~ n " , [ UrlEvtBase , PutJSON ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , put , PutJSON ) , " 200 " , [ { [ < < " data " > > , < < " streams " > > ] , [ < < " directory.authn_req " > > ] } ] ) ,
logger : format_log(info , " DELETE ~s ~s ~ n " , [ UrlEvtBase , DeleteJSON ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , delete , DeleteJSON ) , " 200 " , EmptyEvtSubResp ) ,
logger : format_log(info , " PUT ~s ~s ~ n " , [ UrlEvtBase , PutJSON ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , put , PutJSON ) , " 200 " , [ { [ < < " data " > > , < < " streams " > > ] , [ < < " directory.authn_req " > > ] } ] ) ,
PublishNTimes = 25 , % divisible by MaxEvents please
lists : foreach(fun ( _ ) - > publish_authn_req ( ) end , lists : seq(1 , PublishNTimes ) ) ,
%% CmpFun = fun(V) ->
%% logger:format_log(info, "Len == ~p~n", [length(V)]),
length(V ) = : = MaxEvents
%% end,
%% lists:foreach(fun(_) ->
logger : format_log(info , " GET ~s ~ n " , [ UrlEvtBase ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , get ) , " 200 " , [ { [ < < " data " > > , < < " events " > > , < < " directory.authn_req " > > ]
,
%% }])
end , lists : seq(1 , PublishNTimes div MaxEvents ) ) ,
logger : format_log(info , " DELETE ~s ~s ~ n " , [ UrlEvtBase , DeleteJSON ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , delete , DeleteJSON ) , " 200 " , EmptyEvtSubResp ) ,
%% logger:format_log(info, "Testing evtsub successful~n", [])
%% catch
%% E:R ->
logger : format_log(info , " DELETE ~s ~s ~ n " , [ UrlEvtBase , DeleteJSON ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , delete , DeleteJSON ) , " 200 " , EmptyEvtSubResp ) ,
logger : format_log(error , " Error ~p:~p ~ n ~ p ~ n " , [ E , R , erlang : get_stacktrace ( ) ] )
%% end.
verify_resp({_,Code,_,JSON } , Code , Rules ) - >
%% logger:format_log(info, "JSON: ~s~n", [JSON]),
%% JObj = mochijson2:decode(JSON),
%% lists:all(
%% fun({KeyPath, Fun}) when is_function(Fun) ->
%% V = wh_json:get_value(KeyPath, JObj),
%% Fun(V);
( { KeyPath , Result } ) - >
%% V = wh_json:get_value(KeyPath, JObj),
logger : format_log(info , " ~p : Is ~p = = ~p ~ n " , [ KeyPath , V , Result ] ) ,
%% V == Result
%% end, Rules);
%% verify_resp(_, _, _) -> false.
get_put_json(Stream , ) - >
%% mochijson2:encode({struct, [{<<"data">>, {struct, [{<<"stream">>, Stream}
, { < < " max_events " > > , }
%% ]
%% }
%% }
%% ]
%% }).
%% get_delete_json(Flush) ->
%% mochijson2:encode({struct, [{<<"data">>, {struct, [{<<"flush">>, Flush}]}}]}).
publish_authn_req ( ) - >
%% JSON = [123,[34,<<"Auth-Domain">>,34],58,[34,<<"auth_realm">>,34],44,[34,<<"Auth-User">>,34],58,[34,<<"auth_user">>,34],44,[34,<<"Orig-IP">>,34],58
%% ,[34,<<"1.2.3.4">>,34],44,[34,<<"From">>,34],58,[34,<<"">>,34],44,[34,<<"To">>,34],58,[34,<<"">>,34]
, 44,[34,<<"Msg - ID">>,34],58,[34,<<"id">>,34],44,[34,<<"App - Version">>,34],58,[34,<<"vsn">>,34],44,[34,<<"App - Name">>,34],58,[34,<<"app">>,34 ]
%% ,44,[34,<<"Event-Name">>,34],58,[34,<<"authn_req">>,34],44,[34,<<"Event-Category">>,34],58,[34,<<"directory">>,34],44,[34,<<"Server-ID">>,34]
%% ,58,[34,<<"srv">>,34],125],
%% amqp_util:callmgr_publish(whapps_controller:get_amqp_host(), JSON, <<"application/json">>, <<"auth.req">>).
| null | https://raw.githubusercontent.com/2600hz-archive/whistle/1a256604f0d037fac409ad5a55b6b17e545dcbf9/whistle_apps/apps/crossbar/src/t/t_evtsub.erl | erlang | Test the full API using ibrowse to make calls to the rest endpoint
-export([start_full_test/0]).
start_full_test() ->
ibrowse:start(),
logger:start(),
UrlBase = ":8000/v1/accounts",
,{"Accept", "application/json"}
],
EmptyEvtSubResp = [{[<<"data">>, <<"streams">>], []}
,{[<<"data">>, <<"events">>], ?EMPTY_JSON_OBJECT}
],
DeleteJSON = get_delete_json(false),
AcctJObj = mochijson2:decode(JSON),
UrlEvtBase = lists:flatten([UrlBase, "/", wh_util:to_list(AcctId), "/evtsub/"]),
try
divisible by MaxEvents please
CmpFun = fun(V) ->
logger:format_log(info, "Len == ~p~n", [length(V)]),
end,
lists:foreach(fun(_) ->
}])
logger:format_log(info, "Testing evtsub successful~n", [])
catch
E:R ->
end.
logger:format_log(info, "JSON: ~s~n", [JSON]),
JObj = mochijson2:decode(JSON),
lists:all(
fun({KeyPath, Fun}) when is_function(Fun) ->
V = wh_json:get_value(KeyPath, JObj),
Fun(V);
V = wh_json:get_value(KeyPath, JObj),
V == Result
end, Rules);
verify_resp(_, _, _) -> false.
mochijson2:encode({struct, [{<<"data">>, {struct, [{<<"stream">>, Stream}
]
}
}
]
}).
get_delete_json(Flush) ->
mochijson2:encode({struct, [{<<"data">>, {struct, [{<<"flush">>, Flush}]}}]}).
JSON = [123,[34,<<"Auth-Domain">>,34],58,[34,<<"auth_realm">>,34],44,[34,<<"Auth-User">>,34],58,[34,<<"auth_user">>,34],44,[34,<<"Orig-IP">>,34],58
,[34,<<"1.2.3.4">>,34],44,[34,<<"From">>,34],58,[34,<<"">>,34],44,[34,<<"To">>,34],58,[34,<<"">>,34]
,44,[34,<<"Event-Name">>,34],58,[34,<<"authn_req">>,34],44,[34,<<"Event-Category">>,34],58,[34,<<"directory">>,34],44,[34,<<"Server-ID">>,34]
,58,[34,<<"srv">>,34],125],
amqp_util:callmgr_publish(whapps_controller:get_amqp_host(), JSON, <<"application/json">>, <<"auth.req">>). | -module(t_evtsub).
-include("crossbar.hrl").
Headers = [ { " X - Auth - Token " , " - test " }
, { " Content - Type " , " application / json " }
MaxEvents = 5 ,
PutJSON = get_put_json(<<"directory.authn_req " > > , MaxEvents ) ,
logger : format_log(info , " GET ~s ~ n " , [ UrlBase ] ) ,
{ ok , " 200 " , _ , JSON } = ibrowse : send_req(UrlBase , Headers , get ) ,
AcctId = wh_json : get_value([<<"data " > > , 1 , < < " i d " > > ] , AcctJObj ) ,
logger : format_log(info , " DELETE ~s ~s ~ n " , [ UrlEvtBase , DeleteJSON ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , delete , DeleteJSON ) , " 200 " , EmptyEvtSubResp ) ,
logger : format_log(info , " GET ~s ~ n " , [ UrlEvtBase ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , get ) , " 200 " , EmptyEvtSubResp ) ,
logger : format_log(info , " PUT ~s ~s ~ n " , [ UrlEvtBase , PutJSON ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , put , PutJSON ) , " 200 " , [ { [ < < " data " > > , < < " streams " > > ] , [ < < " directory.authn_req " > > ] } ] ) ,
logger : format_log(info , " DELETE ~s ~s ~ n " , [ UrlEvtBase , DeleteJSON ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , delete , DeleteJSON ) , " 200 " , EmptyEvtSubResp ) ,
logger : format_log(info , " PUT ~s ~s ~ n " , [ UrlEvtBase , PutJSON ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , put , PutJSON ) , " 200 " , [ { [ < < " data " > > , < < " streams " > > ] , [ < < " directory.authn_req " > > ] } ] ) ,
lists : foreach(fun ( _ ) - > publish_authn_req ( ) end , lists : seq(1 , PublishNTimes ) ) ,
length(V ) = : = MaxEvents
logger : format_log(info , " GET ~s ~ n " , [ UrlEvtBase ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , get ) , " 200 " , [ { [ < < " data " > > , < < " events " > > , < < " directory.authn_req " > > ]
,
end , lists : seq(1 , PublishNTimes div MaxEvents ) ) ,
logger : format_log(info , " DELETE ~s ~s ~ n " , [ UrlEvtBase , DeleteJSON ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , delete , DeleteJSON ) , " 200 " , EmptyEvtSubResp ) ,
logger : format_log(info , " DELETE ~s ~s ~ n " , [ UrlEvtBase , DeleteJSON ] ) ,
true = verify_resp(ibrowse : send_req(UrlEvtBase , Headers , delete , DeleteJSON ) , " 200 " , EmptyEvtSubResp ) ,
logger : format_log(error , " Error ~p:~p ~ n ~ p ~ n " , [ E , R , erlang : get_stacktrace ( ) ] )
verify_resp({_,Code,_,JSON } , Code , Rules ) - >
( { KeyPath , Result } ) - >
logger : format_log(info , " ~p : Is ~p = = ~p ~ n " , [ KeyPath , V , Result ] ) ,
get_put_json(Stream , ) - >
, { < < " max_events " > > , }
publish_authn_req ( ) - >
, 44,[34,<<"Msg - ID">>,34],58,[34,<<"id">>,34],44,[34,<<"App - Version">>,34],58,[34,<<"vsn">>,34],44,[34,<<"App - Name">>,34],58,[34,<<"app">>,34 ]
|
b88da25289649ca3da3a1b3090ecc999a8b9bc1be1a7334725b3fe396e23d4a3 | OlivierSohn/hamazed | Spec.hs | import Test.Imj.Jitter
main :: IO ()
main = do
testThreadDelay
| null | https://raw.githubusercontent.com/OlivierSohn/hamazed/6c2b20d839ede7b8651fb7b425cb27ea93808a4a/imj-game-synths/test/Spec.hs | haskell | import Test.Imj.Jitter
main :: IO ()
main = do
testThreadDelay
| |
1fa75e0c25b07bd32fe1390e9b513c403fda29ced67255dafc3c4bcda5434338 | johnswanson/tictag | beeminder.clj | (ns tictag.beeminder
(:require [org.httpkit.client :as http]
[cheshire.core :as cheshire]
[taoensso.timbre :as timbre]
[clojure.string :as str]
[clojure.data :refer [diff]]
[clj-time.core :as t]
[clj-time.periodic :as p]
[tictag.db :as db]
[tictag.beeminder-matching :refer [match?]]
[tictag.utils :as utils]))
(timbre/refer-timbre)
(defn goal-url [user goal]
(format "" user goal))
(defn datapoints-url [user goal]
(format "" user goal))
(defn datapoints [auth-token user goal]
(:datapoints
(cheshire/parse-string
(:body
@(http/request {:url (goal-url user goal)
:method :get
:query-params {:auth_token auth-token
:datapoints true}}))
true)))
(defn update-datapoint! [auth-token user goal datapoint]
(http/request {:url (format ""
user goal (:id datapoint))
:method :put
:query-params {:auth_token auth-token
:value (:value datapoint)}}))
(defn create-datapoint! [auth-token user goal datapoint]
(http/request {:url (format ""
user goal)
:method :post
:query-params {:auth_token auth-token
:value (:value datapoint)
:daystamp (:daystamp datapoint)}}))
(defn save-datapoint! [auth-token user goal datapoint]
(if (:id datapoint)
(update-datapoint! auth-token user goal datapoint)
(create-datapoint! auth-token user goal datapoint)))
(defn delete-datapoint! [auth-token user goal datapoint]
(when (:id datapoint)
(http/request {:url (format ""
user goal (:id datapoint))
:method :delete
:query-params {:auth_token auth-token}})))
(defn past-week-days []
(set (map db/local-day (take 7 (p/periodic-seq (t/now) (t/days -1))))))
(defn days-matching-tag [tags rows]
(->> rows
(filter #(match? tags (:tags %)))
(map :local-day)
(frequencies)))
(defn sync! [{:keys [db tagtime]} user]
(debug [:beeminder-sync
{:user (:id user)
:enabled? (:enabled? (:beeminder user))}])
(when (:enabled? (:beeminder user))
(when-let [goals (seq (db/get-goals* db (:beeminder user)))]
(trace [:beeminder-sync
{:goals goals}])
(let [in-past-week? (past-week-days)
rows (filter #(in-past-week? (:local-day %))
(db/get-pings-by-user (:db db) user))]
(doseq [{:keys [goal/goal goal/tags]} goals]
(trace [:beeminder-sync
{:name goal
:tags tags}])
(let [{:keys [username token]} (:beeminder user)
days (days-matching-tag tags rows)
existing-datapoints (filter
#(in-past-week? (:daystamp %))
(datapoints
token
username
goal))
existing-map (group-by :daystamp existing-datapoints)
to-save (filter :value
(for [[daystamp value] days
:let [hours (* (/ (:gap tagtime) 60 60) value)
{id :id old-value :value}
(first
(existing-map daystamp))]]
{:id id
:daystamp daystamp
:value (when (or (not old-value)
(not= (float old-value) (float hours)))
(float hours))}))
to-delete (concat
(remove (fn [{:keys [daystamp]}]
(days daystamp))
existing-datapoints)
(flatten
(remove nil?
(map rest (vals existing-map)))))
save-futures (doall (map #(save-datapoint! token username goal %) to-save))
delete-futures (doall (map #(delete-datapoint! token username goal %) to-delete))]
(doseq [resp (concat save-futures delete-futures)]
(if-not (utils/success? @resp)
(throw (ex-info "Failue to update beeminder" @resp))
(timbre/trace (get-in @resp [:opts :method]) (:status @resp))))))))))
(defn user-for [token]
(let [resp (-> (http/request {:url ""
:method :get
:query-params {:auth_token token}})
(deref))]
(if (= (:status resp) 200)
(cheshire/parse-string (:body resp) true)
nil)))
| null | https://raw.githubusercontent.com/johnswanson/tictag/89140b5084817690ec417b07b7d095ba7677f4e0/src/clj/tictag/beeminder.clj | clojure | (ns tictag.beeminder
(:require [org.httpkit.client :as http]
[cheshire.core :as cheshire]
[taoensso.timbre :as timbre]
[clojure.string :as str]
[clojure.data :refer [diff]]
[clj-time.core :as t]
[clj-time.periodic :as p]
[tictag.db :as db]
[tictag.beeminder-matching :refer [match?]]
[tictag.utils :as utils]))
(timbre/refer-timbre)
(defn goal-url [user goal]
(format "" user goal))
(defn datapoints-url [user goal]
(format "" user goal))
(defn datapoints [auth-token user goal]
(:datapoints
(cheshire/parse-string
(:body
@(http/request {:url (goal-url user goal)
:method :get
:query-params {:auth_token auth-token
:datapoints true}}))
true)))
(defn update-datapoint! [auth-token user goal datapoint]
(http/request {:url (format ""
user goal (:id datapoint))
:method :put
:query-params {:auth_token auth-token
:value (:value datapoint)}}))
(defn create-datapoint! [auth-token user goal datapoint]
(http/request {:url (format ""
user goal)
:method :post
:query-params {:auth_token auth-token
:value (:value datapoint)
:daystamp (:daystamp datapoint)}}))
(defn save-datapoint! [auth-token user goal datapoint]
(if (:id datapoint)
(update-datapoint! auth-token user goal datapoint)
(create-datapoint! auth-token user goal datapoint)))
(defn delete-datapoint! [auth-token user goal datapoint]
(when (:id datapoint)
(http/request {:url (format ""
user goal (:id datapoint))
:method :delete
:query-params {:auth_token auth-token}})))
(defn past-week-days []
(set (map db/local-day (take 7 (p/periodic-seq (t/now) (t/days -1))))))
(defn days-matching-tag [tags rows]
(->> rows
(filter #(match? tags (:tags %)))
(map :local-day)
(frequencies)))
(defn sync! [{:keys [db tagtime]} user]
(debug [:beeminder-sync
{:user (:id user)
:enabled? (:enabled? (:beeminder user))}])
(when (:enabled? (:beeminder user))
(when-let [goals (seq (db/get-goals* db (:beeminder user)))]
(trace [:beeminder-sync
{:goals goals}])
(let [in-past-week? (past-week-days)
rows (filter #(in-past-week? (:local-day %))
(db/get-pings-by-user (:db db) user))]
(doseq [{:keys [goal/goal goal/tags]} goals]
(trace [:beeminder-sync
{:name goal
:tags tags}])
(let [{:keys [username token]} (:beeminder user)
days (days-matching-tag tags rows)
existing-datapoints (filter
#(in-past-week? (:daystamp %))
(datapoints
token
username
goal))
existing-map (group-by :daystamp existing-datapoints)
to-save (filter :value
(for [[daystamp value] days
:let [hours (* (/ (:gap tagtime) 60 60) value)
{id :id old-value :value}
(first
(existing-map daystamp))]]
{:id id
:daystamp daystamp
:value (when (or (not old-value)
(not= (float old-value) (float hours)))
(float hours))}))
to-delete (concat
(remove (fn [{:keys [daystamp]}]
(days daystamp))
existing-datapoints)
(flatten
(remove nil?
(map rest (vals existing-map)))))
save-futures (doall (map #(save-datapoint! token username goal %) to-save))
delete-futures (doall (map #(delete-datapoint! token username goal %) to-delete))]
(doseq [resp (concat save-futures delete-futures)]
(if-not (utils/success? @resp)
(throw (ex-info "Failue to update beeminder" @resp))
(timbre/trace (get-in @resp [:opts :method]) (:status @resp))))))))))
(defn user-for [token]
(let [resp (-> (http/request {:url ""
:method :get
:query-params {:auth_token token}})
(deref))]
(if (= (:status resp) 200)
(cheshire/parse-string (:body resp) true)
nil)))
| |
6e48f60b6271fd4bb86010262f714db2a74de9cbea77acd336ec2d0c188cc92f | erlang/rebar3 | ec_rbdict.erl | %%% vi:ts=4 sw=4 et
Copyright ( c ) 2008 . All rights reserved .
%%%
%%% Redistribution and use in source and binary forms, with or without
%%% modification, are permitted provided that the following conditions
%%% are met:
%%%
1 . Redistributions of source code must retain the above copyright
%%% notice, this list of conditions and the following disclaimer.
2 . Redistributions in binary form must reproduce the above copyright
%%% notice, this list of conditions and the following disclaimer in the
%%% documentation and/or other materials provided with the distribution.
%%%
%%% THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
%%% LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
%%% FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
COPYRIGHT HOLDERS OR FOR ANY DIRECT , INDIRECT ,
INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING ,
BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
%%% LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
%%% ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
%%% POSSIBILITY OF SUCH DAMAGE.
%%%-------------------------------------------------------------------
2008
%%%
%%% @doc
%%%
Rbdict implements a Key - Value dictionary . An rbdict is a
%%% representation of a dictionary, where a red-black tree is used to
%%% store the keys and values.
%%%
%%% This module implents exactly the same interface as the module
ec_dictionary but with a defined representation . One difference is
that while dict considers two keys as different if they do not
match (= : =) , this module considers two keys as different if and
%%% only if they do not compare equal (==).
%%%
The algorithms here are taken directly from Okasaki and
in ML / Scheme . The interface is compatible with the standard dict
%%% interface.
%%%
%%% The following structures are used to build the the RB-dict:
%%%
{ r , Left , Key , , Right }
{ b , Left , Key , , Right }
%%% empty
%%%
It is interesting to note that expanding out the first argument of
l / rbalance , the colour , in store etc . is actually slower than not
%%% doing it. Measured.
%%%
%%% see ec_dictionary
%%% @end
%%%-------------------------------------------------------------------
-module(ec_rbdict).
-behaviour(ec_dictionary).
%% Standard interface.
-export([add/3, from_list/1, get/2, get/3, has_key/2,
has_value/2, new/0, remove/2, size/1, to_list/1,
keys/1]).
-export_type([dictionary/2]).
%%%===================================================================
%%% Types
%%%===================================================================
%% This should be opaque, but that kills dialyzer so for now we export it
%% however you should not rely on the internal representation here
-type dictionary(K, V) :: empty | {color(),
dictionary(K, V),
ec_dictionary:key(K),
ec_dictionary:value(V),
dictionary(K, V)}.
-type color() :: r | b.
%%%===================================================================
%%% API
%%%===================================================================
-spec new() -> dictionary(_K, _V).
new() -> empty.
-spec has_key(ec_dictionary:key(K), dictionary(K, _V)) -> boolean().
has_key(_, empty) ->
false;
has_key(K, {_, Left, K1, _, _}) when K < K1 ->
has_key(K, Left);
has_key(K, {_, _, K1, _, Right}) when K > K1 ->
has_key(K, Right);
has_key(_, {_, _, _, _, _}) ->
true.
-spec get(ec_dictionary:key(K), dictionary(K, V)) -> ec_dictionary:value(V).
get(_, empty) ->
throw(not_found);
get(K, {_, Left, K1, _, _}) when K < K1 ->
get(K, Left);
get(K, {_, _, K1, _, Right}) when K > K1 ->
get(K, Right);
get(_, {_, _, _, Val, _}) ->
Val.
-spec get(ec_dictionary:key(K),
ec_dictionary:value(V),
dictionary(K, V)) -> ec_dictionary:value(V).
get(_, Default, empty) ->
Default;
get(K, Default, {_, Left, K1, _, _}) when K < K1 ->
get(K, Default, Left);
get(K, Default, {_, _, K1, _, Right}) when K > K1 ->
get(K, Default, Right);
get(_, _, {_, _, _, Val, _}) ->
Val.
-spec add(ec_dictionary:key(K), ec_dictionary:value(V),
dictionary(K, V)) -> dictionary(K, V).
add(Key, Value, Dict) ->
{_, L, K1, V1, R} = add1(Key, Value, Dict),
{b, L, K1, V1, R}.
-spec remove(ec_dictionary:key(K), dictionary(K, V)) -> dictionary(K, V).
remove(Key, Dictionary) ->
{Dict1, _} = erase_aux(Key, Dictionary), Dict1.
-spec has_value(ec_dictionary:value(V), dictionary(_K, V)) -> boolean().
has_value(Value, Dict) ->
fold(fun (_, NValue, _) when NValue == Value -> true;
(_, _, Acc) -> Acc
end,
false, Dict).
-spec size(dictionary(_K, _V)) -> non_neg_integer().
size(T) ->
size1(T).
-spec to_list(dictionary(K, V)) ->
[{ec_dictionary:key(K), ec_dictionary:value(V)}].
to_list(T) ->
to_list(T, []).
-spec from_list([{ec_dictionary:key(K), ec_dictionary:value(V)}]) ->
dictionary(K, V).
from_list(L) ->
lists:foldl(fun ({K, V}, D) ->
add(K, V, D)
end, new(),
L).
-spec keys(dictionary(K, _V)) -> [ec_dictionary:key(K)].
keys(Dict) ->
keys(Dict, []).
%%%===================================================================
Enternal functions
%%%===================================================================
-spec keys(dictionary(K, _V), [ec_dictionary:key(K)]) ->
[ec_dictionary:key(K)].
keys(empty, Tail) ->
Tail;
keys({_, L, K, _, R}, Tail) ->
keys(L, [K | keys(R, Tail)]).
-spec erase_aux(ec_dictionary:key(K), dictionary(K, V)) ->
{dictionary(K, V), boolean()}.
erase_aux(_, empty) ->
{empty, false};
erase_aux(K, {b, A, Xk, Xv, B}) ->
if K < Xk ->
{A1, Dec} = erase_aux(K, A),
if Dec ->
unbalright(b, A1, Xk, Xv, B);
true ->
{{b, A1, Xk, Xv, B}, false}
end;
K > Xk ->
{B1, Dec} = erase_aux(K, B),
if Dec ->
unballeft(b, A, Xk, Xv, B1);
true ->
{{b, A, Xk, Xv, B1}, false}
end;
true ->
case B of
empty ->
blackify(A);
_ ->
{B1, {Mk, Mv}, Dec} = erase_min(B),
if Dec ->
unballeft(b, A, Mk, Mv, B1);
true ->
{{b, A, Mk, Mv, B1}, false}
end
end
end;
erase_aux(K, {r, A, Xk, Xv, B}) ->
if K < Xk ->
{A1, Dec} = erase_aux(K, A),
if Dec ->
unbalright(r, A1, Xk, Xv, B);
true ->
{{r, A1, Xk, Xv, B}, false}
end;
K > Xk ->
{B1, Dec} = erase_aux(K, B),
if Dec ->
unballeft(r, A, Xk, Xv, B1);
true ->
{{r, A, Xk, Xv, B1}, false}
end;
true ->
case B of
empty ->
{A, false};
_ ->
{B1, {Mk, Mv}, Dec} = erase_min(B),
if Dec ->
unballeft(r, A, Mk, Mv, B1);
true ->
{{r, A, Mk, Mv, B1}, false}
end
end
end.
-spec erase_min(dictionary(K, V)) ->
{dictionary(K, V), {ec_dictionary:key(K), ec_dictionary:value(V)}, boolean()}.
erase_min({b, empty, Xk, Xv, empty}) ->
{empty, {Xk, Xv}, true};
erase_min({b, empty, Xk, Xv, {r, A, Yk, Yv, B}}) ->
{{b, A, Yk, Yv, B}, {Xk, Xv}, false};
erase_min({b, empty, _, _, {b, _, _, _, _}}) ->
exit(boom);
erase_min({r, empty, Xk, Xv, A}) ->
{A, {Xk, Xv}, false};
erase_min({b, A, Xk, Xv, B}) ->
{A1, Min, Dec} = erase_min(A),
if Dec ->
{T, Dec1} = unbalright(b, A1, Xk, Xv, B),
{T, Min, Dec1};
true -> {{b, A1, Xk, Xv, B}, Min, false}
end;
erase_min({r, A, Xk, Xv, B}) ->
{A1, Min, Dec} = erase_min(A),
if Dec ->
{T, Dec1} = unbalright(r, A1, Xk, Xv, B),
{T, Min, Dec1};
true -> {{r, A1, Xk, Xv, B}, Min, false}
end.
blackify({r, A, K, V, B}) -> {{b, A, K, V, B}, false};
blackify(Node) -> {Node, true}.
unballeft(r, {b, A, Xk, Xv, B}, Yk, Yv, C) ->
{lbalance(b, {r, A, Xk, Xv, B}, Yk, Yv, C), false};
unballeft(b, {b, A, Xk, Xv, B}, Yk, Yv, C) ->
{lbalance(b, {r, A, Xk, Xv, B}, Yk, Yv, C), true};
unballeft(b, {r, A, Xk, Xv, {b, B, Yk, Yv, C}}, Zk, Zv,
D) ->
{{b, A, Xk, Xv,
lbalance(b, {r, B, Yk, Yv, C}, Zk, Zv, D)},
false}.
unbalright(r, A, Xk, Xv, {b, B, Yk, Yv, C}) ->
{rbalance(b, A, Xk, Xv, {r, B, Yk, Yv, C}), false};
unbalright(b, A, Xk, Xv, {b, B, Yk, Yv, C}) ->
{rbalance(b, A, Xk, Xv, {r, B, Yk, Yv, C}), true};
unbalright(b, A, Xk, Xv,
{r, {b, B, Yk, Yv, C}, Zk, Zv, D}) ->
{{b, rbalance(b, A, Xk, Xv, {r, B, Yk, Yv, C}), Zk, Zv,
D},
false}.
-spec fold(fun((ec_dictionary:key(K), ec_dictionary:value(V), any()) -> any()),
any(), dictionary(K, V)) -> any().
fold(_, Acc, empty) -> Acc;
fold(F, Acc, {_, A, Xk, Xv, B}) ->
fold(F, F(Xk, Xv, fold(F, Acc, B)), A).
add1(K, V, empty) -> {r, empty, K, V, empty};
add1(K, V, {C, Left, K1, V1, Right}) when K < K1 ->
lbalance(C, add1(K, V, Left), K1, V1, Right);
add1(K, V, {C, Left, K1, V1, Right}) when K > K1 ->
rbalance(C, Left, K1, V1, add1(K, V, Right));
add1(K, V, {C, L, _, _, R}) -> {C, L, K, V, R}.
size1(empty) -> 0;
size1({_, L, _, _, R}) -> size1(L) + size1(R) + 1.
to_list(empty, List) -> List;
to_list({_, A, Xk, Xv, B}, List) ->
to_list(A, [{Xk, Xv} | to_list(B, List)]).
%% Balance a tree afer (possibly) adding a node to the left/right.
-spec lbalance(color(), dictionary(K, V),
ec_dictionary:key(K), ec_dictionary:value(V),
dictionary(K, V)) ->
dictionary(K, V).
lbalance(b, {r, {r, A, Xk, Xv, B}, Yk, Yv, C}, Zk, Zv,
D) ->
{r, {b, A, Xk, Xv, B}, Yk, Yv, {b, C, Zk, Zv, D}};
lbalance(b, {r, A, Xk, Xv, {r, B, Yk, Yv, C}}, Zk, Zv,
D) ->
{r, {b, A, Xk, Xv, B}, Yk, Yv, {b, C, Zk, Zv, D}};
lbalance(C, A, Xk, Xv, B) -> {C, A, Xk, Xv, B}.
-spec rbalance(color(), dictionary(K, V),
ec_dictionary:key(K), ec_dictionary:value(V),
dictionary(K, V)) ->
dictionary(K, V).
rbalance(b, A, Xk, Xv,
{r, {r, B, Yk, Yv, C}, Zk, Zv, D}) ->
{r, {b, A, Xk, Xv, B}, Yk, Yv, {b, C, Zk, Zv, D}};
rbalance(b, A, Xk, Xv,
{r, B, Yk, Yv, {r, C, Zk, Zv, D}}) ->
{r, {b, A, Xk, Xv, B}, Yk, Yv, {b, C, Zk, Zv, D}};
rbalance(C, A, Xk, Xv, B) -> {C, A, Xk, Xv, B}.
| null | https://raw.githubusercontent.com/erlang/rebar3/048412ed4593e19097f4fa91747593aac6706afb/vendor/erlware_commons/src/ec_rbdict.erl | erlang | vi:ts=4 sw=4 et
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS
FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE
LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN
ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
POSSIBILITY OF SUCH DAMAGE.
-------------------------------------------------------------------
@doc
representation of a dictionary, where a red-black tree is used to
store the keys and values.
This module implents exactly the same interface as the module
only if they do not compare equal (==).
interface.
The following structures are used to build the the RB-dict:
empty
doing it. Measured.
see ec_dictionary
@end
-------------------------------------------------------------------
Standard interface.
===================================================================
Types
===================================================================
This should be opaque, but that kills dialyzer so for now we export it
however you should not rely on the internal representation here
===================================================================
API
===================================================================
===================================================================
===================================================================
Balance a tree afer (possibly) adding a node to the left/right. | Copyright ( c ) 2008 . All rights reserved .
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
COPYRIGHT HOLDERS OR FOR ANY DIRECT , INDIRECT ,
INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING ,
BUT NOT LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ;
LOSS OF USE , DATA , OR PROFITS ; OR BUSINESS INTERRUPTION ) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT
2008
Rbdict implements a Key - Value dictionary . An rbdict is a
ec_dictionary but with a defined representation . One difference is
that while dict considers two keys as different if they do not
match (= : =) , this module considers two keys as different if and
The algorithms here are taken directly from Okasaki and
in ML / Scheme . The interface is compatible with the standard dict
{ r , Left , Key , , Right }
{ b , Left , Key , , Right }
It is interesting to note that expanding out the first argument of
l / rbalance , the colour , in store etc . is actually slower than not
-module(ec_rbdict).
-behaviour(ec_dictionary).
-export([add/3, from_list/1, get/2, get/3, has_key/2,
has_value/2, new/0, remove/2, size/1, to_list/1,
keys/1]).
-export_type([dictionary/2]).
-type dictionary(K, V) :: empty | {color(),
dictionary(K, V),
ec_dictionary:key(K),
ec_dictionary:value(V),
dictionary(K, V)}.
-type color() :: r | b.
-spec new() -> dictionary(_K, _V).
new() -> empty.
-spec has_key(ec_dictionary:key(K), dictionary(K, _V)) -> boolean().
has_key(_, empty) ->
false;
has_key(K, {_, Left, K1, _, _}) when K < K1 ->
has_key(K, Left);
has_key(K, {_, _, K1, _, Right}) when K > K1 ->
has_key(K, Right);
has_key(_, {_, _, _, _, _}) ->
true.
-spec get(ec_dictionary:key(K), dictionary(K, V)) -> ec_dictionary:value(V).
get(_, empty) ->
throw(not_found);
get(K, {_, Left, K1, _, _}) when K < K1 ->
get(K, Left);
get(K, {_, _, K1, _, Right}) when K > K1 ->
get(K, Right);
get(_, {_, _, _, Val, _}) ->
Val.
-spec get(ec_dictionary:key(K),
ec_dictionary:value(V),
dictionary(K, V)) -> ec_dictionary:value(V).
get(_, Default, empty) ->
Default;
get(K, Default, {_, Left, K1, _, _}) when K < K1 ->
get(K, Default, Left);
get(K, Default, {_, _, K1, _, Right}) when K > K1 ->
get(K, Default, Right);
get(_, _, {_, _, _, Val, _}) ->
Val.
-spec add(ec_dictionary:key(K), ec_dictionary:value(V),
dictionary(K, V)) -> dictionary(K, V).
add(Key, Value, Dict) ->
{_, L, K1, V1, R} = add1(Key, Value, Dict),
{b, L, K1, V1, R}.
-spec remove(ec_dictionary:key(K), dictionary(K, V)) -> dictionary(K, V).
remove(Key, Dictionary) ->
{Dict1, _} = erase_aux(Key, Dictionary), Dict1.
-spec has_value(ec_dictionary:value(V), dictionary(_K, V)) -> boolean().
has_value(Value, Dict) ->
fold(fun (_, NValue, _) when NValue == Value -> true;
(_, _, Acc) -> Acc
end,
false, Dict).
-spec size(dictionary(_K, _V)) -> non_neg_integer().
size(T) ->
size1(T).
-spec to_list(dictionary(K, V)) ->
[{ec_dictionary:key(K), ec_dictionary:value(V)}].
to_list(T) ->
to_list(T, []).
-spec from_list([{ec_dictionary:key(K), ec_dictionary:value(V)}]) ->
dictionary(K, V).
from_list(L) ->
lists:foldl(fun ({K, V}, D) ->
add(K, V, D)
end, new(),
L).
-spec keys(dictionary(K, _V)) -> [ec_dictionary:key(K)].
keys(Dict) ->
keys(Dict, []).
Enternal functions
-spec keys(dictionary(K, _V), [ec_dictionary:key(K)]) ->
[ec_dictionary:key(K)].
keys(empty, Tail) ->
Tail;
keys({_, L, K, _, R}, Tail) ->
keys(L, [K | keys(R, Tail)]).
-spec erase_aux(ec_dictionary:key(K), dictionary(K, V)) ->
{dictionary(K, V), boolean()}.
erase_aux(_, empty) ->
{empty, false};
erase_aux(K, {b, A, Xk, Xv, B}) ->
if K < Xk ->
{A1, Dec} = erase_aux(K, A),
if Dec ->
unbalright(b, A1, Xk, Xv, B);
true ->
{{b, A1, Xk, Xv, B}, false}
end;
K > Xk ->
{B1, Dec} = erase_aux(K, B),
if Dec ->
unballeft(b, A, Xk, Xv, B1);
true ->
{{b, A, Xk, Xv, B1}, false}
end;
true ->
case B of
empty ->
blackify(A);
_ ->
{B1, {Mk, Mv}, Dec} = erase_min(B),
if Dec ->
unballeft(b, A, Mk, Mv, B1);
true ->
{{b, A, Mk, Mv, B1}, false}
end
end
end;
erase_aux(K, {r, A, Xk, Xv, B}) ->
if K < Xk ->
{A1, Dec} = erase_aux(K, A),
if Dec ->
unbalright(r, A1, Xk, Xv, B);
true ->
{{r, A1, Xk, Xv, B}, false}
end;
K > Xk ->
{B1, Dec} = erase_aux(K, B),
if Dec ->
unballeft(r, A, Xk, Xv, B1);
true ->
{{r, A, Xk, Xv, B1}, false}
end;
true ->
case B of
empty ->
{A, false};
_ ->
{B1, {Mk, Mv}, Dec} = erase_min(B),
if Dec ->
unballeft(r, A, Mk, Mv, B1);
true ->
{{r, A, Mk, Mv, B1}, false}
end
end
end.
-spec erase_min(dictionary(K, V)) ->
{dictionary(K, V), {ec_dictionary:key(K), ec_dictionary:value(V)}, boolean()}.
erase_min({b, empty, Xk, Xv, empty}) ->
{empty, {Xk, Xv}, true};
erase_min({b, empty, Xk, Xv, {r, A, Yk, Yv, B}}) ->
{{b, A, Yk, Yv, B}, {Xk, Xv}, false};
erase_min({b, empty, _, _, {b, _, _, _, _}}) ->
exit(boom);
erase_min({r, empty, Xk, Xv, A}) ->
{A, {Xk, Xv}, false};
erase_min({b, A, Xk, Xv, B}) ->
{A1, Min, Dec} = erase_min(A),
if Dec ->
{T, Dec1} = unbalright(b, A1, Xk, Xv, B),
{T, Min, Dec1};
true -> {{b, A1, Xk, Xv, B}, Min, false}
end;
erase_min({r, A, Xk, Xv, B}) ->
{A1, Min, Dec} = erase_min(A),
if Dec ->
{T, Dec1} = unbalright(r, A1, Xk, Xv, B),
{T, Min, Dec1};
true -> {{r, A1, Xk, Xv, B}, Min, false}
end.
blackify({r, A, K, V, B}) -> {{b, A, K, V, B}, false};
blackify(Node) -> {Node, true}.
unballeft(r, {b, A, Xk, Xv, B}, Yk, Yv, C) ->
{lbalance(b, {r, A, Xk, Xv, B}, Yk, Yv, C), false};
unballeft(b, {b, A, Xk, Xv, B}, Yk, Yv, C) ->
{lbalance(b, {r, A, Xk, Xv, B}, Yk, Yv, C), true};
unballeft(b, {r, A, Xk, Xv, {b, B, Yk, Yv, C}}, Zk, Zv,
D) ->
{{b, A, Xk, Xv,
lbalance(b, {r, B, Yk, Yv, C}, Zk, Zv, D)},
false}.
unbalright(r, A, Xk, Xv, {b, B, Yk, Yv, C}) ->
{rbalance(b, A, Xk, Xv, {r, B, Yk, Yv, C}), false};
unbalright(b, A, Xk, Xv, {b, B, Yk, Yv, C}) ->
{rbalance(b, A, Xk, Xv, {r, B, Yk, Yv, C}), true};
unbalright(b, A, Xk, Xv,
{r, {b, B, Yk, Yv, C}, Zk, Zv, D}) ->
{{b, rbalance(b, A, Xk, Xv, {r, B, Yk, Yv, C}), Zk, Zv,
D},
false}.
-spec fold(fun((ec_dictionary:key(K), ec_dictionary:value(V), any()) -> any()),
any(), dictionary(K, V)) -> any().
fold(_, Acc, empty) -> Acc;
fold(F, Acc, {_, A, Xk, Xv, B}) ->
fold(F, F(Xk, Xv, fold(F, Acc, B)), A).
add1(K, V, empty) -> {r, empty, K, V, empty};
add1(K, V, {C, Left, K1, V1, Right}) when K < K1 ->
lbalance(C, add1(K, V, Left), K1, V1, Right);
add1(K, V, {C, Left, K1, V1, Right}) when K > K1 ->
rbalance(C, Left, K1, V1, add1(K, V, Right));
add1(K, V, {C, L, _, _, R}) -> {C, L, K, V, R}.
size1(empty) -> 0;
size1({_, L, _, _, R}) -> size1(L) + size1(R) + 1.
to_list(empty, List) -> List;
to_list({_, A, Xk, Xv, B}, List) ->
to_list(A, [{Xk, Xv} | to_list(B, List)]).
-spec lbalance(color(), dictionary(K, V),
ec_dictionary:key(K), ec_dictionary:value(V),
dictionary(K, V)) ->
dictionary(K, V).
lbalance(b, {r, {r, A, Xk, Xv, B}, Yk, Yv, C}, Zk, Zv,
D) ->
{r, {b, A, Xk, Xv, B}, Yk, Yv, {b, C, Zk, Zv, D}};
lbalance(b, {r, A, Xk, Xv, {r, B, Yk, Yv, C}}, Zk, Zv,
D) ->
{r, {b, A, Xk, Xv, B}, Yk, Yv, {b, C, Zk, Zv, D}};
lbalance(C, A, Xk, Xv, B) -> {C, A, Xk, Xv, B}.
-spec rbalance(color(), dictionary(K, V),
ec_dictionary:key(K), ec_dictionary:value(V),
dictionary(K, V)) ->
dictionary(K, V).
rbalance(b, A, Xk, Xv,
{r, {r, B, Yk, Yv, C}, Zk, Zv, D}) ->
{r, {b, A, Xk, Xv, B}, Yk, Yv, {b, C, Zk, Zv, D}};
rbalance(b, A, Xk, Xv,
{r, B, Yk, Yv, {r, C, Zk, Zv, D}}) ->
{r, {b, A, Xk, Xv, B}, Yk, Yv, {b, C, Zk, Zv, D}};
rbalance(C, A, Xk, Xv, B) -> {C, A, Xk, Xv, B}.
|
690204d8669097500ee013d4bfe915f0d61c09f089fb214dcc553b3b3047e66b | heshrobe/joshua-dist | documentation-commands.lisp | -*- Mode : Lisp ; Syntax : ANSI - Common - Lisp ; Package : CLIM - ENV ; Base : 10 ; Lowercase : Yes -*-
Copyright ( c ) 1994 - 2000 , .
Copyright ( c ) 2001 - 2003 , and .
;;; All rights reserved. No warranty is expressed or implied.
;;; See COPYRIGHT for full copyright and terms of use.
(in-package :clim-env)
;;; Documentation commands
;;--- Help {Commands, Keyboard}
--- Commands - > lists all the commands in the current comtab
;;--- Keyboard -> basic input editor help, including advice to try control-Help
;;--- Show Documentation
;;--- Show Overview
;;--- Show Table Of Contents
| null | https://raw.githubusercontent.com/heshrobe/joshua-dist/f59f06303f9fabef3e945a920cf9a26d9c2fd55e/clim-env/portable-lisp-environment/documentation-commands.lisp | lisp | Syntax : ANSI - Common - Lisp ; Package : CLIM - ENV ; Base : 10 ; Lowercase : Yes -*-
All rights reserved. No warranty is expressed or implied.
See COPYRIGHT for full copyright and terms of use.
Documentation commands
--- Help {Commands, Keyboard}
--- Keyboard -> basic input editor help, including advice to try control-Help
--- Show Documentation
--- Show Overview
--- Show Table Of Contents |
Copyright ( c ) 1994 - 2000 , .
Copyright ( c ) 2001 - 2003 , and .
(in-package :clim-env)
--- Commands - > lists all the commands in the current comtab
|
b5e06259c0e08da02476339a9cc525f9833bc416ac6d3fd0c18facb70a8d36bf | informatimago/lisp | c11-yacc.lisp | (DEFINE-PARSER *C11-PARSER*
(:START-SYMBOL |translation_unit|)
(:TERMINALS (
|identifier| |typedef_name| |func_name| |string_literal|
|i_constant| |f_constant| |enum_name|
|alignas| |alignof| |atomic| |generic| |noreturn| |static_assert|
|thread_local| |case| |default| |if| |else| |switch| |while| |do|
|for| |goto| |continue| |break| |return| |struct| |union| |enum|
|...| |complex| |imaginary| |bool| |char| |short| |int| |long|
|signed| |unsigned| |float| |double| |void| |const| |restrict|
|volatile| |typedef| |extern| |static| |auto| |register| |inline|
|sizeof|
^= \|= -= <<= >>= &= && |\|\|| *= /= %= += -> ++ -- << >>
<= >= == !=))
;; renaming terminals:
(IDENTIFIER |identifier|)
(TYPEDEF_NAME |typedef_name|)
(FUNC_NAME |func_name|)
(STRING_LITERAL |string_literal|)
(I_CONSTANT |i_constant|)
(F_CONSTANT |f_constant|)
(|constant| I_CONSTANT F_CONSTANT ) ;ENUMERATION_CONSTANT
(|string| STRING_LITERAL FUNC_NAME)
(ALIGNAS |alignas|)
(ALIGNOF |alignof|)
(ATOMIC |atomic|)
(GENERIC |generic|)
(NORETURN |noreturn|)
(STATIC_ASSERT |static_assert|)
(THREAD_LOCAL |thread_local|)
(CASE |case|)
(DEFAULT |default|)
(IF |if|)
(ELSE |else|)
(SWITCH |switch|)
(WHILE |while|)
(DO |do|)
(FOR |for|)
(GOTO |goto|)
(CONTINUE |continue|)
(BREAK |break|)
(RETURN |return|)
(STRUCT |struct|)
(UNION |union|)
(ENUM |enum|)
(ELLIPSIS |...|)
(COMPLEX |complex|)
(IMAGINARY |imaginary|)
(BOOL |bool|)
(CHAR |char|)
(SHORT |short|)
(INT |int|)
(LONG |long|)
(SIGNED |signed|)
(UNSIGNED |unsigned|)
(FLOAT |float|)
(DOUBLE |double|)
(VOID |void|)
(CONST |const|)
(RESTRICT |restrict|)
(VOLATILE |volatile|)
(TYPEDEF |typedef|)
(EXTERN |extern|)
(STATIC |static|)
(AUTO |auto|)
(REGISTER |register|)
(INLINE |inline|)
(SIZEOF |sizeof|)
(XOR_ASSIGN |^=|)
(OR_ASSIGN \|=)
(SUB_ASSIGN |-=|)
(LEFT_ASSIGN |<<=|)
(RIGHT_ASSIGN |>>=|)
(AND_ASSIGN |&=|)
(AND_OP |&&|)
(OR_OP \|\|)
(MUL_ASSIGN |*=|)
(DIV_ASSIGN |/=|)
(MOD_ASSIGN |%=|)
(ADD_ASSIGN |+=|)
(PTR_OP |->|)
(INC_OP |++|)
(DEC_OP |--|)
(LEFT_OP |<<|)
(RIGHT_OP |>>|)
(LE_OP |<=|)
(GE_OP |>=|)
(EQ_OP |==|)
(NE_OP |!=|)
;; productions:
(|primary_expression|
IDENTIFIER
|constant|
|string|
(\( |expression| \))
|generic_selection|)
(|generic_selection|
(GENERIC \( |assignment_expression| \, |generic_assoc_list| \)))
(|generic_assoc_list|
|generic_association|
(|generic_assoc_list| \, |generic_association|))
(|generic_association|
(|type_name| \: |assignment_expression|)
(DEFAULT \: |assignment_expression|))
(|postfix_expression|
|primary_expression|
(|postfix_expression| [ |expression| ])
(|postfix_expression| \( \))
(|postfix_expression| \( |argument_expression_list| \))
(|postfix_expression| |.| IDENTIFIER)
(|postfix_expression| PTR_OP IDENTIFIER)
(|postfix_expression| INC_OP)
(|postfix_expression| DEC_OP)
(\( |type_name| \) { |initializer_list| })
(\( |type_name| \) { |initializer_list| \, }))
(|argument_expression_list|
|assignment_expression|
(|argument_expression_list| \, |assignment_expression|))
(|unary_expression|
|postfix_expression|
(INC_OP |unary_expression|)
(DEC_OP |unary_expression|)
(|unary_operator| |cast_expression|)
(SIZEOF |unary_expression|)
(SIZEOF \( |type_name| \))
(ALIGNOF \( |type_name| \)))
(|unary_operator|
&
*
+
-
~
!)
(|cast_expression|
|unary_expression|
(\( |type_name| \) |cast_expression|))
(|multiplicative_expression|
|cast_expression|
(|multiplicative_expression| * |cast_expression|)
(|multiplicative_expression| / |cast_expression|)
(|multiplicative_expression| % |cast_expression|))
(|additive_expression|
|multiplicative_expression|
(|additive_expression| + |multiplicative_expression|)
(|additive_expression| - |multiplicative_expression|))
(|shift_expression|
|additive_expression|
(|shift_expression| LEFT_OP |additive_expression|)
(|shift_expression| RIGHT_OP |additive_expression|))
(|relational_expression|
|shift_expression|
(|relational_expression| < |shift_expression|)
(|relational_expression| > |shift_expression|)
(|relational_expression| LE_OP |shift_expression|)
(|relational_expression| GE_OP |shift_expression|))
(|equality_expression|
|relational_expression|
(|equality_expression| EQ_OP |relational_expression|)
(|equality_expression| NE_OP |relational_expression|))
(|and_expression|
|equality_expression|
(|and_expression| & |equality_expression|))
(|exclusive_or_expression|
|and_expression|
(|exclusive_or_expression| ^ |and_expression|))
(|inclusive_or_expression|
|exclusive_or_expression|
(|inclusive_or_expression| \| |exclusive_or_expression|))
(|logical_and_expression|
|inclusive_or_expression|
(|logical_and_expression| AND_OP |inclusive_or_expression|))
(|logical_or_expression|
|logical_and_expression|
(|logical_or_expression| OR_OP |logical_and_expression|))
(|conditional_expression|
|logical_or_expression|
(|logical_or_expression| ? |expression| \: |conditional_expression|))
(|assignment_expression|
|conditional_expression|
(|unary_expression| |assignment_operator| |assignment_expression|))
(|assignment_operator|
=
MUL_ASSIGN
DIV_ASSIGN
MOD_ASSIGN
ADD_ASSIGN
SUB_ASSIGN
LEFT_ASSIGN
RIGHT_ASSIGN
AND_ASSIGN
XOR_ASSIGN
OR_ASSIGN)
(|expression|
|assignment_expression|
(|expression| \, |assignment_expression|))
(|constant_expression|
|conditional_expression|)
(|declaration|
(|declaration_specifiers| \;)
(|declaration_specifiers| |init_declarator_list| \;)
|static_assert_declaration|)
(|declaration_specifiers|
(|storage_class_specifier| |declaration_specifiers|)
|storage_class_specifier|
(|type_specifier| |declaration_specifiers|)
|type_specifier|
(|type_qualifier| |declaration_specifiers|)
|type_qualifier|
(|function_specifier| |declaration_specifiers|)
|function_specifier|
(|alignment_specifier| |declaration_specifiers|)
|alignment_specifier|)
(|init_declarator_list|
|init_declarator|
(|init_declarator_list| \, |init_declarator|))
(|init_declarator|
(|declarator| = |initializer|)
|declarator|)
(|storage_class_specifier|
TYPEDEF
EXTERN
STATIC
THREAD_LOCAL
AUTO
REGISTER)
(|type_specifier|
VOID
CHAR
SHORT
INT
LONG
FLOAT
DOUBLE
SIGNED
UNSIGNED
BOOL
COMPLEX
IMAGINARY
|atomic_type_specifier|
|struct_or_union_specifier|
|enum_specifier|
TYPEDEF_NAME)
(|struct_or_union_specifier|
(|struct_or_union| { |struct_declaration_list| })
(|struct_or_union| IDENTIFIER { |struct_declaration_list| })
(|struct_or_union| IDENTIFIER))
(|struct_or_union|
STRUCT
UNION)
(|struct_declaration_list|
|struct_declaration|
(|struct_declaration_list| |struct_declaration|))
(|struct_declaration|
(|specifier_qualifier_list| \;)
(|specifier_qualifier_list| |struct_declarator_list| \;)
|static_assert_declaration|)
(|specifier_qualifier_list|
(|type_specifier| |specifier_qualifier_list|)
|type_specifier|
(|type_qualifier| |specifier_qualifier_list|)
|type_qualifier|)
(|struct_declarator_list|
|struct_declarator|
(|struct_declarator_list| \, |struct_declarator|))
(|struct_declarator|
(\: |constant_expression|)
(|declarator| \: |constant_expression|)
|declarator|)
(|enum_specifier|
(ENUM { |enumerator_list| })
(ENUM { |enumerator_list| \, })
(ENUM IDENTIFIER { |enumerator_list| })
(ENUM IDENTIFIER { |enumerator_list| \, })
(ENUM IDENTIFIER))
(|enumerator_list|
|enumerator|
(|enumerator_list| \, |enumerator|))
(|enumeration_constant|
IDENTIFIER)
(|enumerator|
(|enumeration_constant| = |constant_expression|)
|enumeration_constant|)
(|declarator|
(|pointer| |direct_declarator|)
|direct_declarator|)
(|direct_declarator|
IDENTIFIER
(\( |declarator| \))
(|direct_declarator| [ ])
(|direct_declarator| [ * ])
(|direct_declarator| [ STATIC |type_qualifier_list| |assignment_expression| ])
(|direct_declarator| [ STATIC |assignment_expression| ])
(|direct_declarator| [ |type_qualifier_list| * ])
(|direct_declarator| [ |type_qualifier_list| STATIC |assignment_expression| ])
(|direct_declarator| [ |type_qualifier_list| |assignment_expression| ])
(|direct_declarator| [ |type_qualifier_list| ])
(|direct_declarator| [ |assignment_expression| ])
(|direct_declarator| \( |parameter_type_list| \))
(|direct_declarator| \( \))
(|direct_declarator| \( |identifier_list| \)))
(|pointer|
(* |type_qualifier_list| |pointer|)
(* |type_qualifier_list|)
(* |pointer|)
*)
(|type_qualifier_list|
|type_qualifier|
(|type_qualifier_list| |type_qualifier|))
(|parameter_type_list|
(|parameter_list| \, ELLIPSIS)
|parameter_list|)
(|parameter_list|
|parameter_declaration|
(|parameter_list| \, |parameter_declaration|))
(|parameter_declaration|
(|declaration_specifiers| |declarator|)
(|declaration_specifiers| |abstract_declarator|)
|declaration_specifiers|)
(|identifier_list|
IDENTIFIER
(|identifier_list| \, IDENTIFIER))
(|type_name|
(|specifier_qualifier_list| |abstract_declarator|)
|specifier_qualifier_list|)
(|abstract_declarator|
(|pointer| |direct_abstract_declarator|)
|pointer|
|direct_abstract_declarator|)
(|direct_abstract_declarator|
(\( |abstract_declarator| \))
([ ])
([ * ])
([ STATIC |type_qualifier_list| |assignment_expression| ])
([ STATIC |assignment_expression| ])
([ |type_qualifier_list| STATIC |assignment_expression| ])
([ |type_qualifier_list| |assignment_expression| ])
([ |type_qualifier_list| ])
([ |assignment_expression| ])
(|direct_abstract_declarator| [ ])
(|direct_abstract_declarator| [ * ])
(|direct_abstract_declarator| [ STATIC |type_qualifier_list| |assignment_expression| ])
(|direct_abstract_declarator| [ STATIC |assignment_expression| ])
(|direct_abstract_declarator| [ |type_qualifier_list| |assignment_expression| ])
(|direct_abstract_declarator| [ |type_qualifier_list| STATIC |assignment_expression| ])
(|direct_abstract_declarator| [ |type_qualifier_list| ])
(|direct_abstract_declarator| [ |assignment_expression| ])
(\( \))
(\( |parameter_type_list| \))
(|direct_abstract_declarator| \( \))
(|direct_abstract_declarator| \( |parameter_type_list| \)))
(|initializer|
({ |initializer_list| })
({ |initializer_list| \, })
|assignment_expression|)
(|initializer_list|
(|designation| |initializer|)
|initializer|
(|initializer_list| \, |designation| |initializer|)
(|initializer_list| \, |initializer|))
(|designation|
(|designator_list| =))
(|designator_list|
|designator|
(|designator_list| |designator|))
(|designator|
([ |constant_expression| ])
(|.| IDENTIFIER))
(|static_assert_declaration|
(STATIC_ASSERT \( |constant_expression| \, STRING_LITERAL \) \;))
(|statement|
|labeled_statement|
|compound_statement|
|expression_statement|
|selection_statement|
|iteration_statement|
|jump_statement|)
(|labeled_statement|
(IDENTIFIER \: |statement|)
(CASE |constant_expression| \: |statement|)
(DEFAULT \: |statement|))
(|compound_statement|
({ })
({ |block_item_list| }))
(|block_item_list|
|block_item|
(|block_item_list| |block_item|))
(|block_item|
|declaration|
|statement|)
(|expression_statement|
\;
(|expression| \;))
(|selection_statement|
(IF \( |expression| \) |statement| ELSE |statement|)
(IF \( |expression| \) |statement|)
(SWITCH \( |expression| \) |statement|))
(|iteration_statement|
(WHILE \( |expression| \) |statement|)
(DO |statement| WHILE \( |expression| \) \;)
(FOR \( |expression_statement| |expression_statement| \) |statement|)
(FOR \( |expression_statement| |expression_statement| |expression| \) |statement|)
(FOR \( |declaration| |expression_statement| \) |statement|)
(FOR \( |declaration| |expression_statement| |expression| \) |statement|))
(|jump_statement|
(GOTO IDENTIFIER \;)
(CONTINUE \;)
(BREAK \;)
(RETURN \;)
(RETURN |expression| \;))
(|translation_unit|
|external_declaration|
(|translation_unit| |external_declaration|))
(|external_declaration|
|function_definition|
|declaration|)
(|function_definition|
(|declaration_specifiers| |declarator| |declaration_list| |compound_statement|)
(|declaration_specifiers| |declarator| |compound_statement|))
(|declaration_list|
|declaration|
(|declaration_list| |declaration|))
)
#-(and)
(let ((*PRINT-PRETTY* nil)
(*PRINT-LEVEL* nil)
(*PRINT-LENGTH* nil)
(*PRINT-CIRCLE* nil)
(*PRINT-CASE* :upcase)
(*PRINT-READABLY*)
(*PRINT-GENSYM* T)
(*PRINT-BASE* 10 )
(*PRINT-RADIX* nil)
(*PRINT-ARRAY* T)
(*PRINT-LINES* nil)
(*PRINT-ESCAPE* T)
(*PRINT-RIGHT-MARGIN* 110))
(pprint
(mapcar (lambda (prod)
`(--> ,(first prod)
,(case (length (rest prod))
((0) '(seq))
((1) (if (listp (second prod))
`(seq ,@(second prod))
(second prod)))
(otherwise
`(alt ,@(mapcar (lambda (rhs)
(if (listp rhs)
`(seq ,@rhs)
rhs))
(rest prod)))))))
'())))
| null | https://raw.githubusercontent.com/informatimago/lisp/571af24c06ba466e01b4c9483f8bb7690bc46d03/languages/c11/c11-yacc.lisp | lisp | renaming terminals:
ENUMERATION_CONSTANT
productions:
)
)
)
)
))
))
)
)
)
)
)
)) | (DEFINE-PARSER *C11-PARSER*
(:START-SYMBOL |translation_unit|)
(:TERMINALS (
|identifier| |typedef_name| |func_name| |string_literal|
|i_constant| |f_constant| |enum_name|
|alignas| |alignof| |atomic| |generic| |noreturn| |static_assert|
|thread_local| |case| |default| |if| |else| |switch| |while| |do|
|for| |goto| |continue| |break| |return| |struct| |union| |enum|
|...| |complex| |imaginary| |bool| |char| |short| |int| |long|
|signed| |unsigned| |float| |double| |void| |const| |restrict|
|volatile| |typedef| |extern| |static| |auto| |register| |inline|
|sizeof|
^= \|= -= <<= >>= &= && |\|\|| *= /= %= += -> ++ -- << >>
<= >= == !=))
(IDENTIFIER |identifier|)
(TYPEDEF_NAME |typedef_name|)
(FUNC_NAME |func_name|)
(STRING_LITERAL |string_literal|)
(I_CONSTANT |i_constant|)
(F_CONSTANT |f_constant|)
(|string| STRING_LITERAL FUNC_NAME)
(ALIGNAS |alignas|)
(ALIGNOF |alignof|)
(ATOMIC |atomic|)
(GENERIC |generic|)
(NORETURN |noreturn|)
(STATIC_ASSERT |static_assert|)
(THREAD_LOCAL |thread_local|)
(CASE |case|)
(DEFAULT |default|)
(IF |if|)
(ELSE |else|)
(SWITCH |switch|)
(WHILE |while|)
(DO |do|)
(FOR |for|)
(GOTO |goto|)
(CONTINUE |continue|)
(BREAK |break|)
(RETURN |return|)
(STRUCT |struct|)
(UNION |union|)
(ENUM |enum|)
(ELLIPSIS |...|)
(COMPLEX |complex|)
(IMAGINARY |imaginary|)
(BOOL |bool|)
(CHAR |char|)
(SHORT |short|)
(INT |int|)
(LONG |long|)
(SIGNED |signed|)
(UNSIGNED |unsigned|)
(FLOAT |float|)
(DOUBLE |double|)
(VOID |void|)
(CONST |const|)
(RESTRICT |restrict|)
(VOLATILE |volatile|)
(TYPEDEF |typedef|)
(EXTERN |extern|)
(STATIC |static|)
(AUTO |auto|)
(REGISTER |register|)
(INLINE |inline|)
(SIZEOF |sizeof|)
(XOR_ASSIGN |^=|)
(OR_ASSIGN \|=)
(SUB_ASSIGN |-=|)
(LEFT_ASSIGN |<<=|)
(RIGHT_ASSIGN |>>=|)
(AND_ASSIGN |&=|)
(AND_OP |&&|)
(OR_OP \|\|)
(MUL_ASSIGN |*=|)
(DIV_ASSIGN |/=|)
(MOD_ASSIGN |%=|)
(ADD_ASSIGN |+=|)
(PTR_OP |->|)
(INC_OP |++|)
(DEC_OP |--|)
(LEFT_OP |<<|)
(RIGHT_OP |>>|)
(LE_OP |<=|)
(GE_OP |>=|)
(EQ_OP |==|)
(NE_OP |!=|)
(|primary_expression|
IDENTIFIER
|constant|
|string|
(\( |expression| \))
|generic_selection|)
(|generic_selection|
(GENERIC \( |assignment_expression| \, |generic_assoc_list| \)))
(|generic_assoc_list|
|generic_association|
(|generic_assoc_list| \, |generic_association|))
(|generic_association|
(|type_name| \: |assignment_expression|)
(DEFAULT \: |assignment_expression|))
(|postfix_expression|
|primary_expression|
(|postfix_expression| [ |expression| ])
(|postfix_expression| \( \))
(|postfix_expression| \( |argument_expression_list| \))
(|postfix_expression| |.| IDENTIFIER)
(|postfix_expression| PTR_OP IDENTIFIER)
(|postfix_expression| INC_OP)
(|postfix_expression| DEC_OP)
(\( |type_name| \) { |initializer_list| })
(\( |type_name| \) { |initializer_list| \, }))
(|argument_expression_list|
|assignment_expression|
(|argument_expression_list| \, |assignment_expression|))
(|unary_expression|
|postfix_expression|
(INC_OP |unary_expression|)
(DEC_OP |unary_expression|)
(|unary_operator| |cast_expression|)
(SIZEOF |unary_expression|)
(SIZEOF \( |type_name| \))
(ALIGNOF \( |type_name| \)))
(|unary_operator|
&
*
+
-
~
!)
(|cast_expression|
|unary_expression|
(\( |type_name| \) |cast_expression|))
(|multiplicative_expression|
|cast_expression|
(|multiplicative_expression| * |cast_expression|)
(|multiplicative_expression| / |cast_expression|)
(|multiplicative_expression| % |cast_expression|))
(|additive_expression|
|multiplicative_expression|
(|additive_expression| + |multiplicative_expression|)
(|additive_expression| - |multiplicative_expression|))
(|shift_expression|
|additive_expression|
(|shift_expression| LEFT_OP |additive_expression|)
(|shift_expression| RIGHT_OP |additive_expression|))
(|relational_expression|
|shift_expression|
(|relational_expression| < |shift_expression|)
(|relational_expression| > |shift_expression|)
(|relational_expression| LE_OP |shift_expression|)
(|relational_expression| GE_OP |shift_expression|))
(|equality_expression|
|relational_expression|
(|equality_expression| EQ_OP |relational_expression|)
(|equality_expression| NE_OP |relational_expression|))
(|and_expression|
|equality_expression|
(|and_expression| & |equality_expression|))
(|exclusive_or_expression|
|and_expression|
(|exclusive_or_expression| ^ |and_expression|))
(|inclusive_or_expression|
|exclusive_or_expression|
(|inclusive_or_expression| \| |exclusive_or_expression|))
(|logical_and_expression|
|inclusive_or_expression|
(|logical_and_expression| AND_OP |inclusive_or_expression|))
(|logical_or_expression|
|logical_and_expression|
(|logical_or_expression| OR_OP |logical_and_expression|))
(|conditional_expression|
|logical_or_expression|
(|logical_or_expression| ? |expression| \: |conditional_expression|))
(|assignment_expression|
|conditional_expression|
(|unary_expression| |assignment_operator| |assignment_expression|))
(|assignment_operator|
=
MUL_ASSIGN
DIV_ASSIGN
MOD_ASSIGN
ADD_ASSIGN
SUB_ASSIGN
LEFT_ASSIGN
RIGHT_ASSIGN
AND_ASSIGN
XOR_ASSIGN
OR_ASSIGN)
(|expression|
|assignment_expression|
(|expression| \, |assignment_expression|))
(|constant_expression|
|conditional_expression|)
(|declaration|
|static_assert_declaration|)
(|declaration_specifiers|
(|storage_class_specifier| |declaration_specifiers|)
|storage_class_specifier|
(|type_specifier| |declaration_specifiers|)
|type_specifier|
(|type_qualifier| |declaration_specifiers|)
|type_qualifier|
(|function_specifier| |declaration_specifiers|)
|function_specifier|
(|alignment_specifier| |declaration_specifiers|)
|alignment_specifier|)
(|init_declarator_list|
|init_declarator|
(|init_declarator_list| \, |init_declarator|))
(|init_declarator|
(|declarator| = |initializer|)
|declarator|)
(|storage_class_specifier|
TYPEDEF
EXTERN
STATIC
THREAD_LOCAL
AUTO
REGISTER)
(|type_specifier|
VOID
CHAR
SHORT
INT
LONG
FLOAT
DOUBLE
SIGNED
UNSIGNED
BOOL
COMPLEX
IMAGINARY
|atomic_type_specifier|
|struct_or_union_specifier|
|enum_specifier|
TYPEDEF_NAME)
(|struct_or_union_specifier|
(|struct_or_union| { |struct_declaration_list| })
(|struct_or_union| IDENTIFIER { |struct_declaration_list| })
(|struct_or_union| IDENTIFIER))
(|struct_or_union|
STRUCT
UNION)
(|struct_declaration_list|
|struct_declaration|
(|struct_declaration_list| |struct_declaration|))
(|struct_declaration|
|static_assert_declaration|)
(|specifier_qualifier_list|
(|type_specifier| |specifier_qualifier_list|)
|type_specifier|
(|type_qualifier| |specifier_qualifier_list|)
|type_qualifier|)
(|struct_declarator_list|
|struct_declarator|
(|struct_declarator_list| \, |struct_declarator|))
(|struct_declarator|
(\: |constant_expression|)
(|declarator| \: |constant_expression|)
|declarator|)
(|enum_specifier|
(ENUM { |enumerator_list| })
(ENUM { |enumerator_list| \, })
(ENUM IDENTIFIER { |enumerator_list| })
(ENUM IDENTIFIER { |enumerator_list| \, })
(ENUM IDENTIFIER))
(|enumerator_list|
|enumerator|
(|enumerator_list| \, |enumerator|))
(|enumeration_constant|
IDENTIFIER)
(|enumerator|
(|enumeration_constant| = |constant_expression|)
|enumeration_constant|)
(|declarator|
(|pointer| |direct_declarator|)
|direct_declarator|)
(|direct_declarator|
IDENTIFIER
(\( |declarator| \))
(|direct_declarator| [ ])
(|direct_declarator| [ * ])
(|direct_declarator| [ STATIC |type_qualifier_list| |assignment_expression| ])
(|direct_declarator| [ STATIC |assignment_expression| ])
(|direct_declarator| [ |type_qualifier_list| * ])
(|direct_declarator| [ |type_qualifier_list| STATIC |assignment_expression| ])
(|direct_declarator| [ |type_qualifier_list| |assignment_expression| ])
(|direct_declarator| [ |type_qualifier_list| ])
(|direct_declarator| [ |assignment_expression| ])
(|direct_declarator| \( |parameter_type_list| \))
(|direct_declarator| \( \))
(|direct_declarator| \( |identifier_list| \)))
(|pointer|
(* |type_qualifier_list| |pointer|)
(* |type_qualifier_list|)
(* |pointer|)
*)
(|type_qualifier_list|
|type_qualifier|
(|type_qualifier_list| |type_qualifier|))
(|parameter_type_list|
(|parameter_list| \, ELLIPSIS)
|parameter_list|)
(|parameter_list|
|parameter_declaration|
(|parameter_list| \, |parameter_declaration|))
(|parameter_declaration|
(|declaration_specifiers| |declarator|)
(|declaration_specifiers| |abstract_declarator|)
|declaration_specifiers|)
(|identifier_list|
IDENTIFIER
(|identifier_list| \, IDENTIFIER))
(|type_name|
(|specifier_qualifier_list| |abstract_declarator|)
|specifier_qualifier_list|)
(|abstract_declarator|
(|pointer| |direct_abstract_declarator|)
|pointer|
|direct_abstract_declarator|)
(|direct_abstract_declarator|
(\( |abstract_declarator| \))
([ ])
([ * ])
([ STATIC |type_qualifier_list| |assignment_expression| ])
([ STATIC |assignment_expression| ])
([ |type_qualifier_list| STATIC |assignment_expression| ])
([ |type_qualifier_list| |assignment_expression| ])
([ |type_qualifier_list| ])
([ |assignment_expression| ])
(|direct_abstract_declarator| [ ])
(|direct_abstract_declarator| [ * ])
(|direct_abstract_declarator| [ STATIC |type_qualifier_list| |assignment_expression| ])
(|direct_abstract_declarator| [ STATIC |assignment_expression| ])
(|direct_abstract_declarator| [ |type_qualifier_list| |assignment_expression| ])
(|direct_abstract_declarator| [ |type_qualifier_list| STATIC |assignment_expression| ])
(|direct_abstract_declarator| [ |type_qualifier_list| ])
(|direct_abstract_declarator| [ |assignment_expression| ])
(\( \))
(\( |parameter_type_list| \))
(|direct_abstract_declarator| \( \))
(|direct_abstract_declarator| \( |parameter_type_list| \)))
(|initializer|
({ |initializer_list| })
({ |initializer_list| \, })
|assignment_expression|)
(|initializer_list|
(|designation| |initializer|)
|initializer|
(|initializer_list| \, |designation| |initializer|)
(|initializer_list| \, |initializer|))
(|designation|
(|designator_list| =))
(|designator_list|
|designator|
(|designator_list| |designator|))
(|designator|
([ |constant_expression| ])
(|.| IDENTIFIER))
(|static_assert_declaration|
(|statement|
|labeled_statement|
|compound_statement|
|expression_statement|
|selection_statement|
|iteration_statement|
|jump_statement|)
(|labeled_statement|
(IDENTIFIER \: |statement|)
(CASE |constant_expression| \: |statement|)
(DEFAULT \: |statement|))
(|compound_statement|
({ })
({ |block_item_list| }))
(|block_item_list|
|block_item|
(|block_item_list| |block_item|))
(|block_item|
|declaration|
|statement|)
(|expression_statement|
(|selection_statement|
(IF \( |expression| \) |statement| ELSE |statement|)
(IF \( |expression| \) |statement|)
(SWITCH \( |expression| \) |statement|))
(|iteration_statement|
(WHILE \( |expression| \) |statement|)
(FOR \( |expression_statement| |expression_statement| \) |statement|)
(FOR \( |expression_statement| |expression_statement| |expression| \) |statement|)
(FOR \( |declaration| |expression_statement| \) |statement|)
(FOR \( |declaration| |expression_statement| |expression| \) |statement|))
(|jump_statement|
(|translation_unit|
|external_declaration|
(|translation_unit| |external_declaration|))
(|external_declaration|
|function_definition|
|declaration|)
(|function_definition|
(|declaration_specifiers| |declarator| |declaration_list| |compound_statement|)
(|declaration_specifiers| |declarator| |compound_statement|))
(|declaration_list|
|declaration|
(|declaration_list| |declaration|))
)
#-(and)
(let ((*PRINT-PRETTY* nil)
(*PRINT-LEVEL* nil)
(*PRINT-LENGTH* nil)
(*PRINT-CIRCLE* nil)
(*PRINT-CASE* :upcase)
(*PRINT-READABLY*)
(*PRINT-GENSYM* T)
(*PRINT-BASE* 10 )
(*PRINT-RADIX* nil)
(*PRINT-ARRAY* T)
(*PRINT-LINES* nil)
(*PRINT-ESCAPE* T)
(*PRINT-RIGHT-MARGIN* 110))
(pprint
(mapcar (lambda (prod)
`(--> ,(first prod)
,(case (length (rest prod))
((0) '(seq))
((1) (if (listp (second prod))
`(seq ,@(second prod))
(second prod)))
(otherwise
`(alt ,@(mapcar (lambda (rhs)
(if (listp rhs)
`(seq ,@rhs)
rhs))
(rest prod)))))))
'())))
|
f968b46b921e4e0296cc9c21d04fcf0b4feae5d8590308616b37b4aec1483fb4 | NetComposer/nksip | basic_test_server.erl |
%% -------------------------------------------------------------------
%%
%% basic_test: Basic Test Suite
%%
%% Copyright (c) 2013. All Rights Reserved.
%%
%% This file is provided to you under the Apache License,
%% Version 2.0 (the "License"); you may not use this file
%% except in compliance with the License. You may obtain
%% a copy of the License at
%%
%%   http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing,
%% software distributed under the License is distributed on an
%% "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
-module(basic_test_server).
-export([sip_route/5, config/1]).
-export([srv_init/2, srv_handle_call/4, srv_handle_cast/3, srv_handle_info/3]).
-include_lib("nkserver/include/nkserver_module.hrl").
%% @doc Build the service configuration for the basic test suite:
%% a fixed From header, SIP/TCP plus SIPS listeners, and the registrar
%% and trace plugins enabled.
%% NOTE(review): `?MODULE' appears inside a string literal below, where
%% macros are NOT expanded; the literal text "sip:?MODULE@nksip" is what
%% is actually used -- confirm this is intended.
config(Opts) ->
    Opts#{
        sip_from => "\"NkSIP Basic SUITE Test Server\" <sip:?MODULE@nksip>",
        sip_listen => "sip;tcp_listeners=10, sips:all:5061",
        plugins => [nksip_registrar, nksip_trace]
        %sip_trace => true,
        %sip_debug=>[nkpacket, call, packet]
    }.
%% @doc Routing callback for incoming requests.
%% Requests addressed to one of the configured `domains' are either
%% answered locally (empty user part: behaviour selected by the
%% "x-nk-op" test header) or, for the <<"nksip">> domain, proxied to
%% the registrar bindings found for the user. Anything else is proxied
%% to the request URI unchanged.
sip_route(Scheme, User, Domain, Req, _Call) ->
    Domains = nkserver:get(?MODULE, domains),
    Opts = [
        record_route,
        {insert, "x-nk-server", ?MODULE}
    ],
    case lists:member(Domain, Domains) of
        true when User =:= <<>> ->
            %% No user part: the request targets the server itself.
            %% The "x-nk-op" header selects the test behaviour.
            case nksip_request:header(<<"x-nk-op">>, Req) of
                {ok, [<<"reply-request">>]} ->
                    %% Echo the whole request back, base64-encoded.
                    Body = base64:encode(term_to_binary(Req)),
                    {reply, {ok, [{body, Body}, contact]}};
                {ok, [<<"reply-stateless">>]} ->
                    {reply_stateless, ok};
                {ok, [<<"reply-stateful">>]} ->
                    {reply, ok};
                {ok, [<<"reply-invalid">>]} ->
                    %% Deliberately malformed reply, for error-path tests.
                    {reply, 'INVALID'};
                {ok, [<<"force-error">>]} ->
                    %% Crash on purpose, for crash-handling tests.
                    error(test_error);
                {ok, _} ->
                    process
            end;
        true when Domain =:= <<"nksip">> ->
            lager:error("NKLOG MY PROCESS2"),
            %% Proxy to the registered contacts for this user, if any.
            case nksip_registrar:find(?MODULE, Scheme, User, Domain) of
                [] -> {reply, temporarily_unavailable};
                UriList -> {proxy, UriList, Opts}
            end;
        _ ->
            lager:error("NKLOG MY PROCESS3"),
            {proxy, ruri, Opts}
    end.
%% @doc Service init callback: seed the list of domains this server
%% accepts and remember the service id in the state under `my_name'.
srv_init(#{id:=PkgId}, State) ->
    ok = nkserver:put(PkgId, domains, [<<"nksip">>, <<"127.0.0.1">>, <<"[::1]">>]),
    {ok, State#{my_name=>PkgId}}.
%% @doc Call callback used by the test suite to read and replace the
%% domain list at runtime. The `my_name' guard ensures only this
%% module's instance answers; anything else falls through (`continue')
%% to the default handler.
srv_handle_call(get_domains, _From, _Service, #{my_name:=?MODULE}=State) ->
    Domains = nkserver:get(?MODULE, domains),
    {reply, {ok, Domains}, State};
srv_handle_call({set_domains, Domains}, _From, _Service, #{my_name:=?MODULE}=State) ->
    ok = nkserver:put(?MODULE, domains, Domains),
    {reply, ok, State};
srv_handle_call(_Msg, _From, _Service, _State) ->
    continue.
%% @doc Cast callback: echoes a {Ref, {cast_test, Module}} message back
%% to the caller's Pid so the suite can verify casts are delivered.
%% Unrecognised casts fall through to the default handler.
srv_handle_cast({cast_test, Ref, Pid}, _Service, #{my_name:=?MODULE}=State) ->
    Pid ! {Ref, {cast_test, ?MODULE}},
    {noreply, State};
srv_handle_cast(_Msg, _Service, _State) ->
    continue.
%% @doc Info callback: echoes a {Ref, {info_test, Module}} message back
%% to the caller's Pid so the suite can verify plain messages arrive.
%% Unrecognised messages fall through to the default handler.
srv_handle_info({info_test, Ref, Pid}, _Service, #{my_name:=?MODULE}=State) ->
    Pid ! {Ref, {info_test, ?MODULE}},
    {noreply, State};
srv_handle_info(_Msg, _Service, _State) ->
    continue.
| null | https://raw.githubusercontent.com/NetComposer/nksip/7fbcc66806635dc8ecc5d11c30322e4d1df36f0a/test/callbacks/basic_test_server.erl | erlang | -------------------------------------------------------------------
basic_test: Basic Test Suite
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
sip_trace => true,
sip_debug=>[nkpacket, call, packet] |
Copyright ( c ) 2013 . All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(basic_test_server).
-export([sip_route/5, config/1]).
-export([srv_init/2, srv_handle_call/4, srv_handle_cast/3, srv_handle_info/3]).
-include_lib("nkserver/include/nkserver_module.hrl").
config(Opts) ->
Opts#{
sip_from => "\"NkSIP Basic SUITE Test Server\" <sip:?MODULE@nksip>",
sip_listen => "sip;tcp_listeners=10, sips:all:5061",
plugins => [nksip_registrar, nksip_trace]
}.
sip_route(Scheme, User, Domain, Req, _Call) ->
Domains = nkserver:get(?MODULE, domains),
Opts = [
record_route,
{insert, "x-nk-server", ?MODULE}
],
case lists:member(Domain, Domains) of
true when User =:= <<>> ->
case nksip_request:header(<<"x-nk-op">>, Req) of
{ok, [<<"reply-request">>]} ->
Body = base64:encode(term_to_binary(Req)),
{reply, {ok, [{body, Body}, contact]}};
{ok, [<<"reply-stateless">>]} ->
{reply_stateless, ok};
{ok, [<<"reply-stateful">>]} ->
{reply, ok};
{ok, [<<"reply-invalid">>]} ->
{reply, 'INVALID'};
{ok, [<<"force-error">>]} ->
error(test_error);
{ok, _} ->
process
end;
true when Domain =:= <<"nksip">> ->
lager:error("NKLOG MY PROCESS2"),
case nksip_registrar:find(?MODULE, Scheme, User, Domain) of
[] -> {reply, temporarily_unavailable};
UriList -> {proxy, UriList, Opts}
end;
_ ->
lager:error("NKLOG MY PROCESS3"),
{proxy, ruri, Opts}
end.
srv_init(#{id:=PkgId}, State) ->
ok = nkserver:put(PkgId, domains, [<<"nksip">>, <<"127.0.0.1">>, <<"[::1]">>]),
{ok, State#{my_name=>PkgId}}.
srv_handle_call(get_domains, _From, _Service, #{my_name:=?MODULE}=State) ->
Domains = nkserver:get(?MODULE, domains),
{reply, {ok, Domains}, State};
srv_handle_call({set_domains, Domains}, _From, _Service, #{my_name:=?MODULE}=State) ->
ok = nkserver:put(?MODULE, domains, Domains),
{reply, ok, State};
srv_handle_call(_Msg, _From, _Service, _State) ->
continue.
srv_handle_cast({cast_test, Ref, Pid}, _Service, #{my_name:=?MODULE}=State) ->
Pid ! {Ref, {cast_test, ?MODULE}},
{noreply, State};
srv_handle_cast(_Msg, _Service, _State) ->
continue.
srv_handle_info({info_test, Ref, Pid}, _Service, #{my_name:=?MODULE}=State) ->
Pid ! {Ref, {info_test, ?MODULE}},
{noreply, State};
srv_handle_info(_Msg, _Service, _State) ->
continue.
|
998e4baef3708d15d6e6d568bab5b667e4d309c431aecc56fa5d3eb58c049b88 | jakemcc/test-refresh | hello.clj | (ns example.hello)
(defn greet
  "Return a greeting string for the person named `who`."
  [who]
  (str "Hello " who))
| null | https://raw.githubusercontent.com/jakemcc/test-refresh/209d1d410ff4fdd0fc091b023483979fad86ae09/dep.edn/src/example/hello.clj | clojure | (ns example.hello)
(defn greet [name]
(str "Hello " name))
| |
2e9ffaf8cbc8171158f512a3e6d98801f050c310199fbc0a5366a2e8306e566f | basho/riak_search | riak_search_kv_erlang_extractor.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2007 - 2010 Basho Technologies , Inc. All Rights Reserved .
%%
%% -------------------------------------------------------------------
-module(riak_search_kv_erlang_extractor).
-export([extract/3,
extract_value/3]).
-include("riak_search.hrl").
-import(riak_search_utils, [to_utf8/1]).
%%% Index erlang terms and proplists (bz)
%%%
%%% Riak objects should have the content type "application/x-erlang"
%%%
%%% Much like the JSON extractor:
%%%
%%% * bare terms ('foo', <<"foo">>, 123) are indexed in the default field
%%%
%%% * proplists are indexed under the prop names
%%% ( [{<<"foo">>,<<"hello">>}] indexes "hello" in the "foo" field )
%%%
%%% * nested proplists separate name components with underscores
%%% ( [{<<"foo">>,[{<<"bar">>,<<"hello">>}]}] indexes "hello"
%%% in the "foo_bar" field )
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
%% @doc Extract search fields from every sibling value stored in
%% RiakObject: each value is walked independently and the resulting
%% {FieldName, FieldValue} pairs are flattened into one list.
%% Any crash while walking a value is converted into
%% {fail, {bad_erlang, Reason}} so the indexer can reject the document.
extract(RiakObject, DefaultField, _Args) ->
    try
        Values = riak_object:get_values(RiakObject),
        lists:flatten([extract_value(V, DefaultField, _Args) || V <- Values])
    catch
        _:Err ->
            {fail, {bad_erlang,Err}}
    end.
%% @doc Walk a single Erlang term and return its search fields as
%% [{FieldNameBin, FieldValueBin}], UTF-8 encoded. make_search_fields/4
%% accumulates pairs in reverse, hence the lists:reverse/1 here to
%% restore the order in which terms appear in Data.
extract_value(Data, DefaultField, _Args) ->
    Fields = lists:reverse(lists:flatten(make_search_fields(undefined, Data, DefaultField, []))),
    [{to_utf8(FieldName), to_utf8(FieldValue)} || {FieldName, FieldValue} <- Fields].
%% @doc Recursively walk a term, prepending {FieldName, Value} pairs to
%% Output (i.e. accumulating in reverse). NamePrefix is the field-name
%% path (a string) built so far, or 'undefined' at the top level.
%% A {Prop, Value} pair descends into Value with Prop appended to the
%% field-name path (proplist handling).
make_search_fields(NamePrefix, {Prop, Value}, DefaultField, Output)
  when is_atom(Prop); is_binary(Prop) ->
    make_search_fields(append_fieldname(NamePrefix, Prop),
                       Value, DefaultField, Output);
make_search_fields(Name, List, DefaultField, Output) when is_list(List) ->
    %% all list elements are indexed individually
    %% -> encode strings as binaries, not lists!
    F = fun(El, Acc) ->
            [make_search_fields(Name, El, DefaultField, []) | Acc]
        end,
    lists:foldl(F, Output, List);
make_search_fields(_Name, undefined, _DefaultField, Output) ->
    %% 'undefined' values are skipped entirely
    Output;
make_search_fields(Name, Term, DefaultField, Output) when is_atom(Term);
                                                          is_binary(Term);
                                                          is_number(Term) ->
    %% leaf value: index under the accumulated field name, or the
    %% default field when no name has been seen yet
    [{search_fieldname(Name, DefaultField), Term} | Output];
make_search_fields(_Name, _Term, _DefaultField, Output) ->
    %% can't index PIDs, Ports, >2-arity tuples, ...
    Output.
%% @doc Append a property name (atom or binary) to the field-name path,
%% separating nested components with an underscore. Returns a string;
%% atoms are first converted to UTF-8 binaries.
append_fieldname(undefined, Name) when is_binary(Name) ->
    binary_to_list(Name);
append_fieldname(FieldPrefix, Name) when is_binary(Name) ->
    FieldPrefix ++ [$_ | binary_to_list(Name)];
append_fieldname(FieldPrefix, Name) when is_atom(Name) ->
    append_fieldname(FieldPrefix, atom_to_binary(Name, utf8)).
%% Make a search field name - if no names encountered yet use the
%% default field, otherwise make sure the field name does not
%% contain . or : (clean_name/1 substitutes offending characters
%% with _).
search_fieldname(undefined, DefaultField) ->
    DefaultField;
search_fieldname(Name, _) ->
    riak_search_kv_extractor:clean_name(Name).
-ifdef(TEST).
bad_binary_test() ->
Data = {<<"this">>,<<"is not">>,<<"a proplist">>},
Object = riak_object:new(<<"b">>, <<"k">>, Data,
"application/x-erlang"),
?assertMatch([], extract(Object, <<"value">>, undefined)).
term_test() ->
Tests = [{[{<<"myfield">>,<<"myvalue">>}],
[{<<"myfield">>,<<"myvalue">>}]},
{[{<<"myfield">>,123}],
[{<<"myfield">>,<<"123">>}]},
{[{<<"myfield">>,123.456}],
[{<<"myfield">>,<<"123.456">>}]},
{[{<<"myfield">>,true}],
[{<<"myfield">>,<<"true">>}]},
{[{<<"myfield">>,undefined}],
[]},
{[{<<"one">>,[{<<"two">>,[{<<"three">>,<<"go">>}]}]}],
[{<<"one_two_three">>, <<"go">>}]},
{<<"abc">>,
[{<<"value">>, <<"abc">>}]},
{[{<<"menu">>,
[{<<"id">>,<<"file">>},
{<<"value">>,<<"File">>},
{<<"popup">>,
[{<<"menuitem">>,
[[{<<"value">>,<<"New">>},
{<<"onclick">>,<<"CreateNewDoc()">>}],
[{<<"value">>,<<"Open">>},
{<<"onclick">>,<<"OpenDoc()">>}],
[{<<"value">>,<<"Close">>},
{<<"onclick">>, <<"CloseDoc()">>}]]
}]
}]
}],
[{<<"menu_id">>, <<"file">>},
{<<"menu_value">>, <<"File">>},
{<<"menu_popup_menuitem_value">>, <<"New">>},
{<<"menu_popup_menuitem_onclick">>, <<"CreateNewDoc()">>},
{<<"menu_popup_menuitem_value">>, <<"Open">>},
{<<"menu_popup_menuitem_onclick">>, <<"OpenDoc()">>},
{<<"menu_popup_menuitem_value">>, <<"Close">>},
{<<"menu_popup_menuitem_onclick">>, <<"CloseDoc()">>}]},
%% From -atom2json.html
%% via riak_search_kv_json_extractor.erl
{[{<<"lang">>,<<"en-US">>},
{<<"dir">>,<<"ltr">>},
{<<"id">>,<<"tag:example.org,2007:/foo">>},
{<<"title">>,<<"Example Feed">>},
{<<"subtitle">>,
[{<<"attributes">>,
[{<<"type">>,<<"html">>}]},
{<<"children">>,
[{<<"name">>,<<"p">>},
{<<"attributes">>,[]},
{<<"children">>,[<<"This is an example feed">>] }]}]},
{<<"rights">>,
[{<<"attributes">>,
[{<<"type">>,<<"xhtml">>}]},
{<<"children">>,
[{<<"name">>,<<"p">>},
{<<"attributes">>,[]},
{<<"children">>,[<<"Copyright © James M Snell">>]}]}]},
{<<"updated">>,<<"2007-10-14T12:12:12.000Z">>},
{<<"authors">>,
[{<<"name">>,<<"James M Snell">>},
{<<"email">>,<<"">>},
{<<"uri">>,<<"/~jasnell">>}]},
{<<"links">>,
[[{<<"href">>,<<"">>},
{<<"rel">>,<<"self">>}],
[{<<"href">>,<<"">>}],
[{<<"href">>,<<";json">>},
{<<"rel">>,<<"alternate">>},
{<<"type">>,<<"application/json">>}]]},
{<<"entries">>,[]},
{<<"attributes">>,
[{<<"xml:lang">>,<<"en-US">>},
{<<"xml:base">>,<<"">>}]}],
[{<<"lang">>,<<"en-US">>},
{<<"dir">>,<<"ltr">>},
{<<"id">>,<<"tag:example.org,2007:/foo">>},
{<<"title">>,<<"Example Feed">>},
{<<"subtitle_attributes_type">>, <<"html">>},
{<<"subtitle_children_name">>, <<"p">>},
{<<"subtitle_children_children">>, <<"This is an example feed">>},
{<<"rights_attributes_type">>, <<"xhtml">> },
{<<"rights_children_name">>,<<"p">>},
{<<"rights_children_children">>,<<"Copyright © James M Snell" >>},
{<<"updated">>,<<"2007-10-14T12:12:12.000Z">>},
{<<"authors_name">>,<<"James M Snell">>},
{<<"authors_email">>,<<"">>},
{<<"authors_uri">>,<<"/~jasnell">>},
{<<"links_href">>,<<"">>},
{<<"links_rel">>,<<"self">>},
{<<"links_href">>,<<"">>},
{<<"links_href">>,<<";json">>},
{<<"links_rel">>,<<"alternate">>},
{<<"links_type">>,<<"application/json">>},
{<<"attributes_xml_lang">>, <<"en-US">>},
{<<"attributes_xml_base">>, <<"">>}]}
],
check_expected(Tests).
check_expected([]) ->
ok;
check_expected([{Terms, Fields}|Rest]) ->
Object = riak_object:new(<<"b">>, <<"k">>, Terms, "application/x-erlang"),
?assertEqual(Fields, extract(Object, <<"value">>, undefined)),
check_expected(Rest).
-endif. % TEST
| null | https://raw.githubusercontent.com/basho/riak_search/79c034350f37706a1db42ffca8f6449d4cce99e1/src/riak_search_kv_erlang_extractor.erl | erlang | -------------------------------------------------------------------
-------------------------------------------------------------------
Index erlang terms and proplists (bz)
Riak objects should have the content type "application/x-erlang"
Much like the JSON extractor:
* proplists are indexed under the prop names
( [{<<"foo">>,<<"hello">>}] indexes "hello" in the "foo" field )
* nested proplists separate name components with underscores
( [{<<"foo">>,[{<<"bar">>,<<"hello">>}]}] indexes "hello"
in the "foo_bar" field )
all list elements are indexed individually
-> encode strings as binaries, not lists!
Make a search field name - if no names encountered yet use the
default field, otherwise make sure the field name does not
contain . or : by substituting with _
From -atom2json.html
via riak_search_kv_json_extractor.erl
TEST | Copyright ( c ) 2007 - 2010 Basho Technologies , Inc. All Rights Reserved .
-module(riak_search_kv_erlang_extractor).
-export([extract/3,
extract_value/3]).
-include("riak_search.hrl").
-import(riak_search_utils, [to_utf8/1]).
* bare terms ( ' foo ' , < < " foo " > > , 123 ) are indexed in the default field
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
extract(RiakObject, DefaultField, _Args) ->
try
Values = riak_object:get_values(RiakObject),
lists:flatten([extract_value(V, DefaultField, _Args) || V <- Values])
catch
_:Err ->
{fail, {bad_erlang,Err}}
end.
extract_value(Data, DefaultField, _Args) ->
Fields = lists:reverse(lists:flatten(make_search_fields(undefined, Data, DefaultField, []))),
[{to_utf8(FieldName), to_utf8(FieldValue)} || {FieldName, FieldValue} <- Fields].
make_search_fields(NamePrefix, {Prop, Value}, DefaultField, Output)
when is_atom(Prop); is_binary(Prop) ->
make_search_fields(append_fieldname(NamePrefix, Prop),
Value, DefaultField, Output);
make_search_fields(Name, List, DefaultField, Output) when is_list(List) ->
F = fun(El, Acc) ->
[make_search_fields(Name, El, DefaultField, []) | Acc]
end,
lists:foldl(F, Output, List);
make_search_fields(_Name, undefined, _DefaultField, Output) ->
Output;
make_search_fields(Name, Term, DefaultField, Output) when is_atom(Term);
is_binary(Term);
is_number(Term) ->
[{search_fieldname(Name, DefaultField), Term} | Output];
make_search_fields(_Name, _Term, _DefaultField, Output) ->
ca n't index PIDs , Ports , > 2 - arity Tuples , ...
Output.
append_fieldname(undefined, Name) when is_binary(Name) ->
binary_to_list(Name);
append_fieldname(FieldPrefix, Name) when is_binary(Name) ->
FieldPrefix ++ [$_ | binary_to_list(Name)];
append_fieldname(FieldPrefix, Name) when is_atom(Name) ->
append_fieldname(FieldPrefix, atom_to_binary(Name, utf8)).
search_fieldname(undefined, DefaultField) ->
DefaultField;
search_fieldname(Name, _) ->
riak_search_kv_extractor:clean_name(Name).
-ifdef(TEST).
bad_binary_test() ->
Data = {<<"this">>,<<"is not">>,<<"a proplist">>},
Object = riak_object:new(<<"b">>, <<"k">>, Data,
"application/x-erlang"),
?assertMatch([], extract(Object, <<"value">>, undefined)).
term_test() ->
Tests = [{[{<<"myfield">>,<<"myvalue">>}],
[{<<"myfield">>,<<"myvalue">>}]},
{[{<<"myfield">>,123}],
[{<<"myfield">>,<<"123">>}]},
{[{<<"myfield">>,123.456}],
[{<<"myfield">>,<<"123.456">>}]},
{[{<<"myfield">>,true}],
[{<<"myfield">>,<<"true">>}]},
{[{<<"myfield">>,undefined}],
[]},
{[{<<"one">>,[{<<"two">>,[{<<"three">>,<<"go">>}]}]}],
[{<<"one_two_three">>, <<"go">>}]},
{<<"abc">>,
[{<<"value">>, <<"abc">>}]},
{[{<<"menu">>,
[{<<"id">>,<<"file">>},
{<<"value">>,<<"File">>},
{<<"popup">>,
[{<<"menuitem">>,
[[{<<"value">>,<<"New">>},
{<<"onclick">>,<<"CreateNewDoc()">>}],
[{<<"value">>,<<"Open">>},
{<<"onclick">>,<<"OpenDoc()">>}],
[{<<"value">>,<<"Close">>},
{<<"onclick">>, <<"CloseDoc()">>}]]
}]
}]
}],
[{<<"menu_id">>, <<"file">>},
{<<"menu_value">>, <<"File">>},
{<<"menu_popup_menuitem_value">>, <<"New">>},
{<<"menu_popup_menuitem_onclick">>, <<"CreateNewDoc()">>},
{<<"menu_popup_menuitem_value">>, <<"Open">>},
{<<"menu_popup_menuitem_onclick">>, <<"OpenDoc()">>},
{<<"menu_popup_menuitem_value">>, <<"Close">>},
{<<"menu_popup_menuitem_onclick">>, <<"CloseDoc()">>}]},
{[{<<"lang">>,<<"en-US">>},
{<<"dir">>,<<"ltr">>},
{<<"id">>,<<"tag:example.org,2007:/foo">>},
{<<"title">>,<<"Example Feed">>},
{<<"subtitle">>,
[{<<"attributes">>,
[{<<"type">>,<<"html">>}]},
{<<"children">>,
[{<<"name">>,<<"p">>},
{<<"attributes">>,[]},
{<<"children">>,[<<"This is an example feed">>] }]}]},
{<<"rights">>,
[{<<"attributes">>,
[{<<"type">>,<<"xhtml">>}]},
{<<"children">>,
[{<<"name">>,<<"p">>},
{<<"attributes">>,[]},
{<<"children">>,[<<"Copyright © James M Snell">>]}]}]},
{<<"updated">>,<<"2007-10-14T12:12:12.000Z">>},
{<<"authors">>,
[{<<"name">>,<<"James M Snell">>},
{<<"email">>,<<"">>},
{<<"uri">>,<<"/~jasnell">>}]},
{<<"links">>,
[[{<<"href">>,<<"">>},
{<<"rel">>,<<"self">>}],
[{<<"href">>,<<"">>}],
[{<<"href">>,<<";json">>},
{<<"rel">>,<<"alternate">>},
{<<"type">>,<<"application/json">>}]]},
{<<"entries">>,[]},
{<<"attributes">>,
[{<<"xml:lang">>,<<"en-US">>},
{<<"xml:base">>,<<"">>}]}],
[{<<"lang">>,<<"en-US">>},
{<<"dir">>,<<"ltr">>},
{<<"id">>,<<"tag:example.org,2007:/foo">>},
{<<"title">>,<<"Example Feed">>},
{<<"subtitle_attributes_type">>, <<"html">>},
{<<"subtitle_children_name">>, <<"p">>},
{<<"subtitle_children_children">>, <<"This is an example feed">>},
{<<"rights_attributes_type">>, <<"xhtml">> },
{<<"rights_children_name">>,<<"p">>},
{<<"rights_children_children">>,<<"Copyright © James M Snell" >>},
{<<"updated">>,<<"2007-10-14T12:12:12.000Z">>},
{<<"authors_name">>,<<"James M Snell">>},
{<<"authors_email">>,<<"">>},
{<<"authors_uri">>,<<"/~jasnell">>},
{<<"links_href">>,<<"">>},
{<<"links_rel">>,<<"self">>},
{<<"links_href">>,<<"">>},
{<<"links_href">>,<<";json">>},
{<<"links_rel">>,<<"alternate">>},
{<<"links_type">>,<<"application/json">>},
{<<"attributes_xml_lang">>, <<"en-US">>},
{<<"attributes_xml_base">>, <<"">>}]}
],
check_expected(Tests).
check_expected([]) ->
ok;
check_expected([{Terms, Fields}|Rest]) ->
Object = riak_object:new(<<"b">>, <<"k">>, Terms, "application/x-erlang"),
?assertEqual(Fields, extract(Object, <<"value">>, undefined)),
check_expected(Rest).
|
d1d4059bc33b68856cb0961318a5bedd0c0457c9fc175ed3d2030509a3861bcb | binaryage/chromex | signed_in_devices.cljs | (ns chromex.app.signed-in-devices (:require-macros [chromex.app.signed-in-devices :refer [gen-wrap]])
(:require [chromex.core]))
-- functions --------------------------------------------------------------------------------------------------------------
(defn get*
  "Delegates to the gen-wrap runtime for the ::get function, forwarding
   `config` and `is-local` to the underlying wrapped API call."
  [config is-local]
  (gen-wrap :function ::get config is-local))
; -- events -----------------------------------------------------------------------------------------------------------------
(defn on-device-info-change*
  "Subscribes `channel` to the ::on-device-info-change event via the
   gen-wrap runtime, forwarding any extra `args` as a seq."
  [config channel & args]
  (gen-wrap :event ::on-device-info-change config channel args))
| null | https://raw.githubusercontent.com/binaryage/chromex/33834ba5dd4f4238a3c51f99caa0416f30c308c5/src/apps/chromex/app/signed_in_devices.cljs | clojure | -- events ----------------------------------------------------------------------------------------------------------------- | (ns chromex.app.signed-in-devices (:require-macros [chromex.app.signed-in-devices :refer [gen-wrap]])
(:require [chromex.core]))
-- functions --------------------------------------------------------------------------------------------------------------
(defn get* [config is-local]
(gen-wrap :function ::get config is-local))
(defn on-device-info-change* [config channel & args]
(gen-wrap :event ::on-device-info-change config channel args))
|
456e65098faf5e3b5384c518c81eac673e84a9702a0adaca329fdae15d26b15d | byteverse/json-syntax | print-syntax.hs | # language BangPatterns #
# language LambdaCase #
import Data.Primitive (ByteArray)
import Data.ByteString (ByteString)
import Data.Bool (bool)
import Control.Exception
import Foreign.C.Types (CChar)
import qualified Json
import qualified Data.Bytes as Bytes
import qualified Data.Bytes.Chunks as Chunks
import qualified System.IO as IO
-- | Read all of stdin, decode it as JSON syntax, and print the result;
-- abort (via 'fail') with the decoder error on malformed input.
main :: IO ()
main =
  Chunks.hGetContents IO.stdin >>= \chunks ->
    either (fail . show) print (Json.decode (Chunks.concat chunks))
# language LambdaCase #
import Data.Primitive (ByteArray)
import Data.ByteString (ByteString)
import Data.Bool (bool)
import Control.Exception
import Foreign.C.Types (CChar)
import qualified Json
import qualified Data.Bytes as Bytes
import qualified Data.Bytes.Chunks as Chunks
import qualified System.IO as IO
main :: IO ()
main = do
input <- Chunks.hGetContents IO.stdin
case Json.decode (Chunks.concat input) of
Left err -> fail (show err)
Right v -> print v
| |
667a40b7a276e446fceb6c40f5a96890adbf70a4364d4737f4c47cb8b115a26d | zenspider/schemers | exercise.2.52.scm | #lang racket/base
Exercise 2.52
;; Make changes to the square limit of `wave' shown in *Note Figure
2 - 9 : : by working at each of the levels described above . In
;; particular:
;;
;; a. Add some segments to the primitive `wave' painter of *Note
;; Exercise 2-49:: (to add a smile, for example).
;;
;; b. Change the pattern constructed by `corner-split' (for
example , by using only one copy of the ` up - split ' and
` right - split ' images instead of two ) .
;;
c. Modify the version of ` square - limit ' that uses
;; `square-of-four' so as to assemble the corners in a different
pattern . ( For example , you might make the big Mr.
;; look outward from each corner of the square.)
;; (assert-equal x y)
;; no
| null | https://raw.githubusercontent.com/zenspider/schemers/2939ca553ac79013a4c3aaaec812c1bad3933b16/sicp/ch_2/exercise.2.52.scm | scheme | Make changes to the square limit of `wave' shown in *Note Figure
particular:
a. Add some segments to the primitive `wave' painter of *Note
Exercise 2-49:: (to add a smile, for example).
b. Change the pattern constructed by `corner-split' (for
`square-of-four' so as to assemble the corners in a different
look outward from each corner of the square.)
(assert-equal x y)
no | #lang racket/base
Exercise 2.52
2 - 9 : : by working at each of the levels described above . In
example , by using only one copy of the ` up - split ' and
` right - split ' images instead of two ) .
c. Modify the version of ` square - limit ' that uses
pattern . ( For example , you might make the big Mr.
|
2505d72c9eb32358fda12f3323693cd52dd7bfc589cf83a7a9b7d49bc43ed545 | emaphis/HtDP2e-solutions | ex114a.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-beginner-reader.ss" "lang")((modname 06_04_space_invader_3) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
HtDP 2e - 6 Itemizations and Structures
;; 6.4 Checking the World
Exercises : 114
Ex . 114 :
Use the predicates from exercise 113 to check the space invader world
program , the virtual pet program ( exercise 106 ) , and the editor program
;; (A Graphical Editor).
;; space invader simulation
(require 2htdp/image)
(require 2htdp/universe)
Ex . 94 :
;; Draw some sketches of what the game scenery looks like at various stages.
;; Use the sketches to determine the constant and the variable pieces of the
;; game. For the former, develop physical and graphical constants that describe
;; the dimensions of the world (canvas) and its objects. Also develop some
;; background scenery. Finally, create your initial scene from the constants
;; for the tank, the UFO, and the background.
;; physical constants:
(define WIDTH 200) ; world constants
(define HEIGHT 200)
(define TANK-SPEED 3)
(define UFO-SPEED 7)
(define MISSILE-SPEED 10)
(define JUMP 7) ; UFO randomly jumps by this amount
; graphical constants
(define UFO (overlay (rectangle 20 2 "solid" "blue")
(ellipse 15 10 "solid" "purple")))
(define UFO-HEIGHT (image-height UFO))
(define UFO-WIDTH (image-width UFO))
(define H-UFO-HEIGHT (/ UFO-HEIGHT 2))
(define H-UFO-WIDTH (/ UFO-WIDTH 2))
(define TANK (rectangle 15 10 "solid" "olive"))
(define TANK-HEIGHT (image-height TANK))
(define H-TANK-HEIGHT (/ TANK-HEIGHT 2))
(define TANK-Y (- HEIGHT H-TANK-HEIGHT)) ; Y pos never changes for tank
(define MISSILE
(rectangle 1 15 "solid" "red"))
(define MISSILE-HEIGHT (image-height MISSILE))
(define MISSILE-WIDTH (image-width MISSILE))
(define H-MISSILE-HEIGHT (/ MISSILE-HEIGHT 2))
(define H-MISSILE-WIDTH (/ MISSILE-WIDTH 2))
(define BACKGROUND (empty-scene WIDTH HEIGHT "lightblue"))
;; a mock-up
(define MOCK-UP
(place-image MISSILE 140 (- HEIGHT 120)
(place-image TANK 100 TANK-Y
(place-image UFO 150 (- HEIGHT 160)
BACKGROUND))))
;;;;;;;;;;;;;;;;;;;
;; data definitions
; A UFO is a Posn.
; interpretation (make-posn x y) is the UFO's location
; (using the top-down, left-to-right convention)
(define UFO1 (make-posn 10 60))
; UFO -> ???
#; ;template for UFO
(define (fn-for-ufo u)
(... (... (posn-x u)) ; Number
(... (posn-y u)))) ; Number
(define-struct tank [loc vel])
; A Tank is a structure:
; (make-tank Number Number).
; interpretation (make-tank x dx) specifies the position:
( x , HEIGHT ) and the tank 's speed : dx pixels / tick
(define TANK1 (make-tank 50 10))
; Tank -> ???
#; ;template for Tank
(define (fn-for-tank t)
(... (... (tank-loc t)) ; Number
(... (tank-vel t)))) ; Number
; A Missile is a Posn.
; interpretation (make-posn x y) is the missile's place
(define MISSILE1 (make-posn 20 80))
; Missile -> ???
#; ;template for Missile
(define (fn-for-missile m)
(... (... (posn-x m)) ; Number
(... (posn-y m)))) ; Number
; the time period when the player is trying to get the tank in position
; for a shot,
(define-struct aim [ufo tank])
(define AIM0 (make-aim (make-posn 100 120)
(make-tank 50 3)))
; Aim -> ???
#; ;template for Aim
(define (fn-for-aim a)
(... (fn-for-ufo (aim-ufo a)) ; UFO
(fn-for-tank (aim-tank a)))) ; Tank
; tates after the missile is fired. Before we can formulate a data definition
; for the complete game state
(define-struct fired [ufo tank missile])
(define FIRED1 (make-fired (make-posn 100 120)
(make-tank 50 3)
(make-posn 20 150)))
; Fired -> ???
#; ;template for Fired
(define (fn-for-fired f)
(... (fn-for-ufo (fired-ufo f)) ; UFO
(fn-for-tank (fired-tank f)) ; Tank
(fn-for-missile (fired-missile f)))) ; Missile
A SIGS is one of :
; – (make-aim UFO Tank)
; – (make-fired UFO Tank Missile)
; interpretation represents the complete state of a
; space invader game
; an instance that describes the tank maneuvering into position to fire:
; the missile:
(define AIM1
(make-aim (make-posn 20 10)
(make-tank 28 -3)))
; just like the previous one but the missile has been fired:
(define FIRE1
(make-fired (make-posn 20 10)
(make-tank 28 -3)
(make-posn 28 (- HEIGHT TANK-HEIGHT))))
; one where the missile is about to collide with the UFO:
(define FIRE2
(make-fired (make-posn 20 100)
(make-tank 100 3)
(make-posn 22 103)))
; SIGS -> ???
#; ;template for SIGS
(define (fn-for-sigs s)
(cond [(aim? s)
(... (fn-for-ufo (aim-ufo s))
(fn-for-tank (aim-tank s)))]
[(fired? s)
(... (fn-for-ufo (fired-ufo s))
(fn-for-tank (fired-tank s))
(fn-for-missile (fired-missile s)))]))
;;;;;;;;;;;;;;;;;;;;;
Ex . 95 :
Explain why the three instances are generated according to the
first or second clause of the data definition .
The first data definition is the only one that follows the first because it
has only one representation , the second clause has two defintions since it
two matjor states , just fired and in flight .
;;;;;;;;;;;;;;;;;;;;;;;
Ex . 96 :
Sketch how each of the three game states could be rendered assuming
a 200 by 200 canvas . See MOCK - UP above
; an instance that describes the tank maneuvering into position to fire:
; the missile:
( make - aim ( make - posn 20 10 ) ( make - tank 28 -3 ) )
(define AIM-SCENE1
(place-image TANK 28 TANK-Y
(place-image UFO 20 10
BACKGROUND)))
; just like the previous one but the missile has been fired:
( make - fired ( make - posn 20 10 ) ( make - tank 28 -3 )
( make - posn 28 ( - HEIGHT TANK - HEIGHT ) ) )
(define FIRE-SCENE1
(place-image TANK 28 TANK-Y
(place-image UFO 20 10
(place-image MISSILE 28 (- HEIGHT TANK-HEIGHT 10)
BACKGROUND))))
; one where the missile is about to collide with the UFO:
( make - fired ( make - posn 20 100 ) ( make - tank 100 3 ) ( make - posn 22 103 ) )
TANK - > UFO - >
(define FIRE-SCENE2
(place-image TANK 100 TANK-Y
(place-image UFO 20 100
(place-image MISSILE 22 103
BACKGROUND))))
;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Functions:
; SIGS -> Image
adds TANK , UFO , and possibly MISSILE to
; the BACKGROUND scene
;(define (si-render s) BACKGROUND) ;stub
(define (si-render s)
  (cond [(aim? s)
         ;; aiming phase: draw only the tank and the UFO
         (tank-render (aim-tank s)
                      (ufo-render (aim-ufo s)
                                  BACKGROUND))]
        [(fired? s)
         ;; fired phase: also draw the missile; it is placed innermost
         ;; so the tank and UFO are layered on top of it
         (tank-render (fired-tank s)
                      (ufo-render (fired-ufo s)
                                  (missile-render (fired-missile s)
                                                  BACKGROUND)))]))
;;;;;;;;;;;;;;;;;
Ex . 97 :
;; Design the functions tank-render, ufo-render, and missile-render.
;; Is the result of this expression
; (tank-render
; (fired-tank s)
; (ufo-render (fired-ufo s)
; (missile-render (fired-missile s)
; BACKGROUND)))
;; the same as the result of
; (ufo-render
; (fired-ufo s)
; (tank-render (fired-tank s)
; (missile-render (fired-missile s)
; BACKGROUND)))
the two expressions produce the same result ?
; the different implementations will only produce the same result
; when the images don't overlap.
; Tank Image -> Image
; adds t to the given image im
(check-expect (tank-render (make-tank 28 -3) BACKGROUND)
(place-image TANK 28 TANK-Y BACKGROUND))
;(define (tank-render t im) im) ;stub
(define (tank-render t im)
(place-image TANK (tank-loc t) TANK-Y im))
; UFO Image -> Image
; adds u to the given image im
(check-expect (ufo-render (make-posn 20 100) BACKGROUND)
(place-image UFO 20 100 BACKGROUND))
;(define (ufo-render u im) im) ;stub
(define (ufo-render u im)
(place-image UFO (posn-x u) (posn-y u) im))
Image
; adds m to the given image im
(check-expect (missile-render (make-posn 28 30) BACKGROUND)
(place-image MISSILE 28 30 BACKGROUND))
;(define (missile-render u im) im) ;stub
(define (missile-render m im)
(place-image MISSILE (posn-x m) (posn-y m) im))
;;;;;;;;;;;;;;;;;;;
Ex . 98 :
;; Design the function si-game-over? for use as the stop-when handler.
;; The game stops if the UFO lands or if the missile hits the UFO.
For both conditions , we recommend that you check for proximity of one object
;; to another.
The stop - when clause allows for an optional second sub - expression , namely a
;; function that renders the final state of the game. Design si-render-final
and use it as the second part for your stop - when clause in the main function
of exercise 100
; NOTE:
; make-aim is relevan when UFO is landed, landed is always #true
make - fired is relevant when UFO is landed or Missile hits UFO
; Tank x-pos does not matter
; SIGS -> Boolean
returns # true if the UFO has landed or if the Missile is in
; proximity of the UFO
(check-expect (si-game-over? ; not landed, no missle
(make-aim (make-posn 20 100)
(make-tank 10 3)))
#false)
(check-expect (si-game-over? ; landed, no missle
(make-aim (make-posn 20 (- HEIGHT H-UFO-HEIGHT))
(make-tank 10 3)))
#true)
(check-expect (si-game-over? ; landed, no hit
(make-fired (make-posn 20 (- HEIGHT H-UFO-HEIGHT))
(make-tank 10 3) (make-posn 10 10)))
#true)
(check-expect (si-game-over? ; not landed, no hit
(make-fired (make-posn 20 100)
(make-tank 10 3) (make-posn 10 10)))
#false)
(check-expect (si-game-over? ; not landed, hit!
(make-fired (make-posn 20 100)
(make-tank 10 3) (make-posn 20 100)))
#true)
;(define (si-game=over? s) #false) ;stub
(define (si-game-over? s)
(cond [(aim? s)
(ufo-landed? (aim-ufo s))]
[(fired? s)
(or
(ufo-landed? (fired-ufo s))
(ufo-hit? (fired-ufo s)
(fired-missile s)))]))
; UFO -> Boolean
; #true if the UFO has landed -- you loose
(check-expect (ufo-landed? ; not landed
(make-posn 20 100))
#false)
(check-expect (ufo-landed? ; landed
(make-posn 20 (- HEIGHT H-UFO-HEIGHT)))
#true)
;(define (ufo-landed? u) #false) ;stub
(define (ufo-landed? u)
(>= (posn-y u) (- HEIGHT H-UFO-HEIGHT)))
; UFO Missile -> Boolean
# true if the UFO was hit by the -- you win
(check-expect (ufo-hit? ; landed, no hit
(make-posn 20 (- HEIGHT UFO-HEIGHT))
(make-posn 10 10))
#false)
(check-expect (ufo-hit? ; not landed, no hit
(make-posn 20 100)
(make-posn 10 10))
#false)
(check-expect (ufo-hit? ; not landed, hit!
(make-posn 20 100)
(make-posn 20 100))
#true)
;(define (ufo-hit? u m) #false) ;stub
(define (ufo-hit? u m)
(and (<= (abs (- (posn-x u) (posn-x m)))
(+ H-UFO-WIDTH H-MISSILE-WIDTH))
(<= (abs (- (posn-y u) (posn-y m)))
(+ H-UFO-HEIGHT H-MISSILE-HEIGHT))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Ex . 99 :
;; Design si-move. This function is called for every clock tick to determine
;; to which position the objects move now. Accordingly it consumes an element
of SIGS and produces another one .
;; Assumption:
;; 'si-move-proper' will move the Missile (if exists),the UFO andd the Tank
;; each 'tock'
;; 'si-move' adds a random element to UFO move
( Random 3 ) results :
0 - subtract JUMP from UFO - SPEED
1 - move JUMP ( - ) in x direction
2 - move JUMP ( + ) in x direction
; SIGS -> SIGS
update the SIGS with random UFO adustment state every tock
(define (si-move w)
(si-move-proper w (random 3)))
; SIGS -> SIGS
; move the UFO objects predictably
0 - subtract 1 from UFO - SPEED
1 - move -1 in x direction
2 - move 1 in x direction
(check-expect (si-move-proper (make-aim (make-posn 10 20)
(make-tank 50 0))
0)
(make-aim (make-posn 10 (+ 20 (- UFO-SPEED JUMP)))
(make-tank 50 0)))
(check-expect (si-move-proper (make-aim (make-posn 10 20)
(make-tank 50 3))
1)
(make-aim (make-posn (- 10 JUMP) (+ 20 UFO-SPEED))
(make-tank 53 3)))
(check-expect (si-move-proper (make-aim (make-posn 10 20)
(make-tank 50 3))
2)
(make-aim (make-posn (+ 10 JUMP) (+ 20 UFO-SPEED))
(make-tank 53 3)))
(check-expect (si-move-proper (make-fired (make-posn 10 20)
(make-tank 50 3)
(make-posn 20 30))
0)
(make-fired (make-posn 10 (+ 20 (- UFO-SPEED JUMP)))
(make-tank 53 3)
(make-posn 20 (- 30 MISSILE-SPEED))))
;(define (si-move-proper w r)
; w)
(define (si-move-proper s r)
(cond [(aim? s)
(make-aim (update-ufo (aim-ufo s) r)
(update-tank (aim-tank s)))]
[(fired? s)
(make-fired (update-ufo (fired-ufo s) r)
(update-tank (fired-tank s))
(update-missile (fired-missile s)))]))
; Ufo Number -> Ufo
; update a UFO with a random adustment on tock
0 - subtract JUMP from UFO - SPEED
1 - move JUMP ( - ) in x direction
2 - move JUMP ( + ) in x direction
(check-expect (update-ufo (make-posn 20 30) 0)
(make-posn 20 (+ 30 (- UFO-SPEED JUMP))))
(check-expect (update-ufo (make-posn 20 30) 1)
(make-posn (- 20 JUMP) (+ 30 UFO-SPEED)))
(check-expect (update-ufo (make-posn 20 30) 2)
(make-posn (+ 20 JUMP) (+ 30 UFO-SPEED)))
(define (update-ufo u r)
(cond [(= r 0) (make-posn (posn-x u) (+ (posn-y u) (- UFO-SPEED JUMP)))]
[(= r 1) (make-posn (- (posn-x u) JUMP) (+ (posn-y u) UFO-SPEED))]
[(= r 2) (make-posn (+ (posn-x u) JUMP) (+ (posn-y u) UFO-SPEED))]))
; Tank -> Tank
; update tank x postion with tank velocity on tock
(check-expect (update-tank (make-tank 10 3)) (make-tank 13 3))
(define (update-tank t)
(make-tank (+ (tank-loc t) (tank-vel t)) (tank-vel t)))
; Missile -> Missile
update 's y position on tock
(check-expect (update-missile (make-posn 20 30))
(make-posn 20 (- 30 MISSILE-SPEED)))
(define (update-missile m)
(make-posn (posn-x m) (- (posn-y m) MISSILE-SPEED)))
;;;;;;;;;;;;;;;;;;;;;;;;;;
Ex . 100 :
;; Design the function si-control, which plays the role of the key event
handler . As such , it consumes a game state and a KeyEvent and produces
a new game state . It reacts to three different keys :
; pressing the left arrow ensures that the tank moves left;
; pressing the right arrow ensures that the tank moves right; and
; pressing the space bar fires the missile if it hasn’t been launched yet.
;; Once you have this function, you can define the si-main function, which
;; uses big-bang to spawn the game-playing window. Enjoy!
SIG
(define (si-control sg ke)
(cond [(key=? ke "left") (tank-left sg)]
[(key=? ke "right") (tank-right sg)]
[(key=? ke " ") (fire-missile sg)]
[else sg])) ; ignore rest
; SIGS -> SIGS
; turn tank right (set velocity -)
(check-expect (tank-left (make-aim (make-posn 10 50) (make-tank 30 3)))
(make-aim (make-posn 10 50) (make-tank 30 -3)))
(check-expect (tank-left (make-aim (make-posn 10 50) (make-tank 30 -3)))
(make-aim (make-posn 10 50) (make-tank 30 -3)))
(check-expect (tank-left (make-fired (make-posn 10 50)
(make-tank 30 3)
(make-posn 30 30)))
(make-fired (make-posn 10 50)
(make-tank 30 -3)
(make-posn 30 30)))
(check-expect (tank-left (make-fired (make-posn 10 50)
(make-tank 30 -3)
(make-posn 30 30)))
(make-fired (make-posn 10 50)
(make-tank 30 -3)
(make-posn 30 30)))
;(define (tank-left s) s) ;stub
(define (tank-left s)
(cond [(aim? s)
(make-aim (aim-ufo s)
(tank-turn-left (aim-tank s)))]
[(fired? s)
(make-fired (fired-ufo s)
(tank-turn-left (fired-tank s))
(fired-missile s))]))
; Tank -> Tank
; turn Tank right (+)
(check-expect (tank-turn-left (make-tank 30 -3))
(make-tank 30 -3))
(check-expect (tank-turn-left (make-tank 30 3))
(make-tank 30 -3))
(define (tank-turn-left t)
(cond [(<= 0 (tank-vel t))
(make-tank (tank-loc t) (- 0 (tank-vel t)))]
[else t]))
; SIGS -> SIGS
; turn tank right (set velocity +)
(check-expect (tank-right (make-aim (make-posn 10 50) (make-tank 30 3)))
(make-aim (make-posn 10 50) (make-tank 30 3)))
(check-expect (tank-right (make-aim (make-posn 10 50) (make-tank 30 -3)))
(make-aim (make-posn 10 50) (make-tank 30 3)))
(check-expect (tank-right (make-fired (make-posn 10 50)
(make-tank 30 3)
(make-posn 30 30)))
(make-fired (make-posn 10 50)
(make-tank 30 3)
(make-posn 30 30)))
(check-expect (tank-right (make-fired (make-posn 10 50)
(make-tank 30 -3)
(make-posn 30 30)))
(make-fired (make-posn 10 50)
(make-tank 30 3)
(make-posn 30 30)))
;(define (tank-right s) s) ;stub
(define (tank-right s)
(cond [(aim? s)
(make-aim (aim-ufo s)
(tank-turn-right (aim-tank s)))]
[(fired? s)
(make-fired (fired-ufo s)
(tank-turn-right (fired-tank s))
(fired-missile s))]))
; Tank -> Tank
; turn Tank right (+)
(check-expect (tank-turn-right (make-tank 30 -3))
(make-tank 30 3))
(check-expect (tank-turn-right (make-tank 30 3))
(make-tank 30 3))
(define (tank-turn-right t)
(cond [(< (tank-vel t) 0)
(make-tank (tank-loc t) (- 0 (tank-vel t)))]
[else t]))
; SIGS -> SIGS
; fire a missile (convert an Aim to a Fired)
(check-expect (fire-missile (make-aim (make-posn 25 30)
(make-tank 20 3)))
(make-fired (make-posn 25 30)
(make-tank 20 3)
(make-posn (+ 20 3) (- HEIGHT H-TANK-HEIGHT))))
(check-expect (fire-missile (make-fired (make-posn 25 30)
(make-tank 20 3)
(make-posn 40 10)))
(make-fired (make-posn 25 30)
(make-tank 20 3)
(make-posn (+ 20 3) (- HEIGHT H-TANK-HEIGHT))))
; (define (fire-missile s) s) ;stub
(define (fire-missile s)
(cond [(aim? s)
(make-fired (aim-ufo s)
(aim-tank s)
(make-posn (+ (tank-loc (aim-tank s))
(tank-vel (aim-tank s)))
(- HEIGHT H-TANK-HEIGHT)))]
[(fired? s)
(make-fired (fired-ufo s)
(fired-tank s)
(make-posn (+ (tank-loc (fired-tank s))
(tank-vel (fired-tank s)))
(- HEIGHT H-TANK-HEIGHT)))]))
;;;;;;;;;;;;;
Ex . 114 - check - with
; Any -> Boolean
is any a SIGS ?
(check-expect (SIGS? (make-aim (make-posn 10 10)
(make-tank 20 3)))
#true)
(check-expect (SIGS? (make-fired (make-posn 10 10)
(make-tank 20 2)
(make-posn 10 20)))
#true)
(check-expect (SIGS? "SIGS") #false)
(check-expect (SIGS? #true) #false)
(check-expect (SIGS? 10) #false)
(check-expect (SIGS? (make-posn 10 20)) #false)
(define (SIGS? v)
(or (aim? v) (fired? v)))
;; SIGS -> SIGS
start the world with ( main ( make - aim ( make - posn 50 00 ) ( make - tank 20 3 ) ) )
(define (main ws)
SIGS
[check-with SIGS?] ; Any -> Boolean
[on-tick si-move 0.2] ; SIGS -> SIGS
[to-draw si-render] ; SiGS -> Image
[stop-when si-game-over?] ; SIGS -> Boolean
( on - mouse ... ) ; SIGS Integer Integer MouseEvent - > SIGS
- > SIGS | null | https://raw.githubusercontent.com/emaphis/HtDP2e-solutions/ecb60b9a7bbf9b8999c0122b6ea152a3301f0a68/1-Fixed-Size-Data/06-Itemizations-Structures/ex114a.rkt | racket | about the language level of this file in a form that our tools can easily process.
6.4 Checking the World
(A Graphical Editor).
space invader simulation
Draw some sketches of what the game scenery looks like at various stages.
Use the sketches to determine the constant and the variable pieces of the
game. For the former, develop physical and graphical constants that describe
the dimensions of the world (canvas) and its objects. Also develop some
background scenery. Finally, create your initial scene from the constants
for the tank, the UFO, and the background.
physical constants:
world constants
UFO randomly jumps by this amount
graphical constants
Y pos never changes for tank
a mock-up
data definitions
A UFO is a Posn.
interpretation (make-posn x y) is the UFO's location
(using the top-down, left-to-right convention)
UFO -> ???
;template for UFO
Number
Number
A Tank is a structure:
(make-tank Number Number).
interpretation (make-tank x dx) specifies the position:
Tank -> ???
;template for Tank
Number
Number
A Missile is a Posn.
interpretation (make-posn x y) is the missile's place
Missile -> ???
;template for Missile
Number
Number
the time period when the player is trying to get the tank in position
for a shot,
Aim -> ???
;template for Aim
UFO
Tank
tates after the missile is fired. Before we can formulate a data definition
for the complete game state
Fired -> ???
;template for Fired
UFO
Tank
Missile
– (make-aim UFO Tank)
– (make-fired UFO Tank Missile)
interpretation represents the complete state of a
space invader game
an instance that describes the tank maneuvering into position to fire:
the missile:
just like the previous one but the missile has been fired:
one where the missile is about to collide with the UFO:
SIGS -> ???
;template for SIGS
an instance that describes the tank maneuvering into position to fire:
the missile:
just like the previous one but the missile has been fired:
one where the missile is about to collide with the UFO:
Functions:
SIGS -> Image
the BACKGROUND scene
(define (si-render s) BACKGROUND) ;stub
Design the functions tank-render, ufo-render, and missile-render.
Is the result of this expression
(tank-render
(fired-tank s)
(ufo-render (fired-ufo s)
(missile-render (fired-missile s)
BACKGROUND)))
the same as the result of
(ufo-render
(fired-ufo s)
(tank-render (fired-tank s)
(missile-render (fired-missile s)
BACKGROUND)))
the different implementations will only produce the same result
when the images don't overlap.
Tank Image -> Image
adds t to the given image im
(define (tank-render t im) im) ;stub
UFO Image -> Image
adds u to the given image im
(define (ufo-render u im) im) ;stub
adds m to the given image im
(define (missile-render u im) im) ;stub
Design the function si-game-over? for use as the stop-when handler.
The game stops if the UFO lands or if the missile hits the UFO.
to another.
function that renders the final state of the game. Design si-render-final
NOTE:
make-aim is relevan when UFO is landed, landed is always #true
Tank x-pos does not matter
SIGS -> Boolean
proximity of the UFO
not landed, no missle
landed, no missle
landed, no hit
not landed, no hit
not landed, hit!
(define (si-game=over? s) #false) ;stub
UFO -> Boolean
#true if the UFO has landed -- you loose
not landed
landed
(define (ufo-landed? u) #false) ;stub
UFO Missile -> Boolean
landed, no hit
not landed, no hit
not landed, hit!
(define (ufo-hit? u m) #false) ;stub
Design si-move. This function is called for every clock tick to determine
to which position the objects move now. Accordingly it consumes an element
Assumption:
'si-move-proper' will move the Missile (if exists),the UFO andd the Tank
each 'tock'
'si-move' adds a random element to UFO move
SIGS -> SIGS
SIGS -> SIGS
move the UFO objects predictably
(define (si-move-proper w r)
w)
Ufo Number -> Ufo
update a UFO with a random adustment on tock
Tank -> Tank
update tank x postion with tank velocity on tock
Missile -> Missile
Design the function si-control, which plays the role of the key event
pressing the left arrow ensures that the tank moves left;
pressing the right arrow ensures that the tank moves right; and
pressing the space bar fires the missile if it hasn’t been launched yet.
Once you have this function, you can define the si-main function, which
uses big-bang to spawn the game-playing window. Enjoy!
ignore rest
SIGS -> SIGS
turn tank right (set velocity -)
(define (tank-left s) s) ;stub
Tank -> Tank
turn Tank right (+)
SIGS -> SIGS
turn tank right (set velocity +)
(define (tank-right s) s) ;stub
Tank -> Tank
turn Tank right (+)
SIGS -> SIGS
fire a missile (convert an Aim to a Fired)
(define (fire-missile s) s) ;stub
Any -> Boolean
SIGS -> SIGS
Any -> Boolean
SIGS -> SIGS
SiGS -> Image
SIGS -> Boolean
SIGS Integer Integer MouseEvent - > SIGS | The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-beginner-reader.ss" "lang")((modname 06_04_space_invader_3) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f () #f)))
HtDP 2e - 6 Itemizations and Structures
Exercises : 114
Ex . 114 :
Use the predicates from exercise 113 to check the space invader world
program , the virtual pet program ( exercise 106 ) , and the editor program
(require 2htdp/image)
(require 2htdp/universe)
Ex . 94 :
(define HEIGHT 200)
(define TANK-SPEED 3)
(define UFO-SPEED 7)
(define MISSILE-SPEED 10)
(define UFO (overlay (rectangle 20 2 "solid" "blue")
(ellipse 15 10 "solid" "purple")))
(define UFO-HEIGHT (image-height UFO))
(define UFO-WIDTH (image-width UFO))
(define H-UFO-HEIGHT (/ UFO-HEIGHT 2))
(define H-UFO-WIDTH (/ UFO-WIDTH 2))
(define TANK (rectangle 15 10 "solid" "olive"))
(define TANK-HEIGHT (image-height TANK))
(define H-TANK-HEIGHT (/ TANK-HEIGHT 2))
(define MISSILE
(rectangle 1 15 "solid" "red"))
(define MISSILE-HEIGHT (image-height MISSILE))
(define MISSILE-WIDTH (image-width MISSILE))
(define H-MISSILE-HEIGHT (/ MISSILE-HEIGHT 2))
(define H-MISSILE-WIDTH (/ MISSILE-WIDTH 2))
(define BACKGROUND (empty-scene WIDTH HEIGHT "lightblue"))
(define MOCK-UP
(place-image MISSILE 140 (- HEIGHT 120)
(place-image TANK 100 TANK-Y
(place-image UFO 150 (- HEIGHT 160)
BACKGROUND))))
(define UFO1 (make-posn 10 60))
(define (fn-for-ufo u)
(define-struct tank [loc vel])
( x , HEIGHT ) and the tank 's speed : dx pixels / tick
(define TANK1 (make-tank 50 10))
(define (fn-for-tank t)
(define MISSILE1 (make-posn 20 80))
(define (fn-for-missile m)
(define-struct aim [ufo tank])
(define AIM0 (make-aim (make-posn 100 120)
(make-tank 50 3)))
(define (fn-for-aim a)
(define-struct fired [ufo tank missile])
(define FIRED1 (make-fired (make-posn 100 120)
(make-tank 50 3)
(make-posn 20 150)))
(define (fn-for-fired f)
A SIGS is one of :
(define AIM1
(make-aim (make-posn 20 10)
(make-tank 28 -3)))
(define FIRE1
(make-fired (make-posn 20 10)
(make-tank 28 -3)
(make-posn 28 (- HEIGHT TANK-HEIGHT))))
(define FIRE2
(make-fired (make-posn 20 100)
(make-tank 100 3)
(make-posn 22 103)))
(define (fn-for-sigs s)
(cond [(aim? s)
(... (fn-for-ufo (aim-ufo s))
(fn-for-tank (aim-tank s)))]
[(fired? s)
(... (fn-for-ufo (fired-ufo s))
(fn-for-tank (fired-tank s))
(fn-for-missile (fired-missile s)))]))
Ex . 95 :
Explain why the three instances are generated according to the
first or second clause of the data definition .
The first data definition is the only one that follows the first because it
has only one representation , the second clause has two defintions since it
two matjor states , just fired and in flight .
Ex . 96 :
Sketch how each of the three game states could be rendered assuming
a 200 by 200 canvas . See MOCK - UP above
( make - aim ( make - posn 20 10 ) ( make - tank 28 -3 ) )
(define AIM-SCENE1
(place-image TANK 28 TANK-Y
(place-image UFO 20 10
BACKGROUND)))
( make - fired ( make - posn 20 10 ) ( make - tank 28 -3 )
( make - posn 28 ( - HEIGHT TANK - HEIGHT ) ) )
(define FIRE-SCENE1
(place-image TANK 28 TANK-Y
(place-image UFO 20 10
(place-image MISSILE 28 (- HEIGHT TANK-HEIGHT 10)
BACKGROUND))))
( make - fired ( make - posn 20 100 ) ( make - tank 100 3 ) ( make - posn 22 103 ) )
TANK - > UFO - >
(define FIRE-SCENE2
(place-image TANK 100 TANK-Y
(place-image UFO 20 100
(place-image MISSILE 22 103
BACKGROUND))))
adds TANK , UFO , and possibly MISSILE to
(define (si-render s)
(cond [(aim? s)
(tank-render (aim-tank s)
(ufo-render (aim-ufo s)
BACKGROUND))]
[(fired? s)
(tank-render (fired-tank s)
(ufo-render (fired-ufo s)
(missile-render (fired-missile s)
BACKGROUND)))]))
Ex . 97 :
the two expressions produce the same result ?
(check-expect (tank-render (make-tank 28 -3) BACKGROUND)
(place-image TANK 28 TANK-Y BACKGROUND))
(define (tank-render t im)
(place-image TANK (tank-loc t) TANK-Y im))
(check-expect (ufo-render (make-posn 20 100) BACKGROUND)
(place-image UFO 20 100 BACKGROUND))
(define (ufo-render u im)
(place-image UFO (posn-x u) (posn-y u) im))
Image
(check-expect (missile-render (make-posn 28 30) BACKGROUND)
(place-image MISSILE 28 30 BACKGROUND))
(define (missile-render m im)
(place-image MISSILE (posn-x m) (posn-y m) im))
Ex . 98 :
For both conditions , we recommend that you check for proximity of one object
The stop - when clause allows for an optional second sub - expression , namely a
and use it as the second part for your stop - when clause in the main function
of exercise 100
make - fired is relevant when UFO is landed or Missile hits UFO
returns # true if the UFO has landed or if the Missile is in
(make-aim (make-posn 20 100)
(make-tank 10 3)))
#false)
(make-aim (make-posn 20 (- HEIGHT H-UFO-HEIGHT))
(make-tank 10 3)))
#true)
(make-fired (make-posn 20 (- HEIGHT H-UFO-HEIGHT))
(make-tank 10 3) (make-posn 10 10)))
#true)
(make-fired (make-posn 20 100)
(make-tank 10 3) (make-posn 10 10)))
#false)
(make-fired (make-posn 20 100)
(make-tank 10 3) (make-posn 20 100)))
#true)
(define (si-game-over? s)
(cond [(aim? s)
(ufo-landed? (aim-ufo s))]
[(fired? s)
(or
(ufo-landed? (fired-ufo s))
(ufo-hit? (fired-ufo s)
(fired-missile s)))]))
(make-posn 20 100))
#false)
(make-posn 20 (- HEIGHT H-UFO-HEIGHT)))
#true)
(define (ufo-landed? u)
(>= (posn-y u) (- HEIGHT H-UFO-HEIGHT)))
# true if the UFO was hit by the -- you win
(make-posn 20 (- HEIGHT UFO-HEIGHT))
(make-posn 10 10))
#false)
(make-posn 20 100)
(make-posn 10 10))
#false)
(make-posn 20 100)
(make-posn 20 100))
#true)
(define (ufo-hit? u m)
(and (<= (abs (- (posn-x u) (posn-x m)))
(+ H-UFO-WIDTH H-MISSILE-WIDTH))
(<= (abs (- (posn-y u) (posn-y m)))
(+ H-UFO-HEIGHT H-MISSILE-HEIGHT))))
Ex . 99 :
of SIGS and produces another one .
( Random 3 ) results :
0 - subtract JUMP from UFO - SPEED
1 - move JUMP ( - ) in x direction
2 - move JUMP ( + ) in x direction
update the SIGS with random UFO adustment state every tock
(define (si-move w)
(si-move-proper w (random 3)))
0 - subtract 1 from UFO - SPEED
1 - move -1 in x direction
2 - move 1 in x direction
(check-expect (si-move-proper (make-aim (make-posn 10 20)
(make-tank 50 0))
0)
(make-aim (make-posn 10 (+ 20 (- UFO-SPEED JUMP)))
(make-tank 50 0)))
(check-expect (si-move-proper (make-aim (make-posn 10 20)
(make-tank 50 3))
1)
(make-aim (make-posn (- 10 JUMP) (+ 20 UFO-SPEED))
(make-tank 53 3)))
(check-expect (si-move-proper (make-aim (make-posn 10 20)
(make-tank 50 3))
2)
(make-aim (make-posn (+ 10 JUMP) (+ 20 UFO-SPEED))
(make-tank 53 3)))
(check-expect (si-move-proper (make-fired (make-posn 10 20)
(make-tank 50 3)
(make-posn 20 30))
0)
(make-fired (make-posn 10 (+ 20 (- UFO-SPEED JUMP)))
(make-tank 53 3)
(make-posn 20 (- 30 MISSILE-SPEED))))
(define (si-move-proper s r)
(cond [(aim? s)
(make-aim (update-ufo (aim-ufo s) r)
(update-tank (aim-tank s)))]
[(fired? s)
(make-fired (update-ufo (fired-ufo s) r)
(update-tank (fired-tank s))
(update-missile (fired-missile s)))]))
0 - subtract JUMP from UFO - SPEED
1 - move JUMP ( - ) in x direction
2 - move JUMP ( + ) in x direction
(check-expect (update-ufo (make-posn 20 30) 0)
(make-posn 20 (+ 30 (- UFO-SPEED JUMP))))
(check-expect (update-ufo (make-posn 20 30) 1)
(make-posn (- 20 JUMP) (+ 30 UFO-SPEED)))
(check-expect (update-ufo (make-posn 20 30) 2)
(make-posn (+ 20 JUMP) (+ 30 UFO-SPEED)))
(define (update-ufo u r)
(cond [(= r 0) (make-posn (posn-x u) (+ (posn-y u) (- UFO-SPEED JUMP)))]
[(= r 1) (make-posn (- (posn-x u) JUMP) (+ (posn-y u) UFO-SPEED))]
[(= r 2) (make-posn (+ (posn-x u) JUMP) (+ (posn-y u) UFO-SPEED))]))
(check-expect (update-tank (make-tank 10 3)) (make-tank 13 3))
(define (update-tank t)
(make-tank (+ (tank-loc t) (tank-vel t)) (tank-vel t)))
update 's y position on tock
(check-expect (update-missile (make-posn 20 30))
(make-posn 20 (- 30 MISSILE-SPEED)))
(define (update-missile m)
(make-posn (posn-x m) (- (posn-y m) MISSILE-SPEED)))
Ex . 100 :
handler . As such , it consumes a game state and a KeyEvent and produces
a new game state . It reacts to three different keys :
SIG
(define (si-control sg ke)
(cond [(key=? ke "left") (tank-left sg)]
[(key=? ke "right") (tank-right sg)]
[(key=? ke " ") (fire-missile sg)]
(check-expect (tank-left (make-aim (make-posn 10 50) (make-tank 30 3)))
(make-aim (make-posn 10 50) (make-tank 30 -3)))
(check-expect (tank-left (make-aim (make-posn 10 50) (make-tank 30 -3)))
(make-aim (make-posn 10 50) (make-tank 30 -3)))
(check-expect (tank-left (make-fired (make-posn 10 50)
(make-tank 30 3)
(make-posn 30 30)))
(make-fired (make-posn 10 50)
(make-tank 30 -3)
(make-posn 30 30)))
(check-expect (tank-left (make-fired (make-posn 10 50)
(make-tank 30 -3)
(make-posn 30 30)))
(make-fired (make-posn 10 50)
(make-tank 30 -3)
(make-posn 30 30)))
(define (tank-left s)
(cond [(aim? s)
(make-aim (aim-ufo s)
(tank-turn-left (aim-tank s)))]
[(fired? s)
(make-fired (fired-ufo s)
(tank-turn-left (fired-tank s))
(fired-missile s))]))
(check-expect (tank-turn-left (make-tank 30 -3))
(make-tank 30 -3))
(check-expect (tank-turn-left (make-tank 30 3))
(make-tank 30 -3))
(define (tank-turn-left t)
(cond [(<= 0 (tank-vel t))
(make-tank (tank-loc t) (- 0 (tank-vel t)))]
[else t]))
(check-expect (tank-right (make-aim (make-posn 10 50) (make-tank 30 3)))
(make-aim (make-posn 10 50) (make-tank 30 3)))
(check-expect (tank-right (make-aim (make-posn 10 50) (make-tank 30 -3)))
(make-aim (make-posn 10 50) (make-tank 30 3)))
(check-expect (tank-right (make-fired (make-posn 10 50)
(make-tank 30 3)
(make-posn 30 30)))
(make-fired (make-posn 10 50)
(make-tank 30 3)
(make-posn 30 30)))
(check-expect (tank-right (make-fired (make-posn 10 50)
(make-tank 30 -3)
(make-posn 30 30)))
(make-fired (make-posn 10 50)
(make-tank 30 3)
(make-posn 30 30)))
(define (tank-right s)
(cond [(aim? s)
(make-aim (aim-ufo s)
(tank-turn-right (aim-tank s)))]
[(fired? s)
(make-fired (fired-ufo s)
(tank-turn-right (fired-tank s))
(fired-missile s))]))
(check-expect (tank-turn-right (make-tank 30 -3))
(make-tank 30 3))
(check-expect (tank-turn-right (make-tank 30 3))
(make-tank 30 3))
(define (tank-turn-right t)
(cond [(< (tank-vel t) 0)
(make-tank (tank-loc t) (- 0 (tank-vel t)))]
[else t]))
(check-expect (fire-missile (make-aim (make-posn 25 30)
(make-tank 20 3)))
(make-fired (make-posn 25 30)
(make-tank 20 3)
(make-posn (+ 20 3) (- HEIGHT H-TANK-HEIGHT))))
(check-expect (fire-missile (make-fired (make-posn 25 30)
(make-tank 20 3)
(make-posn 40 10)))
(make-fired (make-posn 25 30)
(make-tank 20 3)
(make-posn (+ 20 3) (- HEIGHT H-TANK-HEIGHT))))
(define (fire-missile s)
(cond [(aim? s)
(make-fired (aim-ufo s)
(aim-tank s)
(make-posn (+ (tank-loc (aim-tank s))
(tank-vel (aim-tank s)))
(- HEIGHT H-TANK-HEIGHT)))]
[(fired? s)
(make-fired (fired-ufo s)
(fired-tank s)
(make-posn (+ (tank-loc (fired-tank s))
(tank-vel (fired-tank s)))
(- HEIGHT H-TANK-HEIGHT)))]))
Ex . 114 - check - with
is any a SIGS ?
(check-expect (SIGS? (make-aim (make-posn 10 10)
(make-tank 20 3)))
#true)
(check-expect (SIGS? (make-fired (make-posn 10 10)
(make-tank 20 2)
(make-posn 10 20)))
#true)
(check-expect (SIGS? "SIGS") #false)
(check-expect (SIGS? #true) #false)
(check-expect (SIGS? 10) #false)
(check-expect (SIGS? (make-posn 10 20)) #false)
(define (SIGS? v)
(or (aim? v) (fired? v)))
start the world with ( main ( make - aim ( make - posn 50 00 ) ( make - tank 20 3 ) ) )
(define (main ws)
SIGS
- > SIGS |
2c3c93c0285217194bd310397a6046eb32408b925f04e6eedaa49650e773b120 | davidlazar/ocaml-semantics | literal-string.ml | "Hello \"World\"!"
| null | https://raw.githubusercontent.com/davidlazar/ocaml-semantics/6f302c6b9cced0407d501d70ad25c2d2aefbb77d/tests/unit/literal-string.ml | ocaml | "Hello \"World\"!"
| |
963553f447adcb46b2a52973f14bb84902561e9375a52397b6f675c123b73368 | haskell-hvr/paths | Lens.hs | {-# LANGUAGE RankNTypes #-}
{-# LANGUAGE Safe #-}
# LANGUAGE ScopedTypeVariables #
| Lenses in the style of [ System . FilePath . Lens]( / package / lens / docs / System - FilePath - Lens.html ) .
--
@since 0.2.0.0
module System.Path.Lens
( -- * Operators
(</>~)
, (<.>~)
-- * Lenses
, basename
, directory
, extension
, filename
) where
import Data.Functor as Fun
import Data.Functor.Identity
import System.Path
----------------------------------------------------------------------------
infixr 4 </>~
(</>~) :: ASetter s t (Path a) (Path a) -> (Path Unrooted) -> s -> t
l </>~ n = overSafe l (</> n)
infixr 4 <.>~
(<.>~) :: ASetter s t (Path a) (Path a) -> FileExt -> s -> t
l <.>~ n = overSafe l (<.> n)
----------------------------------------------------------------------------
basename :: Lens' (Path a) (Path Unrooted)
basename f p = (<.?> takeExtension p) . (takeDirectory p </>) Fun.<$> f (takeBaseName p)
-- local helper
(<.?>) :: Path a -> Maybe FileExt -> Path a
fp <.?> Nothing = fp
fp <.?> Just fe = fp <.> fe
directory :: Lens' (Path a) (Path a)
directory f p = (</> takeFileName p) <$> f (takeDirectory p)
extension :: Lens' (Path a) (Maybe FileExt)
extension f p = (n <.?>) <$> f e
where
(n, e) = splitExtension p
filename :: Lens' (Path a) (Path Unrooted)
filename f p = (takeDirectory p </>) <$> f (takeFileName p)
----------------------------------------------------------------------------
-- internal lens-api definitions
type ASetter s t a b = (a -> Identity b) -> s -> Identity t
type Lens s t a b = forall f. Functor f => (a -> f b) -> s -> f t
type Lens' s a = Lens s s a a
# INLINE overSafe #
overSafe :: ASetter s t a b -> (a -> b) -> s -> t
overSafe l f = runIdentity `g` (l (Identity `h` f))
where
h _ = (Identity .)
g _ = (runIdentity .)
unsafe / efficient variant of ' overSafe '
over : : ASetter s t a b - > ( a - > b ) - > s - > t
over l f = runIdentity # . l ( Identity # . f )
infixr 9 # .
( # . ) : : Coercible c b = > ( b - > c ) - > ( a - > b ) - > ( a - > c )
( # . ) _ = coerce ( \x - > x : : b ) : : forall a b. Coercible b a = > a - > b
over :: ASetter s t a b -> (a -> b) -> s -> t
over l f = runIdentity #. l (Identity #. f)
infixr 9 #.
(#.) :: Coercible c b => (b -> c) -> (a -> b) -> (a -> c)
(#.) _ = coerce (\x -> x :: b) :: forall a b. Coercible b a => a -> b
-}
| null | https://raw.githubusercontent.com/haskell-hvr/paths/2d29fd546ae122b4c027c75fa85313244c2e1f77/src/System/Path/Lens.hs | haskell | # LANGUAGE RankNTypes #
# LANGUAGE Safe #
* Operators
* Lenses
--------------------------------------------------------------------------
--------------------------------------------------------------------------
local helper
--------------------------------------------------------------------------
internal lens-api definitions | # LANGUAGE ScopedTypeVariables #
| Lenses in the style of [ System . FilePath . Lens]( / package / lens / docs / System - FilePath - Lens.html ) .
@since 0.2.0.0
module System.Path.Lens
(</>~)
, (<.>~)
, basename
, directory
, extension
, filename
) where
import Data.Functor as Fun
import Data.Functor.Identity
import System.Path
infixr 4 </>~
(</>~) :: ASetter s t (Path a) (Path a) -> (Path Unrooted) -> s -> t
l </>~ n = overSafe l (</> n)
infixr 4 <.>~
(<.>~) :: ASetter s t (Path a) (Path a) -> FileExt -> s -> t
l <.>~ n = overSafe l (<.> n)
basename :: Lens' (Path a) (Path Unrooted)
basename f p = (<.?> takeExtension p) . (takeDirectory p </>) Fun.<$> f (takeBaseName p)
(<.?>) :: Path a -> Maybe FileExt -> Path a
fp <.?> Nothing = fp
fp <.?> Just fe = fp <.> fe
directory :: Lens' (Path a) (Path a)
directory f p = (</> takeFileName p) <$> f (takeDirectory p)
extension :: Lens' (Path a) (Maybe FileExt)
extension f p = (n <.?>) <$> f e
where
(n, e) = splitExtension p
filename :: Lens' (Path a) (Path Unrooted)
filename f p = (takeDirectory p </>) <$> f (takeFileName p)
type ASetter s t a b = (a -> Identity b) -> s -> Identity t
type Lens s t a b = forall f. Functor f => (a -> f b) -> s -> f t
type Lens' s a = Lens s s a a
# INLINE overSafe #
overSafe :: ASetter s t a b -> (a -> b) -> s -> t
overSafe l f = runIdentity `g` (l (Identity `h` f))
where
h _ = (Identity .)
g _ = (runIdentity .)
unsafe / efficient variant of ' overSafe '
over : : ASetter s t a b - > ( a - > b ) - > s - > t
over l f = runIdentity # . l ( Identity # . f )
infixr 9 # .
( # . ) : : Coercible c b = > ( b - > c ) - > ( a - > b ) - > ( a - > c )
( # . ) _ = coerce ( \x - > x : : b ) : : forall a b. Coercible b a = > a - > b
over :: ASetter s t a b -> (a -> b) -> s -> t
over l f = runIdentity #. l (Identity #. f)
infixr 9 #.
(#.) :: Coercible c b => (b -> c) -> (a -> b) -> (a -> c)
(#.) _ = coerce (\x -> x :: b) :: forall a b. Coercible b a => a -> b
-}
|
c7bb4be373091bcbbd49fc5b1f0d7f85aa0e5c54f269815541a25c50eb7a4a1a | privet-kitty/cl-competitive | offline-dynamic-connectivity.lisp | ;;;
;;; Offline dynamic connectivity
;;;
(defpackage :cp/offline-dynamic-connectivity
(:use :cl :cp/undoable-disjoint-set)
(:export #:dynamic-connectivity #:make-dynamic-connectivity
#:dycon-insert #:dycon-delete #:dycon-build #:dycon-map
#:dycon-num-components #:dycon-disjoint-set))
(in-package :cp/offline-dynamic-connectivity)
;; NOTE: not tested
NOTE : MAX - TIME must be positive .
(define-undoable-disjoint-set undoable-disjoint-set
:conc-name uds-)
(defstruct (dynamic-connectivity
(:constructor make-dynamic-connectivity
(size max-time
&aux (segtree (make-array (- (* 2 max-time) 1) :element-type 'list :initial-element nil))
(counter (make-hash-table :test #'equal))
(appearance (make-hash-table :test #'equal))
(events (make-array 0 :element-type 'list :fill-pointer 0))
(disjoint-set (make-undoable-disjoint-set size :buffer-size max-time))
(num-components size)))
(:copier nil)
(:predicate nil)
(:conc-name dycon-))
(size 0 :type (integer 0 #.most-positive-fixnum))
(max-time 0 :type (integer 1 #.most-positive-fixnum))
(last-time 0 :type (integer 0 #.most-positive-fixnum))
(segtree nil :type (simple-array list (*)))
(counter nil :type hash-table)
(appearance nil :type hash-table)
( appear - time disappear - time vertex1 . )
(events nil :type (array list (*)))
;; disjoint set that holds connectivity of graph
(disjoint-set nil :type undoable-disjoint-set)
;; number of connected components
(num-components 0 :type (integer 0 #.most-positive-fixnum)))
(defun dycon-insert (dycon u v time)
"Inserts an edge {u, v} at TIME."
(declare (optimize (speed 3))
((integer 0 #.most-positive-fixnum) u v time))
(symbol-macrolet ((last-time (dycon-last-time dycon))
(counter (dycon-counter dycon))
(appearance (dycon-appearance dycon)))
(assert (>= time last-time))
(setf last-time time)
(when (> u v) (rotatef u v))
(let* ((edge (cons u v))
(count (gethash edge counter)))
(declare ((or null (integer 0 #.most-positive-fixnum)) count))
(if count
(setf (gethash edge counter) (+ count 1))
(setf (gethash edge appearance) time
(gethash edge counter) 1)))))
(defun dycon-delete (dycon u v time)
"Deletes an edge {u, v} at TIME."
(declare (optimize (speed 3))
((integer 0 #.most-positive-fixnum) u v time))
(symbol-macrolet ((last-time (dycon-last-time dycon))
(counter (dycon-counter dycon))
(appearance (dycon-appearance dycon))
(events (dycon-events dycon)))
(assert (>= time last-time))
(setf last-time time)
(when (> u v) (rotatef u v))
(let* ((edge (cons u v))
(count (gethash edge counter)))
(declare ((or null (integer 0 #.most-positive-fixnum)) count))
(assert count () "Attempted to delete non-existent edge (~W . ~W) at time ~W"
u v time)
(if (= count 1)
(let ((appear-time (gethash edge appearance)))
(remhash edge counter)
(unless (eql appear-time time)
(vector-push-extend (list* appear-time time edge) events)))
(setf (gethash edge counter) (- count 1))))))
(defun %dycon-update (dycon edge l r)
(declare (optimize (speed 3))
((integer 0 #.most-positive-fixnum) l r))
(let ((segtree (dycon-segtree dycon))
(max-time (dycon-max-time dycon)))
(incf l (- max-time 1))
(incf r (- max-time 1))
(loop while (< l r)
when (evenp l)
do (push edge (aref segtree l))
(incf l)
when (evenp r)
do (decf r)
(push edge (aref segtree r))
do (setq l (ash (- l 1) -1)
r (ash (- r 1) -1)))))
(defun dycon-build (dycon)
(declare (optimize (speed 3)))
(let ((counter (dycon-counter dycon))
(events (dycon-events dycon))
(appearance (dycon-appearance dycon))
(max-time (dycon-max-time dycon)))
(maphash (lambda (edge count)
(declare (ignore count))
(let ((appear-time (gethash edge appearance)))
(vector-push-extend (list* appear-time max-time edge) events)))
counter)
(loop for (appear-time disappear-time u . v) across events
do (%dycon-update dycon (cons u v) appear-time disappear-time))
dycon))
TODO : safeguard against calling DYCON - MAP before DYCON - BUILD
(defun dycon-map (dycon function)
"FUCTION takes time as an argument: When FUNCTION is called, NUM-COMPONENTS
and DISJOINT-SET take a state immediately after the time. Be sure to call
DYCON-BUILD beforehand."
(declare (optimize (speed 3))
(function function))
(symbol-macrolet ((comp (dycon-num-components dycon)))
(let* ((disjoint-set (dycon-disjoint-set dycon))
(segtree (dycon-segtree dycon))
(max-time (dycon-max-time dycon)))
(labels ((recur (i)
(declare ((integer 0 #.most-positive-fixnum) i))
(let ((comp-delta 0))
(declare ((integer 0 #.most-positive-fixnum) comp-delta))
(loop for (u . v) in (aref segtree i)
when (uds-unite! disjoint-set u v)
do (incf comp-delta))
(decf comp comp-delta)
(if (< i (- max-time 1))
(progn
(recur (+ 1 (* 2 i)))
(recur (+ 2 (* 2 i))))
(funcall function (- i (- max-time 1))))
(incf comp comp-delta)
(loop for edge in (aref segtree i)
do (uds-undo! disjoint-set)))))
(recur 0)))))
| null | https://raw.githubusercontent.com/privet-kitty/cl-competitive/4d1c601ff42b10773a5d0c5989b1234da5bb98b6/module/offline-dynamic-connectivity.lisp | lisp |
Offline dynamic connectivity
NOTE: not tested
disjoint set that holds connectivity of graph
number of connected components |
(defpackage :cp/offline-dynamic-connectivity
(:use :cl :cp/undoable-disjoint-set)
(:export #:dynamic-connectivity #:make-dynamic-connectivity
#:dycon-insert #:dycon-delete #:dycon-build #:dycon-map
#:dycon-num-components #:dycon-disjoint-set))
(in-package :cp/offline-dynamic-connectivity)
NOTE : MAX - TIME must be positive .
(define-undoable-disjoint-set undoable-disjoint-set
:conc-name uds-)
(defstruct (dynamic-connectivity
(:constructor make-dynamic-connectivity
(size max-time
&aux (segtree (make-array (- (* 2 max-time) 1) :element-type 'list :initial-element nil))
(counter (make-hash-table :test #'equal))
(appearance (make-hash-table :test #'equal))
(events (make-array 0 :element-type 'list :fill-pointer 0))
(disjoint-set (make-undoable-disjoint-set size :buffer-size max-time))
(num-components size)))
(:copier nil)
(:predicate nil)
(:conc-name dycon-))
(size 0 :type (integer 0 #.most-positive-fixnum))
(max-time 0 :type (integer 1 #.most-positive-fixnum))
(last-time 0 :type (integer 0 #.most-positive-fixnum))
(segtree nil :type (simple-array list (*)))
(counter nil :type hash-table)
(appearance nil :type hash-table)
( appear - time disappear - time vertex1 . )
(events nil :type (array list (*)))
(disjoint-set nil :type undoable-disjoint-set)
(num-components 0 :type (integer 0 #.most-positive-fixnum)))
(defun dycon-insert (dycon u v time)
"Inserts an edge {u, v} at TIME."
(declare (optimize (speed 3))
((integer 0 #.most-positive-fixnum) u v time))
(symbol-macrolet ((last-time (dycon-last-time dycon))
(counter (dycon-counter dycon))
(appearance (dycon-appearance dycon)))
(assert (>= time last-time))
(setf last-time time)
(when (> u v) (rotatef u v))
(let* ((edge (cons u v))
(count (gethash edge counter)))
(declare ((or null (integer 0 #.most-positive-fixnum)) count))
(if count
(setf (gethash edge counter) (+ count 1))
(setf (gethash edge appearance) time
(gethash edge counter) 1)))))
(defun dycon-delete (dycon u v time)
"Deletes an edge {u, v} at TIME."
(declare (optimize (speed 3))
((integer 0 #.most-positive-fixnum) u v time))
(symbol-macrolet ((last-time (dycon-last-time dycon))
(counter (dycon-counter dycon))
(appearance (dycon-appearance dycon))
(events (dycon-events dycon)))
(assert (>= time last-time))
(setf last-time time)
(when (> u v) (rotatef u v))
(let* ((edge (cons u v))
(count (gethash edge counter)))
(declare ((or null (integer 0 #.most-positive-fixnum)) count))
(assert count () "Attempted to delete non-existent edge (~W . ~W) at time ~W"
u v time)
(if (= count 1)
(let ((appear-time (gethash edge appearance)))
(remhash edge counter)
(unless (eql appear-time time)
(vector-push-extend (list* appear-time time edge) events)))
(setf (gethash edge counter) (- count 1))))))
(defun %dycon-update (dycon edge l r)
(declare (optimize (speed 3))
((integer 0 #.most-positive-fixnum) l r))
(let ((segtree (dycon-segtree dycon))
(max-time (dycon-max-time dycon)))
(incf l (- max-time 1))
(incf r (- max-time 1))
(loop while (< l r)
when (evenp l)
do (push edge (aref segtree l))
(incf l)
when (evenp r)
do (decf r)
(push edge (aref segtree r))
do (setq l (ash (- l 1) -1)
r (ash (- r 1) -1)))))
(defun dycon-build (dycon)
(declare (optimize (speed 3)))
(let ((counter (dycon-counter dycon))
(events (dycon-events dycon))
(appearance (dycon-appearance dycon))
(max-time (dycon-max-time dycon)))
(maphash (lambda (edge count)
(declare (ignore count))
(let ((appear-time (gethash edge appearance)))
(vector-push-extend (list* appear-time max-time edge) events)))
counter)
(loop for (appear-time disappear-time u . v) across events
do (%dycon-update dycon (cons u v) appear-time disappear-time))
dycon))
TODO : safeguard against calling DYCON - MAP before DYCON - BUILD
(defun dycon-map (dycon function)
"FUCTION takes time as an argument: When FUNCTION is called, NUM-COMPONENTS
and DISJOINT-SET take a state immediately after the time. Be sure to call
DYCON-BUILD beforehand."
(declare (optimize (speed 3))
(function function))
(symbol-macrolet ((comp (dycon-num-components dycon)))
(let* ((disjoint-set (dycon-disjoint-set dycon))
(segtree (dycon-segtree dycon))
(max-time (dycon-max-time dycon)))
(labels ((recur (i)
(declare ((integer 0 #.most-positive-fixnum) i))
(let ((comp-delta 0))
(declare ((integer 0 #.most-positive-fixnum) comp-delta))
(loop for (u . v) in (aref segtree i)
when (uds-unite! disjoint-set u v)
do (incf comp-delta))
(decf comp comp-delta)
(if (< i (- max-time 1))
(progn
(recur (+ 1 (* 2 i)))
(recur (+ 2 (* 2 i))))
(funcall function (- i (- max-time 1))))
(incf comp comp-delta)
(loop for edge in (aref segtree i)
do (uds-undo! disjoint-set)))))
(recur 0)))))
|
211b78381aea0d498fc7e64b7fc9fc5ecc2095d65b2ef2595a6a92c5c20cc49c | phronmophobic/snowball | snowball.clj | (ns com.phronemophobic.snowball
(:require [clojure.tools.build.api :as b]
clojure.tools.deps.alpha
[treemap-clj.core :as treemap]
[clojure.zip :as z]
[membrane.ui :as ui]
[clojure.data.json :as json]
;; (require 'membrane.java2d)
[treemap-clj.rtree :as rtree]
[membrane.component :as component])
(:import com.github.davidmoten.rtree.geometry.Geometries
com.github.davidmoten.rtree.RTree)
(:gen-class))
(defn human-readable [size]
(or (some (fn [[num suffix]]
(when (>= size num)
(let [coefficient (double (/ size num))
num-str (if (< coefficient 10)
(format "%.1f" coefficient)
(-> coefficient (Math/round) int))]
(str num-str suffix))))
[[1e12 "T"]
[1e9 "G"]
[1e6 "M"]
[1e3 "k"]])
(str size)))
(defn overlaps? [rt [x y w h]]
(-> (.search ^RTree rt (Geometries/rectangle
(double x) (double y)
(double (+ x w)) (double (+ y h))))
(.toBlocking)
(.toIterable)
seq))
(defn rects->absolute [tm]
(let [zip (treemap/treezip tm)]
(loop [zip zip]
(if (z/end? zip)
(z/root zip)
(recur (-> zip
(z/edit (fn [rect]
(let [x (if (z/end? zip)
0
(loop [x 0
zip zip]
(if-not zip
x
(recur (+ x (:x (z/node zip)))
(z/up zip)))))
y (if (z/end? zip)
0
(loop [y 0
zip zip]
(if-not zip
y
(recur (+ y (:y (z/node zip)))
(z/up zip)))))]
(assoc rect
:ax x
:ay y))))
(z/next)))))))
(defn render-labels [tm]
(let [tm (rects->absolute tm)
rects (reverse (sort-by #(-> % :obj :size) (tree-seq :children :children tm)))
[rtree labels]
(reduce
(fn [[rt labels] rect]
(let [label (ui/label
(clojure.string/join "\n"
[(human-readable (-> rect :obj (:size 0)))
(-> rect :obj :name name)])
(ui/font "monospace" 12))
[w h] (ui/bounds label)
x (:ax rect)
y (:ay rect)]
(if (overlaps? rt [x y w h])
[rt labels]
[(rtree/add! rt {:x x
:y y
:w w
:h h})
(conj labels
(ui/with-color [1 1 1 0.2]
[(ui/translate (inc x) (inc y)
label)
(ui/translate (inc x) (dec y)
label)
(ui/translate (dec x) (dec y)
label)
(ui/translate (dec x) (inc y)
label)])
(ui/translate x y
label)
)])))
[(rtree/rtree)
[]]
rects)]
labels))
(defn coord-size [lib-tree {:keys [children paths]
:as coord}]
(transduce (comp (map clojure.java.io/file)
(map #(.length %)))
+
0
paths))
(defn transitive-coord-size [lib-tree {:keys [children paths]
:as coord}]
(let [transitive-coords (tree-seq :children
(fn [coord]
(map #(get lib-tree %) (:children coord)))
coord)]
(transduce (map :size)
+
0
transitive-coords)))
(defn top-level-coord? [coord]
(-> coord :dependents nil?))
(defn top-level-deps [lib-tree]
(->> lib-tree
(keep (fn [[lib coord]]
;; based on clojure.tools.deps.alpha/print-tree
implementation for finding root
(when (top-level-coord? coord)
lib)))))
(defn root-coord [lib-tree]
(let [coord {:children (top-level-deps lib-tree)
:name 'root/root
do n't try co calculate root since
;; they're most likely unzipped
:size 0
:paths []}]
(assoc coord
:transitive-size (transitive-coord-size lib-tree coord))))
(defn basis->size-tree [basis]
;; if clojure.tools.deps.alpha/make-tree breaks,
;; check clojure.tools.deps.alpha/print-tree
(let [tree (#'clojure.tools.deps.alpha/make-tree (:libs basis))
tree+names+sizes (into
{}
(map (fn [[k v]]
(let [size (coord-size tree v)]
[k (assoc v
:name k
:top-level (top-level-coord? v)
:size-readable (human-readable size)
:size size)])))
tree)
tree+names+sizes+transitive-sizes
(into
{}
(map (fn [[k v]]
(let [transitive-size (transitive-coord-size tree+names+sizes v)]
[k (assoc v
:transitive-size transitive-size
:transitive-size-readable (human-readable transitive-size))])))
tree+names+sizes)]
tree+names+sizes+transitive-sizes))
(defn render-depth
"Draw filled rectangles of leaf rects
with colors corresponding to the depth."
([rect]
(render-depth rect 0.2))
([rect opacity]
(let [mdepth (treemap/max-depth rect)
color-gradient (requiring-resolve 'treemap-clj.view/color-gradient)]
(loop [to-visit (seq [[0 0 0 rect]])
view []]
(if to-visit
(let [[depth ox oy rect] (first to-visit)
to-visit (if-let [children (:children rect)]
(let [ox (+ ox (:x rect))
oy (+ oy (:y rect))]
(into (next to-visit)
(map #(vector (inc depth) ox oy %) children)))
(next to-visit))]
(recur to-visit
(conj view
(let [children? (:children rect)
opacity (if children?
( max 0.2 ( - 0.8 ( * depth 0.2 ) ) )
0.2
opacity)
style (if children?
:membrane.ui/style-stroke
:membrane.ui/style-fill)]
(ui/with-style style
(ui/with-stroke-width 2
(ui/translate (+ (:x rect) ox) (+ (:y rect) oy)
[(ui/with-color (conj (if children?
[0 0 0]
(color-gradient (/ depth mdepth))
)
opacity)
(ui/rectangle (max 1 (dec (:w rect)))
(max 1 (dec (:h rect))))
)]))))))
)
view)))))
(defn basis->treemap [basis]
(let [lib-tree (basis->size-tree basis)
tm (treemap/treemap (root-coord lib-tree)
(treemap/make-rect 600 600)
(merge
treemap/treemap-options-defaults
{:size (fn [coord]
(max 1 (:transitive-size coord) ))
:keypath-fn
(fn [coord]
(cons
'(find self)
(map #(list 'find %)
(:children coord))))
:branch? #(-> % :children seq)
:children (fn [coord]
(cons
{:name (symbol
(namespace (:name coord))
(str "self:"
(name (:name coord))))
:size (:size coord)
:transitive-size (:size coord)}
(->> coord
:children
(map #(get lib-tree %)))))}))]
tm))
(defn render-treemap [tm]
(let [rendered [(render-depth tm)
(render-labels tm)]]
((requiring-resolve 'treemap-clj.view/wrap-treemap-events) tm rendered)))
(defn size-treemap [basis]
((requiring-resolve 'membrane.java2d/run-sync)
(component/make-app (requiring-resolve 'treemap-clj.view/treemap-explore) {:tm-render (-> (basis->treemap basis)
(render-treemap))})
{:window-start-width 1350
:window-start-height 700
:window-title "Snowball"}))
(defn treemap-image [basis fname]
((requiring-resolve 'membrane.java2d/save-to-image!)
fname
(-> (basis->treemap basis)
(render-treemap)))
(println "Saved to " fname "."))
(defn treemap-edn [basis]
(binding [*print-length* false]
(prn (clojure.walk/prewalk
(fn [obj]
(if (record? obj)
(into {} obj)
obj))
(basis->treemap basis)))))
(defn treemap-json [basis]
(json/write (basis->treemap basis) *out*))
(defn opts->basis [{version :mvn/version
sha :git/sha
url :git/url
root :local/root
deps :deps
lib :lib}]
(when (not deps)
(assert lib "Lib coordinate is required"))
(when sha
(assert url ":git/sha provided, but :git/url not provided"))
(when url
(assert sha ":git/url provided, but :git/sha not provided"))
(let [deps (cond
version {:deps {lib {:mvn/version (name version)}}}
sha {:deps {lib {:git/sha (name sha)
:git/url (name url)}}}
root {:deps {lib {:local/root (name root)}}}
deps (name deps)
:else {:deps {lib {:mvn/version "RELEASE"}}})]
(b/create-basis {:project deps})))
(defn print-sizes [basis]
(let [lib-tree (basis->size-tree basis)
top-libs (->> lib-tree ;; (select-keys lib-tree (top-level-deps lib-tree))
vals
(sort-by :size)
reverse)
max-namespace-width (->> top-libs
(map :name)
(map namespace)
(map count)
(apply max))
max-name-width (->> top-libs
(map :name)
(map name)
(map count)
(apply max))
header (clojure.string/join " | " [(format (str "%" max-namespace-width "s") "namespace")
(format (str "%" max-name-width "s") "name")
(format (str "%13s") "transitive-size")
(format (str "%13s") "self-size")])
]
(println header)
(println (clojure.string/join (repeat (count header) "-" )))
(doseq [{:keys [transitive-size size name]} top-libs]
(println (clojure.string/join " | " [(format (str "%" max-namespace-width "s") (namespace name))
(format (str "%" max-name-width "s") (clojure.core/name name))
(format (str "%,15d") transitive-size)
(format (str "%,13d") size)])))))
(defn print-csv [basis]
(let [lib-tree (basis->size-tree basis)
top-libs (->> lib-tree
vals
(sort-by :size)
reverse)
columns [
["namespace" #(-> % :name namespace)]
["name" #(-> % :name name)]
["transitive-size" :transitive-size]
["transitive-size-readable" :transitive-size-readable]
["self-size" :size]
["self-size-readable" :size-readable]]
]
(println (clojure.string/join "," (map first columns)))
(doseq [lib top-libs]
(println (clojure.string/join "," (map (fn [[_column-name f]]
(str (f lib)))
columns))))))
(defn print-edn [basis]
(binding [*print-length* false]
(prn (basis->size-tree basis))))
(defn print-json [basis]
(json/write (basis->size-tree basis) *out*))
(defn print-usage []
(print (slurp ((requiring-resolve 'clojure.java.io/resource) "usage.txt"))))
(defn -main [{:keys [view path lib deps]
:or {view :treemap
path "snowball.png"}
:as m}]
(if (or lib deps)
(case (name view)
"treemap" (size-treemap (opts->basis m))
"treemap-image" (treemap-image (opts->basis m) (str path))
"print" (print-sizes (opts->basis m))
"csv" (print-csv (opts->basis m))
"edn" (print-edn (opts->basis m))
"json" (print-json (opts->basis m))
"treemap-edn" (treemap-edn (opts->basis m))
"treemap-json" (treemap-json (opts->basis m))
;; else
(print-usage))
(print-usage))
)
| null | https://raw.githubusercontent.com/phronmophobic/snowball/a4b70762e029913e6e57362dba6bad5c30ffd0d6/src/com/phronemophobic/snowball.clj | clojure | (require 'membrane.java2d)
based on clojure.tools.deps.alpha/print-tree
they're most likely unzipped
if clojure.tools.deps.alpha/make-tree breaks,
check clojure.tools.deps.alpha/print-tree
(select-keys lib-tree (top-level-deps lib-tree))
else | (ns com.phronemophobic.snowball
(:require [clojure.tools.build.api :as b]
clojure.tools.deps.alpha
[treemap-clj.core :as treemap]
[clojure.zip :as z]
[membrane.ui :as ui]
[clojure.data.json :as json]
[treemap-clj.rtree :as rtree]
[membrane.component :as component])
(:import com.github.davidmoten.rtree.geometry.Geometries
com.github.davidmoten.rtree.RTree)
(:gen-class))
(defn human-readable [size]
(or (some (fn [[num suffix]]
(when (>= size num)
(let [coefficient (double (/ size num))
num-str (if (< coefficient 10)
(format "%.1f" coefficient)
(-> coefficient (Math/round) int))]
(str num-str suffix))))
[[1e12 "T"]
[1e9 "G"]
[1e6 "M"]
[1e3 "k"]])
(str size)))
(defn overlaps? [rt [x y w h]]
(-> (.search ^RTree rt (Geometries/rectangle
(double x) (double y)
(double (+ x w)) (double (+ y h))))
(.toBlocking)
(.toIterable)
seq))
(defn rects->absolute [tm]
(let [zip (treemap/treezip tm)]
(loop [zip zip]
(if (z/end? zip)
(z/root zip)
(recur (-> zip
(z/edit (fn [rect]
(let [x (if (z/end? zip)
0
(loop [x 0
zip zip]
(if-not zip
x
(recur (+ x (:x (z/node zip)))
(z/up zip)))))
y (if (z/end? zip)
0
(loop [y 0
zip zip]
(if-not zip
y
(recur (+ y (:y (z/node zip)))
(z/up zip)))))]
(assoc rect
:ax x
:ay y))))
(z/next)))))))
(defn render-labels [tm]
(let [tm (rects->absolute tm)
rects (reverse (sort-by #(-> % :obj :size) (tree-seq :children :children tm)))
[rtree labels]
(reduce
(fn [[rt labels] rect]
(let [label (ui/label
(clojure.string/join "\n"
[(human-readable (-> rect :obj (:size 0)))
(-> rect :obj :name name)])
(ui/font "monospace" 12))
[w h] (ui/bounds label)
x (:ax rect)
y (:ay rect)]
(if (overlaps? rt [x y w h])
[rt labels]
[(rtree/add! rt {:x x
:y y
:w w
:h h})
(conj labels
(ui/with-color [1 1 1 0.2]
[(ui/translate (inc x) (inc y)
label)
(ui/translate (inc x) (dec y)
label)
(ui/translate (dec x) (dec y)
label)
(ui/translate (dec x) (inc y)
label)])
(ui/translate x y
label)
)])))
[(rtree/rtree)
[]]
rects)]
labels))
(defn coord-size [lib-tree {:keys [children paths]
:as coord}]
(transduce (comp (map clojure.java.io/file)
(map #(.length %)))
+
0
paths))
(defn transitive-coord-size [lib-tree {:keys [children paths]
:as coord}]
(let [transitive-coords (tree-seq :children
(fn [coord]
(map #(get lib-tree %) (:children coord)))
coord)]
(transduce (map :size)
+
0
transitive-coords)))
(defn top-level-coord? [coord]
(-> coord :dependents nil?))
(defn top-level-deps [lib-tree]
(->> lib-tree
(keep (fn [[lib coord]]
implementation for finding root
(when (top-level-coord? coord)
lib)))))
(defn root-coord [lib-tree]
(let [coord {:children (top-level-deps lib-tree)
:name 'root/root
do n't try co calculate root since
:size 0
:paths []}]
(assoc coord
:transitive-size (transitive-coord-size lib-tree coord))))
(defn basis->size-tree [basis]
(let [tree (#'clojure.tools.deps.alpha/make-tree (:libs basis))
tree+names+sizes (into
{}
(map (fn [[k v]]
(let [size (coord-size tree v)]
[k (assoc v
:name k
:top-level (top-level-coord? v)
:size-readable (human-readable size)
:size size)])))
tree)
tree+names+sizes+transitive-sizes
(into
{}
(map (fn [[k v]]
(let [transitive-size (transitive-coord-size tree+names+sizes v)]
[k (assoc v
:transitive-size transitive-size
:transitive-size-readable (human-readable transitive-size))])))
tree+names+sizes)]
tree+names+sizes+transitive-sizes))
(defn render-depth
"Draw filled rectangles of leaf rects
with colors corresponding to the depth."
([rect]
(render-depth rect 0.2))
([rect opacity]
(let [mdepth (treemap/max-depth rect)
color-gradient (requiring-resolve 'treemap-clj.view/color-gradient)]
(loop [to-visit (seq [[0 0 0 rect]])
view []]
(if to-visit
(let [[depth ox oy rect] (first to-visit)
to-visit (if-let [children (:children rect)]
(let [ox (+ ox (:x rect))
oy (+ oy (:y rect))]
(into (next to-visit)
(map #(vector (inc depth) ox oy %) children)))
(next to-visit))]
(recur to-visit
(conj view
(let [children? (:children rect)
opacity (if children?
( max 0.2 ( - 0.8 ( * depth 0.2 ) ) )
0.2
opacity)
style (if children?
:membrane.ui/style-stroke
:membrane.ui/style-fill)]
(ui/with-style style
(ui/with-stroke-width 2
(ui/translate (+ (:x rect) ox) (+ (:y rect) oy)
[(ui/with-color (conj (if children?
[0 0 0]
(color-gradient (/ depth mdepth))
)
opacity)
(ui/rectangle (max 1 (dec (:w rect)))
(max 1 (dec (:h rect))))
)]))))))
)
view)))))
(defn basis->treemap [basis]
(let [lib-tree (basis->size-tree basis)
tm (treemap/treemap (root-coord lib-tree)
(treemap/make-rect 600 600)
(merge
treemap/treemap-options-defaults
{:size (fn [coord]
(max 1 (:transitive-size coord) ))
:keypath-fn
(fn [coord]
(cons
'(find self)
(map #(list 'find %)
(:children coord))))
:branch? #(-> % :children seq)
:children (fn [coord]
(cons
{:name (symbol
(namespace (:name coord))
(str "self:"
(name (:name coord))))
:size (:size coord)
:transitive-size (:size coord)}
(->> coord
:children
(map #(get lib-tree %)))))}))]
tm))
(defn render-treemap [tm]
(let [rendered [(render-depth tm)
(render-labels tm)]]
((requiring-resolve 'treemap-clj.view/wrap-treemap-events) tm rendered)))
(defn size-treemap [basis]
((requiring-resolve 'membrane.java2d/run-sync)
(component/make-app (requiring-resolve 'treemap-clj.view/treemap-explore) {:tm-render (-> (basis->treemap basis)
(render-treemap))})
{:window-start-width 1350
:window-start-height 700
:window-title "Snowball"}))
(defn treemap-image [basis fname]
((requiring-resolve 'membrane.java2d/save-to-image!)
fname
(-> (basis->treemap basis)
(render-treemap)))
(println "Saved to " fname "."))
(defn treemap-edn [basis]
(binding [*print-length* false]
(prn (clojure.walk/prewalk
(fn [obj]
(if (record? obj)
(into {} obj)
obj))
(basis->treemap basis)))))
(defn treemap-json [basis]
(json/write (basis->treemap basis) *out*))
(defn opts->basis [{version :mvn/version
sha :git/sha
url :git/url
root :local/root
deps :deps
lib :lib}]
(when (not deps)
(assert lib "Lib coordinate is required"))
(when sha
(assert url ":git/sha provided, but :git/url not provided"))
(when url
(assert sha ":git/url provided, but :git/sha not provided"))
(let [deps (cond
version {:deps {lib {:mvn/version (name version)}}}
sha {:deps {lib {:git/sha (name sha)
:git/url (name url)}}}
root {:deps {lib {:local/root (name root)}}}
deps (name deps)
:else {:deps {lib {:mvn/version "RELEASE"}}})]
(b/create-basis {:project deps})))
(defn print-sizes [basis]
(let [lib-tree (basis->size-tree basis)
vals
(sort-by :size)
reverse)
max-namespace-width (->> top-libs
(map :name)
(map namespace)
(map count)
(apply max))
max-name-width (->> top-libs
(map :name)
(map name)
(map count)
(apply max))
header (clojure.string/join " | " [(format (str "%" max-namespace-width "s") "namespace")
(format (str "%" max-name-width "s") "name")
(format (str "%13s") "transitive-size")
(format (str "%13s") "self-size")])
]
(println header)
(println (clojure.string/join (repeat (count header) "-" )))
(doseq [{:keys [transitive-size size name]} top-libs]
(println (clojure.string/join " | " [(format (str "%" max-namespace-width "s") (namespace name))
(format (str "%" max-name-width "s") (clojure.core/name name))
(format (str "%,15d") transitive-size)
(format (str "%,13d") size)])))))
(defn print-csv [basis]
(let [lib-tree (basis->size-tree basis)
top-libs (->> lib-tree
vals
(sort-by :size)
reverse)
columns [
["namespace" #(-> % :name namespace)]
["name" #(-> % :name name)]
["transitive-size" :transitive-size]
["transitive-size-readable" :transitive-size-readable]
["self-size" :size]
["self-size-readable" :size-readable]]
]
(println (clojure.string/join "," (map first columns)))
(doseq [lib top-libs]
(println (clojure.string/join "," (map (fn [[_column-name f]]
(str (f lib)))
columns))))))
(defn print-edn [basis]
(binding [*print-length* false]
(prn (basis->size-tree basis))))
(defn print-json [basis]
(json/write (basis->size-tree basis) *out*))
(defn print-usage []
(print (slurp ((requiring-resolve 'clojure.java.io/resource) "usage.txt"))))
(defn -main [{:keys [view path lib deps]
:or {view :treemap
path "snowball.png"}
:as m}]
(if (or lib deps)
(case (name view)
"treemap" (size-treemap (opts->basis m))
"treemap-image" (treemap-image (opts->basis m) (str path))
"print" (print-sizes (opts->basis m))
"csv" (print-csv (opts->basis m))
"edn" (print-edn (opts->basis m))
"json" (print-json (opts->basis m))
"treemap-edn" (treemap-edn (opts->basis m))
"treemap-json" (treemap-json (opts->basis m))
(print-usage))
(print-usage))
)
|
fff5a860c43cb5fb2fa2feb10ad68356bb4d31c027a07fcb5dd8ac7e725b7745 | CommonDoc/common-doc | constructors.lisp | (in-package :common-doc)
Utilities
(defun construct (class children metadata reference)
"Instantiate a class with children and metadata."
(make-instance class
:children (uiop:ensure-list children)
:metadata metadata
:reference reference))
;; NOTE:
;; Originally, I wanted something like that, to keep metadata and
slots in sync , but reality is more complex and web - link 's URI slot
contains QURI : URI object whereas expects that metadata 's URI
;; item has a string.
;;
;; Thus I've decided to create correct meta-data in the web-link's contructor :(
;;
QUESTION : May be it is should be fixed , to fill attributes
;; from slot values of items returned by FIND-SPECIAL-SLOTS?
;;
( defmethod initialize - instance : after ( ( node document - node ) & rest initargs )
( declare ( ignore ) )
;; ;; We have to keep metadata and slot values syncronized, because
; ; some formats like when emiting node attributes take
;; ;; their names and values from node's metadata.
;; (loop with special-slots = (common-doc:find-special-slots (class-of node))
;; for (meta-name . slot-name) in special-slots
;; for slot-value = (when (slot-boundp node slot-name)
;; (slot-value node slot-name))
;; for meta-value = (get-meta node meta-name)
;; when (and meta-value
;; (not (equal meta-value slot-value)))
do ( warn " Node ~S has different value for slot ~S. In metadata : ~S and in slot ~S. "
;; node slot-name meta-value slot-value)
;; when slot-value
do ( setf ( get - meta node meta - name )
;; slot-value)))
Interface
(defun make-content (children &key metadata reference)
"Create a content node from its children."
(construct 'content-node children metadata reference))
(defun make-text (string &key metadata reference)
"Create a text node from the contents of a string."
(make-instance 'text-node
:text string
:metadata metadata
:reference reference))
(defun make-paragraph (children &key metadata reference)
"Create a paragraph node from its children."
(construct 'paragraph children metadata reference))
(defun make-bold (children &key metadata reference)
"Create a bold node from its children."
(construct 'bold children metadata reference))
(defun make-italic (children &key metadata reference)
"Create an italicized node from its children."
(construct 'italic children metadata reference))
(defun make-underline (children &key metadata reference)
"Create an underlined node from its children."
(construct 'underline children metadata reference))
(defun make-strikethrough (children &key metadata reference)
"Create an striked out node from its children."
(construct 'strikethrough children metadata reference))
(defun make-code (children &key metadata reference)
"Create an inline code node from its children."
(construct 'code children metadata reference))
(defun make-superscript (children &key metadata reference)
"Create a superscripted node from its children."
(construct 'superscript children metadata reference))
(defun make-subscript (children &key metadata reference)
"Create a subscripted node from its children."
(construct 'subscript children metadata reference))
(defun make-code-block (language children &key metadata reference)
"Create a code block node from its children and language."
(make-instance 'code-block
:language language
:children (uiop:ensure-list children)
:metadata metadata
:reference reference))
(defun make-inline-quote (children &key metadata reference)
"Create an inline quote node from its children."
(construct 'inline-quote children metadata reference))
(defun make-block-quote (children &key metadata reference)
"Create a block quote node from its children."
(construct 'block-quote children metadata reference))
(defun make-document-link (document reference children &key metadata)
"Create a document link from document and node references and its children."
(check-type document (or null string))
(check-type reference (or null string))
(let ((node (make-instance 'document-link
:document-reference document
:node-reference reference
:children (uiop:ensure-list children)
:metadata metadata)))
;; Scriba expects there will be a STRING in this metadata item:
(when document
(setf (get-meta node "doc")
document))
(when reference
(setf (get-meta node "id")
reference))
(values node)))
(defun make-web-link (uri children &key metadata reference)
"Create a web link."
(let ((node (make-instance 'web-link
:uri (quri:uri uri)
:children (uiop:ensure-list children)
:metadata metadata
:reference reference)))
;; Scriba expects there will be a STRING in this metadata item:
(setf (get-meta node "uri")
uri)
(values node)))
(defun make-list-item (children &key metadata reference)
"Create a list item."
(construct 'list-item children metadata reference))
(defun make-definition (term definition &key metadata reference)
"Create a definition list item."
(make-instance 'definition
:term term
:definition definition
:metadata metadata
:reference reference))
(defun make-unordered-list (children &key metadata reference)
"Create an unordered list."
(construct 'unordered-list children metadata reference))
(defun make-ordered-list (children &key metadata reference)
"Create an ordered list."
(construct 'ordered-list children metadata reference))
(defun make-definition-list (children &key metadata reference)
"Create a definition list."
(construct 'definition-list children metadata reference))
(defun make-image (source &key description metadata reference)
"Create an image."
(make-instance 'image
:source source
:description description
:metadata metadata
:reference reference))
(defun make-figure (image description &key metadata reference)
"Create a figure."
(make-instance 'figure
:image image
:description description
:metadata metadata
:reference reference))
(defun make-table (rows &key metadata reference)
"Create a table from a list of rows."
(make-instance 'table
:rows rows
:metadata metadata
:reference reference))
(defun make-row (cells &key metadata reference)
"Create a row from a list of cells."
(make-instance 'row
:cells cells
:metadata metadata
:reference reference))
(defun make-cell (children &key metadata reference)
"Create a cell from its children."
(construct 'cell children metadata reference))
(defun make-section (title &key children reference metadata)
"Create a section from its title and children."
(let ((title (loop for item in (uiop:ensure-list title)
collect (etypecase item
(string (make-text item))
(document-node item)))))
(make-instance 'section
:title title
:reference reference
:children (uiop:ensure-list children)
:metadata metadata)))
(defun make-document (title &key children keywords &allow-other-keys)
"Create a document."
(make-instance 'document
:title title
:children (uiop:ensure-list children)
:keywords keywords))
| null | https://raw.githubusercontent.com/CommonDoc/common-doc/1406ab65b8f111f14f1b7759a1a83c65ced763ab/src/constructors.lisp | lisp | NOTE:
Originally, I wanted something like that, to keep metadata and
item has a string.
Thus I've decided to create correct meta-data in the web-link's contructor :(
from slot values of items returned by FIND-SPECIAL-SLOTS?
;; We have to keep metadata and slot values syncronized, because
; some formats like when emiting node attributes take
;; their names and values from node's metadata.
(loop with special-slots = (common-doc:find-special-slots (class-of node))
for (meta-name . slot-name) in special-slots
for slot-value = (when (slot-boundp node slot-name)
(slot-value node slot-name))
for meta-value = (get-meta node meta-name)
when (and meta-value
(not (equal meta-value slot-value)))
node slot-name meta-value slot-value)
when slot-value
slot-value)))
Scriba expects there will be a STRING in this metadata item:
Scriba expects there will be a STRING in this metadata item: | (in-package :common-doc)
Utilities
(defun construct (class children metadata reference)
"Instantiate a class with children and metadata."
(make-instance class
:children (uiop:ensure-list children)
:metadata metadata
:reference reference))
slots in sync , but reality is more complex and web - link 's URI slot
contains QURI : URI object whereas expects that metadata 's URI
QUESTION : May be it is should be fixed , to fill attributes
( defmethod initialize - instance : after ( ( node document - node ) & rest initargs )
( declare ( ignore ) )
do ( warn " Node ~S has different value for slot ~S. In metadata : ~S and in slot ~S. "
do ( setf ( get - meta node meta - name )
Interface
(defun make-content (children &key metadata reference)
"Create a content node from its children."
(construct 'content-node children metadata reference))
(defun make-text (string &key metadata reference)
"Create a text node from the contents of a string."
(make-instance 'text-node
:text string
:metadata metadata
:reference reference))
(defun make-paragraph (children &key metadata reference)
"Create a paragraph node from its children."
(construct 'paragraph children metadata reference))
(defun make-bold (children &key metadata reference)
"Create a bold node from its children."
(construct 'bold children metadata reference))
(defun make-italic (children &key metadata reference)
"Create an italicized node from its children."
(construct 'italic children metadata reference))
(defun make-underline (children &key metadata reference)
"Create an underlined node from its children."
(construct 'underline children metadata reference))
(defun make-strikethrough (children &key metadata reference)
"Create an striked out node from its children."
(construct 'strikethrough children metadata reference))
(defun make-code (children &key metadata reference)
"Create an inline code node from its children."
(construct 'code children metadata reference))
(defun make-superscript (children &key metadata reference)
"Create a superscripted node from its children."
(construct 'superscript children metadata reference))
(defun make-subscript (children &key metadata reference)
"Create a subscripted node from its children."
(construct 'subscript children metadata reference))
(defun make-code-block (language children &key metadata reference)
"Create a code block node from its children and language."
(make-instance 'code-block
:language language
:children (uiop:ensure-list children)
:metadata metadata
:reference reference))
(defun make-inline-quote (children &key metadata reference)
"Create an inline quote node from its children."
(construct 'inline-quote children metadata reference))
(defun make-block-quote (children &key metadata reference)
"Create a block quote node from its children."
(construct 'block-quote children metadata reference))
(defun make-document-link (document reference children &key metadata)
"Create a document link from document and node references and its children."
(check-type document (or null string))
(check-type reference (or null string))
(let ((node (make-instance 'document-link
:document-reference document
:node-reference reference
:children (uiop:ensure-list children)
:metadata metadata)))
(when document
(setf (get-meta node "doc")
document))
(when reference
(setf (get-meta node "id")
reference))
(values node)))
(defun make-web-link (uri children &key metadata reference)
"Create a web link."
(let ((node (make-instance 'web-link
:uri (quri:uri uri)
:children (uiop:ensure-list children)
:metadata metadata
:reference reference)))
(setf (get-meta node "uri")
uri)
(values node)))
(defun make-list-item (children &key metadata reference)
"Create a list item."
(construct 'list-item children metadata reference))
(defun make-definition (term definition &key metadata reference)
"Create a definition list item."
(make-instance 'definition
:term term
:definition definition
:metadata metadata
:reference reference))
(defun make-unordered-list (children &key metadata reference)
"Create an unordered list."
(construct 'unordered-list children metadata reference))
(defun make-ordered-list (children &key metadata reference)
"Create an ordered list."
(construct 'ordered-list children metadata reference))
(defun make-definition-list (children &key metadata reference)
"Create a definition list."
(construct 'definition-list children metadata reference))
(defun make-image (source &key description metadata reference)
"Create an image."
(make-instance 'image
:source source
:description description
:metadata metadata
:reference reference))
(defun make-figure (image description &key metadata reference)
"Create a figure."
(make-instance 'figure
:image image
:description description
:metadata metadata
:reference reference))
(defun make-table (rows &key metadata reference)
"Create a table from a list of rows."
(make-instance 'table
:rows rows
:metadata metadata
:reference reference))
(defun make-row (cells &key metadata reference)
"Create a row from a list of cells."
(make-instance 'row
:cells cells
:metadata metadata
:reference reference))
(defun make-cell (children &key metadata reference)
"Create a cell from its children."
(construct 'cell children metadata reference))
(defun make-section (title &key children reference metadata)
"Create a section from its title and children."
(let ((title (loop for item in (uiop:ensure-list title)
collect (etypecase item
(string (make-text item))
(document-node item)))))
(make-instance 'section
:title title
:reference reference
:children (uiop:ensure-list children)
:metadata metadata)))
(defun make-document (title &key children keywords &allow-other-keys)
"Create a document."
(make-instance 'document
:title title
:children (uiop:ensure-list children)
:keywords keywords))
|
e75bb434437193f1cc3770da380b5c9c140a59136df2a01de1a725a226571112 | monadbobo/ocaml-core | string_id.ml | open Std_internal
include String
let check s =
let stripped = String.strip s in
if not (String.(=) stripped s) then
Some (sprintf ("'%s' is not a valid identifier " ^^
"because it has whitespace on the edge")
s)
else if String.(=) s "" then
Some "Attempt to use empty identifier"
else if String.contains s '|' then
Some "Identifier contains a pipe '|'"
else
None
let of_string s =
match check s with
| None -> s
| Some err -> invalid_arg err
let t_of_sexp sexp =
let s = String.t_of_sexp sexp in
match check s with
| None -> s
| Some err -> of_sexp_error err sexp
| null | https://raw.githubusercontent.com/monadbobo/ocaml-core/9c1c06e7a1af7e15b6019a325d7dbdbd4cdb4020/base/core/lib/string_id.ml | ocaml | open Std_internal
include String
let check s =
let stripped = String.strip s in
if not (String.(=) stripped s) then
Some (sprintf ("'%s' is not a valid identifier " ^^
"because it has whitespace on the edge")
s)
else if String.(=) s "" then
Some "Attempt to use empty identifier"
else if String.contains s '|' then
Some "Identifier contains a pipe '|'"
else
None
let of_string s =
match check s with
| None -> s
| Some err -> invalid_arg err
let t_of_sexp sexp =
let s = String.t_of_sexp sexp in
match check s with
| None -> s
| Some err -> of_sexp_error err sexp
| |
b28f5ea0b4c37187e87a5428774a676496f47ffede00c0477934c6f3b296caab | heraldry/heraldicon | debounce.cljs | (ns heraldicon.frontend.debounce
(:require
[re-frame.core :as rf]))
(defonce timeouts
(atom {}))
(defn- clear-timer [id]
(some-> @timeouts (get id) js/clearTimeout))
(rf/reg-fx ::dispatch
(fn [[id event-vec n]]
(clear-timer id)
(swap! timeouts assoc id
(js/setTimeout (fn []
(rf/dispatch event-vec)
(swap! timeouts dissoc id))
n))))
(rf/reg-fx ::stop
(fn [id]
(clear-timer id)
(swap! timeouts dissoc id)))
| null | https://raw.githubusercontent.com/heraldry/heraldicon/6fb727fc675e171db70a4d2080c54d7e6428bd87/src/heraldicon/frontend/debounce.cljs | clojure | (ns heraldicon.frontend.debounce
(:require
[re-frame.core :as rf]))
(defonce timeouts
(atom {}))
(defn- clear-timer [id]
(some-> @timeouts (get id) js/clearTimeout))
(rf/reg-fx ::dispatch
(fn [[id event-vec n]]
(clear-timer id)
(swap! timeouts assoc id
(js/setTimeout (fn []
(rf/dispatch event-vec)
(swap! timeouts dissoc id))
n))))
(rf/reg-fx ::stop
(fn [id]
(clear-timer id)
(swap! timeouts dissoc id)))
| |
957712144c1fdb29093c0651757c17a2396c2560e2f2a2d4a3b0272c3c46b5b2 | martijnbastiaan/doctest-parallel | ModuleA.hs | -- |
> > > import .
> > > fib 10
55
module BugfixImportHierarchical.ModuleA where
import BugfixImportHierarchical.ModuleB ()
| null | https://raw.githubusercontent.com/martijnbastiaan/doctest-parallel/f70d6a1c946cc0ada88571b90a39a7cd4d065452/test/integration/BugfixImportHierarchical/ModuleA.hs | haskell | | | > > > import .
> > > fib 10
55
module BugfixImportHierarchical.ModuleA where
import BugfixImportHierarchical.ModuleB ()
|
e7433b4bd914311850b74a92f91e2152df0166fd0643e337ea6e26fc097145c3 | mbudde/jana | Printf.hs | module Jana.Printf (printfRender) where
import Data.List (elemIndex, delete)
import Jana.Error
import Jana.ErrorMessages
printfRender :: [String] -> [(String, String)] -> Either Message String
printfRender str [] =
case findPercent (last str) of
Just _ -> Left printfNotEnoughArgs
Nothing -> Right $ concat str
printfRender str vList@((var, varType):vars) =
case percentIndex of
Just idx | typeStr `elem` acceptedTypes ->
if typeStr == varType
then printfRender (init str ++ [insertVar var cutFirstStr, cutLastStr]) vars
else Left $ printfTypeMismatch typeChar typeStr varType
| typeChar == '%' ->
printfRender (init str ++ [delete '%' cutFirstStr, cutLastStr]) vList
| otherwise -> Left $ printfUnrecognizedType typeChar
where lastStr = last str
typeChar = last str !! (idx+1)
acceptedTypes = ["int", "array", "bool", "stack"]
typeStr = correspondingType typeChar
cutFirstStr = take (idx + 2) lastStr
cutLastStr = drop (idx + 2) lastStr
Nothing -> if null vList
then Right $ concat str
else Left printfTooManyArgs
where percentIndex = findPercent (last str)
insertVar :: String -> String -> String
insertVar var str =
take (length str - 2) str ++ var
correspondingType :: Char -> String
correspondingType typeChar =
case typeChar of
'd' -> "int"
'a' -> "array"
'b' -> "bool"
't' -> "stack"
_ -> ""
findPercent :: String -> Maybe Int
findPercent = elemIndex '%'
| null | https://raw.githubusercontent.com/mbudde/jana/5b51b57e0b6f13ec1e1c61d719a18bf58696f84b/src/Jana/Printf.hs | haskell | module Jana.Printf (printfRender) where
import Data.List (elemIndex, delete)
import Jana.Error
import Jana.ErrorMessages
printfRender :: [String] -> [(String, String)] -> Either Message String
printfRender str [] =
case findPercent (last str) of
Just _ -> Left printfNotEnoughArgs
Nothing -> Right $ concat str
printfRender str vList@((var, varType):vars) =
case percentIndex of
Just idx | typeStr `elem` acceptedTypes ->
if typeStr == varType
then printfRender (init str ++ [insertVar var cutFirstStr, cutLastStr]) vars
else Left $ printfTypeMismatch typeChar typeStr varType
| typeChar == '%' ->
printfRender (init str ++ [delete '%' cutFirstStr, cutLastStr]) vList
| otherwise -> Left $ printfUnrecognizedType typeChar
where lastStr = last str
typeChar = last str !! (idx+1)
acceptedTypes = ["int", "array", "bool", "stack"]
typeStr = correspondingType typeChar
cutFirstStr = take (idx + 2) lastStr
cutLastStr = drop (idx + 2) lastStr
Nothing -> if null vList
then Right $ concat str
else Left printfTooManyArgs
where percentIndex = findPercent (last str)
insertVar :: String -> String -> String
insertVar var str =
take (length str - 2) str ++ var
correspondingType :: Char -> String
correspondingType typeChar =
case typeChar of
'd' -> "int"
'a' -> "array"
'b' -> "bool"
't' -> "stack"
_ -> ""
findPercent :: String -> Maybe Int
findPercent = elemIndex '%'
| |
5707a867a45fc7db759ec8a907f9c800decbbd7b6d4ec71da7e54a923cb952fc | utdemir/nix-tree | Test.hs | # LANGUAGE TemplateHaskell #
module Main where
import Hedgehog
import Hedgehog.Main
import qualified Test.Data.InvertedIndex
main :: IO ()
main =
defaultMain . map checkParallel $
[Test.Data.InvertedIndex.tests]
| null | https://raw.githubusercontent.com/utdemir/nix-tree/27cf891b0635f12d775848cdabb0e9484cb9d54f/test/Test.hs | haskell | # LANGUAGE TemplateHaskell #
module Main where
import Hedgehog
import Hedgehog.Main
import qualified Test.Data.InvertedIndex
main :: IO ()
main =
defaultMain . map checkParallel $
[Test.Data.InvertedIndex.tests]
| |
e8296259ac5427291178e24522346a57d0af5bc055d13b4f42ddc0ca9690ec06 | jeromesimeon/Galax | code_nestedloop.mli | (***********************************************************************)
(* *)
(* GALAX *)
(* XQuery Engine *)
(* *)
Copyright 2001 - 2007 .
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
$ I d : code_nestedloop.mli , v 1.7 2007/02/01 22:08:45 simeon Exp $
(* Module: Code_nestedloop
Description:
This module contains code building for operators that implement
nested loops.
*)
open Algebra_type
open Xquery_common_ast
open Xquery_algebra_ast
open Physical_value
open Execution_context
open Code_selection_context
val build_tuple_nodup_code :
code_selection_context -> crname ->
(algebra_context -> tuple_unit Cursor.cursor -> tuple_unit Cursor.cursor)
val build_distinct_code :
Code_selection_context.code_selection_context ->
Algebra_type.algop_expr ->
crname ->
(Algebra_type.alg_eval_code_dep * Code_selection_context.code_selection_context)
val build_default_tuple_tree_pattern_code :
code_selection_context -> crname -> Xquery_algebra_ast.twig_pattern ->
(unit -> eval_fun -> algebra_context -> tuple_unit Cursor.cursor -> tuple_unit Cursor.cursor)
(* fixme, move this *)
val effective_boolean_value : Physical_value.item Cursor.cursor -> Datatypes.xs_boolean
(* fime move this to ast util *)
val get_treejoin_attrs :
Xquery_algebra_ast.twig_pattern ->
Xquery_common_ast.axis * int ->
Namespace_names.rqname * Xquery_common_ast.axis * Xquery_algebra_ast.anode_test
| null | https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/code_selection/code/code_nestedloop.mli | ocaml | *********************************************************************
GALAX
XQuery Engine
Distributed only by permission.
*********************************************************************
Module: Code_nestedloop
Description:
This module contains code building for operators that implement
nested loops.
fixme, move this
fime move this to ast util | Copyright 2001 - 2007 .
$ I d : code_nestedloop.mli , v 1.7 2007/02/01 22:08:45 simeon Exp $
open Algebra_type
open Xquery_common_ast
open Xquery_algebra_ast
open Physical_value
open Execution_context
open Code_selection_context
val build_tuple_nodup_code :
code_selection_context -> crname ->
(algebra_context -> tuple_unit Cursor.cursor -> tuple_unit Cursor.cursor)
val build_distinct_code :
Code_selection_context.code_selection_context ->
Algebra_type.algop_expr ->
crname ->
(Algebra_type.alg_eval_code_dep * Code_selection_context.code_selection_context)
val build_default_tuple_tree_pattern_code :
code_selection_context -> crname -> Xquery_algebra_ast.twig_pattern ->
(unit -> eval_fun -> algebra_context -> tuple_unit Cursor.cursor -> tuple_unit Cursor.cursor)
val effective_boolean_value : Physical_value.item Cursor.cursor -> Datatypes.xs_boolean
val get_treejoin_attrs :
Xquery_algebra_ast.twig_pattern ->
Xquery_common_ast.axis * int ->
Namespace_names.rqname * Xquery_common_ast.axis * Xquery_algebra_ast.anode_test
|
53ef2a16b27447a77ca681655723910c8e9f7641ad5c66cbc7484e3f5bf73c41 | nobrakal/asak | wtree.mli | This file is part of asak .
*
* Copyright ( C ) 2019 IRIF / OCaml Software Foundation .
*
* asak is distributed under the terms of the MIT license . See the
* included LICENSE file for details .
*
* Copyright (C) 2019 IRIF / OCaml Software Foundation.
*
* asak is distributed under the terms of the MIT license. See the
* included LICENSE file for details. *)
(** Type for weighted trees. *)
type 'a wtree =
| Node of (int * 'a wtree * 'a wtree)
| Leaf of 'a
val fold_tree : (int -> 'b -> 'b -> 'b) -> ('a -> 'b) -> 'a wtree -> 'b
val size_of_tree : ('a -> int) -> 'a wtree -> int
| null | https://raw.githubusercontent.com/nobrakal/asak/c1aaf985815563edd2463f8fe18e2d790f955f05/src/wtree.mli | ocaml | * Type for weighted trees. | This file is part of asak .
*
* Copyright ( C ) 2019 IRIF / OCaml Software Foundation .
*
* asak is distributed under the terms of the MIT license . See the
* included LICENSE file for details .
*
* Copyright (C) 2019 IRIF / OCaml Software Foundation.
*
* asak is distributed under the terms of the MIT license. See the
* included LICENSE file for details. *)
type 'a wtree =
| Node of (int * 'a wtree * 'a wtree)
| Leaf of 'a
val fold_tree : (int -> 'b -> 'b -> 'b) -> ('a -> 'b) -> 'a wtree -> 'b
val size_of_tree : ('a -> int) -> 'a wtree -> int
|
212271fb21e49d31ba38363508fa2ecdd1b3a2df7a98c2411cabf6dc08cb6be4 | tsurucapital/euphoria | Main.hs | module Main where
import Criterion.Main (defaultMain)
import qualified FRP.Euphoria.EnumCollection.Lazy.Bench as ECL
import qualified FRP.Euphoria.EnumCollection.Strict.Bench as ECS
import qualified FRP.Euphoria.HashCollection.Lazy.Bench as HCL
import qualified FRP.Euphoria.HashCollection.Strict.Bench as HCS
main :: IO ()
main = defaultMain
[ ECL.benchmarks
, ECS.benchmarks
, HCL.benchmarks
, HCS.benchmarks
]
| null | https://raw.githubusercontent.com/tsurucapital/euphoria/15ddb49ddc79d62970a0163fe8d77789254db202/benchmarks/Main.hs | haskell | module Main where
import Criterion.Main (defaultMain)
import qualified FRP.Euphoria.EnumCollection.Lazy.Bench as ECL
import qualified FRP.Euphoria.EnumCollection.Strict.Bench as ECS
import qualified FRP.Euphoria.HashCollection.Lazy.Bench as HCL
import qualified FRP.Euphoria.HashCollection.Strict.Bench as HCS
main :: IO ()
main = defaultMain
[ ECL.benchmarks
, ECS.benchmarks
, HCL.benchmarks
, HCS.benchmarks
]
| |
3ce78fe5a43bd3a9d8ba04fe1f89620ce42edba5352903085d76270e6d239007 | mirage/ke | rke.ml | type ('a, 'b) t = {
mutable r : int;
mutable w : int;
mutable c : int;
k : ('a, 'b) Bigarray.kind;
mutable v : ('a, 'b, Bigarray.c_layout) Bigarray.Array1.t;
}
exception Empty
external ( = ) : 'a -> 'a -> bool = "%equal"
let ( = ) (a : int) b = a = b
let[@inline always] mask t v = v land (t.c - 1)
let[@inline always] empty t = t.r = t.w
let[@inline always] size t = t.w - t.r
let[@inline always] available t = t.c - (t.w - t.r)
let[@inline always] full t = size t = t.c
let length q = size q
let[@inline always] to_power_of_two v =
let res = ref (pred v) in
res := !res lor (!res lsr 1);
res := !res lor (!res lsr 2);
res := !res lor (!res lsr 4);
res := !res lor (!res lsr 8);
res := !res lor (!res lsr 16);
succ !res
let[@inline always] is_power_of_two v = v <> 0 && v land (lnot v + 1) = v
let is_empty t = (empty [@inlined]) t
let create ?capacity kind =
let capacity =
match capacity with
| None | Some 0 -> 1
| Some n ->
if n < 0 then Fmt.invalid_arg "Rke.create" else to_power_of_two n
in
{
r = 0;
w = 0;
c = capacity;
k = kind;
v = Bigarray.Array1.create kind Bigarray.c_layout capacity;
}
let capacity { c; _ } = c
let copy t =
let v = Bigarray.Array1.create t.k Bigarray.c_layout t.c in
Bigarray.Array1.blit t.v v;
{ r = t.r; w = t.w; c = t.c; v; k = t.k }
let grow t want =
let max : int -> int -> int = max in
let c = to_power_of_two (max 1 (max want (size t))) in
if c <> Bigarray.Array1.dim t.v then (
let dst = Bigarray.Array1.create t.k Bigarray.c_layout c in
let sze = (size [@inlined]) t in
let msk = (mask [@inlined]) t t.r in
let pre = t.c - msk in
let rst = sze - pre in
(if rst > 0 then (
Bigarray.Array1.(blit (sub t.v msk pre) (sub dst 0 pre));
Bigarray.Array1.(blit (sub t.v 0 rst) (sub dst pre rst)))
else Bigarray.Array1.(blit (sub t.v msk sze) (sub dst 0 sze)));
t.v <- dst;
t.w <- sze;
t.c <- c;
t.r <- 0)
let push t v =
if (full [@inlined]) t then grow t (2 * (size [@inlined]) t);
Bigarray.Array1.unsafe_set t.v ((mask [@inlined]) t t.w) v;
t.w <- t.w + 1
let cons t v =
if (full [@inlined]) t then grow t (2 * (size [@inlined]) t);
let i = t.r - 1 in
Bigarray.Array1.unsafe_set t.v ((mask [@inlined]) t i) v;
t.r <- i
let pop_exn t =
if (empty [@inlined]) t then raise Empty;
let r = Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t t.r) in
t.r <- t.r + 1;
r
let pop t = try Some (pop_exn t) with Empty -> None
let peek_exn t =
if (empty [@inlined]) t then raise Empty;
Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t t.r)
let peek t = try Some (peek_exn t) with Empty -> None
let blit src src_off dst dst_off len =
let a = Bigarray.Array1.sub src src_off len in
let b = Bigarray.Array1.sub dst dst_off len in
Bigarray.Array1.blit a b
let compress t =
let len = length t in
let msk = (mask [@inlined]) t t.r in
let pre = t.c - msk in
let rst = len - pre in
if rst > 0 then (
if (available [@inlined]) t >= pre then (
): in this case , [ pre + rst < = msk ] , so [ blit ] will not
overlap bytes at the end of [ t.v ] ( at offset [ msk ] ) .
overlap bytes at the end of [t.v] (at offset [msk]). *)
blit t.v 0 t.v pre rst;
blit t.v msk t.v 0 pre)
else
let tmp = Bigarray.Array1.create t.k Bigarray.c_layout pre in
blit t.v msk tmp 0 pre;
blit t.v 0 t.v pre rst;
blit tmp 0 t.v 0 pre)
else blit t.v msk t.v 0 len;
t.r <- 0;
t.w <- len
module N = struct
type ('a, 'b) bigarray = ('a, 'b, Bigarray.c_layout) Bigarray.Array1.t
type ('a, 'b) blit = 'a -> int -> 'b -> int -> int -> unit
type 'a length = 'a -> int
let push t ~blit ~length ?(off = 0) ?len v =
let len = match len with None -> length v - off | Some len -> len in
if (available [@inlined]) t < len then grow t (len + (size [@inlined]) t);
let msk = (mask [@inlined]) t t.w in
let pre = t.c - msk in
let rst = len - pre in
if rst > 0 then (
blit v off t.v msk pre;
blit v (off + pre) t.v 0 rst)
else blit v off t.v msk len;
t.w <- t.w + len
let keep_exn t ~blit ~length ?(off = 0) ?len v =
let len = match len with None -> length v - off | Some len -> len in
if (size [@inlined]) t < len then raise Empty;
let msk = (mask [@inlined]) t t.r in
let pre = t.c - msk in
let rst = len - pre in
if rst > 0 then (
blit t.v msk v off pre;
blit t.v 0 v (off + pre) rst)
else blit t.v msk v off len
let keep t ~blit ~length ?off ?len v =
try Some (keep_exn t ~blit ~length ?off ?len v) with Empty -> None
let peek t =
let len = (size [@inlined]) t in
if len == 0 then []
else
let msk = (mask [@inlined]) t t.r in
let pre = t.c - msk in
let rst = len - pre in
if rst > 0 then
[ Bigarray.Array1.sub t.v msk pre; Bigarray.Array1.sub t.v 0 rst ]
else [ Bigarray.Array1.sub t.v msk len ]
let unsafe_shift t len = t.r <- t.r + len
let shift_exn t len =
if (size [@inlined]) t < len then raise Empty;
unsafe_shift t len
let shift t len = try Some (shift_exn t len) with Empty -> None
end
let iter f t =
let idx = ref t.r in
let max = t.w in
while !idx <> max do
f (Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t !idx));
incr idx
done
let rev_iter f t =
if t.r == t.w then ()
else
let idx = ref (pred t.w) in
let min = t.r in
while
f (Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t !idx));
!idx <> min
do
decr idx
done
let fold f a t =
let a = ref a in
iter (fun x -> a := f !a x) t;
!a
let pp ?sep pp_elt = Fmt.iter ?sep iter pp_elt
let dump pp_elt = Fmt.Dump.iter iter (Fmt.any "rke") pp_elt
let clear q =
q.r <- 0;
q.w <- 0
module Weighted = struct
type ('a, 'b) t = {
mutable r : int;
mutable w : int;
c : int;
k : ('a, 'b) Bigarray.kind;
v : ('a, 'b, Bigarray.c_layout) Bigarray.Array1.t;
}
exception Empty
exception Full
let[@inline always] mask t v = v land (t.c - 1)
let[@inline always] empty t = t.r = t.w
let[@inline always] size t = t.w - t.r
let[@inline always] full t = size t = t.c
let[@inline always] available t = t.c - (t.w - t.r)
let is_empty t = (empty [@inlined]) t
let length q = size q
let create ?capacity kind =
let capacity =
match capacity with
| None | Some 0 -> 1
| Some n ->
if n < 0 then Fmt.invalid_arg "Rke.Weighted.create"
else to_power_of_two n
in
( {
r = 0;
w = 0;
c = capacity;
k = kind;
v = Bigarray.Array1.create kind Bigarray.c_layout capacity;
},
capacity )
let copy t =
let v = Bigarray.Array1.create t.k Bigarray.c_layout t.c in
Bigarray.Array1.blit t.v v;
{ r = t.r; w = t.w; c = t.c; v; k = t.k }
let from v =
if not (is_power_of_two (Bigarray.Array1.dim v)) then
Fmt.invalid_arg "RBA.from";
let c = Bigarray.Array1.dim v in
let k = Bigarray.Array1.kind v in
{ r = 0; w = 0; c; k; v }
let push_exn t v =
if (full [@inlined]) t then raise Full;
Bigarray.Array1.unsafe_set t.v ((mask [@inlined]) t t.w) v;
t.w <- t.w + 1
let push t v = try Some (push_exn t v) with Full -> None
let cons_exn t v =
if (full [@inlined]) t then raise Full;
let i = t.r - 1 in
Bigarray.Array1.unsafe_set t.v ((mask [@inlined]) t i) v;
t.r <- i
let cons t v = try Some (cons_exn t v) with Full -> None
let pop_exn t =
if (empty [@inlined]) t then raise Empty;
let r = Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t t.r) in
t.r <- t.r + 1;
r
let pop t = try Some (pop_exn t) with Empty -> None
let peek_exn t =
if (empty [@inlined]) t then raise Empty;
Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t t.r)
let peek t = try Some (peek_exn t) with Empty -> None
let compress t =
let len = length t in
let msk = (mask [@inlined]) t t.r in
let pre = t.c - msk in
let rst = len - pre in
if rst > 0 then (
if (available [@inlined]) t >= pre then (
): in this case , [ pre + rst < = msk ] , so [ blit ] will not
overlap bytes at the end of [ t.v ] ( at offset [ msk ] ) .
overlap bytes at the end of [t.v] (at offset [msk]). *)
blit t.v 0 t.v pre rst;
blit t.v msk t.v 0 pre)
else
let tmp = Bigarray.Array1.create t.k Bigarray.c_layout pre in
blit t.v msk tmp 0 pre;
blit t.v 0 t.v pre rst;
blit tmp 0 t.v 0 pre)
else blit t.v msk t.v 0 len;
t.r <- 0;
t.w <- len
module N = struct
type ('a, 'b) bigarray = ('a, 'b, Bigarray.c_layout) Bigarray.Array1.t
type ('a, 'b) blit = 'a -> int -> 'b -> int -> int -> unit
type 'a length = 'a -> int
let push_exn t ~blit ~length ?(off = 0) ?len v =
let len = match len with None -> length v - off | Some len -> len in
if (available [@inlined]) t < len then raise Full;
let msk = (mask [@inlined]) t t.w in
let pre = t.c - msk in
let rst = len - pre in
let ret =
if rst > 0 then (
blit v off t.v msk pre;
blit v (off + pre) t.v 0 rst;
[
Bigarray.Array1.sub t.v ((mask [@inlined]) t t.w) pre;
Bigarray.Array1.sub t.v 0 rst;
])
else (
blit v off t.v msk len;
[ Bigarray.Array1.sub t.v ((mask [@inlined]) t t.w) len ])
in
t.w <- t.w + len;
ret
let push t ~blit ~length ?off ?len v =
try Some (push_exn t ~blit ~length ?off ?len v) with Full -> None
let keep_exn t ~blit ~length ?(off = 0) ?len v =
let len = match len with None -> length v - off | Some len -> len in
if (size [@inlined]) t < len then raise Empty;
let msk = (mask [@inlined]) t t.r in
let pre = t.c - msk in
let rst = len - pre in
if rst > 0 then (
blit t.v msk v off pre;
blit t.v 0 v (off + pre) rst)
else blit t.v msk v off len
let keep t ~blit ~length ?off ?len v =
try Some (keep_exn t ~blit ~length ?off ?len v) with Empty -> None
let peek t =
let len = (size [@inlined]) t in
if len == 0 then []
else
let msk = (mask [@inlined]) t t.r in
let pre = t.c - msk in
let rst = len - pre in
if rst > 0 then
[ Bigarray.Array1.sub t.v msk pre; Bigarray.Array1.sub t.v 0 rst ]
else [ Bigarray.Array1.sub t.v msk len ]
let unsafe_shift t len = t.r <- t.r + len
let shift_exn t len =
if (size [@inlined]) t < len then raise Empty;
unsafe_shift t len
let shift t len = try Some (shift_exn t len) with Empty -> None
end
let iter f t =
let idx = ref t.r in
let max = t.w in
while !idx <> max do
f (Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t !idx));
incr idx
done
let rev_iter f t =
if t.r == t.w then ()
else
let idx = ref (pred t.w) in
let min = t.r in
while
f (Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t !idx));
!idx <> min
do
decr idx
done
let fold f a t =
let a = ref a in
iter (fun x -> a := f !a x) t;
!a
let pp ?sep pp_elt = Fmt.iter ?sep iter pp_elt
let dump pp_elt = Fmt.Dump.iter iter (Fmt.any "rke:weighted") pp_elt
let clear q =
q.r <- 0;
q.w <- 0
let unsafe_bigarray { v; _ } = v
end
| null | https://raw.githubusercontent.com/mirage/ke/2b79a7fa8a2249653d5310c8e851bbce66fac57c/lib/rke.ml | ocaml | type ('a, 'b) t = {
mutable r : int;
mutable w : int;
mutable c : int;
k : ('a, 'b) Bigarray.kind;
mutable v : ('a, 'b, Bigarray.c_layout) Bigarray.Array1.t;
}
exception Empty
external ( = ) : 'a -> 'a -> bool = "%equal"
let ( = ) (a : int) b = a = b
let[@inline always] mask t v = v land (t.c - 1)
let[@inline always] empty t = t.r = t.w
let[@inline always] size t = t.w - t.r
let[@inline always] available t = t.c - (t.w - t.r)
let[@inline always] full t = size t = t.c
let length q = size q
let[@inline always] to_power_of_two v =
let res = ref (pred v) in
res := !res lor (!res lsr 1);
res := !res lor (!res lsr 2);
res := !res lor (!res lsr 4);
res := !res lor (!res lsr 8);
res := !res lor (!res lsr 16);
succ !res
let[@inline always] is_power_of_two v = v <> 0 && v land (lnot v + 1) = v
let is_empty t = (empty [@inlined]) t
let create ?capacity kind =
let capacity =
match capacity with
| None | Some 0 -> 1
| Some n ->
if n < 0 then Fmt.invalid_arg "Rke.create" else to_power_of_two n
in
{
r = 0;
w = 0;
c = capacity;
k = kind;
v = Bigarray.Array1.create kind Bigarray.c_layout capacity;
}
let capacity { c; _ } = c
let copy t =
let v = Bigarray.Array1.create t.k Bigarray.c_layout t.c in
Bigarray.Array1.blit t.v v;
{ r = t.r; w = t.w; c = t.c; v; k = t.k }
let grow t want =
let max : int -> int -> int = max in
let c = to_power_of_two (max 1 (max want (size t))) in
if c <> Bigarray.Array1.dim t.v then (
let dst = Bigarray.Array1.create t.k Bigarray.c_layout c in
let sze = (size [@inlined]) t in
let msk = (mask [@inlined]) t t.r in
let pre = t.c - msk in
let rst = sze - pre in
(if rst > 0 then (
Bigarray.Array1.(blit (sub t.v msk pre) (sub dst 0 pre));
Bigarray.Array1.(blit (sub t.v 0 rst) (sub dst pre rst)))
else Bigarray.Array1.(blit (sub t.v msk sze) (sub dst 0 sze)));
t.v <- dst;
t.w <- sze;
t.c <- c;
t.r <- 0)
let push t v =
if (full [@inlined]) t then grow t (2 * (size [@inlined]) t);
Bigarray.Array1.unsafe_set t.v ((mask [@inlined]) t t.w) v;
t.w <- t.w + 1
let cons t v =
if (full [@inlined]) t then grow t (2 * (size [@inlined]) t);
let i = t.r - 1 in
Bigarray.Array1.unsafe_set t.v ((mask [@inlined]) t i) v;
t.r <- i
let pop_exn t =
if (empty [@inlined]) t then raise Empty;
let r = Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t t.r) in
t.r <- t.r + 1;
r
let pop t = try Some (pop_exn t) with Empty -> None
let peek_exn t =
if (empty [@inlined]) t then raise Empty;
Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t t.r)
let peek t = try Some (peek_exn t) with Empty -> None
let blit src src_off dst dst_off len =
let a = Bigarray.Array1.sub src src_off len in
let b = Bigarray.Array1.sub dst dst_off len in
Bigarray.Array1.blit a b
(* [compress t] moves the live contents to the beginning of the backing
   bigarray and normalizes the cursors to [r = 0; w = length t].
   Fix: the explanatory comment here had lost its opening "(*" (leaving a
   dangling "*)" and a duplicated line), which broke the syntax; restored. *)
let compress t =
  let len = length t in
  let msk = (mask [@inlined]) t t.r in
  let pre = t.c - msk in
  let rst = len - pre in
  if rst > 0 then (
    if (available [@inlined]) t >= pre then (
      (* XXX(dinosaure): in this case, [pre + rst <= msk], so [blit] will
         not overlap bytes at the end of [t.v] (at offset [msk]). *)
      blit t.v 0 t.v pre rst;
      blit t.v msk t.v 0 pre)
    else
      (* Not enough free room: stage the wrapped prefix in a temporary
         buffer so no blit overlaps. *)
      let tmp = Bigarray.Array1.create t.k Bigarray.c_layout pre in
      blit t.v msk tmp 0 pre;
      blit t.v 0 t.v pre rst;
      blit tmp 0 t.v 0 pre)
  else blit t.v msk t.v 0 len;
  t.r <- 0;
  t.w <- len
(* Batch ("N" = n elements) operations: push/keep whole slices, using
   user-supplied [blit] and [length] functions over the source or
   destination values. *)
module N = struct
  type ('a, 'b) bigarray = ('a, 'b, Bigarray.c_layout) Bigarray.Array1.t

  (* [blit src src_off dst dst_off len] *)
  type ('a, 'b) blit = 'a -> int -> 'b -> int -> int -> unit
  type 'a length = 'a -> int

  (* Append [len] elements of [v] (starting at [off]), growing the buffer
     as needed; the copy is split in two when the region wraps. *)
  let push t ~blit ~length ?(off = 0) ?len v =
    let len = match len with None -> length v - off | Some len -> len in
    if (available [@inlined]) t < len then grow t (len + (size [@inlined]) t);
    let msk = (mask [@inlined]) t t.w in
    let pre = t.c - msk in
    let rst = len - pre in
    if rst > 0 then (
      blit v off t.v msk pre;
      blit v (off + pre) t.v 0 rst)
    else blit v off t.v msk len;
    t.w <- t.w + len

  (* Copy (without consuming) the next [len] stored elements into [v].
     @raise Empty if fewer than [len] elements are stored. *)
  let keep_exn t ~blit ~length ?(off = 0) ?len v =
    let len = match len with None -> length v - off | Some len -> len in
    if (size [@inlined]) t < len then raise Empty;
    let msk = (mask [@inlined]) t t.r in
    let pre = t.c - msk in
    let rst = len - pre in
    if rst > 0 then (
      blit t.v msk v off pre;
      blit t.v 0 v (off + pre) rst)
    else blit t.v msk v off len

  (* Option-returning variant of [keep_exn]. *)
  let keep t ~blit ~length ?off ?len v =
    try Some (keep_exn t ~blit ~length ?off ?len v) with Empty -> None

  (* Expose the stored elements as one or two sub-bigarrays (two when the
     live region wraps); no copy is made. *)
  let peek t =
    let len = (size [@inlined]) t in
    if len == 0 then []
    else
      let msk = (mask [@inlined]) t t.r in
      let pre = t.c - msk in
      let rst = len - pre in
      if rst > 0 then
        [ Bigarray.Array1.sub t.v msk pre; Bigarray.Array1.sub t.v 0 rst ]
      else [ Bigarray.Array1.sub t.v msk len ]

  (* Advance the read cursor without any bounds check. *)
  let unsafe_shift t len = t.r <- t.r + len

  (* Consume [len] elements. @raise Empty if fewer are stored. *)
  let shift_exn t len =
    if (size [@inlined]) t < len then raise Empty;
    unsafe_shift t len

  (* Option-returning variant of [shift_exn]. *)
  let shift t len = try Some (shift_exn t len) with Empty -> None
end
(* [iter f t] applies [f] to each stored element, oldest first. The buffer
   must not be mutated during iteration. *)
let iter f t =
  let idx = ref t.r in
  let max = t.w in
  while !idx <> max do
    f (Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t !idx));
    incr idx
  done
(* [rev_iter f t] applies [f] to each stored element, newest first. Note
   the side effect in the [while] condition: [f] is run on indices
   [pred t.w] down to [t.r] inclusive, and the loop stops after the
   element at [t.r] has been visited. *)
let rev_iter f t =
  if t.r == t.w then ()
  else
    let idx = ref (pred t.w) in
    let min = t.r in
    while
      f (Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t !idx));
      !idx <> min
    do
      decr idx
    done
(* Left fold over the stored elements, oldest first. *)
let fold f a t =
  let acc = ref a in
  iter (fun elt -> acc := f !acc elt) t;
  !acc
(* Pretty-printer over the stored elements, oldest first. *)
let pp ?sep pp_elt = Fmt.iter ?sep iter pp_elt

(* Debug printer: renders the queue as an "rke"-labelled iteration. *)
let dump pp_elt = Fmt.Dump.iter iter (Fmt.any "rke") pp_elt

(* Drop all stored elements; the backing array keeps its capacity. *)
let clear q =
  q.r <- 0;
  q.w <- 0
(* Weighted (bounded) variant of the ring buffer: the backing bigarray is
   allocated once and never grows, so writers get [Full] (or [None])
   instead of triggering a reallocation.
   Fix: the comment inside [compress] had lost its opening "(*" (leaving a
   dangling "*)" and a duplicated line), breaking the syntax; restored. *)
module Weighted = struct
  type ('a, 'b) t = {
    mutable r : int;
    (* read cursor *)
    mutable w : int;
    (* write cursor *)
    c : int;
    (* capacity, always a power of two *)
    k : ('a, 'b) Bigarray.kind;
    v : ('a, 'b, Bigarray.c_layout) Bigarray.Array1.t;
  }

  exception Empty
  exception Full

  (* [c] is a power of two, so [land (c - 1)] reduces any cursor modulo
     the capacity. *)
  let[@inline always] mask t v = v land (t.c - 1)
  let[@inline always] empty t = t.r = t.w
  let[@inline always] size t = t.w - t.r
  let[@inline always] full t = size t = t.c
  let[@inline always] available t = t.c - (t.w - t.r)
  let is_empty t = (empty [@inlined]) t
  let length q = size q

  (* [create ?capacity kind] allocates a queue and returns it paired with
     its effective capacity (requested size rounded up to a power of two;
     [None] or [Some 0] yield capacity 1). *)
  let create ?capacity kind =
    let capacity =
      match capacity with
      | None | Some 0 -> 1
      | Some n ->
          if n < 0 then Fmt.invalid_arg "Rke.Weighted.create"
          else to_power_of_two n
    in
    ( {
        r = 0;
        w = 0;
        c = capacity;
        k = kind;
        v = Bigarray.Array1.create kind Bigarray.c_layout capacity;
      },
      capacity )

  (* Deep copy: fresh backing array, same contents and cursors. *)
  let copy t =
    let v = Bigarray.Array1.create t.k Bigarray.c_layout t.c in
    Bigarray.Array1.blit t.v v;
    { r = t.r; w = t.w; c = t.c; v; k = t.k }

  (* Wrap an existing bigarray (whose dimension must be a power of two)
     as an initially-empty queue. *)
  let from v =
    if not (is_power_of_two (Bigarray.Array1.dim v)) then
      Fmt.invalid_arg "RBA.from";
    let c = Bigarray.Array1.dim v in
    let k = Bigarray.Array1.kind v in
    { r = 0; w = 0; c; k; v }

  (* Append one element. @raise Full when no room remains. *)
  let push_exn t v =
    if (full [@inlined]) t then raise Full;
    Bigarray.Array1.unsafe_set t.v ((mask [@inlined]) t t.w) v;
    t.w <- t.w + 1

  let push t v = try Some (push_exn t v) with Full -> None

  (* Prepend one element before the read cursor.
     @raise Full when no room remains. *)
  let cons_exn t v =
    if (full [@inlined]) t then raise Full;
    let i = t.r - 1 in
    Bigarray.Array1.unsafe_set t.v ((mask [@inlined]) t i) v;
    t.r <- i

  let cons t v = try Some (cons_exn t v) with Full -> None

  (* Remove and return the oldest element. @raise Empty when none. *)
  let pop_exn t =
    if (empty [@inlined]) t then raise Empty;
    let r = Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t t.r) in
    t.r <- t.r + 1;
    r

  let pop t = try Some (pop_exn t) with Empty -> None

  (* Return (without removing) the oldest element. @raise Empty when none. *)
  let peek_exn t =
    if (empty [@inlined]) t then raise Empty;
    Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t t.r)

  let peek t = try Some (peek_exn t) with Empty -> None

  (* Move the live region to offset 0 and normalize the cursors. *)
  let compress t =
    let len = length t in
    let msk = (mask [@inlined]) t t.r in
    let pre = t.c - msk in
    let rst = len - pre in
    if rst > 0 then (
      if (available [@inlined]) t >= pre then (
        (* XXX(dinosaure): in this case, [pre + rst <= msk], so [blit]
           will not overlap bytes at the end of [t.v] (at offset [msk]). *)
        blit t.v 0 t.v pre rst;
        blit t.v msk t.v 0 pre)
      else
        (* Not enough free room: stage the wrapped prefix in a temporary
           buffer so no blit overlaps. *)
        let tmp = Bigarray.Array1.create t.k Bigarray.c_layout pre in
        blit t.v msk tmp 0 pre;
        blit t.v 0 t.v pre rst;
        blit tmp 0 t.v 0 pre)
    else blit t.v msk t.v 0 len;
    t.r <- 0;
    t.w <- len

  (* Batch operations on slices, parameterized by user-supplied [blit]
     and [length] functions. *)
  module N = struct
    type ('a, 'b) bigarray = ('a, 'b, Bigarray.c_layout) Bigarray.Array1.t
    type ('a, 'b) blit = 'a -> int -> 'b -> int -> int -> unit
    type 'a length = 'a -> int

    (* Append [len] elements of [v] starting at [off]; returns the one or
       two sub-bigarray chunks where the data was written.
       @raise Full when fewer than [len] slots are available. *)
    let push_exn t ~blit ~length ?(off = 0) ?len v =
      let len = match len with None -> length v - off | Some len -> len in
      if (available [@inlined]) t < len then raise Full;
      let msk = (mask [@inlined]) t t.w in
      let pre = t.c - msk in
      let rst = len - pre in
      let ret =
        if rst > 0 then (
          blit v off t.v msk pre;
          blit v (off + pre) t.v 0 rst;
          [
            Bigarray.Array1.sub t.v ((mask [@inlined]) t t.w) pre;
            Bigarray.Array1.sub t.v 0 rst;
          ])
        else (
          blit v off t.v msk len;
          [ Bigarray.Array1.sub t.v ((mask [@inlined]) t t.w) len ])
      in
      t.w <- t.w + len;
      ret

    let push t ~blit ~length ?off ?len v =
      try Some (push_exn t ~blit ~length ?off ?len v) with Full -> None

    (* Copy (without consuming) the next [len] stored elements into [v].
       @raise Empty when fewer than [len] elements are stored. *)
    let keep_exn t ~blit ~length ?(off = 0) ?len v =
      let len = match len with None -> length v - off | Some len -> len in
      if (size [@inlined]) t < len then raise Empty;
      let msk = (mask [@inlined]) t t.r in
      let pre = t.c - msk in
      let rst = len - pre in
      if rst > 0 then (
        blit t.v msk v off pre;
        blit t.v 0 v (off + pre) rst)
      else blit t.v msk v off len

    let keep t ~blit ~length ?off ?len v =
      try Some (keep_exn t ~blit ~length ?off ?len v) with Empty -> None

    (* Expose the stored elements as one or two sub-bigarrays; no copy. *)
    let peek t =
      let len = (size [@inlined]) t in
      if len == 0 then []
      else
        let msk = (mask [@inlined]) t t.r in
        let pre = t.c - msk in
        let rst = len - pre in
        if rst > 0 then
          [ Bigarray.Array1.sub t.v msk pre; Bigarray.Array1.sub t.v 0 rst ]
        else [ Bigarray.Array1.sub t.v msk len ]

    (* Advance the read cursor without any bounds check. *)
    let unsafe_shift t len = t.r <- t.r + len

    (* Consume [len] elements. @raise Empty when fewer are stored. *)
    let shift_exn t len =
      if (size [@inlined]) t < len then raise Empty;
      unsafe_shift t len

    let shift t len = try Some (shift_exn t len) with Empty -> None
  end

  (* Apply [f] to each stored element, oldest first. *)
  let iter f t =
    let idx = ref t.r in
    let max = t.w in
    while !idx <> max do
      f (Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t !idx));
      incr idx
    done

  (* Apply [f] to each stored element, newest first (the [while] condition
     performs the call as a side effect, then tests the index). *)
  let rev_iter f t =
    if t.r == t.w then ()
    else
      let idx = ref (pred t.w) in
      let min = t.r in
      while
        f (Bigarray.Array1.unsafe_get t.v ((mask [@inlined]) t !idx));
        !idx <> min
      do
        decr idx
      done

  (* Left fold over the stored elements, oldest first. *)
  let fold f a t =
    let a = ref a in
    iter (fun x -> a := f !a x) t;
    !a

  let pp ?sep pp_elt = Fmt.iter ?sep iter pp_elt
  let dump pp_elt = Fmt.Dump.iter iter (Fmt.any "rke:weighted") pp_elt

  (* Drop all stored elements; capacity is unchanged. *)
  let clear q =
    q.r <- 0;
    q.w <- 0

  (* Expose the backing bigarray (unsafe: aliases internal state). *)
  let unsafe_bigarray { v; _ } = v
end
| |
a3f08f1cf49ccd89cd43de36a58d8625b997a6b25f52c19238264e8c3536bb77 | blinkov/sockjs-pubsub | sockjs_pubsub_sup.erl | -module(sockjs_pubsub_sup).
-behaviour(supervisor).
-author("Ivan Blinkov <>").
-include("constants.hrl").
-export([
start_link/0
]).
-export([
init/1
]).
%% Start the supervisor, registered locally under this module's name.
-spec start_link() -> {ok, Pid::pid()}.
start_link() ->
  supervisor:start_link({local, ?MODULE}, ?MODULE, []).
%% supervisor callback: one_for_one strategy allowing at most 10 restarts
%% within 10 seconds, with one manager child per name in ?MANAGER_NAMES
%% (presumably defined in the included constants.hrl -- confirm).
init(_Args) ->
  {ok,
    {
      {one_for_one, 10, 10},
      [get_child_spec(Name) || Name <- ?MANAGER_NAMES]
    }
  }.
%% Build the (old tuple-style) child specification for a single pubsub
%% manager worker named Name: permanent restart, 5s shutdown timeout.
-spec get_child_spec(Name :: atom()) -> supervisor:child_spec().
get_child_spec(Name) ->
  Start = {sockjs_pubsub_manager, start_link, [[Name]]},
  {Name, Start, permanent, 5000, worker, [Name]}.
-behaviour(supervisor).
-author("Ivan Blinkov <>").
-include("constants.hrl").
-export([
start_link/0
]).
-export([
init/1
]).
-spec start_link() -> {ok, Pid::pid()}.
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init(_Args) ->
{ok,
{
{one_for_one, 10, 10},
[get_child_spec(Name) || Name <- ?MANAGER_NAMES]
}
}.
-spec get_child_spec(Name :: atom()) -> supervisor:child_spec().
get_child_spec(Name) ->
{Name, {sockjs_pubsub_manager, start_link, [[Name]]},
permanent, 5000, worker, [Name]}. | |
5329c16623881fa9557a6e45b1b32c9b7db3864714d66fec51d3fb05fa97d65d | rmculpepper/crypto | main.rkt | Copyright 2012 - 2018
;; Copyright 2007-2009 Dimitris Vyzovitis <vyzo at media.mit.edu>
;;
;; This library is free software: you can redistribute it and/or modify
;; it under the terms of the GNU Lesser General Public License as published
;; by the Free Software Foundation, either version 3 of the License, or
;; (at your option) any later version.
;;
;; This library is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU Lesser General Public License for more details.
;;
;; You should have received a copy of the GNU Lesser General Public License
;; along with this library. If not, see </>.
#lang racket/base
(require racket/contract/base
racket/class
racket/random
"private/common/interfaces.rkt"
"private/common/catalog.rkt"
"private/common/common.rkt"
"private/common/error.rkt"
"private/common/util.rkt")
(provide crypto-factory?
digest-spec?
digest-impl?
digest-ctx?
cipher-spec?
cipher-impl?
cipher-ctx?
pk-spec?
pk-impl?
pk-parameters?
pk-key?
kdf-spec?
kdf-impl?
(struct-out bytes-range)
input/c
security-strength/c
security-level/c
security-level->strength
security-strength->level
;; util
(recontract-out
hex->bytes
bytes->hex
bytes->hex-string
crypto-bytes=?)
;; racket/random
crypto-random-bytes)
;; Common
(define nat? exact-nonnegative-integer?)
(define key/c bytes?)           ;; symmetric keys are raw byte strings
(define iv/c (or/c bytes? #f))  ;; #f means the cipher takes no IV
(define pad-mode/c boolean?)    ;; #t enables padding, #f disables it
;; ============================================================
;; Factories
;; Copyright 2013-2018 Ryan Culpepper
(provide
(contract-out
[crypto-factories
(parameter/c factories/c (listof crypto-factory?))]
[get-factory
(-> (or/c digest-impl? digest-ctx?
cipher-impl? cipher-ctx?
pk-impl? pk-parameters? pk-key?)
crypto-factory?)]
[factory-version
(-> crypto-factory? (or/c (listof exact-nonnegative-integer?) #f))]
[factory-print-info
(-> crypto-factory? void?)]
[get-digest
(->* [digest-spec?] [factories/c] (or/c digest-impl? #f))]
[get-cipher
(->* [cipher-spec?] [factories/c] (or/c cipher-impl? #f))]
[get-pk
(->* [symbol?] [factories/c] (or/c pk-impl? #f))]
[get-kdf
(->* [kdf-spec?] [factories/c] (or/c kdf-impl? #f))]
))
;; factories/c : contract accepting one factory or a list of factories.
(define factories/c (or/c crypto-factory? (listof crypto-factory?)))
;; coerce-list : (or/c X (listof X)) -> (listof X)
(define (coerce-list xs)
  (cond [(list? xs) xs]
        [else (list xs)]))

;; Parameter holding the default factories consulted by the get-digest /
;; get-cipher / get-pk / get-kdf lookups; a single factory is coerced to
;; a one-element list.
(define crypto-factories (make-parameter null coerce-list))
;; get-factory : walks from a context to its implementation, then to the
;; factory that produced it.
(define (get-factory i)
  (with-crypto-entry 'get-factory
    (let loop ([i i])
      (cond [(is-a? i impl<%>) (send i get-factory)]
            [(is-a? i ctx<%>) (loop (send i get-impl))]))))
;; get-digest / get-cipher / get-pk / get-kdf : search the given factory
;; or factories (default: crypto-factories) for an implementation of the
;; spec; return the first hit or #f.
(define (get-digest di [factory/s (crypto-factories)])
  (with-crypto-entry 'get-digest
    (ormap (lambda (f) (send f get-digest di)) (coerce-list factory/s))))
(define (get-cipher ci [factory/s (crypto-factories)])
  (with-crypto-entry 'get-cipher
    (ormap (lambda (f) (send f get-cipher ci)) (coerce-list factory/s))))
(define (get-pk pki [factory/s (crypto-factories)])
  (with-crypto-entry 'get-pk
    (ormap (lambda (f) (send f get-pk pki)) (coerce-list factory/s))))
(define (get-kdf k [factory/s (crypto-factories)])
  (with-crypto-entry 'get-kdf
    (ormap (lambda (f) (send f get-kdf k)) (coerce-list factory/s))))
;; factory-print-info : prints diagnostic information about the factory.
(define (factory-print-info factory)
  (send factory print-info) (void))

;; factory-version : the underlying library's version, or #f if unknown.
(define (factory-version factory)
  (send factory get-version))
;; ============================================================
;; Digests
;; Copyright 2012-2018 Ryan Culpepper
;; Copyright 2007-2009 Dimitris Vyzovitis <vyzo at media.mit.edu>
(provide
(contract-out
[digest-size
(-> (or/c digest-spec? digest-impl? digest-ctx?) exact-nonnegative-integer?)]
[digest-block-size
(-> (or/c digest-spec? digest-impl? digest-ctx?) exact-nonnegative-integer?)]
[digest-security-strength
(-> (or/c digest-spec? digest-impl? digest-ctx?) boolean? (or/c #f security-strength/c))]
[digest
(->* [digest/c input/c] [#:key (or/c bytes? #f)] bytes?)]
[hmac
(-> digest/c bytes? input/c bytes?)]
[make-digest-ctx
(->* [digest/c] [#:key (or/c bytes? #f)] digest-ctx?)]
[digest-update
(-> digest-ctx? input/c void?)]
[digest-final
(-> digest-ctx? bytes?)]
[digest-copy
(-> digest-ctx? (or/c digest-ctx? #f))]
[digest-peek-final
(-> digest-ctx? (or/c bytes? #f))]
[make-hmac-ctx
(-> digest/c bytes? digest-ctx?)]
[generate-hmac-key
(-> digest/c bytes?)]))
;; A digest argument may be given as a spec or as an impl object.
(define digest/c (or/c digest-spec? digest-impl?))

;; Coerce a digest argument to its implementation (via the current
;; factories) or to its catalog info record.
(define (-get-digest-impl o) (to-impl o #:what "digest" #:lookup get-digest))
(define (-get-digest-info o) (to-info o #:what "digest" #:lookup digest-spec->info))
;; ----
;; digest-size : output length of the digest, in bytes.
(define (digest-size o)
  (with-crypto-entry 'digest-size
    (send (-get-digest-info o) get-size)))

;; digest-block-size : internal block size in bytes (relevant for HMAC).
(define (digest-block-size o)
  (with-crypto-entry 'digest-block-size
    (send (-get-digest-info o) get-block-size)))

;; digest-security-strength : nominal strength in bits, or #f if unknown;
;; cr? selects the collision-resistance figure.
(define (digest-security-strength o [cr? #t])
  (with-crypto-entry 'digest-security-strength
    (send (-get-digest-info o) get-security-strength cr?)))
;; ----
;; make-digest-ctx : create an incremental digest context; when key is
;; given it is passed to the implementation (keyed digest).
(define (make-digest-ctx di #:key [key #f])
  (with-crypto-entry 'make-digest-ctx
    (send (-get-digest-impl di) new-ctx key)))

;; digest-update : feed more input into the context.
(define (digest-update dg src)
  (with-crypto-entry 'digest-update
    (send dg update src)))

;; digest-final : finish and return the digest bytes.
(define (digest-final dg)
  (with-crypto-entry 'digest-final
    (send dg final)))

;; digest-copy : snapshot of the context, or #f if copying is unsupported.
(define (digest-copy dg)
  (with-crypto-entry 'digest-copy
    (send dg copy)))

;; digest-peek-final : finalize a copy so the original context stays
;; usable; #f when the context cannot be copied.
(define (digest-peek-final dg)
  (with-crypto-entry 'digest-peek-final
    (let ([dg2 (send dg copy)]) (and dg2 (send dg2 final)))))
;; ----
;; digest : one-shot digest of inp, optionally keyed.
(define (digest di inp #:key [key #f])
  (with-crypto-entry 'digest
    (send (-get-digest-impl di) digest inp key)))
;; ----
;; make-hmac-ctx : incremental HMAC context over the given digest and key.
(define (make-hmac-ctx di key)
  (with-crypto-entry 'make-hmac-ctx
    (send (-get-digest-impl di) new-hmac-ctx key)))

;; hmac : one-shot HMAC of inp with the given key.
(define (hmac di key inp)
  (with-crypto-entry 'hmac
    (send (-get-digest-impl di) hmac key inp)))
;; ----
;; generate-hmac-key : random key whose length equals the digest's output.
(define (generate-hmac-key di)
  (with-crypto-entry 'generate-hmac-key
    (crypto-random-bytes (digest-size di))))
;; ============================================================
;; Ciphers
;; Copyright 2012-2018 Ryan Culpepper
;; Copyright 2007-2009 Dimitris Vyzovitis <vyzo at media.mit.edu>
(provide
(contract-out
[cipher-default-key-size
(-> (or/c cipher-spec? cipher-impl? cipher-ctx?) nat?)]
[cipher-key-sizes
(-> (or/c cipher-spec? cipher-impl?) (listof nat?))]
[cipher-block-size
(-> (or/c cipher-spec? cipher-impl? cipher-ctx?) nat?)]
[cipher-iv-size
(-> (or/c cipher-spec? cipher-impl? cipher-ctx?) nat?)]
[cipher-aead?
(-> (or/c cipher-spec? cipher-impl? cipher-ctx?) boolean?)]
[cipher-default-auth-size
(-> (or/c cipher-spec? cipher-impl? cipher-ctx?) nat?)]
[cipher-chunk-size
(-> (or/c cipher-impl? cipher-ctx?) nat?)]
[make-encrypt-ctx
(->* [cipher/c key/c iv/c]
[#:pad pad-mode/c #:auth-size (or/c nat? #f) #:auth-attached? boolean?]
encrypt-ctx?)]
[make-decrypt-ctx
(->* [cipher/c key/c iv/c]
[#:pad pad-mode/c #:auth-size (or/c nat? #f) #:auth-attached? boolean?]
decrypt-ctx?)]
[encrypt-ctx?
(-> any/c boolean?)]
[decrypt-ctx?
(-> any/c boolean?)]
[cipher-update
(-> cipher-ctx? input/c bytes?)]
[cipher-update-aad
(-> cipher-ctx? input/c void?)]
[cipher-final
(->* [cipher-ctx?] [(or/c bytes? #f)] bytes?)]
[cipher-get-auth-tag
(-> cipher-ctx? (or/c bytes? #f))]
[encrypt
(->* [cipher/c key/c iv/c input/c]
[#:pad pad-mode/c #:aad input/c #:auth-size (or/c nat? #f)]
bytes?)]
[decrypt
(->* [cipher/c key/c iv/c input/c]
[#:pad pad-mode/c #:aad input/c #:auth-size (or/c nat? #f)]
bytes?)]
[encrypt/auth
(->* [cipher/c key/c iv/c input/c]
[#:pad pad-mode/c #:aad input/c #:auth-size (or/c nat? #f)]
(values bytes? (or/c bytes? #f)))]
[decrypt/auth
(->* [cipher/c key/c iv/c input/c]
[#:pad pad-mode/c #:aad input/c #:auth-tag (or/c bytes? #f)]
bytes?)]
[generate-cipher-key
(->* [cipher/c] [#:size nat?] key/c)]
[generate-cipher-iv
(->* [cipher/c] [#:size nat?] iv/c)]))
;; A cipher argument may be given as a spec or as an impl object.
(define cipher/c (or/c cipher-spec? cipher-impl?))
;; Padding is enabled by default in the high-level operations.
(define default-pad #t)
;; Coerce a cipher argument to its implementation or catalog info record.
(define (-get-cipher-impl o) (to-impl o #:what "cipher" #:lookup get-cipher))
(define (-get-cipher-info o) (to-info o #:what "cipher" #:lookup cipher-spec->info))
;; ----
;; Defer to impl when avail to support unknown ciphers or impl-dependent limits.
;; Default key size in bytes.
(define (cipher-default-key-size o)
  (with-crypto-entry 'cipher-default-key-size
    (send (-get-cipher-info o) get-key-size)))
;; All supported key sizes, in bytes.
(define (cipher-key-sizes o)
  (with-crypto-entry 'cipher-key-sizes
    (size-set->list (send (-get-cipher-info o) get-key-sizes))))
;; Block size in bytes.
(define (cipher-block-size o)
  (with-crypto-entry 'cipher-block-size
    (send (-get-cipher-info o) get-block-size)))
;; Chunk size in bytes, as reported by the cipher info.
(define (cipher-chunk-size o)
  (with-crypto-entry 'cipher-chunk-size
    (send (-get-cipher-info o) get-chunk-size)))
;; IV/nonce size in bytes.
(define (cipher-iv-size o)
  (with-crypto-entry 'cipher-iv-size
    (send (-get-cipher-info o) get-iv-size)))
;; Whether the cipher provides authenticated encryption (AEAD).
(define (cipher-aead? o)
  (with-crypto-entry 'cipher-aead?
    (send (-get-cipher-info o) aead?)))
;; Default authentication-tag size in bytes.
(define (cipher-default-auth-size o)
  (with-crypto-entry 'cipher-default-auth-size
    (send (-get-cipher-info o) get-auth-size)))
;; ----
;; encrypt-ctx? / decrypt-ctx? : recognize cipher contexts by direction.
(define (encrypt-ctx? x)
  (if (cipher-ctx? x) (send x get-encrypt?) #f))
(define (decrypt-ctx? x)
  (if (cipher-ctx? x) (not (send x get-encrypt?)) #f))
;; make-{en,de}crypt-ctx : ... -> cipher-ctx
;; auth-size : Nat/#f -- #f means default tag size for cipher
;; make-encrypt-ctx / make-decrypt-ctx : create a stateful cipher context;
;; auth-attached? controls whether the auth tag travels with the
;; ciphertext stream or is handled separately.
(define (make-encrypt-ctx ci key iv #:pad [pad? #t]
                          #:auth-size [auth-size #f] #:auth-attached? [auth-attached? #t])
  (with-crypto-entry 'make-encrypt-ctx
    (-encrypt-ctx ci key iv pad? auth-size auth-attached?)))
(define (make-decrypt-ctx ci key iv #:pad [pad? #t]
                          #:auth-size [auth-size #f] #:auth-attached? [auth-attached? #t])
  (with-crypto-entry 'make-decrypt-ctx
    (-decrypt-ctx ci key iv pad? auth-size auth-attached?)))
;; Internal constructors; an iv of #f is normalized to the empty bytes.
(define (-encrypt-ctx ci key iv pad auth-size auth-attached?)
  (let ([ci (-get-cipher-impl ci)])
    (send ci new-ctx key (or iv #"") #t pad auth-size auth-attached?)))
(define (-decrypt-ctx ci key iv pad auth-size auth-attached?)
  (let ([ci (-get-cipher-impl ci)])
    (send ci new-ctx key (or iv #"") #f pad auth-size auth-attached?)))
;; cipher-update-aad : feed additionally authenticated data (AEAD).
(define (cipher-update-aad c inp)
  (with-crypto-entry 'cipher-update-aad
    (send c update-aad inp)
    (void)))
;; cipher-update : process more input; returns the output available so far.
(define (cipher-update c inp)
  (with-crypto-entry 'cipher-update
    (send c update inp)
    (send c get-output)))
;; cipher-final : finish and return the remaining output; auth-tag is the
;; detached tag to verify when decrypting (see decrypt/auth below).
(define (cipher-final c [auth-tag #f])
  (with-crypto-entry 'cipher-final
    (send c final auth-tag)
    (send c get-output)))
;; cipher-get-auth-tag : the authentication tag, or #f (per contract).
(define (cipher-get-auth-tag c)
  (with-crypto-entry 'cipher-get-auth-tag
    (send c get-auth-tag)))
;; ----
;; encrypt : one-shot encryption; uses an attached-tag context, so for
;; AEAD ciphers the tag is part of the returned ciphertext.
(define (encrypt ci key iv inp
                 #:pad [pad default-pad] #:aad [aad-inp null] #:auth-size [auth-size #f])
  (with-crypto-entry 'encrypt
    (let ([ci (-get-cipher-impl ci)])
      (define ctx (-encrypt-ctx ci key iv pad auth-size #t))
      (send ctx update-aad aad-inp)
      (send ctx update inp)
      (send ctx final #f)
      (send ctx get-output))))

;; decrypt : one-shot decryption; expects the tag attached to inp.
(define (decrypt ci key iv inp
                 #:pad [pad default-pad] #:aad [aad-inp null] #:auth-size [auth-size #f])
  (with-crypto-entry 'decrypt
    (let ([ci (-get-cipher-impl ci)])
      (define ctx (-decrypt-ctx ci key iv pad auth-size #t))
      (send ctx update-aad aad-inp)
      (send ctx update inp)
      (send ctx final #f)
      (send ctx get-output))))

;; encrypt/auth : like encrypt but returns ciphertext and detached tag.
(define (encrypt/auth ci key iv inp
                      #:pad [pad default-pad] #:aad [aad-inp null] #:auth-size [auth-size #f])
  (with-crypto-entry 'encrypt/auth
    (let ([ci (-get-cipher-impl ci)])
      (define ctx (-encrypt-ctx ci key iv pad auth-size #f))
      (send ctx update-aad aad-inp)
      (send ctx update inp)
      (send ctx final #f)
      (values (send ctx get-output) (send ctx get-auth-tag)))))

;; decrypt/auth : like decrypt but takes the detached tag via #:auth-tag.
(define (decrypt/auth ci key iv inp
                      #:pad [pad default-pad] #:aad [aad-inp null] #:auth-tag [auth-tag #f])
  (with-crypto-entry 'decrypt
    (let ([ci (-get-cipher-impl ci)])
      (define auth-len (and auth-tag (bytes-length auth-tag)))
      (define ctx (-decrypt-ctx ci key iv pad auth-len #f))
      (send ctx update-aad aad-inp)
      (send ctx update inp)
      (send ctx final auth-tag)
      (send ctx get-output))))
;; ----
;; generate-cipher-key : random key of the cipher's (or requested) size.
(define (generate-cipher-key ci #:size [size (cipher-default-key-size ci)])
  (with-crypto-entry 'generate-cipher-key
    ;; FIXME: any way to check for weak keys, avoid???
    (crypto-random-bytes size)))

;; generate-cipher-iv : random IV; empty bytes when the cipher has no IV.
(define (generate-cipher-iv ci #:size [size (cipher-iv-size ci)])
  (with-crypto-entry 'generate-cipher-iv
    (cond [(positive? size) (crypto-random-bytes size)]
          [else #""])))
;; ============================================================
;; KDFs and Password Hashing
;; Copyright 2014-2018 Ryan Culpepper
(provide
(contract-out
[kdf
(->* [(or/c kdf-spec? kdf-impl?)
bytes?
(or/c bytes? #f)]
[(listof (list/c symbol? any/c))]
bytes?)]
[pwhash
(->* [(or/c kdf-spec? kdf-impl?) bytes?]
[(listof (list/c symbol? any/c))]
string?)]
[pwhash-verify
(-> (or/c kdf-impl? #f) bytes? string?
boolean?)]
[pbkdf2-hmac
(->* [digest-spec? bytes? bytes? #:iterations exact-positive-integer?]
[#:key-size exact-positive-integer?]
bytes?)]
[scrypt
(->* [bytes?
bytes?
#:N exact-positive-integer?]
[#:r exact-positive-integer?
#:p exact-positive-integer?
#:key-size exact-positive-integer?]
bytes?)]
))
;; Coerce a KDF argument (spec or impl) to its implementation.
(define (-get-kdf-impl o) (to-impl o #:what "KDF" #:lookup get-kdf))

;; kdf : derive bytes from a password/secret and salt; params is an alist
;; such as `((iterations ,n) (key-size ,k)) (see pbkdf2-hmac below).
(define (kdf k pass salt [params '()])
  (with-crypto-entry 'kdf
    (let ([k (-get-kdf-impl k)])
      (send k kdf0 params pass salt))))

;; pwhash : produce a crypt-style encoded password hash string.
(define (pwhash k pass [params '()])
  (with-crypto-entry 'pwhash
    (let ([k (-get-kdf-impl k)])
      (send k pwhash params pass))))

;; pwhash-verify : check pass against an encoded hash; when k is #f the
;; KDF is inferred from the "$id$..." prefix of cred.
(define (pwhash-verify k pass cred)
  (with-crypto-entry 'pwhash-verify
    (define k* (or k (-get-kdf-impl (pwcred->kdf-spec cred))))
    (send k* pwhash-verify pass cred)))
;; Map the "$id$" prefix of an encoded password hash to a KDF spec.
(define (pwcred->kdf-spec cred)
  ;; see also crypto/private/rkt/pwhash
  (define m (regexp-match #rx"^[$]([a-z0-9-]*)[$]" cred))
  (cond
    [(not m) (crypto-error "invalid password hash format")]
    [else
     (define id (string->symbol (cadr m)))
     (case id
       [(argon2i argon2d argon2id scrypt) id]
       [(pbkdf2) '(pbkdf2 hmac sha1)]
       [(pbkdf2-sha256) '(pbkdf2 hmac sha256)]
       [(pbkdf2-sha512) '(pbkdf2 hmac sha512)]
       [else (crypto-error "unknown password hash identifier\n id: ~e" id)])]))
;; pbkdf2-hmac : convenience front-end for PBKDF2 with HMAC-<di>.
(define (pbkdf2-hmac di pass salt
                     #:iterations iterations
                     #:key-size [key-size (digest-size di)])
  (with-crypto-entry 'pbkdf2-hmac
    (let ([k (-get-kdf-impl `(pbkdf2 hmac ,di))])
      (send k kdf `((iterations ,iterations) (key-size ,key-size)) pass salt))))

;; scrypt : convenience front-end for scrypt; N is the CPU/memory cost,
;; r the block size, p the parallelization parameter (RFC 7914 terms).
(define (scrypt pass salt
                #:N N
                #:p [p 1]
                #:r [r 8]
                #:key-size [key-size 32])
  (with-crypto-entry 'scrypt
    (let ([k (-get-kdf-impl 'scrypt)])
      (send k kdf `((N ,N) (p ,p) (r ,r) (key-size ,key-size))
            pass salt))))
;; ============================================================
;; Public-key Systems
;; Copyright 2012-2018 Ryan Culpepper
;; Copyright 2007-2009 Dimitris Vyzovitis <vyzo at media.mit.edu>
(provide
private-key?
public-only-key?
(contract-out
[pk-can-sign?
(->* [(or/c pk-spec? pk-impl? pk-key?)]
[(or/c symbol? #f) (or/c symbol? #f)]
boolean?)]
[pk-can-encrypt?
(->* [(or/c pk-spec? pk-impl? pk-key?)] [(or/c symbol? #f)] boolean?)]
[pk-can-key-agree?
(-> (or/c pk-spec? pk-impl? pk-key?) boolean?)]
[pk-has-parameters?
(-> (or/c pk-spec? pk-impl? pk-key?) boolean?)]
[pk-security-strength
(-> (or/c pk-key? pk-parameters?) (or/c #f security-strength/c))]
[pk-key->parameters
(-> pk-key? (or/c pk-parameters? #f))]
[public-key=?
(->* [pk-key?] [] #:rest (listof pk-key?) boolean?)]
[pk-key->public-only-key
(-> pk-key? public-only-key?)]
[pk-key->datum
(-> pk-key? symbol? any/c)]
[datum->pk-key
(->* [any/c symbol?] [(or/c crypto-factory? (listof crypto-factory?))]
pk-key?)]
[pk-parameters->datum
(-> pk-parameters? symbol? any/c)]
[datum->pk-parameters
(->* [any/c symbol?] [(or/c crypto-factory? (listof crypto-factory?))]
pk-parameters?)]
[pk-sign
(->* [private-key? bytes?]
[#:digest (or/c digest-spec? #f 'none) #:pad sign-pad/c]
bytes?)]
[pk-verify
(->* [pk-key? bytes? bytes?]
[#:digest (or/c digest-spec? #f 'none) #:pad sign-pad/c]
boolean?)]
[pk-sign-digest
(->* [private-key? (or/c digest-spec? digest-impl?) bytes?]
[#:pad sign-pad/c]
bytes?)]
[pk-verify-digest
(->* [pk-key? (or/c digest-spec? digest-impl?) bytes? bytes?]
[#:pad sign-pad/c]
boolean?)]
[digest/sign
(->* [private-key? (or/c digest-spec? digest-impl?) input/c]
[#:pad sign-pad/c]
bytes?)]
[digest/verify
(->* [pk-key? (or/c digest-spec? digest-impl?) input/c bytes?]
[#:pad sign-pad/c]
boolean?)]
[pk-encrypt
(->* [pk-key? bytes?] [#:pad encrypt-pad/c]
bytes?)]
[pk-decrypt
(->* [private-key? bytes?] [#:pad encrypt-pad/c]
bytes?)]
[pk-derive-secret
(-> private-key? (or/c pk-key? bytes?)
bytes?)]
[generate-pk-parameters
(->* [(or/c pk-spec? pk-impl?)] [config/c]
pk-parameters?)]
[generate-private-key
(->* [(or/c pk-spec? pk-impl? pk-parameters?)] [config/c]
private-key?)]))
;; Padding modes for PK encryption; #f leaves the choice to the
;; implementation (confirm per backend).
(define encrypt-pad/c
  (or/c 'pkcs1-v1.5 'oaep 'none #f))
;; Padding modes for PK signatures; #f as above.
(define sign-pad/c
  (or/c 'pkcs1-v1.5 'pss 'pss* 'none #f))
;; Key (de)serialization format selector.
(define key-format/c
  (or/c symbol? #f))
;; Coerce a PK argument (spec or impl) to its implementation.
(define (-get-impl pki) (to-impl pki #:what "algorithm" #:lookup get-pk))
;; ----------------------------------------
;; A private key is really a keypair, including both private and public parts.
;; A public key contains only the public part.
;; A "private key" value is really a keypair; a "public-only key" holds
;; just the public part.
(define (private-key? x)
  (if (is-a? x pk-key<%>) (send x is-private?) #f))
(define (public-only-key? x)
  (if (is-a? x pk-key<%>) (not (send x is-private?)) #f))
;; pk-can-sign? : can the algorithm sign with the given padding/digest?
;; Bare specs are checked against the catalog (digest ignored);
;; impls/keys are queried, including the 'depends / 'nodigest cases.
(define (pk-can-sign? pki [pad #f] [dspec #f])
  (with-crypto-entry 'pk-can-sign?
    (cond [(pk-spec? pki) (pk-spec-can-sign? pki pad)] ;; no dspec!
          [else (let ([impl (to-impl pki)])
                  (case (send impl can-sign pad)
                    [(depends) (and (send impl can-sign2? pad dspec) #t)]
                    [(nodigest) (and (memq dspec '(#f none)) #t)]
                    [(#f) #f]
                    [else #t]))])))
;; pk-can-encrypt? : supports encryption (with the given padding, if any)?
(define (pk-can-encrypt? pki [pad #f])
  (with-crypto-entry 'pk-can-encrypt?
    (cond [(pk-spec? pki) (pk-spec-can-encrypt? pki)]
          [else (and (send (to-impl pki) can-encrypt? pad) #t)])))
;; pk-can-key-agree? : supports key agreement?
(define (pk-can-key-agree? pki)
  (with-crypto-entry 'pk-can-key-agree?
    (cond [(pk-spec? pki) (pk-spec-can-key-agree? pki)]
          [else (and (send (to-impl pki) can-key-agree?) #t)])))
;; pk-has-parameters? : does the algorithm use separate domain parameters?
(define (pk-has-parameters? pki)
  (with-crypto-entry 'pk-has-parameters?
    (cond [(pk-spec? pki) (pk-spec-has-parameters? pki)]
          [else (and (send (to-impl pki) has-params?) #t)])))
;; pk-security-strength : nominal strength in bits, or #f if unknown.
(define (pk-security-strength pk)
  (with-crypto-entry 'pk-security-strength
    (send pk get-security-bits)))

;; pk-key->parameters : the key's domain parameters, or #f if the
;; algorithm has none.
(define (pk-key->parameters pk)
  (with-crypto-entry 'pk-key->parameters
    (and (pk-has-parameters? pk)
         (send pk get-params))))

;; Are the *public parts* of the given keys equal?
(define (public-key=? k1 . ks)
  (with-crypto-entry 'public-key=?
    (for/and ([k (in-list ks)])
      (send k1 equal-to-key? k))))
;; pk-key->datum : serialize a key in the given format.
(define (pk-key->datum pk fmt)
  (with-crypto-entry 'pk-key->datum
    (send pk write-key fmt)))

;; datum->pk-key : try each factory's reader until one accepts the datum.
(define (datum->pk-key datum fmt [factory/s (crypto-factories)])
  (with-crypto-entry 'datum->pk-key
    (or (for/or ([factory (in-list (if (list? factory/s) factory/s (list factory/s)))])
          (let ([reader (send factory get-pk-reader)])
            (and reader (send reader read-key datum fmt))))
        (crypto-error "unable to read key\n format: ~e" fmt))))

;; pk-parameters->datum : serialize domain parameters.
(define (pk-parameters->datum pkp fmt)
  (with-crypto-entry 'pk-parameters->datum
    (send pkp write-params fmt)))

;; datum->pk-parameters : like datum->pk-key, for domain parameters.
(define (datum->pk-parameters datum fmt [factory/s (crypto-factories)])
  (with-crypto-entry 'datum->pk-parameters
    (or (for/or ([factory (in-list (if (list? factory/s) factory/s (list factory/s)))])
          (let ([reader (send factory get-pk-reader)])
            (and reader (send reader read-params datum fmt))))
        (crypto-error "unable to read parameters\n format: ~e" fmt))))

;; pk-key->public-only-key : strip the private part of a keypair.
(define (pk-key->public-only-key pk)
  (with-crypto-entry 'pk-key->public-only-key
    (send pk get-public-key)))
;; ----------------------------------------
;; pk-sign / pk-verify : sign or verify a message; the digest spec and
;; padding are interpreted by the key's implementation.
(define (pk-sign pk msg #:digest [dspec #f] #:pad [pad #f])
  (with-crypto-entry 'pk-sign
    (send pk sign msg dspec pad)))
(define (pk-verify pk msg sig #:digest [dspec #f] #:pad [pad #f])
  (with-crypto-entry 'pk-verify
    (send pk verify msg dspec pad sig)))

;; pk-sign-digest / pk-verify-digest : operate on a precomputed digest.
(define (pk-sign-digest pk di dbuf #:pad [pad #f])
  (with-crypto-entry 'pk-sign-digest
    (let ([di (to-spec di)])
      (send pk sign dbuf di pad))))
(define (pk-verify-digest pk di dbuf sig #:pad [pad #f])
  (with-crypto-entry 'pk-verify-digest
    (let ([di (to-spec di)])
      (send pk verify dbuf di pad sig))))

;; digest/sign, digest/verify : hash the input using a digest impl from
;; the key's own factory, then sign/verify that digest.
(define (digest/sign pk di inp #:pad [pad #f])
  (with-crypto-entry 'digest/sign
    (let* ([di (to-spec di)]
           [di* (get-digest di (get-factory pk))])
      (send pk sign (digest di* inp) di pad))))
(define (digest/verify pk di inp sig #:pad [pad #f])
  (with-crypto-entry 'digest/verify
    (let* ([di (to-spec di)]
           [di* (get-digest di (get-factory pk))])
      (send pk verify (digest di* inp) di pad sig))))
;; ----------------------------------------
;; pk-encrypt / pk-decrypt : public-key encryption and decryption.
(define (pk-encrypt pk buf #:pad [pad #f])
  (with-crypto-entry 'pk-encrypt
    (send pk encrypt buf pad)))
(define (pk-decrypt pk buf #:pad [pad #f])
  (with-crypto-entry 'pk-decrypt
    (send pk decrypt buf pad)))
;; ----------------------------------------
;; pk-derive-secret : key agreement against a peer key (object or its
;; raw byte serialization, per the contract).
(define (pk-derive-secret pk peer-key)
  (with-crypto-entry 'pk-derive-secret
    (send pk compute-secret peer-key)))
;; ----------------------------------------
;; generate-private-key : generate a keypair from an algorithm spec/impl
;; or from previously generated domain parameters.
(define (generate-private-key pki [config '()])
  (with-crypto-entry 'generate-private-key
    (define generator
      (if (is-a? pki pk-params<%>) pki (-get-impl pki)))
    (send generator generate-key config)))

;; generate-pk-parameters : generate domain parameters for an algorithm.
(define (generate-pk-parameters pki [config '()])
  (with-crypto-entry 'generate-pk-parameters
    (send (-get-impl pki) generate-params config)))
;; ============================================================
;; Security bits and levels
;; Strength is measured in bits; level is a coarse 0-5 scale.
(define security-strength/c exact-nonnegative-integer?)
(define security-level/c (integer-in 0 5))
;; security-level->strength : Nat[0-5] -> Nat
(define (security-level->strength level)
  (case level
    [(0) 0]
    [(1) 80]
    [(2) 112]
    [(3) 128]
    [(4) 192]
    ;; level 5 and anything out of range both map to 256 bits
    [else 256]))
;; security-strength->level : Nat -> Nat[0-5]
(define (security-strength->level secbits)
  ;; Each pair is (exclusive-upper-bound . level); >= 256 bits is level 5.
  (or (for/first ([bound (in-list '((80 . 0) (112 . 1) (128 . 2) (192 . 3) (256 . 4)))]
                  #:when (< secbits (car bound)))
        (cdr bound))
      5))
| null | https://raw.githubusercontent.com/rmculpepper/crypto/63e131c06d54756c3f36833ad0b700d56d6a75c8/crypto-lib/main.rkt | racket |
This library is free software: you can redistribute it and/or modify
it under the terms of the GNU Lesser General Public License as published
(at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this library. If not, see </>.
util
racket/random
============================================================
Factories
crypto-factories : parameter of (listof factory<%>)
============================================================
----
----
----
----
----
============================================================
Ciphers
----
Defer to impl when avail to support unknown ciphers or impl-dependent limits.
----
----
----
FIXME: any way to check for weak keys, avoid???
============================================================
see also crypto/private/rkt/pwhash
============================================================
Public-key Systems
----------------------------------------
A private key is really a keypair, including both private and public parts.
A public key contains only the public part.
no dspec!
Are the *public parts* of the given keys equal?
----------------------------------------
----------------------------------------
----------------------------------------
----------------------------------------
============================================================
Security bits and levels | Copyright 2012 - 2018
Copyright 2007 - 2009 < vyzo at media.mit.edu >
by the Free Software Foundation , either version 3 of the License , or
GNU Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public License
#lang racket/base
(require racket/contract/base
racket/class
racket/random
"private/common/interfaces.rkt"
"private/common/catalog.rkt"
"private/common/common.rkt"
"private/common/error.rkt"
"private/common/util.rkt")
(provide crypto-factory?
digest-spec?
digest-impl?
digest-ctx?
cipher-spec?
cipher-impl?
cipher-ctx?
pk-spec?
pk-impl?
pk-parameters?
pk-key?
kdf-spec?
kdf-impl?
(struct-out bytes-range)
input/c
security-strength/c
security-level/c
security-level->strength
security-strength->level
(recontract-out
hex->bytes
bytes->hex
bytes->hex-string
crypto-bytes=?)
crypto-random-bytes)
Common
(define nat? exact-nonnegative-integer?)
(define key/c bytes?)
(define iv/c (or/c bytes? #f))
(define pad-mode/c boolean?)
Copyright 2013 - 2018
(provide
(contract-out
[crypto-factories
(parameter/c factories/c (listof crypto-factory?))]
[get-factory
(-> (or/c digest-impl? digest-ctx?
cipher-impl? cipher-ctx?
pk-impl? pk-parameters? pk-key?)
crypto-factory?)]
[factory-version
(-> crypto-factory? (or/c (listof exact-nonnegative-integer?) #f))]
[factory-print-info
(-> crypto-factory? void?)]
[get-digest
(->* [digest-spec?] [factories/c] (or/c digest-impl? #f))]
[get-cipher
(->* [cipher-spec?] [factories/c] (or/c cipher-impl? #f))]
[get-pk
(->* [symbol?] [factories/c] (or/c pk-impl? #f))]
[get-kdf
(->* [kdf-spec?] [factories/c] (or/c kdf-impl? #f))]
))
(define factories/c (or/c crypto-factory? (listof crypto-factory?)))
coerce - list : ( or / c X ( listof X ) - > ( listof X )
;; coerce-list : (or/c X (listof X)) -> (listof X)
;; Pass lists through unchanged; wrap any other value in a singleton list.
(define (coerce-list xs)
  (cond
    [(list? xs) xs]
    [else (list xs)]))
(define crypto-factories (make-parameter null coerce-list))
;; get-factory : (or/c impl<%> ctx<%>) -> crypto-factory?
;; Walk from an implementation or context object up to its owning factory.
;; A ctx is first dereferenced to its impl, which knows its factory.
(define (get-factory i)
  (with-crypto-entry 'get-factory
    (let loop ([i i])
      ;; NOTE(review): no else clause — a value that is neither an impl<%>
      ;; nor a ctx<%> falls through and yields void; the contract-out on
      ;; get-factory is presumably what rules that out. TODO confirm.
      (cond [(is-a? i impl<%>) (send i get-factory)]
            [(is-a? i ctx<%>) (loop (send i get-impl))]))))
(define (get-digest di [factory/s (crypto-factories)])
(with-crypto-entry 'get-digest
(for/or ([f (in-list (coerce-list factory/s))])
(send f get-digest di))))
(define (get-cipher ci [factory/s (crypto-factories)])
(with-crypto-entry 'get-cipher
(for/or ([f (in-list (coerce-list factory/s))])
(send f get-cipher ci))))
(define (get-pk pki [factory/s (crypto-factories)])
(with-crypto-entry 'get-pk
(for/or ([f (in-list (coerce-list factory/s))])
(send f get-pk pki))))
(define (get-kdf k [factory/s (crypto-factories)])
(with-crypto-entry 'get-kdf
(for/or ([f (in-list (coerce-list factory/s))])
(send f get-kdf k))))
(define (factory-print-info factory)
(send factory print-info) (void))
(define (factory-version factory)
(send factory get-version))
Digests
Copyright 2012 - 2018
Copyright 2007 - 2009 < vyzo at media.mit.edu >
(provide
(contract-out
[digest-size
(-> (or/c digest-spec? digest-impl? digest-ctx?) exact-nonnegative-integer?)]
[digest-block-size
(-> (or/c digest-spec? digest-impl? digest-ctx?) exact-nonnegative-integer?)]
[digest-security-strength
(-> (or/c digest-spec? digest-impl? digest-ctx?) boolean? (or/c #f security-strength/c))]
[digest
(->* [digest/c input/c] [#:key (or/c bytes? #f)] bytes?)]
[hmac
(-> digest/c bytes? input/c bytes?)]
[make-digest-ctx
(->* [digest/c] [#:key (or/c bytes? #f)] digest-ctx?)]
[digest-update
(-> digest-ctx? input/c void?)]
[digest-final
(-> digest-ctx? bytes?)]
[digest-copy
(-> digest-ctx? (or/c digest-ctx? #f))]
[digest-peek-final
(-> digest-ctx? (or/c bytes? #f))]
[make-hmac-ctx
(-> digest/c bytes? digest-ctx?)]
[generate-hmac-key
(-> digest/c bytes?)]))
(define digest/c (or/c digest-spec? digest-impl?))
(define (-get-digest-impl o) (to-impl o #:what "digest" #:lookup get-digest))
(define (-get-digest-info o) (to-info o #:what "digest" #:lookup digest-spec->info))
(define (digest-size o)
(with-crypto-entry 'digest-size
(send (-get-digest-info o) get-size)))
(define (digest-block-size o)
(with-crypto-entry 'digest-block-size
(send (-get-digest-info o) get-block-size)))
(define (digest-security-strength o [cr? #t])
(with-crypto-entry 'digest-security-strength
(send (-get-digest-info o) get-security-strength cr?)))
(define (make-digest-ctx di #:key [key #f])
(with-crypto-entry 'make-digest-ctx
(send (-get-digest-impl di) new-ctx key)))
(define (digest-update dg src)
(with-crypto-entry 'digest-update
(send dg update src)))
(define (digest-final dg)
(with-crypto-entry 'digest-final
(send dg final)))
(define (digest-copy dg)
(with-crypto-entry 'digest-copy
(send dg copy)))
(define (digest-peek-final dg)
(with-crypto-entry 'digest-peek-final
(let ([dg2 (send dg copy)]) (and dg2 (send dg2 final)))))
(define (digest di inp #:key [key #f])
(with-crypto-entry 'digest
(let ([di (-get-digest-impl di)])
(send di digest inp key))))
(define (make-hmac-ctx di key)
(with-crypto-entry 'make-hmac-ctx
(let ([di (-get-digest-impl di)])
(send di new-hmac-ctx key))))
(define (hmac di key inp)
(with-crypto-entry 'hmac
(let ([di (-get-digest-impl di)])
(send di hmac key inp))))
(define (generate-hmac-key di)
(with-crypto-entry 'generate-hmac-key
(crypto-random-bytes (digest-size di))))
Copyright 2012 - 2018
Copyright 2007 - 2009 < vyzo at media.mit.edu >
(provide
(contract-out
[cipher-default-key-size
(-> (or/c cipher-spec? cipher-impl? cipher-ctx?) nat?)]
[cipher-key-sizes
(-> (or/c cipher-spec? cipher-impl?) (listof nat?))]
[cipher-block-size
(-> (or/c cipher-spec? cipher-impl? cipher-ctx?) nat?)]
[cipher-iv-size
(-> (or/c cipher-spec? cipher-impl? cipher-ctx?) nat?)]
[cipher-aead?
(-> (or/c cipher-spec? cipher-impl? cipher-ctx?) boolean?)]
[cipher-default-auth-size
(-> (or/c cipher-spec? cipher-impl? cipher-ctx?) nat?)]
[cipher-chunk-size
(-> (or/c cipher-impl? cipher-ctx?) nat?)]
[make-encrypt-ctx
(->* [cipher/c key/c iv/c]
[#:pad pad-mode/c #:auth-size (or/c nat? #f) #:auth-attached? boolean?]
encrypt-ctx?)]
[make-decrypt-ctx
(->* [cipher/c key/c iv/c]
[#:pad pad-mode/c #:auth-size (or/c nat? #f) #:auth-attached? boolean?]
decrypt-ctx?)]
[encrypt-ctx?
(-> any/c boolean?)]
[decrypt-ctx?
(-> any/c boolean?)]
[cipher-update
(-> cipher-ctx? input/c bytes?)]
[cipher-update-aad
(-> cipher-ctx? input/c void?)]
[cipher-final
(->* [cipher-ctx?] [(or/c bytes? #f)] bytes?)]
[cipher-get-auth-tag
(-> cipher-ctx? (or/c bytes? #f))]
[encrypt
(->* [cipher/c key/c iv/c input/c]
[#:pad pad-mode/c #:aad input/c #:auth-size (or/c nat? #f)]
bytes?)]
[decrypt
(->* [cipher/c key/c iv/c input/c]
[#:pad pad-mode/c #:aad input/c #:auth-size (or/c nat? #f)]
bytes?)]
[encrypt/auth
(->* [cipher/c key/c iv/c input/c]
[#:pad pad-mode/c #:aad input/c #:auth-size (or/c nat? #f)]
(values bytes? (or/c bytes? #f)))]
[decrypt/auth
(->* [cipher/c key/c iv/c input/c]
[#:pad pad-mode/c #:aad input/c #:auth-tag (or/c bytes? #f)]
bytes?)]
[generate-cipher-key
(->* [cipher/c] [#:size nat?] key/c)]
[generate-cipher-iv
(->* [cipher/c] [#:size nat?] iv/c)]))
(define cipher/c (or/c cipher-spec? cipher-impl?))
(define default-pad #t)
(define (-get-cipher-impl o) (to-impl o #:what "cipher" #:lookup get-cipher))
(define (-get-cipher-info o) (to-info o #:what "cipher" #:lookup cipher-spec->info))
(define (cipher-default-key-size o)
(with-crypto-entry 'cipher-default-key-size
(send (-get-cipher-info o) get-key-size)))
(define (cipher-key-sizes o)
(with-crypto-entry 'cipher-key-sizes
(size-set->list (send (-get-cipher-info o) get-key-sizes))))
(define (cipher-block-size o)
(with-crypto-entry 'cipher-block-size
(send (-get-cipher-info o) get-block-size)))
(define (cipher-chunk-size o)
(with-crypto-entry 'cipher-chunk-size
(send (-get-cipher-info o) get-chunk-size)))
(define (cipher-iv-size o)
(with-crypto-entry 'cipher-iv-size
(send (-get-cipher-info o) get-iv-size)))
(define (cipher-aead? o)
(with-crypto-entry 'cipher-aead?
(send (-get-cipher-info o) aead?)))
(define (cipher-default-auth-size o)
(with-crypto-entry 'cipher-default-auth-size
(send (-get-cipher-info o) get-auth-size)))
(define (encrypt-ctx? x)
(and (cipher-ctx? x) (send x get-encrypt?)))
(define (decrypt-ctx? x)
(and (cipher-ctx? x) (not (send x get-encrypt?))))
;; make-{en,de}crypt-ctx : ... -> cipher-ctx
;; auth-tag-size : Nat/#f -- #f means default tag size for cipher
(define (make-encrypt-ctx ci key iv #:pad [pad? #t]
#:auth-size [auth-size #f] #:auth-attached? [auth-attached? #t])
(with-crypto-entry 'make-encrypt-ctx
(-encrypt-ctx ci key iv pad? auth-size auth-attached?)))
(define (make-decrypt-ctx ci key iv #:pad [pad? #t]
#:auth-size [auth-size #f] #:auth-attached? [auth-attached? #t])
(with-crypto-entry 'make-decrypt-ctx
(-decrypt-ctx ci key iv pad? auth-size auth-attached?)))
(define (-encrypt-ctx ci key iv pad auth-size auth-attached?)
(let ([ci (-get-cipher-impl ci)])
(send ci new-ctx key (or iv #"") #t pad auth-size auth-attached?)))
(define (-decrypt-ctx ci key iv pad auth-size auth-attached?)
(let ([ci (-get-cipher-impl ci)])
(send ci new-ctx key (or iv #"") #f pad auth-size auth-attached?)))
(define (cipher-update-aad c inp)
(with-crypto-entry 'cipher-update-aad
(send c update-aad inp)
(void)))
(define (cipher-update c inp)
(with-crypto-entry 'cipher-update
(send c update inp)
(send c get-output)))
(define (cipher-final c [auth-tag #f])
(with-crypto-entry 'cipher-final
(send c final auth-tag)
(send c get-output)))
(define (cipher-get-auth-tag c)
(with-crypto-entry 'cipher-get-auth-tag
(send c get-auth-tag)))
(define (encrypt ci key iv inp
#:pad [pad default-pad] #:aad [aad-inp null] #:auth-size [auth-size #f])
(with-crypto-entry 'encrypt
(let ([ci (-get-cipher-impl ci)])
(define ctx (-encrypt-ctx ci key iv pad auth-size #t))
(send ctx update-aad aad-inp)
(send ctx update inp)
(send ctx final #f)
(send ctx get-output))))
(define (decrypt ci key iv inp
#:pad [pad default-pad] #:aad [aad-inp null] #:auth-size [auth-size #f])
(with-crypto-entry 'decrypt
(let ([ci (-get-cipher-impl ci)])
(define ctx (-decrypt-ctx ci key iv pad auth-size #t))
(send ctx update-aad aad-inp)
(send ctx update inp)
(send ctx final #f)
(send ctx get-output))))
(define (encrypt/auth ci key iv inp
#:pad [pad default-pad] #:aad [aad-inp null] #:auth-size [auth-size #f])
(with-crypto-entry 'encrypt/auth
(let ([ci (-get-cipher-impl ci)])
(define ctx (-encrypt-ctx ci key iv pad auth-size #f))
(send ctx update-aad aad-inp)
(send ctx update inp)
(send ctx final #f)
(values (send ctx get-output) (send ctx get-auth-tag)))))
(define (decrypt/auth ci key iv inp
#:pad [pad default-pad] #:aad [aad-inp null] #:auth-tag [auth-tag #f])
(with-crypto-entry 'decrypt
(let ([ci (-get-cipher-impl ci)])
(define auth-len (and auth-tag (bytes-length auth-tag)))
(define ctx (-decrypt-ctx ci key iv pad auth-len #f))
(send ctx update-aad aad-inp)
(send ctx update inp)
(send ctx final auth-tag)
(send ctx get-output))))
(define (generate-cipher-key ci #:size [size (cipher-default-key-size ci)])
(with-crypto-entry 'generate-cipher-key
(crypto-random-bytes size)))
(define (generate-cipher-iv ci #:size [size (cipher-iv-size ci)])
(with-crypto-entry 'generate-cipher-iv
(if (positive? size) (crypto-random-bytes size) #"")))
KDFs and Password Hashing
Copyright 2014 - 2018
(provide
(contract-out
[kdf
(->* [(or/c kdf-spec? kdf-impl?)
bytes?
(or/c bytes? #f)]
[(listof (list/c symbol? any/c))]
bytes?)]
[pwhash
(->* [(or/c kdf-spec? kdf-impl?) bytes?]
[(listof (list/c symbol? any/c))]
string?)]
[pwhash-verify
(-> (or/c kdf-impl? #f) bytes? string?
boolean?)]
[pbkdf2-hmac
(->* [digest-spec? bytes? bytes? #:iterations exact-positive-integer?]
[#:key-size exact-positive-integer?]
bytes?)]
[scrypt
(->* [bytes?
bytes?
#:N exact-positive-integer?]
[#:r exact-positive-integer?
#:p exact-positive-integer?
#:key-size exact-positive-integer?]
bytes?)]
))
(define (-get-kdf-impl o) (to-impl o #:what "KDF" #:lookup get-kdf))
(define (kdf k pass salt [params '()])
(with-crypto-entry 'kdf
(let ([k (-get-kdf-impl k)])
(send k kdf0 params pass salt))))
(define (pwhash k pass [params '()])
(with-crypto-entry 'pwhash
(let ([k (-get-kdf-impl k)])
(send k pwhash params pass))))
(define (pwhash-verify k pass cred)
(with-crypto-entry 'pwhash-verify
(define k* (or k (-get-kdf-impl (pwcred->kdf-spec cred))))
(send k* pwhash-verify pass cred)))
;; pwcred->kdf-spec : string -> kdf-spec
;; Recover the KDF spec from a password-hash credential's "$id$..." prefix,
;; raising a crypto error for malformed or unrecognized credentials.
(define (pwcred->kdf-spec cred)
  (define m (regexp-match #rx"^[$]([a-z0-9-]*)[$]" cred))
  (cond
    [(not m) (crypto-error "invalid password hash format")]
    [else
     (define id (string->symbol (cadr m)))
     (case id
       ;; These identifiers name the KDF spec directly.
       [(argon2i argon2d argon2id scrypt) id]
       ;; PBKDF2 identifiers encode the underlying HMAC digest.
       [(pbkdf2) '(pbkdf2 hmac sha1)]
       [(pbkdf2-sha256) '(pbkdf2 hmac sha256)]
       [(pbkdf2-sha512) '(pbkdf2 hmac sha512)]
       [else (crypto-error "unknown password hash identifier\n  id: ~e" id)])]))
(define (pbkdf2-hmac di pass salt
#:iterations iterations
#:key-size [key-size (digest-size di)])
(with-crypto-entry 'pbkdf2-hmac
(let ([k (-get-kdf-impl `(pbkdf2 hmac ,di))])
(send k kdf `((iterations ,iterations) (key-size ,key-size)) pass salt))))
(define (scrypt pass salt
#:N N
#:p [p 1]
#:r [r 8]
#:key-size [key-size 32])
(with-crypto-entry 'scrypt
(let ([k (-get-kdf-impl 'scrypt)])
(send k kdf `((N ,N) (p ,p) (r ,r) (key-size ,key-size))
pass salt))))
Copyright 2012 - 2018
Copyright 2007 - 2009 < vyzo at media.mit.edu >
(provide
private-key?
public-only-key?
(contract-out
[pk-can-sign?
(->* [(or/c pk-spec? pk-impl? pk-key?)]
[(or/c symbol? #f) (or/c symbol? #f)]
boolean?)]
[pk-can-encrypt?
(->* [(or/c pk-spec? pk-impl? pk-key?)] [(or/c symbol? #f)] boolean?)]
[pk-can-key-agree?
(-> (or/c pk-spec? pk-impl? pk-key?) boolean?)]
[pk-has-parameters?
(-> (or/c pk-spec? pk-impl? pk-key?) boolean?)]
[pk-security-strength
(-> (or/c pk-key? pk-parameters?) (or/c #f security-strength/c))]
[pk-key->parameters
(-> pk-key? (or/c pk-parameters? #f))]
[public-key=?
(->* [pk-key?] [] #:rest (listof pk-key?) boolean?)]
[pk-key->public-only-key
(-> pk-key? public-only-key?)]
[pk-key->datum
(-> pk-key? symbol? any/c)]
[datum->pk-key
(->* [any/c symbol?] [(or/c crypto-factory? (listof crypto-factory?))]
pk-key?)]
[pk-parameters->datum
(-> pk-parameters? symbol? any/c)]
[datum->pk-parameters
(->* [any/c symbol?] [(or/c crypto-factory? (listof crypto-factory?))]
pk-parameters?)]
[pk-sign
(->* [private-key? bytes?]
[#:digest (or/c digest-spec? #f 'none) #:pad sign-pad/c]
bytes?)]
[pk-verify
(->* [pk-key? bytes? bytes?]
[#:digest (or/c digest-spec? #f 'none) #:pad sign-pad/c]
boolean?)]
[pk-sign-digest
(->* [private-key? (or/c digest-spec? digest-impl?) bytes?]
[#:pad sign-pad/c]
bytes?)]
[pk-verify-digest
(->* [pk-key? (or/c digest-spec? digest-impl?) bytes? bytes?]
[#:pad sign-pad/c]
boolean?)]
[digest/sign
(->* [private-key? (or/c digest-spec? digest-impl?) input/c]
[#:pad sign-pad/c]
bytes?)]
[digest/verify
(->* [pk-key? (or/c digest-spec? digest-impl?) input/c bytes?]
[#:pad sign-pad/c]
boolean?)]
[pk-encrypt
(->* [pk-key? bytes?] [#:pad encrypt-pad/c]
bytes?)]
[pk-decrypt
(->* [private-key? bytes?] [#:pad encrypt-pad/c]
bytes?)]
[pk-derive-secret
(-> private-key? (or/c pk-key? bytes?)
bytes?)]
[generate-pk-parameters
(->* [(or/c pk-spec? pk-impl?)] [config/c]
pk-parameters?)]
[generate-private-key
(->* [(or/c pk-spec? pk-impl? pk-parameters?)] [config/c]
private-key?)]))
(define encrypt-pad/c
(or/c 'pkcs1-v1.5 'oaep 'none #f))
(define sign-pad/c
(or/c 'pkcs1-v1.5 'pss 'pss* 'none #f))
(define key-format/c
(or/c symbol? #f))
(define (-get-impl pki) (to-impl pki #:what "algorithm" #:lookup get-pk))
(define (private-key? x)
(and (is-a? x pk-key<%>) (send x is-private?)))
(define (public-only-key? x)
(and (is-a? x pk-key<%>) (not (send x is-private?))))
(define (pk-can-sign? pki [pad #f] [dspec #f])
(with-crypto-entry 'pk-can-sign?
[else (let ([impl (to-impl pki)])
(case (send impl can-sign pad)
[(depends) (and (send impl can-sign2? pad dspec) #t)]
[(nodigest) (and (memq dspec '(#f none)) #t)]
[(#f) #f]
[else #t]))])))
(define (pk-can-encrypt? pki [pad #f])
(with-crypto-entry 'pk-can-encrypt?
(cond [(pk-spec? pki) (pk-spec-can-encrypt? pki)]
[else (and (send (to-impl pki) can-encrypt? pad) #t)])))
(define (pk-can-key-agree? pki)
(with-crypto-entry 'pk-can-key-agree?
(cond [(pk-spec? pki) (pk-spec-can-key-agree? pki)]
[else (and (send (to-impl pki) can-key-agree?) #t)])))
(define (pk-has-parameters? pki)
(with-crypto-entry 'pk-has-parameters?
(cond [(pk-spec? pki) (pk-spec-has-parameters? pki)]
[else (and (send (to-impl pki) has-params?) #t)])))
(define (pk-security-strength pk)
(with-crypto-entry 'pk-security-strength
(send pk get-security-bits)))
(define (pk-key->parameters pk)
(with-crypto-entry 'pk-key->parameters
(and (pk-has-parameters? pk)
(send pk get-params))))
(define (public-key=? k1 . ks)
(with-crypto-entry 'public-key=?
(for/and ([k (in-list ks)])
(send k1 equal-to-key? k))))
(define (pk-key->datum pk fmt)
(with-crypto-entry 'pk-key->datum
(send pk write-key fmt)))
;; -read-from-factories : (pk-reader -> (or/c X #f)) (or/c factory (listof factory))
;;                        string any/c -> X
;; Try each factory's PK reader in order, returning the first successful
;; result; raise a crypto error naming `what` if none can read the datum.
;; Shared by datum->pk-key and datum->pk-parameters, which previously
;; duplicated this loop; uses the file's coerce-list helper for the
;; single-factory convenience case.
(define (-read-from-factories read-one factory/s what fmt)
  (or (for/or ([factory (in-list (coerce-list factory/s))])
        (let ([reader (send factory get-pk-reader)])
          (and reader (read-one reader))))
      (crypto-error "unable to read ~a\n  format: ~e" what fmt)))

;; Deserialize a private or public key from `datum` in format `fmt`,
;; trying each given factory (default: current crypto-factories).
(define (datum->pk-key datum fmt [factory/s (crypto-factories)])
  (with-crypto-entry 'datum->pk-key
    (-read-from-factories (lambda (reader) (send reader read-key datum fmt))
                          factory/s "key" fmt)))

;; Serialize PK parameters to the representation named by `fmt`.
(define (pk-parameters->datum pkp fmt)
  (with-crypto-entry 'pk-parameters->datum
    (send pkp write-params fmt)))

;; Deserialize PK parameters from `datum` in format `fmt`,
;; trying each given factory (default: current crypto-factories).
(define (datum->pk-parameters datum fmt [factory/s (crypto-factories)])
  (with-crypto-entry 'datum->pk-parameters
    (-read-from-factories (lambda (reader) (send reader read-params datum fmt))
                          factory/s "parameters" fmt)))
(define (pk-key->public-only-key pk)
(with-crypto-entry 'pk-key->public-only-key
(send pk get-public-key)))
(define (pk-sign pk msg #:digest [dspec #f] #:pad [pad #f])
(with-crypto-entry 'pk-sign
(send pk sign msg dspec pad)))
(define (pk-verify pk msg sig #:digest [dspec #f] #:pad [pad #f])
(with-crypto-entry 'pk-verify
(send pk verify msg dspec pad sig)))
(define (pk-sign-digest pk di dbuf #:pad [pad #f])
(with-crypto-entry 'pk-sign-digest
(let ([di (to-spec di)])
(send pk sign dbuf di pad))))
(define (pk-verify-digest pk di dbuf sig #:pad [pad #f])
(with-crypto-entry 'pk-verify-digest
(let ([di (to-spec di)])
(send pk verify dbuf di pad sig))))
(define (digest/sign pk di inp #:pad [pad #f])
(with-crypto-entry 'digest/sign
(let* ([di (to-spec di)]
[di* (get-digest di (get-factory pk))])
(send pk sign (digest di* inp) di pad))))
(define (digest/verify pk di inp sig #:pad [pad #f])
(with-crypto-entry 'digest/verify
(let* ([di (to-spec di)]
[di* (get-digest di (get-factory pk))])
(send pk verify (digest di* inp) di pad sig))))
(define (pk-encrypt pk buf #:pad [pad #f])
(with-crypto-entry 'pk-encrypt
(send pk encrypt buf pad)))
(define (pk-decrypt pk buf #:pad [pad #f])
(with-crypto-entry 'pk-decrypt
(send pk decrypt buf pad)))
(define (pk-derive-secret pk peer-key)
(with-crypto-entry 'pk-derive-secret
(send pk compute-secret peer-key)))
(define (generate-private-key pki [config '()])
(with-crypto-entry 'generate-private-key
(if (is-a? pki pk-params<%>)
(send pki generate-key config)
(let ([pki (-get-impl pki)])
(send pki generate-key config)))))
(define (generate-pk-parameters pki [config '()])
(with-crypto-entry 'generate-pk-parameters
(let ([pki (-get-impl pki)])
(send pki generate-params config))))
(define security-strength/c exact-nonnegative-integer?)
(define security-level/c (integer-in 0 5))
;; security-level->strength : Nat[0-5] -> Nat
(define (security-level->strength level)
  ;; Translate a discrete security level (0-5) into its bit strength via an
  ;; association table; anything outside the table saturates at 256 bits,
  ;; matching the original case form's else branch.
  (cond [(assv level '((0 . 0) (1 . 80) (2 . 112) (3 . 128) (4 . 192) (5 . 256)))
         => cdr]
        [else 256]))
;; security-strength->level : Nat -> Nat[0-5]
(define (security-strength->level secbits)
  ;; Count how many of the standard strength thresholds `secbits` reaches;
  ;; that count is the security level (0-5).
  (let loop ([thresholds '(80 112 128 192 256)] [level 0])
    (cond [(null? thresholds) level]
          [(< secbits (car thresholds)) level]
          [else (loop (cdr thresholds) (add1 level))])))
|
ffb661136b81d4edaf6997080ee3b2814d48c10ecff441c3c3518c13fb3475d2 | grin-compiler/ghc-wpc-sample-programs | XorShift.hs | -- |
-- Module : Foundation.Random.XorShift
-- License : BSD-style
--
-- XorShift variant: Xoroshiro128+
-- <>
--
-- Xoroshiro128+ is a PRNG that uses a shift/rotate-based linear transformation.
-- This is lar
--
-- C implementation at:
-- <>
--
module Basement.Alg.XorShift
( State(..)
, next
, nextDouble
, jump
) where
import Data.Word
import Data.Bits
import Basement.Compat.Base
import Basement.Floating (wordToDouble)
import Basement.Numerical.Additive
import Basement.Numerical.Subtractive
-- | State of Xoroshiro128 plus
data State = State {-# UNPACK #-} !Word64 {-# UNPACK #-} !Word64
-- | Given a state, call the function 'f' with the generated 'Word64' and the
-- next 'State'. The continuation-passing shape avoids allocating a pair.
-- (The Haddock marker on this comment had been stripped in this copy,
-- leaving bare prose at the top level; restored here — code unchanged.)
next :: State -> (Word64 -> State -> a) -> a
next (State s0 s1prev) f = f ran stNext
  where
    !stNext = State s0' s1'
    -- Xoroshiro128+ output is simply the sum of the two state words.
    !ran = s0 + s1prev
    !s1 = s0 `xor` s1prev
    -- The shift/rotate constants (55, 14, 36) are the published
    -- xoroshiro128+ parameters.
    s0' = (s0 `rotateL` 55) `xor` s1 `xor` (s1 `unsafeShiftL` 14)
    s1' = (s1 `rotateL` 36)
-- | Same as 'next' but give a random value of type 'Double' in the range
-- of [0.0 .. 1.0].
-- (Comment markers inside this definition had been stripped in this copy,
-- leaving bare prose inside the where-clause; restored here — code unchanged.)
nextDouble :: State -> (Double -> State -> a) -> a
nextDouble st f = next st $ \w -> f (toDouble w)
  where
    -- Generate a number in the interval [1 .. 2[ by bit manipulation:
    -- keep the low 52 bits of the random word as the mantissa under a
    -- fixed exponent (~2^52 distinct values), then subtract 1.0 to land
    -- in [0 .. 1[.
    toDouble w = wordToDouble (upperMask .|. (w .&. lowerMask)) - 1.0
      where
        upperMask = 0x3FF0000000000000
        lowerMask = 0x000FFFFFFFFFFFFF
-- | Jump the state by 2^64 calls of 'next'.
-- (Haddock marker restored; code unchanged.)
jump :: State -> State
jump (State s0 s1) = withK 0xd86b048b86aa9922
                   $ withK 0xbeac0467eba5facb
                   $ (State 0 0)
  where
    -- Fold the bits of the jump-polynomial constant k into the accumulator.
    withK :: Word64 -> State -> State
    withK !k = loop 0
      where
        loop !i st@(State c0 c1)
            | i == 64 = st
            | testBit k i = loop (i+1) (State (c0 `xor` s0) (c1 `xor` s1))
            -- NOTE(review): stopping at the first clear bit of k looks
            -- suspicious — the reference xoroshiro jump scans all 64 bits
            -- of each constant (i.e. continue with `loop (i+1) st`).
            -- TODO confirm against upstream basement/reference C code.
            | otherwise = st
| null | https://raw.githubusercontent.com/grin-compiler/ghc-wpc-sample-programs/0e3a9b8b7cc3fa0da7c77fb7588dd4830fb087f7/basement-0.0.11/Basement/Alg/XorShift.hs | haskell | |
Module : Foundation.Random.XorShift
License : BSD-style
<>
Xoroshiro128+ is a PRNG that uses a shift/rotate-based linear transformation.
This is lar
C implementation at:
<>
# UNPACK #
# UNPACK # | XorShift variant : Xoroshiro128 +
module Basement.Alg.XorShift
( State(..)
, next
, nextDouble
, jump
) where
import Data.Word
import Data.Bits
import Basement.Compat.Base
import Basement.Floating (wordToDouble)
import Basement.Numerical.Additive
import Basement.Numerical.Subtractive
| State of Xoroshiro128 plus
| Given a state , call the function ' f ' with the generated Word64 and the next State
next :: State -> (Word64 -> State -> a) -> a
next (State s0 s1prev) f = f ran stNext
where
!stNext = State s0' s1'
!ran = s0 + s1prev
!s1 = s0 `xor` s1prev
s0' = (s0 `rotateL` 55) `xor` s1 `xor` (s1 `unsafeShiftL` 14)
s1' = (s1 `rotateL` 36)
| Same as ' next ' but give a random value of type Double in the range of [ 0.0 .. 1.0 ]
nextDouble :: State -> (Double -> State -> a) -> a
nextDouble st f = next st $ \w -> f (toDouble w)
where
generate a number in the interval [ 1 .. 2 [ by bit manipulation .
this generate double with a ~2 ^ 52
toDouble w = wordToDouble (upperMask .|. (w .&. lowerMask)) - 1.0
where
upperMask = 0x3FF0000000000000
lowerMask = 0x000FFFFFFFFFFFFF
| Jump the state by 2 ^ 64 calls of next
jump :: State -> State
jump (State s0 s1) = withK 0xd86b048b86aa9922
$ withK 0xbeac0467eba5facb
$ (State 0 0)
where
withK :: Word64 -> State -> State
withK !k = loop 0
where
loop !i st@(State c0 c1)
| i == 64 = st
| testBit k i = loop (i+1) (State (c0 `xor` s0) (c1 `xor` s1))
| otherwise = st
|
b1dc32beb7e59b71333d1bb21d6f09b369d05c8ed7c356500e6d31bf44077d6f | AmpersandTarski/Ampersand | Extra.hs | # LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
-- | Extra functions for optparse-applicative.
module Options.Applicative.Builder.Extra
( boolFlags,
boolFlagsNoDefault,
firstBoolFlagsNoDefault,
firstBoolFlagsTrue,
firstBoolFlagsFalse,
enableDisableFlags,
enableDisableFlagsNoDefault,
extraHelpOption,
textOption,
textArgument,
optionalFirst,
optionalFirstTrue,
optionalFirstFalse,
-- ,absFileOption
-- ,relFileOption
-- ,absDirOption
-- ,relDirOption
eitherReader',
fileCompleter,
fileExtCompleter,
dirCompleter,
PathCompleterOpts (..),
defaultPathCompleterOpts,
pathCompleterWith,
unescapeBashArg,
)
where
--import Path hiding ((</>))
import Ampersand.Basics
import Data.List (isPrefixOf)
import Data.Monoid hiding ((<>))
import qualified Data.Text as T
import Options.Applicative
import Options.Applicative.Types (readerAsk)
import System.Directory (doesDirectoryExist, getCurrentDirectory, getDirectoryContents)
import System.FilePath (isRelative, splitFileName, takeBaseName, takeExtension, (</>))
-- | Enable/disable flags for a 'Bool'.
boolFlags ::
-- | Default value
Bool ->
-- | Flag name
String ->
-- | Help suffix
String ->
Mod FlagFields Bool ->
Parser Bool
boolFlags defaultValue name' helpSuffix =
enableDisableFlags defaultValue True False name' $
concat
[ helpSuffix,
" (default: ",
if defaultValue then "enabled" else "disabled",
")"
]
-- | Enable/disable flags for a 'Bool', without a default case (to allow chaining with '<|>').
boolFlagsNoDefault ::
-- | Flag name
String ->
-- | Help suffix
String ->
Mod FlagFields Bool ->
Parser Bool
boolFlagsNoDefault = enableDisableFlagsNoDefault True False
-- | Flag with no default of True or False
firstBoolFlagsNoDefault :: String -> String -> Mod FlagFields (Maybe Bool) -> Parser (First Bool)
firstBoolFlagsNoDefault name' helpSuffix mod' =
First
<$> enableDisableFlags
Nothing
(Just True)
(Just False)
name'
helpSuffix
mod'
-- | Flag with a Semigroup instance and a default of True
firstBoolFlagsTrue :: String -> String -> Mod FlagFields FirstTrue -> Parser FirstTrue
firstBoolFlagsTrue name' helpSuffix =
enableDisableFlags
mempty
(FirstTrue (Just True))
(FirstTrue (Just False))
name'
$ helpSuffix ++ " (default: enabled)"
-- | Flag with a Semigroup instance and a default of False
firstBoolFlagsFalse :: String -> String -> Mod FlagFields FirstFalse -> Parser FirstFalse
firstBoolFlagsFalse name' helpSuffix =
enableDisableFlags
mempty
(FirstFalse (Just True))
(FirstFalse (Just False))
name'
$ helpSuffix ++ " (default: disabled)"
-- | Enable/disable flags for any type.
enableDisableFlags ::
-- | Default value
a ->
-- | Enabled value
a ->
-- | Disabled value
a ->
-- | Name
String ->
-- | Help suffix
String ->
Mod FlagFields a ->
Parser a
enableDisableFlags defaultValue enabledValue disabledValue name' helpSuffix mods =
enableDisableFlagsNoDefault enabledValue disabledValue name' helpSuffix mods
<|> pure defaultValue
-- | Enable/disable flags for any type, without a default (to allow chaining with '<|>')
enableDisableFlagsNoDefault ::
-- | Enabled value
a ->
-- | Disabled value
a ->
-- | Name
String ->
-- | Help suffix
String ->
Mod FlagFields a ->
Parser a
enableDisableFlagsNoDefault enabledValue disabledValue name' helpSuffix mods =
last
<$> some
( ( flag'
enabledValue
( hidden
<> internal
<> long name'
<> help helpSuffix
<> mods
)
<|> flag'
disabledValue
( hidden
<> internal
<> long ("no-" ++ name')
<> help helpSuffix
<> mods
)
)
<|> flag'
disabledValue
( long ("[no-]" ++ name')
<> help ("Enable/disable " ++ helpSuffix)
<> mods
)
)
where
last xs =
case reverse xs of
[] -> impureThrow $ stringException "enableDisableFlagsNoDefault.last"
x : _ -> x
-- | Show an extra help option (e.g. @--docker-help@ shows help for all
-- @--docker*@ args).
--
-- To actually have that help appear, use 'execExtraHelp' before executing the main parser.
-- (Two stripped Haddock markers restored in this copy; code unchanged.)
extraHelpOption ::
  -- | Hide from the brief description?
  Bool ->
  -- | Program name, e.g. @"stack"@
  String ->
  -- | Option glob term, e.g. @"docker*"@
  String ->
  -- | Help option name, e.g. @"docker-help"@
  String ->
  Parser (a -> a)
extraHelpOption hide progName fakeName helpName =
  -- Two stacked no-op infoOptions: a hidden real @--helpName@ flag, and a
  -- visible pseudo-flag @--fakeName@ that only exists to carry the help text.
  infoOption (optDesc' ++ ".") (long helpName <> hidden <> internal)
    <*> infoOption
      (optDesc' ++ ".")
      ( long fakeName
          <> help optDesc'
          <> (if hide then hidden <> internal else idm)
      )
  where
    -- E.g. "Run 'stack --docker-help' for details".
    optDesc' = concat ["Run '", takeBaseName progName, " --", helpName, "' for details"]
-- | 'option', specialized to 'Text'.
textOption :: Mod OptionFields Text -> Parser Text
textOption = option (T.pack <$> readerAsk)
-- | 'argument', specialized to 'Text'.
textArgument :: Mod ArgumentFields Text -> Parser Text
textArgument = argument (T.pack <$> readerAsk)
-- | Like 'optional', but returning a 'First'.
optionalFirst :: Alternative f => f a -> f (First a)
optionalFirst = fmap First . optional
-- | Like 'optional', but returning a 'FirstTrue'.
optionalFirstTrue :: Alternative f => f Bool -> f FirstTrue
optionalFirstTrue = fmap FirstTrue . optional
| Like ' optional ' , but returning a ' FirstFalse ' .
optionalFirstFalse :: Alternative f => f Bool -> f FirstFalse
optionalFirstFalse = fmap FirstFalse . optional
absFileOption : : ( Path Abs File ) - > Parser ( Path Abs File )
--absFileOption mods = option (eitherReader' parseAbsFile) $
-- completer (pathCompleterWith defaultPathCompleterOpts { pcoRelative = False }) <> mods
relFileOption : : ( Path Rel File ) - > Parser ( Path Rel File )
--relFileOption mods = option (eitherReader' parseRelFile) $
completer ( pathCompleterWith defaultPathCompleterOpts { pcoAbsolute = False } ) < > mods
absDirOption : : ( Path Abs Dir ) - > Parser ( )
--absDirOption mods = option (eitherReader' parseAbsDir) $
-- completer (pathCompleterWith defaultPathCompleterOpts { pcoRelative = False, pcoFileFilter = const False }) <> mods
relDirOption : : ( Path Rel Dir ) - > Parser ( )
--relDirOption mods = option (eitherReader' parseRelDir) $
completer ( pathCompleterWith defaultPathCompleterOpts { pcoAbsolute = False , pcoFileFilter = const False } ) < > mods
-- | Like 'eitherReader', but accepting any @'Show' e@ on the 'Left'.
eitherReader' :: Show e => (String -> Either e a) -> ReadM a
eitherReader' f = eitherReader (mapLeft show . f)
data PathCompleterOpts = PathCompleterOpts
{ pcoAbsolute :: Bool,
pcoRelative :: Bool,
pcoRootDir :: Maybe FilePath,
pcoFileFilter :: FilePath -> Bool,
pcoDirFilter :: FilePath -> Bool
}
defaultPathCompleterOpts :: PathCompleterOpts
defaultPathCompleterOpts =
PathCompleterOpts
{ pcoAbsolute = True,
pcoRelative = True,
pcoRootDir = Nothing,
pcoFileFilter = const True,
pcoDirFilter = const True
}
fileCompleter :: Completer
fileCompleter = pathCompleterWith defaultPathCompleterOpts
fileExtCompleter :: [String] -> Completer
fileExtCompleter exts = pathCompleterWith defaultPathCompleterOpts {pcoFileFilter = (`elem` exts) . takeExtension}
dirCompleter :: Completer
dirCompleter = pathCompleterWith defaultPathCompleterOpts {pcoFileFilter = const False}
pathCompleterWith :: PathCompleterOpts -> Completer
pathCompleterWith PathCompleterOpts {..} = mkCompleter $ \inputRaw -> do
Unescape input , to handle single and double quotes . Note that the
-- results do not need to be re-escaped, due to some fiddly bash
-- magic.
let input = unescapeBashArg inputRaw
let (inputSearchDir0, searchPrefix) = splitFileName input
inputSearchDir = if inputSearchDir0 == "./" then "" else inputSearchDir0
msearchDir <-
case (isRelative inputSearchDir, pcoAbsolute, pcoRelative) of
(True, _, True) -> do
rootDir <- maybe getCurrentDirectory return pcoRootDir
return $ Just (rootDir </> inputSearchDir)
(False, True, _) -> return $ Just inputSearchDir
_ -> return Nothing
case msearchDir of
Nothing
| input == "" && pcoAbsolute -> return ["/"]
| otherwise -> return []
Just searchDir -> do
entries <- getDirectoryContents searchDir `catch` \(_ :: IOException) -> return []
fmap catMaybes $
forM entries $ \entry ->
-- Skip . and .. unless user is typing . or ..
if entry `elem` ["..", "."] && searchPrefix `notElem` ["..", "."]
then return Nothing
else
if searchPrefix `isPrefixOf` entry
then do
let path = searchDir </> entry
case (pcoFileFilter path, pcoDirFilter path) of
(True, True) -> return $ Just (inputSearchDir </> entry)
(fileAllowed, dirAllowed) -> do
isDir <- doesDirectoryExist path
if (if isDir then dirAllowed else fileAllowed)
then return $ Just (inputSearchDir </> entry)
else return Nothing
else return Nothing
-- | Undo bash quoting/escaping of a single argument as the completion
-- machinery sees it:
--
-- * Leading single quote: everything after it is literal.
-- * Leading double quote: a backslash escapes only the characters bash
--   treats specially inside double quotes (@$@, @`@, @"@, @\\@, newline);
--   any other backslash is kept verbatim.
-- * Otherwise (bare word): a backslash escapes the following character.
unescapeBashArg :: String -> String
unescapeBashArg ('\'' : rest) = rest
unescapeBashArg ('\"' : rest) = go rest
  where
    pattern' = "$`\"\\\n" :: String
    go [] = []
    go ('\\' : x : xs)
      -- BUG FIX: recurse on the tail; the previous `x : xs` stopped
      -- unescaping after the first recognized escape, leaving later
      -- escapes (e.g. @\"a\$b\`c@) unprocessed.
      | x `elem` pattern' = x : go xs
      | otherwise = '\\' : x : go xs
    go (x : xs) = x : go xs
unescapeBashArg input = go input
  where
    go [] = []
    go ('\\' : x : xs) = x : go xs
    go (x : xs) = x : go xs
| null | https://raw.githubusercontent.com/AmpersandTarski/Ampersand/8a1c00e3b0aeccd7ea6283b20097a8b73310b138/src/Options/Applicative/Builder/Extra.hs | haskell | | Extra functions for optparse-applicative.
,absFileOption
,relFileOption
,absDirOption
,relDirOption
import Path hiding ((</>))
| Enable/disable flags for a 'Bool'.
| Default value
| Flag name
| Help suffix
| Enable/disable flags for a 'Bool', without a default case (to allow chaining with '<|>').
| Flag name
| Help suffix
| Flag with no default of True or False
| Flag with a Semigroup instance and a default of True
| Flag with a Semigroup instance and a default of False
| Enable/disable flags for any type.
| Default value
| Enabled value
| Disabled value
| Name
| Help suffix
| Enable/disable flags for any type, without a default (to allow chaining with '<|>')
| Enabled value
| Disabled value
| Name
| Help suffix
docker - help@ shows help for all @--docker*@ args ) .
To actually have that help appear, use 'execExtraHelp' before executing the main parser.
| Hide from the brief description?
| Program name, e.g. @"stack"@
| Option glob term, e.g. @"docker*"@
| 'option', specialized to 'Text'.
| 'argument', specialized to 'Text'.
| Like 'optional', but returning a 'First'.
| Like 'optional', but returning a 'FirstTrue'.
absFileOption mods = option (eitherReader' parseAbsFile) $
completer (pathCompleterWith defaultPathCompleterOpts { pcoRelative = False }) <> mods
relFileOption mods = option (eitherReader' parseRelFile) $
absDirOption mods = option (eitherReader' parseAbsDir) $
completer (pathCompleterWith defaultPathCompleterOpts { pcoRelative = False, pcoFileFilter = const False }) <> mods
relDirOption mods = option (eitherReader' parseRelDir) $
| Like 'eitherReader', but accepting any @'Show' e@ on the 'Left'.
results do not need to be re-escaped, due to some fiddly bash
magic.
Skip . and .. unless user is typing . or .. | # LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
module Options.Applicative.Builder.Extra
( boolFlags,
boolFlagsNoDefault,
firstBoolFlagsNoDefault,
firstBoolFlagsTrue,
firstBoolFlagsFalse,
enableDisableFlags,
enableDisableFlagsNoDefault,
extraHelpOption,
textOption,
textArgument,
optionalFirst,
optionalFirstTrue,
optionalFirstFalse,
eitherReader',
fileCompleter,
fileExtCompleter,
dirCompleter,
PathCompleterOpts (..),
defaultPathCompleterOpts,
pathCompleterWith,
unescapeBashArg,
)
where
import Ampersand.Basics
import Data.List (isPrefixOf)
import Data.Monoid hiding ((<>))
import qualified Data.Text as T
import Options.Applicative
import Options.Applicative.Types (readerAsk)
import System.Directory (doesDirectoryExist, getCurrentDirectory, getDirectoryContents)
import System.FilePath (isRelative, splitFileName, takeBaseName, takeExtension, (</>))
boolFlags ::
Bool ->
String ->
String ->
Mod FlagFields Bool ->
Parser Bool
boolFlags defaultValue name' helpSuffix =
enableDisableFlags defaultValue True False name' $
concat
[ helpSuffix,
" (default: ",
if defaultValue then "enabled" else "disabled",
")"
]
boolFlagsNoDefault ::
String ->
String ->
Mod FlagFields Bool ->
Parser Bool
boolFlagsNoDefault = enableDisableFlagsNoDefault True False
firstBoolFlagsNoDefault :: String -> String -> Mod FlagFields (Maybe Bool) -> Parser (First Bool)
firstBoolFlagsNoDefault name' helpSuffix mod' =
First
<$> enableDisableFlags
Nothing
(Just True)
(Just False)
name'
helpSuffix
mod'
firstBoolFlagsTrue :: String -> String -> Mod FlagFields FirstTrue -> Parser FirstTrue
firstBoolFlagsTrue name' helpSuffix =
enableDisableFlags
mempty
(FirstTrue (Just True))
(FirstTrue (Just False))
name'
$ helpSuffix ++ " (default: enabled)"
firstBoolFlagsFalse :: String -> String -> Mod FlagFields FirstFalse -> Parser FirstFalse
firstBoolFlagsFalse name' helpSuffix =
enableDisableFlags
mempty
(FirstFalse (Just True))
(FirstFalse (Just False))
name'
$ helpSuffix ++ " (default: disabled)"
enableDisableFlags ::
a ->
a ->
a ->
String ->
String ->
Mod FlagFields a ->
Parser a
enableDisableFlags defaultValue enabledValue disabledValue name' helpSuffix mods =
enableDisableFlagsNoDefault enabledValue disabledValue name' helpSuffix mods
<|> pure defaultValue
enableDisableFlagsNoDefault ::
a ->
a ->
String ->
String ->
Mod FlagFields a ->
Parser a
enableDisableFlagsNoDefault enabledValue disabledValue name' helpSuffix mods =
last
<$> some
( ( flag'
enabledValue
( hidden
<> internal
<> long name'
<> help helpSuffix
<> mods
)
<|> flag'
disabledValue
( hidden
<> internal
<> long ("no-" ++ name')
<> help helpSuffix
<> mods
)
)
<|> flag'
disabledValue
( long ("[no-]" ++ name')
<> help ("Enable/disable " ++ helpSuffix)
<> mods
)
)
where
last xs =
case reverse xs of
[] -> impureThrow $ stringException "enableDisableFlagsNoDefault.last"
x : _ -> x
extraHelpOption ::
Bool ->
String ->
String ->
| Help option name , e.g. @"docker - help"@
String ->
Parser (a -> a)
extraHelpOption hide progName fakeName helpName =
infoOption (optDesc' ++ ".") (long helpName <> hidden <> internal)
<*> infoOption
(optDesc' ++ ".")
( long fakeName
<> help optDesc'
<> (if hide then hidden <> internal else idm)
)
where
optDesc' = concat ["Run '", takeBaseName progName, " --", helpName, "' for details"]
textOption :: Mod OptionFields Text -> Parser Text
textOption = option (T.pack <$> readerAsk)
textArgument :: Mod ArgumentFields Text -> Parser Text
textArgument = argument (T.pack <$> readerAsk)
optionalFirst :: Alternative f => f a -> f (First a)
optionalFirst = fmap First . optional
optionalFirstTrue :: Alternative f => f Bool -> f FirstTrue
optionalFirstTrue = fmap FirstTrue . optional
| Like ' optional ' , but returning a ' FirstFalse ' .
optionalFirstFalse :: Alternative f => f Bool -> f FirstFalse
optionalFirstFalse = fmap FirstFalse . optional
absFileOption : : ( Path Abs File ) - > Parser ( Path Abs File )
relFileOption : : ( Path Rel File ) - > Parser ( Path Rel File )
completer ( pathCompleterWith defaultPathCompleterOpts { pcoAbsolute = False } ) < > mods
absDirOption : : ( Path Abs Dir ) - > Parser ( )
relDirOption : : ( Path Rel Dir ) - > Parser ( )
completer ( pathCompleterWith defaultPathCompleterOpts { pcoAbsolute = False , pcoFileFilter = const False } ) < > mods
eitherReader' :: Show e => (String -> Either e a) -> ReadM a
eitherReader' f = eitherReader (mapLeft show . f)
data PathCompleterOpts = PathCompleterOpts
{ pcoAbsolute :: Bool,
pcoRelative :: Bool,
pcoRootDir :: Maybe FilePath,
pcoFileFilter :: FilePath -> Bool,
pcoDirFilter :: FilePath -> Bool
}
defaultPathCompleterOpts :: PathCompleterOpts
defaultPathCompleterOpts =
PathCompleterOpts
{ pcoAbsolute = True,
pcoRelative = True,
pcoRootDir = Nothing,
pcoFileFilter = const True,
pcoDirFilter = const True
}
fileCompleter :: Completer
fileCompleter = pathCompleterWith defaultPathCompleterOpts
fileExtCompleter :: [String] -> Completer
fileExtCompleter exts = pathCompleterWith defaultPathCompleterOpts {pcoFileFilter = (`elem` exts) . takeExtension}
dirCompleter :: Completer
dirCompleter = pathCompleterWith defaultPathCompleterOpts {pcoFileFilter = const False}
pathCompleterWith :: PathCompleterOpts -> Completer
pathCompleterWith PathCompleterOpts {..} = mkCompleter $ \inputRaw -> do
Unescape input , to handle single and double quotes . Note that the
let input = unescapeBashArg inputRaw
let (inputSearchDir0, searchPrefix) = splitFileName input
inputSearchDir = if inputSearchDir0 == "./" then "" else inputSearchDir0
msearchDir <-
case (isRelative inputSearchDir, pcoAbsolute, pcoRelative) of
(True, _, True) -> do
rootDir <- maybe getCurrentDirectory return pcoRootDir
return $ Just (rootDir </> inputSearchDir)
(False, True, _) -> return $ Just inputSearchDir
_ -> return Nothing
case msearchDir of
Nothing
| input == "" && pcoAbsolute -> return ["/"]
| otherwise -> return []
Just searchDir -> do
entries <- getDirectoryContents searchDir `catch` \(_ :: IOException) -> return []
fmap catMaybes $
forM entries $ \entry ->
if entry `elem` ["..", "."] && searchPrefix `notElem` ["..", "."]
then return Nothing
else
if searchPrefix `isPrefixOf` entry
then do
let path = searchDir </> entry
case (pcoFileFilter path, pcoDirFilter path) of
(True, True) -> return $ Just (inputSearchDir </> entry)
(fileAllowed, dirAllowed) -> do
isDir <- doesDirectoryExist path
if (if isDir then dirAllowed else fileAllowed)
then return $ Just (inputSearchDir </> entry)
else return Nothing
else return Nothing
unescapeBashArg :: String -> String
unescapeBashArg ('\'' : rest) = rest
unescapeBashArg ('\"' : rest) = go rest
where
pattern' = "$`\"\\\n" :: String
go [] = []
go ('\\' : x : xs)
| x `elem` pattern' = x : xs
| otherwise = '\\' : x : go xs
go (x : xs) = x : go xs
unescapeBashArg input = go input
where
go [] = []
go ('\\' : x : xs) = x : go xs
go (x : xs) = x : go xs
|
0d7a49738313ef25d234941ea8fe8ca771c3d4a6b04aeebc8861b123ebc1de9b | clojurecademy/clojurecademy | course.cljs | (ns clojurecademy.controller.course
(:require [clojurecademy.util :as util]
[reagent.core :as reagent]
[goog.dom :as dom]))
(defn- close
[_]
(reagent/render [(fn [_] [:div])] (dom/getElement "msg-container")))
(defn- refresh
[_]
(.reload js/location))
(defn- render-info-window
[title text]
(reagent/render [(fn [_]
[:div._1ExHXGlOxKpCylTzG5QMT6._1huMPl6-RtzoV17I0HmoRf._3dkNckTVR4VPImWXsThMLq._1SsvwN3XXhA0W2Jo_T7CsX
[:div.sSQDykDehz7XXlU3XotWJ
[:span]
[:div._34wkp9FRhoKrHb1WnMWiYL
[:div._2_bCPSnIBzBKgXvdAQqXRU
[:div
[:h2 title]
[:p text]]
[:div.e61uogkN-YwkT8xPS61b5
[:button#release-close._2fDy3KzGIsY8FHMg74ib-V.Q2qWh46WAtbrJjOvkx6Hq._1mQgyp76JXoCrTvLgR_p-d._141p_JCTXbXubgx5gGZfVe
"Close"]]]]]])] (dom/getElement "msg-container")) [:div])
(defn- release-it
[e]
(let [course-id (-> e .-target .-name)]
(util/ajax :put (str "/courses/" course-id "/release")
:success (fn [d]
(cond
(:does-not-exists? d)
(render-info-window "Course Does Not Exist" "The course that you are trying to release does not exist!")
(:not-owner? d)
(render-info-window "Authorization Error" "You are not the owner of course that you are trying to release!")
(:success d)
(render-info-window "Released!" "You've successfully released new version of your course!"))
(util/set-event-handler! "onclick" "release-close" refresh))
:error (fn [{:keys [status response]}]
(render-info-window "Error!" (str "Something went wrong: " response " - Status Code: " status))
(util/set-event-handler! "onclick" "release-close" refresh)))))
(defn release
[e]
(reagent/render [(fn [_]
[:div
[:div._1ExHXGlOxKpCylTzG5QMT6._1huMPl6-RtzoV17I0HmoRf._3dkNckTVR4VPImWXsThMLq._1SsvwN3XXhA0W2Jo_T7CsX
[:div.sSQDykDehz7XXlU3XotWJ
[:span]
[:div._34wkp9FRhoKrHb1WnMWiYL
[:div._2_bCPSnIBzBKgXvdAQqXRU
[:div
[:h2 "Release Course"]
[:p
"Your latest changes will affect enrolled users to this course but it won't delete/change their codes."]
[:p
"Are you sure you want to release new version of your course?"]]
[:div.e61uogkN-YwkT8xPS61b5
[:a.no-underline._141p_JCTXbXubgx5gGZfVe
[:button#release-it._2fDy3KzGIsY8FHMg74ib-V.Q2qWh46WAtbrJjOvkx6Hq
{:name (-> e .-target .-name)}
"Release It!"]]
[:button#release-close._2fDy3KzGIsY8FHMg74ib-V.Q2qWh46WAtbrJjOvkx6Hq._1mQgyp76JXoCrTvLgR_p-d._141p_JCTXbXubgx5gGZfVe
"Cancel"]]]]]]])]
(dom/getElement "msg-container"))
(util/set-event-handler! "onclick" "release-it" release-it)
(util/set-event-handler! "onclick" "release-close" close))
| null | https://raw.githubusercontent.com/clojurecademy/clojurecademy/97dc7f9b91a90a3f30ca5a3de88542c90a50ce01/src/cljs/clojurecademy/controller/course.cljs | clojure | (ns clojurecademy.controller.course
(:require [clojurecademy.util :as util]
[reagent.core :as reagent]
[goog.dom :as dom]))
(defn- close
[_]
(reagent/render [(fn [_] [:div])] (dom/getElement "msg-container")))
(defn- refresh
[_]
(.reload js/location))
(defn- render-info-window
[title text]
(reagent/render [(fn [_]
[:div._1ExHXGlOxKpCylTzG5QMT6._1huMPl6-RtzoV17I0HmoRf._3dkNckTVR4VPImWXsThMLq._1SsvwN3XXhA0W2Jo_T7CsX
[:div.sSQDykDehz7XXlU3XotWJ
[:span]
[:div._34wkp9FRhoKrHb1WnMWiYL
[:div._2_bCPSnIBzBKgXvdAQqXRU
[:div
[:h2 title]
[:p text]]
[:div.e61uogkN-YwkT8xPS61b5
[:button#release-close._2fDy3KzGIsY8FHMg74ib-V.Q2qWh46WAtbrJjOvkx6Hq._1mQgyp76JXoCrTvLgR_p-d._141p_JCTXbXubgx5gGZfVe
"Close"]]]]]])] (dom/getElement "msg-container")) [:div])
(defn- release-it
[e]
(let [course-id (-> e .-target .-name)]
(util/ajax :put (str "/courses/" course-id "/release")
:success (fn [d]
(cond
(:does-not-exists? d)
(render-info-window "Course Does Not Exist" "The course that you are trying to release does not exist!")
(:not-owner? d)
(render-info-window "Authorization Error" "You are not the owner of course that you are trying to release!")
(:success d)
(render-info-window "Released!" "You've successfully released new version of your course!"))
(util/set-event-handler! "onclick" "release-close" refresh))
:error (fn [{:keys [status response]}]
(render-info-window "Error!" (str "Something went wrong: " response " - Status Code: " status))
(util/set-event-handler! "onclick" "release-close" refresh)))))
(defn release
[e]
(reagent/render [(fn [_]
[:div
[:div._1ExHXGlOxKpCylTzG5QMT6._1huMPl6-RtzoV17I0HmoRf._3dkNckTVR4VPImWXsThMLq._1SsvwN3XXhA0W2Jo_T7CsX
[:div.sSQDykDehz7XXlU3XotWJ
[:span]
[:div._34wkp9FRhoKrHb1WnMWiYL
[:div._2_bCPSnIBzBKgXvdAQqXRU
[:div
[:h2 "Release Course"]
[:p
"Your latest changes will affect enrolled users to this course but it won't delete/change their codes."]
[:p
"Are you sure you want to release new version of your course?"]]
[:div.e61uogkN-YwkT8xPS61b5
[:a.no-underline._141p_JCTXbXubgx5gGZfVe
[:button#release-it._2fDy3KzGIsY8FHMg74ib-V.Q2qWh46WAtbrJjOvkx6Hq
{:name (-> e .-target .-name)}
"Release It!"]]
[:button#release-close._2fDy3KzGIsY8FHMg74ib-V.Q2qWh46WAtbrJjOvkx6Hq._1mQgyp76JXoCrTvLgR_p-d._141p_JCTXbXubgx5gGZfVe
"Cancel"]]]]]]])]
(dom/getElement "msg-container"))
(util/set-event-handler! "onclick" "release-it" release-it)
(util/set-event-handler! "onclick" "release-close" close))
| |
e9adc1aa4a9bdaab473cffff3b7879f7b0fec652ea0284ed9646c743335d4fa9 | everpeace/programming-erlang-code | area_server_final.erl | -module(area_server_final).
-export([start/0, area/2]).
start() -> spawn(fun loop/0).
area(Pid, What) ->
rpc(Pid, What).
rpc(Pid, Request) ->
Pid ! {self(), Request},
receive
{Pid, Response} ->
Response
end.
loop() ->
receive
{From, {rectangle, Width, Ht}} ->
From ! {self(), Width * Ht},
loop();
{From, {circle, R}} ->
From ! {self(), 3.14159 * R * R},
loop();
{From, Other} ->
From ! {self(), {error,Other}},
loop()
end.
| null | https://raw.githubusercontent.com/everpeace/programming-erlang-code/8ef31aa13d15b41754dda225c50284915c29cb48/code/area_server_final.erl | erlang | -module(area_server_final).
-export([start/0, area/2]).
start() -> spawn(fun loop/0).
area(Pid, What) ->
rpc(Pid, What).
rpc(Pid, Request) ->
Pid ! {self(), Request},
receive
{Pid, Response} ->
Response
end.
loop() ->
receive
{From, {rectangle, Width, Ht}} ->
From ! {self(), Width * Ht},
loop();
{From, {circle, R}} ->
From ! {self(), 3.14159 * R * R},
loop();
{From, Other} ->
From ! {self(), {error,Other}},
loop()
end.
| |
e8ab5dc5c1551f590de031af5a8da4bb9896f8309593a1d0f27312a5b48c910f | sbcl/sbcl | readtable.lisp | ;;;; READTABLEs
This software is part of the SBCL system . See the README file for
;;;; more information.
;;;;
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
;;;; public domain. The software is in the public domain and is
;;;; provided with absolutely no warranty. See the COPYING and CREDITS
;;;; files for more information.
(in-package "SB-IMPL")
(sb-xc:deftype attribute-table ()
`(simple-array (unsigned-byte 8) (,base-char-code-limit)))
;;; constants for readtable character attributes. These are all as in
;;; the manual.
;;;
FIXME : wait a minute . Firstly , I doubt they 're in the manual .
Secondly , the numerical order of these constants is coupled with
;;; code in CHAR-CLASS{,2,3} in the reader implementation, so beware
;;; when changing them.
(defconstant +char-attr-whitespace+ 0)
(defconstant +char-attr-terminating-macro+ 1)
(defconstant +char-attr-single-escape+ 2)
(defconstant +char-attr-multiple-escape+ 3)
(defconstant +char-attr-constituent+ 4)
(defconstant +char-attr-constituent-dot+ 5)
(defconstant +char-attr-constituent-expt+ 6)
(defconstant +char-attr-constituent-slash+ 7)
(defconstant +char-attr-constituent-digit+ 8)
(defconstant +char-attr-constituent-sign+ 9)
the following two are not static but depend on * READ - BASE * .
DECIMAL - DIGIT is for characters being digits in base 10 but not in
base * READ - BASE * ( which is therefore perforce smaller than 10 ) ;
;;; DIGIT-OR-EXPT is for characters being both exponent markers and
;;; digits in base *READ-BASE* (which is therefore perforce larger
than 10 ) . -- CSR , 2004 - 03 - 16
(defconstant +char-attr-constituent-decimal-digit+ 10)
(defconstant +char-attr-constituent-digit-or-expt+ 11)
(defconstant +char-attr-package-delimiter+ 12)
(defconstant +char-attr-invalid+ 13)
Meta : there is no such function as READ - UNQUALIFIED - TOKEN . No biggie .
(defconstant +char-attr-delimiter+ 14) ; (a fake for READ-UNQUALIFIED-TOKEN)
(define-load-time-global *empty-extended-char-table* (make-hash-table :rehash-size 1 :test #'eq))
(sb-xc:defstruct (readtable (:conc-name nil)
(:constructor make-readtable ())
(:predicate readtablep)
ANSI requires a CL : COPY - READTABLE to do
a deep copy , so the DEFSTRUCT - generated
;; default is not suitable.
(:copier nil))
"A READTABLE is a data structure that maps characters into syntax
types for the Common Lisp expression reader."
;; The BASE-CHAR-SYNTAX-ARRAY is a vector of BASE-CHAR-CODE-LIMIT
;; integers for describing the character type. Conceptually, there
are 4 distinct " primary " character attributes :
;; +CHAR-ATTR-WHITESPACE+, +CHAR-ATTR-TERMINATING-MACRO+,
;; +CHAR-ATTR-ESCAPE+, and +CHAR-ATTR-CONSTITUENT+. Non-terminating
;; macros (such as the symbol reader) have the attribute
;; +CHAR-ATTR-CONSTITUENT+.
;;
;; In order to make READ-TOKEN fast, all this information is stored
;; in the character attribute table by having different varieties of
;; constituents.
(base-char-syntax-array
(make-array base-char-code-limit
:element-type '(unsigned-byte 8)
:initial-element +char-attr-constituent+)
:type attribute-table
:read-only t)
;; The BASE-CHAR-MACRO-TABLE is a vector of BASE-CHAR-CODE-LIMIT
functions . One of these functions called with appropriate
arguments whenever any non - WHITESPACE character is encountered
;; inside READ-PRESERVING-WHITESPACE. These functions are used to
;; implement user-defined read-macros, system read-macros, and the
;; number-symbol reader.
(base-char-macro-array
(make-array base-char-code-limit :initial-element nil)
:type (simple-vector #.base-char-code-limit)
:read-only t)
;; Characters above the BASE-CHAR range
(extended-char-table *empty-extended-char-table* :type hash-table)
(%readtable-case :upcase :type (member :upcase :downcase :preserve :invert))
;; Element type to use when reading a string literal with no extended-chars.
;; The system itself prefers base-string, but otherwise it is a contentious
;; issue. We don't (by default) use base-strings, because people often write:
( SETF ( ( READ - STRING S ) 0 ) # \PILE_OF_POO ) ,
;; or more likely, something the effect of which resembles
( SETF ( ( ADJUST - ARRAY " " 10 ) 0 ) # \SMILE )
;; which are each dubious constructs, because they assume READ to produce
;; strings capable of holding any char. The latter further assumes something
;; about compilation, because in that example, considering that there are no
;; characters in the literal, it is unclear whether the array should
;; be similar-as-constant to an array of base-char or array of character.
While indeed SBCL prints base - strings readably ( if * PRINT - READABLY * is T )
;; using #. syntax, the question is what the writer of the code intended
;; if (s)he did not know that the string should have been expressly
;; specified via #.(MAKE-STRING ... :ELEMENT-TYPE) or somesuch.
(%readtable-string-preference 'base-char :type (member character base-char))
;; With symbols, it's fairly clear that immutability of print names
;; renders the distinction between the kinds of string in the symbol-name
;; as being less relevant. If you expect (copy-seq (string asymbol))
;; to produce a certain type of string, your code is unportable anyway.
(%readtable-symbol-preference 'base-char :type (member character base-char))
(%readtable-normalization #+sb-unicode t #-sb-unicode nil :type boolean))
(declaim (freeze-type readtable))
| null | https://raw.githubusercontent.com/sbcl/sbcl/8db39e18758349d3ad4acdf1514ead8b19bf9d7f/src/code/readtable.lisp | lisp | READTABLEs
more information.
public domain. The software is in the public domain and is
provided with absolutely no warranty. See the COPYING and CREDITS
files for more information.
constants for readtable character attributes. These are all as in
the manual.
code in CHAR-CLASS{,2,3} in the reader implementation, so beware
when changing them.
DIGIT-OR-EXPT is for characters being both exponent markers and
digits in base *READ-BASE* (which is therefore perforce larger
(a fake for READ-UNQUALIFIED-TOKEN)
default is not suitable.
The BASE-CHAR-SYNTAX-ARRAY is a vector of BASE-CHAR-CODE-LIMIT
integers for describing the character type. Conceptually, there
+CHAR-ATTR-WHITESPACE+, +CHAR-ATTR-TERMINATING-MACRO+,
+CHAR-ATTR-ESCAPE+, and +CHAR-ATTR-CONSTITUENT+. Non-terminating
macros (such as the symbol reader) have the attribute
+CHAR-ATTR-CONSTITUENT+.
In order to make READ-TOKEN fast, all this information is stored
in the character attribute table by having different varieties of
constituents.
The BASE-CHAR-MACRO-TABLE is a vector of BASE-CHAR-CODE-LIMIT
inside READ-PRESERVING-WHITESPACE. These functions are used to
implement user-defined read-macros, system read-macros, and the
number-symbol reader.
Characters above the BASE-CHAR range
Element type to use when reading a string literal with no extended-chars.
The system itself prefers base-string, but otherwise it is a contentious
issue. We don't (by default) use base-strings, because people often write:
or more likely, something the effect of which resembles
which are each dubious constructs, because they assume READ to produce
strings capable of holding any char. The latter further assumes something
about compilation, because in that example, considering that there are no
characters in the literal, it is unclear whether the array should
be similar-as-constant to an array of base-char or array of character.
using #. syntax, the question is what the writer of the code intended
if (s)he did not know that the string should have been expressly
specified via #.(MAKE-STRING ... :ELEMENT-TYPE) or somesuch.
With symbols, it's fairly clear that immutability of print names
renders the distinction between the kinds of string in the symbol-name
as being less relevant. If you expect (copy-seq (string asymbol))
to produce a certain type of string, your code is unportable anyway. |
This software is part of the SBCL system . See the README file for
This software is derived from the CMU CL system , which was
written at Carnegie Mellon University and released into the
(in-package "SB-IMPL")
(sb-xc:deftype attribute-table ()
`(simple-array (unsigned-byte 8) (,base-char-code-limit)))
FIXME : wait a minute . Firstly , I doubt they 're in the manual .
Secondly , the numerical order of these constants is coupled with
(defconstant +char-attr-whitespace+ 0)
(defconstant +char-attr-terminating-macro+ 1)
(defconstant +char-attr-single-escape+ 2)
(defconstant +char-attr-multiple-escape+ 3)
(defconstant +char-attr-constituent+ 4)
(defconstant +char-attr-constituent-dot+ 5)
(defconstant +char-attr-constituent-expt+ 6)
(defconstant +char-attr-constituent-slash+ 7)
(defconstant +char-attr-constituent-digit+ 8)
(defconstant +char-attr-constituent-sign+ 9)
the following two are not static but depend on * READ - BASE * .
DECIMAL - DIGIT is for characters being digits in base 10 but not in
than 10 ) . -- CSR , 2004 - 03 - 16
(defconstant +char-attr-constituent-decimal-digit+ 10)
(defconstant +char-attr-constituent-digit-or-expt+ 11)
(defconstant +char-attr-package-delimiter+ 12)
(defconstant +char-attr-invalid+ 13)
Meta : there is no such function as READ - UNQUALIFIED - TOKEN . No biggie .
(define-load-time-global *empty-extended-char-table* (make-hash-table :rehash-size 1 :test #'eq))
(sb-xc:defstruct (readtable (:conc-name nil)
(:constructor make-readtable ())
(:predicate readtablep)
ANSI requires a CL : COPY - READTABLE to do
a deep copy , so the DEFSTRUCT - generated
(:copier nil))
"A READTABLE is a data structure that maps characters into syntax
types for the Common Lisp expression reader."
are 4 distinct " primary " character attributes :
(base-char-syntax-array
(make-array base-char-code-limit
:element-type '(unsigned-byte 8)
:initial-element +char-attr-constituent+)
:type attribute-table
:read-only t)
functions . One of these functions called with appropriate
arguments whenever any non - WHITESPACE character is encountered
(base-char-macro-array
(make-array base-char-code-limit :initial-element nil)
:type (simple-vector #.base-char-code-limit)
:read-only t)
(extended-char-table *empty-extended-char-table* :type hash-table)
(%readtable-case :upcase :type (member :upcase :downcase :preserve :invert))
( SETF ( ( READ - STRING S ) 0 ) # \PILE_OF_POO ) ,
( SETF ( ( ADJUST - ARRAY " " 10 ) 0 ) # \SMILE )
While indeed SBCL prints base - strings readably ( if * PRINT - READABLY * is T )
(%readtable-string-preference 'base-char :type (member character base-char))
(%readtable-symbol-preference 'base-char :type (member character base-char))
(%readtable-normalization #+sb-unicode t #-sb-unicode nil :type boolean))
(declaim (freeze-type readtable))
|
607862bca2eb315d88e020767dcac4bd9d05b11ef9b00eb18e86a2f8d8942535 | programaker-project/Programaker-Core | automate_coordination_app.erl | %%%-------------------------------------------------------------------
@doc automate_coordination APP
%% @end
%%%-------------------------------------------------------------------
-module(automate_coordination_app).
-behaviour(application).
%% Application callbacks
-export([start/0, start/2, stop/1]).
%%====================================================================
%% API
%%====================================================================
start() ->
automate_coordination_sup:start_link().
start(_StartType, _StartArgs) ->
start().
%%--------------------------------------------------------------------
stop(_State) ->
ok.
%%====================================================================
Internal functions
%%====================================================================
| null | https://raw.githubusercontent.com/programaker-project/Programaker-Core/ef10fc6d2a228b2096b121170c421f5c29f9f270/backend/apps/automate_coordination/src/automate_coordination_app.erl | erlang | -------------------------------------------------------------------
@end
-------------------------------------------------------------------
Application callbacks
====================================================================
API
====================================================================
--------------------------------------------------------------------
====================================================================
==================================================================== | @doc automate_coordination APP
-module(automate_coordination_app).
-behaviour(application).
-export([start/0, start/2, stop/1]).
start() ->
automate_coordination_sup:start_link().
start(_StartType, _StartArgs) ->
start().
stop(_State) ->
ok.
Internal functions
|
1536a9df668212765d73db4629a654aa98984e0cea72e6a967aab9fa01ce4fa3 | titola/incudine | types.lisp | (in-package :incudine-tests)
;; Foreign types.
(deftest arg-types.1
(progn
(define-ugen arg-test-1 list ((a foreign-float) (b foreign-double)
(c int32) (d int64) (p foreign-pointer))
(list a b c d (cffi:null-pointer-p p)))
(let* ((u (funcall (arg-test-1 1 2 3 4 (cffi:null-pointer))))
(res (funcall (ugen-perf-function u))))
(free u)
res))
(1.0 2.0d0 3 4 t))
;; Foreign types (alias).
(deftest arg-types.2
(progn
(define-ugen arg-test-2 list ((a f32) (b f64) (c i32) (d i64) (p ptr))
(list a b c d (cffi:null-pointer-p p)))
(let* ((u (funcall (arg-test-2 1 2 3 4 (cffi:null-pointer))))
(res (funcall (ugen-perf-function u))))
(free u)
res))
(1.0 2.0d0 3 4 t))
;; Coercing of the numbers inside WITH.
(deftest init-types.1
(progn
(define-ugen arg-test-3 list ()
(with ((a 1)
(b 2)
(c 3)
(d 4))
(declare (sample a) (f32 b) (f64 c) (int32 d))
(list a b c d)))
(let* ((u (funcall (arg-test-3)))
(res (funcall (ugen-perf-function u))))
(free u)
res))
(1.0d0 2.0 3.0d0 4))
;; Foreign pointer test.
(deftest pointer-test.1
(progn
(define-ugen pointer-test-1 boolean ()
(with ((a 0) (b 0) (c 0) (d 0) (e 0) (f (cffi:null-pointer)))
(declare (sample a) (i32 b) (i64 c) (f32 d) (f64 e) (ptr f))
(macrolet ((ptr-test ()
`(list ,@(mapcar (lambda (var)
`(cffi:pointerp (get-pointer ,var)))
'(a b c d e f)))))
(notany #'null (ptr-test)))))
(let* ((u (funcall (pointer-test-1)))
(res (funcall (ugen-perf-function u))))
(free u)
res))
t)
(deftest pointer-test.2
(progn
(define-ugen pointer-test-2 boolean
((a sample) (b i32) (c i64) (d f32) (e f64) (f ptr))
(macrolet ((ptr-test ()
`(list ,@(mapcar (lambda (var)
`(cffi:pointerp (get-pointer ,var)))
'(a b c d e f)))))
(notany #'null (ptr-test))))
(let* ((u (funcall (pointer-test-2 1 2 3 4 5 (cffi:null-pointer))))
(res (funcall (ugen-perf-function u))))
(free u)
res))
t)
(deftest pointer-test.3
(progn
(define-ugen pointer-test-3 list ()
(with ((a (make-frame 8 :initial-element 0.12345d0))
(b (make-f32-array 8))
(c (make-f64-array 8))
(d (make-i32-array 8))
(e (make-i64-array 8))
(f (cffi:null-pointer))
(g 0)
(l 0)
(m 0)
(n 0)
(p 0))
(declare (pointer a b c d e f)
(sample g) (i32 l) (i64 m) (f32 n) (f64 p))
(dotimes (i 8)
(setf (f32-ref b i) (coerce (smp-ref a i) 'single-float))
(setf (f64-ref c i) (coerce (* 0.123 (f32-ref b i)) 'double-float))
(setf (i32-ref d i) (floor (* 1000 (f64-ref c i))))
(setf (i64-ref e i) (ash (i32-ref d i) 3))
(setf f (cffi:mem-aptr a 'sample i))
(incf g (+ (smp-ref a i) (smp-ref f 0)))
(incf l (i32-ref d i))
(incf m (i64-ref e i))
(incf n (f32-ref b i))
(incf p (f64-ref c i)))
(list (sample->fixnum (* 1d7 g)) l m (floor (* 1e3 n))
(floor (* 1d7 p)))))
(let* ((u (funcall (pointer-test-3)))
(res (funcall (ugen-perf-function u))))
(free u)
res))
(19752000 120 960 987 1214748))
(deftest pointer-test.4
(progn
(define-ugen pointer-test-4 pointer ()
(with ((a (make-frame 2))
(b (make-f32-array 4))
(c (make-f64-array 6))
(d (make-i32-array 8))
(e (make-u32-array 10))
(f (make-i64-array 12))
(g (make-u64-array 14))
(h (make-pointer-array 8)))
(declare (pointer a b c d e f g h))
(initialize
(loop for i from 0
for ptr in (list a b c d e f g)
do (setf (ptr-ref h i) ptr)))
h))
(let ((u (funcall (pointer-test-4))))
(funcall (ugen-perf-function u))
(let* ((ptr (ptr-ref (ugen-return-pointer u) 0))
(res (when (and (eq (foreign-array-type-of ptr) :pointer)
(= (foreign-length ptr) 8))
(loop for i below 7
for vec = (ptr-ref ptr i)
collect (foreign-array-type-of vec)
collect (foreign-length vec)))))
(free u)
res)))
(SAMPLE 2 :FLOAT 4 :DOUBLE 6 :INT32 8 :UINT32 10 :INT64 12 :UINT64 14))
(deftest pointer-test.5
(progn
(define-ugen pointer-test-5 list ()
(with ((arr (make-i32-array 8 :initial-contents '(0 1 2 3 4 5 6 7))))
(loop for i below 8 collect (i32-ref arr i))))
(with-ugen-instance (u pointer-test-5)
(funcall (ugen-perf-function u))))
(0 1 2 3 4 5 6 7))
| null | https://raw.githubusercontent.com/titola/incudine/325174a54a540f4daa67bcbb29780073c35b7b80/tests/vug/types.lisp | lisp | Foreign types.
Foreign types (alias).
Coercing of the numbers inside WITH.
Foreign pointer test. | (in-package :incudine-tests)
(deftest arg-types.1
(progn
(define-ugen arg-test-1 list ((a foreign-float) (b foreign-double)
(c int32) (d int64) (p foreign-pointer))
(list a b c d (cffi:null-pointer-p p)))
(let* ((u (funcall (arg-test-1 1 2 3 4 (cffi:null-pointer))))
(res (funcall (ugen-perf-function u))))
(free u)
res))
(1.0 2.0d0 3 4 t))
(deftest arg-types.2
(progn
(define-ugen arg-test-2 list ((a f32) (b f64) (c i32) (d i64) (p ptr))
(list a b c d (cffi:null-pointer-p p)))
(let* ((u (funcall (arg-test-2 1 2 3 4 (cffi:null-pointer))))
(res (funcall (ugen-perf-function u))))
(free u)
res))
(1.0 2.0d0 3 4 t))
(deftest init-types.1
(progn
(define-ugen arg-test-3 list ()
(with ((a 1)
(b 2)
(c 3)
(d 4))
(declare (sample a) (f32 b) (f64 c) (int32 d))
(list a b c d)))
(let* ((u (funcall (arg-test-3)))
(res (funcall (ugen-perf-function u))))
(free u)
res))
(1.0d0 2.0 3.0d0 4))
(deftest pointer-test.1
(progn
(define-ugen pointer-test-1 boolean ()
(with ((a 0) (b 0) (c 0) (d 0) (e 0) (f (cffi:null-pointer)))
(declare (sample a) (i32 b) (i64 c) (f32 d) (f64 e) (ptr f))
(macrolet ((ptr-test ()
`(list ,@(mapcar (lambda (var)
`(cffi:pointerp (get-pointer ,var)))
'(a b c d e f)))))
(notany #'null (ptr-test)))))
(let* ((u (funcall (pointer-test-1)))
(res (funcall (ugen-perf-function u))))
(free u)
res))
t)
(deftest pointer-test.2
(progn
(define-ugen pointer-test-2 boolean
((a sample) (b i32) (c i64) (d f32) (e f64) (f ptr))
(macrolet ((ptr-test ()
`(list ,@(mapcar (lambda (var)
`(cffi:pointerp (get-pointer ,var)))
'(a b c d e f)))))
(notany #'null (ptr-test))))
(let* ((u (funcall (pointer-test-2 1 2 3 4 5 (cffi:null-pointer))))
(res (funcall (ugen-perf-function u))))
(free u)
res))
t)
(deftest pointer-test.3
(progn
(define-ugen pointer-test-3 list ()
(with ((a (make-frame 8 :initial-element 0.12345d0))
(b (make-f32-array 8))
(c (make-f64-array 8))
(d (make-i32-array 8))
(e (make-i64-array 8))
(f (cffi:null-pointer))
(g 0)
(l 0)
(m 0)
(n 0)
(p 0))
(declare (pointer a b c d e f)
(sample g) (i32 l) (i64 m) (f32 n) (f64 p))
(dotimes (i 8)
(setf (f32-ref b i) (coerce (smp-ref a i) 'single-float))
(setf (f64-ref c i) (coerce (* 0.123 (f32-ref b i)) 'double-float))
(setf (i32-ref d i) (floor (* 1000 (f64-ref c i))))
(setf (i64-ref e i) (ash (i32-ref d i) 3))
(setf f (cffi:mem-aptr a 'sample i))
(incf g (+ (smp-ref a i) (smp-ref f 0)))
(incf l (i32-ref d i))
(incf m (i64-ref e i))
(incf n (f32-ref b i))
(incf p (f64-ref c i)))
(list (sample->fixnum (* 1d7 g)) l m (floor (* 1e3 n))
(floor (* 1d7 p)))))
(let* ((u (funcall (pointer-test-3)))
(res (funcall (ugen-perf-function u))))
(free u)
res))
(19752000 120 960 987 1214748))
(deftest pointer-test.4
(progn
(define-ugen pointer-test-4 pointer ()
(with ((a (make-frame 2))
(b (make-f32-array 4))
(c (make-f64-array 6))
(d (make-i32-array 8))
(e (make-u32-array 10))
(f (make-i64-array 12))
(g (make-u64-array 14))
(h (make-pointer-array 8)))
(declare (pointer a b c d e f g h))
(initialize
(loop for i from 0
for ptr in (list a b c d e f g)
do (setf (ptr-ref h i) ptr)))
h))
(let ((u (funcall (pointer-test-4))))
(funcall (ugen-perf-function u))
(let* ((ptr (ptr-ref (ugen-return-pointer u) 0))
(res (when (and (eq (foreign-array-type-of ptr) :pointer)
(= (foreign-length ptr) 8))
(loop for i below 7
for vec = (ptr-ref ptr i)
collect (foreign-array-type-of vec)
collect (foreign-length vec)))))
(free u)
res)))
(SAMPLE 2 :FLOAT 4 :DOUBLE 6 :INT32 8 :UINT32 10 :INT64 12 :UINT64 14))
(deftest pointer-test.5
(progn
(define-ugen pointer-test-5 list ()
(with ((arr (make-i32-array 8 :initial-contents '(0 1 2 3 4 5 6 7))))
(loop for i below 8 collect (i32-ref arr i))))
(with-ugen-instance (u pointer-test-5)
(funcall (ugen-perf-function u))))
(0 1 2 3 4 5 6 7))
|
bc1cacde97b15223cd3e389c2118a11ee9a46c68f4e056b9494e850f00e683cb | SNePS/SNePS2 | driver.lisp | -*- Mode : Lisp ; Syntax : Common - Lisp ; Package : SNEPSUL ; -*-
Copyright ( C ) 1984 - -2013
Research Foundation of State University of New York
Version : $ I d : driver.lisp , v 1.2 2013/08/28 19:07:23 shapiro Exp $
;; This file is part of SNePS.
$ BEGIN LICENSE$
The contents of this file are subject to the University at
Buffalo Public License Version 1.0 ( the " License " ) ; you may
;;; not use this file except in compliance with the License. You
;;; may obtain a copy of the License at
;;; . edu/sneps/Downloads/ubpl.pdf.
;;;
Software distributed under the License is distributed on an
" AS IS " basis , WITHOUT WARRANTY OF ANY KIND , either express
;;; or implied. See the License for the specific language gov
;;; erning rights and limitations under the License.
;;;
The Original Code is SNePS 2.8 .
;;;
The Initial Developer of the Original Code is Research Foun
dation of State University of New York , on behalf of Univer
sity at Buffalo .
;;;
Portions created by the Initial Developer are Copyright ( C )
2011 Research Foundation of State University of New York , on
behalf of University at Buffalo . All Rights Reserved .
$ END LICENSE$
(in-package :snepsul)
Lisp side of the ARC / LISP communication package . Contains functions
;; that generate appropriate nextcommand.aml files and send them to
;; the communication directory where they will be read and executed
by the ARC command loop .
Author :
Created : May 31 , 1990
Modified : July 17 , 1990
(defvar *interface-directory* "/u0/grads/hans/interface"
"This is the name of the ARC/LISP communication directory viewed
from the machine where LISP runs on.")
;; Pathnames of various files viewed from the machine LISP is running on
(defvar *nextcom* (format nil "~a/nextcommand.aml" *interface-directory*))
(defvar *exit* (format nil "~a/exit" *interface-directory*))
The command - output file has to be defined in 2 steps because its name
is needed also on the machine ARC is running on .
(defvar *comout-name* "comout.log")
(defvar *comout* (format nil "~a/~a" *interface-directory* *comout-name*))
(defun wait-for-completion ()
"Waits until the file nextcommand.aml gets deleted by the
AML command loop running in the ARC process. Returns once
nextcommand.aml does not exist anymore."
(loop (unless (probe-file *nextcom*)
(return))
;; give system some time to breathe
(sleep 0.1)))
(defun execute-command (command-string)
"Takes a COMMAND-STRING and writes a file nextcommand.aml into the
communication interface directory. The command loop running in the ARC
process waits for this file, executes it and deletes it. The output
generated by the command gets written to a command output (watch) file.
This allows to get results back into the LISP process."
(wait-for-completion)
(with-open-file (nextcom *nextcom* :direction :output)
;; nextcommand.aml takes as an argument the name of the communication
directory viewed from the ARC process , into which ARC will
;; write the comout file
(format nextcom
"&args interface~%~
&watch %interface%/~a~%~
~a~%~
&watch &off~%~
&return~%"
*comout-name* command-string)
)
(wait-for-completion))
(defun get-file-as-string (file)
"Opens FILE and returns its contents as a string. Used to
read ARC command execution results back into the LISP process."
(cond ((probe-file file)
(with-output-to-string (out)
(with-open-file (in file :direction :input)
(loop
(let ((line (read-line in nil :eof nil)))
(cond ((eq line :eof)
(return))
(t (format out "~a~%" line))))))))
(t "")))
(defun print-result (&optional (stream *standard-output*))
"Prints contents of current command output file to STREAM."
(format stream "~&~a" (get-file-as-string *comout*)))
(defun stop-arc ()
"Creates an exit file in the communication directory which will cause
the arc command loop to terminate."
(with-open-file (e *exit* :direction :output :if-exists nil)
(format e " ")))
(defun arc-loop ()
"Simple read/execute/print loop that reads ARC commands in LISP,
executes them in the ARC process and prints the results. Used for
testing of the interface. The command ^^ terminates the loop."
(let ((command "")
(prompt "Arc: "))
#+ibcl(read-line)
(loop
(format t "~&~a" prompt)
(setq command (read-line))
(cond ((equal command "^^")
(stop-arc)
(return 'bye))
(t (execute-command command)
(format t "~a" (get-file-as-string *comout*)))))))
| null | https://raw.githubusercontent.com/SNePS/SNePS2/d3862108609b1879f2c546112072ad4caefc050d/demo/snere/arcinfo/driver.lisp | lisp | Syntax : Common - Lisp ; Package : SNEPSUL ; -*-
This file is part of SNePS.
you may
not use this file except in compliance with the License. You
may obtain a copy of the License at
. edu/sneps/Downloads/ubpl.pdf.
or implied. See the License for the specific language gov
erning rights and limitations under the License.
that generate appropriate nextcommand.aml files and send them to
the communication directory where they will be read and executed
Pathnames of various files viewed from the machine LISP is running on
give system some time to breathe
nextcommand.aml takes as an argument the name of the communication
write the comout file |
Copyright ( C ) 1984 - -2013
Research Foundation of State University of New York
Version : $ I d : driver.lisp , v 1.2 2013/08/28 19:07:23 shapiro Exp $
$ BEGIN LICENSE$
The contents of this file are subject to the University at
Software distributed under the License is distributed on an
" AS IS " basis , WITHOUT WARRANTY OF ANY KIND , either express
The Original Code is SNePS 2.8 .
The Initial Developer of the Original Code is Research Foun
dation of State University of New York , on behalf of Univer
sity at Buffalo .
Portions created by the Initial Developer are Copyright ( C )
2011 Research Foundation of State University of New York , on
behalf of University at Buffalo . All Rights Reserved .
$ END LICENSE$
(in-package :snepsul)
Lisp side of the ARC / LISP communication package . Contains functions
by the ARC command loop .
Author :
Created : May 31 , 1990
Modified : July 17 , 1990
(defvar *interface-directory* "/u0/grads/hans/interface"
"This is the name of the ARC/LISP communication directory viewed
from the machine where LISP runs on.")
(defvar *nextcom* (format nil "~a/nextcommand.aml" *interface-directory*))
(defvar *exit* (format nil "~a/exit" *interface-directory*))
The command - output file has to be defined in 2 steps because its name
is needed also on the machine ARC is running on .
(defvar *comout-name* "comout.log")
(defvar *comout* (format nil "~a/~a" *interface-directory* *comout-name*))
(defun wait-for-completion ()
"Waits until the file nextcommand.aml gets deleted by the
AML command loop running in the ARC process. Returns once
nextcommand.aml does not exist anymore."
(loop (unless (probe-file *nextcom*)
(return))
(sleep 0.1)))
(defun execute-command (command-string)
"Takes a COMMAND-STRING and writes a file nextcommand.aml into the
communication interface directory. The command loop running in the ARC
process waits for this file, executes it and deletes it. The output
generated by the command gets written to a command output (watch) file.
This allows to get results back into the LISP process."
(wait-for-completion)
(with-open-file (nextcom *nextcom* :direction :output)
directory viewed from the ARC process , into which ARC will
(format nextcom
"&args interface~%~
&watch %interface%/~a~%~
~a~%~
&watch &off~%~
&return~%"
*comout-name* command-string)
)
(wait-for-completion))
(defun get-file-as-string (file)
"Opens FILE and returns its contents as a string. Used to
read ARC command execution results back into the LISP process."
(cond ((probe-file file)
(with-output-to-string (out)
(with-open-file (in file :direction :input)
(loop
(let ((line (read-line in nil :eof nil)))
(cond ((eq line :eof)
(return))
(t (format out "~a~%" line))))))))
(t "")))
(defun print-result (&optional (stream *standard-output*))
"Prints contents of current command output file to STREAM."
(format stream "~&~a" (get-file-as-string *comout*)))
(defun stop-arc ()
"Creates an exit file in the communication directory which will cause
the arc command loop to terminate."
(with-open-file (e *exit* :direction :output :if-exists nil)
(format e " ")))
(defun arc-loop ()
"Simple read/execute/print loop that reads ARC commands in LISP,
executes them in the ARC process and prints the results. Used for
testing of the interface. The command ^^ terminates the loop."
(let ((command "")
(prompt "Arc: "))
#+ibcl(read-line)
(loop
(format t "~&~a" prompt)
(setq command (read-line))
(cond ((equal command "^^")
(stop-arc)
(return 'bye))
(t (execute-command command)
(format t "~a" (get-file-as-string *comout*)))))))
|
fce5a65ca5ed757a00ad5c91669d7023e2ef58cde852126fe82bdc9757d580a2 | xxyzz/SICP | Exercise_3_17.rkt | #lang racket/base
(define (find-in-list item l)
(cond [(null? l) #f]
[(eq? item (mcar l)) #t]
[else (find-in-list item (mcdr l))]))
(define (count-pairs x)
(define (iter pair counted-list count)
(cond [(null? pair) count]
[(find-in-list (mcar pair) counted-list) count]
[(not (mpair? pair)) 0]
[else (iter (mcdr pair) (mcons (mcar pair) counted-list) (add1 count))]))
(iter x null 0))
(define (last-pair x)
(if (null? (mcdr x)) x (last-pair (mcdr x))))
(count-pairs (mcons 'a (mcons 'b (mcons 'c null))))
3
(define count-4-list (mcons 'a (mcons 'b (mcons 'c null))))
(set-mcar! count-4-list (last-pair count-4-list))
(count-pairs count-4-list)
3
(define count-7-list (mcons 'a (mcons 'b (mcons 'c null))))
(set-mcar! count-7-list (mcdr count-7-list))
(set-mcar! (mcdr count-7-list) (last-pair count-7-list))
(count-pairs count-7-list)
3
(define infinite-list (mcons 'a (mcons 'b (mcons 'c null))))
(let ([last (last-pair count-4-list)])
(set-mcdr! last last))
(count-pairs infinite-list)
3
| null | https://raw.githubusercontent.com/xxyzz/SICP/e26aea1c58fd896297dbf5406f7fcd32bb4f8f78/3_Modularity_Objects_and_State/3.3_Modeling_with_Mutable_Data/Exercise_3_17.rkt | racket | #lang racket/base
(define (find-in-list item l)
(cond [(null? l) #f]
[(eq? item (mcar l)) #t]
[else (find-in-list item (mcdr l))]))
(define (count-pairs x)
(define (iter pair counted-list count)
(cond [(null? pair) count]
[(find-in-list (mcar pair) counted-list) count]
[(not (mpair? pair)) 0]
[else (iter (mcdr pair) (mcons (mcar pair) counted-list) (add1 count))]))
(iter x null 0))
(define (last-pair x)
(if (null? (mcdr x)) x (last-pair (mcdr x))))
(count-pairs (mcons 'a (mcons 'b (mcons 'c null))))
3
(define count-4-list (mcons 'a (mcons 'b (mcons 'c null))))
(set-mcar! count-4-list (last-pair count-4-list))
(count-pairs count-4-list)
3
(define count-7-list (mcons 'a (mcons 'b (mcons 'c null))))
(set-mcar! count-7-list (mcdr count-7-list))
(set-mcar! (mcdr count-7-list) (last-pair count-7-list))
(count-pairs count-7-list)
3
(define infinite-list (mcons 'a (mcons 'b (mcons 'c null))))
(let ([last (last-pair count-4-list)])
(set-mcdr! last last))
(count-pairs infinite-list)
3
| |
351e340024658ecf7269e0ea251e3fe379fbd2ffcabb9e682d68b5cbea97a24a | racket/racket7 | input-port.rkt | #lang racket/base
(require "../common/check.rkt"
"../host/thread.rkt"
"port.rkt"
"evt.rkt")
(provide prop:input-port
input-port?
->core-input-port
(struct-out core-input-port)
make-core-input-port)
(define-values (prop:input-port input-port-via-property? input-port-ref)
(make-struct-type-property 'input-port
(lambda (v sti)
(check 'prop:input-port (lambda (v) (or (exact-nonnegative-integer? v)
(input-port? v)))
#:contract "(or/c input-port? exact-nonnegative-integer?)"
v)
(check-immutable-field 'prop:input-port v sti)
(if (exact-nonnegative-integer? v)
(make-struct-field-accessor (list-ref sti 3) v)
v))
(list (cons prop:secondary-evt
(lambda (v) port->evt))
(cons prop:input-port-evt
(lambda (i)
(input-port-evt-ref (->core-input-port i)))))))
(define (input-port? p)
(or (core-input-port? p)
(input-port-via-property? p)))
;; This function should not be called in atomic mode,
;; since it can invoke an artitrary function
(define (->core-input-port v)
(cond
[(core-input-port? v) v]
[(input-port? v)
(let ([p (input-port-ref v)])
(cond
[(struct-accessor-procedure? p)
(->core-input-port (p v))]
[else
(->core-input-port p)]))]
[else
empty-input-port]))
(struct core-input-port core-port
(
;; Various functions below are called in atomic mode. The intent of
;; atomic mode is to ensure that the completion and return of the
;; function is atomic with respect to some further activity, such
;; as position and line counting. Also, a guard against operations
;; on a closed port precedes most operations. Any of the functions
;; is free to exit and re-enter atomic mode, but they may take on
;; the burden of re-checking for a closed port. Leave atomic mode
;; explicitly before raising an exception.
prepare-change ; #f or (-> void)
;; Called in atomic mode
;; May leave atomic mode temporarily, but on return,
;; ensures that other atomic operations are ok to
;; change the port. The main use of `prepare-change`
;; is to pause and `port-commit-peeked` attempts to
;; not succeed while a potential change is in
;; progress, where the commit attempts can resume after
;; atomic mode is left. The `close` operation
;; is *not* guarded by a call to `prepare-change`.
read-byte ; #f or (-> (or/c byte? eof-object? evt?))
;; Called in atomic mode.
;; This shortcut is optional.
;; Non-blocking byte read, where an event must be
;; returned if no byte is available. The event's result
;; is ignored, so it should not consume a byte.
read-in ; port or (bytes start-k end-k copy? -> (or/c integer? ...))
;; Called in atomic mode.
;; A port value redirects to the port. Otherwise, the function
never blocks , and can assume ` ( - end - k start - k ) ` is non - zero .
;; The `copy?` flag indicates that the given byte string should
;; not be exposed to untrusted code, and instead of should be
;; copied if necessary. The return values are the same as
;; documented for `make-input-port`, except that a pipe result
;; is not allowed (or, more precisely, it's treated as an event).
peek-byte ; #f or (-> (or/c byte? eof-object? evt?))
;; Called in atomic mode.
;; This shortcut is optional.
;; Non-blocking byte read, where an event must be
;; returned if no byte is available. The event's result
;; is ignored.
peek-in ; port or (bytes start-k end-k skip-k progress-evt copy? -> (or/c integer? ...))
;; Called in atomic mode.
;; A port value redirects to the port. Otherwise, the function
never blocks , and it can assume that ` ( - end - k start - k ) ` is non - zero .
;; The `copy?` flag is the same as for `read-in`. The return values
;; are the same as documented for `make-input-port`.
byte-ready ; port or ((->) -> (or/c boolean? evt))
;; Called in atomic mode.
;; A port value makes sense when `peek-in` has a port value.
Otherwise , check whether a peek on one byte would succeed
;; without blocking and return a boolean, or return an event
;; that effectively does the same. The event's value doesn't
;; matter, because it will be wrapped to return some original
;; port. When `byte-ready` is a function, it should call the
given ( for its side effect ) when work has been
;; done that might unblock this port or some other port.
get-progress-evt ; #f or (-> evt?)
;; *Not* called in atomic mode.
;; Optional support for progress events, and may be
;; called on a closed port.
commit ; (amt-k progress-evt? evt? (bytes? -> any) -> boolean)
;; Called in atomic mode.
;; Goes with `get-progress-evt`. The final `evt?`
;; argument is constrained to a few kinds of events;
;; see docs for `port-commit-peeked` for more information.
;; On success, a completion function is called in atomic mode,
;; but possibly in a different thread, with the committed bytes.
;; The result is a boolean indicating success or failure.
[pending-eof? #:mutable]
[read-handler #:mutable])
#:authentic
#:property prop:input-port-evt (lambda (i)
(cond
[(closed-state-closed? (core-port-closed i))
always-evt]
[else
(define byte-ready (core-input-port-byte-ready i))
(cond
[(input-port? byte-ready)
byte-ready]
[else
(poller-evt
(poller
(lambda (self poll-ctx)
(define v (byte-ready (lambda ()
(schedule-info-did-work! (poll-ctx-sched-info poll-ctx)))))
(cond
[(evt? v)
(values #f v)]
[(eq? v #t)
(values (list #t) #f)]
[else
(values #f self)]))))])])))
(define (make-core-input-port #:name name
#:data [data #f]
#:prepare-change [prepare-change #f]
#:read-byte [read-byte #f]
#:read-in read-in
#:peek-byte [peek-byte #f]
#:peek-in peek-in
#:byte-ready byte-ready
#:close close
#:get-progress-evt [get-progress-evt #f]
#:commit [commit #f]
#:get-location [get-location #f]
#:count-lines! [count-lines! #f]
#:init-offset [init-offset 0]
#:file-position [file-position #f]
#:buffer-mode [buffer-mode #f])
(core-input-port name
data
close
count-lines!
get-location
file-position
buffer-mode
(closed-state #f #f)
init-offset ; offset
#f ; count?
#f ; state
#f ; cr-state
#f ; line
#f ; column
#f ; position
prepare-change
read-byte
read-in
peek-byte
peek-in
byte-ready
get-progress-evt
commit
#f ; pending-eof?
#f)) ; read-handler
(define empty-input-port
(make-core-input-port #:name 'empty
#:read-in (lambda (bstr start-k end-k copy?) eof)
#:peek-in (lambda (bstr start-k end-k skip-k copy?) eof)
#:byte-ready (lambda (did-work!) #f)
#:close void))
| null | https://raw.githubusercontent.com/racket/racket7/5dbb62c6bbec198b4a790f1dc08fef0c45c2e32b/racket/src/io/port/input-port.rkt | racket | This function should not be called in atomic mode,
since it can invoke an artitrary function
Various functions below are called in atomic mode. The intent of
atomic mode is to ensure that the completion and return of the
function is atomic with respect to some further activity, such
as position and line counting. Also, a guard against operations
on a closed port precedes most operations. Any of the functions
is free to exit and re-enter atomic mode, but they may take on
the burden of re-checking for a closed port. Leave atomic mode
explicitly before raising an exception.
#f or (-> void)
Called in atomic mode
May leave atomic mode temporarily, but on return,
ensures that other atomic operations are ok to
change the port. The main use of `prepare-change`
is to pause and `port-commit-peeked` attempts to
not succeed while a potential change is in
progress, where the commit attempts can resume after
atomic mode is left. The `close` operation
is *not* guarded by a call to `prepare-change`.
#f or (-> (or/c byte? eof-object? evt?))
Called in atomic mode.
This shortcut is optional.
Non-blocking byte read, where an event must be
returned if no byte is available. The event's result
is ignored, so it should not consume a byte.
port or (bytes start-k end-k copy? -> (or/c integer? ...))
Called in atomic mode.
A port value redirects to the port. Otherwise, the function
The `copy?` flag indicates that the given byte string should
not be exposed to untrusted code, and instead of should be
copied if necessary. The return values are the same as
documented for `make-input-port`, except that a pipe result
is not allowed (or, more precisely, it's treated as an event).
#f or (-> (or/c byte? eof-object? evt?))
Called in atomic mode.
This shortcut is optional.
Non-blocking byte read, where an event must be
returned if no byte is available. The event's result
is ignored.
port or (bytes start-k end-k skip-k progress-evt copy? -> (or/c integer? ...))
Called in atomic mode.
A port value redirects to the port. Otherwise, the function
The `copy?` flag is the same as for `read-in`. The return values
are the same as documented for `make-input-port`.
port or ((->) -> (or/c boolean? evt))
Called in atomic mode.
A port value makes sense when `peek-in` has a port value.
without blocking and return a boolean, or return an event
that effectively does the same. The event's value doesn't
matter, because it will be wrapped to return some original
port. When `byte-ready` is a function, it should call the
done that might unblock this port or some other port.
#f or (-> evt?)
*Not* called in atomic mode.
Optional support for progress events, and may be
called on a closed port.
(amt-k progress-evt? evt? (bytes? -> any) -> boolean)
Called in atomic mode.
Goes with `get-progress-evt`. The final `evt?`
argument is constrained to a few kinds of events;
see docs for `port-commit-peeked` for more information.
On success, a completion function is called in atomic mode,
but possibly in a different thread, with the committed bytes.
The result is a boolean indicating success or failure.
offset
count?
state
cr-state
line
column
position
pending-eof?
read-handler | #lang racket/base
(require "../common/check.rkt"
"../host/thread.rkt"
"port.rkt"
"evt.rkt")
(provide prop:input-port
input-port?
->core-input-port
(struct-out core-input-port)
make-core-input-port)
(define-values (prop:input-port input-port-via-property? input-port-ref)
(make-struct-type-property 'input-port
(lambda (v sti)
(check 'prop:input-port (lambda (v) (or (exact-nonnegative-integer? v)
(input-port? v)))
#:contract "(or/c input-port? exact-nonnegative-integer?)"
v)
(check-immutable-field 'prop:input-port v sti)
(if (exact-nonnegative-integer? v)
(make-struct-field-accessor (list-ref sti 3) v)
v))
(list (cons prop:secondary-evt
(lambda (v) port->evt))
(cons prop:input-port-evt
(lambda (i)
(input-port-evt-ref (->core-input-port i)))))))
(define (input-port? p)
(or (core-input-port? p)
(input-port-via-property? p)))
(define (->core-input-port v)
(cond
[(core-input-port? v) v]
[(input-port? v)
(let ([p (input-port-ref v)])
(cond
[(struct-accessor-procedure? p)
(->core-input-port (p v))]
[else
(->core-input-port p)]))]
[else
empty-input-port]))
(struct core-input-port core-port
(
never blocks , and can assume ` ( - end - k start - k ) ` is non - zero .
never blocks , and it can assume that ` ( - end - k start - k ) ` is non - zero .
Otherwise , check whether a peek on one byte would succeed
given ( for its side effect ) when work has been
[pending-eof? #:mutable]
[read-handler #:mutable])
#:authentic
#:property prop:input-port-evt (lambda (i)
(cond
[(closed-state-closed? (core-port-closed i))
always-evt]
[else
(define byte-ready (core-input-port-byte-ready i))
(cond
[(input-port? byte-ready)
byte-ready]
[else
(poller-evt
(poller
(lambda (self poll-ctx)
(define v (byte-ready (lambda ()
(schedule-info-did-work! (poll-ctx-sched-info poll-ctx)))))
(cond
[(evt? v)
(values #f v)]
[(eq? v #t)
(values (list #t) #f)]
[else
(values #f self)]))))])])))
(define (make-core-input-port #:name name
#:data [data #f]
#:prepare-change [prepare-change #f]
#:read-byte [read-byte #f]
#:read-in read-in
#:peek-byte [peek-byte #f]
#:peek-in peek-in
#:byte-ready byte-ready
#:close close
#:get-progress-evt [get-progress-evt #f]
#:commit [commit #f]
#:get-location [get-location #f]
#:count-lines! [count-lines! #f]
#:init-offset [init-offset 0]
#:file-position [file-position #f]
#:buffer-mode [buffer-mode #f])
(core-input-port name
data
close
count-lines!
get-location
file-position
buffer-mode
(closed-state #f #f)
prepare-change
read-byte
read-in
peek-byte
peek-in
byte-ready
get-progress-evt
commit
(define empty-input-port
(make-core-input-port #:name 'empty
#:read-in (lambda (bstr start-k end-k copy?) eof)
#:peek-in (lambda (bstr start-k end-k skip-k copy?) eof)
#:byte-ready (lambda (did-work!) #f)
#:close void))
|
e31f3218a7037008ac554b366bb4f293d8101e8fdd93da6bc52fcdc4332f937c | kudu-dynamics/blaze | Main.hs | module Main where
import Blaze.Prelude
import Blaze.Util.MLIL
import qualified Data.Text as Text
main :: IO ()
main = getArgs >>= \case
[opName, binPath] -> do
xs <- getInstructionsWithOpByName (Text.pack opName) binPath
mapM_ p xs
putText "[finished]"
exitSuccess
where
p (fn, ix) = putText $ fn <> " " <> show ix
_ -> putText "find_mlil [op name] [binary path]"
| null | https://raw.githubusercontent.com/kudu-dynamics/blaze/a385bb3b37a0a0e061217ebdd70dd0eecbb20332/app/find_mlil/Main.hs | haskell | module Main where
import Blaze.Prelude
import Blaze.Util.MLIL
import qualified Data.Text as Text
main :: IO ()
main = getArgs >>= \case
[opName, binPath] -> do
xs <- getInstructionsWithOpByName (Text.pack opName) binPath
mapM_ p xs
putText "[finished]"
exitSuccess
where
p (fn, ix) = putText $ fn <> " " <> show ix
_ -> putText "find_mlil [op name] [binary path]"
| |
5520229b0715c7aafc780c3375652821e97c2de8eda0fbae15b8713f87224338 | jpmonettas/clindex | data_readers.clj | {url clindex.utils/read-url}
| null | https://raw.githubusercontent.com/jpmonettas/clindex/77097d80a23aa85d2ff50e55645a1452f2dcb3c0/src/data_readers.clj | clojure | {url clindex.utils/read-url}
| |
a1a8bcb1906627a30b08ca4655c3713168720b1a78b6d063efcd7da4d6123215 | mon-key/unicly | unicly-uuid-version.lisp | : FILE - CREATED < Timestamp : # { 2011 - 08 - 17T16:19:17 - 04:00Z}#{11333 } - by MON >
;;; :FILE unicly/unicly-uuid-version.lisp
;;; ==============================
(in-package #:unicly)
;; *package*
;; %uuid-uuid-version-if
(declaim (inline %uuid-uuid-version-if))
(defun %uuid-uuid-version-if (uuid-time-high-and-version uuid)
;; :TEST (signals succesfully)
;; (let ((v4uuid (make-v4-uuid)))
( setf ( slot - value v4uuid ' % uuid_time - high - and - version ) # xFFFF )
;; (%uuid-uuid-version-if (slot-value v4uuid '%uuid_time-high-and-version) v4uuid))
(declare (type unique-universal-identifier uuid)
(type uuid-ub16 uuid-time-high-and-version)
(optimize (speed 3)))
(when (ldb-test (byte 1 15) uuid-time-high-and-version)
(error 'uuid-bit-48-error :uuid-bit-48-error-datum uuid)))
;;; ==============================
;; ,---- RFC4122 4.1.3. Subsection "Version"
| The version number is in the most significant 4 bits of the time
| stamp ( bits 4 through 7 of the time_hi_and_version field ) .
;; |
;; | 15 14 13 12
| Msb0 Msb1 Msb2 Msb3 Version Description
| 0 0 0 1 1 The time - based version specified in this document .
| 0 0 1 0 2 DCE Security version , with embedded POSIX UIDs .
| 0 0 1 1 3 The name - based MD5
| 0 1 0 0 4 The randomly or pseudo - randomly generated version
| 0 1 0 1 5 The name - based SHA-1
;; | ^--bit-48
;; `----
;;
: Currently not detecting v1 or v2 UUIDs at all .
;; uuid-version-bit-vector
(declaim (inline uuid-version-uuid))
(defun uuid-version-uuid (uuid)
;; :WAS (declare (unique-universal-identifier uuid)
(declare (type (or unique-universal-identifier uuid-bit-vector-128) uuid)
(inline %unique-universal-identifier-null-p)
(optimize (speed 3)))
;; :NOTE !EXPERIMENTAL!
Its entirely possible for a bit - vector of length 128 to be
;; passed and for that that b-v to be contained of a bit-field that
independent of its 48 bit in no other way resembles a UUID
(when (uuid-bit-vector-128-p uuid)
(return-from uuid-version-uuid (uuid-version-bit-vector uuid)))
;;
(locally
(declare (type unique-universal-identifier uuid))
(when (%unique-universal-identifier-null-p uuid)
(return-from uuid-version-uuid (values 0 'null-uuid)))
(let ((uuid-thav (if (slot-boundp uuid '%uuid_time-high-and-version)
(slot-value uuid '%uuid_time-high-and-version)
(error 'uuid-simple-error ;; 'uuid-slot-unbound-error
:format-control "slot %UUID_TIME-HIGH-AND-VERSION is not ~
`cl:slot-boundp' in uuid object"))))
(declare (type uuid-ub16 uuid-thav))
(%uuid-uuid-version-if uuid-thav uuid)
(or (and (ldb-test (byte 1 13) uuid-thav)
(ldb-test (byte 1 12) uuid-thav)
3)
(and (ldb-test (byte 1 14) uuid-thav)
(or (and (ldb-test (byte 1 12) uuid-thav) 5)
(and (not (ldb-test (byte 1 13) uuid-thav)) 4)
(error 'uuid-simple-error
:format-control "something wrong with UUID bit field~% got: ~S"
:format-arguments (list uuid-thav))))))))
;; :TODO use `uuid-string-parse-integer' to get the version from a uuid-string-32
;; :SEE The notes at `make-uuid-from-string-if' and `make-uuid-from-string'.
;; (defun uuid-string-36-version (uuis-hex-string-36) (...))
;; (defun uuid-string-32-version (uuis-hex-string-32) (...))
;;; ==============================
;; Local Variables:
;; indent-tabs-mode: nil
;; show-trailing-whitespace: t
;; mode: lisp-interaction
;; package: unicly
;; End:
;;; ==============================
EOF
| null | https://raw.githubusercontent.com/mon-key/unicly/f9bd21446f35e28766d2f1ada2741399b14d93cb/unicly-uuid-version.lisp | lisp | :FILE unicly/unicly-uuid-version.lisp
==============================
*package*
%uuid-uuid-version-if
:TEST (signals succesfully)
(let ((v4uuid (make-v4-uuid)))
(%uuid-uuid-version-if (slot-value v4uuid '%uuid_time-high-and-version) v4uuid))
==============================
,---- RFC4122 4.1.3. Subsection "Version"
|
| 15 14 13 12
| ^--bit-48
`----
uuid-version-bit-vector
:WAS (declare (unique-universal-identifier uuid)
:NOTE !EXPERIMENTAL!
passed and for that that b-v to be contained of a bit-field that
'uuid-slot-unbound-error
:TODO use `uuid-string-parse-integer' to get the version from a uuid-string-32
:SEE The notes at `make-uuid-from-string-if' and `make-uuid-from-string'.
(defun uuid-string-36-version (uuis-hex-string-36) (...))
(defun uuid-string-32-version (uuis-hex-string-32) (...))
==============================
Local Variables:
indent-tabs-mode: nil
show-trailing-whitespace: t
mode: lisp-interaction
package: unicly
End:
============================== | : FILE - CREATED < Timestamp : # { 2011 - 08 - 17T16:19:17 - 04:00Z}#{11333 } - by MON >
(in-package #:unicly)
(declaim (inline %uuid-uuid-version-if))
(defun %uuid-uuid-version-if (uuid-time-high-and-version uuid)
( setf ( slot - value v4uuid ' % uuid_time - high - and - version ) # xFFFF )
(declare (type unique-universal-identifier uuid)
(type uuid-ub16 uuid-time-high-and-version)
(optimize (speed 3)))
(when (ldb-test (byte 1 15) uuid-time-high-and-version)
(error 'uuid-bit-48-error :uuid-bit-48-error-datum uuid)))
| The version number is in the most significant 4 bits of the time
| stamp ( bits 4 through 7 of the time_hi_and_version field ) .
| Msb0 Msb1 Msb2 Msb3 Version Description
| 0 0 0 1 1 The time - based version specified in this document .
| 0 0 1 0 2 DCE Security version , with embedded POSIX UIDs .
| 0 0 1 1 3 The name - based MD5
| 0 1 0 0 4 The randomly or pseudo - randomly generated version
| 0 1 0 1 5 The name - based SHA-1
: Currently not detecting v1 or v2 UUIDs at all .
(declaim (inline uuid-version-uuid))
(defun uuid-version-uuid (uuid)
(declare (type (or unique-universal-identifier uuid-bit-vector-128) uuid)
(inline %unique-universal-identifier-null-p)
(optimize (speed 3)))
Its entirely possible for a bit - vector of length 128 to be
independent of its 48 bit in no other way resembles a UUID
(when (uuid-bit-vector-128-p uuid)
(return-from uuid-version-uuid (uuid-version-bit-vector uuid)))
(locally
(declare (type unique-universal-identifier uuid))
(when (%unique-universal-identifier-null-p uuid)
(return-from uuid-version-uuid (values 0 'null-uuid)))
(let ((uuid-thav (if (slot-boundp uuid '%uuid_time-high-and-version)
(slot-value uuid '%uuid_time-high-and-version)
:format-control "slot %UUID_TIME-HIGH-AND-VERSION is not ~
`cl:slot-boundp' in uuid object"))))
(declare (type uuid-ub16 uuid-thav))
(%uuid-uuid-version-if uuid-thav uuid)
(or (and (ldb-test (byte 1 13) uuid-thav)
(ldb-test (byte 1 12) uuid-thav)
3)
(and (ldb-test (byte 1 14) uuid-thav)
(or (and (ldb-test (byte 1 12) uuid-thav) 5)
(and (not (ldb-test (byte 1 13) uuid-thav)) 4)
(error 'uuid-simple-error
:format-control "something wrong with UUID bit field~% got: ~S"
:format-arguments (list uuid-thav))))))))
EOF
|
dbffb5a3b55f70bd8cf517180adfe65ea1babd48104e13216a92d5633534f803 | FranklinChen/hugs98-plus-Sep2006 | GeislerTest.hs | module Main where
import Xml2Haskell
generated with DtdToHaskell from Geisler.dtd
readFoo :: IO Foo
readFoo = readXml "-"
writeFoo :: Foo -> IO ()
writeFoo = writeXml "-"
main = do
foo <- readFoo
writeFoo foo
| null | https://raw.githubusercontent.com/FranklinChen/hugs98-plus-Sep2006/54ab69bd6313adbbed1d790b46aca2a0305ea67e/packages/HaXml/bugs/GeislerTest.hs | haskell | module Main where
import Xml2Haskell
generated with DtdToHaskell from Geisler.dtd
readFoo :: IO Foo
readFoo = readXml "-"
writeFoo :: Foo -> IO ()
writeFoo = writeXml "-"
main = do
foo <- readFoo
writeFoo foo
| |
7fec94889b38072020c5871c5f1884823eb346115656f1c2de8354328e928c11 | CRogers/obc | growvect.ml |
* growvect.ml
*
* This file is part of the Oxford Oberon-2 compiler
* Copyright ( c ) 2006
* All rights reserved
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions are met :
*
* 1 . Redistributions of source code must retain the above copyright notice ,
* this list of conditions and the following disclaimer .
* 2 . Redistributions in binary form must reproduce the above copyright notice ,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution .
* 3 . The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission .
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ` ` AS IS '' AND ANY EXPRESS OR
* IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED .
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
* SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO ,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ;
* OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
* IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR
* OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
*
* $ I d : growvect.ml 343 2007 - 01 - 24 22:32:02Z
* growvect.ml
*
* This file is part of the Oxford Oberon-2 compiler
* Copyright (c) 2006 J. M. Spivey
* All rights reserved
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* $Id: growvect.ml 343 2007-01-24 22:32:02Z mike $
*)
type 'a t =
{ mutable size: int;
mutable elements: 'a array }
let create n =
{ size = 0; elements = Array.create n (Obj.magic ()) }
let size v = v.size
let get v i =
if i >= v.size then raise (Invalid_argument "index out of bounds");
Array.get v.elements i
let set v i x =
if i >= v.size then raise (Invalid_argument "index out of bounds");
Array.set v.elements i x
let append v x =
let n = Array.length v.elements in
if v.size >= n then begin
let newv = Array.create (2*n) (Obj.magic ()) in
Array.blit v.elements 0 newv 0 n;
v.elements <- newv
end;
Array.set v.elements v.size x;
v.size <- v.size+1
let iter f v =
for i = 0 to v.size-1 do f v.elements.(i) done
| null | https://raw.githubusercontent.com/CRogers/obc/49064db244e0c9d2ec2a83420c8d0ee917b54196/yacc/growvect.ml | ocaml |
* growvect.ml
*
* This file is part of the Oxford Oberon-2 compiler
* Copyright ( c ) 2006
* All rights reserved
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions are met :
*
* 1 . Redistributions of source code must retain the above copyright notice ,
* this list of conditions and the following disclaimer .
* 2 . Redistributions in binary form must reproduce the above copyright notice ,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution .
* 3 . The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission .
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ` ` AS IS '' AND ANY EXPRESS OR
* IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED .
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL ,
* SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO ,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ;
* OR BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
* IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE OR
* OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
*
* $ I d : growvect.ml 343 2007 - 01 - 24 22:32:02Z
* growvect.ml
*
* This file is part of the Oxford Oberon-2 compiler
* Copyright (c) 2006 J. M. Spivey
* All rights reserved
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions are met:
*
* 1. Redistributions of source code must retain the above copyright notice,
* this list of conditions and the following disclaimer.
* 2. Redistributions in binary form must reproduce the above copyright notice,
* this list of conditions and the following disclaimer in the documentation
* and/or other materials provided with the distribution.
* 3. The name of the author may not be used to endorse or promote products
* derived from this software without specific prior written permission.
*
* THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR
* IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
* OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
* IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
* PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS;
* OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
* OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
* ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* $Id: growvect.ml 343 2007-01-24 22:32:02Z mike $
*)
type 'a t =
{ mutable size: int;
mutable elements: 'a array }
let create n =
{ size = 0; elements = Array.create n (Obj.magic ()) }
let size v = v.size
let get v i =
if i >= v.size then raise (Invalid_argument "index out of bounds");
Array.get v.elements i
let set v i x =
if i >= v.size then raise (Invalid_argument "index out of bounds");
Array.set v.elements i x
let append v x =
let n = Array.length v.elements in
if v.size >= n then begin
let newv = Array.create (2*n) (Obj.magic ()) in
Array.blit v.elements 0 newv 0 n;
v.elements <- newv
end;
Array.set v.elements v.size x;
v.size <- v.size+1
let iter f v =
for i = 0 to v.size-1 do f v.elements.(i) done
| |
88aafd2bcbaef7a50688fe431a5c84b4f795e9062afc10125cfedd2d43c60e05 | lancelet/wgpu-hs | Device.hs | # LANGUAGE ForeignFunctionInterface #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
{-# OPTIONS_GHC -Wno-deferred-type-errors #-}
-- |
-- Module : WGPU.Internal.Device.
-- Description : Device (open connection to a device).
module WGPU.Internal.Device
( -- * Types
Device (..),
DeviceDescriptor (..),
Features (..),
Limits (..),
-- * Functions
requestDevice,
)
where
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Default (Default, def)
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Word (Word32)
import Foreign (Ptr, nullPtr)
import WGPU.Internal.Adapter (Adapter, adapterInst, wgpuAdapter)
import WGPU.Internal.ChainedStruct (ChainedStruct (EmptyChain, PtrChain))
import WGPU.Internal.Instance (Instance, wgpuHsInstance)
import WGPU.Internal.Memory
( ToRaw,
evalContT,
freeHaskellFunPtr,
newEmptyMVar,
putMVar,
raw,
rawPtr,
showWithPtr,
takeMVar,
withCZeroingAfter,
)
import WGPU.Raw.Generated.Enum.WGPUNativeFeature (WGPUNativeFeature)
import qualified WGPU.Raw.Generated.Enum.WGPUNativeFeature as WGPUNativeFeature
import qualified WGPU.Raw.Generated.Enum.WGPUNativeSType as WGPUSType
import qualified WGPU.Raw.Generated.Fun as RawFun
import qualified WGPU.Raw.Generated.Struct.WGPUDeviceDescriptor as WGPUDeviceDescriptor
import WGPU.Raw.Generated.Struct.WGPUDeviceExtras (WGPUDeviceExtras)
import qualified WGPU.Raw.Generated.Struct.WGPUDeviceExtras as WGPUDeviceExtras
import WGPU.Raw.Types (WGPUDevice (WGPUDevice), WGPURequestDeviceCallback)
-------------------------------------------------------------------------------
-- | An open connection to a graphics and/or compute device.
--
-- A 'Device' may be created using the 'requestDevice' function.
data Device = Device
{ deviceInst :: !Instance,
wgpuDevice :: !WGPUDevice
}
instance Show Device where
show d =
let Device _ (WGPUDevice ptr) = d
in showWithPtr "Device" ptr
instance Eq Device where
(==) d1 d2 =
let Device _ (WGPUDevice d1_ptr) = d1
Device _ (WGPUDevice d2_ptr) = d2
in d1_ptr == d2_ptr
instance ToRaw Device WGPUDevice where
raw = pure . wgpuDevice
-------------------------------------------------------------------------------
-- | Device features that are not guaranteed to be supported.
--
-- * NOTE: The Rust API currently has far more extensive @Features@. Perhaps
-- they have not yet been ported to the C API?
-- <-types/0.9.0/wgpu_types/struct.Features.html>
newtype Features = Features
{ textureAdapterSpecificFormatFeatures :: Bool
}
deriving (Eq, Show)
instance Default Features where
def =
Features
{ textureAdapterSpecificFormatFeatures = False
}
instance ToRaw Features WGPUNativeFeature where
raw Features {..} =
pure $
if textureAdapterSpecificFormatFeatures
then WGPUNativeFeature.TEXTURE_ADAPTER_SPECIFIC_FORMAT_FEATURES
else 0
-------------------------------------------------------------------------------
-- | Device limits.
--
-- Represents the set of limits an adapter/device supports.
data Limits = Limits
{ -- | Maximum allowed value for the width of a 1D texture.
maxTextureDimension1D :: !Word32,
-- | Maximum allowed value for the width and height of a 2D texture.
maxTextureDimension2D :: !Word32,
-- | Maximum allowed value for the width, height or depth of a 3D texture.
maxTextureDimension3D :: !Word32,
-- | Maximum allowed value for the array layers of a texture.
maxTextureArrayLayers :: !Word32,
-- | Amount of bind groups that can be attached to a pipeline at the same
-- time.
maxBindGroups :: !Word32,
-- | Amount of storage buffer bindings that can be dynamic in a single
-- pipeline.
maxDynamicStorageBuffersPerPipelineLayout :: !Word32,
-- | Amount of sampled textures visible in a single shader stage.
maxStorageBuffersPerShaderStage :: !Word32,
-- | Maximum size in bytes of a binding to a uniform buffer.
maxStorageBufferBindingSize :: !Word32
}
deriving (Eq, Show)
instance Default Limits where
def =
Limits
{ maxTextureDimension1D = 0,
maxTextureDimension2D = 0,
maxTextureDimension3D = 0,
maxTextureArrayLayers = 0,
maxBindGroups = 0,
maxDynamicStorageBuffersPerPipelineLayout = 0,
maxStorageBuffersPerShaderStage = 0,
maxStorageBufferBindingSize = 0
}
-------------------------------------------------------------------------------
-- | Describes a 'Device'.
data DeviceDescriptor = DeviceDescriptor
{ -- | Debug label for the device.
deviceLabel :: !Text,
-- | Features that the device should support.
features :: !Features,
-- | Limits that the device should support (minimum values).
limits :: !Limits
}
deriving (Eq, Show)
instance Default DeviceDescriptor where
def =
DeviceDescriptor
{ deviceLabel = Text.empty,
features = def,
limits = def
}
instance ToRaw DeviceDescriptor WGPUDeviceExtras where
raw DeviceDescriptor {..} = do
chain_ptr <- raw (EmptyChain WGPUSType.DeviceExtras)
label_ptr <- rawPtr deviceLabel
n_nativeFeatures <- raw features
pure
WGPUDeviceExtras.WGPUDeviceExtras
{ chain = chain_ptr,
maxTextureDimension1D = maxTextureDimension1D limits,
maxTextureDimension2D = maxTextureDimension2D limits,
maxTextureDimension3D = maxTextureDimension3D limits,
maxTextureArrayLayers = maxTextureArrayLayers limits,
maxBindGroups = maxBindGroups limits,
maxDynamicStorageBuffersPerPipelineLayout =
maxDynamicStorageBuffersPerPipelineLayout limits,
maxStorageBuffersPerShaderStage =
maxStorageBuffersPerShaderStage limits,
maxStorageBufferBindingSize =
maxStorageBufferBindingSize limits,
nativeFeatures = n_nativeFeatures,
label = label_ptr,
tracePath = nullPtr
}
-- | Requests a connection to a physical device, creating a logical device.
--
-- This action blocks until an available device is returned.
requestDevice ::
MonadIO m =>
-- | @Adapter@ for which the device will be returned.
Adapter ->
-- | The features and limits requested for the device.
DeviceDescriptor ->
-- | The returned @Device@, if it could be retrieved.
m (Maybe Device)
requestDevice adapter deviceDescriptor = liftIO . evalContT $ do
let inst = adapterInst adapter
deviceMVar <- newEmptyMVar
callback <- mkDeviceCallback (\d _ -> putMVar deviceMVar d)
deviceExtras_ptr <- rawPtr deviceDescriptor
nextInChain_ptr <- rawPtr (PtrChain WGPUSType.DeviceExtras deviceExtras_ptr)
deviceDescriptor_ptr <-
withCZeroingAfter $
WGPUDeviceDescriptor.WGPUDeviceDescriptor
{ nextInChain = nextInChain_ptr
}
RawFun.wgpuAdapterRequestDevice
(wgpuHsInstance inst)
(wgpuAdapter adapter)
deviceDescriptor_ptr
callback
nullPtr
device <- takeMVar deviceMVar
freeHaskellFunPtr callback
pure $ case device of
WGPUDevice ptr | ptr == nullPtr -> Nothing
WGPUDevice _ -> Just (Device inst device)
mkDeviceCallback ::
(MonadIO m) =>
(WGPUDevice -> Ptr () -> IO ()) ->
m WGPURequestDeviceCallback
mkDeviceCallback = liftIO . mkDeviceCallbackIO
foreign import ccall "wrapper"
mkDeviceCallbackIO ::
(WGPUDevice -> Ptr () -> IO ()) -> IO WGPURequestDeviceCallback
| null | https://raw.githubusercontent.com/lancelet/wgpu-hs/29c6a7a3f21ec074f3b23e282ea69396bad337cb/wgpu-hs/src-internal/WGPU/Internal/Device.hs | haskell | # OPTIONS_GHC -Wno-deferred-type-errors #
|
Module : WGPU.Internal.Device.
Description : Device (open connection to a device).
* Types
* Functions
-----------------------------------------------------------------------------
| An open connection to a graphics and/or compute device.
A 'Device' may be created using the 'requestDevice' function.
-----------------------------------------------------------------------------
| Device features that are not guaranteed to be supported.
* NOTE: The Rust API currently has far more extensive @Features@. Perhaps
they have not yet been ported to the C API?
<-types/0.9.0/wgpu_types/struct.Features.html>
-----------------------------------------------------------------------------
| Device limits.
Represents the set of limits an adapter/device supports.
| Maximum allowed value for the width of a 1D texture.
| Maximum allowed value for the width and height of a 2D texture.
| Maximum allowed value for the width, height or depth of a 3D texture.
| Maximum allowed value for the array layers of a texture.
| Amount of bind groups that can be attached to a pipeline at the same
time.
| Amount of storage buffer bindings that can be dynamic in a single
pipeline.
| Amount of sampled textures visible in a single shader stage.
| Maximum size in bytes of a binding to a uniform buffer.
-----------------------------------------------------------------------------
| Describes a 'Device'.
| Debug label for the device.
| Features that the device should support.
| Limits that the device should support (minimum values).
| Requests a connection to a physical device, creating a logical device.
This action blocks until an available device is returned.
| @Adapter@ for which the device will be returned.
| The features and limits requested for the device.
| The returned @Device@, if it could be retrieved. | # LANGUAGE ForeignFunctionInterface #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
module WGPU.Internal.Device
Device (..),
DeviceDescriptor (..),
Features (..),
Limits (..),
requestDevice,
)
where
import Control.Monad.IO.Class (MonadIO, liftIO)
import Data.Default (Default, def)
import Data.Text (Text)
import qualified Data.Text as Text
import Data.Word (Word32)
import Foreign (Ptr, nullPtr)
import WGPU.Internal.Adapter (Adapter, adapterInst, wgpuAdapter)
import WGPU.Internal.ChainedStruct (ChainedStruct (EmptyChain, PtrChain))
import WGPU.Internal.Instance (Instance, wgpuHsInstance)
import WGPU.Internal.Memory
( ToRaw,
evalContT,
freeHaskellFunPtr,
newEmptyMVar,
putMVar,
raw,
rawPtr,
showWithPtr,
takeMVar,
withCZeroingAfter,
)
import WGPU.Raw.Generated.Enum.WGPUNativeFeature (WGPUNativeFeature)
import qualified WGPU.Raw.Generated.Enum.WGPUNativeFeature as WGPUNativeFeature
import qualified WGPU.Raw.Generated.Enum.WGPUNativeSType as WGPUSType
import qualified WGPU.Raw.Generated.Fun as RawFun
import qualified WGPU.Raw.Generated.Struct.WGPUDeviceDescriptor as WGPUDeviceDescriptor
import WGPU.Raw.Generated.Struct.WGPUDeviceExtras (WGPUDeviceExtras)
import qualified WGPU.Raw.Generated.Struct.WGPUDeviceExtras as WGPUDeviceExtras
import WGPU.Raw.Types (WGPUDevice (WGPUDevice), WGPURequestDeviceCallback)
data Device = Device
{ deviceInst :: !Instance,
wgpuDevice :: !WGPUDevice
}
instance Show Device where
show d =
let Device _ (WGPUDevice ptr) = d
in showWithPtr "Device" ptr
instance Eq Device where
(==) d1 d2 =
let Device _ (WGPUDevice d1_ptr) = d1
Device _ (WGPUDevice d2_ptr) = d2
in d1_ptr == d2_ptr
instance ToRaw Device WGPUDevice where
raw = pure . wgpuDevice
newtype Features = Features
{ textureAdapterSpecificFormatFeatures :: Bool
}
deriving (Eq, Show)
instance Default Features where
def =
Features
{ textureAdapterSpecificFormatFeatures = False
}
instance ToRaw Features WGPUNativeFeature where
raw Features {..} =
pure $
if textureAdapterSpecificFormatFeatures
then WGPUNativeFeature.TEXTURE_ADAPTER_SPECIFIC_FORMAT_FEATURES
else 0
data Limits = Limits
maxTextureDimension1D :: !Word32,
maxTextureDimension2D :: !Word32,
maxTextureDimension3D :: !Word32,
maxTextureArrayLayers :: !Word32,
maxBindGroups :: !Word32,
maxDynamicStorageBuffersPerPipelineLayout :: !Word32,
maxStorageBuffersPerShaderStage :: !Word32,
maxStorageBufferBindingSize :: !Word32
}
deriving (Eq, Show)
instance Default Limits where
def =
Limits
{ maxTextureDimension1D = 0,
maxTextureDimension2D = 0,
maxTextureDimension3D = 0,
maxTextureArrayLayers = 0,
maxBindGroups = 0,
maxDynamicStorageBuffersPerPipelineLayout = 0,
maxStorageBuffersPerShaderStage = 0,
maxStorageBufferBindingSize = 0
}
data DeviceDescriptor = DeviceDescriptor
deviceLabel :: !Text,
features :: !Features,
limits :: !Limits
}
deriving (Eq, Show)
instance Default DeviceDescriptor where
def =
DeviceDescriptor
{ deviceLabel = Text.empty,
features = def,
limits = def
}
instance ToRaw DeviceDescriptor WGPUDeviceExtras where
raw DeviceDescriptor {..} = do
chain_ptr <- raw (EmptyChain WGPUSType.DeviceExtras)
label_ptr <- rawPtr deviceLabel
n_nativeFeatures <- raw features
pure
WGPUDeviceExtras.WGPUDeviceExtras
{ chain = chain_ptr,
maxTextureDimension1D = maxTextureDimension1D limits,
maxTextureDimension2D = maxTextureDimension2D limits,
maxTextureDimension3D = maxTextureDimension3D limits,
maxTextureArrayLayers = maxTextureArrayLayers limits,
maxBindGroups = maxBindGroups limits,
maxDynamicStorageBuffersPerPipelineLayout =
maxDynamicStorageBuffersPerPipelineLayout limits,
maxStorageBuffersPerShaderStage =
maxStorageBuffersPerShaderStage limits,
maxStorageBufferBindingSize =
maxStorageBufferBindingSize limits,
nativeFeatures = n_nativeFeatures,
label = label_ptr,
tracePath = nullPtr
}
requestDevice ::
MonadIO m =>
Adapter ->
DeviceDescriptor ->
m (Maybe Device)
requestDevice adapter deviceDescriptor = liftIO . evalContT $ do
let inst = adapterInst adapter
deviceMVar <- newEmptyMVar
callback <- mkDeviceCallback (\d _ -> putMVar deviceMVar d)
deviceExtras_ptr <- rawPtr deviceDescriptor
nextInChain_ptr <- rawPtr (PtrChain WGPUSType.DeviceExtras deviceExtras_ptr)
deviceDescriptor_ptr <-
withCZeroingAfter $
WGPUDeviceDescriptor.WGPUDeviceDescriptor
{ nextInChain = nextInChain_ptr
}
RawFun.wgpuAdapterRequestDevice
(wgpuHsInstance inst)
(wgpuAdapter adapter)
deviceDescriptor_ptr
callback
nullPtr
device <- takeMVar deviceMVar
freeHaskellFunPtr callback
pure $ case device of
WGPUDevice ptr | ptr == nullPtr -> Nothing
WGPUDevice _ -> Just (Device inst device)
mkDeviceCallback ::
(MonadIO m) =>
(WGPUDevice -> Ptr () -> IO ()) ->
m WGPURequestDeviceCallback
mkDeviceCallback = liftIO . mkDeviceCallbackIO
foreign import ccall "wrapper"
mkDeviceCallbackIO ::
(WGPUDevice -> Ptr () -> IO ()) -> IO WGPURequestDeviceCallback
|
0ce51c0c11706ec5d56c1be0f8e90fb965c03a3e2f272243d122e1077ebf530b | ekmett/guanxi | Cont.hs | # LANGUAGE CPP #
{-# LANGUAGE RankNTypes #-}
# LANGUAGE PatternSynonyms #
# LANGUAGE ViewPatterns #
# LANGUAGE FlexibleInstances #
# LANGUAGE UndecidableInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
-- |
Copyright : ( c ) 2018
License : BSD-2 - Clause OR Apache-2.0
Maintainer : < >
-- Stability : experimental
-- Portability: non-portable
module Logic.Cont where
import Control.Applicative
import Control.Monad
import Control.Monad.Error.Class
import Control.Monad.Fail as Fail
import Control.Monad.Primitive
import Control.Monad.Reader
import Control.Monad.State.Class
import Data.Foldable (fold)
import Data.Functor.Identity
import Logic.Class
import Unaligned.Base
newtype LogicT m a = LogicT
{ runLogicT :: forall r. (a -> m r -> m r) -> m r -> m r
}
type Logic = LogicT Identity
runLogic :: Logic a -> forall r. (a -> r -> r) -> r -> r
runLogic l s f = runIdentity $ runLogicT l (fmap . s) (Identity f)
pattern Logic :: (forall r . (a -> r -> r) -> r -> r) -> Logic a
pattern Logic f <- (runLogic -> f)
where Logic f = LogicT $ \ k -> Identity . f (\ a -> runIdentity . k a . Identity) . runIdentity
instance Functor (LogicT f) where
fmap f lt = LogicT $ \sk fk -> runLogicT lt (sk . f) fk
instance Applicative (LogicT f) where
pure a = LogicT $ \sk fk -> sk a fk
f <*> a =
LogicT $ \sk fk -> runLogicT f (\g fk' -> runLogicT a (sk . g) fk') fk
instance Alternative (LogicT f) where
empty = LogicT $ \_ fk -> fk
f1 <|> f2 = LogicT $ \sk fk -> runLogicT f1 sk (runLogicT f2 sk fk)
instance Monad (LogicT m) where
return = pure
m >>= f = LogicT $ \sk fk -> runLogicT m (\a fk' -> runLogicT (f a) sk fk') fk
#if __GLASGOW_HASKELL__ < 808
fail _ = LogicT $ \_ fk -> fk
#endif
instance MonadFail (LogicT m) where
fail _ = LogicT $ \_ fk -> fk
instance MonadPlus (LogicT m) where
mzero = empty
mplus = (<|>)
instance MonadTrans LogicT where
lift m = LogicT $ \sk fk -> m >>= \a -> sk a fk
instance MonadIO m => MonadIO (LogicT m) where
liftIO = lift . liftIO
instance Monad m => MonadLogic (LogicT m) where
msplit m = lift $ runLogicT m ssk (return Empty)
where ssk a fk = return $ a :&: (lift fk >>= reflect)
instance (Monad m, Foldable m) => Foldable (LogicT m) where
foldMap f m = fold $ runLogicT m (fmap . mappend . f) (return mempty)
instance Traversable (LogicT Identity) where
traverse g l = runLogic l (\a ft -> c <$> g a <*> ft) (pure mzero)
where c a l' = return a `mplus` l'
instance MonadReader r m => MonadReader r (LogicT m) where
ask = lift ask
local f m = LogicT $ \sk fk -> runLogicT m ((local f .) . sk) (local f fk)
instance MonadState s m => MonadState s (LogicT m) where
get = lift get
put = lift . put
instance MonadError e m => MonadError e (LogicT m) where
throwError = lift . throwError
catchError m h =
LogicT $ \sk fk ->
let handle r = r `catchError` \e -> runLogicT (h e) sk fk
in handle $ runLogicT m (\a -> sk a . handle) fk
instance PrimMonad m => PrimMonad (LogicT m) where
type PrimState (LogicT m) = PrimState m
primitive f = lift (primitive f)
observe :: Logic a -> a
observe lt = runIdentity $ runLogicT lt (const . return) (error "No answer.")
observeAll :: Logic a -> [a]
observeAll = runIdentity . observeAllT
observeMany :: Int -> Logic a -> [a]
observeMany i = runIdentity . observeManyT i
observeT :: MonadFail m => LogicT m a -> m a
observeT lt = runLogicT lt (const . return) (Fail.fail "No answer.")
observeAllT :: Monad m => LogicT m a -> m [a]
observeAllT m = runLogicT m (fmap . (:)) (return [])
observeManyT :: Monad m => Int -> LogicT m a -> m [a]
observeManyT n m
| n <= 0 = return []
| n == 1 = runLogicT m (\a _ -> return [a]) (return [])
| otherwise = runLogicT (msplit m) sk (return []) where
sk Empty _ = return []
sk (a :&: m') _ = (a :) `liftM` observeManyT (n - 1) m'
| null | https://raw.githubusercontent.com/ekmett/guanxi/e267f4210a9c10d0091371ea9b028b7d6fa8b9f3/src/Logic/Cont.hs | haskell | # LANGUAGE RankNTypes #
|
Stability : experimental
Portability: non-portable | # LANGUAGE CPP #
# LANGUAGE PatternSynonyms #
# LANGUAGE ViewPatterns #
# LANGUAGE FlexibleInstances #
# LANGUAGE UndecidableInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE TypeFamilies #
Copyright : ( c ) 2018
License : BSD-2 - Clause OR Apache-2.0
Maintainer : < >
module Logic.Cont where
import Control.Applicative
import Control.Monad
import Control.Monad.Error.Class
import Control.Monad.Fail as Fail
import Control.Monad.Primitive
import Control.Monad.Reader
import Control.Monad.State.Class
import Data.Foldable (fold)
import Data.Functor.Identity
import Logic.Class
import Unaligned.Base
newtype LogicT m a = LogicT
{ runLogicT :: forall r. (a -> m r -> m r) -> m r -> m r
}
type Logic = LogicT Identity
runLogic :: Logic a -> forall r. (a -> r -> r) -> r -> r
runLogic l s f = runIdentity $ runLogicT l (fmap . s) (Identity f)
pattern Logic :: (forall r . (a -> r -> r) -> r -> r) -> Logic a
pattern Logic f <- (runLogic -> f)
where Logic f = LogicT $ \ k -> Identity . f (\ a -> runIdentity . k a . Identity) . runIdentity
instance Functor (LogicT f) where
fmap f lt = LogicT $ \sk fk -> runLogicT lt (sk . f) fk
instance Applicative (LogicT f) where
pure a = LogicT $ \sk fk -> sk a fk
f <*> a =
LogicT $ \sk fk -> runLogicT f (\g fk' -> runLogicT a (sk . g) fk') fk
instance Alternative (LogicT f) where
empty = LogicT $ \_ fk -> fk
f1 <|> f2 = LogicT $ \sk fk -> runLogicT f1 sk (runLogicT f2 sk fk)
instance Monad (LogicT m) where
return = pure
m >>= f = LogicT $ \sk fk -> runLogicT m (\a fk' -> runLogicT (f a) sk fk') fk
#if __GLASGOW_HASKELL__ < 808
fail _ = LogicT $ \_ fk -> fk
#endif
instance MonadFail (LogicT m) where
fail _ = LogicT $ \_ fk -> fk
instance MonadPlus (LogicT m) where
mzero = empty
mplus = (<|>)
instance MonadTrans LogicT where
lift m = LogicT $ \sk fk -> m >>= \a -> sk a fk
instance MonadIO m => MonadIO (LogicT m) where
liftIO = lift . liftIO
instance Monad m => MonadLogic (LogicT m) where
msplit m = lift $ runLogicT m ssk (return Empty)
where ssk a fk = return $ a :&: (lift fk >>= reflect)
instance (Monad m, Foldable m) => Foldable (LogicT m) where
foldMap f m = fold $ runLogicT m (fmap . mappend . f) (return mempty)
instance Traversable (LogicT Identity) where
traverse g l = runLogic l (\a ft -> c <$> g a <*> ft) (pure mzero)
where c a l' = return a `mplus` l'
instance MonadReader r m => MonadReader r (LogicT m) where
ask = lift ask
local f m = LogicT $ \sk fk -> runLogicT m ((local f .) . sk) (local f fk)
instance MonadState s m => MonadState s (LogicT m) where
get = lift get
put = lift . put
instance MonadError e m => MonadError e (LogicT m) where
throwError = lift . throwError
catchError m h =
LogicT $ \sk fk ->
let handle r = r `catchError` \e -> runLogicT (h e) sk fk
in handle $ runLogicT m (\a -> sk a . handle) fk
instance PrimMonad m => PrimMonad (LogicT m) where
type PrimState (LogicT m) = PrimState m
primitive f = lift (primitive f)
observe :: Logic a -> a
observe lt = runIdentity $ runLogicT lt (const . return) (error "No answer.")
observeAll :: Logic a -> [a]
observeAll = runIdentity . observeAllT
observeMany :: Int -> Logic a -> [a]
observeMany i = runIdentity . observeManyT i
observeT :: MonadFail m => LogicT m a -> m a
observeT lt = runLogicT lt (const . return) (Fail.fail "No answer.")
observeAllT :: Monad m => LogicT m a -> m [a]
observeAllT m = runLogicT m (fmap . (:)) (return [])
observeManyT :: Monad m => Int -> LogicT m a -> m [a]
observeManyT n m
| n <= 0 = return []
| n == 1 = runLogicT m (\a _ -> return [a]) (return [])
| otherwise = runLogicT (msplit m) sk (return []) where
sk Empty _ = return []
sk (a :&: m') _ = (a :) `liftM` observeManyT (n - 1) m'
|
4a0560dade787c29d667430078faf3dc7dc0556029bff78f874ab90e13f71556 | 8thlight/hyperion | types.clj | (ns hyperion.postgres.types
(:require [chee.coerce :refer [->keyword]]
[hyperion.coerce]
[hyperion.api :refer [unpack pack]]))
(defmethod pack clojure.lang.Keyword [_ value]
(->keyword value))
(defmethod unpack clojure.lang.Keyword [_ value]
(->keyword value))
| null | https://raw.githubusercontent.com/8thlight/hyperion/b1b8f60a5ef013da854e98319220b97920727865/postgres/src/hyperion/postgres/types.clj | clojure | (ns hyperion.postgres.types
(:require [chee.coerce :refer [->keyword]]
[hyperion.coerce]
[hyperion.api :refer [unpack pack]]))
(defmethod pack clojure.lang.Keyword [_ value]
(->keyword value))
(defmethod unpack clojure.lang.Keyword [_ value]
(->keyword value))
| |
835f5728e1b7274c44f67c792aabb186d44afde7fe3efe0fdc75ac8ad7bfeff9 | hyperfiddle/electric | missionary_test.cljc | (ns hyperfiddle.missionary-test
(:require [missionary.core :as m]
[hyperfiddle.rcf :refer [tests tap % with]])
(:import (missionary Cancelled)))
(tests
"flow cancel before transfer"
(def !x (atom 0))
(def >x (m/watch !x))
(def !it (>x (fn [] (tap ::notify))
(fn [] (tap ::terminate))))
% := ::notify
(!it)
@!it thrown? Cancelled
% := ::terminate)
(tests
"pentagram of death - via Kenny Tilton"
(def !aa (atom 1))
(def !a7 (atom 7))
(with
((m/reactor
(let [<aa (m/signal! (m/watch !aa))
<a7 (m/signal! (m/watch !a7))
<a70 (m/signal! (m/latest (partial * 10) <a7))
<bb (m/signal! <aa)
<cc (m/signal! (m/latest (partial * 10) <aa))
<dd (m/signal! (m/cp
(try
(if (even? (m/?< <bb))
(* 10 (m/?< <cc)) 42)
(catch Cancelled _))))
<ee (m/signal! (m/latest + <a70 <bb (m/latest (partial * 10000) <dd)))]
(m/stream!
(m/ap
(m/amb=
(tap {'aa (m/?< <aa)})
(tap {'a7 (m/?< <a7)})
(tap {'a70 (m/?< <a70)})
(tap {'bb (m/?< <bb)})
(tap {'cc (m/?< <cc)})
(tap {'dd (m/?< <dd)})
(tap {'ee (m/?< <ee)}))))))
tap tap)
% := {'ee 420071}
% := {'dd 42}
% := {'cc 10}
% := {'bb 1}
% := {'a70 70}
% := {'a7 7}
% := {'aa 1}
(swap! !aa inc)
% := {'aa 2}
% := {'bb 2}
% := {'cc 20}
% := {'dd 200}
% := {'ee 2000072}
(swap! !aa inc)
% := {'aa 3}
% := {'bb 3}
% := {'cc 30}
% := {'dd 42}
% := {'ee 420073}))
| null | https://raw.githubusercontent.com/hyperfiddle/electric/1c6c3891cbf13123fef8d33e6555d300f0dac134/test/hyperfiddle/missionary_test.cljc | clojure | (ns hyperfiddle.missionary-test
(:require [missionary.core :as m]
[hyperfiddle.rcf :refer [tests tap % with]])
(:import (missionary Cancelled)))
(tests
"flow cancel before transfer"
(def !x (atom 0))
(def >x (m/watch !x))
(def !it (>x (fn [] (tap ::notify))
(fn [] (tap ::terminate))))
% := ::notify
(!it)
@!it thrown? Cancelled
% := ::terminate)
(tests
"pentagram of death - via Kenny Tilton"
(def !aa (atom 1))
(def !a7 (atom 7))
(with
((m/reactor
(let [<aa (m/signal! (m/watch !aa))
<a7 (m/signal! (m/watch !a7))
<a70 (m/signal! (m/latest (partial * 10) <a7))
<bb (m/signal! <aa)
<cc (m/signal! (m/latest (partial * 10) <aa))
<dd (m/signal! (m/cp
(try
(if (even? (m/?< <bb))
(* 10 (m/?< <cc)) 42)
(catch Cancelled _))))
<ee (m/signal! (m/latest + <a70 <bb (m/latest (partial * 10000) <dd)))]
(m/stream!
(m/ap
(m/amb=
(tap {'aa (m/?< <aa)})
(tap {'a7 (m/?< <a7)})
(tap {'a70 (m/?< <a70)})
(tap {'bb (m/?< <bb)})
(tap {'cc (m/?< <cc)})
(tap {'dd (m/?< <dd)})
(tap {'ee (m/?< <ee)}))))))
tap tap)
% := {'ee 420071}
% := {'dd 42}
% := {'cc 10}
% := {'bb 1}
% := {'a70 70}
% := {'a7 7}
% := {'aa 1}
(swap! !aa inc)
% := {'aa 2}
% := {'bb 2}
% := {'cc 20}
% := {'dd 200}
% := {'ee 2000072}
(swap! !aa inc)
% := {'aa 3}
% := {'bb 3}
% := {'cc 30}
% := {'dd 42}
% := {'ee 420073}))
| |
15bbf509a831af087025c9266444302fd9c3b3b22e1e51aac5df685a0efbb566 | spawnfest/eep49ers | complex2.erl | -module(complex2).
-export([start/1, stop/0, init/1]).
-export([foo/1, bar/1]).
start(ExtPrg) ->
spawn(?MODULE, init, [ExtPrg]).
stop() ->
complex ! stop.
foo(X) ->
call_port({foo, X}).
bar(Y) ->
call_port({bar, Y}).
call_port(Msg) ->
complex ! {call, self(), Msg},
receive
{complex, Result} ->
Result
end.
init(ExtPrg) ->
register(complex, self()),
process_flag(trap_exit, true),
Port = open_port({spawn, ExtPrg}, [{packet, 2}, binary]),
loop(Port).
loop(Port) ->
receive
{call, Caller, Msg} ->
Port ! {self(), {command, term_to_binary(Msg)}},
receive
{Port, {data, Data}} ->
Caller ! {complex, binary_to_term(Data)}
end,
loop(Port);
stop ->
Port ! {self(), close},
receive
{Port, closed} ->
exit(normal)
end;
{'EXIT', Port, Reason} ->
exit(port_terminated)
end.
| null | https://raw.githubusercontent.com/spawnfest/eep49ers/d1020fd625a0bbda8ab01caf0e1738eb1cf74886/system/doc/tutorial/complex2.erl | erlang | -module(complex2).
-export([start/1, stop/0, init/1]).
-export([foo/1, bar/1]).
start(ExtPrg) ->
spawn(?MODULE, init, [ExtPrg]).
stop() ->
complex ! stop.
foo(X) ->
call_port({foo, X}).
bar(Y) ->
call_port({bar, Y}).
call_port(Msg) ->
complex ! {call, self(), Msg},
receive
{complex, Result} ->
Result
end.
init(ExtPrg) ->
register(complex, self()),
process_flag(trap_exit, true),
Port = open_port({spawn, ExtPrg}, [{packet, 2}, binary]),
loop(Port).
loop(Port) ->
receive
{call, Caller, Msg} ->
Port ! {self(), {command, term_to_binary(Msg)}},
receive
{Port, {data, Data}} ->
Caller ! {complex, binary_to_term(Data)}
end,
loop(Port);
stop ->
Port ! {self(), close},
receive
{Port, closed} ->
exit(normal)
end;
{'EXIT', Port, Reason} ->
exit(port_terminated)
end.
| |
1f4be140fa9a11c1bf143c6bdd9ffd1d5a939139833ffca1580904ce6f50204c | otakar-smrz/elixir-fm | B.hs |
module Elixir.Data.Moony.Regular.B (section) where
import Elixir.Lexicon
lexicon = include section
cluster_1 = cluster
|> "b r ^s" <| [
FuCL `noun` {- <bur^s> -} [ ['m','a','t'] ]
`plural` HaFCAL,
HaFCaL `adj` {- <'abra^s> -} [ ['s','p','o','t','t','e','d'], ['s','p','e','c','k','l','e','d'] ],
FaCAL `noun` {- <barA^s> -} [ ['B','a','r','a','s','h'] ] ]
cluster_2 = cluster
|> ['b','A','r','A','^','s','U','t'] <| [
_____ `noun` {- <bArA^sUt> -} [ ['p','a','r','a','c','h','u','t','e'] ] ]
cluster_3 = cluster
|> "b r ^s m" <| [
KaRDaS `verb` {- <bar^sam> -} [ ['s','t','a','r','e'], ['g','a','z','e'] ],
KaRDaS `verb` {- <bar^sam> -} [ ['r','i','v','e','t'], ['h','e','m'] ],
KaRDaS |< aT `noun` {- <bar^samaT> -} [ ['r','i','v','e','t','i','n','g'] ],
KuRDAS |< aT `noun` {- <bur^sAmaT> -} [ ['r','i','v','e','t'] ],
KuRDAS `noun` {- <bur^sAm> -} [ unwords [ ['c','o','m','m','u','n','i','o','n'], ['w','a','f','e','r'] ] ],
KuRDAS |<< "^g" |< Iy `noun` {- <bur^sAm^gIy> -} [ ['r','i','v','e','t','e','r'] ],
KuRDAS |<< "^g" |< Iy |< aT `noun` {- <bur^sAm^gIyaT> -} [ ['r','i','v','e','t','i','n','g'] ],
KaRDUS |< Iy `noun` {- <bar^sUmIy> -} [ unwords [ ['p','r','i','c','k','l','y'], ['p','e','a','r'] ] ] ]
cluster_4 = cluster
|> ['b','i','r','i','^','s','t'] <| [
_____ `noun` {- <biri^st> -} [ unwords [ ['b','i','r','i','s','h','t'], "(", ['s','o','f','t'], "-", ['b','o','i','l','e','d'], ['e','g','g'], ")" ] ] ]
cluster_5 = cluster
|> "b r .s" <| [
FaCiL `verb` {- <bari.s> -} [ unwords [ ['h','a','v','e'], ['l','e','p','r','o','s','y'] ] ]
`imperf` FCaL,
FuCL `noun` {- <bur.s> -} [ ['g','e','c','k','o'] ],
FaCaL `noun` {- <bara.s> -} [ ['l','e','p','r','o','s','y'] ],
HaFCaL `adj` {- <'abra.s> -} [ ['l','e','p','r','o','u','s'], ['l','e','p','e','r'] ] ]
cluster_6 = cluster
|> "b r .d" <| [
FaCaL `verb` {- <bara.d> -} [ ['g','e','r','m','i','n','a','t','e'], ['s','p','r','o','u','t'] ]
`imperf` FCuL,
FuCUL `noun` {- <burU.d> -} [ ['g','e','r','m','i','n','a','t','i','o','n'], ['s','p','r','o','u','t','i','n','g'] ] ]
cluster_7 = cluster
|> "b r .t z" <| [
KaRDUS `noun` {- <bar.tUz> -} [ unwords [ ['c','r','e','w','\'','s'], ['q','u','a','r','t','e','r','s'] ] ] ]
cluster_8 = cluster
|> "b r .t `" <| [
KaRDaS `verb` {- <bar.ta`> -} [ ['g','a','l','l','o','p'] ] ]
cluster_9 = cluster
|> "b r .t l" <| [
KaRDaS `verb` {- <bar.tal> -} [ ['b','r','i','b','e'], ['c','o','r','r','u','p','t'] ],
TaKaRDaS `verb` {- <tabar.tal> -} [ unwords [ ['t','a','k','e'], ['b','r','i','b','e','s'] ], unwords [ ['b','e'], ['c','o','r','r','u','p','t','e','d'] ] ],
KiRDIS `noun` {- <bir.tIl> -} [ ['b','r','i','b','e'] ]
`plural` KaRADIS ]
cluster_10 = cluster
|> "b r .t m" <| [
KaRDaS `verb` {- <bar.tam> -} [ unwords [ ['t','a','l','k'], ['n','o','n','s','e','n','s','e'] ] ],
KuRDUS `noun` {- <bur.tUm> -} [ unwords [ ['e','l','e','p','h','a','n','t'], ['t','r','u','n','k'] ] ],
KaRDUS `noun` {- <bar.tUm> -} [ unwords [ ['e','l','e','p','h','a','n','t'], ['t','r','u','n','k'] ] ] ]
cluster_11 = cluster
|> "b r `" <| [
FaCaL `verb` {- <bara`> -} [ ['e','x','c','e','l'], unwords [ ['b','e'], ['d','i','s','t','i','n','g','u','i','s','h','e','d'] ] ]
`imperf` FCaL,
< baru ` >
`imperf` FCuL,
FACaL `verb` {- <bAra`> -} [ ['s','t','r','i','v','e'], ['w','o','r','k'] ],
TaFaCCaL `verb` {- <tabarra`> -} [ ['c','o','n','t','r','i','b','u','t','e'], ['d','o','n','a','t','e'], ['g','i','v','e'] ],
FaCAL |< aT `noun` {- <barA`aT> -} [ ['s','k','i','l','l'], ['p','r','o','f','i','c','i','e','n','c','y'] ],
FuCUL |< aT `noun` {- <burU`aT> -} [ ['e','m','i','n','e','n','c','e'] ],
HaFCaL `adj` {- <'abra`> -} [ unwords [ ['m','o','r','e'], "/", ['m','o','s','t'], ['e','m','i','n','e','n','t'] ], unwords [ ['m','o','r','e'], "/", ['m','o','s','t'], ['s','k','i','l','l','f','u','l'] ] ],
TaFaCCuL `noun` {- <tabarru`> -} [ ['d','o','n','a','t','i','o','n'], ['c','o','n','t','r','i','b','u','t','i','o','n'] ]
`plural` TaFaCCuL |< At,
< ` >
MutaFaCCiL `noun` {- <mutabarri`> -} [ ['d','o','n','o','r'], ['c','o','n','t','r','i','b','u','t','o','r'] ]
`plural` MutaFaCCiL |< Un
`femini` MutaFaCCiL |< aT ]
cluster_12 = cluster
|> "b r ` m" <| [
KaRDaS `verb` {- <bar`am> -} [ ['b','u','r','g','e','o','n'], ['b','u','d'] ],
TaKaRDaS `verb` {- <tabar`am> -} [ ['b','u','r','g','e','o','n'], ['b','u','d'] ],
KuRDuS `noun` {- <bur`um> -} [ ['b','u','d'], ['b','l','o','s','s','o','m'] ]
`plural` KaRADiS,
KuRDUS `noun` {- <bur`Um> -} [ ['b','u','d'], ['b','l','o','s','s','o','m'] ]
`plural` KaRADIS,
TaKaRDuS `noun` {- <tabar`um> -} [ ['g','e','m','m','a','t','i','o','n'], ['b','u','d','d','i','n','g'], ['b','l','o','s','s','o','m','i','n','g'] ]
`plural` TaKaRDuS |< At ]
cluster_13 = cluster
|> "b r .g _t" <| [
KaRDUS `noun` {- <bar.gU_t> -} [ ['f','l','e','a'] ]
`plural` KaRADIS,
KaRDUS |< Iy `adj` {- <bar.gU_tIy> -} [ ['B','a','r','g','h','o','u','t','h','i'] ] ]
cluster_14 = cluster
|> "b r .g ^s" <| [
KaRDaS `noun` {- <bar.ga^s> -} [ ['g','n','a','t'], ['m','i','d','g','e'] ] ]
cluster_15 = cluster
|> "b r .g l" <| [
KuRDuS `noun` {- <bur.gul> -} [ ['b','u','l','g','u','r'] ] ]
cluster_16 = cluster
|> "b r f r" <| [
KiRDIS `noun` {- <birfIr> -} [ ['p','u','r','p','l','e'] ]
`plural` KaRADIS ]
cluster_17 = cluster
|> "b r q" <| [
< baraq >
`imperf` FCuL,
HaFCaL `verb` {- <'abraq> -} [ ['f','l','a','s','h'], ['s','h','i','n','e'] ],
< barq >
FuCUL `noun` {- <burUq> -} [ ['l','i','g','h','t','n','i','n','g'], ['f','l','a','s','h'] ],
FaCL |< Iy `adj` {- <barqIy> -} [ ['t','e','l','e','g','r','a','p','h'], ['t','e','l','e','g','r','a','p','h','i','c'] ],
FaCL |< Iy |< aT `noun` {- <barqIyaT> -} [ ['t','e','l','e','g','r','a','m'] ],
FaCIL `noun` {- <barIq> -} [ ['g','l','i','t','t','e','r'] ]
`plural` FaCA'iL,
FuCAL `noun` {- <burAq> -} [ ['B','u','r','a','q'] ],
FayCaL `noun` {- <bayraq> -} [ ['b','a','n','n','e','r'] ]
`plural` FayACiL,
FayCaL |<< "dAr" `noun` {- <bayraqdAr> -} [ unwords [ ['f','l','a','g'], "-", ['b','e','a','r','e','r'] ] ],
FaCCAL `adj` {- <barrAq> -} [ ['s','h','i','n','i','n','g'], ['f','l','a','s','h','i','n','g'] ],
< mabraq >
FACiL `noun` {- <bAriq> -} [ ['g','l','i','m','p','s','e'], ['g','l','i','t','t','e','r'], ['g','l','e','a','m'], ['t','w','i','n','k','l','e'] ]
`plural` FawACiL,
MuFCiL `adj` {- <mubriq> -} [ ['t','e','l','e','t','y','p','e'] ] ]
cluster_18 = cluster
|> "b r q ^s" <| [
KaRDaS `verb` {- <barqa^s> -} [ ['e','m','b','e','l','l','i','s','h'], ['v','a','r','i','e','g','a','t','e'] ],
TaKaRDaS `verb` {- <tabarqa^s> -} [ unwords [ ['b','e'], ['e','m','b','e','l','l','i','s','h','e','d'] ], unwords [ ['b','e'], ['v','a','r','i','e','g','a','t','e','d'] ] ],
< >
`plural` KaRADiS,
KaRDaS |< aT `noun` {- <barqa^saT> -} [ ['v','a','r','i','e','g','a','t','i','o','n'], ['c','o','l','o','r','f','u','l'] ],
MuKaRDaS `adj` {- <mubarqa^s> -} [ ['m','u','l','t','i','c','o','l','o','r','e','d'], ['v','a','r','i','e','g','a','t','e','d'] ] ]
cluster_19 = cluster
|> "b r q `" <| [
KaRDaS `verb` {- <barqa`> -} [ ['v','e','i','l'], ['d','r','a','p','e'] ],
TaKaRDaS `verb` {- <tabarqa`> -} [ unwords [ ['b','e'], ['v','e','i','l','e','d'] ] ],
KuRDuS `noun` {- <burqu`> -} [ ['v','e','i','l'] ]
`plural` KaRADiS ]
cluster_20 = cluster
|> ['b','I','r','U','q','r','A','.','t'] <| [
_____ |< Iy `adj` {- <bIrUqrA.tIy> -} [ ['b','u','r','e','a','u','c','r','a','t','i','c'] ],
_____ |< Iy `noun` {- <bIrUqrA.tIy> -} [ ['b','u','r','e','a','u','c','r','a','t'] ]
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT,
< >
cluster_21 = cluster
|> "b r k" <| [
< >
`imperf` FCuL,
FaCCaL `verb` {- <barrak> -} [ unwords [ ['m','a','k','e'], ['k','n','e','e','l'] ] ],
FACaL `verb` {- <bArak> -} [ ['b','l','e','s','s'], ['c','o','n','g','r','a','t','u','l','a','t','e'] ],
HaFCaL `verb` {- <'abrak> -} [ unwords [ ['m','a','k','e'], ['k','n','e','e','l'] ] ],
TaFaCCaL `verb` {- <tabarrak> -} [ unwords [ ['b','e'], ['b','l','e','s','s','e','d'] ], unwords [ ['b','e'], ['p','r','o','s','p','e','r','o','u','s'] ] ],
TaFACaL `verb` {- <tabArak> -} [ unwords [ ['b','e'], ['b','l','e','s','s','e','d'] ] ],
IstaFCaL `verb` {- <istabrak> -} [ unwords [ ['b','e'], ['b','l','e','s','s','e','d'] ] ],
FiCL |< aT `noun` {- <birkaT> -} [ ['p','o','o','l'] ]
`plural` FiCaL,
FaCaL |< aT `noun` {- <barakaT> -} [ ['b','l','e','s','s','i','n','g'] ]
`plural` FaCaL |< At,
FaCaL |< At `noun` {- <barakAt> -} [ ['B','a','r','a','k','a','a','t'], ['B','a','r','a','k','a','t'] ],
HaFCaL `adj` {- <'abrak> -} [ unwords [ ['m','o','r','e'], "/", ['m','o','s','t'], ['b','l','e','s','s','e','d'] ] ],
MaFCUL `intj` {- <mabrUk> -} [ unwords [ ['c','o','n','g','r','a','t','u','l','a','t','i','o','n','s'], "!" ] ],
MaFCUL `adj` {- <mabrUk> -} [ ['b','l','e','s','s','e','d'] ],
TaFCIL `noun` {- <tabrIk> -} [ ['b','l','e','s','s','i','n','g'], ['b','e','n','e','d','i','c','t','i','o','n'] ]
`plural` TaFCIL |< At,
MuFACaL `noun` {- <mubArak> -} [ ['M','u','b','a','r','a','k'] ],
MuFACaL `adj` {- <mubArak> -} [ ['b','l','e','s','s','e','d'], ['f','o','r','t','u','n','a','t','e'] ],
MuFACaL |< Iy `adj` {- <mubArakIy> -} [ ['M','o','u','b','a','r','k','i'] ] ]
|> ['b','a','r','A','r','I','k'] <| [
_____ `noun` {- <barArIk> -} [ ['b','a','r','r','a','c','k','s'] ] ]
cluster_22 = cluster
|> "b r k r" <| [
KiRDAS `noun` {- <birkAr> -} [ ['c','o','m','p','a','s','s'], ['d','i','v','i','d','e','r','s'] ] ]
cluster_23 = cluster
|> "b r k n" <| [
KuRDAS `noun` {- <burkAn> -} [ ['v','o','l','c','a','n','o'] ]
`plural` KaRADIS,
KuRDAS |< Iy `adj` {- <burkAnIy> -} [ ['v','o','l','c','a','n','i','c'] ] ]
cluster_24 = cluster
|> "b r m" <| [
FaCiL `verb` {- <barim> -} [ unwords [ ['b','e'], ['d','i','s','c','o','n','t','e','n','t','e','d'] ], unwords [ ['b','e'], ['b','o','r','e','d'] ] ]
`imperf` FCaL,
FaCaL `verb` {- <baram> -} [ ['b','r','a','i','d'], ['t','w','i','s','t'] ]
`imperf` FCuL,
FaCCaL `verb` {- <barram> -} [ ['t','w','i','s','t'], ['t','w','i','n','e'] ],
HaFCaL `verb` {- <'abram> -} [ ['c','o','n','c','l','u','d','e'], ['r','a','t','i','f','y'] ],
TaFaCCaL `verb` {- <tabarram> -} [ unwords [ ['b','e'], ['b','o','r','e','d'] ], unwords [ ['b','e'], ['f','e','d'], ['u','p'] ] ],
InFaCaL `verb` {- <inbaram> -} [ unwords [ ['b','e'], ['s','e','t','t','l','e','d'] ], unwords [ ['b','e'], ['t','w','i','s','t','e','d'] ] ],
FaCiL `adj` {- <barim> -} [ ['w','e','a','r','y'], ['t','i','r','e','d'] ],
FaCIL `noun` {- <barIm> -} [ ['r','o','p','e'], ['t','w','i','n','e'] ],
FaCCAL |< aT `noun` {- <barrAmaT> -} [ ['d','r','i','l','l'], unwords [ ['d','r','i','l','l','i','n','g'], ['m','a','c','h','i','n','e'] ] ],
FuCL |< aT `noun` {- <burmaT> -} [ unwords [ ['e','a','r','t','h','e','n','w','a','r','e'], ['p','o','t'] ] ]
`plural` FiCAL
`plural` FuCaL,
HiFCAL `noun` {- <'ibrAm> -} [ ['r','a','t','i','f','i','c','a','t','i','o','n'], ['c','o','n','c','l','u','s','i','o','n'] ]
`plural` HiFCAL |< At,
TaFaCCuL `noun` {- <tabarrum> -} [ ['b','o','r','e','d','o','m'], ['d','i','s','s','a','t','i','s','f','a','c','t','i','o','n'] ]
`plural` TaFaCCuL |< At,
MaFCUL `adj` {- <mabrUm> -} [ ['c','r','o','o','k','e','d'], ['t','w','i','s','t','e','d'] ],
MuFCiL `noun` {- <mubrim> -} [ ['c','o','n','c','l','u','s','i','o','n'], ['r','a','t','i','f','i','c','a','t','i','o','n'] ]
`plural` MuFCiL |< At,
MuFCaL `adj` {- <mubram> -} [ ['c','o','n','c','l','u','d','e','d'], ['r','a','t','i','f','i','e','d'], ['e','s','t','a','b','l','i','s','h','e','d'], ['i','r','r','e','v','o','c','a','b','l','e'] ],
MutaFaCCiL `adj` {- <mutabarrim> -} [ ['a','n','n','o','y','e','d'] ] ]
cluster_25 = cluster
|> "b r m ^g" <| [
"barnAma^g" `noun` {- <barnAma^g> -} [ ['p','r','o','g','r','a','m'] ]
`plural` "barAmi^g" ]
|> "b r m ^g" <| [
KaRDaS `verb` {- <barma^g> -} [ ['p','r','o','g','r','a','m'] ],
TaKaRDaS `verb` {- <tabarma^g> -} [ unwords [ ['b','e'], ['p','r','o','g','r','a','m','m','e','d'] ] ],
KaRDaS |< aT `noun` {- <barma^gaT> -} [ ['p','r','o','g','r','a','m','m','i','n','g'] ],
MuKaRDaS `adj` {- <mubarma^g> -} [ ['p','r','o','g','r','a','m','m','e','d'], ['s','c','h','e','d','u','l','e','d'] ],
MuKaRDiS `noun` {- <mubarmi^g> -} [ ['p','r','o','g','r','a','m','m','e','r'] ]
`plural` MuKaRDiS |< Un
`femini` MuKaRDiS |< aT,
KaRDaS |< Iy `adj` {- <barma^gIy> -} [ ['p','r','o','g','r','a','m','m','i','n','g'], ['s','o','f','t','w','a','r','e'] ],
KaRDaS |< Iy |< At `noun` {- <barma^gIyAt> -} [ ['s','o','f','t','w','a','r','e'] ]
`plural` KaRDaS |< Iy |< At
`limited` "-------P--" ]
cluster_26 = cluster
|> "b r m q" <| [
KaRDaS `noun` {- <barmaq> -} [ ['b','a','l','u','s','t','e','r'], ['p','o','s','t'], ['s','p','i','k','e'] ]
`plural` KaRADiS ]
cluster_27 = cluster
|> "b r m l" <| [
KaRDIS `noun` {- <barmIl> -} [ ['b','a','r','r','e','l'] ]
`plural` KaRADIS ]
cluster_28 = cluster
|> ['b','A','r','U','n'] <| [
_____ `noun` {- <bArUn> -} [ ['b','a','r','o','n'] ] ]
cluster_29 = cluster
|> "b r n" <| [
FaCL |< Iy |< aT `noun` {- <barnIyaT> -} [ unwords [ ['c','l','a','y'], ['v','e','s','s','e','l'] ] ]
`plural` FaCALI ]
cluster_30 = cluster
|> "b r n z" <| [
KaRDaS `verb` {- <barnaz> -} [ ['b','r','o','n','z','e'] ],
KaRDaS |< aT `noun` {- <barnazaT> -} [ ['b','r','o','n','z','i','n','g'] ],
MuKaRDaS `adj` {- <mubarnaz> -} [ ['b','r','o','n','z','e','d'] ] ]
cluster_31 = cluster
|> ['b','a','r','A','n','i','s'] <| [
_____ `noun` {- <barAnis> -} [ ['P','y','r','e','n','e','e','s'] ] ]
|> "b r n s" <| [
KuRDuS `noun` {- <burnus> -} [ ['b','u','r','n','o','o','s','e'] ]
`plural` KaRADiS,
KuRDUS `noun` {- <burnUs> -} [ ['b','u','r','n','o','o','s','e'] ]
`plural` KaRADIS ]
cluster_32 = cluster
|> "b r n .t" <| [
TaKaRDaS `verb` {- <tabarna.t> -} [ unwords [ ['w','e','a','r'], "/", ['p','u','t'], ['o','n'], "a", "(", ['W','e','s','t','e','r','n'], ")", ['h','a','t'] ] ] ]
cluster_33 = cluster
|> "b r n q" <| [
KaRDaS `verb` {- <barnaq> -} [ ['v','a','r','n','i','s','h'] ],
KaRDaS |< aT `noun` {- <barnaqaT> -} [ ['v','a','r','n','i','s','h','i','n','g'] ] ]
cluster_34 = cluster
|> ['b','u','r','u','n','^','g','u','k'] <| [
_____ `noun` {- <burun^guk> -} [ ['g','a','u','z','e'], ['c','r','e','p','e'] ] ]
cluster_35 = cluster
|> ['b','U','r','u','n','d'] <| [
< bUrundI >
_____ |< Iy `adj` {- <bUrundIy> -} [ ['B','u','r','u','n','d','i','a','n'] ] ]
cluster_36 = cluster
|> ['b','u','r','U','n','z'] <| [
< burUnz >
_____ |< Iy `adj` {- <burUnzIy> -} [ ['b','r','o','n','z','e'] ] ]
cluster_37 = cluster
|> "b r h" <| [
FuCL |< aT `noun` {- <burhaT> -} [ ['i','n','s','t','a','n','t'], ['m','o','m','e','n','t'] ]
`plural` FuCaL |< At,
FuCayL |< aT `noun` {- <burayhaT> -} [ unwords [ "a", ['l','i','t','t','l','e'], ['w','h','i','l','e'] ] ],
FuCL |< Iy `adj` {- <burhIy> -} [ ['m','o','m','e','n','t','a','r','i','l','y'] ] ]
cluster_38 = cluster
|> "b r h m" <| [
"barhaman" `noun` {- <barhaman> -} [ ['B','r','a','h','m','a','n'] ]
`plural` "barAhim" |< aT ]
|> ['b','a','r','a','h','m','A'] <| [
_____ `noun` {- <barahmA> -} [ ['B','r','a','h','m','a'] ] ]
|> ['b','a','r','h','a','m'] <| [
_____ |< aT `noun` {- <barhamaT> -} [ ['B','r','a','h','m','a','n','i','s','m'] ],
_____ |< Iy `adj` {- <barhamIy> -} [ ['B','r','a','h','m','a','n'] ],
_____ |< Iy |< aT `noun` {- <barhamIyaT> -} [ ['B','r','a','h','m','a','n','i','s','m'] ] ]
cluster_39 = cluster
|> "b r h n" <| [
KaRDaS `verb` {- <barhan> -} [ ['p','r','o','v','e'], ['d','e','m','o','n','s','t','r','a','t','e'] ],
KaRDaS |< aT `noun` {- <barhanaT> -} [ ['p','r','o','v','i','n','g'], ['d','e','m','o','n','s','t','r','a','t','i','o','n'] ],
KuRDAS `noun` {- <burhAn> -} [ ['B','u','r','h','a','n'] ],
KuRDAS `noun` {- <burhAn> -} [ ['p','r','o','o','f'] ]
`plural` KaRADIS ]
cluster_40 = cluster
|> ['b','U','r','t','U','r','I','k'] <| [
_____ |<< "U" `xtra` {- <bUrtUrIkU> -} [ unwords [ ['P','u','e','r','t','o'], ['R','i','c','o'] ] ],
_____ |< Iy `adj` {- <bUrtUrIkIy> -} [ unwords [ ['P','u','e','r','t','o'], ['R','i','c','a','n'] ] ],
_____ |< Iy `noun` {- <bUrtUrIkIy> -} [ unwords [ ['P','u','e','r','t','o'], ['R','i','c','a','n'] ] ]
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT ]
cluster_41 = cluster
|> ['b','u','r','t','U','.','g','A','l'] <| [
al >| _____ `xtra` {- <al-burtU.gAl> -} [ ['P','o','r','t','u','g','a','l'] ],
_____ |< Iy `adj` {- <burtU.gAlIy> -} [ ['P','o','r','t','u','g','u','e','s','e'] ],
_____ |< Iy `noun` {- <burtU.gAlIy> -} [ ['P','o','r','t','u','g','u','e','s','e'] ]
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT ]
|> ['b','u','r','t','u','.','g','A','l'] <| [
al >| _____ `xtra` {- <al-burtu.gAl> -} [ ['P','o','r','t','u','g','a','l'] ],
_____ |< Iy `adj` {- <burtu.gAlIy> -} [ ['P','o','r','t','u','g','u','e','s','e'] ],
_____ |< Iy `noun` {- <burtu.gAlIy> -} [ ['P','o','r','t','u','g','u','e','s','e'] ]
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT ]
cluster_42 = cluster
|> ['b','u','r','t','u','q','A','l'] <| [
_____ `noun` {- <burtuqAl> -} [ ['o','r','a','n','g','e'] ]
`excepts` Triptote,
_____ |< Iy `adj` {- <burtuqAlIy> -} [ ['o','r','a','n','g','e'] ] ]
cluster_43 = cluster
|> ['b','i','r','^','g','A','s'] <| [
_____ `noun` {- <bir^gAs> -} [ unwords [ ['b','i','r','j','a','s'], "(", ['e','q','u','e','s','t','r','i','a','n'], ['j','o','u','s','t'], ")" ] ] ]
cluster_44 = cluster
|> ['b','i','r','^','g','I','s'] <| [
_____ `noun` {- <bir^gIs> -} [ ['J','u','p','i','t','e','r'] ] ]
cluster_45 = cluster
|> ['b','a','r','d','a','q','U','^','s'] <| [
_____ `noun` {- <bardaqU^s> -} [ ['m','a','r','j','o','r','a','m'] ] ]
cluster_46 = cluster
|> ['b','a','r','r','I','m'] <| [
_____ |< aT `noun` {- <barrImaT> -} [ ['d','r','i','l','l'], ['a','u','g','e','r'], ['b','i','t'] ],
_____ |< Iy |< aT `noun` {- <barrImIyaT> -} [ unwords [ ['d','r','i','l','l'], "-", ['s','h','a','p','e','d'] ], ['s','p','i','r','o','c','h','e','t','e'] ] ]
cluster_47 = cluster
|> ['b','a','r','^','s','i','l','U','n'] <| [
_____ |< aT `noun` {- <bar^silUnaT> -} [ ['B','a','r','c','e','l','o','n','a'] ] ]
cluster_48 = cluster
|> ['b','U','r','.','s'] <| [
_____ |< aT `noun` {- <bUr.saT> -} [ unwords [ ['s','t','o','c','k'], ['e','x','c','h','a','n','g','e'] ], ['b','o','u','r','s','e'] ]
`plural` _____ |< At ]
|> ['b','u','r','.','s'] <| [
_____ |< aT `noun` {- <bur.saT> -} [ unwords [ ['s','t','o','c','k'], ['e','x','c','h','a','n','g','e'] ], ['b','o','u','r','s','e'] ]
`plural` _____ |< At ]
cluster_49 = cluster
|> ['b','a','r','.','t','a','m','A','n'] <| [
_____ `noun` {- <bar.tamAn> -} [ unwords [ ['g','l','a','s','s'], ['j','a','r'] ] ],
_____ `noun` {- <bar.tamAn> -} [ ['a','p','a','r','t','m','e','n','t'] ] ]
cluster_50 = cluster
|> ['b','i','r','.','g','a','m','U','t'] <| [
_____ `noun` {- <bir.gamUt> -} [ ['b','e','r','g','a','m','o','t'] ] ]
cluster_51 = cluster
|> ['b','a','r','q'] <| [
_____ |< aT `noun` {- <barqaT> -} [ ['C','y','r','e','n','a','i','c','a'] ] ]
cluster_52 = cluster
|> ['b','a','r','l','a','m','A','n'] <| [
_____ `noun` {- <barlamAn> -} [ ['p','a','r','l','i','a','m','e','n','t'] ]
`plural` _____ |< At,
_____ |< Iy `adj` {- <barlamAnIy> -} [ ['p','a','r','l','i','a','m','e','n','t','a','r','y'] ],
_____ |< Iy |< aT `noun` {- <barlamAnIyaT> -} [ ['p','a','r','l','i','a','m','e','n','t','a','r','i','s','m'] ] ]
cluster_53 = cluster
|> ['b','U','r','m'] <| [
_____ |<< "A" `xtra` {- <bUrmA> -} [ ['B','u','r','m','a'] ],
_____ |< Iy `adj` {- <bUrmIy> -} [ ['B','u','r','m','e','s','e'] ],
_____ |< Iy `noun` {- <bUrmIy> -} [ ['B','u','r','m','e','s','e'] ]
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT ]
|> ['b','u','r','m'] <| [
_____ |<< "A" `xtra` {- <burmA> -} [ ['B','u','r','m','a'] ],
_____ |< Iy `adj` {- <burmIy> -} [ ['B','u','r','m','e','s','e'] ] ]
cluster_54 = cluster
|> ['b','a','r','m','U','d'] <| [
_____ |< aT `noun` {- <barmUdaT> -} [ ['B','a','r','m','u','d','a','h'] ] ]
|> ['b','i','r','m','U','d'] <| [
_____ |< Iy `adj` {- <birmUdIy> -} [ ['B','e','r','m','u','d','a','n'] ] ]
cluster_55 = cluster
|> "b z b z" <| [
KaRDUS `noun` {- <bazbUz> -} [ ['n','o','z','z','l','e'], ['s','p','o','u','t'] ]
`plural` KaRADIS ]
cluster_56 = cluster
|> ['b','A','z','U','b','a','n','d'] <| [
_____ `noun` {- <bAzUband> -} [ ['b','r','a','c','e','l','e','t'] ] ]
cluster_57 = cluster
|> "b z r" <| [
FaCaL `verb` {- <bazar> -} [ ['s','o','w'], ['s','p','i','c','e'] ]
`imperf` FCiL,
FaCCaL `verb` {- <bazzar> -} [ ['s','o','w'], ['s','p','i','c','e'] ],
< bizr >
`plural` FuCUL
`plural` FiCL |< At,
< bizr >
`plural` HaFCAL
`plural` HaFACIL,
FaCCAL `noun` {- <bazzAr> -} [ ['s','e','e','d','m','a','n'] ]
`plural` FaCCAL |< Un
`femini` FaCCAL |< aT,
FuCayL |< aT `noun` {- <buzayraT> -} [ ['s','p','o','r','e'] ] ]
cluster_58 = cluster
|> ['b','A','z','A','r'] <| [
_____ `noun` {- <bAzAr> -} [ ['b','a','z','a','a','r'] ]
`plural` _____ |< At ]
cluster_59 = cluster
|> "b z .g" <| [
FaCaL `verb` {- <baza.g> -} [ ['r','i','s','e'], ['a','p','p','e','a','r'] ]
`imperf` FCuL,
FuCUL `noun` {- <buzU.g> -} [ ['r','i','s','e'], ['a','p','p','e','a','r','a','n','c','e'] ] ]
cluster_60 = cluster
|> "b z q" <| [
FaCaL `verb` {- <bazaq> -} [ ['s','p','i','t'] ]
`imperf` FCuL,
< bazq >
FuCAL `noun` {- <buzAq> -} [ ['s','p','i','t','t','l','e'], ['s','a','l','i','v','a'] ],
FaCCAL |< aT `noun` {- <bazzAqaT> -} [ ['c','o','b','r','a'], ['s','n','a','i','l'] ],
FaCCAL |< aT `noun` {- <bazzAqaT> -} [ ['s','p','i','t','t','o','o','n'] ],
MiFCaL |< aT `noun` {- <mibzaqaT> -} [ ['s','p','i','t','t','o','o','n'] ]
`plural` MaFACiL ]
cluster_61 = cluster
|> "b z l" <| [
FaCaL `verb` {- <bazal> -} [ ['p','i','e','r','c','e'], ['p','u','n','c','t','u','r','e'] ]
`imperf` FCuL,
FaCL `noun` {- <bazl> -} [ ['p','u','n','c','t','u','r','e'], ['p','a','r','a','c','e','n','t','e','s','i','s'] ],
FuCAL `noun` {- <buzAl> -} [ ['t','a','p'], ['s','p','i','g','o','t'], ['f','a','u','c','e','t'] ],
< >
`plural` MaFACiL ]
cluster_62 = cluster
|> ['b','A','z','a','l','t'] <| [
_____ `noun` {- <bAzalt> -} [ ['b','a','s','a','l','t'] ] ]
cluster_63 = cluster
|> ['b','i','z','a','n','.','t'] <| [
_____ |< Iy `adj` {- <bizan.tIy> -} [ ['B','y','z','a','n','t','i','n','e'] ] ]
|> ['b','I','z','a','n','.','t'] <| [
_____ |< iyA `noun` {- <bIzan.tiyA> -} [ ['B','y','z','a','n','t','i','u','m'] ],
_____ |< Iy `adj` {- <bIzan.tIy> -} [ ['B','y','z','a','n','t','i','n','e'] ],
_____ |< Iy `noun` {- <bIzan.tIy> -} [ ['B','y','z','a','n','t','i','n','e'] ]
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT ]
cluster_64 = cluster
|> ['b','i','z','m','U','t'] <| [
_____ `noun` {- <bizmUt> -} [ unwords [ ['b','i','s','m','u','t','h'], "(", ['m','e','t','a','l','l','i','c'], ['e','l','e','m','e','n','t'], ")" ] ] ]
cluster_65 = cluster
|> "b s b s" <| [
KaRDAS `noun` {- <basbAs> -} [ ['m','a','c','e'], ['f','e','n','n','e','l'] ],
KaRDUS |< aT `noun` {- <basbUsaT> -} [ unwords [ ['b','a','s','b','o','u','s','a','h'], "(", ['s','e','m','o','l','i','n','a'], ['c','a','k','e'], ")" ] ]
`plural` KaRADiS ]
cluster_66 = cluster
|> "b s t r" <| [
KaRDaS `verb` {- <bastar> -} [ ['p','a','s','t','e','u','r','i','z','e'] ],
TaKaRDaS `verb` {- <tabastar> -} [ unwords [ ['b','e'], ['p','a','s','t','e','u','r','i','z','e','d'] ] ],
KaRDaS |< aT `noun` {- <bastaraT> -} [ ['p','a','s','t','e','u','r','i','z','a','t','i','o','n'] ],
MuKaRDaS `adj` {- <mubastar> -} [ ['p','a','s','t','e','u','r','i','z','e','d'] ] ]
cluster_67 = cluster
|> "b s r" <| [
FaCaL `verb` {- <basar> -} [ ['s','c','o','w','l'], ['f','r','o','w','n'] ]
`imperf` FCuL,
IFtaCaL `verb` {- <ibtasar> -} [ unwords [ ['b','e'], ['r','a','s','h'] ], unwords [ ['b','e'], ['p','r','e','m','a','t','u','r','e'] ] ],
FuCUL `noun` {- <busUr> -} [ ['s','c','o','w','l','i','n','g'], ['f','r','o','w','n','i','n','g'] ],
FuCL `noun` {- <busr> -} [ unwords [ ['u','n','r','i','p','e'], ['d','a','t','e'] ] ]
`plural` FuCL |< At
`plural` FiCAL,
FACUL `noun` {- <bAsUr> -} [ ['h','e','m','o','r','r','h','o','i','d','s'] ]
`plural` FawACIL ]
cluster_68 = cluster
|> "b s .t" <| [
FaCaL `verb` {- <basa.t> -} [ ['s','p','r','e','a','d'], ['e','x','t','e','n','d'] ]
`imperf` FCuL,
FaCuL `verb` {- <basu.t> -} [ unwords [ ['b','e'], ['s','i','m','p','l','e'] ] ]
`imperf` FCuL,
FaCCaL `verb` {- <bassa.t> -} [ ['s','p','r','e','a','d'], ['s','i','m','p','l','i','f','y'] ],
FACaL `verb` {- <bAsa.t> -} [ unwords [ ['b','e'], ['s','i','n','c','e','r','e'] ], unwords [ ['s','p','e','a','k'], ['o','p','e','n','l','y'] ] ],
TaFaCCaL `verb` {- <tabassa.t> -} [ unwords [ ['b','e'], ['s','i','m','p','l','e'] ], unwords [ ['b','e'], ['f','r','a','n','k'] ] ],
InFaCaL `verb` {- <inbasa.t> -} [ unwords [ ['h','a','v','e'], ['f','u','n'] ], unwords [ ['b','e'], ['h','a','p','p','y'] ], ['s','t','r','e','t','c','h'], unwords [ ['s','p','r','e','a','d'], ['o','u','t'] ] ],
FaCL `noun` {- <bas.t> -} [ ['s','p','r','e','a','d','i','n','g'], ['e','x','t','e','n','s','i','o','n'] ],
FaCL |< aT `noun` {- <bas.taT> -} [ ['e','x','t','e','n','s','i','o','n'], ['e','x','p','o','s','i','t','i','o','n'] ]
`plural` FaCaL |< At,
FiCAL `noun` {- <bisA.t> -} [ ['d','a','i','s'], ['p','l','a','t','f','o','r','m'] ],
FiCAL `noun` {- <bisA.t> -} [ ['c','a','r','p','e','t'] ]
`plural` FuCuL
`plural` FiCAL |< At
`plural` HaFCiL |< aT,
FaCIL `adj` {- <basI.t> -} [ ['s','i','m','p','l','e'], ['p','l','a','i','n'] ]
`plural` FuCaLA',
FaCIL |< aT `noun` {- <basI.taT> -} [ unwords [ ['n','o'], ['p','r','o','b','l','e','m'] ], ['o','k','a','y'] ],
FaCA'iL `noun` {- <basA'i.t> -} [ unwords [ ['b','a','s','i','c'], ['f','a','c','t','s'] ], ['e','l','e','m','e','n','t','s'] ]
`plural` FaCA'iL
`limited` "-------P--",
< basA.taT >
HuFCUL |< aT `noun` {- <'ubsU.taT> -} [ unwords [ ['w','h','e','e','l'], ['r','i','m'] ] ]
`plural` HaFACIL,
HaFCaL `adj` {- <'absa.t> -} [ unwords [ ['s','i','m','p','l','e','r'], "/", ['s','i','m','p','l','e','s','t'] ], unwords [ ['m','o','s','t'], ['b','a','s','i','c'] ] ],
TaFCIL `noun` {- <tabsI.t> -} [ ['s','i','m','p','l','i','f','i','c','a','t','i','o','n'] ]
`plural` TaFCIL |< At,
TaFaCCuL `noun` {- <tabassu.t> -} [ ['c','a','n','d','o','r'], ['f','r','a','n','k','n','e','s','s'] ]
`plural` TaFaCCuL |< At,
InFiCAL `noun` {- <inbisA.t> -} [ ['c','h','e','e','r','f','u','l','n','e','s','s'] ]
`plural` InFiCAL |< At,
InFiCAL `noun` {- <inbisA.t> -} [ ['e','x','t','e','n','s','i','o','n'] ]
`plural` InFiCAL |< At,
InFiCAL |< aT `noun` {- <inbisA.taT> -} [ ['e','x','t','e','n','s','i','o','n'] ],
FACiL `noun` {- <bAsi.t> -} [ ['B','a','s','i','t'] ],
MaFCUL `adj` {- <mabsU.t> -} [ ['c','h','e','e','r','f','u','l'] ],
MunFaCiL `adj` {- <munbasi.t> -} [ ['c','h','e','e','r','f','u','l'] ],
MunFaCaL `noun` {- <munbasa.t> -} [ ['f','l','a','t'], ['l','e','v','e','l'] ] ]
cluster_69 = cluster
|> "b s q" <| [
FaCaL `verb` {- <basaq> -} [ unwords [ ['b','e'], ['t','a','l','l'] ], ['e','x','c','e','l'] ]
`imperf` FCuL,
< bAsiq >
MuFCiL `adj` {- <mubsiq> -} [ ['l','o','f','t','y'], ['t','o','w','e','r','i','n','g'] ] ]
cluster_70 = cluster
|> "b s l" <| [
FaCuL `verb` {- <basul> -} [ unwords [ ['b','e'], ['b','r','a','v','e'] ], unwords [ ['b','e'], ['i','n','t','r','e','p','i','d'] ] ]
`imperf` FCuL,
TaFaCCaL `verb` {- <tabassal> -} [ ['s','c','o','w','l'], unwords [ ['b','e'], ['b','r','a','v','e'] ] ],
IstaFCaL `verb` {- <istabsal> -} [ unwords [ ['b','e'], ['c','o','u','r','a','g','e','o','u','s'] ] ],
FaCAL |< aT `noun` {- <basAlaT> -} [ ['c','o','u','r','a','g','e'] ],
< bAsil >
< bAsil >
`plural` FuCaLA'
`plural` FACiL |< Un
`plural` FawACiL,
IstiFCAL `noun` {- <istibsAl> -} [ ['c','o','u','r','a','g','e'] ]
`plural` IstiFCAL |< At,
MustaFCiL `adj` {- <mustabsil> -} [ ['f','e','a','r','l','e','s','s'], ['i','n','t','r','e','p','i','d'] ] ]
cluster_71 = cluster
|> ['b','A','s','I','l'] <| [
_____ `noun` {- <bAsIl> -} [ ['b','a','c','i','l','l','i'] ] ]
cluster_72 = cluster
|> "b s m" <| [
FaCaL `verb` {- <basam> -} [ ['s','m','i','l','e'] ]
`imperf` FCiL,
TaFaCCaL `verb` {- <tabassam> -} [ ['s','m','i','l','e'] ],
IFtaCaL `verb` {- <ibtasam> -} [ ['s','m','i','l','e'] ],
FaCL `noun` {- <basm> -} [ ['s','m','i','l','i','n','g'] ],
FaCL |< aT `noun` {- <basmaT> -} [ ['s','m','i','l','e'], ['s','m','i','l','i','n','g'] ]
`plural` FaCaL |< At,
FACiL `noun` {- <bAsim> -} [ ['B','a','s','e','m'], ['B','a','s','i','m'] ],
FACiL `adj` {- <bAsim> -} [ ['s','m','i','l','i','n','g'] ],
FaCCAL `noun` {- <bassAm> -} [ ['B','a','s','s','a','m'] ],
FaCCAL `adj` {- <bassAm> -} [ ['s','m','i','l','i','n','g'] ],
FaCIL `noun` {- <basIm> -} [ ['B','a','s','e','e','m'] ],
FaCIL |< aT `noun` {- <basImaT> -} [ ['B','a','s','e','e','m','a'] ],
MaFCiL `noun` {- <mabsim> -} [ ['m','o','u','t','h','p','i','e','c','e'] ]
`plural` MaFACiL,
IFtiCAL `noun` {- <ibtisAm> -} [ ['I','b','t','i','s','a','m'] ],
IFtiCAL `noun` {- <ibtisAm> -} [ ['s','m','i','l','i','n','g'] ],
IFtiCAL |< aT `noun` {- <ibtisAmaT> -} [ ['s','m','i','l','e'] ]
`plural` IFtiCAL |< At ]
cluster_73 = cluster
|> "b s n" <| [
FuCayL |< aT `noun` {- <busaynaT> -} [ ['k','i','t','t','y'] ] ]
cluster_74 = cluster
|> ['b','I','s','U','n'] <| [
_____ `noun` {- <bIsUn> -} [ ['b','i','s','o','n'] ] ]
cluster_75 = cluster
|> ['b','A','s','b','U','r'] <| [
_____ `noun` {- <bAsbUr> -} [ ['p','a','s','s','p','o','r','t'] ]
`plural` _____ |< At ]
cluster_76 = cluster
|> ['b','A','s','t','I','l'] <| [
_____ `noun` {- <bAstIl> -} [ ['p','a','s','t','e','l'] ] ]
cluster_77 = cluster
|> ['b','a','s','t','U','n'] <| [
_____ |< Iy `adj` {- <bastUnIy> -} [ ['s','p','a','d','e','s'] ] ]
|> "b s t n" <| [
KiRDUS `noun` {- <bistUn> -} [ ['p','i','s','t','o','n'] ]
`plural` KiRDUS |< At
`plural` KaRADiS ]
|> "b s t n" <| [
KuRDAS `noun` {- <bustAn> -} [ ['g','a','r','d','e','n'] ]
`plural` KaRADIS,
KuRDAS |< Iy `noun` {- <bustAnIy> -} [ ['B','o','u','s','t','a','n','i'] ],
KuRDAS |< Iy `noun` {- <bustAnIy> -} [ ['g','a','r','d','e','n','e','r'] ]
`plural` KuRDAS |< Iy |< Un
`femini` KuRDAS |< Iy |< aT,
KuRDAS |< Iy `adj` {- <bustAnIy> -} [ unwords [ ['o','f'], ['t','h','e'], ['g','a','r','d','e','n'] ] ],
KaRDaS |< aT `noun` {- <bastanaT> -} [ ['g','a','r','d','e','n','i','n','g'] ] ]
cluster_78 = cluster
|> ['b','a','s','_','h'] <| [
_____ |< aT `noun` {- <bas_haT> -} [ ['E','a','s','t','e','r'], unwords [ ['P','a','s','s','i','o','n'], ['W','e','e','k'] ] ] ]
cluster_79 = cluster
|> ['b','U','s','.','t'] <| [
_____ |< aT `noun` {- <bUs.taT> -} [ ['m','a','i','l'] ],
_____ |<< "a" |<< "^g" |< Iy `noun` {- <bUs.ta^gIy> -} [ ['m','a','i','l','m','a','n'] ]
`plural` _____ |<< "a" |<< "^g" |< Iy |< Un,
_____ |<< "a" |<< "^g" |< Iy |< aT `noun` {- <bUs.ta^gIyaT> -} [ ['m','a','i','l','w','o','m','a','n'] ]
`plural` _____ |<< "a" |<< "^g" |< Iy |< At ]
cluster_80 = cluster
|> ['b','a','s','.','t','u','r','m'] <| [
< >
cluster_81 = cluster
|> ['b','U','s','f','U','r'] <| [
_____ `noun` {- <bUsfUr> -} [ ['B','o','s','p','h','o','r','u','s'], ['B','o','s','p','o','r','u','s'] ] ]
|> ['b','u','s','f','U','r'] <| [
_____ `noun` {- <busfUr> -} [ ['B','o','s','p','h','o','r','u','s'], ['B','o','s','p','o','r','u','s'] ] ]
cluster_82 = cluster
|> ['b','A','s','k'] <| [
< bAskIy >
`plural` _____,
< bAskIy >
`plural` _____
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT ]
cluster_83 = cluster
|> ['b','i','s','k','U','t'] <| [
_____ `noun` {- <biskUt> -} [ ['b','i','s','c','u','i','t'] ] ]
cluster_84 = cluster
|> ['b','i','s','k','i','l','I','t'] <| [
_____ `noun` {- <biskilIt> -} [ ['b','i','c','y','c','l','e'] ]
`plural` _____ |< At ]
cluster_85 = cluster
|> ['b','U','s','n'] <| [
_____ |< Iy `adj` {- <bUsnIy> -} [ ['B','o','s','n','i','a','n'] ],
_____ |< Iy `noun` {- <bUsnIy> -} [ ['B','o','s','n','i','a','n'] ]
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT,
_____ |< aT `noun` {- <bUsnaT> -} [ ['B','o','s','n','i','a'] ] ]
cluster_86 = cluster
|> ['b','A','^','s'] <| [
_____ `noun` {- <bA^s> -} [ ['c','h','i','e','f'] ],
_____ |<< "A" `noun` {- <bA^sA> -} [ ['p','a','s','h','a'] ]
`plural` _____ |<< "A" |< At,
_____ |<< "A" |< Iy `adj` {- <bA^sawIy> -} [ unwords [ ['r','a','n','k'], ['o','f'], ['p','a','s','h','a'] ] ] ]
cluster_87 = cluster
|> "b ^s t" <| [
FuCL `noun` {- <bu^st> -} [ ['c','l','o','a','k'] ],
FiCL |< aT `noun` {- <bi^staT> -} [ ['c','l','o','a','k'] ] ]
cluster_88 = cluster
|> "b ^s r" <| [
FaCaL `verb` {- <ba^sar> -} [ ['r','e','j','o','i','c','e'] ]
`imperf` FCiL,
FaCiL `verb` {- <ba^sir> -} [ ['r','e','j','o','i','c','e'] ]
`imperf` FCaL,
FaCCaL `verb` {- <ba^s^sar> -} [ ['a','u','g','u','r'], ['e','v','a','n','g','e','l','i','z','e'] ],
HaFCaL `verb` {- <'ab^sar> -} [ ['r','e','j','o','i','c','e'] ],
IstaFCaL `verb` {- <istab^sar> -} [ ['r','e','j','o','i','c','e'], ['w','e','l','c','o','m','e'] ],
FiCL `noun` {- <bi^sr> -} [ ['j','o','y'] ],
FuCL `noun` {- <bu^sr> -} [ unwords [ ['g','o','o','d'], ['n','e','w','s'] ] ],
FuCLY `noun` {- <bu^srY> -} [ unwords [ ['g','o','o','d'], ['n','e','w','s'] ] ]
`plural` FuCLY |< At,
FuCL |< aT `noun` {- <bu^sraT> -} [ ['B','u','s','h','r','a'] ],
FuCLY `noun` {- <bu^srY> -} [ ['B','u','s','h','r','a'] ],
FaCIL `noun` {- <ba^sIr> -} [ ['B','a','s','h','i','r'] ],
FaCIL |< aT `noun` {- <ba^sIraT> -} [ ['B','a','s','h','i','r','a'] ],
FaCIL `noun` {- <ba^sIr> -} [ ['h','e','r','a','l','d'], ['m','e','s','s','e','n','g','e','r'], ['e','v','a','n','g','e','l','i','s','t'] ]
`plural` FuCaLA',
FaCCAL `noun` {- <ba^s^sAr> -} [ ['B','a','s','h','s','h','a','r'] ],
FiCAL |< aT `noun` {- <bi^sAraT> -} [ ['B','i','s','h','a','r','a'] ],
FiCAL |< aT `noun` {- <bi^sAraT> -} [ unwords [ ['g','o','o','d'], ['n','e','w','s'] ] ]
`plural` FaCA'iL
`plural` FiCAL |< At,
TaFCIL `noun` {- <tab^sIr> -} [ ['e','v','a','n','g','e','l','i','z','a','t','i','o','n'] ]
`plural` TaFCIL |< At,
TaFCIL `noun` {- <tab^sIr> -} [ ['a','n','n','o','u','n','c','e','m','e','n','t'] ]
`plural` TaFCIL |< At,
TaFCIL |< Iy `adj` {- <tab^sIrIy> -} [ ['m','i','s','s','i','o','n','a','r','y'] ],
TaFACIL `noun` {- <tabA^sIr> -} [ unwords [ ['f','i','r','s','t'], ['s','i','g','n','s'] ], ['p','r','e','c','u','r','s','o','r','s'] ]
`plural` TaFACIL
`limited` "-------P--",
MuFaCCiL `noun` {- <muba^s^sir> -} [ ['m','i','s','s','i','o','n','a','r','y'], ['a','n','n','o','u','n','c','e','r'] ]
`plural` MuFaCCiL |< Un
`femini` MuFaCCiL |< aT,
MustaFCiL `adj` {- <mustab^sir> -} [ ['h','a','p','p','y'], ['c','h','e','e','r','f','u','l'] ],
MaFCUL `adj` {- <mab^sUr> -} [ ['g','r','a','t','e','d'], ['s','h','r','e','d','d','e','d'] ] ]
|> "b ^s r" <| [
FaCaL `verb` {- <ba^sar> -} [ ['p','e','e','l'], ['s','c','r','a','p','e'] ]
`imperf` FCuL,
FACaL `verb` {- <bA^sar> -} [ unwords [ ['e','m','b','a','r','k'], ['u','p','o','n'] ], ['p','r','o','c','e','e','d'] ],
MuFACiL `adj` {- <mubA^sir> -} [ ['d','i','r','e','c','t'], ['i','m','m','e','d','i','a','t','e'] ],
MuFACaL |< aT |<< "aN" `noun` {- <mubA^saraTaN> -} [ ['d','i','r','e','c','t','l','y'], ['i','m','m','e','d','i','a','t','e','l','y'] ],
MuFACaL |< aT `noun` {- <mubA^saraT> -} [ ['b','e','g','i','n','n','i','n','g'], ['p','u','r','s','u','i','t'] ],
FaCaL |< aT `noun` {- <ba^saraT> -} [ ['e','p','i','d','e','r','m','i','s'] ],
MiFCaL |< aT `noun` {- <mib^saraT> -} [ ['s','c','r','a','p','e','r'], ['g','r','a','t','e','r'] ]
`plural` MaFACiL,
FaCaL |< Iy |< aT `noun` {- <ba^sarIyaT> -} [ ['h','u','m','a','n','k','i','n','d'], ['m','a','n','k','i','n','d'] ],
FaCaL |< Iy `adj` {- <ba^sarIy> -} [ ['h','u','m','a','n'] ],
FaCaL `noun` {- <ba^sar> -} [ ['m','a','n','k','i','n','d'] ] ]
cluster_89 = cluster
|> ['b','a','^','s','a','r','U','^','s'] <| [
< >
cluster_90 = cluster
|> "b ^s `" <| [
FaCiL `verb` {- <ba^si`> -} [ unwords [ ['b','e'], ['u','g','l','y'] ], unwords [ ['b','e'], ['l','o','a','t','h','s','o','m','e'] ] ]
`imperf` FCaL,
FaCCaL `verb` {- <ba^s^sa`> -} [ unwords [ ['m','a','k','e'], ['u','g','l','y'] ], ['d','i','s','f','i','g','u','r','e'] ],
IstaFCaL `verb` {- <istab^sa`> -} [ unwords [ ['c','o','n','s','i','d','e','r'], ['u','g','l','y'] ] ],
FaCAL |< aT `noun` {- <ba^sA`aT> -} [ ['u','g','l','i','n','e','s','s'], ['r','e','p','u','g','n','a','n','c','e'] ],
FaCiL `adj` {- <ba^si`> -} [ ['u','g','l','y'], ['r','e','p','u','g','n','a','n','t'] ],
FaCIL `adj` {- <ba^sI`> -} [ ['u','g','l','y'], ['r','e','p','u','g','n','a','n','t'] ],
HaFCaL `adj` {- <'ab^sa`> -} [ ['u','g','l','i','e','r'], ['u','g','l','i','e','s','t'] ],
TaFCIL `noun` {- <tab^sI`> -} [ ['d','i','s','f','i','g','u','r','a','t','i','o','n'] ]
`plural` TaFCIL |< At ]
cluster_91 = cluster
|> "b ^s k" <| [
IFtaCaL `verb` {- <ibta^sak> -} [ ['l','i','e'], ['d','e','c','e','i','v','e'] ],
< >
FaCCAL `noun` {- <ba^s^sAk> -} [ ['l','i','a','r'] ]
`plural` FaCCAL |< Un
`femini` FaCCAL |< aT,
IFtiCAL `noun` {- <ibti^sAk> -} [ ['d','e','c','e','i','t'] ]
`plural` IFtiCAL |< At,
FACiL `noun` {- <bA^sik> -} [ unwords [ ['s','p','a','r','r','o','w'], ['h','a','w','k'] ] ]
`plural` FawACiL ]
cluster_92 = cluster
|> "b ^s k r" <| [
KaRDUS `noun` {- <ba^skUr> -} [ unwords [ ['f','i','r','e'], ['i','r','o','n'] ], ['p','o','k','e','r'] ]
`plural` KaRADIS ]
|> "b ^s k r" <| [
KaRDIS `noun` {- <ba^skIr> -} [ unwords [ ['b','a','t','h'], ['t','o','w','e','l'] ] ]
`plural` KaRADIS ]
cluster_93 = cluster
|> "b ^s m" <| [
FaCiL `verb` {- <ba^sim> -} [ unwords [ ['b','e'], ['n','a','u','s','e','a','t','e','d'] ], unwords [ ['h','a','v','e'], ['i','n','d','i','g','e','s','t','i','o','n'] ] ]
`imperf` FCaL,
HaFCaL `verb` {- <'ab^sam> -} [ ['n','a','u','s','e','a','t','e'], unwords [ ['g','i','v','e'], ['i','n','d','i','g','e','s','t','i','o','n'] ] ],
FaCaL `noun` {- <ba^sam> -} [ ['i','n','d','i','g','e','s','t','i','o','n'], ['n','a','u','s','e','a'] ] ]
cluster_94 = cluster
|> "b ^s m r" <| [
KaRDAS `noun` {- <ba^smAr> -} [ ['l','a','c','e','w','o','r','k'] ],
KaRADiS |< Iy `noun` {- <ba^sAmirIy> -} [ ['l','a','c','e','w','o','r','k','e','r'] ]
`plural` KaRADiS |< Iy |< Un
`femini` KaRADiS |< Iy |< aT ]
cluster_95 = cluster
|> "b ^s m q" <| [
KaRDaS `noun` {- <ba^smaq> -} [ ['B','a','s','h','m','a','q'] ],
KaRDaS `noun` {- <ba^smaq> -} [ ['s','l','i','p','p','e','r'] ] ]
cluster_96 = cluster
|> "b ^s n" <| [
FaCL |< aT `noun` {- <ba^snaT> -} [ ['s','o','r','g','h','u','m'] ],
FaCLIL `noun` {- <ba^snIn> -} [ ['l','o','t','u','s'] ] ]
cluster_97 = cluster
|> "b ^s n q" <| [
KaRDUS |< aT `noun` {- <ba^snUqaT> -} [ ['k','e','r','c','h','i','e','f'] ]
`plural` KaRADiS ]
cluster_98 = cluster
|> ['b','a','^','s','a','n','s'] <| [
< ba^sans >
cluster_99 = cluster
|> ['b','a','^','s','t','U','n'] <| [
_____ |< Iy `noun` {- <ba^stUnIy> -} [ ['P','a','s','h','t','u','n'] ]
`plural` _____
`femini` _____ |< Iy |< aT,
_____ |< Iy `adj` {- <ba^stUnIy> -} [ ['P','a','s','h','t','u','n'] ]
`plural` _____ ]
cluster_100 = cluster
|> ['b','A','^','s','q','i','r','d'] <| [
_____ `noun` {- <bA^sqird> -} [ ['B','a','s','h','k','i','r'] ] ]
section = [ cluster_1,
cluster_2,
cluster_3,
cluster_4,
cluster_5,
cluster_6,
cluster_7,
cluster_8,
cluster_9,
cluster_10,
cluster_11,
cluster_12,
cluster_13,
cluster_14,
cluster_15,
cluster_16,
cluster_17,
cluster_18,
cluster_19,
cluster_20,
cluster_21,
cluster_22,
cluster_23,
cluster_24,
cluster_25,
cluster_26,
cluster_27,
cluster_28,
cluster_29,
cluster_30,
cluster_31,
cluster_32,
cluster_33,
cluster_34,
cluster_35,
cluster_36,
cluster_37,
cluster_38,
cluster_39,
cluster_40,
cluster_41,
cluster_42,
cluster_43,
cluster_44,
cluster_45,
cluster_46,
cluster_47,
cluster_48,
cluster_49,
cluster_50,
cluster_51,
cluster_52,
cluster_53,
cluster_54,
cluster_55,
cluster_56,
cluster_57,
cluster_58,
cluster_59,
cluster_60,
cluster_61,
cluster_62,
cluster_63,
cluster_64,
cluster_65,
cluster_66,
cluster_67,
cluster_68,
cluster_69,
cluster_70,
cluster_71,
cluster_72,
cluster_73,
cluster_74,
cluster_75,
cluster_76,
cluster_77,
cluster_78,
cluster_79,
cluster_80,
cluster_81,
cluster_82,
cluster_83,
cluster_84,
cluster_85,
cluster_86,
cluster_87,
cluster_88,
cluster_89,
cluster_90,
cluster_91,
cluster_92,
cluster_93,
cluster_94,
cluster_95,
cluster_96,
cluster_97,
cluster_98,
cluster_99,
cluster_100 ]
| null | https://raw.githubusercontent.com/otakar-smrz/elixir-fm/fae5bab6dd53c15d25c1e147e7787b2c254aabf0/Haskell/ElixirFM/Elixir/Data/Moony/Regular/B.hs | haskell | <bur^s>
<'abra^s>
<barA^s>
<bArA^sUt>
<bar^sam>
<bar^sam>
<bar^samaT>
<bur^sAmaT>
<bur^sAm>
<bur^sAm^gIy>
<bur^sAm^gIyaT>
<bar^sUmIy>
<biri^st>
<bari.s>
<bur.s>
<bara.s>
<'abra.s>
<bara.d>
<burU.d>
<bar.tUz>
<bar.ta`>
<bar.tal>
<tabar.tal>
<bir.tIl>
<bar.tam>
<bur.tUm>
<bar.tUm>
<bara`>
<bAra`>
<tabarra`>
<barA`aT>
<burU`aT>
<'abra`>
<tabarru`>
<mutabarri`>
<bar`am>
<tabar`am>
<bur`um>
<bur`Um>
<tabar`um>
<bar.gU_t>
<bar.gU_tIy>
<bar.ga^s>
<bur.gul>
<birfIr>
<'abraq>
<burUq>
<barqIy>
<barqIyaT>
<barIq>
<burAq>
<bayraq>
<bayraqdAr>
<barrAq>
<bAriq>
<mubriq>
<barqa^s>
<tabarqa^s>
<barqa^saT>
<mubarqa^s>
<barqa`>
<tabarqa`>
<burqu`>
<bIrUqrA.tIy>
<bIrUqrA.tIy>
<barrak>
<bArak>
<'abrak>
<tabarrak>
<tabArak>
<istabrak>
<birkaT>
<barakaT>
<barakAt>
<'abrak>
<mabrUk>
<mabrUk>
<tabrIk>
<mubArak>
<mubArak>
<mubArakIy>
<barArIk>
<birkAr>
<burkAn>
<burkAnIy>
<barim>
<baram>
<barram>
<'abram>
<tabarram>
<inbaram>
<barim>
<barIm>
<barrAmaT>
<burmaT>
<'ibrAm>
<tabarrum>
<mabrUm>
<mubrim>
<mubram>
<mutabarrim>
<barnAma^g>
<barma^g>
<tabarma^g>
<barma^gaT>
<mubarma^g>
<mubarmi^g>
<barma^gIy>
<barma^gIyAt>
<barmaq>
<barmIl>
<bArUn>
<barnIyaT>
<barnaz>
<barnazaT>
<mubarnaz>
<barAnis>
<burnus>
<burnUs>
<tabarna.t>
<barnaq>
<barnaqaT>
<burun^guk>
<bUrundIy>
<burUnzIy>
<burhaT>
<burayhaT>
<burhIy>
<barhaman>
<barahmA>
<barhamaT>
<barhamIy>
<barhamIyaT>
<barhan>
<barhanaT>
<burhAn>
<burhAn>
<bUrtUrIkU>
<bUrtUrIkIy>
<bUrtUrIkIy>
<al-burtU.gAl>
<burtU.gAlIy>
<burtU.gAlIy>
<al-burtu.gAl>
<burtu.gAlIy>
<burtu.gAlIy>
<burtuqAl>
<burtuqAlIy>
<bir^gAs>
<bir^gIs>
<bardaqU^s>
<barrImaT>
<barrImIyaT>
<bar^silUnaT>
<bUr.saT>
<bur.saT>
<bar.tamAn>
<bar.tamAn>
<bir.gamUt>
<barqaT>
<barlamAn>
<barlamAnIy>
<barlamAnIyaT>
<bUrmA>
<bUrmIy>
<bUrmIy>
<burmA>
<burmIy>
<barmUdaT>
<birmUdIy>
<bazbUz>
<bAzUband>
<bazar>
<bazzar>
<bazzAr>
<buzayraT>
<bAzAr>
<baza.g>
<buzU.g>
<bazaq>
<buzAq>
<bazzAqaT>
<bazzAqaT>
<mibzaqaT>
<bazal>
<bazl>
<buzAl>
<bAzalt>
<bizan.tIy>
<bIzan.tiyA>
<bIzan.tIy>
<bIzan.tIy>
<bizmUt>
<basbAs>
<basbUsaT>
<bastar>
<tabastar>
<bastaraT>
<mubastar>
<basar>
<ibtasar>
<busUr>
<busr>
<bAsUr>
<basa.t>
<basu.t>
<bassa.t>
<bAsa.t>
<tabassa.t>
<inbasa.t>
<bas.t>
<bas.taT>
<bisA.t>
<bisA.t>
<basI.t>
<basI.taT>
<basA'i.t>
<'ubsU.taT>
<'absa.t>
<tabsI.t>
<tabassu.t>
<inbisA.t>
<inbisA.t>
<inbisA.taT>
<bAsi.t>
<mabsU.t>
<munbasi.t>
<munbasa.t>
<basaq>
<mubsiq>
<basul>
<tabassal>
<istabsal>
<basAlaT>
<istibsAl>
<mustabsil>
<bAsIl>
<basam>
<tabassam>
<ibtasam>
<basm>
<basmaT>
<bAsim>
<bAsim>
<bassAm>
<bassAm>
<basIm>
<basImaT>
<mabsim>
<ibtisAm>
<ibtisAm>
<ibtisAmaT>
<busaynaT>
<bIsUn>
<bAsbUr>
<bAstIl>
<bastUnIy>
<bistUn>
<bustAn>
<bustAnIy>
<bustAnIy>
<bustAnIy>
<bastanaT>
<bas_haT>
<bUs.taT>
<bUs.ta^gIy>
<bUs.ta^gIyaT>
<bUsfUr>
<busfUr>
<biskUt>
<biskilIt>
<bUsnIy>
<bUsnIy>
<bUsnaT>
<bA^s>
<bA^sA>
<bA^sawIy>
<bu^st>
<bi^staT>
<ba^sar>
<ba^sir>
<ba^s^sar>
<'ab^sar>
<istab^sar>
<bi^sr>
<bu^sr>
<bu^srY>
<bu^sraT>
<bu^srY>
<ba^sIr>
<ba^sIraT>
<ba^sIr>
<ba^s^sAr>
<bi^sAraT>
<bi^sAraT>
<tab^sIr>
<tab^sIr>
<tab^sIrIy>
<tabA^sIr>
<muba^s^sir>
<mustab^sir>
<mab^sUr>
<ba^sar>
<bA^sar>
<mubA^sir>
<mubA^saraTaN>
<mubA^saraT>
<ba^saraT>
<mib^saraT>
<ba^sarIyaT>
<ba^sarIy>
<ba^sar>
<ba^si`>
<ba^s^sa`>
<istab^sa`>
<ba^sA`aT>
<ba^si`>
<ba^sI`>
<'ab^sa`>
<tab^sI`>
<ibta^sak>
<ba^s^sAk>
<ibti^sAk>
<bA^sik>
<ba^skUr>
<ba^skIr>
<ba^sim>
<'ab^sam>
<ba^sam>
<ba^smAr>
<ba^sAmirIy>
<ba^smaq>
<ba^smaq>
<ba^snaT>
<ba^snIn>
<ba^snUqaT>
<ba^stUnIy>
<ba^stUnIy>
<bA^sqird> |
module Elixir.Data.Moony.Regular.B (section) where
import Elixir.Lexicon
lexicon = include section
cluster_1 = cluster
|> "b r ^s" <| [
`plural` HaFCAL,
cluster_2 = cluster
|> ['b','A','r','A','^','s','U','t'] <| [
cluster_3 = cluster
|> "b r ^s m" <| [
cluster_4 = cluster
|> ['b','i','r','i','^','s','t'] <| [
cluster_5 = cluster
|> "b r .s" <| [
`imperf` FCaL,
cluster_6 = cluster
|> "b r .d" <| [
`imperf` FCuL,
cluster_7 = cluster
|> "b r .t z" <| [
cluster_8 = cluster
|> "b r .t `" <| [
cluster_9 = cluster
|> "b r .t l" <| [
`plural` KaRADIS ]
cluster_10 = cluster
|> "b r .t m" <| [
cluster_11 = cluster
|> "b r `" <| [
`imperf` FCaL,
< baru ` >
`imperf` FCuL,
`plural` TaFaCCuL |< At,
< ` >
`plural` MutaFaCCiL |< Un
`femini` MutaFaCCiL |< aT ]
cluster_12 = cluster
|> "b r ` m" <| [
`plural` KaRADiS,
`plural` KaRADIS,
`plural` TaKaRDuS |< At ]
cluster_13 = cluster
|> "b r .g _t" <| [
`plural` KaRADIS,
cluster_14 = cluster
|> "b r .g ^s" <| [
cluster_15 = cluster
|> "b r .g l" <| [
cluster_16 = cluster
|> "b r f r" <| [
`plural` KaRADIS ]
cluster_17 = cluster
|> "b r q" <| [
< baraq >
`imperf` FCuL,
< barq >
`plural` FaCA'iL,
`plural` FayACiL,
< mabraq >
`plural` FawACiL,
cluster_18 = cluster
|> "b r q ^s" <| [
< >
`plural` KaRADiS,
cluster_19 = cluster
|> "b r q `" <| [
`plural` KaRADiS ]
cluster_20 = cluster
|> ['b','I','r','U','q','r','A','.','t'] <| [
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT,
< >
cluster_21 = cluster
|> "b r k" <| [
< >
`imperf` FCuL,
`plural` FiCaL,
`plural` FaCaL |< At,
`plural` TaFCIL |< At,
|> ['b','a','r','A','r','I','k'] <| [
cluster_22 = cluster
|> "b r k r" <| [
cluster_23 = cluster
|> "b r k n" <| [
`plural` KaRADIS,
cluster_24 = cluster
|> "b r m" <| [
`imperf` FCaL,
`imperf` FCuL,
`plural` FiCAL
`plural` FuCaL,
`plural` HiFCAL |< At,
`plural` TaFaCCuL |< At,
`plural` MuFCiL |< At,
cluster_25 = cluster
|> "b r m ^g" <| [
`plural` "barAmi^g" ]
|> "b r m ^g" <| [
`plural` MuKaRDiS |< Un
`femini` MuKaRDiS |< aT,
`plural` KaRDaS |< Iy |< At
`limited` "-------P--" ]
cluster_26 = cluster
|> "b r m q" <| [
`plural` KaRADiS ]
cluster_27 = cluster
|> "b r m l" <| [
`plural` KaRADIS ]
cluster_28 = cluster
|> ['b','A','r','U','n'] <| [
cluster_29 = cluster
|> "b r n" <| [
`plural` FaCALI ]
cluster_30 = cluster
|> "b r n z" <| [
cluster_31 = cluster
|> ['b','a','r','A','n','i','s'] <| [
|> "b r n s" <| [
`plural` KaRADiS,
`plural` KaRADIS ]
cluster_32 = cluster
|> "b r n .t" <| [
cluster_33 = cluster
|> "b r n q" <| [
cluster_34 = cluster
|> ['b','u','r','u','n','^','g','u','k'] <| [
cluster_35 = cluster
|> ['b','U','r','u','n','d'] <| [
< bUrundI >
cluster_36 = cluster
|> ['b','u','r','U','n','z'] <| [
< burUnz >
cluster_37 = cluster
|> "b r h" <| [
`plural` FuCaL |< At,
cluster_38 = cluster
|> "b r h m" <| [
`plural` "barAhim" |< aT ]
|> ['b','a','r','a','h','m','A'] <| [
|> ['b','a','r','h','a','m'] <| [
cluster_39 = cluster
|> "b r h n" <| [
`plural` KaRADIS ]
cluster_40 = cluster
|> ['b','U','r','t','U','r','I','k'] <| [
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT ]
cluster_41 = cluster
|> ['b','u','r','t','U','.','g','A','l'] <| [
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT ]
|> ['b','u','r','t','u','.','g','A','l'] <| [
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT ]
cluster_42 = cluster
|> ['b','u','r','t','u','q','A','l'] <| [
`excepts` Triptote,
cluster_43 = cluster
|> ['b','i','r','^','g','A','s'] <| [
cluster_44 = cluster
|> ['b','i','r','^','g','I','s'] <| [
cluster_45 = cluster
|> ['b','a','r','d','a','q','U','^','s'] <| [
cluster_46 = cluster
|> ['b','a','r','r','I','m'] <| [
cluster_47 = cluster
|> ['b','a','r','^','s','i','l','U','n'] <| [
cluster_48 = cluster
|> ['b','U','r','.','s'] <| [
`plural` _____ |< At ]
|> ['b','u','r','.','s'] <| [
`plural` _____ |< At ]
cluster_49 = cluster
|> ['b','a','r','.','t','a','m','A','n'] <| [
cluster_50 = cluster
|> ['b','i','r','.','g','a','m','U','t'] <| [
cluster_51 = cluster
|> ['b','a','r','q'] <| [
cluster_52 = cluster
|> ['b','a','r','l','a','m','A','n'] <| [
`plural` _____ |< At,
cluster_53 = cluster
|> ['b','U','r','m'] <| [
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT ]
|> ['b','u','r','m'] <| [
cluster_54 = cluster
|> ['b','a','r','m','U','d'] <| [
|> ['b','i','r','m','U','d'] <| [
cluster_55 = cluster
|> "b z b z" <| [
`plural` KaRADIS ]
cluster_56 = cluster
|> ['b','A','z','U','b','a','n','d'] <| [
cluster_57 = cluster
|> "b z r" <| [
`imperf` FCiL,
< bizr >
`plural` FuCUL
`plural` FiCL |< At,
< bizr >
`plural` HaFCAL
`plural` HaFACIL,
`plural` FaCCAL |< Un
`femini` FaCCAL |< aT,
cluster_58 = cluster
|> ['b','A','z','A','r'] <| [
`plural` _____ |< At ]
cluster_59 = cluster
|> "b z .g" <| [
`imperf` FCuL,
cluster_60 = cluster
|> "b z q" <| [
`imperf` FCuL,
< bazq >
`plural` MaFACiL ]
cluster_61 = cluster
|> "b z l" <| [
`imperf` FCuL,
< >
`plural` MaFACiL ]
cluster_62 = cluster
|> ['b','A','z','a','l','t'] <| [
cluster_63 = cluster
|> ['b','i','z','a','n','.','t'] <| [
|> ['b','I','z','a','n','.','t'] <| [
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT ]
cluster_64 = cluster
|> ['b','i','z','m','U','t'] <| [
cluster_65 = cluster
|> "b s b s" <| [
`plural` KaRADiS ]
cluster_66 = cluster
|> "b s t r" <| [
cluster_67 = cluster
|> "b s r" <| [
`imperf` FCuL,
`plural` FuCL |< At
`plural` FiCAL,
`plural` FawACIL ]
cluster_68 = cluster
|> "b s .t" <| [
`imperf` FCuL,
`imperf` FCuL,
`plural` FaCaL |< At,
`plural` FuCuL
`plural` FiCAL |< At
`plural` HaFCiL |< aT,
`plural` FuCaLA',
`plural` FaCA'iL
`limited` "-------P--",
< basA.taT >
`plural` HaFACIL,
`plural` TaFCIL |< At,
`plural` TaFaCCuL |< At,
`plural` InFiCAL |< At,
`plural` InFiCAL |< At,
cluster_69 = cluster
|> "b s q" <| [
`imperf` FCuL,
< bAsiq >
cluster_70 = cluster
|> "b s l" <| [
`imperf` FCuL,
< bAsil >
< bAsil >
`plural` FuCaLA'
`plural` FACiL |< Un
`plural` FawACiL,
`plural` IstiFCAL |< At,
cluster_71 = cluster
|> ['b','A','s','I','l'] <| [
cluster_72 = cluster
|> "b s m" <| [
`imperf` FCiL,
`plural` FaCaL |< At,
`plural` MaFACiL,
`plural` IFtiCAL |< At ]
cluster_73 = cluster
|> "b s n" <| [
cluster_74 = cluster
|> ['b','I','s','U','n'] <| [
cluster_75 = cluster
|> ['b','A','s','b','U','r'] <| [
`plural` _____ |< At ]
cluster_76 = cluster
|> ['b','A','s','t','I','l'] <| [
cluster_77 = cluster
|> ['b','a','s','t','U','n'] <| [
|> "b s t n" <| [
`plural` KiRDUS |< At
`plural` KaRADiS ]
|> "b s t n" <| [
`plural` KaRADIS,
`plural` KuRDAS |< Iy |< Un
`femini` KuRDAS |< Iy |< aT,
cluster_78 = cluster
|> ['b','a','s','_','h'] <| [
cluster_79 = cluster
|> ['b','U','s','.','t'] <| [
`plural` _____ |<< "a" |<< "^g" |< Iy |< Un,
`plural` _____ |<< "a" |<< "^g" |< Iy |< At ]
cluster_80 = cluster
|> ['b','a','s','.','t','u','r','m'] <| [
< >
cluster_81 = cluster
|> ['b','U','s','f','U','r'] <| [
|> ['b','u','s','f','U','r'] <| [
cluster_82 = cluster
|> ['b','A','s','k'] <| [
< bAskIy >
`plural` _____,
< bAskIy >
`plural` _____
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT ]
cluster_83 = cluster
|> ['b','i','s','k','U','t'] <| [
cluster_84 = cluster
|> ['b','i','s','k','i','l','I','t'] <| [
`plural` _____ |< At ]
cluster_85 = cluster
|> ['b','U','s','n'] <| [
`plural` _____ |< Iy |< Un
`femini` _____ |< Iy |< aT,
cluster_86 = cluster
|> ['b','A','^','s'] <| [
`plural` _____ |<< "A" |< At,
cluster_87 = cluster
|> "b ^s t" <| [
cluster_88 = cluster
|> "b ^s r" <| [
`imperf` FCiL,
`imperf` FCaL,
`plural` FuCLY |< At,
`plural` FuCaLA',
`plural` FaCA'iL
`plural` FiCAL |< At,
`plural` TaFCIL |< At,
`plural` TaFCIL |< At,
`plural` TaFACIL
`limited` "-------P--",
`plural` MuFaCCiL |< Un
`femini` MuFaCCiL |< aT,
|> "b ^s r" <| [
`imperf` FCuL,
`plural` MaFACiL,
cluster_89 = cluster
|> ['b','a','^','s','a','r','U','^','s'] <| [
< >
cluster_90 = cluster
|> "b ^s `" <| [
`imperf` FCaL,
`plural` TaFCIL |< At ]
cluster_91 = cluster
|> "b ^s k" <| [
< >
`plural` FaCCAL |< Un
`femini` FaCCAL |< aT,
`plural` IFtiCAL |< At,
`plural` FawACiL ]
cluster_92 = cluster
|> "b ^s k r" <| [
`plural` KaRADIS ]
|> "b ^s k r" <| [
`plural` KaRADIS ]
cluster_93 = cluster
|> "b ^s m" <| [
`imperf` FCaL,
cluster_94 = cluster
|> "b ^s m r" <| [
`plural` KaRADiS |< Iy |< Un
`femini` KaRADiS |< Iy |< aT ]
cluster_95 = cluster
|> "b ^s m q" <| [
cluster_96 = cluster
|> "b ^s n" <| [
cluster_97 = cluster
|> "b ^s n q" <| [
`plural` KaRADiS ]
cluster_98 = cluster
|> ['b','a','^','s','a','n','s'] <| [
< ba^sans >
cluster_99 = cluster
|> ['b','a','^','s','t','U','n'] <| [
`plural` _____
`femini` _____ |< Iy |< aT,
`plural` _____ ]
cluster_100 = cluster
|> ['b','A','^','s','q','i','r','d'] <| [
section = [ cluster_1,
cluster_2,
cluster_3,
cluster_4,
cluster_5,
cluster_6,
cluster_7,
cluster_8,
cluster_9,
cluster_10,
cluster_11,
cluster_12,
cluster_13,
cluster_14,
cluster_15,
cluster_16,
cluster_17,
cluster_18,
cluster_19,
cluster_20,
cluster_21,
cluster_22,
cluster_23,
cluster_24,
cluster_25,
cluster_26,
cluster_27,
cluster_28,
cluster_29,
cluster_30,
cluster_31,
cluster_32,
cluster_33,
cluster_34,
cluster_35,
cluster_36,
cluster_37,
cluster_38,
cluster_39,
cluster_40,
cluster_41,
cluster_42,
cluster_43,
cluster_44,
cluster_45,
cluster_46,
cluster_47,
cluster_48,
cluster_49,
cluster_50,
cluster_51,
cluster_52,
cluster_53,
cluster_54,
cluster_55,
cluster_56,
cluster_57,
cluster_58,
cluster_59,
cluster_60,
cluster_61,
cluster_62,
cluster_63,
cluster_64,
cluster_65,
cluster_66,
cluster_67,
cluster_68,
cluster_69,
cluster_70,
cluster_71,
cluster_72,
cluster_73,
cluster_74,
cluster_75,
cluster_76,
cluster_77,
cluster_78,
cluster_79,
cluster_80,
cluster_81,
cluster_82,
cluster_83,
cluster_84,
cluster_85,
cluster_86,
cluster_87,
cluster_88,
cluster_89,
cluster_90,
cluster_91,
cluster_92,
cluster_93,
cluster_94,
cluster_95,
cluster_96,
cluster_97,
cluster_98,
cluster_99,
cluster_100 ]
|
b58bbc506af5e6d6c7c9e60faf85961a2e11c5a9805d1365435edfc91a6a5ec8 | Clozure/ccl-tests | rotatef.lsp | ;-*- Mode: Lisp -*-
Author :
Created : Sun Apr 20 15:44:38 2003
;;;; Contains: Tests for ROTATEF
(in-package :cl-test)
(deftest rotatef-order.1
(let ((x (vector 'a 'b 'c 'd 'e 'f))
(i 2))
(values
(rotatef (aref x (incf i)) (aref x (incf i)))
x i))
nil
#(a b c e d f)
4)
(deftest rotatef-order.2
(let ((x (vector 'a 'b 'c 'd 'e 'f))
(i 2))
(values
(rotatef (aref x (incf i)) (aref x (incf i)) (aref x (incf i)))
x i))
nil
#(a b c e f d)
5)
(deftest rotatef.1
(let ((x (vector 0 1 2)))
(values
(rotatef (aref x (aref x 0)) (aref x (aref x 1)) (aref x (aref x 2)))
x))
nil
#(1 2 0))
(deftest rotatef.2
(let ((x (vector 0 1 2 3 4 5 6 7 8 9)))
(values
(rotatef (aref x (aref x 0))
(aref x (aref x 1))
(aref x (aref x 2))
(aref x (aref x 3))
(aref x (aref x 4))
(aref x (aref x 5))
(aref x (aref x 6))
(aref x (aref x 7))
(aref x (aref x 8))
(aref x (aref x 9)))
x))
nil
#(1 2 3 4 5 6 7 8 9 0))
(deftest rotatef.3
(rotatef)
nil)
(deftest rotatef.4
(let ((x 10))
(values
x
(rotatef x)
x))
10 nil 10)
(deftest rotatef.5
(let ((x 'a) (y 'b))
(values x y (rotatef x y) x y))
a b nil b a)
ROTATEF is a good testbed for finding conflicts in setf expansions
;;; These tests apply rotatef to various accessors
(deftest rotatef.6
(let* ((x (list 'a 'b))
(y (list 'c 'd))
(z 'e))
(rotatef (car x) (car y) z)
(values x y z))
(c b) (e d) a)
(deftest rotatef.7
(let* ((x (list 'a 'b))
(y (list 'c 'd))
(z 'e))
(rotatef (first x) (first y) z)
(values x y z))
(c b) (e d) a)
(deftest rotatef.8
(let* ((x (list 'a 'b))
(y (list 'c 'd))
(z '(e)))
(rotatef (cdr x) (cdr y) z)
(values x y z))
(a d) (c e) (b))
(deftest rotatef.9
(let* ((x (list 'a 'b))
(y (list 'c 'd))
(z '(e)))
(rotatef (rest x) (rest y) z)
(values x y z))
(a d) (c e) (b))
(deftest rotatef.10
(let* ((x (list 'a 'b))
(y (list 'c 'd))
(z 'e))
(rotatef (cadr x) (cadr y) z)
(values x y z))
(a d) (c e) b)
(deftest rotatef.11
(let* ((x (list 'a 'b))
(y (list 'c 'd))
(z 'e))
(rotatef (second x) (second y) z)
(values x y z))
(a d) (c e) b)
(deftest rotatef.12
(let* ((x (list 'a 'b 'c))
(y (list 'd 'e 'f))
(z (list 'g)))
(rotatef (cddr x) (cddr y) z)
(values x y z))
(a b f) (d e g) (c))
(deftest rotatef.13
(let* ((x (list (list 'a)))
(y (list (list 'c)))
(z 'e))
(rotatef (caar x) (caar y) z)
(values x y z))
((c)) ((e)) a)
(deftest rotatef.14
(let* ((x (list (list 'a 'b)))
(y (list (list 'c 'd)))
(z (list 'e)))
(rotatef (cdar x) (cdar y) z)
(values x y z))
((a d)) ((c e)) (b))
TODO : c*r accessors with > 2 a / d
;;; TODO: third,...,tenth
(deftest rotatef.15
(let* ((x (vector 'a 'b))
(y (vector 'c 'd))
(z 'e))
(rotatef (aref x 0) (aref y 0) z)
(values x y z))
#(c b) #(e d) a)
(deftest rotatef.16
(let* ((x (vector 'a 'b))
(y (vector 'c 'd))
(z 'e))
(rotatef (svref x 0) (svref y 0) z)
(values x y z))
#(c b) #(e d) a)
(deftest rotatef.17
(let* ((x (copy-seq #*11000))
(y (copy-seq #*11100))
(z 1))
(rotatef (bit x 1) (bit y 3) z)
(values x y z))
#*10000 #*11110 1)
(deftest rotatef.18
(let* ((x (copy-seq "abcde"))
(y (copy-seq "fghij"))
(z #\X))
(rotatef (char x 1) (char y 2) z)
(values x y z))
"ahcde" "fgXij" #\b)
(deftest rotatef.21
(let* ((x (copy-seq #*11000))
(y (copy-seq #*11100))
(z 1))
(rotatef (bit x 1) (bit y 3) z)
(values x y z))
#*10000 #*11110 1)
(deftest rotatef.22
(let* ((x (copy-seq "abcde"))
(y (copy-seq "fghij"))
(z #\X))
(rotatef (char x 1) (char y 2) z)
(values x y z))
"ahcde" "fgXij" #\b)
(deftest rotatef.23
(let* ((x (copy-seq '(a b c d e)))
(y (copy-seq '(f g h i j)))
(z 'k))
(rotatef (elt x 1) (elt y 2) z)
(values x y z))
(a h c d e) (f g k i j) b)
(deftest rotatef.24
(let ((x #b01010101)
(y #b1111)
(z 0))
(rotatef (ldb (byte 4 2) x)
(ldb (byte 4 1) y)
z)
(values x y z))
#b01011101
1
#b0101)
(deftest rotatef.25
(let* ((f1 (gensym))
(f2 (gensym))
(fn1 (constantly :foo))
(fn2 (constantly :bar))
(fn3 (constantly :zzz)))
(setf (fdefinition f1) fn1
(fdefinition f2) fn2)
(rotatef (fdefinition f1)
(fdefinition f2)
fn3)
(values (funcall f1) (funcall f2) (funcall fn3)))
:bar :zzz :foo)
(deftest rotatef.26
(let* ((a1 (make-array '(10) :fill-pointer 5))
(a2 (make-array '(20) :fill-pointer 7))
(z 3))
(rotatef (fill-pointer a1) (fill-pointer a2) z)
(values (fill-pointer a1) (fill-pointer a2) z))
7 3 5)
(deftest rotatef.27
(let* ((x (list 'a 'b 'c 'd))
(y (list 'd 'e 'f 'g))
(n1 1) (n2 2)
(z 'h))
(rotatef (nth n1 x) (nth n2 y) z)
(values x y z))
(a f c d)
(d e h g)
b)
(deftest rotatef.28
(let* ((f1 (gensym))
(f2 (gensym))
(fn1 (constantly :foo))
(fn2 (constantly :bar))
(fn3 (constantly :zzz)))
(setf (symbol-function f1) fn1
(symbol-function f2) fn2)
(rotatef (symbol-function f1) (symbol-function f2) fn3)
(values (funcall f1) (funcall f2) (funcall fn3)))
:bar :zzz :foo)
(deftest rotatef.29
(let* ((s1 (gensym))
(s2 (gensym))
(z 1))
(setf (symbol-value s1) :foo
(symbol-value s2) :bar)
(rotatef (symbol-value s1)
(symbol-value s2)
z)
(values (symbol-value s1) (symbol-value s2) z))
:bar 1 :foo)
(deftest rotatef.30
(let* ((s1 (gensym))
(s2 (gensym))
(v1 (list :foo 1))
(v2 (list :bar 2))
(z nil))
(setf (symbol-plist s1) v1
(symbol-plist s2) v2)
(rotatef (symbol-plist s1) (symbol-plist s2) z)
(values (symbol-plist s1) (symbol-plist s2) z))
(:bar 2) nil (:foo 1))
(deftest rotatef.31
(let* ((x (list 'a 'b 'c 'd 'e))
(y (list 'f 'g 'h 'i 'j))
(p1 1) (p2 2) (len 3)
(z '(10 11 12)))
(rotatef (subseq x p1 (+ p1 len))
(subseq y p2 (+ p2 len))
z)
(values x y z))
(a h i j e)
(f g 10 11 12)
(b c d))
(deftest rotatef.32
(let* ((x (gensym))
(y (gensym))
(k1 :foo)
(k2 :bar)
(v1 1)
(v2 2)
(z 17))
(setf (get x k1) v1 (get y k2) v2)
(rotatef (get x k1) (get y k2) z)
(values (symbol-plist x) (symbol-plist y) z))
(:foo 2) (:bar 17) 1)
(deftest rotatef.33
(let* ((x nil)
(y nil)
(k1 :foo)
(k2 :bar)
(v1 1)
(v2 2)
(z 21))
(setf (getf x k1) v1 (getf y k2) v2)
(rotatef (getf x k1) (getf y k2) z)
(values x y z))
(:foo 2) (:bar 21) 1)
(deftest rotatef.34
(let* ((ht1 (make-hash-table))
(ht2 (make-hash-table))
(k1 :foo) (v1 1)
(k2 :bar) (v2 2)
(z 3))
(setf (gethash k1 ht1) v1
(gethash k2 ht2) v2)
(rotatef z (gethash k1 ht1) (gethash k2 ht2))
(values z (gethash k1 ht1) (gethash k2 ht2)))
1 2 3)
(deftest rotatef.35
(let ((n1 (gensym))
(n2 (gensym))
(n3 (gensym))
(n4 (gensym)))
(eval `(defclass ,n1 () ()))
(eval `(defclass ,n2 () ()))
(setf (find-class n3) (find-class n1)
(find-class n4) (find-class n2))
(rotatef (find-class n3) (find-class n4))
(values (eqlt (find-class n1) (find-class n4))
(eqlt (find-class n2) (find-class n3))))
t t)
;;; Test that explicit calls to macroexpand in subforms
;;; are done in the correct environment
(deftest rotatef.36
(macrolet
((%m (z) z))
(let ((x 1) (y 2))
(rotatef (expand-in-current-env (%m x)) y)
(values x y)))
2 1)
(deftest rotatef.37
(macrolet
((%m (z) z))
(let ((x 1) (y 2))
(rotatef x (expand-in-current-env (%m y)))
(values x y)))
2 1)
;;; TODO: macro-function, mask-field, row-major-aref,
;;; logical-pathname-translations, readtable-case
| null | https://raw.githubusercontent.com/Clozure/ccl-tests/0478abddb34dbc16487a1975560d8d073a988060/ansi-tests/rotatef.lsp | lisp | -*- Mode: Lisp -*-
Contains: Tests for ROTATEF
These tests apply rotatef to various accessors
TODO: third,...,tenth
Test that explicit calls to macroexpand in subforms
are done in the correct environment
TODO: macro-function, mask-field, row-major-aref,
logical-pathname-translations, readtable-case | Author :
Created : Sun Apr 20 15:44:38 2003
(in-package :cl-test)
(deftest rotatef-order.1
(let ((x (vector 'a 'b 'c 'd 'e 'f))
(i 2))
(values
(rotatef (aref x (incf i)) (aref x (incf i)))
x i))
nil
#(a b c e d f)
4)
(deftest rotatef-order.2
(let ((x (vector 'a 'b 'c 'd 'e 'f))
(i 2))
(values
(rotatef (aref x (incf i)) (aref x (incf i)) (aref x (incf i)))
x i))
nil
#(a b c e f d)
5)
(deftest rotatef.1
(let ((x (vector 0 1 2)))
(values
(rotatef (aref x (aref x 0)) (aref x (aref x 1)) (aref x (aref x 2)))
x))
nil
#(1 2 0))
(deftest rotatef.2
(let ((x (vector 0 1 2 3 4 5 6 7 8 9)))
(values
(rotatef (aref x (aref x 0))
(aref x (aref x 1))
(aref x (aref x 2))
(aref x (aref x 3))
(aref x (aref x 4))
(aref x (aref x 5))
(aref x (aref x 6))
(aref x (aref x 7))
(aref x (aref x 8))
(aref x (aref x 9)))
x))
nil
#(1 2 3 4 5 6 7 8 9 0))
(deftest rotatef.3
(rotatef)
nil)
(deftest rotatef.4
(let ((x 10))
(values
x
(rotatef x)
x))
10 nil 10)
(deftest rotatef.5
(let ((x 'a) (y 'b))
(values x y (rotatef x y) x y))
a b nil b a)
ROTATEF is a good testbed for finding conflicts in setf expansions
(deftest rotatef.6
(let* ((x (list 'a 'b))
(y (list 'c 'd))
(z 'e))
(rotatef (car x) (car y) z)
(values x y z))
(c b) (e d) a)
(deftest rotatef.7
(let* ((x (list 'a 'b))
(y (list 'c 'd))
(z 'e))
(rotatef (first x) (first y) z)
(values x y z))
(c b) (e d) a)
(deftest rotatef.8
(let* ((x (list 'a 'b))
(y (list 'c 'd))
(z '(e)))
(rotatef (cdr x) (cdr y) z)
(values x y z))
(a d) (c e) (b))
(deftest rotatef.9
(let* ((x (list 'a 'b))
(y (list 'c 'd))
(z '(e)))
(rotatef (rest x) (rest y) z)
(values x y z))
(a d) (c e) (b))
(deftest rotatef.10
(let* ((x (list 'a 'b))
(y (list 'c 'd))
(z 'e))
(rotatef (cadr x) (cadr y) z)
(values x y z))
(a d) (c e) b)
(deftest rotatef.11
(let* ((x (list 'a 'b))
(y (list 'c 'd))
(z 'e))
(rotatef (second x) (second y) z)
(values x y z))
(a d) (c e) b)
(deftest rotatef.12
(let* ((x (list 'a 'b 'c))
(y (list 'd 'e 'f))
(z (list 'g)))
(rotatef (cddr x) (cddr y) z)
(values x y z))
(a b f) (d e g) (c))
(deftest rotatef.13
(let* ((x (list (list 'a)))
(y (list (list 'c)))
(z 'e))
(rotatef (caar x) (caar y) z)
(values x y z))
((c)) ((e)) a)
(deftest rotatef.14
(let* ((x (list (list 'a 'b)))
(y (list (list 'c 'd)))
(z (list 'e)))
(rotatef (cdar x) (cdar y) z)
(values x y z))
((a d)) ((c e)) (b))
TODO : c*r accessors with > 2 a / d
(deftest rotatef.15
(let* ((x (vector 'a 'b))
(y (vector 'c 'd))
(z 'e))
(rotatef (aref x 0) (aref y 0) z)
(values x y z))
#(c b) #(e d) a)
(deftest rotatef.16
(let* ((x (vector 'a 'b))
(y (vector 'c 'd))
(z 'e))
(rotatef (svref x 0) (svref y 0) z)
(values x y z))
#(c b) #(e d) a)
(deftest rotatef.17
(let* ((x (copy-seq #*11000))
(y (copy-seq #*11100))
(z 1))
(rotatef (bit x 1) (bit y 3) z)
(values x y z))
#*10000 #*11110 1)
(deftest rotatef.18
(let* ((x (copy-seq "abcde"))
(y (copy-seq "fghij"))
(z #\X))
(rotatef (char x 1) (char y 2) z)
(values x y z))
"ahcde" "fgXij" #\b)
(deftest rotatef.21
(let* ((x (copy-seq #*11000))
(y (copy-seq #*11100))
(z 1))
(rotatef (bit x 1) (bit y 3) z)
(values x y z))
#*10000 #*11110 1)
(deftest rotatef.22
(let* ((x (copy-seq "abcde"))
(y (copy-seq "fghij"))
(z #\X))
(rotatef (char x 1) (char y 2) z)
(values x y z))
"ahcde" "fgXij" #\b)
(deftest rotatef.23
(let* ((x (copy-seq '(a b c d e)))
(y (copy-seq '(f g h i j)))
(z 'k))
(rotatef (elt x 1) (elt y 2) z)
(values x y z))
(a h c d e) (f g k i j) b)
(deftest rotatef.24
(let ((x #b01010101)
(y #b1111)
(z 0))
(rotatef (ldb (byte 4 2) x)
(ldb (byte 4 1) y)
z)
(values x y z))
#b01011101
1
#b0101)
(deftest rotatef.25
(let* ((f1 (gensym))
(f2 (gensym))
(fn1 (constantly :foo))
(fn2 (constantly :bar))
(fn3 (constantly :zzz)))
(setf (fdefinition f1) fn1
(fdefinition f2) fn2)
(rotatef (fdefinition f1)
(fdefinition f2)
fn3)
(values (funcall f1) (funcall f2) (funcall fn3)))
:bar :zzz :foo)
(deftest rotatef.26
(let* ((a1 (make-array '(10) :fill-pointer 5))
(a2 (make-array '(20) :fill-pointer 7))
(z 3))
(rotatef (fill-pointer a1) (fill-pointer a2) z)
(values (fill-pointer a1) (fill-pointer a2) z))
7 3 5)
(deftest rotatef.27
(let* ((x (list 'a 'b 'c 'd))
(y (list 'd 'e 'f 'g))
(n1 1) (n2 2)
(z 'h))
(rotatef (nth n1 x) (nth n2 y) z)
(values x y z))
(a f c d)
(d e h g)
b)
(deftest rotatef.28
(let* ((f1 (gensym))
(f2 (gensym))
(fn1 (constantly :foo))
(fn2 (constantly :bar))
(fn3 (constantly :zzz)))
(setf (symbol-function f1) fn1
(symbol-function f2) fn2)
(rotatef (symbol-function f1) (symbol-function f2) fn3)
(values (funcall f1) (funcall f2) (funcall fn3)))
:bar :zzz :foo)
(deftest rotatef.29
(let* ((s1 (gensym))
(s2 (gensym))
(z 1))
(setf (symbol-value s1) :foo
(symbol-value s2) :bar)
(rotatef (symbol-value s1)
(symbol-value s2)
z)
(values (symbol-value s1) (symbol-value s2) z))
:bar 1 :foo)
(deftest rotatef.30
(let* ((s1 (gensym))
(s2 (gensym))
(v1 (list :foo 1))
(v2 (list :bar 2))
(z nil))
(setf (symbol-plist s1) v1
(symbol-plist s2) v2)
(rotatef (symbol-plist s1) (symbol-plist s2) z)
(values (symbol-plist s1) (symbol-plist s2) z))
(:bar 2) nil (:foo 1))
(deftest rotatef.31
(let* ((x (list 'a 'b 'c 'd 'e))
(y (list 'f 'g 'h 'i 'j))
(p1 1) (p2 2) (len 3)
(z '(10 11 12)))
(rotatef (subseq x p1 (+ p1 len))
(subseq y p2 (+ p2 len))
z)
(values x y z))
(a h i j e)
(f g 10 11 12)
(b c d))
(deftest rotatef.32
(let* ((x (gensym))
(y (gensym))
(k1 :foo)
(k2 :bar)
(v1 1)
(v2 2)
(z 17))
(setf (get x k1) v1 (get y k2) v2)
(rotatef (get x k1) (get y k2) z)
(values (symbol-plist x) (symbol-plist y) z))
(:foo 2) (:bar 17) 1)
(deftest rotatef.33
(let* ((x nil)
(y nil)
(k1 :foo)
(k2 :bar)
(v1 1)
(v2 2)
(z 21))
(setf (getf x k1) v1 (getf y k2) v2)
(rotatef (getf x k1) (getf y k2) z)
(values x y z))
(:foo 2) (:bar 21) 1)
(deftest rotatef.34
(let* ((ht1 (make-hash-table))
(ht2 (make-hash-table))
(k1 :foo) (v1 1)
(k2 :bar) (v2 2)
(z 3))
(setf (gethash k1 ht1) v1
(gethash k2 ht2) v2)
(rotatef z (gethash k1 ht1) (gethash k2 ht2))
(values z (gethash k1 ht1) (gethash k2 ht2)))
1 2 3)
(deftest rotatef.35
(let ((n1 (gensym))
(n2 (gensym))
(n3 (gensym))
(n4 (gensym)))
(eval `(defclass ,n1 () ()))
(eval `(defclass ,n2 () ()))
(setf (find-class n3) (find-class n1)
(find-class n4) (find-class n2))
(rotatef (find-class n3) (find-class n4))
(values (eqlt (find-class n1) (find-class n4))
(eqlt (find-class n2) (find-class n3))))
t t)
(deftest rotatef.36
(macrolet
((%m (z) z))
(let ((x 1) (y 2))
(rotatef (expand-in-current-env (%m x)) y)
(values x y)))
2 1)
(deftest rotatef.37
(macrolet
((%m (z) z))
(let ((x 1) (y 2))
(rotatef x (expand-in-current-env (%m y)))
(values x y)))
2 1)
|
372c987382fd8e92cdb7f52994552897e4a9f0055f61e32dc01c521626bb2b46 | saltlang/saltlang | Driver.hs | Copyright ( c ) 2015 . All rights reserved .
--
-- Redistribution and use in source and binary forms, with or without
-- modification, are permitted provided that the following conditions
-- are met:
--
1 . Redistributions of source code must retain the above copyright
-- notice, this list of conditions and the following disclaimer.
--
2 . Redistributions in binary form must reproduce the above copyright
-- notice, this list of conditions and the following disclaimer in the
-- documentation and/or other materials provided with the distribution.
--
3 . Neither the name of the author nor the names of any contributors
-- may be used to endorse or promote products derived from this software
-- without specific prior written permission.
--
THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ` ` AS IS ''
-- AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
-- TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
-- PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
-- OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF
-- USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY ,
-- OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
-- OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
-- SUCH DAMAGE.
{-# OPTIONS_GHC -Wall -Werror #-}
module Language.Salt.Compiler.Driver(
run
) where
import Control.Monad.Collect
import Control.Monad.FileArtifacts
import Control.Monad.FileLoader
import Control.Monad.Frontend
import Control.Monad.Messages
import Data.Array
import Data.Message
import Language.Salt.Compiler.Options
import Language.Salt.Compiler.Stages
import Language.Salt.Surface.Token
import Prelude hiding (lex)
import System.IO
import qualified Data.ByteString as Strict
-- Later on, have some sort of compiler products structure. Also,
-- have run call a compile function that generates compiler products.
-- Also, have a frontend and backend function, that builds a frontend
-- and backend pipeline based on data in options.
-- | Run the compiler with the given options.
run :: Options -> IO ()
run opts @ Options { optInputs = inputs, optStages = stages,
optDistDir = distdiropt,
optSrcDirs = srcdirs } =
let
distdir = case distdiropt of
Just val -> val
Nothing -> Strict.empty
in case bounds stages of
(Lexer, Lexer) ->
let
loader = runFileArtifactsT (lex opts inputs) distdir
msgs = runFileLoaderT loader srcdirs
front = putMessagesT stderr Error msgs
in do
_ <- runFrontendT front keywords
return ()
(Lexer, Parser) ->
let
loader = runFileArtifactsT (parse opts inputs) distdir
msgs = runFileLoaderT loader srcdirs
front = putMessagesT stderr Error msgs
in do
_ <- runFrontendT front keywords
return ()
(Lexer, Collect) ->
let
artifacts = runCollectTComponentsT (collect opts inputs)
(const $! dumpSurface opts)
loader = runFileArtifactsT artifacts distdir
msgs = runFileLoaderT loader srcdirs
front = putMessagesT stderr Error msgs
in do
_ <- runFrontendT front keywords
return ()
(_, _) -> error "Stages array does not begin with Lexer"
| null | https://raw.githubusercontent.com/saltlang/saltlang/f3478904139f19373f23824f25d28e8be745dc60/src/saltc/Language/Salt/Compiler/Driver.hs | haskell |
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
notice, this list of conditions and the following disclaimer.
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
may be used to endorse or promote products derived from this software
without specific prior written permission.
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A
PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHORS
OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT
OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF
SUCH DAMAGE.
# OPTIONS_GHC -Wall -Werror #
Later on, have some sort of compiler products structure. Also,
have run call a compile function that generates compiler products.
Also, have a frontend and backend function, that builds a frontend
and backend pipeline based on data in options.
| Run the compiler with the given options. | Copyright ( c ) 2015 . All rights reserved .
1 . Redistributions of source code must retain the above copyright
2 . Redistributions in binary form must reproduce the above copyright
3 . Neither the name of the author nor the names of any contributors
THIS SOFTWARE IS PROVIDED BY THE AUTHORS AND CONTRIBUTORS ` ` AS IS ''
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT
LIMITED TO , PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES ; LOSS OF
ON ANY THEORY OF LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY ,
module Language.Salt.Compiler.Driver(
run
) where
import Control.Monad.Collect
import Control.Monad.FileArtifacts
import Control.Monad.FileLoader
import Control.Monad.Frontend
import Control.Monad.Messages
import Data.Array
import Data.Message
import Language.Salt.Compiler.Options
import Language.Salt.Compiler.Stages
import Language.Salt.Surface.Token
import Prelude hiding (lex)
import System.IO
import qualified Data.ByteString as Strict
run :: Options -> IO ()
run opts @ Options { optInputs = inputs, optStages = stages,
optDistDir = distdiropt,
optSrcDirs = srcdirs } =
let
distdir = case distdiropt of
Just val -> val
Nothing -> Strict.empty
in case bounds stages of
(Lexer, Lexer) ->
let
loader = runFileArtifactsT (lex opts inputs) distdir
msgs = runFileLoaderT loader srcdirs
front = putMessagesT stderr Error msgs
in do
_ <- runFrontendT front keywords
return ()
(Lexer, Parser) ->
let
loader = runFileArtifactsT (parse opts inputs) distdir
msgs = runFileLoaderT loader srcdirs
front = putMessagesT stderr Error msgs
in do
_ <- runFrontendT front keywords
return ()
(Lexer, Collect) ->
let
artifacts = runCollectTComponentsT (collect opts inputs)
(const $! dumpSurface opts)
loader = runFileArtifactsT artifacts distdir
msgs = runFileLoaderT loader srcdirs
front = putMessagesT stderr Error msgs
in do
_ <- runFrontendT front keywords
return ()
(_, _) -> error "Stages array does not begin with Lexer"
|
28df670460e29ddcd91cf35728e9af9084002aa44520b581cea5df278f1ddf3d | foshardware/lsc | SVG.hs | Copyright 2018 - < >
SPDX - License - Identifier : GPL-3.0 - or - later
{-# LANGUAGE OverloadedStrings #-}
module LSC.SVG where
import Control.Applicative
import Control.Lens
import Data.Foldable
import Data.Hashable
import Data.IntSet (elems)
import Data.List (intersperse)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Lazy.IO as Lazy
import Data.Text.Lazy.Builder (Builder, fromText)
import Data.Text.Lazy.Builder.Int
import Text.Blaze.Svg11 ((!), toSvg, toValue)
import qualified Text.Blaze.Svg11 as S
import qualified Text.Blaze.Svg11.Attributes as A
import Text.Blaze.Svg.Renderer.Text (renderSvg)
import LSC.Cartesian
import LSC.Component
import LSC.Model
import LSC.NetGraph
import LSC.Polygon
pixelsPerMicron :: Int
pixelsPerMicron = 60
gateColor :: Gate -> Arg
gateColor g | g ^. feedthrough = "lightyellow"
gateColor g | g ^. fixed = "lightblue"
gateColor _ = "lightgrey"
layerColor :: [Layer] -> Arg
layerColor [Metal1] = "#EFEFFF"
layerColor _ = "transparent"
identColor :: Identifier -> Arg
identColor
= toValue
. T.cons '#'
. T.justifyRight 6 '0'
. base16Identifier
. flip mod 0x666666
. hash
type Marker = Line' Int
type Area = Component' Layer Int
type Poly = Polygon' Layer Int
type Svg = S.Svg
type Arg = S.AttributeValue
type Args = (Arg, Arg)
plotStdout :: Double -> NetGraph -> IO ()
plotStdout scale = Lazy.putStr . renderSvg . plot scale
plot :: Double -> NetGraph -> Svg
plot scale
| scale <= 0
= error $ "plot: invalid scaling factor " ++ show scale
plot scale
= svgDoc . region pixelated pixelated . liftA2 id quadrantI id
where
pixelated
= round . (/ scale)
. fromIntegral . (* pixelsPerMicron)
quadrantI
= liftA2 region ((+) . abs . min 0 . view l) ((+) . abs . min 0 . view b)
. netGraphArea
svgDoc :: NetGraph -> Svg
svgDoc top = S.docTypeSvg
! A.version "1.1"
! A.width (toValue $ area ^. r)
! A.height (toValue $ area ^. t)
$ do
track area `mapM_` view (supercell . tracks) top
place `mapM_` view gates top
route `mapM_` view nets top
ports `mapM_` view nets top
drawA ("black", "lightyellow") `mapM_` outerRim top
where
area = netGraphArea top
place :: Gate -> Svg
place g = do
let a = g ^. space
let d = rotation $ a ^. orientation
(x, y) = label a
let k = height a `div` 9
drawA ("black", gateColor g) a
S.text_
! A.x (toValue x)
! A.y (toValue y)
! A.fontSize (toValue k)
! A.fontFamily "monospace"
! A.transform (toValue $ "rotate(" <> decimal d <> " " <> decimal x <> "," <> decimal y <> ")")
$ toSvg $ views number decimal g <> ": " <> views identifier (forShort 8) g
label :: Area -> (Int, Int)
label a = case a ^. orientation of
FN -> (a ^. r - height a `div` 32, a ^. t - height a `div` 24)
_ -> (a ^. l + height a `div` 32, a ^. b + height a `div` 24)
rotation :: Orientation -> Int
rotation FN = 270
rotation _ = 90
route :: Net -> Svg
route n
| views geometry null n
= do
drawL (views identifier identColor n) `mapM_` view netSegments n
route _
= do
pure ()
track :: Area -> Either Track Track -> Svg
track a (Right x)
= for_ (x ^. stabs . to elems)
$ drawL (x ^. layers z . to layerColor) . vertical (a ^. t)
track a (Left y)
= for_ (y ^. stabs . to elems)
$ drawL (y ^. layers z . to layerColor) . horizontal (a ^. r)
horizontal, vertical :: Int -> Int -> Marker
horizontal x y = Line (0, y) (x, y)
vertical y x = Line (x, 0) (x, y)
ports :: Net -> Svg
ports
= mapM_ drawP
. foldMap (view geometry)
. fold
. view contacts
drawP :: Poly -> Svg
drawP p = S.polygon
! A.points (toValue $ fold $ intersperse " " points)
! A.fill "transparent"
! A.stroke "black"
where
points = toList $ p ^. path <&> \ (x, y) -> decimal x <> "," <> decimal y
drawA :: Args -> Area -> Svg
drawA (border, background) a = S.rect
! A.x (toValue $ a ^. l)
! A.y (toValue $ a ^. b)
! A.width (toValue $ width a)
! A.height (toValue $ height a)
! A.stroke border
! A.fill background
drawL :: Arg -> Marker -> Svg
drawL color (Line (x1, y1) (x2, y2)) = S.line
! A.x1 (toValue x1)
! A.y1 (toValue y1)
! A.x2 (toValue x2)
! A.y2 (toValue y2)
! A.stroke color
! A.strokeWidth "3"
forShort :: Int -> Text -> Builder
forShort n string
| T.length string > n
= fromText (T.take (n - 2) string) <> ".."
forShort _ string
= fromText string
| null | https://raw.githubusercontent.com/foshardware/lsc/006c245a89b0a0056286205917438c7d031d04b9/src/LSC/SVG.hs | haskell | # LANGUAGE OverloadedStrings # | Copyright 2018 - < >
SPDX - License - Identifier : GPL-3.0 - or - later
module LSC.SVG where
import Control.Applicative
import Control.Lens
import Data.Foldable
import Data.Hashable
import Data.IntSet (elems)
import Data.List (intersperse)
import Data.Text (Text)
import qualified Data.Text as T
import qualified Data.Text.Lazy.IO as Lazy
import Data.Text.Lazy.Builder (Builder, fromText)
import Data.Text.Lazy.Builder.Int
import Text.Blaze.Svg11 ((!), toSvg, toValue)
import qualified Text.Blaze.Svg11 as S
import qualified Text.Blaze.Svg11.Attributes as A
import Text.Blaze.Svg.Renderer.Text (renderSvg)
import LSC.Cartesian
import LSC.Component
import LSC.Model
import LSC.NetGraph
import LSC.Polygon
pixelsPerMicron :: Int
pixelsPerMicron = 60
gateColor :: Gate -> Arg
gateColor g | g ^. feedthrough = "lightyellow"
gateColor g | g ^. fixed = "lightblue"
gateColor _ = "lightgrey"
layerColor :: [Layer] -> Arg
layerColor [Metal1] = "#EFEFFF"
layerColor _ = "transparent"
identColor :: Identifier -> Arg
identColor
= toValue
. T.cons '#'
. T.justifyRight 6 '0'
. base16Identifier
. flip mod 0x666666
. hash
type Marker = Line' Int
type Area = Component' Layer Int
type Poly = Polygon' Layer Int
type Svg = S.Svg
type Arg = S.AttributeValue
type Args = (Arg, Arg)
plotStdout :: Double -> NetGraph -> IO ()
plotStdout scale = Lazy.putStr . renderSvg . plot scale
plot :: Double -> NetGraph -> Svg
plot scale
| scale <= 0
= error $ "plot: invalid scaling factor " ++ show scale
plot scale
= svgDoc . region pixelated pixelated . liftA2 id quadrantI id
where
pixelated
= round . (/ scale)
. fromIntegral . (* pixelsPerMicron)
quadrantI
= liftA2 region ((+) . abs . min 0 . view l) ((+) . abs . min 0 . view b)
. netGraphArea
svgDoc :: NetGraph -> Svg
svgDoc top = S.docTypeSvg
! A.version "1.1"
! A.width (toValue $ area ^. r)
! A.height (toValue $ area ^. t)
$ do
track area `mapM_` view (supercell . tracks) top
place `mapM_` view gates top
route `mapM_` view nets top
ports `mapM_` view nets top
drawA ("black", "lightyellow") `mapM_` outerRim top
where
area = netGraphArea top
place :: Gate -> Svg
place g = do
let a = g ^. space
let d = rotation $ a ^. orientation
(x, y) = label a
let k = height a `div` 9
drawA ("black", gateColor g) a
S.text_
! A.x (toValue x)
! A.y (toValue y)
! A.fontSize (toValue k)
! A.fontFamily "monospace"
! A.transform (toValue $ "rotate(" <> decimal d <> " " <> decimal x <> "," <> decimal y <> ")")
$ toSvg $ views number decimal g <> ": " <> views identifier (forShort 8) g
label :: Area -> (Int, Int)
label a = case a ^. orientation of
FN -> (a ^. r - height a `div` 32, a ^. t - height a `div` 24)
_ -> (a ^. l + height a `div` 32, a ^. b + height a `div` 24)
rotation :: Orientation -> Int
rotation FN = 270
rotation _ = 90
route :: Net -> Svg
route n
| views geometry null n
= do
drawL (views identifier identColor n) `mapM_` view netSegments n
route _
= do
pure ()
track :: Area -> Either Track Track -> Svg
track a (Right x)
= for_ (x ^. stabs . to elems)
$ drawL (x ^. layers z . to layerColor) . vertical (a ^. t)
track a (Left y)
= for_ (y ^. stabs . to elems)
$ drawL (y ^. layers z . to layerColor) . horizontal (a ^. r)
horizontal, vertical :: Int -> Int -> Marker
horizontal x y = Line (0, y) (x, y)
vertical y x = Line (x, 0) (x, y)
ports :: Net -> Svg
ports
= mapM_ drawP
. foldMap (view geometry)
. fold
. view contacts
drawP :: Poly -> Svg
drawP p = S.polygon
! A.points (toValue $ fold $ intersperse " " points)
! A.fill "transparent"
! A.stroke "black"
where
points = toList $ p ^. path <&> \ (x, y) -> decimal x <> "," <> decimal y
drawA :: Args -> Area -> Svg
drawA (border, background) a = S.rect
! A.x (toValue $ a ^. l)
! A.y (toValue $ a ^. b)
! A.width (toValue $ width a)
! A.height (toValue $ height a)
! A.stroke border
! A.fill background
drawL :: Arg -> Marker -> Svg
drawL color (Line (x1, y1) (x2, y2)) = S.line
! A.x1 (toValue x1)
! A.y1 (toValue y1)
! A.x2 (toValue x2)
! A.y2 (toValue y2)
! A.stroke color
! A.strokeWidth "3"
forShort :: Int -> Text -> Builder
forShort n string
| T.length string > n
= fromText (T.take (n - 2) string) <> ".."
forShort _ string
= fromText string
|
11aabe95961781ea99353483f46d3cf78aef36ac82b9e5428eef8275ded7ff85 | helins/templ-lib.cljc | user.clj | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(ns user
"For daydreaming at the REPL."
{:author "{{ developer }}"}
(:require [{{ group }}.{{ name }} :as {{ name }}]
[{{ group }}.{{ name }}.dev :as {{ name }}.dev]))
(set! *warn-on-reflection*
true)
;;;;;;;;;;
(comment
)
| null | https://raw.githubusercontent.com/helins/templ-lib.cljc/c2438aaa7c6e1fc11d9f328275987cee6b7720bc/resources/clj/new/helins_lib_cljc/src/dev/user.clj | clojure | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(ns user
"For daydreaming at the REPL."
{:author "{{ developer }}"}
(:require [{{ group }}.{{ name }} :as {{ name }}]
[{{ group }}.{{ name }}.dev :as {{ name }}.dev]))
(set! *warn-on-reflection*
true)
(comment
)
| |
c853339bfcade4d19a00fdeddeb896a30d0c289a4f11086aca55760cc26dfc4f | songyahui/AlgebraicEffect | 5_sequence_of_elements.ml | effect Produce : int -> unit
let rec string_of_int_list li : string =
match li with
| [] -> ""
| x :: xs -> string_of_int x ^ "; " ^ string_of_int_list xs
let rec pos li : bool =
match li with
| [] -> true
| x :: xs -> if x <= 0 then false else pos xs
let producer ()
(*@ requires (true, _^* ,()) @*)
(*@ ensures (true, Produce(3).Produce(5),()) @*)
=
perform (Produce 3);
perform (Produce 5)
let main
(*@ requires (true, emp,()) @*)
(*@ ensures (true, {l->[1]}.[pos (l)].{l->l++[3]}.[pos (l)].{l->l++[5]},()) @*)
=
let l = Sys.opaque_identity (ref [1]) in
print_string (string_of_int_list !l ^ "\n");
match producer () with
| v -> ()
| effect (Produce x) k ->
assert (pos !l);
l := List.append (!l) [x];
print_string (string_of_int_list !l ^ "\n");
(continue k ())
For main :
{ l->[1]}.Produce(3).Produce(5 )
currenct effects continuation k stack /\ heap /\ assertion
--------------------------------------------------------------------------------
{ l->[1 ] } Produce(3).Produce(5 ) l->[1 ]
Produce(3 ) [ pos l].{l->l++[x]}.[pos ) x=3 /\ l->[1 ]
[ pos l ] { l->l++[x]}.[pos ) x=3 /\ l->[1 ] /\ true
{ l->l++[x ] } [ pos l].Produce(5 ) x=3 /\ l->[1;3 ]
[ pos l ] Produce(5 ) x=3 /\ l->[1;3 ] /\ true
Produce(5 ) [ pos l].{l->l++[x]}.[pos l ] x=5 /\ l->[1;3 ]
[ pos l ] { l->l++[x]}.[pos l ] x=5 /\ l->[1;3 ] /\ true
{ l->l++[x ] } [ pos l ] x=5 /\ l->[1;3;5 ]
[ pos l ] emp x=5 /\ l->[1;3;5]/\true
For main:
{l->[1]}.Produce(3).Produce(5)
currenct effects continuation k stack /\ heap /\ assertion
--------------------------------------------------------------------------------
{l->[1]} Produce(3).Produce(5) l->[1]
Produce(3) [pos l].{l->l++[x]}.[pos l].Produce(5) x=3 /\ l->[1]
[pos l] {l->l++[x]}.[pos l].Produce(5) x=3 /\ l->[1] /\ true
{l->l++[x]} [pos l].Produce(5) x=3 /\ l->[1;3]
[pos l] Produce(5) x=3 /\ l->[1;3] /\ true
Produce(5) [pos l].{l->l++[x]}.[pos l] x=5 /\ l->[1;3]
[pos l] {l->l++[x]}.[pos l] x=5 /\ l->[1;3] /\ true
{l->l++[x]} [pos l] x=5 /\ l->[1;3;5]
[pos l] emp x=5 /\ l->[1;3;5]/\true
*)
| null | https://raw.githubusercontent.com/songyahui/AlgebraicEffect/27688952b598a101a27523be796e8011d70b02de/src/sp_tests/5_sequence_of_elements.ml | ocaml | @ requires (true, _^* ,()) @
@ ensures (true, Produce(3).Produce(5),()) @
@ requires (true, emp,()) @
@ ensures (true, {l->[1]}.[pos (l)].{l->l++[3]}.[pos (l)].{l->l++[5]},()) @ | effect Produce : int -> unit
let rec string_of_int_list li : string =
match li with
| [] -> ""
| x :: xs -> string_of_int x ^ "; " ^ string_of_int_list xs
let rec pos li : bool =
match li with
| [] -> true
| x :: xs -> if x <= 0 then false else pos xs
let producer ()
=
perform (Produce 3);
perform (Produce 5)
let main
=
let l = Sys.opaque_identity (ref [1]) in
print_string (string_of_int_list !l ^ "\n");
match producer () with
| v -> ()
| effect (Produce x) k ->
assert (pos !l);
l := List.append (!l) [x];
print_string (string_of_int_list !l ^ "\n");
(continue k ())
For main :
{ l->[1]}.Produce(3).Produce(5 )
currenct effects continuation k stack /\ heap /\ assertion
--------------------------------------------------------------------------------
{ l->[1 ] } Produce(3).Produce(5 ) l->[1 ]
Produce(3 ) [ pos l].{l->l++[x]}.[pos ) x=3 /\ l->[1 ]
[ pos l ] { l->l++[x]}.[pos ) x=3 /\ l->[1 ] /\ true
{ l->l++[x ] } [ pos l].Produce(5 ) x=3 /\ l->[1;3 ]
[ pos l ] Produce(5 ) x=3 /\ l->[1;3 ] /\ true
Produce(5 ) [ pos l].{l->l++[x]}.[pos l ] x=5 /\ l->[1;3 ]
[ pos l ] { l->l++[x]}.[pos l ] x=5 /\ l->[1;3 ] /\ true
{ l->l++[x ] } [ pos l ] x=5 /\ l->[1;3;5 ]
[ pos l ] emp x=5 /\ l->[1;3;5]/\true
For main:
{l->[1]}.Produce(3).Produce(5)
currenct effects continuation k stack /\ heap /\ assertion
--------------------------------------------------------------------------------
{l->[1]} Produce(3).Produce(5) l->[1]
Produce(3) [pos l].{l->l++[x]}.[pos l].Produce(5) x=3 /\ l->[1]
[pos l] {l->l++[x]}.[pos l].Produce(5) x=3 /\ l->[1] /\ true
{l->l++[x]} [pos l].Produce(5) x=3 /\ l->[1;3]
[pos l] Produce(5) x=3 /\ l->[1;3] /\ true
Produce(5) [pos l].{l->l++[x]}.[pos l] x=5 /\ l->[1;3]
[pos l] {l->l++[x]}.[pos l] x=5 /\ l->[1;3] /\ true
{l->l++[x]} [pos l] x=5 /\ l->[1;3;5]
[pos l] emp x=5 /\ l->[1;3;5]/\true
*)
|
59eb2c5fd86e7c46a46fda33c3b4341cf19b59c40ce3be9c83ad885755743033 | openworkload/swm-core | wm_user.erl | -module(wm_user).
-behaviour(gen_server).
-export([start_link/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-include("../../lib/wm_log.hrl").
-include("../../lib/wm_entity.hrl").
-include("../../../include/wm_scheduler.hrl").
-record(mstate, {spool = "" :: string}).
%% ============================================================================
%% API
%% ============================================================================
-spec start_link([term()]) -> {ok, pid()}.
start_link(Args) ->
gen_server:start_link({local, ?MODULE}, ?MODULE, Args, []).
%% ============================================================================
%% Callbacks
%% ============================================================================
-spec init(term()) -> {ok, term()} | {ok, term(), hibernate | infinity | non_neg_integer()} | {stop, term()} | ignore.
-spec handle_call(term(), term(), term()) ->
{reply, term(), term()} |
{reply, term(), term(), hibernate | infinity | non_neg_integer()} |
{noreply, term()} |
{noreply, term(), hibernate | infinity | non_neg_integer()} |
{stop, term(), term()} |
{stop, term(), term(), term()}.
-spec handle_cast(term(), term()) ->
{noreply, term()} |
{noreply, term(), hibernate | infinity | non_neg_integer()} |
{stop, term(), term()}.
-spec handle_info(term(), term()) ->
{noreply, term()} |
{noreply, term(), hibernate | infinity | non_neg_integer()} |
{stop, term(), term()}.
-spec terminate(term(), term()) -> ok.
-spec code_change(term(), term(), term()) -> {ok, term()}.
init(Args) ->
?LOG_INFO("Load user management service"),
process_flag(trap_exit, true),
wm_event:subscribe(http_started, node(), ?MODULE),
MState = parse_args(Args, #mstate{}),
{ok, MState}.
handle_call({show, JIDs}, _From, MState) ->
{reply, handle_request(show, JIDs, MState), MState};
handle_call({requeue, JIDs}, _From, MState) ->
{reply, handle_request(requeue, JIDs, MState), MState};
handle_call({cancel, JIDs}, _From, MState) ->
{reply, handle_request(cancel, JIDs, MState), MState};
handle_call({submit, JobScriptContent, Filename, Username}, _From, MState) ->
{reply, handle_request(submit, {JobScriptContent, Filename, Username}, MState), MState};
handle_call({list, TabList}, _From, MState) ->
{reply, handle_request(list, TabList, MState), MState};
handle_call({list, TabList, Limit}, _From, MState) ->
{reply, handle_request(list, {TabList, Limit}, MState), MState};
handle_call({stdout, JobId}, _From, MState) ->
{reply, handle_request({output, job_stdout}, JobId, MState), MState};
handle_call({stderr, JobId}, _From, MState) ->
{reply, handle_request({output, job_stderr}, JobId, MState), MState};
handle_call(Msg, From, MState) ->
?LOG_DEBUG("Unknown call message from ~p: ~p", [From, Msg]),
{reply, ok, MState}.
handle_cast({event, EventType, EventData}, MState) ->
handle_event(EventType, EventData),
{noreply, MState};
handle_cast(Msg, MState) ->
?LOG_DEBUG("Unknown cast message: ~p", [Msg]),
{noreply, MState}.
terminate(Reason, _) ->
wm_utils:terminate_msg(?MODULE, Reason),
wm_tcp_server:terminate(Reason, ?MODULE).
handle_info(_Info, Data) ->
{noreply, Data}.
code_change(_OldVsn, Data, _Extra) ->
{ok, Data}.
%% ============================================================================
%% Implementation functions
%% ============================================================================
parse_args([], #mstate{} = MState) ->
MState;
parse_args([{spool, Spool} | T], #mstate{} = MState) ->
parse_args(T, MState#mstate{spool = Spool});
parse_args([{_, _} | T], MState) ->
parse_args(T, MState).
handle_event(http_started, _) ->
?LOG_INFO("Initialize user REST API resources"),
wm_http:add_route({api, wm_user_rest}, "/user"),
wm_http:add_route({api, wm_user_rest}, "/user/node"),
wm_http:add_route({api, wm_user_rest}, "/user/flavor"),
wm_http:add_route({api, wm_user_rest}, "/user/remote"),
wm_http:add_route({api, wm_user_rest}, "/user/job"),
wm_http:add_route({api, wm_user_rest}, "/user/job/:id"),
wm_http:add_route({api, wm_user_rest}, "/user/job/:id/stdout"),
wm_http:add_route({api, wm_user_rest}, "/user/job/:id/stderr").
-spec handle_request(atom(), any(), #mstate{}) -> any().
handle_request({output, OutputType}, JobId, #mstate{spool = Spool}) ->
?LOG_DEBUG("Job ~p has been requested: ~p", [OutputType, JobId]),
case wm_conf:select(job, {id, JobId}) of
{ok, Job} ->
FileName = wm_entity:get(OutputType, Job),
FullPath = filename:join([Spool, "output", JobId, FileName]),
wm_utils:read_file(FullPath, [binary]);
_ ->
{error, "job not found"}
end;
handle_request(submit, Args, #mstate{spool = Spool}) ->
?LOG_DEBUG("Job submission has been requested: ~n~p", [Args]),
{JobScriptContent, Filename, Username} = Args,
case wm_conf:select(user, {name, Username}) of
{error, not_found} ->
?LOG_ERROR("User ~p not found, job submission failed", [Username]),
R = io_lib:format("User ~p is not registred in the workload manager", [Username]),
{string, [R]};
{ok, User} ->
TODO verify user credentials using provided certificate
JobId = wm_utils:uuid(v4),
Cluster = wm_topology:get_subdiv(cluster),
Job1 = wm_jobscript:parse(JobScriptContent),
Job2 =
wm_entity:set([{cluster_id, wm_entity:get(id, Cluster)},
{state, ?JOB_STATE_QUEUED},
{execution_path, Filename},
{script_content, JobScriptContent},
{user_id, wm_entity:get(id, User)},
{id, JobId},
{job_stdout, "stdout.log"},
{job_stderr, "stderr.log"},
{submit_time, wm_utils:now_iso8601(without_ms)},
{duration, 3600}],
Job1),
Job3 = set_defaults(Job2, Spool),
Job4 = ensure_request_is_full(Job3),
1 = wm_conf:update(Job4),
{string, JobId}
end;
handle_request(requeue, Args, _) ->
?LOG_DEBUG("Jobs requeue has been requested: ~p", [Args]),
Results = requeue_jobs(Args, []),
RequeuedFiltered =
lists:filter(fun ({requeued, _}) ->
true;
(_) ->
false
end,
Results),
RequeuedIds = lists:map(fun({_, ID}) -> ID end, RequeuedFiltered),
NotFoundFiltered =
lists:filter(fun ({not_found, _}) ->
true;
(_) ->
false
end,
Results),
NotFoundIds = lists:map(fun({_, ID}) -> ID end, NotFoundFiltered),
Msg = "Requeued: " ++ lists:join(", ", RequeuedIds) ++ "\n" ++ "Not found: " ++ lists:join(", ", NotFoundIds),
{string, Msg};
handle_request(cancel, Args, _) ->
?LOG_DEBUG("Jobs cancellation has been requested: ~p", [Args]),
Results = cancel_jobs(Args, []),
CancelledFiltered =
lists:filter(fun ({cancelled, _}) ->
true;
(_) ->
false
end,
Results),
CancelledIds = lists:map(fun({_, ID}) -> ID end, CancelledFiltered),
NotFoundFiltered =
lists:filter(fun ({not_found, _}) ->
true;
(_) ->
false
end,
Results),
NotFoundIds = lists:map(fun({_, ID}) -> ID end, NotFoundFiltered),
Msg = "Cancelled: " ++ lists:join(", ", CancelledIds) ++ "\n" ++ "Not found: " ++ lists:join(", ", NotFoundIds),
{string, Msg};
handle_request(list, {[flavor], Limit}, _) ->
Nodes = wm_conf:select(node, {all, Limit}),
lists:filter(fun(X) -> wm_entity:get(is_template, X) == true end, Nodes);
handle_request(list, {Args, Limit}, _) ->
?LOG_DEBUG("List of ~p entities with limit ~p has been requested", [Args, Limit]),
F = fun(X) -> wm_conf:select(X, {all, Limit}) end,
lists:flatten([F(X) || X <- Args]);
handle_request(list, Args, _) ->
?LOG_DEBUG("List of ~p entities has been requested", [Args]),
F = fun(X) -> wm_conf:select(X, all) end,
lists:flatten([F(X) || X <- Args]);
handle_request(show, Args, _) ->
?LOG_DEBUG("Job show has been requested: ~p", [Args]),
wm_conf:select(job, Args).
-spec ensure_request_is_full(#job{}) -> #job{}.
ensure_request_is_full(Job) ->
ResourcesOld = wm_entity:get(request, Job),
ResourcesNew = add_missed_mandatory_request_resources(ResourcesOld),
wm_entity:set({request, ResourcesNew}, Job).
-spec add_missed_mandatory_request_resources([#resource{}]) -> [#resource{}].
add_missed_mandatory_request_resources(Resources) ->
Names = lists:foldl(fun(R, Acc) -> [wm_entity:get(name, R) | Acc] end, [], Resources),
AddIfMissed =
fun(Name, ResList, AddFun) ->
case lists:member(Name, Names) of
false ->
[AddFun() | ResList];
true ->
ResList
end
end,
Resources2 =
AddIfMissed("node",
Resources,
fun() ->
ResNode1 = wm_entity:new(resource),
ResNode2 = wm_entity:set({name, "node"}, ResNode1),
wm_entity:set({count, 1}, ResNode2)
end),
Resources3 =
AddIfMissed("cpus",
Resources2,
fun() ->
ResCpu1 = wm_entity:new(resource),
ResCpu2 = wm_entity:set({name, "cpus"}, ResCpu1),
wm_entity:set({count, 1}, ResCpu2)
end),
Resources3.
-spec requeue_jobs([job_id()], [{atom(), job_id()}]) -> [{atom(), job_id()}].
requeue_jobs([], Results) ->
Results;
requeue_jobs([JobId | T], Results) ->
Result =
case wm_conf:select(job, {id, JobId}) of
{ok, Job} ->
UpdatedJob = wm_entity:set({state, ?JOB_STATE_QUEUED}, Job),
1 = wm_conf:update([UpdatedJob]),
{requeued, JobId};
_ ->
{not_found, JobId}
end,
requeue_jobs(T, [Result | Results]).
cancel_jobs([], Results) ->
Results;
cancel_jobs([JobId | T], Results) ->
Result =
case wm_conf:select(job, {id, JobId}) of
{ok, Job} ->
UpdatedJob = wm_entity:set({state, ?JOB_STATE_CANCELLED}, Job),
1 = wm_conf:update([UpdatedJob]),
Process = wm_entity:set([{state, ?JOB_STATE_CANCELLED}], wm_entity:new(process)),
EndTime = wm_utils:now_iso8601(without_ms),
wm_event:announce(job_cancelled, {JobId, Process, EndTime, node()}),
{cancelled, JobId};
_ ->
{not_found, JobId}
end,
cancel_jobs(T, [Result | Results]).
-spec set_defaults(#job{}, string()) -> #job{}.
set_defaults(#job{workdir = [], id = JobId} = Job, Spool) ->
set_defaults(wm_entity:set({workdir, Spool ++ "/output/" ++ JobId}, Job), Spool);
set_defaults(#job{account_id = [], user_id = UserId} = Job, Spool) ->
% If account is not specified by user during job submission then use the user's main account
AccountId =
case wm_conf:select(account, {admins, [UserId]}) of
{ok, Accounts} when is_list(Accounts) ->
%TODO Handle case: multiple accounts are administrated by same user
wm_entity:get(id, hd(Accounts));
{ok, Account} ->
wm_entity:get(id, Account)
end,
set_defaults(wm_entity:set({account_id, AccountId}, Job), Spool);
set_defaults(Job, _) ->
Job.
| null | https://raw.githubusercontent.com/openworkload/swm-core/250facc707739010715cd9f1605b0dda817ab05c/src/srv/user/wm_user.erl | erlang | ============================================================================
API
============================================================================
============================================================================
Callbacks
============================================================================
============================================================================
Implementation functions
============================================================================
If account is not specified by user during job submission then use the user's main account
TODO Handle case: multiple accounts are administrated by same user | -module(wm_user).
-behaviour(gen_server).
-export([start_link/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-include("../../lib/wm_log.hrl").
-include("../../lib/wm_entity.hrl").
-include("../../../include/wm_scheduler.hrl").
-record(mstate, {spool = "" :: string}).
-spec start_link([term()]) -> {ok, pid()}.
start_link(Args) ->
gen_server:start_link({local, ?MODULE}, ?MODULE, Args, []).
-spec init(term()) -> {ok, term()} | {ok, term(), hibernate | infinity | non_neg_integer()} | {stop, term()} | ignore.
-spec handle_call(term(), term(), term()) ->
{reply, term(), term()} |
{reply, term(), term(), hibernate | infinity | non_neg_integer()} |
{noreply, term()} |
{noreply, term(), hibernate | infinity | non_neg_integer()} |
{stop, term(), term()} |
{stop, term(), term(), term()}.
-spec handle_cast(term(), term()) ->
{noreply, term()} |
{noreply, term(), hibernate | infinity | non_neg_integer()} |
{stop, term(), term()}.
-spec handle_info(term(), term()) ->
{noreply, term()} |
{noreply, term(), hibernate | infinity | non_neg_integer()} |
{stop, term(), term()}.
-spec terminate(term(), term()) -> ok.
-spec code_change(term(), term(), term()) -> {ok, term()}.
init(Args) ->
?LOG_INFO("Load user management service"),
process_flag(trap_exit, true),
wm_event:subscribe(http_started, node(), ?MODULE),
MState = parse_args(Args, #mstate{}),
{ok, MState}.
handle_call({show, JIDs}, _From, MState) ->
{reply, handle_request(show, JIDs, MState), MState};
handle_call({requeue, JIDs}, _From, MState) ->
{reply, handle_request(requeue, JIDs, MState), MState};
handle_call({cancel, JIDs}, _From, MState) ->
{reply, handle_request(cancel, JIDs, MState), MState};
handle_call({submit, JobScriptContent, Filename, Username}, _From, MState) ->
{reply, handle_request(submit, {JobScriptContent, Filename, Username}, MState), MState};
handle_call({list, TabList}, _From, MState) ->
{reply, handle_request(list, TabList, MState), MState};
handle_call({list, TabList, Limit}, _From, MState) ->
{reply, handle_request(list, {TabList, Limit}, MState), MState};
handle_call({stdout, JobId}, _From, MState) ->
{reply, handle_request({output, job_stdout}, JobId, MState), MState};
handle_call({stderr, JobId}, _From, MState) ->
{reply, handle_request({output, job_stderr}, JobId, MState), MState};
handle_call(Msg, From, MState) ->
?LOG_DEBUG("Unknown call message from ~p: ~p", [From, Msg]),
{reply, ok, MState}.
handle_cast({event, EventType, EventData}, MState) ->
handle_event(EventType, EventData),
{noreply, MState};
handle_cast(Msg, MState) ->
?LOG_DEBUG("Unknown cast message: ~p", [Msg]),
{noreply, MState}.
terminate(Reason, _) ->
wm_utils:terminate_msg(?MODULE, Reason),
wm_tcp_server:terminate(Reason, ?MODULE).
handle_info(_Info, Data) ->
{noreply, Data}.
code_change(_OldVsn, Data, _Extra) ->
{ok, Data}.
parse_args([], #mstate{} = MState) ->
MState;
parse_args([{spool, Spool} | T], #mstate{} = MState) ->
parse_args(T, MState#mstate{spool = Spool});
parse_args([{_, _} | T], MState) ->
parse_args(T, MState).
handle_event(http_started, _) ->
?LOG_INFO("Initialize user REST API resources"),
wm_http:add_route({api, wm_user_rest}, "/user"),
wm_http:add_route({api, wm_user_rest}, "/user/node"),
wm_http:add_route({api, wm_user_rest}, "/user/flavor"),
wm_http:add_route({api, wm_user_rest}, "/user/remote"),
wm_http:add_route({api, wm_user_rest}, "/user/job"),
wm_http:add_route({api, wm_user_rest}, "/user/job/:id"),
wm_http:add_route({api, wm_user_rest}, "/user/job/:id/stdout"),
wm_http:add_route({api, wm_user_rest}, "/user/job/:id/stderr").
-spec handle_request(atom(), any(), #mstate{}) -> any().
handle_request({output, OutputType}, JobId, #mstate{spool = Spool}) ->
?LOG_DEBUG("Job ~p has been requested: ~p", [OutputType, JobId]),
case wm_conf:select(job, {id, JobId}) of
{ok, Job} ->
FileName = wm_entity:get(OutputType, Job),
FullPath = filename:join([Spool, "output", JobId, FileName]),
wm_utils:read_file(FullPath, [binary]);
_ ->
{error, "job not found"}
end;
handle_request(submit, Args, #mstate{spool = Spool}) ->
?LOG_DEBUG("Job submission has been requested: ~n~p", [Args]),
{JobScriptContent, Filename, Username} = Args,
case wm_conf:select(user, {name, Username}) of
{error, not_found} ->
?LOG_ERROR("User ~p not found, job submission failed", [Username]),
R = io_lib:format("User ~p is not registred in the workload manager", [Username]),
{string, [R]};
{ok, User} ->
TODO verify user credentials using provided certificate
JobId = wm_utils:uuid(v4),
Cluster = wm_topology:get_subdiv(cluster),
Job1 = wm_jobscript:parse(JobScriptContent),
Job2 =
wm_entity:set([{cluster_id, wm_entity:get(id, Cluster)},
{state, ?JOB_STATE_QUEUED},
{execution_path, Filename},
{script_content, JobScriptContent},
{user_id, wm_entity:get(id, User)},
{id, JobId},
{job_stdout, "stdout.log"},
{job_stderr, "stderr.log"},
{submit_time, wm_utils:now_iso8601(without_ms)},
{duration, 3600}],
Job1),
Job3 = set_defaults(Job2, Spool),
Job4 = ensure_request_is_full(Job3),
1 = wm_conf:update(Job4),
{string, JobId}
end;
handle_request(requeue, Args, _) ->
?LOG_DEBUG("Jobs requeue has been requested: ~p", [Args]),
Results = requeue_jobs(Args, []),
RequeuedFiltered =
lists:filter(fun ({requeued, _}) ->
true;
(_) ->
false
end,
Results),
RequeuedIds = lists:map(fun({_, ID}) -> ID end, RequeuedFiltered),
NotFoundFiltered =
lists:filter(fun ({not_found, _}) ->
true;
(_) ->
false
end,
Results),
NotFoundIds = lists:map(fun({_, ID}) -> ID end, NotFoundFiltered),
Msg = "Requeued: " ++ lists:join(", ", RequeuedIds) ++ "\n" ++ "Not found: " ++ lists:join(", ", NotFoundIds),
{string, Msg};
handle_request(cancel, Args, _) ->
?LOG_DEBUG("Jobs cancellation has been requested: ~p", [Args]),
Results = cancel_jobs(Args, []),
CancelledFiltered =
lists:filter(fun ({cancelled, _}) ->
true;
(_) ->
false
end,
Results),
CancelledIds = lists:map(fun({_, ID}) -> ID end, CancelledFiltered),
NotFoundFiltered =
lists:filter(fun ({not_found, _}) ->
true;
(_) ->
false
end,
Results),
NotFoundIds = lists:map(fun({_, ID}) -> ID end, NotFoundFiltered),
Msg = "Cancelled: " ++ lists:join(", ", CancelledIds) ++ "\n" ++ "Not found: " ++ lists:join(", ", NotFoundIds),
{string, Msg};
handle_request(list, {[flavor], Limit}, _) ->
Nodes = wm_conf:select(node, {all, Limit}),
lists:filter(fun(X) -> wm_entity:get(is_template, X) == true end, Nodes);
handle_request(list, {Args, Limit}, _) ->
?LOG_DEBUG("List of ~p entities with limit ~p has been requested", [Args, Limit]),
F = fun(X) -> wm_conf:select(X, {all, Limit}) end,
lists:flatten([F(X) || X <- Args]);
handle_request(list, Args, _) ->
?LOG_DEBUG("List of ~p entities has been requested", [Args]),
F = fun(X) -> wm_conf:select(X, all) end,
lists:flatten([F(X) || X <- Args]);
handle_request(show, Args, _) ->
?LOG_DEBUG("Job show has been requested: ~p", [Args]),
wm_conf:select(job, Args).
-spec ensure_request_is_full(#job{}) -> #job{}.
ensure_request_is_full(Job) ->
ResourcesOld = wm_entity:get(request, Job),
ResourcesNew = add_missed_mandatory_request_resources(ResourcesOld),
wm_entity:set({request, ResourcesNew}, Job).
-spec add_missed_mandatory_request_resources([#resource{}]) -> [#resource{}].
add_missed_mandatory_request_resources(Resources) ->
Names = lists:foldl(fun(R, Acc) -> [wm_entity:get(name, R) | Acc] end, [], Resources),
AddIfMissed =
fun(Name, ResList, AddFun) ->
case lists:member(Name, Names) of
false ->
[AddFun() | ResList];
true ->
ResList
end
end,
Resources2 =
AddIfMissed("node",
Resources,
fun() ->
ResNode1 = wm_entity:new(resource),
ResNode2 = wm_entity:set({name, "node"}, ResNode1),
wm_entity:set({count, 1}, ResNode2)
end),
Resources3 =
AddIfMissed("cpus",
Resources2,
fun() ->
ResCpu1 = wm_entity:new(resource),
ResCpu2 = wm_entity:set({name, "cpus"}, ResCpu1),
wm_entity:set({count, 1}, ResCpu2)
end),
Resources3.
-spec requeue_jobs([job_id()], [{atom(), job_id()}]) -> [{atom(), job_id()}].
requeue_jobs([], Results) ->
Results;
requeue_jobs([JobId | T], Results) ->
Result =
case wm_conf:select(job, {id, JobId}) of
{ok, Job} ->
UpdatedJob = wm_entity:set({state, ?JOB_STATE_QUEUED}, Job),
1 = wm_conf:update([UpdatedJob]),
{requeued, JobId};
_ ->
{not_found, JobId}
end,
requeue_jobs(T, [Result | Results]).
cancel_jobs([], Results) ->
Results;
cancel_jobs([JobId | T], Results) ->
Result =
case wm_conf:select(job, {id, JobId}) of
{ok, Job} ->
UpdatedJob = wm_entity:set({state, ?JOB_STATE_CANCELLED}, Job),
1 = wm_conf:update([UpdatedJob]),
Process = wm_entity:set([{state, ?JOB_STATE_CANCELLED}], wm_entity:new(process)),
EndTime = wm_utils:now_iso8601(without_ms),
wm_event:announce(job_cancelled, {JobId, Process, EndTime, node()}),
{cancelled, JobId};
_ ->
{not_found, JobId}
end,
cancel_jobs(T, [Result | Results]).
-spec set_defaults(#job{}, string()) -> #job{}.
set_defaults(#job{workdir = [], id = JobId} = Job, Spool) ->
set_defaults(wm_entity:set({workdir, Spool ++ "/output/" ++ JobId}, Job), Spool);
set_defaults(#job{account_id = [], user_id = UserId} = Job, Spool) ->
AccountId =
case wm_conf:select(account, {admins, [UserId]}) of
{ok, Accounts} when is_list(Accounts) ->
wm_entity:get(id, hd(Accounts));
{ok, Account} ->
wm_entity:get(id, Account)
end,
set_defaults(wm_entity:set({account_id, AccountId}, Job), Spool);
set_defaults(Job, _) ->
Job.
|
20e3ef1c3d3196a6ee29caffce34d68b099b2ff995228da4707a3a6f58ab2ac8 | inhabitedtype/ocaml-aws | deleteCustomAvailabilityZone.ml | open Types
open Aws
type input = DeleteCustomAvailabilityZoneMessage.t
type output = DeleteCustomAvailabilityZoneResult.t
type error = Errors_internal.t
let service = "rds"
let signature_version = Request.V4
let to_http service region req =
let uri =
Uri.add_query_params
(Uri.of_string (Aws.Util.of_option_exn (Endpoints.url_of service region)))
(List.append
[ "Version", [ "2014-10-31" ]; "Action", [ "DeleteCustomAvailabilityZone" ] ]
(Util.drop_empty
(Uri.query_of_encoded
(Query.render (DeleteCustomAvailabilityZoneMessage.to_query req)))))
in
`POST, uri, []
let of_http body =
try
let xml = Ezxmlm.from_string body in
let resp =
Util.option_bind
(Xml.member "DeleteCustomAvailabilityZoneResponse" (snd xml))
(Xml.member "DeleteCustomAvailabilityZoneResult")
in
try
Util.or_error
(Util.option_bind resp DeleteCustomAvailabilityZoneResult.parse)
(let open Error in
BadResponse
{ body
; message = "Could not find well formed DeleteCustomAvailabilityZoneResult."
})
with Xml.RequiredFieldMissing msg ->
let open Error in
`Error
(BadResponse
{ body
; message =
"Error parsing DeleteCustomAvailabilityZoneResult - missing field in body \
or children: "
^ msg
})
with Failure msg ->
`Error
(let open Error in
BadResponse { body; message = "Error parsing xml: " ^ msg })
let parse_error code err =
let errors = [] @ Errors_internal.common in
match Errors_internal.of_string err with
| Some var ->
if List.mem var errors
&&
match Errors_internal.to_http_code var with
| Some var -> var = code
| None -> true
then Some var
else None
| None -> None
| null | https://raw.githubusercontent.com/inhabitedtype/ocaml-aws/b6d5554c5d201202b5de8d0b0253871f7b66dab6/libraries/rds/lib/deleteCustomAvailabilityZone.ml | ocaml | open Types
open Aws
type input = DeleteCustomAvailabilityZoneMessage.t
type output = DeleteCustomAvailabilityZoneResult.t
type error = Errors_internal.t
let service = "rds"
let signature_version = Request.V4
let to_http service region req =
let uri =
Uri.add_query_params
(Uri.of_string (Aws.Util.of_option_exn (Endpoints.url_of service region)))
(List.append
[ "Version", [ "2014-10-31" ]; "Action", [ "DeleteCustomAvailabilityZone" ] ]
(Util.drop_empty
(Uri.query_of_encoded
(Query.render (DeleteCustomAvailabilityZoneMessage.to_query req)))))
in
`POST, uri, []
let of_http body =
try
let xml = Ezxmlm.from_string body in
let resp =
Util.option_bind
(Xml.member "DeleteCustomAvailabilityZoneResponse" (snd xml))
(Xml.member "DeleteCustomAvailabilityZoneResult")
in
try
Util.or_error
(Util.option_bind resp DeleteCustomAvailabilityZoneResult.parse)
(let open Error in
BadResponse
{ body
; message = "Could not find well formed DeleteCustomAvailabilityZoneResult."
})
with Xml.RequiredFieldMissing msg ->
let open Error in
`Error
(BadResponse
{ body
; message =
"Error parsing DeleteCustomAvailabilityZoneResult - missing field in body \
or children: "
^ msg
})
with Failure msg ->
`Error
(let open Error in
BadResponse { body; message = "Error parsing xml: " ^ msg })
let parse_error code err =
let errors = [] @ Errors_internal.common in
match Errors_internal.of_string err with
| Some var ->
if List.mem var errors
&&
match Errors_internal.to_http_code var with
| Some var -> var = code
| None -> true
then Some var
else None
| None -> None
| |
37858549ce00ed33a7116ff8e7d1f3f8ea554ec0ca24fbd755579c9a5a059e74 | erlymon/erlymon | em_tk103_protocol.erl | %%%-------------------------------------------------------------------
@author
( C ) 2015 , < >
%%% @doc
Erlymon is an open source GPS tracking system for various GPS tracking devices .
%%%
Copyright ( C ) 2015 , < > .
%%%
This file is part of Erlymon .
%%%
Erlymon is free software : you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License , version 3 ,
as published by the Free Software Foundation .
%%%
Erlymon is distributed in the hope that it will be useful ,
%%% but WITHOUT ANY WARRANTY; without even the implied warranty of
%%% MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details .
%%%
You should have received a copy of the GNU Affero General Public License
%%% along with this program. If not, see </>.
%%% @end
%%%-------------------------------------------------------------------
-module(em_tk103_protocol).
-author("Sergey Penkovsky <>").
-behaviour(ranch_protocol).
-behaviour(gen_server).
-include("em_hardware.hrl").
-include("em_records.hrl").
%% API
-export([start_link/4]).
%% gen_server callbacks
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-define(TIMEOUT, infinity).
-define(SERVER, ?MODULE).
-define(PATTERN, list_to_binary([
"(\\d+)(,)?" ++ %% device id
"(.{4}),?" ++ %% command
"(\\d*)" ++ %% imei?
"(\\d{2})(\\d{2})(\\d{2}),?" ++ %% date
"([AV]),?" ++ %% validity
"(\\d{2})(\\d{2}\\.\\d+)" ++ %% latitude
"([NS]),?" ++
"(\\d{3})(\\d{2}\\.\\d+)" ++ %% longitude
"([EW]),?" ++
"(\\d+\\.\\d)(?:\\d*,)?" ++ %% speed
"(\\d{2})(\\d{2})(\\d{2}),?" ++ %% time
"(\\d+\\.?\\d{1,2}),?" ++ %% course
"(?:([01]{8})|([0-9a-fA-F]{8}))?,?" ++ %% state
"(?:L([0-9a-fA-F]+))?" ++ %% odometer
".*(?:\\)" ++
")?"
])).
-define(PATTERN_BATTERY, list_to_binary([
"(\\d+)," ++ %% device id
"ZC20," ++
"(\\d{2})(\\d{2})(\\d{2})," ++ %% date (ddmmyy)
"(\\d{2})(\\d{2})(\\d{2})," ++ %% time
"\\d+," ++ %% battery level
"(\\d+)," ++ %% battery voltage
"(\\d+)," ++ %% power voltage
"\\d+" %% installed
])).
-define(PATTERN_NETWORK, list_to_binary([
"(\\d{12})" ++ %% device id
"BZ00," ++
"(\\d+)," ++ %% mcc
"(\\d+)," ++ %% mnc
"([0-9a-fA-F]+)," ++ %% lac
"([0-9a-fA-F]+)," ++ %% cid
".*"
])).
-define(SOCKET_OPTS, [{active, once}, {packet, line}, {line_delimiter, $)}]).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the server
%%
%% @end
%%--------------------------------------------------------------------
-spec(start_link(Ref :: any(), Socket :: any(), Transport :: any(), Opts :: any()) ->
{ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link(Ref, Socket, Transport, Opts) ->
{ok, proc_lib:spawn_link(?MODULE, init, [{Ref, Socket, Transport, Opts}])}.
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
@private
%% @doc
%% Initializes the server
%%
) - > { ok , State } |
{ ok , State , Timeout } |
%% ignore |
%% {stop, Reason}
%% @end
%%--------------------------------------------------------------------
-spec(init(Args :: term()) ->
{ok, State :: #state{}} | {ok, State :: #state{}, timeout() | hibernate} |
{stop, Reason :: term()} | ignore).
init({Ref, Socket, Transport, Opts}) ->
ok = ranch:accept_ack(Ref),
ok = Transport:setopts(Socket, ?SOCKET_OPTS),
Protocol = proplists:get_value(protocol, Opts),
gen_server:enter_loop(?MODULE, [],
#state{protocol = Protocol, socket = Socket, transport = Transport}, ?TIMEOUT).
%%{ok, #state{}}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling call messages
%%
%% @end
%%--------------------------------------------------------------------
-spec(handle_call(Request :: term(), From :: {pid(), Tag :: term()},
State :: #state{}) ->
{reply, Reply :: term(), NewState :: #state{}} |
{reply, Reply :: term(), NewState :: #state{}, timeout() | hibernate} |
{noreply, NewState :: #state{}} |
{noreply, NewState :: #state{}, timeout() | hibernate} |
{stop, Reason :: term(), Reply :: term(), NewState :: #state{}} |
{stop, Reason :: term(), NewState :: #state{}}).
handle_call(_Request, _From, State) ->
{reply, ok, State}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling cast messages
%%
%% @end
%%--------------------------------------------------------------------
-spec(handle_cast(Request :: term(), State :: #state{}) ->
{noreply, NewState :: #state{}} |
{noreply, NewState :: #state{}, timeout() | hibernate} |
{stop, Reason :: term(), NewState :: #state{}}).
handle_cast(_Request, State) ->
{noreply, State}.
%%--------------------------------------------------------------------
@private
%% @doc
%% Handling all non call/cast messages
%%
, State ) - > { noreply , State } |
{ noreply , State , Timeout } |
%% {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
-spec(handle_info(Info :: timeout() | term(), State :: #state{}) ->
{noreply, NewState :: #state{}} |
{noreply, NewState :: #state{}, timeout() | hibernate} |
{stop, Reason :: term(), NewState :: #state{}}).
handle_info({command, Command}, State) ->
do_execute_command(State, Command);
handle_info({tcp, _, <<67,78,88,78,0,0,0,1,0,0,4,0,27,0,0,0,77,10>>}, State = #state{socket = Socket, transport = Transport}) ->
Transport:setopts(Socket, ?SOCKET_OPTS),
{noreply, State, ?TIMEOUT};
handle_info({tcp, Socket, Data}, State = #state{socket = Socket, transport = Transport}) ->
Transport:setopts(Socket, ?SOCKET_OPTS),
do_process_data(State, Data);
handle_info({tcp_closed, _Socket}, State) ->
{stop, normal, State};
handle_info({tcp_error, _, Reason}, State) ->
{stop, Reason, State};
handle_info(timeout, State) ->
{stop, normal, State};
handle_info(_Info, State) ->
{stop, normal, State}.
handle_info(_Info , State ) - >
{ noreply , State } .
%%--------------------------------------------------------------------
@private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
with . The return value is ignored .
%%
, State ) - > void ( )
%% @end
%%--------------------------------------------------------------------
-spec(terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
State :: #state{}) -> term()).
terminate(_Reason, _State) ->
ok.
%%--------------------------------------------------------------------
@private
%% @doc
%% Convert process state when code is changed
%%
, State , Extra ) - > { ok , NewState }
%% @end
%%--------------------------------------------------------------------
-spec(code_change(OldVsn :: term() | {down, term()}, State :: #state{},
Extra :: term()) ->
{ok, NewState :: #state{}} | {error, Reason :: term()}).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%%%===================================================================
Internal functions
%%%===================================================================
do_execute_command(State = #state{transport = Transport, socket = Socket}, Command) ->
case em_manager_devices:get_by_id(Command#command.deviceId) of
{ok, Device} ->
case do_encode_command(Device#device.uniqueId, Command) of
{ok, CommandBin} ->
em_logger:info("CommandBin: ~s", [CommandBin]),
Transport:send(Socket, CommandBin),
{noreply, State, ?TIMEOUT};
{error, Reason} ->
em_logger:info("Error: ~s", [Reason]),
{noreply, State, ?TIMEOUT}
end;
{error, Reason} ->
em_logger:info("Error: ~s", [Reason]),
{noreply, State, ?TIMEOUT}
end.
do_encode_command(_UniqueId, _Command) ->
{error, <<"Unsupported command">>}.
do_process_data(State = #state{transport = Transport, socket = Socket, protocol = _Protocol, deviceId = 0}, Data) ->
em_logger:info("[packet] unit: ip = '~s' data: ~s", [em_inet:resolve(Socket), Data]),
case parse(Data) of
{ok, SysDeviceId, Command, Imei, PositionModel} ->
em_logger:info("[packet] unit: ip = '~s' imei = '~s' message: ~w", [em_inet:resolve(Socket), Imei, PositionModel]),
case em_data_manager:get_device_by_uid(Imei) of
{error, _Reason} ->
em_logger:info("[packet] unit: ip = '~s' unknown device with imei = '~s'", [em_inet:resolve(Socket), Imei]),
{stop, normal, State};
{ok, Object} ->
em_proc:registry(Object#device.id, self()),
do_save_position(State#state{deviceId = Object#device.id}, Object#device.id, Imei, PositionModel),
Transport:send(Socket, format_response(SysDeviceId, Command)),
{noreply, State#state{deviceId = Object#device.id}, ?TIMEOUT}
end;
{error, Message} ->
em_logger:info("ERROR: ~s", [Message]),
{stop, normal, State}
end;
do_process_data(State = #state{transport = Transport, socket = Socket, protocol = _Protocol, deviceId = DeviceId}, Data) ->
em_logger:info("[packet] unit: ip = '~s' data: ~s", [em_inet:resolve(Socket), Data]),
case parse(Data) of
{ok, SysDeviceId, Command, Imei, PositionModel} ->
do_save_position(State, DeviceId, Imei, PositionModel),
Transport:send(Socket, format_response(SysDeviceId, Command)),
{noreply, State, ?TIMEOUT};
{error, Message} ->
em_logger:info("ERROR: ~s", [Message]),
{stop, normal, State}
end;
do_process_data(State, _) ->
em_logger : info("ERROR : parsing packet " ) ,
{stop, normal, State}.
do_save_position(State = #state{socket = Socket, protocol = Protocol}, DeviceId, Imei, PositionModel) ->
em_logger:info("[packet] unit: ip = '~s' imei = '~s' message: ~w", [em_inet:resolve(Socket), Imei, PositionModel]),
Position = PositionModel#position{
deviceId = DeviceId,
protocol = atom_to_binary(Protocol, utf8),
attributes = maps:merge(PositionModel#position.attributes, #{
?KEY_IP => em_inet:resolve(Socket)
})
},
em_logger:info("save message => unit: ip = '~s' id = '~w' imei = '~s' position: ~w", [em_inet:resolve(Socket), DeviceId, Imei, Position]),
em_data_manager:create_position(DeviceId, Position),
{noreply, State, ?TIMEOUT}.
echo " 2 . tk103 "
( echo -n -e " ( 123456789012BP05123456789012345120101A6000.0000N13000.0000E000.0120200000.0000000000L000946BB ) " ;) | nc -v localhost 5002
parse(Data) ->
case em_regexp:match(Data, ?PATTERN) of
{ok, [_, DeviceId, _, Command, Imei, Year, Month, Day, Validity, LatDD, LatMM_MMMM, LatType, LonDD, LonMM_MMMM, LonType, Speed, Hour, Minute, Second, Course, _State, _Millage|_]} ->
Position = #position{
deviceTime = parse_date(Year, Month, Day, Hour, Minute, Second),
latitude = parse_coord(LatDD, LatMM_MMMM, LatType),
longitude = parse_coord(LonDD, LonMM_MMMM, LonType),
speed = parse_speed(Speed),
course = parse_course(Course),
valid = parse_validity(Validity)
},
{ok, DeviceId, Command, Imei, Position};
Reason ->
Reason
end.
parse_coord(CoordDD, CoordMM_MMMM, CoordType) ->
Coord = list_to_integer(binary_to_list(CoordDD)) + list_to_float(binary_to_list(CoordMM_MMMM)) / 60,
case CoordType of
<<"S">> ->
Coord * -1;
<<"N">> ->
Coord;
<<"W">> ->
Coord * -1;
<<"E">> ->
Coord
end.
parse_course(Course) ->
list_to_float(binary_to_list(Course)).
parse_speed(Speed) ->
list_to_float(binary_to_list(Speed)).
%%parse_device_id(DeviceId) ->
list_to_integer(binary_to_list(DeviceId ) ) .
parse_validity(<<"A">>) -> true;
parse_validity(_) -> false.
parse_date(Year, Month, Day, Hour, Minute, Second) ->
Date = {
{
list_to_integer(binary_to_list(Year)) + 2000,
list_to_integer(binary_to_list(Month)),
list_to_integer(binary_to_list(Day))
},
{
list_to_integer(binary_to_list(Hour)),
list_to_integer(binary_to_list(Minute)),
list_to_integer(binary_to_list(Second))
}
},
em_helper_time:datetime_to_utc(Date).
format_response(SysDeviceId, <<"BP00">>) ->
<<"(", SysDeviceId/binary, "AP01)">>;
format_response(SysDeviceId, <<"BP05">>) ->
<<"(", SysDeviceId/binary, "AP05)">>.
( 123456789012BP05123456789012345120101A6000.0000N13000.0000E000.0120200000.0000000000L000946BB )
( 123456789012 BP05 123456789012345 120101 A 6000.0000N 13000.0000E 000.0 120200 000.00 00000000 L000946BB )
%%test() ->
Packet = < < " ( 123456789012BP05123456789012345120101A6000.0000N13000.0000E000.0120200000.0000000000L000946BB ) " > > ,
%% em_regexp:match(Packet, ?PATTERN). | null | https://raw.githubusercontent.com/erlymon/erlymon/2250619783d6da1e33a502911a8fa52ce016c094/apps/erlymon/src/em_hardware/protocols/em_tk103_protocol.erl | erlang | -------------------------------------------------------------------
@doc
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
along with this program. If not, see </>.
@end
-------------------------------------------------------------------
API
gen_server callbacks
device id
command
imei?
date
validity
latitude
longitude
speed
time
course
state
odometer
device id
date (ddmmyy)
time
battery level
battery voltage
power voltage
installed
device id
mcc
mnc
lac
cid
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
Starts the server
@end
--------------------------------------------------------------------
===================================================================
gen_server callbacks
===================================================================
--------------------------------------------------------------------
@doc
Initializes the server
ignore |
{stop, Reason}
@end
--------------------------------------------------------------------
{ok, #state{}}.
--------------------------------------------------------------------
@doc
Handling call messages
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling cast messages
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling all non call/cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any
necessary cleaning up. When it returns, the gen_server terminates
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Convert process state when code is changed
@end
--------------------------------------------------------------------
===================================================================
===================================================================
parse_device_id(DeviceId) ->
test() ->
em_regexp:match(Packet, ?PATTERN). | @author
( C ) 2015 , < >
Erlymon is an open source GPS tracking system for various GPS tracking devices .
Copyright ( C ) 2015 , < > .
This file is part of Erlymon .
Erlymon is free software : you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License , version 3 ,
as published by the Free Software Foundation .
Erlymon is distributed in the hope that it will be useful ,
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
-module(em_tk103_protocol).
-author("Sergey Penkovsky <>").
-behaviour(ranch_protocol).
-behaviour(gen_server).
-include("em_hardware.hrl").
-include("em_records.hrl").
-export([start_link/4]).
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-define(TIMEOUT, infinity).
-define(SERVER, ?MODULE).
-define(PATTERN, list_to_binary([
"([NS]),?" ++
"([EW]),?" ++
".*(?:\\)" ++
")?"
])).
-define(PATTERN_BATTERY, list_to_binary([
"ZC20," ++
])).
-define(PATTERN_NETWORK, list_to_binary([
"BZ00," ++
".*"
])).
-define(SOCKET_OPTS, [{active, once}, {packet, line}, {line_delimiter, $)}]).
-spec(start_link(Ref :: any(), Socket :: any(), Transport :: any(), Opts :: any()) ->
{ok, Pid :: pid()} | ignore | {error, Reason :: term()}).
start_link(Ref, Socket, Transport, Opts) ->
{ok, proc_lib:spawn_link(?MODULE, init, [{Ref, Socket, Transport, Opts}])}.
@private
) - > { ok , State } |
{ ok , State , Timeout } |
-spec(init(Args :: term()) ->
{ok, State :: #state{}} | {ok, State :: #state{}, timeout() | hibernate} |
{stop, Reason :: term()} | ignore).
init({Ref, Socket, Transport, Opts}) ->
ok = ranch:accept_ack(Ref),
ok = Transport:setopts(Socket, ?SOCKET_OPTS),
Protocol = proplists:get_value(protocol, Opts),
gen_server:enter_loop(?MODULE, [],
#state{protocol = Protocol, socket = Socket, transport = Transport}, ?TIMEOUT).
@private
-spec(handle_call(Request :: term(), From :: {pid(), Tag :: term()},
State :: #state{}) ->
{reply, Reply :: term(), NewState :: #state{}} |
{reply, Reply :: term(), NewState :: #state{}, timeout() | hibernate} |
{noreply, NewState :: #state{}} |
{noreply, NewState :: #state{}, timeout() | hibernate} |
{stop, Reason :: term(), Reply :: term(), NewState :: #state{}} |
{stop, Reason :: term(), NewState :: #state{}}).
handle_call(_Request, _From, State) ->
{reply, ok, State}.
@private
-spec(handle_cast(Request :: term(), State :: #state{}) ->
{noreply, NewState :: #state{}} |
{noreply, NewState :: #state{}, timeout() | hibernate} |
{stop, Reason :: term(), NewState :: #state{}}).
handle_cast(_Request, State) ->
{noreply, State}.
@private
, State ) - > { noreply , State } |
{ noreply , State , Timeout } |
-spec(handle_info(Info :: timeout() | term(), State :: #state{}) ->
{noreply, NewState :: #state{}} |
{noreply, NewState :: #state{}, timeout() | hibernate} |
{stop, Reason :: term(), NewState :: #state{}}).
handle_info({command, Command}, State) ->
do_execute_command(State, Command);
handle_info({tcp, _, <<67,78,88,78,0,0,0,1,0,0,4,0,27,0,0,0,77,10>>}, State = #state{socket = Socket, transport = Transport}) ->
Transport:setopts(Socket, ?SOCKET_OPTS),
{noreply, State, ?TIMEOUT};
handle_info({tcp, Socket, Data}, State = #state{socket = Socket, transport = Transport}) ->
Transport:setopts(Socket, ?SOCKET_OPTS),
do_process_data(State, Data);
handle_info({tcp_closed, _Socket}, State) ->
{stop, normal, State};
handle_info({tcp_error, _, Reason}, State) ->
{stop, Reason, State};
handle_info(timeout, State) ->
{stop, normal, State};
handle_info(_Info, State) ->
{stop, normal, State}.
handle_info(_Info , State ) - >
{ noreply , State } .
@private
with . The return value is ignored .
, State ) - > void ( )
-spec(terminate(Reason :: (normal | shutdown | {shutdown, term()} | term()),
State :: #state{}) -> term()).
terminate(_Reason, _State) ->
ok.
@private
, State , Extra ) - > { ok , NewState }
-spec(code_change(OldVsn :: term() | {down, term()}, State :: #state{},
Extra :: term()) ->
{ok, NewState :: #state{}} | {error, Reason :: term()}).
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
do_execute_command(State = #state{transport = Transport, socket = Socket}, Command) ->
case em_manager_devices:get_by_id(Command#command.deviceId) of
{ok, Device} ->
case do_encode_command(Device#device.uniqueId, Command) of
{ok, CommandBin} ->
em_logger:info("CommandBin: ~s", [CommandBin]),
Transport:send(Socket, CommandBin),
{noreply, State, ?TIMEOUT};
{error, Reason} ->
em_logger:info("Error: ~s", [Reason]),
{noreply, State, ?TIMEOUT}
end;
{error, Reason} ->
em_logger:info("Error: ~s", [Reason]),
{noreply, State, ?TIMEOUT}
end.
do_encode_command(_UniqueId, _Command) ->
{error, <<"Unsupported command">>}.
do_process_data(State = #state{transport = Transport, socket = Socket, protocol = _Protocol, deviceId = 0}, Data) ->
em_logger:info("[packet] unit: ip = '~s' data: ~s", [em_inet:resolve(Socket), Data]),
case parse(Data) of
{ok, SysDeviceId, Command, Imei, PositionModel} ->
em_logger:info("[packet] unit: ip = '~s' imei = '~s' message: ~w", [em_inet:resolve(Socket), Imei, PositionModel]),
case em_data_manager:get_device_by_uid(Imei) of
{error, _Reason} ->
em_logger:info("[packet] unit: ip = '~s' unknown device with imei = '~s'", [em_inet:resolve(Socket), Imei]),
{stop, normal, State};
{ok, Object} ->
em_proc:registry(Object#device.id, self()),
do_save_position(State#state{deviceId = Object#device.id}, Object#device.id, Imei, PositionModel),
Transport:send(Socket, format_response(SysDeviceId, Command)),
{noreply, State#state{deviceId = Object#device.id}, ?TIMEOUT}
end;
{error, Message} ->
em_logger:info("ERROR: ~s", [Message]),
{stop, normal, State}
end;
do_process_data(State = #state{transport = Transport, socket = Socket, protocol = _Protocol, deviceId = DeviceId}, Data) ->
em_logger:info("[packet] unit: ip = '~s' data: ~s", [em_inet:resolve(Socket), Data]),
case parse(Data) of
{ok, SysDeviceId, Command, Imei, PositionModel} ->
do_save_position(State, DeviceId, Imei, PositionModel),
Transport:send(Socket, format_response(SysDeviceId, Command)),
{noreply, State, ?TIMEOUT};
{error, Message} ->
em_logger:info("ERROR: ~s", [Message]),
{stop, normal, State}
end;
do_process_data(State, _) ->
em_logger : info("ERROR : parsing packet " ) ,
{stop, normal, State}.
do_save_position(State = #state{socket = Socket, protocol = Protocol}, DeviceId, Imei, PositionModel) ->
em_logger:info("[packet] unit: ip = '~s' imei = '~s' message: ~w", [em_inet:resolve(Socket), Imei, PositionModel]),
Position = PositionModel#position{
deviceId = DeviceId,
protocol = atom_to_binary(Protocol, utf8),
attributes = maps:merge(PositionModel#position.attributes, #{
?KEY_IP => em_inet:resolve(Socket)
})
},
em_logger:info("save message => unit: ip = '~s' id = '~w' imei = '~s' position: ~w", [em_inet:resolve(Socket), DeviceId, Imei, Position]),
em_data_manager:create_position(DeviceId, Position),
{noreply, State, ?TIMEOUT}.
echo " 2 . tk103 "
( echo -n -e " ( 123456789012BP05123456789012345120101A6000.0000N13000.0000E000.0120200000.0000000000L000946BB ) " ;) | nc -v localhost 5002
parse(Data) ->
case em_regexp:match(Data, ?PATTERN) of
{ok, [_, DeviceId, _, Command, Imei, Year, Month, Day, Validity, LatDD, LatMM_MMMM, LatType, LonDD, LonMM_MMMM, LonType, Speed, Hour, Minute, Second, Course, _State, _Millage|_]} ->
Position = #position{
deviceTime = parse_date(Year, Month, Day, Hour, Minute, Second),
latitude = parse_coord(LatDD, LatMM_MMMM, LatType),
longitude = parse_coord(LonDD, LonMM_MMMM, LonType),
speed = parse_speed(Speed),
course = parse_course(Course),
valid = parse_validity(Validity)
},
{ok, DeviceId, Command, Imei, Position};
Reason ->
Reason
end.
parse_coord(CoordDD, CoordMM_MMMM, CoordType) ->
Coord = list_to_integer(binary_to_list(CoordDD)) + list_to_float(binary_to_list(CoordMM_MMMM)) / 60,
case CoordType of
<<"S">> ->
Coord * -1;
<<"N">> ->
Coord;
<<"W">> ->
Coord * -1;
<<"E">> ->
Coord
end.
parse_course(Course) ->
list_to_float(binary_to_list(Course)).
parse_speed(Speed) ->
list_to_float(binary_to_list(Speed)).
list_to_integer(binary_to_list(DeviceId ) ) .
parse_validity(<<"A">>) -> true;
parse_validity(_) -> false.
parse_date(Year, Month, Day, Hour, Minute, Second) ->
Date = {
{
list_to_integer(binary_to_list(Year)) + 2000,
list_to_integer(binary_to_list(Month)),
list_to_integer(binary_to_list(Day))
},
{
list_to_integer(binary_to_list(Hour)),
list_to_integer(binary_to_list(Minute)),
list_to_integer(binary_to_list(Second))
}
},
em_helper_time:datetime_to_utc(Date).
format_response(SysDeviceId, <<"BP00">>) ->
<<"(", SysDeviceId/binary, "AP01)">>;
format_response(SysDeviceId, <<"BP05">>) ->
<<"(", SysDeviceId/binary, "AP05)">>.
( 123456789012BP05123456789012345120101A6000.0000N13000.0000E000.0120200000.0000000000L000946BB )
( 123456789012 BP05 123456789012345 120101 A 6000.0000N 13000.0000E 000.0 120200 000.00 00000000 L000946BB )
Packet = < < " ( 123456789012BP05123456789012345120101A6000.0000N13000.0000E000.0120200000.0000000000L000946BB ) " > > , |
3fafaa8a1db47564eac353181c06fd71bb32e957923ff32a25b73e82b1da3f6c | evturn/haskellbook | 30.07-our-exceptions.hs | module OurExceptions where
import Control.Exception
data EATD = NotEven Int
| NotDivThree Int
deriving (Eq, Show)
instance Exception EATD
evenAndThreeDiv :: Int -> IO Int
evenAndThreeDiv i
| rem i 3 /= 0 = throwIO (NotDivThree i)
| even i = throwIO (NotEven i)
| otherwise = return i
catchNotDivThree : : IO Int - > ( NotDivThree - > IO Int ) - > IO Int
-- catchNotDivThree = catch
-- catchNotEven :: IO Int -> (NotEven -> IO Int) -> IO Int
-- catchNotEven = catch
catchBoth :: IO Int -> IO Int
catchBoth ioInt = catches ioInt
[ Handler (\(NotEven _) -> return maxBound)
, Handler (\(NotDivThree _) -> return minBound)
]
| null | https://raw.githubusercontent.com/evturn/haskellbook/3d310d0ddd4221ffc5b9fd7ec6476b2a0731274a/30/30.07-our-exceptions.hs | haskell | catchNotDivThree = catch
catchNotEven :: IO Int -> (NotEven -> IO Int) -> IO Int
catchNotEven = catch | module OurExceptions where
import Control.Exception
data EATD = NotEven Int
| NotDivThree Int
deriving (Eq, Show)
instance Exception EATD
evenAndThreeDiv :: Int -> IO Int
evenAndThreeDiv i
| rem i 3 /= 0 = throwIO (NotDivThree i)
| even i = throwIO (NotEven i)
| otherwise = return i
catchNotDivThree : : IO Int - > ( NotDivThree - > IO Int ) - > IO Int
catchBoth :: IO Int -> IO Int
catchBoth ioInt = catches ioInt
[ Handler (\(NotEven _) -> return maxBound)
, Handler (\(NotDivThree _) -> return minBound)
]
|
d1449ca52b8272b5880f9f0745e87b3546d5c4b492366fd6fea9db033932f6d0 | RDTK/generator | mixins.lisp | ;;;; mixins.lisp --- Generic mixin classes used by project, templates, etc.
;;;;
Copyright ( C ) 2012 - 2019 Jan Moringen
;;;;
Author : < >
(cl:in-package #:build-generator.model.variables)
;;; `direct-variables-mixin'
(defclass direct-variables-mixin ()
((variables :initarg :variables
:type list ; alist
:accessor %direct-variables
:reader direct-variables
:initform '()
:documentation
"Stores direct variables definitions as an alist with
elements of the form
(NAME . EXPRESSION)
where NAME is a keyword naming the variable and
EXPRESSION a `variable-expression', the unevaluated
value of the variable."))
(:documentation
"Adds a list of direct variable definition cells."))
(defmethod shared-initialize :around ((instance direct-variables-mixin)
(slot-names t)
&key)
(call-next-method)
(loop :with locations = *variable-locations*
:for cell :in (%direct-variables instance)
:unless (gethash cell locations)
:do (setf (gethash cell locations) instance)))
(defmethod variables append ((thing direct-variables-mixin))
(copy-list (direct-variables thing)))
(defmethod direct-lookup ((thing direct-variables-mixin) (name t))
(if-let ((cell (find name (direct-variables thing)
:test #'eq
:key #'car)))
(values cell '() t)
(values nil '() nil)))
(defmethod lookup ((thing direct-variables-mixin) (name t)
&key if-undefined)
(declare (ignore if-undefined))
(direct-lookup thing name))
(defmethod (setf lookup) ((new-value t)
(thing direct-variables-mixin)
(name t)
&key if-undefined)
(declare (ignore if-undefined))
(removef (%direct-variables thing) name :key #'car)
(let ((cell (value-cons name new-value)))
(push cell (%direct-variables thing))
(setf (gethash cell *variable-locations*) thing)
new-value))
;;; `builtin-entries-mixin'
(defclass builtin-entries-mixin ()
())
(defmethod shared-initialize :after ((instance builtin-entries-mixin)
(slot-names t)
&key
(variables nil variables-supplied?))
(declare (ignore variables))
(when variables-supplied?
(loop :for (name . value) :in (builtin-entries instance)
:do (if-let ((cell (assoc name (%direct-variables instance) :test #'eq)))
(setf (cdr cell) value)
(push (cons name value) (%direct-variables instance))))))
| null | https://raw.githubusercontent.com/RDTK/generator/8d9e6e47776f2ccb7b5ed934337d2db50ecbe2f5/src/model/variables/mixins.lisp | lisp | mixins.lisp --- Generic mixin classes used by project, templates, etc.
`direct-variables-mixin'
alist
`builtin-entries-mixin' | Copyright ( C ) 2012 - 2019 Jan Moringen
Author : < >
(cl:in-package #:build-generator.model.variables)
(defclass direct-variables-mixin ()
((variables :initarg :variables
:accessor %direct-variables
:reader direct-variables
:initform '()
:documentation
"Stores direct variables definitions as an alist with
elements of the form
(NAME . EXPRESSION)
where NAME is a keyword naming the variable and
EXPRESSION a `variable-expression', the unevaluated
value of the variable."))
(:documentation
"Adds a list of direct variable definition cells."))
(defmethod shared-initialize :around ((instance direct-variables-mixin)
(slot-names t)
&key)
(call-next-method)
(loop :with locations = *variable-locations*
:for cell :in (%direct-variables instance)
:unless (gethash cell locations)
:do (setf (gethash cell locations) instance)))
(defmethod variables append ((thing direct-variables-mixin))
(copy-list (direct-variables thing)))
(defmethod direct-lookup ((thing direct-variables-mixin) (name t))
(if-let ((cell (find name (direct-variables thing)
:test #'eq
:key #'car)))
(values cell '() t)
(values nil '() nil)))
(defmethod lookup ((thing direct-variables-mixin) (name t)
&key if-undefined)
(declare (ignore if-undefined))
(direct-lookup thing name))
(defmethod (setf lookup) ((new-value t)
(thing direct-variables-mixin)
(name t)
&key if-undefined)
(declare (ignore if-undefined))
(removef (%direct-variables thing) name :key #'car)
(let ((cell (value-cons name new-value)))
(push cell (%direct-variables thing))
(setf (gethash cell *variable-locations*) thing)
new-value))
(defclass builtin-entries-mixin ()
())
(defmethod shared-initialize :after ((instance builtin-entries-mixin)
(slot-names t)
&key
(variables nil variables-supplied?))
(declare (ignore variables))
(when variables-supplied?
(loop :for (name . value) :in (builtin-entries instance)
:do (if-let ((cell (assoc name (%direct-variables instance) :test #'eq)))
(setf (cdr cell) value)
(push (cons name value) (%direct-variables instance))))))
|
45d19f55394edb2af28f1c585d9ba25a9312b6674808b2293e6a7d9546ba8af7 | pierric/neural-network | SIMD.hs | module Data.NeuralNetwork.Backend.BLASHS.SIMD where
import Data.Vector.Storable.Mutable as MV
import qualified Data.Vector.Storable as SV
import Data.Primitive.SIMD
import Control.Exception
import Control.Monad
class Num (SIMDPACK a) => SIMDable a where
type SIMDPACK a
konst :: a -> SIMDPACK a
foreach :: (SIMDPACK a -> SIMDPACK a) -> IOVector a -> IOVector a -> IO ()
hadamard :: (SIMDPACK a -> SIMDPACK a -> SIMDPACK a) -> IOVector a -> IOVector a -> IOVector a -> IO ()
instance SIMDable Float where
type SIMDPACK Float = FloatX16
hadamard op v x y = assert (MV.length x == sz && MV.length y == sz) $ do
let sv = unsafeCast v :: IOVector FloatX16
sx = unsafeCast x :: IOVector FloatX16
sy = unsafeCast y :: IOVector FloatX16
go (MV.length sv) sv sx sy
let rm = sz `mod` 16
rn = sz - rm
rv = unsafeDrop rn v
rx = unsafeDrop rn x
ry = unsafeDrop rn y
when (rm /= 0) $ rest rm rv rx ry
where
sz = MV.length v
go 0 _ _ _ = return ()
go !n !z !x !y = do
a <- unsafeRead x 0
b <- unsafeRead y 0
unsafeWrite z 0 (op a b)
go (n-1) (unsafeTail z) (unsafeTail x) (unsafeTail y)
rest n z x y = do
sx <- SV.unsafeFreeze x
sy <- SV.unsafeFreeze y
let vx = SV.ifoldl' (\v i a -> unsafeInsertVector v a i) nullVector sx
vy = SV.ifoldl' (\v i a -> unsafeInsertVector v a i) nullVector sy
(vz0,vz1,vz2,vz3,vz4,vz5,vz6,vz7,vz8,vz9,vzA,vzB,vzC,vzD,vzE,_) = unpackVector (op vx vy)
forM_ (zip [0..n-1] [vz0,vz1,vz2,vz3,vz4,vz5,vz6,vz7,vz8,vz9,vzA,vzB,vzC,vzD,vzE]) $ uncurry (unsafeWrite z)
foreach op v x = assert (MV.length v == MV.length x) $ do
let sv = unsafeCast v :: IOVector FloatX16
sx = unsafeCast x :: IOVector FloatX16
go (MV.length sv) sv sx
let rm = sz `mod` 16
rn = sz - rm
rv = unsafeDrop rn v
rx = unsafeDrop rn x
when (rm /= 0) $ rest rm rv rx
where
sz = MV.length v
go 0 _ _ = return ()
go !n !z !x = do
a <- unsafeRead x 0
unsafeWrite z 0 (op a)
go (n-1) (unsafeTail z) (unsafeTail x)
rest n z x = do
sx <- SV.unsafeFreeze x
let vx = SV.ifoldl' (\v i a -> unsafeInsertVector v a i) nullVector sx
(vz0,vz1,vz2,vz3,vz4,vz5,vz6,vz7,vz8,vz9,vzA,vzB,vzC,vzD,vzE,_) = unpackVector (op vx)
forM_ (zip [0..n-1] [vz0,vz1,vz2,vz3,vz4,vz5,vz6,vz7,vz8,vz9,vzA,vzB,vzC,vzD,vzE]) $ uncurry (unsafeWrite z)
konst = broadcastVector
| null | https://raw.githubusercontent.com/pierric/neural-network/406ecaf334cde9b10c9324e1f6c4b8663eae58d7/Backend-blashs/vec512/Data/NeuralNetwork/Backend/BLASHS/SIMD.hs | haskell | module Data.NeuralNetwork.Backend.BLASHS.SIMD where
import Data.Vector.Storable.Mutable as MV
import qualified Data.Vector.Storable as SV
import Data.Primitive.SIMD
import Control.Exception
import Control.Monad
class Num (SIMDPACK a) => SIMDable a where
type SIMDPACK a
konst :: a -> SIMDPACK a
foreach :: (SIMDPACK a -> SIMDPACK a) -> IOVector a -> IOVector a -> IO ()
hadamard :: (SIMDPACK a -> SIMDPACK a -> SIMDPACK a) -> IOVector a -> IOVector a -> IOVector a -> IO ()
instance SIMDable Float where
type SIMDPACK Float = FloatX16
hadamard op v x y = assert (MV.length x == sz && MV.length y == sz) $ do
let sv = unsafeCast v :: IOVector FloatX16
sx = unsafeCast x :: IOVector FloatX16
sy = unsafeCast y :: IOVector FloatX16
go (MV.length sv) sv sx sy
let rm = sz `mod` 16
rn = sz - rm
rv = unsafeDrop rn v
rx = unsafeDrop rn x
ry = unsafeDrop rn y
when (rm /= 0) $ rest rm rv rx ry
where
sz = MV.length v
go 0 _ _ _ = return ()
go !n !z !x !y = do
a <- unsafeRead x 0
b <- unsafeRead y 0
unsafeWrite z 0 (op a b)
go (n-1) (unsafeTail z) (unsafeTail x) (unsafeTail y)
rest n z x y = do
sx <- SV.unsafeFreeze x
sy <- SV.unsafeFreeze y
let vx = SV.ifoldl' (\v i a -> unsafeInsertVector v a i) nullVector sx
vy = SV.ifoldl' (\v i a -> unsafeInsertVector v a i) nullVector sy
(vz0,vz1,vz2,vz3,vz4,vz5,vz6,vz7,vz8,vz9,vzA,vzB,vzC,vzD,vzE,_) = unpackVector (op vx vy)
forM_ (zip [0..n-1] [vz0,vz1,vz2,vz3,vz4,vz5,vz6,vz7,vz8,vz9,vzA,vzB,vzC,vzD,vzE]) $ uncurry (unsafeWrite z)
foreach op v x = assert (MV.length v == MV.length x) $ do
let sv = unsafeCast v :: IOVector FloatX16
sx = unsafeCast x :: IOVector FloatX16
go (MV.length sv) sv sx
let rm = sz `mod` 16
rn = sz - rm
rv = unsafeDrop rn v
rx = unsafeDrop rn x
when (rm /= 0) $ rest rm rv rx
where
sz = MV.length v
go 0 _ _ = return ()
go !n !z !x = do
a <- unsafeRead x 0
unsafeWrite z 0 (op a)
go (n-1) (unsafeTail z) (unsafeTail x)
rest n z x = do
sx <- SV.unsafeFreeze x
let vx = SV.ifoldl' (\v i a -> unsafeInsertVector v a i) nullVector sx
(vz0,vz1,vz2,vz3,vz4,vz5,vz6,vz7,vz8,vz9,vzA,vzB,vzC,vzD,vzE,_) = unpackVector (op vx)
forM_ (zip [0..n-1] [vz0,vz1,vz2,vz3,vz4,vz5,vz6,vz7,vz8,vz9,vzA,vzB,vzC,vzD,vzE]) $ uncurry (unsafeWrite z)
konst = broadcastVector
| |
e5d6cd773bdaa85b526d463a51f9b1f76ce601821faf03342a22bd110743bb66 | mtravers/goddinpotty | database_test.clj | (ns goddinpotty.database-test
(:require [goddinpotty.database :refer :all]
[goddinpotty.parser :as parser]
[clojure.test :refer :all]))
(deftest content-refs-test
(is (= #{"wizards" "light"}
;; This is trick for referring to non-public fn, good to know...
(set (#'block-refs {:parsed (parser/parse-to-ast "Do not meddle in the affairs of #wizards, because they become soggy and hard to #light.")} )))))
| null | https://raw.githubusercontent.com/mtravers/goddinpotty/d1abee0b2c06b0d5f264ce08b18ba7d14ad92c1c/test/goddinpotty/database_test.clj | clojure | This is trick for referring to non-public fn, good to know... | (ns goddinpotty.database-test
(:require [goddinpotty.database :refer :all]
[goddinpotty.parser :as parser]
[clojure.test :refer :all]))
(deftest content-refs-test
(is (= #{"wizards" "light"}
(set (#'block-refs {:parsed (parser/parse-to-ast "Do not meddle in the affairs of #wizards, because they become soggy and hard to #light.")} )))))
|
249cdd8bb8401ed14778142048b708a3055e2d4c311323af3f8e13eca361bf4c | rcherrueau/rastache | renderer.rkt | #lang racket/base
; /\ \__ /\ \
; _ __ __ ____\ \ ,_\ __ ___\ \ \___ __
/\`'__\/'__`\ /',__\\ \ \/ /'__`\ /'___\ \ _ ` \ /'__`\
; \ \ \//\ \L\.\_/\__, `\\ \ \_/\ \L\.\_/\ \__/\ \ \ \ \/\ __/
\ \_\\ \__/.\_\/\____/ \ \__\ \__/.\_\ \____\\ \_\ \_\
; \/_/ \/__/\/_/\/___/ \/__/\/__/\/_/\/____/ \/_/\/_/\/____/
; Mustache template engine for Racket
; Mustache template renderer.
;
; Reads a list of tokens and renders the template. The rendering is
; done in a stream.
(provide render)
; ______________________________________________________________________________
; import and implementation
(require "commons.rkt"
"parser.rkt"
racket/match
net/url
xml)
;; Returns #t if value is a rastache context, #f otherwise.
(define rast-context? hash?)
;; Returns the value of `key' in a rastache context if any. Otherwise
# f.
(define (lookup context key) (hash-ref context key #f))
;; Returns the value of `key' in a rastache context if any. If the
;; value is a lambda, then the lambda is applied. If `key' doesn't
exist in the rastache context , it returns # f.
(define (var-lookup context key)
(let ([var (lookup context key)])
(cond
;; If var is a lambda: evaluate it
[(procedure? var)
(cond
;; 0 or arity-at-least arg
[(or (eq? (procedure-arity var) 0)
(arity-at-least? (procedure-arity var)))
(var)]
1 arg : give context
[(eq? (procedure-arity var) 1)
(var context)]
2 args : give context and render - function
[(eq? (procedure-arity var) 2)
(var context
(λ (txt)
(let ([o (open-output-string)])
(render
;; A lambda's return value should be parse with the
default delimiters ( see Lambdas tests >
Interpolation - Alternate Delimiters )
(parameterize ([open-tag "{{"]
[close-tag "}}"])
(tokenize (open-input-string txt)))
context
o)
(get-output-string o))))]
[else
(error (format (string-append "Error: The lambda ~s "
"should have zero, one "
"or two argument(s)") var))])]
;; Else var is a val: return it
[else var])))
;; Returns the value of a `key' in a rastache context when current
;; token is a section or inverted section. If `key' doesn't exist in
the rastache context , it returns # f.
(define (sec-lookup context key)
(let ([the-val (lookup context key)])
(cond
;; In section, if the val is a procedure with an arity different
of 2 , the procedure should be applied .
[(and (procedure? the-val)
(not (equal? (procedure-arity the-val) 2)))
(var-lookup context key)]
[else the-val])))
;; Returns #t if `val' is a rastache non-false value (i.e.: non-false
value , non - empty list , non - unexisting key ) . Otherwise # f.
(define (non-false? val)
(and
;; non-empty list
(not (and (list? val) (null? val)))
;; non-false value / non-unexisting key
(not (and (boolean? val) (not val)))))
Returns # t if ` val ' is a rastache non - empty list . Otherwise # f.
(define (non-empty-list? val)
(and (list? val) (not (null? val))))
;; Returns an html escaped string.
(define (htmlescape-string string)
(regexp-replace* #rx"\""
(xexpr->string string)
(regexp-replace-quote """)))
;; Render a mustache tokens thanks to the rendering context.
;; render: (list token) rast-context port-out -> void
(define (render tokens context stream)
(let _render ([the-tokens tokens]
[the-ctx context])
(cond
;; No more tokens
[(null? the-tokens)
(void)]
;; Process token
[else
(define the-token (car the-tokens))
(match the-token
;; Static
[(token-static content)
(display content stream)
(_render (cdr the-tokens) the-ctx)]
;; Etag
[(token-etag key)
(define val (var-lookup the-ctx key))
(display (cond
[(null? val) ""]
[(and (boolean? val) (not val)) ""]
[(number? val) (number->string val)]
[else (htmlescape-string val)]) stream)
(_render (cdr the-tokens) the-ctx)]
Utag
[(token-utag key)
(define val (var-lookup the-ctx key))
(display (cond
[(null? val) ""]
[(and (boolean? val) (not val)) ""]
[(number? val) (number->string val)]
[else val]) stream)
(_render (cdr the-tokens) the-ctx)]
;; Dotted Name
[(token-sec key section #t)
(define val (var-lookup the-ctx key))
(cond
;; Non-false value
[(non-false? val)
(_render section
(cond
;; `val' is rastache context and this is a dotted
;; name. Render with `val' context
[(rast-context? val) val]
;; `val' is not a rastache context. Render with
;; general context overriding by `val' put at
;; `period-name' position
[else (hash-set the-ctx period-name val)]))])
(_render (cdr the-tokens) the-ctx)]
;; Section
[(token-sec key section #f)
(define val (sec-lookup the-ctx key))
(cond
;; Non-empty list
[(non-empty-list? val)
;; Render for each items of the list
(for-each
(λ (the-val)
(_render section
(cond
;; `the-val' is rastache context but this is
;; not a dotted name section. Render with
;; general context overriding by `the-val'
;; content
[(rast-context? the-val)
(foldl (λ (kv ctx) (hash-set ctx (car kv) (cdr kv)))
the-ctx (hash->list the-val))]
;; `the-val' is not a rastache context.
;; Render with general context overriding by
;; `the-val' put at `period-name' position
[else (hash-set the-ctx period-name the-val)])))
val)]
;; Lambda
[(procedure? val)
(unless (eq? (procedure-arity val) 2)
(error (format (string-append "Error: The lambda ~s "
"should have zero, one "
"or two argument(s)") val)))
;; Pass text and render-function as arguments
(display
(val (mustachize section)
(λ (txt) (let ([o (open-output-string)])
(render (tokenize (open-input-string txt))
the-ctx
o)
(get-output-string o))))
stream)]
;; Non-false value
[(non-false? val)
(_render section
(cond
;; `val' is rastache context but this is not a
;; dotted name section. Render with general
;; context overriding by `val' content
[(rast-context? val)
(foldl (λ (kv ctx) (hash-set ctx (car kv) (cdr kv)))
the-ctx (hash->list val))]
;; `val' is not a rastache context.Render with
;; general context overriding by `val' put at
;; `period-name' position
[else (hash-set the-ctx period-name val)]))])
(_render (cdr the-tokens) the-ctx)]
;; Inverted Section
[(token-inv-sec key inv-section #f)
(define val (sec-lookup the-ctx key))
;; In contrast with section, we call the inverted section if
;; tha value is false, the list is empty or the key is
;; missed.
(when (not (non-false? val))
(_render inv-section the-ctx))
(_render (cdr the-tokens) the-ctx)]
;; Inverted Section with Dotted Names
[(token-inv-sec key inv-section #t)
;; If val is evaluated to false, go to the last inverted
;; section of this dotted name and renders `inv-section'.
;; Otherwise go deeper and test again.
(define val (sec-lookup the-ctx key))
(cond
[(not (non-false? val))
;; Render the deepest inv-section
(let render-inv-sec ([t (car inv-section)])
(match t
;; Not the last inverted section of this dotted name
;; => Go deeper
[(token-inv-sec k is #t)
(render-inv-sec (car is))]
;; Last inverted section of this dotted name
;; => Render section
[(token-inv-sec k is #f)
(_render is the-ctx)]))]
[else
;; Render with context seting to val
(_render inv-section
(cond
;; `val' is rastache context and this is a dotted
;; name. Render with `val' context
[(rast-context? val) val]
;; `val' is not a rastache context. Render with
;; general context overriding by `val' put at
;; `period-name' position
[else (hash-set the-ctx period-name val)]))])
(_render (cdr the-tokens) the-ctx)]
;; Partial
[(token-partial url)
(define protocol (url-scheme url))
(define partial-template
(cond
;; If url has no protocol, the default is `file'
[(not protocol)
(open-input-file (url->path url))]
;; Else use GET method to retrieve whatever information is
;; identified by url.
[else
(get-pure-port url #:redirections 1)]))
(render (tokenize partial-template) the-ctx stream)
(when (not (port-closed? partial-template))
(close-input-port partial-template))
(_render (cdr the-tokens) the-ctx)]
;; Delimiter
[(token-delimiter new-otag new-ctag)
(parameterize ([open-tag new-otag]
[close-tag new-ctag])
(_render (cdr the-tokens) the-ctx))]
;; If this is a unknow token: Error!
[other
(error (format (string-append "Unknown token ~s "
"while rendering") other))])])))
| null | https://raw.githubusercontent.com/rcherrueau/rastache/059d00c83416f8ba27cc38fa7f8321b075756d14/rastache/renderer.rkt | racket | /\ \__ /\ \
_ __ __ ____\ \ ,_\ __ ___\ \ \___ __
\ \ \//\ \L\.\_/\__, `\\ \ \_/\ \L\.\_/\ \__/\ \ \ \ \/\ __/
\/_/ \/__/\/_/\/___/ \/__/\/__/\/_/\/____/ \/_/\/_/\/____/
Mustache template engine for Racket
Mustache template renderer.
Reads a list of tokens and renders the template. The rendering is
done in a stream.
______________________________________________________________________________
import and implementation
Returns #t if value is a rastache context, #f otherwise.
Returns the value of `key' in a rastache context if any. Otherwise
Returns the value of `key' in a rastache context if any. If the
value is a lambda, then the lambda is applied. If `key' doesn't
If var is a lambda: evaluate it
0 or arity-at-least arg
A lambda's return value should be parse with the
Else var is a val: return it
Returns the value of a `key' in a rastache context when current
token is a section or inverted section. If `key' doesn't exist in
In section, if the val is a procedure with an arity different
Returns #t if `val' is a rastache non-false value (i.e.: non-false
non-empty list
non-false value / non-unexisting key
Returns an html escaped string.
Render a mustache tokens thanks to the rendering context.
render: (list token) rast-context port-out -> void
No more tokens
Process token
Static
Etag
Dotted Name
Non-false value
`val' is rastache context and this is a dotted
name. Render with `val' context
`val' is not a rastache context. Render with
general context overriding by `val' put at
`period-name' position
Section
Non-empty list
Render for each items of the list
`the-val' is rastache context but this is
not a dotted name section. Render with
general context overriding by `the-val'
content
`the-val' is not a rastache context.
Render with general context overriding by
`the-val' put at `period-name' position
Lambda
Pass text and render-function as arguments
Non-false value
`val' is rastache context but this is not a
dotted name section. Render with general
context overriding by `val' content
`val' is not a rastache context.Render with
general context overriding by `val' put at
`period-name' position
Inverted Section
In contrast with section, we call the inverted section if
tha value is false, the list is empty or the key is
missed.
Inverted Section with Dotted Names
If val is evaluated to false, go to the last inverted
section of this dotted name and renders `inv-section'.
Otherwise go deeper and test again.
Render the deepest inv-section
Not the last inverted section of this dotted name
=> Go deeper
Last inverted section of this dotted name
=> Render section
Render with context seting to val
`val' is rastache context and this is a dotted
name. Render with `val' context
`val' is not a rastache context. Render with
general context overriding by `val' put at
`period-name' position
Partial
If url has no protocol, the default is `file'
Else use GET method to retrieve whatever information is
identified by url.
Delimiter
If this is a unknow token: Error! | #lang racket/base
/\`'__\/'__`\ /',__\\ \ \/ /'__`\ /'___\ \ _ ` \ /'__`\
\ \_\\ \__/.\_\/\____/ \ \__\ \__/.\_\ \____\\ \_\ \_\
(provide render)
(require "commons.rkt"
"parser.rkt"
racket/match
net/url
xml)
(define rast-context? hash?)
# f.
(define (lookup context key) (hash-ref context key #f))
exist in the rastache context , it returns # f.
(define (var-lookup context key)
(let ([var (lookup context key)])
(cond
[(procedure? var)
(cond
[(or (eq? (procedure-arity var) 0)
(arity-at-least? (procedure-arity var)))
(var)]
1 arg : give context
[(eq? (procedure-arity var) 1)
(var context)]
2 args : give context and render - function
[(eq? (procedure-arity var) 2)
(var context
(λ (txt)
(let ([o (open-output-string)])
(render
default delimiters ( see Lambdas tests >
Interpolation - Alternate Delimiters )
(parameterize ([open-tag "{{"]
[close-tag "}}"])
(tokenize (open-input-string txt)))
context
o)
(get-output-string o))))]
[else
(error (format (string-append "Error: The lambda ~s "
"should have zero, one "
"or two argument(s)") var))])]
[else var])))
the rastache context , it returns # f.
(define (sec-lookup context key)
(let ([the-val (lookup context key)])
(cond
of 2 , the procedure should be applied .
[(and (procedure? the-val)
(not (equal? (procedure-arity the-val) 2)))
(var-lookup context key)]
[else the-val])))
value , non - empty list , non - unexisting key ) . Otherwise # f.
(define (non-false? val)
(and
(not (and (list? val) (null? val)))
(not (and (boolean? val) (not val)))))
Returns # t if ` val ' is a rastache non - empty list . Otherwise # f.
(define (non-empty-list? val)
(and (list? val) (not (null? val))))
(define (htmlescape-string string)
(regexp-replace* #rx"\""
(xexpr->string string)
(regexp-replace-quote """)))
(define (render tokens context stream)
(let _render ([the-tokens tokens]
[the-ctx context])
(cond
[(null? the-tokens)
(void)]
[else
(define the-token (car the-tokens))
(match the-token
[(token-static content)
(display content stream)
(_render (cdr the-tokens) the-ctx)]
[(token-etag key)
(define val (var-lookup the-ctx key))
(display (cond
[(null? val) ""]
[(and (boolean? val) (not val)) ""]
[(number? val) (number->string val)]
[else (htmlescape-string val)]) stream)
(_render (cdr the-tokens) the-ctx)]
Utag
[(token-utag key)
(define val (var-lookup the-ctx key))
(display (cond
[(null? val) ""]
[(and (boolean? val) (not val)) ""]
[(number? val) (number->string val)]
[else val]) stream)
(_render (cdr the-tokens) the-ctx)]
[(token-sec key section #t)
(define val (var-lookup the-ctx key))
(cond
[(non-false? val)
(_render section
(cond
[(rast-context? val) val]
[else (hash-set the-ctx period-name val)]))])
(_render (cdr the-tokens) the-ctx)]
[(token-sec key section #f)
(define val (sec-lookup the-ctx key))
(cond
[(non-empty-list? val)
(for-each
(λ (the-val)
(_render section
(cond
[(rast-context? the-val)
(foldl (λ (kv ctx) (hash-set ctx (car kv) (cdr kv)))
the-ctx (hash->list the-val))]
[else (hash-set the-ctx period-name the-val)])))
val)]
[(procedure? val)
(unless (eq? (procedure-arity val) 2)
(error (format (string-append "Error: The lambda ~s "
"should have zero, one "
"or two argument(s)") val)))
(display
(val (mustachize section)
(λ (txt) (let ([o (open-output-string)])
(render (tokenize (open-input-string txt))
the-ctx
o)
(get-output-string o))))
stream)]
[(non-false? val)
(_render section
(cond
[(rast-context? val)
(foldl (λ (kv ctx) (hash-set ctx (car kv) (cdr kv)))
the-ctx (hash->list val))]
[else (hash-set the-ctx period-name val)]))])
(_render (cdr the-tokens) the-ctx)]
[(token-inv-sec key inv-section #f)
(define val (sec-lookup the-ctx key))
(when (not (non-false? val))
(_render inv-section the-ctx))
(_render (cdr the-tokens) the-ctx)]
[(token-inv-sec key inv-section #t)
(define val (sec-lookup the-ctx key))
(cond
[(not (non-false? val))
(let render-inv-sec ([t (car inv-section)])
(match t
[(token-inv-sec k is #t)
(render-inv-sec (car is))]
[(token-inv-sec k is #f)
(_render is the-ctx)]))]
[else
(_render inv-section
(cond
[(rast-context? val) val]
[else (hash-set the-ctx period-name val)]))])
(_render (cdr the-tokens) the-ctx)]
[(token-partial url)
(define protocol (url-scheme url))
(define partial-template
(cond
[(not protocol)
(open-input-file (url->path url))]
[else
(get-pure-port url #:redirections 1)]))
(render (tokenize partial-template) the-ctx stream)
(when (not (port-closed? partial-template))
(close-input-port partial-template))
(_render (cdr the-tokens) the-ctx)]
[(token-delimiter new-otag new-ctag)
(parameterize ([open-tag new-otag]
[close-tag new-ctag])
(_render (cdr the-tokens) the-ctx))]
[other
(error (format (string-append "Unknown token ~s "
"while rendering") other))])])))
|
56b9c47154a4763061c0af3a242aec0442e9dc48a8d8233c58446aad3bf04efa | roman01la/clojurescript-workshop | core.cljs | ;; Composition & partial application
(def coll [1 2 3 4 5])
# 1
8
# 2
(def crunch-numbers (comp #(reduce + %)
#(filter odd? %)
#(map inc %)))
8
# 3
(def crunch-numbers (comp (partial reduce +)
(partial filter odd?)
(partial map inc)))
8
# 4
(->> coll
(map inc)
(filter odd?)
8
Memoization
(def coll [1 2 3 4 5])
(def crunch-numbers
(memoize (comp #(reduce + %)
#(filter odd? %)
#(map inc %))))
(crunch-numbers [1 ... 10000000]) ;; takes some time
(crunch-numbers [1 ... 10000000]) ;; immediately returns cached result
(crunch-numbers [1 ... 10000000]) ;; immediately returns cached result
;; Identity
5
( 1 true 9 0 -1 )
;; Juxtaposition
(def get-name (juxt :fname :lname))
(get-name {:fname "John"
:lname "Doe"
[ " " " Doe " ]
| null | https://raw.githubusercontent.com/roman01la/clojurescript-workshop/48b02266d65cae8113edd4ce34c4ab282ad256d1/06.higher_order_fns/core.cljs | clojure | Composition & partial application
takes some time
immediately returns cached result
immediately returns cached result
Identity
Juxtaposition |
(def coll [1 2 3 4 5])
# 1
8
# 2
(def crunch-numbers (comp #(reduce + %)
#(filter odd? %)
#(map inc %)))
8
# 3
(def crunch-numbers (comp (partial reduce +)
(partial filter odd?)
(partial map inc)))
8
# 4
(->> coll
(map inc)
(filter odd?)
8
Memoization
(def coll [1 2 3 4 5])
(def crunch-numbers
(memoize (comp #(reduce + %)
#(filter odd? %)
#(map inc %))))
5
( 1 true 9 0 -1 )
(def get-name (juxt :fname :lname))
(get-name {:fname "John"
:lname "Doe"
[ " " " Doe " ]
|
39e07fb5bb21bb739ceeff9c00f791b9af3b985008f71f59eddc4c39e05f9a86 | wdebeaum/step | balance.lisp | ;;;;
;;;; W::BALANCE
;;;;
(define-words :pos W::n :templ COUNT-PRED-TEMPL
:words (
(W::BALANCE
(SENSES
((LF-PARENT ONT::balance-scale) (TEMPL COUNT-PRED-TEMPL)
(META-DATA :ORIGIN CALO :ENTRY-DATE 20040204 :CHANGE-DATE NIL
:COMMENTS HTML-PURCHASING-CORPUS))))
))
(define-words :pos W::v :TEMPL AGENT-FORMAL-XP-TEMPL
:words (
(W::balance
(SENSES
((EXAMPLE "the cost doesn't balance the effort")
(LF-PARENT ONT::object-compare)
(SEM (F::Aspect F::static) (F::Time-span F::extended))
(TEMPL NEUTRAL-NEUTRAL1-XP-TEMPL)
(meta-data :origin calo :entry-date 20050425 :change-date nil :comments projector-purchasing)
)
((EXAMPLE "the cup is balancing on the table")
(LF-PARENT ONT::balance) ;BE-AT-LOC)
(SEM (F::Aspect F::Stage-level) (F::Time-span F::Extended))
(TEMPL neutral-templ)
)
))
))
| null | https://raw.githubusercontent.com/wdebeaum/step/f38c07d9cd3a58d0e0183159d4445de9a0eafe26/src/LexiconManager/Data/new/balance.lisp | lisp |
W::BALANCE
BE-AT-LOC) |
(define-words :pos W::n :templ COUNT-PRED-TEMPL
:words (
(W::BALANCE
(SENSES
((LF-PARENT ONT::balance-scale) (TEMPL COUNT-PRED-TEMPL)
(META-DATA :ORIGIN CALO :ENTRY-DATE 20040204 :CHANGE-DATE NIL
:COMMENTS HTML-PURCHASING-CORPUS))))
))
(define-words :pos W::v :TEMPL AGENT-FORMAL-XP-TEMPL
:words (
(W::balance
(SENSES
((EXAMPLE "the cost doesn't balance the effort")
(LF-PARENT ONT::object-compare)
(SEM (F::Aspect F::static) (F::Time-span F::extended))
(TEMPL NEUTRAL-NEUTRAL1-XP-TEMPL)
(meta-data :origin calo :entry-date 20050425 :change-date nil :comments projector-purchasing)
)
((EXAMPLE "the cup is balancing on the table")
(SEM (F::Aspect F::Stage-level) (F::Time-span F::Extended))
(TEMPL neutral-templ)
)
))
))
|
0dbb249f81288728ef5d0c10d70e87915c47c11b1e894f35e9fceb007e542b03 | fpco/th-utilities | FixQ.hs | # LANGUAGE CPP #
-- | A compat module to take fixed points in 'Q'.
module TH.FixQ (fixQ) where
#if MIN_VERSION_template_haskell(2,17,0)
import Control.Monad.Fix (mfix)
import Language.Haskell.TH.Syntax (Q (..))
fixQ :: (a -> Q a) -> Q a
fixQ = mfix
#else
We do n't have a MonadFix instance for Q
import Control.Concurrent.MVar (newEmptyMVar, readMVar, putMVar)
import Control.Exception (BlockedIndefinitelyOnMVar (..), catch, throwIO)
import Control.Exception.Base (FixIOException (..))
import Language.Haskell.TH.Syntax (Q (..), runIO)
import GHC.IO.Unsafe (unsafeDupableInterleaveIO)
fixQ :: (a -> Q a) -> Q a
fixQ k = do
m <- runIO newEmptyMVar
ans <- runIO (unsafeDupableInterleaveIO
(readMVar m `catch` \BlockedIndefinitelyOnMVar ->
throwIO FixIOException))
result <- k ans
runIO (putMVar m result)
return result
#endif
| null | https://raw.githubusercontent.com/fpco/th-utilities/893adcb8a2f7b48342d4f63e816cda2217629f7b/src/TH/FixQ.hs | haskell | | A compat module to take fixed points in 'Q'. | # LANGUAGE CPP #
module TH.FixQ (fixQ) where
#if MIN_VERSION_template_haskell(2,17,0)
import Control.Monad.Fix (mfix)
import Language.Haskell.TH.Syntax (Q (..))
fixQ :: (a -> Q a) -> Q a
fixQ = mfix
#else
We do n't have a MonadFix instance for Q
import Control.Concurrent.MVar (newEmptyMVar, readMVar, putMVar)
import Control.Exception (BlockedIndefinitelyOnMVar (..), catch, throwIO)
import Control.Exception.Base (FixIOException (..))
import Language.Haskell.TH.Syntax (Q (..), runIO)
import GHC.IO.Unsafe (unsafeDupableInterleaveIO)
fixQ :: (a -> Q a) -> Q a
fixQ k = do
m <- runIO newEmptyMVar
ans <- runIO (unsafeDupableInterleaveIO
(readMVar m `catch` \BlockedIndefinitelyOnMVar ->
throwIO FixIOException))
result <- k ans
runIO (putMVar m result)
return result
#endif
|
07f6570d25c74b72b38869346b0ab9428a67a83eccc39f8853f8586a1f571693 | konfig-xyz/rust-reason-parser | SchemaParser.hs | {-# LANGUAGE OverloadedStrings #-}
module SchemaParser (parseTypeContainer, parseSchema) where
import Data.Set (member)
import qualified Data.Text as T
import Text.Parsec
import Type.Reflection.Unsafe
import Types
data TypeName = Simple T.Text | Qualified (T.Text, T.Text)
deriving (Eq, Ord)
parseQualifiedType :: Parsec T.Text () (T.Text, T.Text)
parseQualifiedType = do
spaces
base <- manyTill anyChar $ string "."
nesting <- manyTill anyChar eof
pure (T.pack base, T.pack nesting)
parseTypeContainer :: Parsec T.Text () (T.Text, T.Text)
parseTypeContainer = do
containerType <- manyTill anyChar $ try $ string "<"
valueType <- manyTill anyChar $ try $ string ">" <* (eof >> pure "")
pure (T.pack containerType, T.pack valueType)
parseType :: Parsec T.Text () (T.Text, T.Text)
parseType = do
spaces
typeName <- manyTill anyChar $ spaces *> string "->" <* spaces
typeVar <- manyTill anyChar $ string ","
optional eof
pure (T.pack typeName, T.pack typeVar)
parseTable :: Parsec T.Text () (T.Text, [(T.Text, T.Text)])
parseTable = do
string "table! {" <* try spaces
string "use diesel::sql_types::*;" <* try spaces
typeName <- manyTill anyChar $ try space
spaces
try $ manyTill anyChar $ try $ string "{"
contents <- manyTill (try parseType) $ try $ spaces *> string "}"
spaces
try $ string "}"
pure (T.pack typeName, contents)
parseSchema :: T.Text -> [(T.Text, [(T.Text, T.Text)])]
parseSchema xs = case runParser schemaParser () "Error Parsing" xs of
Right x -> x
Left y -> []
where
schemaParser = manyTill (try parseTable <* spaces) $ try (string "joinable" <|> (eof >> pure ""))
| null | https://raw.githubusercontent.com/konfig-xyz/rust-reason-parser/71b7d79bca485e0c67835ec477dbf9760746385f/src/SchemaParser.hs | haskell | # LANGUAGE OverloadedStrings # |
module SchemaParser (parseTypeContainer, parseSchema) where
import Data.Set (member)
import qualified Data.Text as T
import Text.Parsec
import Type.Reflection.Unsafe
import Types
data TypeName = Simple T.Text | Qualified (T.Text, T.Text)
deriving (Eq, Ord)
parseQualifiedType :: Parsec T.Text () (T.Text, T.Text)
parseQualifiedType = do
spaces
base <- manyTill anyChar $ string "."
nesting <- manyTill anyChar eof
pure (T.pack base, T.pack nesting)
parseTypeContainer :: Parsec T.Text () (T.Text, T.Text)
parseTypeContainer = do
containerType <- manyTill anyChar $ try $ string "<"
valueType <- manyTill anyChar $ try $ string ">" <* (eof >> pure "")
pure (T.pack containerType, T.pack valueType)
parseType :: Parsec T.Text () (T.Text, T.Text)
parseType = do
spaces
typeName <- manyTill anyChar $ spaces *> string "->" <* spaces
typeVar <- manyTill anyChar $ string ","
optional eof
pure (T.pack typeName, T.pack typeVar)
parseTable :: Parsec T.Text () (T.Text, [(T.Text, T.Text)])
parseTable = do
string "table! {" <* try spaces
string "use diesel::sql_types::*;" <* try spaces
typeName <- manyTill anyChar $ try space
spaces
try $ manyTill anyChar $ try $ string "{"
contents <- manyTill (try parseType) $ try $ spaces *> string "}"
spaces
try $ string "}"
pure (T.pack typeName, contents)
parseSchema :: T.Text -> [(T.Text, [(T.Text, T.Text)])]
parseSchema xs = case runParser schemaParser () "Error Parsing" xs of
Right x -> x
Left y -> []
where
schemaParser = manyTill (try parseTable <* spaces) $ try (string "joinable" <|> (eof >> pure ""))
|
06fbad30a5415b4c05e3dd5e0f2dddbedf82c704271d04c8f819ab785e4abfcc | janestreet/core | doubly_linked_intf.ml | * Doubly - linked lists .
Compared to other doubly - linked lists , in this one :
1 . Calls to modification functions ( [ insert * ] , [ move * ] , ... ) detect if the list is
being iterated over ( [ iter ] , [ fold ] , ... ) , and if so raise an exception . For example ,
a use like the following would raise :
{ [
iter t ~f:(fun _ - > ... remove t e ... )
] }
2 . There is a designated " front " and " back " of each list , rather than viewing each
element as an equal in a ring .
3 . Elements know which list they 're in . Each operation that takes an [ Elt.t ] also
takes a [ t ] , first checks that the [ Elt ] belongs to the [ t ] , and if not , raises .
4 . Related to ( 3 ) , lists can not be split , though a sort of splicing is available as
[ transfer ] . In other words , no operation will cause one list to become two . This
makes this module unsuitable for maintaining the faces of a planar graph under edge
insertion and deletion , for example .
5 . Another property permitted by ( 3 ) and ( 4 ) is that [ length ] is O(1 ) .
Compared to other doubly-linked lists, in this one:
1. Calls to modification functions ([insert*], [move*], ...) detect if the list is
being iterated over ([iter], [fold], ...), and if so raise an exception. For example,
a use like the following would raise:
{[
iter t ~f:(fun _ -> ... remove t e ...)
]}
2. There is a designated "front" and "back" of each list, rather than viewing each
element as an equal in a ring.
3. Elements know which list they're in. Each operation that takes an [Elt.t] also
takes a [t], first checks that the [Elt] belongs to the [t], and if not, raises.
4. Related to (3), lists cannot be split, though a sort of splicing is available as
[transfer]. In other words, no operation will cause one list to become two. This
makes this module unsuitable for maintaining the faces of a planar graph under edge
insertion and deletion, for example.
5. Another property permitted by (3) and (4) is that [length] is O(1). *)
open! Import
module type S = sig
module Elt : sig
type 'a t
val value : 'a t -> 'a
(** pointer equality *)
val equal : 'a t -> 'a t -> bool
val set : 'a t -> 'a -> unit
val sexp_of_t : ('a -> Base.Sexp.t) -> 'a t -> Base.Sexp.t
end
type 'a t [@@deriving compare, sexp, sexp_grammar]
include Container.S1 with type 'a t := 'a t
include Invariant.S1 with type 'a t := 'a t
* { 2 Creating doubly - linked lists }
val create : unit -> 'a t
* [ of_list l ] returns a doubly - linked list [ t ] with the same elements as [ l ] and in
the same order ( i.e. , the first element of [ l ] is the first element of [ t ] ) . It is
always the case that [ l = to_list ( of_list l ) ] .
the same order (i.e., the first element of [l] is the first element of [t]). It is
always the case that [l = to_list (of_list l)]. *)
val of_list : 'a list -> 'a t
val of_array : 'a array -> 'a t
* { 2 Predicates }
(** pointer equality *)
val equal : 'a t -> 'a t -> bool
val is_first : 'a t -> 'a Elt.t -> bool
val is_last : 'a t -> 'a Elt.t -> bool
val mem_elt : 'a t -> 'a Elt.t -> bool
* { 2 Constant - time extraction of first and last elements }
val first_elt : 'a t -> 'a Elt.t option
val last_elt : 'a t -> 'a Elt.t option
val first : 'a t -> 'a option
val last : 'a t -> 'a option
val first_exn : 'a t -> 'a
val last_exn : 'a t -> 'a
* { 2 Constant - time retrieval of next or previous element }
val next : 'a t -> 'a Elt.t -> 'a Elt.t option
val prev : 'a t -> 'a Elt.t -> 'a Elt.t option
* { 2 Constant - time insertion of a new element }
val insert_before : 'a t -> 'a Elt.t -> 'a -> 'a Elt.t
val insert_after : 'a t -> 'a Elt.t -> 'a -> 'a Elt.t
val insert_first : 'a t -> 'a -> 'a Elt.t
val insert_last : 'a t -> 'a -> 'a Elt.t
* { 2 Constant - time move of an element from and to positions in the same list }
An exception is raised if [ elt ] is equal to [ anchor ] .
An exception is raised if [elt] is equal to [anchor]. *)
val move_to_front : 'a t -> 'a Elt.t -> unit
val move_to_back : 'a t -> 'a Elt.t -> unit
val move_after : 'a t -> 'a Elt.t -> anchor:'a Elt.t -> unit
val move_before : 'a t -> 'a Elt.t -> anchor:'a Elt.t -> unit
* { 2 Constant - time removal of an element }
val remove : 'a t -> 'a Elt.t -> unit
val remove_first : 'a t -> 'a option
val remove_last : 'a t -> 'a option
val iteri : 'a t -> f:(int -> 'a -> unit) -> unit
val foldi : 'a t -> init:'acc -> f:(int -> 'acc -> 'a -> 'acc) -> 'acc
(** [fold_elt t ~init ~f] is the same as fold, except [f] is called with the ['a
Elt.t]'s from the list instead of the contained ['a] values.
Note that like other iteration functions, it is an error to mutate [t] inside the
fold. If you'd like to call [remove] on any of the ['a Elt.t]'s, use
[filter_inplace]. *)
val fold_elt : 'a t -> init:'acc -> f:('acc -> 'a Elt.t -> 'acc) -> 'acc
val foldi_elt : 'a t -> init:'acc -> f:(int -> 'acc -> 'a Elt.t -> 'acc) -> 'acc
val iter_elt : 'a t -> f:('a Elt.t -> unit) -> unit
val iteri_elt : 'a t -> f:(int -> 'a Elt.t -> unit) -> unit
val fold_right : 'a t -> init:'acc -> f:('a -> 'acc -> 'acc) -> 'acc
val fold_right_elt : 'a t -> init:'acc -> f:('a Elt.t -> 'acc -> 'acc) -> 'acc
* [ find_elt t ~f ] finds the first element in [ t ] that satisfies [ f ] , by testing each
of element of [ t ] in turn until [ f ] succeeds .
of element of [t] in turn until [f] succeeds. *)
val find_elt : 'a t -> f:('a -> bool) -> 'a Elt.t option
val findi_elt : 'a t -> f:(int -> 'a -> bool) -> (int * 'a Elt.t) option
(** [clear t] removes all elements from the list in constant time. *)
val clear : 'a t -> unit
val copy : 'a t -> 'a t
(** [transfer ~src ~dst] has the same behavior as
[iter src ~f:(insert_last dst); clear src] except that it runs in constant time.
If [s = to_list src] and [d = to_list dst], then after [transfer ~src ~dst]:
[to_list src = []]
[to_list dst = d @ s] *)
val transfer : src:'a t -> dst:'a t -> unit
* { 2 Linear - time mapping of lists ( creates a new list ) }
val map : 'a t -> f:('a -> 'b) -> 'b t
val mapi : 'a t -> f:(int -> 'a -> 'b) -> 'b t
val filter : 'a t -> f:('a -> bool) -> 'a t
val filteri : 'a t -> f:(int -> 'a -> bool) -> 'a t
val filter_map : 'a t -> f:('a -> 'b option) -> 'b t
val filter_mapi : 'a t -> f:(int -> 'a -> 'b option) -> 'b t
* { 2 Linear - time partition of lists ( creates two new lists ) }
val partition_tf : 'a t -> f:('a -> bool) -> 'a t * 'a t
val partitioni_tf : 'a t -> f:(int -> 'a -> bool) -> 'a t * 'a t
val partition_map : 'a t -> f:('a -> ('b, 'c) Either.t) -> 'b t * 'c t
val partition_mapi : 'a t -> f:(int -> 'a -> ('b, 'c) Either.t) -> 'b t * 'c t
* { 2 Linear - time in - place mapping of lists }
(** [map_inplace t ~f] replaces all values [v] with [f v] *)
val map_inplace : 'a t -> f:('a -> 'a) -> unit
val mapi_inplace : 'a t -> f:(int -> 'a -> 'a) -> unit
(** [filter_inplace t ~f] removes all elements of [t] that don't satisfy [f]. *)
val filter_inplace : 'a t -> f:('a -> bool) -> unit
val filteri_inplace : 'a t -> f:(int -> 'a -> bool) -> unit
(** If [f] returns [None], the element is removed, else the value is replaced with the
contents of the [Some] *)
val filter_map_inplace : 'a t -> f:('a -> 'a option) -> unit
val filter_mapi_inplace : 'a t -> f:(int -> 'a -> 'a option) -> unit
(** [unchecked_iter t ~f] behaves like [iter t ~f] except that [f] is allowed to modify
[t]. Adding or removing elements before the element currently being visited has no
effect on the traversal. Elements added after the element currently being visited
will be traversed. Elements deleted after the element currently being visited will
not be traversed. Deleting the element currently being visited is an error that is
not detected (presumably leading to an infinite loop). *)
val unchecked_iter : 'a t -> f:('a -> unit) -> unit
(** A sequence of values from the doubly-linked list. It makes an intermediate copy of
the list so that the returned sequence is immune to any subsequent mutation of the
original list. *)
val to_sequence : 'a t -> 'a Sequence.t
end
module type Doubly_linked = sig
module type S = S
include S
end
| null | https://raw.githubusercontent.com/janestreet/core/f382131ccdcb4a8cd21ebf9a49fa42dcf8183de6/core/src/doubly_linked_intf.ml | ocaml | * pointer equality
* pointer equality
* [fold_elt t ~init ~f] is the same as fold, except [f] is called with the ['a
Elt.t]'s from the list instead of the contained ['a] values.
Note that like other iteration functions, it is an error to mutate [t] inside the
fold. If you'd like to call [remove] on any of the ['a Elt.t]'s, use
[filter_inplace].
* [clear t] removes all elements from the list in constant time.
* [transfer ~src ~dst] has the same behavior as
[iter src ~f:(insert_last dst); clear src] except that it runs in constant time.
If [s = to_list src] and [d = to_list dst], then after [transfer ~src ~dst]:
[to_list src = []]
[to_list dst = d @ s]
* [map_inplace t ~f] replaces all values [v] with [f v]
* [filter_inplace t ~f] removes all elements of [t] that don't satisfy [f].
* If [f] returns [None], the element is removed, else the value is replaced with the
contents of the [Some]
* [unchecked_iter t ~f] behaves like [iter t ~f] except that [f] is allowed to modify
[t]. Adding or removing elements before the element currently being visited has no
effect on the traversal. Elements added after the element currently being visited
will be traversed. Elements deleted after the element currently being visited will
not be traversed. Deleting the element currently being visited is an error that is
not detected (presumably leading to an infinite loop).
* A sequence of values from the doubly-linked list. It makes an intermediate copy of
the list so that the returned sequence is immune to any subsequent mutation of the
original list. | * Doubly - linked lists .
Compared to other doubly - linked lists , in this one :
1 . Calls to modification functions ( [ insert * ] , [ move * ] , ... ) detect if the list is
being iterated over ( [ iter ] , [ fold ] , ... ) , and if so raise an exception . For example ,
a use like the following would raise :
{ [
iter t ~f:(fun _ - > ... remove t e ... )
] }
2 . There is a designated " front " and " back " of each list , rather than viewing each
element as an equal in a ring .
3 . Elements know which list they 're in . Each operation that takes an [ Elt.t ] also
takes a [ t ] , first checks that the [ Elt ] belongs to the [ t ] , and if not , raises .
4 . Related to ( 3 ) , lists can not be split , though a sort of splicing is available as
[ transfer ] . In other words , no operation will cause one list to become two . This
makes this module unsuitable for maintaining the faces of a planar graph under edge
insertion and deletion , for example .
5 . Another property permitted by ( 3 ) and ( 4 ) is that [ length ] is O(1 ) .
Compared to other doubly-linked lists, in this one:
1. Calls to modification functions ([insert*], [move*], ...) detect if the list is
being iterated over ([iter], [fold], ...), and if so raise an exception. For example,
a use like the following would raise:
{[
iter t ~f:(fun _ -> ... remove t e ...)
]}
2. There is a designated "front" and "back" of each list, rather than viewing each
element as an equal in a ring.
3. Elements know which list they're in. Each operation that takes an [Elt.t] also
takes a [t], first checks that the [Elt] belongs to the [t], and if not, raises.
4. Related to (3), lists cannot be split, though a sort of splicing is available as
[transfer]. In other words, no operation will cause one list to become two. This
makes this module unsuitable for maintaining the faces of a planar graph under edge
insertion and deletion, for example.
5. Another property permitted by (3) and (4) is that [length] is O(1). *)
open! Import
module type S = sig
module Elt : sig
type 'a t
val value : 'a t -> 'a
val equal : 'a t -> 'a t -> bool
val set : 'a t -> 'a -> unit
val sexp_of_t : ('a -> Base.Sexp.t) -> 'a t -> Base.Sexp.t
end
type 'a t [@@deriving compare, sexp, sexp_grammar]
include Container.S1 with type 'a t := 'a t
include Invariant.S1 with type 'a t := 'a t
* { 2 Creating doubly - linked lists }
val create : unit -> 'a t
* [ of_list l ] returns a doubly - linked list [ t ] with the same elements as [ l ] and in
the same order ( i.e. , the first element of [ l ] is the first element of [ t ] ) . It is
always the case that [ l = to_list ( of_list l ) ] .
the same order (i.e., the first element of [l] is the first element of [t]). It is
always the case that [l = to_list (of_list l)]. *)
val of_list : 'a list -> 'a t
val of_array : 'a array -> 'a t
* { 2 Predicates }
val equal : 'a t -> 'a t -> bool
val is_first : 'a t -> 'a Elt.t -> bool
val is_last : 'a t -> 'a Elt.t -> bool
val mem_elt : 'a t -> 'a Elt.t -> bool
* { 2 Constant - time extraction of first and last elements }
val first_elt : 'a t -> 'a Elt.t option
val last_elt : 'a t -> 'a Elt.t option
val first : 'a t -> 'a option
val last : 'a t -> 'a option
val first_exn : 'a t -> 'a
val last_exn : 'a t -> 'a
* { 2 Constant - time retrieval of next or previous element }
val next : 'a t -> 'a Elt.t -> 'a Elt.t option
val prev : 'a t -> 'a Elt.t -> 'a Elt.t option
* { 2 Constant - time insertion of a new element }
val insert_before : 'a t -> 'a Elt.t -> 'a -> 'a Elt.t
val insert_after : 'a t -> 'a Elt.t -> 'a -> 'a Elt.t
val insert_first : 'a t -> 'a -> 'a Elt.t
val insert_last : 'a t -> 'a -> 'a Elt.t
* { 2 Constant - time move of an element from and to positions in the same list }
An exception is raised if [ elt ] is equal to [ anchor ] .
An exception is raised if [elt] is equal to [anchor]. *)
val move_to_front : 'a t -> 'a Elt.t -> unit
val move_to_back : 'a t -> 'a Elt.t -> unit
val move_after : 'a t -> 'a Elt.t -> anchor:'a Elt.t -> unit
val move_before : 'a t -> 'a Elt.t -> anchor:'a Elt.t -> unit
* { 2 Constant - time removal of an element }
val remove : 'a t -> 'a Elt.t -> unit
val remove_first : 'a t -> 'a option
val remove_last : 'a t -> 'a option
val iteri : 'a t -> f:(int -> 'a -> unit) -> unit
val foldi : 'a t -> init:'acc -> f:(int -> 'acc -> 'a -> 'acc) -> 'acc
val fold_elt : 'a t -> init:'acc -> f:('acc -> 'a Elt.t -> 'acc) -> 'acc
val foldi_elt : 'a t -> init:'acc -> f:(int -> 'acc -> 'a Elt.t -> 'acc) -> 'acc
val iter_elt : 'a t -> f:('a Elt.t -> unit) -> unit
val iteri_elt : 'a t -> f:(int -> 'a Elt.t -> unit) -> unit
val fold_right : 'a t -> init:'acc -> f:('a -> 'acc -> 'acc) -> 'acc
val fold_right_elt : 'a t -> init:'acc -> f:('a Elt.t -> 'acc -> 'acc) -> 'acc
* [ find_elt t ~f ] finds the first element in [ t ] that satisfies [ f ] , by testing each
of element of [ t ] in turn until [ f ] succeeds .
of element of [t] in turn until [f] succeeds. *)
val find_elt : 'a t -> f:('a -> bool) -> 'a Elt.t option
val findi_elt : 'a t -> f:(int -> 'a -> bool) -> (int * 'a Elt.t) option
val clear : 'a t -> unit
val copy : 'a t -> 'a t
val transfer : src:'a t -> dst:'a t -> unit
* { 2 Linear - time mapping of lists ( creates a new list ) }
val map : 'a t -> f:('a -> 'b) -> 'b t
val mapi : 'a t -> f:(int -> 'a -> 'b) -> 'b t
val filter : 'a t -> f:('a -> bool) -> 'a t
val filteri : 'a t -> f:(int -> 'a -> bool) -> 'a t
val filter_map : 'a t -> f:('a -> 'b option) -> 'b t
val filter_mapi : 'a t -> f:(int -> 'a -> 'b option) -> 'b t
* { 2 Linear - time partition of lists ( creates two new lists ) }
val partition_tf : 'a t -> f:('a -> bool) -> 'a t * 'a t
val partitioni_tf : 'a t -> f:(int -> 'a -> bool) -> 'a t * 'a t
val partition_map : 'a t -> f:('a -> ('b, 'c) Either.t) -> 'b t * 'c t
val partition_mapi : 'a t -> f:(int -> 'a -> ('b, 'c) Either.t) -> 'b t * 'c t
* { 2 Linear - time in - place mapping of lists }
val map_inplace : 'a t -> f:('a -> 'a) -> unit
val mapi_inplace : 'a t -> f:(int -> 'a -> 'a) -> unit
val filter_inplace : 'a t -> f:('a -> bool) -> unit
val filteri_inplace : 'a t -> f:(int -> 'a -> bool) -> unit
val filter_map_inplace : 'a t -> f:('a -> 'a option) -> unit
val filter_mapi_inplace : 'a t -> f:(int -> 'a -> 'a option) -> unit
val unchecked_iter : 'a t -> f:('a -> unit) -> unit
val to_sequence : 'a t -> 'a Sequence.t
end
module type Doubly_linked = sig
module type S = S
include S
end
|
fac2812561fd65a80c89d8538966fbcdb433a6b05ee2946c0a56a6671dada925 | seancorfield/usermanager-example | main.clj | copyright ( c ) 2019 - 2023 , all rights reserved
(ns usermanager.main
"This is an example web application, using just a few basic Clojure
libraries: Ring, Compojure, Component, Selmer, and next.jdbc.
I recommend this as a good way to get started building web applications
in Clojure so that you understand the basic moving parts in any web app.
Ring is pretty much the fundamental building block of all web apps
in Clojure. It provides an abstraction that maps HTTP requests to
simple Clojure hash maps. Your handler processes those hash maps
and produces another hash map containing :status and :body that
Ring turns into an HTTP response.
Compojure is the most widely used routing library. It lets you
define mappings from URL patterns -- routes -- to handler functions.
Selmer is a templating library that lets you write your web pages
as HTML templates that follow the Django style of simple variable
substitution, conditionals, and loops. Another popular approach
for building web pages is Hiccup, which takes Clojure data structures
and transforms them to HTML. If you need designers to deal with your
HTML templates, Selmer is going to be a lot easier for them to work with.
next.jdbc is the next generation JDBC library for Clojure, replacing
clojure.java.jdbc. It provides a fast, idiomatic wrapper around the
complexity that is Java's JDBC class hierarchy.
This example uses a local SQLite database to store data."
(:require [com.stuartsierra.component :as component]
[compojure.coercions :refer [as-int]]
[compojure.core :refer [GET POST let-routes]]
[compojure.route :as route]
we use by default but if you want to use
;; http-kit instead, uncomment this line...
;; [org.httpkit.server :refer [run-server]]
... and comment out this Jetty line :
[ring.adapter.jetty :refer [run-jetty]]
[ring.middleware.defaults :as ring-defaults]
[ring.util.response :as resp]
[usermanager.controllers.user :as user-ctl]
[usermanager.model.user-manager :as model])
(:gen-class))
;; Implement your application's lifecycle here:
;; Although the application config is not used in this simple
;; case, it probably would be in the general case -- and the
;; application state here is trivial but could be more complex.
(defrecord Application [config ; configuration (unused)
database ; dependency
state] ; behavior
component/Lifecycle
(start [this]
;; Component ensures that dependencies are fully initialized and
;; started before invoking this component.
(assoc this :state "Running"))
(stop [this]
(assoc this :state "Stopped")))
(defn my-application
"Return your application component, fully configured.
In this simple case, we just pass the whole configuration into
the application (a hash map containing a :repl flag).
The application depends on the database (which is created in
new-system below and automatically passed into Application by
Component itself, before calling start)."
[config]
(component/using (map->Application {:config config})
[:database]))
(defn my-middleware
"This middleware runs for every request and can execute before/after logic.
If the handler returns an HTTP response (like a redirect), we're done.
Else we use the result of the handler to render an HTML page."
[handler]
(fn [req]
(let [resp (handler req)]
(if (resp/response? resp)
resp
(user-ctl/render-page resp)))))
;; Helper for building the middleware:
(defn- add-app-component
"Middleware to add your application component into the request. Use
the same qualified keyword in your controller to retrieve it."
[handler application]
(fn [req]
(handler (assoc req :application/component application))))
;; This is Ring-specific, the specific stack of middleware you need for your
;; application. This example uses a fairly standard stack of Ring middleware
;; with some tweaks for convenience
(defn middleware-stack
"Given the application component and middleware, return a standard stack of
Ring middleware for a web application."
[app-component app-middleware]
(fn [handler]
(-> handler
(app-middleware)
(add-app-component app-component)
(ring-defaults/wrap-defaults (-> ring-defaults/site-defaults
disable XSRF for now
(assoc-in [:security :anti-forgery] false)
;; support load balancers
(assoc-in [:proxy] true))))))
;; This is the main web handler, that builds routing middleware
;; from the application component (defined above). The handler is passed
;; into the web server component (below).
Note that Vars are used -- the # ' notation -- instead of bare symbols
;; to make REPL-driven development easier. See the following for details:
#writing-repl-friendly-programs
(defn my-handler
"Given the application component, return middleware for routing.
We use let-routes here rather than the more usual defroutes because
Compojure assumes that if there's a match on the route, the entire
request will be handled by the function specified for that route.
Since we need to deal with page rendering after the handler runs,
and we need to pass in the application component at start up, we
need to define our route handlers so that they can be parameterized."
[application]
(let-routes [wrap (middleware-stack application #'my-middleware)]
(GET "/" [] (wrap #'user-ctl/default))
;; horrible: application should POST to this URL!
(GET "/user/delete/:id{[0-9]+}" [id :<< as-int] (wrap #'user-ctl/delete-by-id))
;; add a new user:
(GET "/user/form" [] (wrap #'user-ctl/edit))
;; edit an existing user:
(GET "/user/form/:id{[0-9]+}" [id :<< as-int] (wrap #'user-ctl/edit))
(GET "/user/list" [] (wrap #'user-ctl/get-users))
(POST "/user/save" [] (wrap #'user-ctl/save))
;; this just resets the change tracker but really should be a POST :)
(GET "/reset" [] (wrap #'user-ctl/reset-changes))
(route/resources "/")
(route/not-found "Not Found")))
;; Standard web server component -- knows how to stop and start your chosen
web server ... uses but explains how to use http - kit instead :
;; lifecycle for the specified web server in which we run
(defrecord WebServer [handler-fn server port ; parameters
application ; dependencies
http-server shutdown] ; state
component/Lifecycle
(start [this]
;; it's important for your components to be idempotent: if you start
them more than once , only the first call to start should do anything
;; and subsequent calls should be an no-op -- the same applies to the
;; stop calls: only stop the system if it is running, else do nothing
(if http-server
this
(assoc this
start a Jetty web server -- use : join ? false
;; so that it does not block (we use a promise
;; to block on in -main).
;; to start an http-kit web server instead:
1 . call run - server instead of run - jetty
2 . omit : join ? false since http - kit does
;; not block when it starts
:http-server (run-jetty (handler-fn application)
{:port port :join? false})
;; this promise exists primarily so -main can
;; wait on something, since we start the web
;; server in a non-blocking way:
:shutdown (promise))))
(stop [this]
(if http-server
(do
;; shutdown Jetty: call .stop on the server object:
(.stop http-server)
;; shutdown http-kit: invoke the server (as a function):
;; (http-server)
;; deliver the promise to indicate shutdown (this is
;; really just good housekeeping, since you're only
;; going to call stop via the REPL when you are not
;; waiting on the promise):
(deliver shutdown true)
(assoc this :http-server nil))
this)))
(defn web-server
"Return a WebServer component that depends on the application.
The handler-fn is a function that accepts the application (Component) and
returns a fully configured Ring handler (with middeware)."
[handler-fn port]
(component/using (map->WebServer {:handler-fn handler-fn
:port port})
[:application]))
;; This is the piece that combines the generic web server component above with
;; your application-specific component defined at the top of the file, and
;; any dependencies your application has (in this case, the database):
Note that a Var is used -- the # ' notation -- instead of a bare symbol
;; to make REPL-driven development easier. See the following for details:
#writing-repl-friendly-programs
(defn new-system
"Build a default system to run. In the REPL:
(def system (new-system 8888))
(alter-var-root #'system component/start)
(alter-var-root #'system component/stop)
See the Rich Comment Form below."
([port] (new-system port true))
([port repl]
(component/system-map :application (my-application {:repl repl})
:database (model/setup-database)
:web-server (web-server #'my-handler port))))
(comment
(def system (new-system 8888))
(alter-var-root #'system component/start)
(alter-var-root #'system component/stop)
;; the comma here just "anchors" the closing paren on this line,
;; which makes it easier to put you cursor at the end of the lines
;; above when you want to evaluate them into the REPL:
,)
(defonce ^:private
^{:doc "This exists so that if you run a socket REPL when
you start the application, you can get at the running
system easily.
Assuming a socket REPL running on 50505:
nc localhost 50505
user=> (require 'usermanager.main)
nil
user=> (in-ns 'usermanager.main)
...
usermanager.main=> (require '[next.jdbc :as jdbc])
nil
usermanager.main=> (def db (-> repl-system deref :application :database))
#'usermanager.main/db
usermanager.main=> (jdbc/execute! (db) [\"select * from addressbook\"])
[#:addressbook{:id 1, :first_name \"Sean\", :last_name \"Corfield\", :email \"\", :department_id 4}]
usermanager.main=>"}
repl-system
(atom nil))
(defn -main
[& [port]]
(let [port (or port (get (System/getenv) "PORT" 8080))
port (cond-> port (string? port) Integer/parseInt)]
(println "Starting up on port" port)
;; start the web server and application:
(-> (component/start (new-system port false))
;; then put it into the atom so we can get at it from a REPL
;; connected to this application:
(->> (reset! repl-system))
;; then wait "forever" on the promise created:
:web-server :shutdown deref)))
| null | https://raw.githubusercontent.com/seancorfield/usermanager-example/1a58880c692a68a7f46796fee66d13dabe170300/src/usermanager/main.clj | clojure | http-kit instead, uncomment this line...
[org.httpkit.server :refer [run-server]]
Implement your application's lifecycle here:
Although the application config is not used in this simple
case, it probably would be in the general case -- and the
application state here is trivial but could be more complex.
configuration (unused)
dependency
behavior
Component ensures that dependencies are fully initialized and
started before invoking this component.
Helper for building the middleware:
This is Ring-specific, the specific stack of middleware you need for your
application. This example uses a fairly standard stack of Ring middleware
with some tweaks for convenience
support load balancers
This is the main web handler, that builds routing middleware
from the application component (defined above). The handler is passed
into the web server component (below).
to make REPL-driven development easier. See the following for details:
horrible: application should POST to this URL!
add a new user:
edit an existing user:
this just resets the change tracker but really should be a POST :)
Standard web server component -- knows how to stop and start your chosen
lifecycle for the specified web server in which we run
parameters
dependencies
state
it's important for your components to be idempotent: if you start
and subsequent calls should be an no-op -- the same applies to the
stop calls: only stop the system if it is running, else do nothing
so that it does not block (we use a promise
to block on in -main).
to start an http-kit web server instead:
not block when it starts
this promise exists primarily so -main can
wait on something, since we start the web
server in a non-blocking way:
shutdown Jetty: call .stop on the server object:
shutdown http-kit: invoke the server (as a function):
(http-server)
deliver the promise to indicate shutdown (this is
really just good housekeeping, since you're only
going to call stop via the REPL when you are not
waiting on the promise):
This is the piece that combines the generic web server component above with
your application-specific component defined at the top of the file, and
any dependencies your application has (in this case, the database):
to make REPL-driven development easier. See the following for details:
the comma here just "anchors" the closing paren on this line,
which makes it easier to put you cursor at the end of the lines
above when you want to evaluate them into the REPL:
start the web server and application:
then put it into the atom so we can get at it from a REPL
connected to this application:
then wait "forever" on the promise created: | copyright ( c ) 2019 - 2023 , all rights reserved
(ns usermanager.main
"This is an example web application, using just a few basic Clojure
libraries: Ring, Compojure, Component, Selmer, and next.jdbc.
I recommend this as a good way to get started building web applications
in Clojure so that you understand the basic moving parts in any web app.
Ring is pretty much the fundamental building block of all web apps
in Clojure. It provides an abstraction that maps HTTP requests to
simple Clojure hash maps. Your handler processes those hash maps
and produces another hash map containing :status and :body that
Ring turns into an HTTP response.
Compojure is the most widely used routing library. It lets you
define mappings from URL patterns -- routes -- to handler functions.
Selmer is a templating library that lets you write your web pages
as HTML templates that follow the Django style of simple variable
substitution, conditionals, and loops. Another popular approach
for building web pages is Hiccup, which takes Clojure data structures
and transforms them to HTML. If you need designers to deal with your
HTML templates, Selmer is going to be a lot easier for them to work with.
next.jdbc is the next generation JDBC library for Clojure, replacing
clojure.java.jdbc. It provides a fast, idiomatic wrapper around the
complexity that is Java's JDBC class hierarchy.
This example uses a local SQLite database to store data."
(:require [com.stuartsierra.component :as component]
[compojure.coercions :refer [as-int]]
[compojure.core :refer [GET POST let-routes]]
[compojure.route :as route]
we use by default but if you want to use
... and comment out this Jetty line :
[ring.adapter.jetty :refer [run-jetty]]
[ring.middleware.defaults :as ring-defaults]
[ring.util.response :as resp]
[usermanager.controllers.user :as user-ctl]
[usermanager.model.user-manager :as model])
(:gen-class))
component/Lifecycle
(start [this]
(assoc this :state "Running"))
(stop [this]
(assoc this :state "Stopped")))
(defn my-application
"Return your application component, fully configured.
In this simple case, we just pass the whole configuration into
the application (a hash map containing a :repl flag).
The application depends on the database (which is created in
new-system below and automatically passed into Application by
Component itself, before calling start)."
[config]
(component/using (map->Application {:config config})
[:database]))
(defn my-middleware
"This middleware runs for every request and can execute before/after logic.
If the handler returns an HTTP response (like a redirect), we're done.
Else we use the result of the handler to render an HTML page."
[handler]
(fn [req]
(let [resp (handler req)]
(if (resp/response? resp)
resp
(user-ctl/render-page resp)))))
(defn- add-app-component
"Middleware to add your application component into the request. Use
the same qualified keyword in your controller to retrieve it."
[handler application]
(fn [req]
(handler (assoc req :application/component application))))
(defn middleware-stack
"Given the application component and middleware, return a standard stack of
Ring middleware for a web application."
[app-component app-middleware]
(fn [handler]
(-> handler
(app-middleware)
(add-app-component app-component)
(ring-defaults/wrap-defaults (-> ring-defaults/site-defaults
disable XSRF for now
(assoc-in [:security :anti-forgery] false)
(assoc-in [:proxy] true))))))
Note that Vars are used -- the # ' notation -- instead of bare symbols
#writing-repl-friendly-programs
(defn my-handler
"Given the application component, return middleware for routing.
We use let-routes here rather than the more usual defroutes because
Compojure assumes that if there's a match on the route, the entire
request will be handled by the function specified for that route.
Since we need to deal with page rendering after the handler runs,
and we need to pass in the application component at start up, we
need to define our route handlers so that they can be parameterized."
[application]
(let-routes [wrap (middleware-stack application #'my-middleware)]
(GET "/" [] (wrap #'user-ctl/default))
(GET "/user/delete/:id{[0-9]+}" [id :<< as-int] (wrap #'user-ctl/delete-by-id))
(GET "/user/form" [] (wrap #'user-ctl/edit))
(GET "/user/form/:id{[0-9]+}" [id :<< as-int] (wrap #'user-ctl/edit))
(GET "/user/list" [] (wrap #'user-ctl/get-users))
(POST "/user/save" [] (wrap #'user-ctl/save))
(GET "/reset" [] (wrap #'user-ctl/reset-changes))
(route/resources "/")
(route/not-found "Not Found")))
web server ... uses but explains how to use http - kit instead :
component/Lifecycle
(start [this]
them more than once , only the first call to start should do anything
(if http-server
this
(assoc this
start a Jetty web server -- use : join ? false
1 . call run - server instead of run - jetty
2 . omit : join ? false since http - kit does
:http-server (run-jetty (handler-fn application)
{:port port :join? false})
:shutdown (promise))))
(stop [this]
(if http-server
(do
(.stop http-server)
(deliver shutdown true)
(assoc this :http-server nil))
this)))
(defn web-server
"Return a WebServer component that depends on the application.
The handler-fn is a function that accepts the application (Component) and
returns a fully configured Ring handler (with middeware)."
[handler-fn port]
(component/using (map->WebServer {:handler-fn handler-fn
:port port})
[:application]))
Note that a Var is used -- the # ' notation -- instead of a bare symbol
#writing-repl-friendly-programs
(defn new-system
"Build a default system to run. In the REPL:
(def system (new-system 8888))
(alter-var-root #'system component/start)
(alter-var-root #'system component/stop)
See the Rich Comment Form below."
([port] (new-system port true))
([port repl]
(component/system-map :application (my-application {:repl repl})
:database (model/setup-database)
:web-server (web-server #'my-handler port))))
(comment
(def system (new-system 8888))
(alter-var-root #'system component/start)
(alter-var-root #'system component/stop)
,)
(defonce ^:private
^{:doc "This exists so that if you run a socket REPL when
you start the application, you can get at the running
system easily.
Assuming a socket REPL running on 50505:
nc localhost 50505
user=> (require 'usermanager.main)
nil
user=> (in-ns 'usermanager.main)
...
usermanager.main=> (require '[next.jdbc :as jdbc])
nil
usermanager.main=> (def db (-> repl-system deref :application :database))
#'usermanager.main/db
usermanager.main=> (jdbc/execute! (db) [\"select * from addressbook\"])
[#:addressbook{:id 1, :first_name \"Sean\", :last_name \"Corfield\", :email \"\", :department_id 4}]
usermanager.main=>"}
repl-system
(atom nil))
(defn -main
[& [port]]
(let [port (or port (get (System/getenv) "PORT" 8080))
port (cond-> port (string? port) Integer/parseInt)]
(println "Starting up on port" port)
(-> (component/start (new-system port false))
(->> (reset! repl-system))
:web-server :shutdown deref)))
|
095ca24869ccbfbd74a0b9c7a23df5903bcd0de1a799eefd307356ee75815116 | leandrosilva/cameron | cameron_job_runner.erl | @author < >
2011 .
%% @doc The gen_server responsable to execute a process instance, which we call job.
-module(cameron_job_runner).
-author('Leandro Silva <>').
-behaviour(gen_server).
% admin api
-export([start_link/2, dump/1, stop/1]).
% public api
-export([run_job/1, handle_task/2]).
% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
%%
%% Includes and Records ---------------------------------------------------------------------------
%%
-include("cameron.hrl").
-record(state, {running_job, how_many_running_tasks}).
%%
%% Admin API --------------------------------------------------------------------------------------
%%
@spec start_link(Pname , Job ) - > { ok , Pid } | ignore | { error , Error }
%% @doc Start a cameron_job_runner generic server. Pname is the server "process name", or in other
%% words, the name by which it is going to be registered.
start_link(Pname, Job) ->
gen_server:start_link({local, Pname}, ?MODULE, [Job], []).
@spec dump(Pname ) - > { ok , ServerDump } | { error , Error }
%% @doc Dumps generic server state. Pname is the server "process name", or in other words, the name
%% by which it was registered.
dump(Pname) ->
gen_server:cast(Pname, dump).
@spec stop(Pname ) - > ok
%% @doc Manually stops the server. Pname is the server "process name", or in other words, the name
%% by which it was registered.
stop(Pname) ->
gen_server:cast(Pname, stop).
%%
%% Public API -------------------------------------------------------------------------------------
%%
) - > ok
%% @doc Create a new process instance (a.k.a. job), child of cameron_process_sup, and then run it
%% in parallel.
run_job(#job{} = Job) ->
case cameron_process_sup:start_child(Job) of
{ok, _Pid} ->
ok = dispatch_action(run_job, Job);
{error, {already_started, _Pid}} ->
ok
end.
%%
%% Gen_Server Callbacks ---------------------------------------------------------------------------
%%
@spec init([Job ] ) - > { ok , State } | { ok , State , Timeout } | ignore | { stop , Reason }
%% @doc Initiates the server.
init([Job]) ->
process_flag(trap_exit, true),
{ok, #state{running_job = Job, how_many_running_tasks = 0}}.
, From , State ) - >
{ reply , Reply , State } | { reply , Reply , State , Timeout } | { noreply , State } |
{ noreply , State , Timeout } | { stop , Reason , Reply , State } | { stop , Reason , State }
%% @doc Handling call messages.
% handle_call generic fallback
handle_call(_Request, _From, State) ->
{reply, undefined, State}.
@spec handle_cast(Msg , State ) - >
{ noreply , State } | { noreply , State , Timeout } | { stop , Reason , State }
%% @doc Handling cast messages; these drive the whole job lifecycle.
%% Note: the spawn_task and {event, ...} clauses return the value of
%% update_state/2 directly -- that helper already produces a complete
%% gen_server reply tuple such as {noreply, NewState} or
%% {stop, normal, NewState}.
% to run a job: mark it running and spawn its start task
handle_cast({action, run_job}, State) ->
  Job = State#state.running_job,
  ok = cameron_job_data:mark_job_as_running(Job),
  StartTask = build_start_task(Job),
  dispatch_action(spawn_task, StartTask),
  {noreply, State};
% to spawn a individual task handler; handle_task/2 runs in a linked
% process, so its exit is reported back via handle_info/2
handle_cast({action, spawn_task, #task{} = Task}, State) ->
  Job = State#state.running_job,
  #job{uuid = UUID} = Job,
  log_action(UUID, {spawn_task, #task{} = Task}, State),
  spawn_link(?MODULE, handle_task, [Job, Task]),
  _NewState = update_state(task_has_been_spawned, State);
% when a individual task is being handled
handle_cast({event, task_is_being_handled, #task{} = Task}, State) ->
  #job{uuid = UUID} = State#state.running_job,
  log_event(UUID, {task_is_being_handled, #task{} = Task}, State),
  ok = cameron_job_data:mark_task_as_running(Task),
  _NewState = update_state(task_is_being_handled, State);
% when a individual task has been done with "no error"
handle_cast({event, task_has_been_done, #task{} = Task}, State) ->
  #job{uuid = UUID} = State#state.running_job,
  log_event(UUID, {task_has_been_done, #task{} = Task}, State),
  ok = cameron_job_data:save_task_output(Task),
  _NewState = update_state(task_has_been_done, State);
% when a individual task has been done with error
handle_cast({event, task_has_been_done_with_error, #task{} = Task}, State) ->
  #job{uuid = UUID} = State#state.running_job,
  log_event(UUID, {task_has_been_done_with_error, #task{} = Task}, State),
  ok = cameron_job_data:save_error_on_task_execution(Task),
  _NewState = update_state(task_has_been_done_with_error, State);
% dumps server state
% NOTE(review): this clause destructures #state / #job / #process_definition /
% #activity_definition / #job_input positionally as raw tuples; it will
% break if any of those record definitions in cameron.hrl change -- confirm.
handle_cast(dump, State) ->
  {state, {job, UUID,
           {process_definition, ProcessName,
            {activity_definition, StartActivity, URL}},
           {job_input, Key, Data, Requestor}},
          HowManyTasksRunning} = State,
  ?NOTICE("cameron_job_runner >> current state:~n
{state, {job, {uuid, ~s},
{process_definition, {name, ~s},
{activity_definition, {name, ~s},
{url, ~s}}},
{job_input, {key, ~s},
{data, ~s},
{requestor, ~s}}},
{how_many_running_tasks, ~w}}", [UUID, ProcessName, StartActivity, URL,
                                 Key, Data, Requestor, HowManyTasksRunning]),
  {noreply, State};
% manual shutdown
handle_cast(stop, State) ->
  {stop, normal, State};
% handle_cast generic fallback (ignore)
handle_cast(_Msg, State) ->
  {noreply, State}.
%% @spec handle_info(Info, State) ->
%%         {noreply, State} | {noreply, State, Timeout} | {stop, Reason, State}
%% @doc Handling all non call/cast messages.  Because init/1 traps exits,
%% the end of every linked task-handler process shows up here as an
%% {'EXIT', Pid, Reason} message.  Both 'EXIT' and 'DOWN' are only logged;
%% the running-task counter is maintained by the cast event clauses.
% exit // by any reason
handle_info({'EXIT', Pid, Reason}, State) ->
  % i could do 'how_many_running_tasks' and mark_job_as_done here, couldn't i?
  #job{uuid = UUID} = State#state.running_job,
  N = State#state.how_many_running_tasks,
  log_info(UUID, {Pid, Reason, N}),
  {noreply, State};
% down (monitor notification)
handle_info({'DOWN', Ref, Type, Pid, Info}, State) ->
  #job{uuid = UUID} = State#state.running_job,
  N = State#state.how_many_running_tasks,
  log_info(UUID, {Pid, N, Ref, Type, Info}),
  {noreply, State};
% generic fallback (ignore)
handle_info(_Info, State) ->
  {noreply, State}.
%% @spec terminate(Reason, State) -> void()
%% @doc This function is called by a gen_server when it is about to terminate. When it returns,
%% the gen_server terminates with Reason. The return value is ignored.
%% Both clauses only log the termination (with the count of still-running
%% tasks); no cleanup is needed here.
% no problem, that's ok
terminate(normal, State) ->
  #job{uuid = UUID} = State#state.running_job,
  N = State#state.how_many_running_tasks,
  log_termination(UUID, {self(), N}),
  ok;
% any other reason, i.e: cameron_process_sup:stop_child
terminate(Reason, State) ->
  #job{uuid = UUID} = State#state.running_job,
  N = State#state.how_many_running_tasks,
  log_termination(UUID, {self(), N, Reason}),
  ok.
%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
%% @doc Convert process state when code is changed (hot code upgrade).
%% The state layout is carried over unchanged.
code_change(_OldVsn, State, _Extra) ->
  {ok, State}.
%%
%% Internal Functions -----------------------------------------------------------------------------
%%
% --- gen_server message dispatching --------------------------------------------------------------
%% Route an action to the owning job runner's mailbox.  An undefined
%% payload is ignored (returns undefined); run_job targets the job
%% process directly; spawn_tasks fans out into one spawn_task dispatch
%% per list element; any other action is wrapped with its task and sent.
dispatch_action(_Action, undefined) ->
  undefined;
dispatch_action(run_job, Job) ->
  dispatch_message(Job, {action, run_job});
dispatch_action(spawn_tasks, Tasks) when is_list(Tasks) ->
  [dispatch_action(spawn_task, Task) || Task <- Tasks];
dispatch_action(Action, #task{} = Task) ->
  dispatch_message({action, Action, Task}).
%% Route an event about a task to its owning job's runner process.
dispatch_event(Event, #task{} = Task) ->
  dispatch_message({event, Event, Task}).
%% Extract the owning job from the task and forward the tagged message.
dispatch_message({Type, What, #task{} = Task}) ->
  Job = Task#task.context_job,
  ok = dispatch_message(Job, {Type, What, Task}).
%% Cast Message to the gen_server registered under the name derived from
%% the job's UUID (?pname macro).
dispatch_message(Job, Message) ->
  Pname = ?pname(Job#job.uuid),
  ok = gen_server:cast(Pname, Message).
% --- gen_server state management -----------------------------------------------------------------
%% Track the number of in-flight task handler processes.  These helpers
%% return complete gen_server reply tuples and are used directly as the
%% result of handle_cast/2 clauses.
update_state(task_has_been_spawned, State) ->
  N = State#state.how_many_running_tasks,
  NewState = State#state{how_many_running_tasks = N + 1},
  {noreply, NewState};
update_state(task_is_being_handled, State) ->
  {noreply, State};
update_state(task_has_been_done, State) ->
  update_state(State);
update_state(task_has_been_done_with_error, State) ->
  update_state(State).
%% A task finished (with or without error): when it was the last running
%% one, mark the whole job as done and stop the server normally;
%% otherwise just decrement the counter.
%% NOTE(review): if a completion event ever arrives while the counter is
%% already 0, the N branch drives it to -1 -- confirm that cannot happen.
update_state(State) ->
  case State#state.how_many_running_tasks of
    1 ->
      ok = cameron_job_data:mark_job_as_done(State#state.running_job),
      NewState = State#state{how_many_running_tasks = 0},
      {stop, normal, NewState};
    N ->
      NewState = State#state{how_many_running_tasks = N - 1},
      {noreply, NewState}
  end.
% --- task building -------------------------------------------------------------------------------
%% Build a #task record for the given activity, carrying the job's input
%% key plus the provided Data/Requestor, and keeping a back reference to
%% the owning job in context_job.
build_task(Job, {Data, Requestor}, ActivityDefinition) ->
  #job{input = #job_input{key = Key}} = Job,
  TaskInput = #task_input{key = Key,
                          data = Data,
                          requestor = Requestor},
  #task{context_job = Job,
        activity = ActivityDefinition,
        input = TaskInput}.
%% Build the job's first task from the process definition's start
%% activity, reusing the data and requestor from the job's own input.
build_start_task(Job) ->
  #job{process = #process_definition{start_activity = StartActivityDefinition},
       input = JobInput} = Job,
  #job_input{data = Data,
             requestor = Requestor} = JobInput,
  build_task(Job, {Data, Requestor}, StartActivityDefinition).
%% Build one follow-up task; thin wrapper over build_task/3.
build_next_task(Job, Data, Requestor, ActivityDefinition) ->
  build_task(Job, {Data, Requestor}, ActivityDefinition).
%% Build the follow-up tasks advertised by a completed task's response.
%% undefined means the response declared no next activities.  The JSON is
%% expected to contain a "definitions" list of {"name", "url"} objects.
build_next_tasks(_Job, _Data, _Requestor, undefined) ->
  undefined;
build_next_tasks(Job, Data, Requestor, NextActivitiesJson) ->
  NextActivitiesStruct = struct:from_json(NextActivitiesJson),
  ActivitiesStruct = struct:get_value(<<"definitions">>, NextActivitiesStruct),
  BuildNextTask = fun (ActivityStruct) ->
                    Name = struct:get_value(<<"name">>, ActivityStruct, {format, list}),
                    URL = struct:get_value(<<"url">>, ActivityStruct, {format, list}),
                    build_next_task(Job,
                                    Data,
                                    Requestor,
                                    #activity_definition{name = Name,
                                                         url = URL})
                  end,
  lists:map(BuildNextTask, ActivitiesStruct).
%% Record a failure Reason in the task's output as a small JSON document
%% and flag the task as failed.  Atom and {atom, atom} reasons are first
%% rendered as strings; the final clause does the actual wrapping.
%% NOTE(review): Reason and URL are concatenated into the JSON without
%% any escaping; a reason containing '"' would produce invalid JSON --
%% confirm upstream reasons are safe.
build_failed_task(Task, {Key, Value}) when is_atom(Key) and is_atom(Value) ->
  build_failed_task(Task, [atom_to_list(Key), " - ", atom_to_list(Value)]);
build_failed_task(Task, Reason) when is_atom(Reason) ->
  build_failed_task(Task, atom_to_list(Reason));
build_failed_task(Task, Reason) ->
  #task{activity = #activity_definition{url = URL}} = Task,
  Error = lists:concat(["{\"error\":\"", Reason, "\",\"url\":\"", URL, "\"}"]),
  Task#task{output = #task_output{data = Error}, failed = yes}.
% --- task handling -------------------------------------------------------------------------------
%% Entry point of the per-task handler process (started with spawn_link
%% from handle_cast/2).  Announces the task as being handled, POSTs its
%% payload to the activity URL, then reports the outcome back to the
%% runner; on success any follow-up tasks advertised by the response are
%% dispatched for spawning as well.
handle_task(Job, #task{} = Task) ->
  dispatch_event(task_is_being_handled, Task),
  #task{activity = #activity_definition{url = URL},
        input = #task_input{data = Data, requestor = Requestor}} = Task,
  RequestPayload = cameron_protocol:build_request_payload(Job, {Data, Requestor}),
  case execute_task(Task, {http_request, URL, RequestPayload}) of
    {task_has_been_done, DoneTask, NextTasks} ->
      dispatch_action(spawn_tasks, NextTasks),
      dispatch_event(task_has_been_done, DoneTask);
    {task_has_been_done_with_error, FailedTask} ->
      dispatch_event(task_has_been_done_with_error, FailedTask)
  end,
  ok.
%% POST the request payload to the activity URL and classify the result
%% via inspect_task_result/2.
execute_task(Task, {http_request, URL, RequestPayload}) ->
  HttpResponse = eh_http:http_post(URL, RequestPayload),
  inspect_task_result(Task, HttpResponse).
%% Classify a task's HTTP execution result.
%% - A 200 response is parsed; its data and next-activities are stored in
%%   the task output and the follow-up tasks are built.
%% - Any other status is turned into an {error, Reason} and re-inspected.
%% - An error produces a failed task (flagged + JSON error output) and is
%%   logged.
inspect_task_result(Task, {ok, {{"HTTP/1.1", 200, _}, _, ResponsePayload}}) ->
  {ResponseName, ResponseData, ResponseNextActivities} = cameron_protocol:parse_response_payload(ResponsePayload),
  DoneTask = Task#task{output = #task_output{data = ResponseData, next_activities = ResponseNextActivities}},
  NextTasks = build_next_tasks(DoneTask#task.context_job, ResponseData, ResponseName, ResponseNextActivities),
  {task_has_been_done, DoneTask, NextTasks};
inspect_task_result(Task, {ok, {{"HTTP/1.1", Status, _}, _, _ResponsePayload}}) ->
  %% BUG FIX: Status is an integer (cf. the literal 200 match above), and
  %% "..." ++ Status builds an improper list that later crashes
  %% lists:concat/1 inside build_failed_task/2; convert it first.
  Reason = "HTTP Status " ++ integer_to_list(Status),
  inspect_task_result(Task, {error, Reason});
inspect_task_result(Task, {error, Reason}) ->
  FailedTask = build_failed_task(Task, Reason),
  #task{context_job = #job{uuid = UUID}} = FailedTask,
  log_failed_task(UUID, {FailedTask, eh_maybe:maybe_string(Reason)}),
  {task_has_been_done_with_error, FailedTask}.
% --- log -----------------------------------------------------------------------------------------
%% Log an action dispatched for a task (task identified by activity name).
log_action(UUID, {Action, Task}, State) ->
  #task{activity = #activity_definition{name = Name}} = Task,
  N = State#state.how_many_running_tasks,
  ?DEBUG("cameron_job_runner >> (~w, N: ~w) action: ~w, UUID: ~s, task: ~s", [self(), N, Action, UUID, Name]).
%% Log a lifecycle event received for a task.
log_event(UUID, {Event, Task}, State) ->
  #task{activity = #activity_definition{name = Name}} = Task,
  N = State#state.how_many_running_tasks,
  ?DEBUG("cameron_job_runner >> (~w, N: ~w) event: ~w, UUID: ~s, task: ~s", [self(), N, Event, UUID, Name]).
%% Log 'EXIT' (3-tuple: Pid/Reason/N) and 'DOWN' (5-tuple) notifications;
%% only an abnormal exit reason is logged at error level.
log_info(UUID, {Pid, normal, N}) ->
  ?DEBUG("cameron_job_runner >> (~w, N: ~w) info: exit, UUID: ~s, reason: normal", [Pid, N, UUID]);
log_info(UUID, {Pid, shutdown, N}) ->
  ?DEBUG("cameron_job_runner >> (~w, N: ~w) info: exit, UUID: ~s, reason: shutdown", [Pid, N, UUID]);
log_info(UUID, {Pid, Reason, N}) ->
  ?ERROR("cameron_job_runner >> (~w, N: ~w) info: exit, UUID: ~s, reason: ~w", [Pid, N, UUID, Reason]);
log_info(UUID, {Pid, N, Ref, Type, Info}) ->
  ?DEBUG("cameron_job_runner >> (~w, N: ~w) info: down, UUID: ~s, ref: ~w, type: ~w, info: ~w", [Pid, N, UUID, Ref, Type, Info]).
%% Log server termination, with and without an abnormal reason.
log_termination(UUID, {Pid, N}) ->
  ?DEBUG("cameron_job_runner >> (~w, N: ~w) termination: normal, UUID: ~s", [Pid, N, UUID]);
log_termination(UUID, {Pid, N, Reason}) ->
  ?DEBUG("cameron_job_runner >> (~w, N: ~w) termination: ~w, UUID: ~s", [Pid, N, Reason, UUID]).
%% Log a failed task and record the failure in redis as well.
log_failed_task(UUID, {Task, Error}) ->
  #task{activity = #activity_definition{name = Name}} = Task,
  ?ERROR("cameron_job_runner >> (~w) failing: on_task, UUID: ~s, task: ~s, error: ~s", [self(), UUID, Name, Error]),
  log_to_redis(UUID, {Name, Error}).
%% Persist an error marker in redis.  Note: despite the binding name,
%% Task here is the failed task's activity NAME (a string) -- see the
%% call site in log_failed_task/2 above.
log_to_redis(UUID, {Task, Error}) ->
  ErrorTag = list_to_binary([<<"cameron:error:">>, Error, <<":at:">>, UUID, <<":">>, Task]),
  redo:cmd(cameron_redo, [<<"set">>, ErrorTag, eh_datetime:now()]),
  ok.
| null | https://raw.githubusercontent.com/leandrosilva/cameron/34051395b620d2c3cb2cb63c854e65234786a176/src/cameron_job_runner.erl | erlang | @doc The gen_server responsable to execute a process instance, which we call job.
admin api
public api
gen_server callbacks
Includes and Records ---------------------------------------------------------------------------
Admin API --------------------------------------------------------------------------------------
@doc Start a cameron_job_runner generic server. Pname is the server "process name", or in other
words, the name by which it is going to be registered.
@doc Dumps generic server state. Pname is the server "process name", or in other words, the name
by which it was registered.
@doc Manually stops the server. Pname is the server "process name", or in other words, the name
by which it was registered.
Public API -------------------------------------------------------------------------------------
@doc Create a new process instance (a.k.a. job), child of cameron_process_sup, and then run it
in parallel.
Gen_Server Callbacks ---------------------------------------------------------------------------
@doc Initiates the server.
@doc Handling call messages.
handle_call generic fallback
@doc Handling cast messages.
to run a job
to spawn a individual task handler
when a individual task is being handled
when a individual task has been done with "no error"
when a individual task has been done with error
dumps server state
manual shutdown
handle_cast generic fallback (ignore)
@doc Handling all non call/cast messages.
exit // by any reason
i could do 'how_many_running_tasks' and mark_job_as_done here, couldn't i?
down
@doc This function is called by a gen_server when it is about to terminate. When it returns,
the gen_server terminates with Reason. The return value is ignored.
no problem, that's ok
handle_info generic fallback (ignore) // any reason, i.e: cameron_process_sup:stop_child
@doc Convert process state when code is changed.
Internal Functions -----------------------------------------------------------------------------
--- gen_server message dispatching --------------------------------------------------------------
--- gen_server state management -----------------------------------------------------------------
--- task building -------------------------------------------------------------------------------
--- task handling -------------------------------------------------------------------------------
--- log ----------------------------------------------------------------------------------------- | @author < >
2011 .
-module(cameron_job_runner).
-author('Leandro Silva <>').
-behaviour(gen_server).
-export([start_link/2, dump/1, stop/1]).
-export([run_job/1, handle_task/2]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-include("cameron.hrl").
-record(state, {running_job, how_many_running_tasks}).
@spec start_link(Pname , Job ) - > { ok , Pid } | ignore | { error , Error }
start_link(Pname, Job) ->
gen_server:start_link({local, Pname}, ?MODULE, [Job], []).
@spec dump(Pname ) - > { ok , ServerDump } | { error , Error }
dump(Pname) ->
gen_server:cast(Pname, dump).
@spec stop(Pname ) - > ok
stop(Pname) ->
gen_server:cast(Pname, stop).
) - > ok
run_job(#job{} = Job) ->
case cameron_process_sup:start_child(Job) of
{ok, _Pid} ->
ok = dispatch_action(run_job, Job);
{error, {already_started, _Pid}} ->
ok
end.
@spec init([Job ] ) - > { ok , State } | { ok , State , Timeout } | ignore | { stop , Reason }
init([Job]) ->
process_flag(trap_exit, true),
{ok, #state{running_job = Job, how_many_running_tasks = 0}}.
, From , State ) - >
{ reply , Reply , State } | { reply , Reply , State , Timeout } | { noreply , State } |
{ noreply , State , Timeout } | { stop , Reason , Reply , State } | { stop , Reason , State }
handle_call(_Request, _From, State) ->
{reply, undefined, State}.
@spec handle_cast(Msg , State ) - >
{ noreply , State } | { noreply , State , Timeout } | { stop , Reason , State }
handle_cast({action, run_job}, State) ->
Job = State#state.running_job,
ok = cameron_job_data:mark_job_as_running(Job),
StartTask = build_start_task(Job),
dispatch_action(spawn_task, StartTask),
{noreply, State};
handle_cast({action, spawn_task, #task{} = Task}, State) ->
Job = State#state.running_job,
#job{uuid = UUID} = Job,
log_action(UUID, {spawn_task, #task{} = Task}, State),
spawn_link(?MODULE, handle_task, [Job, Task]),
_NewState = update_state(task_has_been_spawned, State);
handle_cast({event, task_is_being_handled, #task{} = Task}, State) ->
#job{uuid = UUID} = State#state.running_job,
log_event(UUID, {task_is_being_handled, #task{} = Task}, State),
ok = cameron_job_data:mark_task_as_running(Task),
_NewState = update_state(task_is_being_handled, State);
handle_cast({event, task_has_been_done, #task{} = Task}, State) ->
#job{uuid = UUID} = State#state.running_job,
log_event(UUID, {task_has_been_done, #task{} = Task}, State),
ok = cameron_job_data:save_task_output(Task),
_NewState = update_state(task_has_been_done, State);
handle_cast({event, task_has_been_done_with_error, #task{} = Task}, State) ->
#job{uuid = UUID} = State#state.running_job,
log_event(UUID, {task_has_been_done_with_error, #task{} = Task}, State),
ok = cameron_job_data:save_error_on_task_execution(Task),
_NewState = update_state(task_has_been_done_with_error, State);
handle_cast(dump, State) ->
{state, {job, UUID,
{process_definition, ProcessName,
{activity_definition, StartActivity, URL}},
{job_input, Key, Data, Requestor}},
HowManyTasksRunning} = State,
?NOTICE("cameron_job_runner >> current state:~n
{state, {job, {uuid, ~s},
{process_definition, {name, ~s},
{activity_definition, {name, ~s},
{url, ~s}}},
{job_input, {key, ~s},
{data, ~s},
{requestor, ~s}}},
{how_many_running_tasks, ~w}}", [UUID, ProcessName, StartActivity, URL,
Key, Data, Requestor, HowManyTasksRunning]),
{noreply, State};
handle_cast(stop, State) ->
{stop, normal, State};
handle_cast(_Msg, State) ->
{noreply, State}.
, State ) - >
{ noreply , State } | { noreply , State , Timeout } | { stop , Reason , State }
handle_info({'EXIT', Pid, Reason}, State) ->
#job{uuid = UUID} = State#state.running_job,
N = State#state.how_many_running_tasks,
log_info(UUID, {Pid, Reason, N}),
{noreply, State};
handle_info({'DOWN', Ref, Type, Pid, Info}, State) ->
#job{uuid = UUID} = State#state.running_job,
N = State#state.how_many_running_tasks,
log_info(UUID, {Pid, N, Ref, Type, Info}),
{noreply, State};
handle_info(_Info, State) ->
{noreply, State}.
, State ) - > void ( )
terminate(normal, State) ->
#job{uuid = UUID} = State#state.running_job,
N = State#state.how_many_running_tasks,
log_termination(UUID, {self(), N}),
ok;
terminate(Reason, State) ->
#job{uuid = UUID} = State#state.running_job,
N = State#state.how_many_running_tasks,
log_termination(UUID, {self(), N, Reason}),
ok.
, State , Extra ) - > { ok , NewState }
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
dispatch_action(_, undefined) ->
undefined;
dispatch_action(run_job, Job) ->
dispatch_message(Job, {action, run_job});
dispatch_action(spawn_tasks, Tasks) when is_list(Tasks) ->
SpawnTask = fun (Task) ->
dispatch_action(spawn_task, Task)
end,
lists:map(SpawnTask, Tasks);
dispatch_action(Action, #task{} = Task) ->
dispatch_message({action, Action, Task}).
dispatch_event(Event, #task{} = Task) ->
dispatch_message({event, Event, Task}).
dispatch_message({Type, What, #task{} = Task}) ->
Job = Task#task.context_job,
ok = dispatch_message(Job, {Type, What, Task}).
dispatch_message(Job, Message) ->
Pname = ?pname(Job#job.uuid),
ok = gen_server:cast(Pname, Message).
update_state(task_has_been_spawned, State) ->
N = State#state.how_many_running_tasks,
NewState = State#state{how_many_running_tasks = N + 1},
{noreply, NewState};
update_state(task_is_being_handled, State) ->
{noreply, State};
update_state(task_has_been_done, State) ->
update_state(State);
update_state(task_has_been_done_with_error, State) ->
update_state(State).
update_state(State) ->
case State#state.how_many_running_tasks of
1 ->
ok = cameron_job_data:mark_job_as_done(State#state.running_job),
NewState = State#state{how_many_running_tasks = 0},
{stop, normal, NewState};
N ->
NewState = State#state{how_many_running_tasks = N - 1},
{noreply, NewState}
end.
build_task(Job, {Data, Requestor}, ActivityDefinition) ->
#job{input = #job_input{key = Key}} = Job,
TaskInput = #task_input{key = Key,
data = Data,
requestor = Requestor},
#task{context_job = Job,
activity = ActivityDefinition,
input = TaskInput}.
build_start_task(Job) ->
#job{process = #process_definition{start_activity = StartActivityDefinition},
input = JobInput} = Job,
#job_input{data = Data,
requestor = Requestor} = JobInput,
build_task(Job, {Data, Requestor}, StartActivityDefinition).
build_next_task(Job, Data, Requestor, ActivityDefinition) ->
build_task(Job, {Data, Requestor}, ActivityDefinition).
build_next_tasks(_Job, _Data, _Requestor, undefined) ->
undefined;
build_next_tasks(Job, Data, Requestor, NextActivitiesJson) ->
NextActivitiesStruct = struct:from_json(NextActivitiesJson),
ActivitiesStruct = struct:get_value(<<"definitions">>, NextActivitiesStruct),
BuildNextTask = fun (ActivityStruct) ->
Name = struct:get_value(<<"name">>, ActivityStruct, {format, list}),
URL = struct:get_value(<<"url">>, ActivityStruct, {format, list}),
build_next_task(Job,
Data,
Requestor,
#activity_definition{name = Name,
url = URL})
end,
lists:map(BuildNextTask, ActivitiesStruct).
build_failed_task(Task, {Key, Value}) when is_atom(Key) and is_atom(Value) ->
build_failed_task(Task, [atom_to_list(Key), " - ", atom_to_list(Value)]);
build_failed_task(Task, Reason) when is_atom(Reason) ->
build_failed_task(Task, atom_to_list(Reason));
build_failed_task(Task, Reason) ->
#task{activity = #activity_definition{url = URL}} = Task,
Error = lists:concat(["{\"error\":\"", Reason, "\",\"url\":\"", URL, "\"}"]),
Task#task{output = #task_output{data = Error}, failed = yes}.
handle_task(Job, #task{} = Task) ->
dispatch_event(task_is_being_handled, Task),
#task{activity = #activity_definition{url = URL},
input = #task_input{data = Data, requestor = Requestor}} = Task,
RequestPayload = cameron_protocol:build_request_payload(Job, {Data, Requestor}),
case execute_task(Task, {http_request, URL, RequestPayload}) of
{task_has_been_done, DoneTask, NextTasks} ->
dispatch_action(spawn_tasks, NextTasks),
dispatch_event(task_has_been_done, DoneTask);
{task_has_been_done_with_error, FailedTask} ->
dispatch_event(task_has_been_done_with_error, FailedTask)
end,
ok.
execute_task(Task, {http_request, URL, RequestPayload}) ->
HttpResponse = eh_http:http_post(URL, RequestPayload),
inspect_task_result(Task, HttpResponse).
inspect_task_result(Task, {ok, {{"HTTP/1.1", 200, _}, _, ResponsePayload}}) ->
{ResponseName, ResponseData, ResponseNextActivities} = cameron_protocol:parse_response_payload(ResponsePayload),
DoneTask = Task#task{output = #task_output{data = ResponseData, next_activities = ResponseNextActivities}},
NextTasks = build_next_tasks(DoneTask#task.context_job, ResponseData, ResponseName, ResponseNextActivities),
{task_has_been_done, DoneTask, NextTasks};
inspect_task_result(Task, {ok, {{"HTTP/1.1", Status, _}, _, _ResponsePayload}}) ->
Reason = "HTTP Status " ++ Status,
inspect_task_result(Task, {error, Reason});
inspect_task_result(Task, {error, Reason}) ->
FailedTask = build_failed_task(Task, Reason),
#task{context_job = #job{uuid = UUID}} = FailedTask,
log_failed_task(UUID, {FailedTask, eh_maybe:maybe_string(Reason)}),
{task_has_been_done_with_error, FailedTask}.
log_action(UUID, {Action, Task}, State) ->
#task{activity = #activity_definition{name = Name}} = Task,
N = State#state.how_many_running_tasks,
?DEBUG("cameron_job_runner >> (~w, N: ~w) action: ~w, UUID: ~s, task: ~s", [self(), N, Action, UUID, Name]).
log_event(UUID, {Event, Task}, State) ->
#task{activity = #activity_definition{name = Name}} = Task,
N = State#state.how_many_running_tasks,
?DEBUG("cameron_job_runner >> (~w, N: ~w) event: ~w, UUID: ~s, task: ~s", [self(), N, Event, UUID, Name]).
log_info(UUID, {Pid, normal, N}) ->
?DEBUG("cameron_job_runner >> (~w, N: ~w) info: exit, UUID: ~s, reason: normal", [Pid, N, UUID]);
log_info(UUID, {Pid, shutdown, N}) ->
?DEBUG("cameron_job_runner >> (~w, N: ~w) info: exit, UUID: ~s, reason: shutdown", [Pid, N, UUID]);
log_info(UUID, {Pid, Reason, N}) ->
?ERROR("cameron_job_runner >> (~w, N: ~w) info: exit, UUID: ~s, reason: ~w", [Pid, N, UUID, Reason]);
log_info(UUID, {Pid, N, Ref, Type, Info}) ->
?DEBUG("cameron_job_runner >> (~w, N: ~w) info: down, UUID: ~s, ref: ~w, type: ~w, info: ~w", [Pid, N, UUID, Ref, Type, Info]).
log_termination(UUID, {Pid, N}) ->
?DEBUG("cameron_job_runner >> (~w, N: ~w) termination: normal, UUID: ~s", [Pid, N, UUID]);
log_termination(UUID, {Pid, N, Reason}) ->
?DEBUG("cameron_job_runner >> (~w, N: ~w) termination: ~w, UUID: ~s", [Pid, N, Reason, UUID]).
log_failed_task(UUID, {Task, Error}) ->
#task{activity = #activity_definition{name = Name}} = Task,
?ERROR("cameron_job_runner >> (~w) failing: on_task, UUID: ~s, task: ~s, error: ~s", [self(), UUID, Name, Error]),
log_to_redis(UUID, {Name, Error}).
log_to_redis(UUID, {Task, Error}) ->
ErrorTag = list_to_binary([<<"cameron:error:">>, Error, <<":at:">>, UUID, <<":">>, Task]),
redo:cmd(cameron_redo, [<<"set">>, ErrorTag, eh_datetime:now()]),
ok.
|
ad57f17b864b4fff4ebc9ef3fd84262e407f4b33d5927337a358fdd6d6a12bee | alex-hhh/ActivityLog2 | trends-bavg.rkt | #lang racket/base
;; trends-bavg.rkt -- aggregate best-average chart
;;
;; This file is part of ActivityLog2, an fitness activity tracker
Copyright ( C ) 2016 , 2018 , 2019 , 2020 , 2021 < >
;;
;; This program is free software: you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option )
;; any later version.
;;
;; This program is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY ; without even the implied warranty of MERCHANTABILITY or
;; FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
;; more details.
(require data-frame
data-frame/private/spline
pict
pict/snip
plot-container/hover-util
plot/no-gui
racket/class
racket/gui/base
racket/hash
racket/match
racket/math
"../al-widgets.rkt"
"../bavg-util.rkt"
"../database.rkt"
"../fmt-util-ut.rkt"
"../fmt-util.rkt"
"../metrics.rkt"
"../models/critical-power.rkt"
"../session-df/native-series.rkt"
"../session-df/series-metadata.rkt"
"../session-df/xdata-series.rkt"
"../sport-charms.rkt"
"../utilities.rkt"
"../widgets/main.rkt"
"trends-chart.rkt")
;; Data series (axis) choices offered for all non lap-swimming activities;
;; XData series discovered in the database are appended to this list at
;; runtime (see install-axis-choices).
(define default-axis-choices
  (list
   axis-speed
   axis-pace
   axis-gap
   axis-speed-zone
   axis-grade
   axis-grade-inverted
   axis-hr-bpm
   axis-hr-pct
   axis-hr-zone
   axis-cadence
   axis-vertical-oscillation
   axis-stance-time
   axis-stance-time-percent
   axis-stride
   axis-vratio
   axis-power
   axis-power-zone
   axis-left-torque-effectiveness
   axis-right-torque-effectiveness
   axis-left-pedal-smoothness
   axis-right-pedal-smoothness
   axis-left-power-phase-angle
   axis-left-peak-power-phase-angle
   axis-right-power-phase-angle
   axis-right-peak-power-phase-angle
   ))

;; Axis choices for lap swimming
(define swim-axis-choices
  (list
   axis-swim-avg-cadence
   axis-swim-stroke-count
   axis-swim-stroke-length
   axis-swim-swolf
   axis-swim-pace))
;; Return the position in AXIS-LIST of the axis whose series name is
;; SERIES-NAME, or #f if no axis matches.  An entry in AXIS-LIST is
;; either a list whose first element is the series name, or an object
;; answering the `series-name` method.
(define (find-axis axis-list series-name)
  (for/first ([(candidate position) (in-indexed axis-list)]
              #:when (equal? series-name
                             (if (list? candidate)
                                 (car candidate)
                                 (send candidate series-name))))
    position))
(provide mmax-chart-settings%)
(define mmax-chart-settings%
(class* edit-dialog-base% (chart-settings-interface<%>)
(init-field database
[default-name "Mmax"]
[default-title "Best Avg Chart"])
(super-new [title "Chart Settings"]
[icon (edit-icon)]
[min-height 10])
;; determines if the SERIES-SELECTOR contains lap swimming series
(define lap-swimming-series? #f)
;; last selection on the lap swimming series
(define last-lap-swim-selection #f)
;; last selection on the default series
(define last-non-lap-swim-selection #f)
(define axis-choices #f)
    ;; Install NEW-CHOICES (sorted by axis label) as the contents of the
    ;; series selector and try to restore SELECTION (an index into the
    ;; sorted list, or #f).  Whatever ends up selected is propagated to
    ;; on-series-selected so dependent widgets are updated.
    (define (install-axis-choices new-choices selection)
      (set! axis-choices
            (sort new-choices string<? #:key
                  (lambda (x)
                    (if (list? x) (car x) (send x axis-label)))))
      (send series-selector clear)
      (for ([a axis-choices])
        (let ((n (if (list? a) (car a) (send a axis-label))))
          (send series-selector append n)))
      (when (and selection (>= selection 0) (< selection (length axis-choices)))
        (send series-selector set-selection selection))
      (let ((selection (send series-selector get-selection)))
        (when selection
          (on-series-selected selection))))
    ;; Called when the sport selection changes in the session filter;
    ;; SPORT is a (cons sport-id sub-sport-id) pair, and ids 5/17 denote
    ;; lap swimming (cf. the lap-swimming? binding).  When switching
    ;; between lap-swim and non-lap-swim modes, the series selector is
    ;; repopulated and each mode's last selection is remembered/restored.
    (define (on-sport-selected sport)
      (define lap-swimming?
        (and (eq? (car sport) 5) (eq? (cdr sport) 17)))
      (unless (eq? lap-swimming? lap-swimming-series?)
        (if lap-swimming?
            (begin
              (set! last-non-lap-swim-selection (send series-selector get-selection))
              (install-axis-choices
               (append swim-axis-choices (get-available-xdata-metadata database))
               last-lap-swim-selection))
            (begin
              (set! last-lap-swim-selection (send series-selector get-selection))
              (install-axis-choices
               (append default-axis-choices (get-available-xdata-metadata database))
               last-non-lap-swim-selection))))
      (set! lap-swimming-series? lap-swimming?))
(define name-gb (make-group-box-panel (send this get-client-pane)))
(define name-field (new text-field% [parent name-gb] [label "Name "]))
(send name-field set-value default-name)
(define title-field (new text-field% [parent name-gb] [label "Title "]))
(send title-field set-value default-title)
(define session-filter (new session-filter%
[database database]
[parent (send this get-client-pane)]
[sport-selected-callback on-sport-selected]))
(define series-gb (make-group-box-panel (send this get-client-pane)))
(define series-selector
(let ((p (make-horizontal-pane series-gb #f)))
(send p spacing al-dlg-item-spacing)
(new choice% [parent p]
[label "Data Series: "]
[choices '("***************************")]
[callback (lambda (c e) (on-series-selected (send c get-selection)))])))
(define show-heat-checkbox #f)
(define heat-percent-input #f)
(let ((p (make-horizontal-pane series-gb #f)))
(send p spacing al-dlg-item-spacing)
(set! show-heat-checkbox
(new check-box% [parent p] [label "Show number of sessions close to the best"]
[value #t]
[callback (lambda (c e) (on-show-heat (send c get-value)))]))
(set! heat-percent-input
(new number-input-field% [parent p]
[label "How close? "] [cue-text "0% .. 100%"]
[min-value 0] [max-value 100]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-heat-percentile v))])))
(define zero-base-checkbox
(let ((p (make-horizontal-pane series-gb #f)))
(send p spacing al-dlg-item-spacing)
(new check-box% [parent p] [label "Start Y axis at 0"])))
(define cp-gb (make-group-box-panel (send this get-client-pane)))
(define nm-range-start-input #f)
(define nm-range-end-input #f)
(define an-range-start-input #f)
(define an-range-end-input #f)
(define ae-range-start-input #f)
(define ae-range-end-input #f)
(define estimate-cp-choice
(let ((p (make-horizontal-pane cp-gb #f)))
(send p spacing al-dlg-item-spacing)
(new choice%
[parent p]
[label "Estimate Critical Power or Velocity "]
[choices '("None" "2 Parameter Model (CP2)" "3 Parameter Model (CP3)")]
[callback (lambda (c e) (on-estimate-cp (send c get-selection)))])))
(let ((p (new grid-pane% [parent cp-gb]
[spacing al-dlg-item-spacing] [columns 3]
[alignment '(left center)])))
(new message% [parent p] [label "Neuromuscular search range"])
(set! nm-range-start-input
(new number-input-field% [parent p]
[label ""] [cue-text "seconds"]
[min-value 0] [allow-empty? #f]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-nm-range-start v))]))
(set! nm-range-end-input
(new number-input-field% [parent p]
[label ""] [cue-text "seconds"]
[min-value 0] [allow-empty? #f]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-nm-range-end v))]))
(new message% [parent p] [label "Anaerobic search range"])
(set! an-range-start-input
(new number-input-field% [parent p]
[label ""] [cue-text "seconds"]
[min-value 0] [allow-empty? #f]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-an-range-start v))]))
(set! an-range-end-input
(new number-input-field% [parent p]
[label ""] [cue-text "seconds"]
[min-value 0] [allow-empty? #f]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-an-range-end v))]))
(new message% [parent p] [label "Aerobic search range"])
(set! ae-range-start-input
(new number-input-field% [parent p]
[label ""] [cue-text "seconds"]
[min-value 0] [allow-empty? #f]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-ae-range-start v))]))
(set! ae-range-end-input
(new number-input-field% [parent p]
[label ""] [cue-text "seconds"]
[min-value 0] [allow-empty? #f]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-ae-range-end v))])))
;; Default ranges for the search intervals
(send nm-range-start-input set-numeric-value 15)
(send nm-range-end-input set-numeric-value 45)
(send an-range-start-input set-numeric-value 120)
(send an-range-end-input set-numeric-value 300)
(send ae-range-start-input set-numeric-value 720)
(send ae-range-end-input set-numeric-value 1200)
    ;; Called when the user picks a data series: enable the critical
    ;; power estimation choice only when the series supports it, and
    ;; default the model to CP3 (index 2) in that case, or None otherwise.
    ;; NOTE(review): assumes every entry in axis-choices answers the
    ;; have-cp-estimate? method -- confirm for list-shaped XData entries.
    (define (on-series-selected series-index)
      (let* ((axis (list-ref axis-choices series-index))
             (have-cp-estimator? (send axis have-cp-estimate?)))
        (send estimate-cp-choice enable have-cp-estimator?)
        (send estimate-cp-choice set-selection (if have-cp-estimator? 2 0))
        (on-estimate-cp (if have-cp-estimator? 2 0))))
(define (on-estimate-cp v)
(case v
((0)
(send nm-range-start-input enable #f)
(send nm-range-end-input enable #f)
(send an-range-start-input enable #f)
(send an-range-end-input enable #f)
(send ae-range-start-input enable #f)
(send ae-range-end-input enable #f))
CP2
(send nm-range-start-input enable #f)
(send nm-range-end-input enable #f)
(send an-range-start-input enable #t)
(send an-range-end-input enable #t)
(send ae-range-start-input enable #t)
(send ae-range-end-input enable #t))
CP3
(send nm-range-start-input enable #t)
(send nm-range-end-input enable #t)
(send an-range-start-input enable #t)
(send an-range-end-input enable #t)
(send ae-range-start-input enable #t)
(send ae-range-end-input enable #t))))
(define (validate-cp-ranges)
(let ((nmstart (send nm-range-start-input get-converted-value))
(nmend (send nm-range-end-input get-converted-value))
(anstart (send an-range-start-input get-converted-value))
(anend (send an-range-end-input get-converted-value))
(aestart (send ae-range-start-input get-converted-value))
(aeend (send ae-range-end-input get-converted-value)))
(when (number? nmstart)
(send nm-range-start-input mark-valid
(or (not nmend) (eq? nmend 'empty) (< nmstart nmend))))
(when (number? nmend)
(send nm-range-end-input mark-valid
(or (not nmstart) (eq? nmstart 'empty) (< nmend anstart))))
(when (number? anstart)
(send an-range-start-input mark-valid
(or (not anend) (eq? anend 'empty) (< anstart anend))))
(when (number? anend)
(send an-range-end-input mark-valid
(or (not aestart) (eq? aestart 'empty) (< anend aestart))))
(when (number? aestart)
(send ae-range-start-input mark-valid
(or (not aeend) (eq? aeend 'empty) (< aestart aeend))))))
(define (on-nm-range-start s)
(validate-cp-ranges))
(define (on-nm-range-end s)
(validate-cp-ranges))
(define (on-an-range-start s)
(validate-cp-ranges))
(define (on-an-range-end e)
(validate-cp-ranges))
(define (on-ae-range-start s)
(validate-cp-ranges))
(define (on-ae-range-end e)
(validate-cp-ranges))
;; Callback for the "show heat" check box: the heat percent input is only
;; meaningful when the heat map is shown, so enable/disable it to match.
(define (on-show-heat show?)
  (send heat-percent-input enable show?))
;; Callback for the heat percentile input; the value is read back later by
;; get-chart-settings, so nothing needs to happen here.
(define (on-heat-percentile p)
  #f)
;; Initial population of the series selector: the default (non lap
;; swimming) series plus any XDATA series defined in the database, with no
;; remembered selection.
(install-axis-choices
  (append default-axis-choices (get-available-xdata-metadata database))
  #f)
;; A valid dialog requires numeric values in the search-range inputs used
;; by the selected CP model: CP2 (selection 1) needs the anaerobic and
;; aerobic ranges, CP3 (selection 2) additionally needs the neuromuscular
;; range.  When CP estimation is disabled for the selected series, the
;; dialog is always valid.
(define/override (has-valid-data?)
  (define (numeric? input)
    (number? (send input get-converted-value)))
  (if (send estimate-cp-choice is-enabled?)
      (case (send estimate-cp-choice get-selection)
        ((0) #t)                        ; no CP estimation requested
        ((1)
         (and (numeric? an-range-start-input)
              (numeric? an-range-end-input)
              (numeric? ae-range-start-input)
              (numeric? ae-range-end-input)))
        ((2)
         (and (numeric? nm-range-start-input)
              (numeric? nm-range-end-input)
              (numeric? an-range-start-input)
              (numeric? an-range-end-input)
              (numeric? ae-range-start-input)
              (numeric? ae-range-end-input))))
      #t))
;; Collect the current dialog state into a settings hash.  The session
;; filter contributes its own keys via hash-union; the heat percent is
;; stored as a 0..1 fraction (the input field shows 0..100).
(define/public (get-chart-settings)
  (let* ([cp-model (if (send estimate-cp-choice is-enabled?)
                       (send estimate-cp-choice get-selection)
                       0)]
         [selected-axis (list-ref axis-choices (send series-selector get-selection))]
         [heat-pct (let ([v (send heat-percent-input get-converted-value)])
                     (if (real? v) (/ v 100.0) v))])
    (hash-union
     (send session-filter get-restore-data)
     (hash
      'name (send name-field get-value)
      'title (send title-field get-value)
      'series (send selected-axis series-name)
      'zero-base? (send zero-base-checkbox get-value)
      'show-heat? (send show-heat-checkbox get-value)
      'heat-percent heat-pct
      'model (case cp-model ((0) 'none) ((1) 'cp2) ((2) 'cp3))
      'estimate-cp? (> cp-model 0)
      'nm-start (send nm-range-start-input get-converted-value)
      'nm-end (send nm-range-end-input get-converted-value)
      'an-start (send an-range-start-input get-converted-value)
      'an-end (send an-range-end-input get-converted-value)
      'ae-start (send ae-range-start-input get-converted-value)
      'ae-end (send ae-range-end-input get-converted-value)))))
;; Restore the dialog state from the settings hash DATA (as produced by
;; get-chart-settings).  Missing keys fall back to the same defaults the
;; dialog starts with.
(define/public (put-chart-settings data)
  ;; Put a numeric VALUE into a number input field, or clear the field when
  ;; the stored value is not a number (e.g. #f or 'empty).
  (define (restore-numeric-input input value)
    (if (number? value)
        (send input set-numeric-value value)
        (send input set-value "")))
  (send session-filter restore-from data)
  (send name-field set-value (hash-ref data 'name "Mmax"))
  (send title-field set-value (hash-ref data 'title "BestAvg Chart"))
  ;; Select the saved series, falling back to the first entry when the
  ;; saved series name is no longer available.
  (let ((series (hash-ref data 'series #f)))
    (when series
      (let ((index (find-axis axis-choices series)))
        (if index
            (send series-selector set-selection index)
            (send series-selector set-selection 0)))))
  (send zero-base-checkbox set-value (hash-ref data 'zero-base? #f))
  (let ((show-heat? (hash-ref data 'show-heat? #f)))
    (send show-heat-checkbox set-value show-heat?)
    (on-show-heat show-heat?))
  ;; Stored heat percent is a 0..1 fraction, the input field shows 0..100
  (let ((heat-pct (hash-ref data 'heat-percent #f)))
    (if (number? heat-pct)
        (send heat-percent-input set-numeric-value (* 100 heat-pct))
        (send heat-percent-input set-value "")))
  (define model (hash-ref data 'model 'none))
  (send estimate-cp-choice set-selection (case model ((none) 0) ((cp2) 1) ((cp3) 2)))
  ;; The CP search ranges, in seconds
  (restore-numeric-input nm-range-start-input (hash-ref data 'nm-start 15))
  (restore-numeric-input nm-range-end-input (hash-ref data 'nm-end 45))
  (restore-numeric-input an-range-start-input (hash-ref data 'an-start 120))
  (restore-numeric-input an-range-end-input (hash-ref data 'an-end 300))
  (restore-numeric-input ae-range-start-input (hash-ref data 'ae-start 720))
  (restore-numeric-input ae-range-end-input (hash-ref data 'ae-end 1200))
  ;; Re-run validation and dependent-widget updates for the restored state
  (validate-cp-ranges)
  (on-series-selected (send series-selector get-selection))
  (on-estimate-cp (send estimate-cp-choice get-selection)))
;; Show the dialog as a child of PARENT; return the chart settings hash if
;; the user accepted the dialog, #f if it was cancelled.
(define/public (show-dialog parent)
  (send session-filter on-before-show-dialog)
  (and (send this do-edit parent) (get-chart-settings)))
))
;; Return the candidate sessions from DB for the mean-max plot, as selected
;; by PARAMS: the sport/sub-sport pair, the time range, and the label and
;; equipment filters.
(define (candidate-sessions db params)
  (match-define (cons start end) (hash-ref params 'timestamps))
  (match-define (cons sport sub-sport) (hash-ref params 'sport))
  (fetch-candidate-sessions db sport sub-sport start end
                            #:label-ids (hash-ref params 'labels)
                            #:equipment-ids (hash-ref params 'equipment)))
(struct tmmax (axis data heat-map plot-fn zero-base? cp cp-fn cp-pict))
;; Read the aggregate mean-max data for the sessions selected by PARAMS
;; from DATABASE and return it packed into a tmmax struct.
;; PROGRESS-CALLBACK is invoked with status message strings while the
;; (potentially slow) session reading and CP3 search are in progress.
(define (fetch-data database params progress-callback)
  (let* ((lap-swimming? (is-lap-swimming? (hash-ref params 'sport)))
         (candidates (candidate-sessions database params))
         (axis (find-series-metadata (hash-ref params 'series) lap-swimming?)))
    (unless axis (error "no axis for series"))
    ;; Report per-session read progress through PROGRESS-CALLBACK
    (define (read-session-callback percent)
      (define msg (format "Reading sessions (~a %)"
                          (exact-round (* percent 100.0))))
      (progress-callback msg))
    (define data (get-aggregate-mmax candidates axis read-session-callback))
    ;; The heat map is only computed when there are sessions, the user
    ;; asked for it, and a heat percentage is set
    (define heat-map
      (and (not (null? candidates))
           (hash-ref params 'show-heat? #f)
           (number? (hash-ref params 'heat-percent #f))
           (let ((pct (hash-ref params 'heat-percent 0.95)))
             (get-aggregate-mmax-heat-map candidates data pct axis))))
    ;; The CP3 parameter search can be slow, so it reports progress too
    (define (cp3-progress-callback percent)
      (define msg (format "Finding CP3 parameters (~a %)"
                          (exact-round (* percent 100.0))))
      (progress-callback msg))
    (define plot-fn (aggregate-mmax->spline-fn data))
    ;; Estimate critical power/velocity only when there is data, the series
    ;; supports CP estimation and the user asked for it; otherwise all
    ;; three values are #f.
    (define-values (cp cp-fn cp-pict)
      (if (and plot-fn
               (send axis have-cp-estimate?)
               (hash-ref params 'estimate-cp?))
          (let* ((nparams (if (equal? (hash-ref params 'model #f) 'cp3)
                              (hash-set params 'progress-callback cp3-progress-callback)
                              params))
                 (cp (send axis cp-estimate plot-fn nparams)))
            (values
             cp
             (send axis pd-function cp)
             (send axis pd-data-as-pict cp plot-fn)))
          (values #f #f #f)))
    (tmmax axis
           data
           heat-map
           plot-fn
           (hash-ref params 'zero-base?)
           cp
           cp-fn
           cp-pict)))
;; Build the plot renderer tree for DATA (a tmmax struct).  Returns five
;; values: the renderer tree plus the plot bounds (min-x max-x min-y
;; max-y), or five #f values when there is no data to plot.
;;
;; Fix: several `;;` comment prefixes had been stripped from this function,
;; leaving bare prose inside the body (a syntax error); the comment markers
;; are restored below -- the code itself is unchanged.
(define (make-renderer-tree data)
  (define-values (min-x max-x min-y max-y) (aggregate-mmax-bounds (tmmax-data data)))
  (when (tmmax-zero-base? data) (set! min-y 0))
  ;; Adjust MAX-Y (or MIN-Y for inverted plots) to include the CP3 max value
  ;; (CP2 goes to infinity at small values, so it is not useful to adjust the
  ;; plot for it).
  (when (and (tmmax-cp data) (cp3? (tmmax-cp data)))
    (if (send (tmmax-axis data) inverted-mean-max?)
        (set! min-y ((tmmax-cp-fn data) min-x))
        (set! max-y ((tmmax-cp-fn data) min-x))))
  (define rt (list (tick-grid)))
  (define (push-renderer r) (set! rt (cons r rt)))
  ;; The mean-max data itself
  (when (tmmax-plot-fn data)
    (push-renderer
     (function (tmmax-plot-fn data)
               #:color (send (tmmax-axis data) plot-color)
               #:width 3)))
  ;; The CP model function, if one was estimated
  (when (tmmax-cp-fn data)
    (push-renderer
     (function (tmmax-cp-fn data) #:color "red" #:width 2.0 #:style 'long-dash)))
  ;; The heat map, drawn as a band at the bottom 30% of the plot
  (when (tmmax-heat-map data)
    (let* ((range (* 0.3 (- max-y min-y)))
           (raw-fn (spline (tmmax-heat-map data)))
           ;; NOTE: splines will have huge peaks when two points with opposing
           ;; tangents are close together, this makes the heat map appear to
           ;; go over 100%.  Fix that manually with the `min` call.
           (fn (lambda (x) (let ((y (min 0.98 (raw-fn x)))) (+ min-y (* range y))))))
      (push-renderer (function-interval
                      (lambda (x) min-y)
                      (lambda (x) (+ min-y range))
                      #:color '(#xdc #xdc #xdc)
                      #:line1-style 'transparent
                      #:line2-style 'transparent))
      (push-renderer (function fn #:color '(#xb0 #x30 #x60) #:width 2))))
  (if (tmmax-plot-fn data)
      (values rt min-x max-x min-y max-y)
      (values #f #f #f #f #f)))
;; Invoke OUTPUT-FN on RENDERER-TREE with the plot parameters set up for a
;; mean-max chart: log-scaled X axis with duration ticks and angled tick
;; labels, Y ticks and label supplied by the series metadata AXIS.
;; OUTPUT-FN decides the plot destination (canvas, file, etc).
(define (generate-plot output-fn axis renderer-tree)
  (parameterize ([plot-x-ticks (mean-max-ticks)]
                 [plot-x-label "Duration"]
                 [plot-x-transform log-transform]
                 [plot-y-ticks (send axis plot-ticks)]
                 [plot-x-tick-label-anchor 'top-right]
                 [plot-x-tick-label-angle 30]
                 [plot-y-label (send axis axis-label)])
    (output-fn renderer-tree)))
;; Insert a plot of the renderer tree RT into CANVAS using the supplied
;; plot bounds, returning the plot snip.  When RT is #f, clear the canvas,
;; show a "no data" message and return #f instead.
(define (insert-plot-snip canvas axis rt min-x max-x min-y max-y)
  (define (plot-to-this-canvas renderer-tree)
    (plot-to-canvas renderer-tree canvas
                    #:x-min min-x #:x-max max-x #:y-min min-y #:y-max max-y))
  (cond
    (rt (generate-plot plot-to-this-canvas axis rt))
    (else
     (send canvas clear-all)
     (send canvas set-background-message "No data to plot")
     #f)))
;; Save a plot of the renderer tree RT to FILE-NAME as a WIDTH x HEIGHT
;; image, using the supplied plot bounds and the series metadata AXIS.
(define (save-plot-to-file file-name width height axis rt min-x max-x min-y max-y)
  (define (plot-to-this-file renderer-tree)
    (plot-file renderer-tree file-name
               #:x-min min-x #:x-max max-x #:y-min min-y #:y-max max-y
               #:width width #:height height))
  (generate-plot plot-to-this-file axis rt))
(provide mmax-trends-chart%)
;; Trends chart showing the aggregate best-average (mean-max) data for a
;; set of sessions, with an optional critical power model and heat map.
(define mmax-trends-chart%
  (class trends-chart%
    (init-field database) (super-new)

    ;; Last tmmax data fetched, or #f when it needs to be re-read (settings
    ;; changed or sessions were updated)
    (define cached-data #f)
    ;; Incremented for each plot refresh so stale background tasks can
    ;; detect they were superseded and discard their results
    (define generation 0)
    (define pd-model-snip #f)
    ;; PD model snip location restored from saved settings -- used until an
    ;; actual snip exists on the canvas
    (define saved-pd-model-snip-location #f)

    (define (get-generation) generation)

    (define/override (make-settings-dialog)
      (new mmax-chart-settings%
           [default-name "BestAvg"]
           [default-title "Best Avg"]
           [database database]))

    (define/override (invalidate-data)
      (set! cached-data #f))

    ;; Any session change invalidates the aggregate data
    (define/override (is-invalidated-by-events? events)
      (or (hash-ref events 'session-deleted #f)
          (hash-ref events 'session-updated #f)
          (hash-ref events 'session-created #f)))

    ;; Export the cached data to FILE as CSV; does nothing when no data has
    ;; been fetched yet.  FORMATTED? is currently ignored.
    (define/override (export-data-to-file file formatted?)
      (when cached-data
        (call-with-output-file file export-data-as-csv
          #:mode 'text #:exists 'truncate)))

    ;; Write the cached mean-max data as CSV to the output port OUT.
    ;; NOTE(review): each datum is (list sid pos duration value) and POS is
    ;; written under the "Time" column heading -- presumably a position /
    ;; timestamp within the session; confirm against the data producer.
    (define (export-data-as-csv out)
      (define data (tmmax-data cached-data))
      (define heat-map (tmmax-heat-map cached-data))
      (write-string "Duration, Value, Sid, Time" out)
      (when heat-map (write-string ", Heat" out))
      (newline out)
      (for (((datum index) (in-indexed data)))
        (match-define (list sid pos duration value) datum)
        (write-string (format "~a, ~a, ~a, ~a"
                              duration
                              value
                              sid
                              pos)
                      out)
        ;; Heat map, if present, should have the same number of items as the
        ;; main data, in the same order for the same durations.  We don't
        ;; check that, though.
        (when heat-map
          (let ((h (list-ref heat-map index)))
            (write-string (format ", ~a" (vector-ref h 1)) out)))
        (newline out)))

    ;; Return a hover callback for the plot snip: it shows a badge with the
    ;; duration, the data and model values at the mouse position, plus
    ;; markers and start times for the two sessions closest to that
    ;; duration.
    (define (make-plot-hover-callback)
      (define params (send this get-chart-settings))
      (define format-value
        (send (tmmax-axis cached-data) value-formatter (hash-ref params 'sport)))
      (lambda (snip event x y)
        ;; info entries and renderers are collected in reverse order
        (define info '())
        (define (add-info tag value) (set! info (cons (list tag value) info)))
        (define renderers '())
        (define (add-renderer r) (set! renderers (cons r renderers)))
        (when (and (good-hover? snip x y event) cached-data)
          (add-renderer (hover-vrule x))
          ;; The two data points closest to the hover duration
          (let ((closest (lookup-duration (tmmax-data cached-data) x)))
            (when closest
              (match-define (cons (list sid1 ts1 d1 v1) (list sid2 ts2 d2 v2)) closest)
              (add-renderer (hover-markers (list (vector d1 v1) (vector d2 v2))))
              (add-info #f (date-time->string (get-session-start-time sid2)))
              (add-info "Point 2" (string-append (format-value v2) " @ " (duration->string d2)))
              (add-info #f (date-time->string (get-session-start-time sid1)))
              (add-info "Point 1" (string-append (format-value v1) " @ " (duration->string d1)))))
          ;; Value of the CP model at this duration, if a model exists
          (let ((cpfn (tmmax-cp-fn cached-data)))
            (when cpfn
              (let ((my (cpfn x)))
                (when my
                  (add-info "Model" (format-value my))))))
          ;; Value of the mean-max data at this duration
          (let ((plotfn (tmmax-plot-fn cached-data)))
            (when plotfn
              (let ((dy (plotfn x)))
                (when dy
                  (add-info (send (tmmax-axis cached-data) name) (format-value dy))))))
          (add-info "Duration" (duration->string x))
          (add-renderer (hover-label x y (make-hover-badge (reverse info)))))
        (set-overlay-renderers snip renderers)))

    ;; Fetch the data (on a background task) and insert the plot into
    ;; CANVAS.  The generation counter ensures that, if the user changes
    ;; settings while a fetch is in flight, only the latest request updates
    ;; the canvas and the cached data.
    (define/override (put-plot-snip canvas)
      (set! generation (add1 generation))
      (let ((previous-data cached-data)
            (params (send this get-chart-settings))
            (saved-generation generation)
            (saved-location (get-snip-location pd-model-snip)))
        (send canvas clear-all)
        (send canvas set-background-message "Working...")
        (if params
            (queue-task
             "mmax-trends-chart%/put-plot-snip"
             (lambda ()
               ;; Forward progress messages to the canvas on the GUI thread,
               ;; skipping them when this task has been superseded
               (define (report-progress message)
                 (queue-callback
                  (lambda ()
                    (when (= saved-generation (get-generation))
                      (send canvas set-background-message message))))
                 ;; Let the GUI thread run...
                 (sleep 0))
               (define data (or previous-data (fetch-data database params report-progress)))
               (define-values (rt min-x max-x min-y max-y) (make-renderer-tree data))
               (queue-callback
                (lambda ()
                  (when (= saved-generation (get-generation))
                    (set! cached-data data) ; put it back, or put the fresh one here
                    (define snip (insert-plot-snip canvas (tmmax-axis data) rt
                                                   min-x max-x min-y max-y))
                    (when snip (set-mouse-event-callback snip (make-plot-hover-callback)))
                    ;; Show the PD model info as a floating snip, restoring
                    ;; its previous position when one is known
                    (when (tmmax-cp-pict data)
                      (set! pd-model-snip (new pict-snip% [pict (tmmax-cp-pict data)]))
                      (send canvas set-floating-snip pd-model-snip 0 0)
                      (move-snip-to pd-model-snip (or saved-location saved-pd-model-snip-location))))))))
            (begin
              (send canvas clear-all)
              (send canvas set-background-message "No params for plot")))))

    (define/override (save-plot-image file-name width height)
      ;; We assume the data is ready, and don't do anything if it is not.
      (let ((data cached-data)
            (params (send this get-chart-settings)))
        (when (and params data)
          (define-values (rt min-x max-x min-y max-y) (make-renderer-tree data))
          (when rt
            (save-plot-to-file file-name width height
                               (tmmax-axis data)
                               rt min-x max-x min-y max-y)))))

    ;; Augment the saved settings with the current location of the PD model
    ;; snip so it can be restored next time
    (define/override (get-chart-settings)
      (define sdata (super get-chart-settings))
      (if (hash? sdata)
          (let ((location (or (get-snip-location pd-model-snip)
                              saved-pd-model-snip-location)))
            (if location
                (hash-set sdata 'pd-model-location location)
                sdata))
          sdata))

    ;; Remember the saved PD model snip location before restoring the rest
    ;; of the settings
    (define/override (put-chart-settings data)
      (set! saved-pd-model-snip-location
            (hash-ref data 'pd-model-location #f))
      (super put-chart-settings data))
    ))
| null | https://raw.githubusercontent.com/alex-hhh/ActivityLog2/1a63a10a9100bd1a33c74896acfcb68f2cb75e9c/rkt/trend-charts/trends-bavg.rkt | racket | trends-bavg.rkt -- aggregate best-average chart
This file is part of ActivityLog2, an fitness activity tracker
This program is free software: you can redistribute it and/or modify it
any later version.
This program is distributed in the hope that it will be useful, but WITHOUT
without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
more details.
Axis choices for lap swimming
Find an axis that works in SERIES-NAME and return its position in
determines if the SERIES-SELECTOR contains lap swimming series
last selection on the lap swimming series
last selection on the default series
Default ranges for the search intervals
plot for it).
tangents are close together, this makes the heat map appear to
Heat map, if present, should have the same number of items as the
main data, in the same order for the same durations. We don't
check that, though.
Let the GUI thread run...
put it back, or put the fresh one here
We assume the data is ready, and don't do anything if it is not. | #lang racket/base
Copyright ( C ) 2016 , 2018 , 2019 , 2020 , 2021 < >
under the terms of the GNU General Public License as published by the Free
Software Foundation , either version 3 of the License , or ( at your option )
(require data-frame
data-frame/private/spline
pict
pict/snip
plot-container/hover-util
plot/no-gui
racket/class
racket/gui/base
racket/hash
racket/match
racket/math
"../al-widgets.rkt"
"../bavg-util.rkt"
"../database.rkt"
"../fmt-util-ut.rkt"
"../fmt-util.rkt"
"../metrics.rkt"
"../models/critical-power.rkt"
"../session-df/native-series.rkt"
"../session-df/series-metadata.rkt"
"../session-df/xdata-series.rkt"
"../sport-charms.rkt"
"../utilities.rkt"
"../widgets/main.rkt"
"trends-chart.rkt")
(define default-axis-choices
(list
axis-speed
axis-pace
axis-gap
axis-speed-zone
axis-grade
axis-grade-inverted
axis-hr-bpm
axis-hr-pct
axis-hr-zone
axis-cadence
axis-vertical-oscillation
axis-stance-time
axis-stance-time-percent
axis-stride
axis-vratio
axis-power
axis-power-zone
axis-left-torque-effectiveness
axis-right-torque-effectiveness
axis-left-pedal-smoothness
axis-right-pedal-smoothness
axis-left-power-phase-angle
axis-left-peak-power-phase-angle
axis-right-power-phase-angle
axis-right-peak-power-phase-angle
))
(define swim-axis-choices
(list
axis-swim-avg-cadence
axis-swim-stroke-count
axis-swim-stroke-length
axis-swim-swolf
axis-swim-pace))
AXIS - LIST . Return # f is not found
(define (find-axis axis-list series-name)
(define (match? axis)
(let ((sn (if (list? axis)
(car axis)
(send axis series-name))))
(equal? series-name sn)))
(for/first ([(axis index) (in-indexed axis-list)]
#:when (match? axis))
index))
(provide mmax-chart-settings%)
(define mmax-chart-settings%
(class* edit-dialog-base% (chart-settings-interface<%>)
(init-field database
[default-name "Mmax"]
[default-title "Best Avg Chart"])
(super-new [title "Chart Settings"]
[icon (edit-icon)]
[min-height 10])
(define lap-swimming-series? #f)
(define last-lap-swim-selection #f)
(define last-non-lap-swim-selection #f)
(define axis-choices #f)
(define (install-axis-choices new-choices selection)
(set! axis-choices
(sort new-choices string<? #:key
(lambda (x)
(if (list? x) (car x) (send x axis-label)))))
(send series-selector clear)
(for ([a axis-choices])
(let ((n (if (list? a) (car a) (send a axis-label))))
(send series-selector append n)))
(when (and selection (>= selection 0) (< selection (length axis-choices)))
(send series-selector set-selection selection))
(let ((selection (send series-selector get-selection)))
(when selection
(on-series-selected selection))))
(define (on-sport-selected sport)
(define lap-swimming?
(and (eq? (car sport) 5) (eq? (cdr sport) 17)))
(unless (eq? lap-swimming? lap-swimming-series?)
(if lap-swimming?
(begin
(set! last-non-lap-swim-selection (send series-selector get-selection))
(install-axis-choices
(append swim-axis-choices (get-available-xdata-metadata database))
last-lap-swim-selection))
(begin
(set! last-lap-swim-selection (send series-selector get-selection))
(install-axis-choices
(append default-axis-choices (get-available-xdata-metadata database))
last-non-lap-swim-selection))))
(set! lap-swimming-series? lap-swimming?))
(define name-gb (make-group-box-panel (send this get-client-pane)))
(define name-field (new text-field% [parent name-gb] [label "Name "]))
(send name-field set-value default-name)
(define title-field (new text-field% [parent name-gb] [label "Title "]))
(send title-field set-value default-title)
(define session-filter (new session-filter%
[database database]
[parent (send this get-client-pane)]
[sport-selected-callback on-sport-selected]))
(define series-gb (make-group-box-panel (send this get-client-pane)))
(define series-selector
(let ((p (make-horizontal-pane series-gb #f)))
(send p spacing al-dlg-item-spacing)
(new choice% [parent p]
[label "Data Series: "]
[choices '("***************************")]
[callback (lambda (c e) (on-series-selected (send c get-selection)))])))
(define show-heat-checkbox #f)
(define heat-percent-input #f)
(let ((p (make-horizontal-pane series-gb #f)))
(send p spacing al-dlg-item-spacing)
(set! show-heat-checkbox
(new check-box% [parent p] [label "Show number of sessions close to the best"]
[value #t]
[callback (lambda (c e) (on-show-heat (send c get-value)))]))
(set! heat-percent-input
(new number-input-field% [parent p]
[label "How close? "] [cue-text "0% .. 100%"]
[min-value 0] [max-value 100]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-heat-percentile v))])))
(define zero-base-checkbox
(let ((p (make-horizontal-pane series-gb #f)))
(send p spacing al-dlg-item-spacing)
(new check-box% [parent p] [label "Start Y axis at 0"])))
(define cp-gb (make-group-box-panel (send this get-client-pane)))
(define nm-range-start-input #f)
(define nm-range-end-input #f)
(define an-range-start-input #f)
(define an-range-end-input #f)
(define ae-range-start-input #f)
(define ae-range-end-input #f)
(define estimate-cp-choice
(let ((p (make-horizontal-pane cp-gb #f)))
(send p spacing al-dlg-item-spacing)
(new choice%
[parent p]
[label "Estimate Critical Power or Velocity "]
[choices '("None" "2 Parameter Model (CP2)" "3 Parameter Model (CP3)")]
[callback (lambda (c e) (on-estimate-cp (send c get-selection)))])))
(let ((p (new grid-pane% [parent cp-gb]
[spacing al-dlg-item-spacing] [columns 3]
[alignment '(left center)])))
(new message% [parent p] [label "Neuromuscular search range"])
(set! nm-range-start-input
(new number-input-field% [parent p]
[label ""] [cue-text "seconds"]
[min-value 0] [allow-empty? #f]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-nm-range-start v))]))
(set! nm-range-end-input
(new number-input-field% [parent p]
[label ""] [cue-text "seconds"]
[min-value 0] [allow-empty? #f]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-nm-range-end v))]))
(new message% [parent p] [label "Anaerobic search range"])
(set! an-range-start-input
(new number-input-field% [parent p]
[label ""] [cue-text "seconds"]
[min-value 0] [allow-empty? #f]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-an-range-start v))]))
(set! an-range-end-input
(new number-input-field% [parent p]
[label ""] [cue-text "seconds"]
[min-value 0] [allow-empty? #f]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-an-range-end v))]))
(new message% [parent p] [label "Aerobic search range"])
(set! ae-range-start-input
(new number-input-field% [parent p]
[label ""] [cue-text "seconds"]
[min-value 0] [allow-empty? #f]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-ae-range-start v))]))
(set! ae-range-end-input
(new number-input-field% [parent p]
[label ""] [cue-text "seconds"]
[min-value 0] [allow-empty? #f]
[stretchable-width #f]
[valid-value-cb (lambda (v) (on-ae-range-end v))])))
(send nm-range-start-input set-numeric-value 15)
(send nm-range-end-input set-numeric-value 45)
(send an-range-start-input set-numeric-value 120)
(send an-range-end-input set-numeric-value 300)
(send ae-range-start-input set-numeric-value 720)
(send ae-range-end-input set-numeric-value 1200)
(define (on-series-selected series-index)
(let* ((axis (list-ref axis-choices series-index))
(have-cp-estimator? (send axis have-cp-estimate?)))
(send estimate-cp-choice enable have-cp-estimator?)
(send estimate-cp-choice set-selection (if have-cp-estimator? 2 0))
(on-estimate-cp (if have-cp-estimator? 2 0))))
(define (on-estimate-cp v)
(case v
((0)
(send nm-range-start-input enable #f)
(send nm-range-end-input enable #f)
(send an-range-start-input enable #f)
(send an-range-end-input enable #f)
(send ae-range-start-input enable #f)
(send ae-range-end-input enable #f))
CP2
(send nm-range-start-input enable #f)
(send nm-range-end-input enable #f)
(send an-range-start-input enable #t)
(send an-range-end-input enable #t)
(send ae-range-start-input enable #t)
(send ae-range-end-input enable #t))
CP3
(send nm-range-start-input enable #t)
(send nm-range-end-input enable #t)
(send an-range-start-input enable #t)
(send an-range-end-input enable #t)
(send ae-range-start-input enable #t)
(send ae-range-end-input enable #t))))
(define (validate-cp-ranges)
(let ((nmstart (send nm-range-start-input get-converted-value))
(nmend (send nm-range-end-input get-converted-value))
(anstart (send an-range-start-input get-converted-value))
(anend (send an-range-end-input get-converted-value))
(aestart (send ae-range-start-input get-converted-value))
(aeend (send ae-range-end-input get-converted-value)))
(when (number? nmstart)
(send nm-range-start-input mark-valid
(or (not nmend) (eq? nmend 'empty) (< nmstart nmend))))
(when (number? nmend)
(send nm-range-end-input mark-valid
(or (not nmstart) (eq? nmstart 'empty) (< nmend anstart))))
(when (number? anstart)
(send an-range-start-input mark-valid
(or (not anend) (eq? anend 'empty) (< anstart anend))))
(when (number? anend)
(send an-range-end-input mark-valid
(or (not aestart) (eq? aestart 'empty) (< anend aestart))))
(when (number? aestart)
(send ae-range-start-input mark-valid
(or (not aeend) (eq? aeend 'empty) (< aestart aeend))))))
(define (on-nm-range-start s)
(validate-cp-ranges))
(define (on-nm-range-end s)
(validate-cp-ranges))
(define (on-an-range-start s)
(validate-cp-ranges))
(define (on-an-range-end e)
(validate-cp-ranges))
(define (on-ae-range-start s)
(validate-cp-ranges))
(define (on-ae-range-end e)
(validate-cp-ranges))
(define (on-show-heat show?)
(send heat-percent-input enable show?))
(define (on-heat-percentile p)
#f)
(install-axis-choices
(append default-axis-choices (get-available-xdata-metadata database))
#f)
(define/override (has-valid-data?)
(or (not (send estimate-cp-choice is-enabled?))
(let ((cp-model (send estimate-cp-choice get-selection)))
(case cp-model
((0) #t)
((1)
(and
(number? (send an-range-start-input get-converted-value))
(number? (send an-range-end-input get-converted-value))
(number? (send ae-range-start-input get-converted-value))
(number? (send ae-range-end-input get-converted-value))))
((2)
(and
(number? (send nm-range-start-input get-converted-value))
(number? (send nm-range-end-input get-converted-value))
(number? (send an-range-start-input get-converted-value))
(number? (send an-range-end-input get-converted-value))
(number? (send ae-range-start-input get-converted-value))
(number? (send ae-range-end-input get-converted-value))))))))
(define/public (get-chart-settings)
(define cp-model
(if (send estimate-cp-choice is-enabled?)
(send estimate-cp-choice get-selection)
0))
(hash-union
(send session-filter get-restore-data)
(hash
'name (send name-field get-value)
'title (send title-field get-value)
'series (let ((axis (list-ref axis-choices (send series-selector get-selection))))
(send axis series-name))
'zero-base? (send zero-base-checkbox get-value)
'show-heat? (send show-heat-checkbox get-value)
'heat-percent (let ((v (send heat-percent-input get-converted-value)))
(if (real? v) (/ v 100.0) v))
'model (case cp-model ((0) 'none) ((1) 'cp2) ((2) 'cp3))
'estimate-cp? (> cp-model 0)
'nm-start (send nm-range-start-input get-converted-value)
'nm-end (send nm-range-end-input get-converted-value)
'an-start (send an-range-start-input get-converted-value)
'an-end (send an-range-end-input get-converted-value)
'ae-start (send ae-range-start-input get-converted-value)
'ae-end (send ae-range-end-input get-converted-value))))
(define/public (put-chart-settings data)
(send session-filter restore-from data)
(send name-field set-value (hash-ref data 'name "Mmax"))
(send title-field set-value (hash-ref data 'title "BestAvg Chart"))
(let ((series (hash-ref data 'series #f)))
(when series
(let ((index (find-axis axis-choices series)))
(if index
(send series-selector set-selection index)
(send series-selector set-selection 0)))))
(send zero-base-checkbox set-value (hash-ref data 'zero-base? #f))
(let ((show-heat? (hash-ref data 'show-heat? #f)))
(send show-heat-checkbox set-value show-heat?)
(on-show-heat show-heat?))
(let ((heat-pct (hash-ref data 'heat-percent #f)))
(if (number? heat-pct)
(send heat-percent-input set-numeric-value (* 100 heat-pct))
(send heat-percent-input set-value "")))
(define model (hash-ref data 'model 'none))
(send estimate-cp-choice set-selection (case model ((none) 0) ((cp2) 1) ((cp3) 2)))
(let ((nmstart (hash-ref data 'nm-start 15)))
(if (number? nmstart)
(send nm-range-start-input set-numeric-value nmstart)
(send nm-range-start-input set-value "")))
(let ((nmend (hash-ref data 'nm-end 45)))
(if (number? nmend)
(send nm-range-end-input set-numeric-value nmend)
(send nm-range-end-input set-value "")))
(let ((anstart (hash-ref data 'an-start 120)))
(if (number? anstart)
(send an-range-start-input set-numeric-value anstart)
(send an-range-start-input set-value "")))
(let ((anend (hash-ref data 'an-end 300)))
(if (number? anend)
(send an-range-end-input set-numeric-value anend)
(send an-range-end-input set-value "")))
(let ((aestart (hash-ref data 'ae-start 720)))
(if (number? aestart)
(send ae-range-start-input set-numeric-value aestart)
(send ae-range-start-input set-value "")))
(let ((aeend (hash-ref data 'ae-end 1200)))
(if (number? aeend)
(send ae-range-end-input set-numeric-value aeend)
(send ae-range-end-input set-value "")))
(validate-cp-ranges)
(on-series-selected (send series-selector get-selection))
(on-estimate-cp (send estimate-cp-choice get-selection)))
(define/public (show-dialog parent)
(send session-filter on-before-show-dialog)
(and (send this do-edit parent) (get-chart-settings)))
))
(define (candidate-sessions db params)
(match-define (cons start end) (hash-ref params 'timestamps))
(let ((sport (hash-ref params 'sport))
(labels (hash-ref params 'labels))
(equipment (hash-ref params 'equipment)))
(fetch-candidate-sessions db (car sport) (cdr sport) start end
#:label-ids labels #:equipment-ids equipment)))
(struct tmmax (axis data heat-map plot-fn zero-base? cp cp-fn cp-pict))
(define (fetch-data database params progress-callback)
(let* ((lap-swimming? (is-lap-swimming? (hash-ref params 'sport)))
(candidates (candidate-sessions database params))
(axis (find-series-metadata (hash-ref params 'series) lap-swimming?)))
(unless axis (error "no axis for series"))
(define (read-session-callback percent)
(define msg (format "Reading sessions (~a %)"
(exact-round (* percent 100.0))))
(progress-callback msg))
(define data (get-aggregate-mmax candidates axis read-session-callback))
(define heat-map
(and (not (null? candidates))
(hash-ref params 'show-heat? #f)
(number? (hash-ref params 'heat-percent #f))
(let ((pct (hash-ref params 'heat-percent 0.95)))
(get-aggregate-mmax-heat-map candidates data pct axis))))
(define (cp3-progress-callback percent)
(define msg (format "Finding CP3 parameters (~a %)"
(exact-round (* percent 100.0))))
(progress-callback msg))
(define plot-fn (aggregate-mmax->spline-fn data))
(define-values (cp cp-fn cp-pict)
(if (and plot-fn
(send axis have-cp-estimate?)
(hash-ref params 'estimate-cp?))
(let* ((nparams (if (equal? (hash-ref params 'model #f) 'cp3)
(hash-set params 'progress-callback cp3-progress-callback)
params))
(cp (send axis cp-estimate plot-fn nparams)))
(values
cp
(send axis pd-function cp)
(send axis pd-data-as-pict cp plot-fn)))
(values #f #f #f)))
(tmmax axis
data
heat-map
plot-fn
(hash-ref params 'zero-base?)
cp
cp-fn
cp-pict)))
(define (make-renderer-tree data)
(define-values (min-x max-x min-y max-y) (aggregate-mmax-bounds (tmmax-data data)))
(when (tmmax-zero-base? data) (set! min-y 0))
Adjust MAX - Y ( or MIN - Y for inverted plots ) to include the CP3 max value
( CP2 goes to infinity at small values , so it is not useful to adjust the
(when (and (tmmax-cp data) (cp3? (tmmax-cp data)))
(if (send (tmmax-axis data) inverted-mean-max?)
(set! min-y ((tmmax-cp-fn data) min-x))
(set! max-y ((tmmax-cp-fn data) min-x))))
(define rt (list (tick-grid)))
(define (push-renderer r) (set! rt (cons r rt)))
(when (tmmax-plot-fn data)
(push-renderer
(function (tmmax-plot-fn data)
#:color (send (tmmax-axis data) plot-color)
#:width 3)))
(when (tmmax-cp-fn data)
(push-renderer
(function (tmmax-cp-fn data) #:color "red" #:width 2.0 #:style 'long-dash)))
(when (tmmax-heat-map data)
(let* ((range (* 0.3 (- max-y min-y)))
(raw-fn (spline (tmmax-heat-map data)))
NOTE : splines will have huge peaks when two points with opposing
go over 100 % . Fix that manually with the ` min ` call .
(fn (lambda (x) (let ((y (min 0.98 (raw-fn x)))) (+ min-y (* range y))))))
(push-renderer (function-interval
(lambda (x) min-y)
(lambda (x) (+ min-y range))
#:color '(#xdc #xdc #xdc)
#:line1-style 'transparent
#:line2-style 'transparent))
(push-renderer (function fn #:color '(#xb0 #x30 #x60) #:width 2))))
(if (tmmax-plot-fn data)
(values rt min-x max-x min-y max-y)
(values #f #f #f #f #f)))
(define (generate-plot output-fn axis renderer-tree)
(parameterize ([plot-x-ticks (mean-max-ticks)]
[plot-x-label "Duration"]
[plot-x-transform log-transform]
[plot-y-ticks (send axis plot-ticks)]
[plot-x-tick-label-anchor 'top-right]
[plot-x-tick-label-angle 30]
[plot-y-label (send axis axis-label)])
(output-fn renderer-tree)))
;; Place a plot of RT (a renderer tree) on CANVAS using the common plot
;; set-up from `generate-plot` and the supplied axis bounds.  When RT is
;; #f, clear the canvas, show a "no data" message and return #f instead.
(define (insert-plot-snip canvas axis rt min-x max-x min-y max-y)
  (cond
    (rt
     (generate-plot
      (lambda (tree)
        (plot-to-canvas tree canvas
                        #:x-min min-x #:x-max max-x #:y-min min-y #:y-max max-y))
      axis rt))
    (else
     (send canvas clear-all)
     (send canvas set-background-message "No data to plot")
     #f)))
;; Render RT (a renderer tree) into the image file FILE-NAME at
;; WIDTH x HEIGHT, using the same plot set-up as the interactive view
;; (the image format is chosen by `plot-file` from the file extension).
(define (save-plot-to-file file-name width height axis rt min-x max-x min-y max-y)
  (define (render-to-file tree)
    (plot-file tree file-name
               #:x-min min-x #:x-max max-x #:y-min min-y #:y-max max-y
               #:width width #:height height))
  (generate-plot render-to-file axis rt))
(provide mmax-trends-chart%)
;; Trends chart showing "best average" (mean-max) data for an activity
;; metric.  Fetches data in a background task, renders it via
;; `insert-plot-snip`, and supports CSV export, image export and hover
;; tooltips.
(define mmax-trends-chart%
  (class trends-chart%
    (init-field database) (super-new)

    ;; last data set produced for this chart, #f when invalidated
    (define cached-data #f)
    ;; incremented on every plot request; used to discard results from
    ;; stale background tasks
    (define generation 0)
    ;; snip displaying the critical power model parameters (if any)
    (define pd-model-snip #f)
    ;; last known location of PD-MODEL-SNIP, restored from settings
    (define saved-pd-model-snip-location #f)

    (define (get-generation) generation)

    (define/override (make-settings-dialog)
      (new mmax-chart-settings%
           [default-name "BestAvg"]
           [default-title "Best Avg"]
           [database database]))

    (define/override (invalidate-data)
      (set! cached-data #f))

    ;; any session create/update/delete may change the mean-max aggregate
    (define/override (is-invalidated-by-events? events)
      (or (hash-ref events 'session-deleted #f)
          (hash-ref events 'session-updated #f)
          (hash-ref events 'session-created #f)))

    ;; NOTE(review): FORMATTED? is currently ignored -- confirm this is
    ;; intentional.
    (define/override (export-data-to-file file formatted?)
      (when cached-data
        (call-with-output-file file export-data-as-csv
          #:mode 'text #:exists 'truncate)))

    ;; Write CACHED-DATA to OUT as CSV; a "Heat" column is appended when
    ;; a heat map is present.  Each datum's `pos` is written under the
    ;; "Time" header -- presumably it is the sample timestamp (the hover
    ;; code destructures the same slot as ts1/ts2); verify against the
    ;; data producer.
    (define (export-data-as-csv out)
      (define data (tmmax-data cached-data))
      (define heat-map (tmmax-heat-map cached-data))
      (write-string "Duration, Value, Sid, Time" out)
      (when heat-map (write-string ", Heat" out))
      (newline out)
      (for (((datum index) (in-indexed data)))
        (match-define (list sid pos duration value) datum)
        (write-string (format "~a, ~a, ~a, ~a"
                              duration
                              value
                              sid
                              pos)
                      out)
        (when heat-map
          (let ((h (list-ref heat-map index)))
            (write-string (format ", ~a" (vector-ref h 1)) out)))
        (newline out)))

    ;; Return a callback suitable for `set-mouse-event-callback`: shows a
    ;; hover badge with the bracketing data points, the model value and
    ;; the duration at the mouse position.
    (define (make-plot-hover-callback)
      (define params (send this get-chart-settings))
      (define format-value
        (send (tmmax-axis cached-data) value-formatter (hash-ref params 'sport)))
      (lambda (snip event x y)
        (define info '())
        (define (add-info tag value) (set! info (cons (list tag value) info)))
        (define renderers '())
        (define (add-renderer r) (set! renderers (cons r renderers)))
        (when (and (good-hover? snip x y event) cached-data)
          (add-renderer (hover-vrule x))
          ;; the two data points whose durations bracket X
          (let ((closest (lookup-duration (tmmax-data cached-data) x)))
            (when closest
              (match-define (cons (list sid1 ts1 d1 v1) (list sid2 ts2 d2 v2)) closest)
              (add-renderer (hover-markers (list (vector d1 v1) (vector d2 v2))))
              (add-info #f (date-time->string (get-session-start-time sid2)))
              (add-info "Point 2" (string-append (format-value v2) " @ " (duration->string d2)))
              (add-info #f (date-time->string (get-session-start-time sid1)))
              (add-info "Point 1" (string-append (format-value v1) " @ " (duration->string d1)))))
          ;; critical power model value at X, if a model is present
          (let ((cpfn (tmmax-cp-fn cached-data)))
            (when cpfn
              (let ((my (cpfn x)))
                (when my
                  (add-info "Model" (format-value my))))))
          ;; interpolated data value at X
          (let ((plotfn (tmmax-plot-fn cached-data)))
            (when plotfn
              (let ((dy (plotfn x)))
                (when dy
                  (add-info (send (tmmax-axis cached-data) name) (format-value dy))))))
          (add-info "Duration" (duration->string x))
          ;; info entries were accumulated in reverse order
          (add-renderer (hover-label x y (make-hover-badge (reverse info)))))
        (set-overlay-renderers snip renderers)))

    ;; Fetch data (in a background task) and place the plot on CANVAS.
    ;; SAVED-GENERATION guards against an older task completing after a
    ;; newer request.
    (define/override (put-plot-snip canvas)
      (set! generation (add1 generation))
      (let ((previous-data cached-data)
            (params (send this get-chart-settings))
            (saved-generation generation)
            (saved-location (get-snip-location pd-model-snip)))
        (send canvas clear-all)
        (send canvas set-background-message "Working...")
        (if params
            (queue-task
             "mmax-trends-chart%/put-plot-snip"
             (lambda ()
               (define (report-progress message)
                 (queue-callback
                  (lambda ()
                    (when (= saved-generation (get-generation))
                      (send canvas set-background-message message))))
                 (sleep 0))
               ;; NOTE(review): DATA is never stored back into CACHED-DATA
               ;; here -- confirm caching happens elsewhere.
               (define data (or previous-data (fetch-data database params report-progress)))
               (define-values (rt min-x max-x min-y max-y) (make-renderer-tree data))
               (queue-callback
                (lambda ()
                  (when (= saved-generation (get-generation))
                    (define snip (insert-plot-snip canvas (tmmax-axis data) rt
                                                   min-x max-x min-y max-y))
                    (when snip (set-mouse-event-callback snip (make-plot-hover-callback)))
                    (when (tmmax-cp-pict data)
                      (set! pd-model-snip (new pict-snip% [pict (tmmax-cp-pict data)]))
                      (send canvas set-floating-snip pd-model-snip 0 0)
                      (move-snip-to pd-model-snip (or saved-location saved-pd-model-snip-location))))))))
            (begin
              (send canvas clear-all)
              (send canvas set-background-message "No params for plot")))))

    ;; render the current data to an image file of WIDTH x HEIGHT
    (define/override (save-plot-image file-name width height)
      (let ((data cached-data)
            (params (send this get-chart-settings)))
        (when (and params data)
          (define-values (rt min-x max-x min-y max-y) (make-renderer-tree data))
          (when rt
            (save-plot-to-file file-name width height
                               (tmmax-axis data)
                               rt min-x max-x min-y max-y)))))

    ;; add the PD model snip location to the saved settings so it can be
    ;; restored next time the chart is created
    (define/override (get-chart-settings)
      (define sdata (super get-chart-settings))
      (if (hash? sdata)
          (let ((location (or (get-snip-location pd-model-snip)
                              saved-pd-model-snip-location)))
            (if location
                (hash-set sdata 'pd-model-location location)
                sdata))
          sdata))

    (define/override (put-chart-settings data)
      (set! saved-pd-model-snip-location
            (hash-ref data 'pd-model-location #f))
      (super put-chart-settings data))
    ))
|
35a260b142585b985810f11f07b06e660aad3b174a18e64314f0d85841aed9e8 | argp/bap | piqic_ocaml_types.ml | pp camlp4o -I ` ocamlfind query piqi.syntax ` pa_labelscope.cmo pa_openin.cmo
(*
   Copyright 2009, 2010, 2011, 2012, 2013 Anton Lavrik

   Licensed under the Apache License, Version 2.0 (the "License");
   you may not use this file except in compliance with the License.
   You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
*)
(*
* generation of Ocaml type declarations
*)
module C = Piqi_common
open C
open Iolist
(* piqi compiler-compiler mode indication *)
(* TODO: move to piqic configuration *)
(* when true, we are running in piqi compiler-compiler mode *)
let cc_mode = ref false

(* emit the literal [s] only in compiler-compiler mode; otherwise emit an
 * empty iolist *)
let gen_cc s =
  if not !cc_mode
  then iol []
  else ios s
(*
let gen_cc_cond a b =
if !cc_mode
then a
else b
*)
(* toplevel for the module which is currently being compiled *)
(* TODO: this is a dirty method for sharing the setting across all
* piqic_ocaml_* modules *)
(* name of the toplevel OCaml module currently being generated *)
let top_modname = ref ""

(* qualify [name] with the current toplevel module name *)
let scoped_name name = !top_modname ^ "." ^ name
(* OCaml name of a typedef; the [ocaml_name] fields are filled in by an
 * earlier naming pass, hence the [some_of] *)
let typedef_mlname = function
  | `record t -> some_of t.R#ocaml_name
  | `variant t -> some_of t.V#ocaml_name
  | `enum t -> some_of t.E#ocaml_name
  | `alias t -> some_of t.A#ocaml_name
  | `list t -> some_of t.L#ocaml_name
  | _ ->
      (* this function will be called only for named types (i.e. typedefs) *)
      assert false
(* NOTE: String.capitalize is deprecated in favor of String.capitalize_ascii
 * in OCaml >= 4.03 *)
let capitalize = String.capitalize

(* fully qualified OCaml type name for a typedef: prefixed with the import's
 * module name when defined in an imported module, with the current toplevel
 * module name otherwise *)
let gen_deftype parent ocaml_name =
  let ocaml_name = some_of ocaml_name in
  match parent with
    | Some (`import x) -> (* imported name *)
        let ocaml_modname = some_of x.Import#ocaml_name in
        (ocaml_modname ^ "." ^ ocaml_name)
    | _ -> (* local name *)
        scoped_name ocaml_name
(* XXX: check type compatibility *)
(* map a piqi type to the OCaml type name used to represent it; an explicit
 * [ocaml_type] override always wins *)
let rec gen_piqtype t ocaml_type =
  match ocaml_type with
    | Some x -> x
    | None ->
        match t with
          | `int -> "int"
          | `float -> "float"
          | `bool -> "bool"
          | `string | `binary -> "string"
          | `any ->
              (* when compiling the piqi self-spec, "any" lives in the module
               * being generated; otherwise use the pre-built one *)
              if !Piqic_common.is_self_spec
              then scoped_name "any"
              else "Piqi_piqi.any"
          | `record r -> gen_deftype r.R#parent r.R#ocaml_name
          | `variant v -> gen_deftype v.V#parent v.V#ocaml_name
          | `enum e -> gen_deftype e.E#parent e.E#ocaml_name
          | `list l -> gen_deftype l.L#parent l.L#ocaml_name
          | `alias a -> gen_aliastype a

(* type name for an alias; when the alias' name coincides with the underlying
 * OCaml type name, use it directly *)
and gen_aliastype a =
  let open Alias in
  let ocaml_name = some_of a.ocaml_name in
  let typename = gen_piqtype (some_of a.piqtype) a.ocaml_type in
  if ocaml_name = typename
  then ocaml_name (* don't generate aliases for built-in types *)
  else gen_deftype a.parent a.ocaml_name
(* same as [gen_piqtype], but returning an iolist *)
let ios_gen_piqtype ?ocaml_type (t :T.piqtype) =
  ios (gen_piqtype t ocaml_type)
(* OCaml type expression for a record field, taking the field mode into
 * account: required -> t; optional -> "t option" unless it has a default and
 * is not marked ocaml-optional; repeated -> "t array" or "t list" *)
let gen_field_type f =
  let open F in
  match f.piqtype with
    (* NOTE: the trailing ';' is a harmless empty sequence element *)
    | None -> ios "bool"; (* flags are represented as booleans *)
    | Some t ->
        let deftype = ios_gen_piqtype t in
        match f.mode with
          | `required -> deftype
          | `optional when f.default <> None && (not f.ocaml_optional) ->
              deftype (* optional + default *)
          | `optional -> deftype ^^ ios " option"
          | `repeated ->
              deftype ^^
              if f.ocaml_array
              then ios " array"
              else ios " list"
(* ml name for a field/option: an explicitly specified ocaml name takes
 * precedence; otherwise the name is derived from the referenced typedef *)
let mlname_of name piqtype =
  match name with
    | Some n -> n
    | None ->
        (match piqtype with
          | Some t -> typedef_mlname t
          | None -> assert false)
(* XXX: move this functionality to mlname_of_*. assignment should be done
 * once rather than calling it from every place where it is needed *)
(* ml name of a record field: explicit ocaml-name or its type's name *)
let mlname_of_field f =
  let open F in mlname_of f.ocaml_name f.piqtype

(* ml name of a variant/enum option: same rule as for fields *)
let mlname_of_option o =
  let open O in mlname_of o.ocaml_name o.piqtype
(* one line of a record type definition: "mutable <name> : <type>;" *)
let gen_field f =
  let open F in
  let fdef = iod " " (* field definition *)
    [
      ios "mutable"; (* defining all fields as mutable at the moment *)
      ios (mlname_of_field f);
      ios ":";
      gen_field_type f;
      ios ";";
    ]
  in fdef
(* generate record type in record module; see also gen_record' *)
(* generate record type in record module; see also gen_record' *)
(* each record is wrapped in its own (capitalized) module exposing a single
 * type [t]; the "Mod : sig ... end = Mod" form is used as part of the
 * surrounding "module rec" chain produced by gen_defs *)
let gen_record_mod r =
  let modname = capitalize (some_of r.R#ocaml_name) in
  let fields = r.Record#field in
  let fdefs = (* field definition list *)
    if fields <> []
    then iol (List.map gen_field fields)
    else ios "_dummy: unit" (* OCaml record types can't be empty *)
  in
  let rcons = (* record def constructor *)
    iol [ios "type t = "; ios "{"; fdefs; ios "}"]
  in
  let rdef = iod " "
    [
      ios modname; (* module declaration *)
      ios ":";
      ios "sig"; (* signature *)
      rcons;
      ios "end";
      ios "=";
      ios modname;
      (* full version:
      ios "struct"; (* structure *)
      rcons;
      ios "end";
      *)
    ]
  in rdef
(* polymorphic variant constructor name: a backquote followed by the
 * option's ml name *)
let gen_pvar_name name =
  iol [ios "`"; ios name]

(* was this definition defined in the currently compiled piqi module (as
 * opposed to being pulled in through an import)? *)
let is_local_def def =
  match get_parent def with
    | `import _ -> false
    | `piqi _ -> true
(* one polymorphic variant case for a variant/enum option:
 *  - unnamed option referring to a variant/enum type: splice the type in
 *  - typed option: `Name of type
 *  - untyped option: bare `Name *)
let gen_option o =
  let open Option in
  match o.ocaml_name, o.piqtype with
    | None, Some ((`variant v) as def) ->
        (* NOTE: for some reason, the OCaml compiler complains about fully
         * qualified polymorphic variants in recursive modules, so we need to
         * use non-qualified names in this case *)
        if is_local_def def
        then ios (some_of v.V#ocaml_name)
        else ios_gen_piqtype def
    | None, Some ((`enum e) as def) ->
        if is_local_def def
        then ios (some_of e.E#ocaml_name)
        else ios_gen_piqtype def
    | _, Some t ->
        let n = gen_pvar_name (mlname_of_option o) in
        n ^^ ios " of " ^^ ios_gen_piqtype t
    | Some mln, None -> gen_pvar_name mln
    | None, None -> assert false
(* "name = <underlying type>" type abbreviation for an alias *)
let gen_alias a =
  let open Alias in
  iol [
    ios (some_of a.ocaml_name); ios " = ";
    ios_gen_piqtype (some_of a.piqtype) ?ocaml_type:a.ocaml_type ]
(* "name = <element type> array|list" abbreviation for a piqi list *)
let gen_list l =
  let open L in
  iol [
    ios (some_of l.ocaml_name); ios " = ";
    ios_gen_piqtype (some_of l.piqtype);
    if l.ocaml_array
    then ios " array"
    else ios " list";
  ]
(* "[ <opt> | <opt> | ... ]" polymorphic variant type expression *)
let gen_options options =
  iol [ios "["; iod "|" (List.map gen_option options); ios "]"]
(* "name = [ ...options... ]" definition for a variant *)
let gen_variant v =
  let open Variant in
  iol [
    ios (some_of v.ocaml_name);
    ios " = ";
    gen_options v.option;
  ]

(* enums are represented exactly like variants with constant options *)
let gen_enum e =
  let open Enum in
  iol [
    ios (some_of e.ocaml_name);
    ios " = ";
    gen_options e.option;
  ]
(* a record's toplevel type is just an abbreviation of the [t] type defined
 * in its dedicated module (see gen_record_mod) *)
let gen_record r =
  let name = some_of r.Record#ocaml_name in
  let modname = capitalize name in
  iol [ ios name; ios " = "; ios (modname ^ ".t") ]
(* type definition body for any non-alias typedef *)
let gen_def = function
  | `record t -> gen_record t
  | `variant t -> gen_variant t
  | `enum t -> gen_enum t
  | `list t -> gen_list t
  | _ -> assert false
(* NOTE: shadows the earlier gen_alias (the non-recursive [let] makes the
 * body call the previous binding); returns zero or one definitions so that
 * self-referring abbreviations are skipped *)
let gen_alias a =
  let open Alias in
  let name = some_of a.ocaml_name in
  let typename = gen_piqtype (some_of a.piqtype) a.ocaml_type in
  if name = typename
  then [] (* don't generate cyclic type abbreviation *)
  else [ gen_alias a ]

(* NOTE: shadows the earlier gen_def as well; produces a definition list *)
let gen_def = function (* gen everything except records *)
  | `alias t -> gen_alias t
  | t -> [gen_def t]
(* per-typedef module definitions: only records get their own module *)
let gen_mod_def = function
  | `record r -> [gen_record_mod r]
  (* XXX: generate modules for variants? *)
  | _ -> []
(* generate the full "module rec <Top> : sig ... end = <Top> and <RecMod> ..."
 * chain for all typedefs, followed by "include <Top>" so the generated type
 * names are visible at the file's toplevel *)
let gen_defs (defs:T.typedef list) =
  let mod_defs = U.flatmap gen_mod_def defs in
  let odefs = U.flatmap gen_def defs in
  let odef =
    (* all plain type abbreviations, joined into one "type ... type ..." run *)
    let odef =
      if odefs = []
      then iol []
      else iol [
        ios "type ";
        iod " type " odefs;
      ]
    in
    iod " "
      [
        ios !top_modname; (* module declaration *)
        ios ":";
        ios "sig"; (* signature *)
        odef;
        ios "end";
        ios "=";
        ios !top_modname;
        (* full version:
        ios "struct"; (* structure *)
        odef;
        ios "end";
        *)
      ]
  in
  let defs = [odef] @ mod_defs in
  let code = iol
    [
      ios "module rec ";
      iod " and " defs;
    ]
  in
  iod " " [
    code;
    ios "include"; ios !top_modname;
    eol;
  ]
(* "module <local alias> = <imported ocaml module>" for a single import *)
let gen_import x =
  let open Import in
  let piqi = some_of x.piqi in
  iod " " [
    ios "module"; ios (some_of x.ocaml_name); ios "=";
    ios (some_of piqi.P#ocaml_module);
    eol;
  ]

(* module aliases for all resolved imports *)
let gen_imports l =
  let l = List.map gen_import l in
  iol l
(* NOTE: for some reason, the OCaml compiler complains about fully qualified
 * polymorphic variants in recursive modules, so instead of relying on OCaml,
 * we need to preorder variants ourselves without relying on OCaml to figure
 * out the order automatically *)
let order_variant_defs variants =
  (* topologically sort local variant definitions *)
  let cycle_visit def =
    Piqi_common.error def
      ("cyclic OCaml variant definition: " ^ typedef_name def)
  in
  (* a variant "depends on" the local variants/enums it includes through
   * unnamed (spliced) options *)
  let get_adjacent_vertixes = function
    | `variant v ->
        (* get the list of included variants *)
        U.flatmap (fun o ->
          let open O in
          match o.ocaml_name, o.piqtype with
            | None, Some ((`variant _) as def)
            | None, Some ((`enum _) as def) ->
                if is_local_def def (* omit any imported definitions *)
                then [def]
                else []
            | _ -> []
        ) v.V#option
    | _ -> []
  in
  Piqi_graph.tsort variants get_adjacent_vertixes ~cycle_visit
(* make sure we define aliases for built-in ocaml types first; some aliases
 * (e.g. float) can override the default OCaml type names which results in
 * cyclic type definitions without such ordering *)
let order_alias_defs alias_defs =
  (* rank used by the stable sort below; lower ranks sort first *)
  let rank def =
    match def with
      | `alias x ->
          if C.is_builtin_def def
          then
            (* aliases of built-in OCaml types go first *)
            if x.A#ocaml_type <> None then 1 else 2
          else 100
      | _ ->
          (* only aliases are ever passed to this function *)
          assert false
  in
  let compare_alias_defs a b =
    rank a - rank b
  in
  List.stable_sort compare_alias_defs alias_defs
(* reorder typedefs so that aliases come first (suitably ordered), then
 * variants/enums (topologically sorted), then everything else *)
let order_defs defs =
  (* we apply this specific ordering only to variants, to be more specific --
   * only to those variants that include other variants by not specifying tags
   * for the options *)
  let variants, rest =
    List.partition (function
      | `variant _ | `enum _ -> true
      | _ -> false)
    defs
  in
  let aliases, rest =
    List.partition (function
      | `alias _ -> true
      | _ -> false)
    rest
  in
  (* return the updated list of definitions with sorted variants and aliases *)
  (order_alias_defs aliases) @ (order_variant_defs variants) @ rest
(* entry point: import aliases followed by all (reordered) typedefs *)
let gen_piqi (piqi:T.piqi) =
  iol [
    gen_imports piqi.P#resolved_import;
    gen_defs (order_defs piqi.P#resolved_typedef);
  ]
| null | https://raw.githubusercontent.com/argp/bap/2f60a35e822200a1ec50eea3a947a322b45da363/libtracewrap/libtrace/piqi/piqi/piqic/piqic_ocaml_types.ml | ocaml |
* generation of Ocaml type declarations
TODO: move to piqic configuration
let gen_cc_cond a b =
if !cc_mode
then a
else b
TODO: this is a dirty method for sharing the setting across all
* piqic_ocaml_* modules
this function will be called only for named types (i.e. typedefs)
imported name
local name
XXX: check type compatibility
don't generate aliases for built-in types
flags are represented as booleans
optional + default
field definition
defining all fields as mutable at the moment
generate record type in record module; see also gen_record'
field definition list
record def constructor
module declaration
signature
full version:
ios "struct"; (* structure
don't generate cyclic type abbreviation
gen everything except records
XXX: generate modules for variants?
module declaration
signature
full version:
ios "struct"; (* structure
get the list of included variants
omit any imported definitions
we apply this specific ordering only to variants, to be more specific --
* only to those variants that include other variants by not specifying tags
* for the options
return the updated list of definitions with sorted variants and aliases | pp camlp4o -I ` ocamlfind query piqi.syntax ` pa_labelscope.cmo pa_openin.cmo
Copyright 2009 , 2010 , 2011 , 2012 , 2013
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
you may not use this file except in compliance with the License .
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing , software
distributed under the License is distributed on an " AS IS " BASIS ,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND , either express or implied .
See the License for the specific language governing permissions and
limitations under the License .
Copyright 2009, 2010, 2011, 2012, 2013 Anton Lavrik
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*)
module C = Piqi_common
open C
open Iolist
piqi compiler - compiler mode indictation
let cc_mode = ref false
let gen_cc s =
if !cc_mode
then ios s
else iol []
toplevel for the module which is currently being compiled
let top_modname = ref ""
let scoped_name name = !top_modname ^ "." ^ name
let typedef_mlname = function
| `record t -> some_of t.R#ocaml_name
| `variant t -> some_of t.V#ocaml_name
| `enum t -> some_of t.E#ocaml_name
| `alias t -> some_of t.A#ocaml_name
| `list t -> some_of t.L#ocaml_name
| _ ->
assert false
let capitalize = String.capitalize
let gen_deftype parent ocaml_name =
let ocaml_name = some_of ocaml_name in
match parent with
let ocaml_modname = some_of x.Import#ocaml_name in
(ocaml_modname ^ "." ^ ocaml_name)
scoped_name ocaml_name
let rec gen_piqtype t ocaml_type =
match ocaml_type with
| Some x -> x
| None ->
match t with
| `int -> "int"
| `float -> "float"
| `bool -> "bool"
| `string | `binary -> "string"
| `any ->
if !Piqic_common.is_self_spec
then scoped_name "any"
else "Piqi_piqi.any"
| `record r -> gen_deftype r.R#parent r.R#ocaml_name
| `variant v -> gen_deftype v.V#parent v.V#ocaml_name
| `enum e -> gen_deftype e.E#parent e.E#ocaml_name
| `list l -> gen_deftype l.L#parent l.L#ocaml_name
| `alias a -> gen_aliastype a
and gen_aliastype a =
let open Alias in
let ocaml_name = some_of a.ocaml_name in
let typename = gen_piqtype (some_of a.piqtype) a.ocaml_type in
if ocaml_name = typename
else gen_deftype a.parent a.ocaml_name
let ios_gen_piqtype ?ocaml_type (t :T.piqtype) =
ios (gen_piqtype t ocaml_type)
let gen_field_type f =
let open F in
match f.piqtype with
| Some t ->
let deftype = ios_gen_piqtype t in
match f.mode with
| `required -> deftype
| `optional when f.default <> None && (not f.ocaml_optional) ->
| `optional -> deftype ^^ ios " option"
| `repeated ->
deftype ^^
if f.ocaml_array
then ios " array"
else ios " list"
let mlname_of name piqtype =
match name, piqtype with
| Some n, _ -> n
| None, Some t -> typedef_mlname t
| _ -> assert false
XXX : move this functionality to mlname _ * . assignment should be done
* once rahter than calling it from every place where it is needed
* once rahter than calling it from every place where it is needed *)
let mlname_of_field f =
let open F in mlname_of f.ocaml_name f.piqtype
let mlname_of_option o =
let open O in mlname_of o.ocaml_name o.piqtype
let gen_field f =
let open F in
[
ios (mlname_of_field f);
ios ":";
gen_field_type f;
ios ";";
]
in fdef
let gen_record_mod r =
let modname = capitalize (some_of r.R#ocaml_name) in
let fields = r.Record#field in
if fields <> []
then iol (List.map gen_field fields)
else ios "_dummy: unit"
in
iol [ios "type t = "; ios "{"; fdefs; ios "}"]
in
let rdef = iod " "
[
ios ":";
rcons;
ios "end";
ios "=";
ios modname;
rcons;
ios "end";
*)
]
in rdef
let gen_pvar_name name =
ios "`" ^^ ios name
let is_local_def def =
match get_parent def with
| `piqi _ -> true
| `import _ -> false
let gen_option o =
let open Option in
match o.ocaml_name, o.piqtype with
| None, Some ((`variant v) as def) ->
NOTE : for some reason , complains about fully qualified
* polymorphic variants in recursive modules , so we need to use
* non - qualified names in this case
* polymorphic variants in recursive modules, so we need to use
* non-qualified names in this case *)
if is_local_def def
then ios (some_of v.V#ocaml_name)
else ios_gen_piqtype def
| None, Some ((`enum e) as def) ->
if is_local_def def
then ios (some_of e.E#ocaml_name)
else ios_gen_piqtype def
| _, Some t ->
let n = gen_pvar_name (mlname_of_option o) in
n ^^ ios " of " ^^ ios_gen_piqtype t
| Some mln, None -> gen_pvar_name mln
| None, None -> assert false
let gen_alias a =
let open Alias in
iol [
ios (some_of a.ocaml_name); ios " = ";
ios_gen_piqtype (some_of a.piqtype) ?ocaml_type:a.ocaml_type ]
let gen_list l =
let open L in
iol [
ios (some_of l.ocaml_name); ios " = ";
ios_gen_piqtype (some_of l.piqtype);
if l.ocaml_array
then ios " array"
else ios " list";
]
let gen_options options =
let var_defs =
iod "|" (List.map gen_option options)
in
iol [ios "["; var_defs; ios "]"]
let gen_variant v =
let open Variant in
iol [
ios (some_of v.ocaml_name);
ios " = ";
gen_options v.option;
]
let gen_enum e =
let open Enum in
iol [
ios (some_of e.ocaml_name);
ios " = ";
gen_options e.option;
]
let gen_record r =
let name = some_of r.Record#ocaml_name in
let modname = capitalize name in
iol [ ios name; ios " = "; ios (modname ^ ".t") ]
let gen_def = function
| `record t -> gen_record t
| `variant t -> gen_variant t
| `enum t -> gen_enum t
| `list t -> gen_list t
| _ -> assert false
let gen_alias a =
let open Alias in
let name = some_of a.ocaml_name in
let typename = gen_piqtype (some_of a.piqtype) a.ocaml_type in
if name = typename
else [ gen_alias a ]
| `alias t -> gen_alias t
| t -> [gen_def t]
let gen_mod_def = function
| `record r -> [gen_record_mod r]
| _ -> []
let gen_defs (defs:T.typedef list) =
let mod_defs = U.flatmap gen_mod_def defs in
let odefs = U.flatmap gen_def defs in
let odef =
let odef =
if odefs = []
then iol []
else iol [
ios "type ";
iod " type " odefs;
]
in
iod " "
[
ios ":";
odef;
ios "end";
ios "=";
ios !top_modname;
odef;
ios "end";
*)
]
in
let defs = [odef] @ mod_defs in
let code = iol
[
ios "module rec ";
iod " and " defs;
]
in
iod " " [
code;
ios "include"; ios !top_modname;
eol;
]
let gen_import x =
let open Import in
let piqi = some_of x.piqi in
iod " " [
ios "module"; ios (some_of x.ocaml_name); ios "=";
ios (some_of piqi.P#ocaml_module);
eol;
]
let gen_imports l =
let l = List.map gen_import l in
iol l
NOTE : for some reason , complains about fully qualified polymorphic
* variants in recursive modules , so instead of relying on OCaml , we need to
* preorder variants ourselves without relying on OCaml to figure out the order
* automatically
* variants in recursive modules, so instead of relying on OCaml, we need to
* preorder variants ourselves without relying on OCaml to figure out the order
* automatically *)
let order_variant_defs variants =
topologically sort local variant defintions
let cycle_visit def =
Piqi_common.error def
("cyclic OCaml variant definition: " ^ typedef_name def)
in
let get_adjacent_vertixes = function
| `variant v ->
U.flatmap (fun o ->
let open O in
match o.ocaml_name, o.piqtype with
| None, Some ((`variant _) as def)
| None, Some ((`enum _) as def) ->
then [def]
else []
| _ -> []
) v.V#option
| _ -> []
in
Piqi_graph.tsort variants get_adjacent_vertixes ~cycle_visit
make sure we define aliases for built - in ocaml types first ; some aliases
* ( e.g. float ) can override the default OCaml type names which results in
* cyclic type definitions without such ordering
* (e.g. float) can override the default OCaml type names which results in
* cyclic type definitions without such ordering *)
let order_alias_defs alias_defs =
let rank def =
match def with
| `alias x ->
if C.is_builtin_def def
then
aliases of built - in OCaml types go first
if x.A#ocaml_type <> None then 1 else 2
else 100
| _ ->
assert false
in
let compare_alias_defs a b =
rank a - rank b
in
List.stable_sort compare_alias_defs alias_defs
let order_defs defs =
let variants, rest =
List.partition (function
| `variant _ | `enum _ -> true
| _ -> false)
defs
in
let aliases, rest =
List.partition (function
| `alias _ -> true
| _ -> false)
rest
in
(order_alias_defs aliases) @ (order_variant_defs variants) @ rest
let gen_piqi (piqi:T.piqi) =
iol [
gen_imports piqi.P#resolved_import;
gen_defs (order_defs piqi.P#resolved_typedef);
]
|
8983c4576e9cd34e60ec1d4f15c1398ac16dad474d65857e3819951dbbf687a9 | bittide/bittide-hardware | Node.hs | SPDX - FileCopyrightText : 2022 Google LLC
--
-- SPDX-License-Identifier: Apache-2.0
{-# OPTIONS_GHC -fconstraint-solver-iterations=6 #-}
{-# LANGUAGE GADTs #-}
module Bittide.Node where
import Clash.Prelude
import Protocols.Wishbone
import Bittide.Calendar
import Bittide.DoubleBufferedRam
import Bittide.Link
import Bittide.ProcessingElement
import Bittide.ScatterGather
import Bittide.SharedTypes
import Bittide.Switch
-- | A simple node consisting of one external bidirectional link and two 'gppe's.
-- This node's 'switch' has a 'CalendarConfig' for a 'calendar' with up to @1024@ entries,
-- however, the 'calendar' is initialized with a single entry of repeated zeroes.
-- The 'scatterUnitWb's and 'gatherUnitWb's are initialized with 'CalendarConfig's of all
-- zeroes. The 'gppe's initial memories are both undefined and the memory map is a
-- vector of ever increasing base addresses (increments of 0x1000).
simpleNodeConfig :: NodeConfig 1 2
simpleNodeConfig =
  NodeConfig
    (ManagementConfig linkConfig nmuConfig)
    switchConfig
    (repeat (GppeConfig linkConfig peConfig))
 where
  switchConfig = SwitchConfig{ preamble = preamble', calendarConfig = switchCal}
  -- a single all-zeroes entry, but room for up to 1024 calendar entries
  switchCal = CalendarConfig (SNat @1024) (switchEntry :> Nil) (switchEntry :> Nil)
  linkConfig = LinkConfig preamble' (ScatterConfig sgConfig) (GatherConfig sgConfig)
  sgConfig = CalendarConfig (SNat @1024) (sgEntry :> Nil) (sgEntry :> Nil)
  -- instruction and data memories start undefined
  peConfig = PeConfig memMapPe (Undefined @8192) (Undefined @8192) 0
  nmuConfig = PeConfig memMapNmu (Undefined @8192) (Undefined @8192) 0
  -- base addresses in increments of 0x1000
  memMapPe = iterateI (+0x1000) 0
  memMapNmu = iterateI (+0x1000) 0
  preamble' = 0xDEADBEEFA5A5A5A5FACADE :: BitVector 96
  switchEntry = ValidEntry{veEntry = repeat 0, veRepeat = 0 :: Unsigned 0}
  sgEntry = ValidEntry{veEntry = 0 :: Index 1024 , veRepeat = 0 :: Unsigned 0}
-- | Each 'gppe' results in 4 busses for the 'managementUnit', namely:
-- * The 'calendar' for the 'scatterUnitWB'.
-- * The 'calendar' for the 'gatherUnitWB'.
-- * The interface of the 'rxUnit' on the 'gppe' side.
-- * The interface of the 'txUnit' on the 'gppe' side.
type BussesPerGppe = 4
-- | Each 'switch' link results in 2 busses for the 'managementUnit', namely:
-- * The interface of the 'rxUnit' on the 'switch' side.
-- * The interface of the 'txUnit' on the 'switch' side.
type BussesPerSwitchLink = 2
-- | Configuration of a 'node'.
data NodeConfig externalLinks gppes where
  NodeConfig ::
    ( KnownNat switchBusses
    , switchBusses ~ (1 + BussesPerSwitchLink * (externalLinks + (gppes + 1)))
    , KnownNat nmuBusses
    , nmuBusses ~ ((BussesPerGppe * gppes) + switchBusses + 8)
    , KnownNat nmuRemBusWidth
    , nmuRemBusWidth ~ (32 - CLog 2 nmuBusses)) =>
    -- | Configuration for the 'node's 'managementUnit'.
    ManagementConfig ((BussesPerGppe * gppes) + switchBusses) ->
    -- | Configuration for the 'node's 'switch'.
    SwitchConfig (externalLinks + gppes + 1) 4 nmuRemBusWidth ->
    -- | Configuration for all the node's 'gppe's.
    Vec gppes (GppeConfig nmuRemBusWidth) ->
    NodeConfig externalLinks gppes
-- | A 'node' consists of a 'switch', 'managementUnit' and @0..n@ 'gppe's.
node ::
  forall dom extLinks gppes .
  ( HiddenClockResetEnable dom, KnownNat extLinks, KnownNat gppes) =>
  NodeConfig extLinks gppes ->
  Vec extLinks (Signal dom (DataLink 64)) ->
  Vec extLinks (Signal dom (DataLink 64))
node (NodeConfig nmuConfig switchConfig gppeConfigs) linksIn = linksOut
 where
  -- the switch together with its calendar / rx / tx control busses
  (switchOut, swS2Ms) =
    mkSwitch switchConfig swCalM2S swRxM2Ss swTxM2Ss switchIn
  -- switch port order: management unit, then the PEs, then external links
  switchIn = nmuToSwitch :> pesToSwitch ++ linksIn
  (splitAtI -> (switchToNmu :> switchToPes, linksOut)) = switchOut
  (nmuToSwitch, nmuM2Ss) = managementUnit nmuConfig switchToNmu nmuS2Ms
  -- first group of management-unit masters drives the switch, the rest the PEs
  (swM2Ss, peM2Ss) = splitAtI nmuM2Ss
  (swCalM2S :> swRxM2Ss, swTxM2Ss) = splitAtI swM2Ss
  (swCalS2M :> swRxS2Ms, swTxS2Ms) = splitAtI
    @(1 + (extLinks + (gppes + 1))) @(extLinks + (gppes + 1)) swS2Ms
  nmuS2Ms = swCalS2M :> (swRxS2Ms ++ swTxS2Ms ++ peS2Ms)
  -- instantiate one gppe per configuration/link/bus-group triple
  (pesToSwitch, concat -> peS2Ms) =
    unzip $ gppe <$> zip3 gppeConfigs switchToPes (unconcatI peM2Ss)
-- | Configuration for the 'managementUnit' and its 'Bittide.Link'.
The management unit contains the 4 wishbone busses that each pe has
and also the management busses for itself and all other pe 's in this node .
-- Furthermore it also has access to the 'calendar' for the 'switch'.
data ManagementConfig nodeBusses where
ManagementConfig ::
(KnownNat nodeBusses) =>
-- | Configuration for the incoming and outgoing 'Bittide.Link'.
LinkConfig 4 (32 - CLog 2 (nodeBusses + 8)) ->
| Configuration for the ' managementUnit 's ' processingElement ' . Controls 8 local busses
-- and all incoming busses from 'calendar's, 'rxUnit's and 'txUnit's.
PeConfig (nodeBusses + 8) ->
ManagementConfig nodeBusses
-- | Configuration for a general purpose processing element together with its link to the
-- switch.
data GppeConfig nmuRemBusWidth where
GppeConfig ::
LinkConfig 4 nmuRemBusWidth ->
-- | Configuration for a 'gppe's 'processingElement', which statically
has four external busses connected to the instruction memory , data memory
-- , 'scatterUnitWb' and 'gatherUnitWb'.
PeConfig 4 ->
GppeConfig nmuRemBusWidth
# NOINLINE gppe #
-- | A general purpose 'processingElement' to be part of a Bittide Node. It contains
-- a 'processingElement', 'linkToPe' and 'peToLink' which create the interface for the
Bittide Link . It takes a ' GppeConfig ' , incoming link and four incoming ' WishboneM2S '
signals and produces the outgoing link alongside four ' WishhboneS2 M ' signals .
The order of Wishbone busses is as follows :
-- ('rxUnit' :> 'scatterUnitWb' :> 'txUnit' :> 'gatherUnitWb' :> Nil).
gppe ::
(KnownNat nmuRemBusWidth, 2 <= nmuRemBusWidth, HiddenClockResetEnable dom) =>
-- |
( all local parameters
, . Link '
-- , Incoming @Vector@ of master busses
-- )
( GppeConfig nmuRemBusWidth
, Signal dom (DataLink 64)
, Vec 4 (Signal dom (WishboneM2S nmuRemBusWidth 4 (Bytes 4)))) ->
-- |
-- ( Outgoing 'Bittide.Link'
-- , Outgoing @Vector@ of slave busses
-- )
( Signal dom (DataLink 64)
, Vec 4 (Signal dom (WishboneS2M (Bytes 4))))
gppe (GppeConfig linkConfig peConfig, linkIn, splitAtI -> (nmuM2S0, nmuM2S1)) =
(linkOut, nmuS2M0 ++ nmuS2M1)
where
(suS2M, nmuS2M0) = linkToPe linkConfig linkIn sc suM2S nmuM2S0
(linkOut, guS2M, nmuS2M1) = peToLink linkConfig sc guM2S nmuM2S1
(suM2S :> guM2S :> Nil) = processingElement peConfig (suS2M :> guS2M :> Nil)
sc = sequenceCounter
# NOINLINE managementUnit #
-- | A special purpose 'processingElement' that manages a Bittide Node. It contains
-- a 'processingElement', 'linkToPe' and 'peToLink' which create the interface for the
Bittide Link . It takes a ' ManagementConfig ' , incoming link and a vector of incoming
-- 'WishboneS2M' signals and produces the outgoing link alongside a vector of
-- 'WishhboneM2S' signals.
managementUnit ::
forall dom nodeBusses .
(HiddenClockResetEnable dom, KnownNat nodeBusses, CLog 2 (nodeBusses + 8) <= 30) =>
-- | Configures all local parameters.
ManagementConfig nodeBusses ->
-- | Incoming 'Bittide.Link'.
Signal dom (DataLink 64) ->
-- | Incoming @Vector@ of slave busses.
Vec nodeBusses (Signal dom (WishboneS2M (Bytes 4))) ->
-- |
-- ( Outgoing 'Bittide.Link'
-- , Outgoing @Vector@ of master busses)
( Signal dom (DataLink 64)
, Vec nodeBusses (Signal dom (WishboneM2S (32 - CLog 2 (nodeBusses + 8)) 4 (Bytes 4))))
managementUnit (ManagementConfig linkConfig peConfig) linkIn nodeS2Ms =
(linkOut, nodeM2Ss)
where
(suS2M, nmuS2M0) = linkToPe linkConfig linkIn sc suM2S nmuM2S0
(linkOut, guS2M, nmuS2M1) = peToLink linkConfig sc guM2S nmuM2S1
(suM2S :> guM2S :> rest) = nmuM2Ss
(splitAtI -> (nmuM2S0, nmuM2S1), nodeM2Ss) = splitAtI rest
nmuM2Ss = processingElement peConfig nmuS2Ms
nmuS2Ms = suS2M :> guS2M :> nmuS2M0 ++ nmuS2M1 ++ nodeS2Ms
sc = sequenceCounter
| null | https://raw.githubusercontent.com/bittide/bittide-hardware/261e3f9e9c0f639fbae8e84d1b5d2f81f23fcb59/bittide/src/Bittide/Node.hs | haskell |
# OPTIONS_GHC -fconstraint-solver-iterations=6 #
# LANGUAGE GADTs #
however, the 'calendar' is initialized with a single entry of repeated zeroes.
* The 'calendar' for the 'scatterUnitWB'.
* The 'calendar' for the 'gatherUnitWB'.
* The interface of the 'rxUnit' on the 'gppe' side.
* The interface of the 'txUnit' on the 'gppe' side.
* The interface of the 'rxUnit' on the 'switch' side.
* The interface of the 'txUnit' on the 'switch' side.
| Configuration of a 'node'.
| Configuration for the 'node's 'managementUnit'.
| Configuratoin for the 'node's 'switch'.
| Configuration for all the node's 'gppe's.
| Configuration for the 'managementUnit' and its 'Bittide.Link'.
Furthermore it also has access to the 'calendar' for the 'switch'.
| Configuration for the incoming and outgoing 'Bittide.Link'.
and all incoming busses from 'calendar's, 'rxUnit's and 'txUnit's.
| Configuration for a general purpose processing element together with its link to the
switch.
| Configuration for a 'gppe's 'processingElement', which statically
, 'scatterUnitWb' and 'gatherUnitWb'.
| A general purpose 'processingElement' to be part of a Bittide Node. It contains
a 'processingElement', 'linkToPe' and 'peToLink' which create the interface for the
('rxUnit' :> 'scatterUnitWb' :> 'txUnit' :> 'gatherUnitWb' :> Nil).
|
, Incoming @Vector@ of master busses
)
|
( Outgoing 'Bittide.Link'
, Outgoing @Vector@ of slave busses
)
| A special purpose 'processingElement' that manages a Bittide Node. It contains
a 'processingElement', 'linkToPe' and 'peToLink' which create the interface for the
'WishboneS2M' signals and produces the outgoing link alongside a vector of
'WishhboneM2S' signals.
| Configures all local parameters.
| Incoming 'Bittide.Link'.
| Incoming @Vector@ of slave busses.
|
( Outgoing 'Bittide.Link'
, Outgoing @Vector@ of master busses) | SPDX - FileCopyrightText : 2022 Google LLC
SPDX - License - Identifier : Apache-2.0
module Bittide.Node where
import Clash.Prelude
import Protocols.Wishbone
import Bittide.Calendar
import Bittide.DoubleBufferedRam
import Bittide.Link
import Bittide.ProcessingElement
import Bittide.ScatterGather
import Bittide.SharedTypes
import Bittide.Switch
| A simple node consisting of one external bidirectional link and two ' gppe 's .
This node 's ' switch ' has a ' CalendarConfig ' of for a ' calendar ' with up to @1024@ entries ,
The ' scatterUnitWb 's and ' gatherUnitWb 's are initialized with ' CalendarConfig 's of all
zeroes . The ' gppe 's initial memories are both undefined and the ' ' is a
vector of ever increasing base addresses ( increments of 0x1000 ) .
simpleNodeConfig :: NodeConfig 1 2
simpleNodeConfig =
NodeConfig
(ManagementConfig linkConfig nmuConfig)
switchConfig
(repeat (GppeConfig linkConfig peConfig))
where
switchConfig = SwitchConfig{ preamble = preamble', calendarConfig = switchCal}
switchCal = CalendarConfig (SNat @1024) (switchEntry :> Nil) (switchEntry :> Nil)
linkConfig = LinkConfig preamble' (ScatterConfig sgConfig) (GatherConfig sgConfig)
sgConfig = CalendarConfig (SNat @1024) (sgEntry :> Nil) (sgEntry :> Nil)
peConfig = PeConfig memMapPe (Undefined @8192) (Undefined @8192) 0
nmuConfig = PeConfig memMapNmu (Undefined @8192) (Undefined @8192) 0
memMapPe = iterateI (+0x1000) 0
memMapNmu = iterateI (+0x1000) 0
preamble' = 0xDEADBEEFA5A5A5A5FACADE :: BitVector 96
switchEntry = ValidEntry{veEntry = repeat 0, veRepeat = 0 :: Unsigned 0}
sgEntry = ValidEntry{veEntry = 0 :: Index 1024 , veRepeat = 0 :: Unsigned 0}
| Each ' gppe ' results in 4 busses for the ' managementUnit ' , namely :
type BussesPerGppe = 4
| Each ' switch ' link results in 2 busses for the ' managementUnit ' , namely :
type BussesPerSwitchLink = 2
data NodeConfig externalLinks gppes where
NodeConfig ::
( KnownNat switchBusses
, switchBusses ~ (1 + BussesPerSwitchLink * (externalLinks + (gppes + 1)))
, KnownNat nmuBusses
, nmuBusses ~ ((BussesPerGppe * gppes) + switchBusses + 8)
, KnownNat nmuRemBusWidth
, nmuRemBusWidth ~ (32 - CLog 2 nmuBusses)) =>
ManagementConfig ((BussesPerGppe * gppes) + switchBusses) ->
SwitchConfig (externalLinks + gppes + 1) 4 nmuRemBusWidth ->
Vec gppes (GppeConfig nmuRemBusWidth) ->
NodeConfig externalLinks gppes
| A ' node ' consists of a ' switch ' , ' managementUnit ' and @0 .. n@ ' gppe 's .
node ::
forall dom extLinks gppes .
( HiddenClockResetEnable dom, KnownNat extLinks, KnownNat gppes) =>
NodeConfig extLinks gppes ->
Vec extLinks (Signal dom (DataLink 64)) ->
Vec extLinks (Signal dom (DataLink 64))
node (NodeConfig nmuConfig switchConfig gppeConfigs) linksIn = linksOut
where
(switchOut, swS2Ms) =
mkSwitch switchConfig swCalM2S swRxM2Ss swTxM2Ss switchIn
switchIn = nmuToSwitch :> pesToSwitch ++ linksIn
(splitAtI -> (switchToNmu :> switchToPes, linksOut)) = switchOut
(nmuToSwitch, nmuM2Ss) = managementUnit nmuConfig switchToNmu nmuS2Ms
(swM2Ss, peM2Ss) = splitAtI nmuM2Ss
(swCalM2S :> swRxM2Ss, swTxM2Ss) = splitAtI swM2Ss
(swCalS2M :> swRxS2Ms, swTxS2Ms) = splitAtI
@(1 + (extLinks + (gppes + 1))) @(extLinks + (gppes + 1)) swS2Ms
nmuS2Ms = swCalS2M :> (swRxS2Ms ++ swTxS2Ms ++ peS2Ms)
(pesToSwitch, concat -> peS2Ms) =
unzip $ gppe <$> zip3 gppeConfigs switchToPes (unconcatI peM2Ss)
The management unit contains the 4 wishbone busses that each pe has
and also the management busses for itself and all other pe 's in this node .
data ManagementConfig nodeBusses where
ManagementConfig ::
(KnownNat nodeBusses) =>
LinkConfig 4 (32 - CLog 2 (nodeBusses + 8)) ->
| Configuration for the ' managementUnit 's ' processingElement ' . Controls 8 local busses
PeConfig (nodeBusses + 8) ->
ManagementConfig nodeBusses
data GppeConfig nmuRemBusWidth where
GppeConfig ::
LinkConfig 4 nmuRemBusWidth ->
has four external busses connected to the instruction memory , data memory
PeConfig 4 ->
GppeConfig nmuRemBusWidth
# NOINLINE gppe #
Bittide Link . It takes a ' GppeConfig ' , incoming link and four incoming ' WishboneM2S '
signals and produces the outgoing link alongside four ' WishhboneS2 M ' signals .
The order of Wishbone busses is as follows :
gppe ::
(KnownNat nmuRemBusWidth, 2 <= nmuRemBusWidth, HiddenClockResetEnable dom) =>
( all local parameters
, . Link '
( GppeConfig nmuRemBusWidth
, Signal dom (DataLink 64)
, Vec 4 (Signal dom (WishboneM2S nmuRemBusWidth 4 (Bytes 4)))) ->
( Signal dom (DataLink 64)
, Vec 4 (Signal dom (WishboneS2M (Bytes 4))))
gppe (GppeConfig linkConfig peConfig, linkIn, splitAtI -> (nmuM2S0, nmuM2S1)) =
(linkOut, nmuS2M0 ++ nmuS2M1)
where
(suS2M, nmuS2M0) = linkToPe linkConfig linkIn sc suM2S nmuM2S0
(linkOut, guS2M, nmuS2M1) = peToLink linkConfig sc guM2S nmuM2S1
(suM2S :> guM2S :> Nil) = processingElement peConfig (suS2M :> guS2M :> Nil)
sc = sequenceCounter
# NOINLINE managementUnit #
Bittide Link . It takes a ' ManagementConfig ' , incoming link and a vector of incoming
managementUnit ::
forall dom nodeBusses .
(HiddenClockResetEnable dom, KnownNat nodeBusses, CLog 2 (nodeBusses + 8) <= 30) =>
ManagementConfig nodeBusses ->
Signal dom (DataLink 64) ->
Vec nodeBusses (Signal dom (WishboneS2M (Bytes 4))) ->
( Signal dom (DataLink 64)
, Vec nodeBusses (Signal dom (WishboneM2S (32 - CLog 2 (nodeBusses + 8)) 4 (Bytes 4))))
managementUnit (ManagementConfig linkConfig peConfig) linkIn nodeS2Ms =
(linkOut, nodeM2Ss)
where
(suS2M, nmuS2M0) = linkToPe linkConfig linkIn sc suM2S nmuM2S0
(linkOut, guS2M, nmuS2M1) = peToLink linkConfig sc guM2S nmuM2S1
(suM2S :> guM2S :> rest) = nmuM2Ss
(splitAtI -> (nmuM2S0, nmuM2S1), nodeM2Ss) = splitAtI rest
nmuM2Ss = processingElement peConfig nmuS2Ms
nmuS2Ms = suS2M :> guS2M :> nmuS2M0 ++ nmuS2M1 ++ nodeS2Ms
sc = sequenceCounter
|
86683170b59a3d58f5f9ad20df94270b95745b7c261e2abf0494086d9c80e93a | tari3x/csec-modex | pitparser.ml | type token =
| CHOICE
| STAR
| COMMA
| LPAREN
| RPAREN
| LBRACKET
| RBRACKET
| BAR
| SEMI
| NEW
| OUT
| IN
| IDENT of (Pitptree.ident)
| STRING of (Pitptree.ident)
| INT of (int)
| REPL
| IF
| THEN
| ELSE
| EQUAL
| FUN
| EQUATION
| REDUCTION
| PREDICATE
| PROCESS
| SLASH
| DOT
| EOF
| LET
| QUERY
| BEFORE
| PUTBEGIN
| NONINTERF
| EVENT
| NOT
| ELIMTRUE
| FREE
| SUCHTHAT
| CLAUSES
| RED
| EQUIV
| EQUIVEQ
| WEDGE
| DIFF
| COLON
| NOUNIF
| PHASE
| AMONG
| WEAKSECRET
| PARAM
| TYPE
| SET
| FORALL
| CONST
| INJEVENT
| OR
| CHANNEL
| LETFUN
| DEFINE
| EXPAND
| YIELD
| LEQ
| PROBA
| LBRACE
| RBRACE
| PROOF
| TABLE
| INSERT
| GET
open Parsing;;
let _ = parse_error;;
# 2 "pitparser.mly"
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* and *
* *
* Copyright ( C ) INRIA , LIENS , 2000 - 2009 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* Bruno Blanchet and Xavier Allamigeon *
* *
* Copyright (C) INRIA, LIENS, MPII 2000-2009 *
* *
*************************************************************)
This program is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details ( in file LICENSE ) .
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details (in file LICENSE).
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
# 31 "pitparser.mly"
open Parsing_helper
open Ptree
open Pitptree
exception Syntax
# 110 "pitparser.ml"
let yytransl_const = [|
257 (* CHOICE *);
258 (* STAR *);
259 (* COMMA *);
260 (* LPAREN *);
261 (* RPAREN *);
262 (* LBRACKET *);
263 (* RBRACKET *);
BAR
265 (* SEMI *);
266 (* NEW *);
267 (* OUT *);
268 (* IN *);
272 (* REPL *);
273 (* IF *);
274 (* THEN *);
275 (* ELSE *);
EQUAL
277 (* FUN *);
278 (* EQUATION *);
279 (* REDUCTION *);
280 (* PREDICATE *);
281 (* PROCESS *);
282 (* SLASH *);
DOT
EOF
284 (* LET *);
285 (* QUERY *);
BEFORE
NONINTERF
289 (* EVENT *);
290 (* NOT *);
ELIMTRUE
292 (* FREE *);
SUCHTHAT
CLAUSES
295 (* RED *);
EQUIV
EQUIVEQ
298 (* WEDGE *);
DIFF
300 (* COLON *);
NOUNIF
302 (* PHASE *);
AMONG
304 (* WEAKSECRET *);
PARAM
306 (* TYPE *);
307 (* SET *);
308 (* FORALL *);
CONST
310 (* INJEVENT *);
311 (* OR *);
312 (* CHANNEL *);
LETFUN
314 (* DEFINE *);
315 (* EXPAND *);
316 (* YIELD *);
LEQ
PROBA
319 (* LBRACE *);
RBRACE
321 (* PROOF *);
322 (* TABLE *);
323 (* INSERT *);
324 (* GET *);
0|]
let yytransl_block = [|
269 (* IDENT *);
270 (* STRING *);
271 (* INT *);
0|]
let yylhs = "\255\255\
\002\000\002\000\002\000\002\000\002\000\002\000\002\000\002\000\
\002\000\002\000\002\000\002\000\002\000\002\000\002\000\002\000\
\002\000\002\000\002\000\002\000\002\000\002\000\002\000\002\000\
\002\000\002\000\002\000\002\000\002\000\002\000\002\000\002\000\
\002\000\002\000\002\000\002\000\002\000\001\000\020\000\020\000\
\020\000\020\000\021\000\021\000\018\000\018\000\003\000\003\000\
\006\000\006\000\013\000\013\000\011\000\011\000\007\000\007\000\
\005\000\005\000\004\000\004\000\022\000\022\000\008\000\008\000\
\008\000\008\000\008\000\008\000\008\000\008\000\008\000\024\000\
\024\000\023\000\023\000\025\000\025\000\016\000\016\000\015\000\
\015\000\026\000\026\000\026\000\017\000\017\000\017\000\017\000\
\017\000\017\000\017\000\017\000\017\000\017\000\017\000\017\000\
\017\000\017\000\017\000\029\000\029\000\027\000\027\000\030\000\
\030\000\030\000\030\000\028\000\028\000\014\000\014\000\014\000\
\033\000\033\000\034\000\034\000\031\000\031\000\031\000\031\000\
\031\000\031\000\031\000\031\000\036\000\036\000\032\000\032\000\
\037\000\037\000\037\000\037\000\035\000\035\000\009\000\009\000\
\038\000\038\000\038\000\038\000\019\000\019\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\041\000\041\000\
\043\000\043\000\040\000\040\000\040\000\040\000\040\000\044\000\
\044\000\042\000\042\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\
\012\000\045\000\045\000\039\000\039\000\000\000\000\000"
let yylen = "\002\000\
\005\000\010\000\007\000\007\000\005\000\004\000\007\000\008\000\
\005\000\007\000\006\000\009\000\006\000\009\000\006\000\006\000\
\006\000\004\000\006\000\004\000\006\000\004\000\004\000\006\000\
\004\000\004\000\004\000\005\000\006\000\004\000\004\000\007\000\
\003\000\006\000\009\000\007\000\000\000\004\000\001\000\001\000\
\001\000\001\000\001\000\002\000\001\000\003\000\003\000\000\000\
\003\000\001\000\005\000\003\000\001\000\000\000\003\000\000\000\
\001\000\001\000\001\000\000\000\003\000\001\000\004\000\006\000\
\001\000\003\000\003\000\004\000\003\000\003\000\003\000\003\000\
\001\000\001\000\000\000\005\000\001\000\003\000\001\000\003\000\
\001\000\001\000\004\000\004\000\004\000\001\000\006\000\003\000\
\003\000\004\000\003\000\003\000\004\000\004\000\003\000\003\000\
\005\000\002\000\006\000\003\000\001\000\001\000\000\000\006\000\
\004\000\005\000\003\000\001\000\000\000\006\000\006\000\002\000\
\002\000\000\000\002\000\000\000\004\000\004\000\001\000\003\000\
\005\000\002\000\002\000\006\000\003\000\001\000\001\000\000\000\
\006\000\004\000\005\000\003\000\001\000\000\000\006\000\004\000\
\003\000\001\000\003\000\003\000\004\000\003\000\003\000\001\000\
\004\000\002\000\005\000\001\000\001\000\006\000\006\000\004\000\
\007\000\007\000\006\000\004\000\008\000\006\000\004\000\008\000\
\006\000\006\000\008\000\003\000\006\000\003\000\002\000\000\000\
\002\000\000\000\001\000\003\000\003\000\004\000\002\000\003\000\
\001\000\001\000\000\000\004\000\006\000\001\000\003\000\003\000\
\004\000\003\000\003\000\006\000\006\000\006\000\008\000\008\000\
\003\000\003\000\001\000\001\000\000\000\002\000\002\000"
let yydefred = "\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\198\000\000\000\199\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\102\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\074\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\033\000\000\000\000\000\
\112\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\041\000\039\000\040\000\042\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\148\000\000\000\000\000\
\000\000\000\000\000\000\149\000\000\000\000\000\000\000\057\000\
\058\000\000\000\000\000\059\000\055\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\053\000\000\000\000\000\096\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\020\000\000\000\000\000\000\000\089\000\000\000\
\080\000\000\000\000\000\000\000\022\000\078\000\000\000\006\000\
\000\000\025\000\000\000\000\000\071\000\000\000\000\000\000\000\
\030\000\000\000\067\000\000\000\000\000\049\000\000\000\000\000\
\000\000\000\000\000\000\142\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\127\000\115\000\000\000\000\000\
\000\000\018\000\023\000\026\000\000\000\000\000\000\000\000\000\
\031\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\027\000\000\000\044\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\038\000\000\000\000\000\000\000\
\000\000\047\000\005\000\000\000\009\000\000\000\000\000\100\000\
\000\000\000\000\000\000\108\000\000\000\000\000\000\000\083\000\
\084\000\093\000\090\000\094\000\000\000\000\000\000\000\000\000\
\000\000\000\000\072\000\063\000\068\000\000\000\000\000\000\000\
\000\000\000\000\141\000\123\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\196\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\028\000\046\000\000\000\143\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\178\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\166\000\000\000\000\000\000\000\000\000\061\000\000\000\
\000\000\000\000\000\000\011\000\000\000\000\000\097\000\000\000\
\051\000\000\000\019\000\076\000\021\000\000\000\024\000\000\000\
\029\000\000\000\120\000\000\000\000\000\000\000\000\000\125\000\
\000\000\000\000\000\000\017\000\015\000\016\000\000\000\000\000\
\000\000\000\000\193\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\013\000\000\000\184\000\000\000\000\000\034\000\
\000\000\000\000\000\000\000\000\000\000\145\000\000\000\000\000\
\000\000\000\000\173\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\004\000\135\000\000\000\000\000\
\000\000\000\000\087\000\000\000\007\000\064\000\032\000\000\000\
\000\000\000\000\133\000\117\000\000\000\118\000\113\000\111\000\
\110\000\003\000\000\000\000\000\194\000\000\000\180\000\000\000\
\000\000\000\000\185\000\000\000\036\000\010\000\000\000\000\000\
\000\000\000\000\000\000\172\000\176\000\174\000\000\000\000\000\
\000\000\000\000\000\000\000\000\008\000\000\000\000\000\000\000\
\000\000\000\000\121\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\165\000\161\000\000\000\000\000\162\000\000\000\012\000\
\106\000\000\000\000\000\000\000\124\000\014\000\181\000\000\000\
\000\000\000\000\000\000\035\000\154\000\153\000\000\000\000\000\
\000\000\000\000\002\000\104\000\000\000\000\000\000\000\000\000\
\000\000\000\000\163\000\131\000\000\000\000\000\000\000\129\000"
let yydgoto = "\003\000\
\029\000\030\000\092\000\186\000\187\000\065\000\035\000\127\000\
\036\000\183\000\196\000\100\001\197\000\071\000\048\000\053\000\
\098\000\165\000\067\000\166\000\167\000\188\000\128\000\129\000\
\054\000\050\000\099\000\059\001\100\000\060\001\251\000\252\000\
\170\001\145\000\226\001\253\000\227\001\141\000\101\001\128\001\
\138\001\129\001\030\002\130\001\102\001"
let yysindex = "\243\000\
\046\002\046\002\000\000\016\255\046\255\046\255\138\255\144\255\
\151\001\179\255\195\255\229\001\230\000\252\255\046\255\173\255\
\032\000\252\255\057\000\079\000\252\255\252\255\091\000\105\000\
\126\000\138\000\040\255\161\000\000\000\156\255\000\000\186\000\
\170\000\170\001\170\001\191\000\079\001\032\255\251\001\197\000\
\036\255\225\000\251\254\245\000\015\001\020\001\195\000\001\001\
\082\001\029\001\192\255\033\001\024\001\058\001\151\255\055\001\
\034\002\072\001\170\001\041\255\065\001\154\002\070\001\092\001\
\066\001\170\001\046\002\033\255\101\001\108\001\093\001\094\001\
\114\001\191\000\115\001\084\001\118\001\048\000\012\255\143\001\
\122\001\244\255\149\001\194\255\020\255\119\001\159\001\165\001\
\165\002\182\002\252\255\146\001\020\255\150\001\170\000\194\255\
\166\001\028\255\171\001\000\000\172\001\251\001\020\255\161\001\
\142\001\152\001\251\001\251\001\251\001\219\001\046\002\251\001\
\251\001\251\001\251\001\251\001\219\001\187\001\185\001\046\002\
\185\001\020\255\046\002\251\001\046\002\170\001\141\255\207\001\
\000\000\170\001\170\001\170\001\046\002\170\001\170\001\170\001\
\170\001\252\255\020\255\125\002\023\255\000\000\255\002\201\001\
\000\000\197\001\202\255\046\002\046\002\046\002\192\001\100\001\
\020\255\046\002\170\000\245\002\020\255\046\002\020\255\046\002\
\000\000\000\000\000\000\000\000\156\001\244\255\213\001\020\255\
\194\255\212\001\222\001\224\001\226\001\000\000\076\001\245\002\
\075\000\221\001\220\001\000\000\225\001\227\001\014\000\000\000\
\000\000\241\001\248\001\000\000\000\000\170\001\170\001\253\001\
\046\002\005\002\046\002\007\002\000\000\145\255\251\001\000\000\
\165\000\009\002\255\001\251\001\252\255\252\255\013\002\221\255\
\020\002\238\001\000\000\228\001\193\002\000\255\000\000\193\002\
\000\000\170\001\235\001\011\002\000\000\000\000\026\002\000\000\
\149\002\000\000\208\000\170\001\000\000\029\002\134\255\254\001\
\000\000\031\000\000\000\199\002\157\002\000\000\191\000\170\001\
\170\001\170\001\046\255\000\000\038\002\255\002\042\002\039\002\
\043\002\054\002\059\002\037\002\000\000\000\000\255\002\035\255\
\036\002\000\000\000\000\000\000\046\002\044\002\056\002\191\000\
\000\000\061\002\081\002\245\002\073\002\084\002\245\002\075\000\
\106\002\163\002\088\002\050\002\113\002\000\000\046\002\000\000\
\244\255\121\002\251\000\083\002\245\002\245\002\245\002\013\255\
\131\002\116\002\128\000\058\255\245\002\091\002\123\002\137\002\
\143\002\142\002\150\002\194\255\000\000\109\002\020\255\143\000\
\099\255\000\000\000\000\191\000\000\000\135\002\046\002\000\000\
\136\002\146\002\166\002\000\000\111\002\170\000\014\255\000\000\
\000\000\000\000\000\000\000\000\046\002\167\002\046\002\151\002\
\046\002\170\001\000\000\000\000\000\000\046\002\155\002\199\002\
\199\002\199\002\000\000\000\000\181\002\164\002\255\002\169\002\
\255\002\255\002\141\002\186\002\046\002\000\000\046\002\046\002\
\176\002\173\002\245\002\003\001\209\002\000\000\171\002\245\002\
\120\002\185\002\206\002\245\002\245\002\046\002\245\002\245\002\
\245\002\170\002\046\002\200\002\000\000\000\000\203\002\000\000\
\020\255\007\001\010\001\223\002\218\002\194\255\090\255\229\002\
\233\002\000\000\128\000\020\255\215\000\245\002\245\002\245\002\
\194\255\000\000\245\002\128\000\131\002\020\255\000\000\046\002\
\046\255\207\002\194\255\000\000\251\001\219\002\000\000\225\002\
\000\000\251\001\000\000\000\000\000\000\046\002\000\000\234\001\
\000\000\046\002\000\000\255\000\240\002\255\002\242\002\000\000\
\235\002\226\002\202\255\000\000\000\000\000\000\046\002\245\002\
\064\001\245\002\000\000\020\255\246\002\245\002\245\002\245\002\
\177\255\210\002\000\000\103\000\000\000\201\002\046\002\000\000\
\046\002\046\002\251\002\245\002\128\000\000\000\194\255\056\000\
\020\255\128\000\000\000\000\003\255\001\120\255\004\002\002\003\
\131\002\003\003\005\003\191\000\000\000\000\000\046\002\191\255\
\002\002\251\001\000\000\082\001\000\000\000\000\000\000\247\002\
\254\002\007\003\000\000\000\000\009\003\000\000\000\000\000\000\
\000\000\000\000\168\002\245\002\000\000\006\003\000\000\147\001\
\078\002\080\002\000\000\211\002\000\000\000\000\194\255\017\001\
\019\003\131\002\194\255\000\000\000\000\000\000\194\255\194\255\
\143\002\143\002\176\255\004\003\000\000\046\002\165\000\030\002\
\255\002\014\003\000\000\255\002\046\002\249\001\245\002\245\002\
\245\002\245\002\046\002\131\002\143\002\143\002\131\002\081\000\
\180\000\000\000\000\000\194\255\245\002\000\000\046\002\000\000\
\000\000\165\000\020\003\255\002\000\000\000\000\000\000\201\002\
\201\002\087\002\105\002\000\000\000\000\000\000\194\255\194\255\
\131\002\089\002\000\000\000\000\255\000\027\003\245\002\245\002\
\131\002\131\002\000\000\000\000\255\000\201\002\201\002\000\000"
let yyrindex = "\000\000\
\012\003\038\004\000\000\000\000\214\001\214\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\214\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\018\003\018\003\000\000\034\003\000\000\
\010\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\216\255\022\003\146\255\000\000\000\000\033\003\000\000\000\000\
\000\000\000\000\036\003\174\002\000\000\000\000\000\000\238\000\
\000\000\000\000\002\000\037\003\000\000\000\000\000\000\000\000\
\000\000\018\003\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\042\003\000\000\000\000\091\001\
\000\000\000\000\000\000\000\000\042\003\000\000\058\003\000\000\
\096\001\060\003\000\000\000\000\145\001\034\003\000\000\000\000\
\000\000\000\000\034\003\000\000\034\003\000\000\002\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002\000\
\000\000\042\003\002\000\000\000\002\000\000\000\062\003\000\000\
\000\000\036\003\000\000\000\000\002\000\000\000\000\000\000\000\
\000\000\000\000\000\000\228\255\000\000\000\000\063\003\000\000\
\000\000\000\000\000\000\002\000\002\000\002\000\000\000\000\000\
\000\000\002\000\058\003\000\000\042\003\008\003\042\003\002\000\
\000\000\000\000\000\000\000\000\000\000\009\255\010\003\042\003\
\000\000\000\000\000\000\000\000\035\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\064\003\000\000\000\000\000\000\000\000\000\000\
\002\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\
\066\003\000\000\106\255\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\226\255\023\003\083\255\000\000\013\003\
\000\000\000\000\146\255\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\153\001\
\000\000\204\001\000\000\110\002\000\000\000\000\018\003\000\000\
\000\000\000\000\214\001\000\000\000\000\063\003\000\000\049\003\
\000\000\000\000\065\003\000\000\000\000\000\000\000\000\037\003\
\000\000\000\000\000\000\000\000\002\000\000\000\000\000\018\003\
\000\000\000\000\000\000\070\003\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\070\003\035\000\
\082\000\000\000\071\003\051\003\000\000\000\000\000\000\000\000\
\053\000\000\000\000\000\000\000\000\000\000\000\000\000\051\255\
\024\255\000\000\000\000\018\003\000\000\000\000\002\000\000\000\
\000\000\000\000\000\000\000\000\194\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\002\000\000\000\002\000\000\000\
\002\000\000\000\000\000\000\000\000\000\002\000\000\000\233\255\
\240\255\243\255\000\000\000\000\000\000\050\003\063\003\000\000\
\000\000\000\000\206\000\000\000\002\000\000\000\002\000\002\000\
\000\000\000\000\000\000\072\003\000\000\000\000\000\000\070\003\
\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\
\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\218\000\073\003\
\000\000\000\000\071\003\000\000\124\001\000\000\000\000\070\003\
\000\000\000\000\070\003\071\003\097\000\000\000\000\000\002\000\
\214\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\
\000\000\002\000\000\000\074\003\000\000\000\000\000\000\000\000\
\000\000\037\003\000\000\000\000\000\000\000\000\002\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\023\000\000\000\200\000\000\000\217\000\008\003\000\000\
\002\000\002\000\000\000\000\000\000\000\000\000\000\000\019\000\
\000\000\000\000\000\000\000\000\165\255\130\000\135\000\000\000\
\117\000\000\000\000\000\018\003\000\000\000\000\002\000\000\000\
\075\003\000\000\000\000\039\003\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\145\000\000\000\000\000\000\000\000\000\000\000\000\000\
\053\000\053\000\150\000\000\000\000\000\002\000\000\000\076\003\
\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\
\000\000\000\000\002\000\160\000\053\000\053\000\166\000\072\000\
\101\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\
\000\000\000\000\077\003\000\000\000\000\000\000\000\000\234\000\
\013\001\000\000\107\000\000\000\000\000\000\000\000\000\000\000\
\179\000\150\000\000\000\000\000\000\000\078\003\000\000\000\000\
\190\000\221\000\000\000\000\000\000\000\036\001\063\001\000\000"
let yygindex = "\000\000\
\000\000\254\255\220\255\188\255\158\255\042\000\007\000\033\000\
\190\002\168\255\181\003\018\000\250\255\122\255\178\000\021\002\
\012\000\061\003\100\003\000\000\178\003\043\003\215\003\175\000\
\000\000\000\000\073\001\000\000\148\003\107\254\048\255\025\255\
\000\000\178\002\000\000\011\003\134\254\000\000\236\254\088\255\
\055\000\221\254\045\002\147\002\172\002"
let yytablesize = 1125
let yytable = "\031\000\
\094\000\037\000\047\000\052\000\203\000\056\000\063\000\198\000\
\039\001\070\000\124\001\034\000\001\001\045\001\085\001\157\000\
\031\001\043\000\152\000\112\000\049\000\066\000\183\000\057\000\
\194\000\154\001\087\000\105\000\032\000\136\000\199\000\243\000\
\184\000\112\000\144\000\095\000\143\000\151\000\143\000\102\000\
\239\000\114\000\115\000\113\000\130\000\062\000\092\001\112\000\
\106\000\244\000\136\000\096\000\168\000\223\000\008\001\114\000\
\115\000\113\000\144\000\073\000\144\000\131\001\076\000\077\000\
\142\000\066\000\089\000\090\000\116\000\114\000\115\000\158\000\
\043\000\125\001\158\000\185\000\103\000\182\000\066\000\103\000\
\027\001\146\000\116\000\181\001\103\000\092\000\033\001\092\000\
\019\001\092\000\021\001\092\000\066\000\131\001\092\000\204\001\
\164\000\033\000\140\000\026\001\155\000\132\001\082\000\107\001\
\211\001\066\000\190\000\145\001\211\000\092\000\052\000\165\001\
\092\000\033\002\052\000\208\001\167\000\221\000\210\001\208\000\
\224\000\049\000\226\000\212\000\213\000\214\000\215\000\216\000\
\049\000\159\000\233\000\255\001\192\000\201\001\156\000\225\000\
\167\001\092\000\077\001\109\001\052\002\135\000\052\000\228\000\
\147\000\002\001\003\001\004\001\077\000\170\000\037\000\009\001\
\044\001\132\000\122\000\020\001\038\000\022\001\227\000\150\000\
\132\000\111\001\112\001\231\000\232\000\151\000\234\000\235\000\
\236\000\237\000\038\001\055\001\077\000\018\001\113\001\134\000\
\135\000\123\000\169\000\238\000\084\000\243\001\134\000\135\000\
\172\000\068\000\060\002\028\002\136\000\160\000\051\001\051\000\
\053\001\034\001\064\002\136\000\109\001\169\000\044\001\187\000\
\069\000\052\000\079\001\170\000\171\000\172\000\173\000\055\000\
\174\000\175\000\176\000\141\001\029\002\229\001\000\001\063\001\
\186\000\006\002\111\001\112\001\157\000\177\000\048\001\049\001\
\082\000\067\001\178\000\097\001\088\000\069\000\088\000\113\001\
\088\000\188\000\088\000\103\000\138\000\088\000\118\000\179\000\
\112\000\137\000\082\000\001\000\002\000\161\000\064\001\065\001\
\139\000\066\000\113\000\140\000\088\000\180\000\138\000\088\000\
\162\000\163\000\094\001\137\000\181\000\182\000\114\000\115\000\
\064\000\106\001\139\000\088\000\189\000\140\000\164\000\146\001\
\080\001\081\001\082\001\116\000\117\001\044\001\195\001\152\000\
\088\000\183\000\037\000\183\000\249\001\183\000\183\000\210\000\
\105\001\205\001\183\000\192\000\233\001\200\001\217\000\144\000\
\183\000\183\000\144\000\212\001\072\000\152\000\122\001\123\001\
\209\001\183\000\132\000\155\000\148\001\144\000\133\001\153\001\
\035\002\168\000\216\001\037\002\168\000\144\000\191\000\044\001\
\183\000\037\000\155\001\156\000\157\001\074\000\159\001\168\000\
\134\000\135\000\251\001\161\001\158\000\183\000\035\001\168\000\
\182\000\238\001\182\000\054\002\182\000\182\000\146\000\036\001\
\044\001\182\000\172\001\075\000\173\001\174\001\037\001\182\000\
\182\000\182\000\158\000\047\002\146\000\164\000\252\001\078\000\
\182\000\155\000\160\001\187\001\146\000\190\000\250\001\190\000\
\192\001\190\000\190\000\164\000\177\001\079\000\190\000\182\000\
\182\000\167\000\109\001\164\000\190\000\185\001\186\001\155\000\
\188\001\189\001\190\001\035\001\182\000\190\000\159\000\167\000\
\070\001\159\000\080\000\156\000\127\001\213\001\156\000\167\000\
\111\001\112\001\075\001\037\001\159\000\147\000\081\000\206\001\
\207\001\156\000\170\000\221\001\159\000\170\000\020\002\223\001\
\217\001\156\000\023\002\147\000\150\000\220\001\024\002\025\002\
\170\000\144\001\151\000\147\000\234\001\083\000\202\000\004\002\
\170\000\057\001\150\000\207\000\058\001\209\000\086\000\169\000\
\151\000\135\000\150\000\044\001\244\001\085\000\245\001\246\001\
\151\000\235\001\160\000\049\002\091\000\169\000\048\002\240\001\
\241\001\242\001\187\000\110\000\187\000\169\000\187\000\187\000\
\160\000\101\000\074\001\187\000\005\002\248\001\057\002\058\002\
\160\000\187\000\187\000\186\000\171\000\186\000\171\000\186\000\
\186\000\157\000\187\000\132\000\186\000\008\002\058\000\114\000\
\114\000\059\000\186\000\186\000\188\000\104\000\188\000\157\000\
\188\000\188\000\060\000\186\000\050\000\188\000\050\000\157\000\
\107\000\134\000\135\000\188\000\188\000\014\002\187\000\120\001\
\111\001\112\001\044\001\032\002\188\000\178\001\136\000\061\000\
\050\000\196\001\038\002\224\001\197\001\113\001\225\001\189\000\
\044\002\189\000\108\000\189\000\189\000\021\002\109\001\109\000\
\189\000\050\000\109\001\111\000\051\002\109\001\189\000\189\000\
\040\002\041\002\042\002\043\002\109\001\117\000\192\000\189\000\
\192\000\119\000\192\000\192\000\111\001\112\001\050\002\192\000\
\111\001\112\001\120\000\111\001\112\001\192\000\192\000\026\002\
\027\002\113\001\111\001\112\001\121\000\113\001\192\000\124\000\
\113\001\191\000\236\001\191\000\131\000\191\000\191\000\113\001\
\062\002\063\002\191\000\045\002\046\002\126\000\137\000\169\000\
\191\000\191\000\093\000\109\001\091\000\170\000\171\000\172\000\
\032\001\191\000\174\000\175\000\176\000\065\000\138\000\065\000\
\065\000\065\000\086\000\065\000\086\000\112\000\086\000\177\000\
\086\000\111\001\112\001\086\000\178\000\139\000\065\000\113\000\
\006\001\146\000\007\001\086\000\147\000\065\000\113\001\148\000\
\149\000\179\000\086\000\114\000\115\000\086\000\175\000\153\000\
\175\000\065\000\065\000\065\000\065\000\065\000\152\000\180\000\
\116\000\086\000\086\000\220\000\150\000\222\000\181\000\182\000\
\154\000\065\000\159\000\098\000\160\000\098\000\086\000\098\000\
\168\000\098\000\039\000\066\000\098\000\066\000\066\000\066\000\
\040\000\066\000\103\000\041\000\098\000\016\002\109\001\189\000\
\130\000\102\000\058\000\098\000\193\000\059\000\098\000\200\000\
\195\000\201\000\042\000\066\000\204\000\043\000\088\000\044\000\
\045\000\205\000\098\000\098\000\111\001\112\001\218\000\066\000\
\066\000\066\000\066\000\206\000\085\000\219\000\085\000\098\000\
\085\000\113\001\085\000\061\000\046\000\085\000\070\000\066\000\
\070\000\070\000\070\000\229\000\070\000\085\000\056\000\254\000\
\255\000\056\000\005\001\023\001\085\000\025\001\039\000\085\000\
\028\001\029\001\056\000\030\001\040\000\031\001\070\000\097\000\
\039\000\040\001\041\001\085\000\085\000\042\001\040\000\043\001\
\222\001\041\000\070\000\070\000\070\000\046\001\042\000\056\000\
\085\000\043\000\047\001\044\000\045\000\132\000\039\000\039\002\
\042\000\062\001\070\000\050\001\040\000\044\000\045\000\097\000\
\069\001\052\001\007\002\054\001\109\001\061\001\115\000\000\002\
\046\000\066\001\086\000\134\000\135\000\112\000\042\000\109\001\
\068\001\118\000\046\000\044\000\045\000\086\000\072\001\113\000\
\136\000\076\001\111\001\112\001\086\000\071\001\034\002\086\000\
\135\000\091\001\087\001\114\000\115\000\111\001\112\001\113\001\
\046\000\112\000\084\001\086\000\086\000\112\000\086\001\088\001\
\116\000\089\001\113\001\113\000\125\000\090\001\093\001\113\000\
\086\000\098\001\004\000\005\000\006\000\007\000\095\001\114\000\
\115\000\008\000\009\000\114\000\115\000\010\000\011\000\012\000\
\013\000\014\000\096\001\015\000\116\000\103\001\099\001\104\001\
\116\000\017\002\016\000\018\002\114\001\017\000\018\000\019\000\
\020\000\109\001\021\000\109\001\028\002\022\000\023\000\024\000\
\025\000\055\002\109\001\026\000\109\001\108\001\027\000\028\000\
\069\000\115\001\069\000\069\000\069\000\116\001\069\000\111\001\
\112\001\111\001\112\001\056\002\109\001\119\001\121\001\134\001\
\111\001\112\001\111\001\112\001\113\001\126\001\113\001\109\001\
\069\000\182\001\044\001\109\001\136\001\113\001\135\001\113\001\
\132\000\139\001\111\001\112\001\069\000\069\000\069\000\137\001\
\142\001\140\001\147\001\149\001\152\001\111\001\112\001\113\001\
\150\001\111\001\112\001\240\000\241\000\242\000\134\000\135\000\
\112\000\164\001\113\001\156\001\151\001\132\000\113\001\073\001\
\132\000\158\001\113\000\136\000\133\000\162\001\109\001\078\001\
\190\000\163\001\169\001\109\001\166\001\110\001\114\000\115\000\
\176\001\065\000\013\002\134\000\135\000\171\001\134\000\135\000\
\065\000\191\000\175\001\116\000\111\001\112\001\134\000\135\000\
\136\000\111\001\112\001\136\000\112\000\179\001\180\001\065\000\
\065\000\113\001\132\000\136\000\109\001\183\001\113\001\134\000\
\135\000\184\001\193\001\198\001\065\000\194\001\199\001\202\001\
\191\001\215\001\114\000\115\000\136\000\203\001\218\001\219\001\
\134\000\135\000\111\001\112\001\228\001\011\001\230\001\116\000\
\012\001\231\001\239\001\144\000\112\001\136\000\013\001\113\001\
\245\000\014\001\246\000\247\001\254\001\015\001\001\002\002\002\
\247\000\003\002\009\002\248\000\010\002\011\002\015\002\091\000\
\016\001\091\000\019\002\091\000\012\002\091\000\017\001\022\002\
\091\000\095\000\249\000\095\000\053\002\095\000\031\002\095\000\
\250\000\036\002\095\000\061\002\037\000\037\000\103\000\091\000\
\075\000\099\000\091\000\099\000\048\000\099\000\060\000\099\000\
\081\000\095\000\099\000\119\000\122\000\119\000\122\000\119\000\
\122\000\119\000\122\000\079\000\119\000\122\000\054\000\116\000\
\101\000\099\000\073\000\128\000\062\000\126\000\171\000\037\000\
\109\000\045\000\197\000\179\000\195\000\177\000\214\001\010\001\
\134\000\107\000\105\000\132\000\130\000\118\001\083\001\024\001\
\230\000\143\001\056\001\232\001\253\001\237\001\059\002\000\000\
\000\000\000\000\000\000\000\000\168\001"
let yycheck = "\002\000\
\037\000\000\000\009\000\010\000\103\000\012\000\013\000\096\000\
\177\000\016\000\031\001\005\000\147\000\000\000\246\000\004\001\
\004\001\009\001\000\000\020\001\009\000\015\000\000\000\012\000\
\093\000\012\001\033\000\033\001\013\001\006\001\003\001\009\001\
\013\001\020\001\000\000\004\001\004\001\074\000\004\001\004\001\
\139\000\042\001\043\001\030\001\004\001\013\000\255\000\020\001\
\054\001\027\001\027\001\020\001\000\000\122\000\153\000\042\001\
\043\001\030\001\026\001\018\000\026\001\004\001\021\000\022\000\
\067\000\042\001\034\000\035\000\055\001\042\001\043\001\000\000\
\064\001\061\001\063\001\056\001\044\001\000\000\055\001\044\001\
\169\000\000\000\055\001\104\001\044\001\003\001\175\000\005\001\
\157\000\007\001\159\000\009\001\042\001\004\001\012\001\131\001\
\000\000\052\001\066\000\168\000\000\000\044\001\063\001\016\001\
\140\001\055\001\000\000\009\001\111\000\027\001\005\001\087\001\
\030\001\007\002\009\001\136\001\000\000\120\000\139\001\108\000\
\123\000\110\000\125\000\112\000\113\000\114\000\115\000\116\000\
\117\000\000\000\133\000\012\001\091\000\044\001\000\000\124\000\
\089\001\055\001\005\001\020\001\034\002\043\001\037\001\003\001\
\000\000\148\000\149\000\150\000\003\001\000\000\013\001\154\000\
\008\001\020\001\004\001\158\000\013\001\160\000\126\000\000\000\
\020\001\042\001\043\001\131\000\132\000\000\000\134\000\135\000\
\136\000\137\000\177\000\027\001\027\001\156\000\055\001\042\001\
\043\001\027\001\000\000\138\000\025\001\005\001\042\001\043\001\
\020\001\013\001\053\002\012\001\055\001\000\000\193\000\013\001\
\195\000\176\000\061\002\055\001\020\001\004\001\008\001\000\000\
\028\001\037\001\239\000\010\001\011\001\012\001\013\001\013\001\
\015\001\016\001\017\001\044\001\037\001\166\001\013\001\204\000\
\000\000\027\001\042\001\043\001\000\000\028\001\190\000\191\000\
\009\001\005\001\033\001\008\001\003\001\028\001\005\001\055\001\
\007\001\000\000\009\001\044\001\009\001\012\001\047\001\046\001\
\020\001\009\001\027\001\001\000\002\000\002\001\205\000\206\000\
\009\001\243\000\030\001\009\001\027\001\060\001\027\001\030\001\
\013\001\014\001\005\001\027\001\067\001\068\001\042\001\043\001\
\013\001\016\001\027\001\042\001\000\000\027\001\027\001\052\001\
\240\000\241\000\242\000\055\001\023\001\008\001\121\001\005\001\
\055\001\003\001\025\001\005\001\197\001\007\001\008\001\110\000\
\015\001\132\001\012\001\000\000\171\001\126\001\117\000\005\001\
\018\001\019\001\008\001\142\001\013\001\027\001\029\001\030\001\
\137\001\027\001\020\001\004\001\055\001\019\001\037\001\062\001\
\009\002\005\001\147\001\012\002\008\001\027\001\000\000\008\001\
\042\001\064\001\069\001\020\001\071\001\013\001\073\001\019\001\
\042\001\043\001\019\001\078\001\005\001\055\001\004\001\027\001\
\003\001\180\001\005\001\036\002\007\001\008\001\005\001\013\001\
\008\001\012\001\093\001\013\001\095\001\096\001\020\001\018\001\
\019\001\020\001\027\001\019\001\019\001\005\001\201\001\013\001\
\027\001\005\001\074\001\110\001\027\001\003\001\199\001\005\001\
\115\001\007\001\008\001\019\001\099\001\013\001\012\001\042\001\
\043\001\005\001\020\001\027\001\018\001\108\001\109\001\027\001\
\111\001\112\001\113\001\004\001\055\001\027\001\005\001\019\001\
\218\000\008\001\013\001\005\001\013\001\144\001\008\001\027\001\
\042\001\043\001\228\000\020\001\019\001\005\001\013\001\134\001\
\135\001\019\001\005\001\158\001\027\001\008\001\247\001\162\001\
\149\001\027\001\251\001\019\001\005\001\154\001\255\001\000\002\
\019\001\027\001\005\001\027\001\175\001\013\001\102\000\212\001\
\027\001\013\001\019\001\107\000\016\001\109\000\013\001\005\001\
\019\001\043\001\027\001\008\001\191\001\004\001\193\001\194\001\
\027\001\176\001\005\001\028\002\006\001\019\001\019\001\182\001\
\183\001\184\001\003\001\009\001\005\001\027\001\007\001\008\001\
\019\001\013\001\003\001\012\001\215\001\196\001\047\002\048\002\
\027\001\018\001\019\001\003\001\003\001\005\001\005\001\007\001\
\008\001\005\001\027\001\020\001\012\001\218\001\001\001\026\001\
\027\001\004\001\018\001\019\001\003\001\013\001\005\001\019\001\
\007\001\008\001\013\001\027\001\007\001\012\001\009\001\027\001\
\004\001\042\001\043\001\018\001\019\001\236\001\055\001\005\001\
\042\001\043\001\008\001\006\002\027\001\003\001\055\001\034\001\
\027\001\003\001\013\002\013\001\003\001\055\001\016\001\003\001\
\019\002\005\001\004\001\007\001\008\001\005\001\020\001\004\001\
\012\001\044\001\020\001\027\001\031\002\020\001\018\001\019\001\
\015\002\016\002\017\002\018\002\020\001\009\001\003\001\027\001\
\005\001\009\001\007\001\008\001\042\001\043\001\029\002\012\001\
\042\001\043\001\027\001\042\001\043\001\018\001\019\001\001\002\
\002\002\055\001\042\001\043\001\003\001\055\001\027\001\009\001\
\055\001\003\001\003\001\005\001\004\001\007\001\008\001\055\001\
\055\002\056\002\012\001\021\002\022\002\006\001\009\001\004\001\
\018\001\019\001\004\001\020\001\006\001\010\001\011\001\012\001\
\013\001\027\001\015\001\016\001\017\001\003\001\003\001\005\001\
\006\001\007\001\003\001\009\001\005\001\020\001\007\001\028\001\
\009\001\042\001\043\001\012\001\033\001\044\001\020\001\030\001\
\013\001\013\001\015\001\020\001\009\001\027\001\055\001\027\001\
\027\001\046\001\027\001\042\001\043\001\030\001\003\001\044\001\
\005\001\039\001\040\001\041\001\042\001\043\001\020\001\060\001\
\055\001\042\001\043\001\119\000\027\001\121\000\067\001\068\001\
\027\001\055\001\004\001\003\001\027\001\005\001\055\001\007\001\
\004\001\009\001\004\001\003\001\012\001\005\001\006\001\007\001\
\010\001\009\001\044\001\013\001\020\001\019\001\020\001\009\001\
\004\001\004\001\001\001\027\001\027\001\004\001\030\001\005\001\
\027\001\006\001\028\001\027\001\020\001\031\001\013\001\033\001\
\034\001\044\001\042\001\043\001\042\001\043\001\004\001\039\001\
\040\001\041\001\042\001\044\001\003\001\013\001\005\001\055\001\
\007\001\055\001\009\001\034\001\054\001\012\001\003\001\055\001\
\005\001\006\001\007\001\005\001\009\001\020\001\001\001\015\001\
\020\001\004\001\027\001\064\001\027\001\009\001\004\001\030\001\
\013\001\004\001\013\001\004\001\010\001\004\001\027\001\013\001\
\004\001\013\001\015\001\042\001\043\001\013\001\010\001\013\001\
\007\001\013\001\039\001\040\001\041\001\005\001\028\001\034\001\
\055\001\031\001\003\001\033\001\034\001\020\001\004\001\007\001\
\028\001\003\001\055\001\007\001\010\001\033\001\034\001\013\001\
\027\001\005\001\009\001\005\001\020\001\005\001\043\001\012\001\
\054\001\005\001\009\001\042\001\043\001\020\001\028\001\020\001\
\005\001\047\001\054\001\033\001\034\001\020\001\005\001\030\001\
\055\001\005\001\042\001\043\001\027\001\027\001\009\001\030\001\
\043\001\005\001\004\001\042\001\043\001\042\001\043\001\055\001\
\054\001\020\001\013\001\042\001\043\001\020\001\013\001\013\001\
\055\001\004\001\055\001\030\001\027\001\003\001\027\001\030\001\
\055\001\005\001\021\001\022\001\023\001\024\001\027\001\042\001\
\043\001\028\001\029\001\042\001\043\001\032\001\033\001\034\001\
\035\001\036\001\027\001\038\001\055\001\013\001\006\001\004\001\
\055\001\012\001\045\001\012\001\005\001\048\001\049\001\050\001\
\051\001\020\001\053\001\020\001\012\001\056\001\057\001\058\001\
\059\001\019\001\020\001\062\001\020\001\004\001\065\001\066\001\
\003\001\064\001\005\001\006\001\007\001\005\001\009\001\042\001\
\043\001\042\001\043\001\019\001\020\001\005\001\044\001\037\001\
\042\001\043\001\042\001\043\001\055\001\018\001\055\001\020\001\
\027\001\018\001\008\001\020\001\004\001\055\001\020\001\055\001\
\020\001\004\001\042\001\043\001\039\001\040\001\041\001\009\001\
\044\001\004\001\020\001\020\001\046\001\042\001\043\001\055\001\
\015\001\042\001\043\001\039\001\040\001\041\001\042\001\043\001\
\020\001\006\001\055\001\005\001\007\001\020\001\055\001\027\001\
\020\001\027\001\030\001\055\001\027\001\027\001\020\001\027\001\
\020\001\005\001\046\001\020\001\020\001\027\001\042\001\043\001\
\020\001\020\001\027\001\042\001\043\001\012\001\042\001\043\001\
\027\001\020\001\027\001\055\001\042\001\043\001\042\001\043\001\
\055\001\042\001\043\001\055\001\020\001\005\001\044\001\042\001\
\043\001\055\001\020\001\055\001\020\001\037\001\055\001\042\001\
\043\001\020\001\027\001\005\001\055\001\027\001\013\001\003\001\
\063\001\027\001\042\001\043\001\055\001\005\001\020\001\015\001\
\042\001\043\001\042\001\043\001\005\001\001\001\005\001\055\001\
\004\001\015\001\005\001\026\001\043\001\055\001\010\001\055\001\
\002\001\013\001\004\001\009\001\005\001\017\001\005\001\005\001\
\010\001\005\001\020\001\013\001\015\001\007\001\009\001\003\001\
\028\001\005\001\064\001\007\001\012\001\009\001\034\001\005\001\
\012\001\003\001\028\001\005\001\009\001\007\001\027\001\009\001\
\034\001\020\001\012\001\009\001\025\001\000\000\005\001\027\001\
\005\001\003\001\030\001\005\001\027\001\007\001\005\001\009\001\
\027\001\027\001\012\001\003\001\003\001\005\001\005\001\007\001\
\007\001\009\001\009\001\027\001\012\001\012\001\005\001\027\001\
\005\001\027\001\005\001\005\001\005\001\005\001\020\001\064\001\
\007\001\064\001\005\001\005\001\005\001\005\001\145\001\155\000\
\007\001\007\001\007\001\007\001\007\001\025\001\243\000\166\000\
\130\000\047\001\199\000\170\001\202\001\178\001\050\002\255\255\
\255\255\255\255\255\255\255\255\090\001"
let yynames_const = "\
CHOICE\000\
STAR\000\
COMMA\000\
LPAREN\000\
RPAREN\000\
LBRACKET\000\
RBRACKET\000\
BAR\000\
SEMI\000\
NEW\000\
OUT\000\
IN\000\
REPL\000\
IF\000\
THEN\000\
ELSE\000\
EQUAL\000\
FUN\000\
EQUATION\000\
REDUCTION\000\
PREDICATE\000\
PROCESS\000\
SLASH\000\
DOT\000\
EOF\000\
LET\000\
QUERY\000\
BEFORE\000\
PUTBEGIN\000\
NONINTERF\000\
EVENT\000\
NOT\000\
ELIMTRUE\000\
FREE\000\
SUCHTHAT\000\
CLAUSES\000\
RED\000\
EQUIV\000\
EQUIVEQ\000\
WEDGE\000\
DIFF\000\
COLON\000\
NOUNIF\000\
PHASE\000\
AMONG\000\
WEAKSECRET\000\
PARAM\000\
TYPE\000\
SET\000\
FORALL\000\
CONST\000\
INJEVENT\000\
OR\000\
CHANNEL\000\
LETFUN\000\
DEFINE\000\
EXPAND\000\
YIELD\000\
LEQ\000\
PROBA\000\
LBRACE\000\
RBRACE\000\
PROOF\000\
TABLE\000\
INSERT\000\
GET\000\
"
let yynames_block = "\
IDENT\000\
STRING\000\
INT\000\
"
let yyact = [|
(fun _ -> failwith "parser")
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 132 "pitparser.mly"
Options are ignored , they are supported for compatibility with
CryptoVerif only
CryptoVerif only *)
TTypeDecl(_2) :: _5 )
# 851 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 8 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 6 : 'typeidseq) in
let _7 = (Parsing.peek_val __caml_parser_env 3 : 'typeid) in
let _8 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _10 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 136 "pitparser.mly"
( (TFunDecl(_2, _4, _7, _8)) :: _10 )
# 862 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : 'neidentseq) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'typeid) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 138 "pitparser.mly"
( (List.map (fun x -> TConstDecl(x, _4, _5)) _2) @ _7 )
# 872 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : 'forallvartype) in
let _3 = (Parsing.peek_val __caml_parser_env 4 : 'term) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 140 "pitparser.mly"
( (TEquation(_2, _3, _5)) :: _7 )
# 882 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : 'treduc) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 142 "pitparser.mly"
( (TReduc(_2,_3)) :: _5 )
# 891 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 144 "pitparser.mly"
( (TEventDecl(_2, [])) :: _4 )
# 899 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'typeidseq) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 146 "pitparser.mly"
( (TEventDecl(_2, _4)) :: _7 )
# 908 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : 'typeidseq) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 148 "pitparser.mly"
( (TPredDecl(_2, _4, _6)) :: _8 )
# 918 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 150 "pitparser.mly"
( (TPredDecl(_2, [], _3)) :: _5 )
# 927 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'typeidseq) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 152 "pitparser.mly"
( (TTableDecl(_2, _4)) :: _7 )
# 936 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'tprocess) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 154 "pitparser.mly"
( (TPDef(_2,[],_4)) :: _6 )
# 945 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 7 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 5 : 'vartype) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : 'tprocess) in
let _9 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 156 "pitparser.mly"
( (TPDef(_2,_4,_7)) :: _9 )
# 955 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 158 "pitparser.mly"
( (TLetFun(_2,[],_4)) :: _6 )
# 964 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 7 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 5 : 'vartype) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _9 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 160 "pitparser.mly"
( (TLetFun(_2,_4,_7)) :: _9 )
# 974 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 162 "pitparser.mly"
( (TSet(_2,S _4)) :: _6 )
# 983 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : int) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 164 "pitparser.mly"
( (TSet(_2,I _4)) :: _6 )
# 992 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'tfnebindingseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 166 "pitparser.mly"
( (TNoUnif (_2, _4)) :: _6 )
# 1001 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tfnebindingseq) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 168 "pitparser.mly"
( (TNoUnif ([], _2)) :: _4 )
# 1009 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'tqueryseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 170 "pitparser.mly"
( (TQuery(_2,_4)) :: _6 )
# 1018 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tqueryseq) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 172 "pitparser.mly"
( (TQuery([],_2)) :: _4 )
# 1026 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'niseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 174 "pitparser.mly"
( (TNoninterf(_2, _4)) :: _6 )
# 1035 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'niseq) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 176 "pitparser.mly"
( (TNoninterf([], _2)) :: _4 )
# 1043 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 178 "pitparser.mly"
( (TWeaksecret(_2)) :: _4 )
# 1051 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 180 "pitparser.mly"
( (TNot(_2, _4)) :: _6 )
# 1060 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 182 "pitparser.mly"
( (TNot([], _2)) :: _4 )
# 1068 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'neidentseq) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 184 "pitparser.mly"
Supported for compatility with CryptoVerif only
_4 )
# 1077 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 187 "pitparser.mly"
Supported for compatility with CryptoVerif only
_4 )
# 1086 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'proof) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 190 "pitparser.mly"
Supported for compatility with CryptoVerif only
_5 )
# 1095 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 193 "pitparser.mly"
( (TElimtrue (_2,_4)) :: _6 )
# 1104 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 195 "pitparser.mly"
( (TElimtrue ([],_2)) :: _4 )
# 1112 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'neidentseq) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 197 "pitparser.mly"
For compatibility with CryptoVerif , allow
channel c1 ... cn .
as a synonym for
free c1 ... cn : channel .
channel c1...cn.
as a synonym for
free c1...cn:channel. *)
(List.map (fun x -> TFree(x, ("channel", dummy_ext), [])) _2) @ _4 )
# 1124 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : 'neidentseq) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'typeid) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 203 "pitparser.mly"
( (List.map (fun x -> TFree(x, _4, _5)) _2) @ _7 )
# 1134 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tclauses) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 205 "pitparser.mly"
( (TClauses(_2)) :: _3 )
# 1142 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.tdecl list) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 207 "pitparser.mly"
( (TDefine(_2, [], _4)) :: _6 )
# 1151 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 7 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 5 : 'typeidseq) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.tdecl list) in
let _9 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 209 "pitparser.mly"
( (TDefine(_2, _4, _7)) :: _9 )
# 1161 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'typeidseq) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 211 "pitparser.mly"
( (TExpand(_2, _4)) :: _7 )
# 1170 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
Obj.repr(
# 213 "pitparser.mly"
( [] )
# 1176 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.tdecl list) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'tprocess) in
Obj.repr(
# 217 "pitparser.mly"
( _1, _3 )
# 1184 "pitparser.ml"
: Pitptree.tdecl list * Pitptree.tprocess))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 223 "pitparser.mly"
( _1 )
# 1191 "pitparser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 225 "pitparser.mly"
( _1 )
# 1198 "pitparser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 227 "pitparser.mly"
( "*", parse_extent() )
# 1204 "pitparser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 229 "pitparser.mly"
( ".", parse_extent() )
# 1210 "pitparser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'prooftoken) in
Obj.repr(
# 233 "pitparser.mly"
( [_1] )
# 1217 "pitparser.ml"
: 'proofcommand))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 1 : 'prooftoken) in
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'proofcommand) in
Obj.repr(
# 235 "pitparser.mly"
( _1 :: _2 )
# 1225 "pitparser.ml"
: 'proofcommand))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'proofcommand) in
Obj.repr(
# 239 "pitparser.mly"
( [_1] )
# 1232 "pitparser.ml"
: 'proof))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'proofcommand) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'proof) in
Obj.repr(
# 241 "pitparser.mly"
( _1 :: _3 )
# 1240 "pitparser.ml"
: 'proof))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'neidentseq) in
Obj.repr(
# 247 "pitparser.mly"
( _2 )
# 1247 "pitparser.ml"
: 'options))
; (fun __caml_parser_env ->
Obj.repr(
# 249 "pitparser.mly"
( [] )
# 1253 "pitparser.ml"
: 'options))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'neidentseq) in
Obj.repr(
# 253 "pitparser.mly"
( _1 :: _3 )
# 1261 "pitparser.ml"
: 'neidentseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 255 "pitparser.mly"
( [_1] )
# 1268 "pitparser.ml"
: 'neidentseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'typeid) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'nevartype) in
Obj.repr(
# 259 "pitparser.mly"
( (_1,_3)::_5 )
# 1277 "pitparser.ml"
: 'nevartype))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'typeid) in
Obj.repr(
# 262 "pitparser.mly"
( [(_1,_3)] )
# 1285 "pitparser.ml"
: 'nevartype))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'nevartype) in
Obj.repr(
# 266 "pitparser.mly"
( _1 )
# 1292 "pitparser.ml"
: 'vartype))
; (fun __caml_parser_env ->
Obj.repr(
# 268 "pitparser.mly"
( [] )
# 1298 "pitparser.ml"
: 'vartype))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'nevartype) in
Obj.repr(
# 272 "pitparser.mly"
( _2 )
# 1305 "pitparser.ml"
: 'forallvartype))
; (fun __caml_parser_env ->
Obj.repr(
# 274 "pitparser.mly"
( [] )
# 1311 "pitparser.ml"
: 'forallvartype))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 278 "pitparser.mly"
( _1 )
# 1318 "pitparser.ml"
: 'typeid))
; (fun __caml_parser_env ->
Obj.repr(
# 280 "pitparser.mly"
( (* channel is allowed as a type, even though it is also a keyword for the declaration channel c1...cn. *)
"channel", parse_extent() )
# 1325 "pitparser.ml"
: 'typeid))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'netypeidseq) in
Obj.repr(
# 285 "pitparser.mly"
( _1 )
# 1332 "pitparser.ml"
: 'typeidseq))
; (fun __caml_parser_env ->
Obj.repr(
# 287 "pitparser.mly"
( [] )
# 1338 "pitparser.ml"
: 'typeidseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typeid) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'netypeidseq) in
Obj.repr(
# 291 "pitparser.mly"
( _1 :: _3 )
# 1346 "pitparser.ml"
: 'netypeidseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'typeid) in
Obj.repr(
# 293 "pitparser.mly"
( [_1] )
# 1353 "pitparser.ml"
: 'netypeidseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 299 "pitparser.mly"
( PFunApp (_1, _3), parse_extent() )
# 1361 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'term) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'term) in
Obj.repr(
# 301 "pitparser.mly"
( Param.has_choice := true;
PFunApp(("choice", parse_extent()), [_3; _5]), parse_extent() )
# 1370 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 304 "pitparser.mly"
( PIdent (_1), parse_extent() )
# 1377 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 306 "pitparser.mly"
( PFunApp(("=", parse_extent()), [_1; _3]), parse_extent() )
# 1385 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 308 "pitparser.mly"
( PFunApp(("<>", parse_extent()), [_1; _3]), parse_extent() )
# 1393 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'term) in
Obj.repr(
# 310 "pitparser.mly"
( PFunApp(("not", parse_extent()), [_3]), parse_extent() )
# 1400 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 312 "pitparser.mly"
( PFunApp(("||", parse_extent()), [_1; _3]), parse_extent() )
# 1408 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 314 "pitparser.mly"
( PFunApp(("&&", parse_extent()), [_1; _3]), parse_extent() )
# 1416 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 316 "pitparser.mly"
( match _2 with
	    [t] -> t (* Allow parentheses for priorities of infix operators;
	                Tuples cannot have one element. *)
| l -> PTuple (l), parse_extent() )
# 1426 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'netermseq) in
Obj.repr(
# 323 "pitparser.mly"
( _1 :: _3 )
# 1434 "pitparser.ml"
: 'netermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 325 "pitparser.mly"
( [_1] )
# 1441 "pitparser.ml"
: 'netermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'netermseq) in
Obj.repr(
# 329 "pitparser.mly"
( _1 )
# 1448 "pitparser.ml"
: 'termseq))
; (fun __caml_parser_env ->
Obj.repr(
# 331 "pitparser.mly"
( [] )
# 1454 "pitparser.ml"
: 'termseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : 'netermseq) in
Obj.repr(
# 337 "pitparser.mly"
( (_1, Some _4) )
# 1462 "pitparser.ml"
: 'ni))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 339 "pitparser.mly"
( (_1, None) )
# 1469 "pitparser.ml"
: 'ni))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'ni) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'niseq) in
Obj.repr(
# 343 "pitparser.mly"
( _1 :: _3 )
# 1477 "pitparser.ml"
: 'niseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'ni) in
Obj.repr(
# 345 "pitparser.mly"
( [_1] )
# 1484 "pitparser.ml"
: 'niseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tquery) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tqueryseq) in
Obj.repr(
# 351 "pitparser.mly"
( _1 :: _3 )
# 1492 "pitparser.ml"
: 'tqueryseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tquery) in
Obj.repr(
# 353 "pitparser.mly"
( [_1] )
# 1499 "pitparser.ml"
: 'tqueryseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 357 "pitparser.mly"
( PRealQuery(_1) )
# 1506 "pitparser.ml"
: 'tquery))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'neidentseq) in
Obj.repr(
# 359 "pitparser.mly"
( PPutBegin(false, _4) )
# 1513 "pitparser.ml"
: 'tquery))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'neidentseq) in
Obj.repr(
# 361 "pitparser.mly"
( PPutBegin(true, _4) )
# 1520 "pitparser.ml"
: 'tquery))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'gtermseq) in
Obj.repr(
# 365 "pitparser.mly"
( PGFunApp (_1, _3), parse_extent() )
# 1528 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 367 "pitparser.mly"
( PGIdent (_1), parse_extent() )
# 1535 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 5 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'gtermseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 369 "pitparser.mly"
( PGPhase(_1, _3, _6), parse_extent() )
# 1544 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 371 "pitparser.mly"
( PGFunApp(("=", parse_extent()), [_1; _3]), parse_extent() )
# 1552 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 373 "pitparser.mly"
( PGFunApp(("<>", parse_extent()), [_1; _3]), parse_extent() )
# 1560 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'gterm) in
Obj.repr(
# 375 "pitparser.mly"
( PGFunApp(("not", parse_extent()), [_3]), parse_extent() )
# 1567 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 377 "pitparser.mly"
( PGFunApp(("||", parse_extent()), [_1; _3]), parse_extent() )
# 1575 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 379 "pitparser.mly"
( PGFunApp(("&&", parse_extent()), [_1; _3]), parse_extent() )
# 1583 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'gtermseq) in
Obj.repr(
# 381 "pitparser.mly"
( PGFunApp(("event",parse_extent()), _3), parse_extent() )
# 1590 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'gtermseq) in
Obj.repr(
# 383 "pitparser.mly"
( PGFunApp(("inj-event",parse_extent()), _3), parse_extent() )
# 1597 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 385 "pitparser.mly"
( PGFunApp(("==>", parse_extent()), [_1;_3]), parse_extent() )
# 1605 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'gtermseq) in
Obj.repr(
# 387 "pitparser.mly"
( match _2 with
	    [t] -> t (* Allow parentheses for priorities of infix operators;
	                Tuples cannot have one element. *)
| l -> PGTuple (l), parse_extent() )
# 1615 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : 'bindingseq) in
Obj.repr(
# 392 "pitparser.mly"
( PGName (_2, _4), parse_extent() )
# 1623 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 394 "pitparser.mly"
( PGName (_2, []), parse_extent() )
# 1630 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 396 "pitparser.mly"
( PGLet(_2, _4, _6), parse_extent() )
# 1639 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'negtermseq) in
Obj.repr(
# 400 "pitparser.mly"
( _1 :: _3 )
# 1647 "pitparser.ml"
: 'negtermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 402 "pitparser.mly"
( [_1] )
# 1654 "pitparser.ml"
: 'negtermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'negtermseq) in
Obj.repr(
# 406 "pitparser.mly"
( _1 )
# 1661 "pitparser.ml"
: 'gtermseq))
; (fun __caml_parser_env ->
Obj.repr(
# 408 "pitparser.mly"
( [] )
# 1667 "pitparser.ml"
: 'gtermseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : int) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'nesbindingseq) in
Obj.repr(
# 413 "pitparser.mly"
( (("!" ^ (string_of_int (_2)), parse_extent()), _4) :: _6 )
# 1676 "pitparser.ml"
: 'nesbindingseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : int) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 415 "pitparser.mly"
( [(("!" ^ (string_of_int (_2)), parse_extent()), _4)] )
# 1684 "pitparser.ml"
: 'nesbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'nesbindingseq) in
Obj.repr(
# 417 "pitparser.mly"
( (_1, _3) :: _5 )
# 1693 "pitparser.ml"
: 'nesbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 419 "pitparser.mly"
( [(_1, _3)] )
# 1701 "pitparser.ml"
: 'nesbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'nesbindingseq) in
Obj.repr(
# 423 "pitparser.mly"
( _1 )
# 1708 "pitparser.ml"
: 'bindingseq))
; (fun __caml_parser_env ->
Obj.repr(
# 425 "pitparser.mly"
( [] )
# 1714 "pitparser.ml"
: 'bindingseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'gformat) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tfnebindingseq) in
Obj.repr(
# 431 "pitparser.mly"
( BFLet(_2, _4, _6) )
# 1723 "pitparser.ml"
: 'tfnebindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 5 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'gformatseq) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'optphase) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'optint) in
Obj.repr(
# 433 "pitparser.mly"
( BFNoUnif((_1,_3,_5), _6) )
# 1733 "pitparser.ml"
: 'tfnebindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 1 : Pitptree.ident) in
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'optint) in
Obj.repr(
# 435 "pitparser.mly"
( BFNoUnif((_1,[],-1),_2) )
# 1741 "pitparser.ml"
: 'tfnebindingseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 439 "pitparser.mly"
( _2 )
# 1748 "pitparser.ml"
: 'optphase))
; (fun __caml_parser_env ->
Obj.repr(
# 441 "pitparser.mly"
( -1 )
# 1754 "pitparser.ml"
: 'optphase))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 445 "pitparser.mly"
( _2 )
# 1761 "pitparser.ml"
: 'optint))
; (fun __caml_parser_env ->
Obj.repr(
# 447 "pitparser.mly"
( -1 )
# 1767 "pitparser.ml"
: 'optint))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'gformatseq) in
Obj.repr(
# 451 "pitparser.mly"
( PFGFunApp (_1, _3), parse_extent() )
# 1775 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'gformat) in
Obj.repr(
# 453 "pitparser.mly"
( PFGFunApp(("not", parse_extent()), [_3]), parse_extent() )
# 1782 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 455 "pitparser.mly"
( PFGIdent (_1), parse_extent() )
# 1789 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'gformatseq) in
Obj.repr(
# 457 "pitparser.mly"
( match _2 with
	    [t] -> t (* Allow parentheses for priorities of infix operators;
	                Tuples cannot have one element. *)
| l -> PFGTuple (_2), parse_extent() )
# 1799 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : 'fbindingseq) in
Obj.repr(
# 462 "pitparser.mly"
( PFGName (_2, _4), parse_extent() )
# 1807 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 464 "pitparser.mly"
( PFGName (_2, []), parse_extent() )
# 1814 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 466 "pitparser.mly"
( PFGAny (_2), parse_extent() )
# 1821 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'gformat) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'gformat) in
Obj.repr(
# 468 "pitparser.mly"
( PFGLet(_2, _4, _6), parse_extent() )
# 1830 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gformat) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'negformatseq) in
Obj.repr(
# 473 "pitparser.mly"
( _1 :: _3 )
# 1838 "pitparser.ml"
: 'negformatseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'gformat) in
Obj.repr(
# 475 "pitparser.mly"
( [_1] )
# 1845 "pitparser.ml"
: 'negformatseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'negformatseq) in
Obj.repr(
# 479 "pitparser.mly"
( _1 )
# 1852 "pitparser.ml"
: 'gformatseq))
; (fun __caml_parser_env ->
Obj.repr(
# 481 "pitparser.mly"
( [] )
# 1858 "pitparser.ml"
: 'gformatseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : int) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'gformat) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'fnesbindingseq) in
Obj.repr(
# 486 "pitparser.mly"
( (("!" ^ (string_of_int (_2)), parse_extent()), _4) :: _6 )
# 1867 "pitparser.ml"
: 'fnesbindingseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : int) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'gformat) in
Obj.repr(
# 488 "pitparser.mly"
( [(("!" ^ (string_of_int (_2)), parse_extent()), _4)] )
# 1875 "pitparser.ml"
: 'fnesbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'gformat) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'fnesbindingseq) in
Obj.repr(
# 490 "pitparser.mly"
( (_1, _3) :: _5 )
# 1884 "pitparser.ml"
: 'fnesbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gformat) in
Obj.repr(
# 492 "pitparser.mly"
( [(_1, _3)] )
# 1892 "pitparser.ml"
: 'fnesbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fnesbindingseq) in
Obj.repr(
# 496 "pitparser.mly"
( _1 )
# 1899 "pitparser.ml"
: 'fbindingseq))
; (fun __caml_parser_env ->
Obj.repr(
# 498 "pitparser.mly"
( [] )
# 1905 "pitparser.ml"
: 'fbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 5 : 'forallvartype) in
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'term) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'treduc) in
Obj.repr(
# 504 "pitparser.mly"
( (_1,_2,_4) :: _6 )
# 1915 "pitparser.ml"
: 'treduc))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : 'forallvartype) in
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 506 "pitparser.mly"
( [_1,_2,_4] )
# 1924 "pitparser.ml"
: 'treduc))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 512 "pitparser.mly"
( PClause(_1,_3) )
# 1932 "pitparser.ml"
: 'tclause))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 514 "pitparser.mly"
( PFact(_1) )
# 1939 "pitparser.ml"
: 'tclause))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 516 "pitparser.mly"
( PEquiv(_1,_3,true) )
# 1947 "pitparser.ml"
: 'tclause))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 518 "pitparser.mly"
( PEquiv(_1,_3,false) )
# 1955 "pitparser.ml"
: 'tclause))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : 'forallvartype) in
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tclause) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'tclauses) in
Obj.repr(
# 522 "pitparser.mly"
( (_1,_2) :: _4 )
# 1964 "pitparser.ml"
: 'tclauses))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'forallvartype) in
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tclause) in
Obj.repr(
# 524 "pitparser.mly"
( [_1,_2] )
# 1972 "pitparser.ml"
: 'tclauses))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tprocess) in
Obj.repr(
# 530 "pitparser.mly"
( _2 )
# 1979 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 532 "pitparser.mly"
( PLetDef (_1,[]) )
# 1986 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'ptermseq) in
Obj.repr(
# 534 "pitparser.mly"
( PLetDef (_1, _3) )
# 1994 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 536 "pitparser.mly"
( PRepl _2 )
# 2001 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : Pitptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 538 "pitparser.mly"
    ( (* For convergence with CryptoVerif, we allow an identifier (bound on the
         number of copies) after a replication; it is simply ignored in ProVerif. *)
PRepl _5 )
# 2011 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 541 "pitparser.mly"
( let x = _1 in
if x = 0 then PNil else
input_error ("The only integer in a process is 0 for the nil process") (parse_extent()) )
# 2020 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
Obj.repr(
# 545 "pitparser.mly"
    ( (* For convergence with CryptoVerif, we allow yield instead of 0 *)
PNil )
# 2027 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'typeid) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 548 "pitparser.mly"
( PRestr(_2, _4, _6) )
# 2036 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'tprocess) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 550 "pitparser.mly"
( PTest(_2,_4,_6) )
# 2045 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 552 "pitparser.mly"
( PTest(_2,_4,PNil) )
# 2053 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'tpattern) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : 'opttprocess) in
Obj.repr(
# 554 "pitparser.mly"
( PInput(_3,_5,_7) )
# 2062 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : 'opttprocess) in
Obj.repr(
# 556 "pitparser.mly"
( POutput(_3,_5,_7) )
# 2071 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'tpattern) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 558 "pitparser.mly"
( PLet(_2,_4,_6,PNil) )
# 2080 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tpattern) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 560 "pitparser.mly"
( PLet(_2,_4,PNil,PNil) )
# 2088 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : 'tpattern) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : 'tprocess) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 562 "pitparser.mly"
( PLet(_2,_4,_6,_8) )
# 2098 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 564 "pitparser.mly"
( PLetFilter(_2,_4,_6,PNil) )
# 2107 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 566 "pitparser.mly"
( PLetFilter(_2,_4,PNil,PNil) )
# 2115 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : 'tprocess) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 568 "pitparser.mly"
( (* Approximating the else clause with a parallel composition
is not correct for trace reconstruction *)
PLetFilter(_2,_4,_6,_8) )
# 2127 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'ptermseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'opttprocess) in
Obj.repr(
# 572 "pitparser.mly"
( PInsert(_2, _4, _6) )
# 2136 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'tpatternseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'optinprocess) in
Obj.repr(
# 574 "pitparser.mly"
( PGet(_2, _4, (PPIdent ("true", parse_extent()), parse_extent()), _6) )
# 2145 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : 'tpatternseq) in
let _7 = (Parsing.peek_val __caml_parser_env 1 : 'pterm) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : 'optinprocess) in
Obj.repr(
# 576 "pitparser.mly"
( PGet(_2, _4, _7, _8) )
# 2155 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tprocess) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 578 "pitparser.mly"
( PPar(_1,_3) )
# 2163 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'ptermseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'opttprocess) in
Obj.repr(
# 580 "pitparser.mly"
( PEvent(_2, _4, _6) )
# 2172 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : int) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'opttprocess) in
Obj.repr(
# 582 "pitparser.mly"
( PPhase(_2, _3) )
# 2180 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 586 "pitparser.mly"
( _2 )
# 2187 "pitparser.ml"
: 'opttprocess))
; (fun __caml_parser_env ->
Obj.repr(
# 588 "pitparser.mly"
( PNil )
# 2193 "pitparser.ml"
: 'opttprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 592 "pitparser.mly"
( _2 )
# 2200 "pitparser.ml"
: 'optinprocess))
; (fun __caml_parser_env ->
Obj.repr(
# 594 "pitparser.mly"
( PNil )
# 2206 "pitparser.ml"
: 'optinprocess))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 598 "pitparser.mly"
( PPatVar(_1, None) )
# 2213 "pitparser.ml"
: 'tpattern))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'typeid) in
Obj.repr(
# 600 "pitparser.mly"
( PPatVar(_1, Some _3) )
# 2221 "pitparser.ml"
: 'tpattern))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tpatternseq) in
Obj.repr(
# 602 "pitparser.mly"
( match _2 with
	    [t] -> t (* Allow parentheses for priorities of infix operators;
	                Tuples cannot have one element. *)
| l -> PPatTuple(_2) )
# 2231 "pitparser.ml"
: 'tpattern))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'tpatternseq) in
Obj.repr(
# 607 "pitparser.mly"
( PPatFunApp(_1,_3) )
# 2239 "pitparser.ml"
: 'tpattern))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 609 "pitparser.mly"
( PPatEqual(_2) )
# 2246 "pitparser.ml"
: 'tpattern))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tpattern) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'nepatternseq) in
Obj.repr(
# 613 "pitparser.mly"
( _1 :: _3 )
# 2254 "pitparser.ml"
: 'nepatternseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tpattern) in
Obj.repr(
# 615 "pitparser.mly"
( [_1] )
# 2261 "pitparser.ml"
: 'nepatternseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'nepatternseq) in
Obj.repr(
# 619 "pitparser.mly"
( _1 )
# 2268 "pitparser.ml"
: 'tpatternseq))
; (fun __caml_parser_env ->
Obj.repr(
# 621 "pitparser.mly"
( [] )
# 2274 "pitparser.ml"
: 'tpatternseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'ptermseq) in
Obj.repr(
# 627 "pitparser.mly"
( PPFunApp (_1, _3), parse_extent() )
# 2282 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'pterm) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'pterm) in
Obj.repr(
# 629 "pitparser.mly"
( Param.has_choice := true;
PPFunApp(("choice", parse_extent()), [_3; _5]), parse_extent() )
# 2291 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 632 "pitparser.mly"
( PPIdent (_1), parse_extent() )
# 2298 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 634 "pitparser.mly"
( PPFunApp(("=", parse_extent()), [_1; _3]), parse_extent() )
# 2306 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 636 "pitparser.mly"
( PPFunApp(("<>", parse_extent()), [_1; _3]), parse_extent() )
# 2314 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'pterm) in
Obj.repr(
# 638 "pitparser.mly"
( PPFunApp(("not", parse_extent()), [_3]), parse_extent() )
# 2321 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 640 "pitparser.mly"
( PPFunApp(("||", parse_extent()), [_1; _3]), parse_extent() )
# 2329 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 642 "pitparser.mly"
( PPFunApp(("&&", parse_extent()), [_1; _3]), parse_extent() )
# 2337 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'typeid) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 644 "pitparser.mly"
( PPRestr(_2, _4, _6), parse_extent() )
# 2346 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 646 "pitparser.mly"
( PPTest(_2,_4,_6), parse_extent() )
# 2355 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'tpattern) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 648 "pitparser.mly"
( PPLetIn(_2,_4,_6), parse_extent() )
# 2364 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : 'tpattern) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 650 "pitparser.mly"
( PPLet(_2,_4,_6,_8), parse_extent() )
# 2374 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 652 "pitparser.mly"
( PPLetFilter(_2,_4,_6,_8), parse_extent() )
# 2384 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ptermseq) in
    Obj.repr(
# 654 "pitparser.mly"
        ( match _2 with
            [t] -> t (* Allow parentheses for priorities of infix operators;
                        Tuples cannot have one element. *)
          | l -> PPTuple (l), parse_extent() )
# 2394 "pitparser.ml"
               : 'pterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'neptermseq) in
Obj.repr(
# 661 "pitparser.mly"
( _1 :: _3 )
# 2402 "pitparser.ml"
: 'neptermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 663 "pitparser.mly"
( [_1] )
# 2409 "pitparser.ml"
: 'neptermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'neptermseq) in
Obj.repr(
# 667 "pitparser.mly"
( _1 )
# 2416 "pitparser.ml"
: 'ptermseq))
; (fun __caml_parser_env ->
Obj.repr(
# 669 "pitparser.mly"
( [] )
# 2422 "pitparser.ml"
: 'ptermseq))
(* Entry all *)
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
(* Entry lib *)
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
|]
(* Parser-table record handed to [Parsing.yyparse]: bundles the semantic
   action array [yyact], the token translation tables, and the LALR(1)
   automaton tables emitted by ocamlyacc from pitparser.mly.
   Generated code — do not edit by hand. *)
let yytables =
{ Parsing.actions=yyact;
Parsing.transl_const=yytransl_const;
Parsing.transl_block=yytransl_block;
Parsing.lhs=yylhs;
Parsing.len=yylen;
Parsing.defred=yydefred;
Parsing.dgoto=yydgoto;
Parsing.sindex=yysindex;
Parsing.rindex=yyrindex;
Parsing.gindex=yygindex;
Parsing.tablesize=yytablesize;
Parsing.table=yytable;
Parsing.check=yycheck;
Parsing.error_function=parse_error;
Parsing.names_const=yynames_const;
Parsing.names_block=yynames_block }
(* Generated entry point #1 (start symbol [all]): runs the parser on
   [lexbuf] using [lexfun] to produce tokens, yielding the declaration
   list together with the main process. *)
let all (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
(Parsing.yyparse yytables 1 lexfun lexbuf : Pitptree.tdecl list * Pitptree.tprocess)
(* Generated entry point #2 (start symbol [lib]): like [all] but yields
   only the declaration list, with no process. *)
let lib (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
(Parsing.yyparse yytables 2 lexfun lexbuf : Pitptree.tdecl list)
| null | https://raw.githubusercontent.com/tari3x/csec-modex/5ab2aa18ef308b4d18ac479e5ab14476328a6a50/deps/proverif1.84/src/pitparser.ml | ocaml | CHOICE
STAR
COMMA
LPAREN
RPAREN
LBRACKET
RBRACKET
SEMI
NEW
OUT
IN
REPL
IF
THEN
ELSE
FUN
EQUATION
REDUCTION
PREDICATE
PROCESS
SLASH
LET
QUERY
EVENT
NOT
FREE
RED
WEDGE
COLON
PHASE
WEAKSECRET
TYPE
SET
FORALL
INJEVENT
OR
CHANNEL
DEFINE
EXPAND
YIELD
LBRACE
PROOF
TABLE
INSERT
GET
IDENT
STRING
INT
channel is allowed as a type, even though it is also a keyword for the declaration channel c1...cn.
Approximating the else clause with a parallel composition
is not correct for trace reconstruction
Entry all
Entry lib | type token =
| CHOICE
| STAR
| COMMA
| LPAREN
| RPAREN
| LBRACKET
| RBRACKET
| BAR
| SEMI
| NEW
| OUT
| IN
| IDENT of (Pitptree.ident)
| STRING of (Pitptree.ident)
| INT of (int)
| REPL
| IF
| THEN
| ELSE
| EQUAL
| FUN
| EQUATION
| REDUCTION
| PREDICATE
| PROCESS
| SLASH
| DOT
| EOF
| LET
| QUERY
| BEFORE
| PUTBEGIN
| NONINTERF
| EVENT
| NOT
| ELIMTRUE
| FREE
| SUCHTHAT
| CLAUSES
| RED
| EQUIV
| EQUIVEQ
| WEDGE
| DIFF
| COLON
| NOUNIF
| PHASE
| AMONG
| WEAKSECRET
| PARAM
| TYPE
| SET
| FORALL
| CONST
| INJEVENT
| OR
| CHANNEL
| LETFUN
| DEFINE
| EXPAND
| YIELD
| LEQ
| PROBA
| LBRACE
| RBRACE
| PROOF
| TABLE
| INSERT
| GET
open Parsing;;
let _ = parse_error;;
# 2 "pitparser.mly"
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* and *
* *
* Copyright ( C ) INRIA , LIENS , 2000 - 2009 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* Bruno Blanchet and Xavier Allamigeon *
* *
* Copyright (C) INRIA, LIENS, MPII 2000-2009 *
* *
*************************************************************)
This program is free software ; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU General Public License for more details ( in file LICENSE ) .
You should have received a copy of the GNU General Public License
along with this program ; if not , write to the Free Software
Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details (in file LICENSE).
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
# 31 "pitparser.mly"
open Parsing_helper
open Ptree
open Pitptree
exception Syntax
# 110 "pitparser.ml"
let yytransl_const = [|
BAR
EQUAL
DOT
EOF
BEFORE
NONINTERF
ELIMTRUE
SUCHTHAT
CLAUSES
EQUIV
EQUIVEQ
DIFF
NOUNIF
AMONG
PARAM
CONST
LETFUN
LEQ
PROBA
RBRACE
0|]
let yytransl_block = [|
0|]
let yylhs = "\255\255\
\002\000\002\000\002\000\002\000\002\000\002\000\002\000\002\000\
\002\000\002\000\002\000\002\000\002\000\002\000\002\000\002\000\
\002\000\002\000\002\000\002\000\002\000\002\000\002\000\002\000\
\002\000\002\000\002\000\002\000\002\000\002\000\002\000\002\000\
\002\000\002\000\002\000\002\000\002\000\001\000\020\000\020\000\
\020\000\020\000\021\000\021\000\018\000\018\000\003\000\003\000\
\006\000\006\000\013\000\013\000\011\000\011\000\007\000\007\000\
\005\000\005\000\004\000\004\000\022\000\022\000\008\000\008\000\
\008\000\008\000\008\000\008\000\008\000\008\000\008\000\024\000\
\024\000\023\000\023\000\025\000\025\000\016\000\016\000\015\000\
\015\000\026\000\026\000\026\000\017\000\017\000\017\000\017\000\
\017\000\017\000\017\000\017\000\017\000\017\000\017\000\017\000\
\017\000\017\000\017\000\029\000\029\000\027\000\027\000\030\000\
\030\000\030\000\030\000\028\000\028\000\014\000\014\000\014\000\
\033\000\033\000\034\000\034\000\031\000\031\000\031\000\031\000\
\031\000\031\000\031\000\031\000\036\000\036\000\032\000\032\000\
\037\000\037\000\037\000\037\000\035\000\035\000\009\000\009\000\
\038\000\038\000\038\000\038\000\019\000\019\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\010\000\010\000\
\010\000\010\000\010\000\010\000\010\000\010\000\041\000\041\000\
\043\000\043\000\040\000\040\000\040\000\040\000\040\000\044\000\
\044\000\042\000\042\000\012\000\012\000\012\000\012\000\012\000\
\012\000\012\000\012\000\012\000\012\000\012\000\012\000\012\000\
\012\000\045\000\045\000\039\000\039\000\000\000\000\000"
let yylen = "\002\000\
\005\000\010\000\007\000\007\000\005\000\004\000\007\000\008\000\
\005\000\007\000\006\000\009\000\006\000\009\000\006\000\006\000\
\006\000\004\000\006\000\004\000\006\000\004\000\004\000\006\000\
\004\000\004\000\004\000\005\000\006\000\004\000\004\000\007\000\
\003\000\006\000\009\000\007\000\000\000\004\000\001\000\001\000\
\001\000\001\000\001\000\002\000\001\000\003\000\003\000\000\000\
\003\000\001\000\005\000\003\000\001\000\000\000\003\000\000\000\
\001\000\001\000\001\000\000\000\003\000\001\000\004\000\006\000\
\001\000\003\000\003\000\004\000\003\000\003\000\003\000\003\000\
\001\000\001\000\000\000\005\000\001\000\003\000\001\000\003\000\
\001\000\001\000\004\000\004\000\004\000\001\000\006\000\003\000\
\003\000\004\000\003\000\003\000\004\000\004\000\003\000\003\000\
\005\000\002\000\006\000\003\000\001\000\001\000\000\000\006\000\
\004\000\005\000\003\000\001\000\000\000\006\000\006\000\002\000\
\002\000\000\000\002\000\000\000\004\000\004\000\001\000\003\000\
\005\000\002\000\002\000\006\000\003\000\001\000\001\000\000\000\
\006\000\004\000\005\000\003\000\001\000\000\000\006\000\004\000\
\003\000\001\000\003\000\003\000\004\000\003\000\003\000\001\000\
\004\000\002\000\005\000\001\000\001\000\006\000\006\000\004\000\
\007\000\007\000\006\000\004\000\008\000\006\000\004\000\008\000\
\006\000\006\000\008\000\003\000\006\000\003\000\002\000\000\000\
\002\000\000\000\001\000\003\000\003\000\004\000\002\000\003\000\
\001\000\001\000\000\000\004\000\006\000\001\000\003\000\003\000\
\004\000\003\000\003\000\006\000\006\000\006\000\008\000\008\000\
\003\000\003\000\001\000\001\000\000\000\002\000\002\000"
let yydefred = "\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\198\000\000\000\199\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\102\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\074\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\033\000\000\000\000\000\
\112\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\041\000\039\000\040\000\042\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\148\000\000\000\000\000\
\000\000\000\000\000\000\149\000\000\000\000\000\000\000\057\000\
\058\000\000\000\000\000\059\000\055\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\053\000\000\000\000\000\096\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\020\000\000\000\000\000\000\000\089\000\000\000\
\080\000\000\000\000\000\000\000\022\000\078\000\000\000\006\000\
\000\000\025\000\000\000\000\000\071\000\000\000\000\000\000\000\
\030\000\000\000\067\000\000\000\000\000\049\000\000\000\000\000\
\000\000\000\000\000\000\142\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\127\000\115\000\000\000\000\000\
\000\000\018\000\023\000\026\000\000\000\000\000\000\000\000\000\
\031\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\027\000\000\000\044\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\038\000\000\000\000\000\000\000\
\000\000\047\000\005\000\000\000\009\000\000\000\000\000\100\000\
\000\000\000\000\000\000\108\000\000\000\000\000\000\000\083\000\
\084\000\093\000\090\000\094\000\000\000\000\000\000\000\000\000\
\000\000\000\000\072\000\063\000\068\000\000\000\000\000\000\000\
\000\000\000\000\141\000\123\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\001\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\196\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\028\000\046\000\000\000\143\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\178\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\166\000\000\000\000\000\000\000\000\000\061\000\000\000\
\000\000\000\000\000\000\011\000\000\000\000\000\097\000\000\000\
\051\000\000\000\019\000\076\000\021\000\000\000\024\000\000\000\
\029\000\000\000\120\000\000\000\000\000\000\000\000\000\125\000\
\000\000\000\000\000\000\017\000\015\000\016\000\000\000\000\000\
\000\000\000\000\193\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\013\000\000\000\184\000\000\000\000\000\034\000\
\000\000\000\000\000\000\000\000\000\000\145\000\000\000\000\000\
\000\000\000\000\173\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\004\000\135\000\000\000\000\000\
\000\000\000\000\087\000\000\000\007\000\064\000\032\000\000\000\
\000\000\000\000\133\000\117\000\000\000\118\000\113\000\111\000\
\110\000\003\000\000\000\000\000\194\000\000\000\180\000\000\000\
\000\000\000\000\185\000\000\000\036\000\010\000\000\000\000\000\
\000\000\000\000\000\000\172\000\176\000\174\000\000\000\000\000\
\000\000\000\000\000\000\000\000\008\000\000\000\000\000\000\000\
\000\000\000\000\121\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\165\000\161\000\000\000\000\000\162\000\000\000\012\000\
\106\000\000\000\000\000\000\000\124\000\014\000\181\000\000\000\
\000\000\000\000\000\000\035\000\154\000\153\000\000\000\000\000\
\000\000\000\000\002\000\104\000\000\000\000\000\000\000\000\000\
\000\000\000\000\163\000\131\000\000\000\000\000\000\000\129\000"
let yydgoto = "\003\000\
\029\000\030\000\092\000\186\000\187\000\065\000\035\000\127\000\
\036\000\183\000\196\000\100\001\197\000\071\000\048\000\053\000\
\098\000\165\000\067\000\166\000\167\000\188\000\128\000\129\000\
\054\000\050\000\099\000\059\001\100\000\060\001\251\000\252\000\
\170\001\145\000\226\001\253\000\227\001\141\000\101\001\128\001\
\138\001\129\001\030\002\130\001\102\001"
let yysindex = "\243\000\
\046\002\046\002\000\000\016\255\046\255\046\255\138\255\144\255\
\151\001\179\255\195\255\229\001\230\000\252\255\046\255\173\255\
\032\000\252\255\057\000\079\000\252\255\252\255\091\000\105\000\
\126\000\138\000\040\255\161\000\000\000\156\255\000\000\186\000\
\170\000\170\001\170\001\191\000\079\001\032\255\251\001\197\000\
\036\255\225\000\251\254\245\000\015\001\020\001\195\000\001\001\
\082\001\029\001\192\255\033\001\024\001\058\001\151\255\055\001\
\034\002\072\001\170\001\041\255\065\001\154\002\070\001\092\001\
\066\001\170\001\046\002\033\255\101\001\108\001\093\001\094\001\
\114\001\191\000\115\001\084\001\118\001\048\000\012\255\143\001\
\122\001\244\255\149\001\194\255\020\255\119\001\159\001\165\001\
\165\002\182\002\252\255\146\001\020\255\150\001\170\000\194\255\
\166\001\028\255\171\001\000\000\172\001\251\001\020\255\161\001\
\142\001\152\001\251\001\251\001\251\001\219\001\046\002\251\001\
\251\001\251\001\251\001\251\001\219\001\187\001\185\001\046\002\
\185\001\020\255\046\002\251\001\046\002\170\001\141\255\207\001\
\000\000\170\001\170\001\170\001\046\002\170\001\170\001\170\001\
\170\001\252\255\020\255\125\002\023\255\000\000\255\002\201\001\
\000\000\197\001\202\255\046\002\046\002\046\002\192\001\100\001\
\020\255\046\002\170\000\245\002\020\255\046\002\020\255\046\002\
\000\000\000\000\000\000\000\000\156\001\244\255\213\001\020\255\
\194\255\212\001\222\001\224\001\226\001\000\000\076\001\245\002\
\075\000\221\001\220\001\000\000\225\001\227\001\014\000\000\000\
\000\000\241\001\248\001\000\000\000\000\170\001\170\001\253\001\
\046\002\005\002\046\002\007\002\000\000\145\255\251\001\000\000\
\165\000\009\002\255\001\251\001\252\255\252\255\013\002\221\255\
\020\002\238\001\000\000\228\001\193\002\000\255\000\000\193\002\
\000\000\170\001\235\001\011\002\000\000\000\000\026\002\000\000\
\149\002\000\000\208\000\170\001\000\000\029\002\134\255\254\001\
\000\000\031\000\000\000\199\002\157\002\000\000\191\000\170\001\
\170\001\170\001\046\255\000\000\038\002\255\002\042\002\039\002\
\043\002\054\002\059\002\037\002\000\000\000\000\255\002\035\255\
\036\002\000\000\000\000\000\000\046\002\044\002\056\002\191\000\
\000\000\061\002\081\002\245\002\073\002\084\002\245\002\075\000\
\106\002\163\002\088\002\050\002\113\002\000\000\046\002\000\000\
\244\255\121\002\251\000\083\002\245\002\245\002\245\002\013\255\
\131\002\116\002\128\000\058\255\245\002\091\002\123\002\137\002\
\143\002\142\002\150\002\194\255\000\000\109\002\020\255\143\000\
\099\255\000\000\000\000\191\000\000\000\135\002\046\002\000\000\
\136\002\146\002\166\002\000\000\111\002\170\000\014\255\000\000\
\000\000\000\000\000\000\000\000\046\002\167\002\046\002\151\002\
\046\002\170\001\000\000\000\000\000\000\046\002\155\002\199\002\
\199\002\199\002\000\000\000\000\181\002\164\002\255\002\169\002\
\255\002\255\002\141\002\186\002\046\002\000\000\046\002\046\002\
\176\002\173\002\245\002\003\001\209\002\000\000\171\002\245\002\
\120\002\185\002\206\002\245\002\245\002\046\002\245\002\245\002\
\245\002\170\002\046\002\200\002\000\000\000\000\203\002\000\000\
\020\255\007\001\010\001\223\002\218\002\194\255\090\255\229\002\
\233\002\000\000\128\000\020\255\215\000\245\002\245\002\245\002\
\194\255\000\000\245\002\128\000\131\002\020\255\000\000\046\002\
\046\255\207\002\194\255\000\000\251\001\219\002\000\000\225\002\
\000\000\251\001\000\000\000\000\000\000\046\002\000\000\234\001\
\000\000\046\002\000\000\255\000\240\002\255\002\242\002\000\000\
\235\002\226\002\202\255\000\000\000\000\000\000\046\002\245\002\
\064\001\245\002\000\000\020\255\246\002\245\002\245\002\245\002\
\177\255\210\002\000\000\103\000\000\000\201\002\046\002\000\000\
\046\002\046\002\251\002\245\002\128\000\000\000\194\255\056\000\
\020\255\128\000\000\000\000\003\255\001\120\255\004\002\002\003\
\131\002\003\003\005\003\191\000\000\000\000\000\046\002\191\255\
\002\002\251\001\000\000\082\001\000\000\000\000\000\000\247\002\
\254\002\007\003\000\000\000\000\009\003\000\000\000\000\000\000\
\000\000\000\000\168\002\245\002\000\000\006\003\000\000\147\001\
\078\002\080\002\000\000\211\002\000\000\000\000\194\255\017\001\
\019\003\131\002\194\255\000\000\000\000\000\000\194\255\194\255\
\143\002\143\002\176\255\004\003\000\000\046\002\165\000\030\002\
\255\002\014\003\000\000\255\002\046\002\249\001\245\002\245\002\
\245\002\245\002\046\002\131\002\143\002\143\002\131\002\081\000\
\180\000\000\000\000\000\194\255\245\002\000\000\046\002\000\000\
\000\000\165\000\020\003\255\002\000\000\000\000\000\000\201\002\
\201\002\087\002\105\002\000\000\000\000\000\000\194\255\194\255\
\131\002\089\002\000\000\000\000\255\000\027\003\245\002\245\002\
\131\002\131\002\000\000\000\000\255\000\201\002\201\002\000\000"
let yyrindex = "\000\000\
\012\003\038\004\000\000\000\000\214\001\214\001\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\214\001\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\018\003\018\003\000\000\034\003\000\000\
\010\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\216\255\022\003\146\255\000\000\000\000\033\003\000\000\000\000\
\000\000\000\000\036\003\174\002\000\000\000\000\000\000\238\000\
\000\000\000\000\002\000\037\003\000\000\000\000\000\000\000\000\
\000\000\018\003\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\042\003\000\000\000\000\091\001\
\000\000\000\000\000\000\000\000\042\003\000\000\058\003\000\000\
\096\001\060\003\000\000\000\000\145\001\034\003\000\000\000\000\
\000\000\000\000\034\003\000\000\034\003\000\000\002\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\002\000\
\000\000\042\003\002\000\000\000\002\000\000\000\062\003\000\000\
\000\000\036\003\000\000\000\000\002\000\000\000\000\000\000\000\
\000\000\000\000\000\000\228\255\000\000\000\000\063\003\000\000\
\000\000\000\000\000\000\002\000\002\000\002\000\000\000\000\000\
\000\000\002\000\058\003\000\000\042\003\008\003\042\003\002\000\
\000\000\000\000\000\000\000\000\000\000\009\255\010\003\042\003\
\000\000\000\000\000\000\000\000\035\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\064\003\000\000\000\000\000\000\000\000\000\000\
\002\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\
\066\003\000\000\106\255\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\226\255\023\003\083\255\000\000\013\003\
\000\000\000\000\146\255\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\153\001\
\000\000\204\001\000\000\110\002\000\000\000\000\018\003\000\000\
\000\000\000\000\214\001\000\000\000\000\063\003\000\000\049\003\
\000\000\000\000\065\003\000\000\000\000\000\000\000\000\037\003\
\000\000\000\000\000\000\000\000\002\000\000\000\000\000\018\003\
\000\000\000\000\000\000\070\003\000\000\078\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\070\003\035\000\
\082\000\000\000\071\003\051\003\000\000\000\000\000\000\000\000\
\053\000\000\000\000\000\000\000\000\000\000\000\000\000\051\255\
\024\255\000\000\000\000\018\003\000\000\000\000\002\000\000\000\
\000\000\000\000\000\000\000\000\194\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\002\000\000\000\002\000\000\000\
\002\000\000\000\000\000\000\000\000\000\002\000\000\000\233\255\
\240\255\243\255\000\000\000\000\000\000\050\003\063\003\000\000\
\000\000\000\000\206\000\000\000\002\000\000\000\002\000\002\000\
\000\000\000\000\000\000\072\003\000\000\000\000\000\000\070\003\
\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\
\000\000\000\000\002\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\218\000\073\003\
\000\000\000\000\071\003\000\000\124\001\000\000\000\000\070\003\
\000\000\000\000\070\003\071\003\097\000\000\000\000\000\002\000\
\214\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\002\000\000\000\000\000\
\000\000\002\000\000\000\074\003\000\000\000\000\000\000\000\000\
\000\000\037\003\000\000\000\000\000\000\000\000\002\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\023\000\000\000\200\000\000\000\217\000\008\003\000\000\
\002\000\002\000\000\000\000\000\000\000\000\000\000\000\019\000\
\000\000\000\000\000\000\000\000\165\255\130\000\135\000\000\000\
\117\000\000\000\000\000\018\003\000\000\000\000\002\000\000\000\
\075\003\000\000\000\000\039\003\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\145\000\000\000\000\000\000\000\000\000\000\000\000\000\
\053\000\053\000\150\000\000\000\000\000\002\000\000\000\076\003\
\000\000\000\000\000\000\000\000\002\000\000\000\000\000\000\000\
\000\000\000\000\002\000\160\000\053\000\053\000\166\000\072\000\
\101\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\
\000\000\000\000\077\003\000\000\000\000\000\000\000\000\234\000\
\013\001\000\000\107\000\000\000\000\000\000\000\000\000\000\000\
\179\000\150\000\000\000\000\000\000\000\078\003\000\000\000\000\
\190\000\221\000\000\000\000\000\000\000\036\001\063\001\000\000"
let yygindex = "\000\000\
\000\000\254\255\220\255\188\255\158\255\042\000\007\000\033\000\
\190\002\168\255\181\003\018\000\250\255\122\255\178\000\021\002\
\012\000\061\003\100\003\000\000\178\003\043\003\215\003\175\000\
\000\000\000\000\073\001\000\000\148\003\107\254\048\255\025\255\
\000\000\178\002\000\000\011\003\134\254\000\000\236\254\088\255\
\055\000\221\254\045\002\147\002\172\002"
let yytablesize = 1125
let yytable = "\031\000\
\094\000\037\000\047\000\052\000\203\000\056\000\063\000\198\000\
\039\001\070\000\124\001\034\000\001\001\045\001\085\001\157\000\
\031\001\043\000\152\000\112\000\049\000\066\000\183\000\057\000\
\194\000\154\001\087\000\105\000\032\000\136\000\199\000\243\000\
\184\000\112\000\144\000\095\000\143\000\151\000\143\000\102\000\
\239\000\114\000\115\000\113\000\130\000\062\000\092\001\112\000\
\106\000\244\000\136\000\096\000\168\000\223\000\008\001\114\000\
\115\000\113\000\144\000\073\000\144\000\131\001\076\000\077\000\
\142\000\066\000\089\000\090\000\116\000\114\000\115\000\158\000\
\043\000\125\001\158\000\185\000\103\000\182\000\066\000\103\000\
\027\001\146\000\116\000\181\001\103\000\092\000\033\001\092\000\
\019\001\092\000\021\001\092\000\066\000\131\001\092\000\204\001\
\164\000\033\000\140\000\026\001\155\000\132\001\082\000\107\001\
\211\001\066\000\190\000\145\001\211\000\092\000\052\000\165\001\
\092\000\033\002\052\000\208\001\167\000\221\000\210\001\208\000\
\224\000\049\000\226\000\212\000\213\000\214\000\215\000\216\000\
\049\000\159\000\233\000\255\001\192\000\201\001\156\000\225\000\
\167\001\092\000\077\001\109\001\052\002\135\000\052\000\228\000\
\147\000\002\001\003\001\004\001\077\000\170\000\037\000\009\001\
\044\001\132\000\122\000\020\001\038\000\022\001\227\000\150\000\
\132\000\111\001\112\001\231\000\232\000\151\000\234\000\235\000\
\236\000\237\000\038\001\055\001\077\000\018\001\113\001\134\000\
\135\000\123\000\169\000\238\000\084\000\243\001\134\000\135\000\
\172\000\068\000\060\002\028\002\136\000\160\000\051\001\051\000\
\053\001\034\001\064\002\136\000\109\001\169\000\044\001\187\000\
\069\000\052\000\079\001\170\000\171\000\172\000\173\000\055\000\
\174\000\175\000\176\000\141\001\029\002\229\001\000\001\063\001\
\186\000\006\002\111\001\112\001\157\000\177\000\048\001\049\001\
\082\000\067\001\178\000\097\001\088\000\069\000\088\000\113\001\
\088\000\188\000\088\000\103\000\138\000\088\000\118\000\179\000\
\112\000\137\000\082\000\001\000\002\000\161\000\064\001\065\001\
\139\000\066\000\113\000\140\000\088\000\180\000\138\000\088\000\
\162\000\163\000\094\001\137\000\181\000\182\000\114\000\115\000\
\064\000\106\001\139\000\088\000\189\000\140\000\164\000\146\001\
\080\001\081\001\082\001\116\000\117\001\044\001\195\001\152\000\
\088\000\183\000\037\000\183\000\249\001\183\000\183\000\210\000\
\105\001\205\001\183\000\192\000\233\001\200\001\217\000\144\000\
\183\000\183\000\144\000\212\001\072\000\152\000\122\001\123\001\
\209\001\183\000\132\000\155\000\148\001\144\000\133\001\153\001\
\035\002\168\000\216\001\037\002\168\000\144\000\191\000\044\001\
\183\000\037\000\155\001\156\000\157\001\074\000\159\001\168\000\
\134\000\135\000\251\001\161\001\158\000\183\000\035\001\168\000\
\182\000\238\001\182\000\054\002\182\000\182\000\146\000\036\001\
\044\001\182\000\172\001\075\000\173\001\174\001\037\001\182\000\
\182\000\182\000\158\000\047\002\146\000\164\000\252\001\078\000\
\182\000\155\000\160\001\187\001\146\000\190\000\250\001\190\000\
\192\001\190\000\190\000\164\000\177\001\079\000\190\000\182\000\
\182\000\167\000\109\001\164\000\190\000\185\001\186\001\155\000\
\188\001\189\001\190\001\035\001\182\000\190\000\159\000\167\000\
\070\001\159\000\080\000\156\000\127\001\213\001\156\000\167\000\
\111\001\112\001\075\001\037\001\159\000\147\000\081\000\206\001\
\207\001\156\000\170\000\221\001\159\000\170\000\020\002\223\001\
\217\001\156\000\023\002\147\000\150\000\220\001\024\002\025\002\
\170\000\144\001\151\000\147\000\234\001\083\000\202\000\004\002\
\170\000\057\001\150\000\207\000\058\001\209\000\086\000\169\000\
\151\000\135\000\150\000\044\001\244\001\085\000\245\001\246\001\
\151\000\235\001\160\000\049\002\091\000\169\000\048\002\240\001\
\241\001\242\001\187\000\110\000\187\000\169\000\187\000\187\000\
\160\000\101\000\074\001\187\000\005\002\248\001\057\002\058\002\
\160\000\187\000\187\000\186\000\171\000\186\000\171\000\186\000\
\186\000\157\000\187\000\132\000\186\000\008\002\058\000\114\000\
\114\000\059\000\186\000\186\000\188\000\104\000\188\000\157\000\
\188\000\188\000\060\000\186\000\050\000\188\000\050\000\157\000\
\107\000\134\000\135\000\188\000\188\000\014\002\187\000\120\001\
\111\001\112\001\044\001\032\002\188\000\178\001\136\000\061\000\
\050\000\196\001\038\002\224\001\197\001\113\001\225\001\189\000\
\044\002\189\000\108\000\189\000\189\000\021\002\109\001\109\000\
\189\000\050\000\109\001\111\000\051\002\109\001\189\000\189\000\
\040\002\041\002\042\002\043\002\109\001\117\000\192\000\189\000\
\192\000\119\000\192\000\192\000\111\001\112\001\050\002\192\000\
\111\001\112\001\120\000\111\001\112\001\192\000\192\000\026\002\
\027\002\113\001\111\001\112\001\121\000\113\001\192\000\124\000\
\113\001\191\000\236\001\191\000\131\000\191\000\191\000\113\001\
\062\002\063\002\191\000\045\002\046\002\126\000\137\000\169\000\
\191\000\191\000\093\000\109\001\091\000\170\000\171\000\172\000\
\032\001\191\000\174\000\175\000\176\000\065\000\138\000\065\000\
\065\000\065\000\086\000\065\000\086\000\112\000\086\000\177\000\
\086\000\111\001\112\001\086\000\178\000\139\000\065\000\113\000\
\006\001\146\000\007\001\086\000\147\000\065\000\113\001\148\000\
\149\000\179\000\086\000\114\000\115\000\086\000\175\000\153\000\
\175\000\065\000\065\000\065\000\065\000\065\000\152\000\180\000\
\116\000\086\000\086\000\220\000\150\000\222\000\181\000\182\000\
\154\000\065\000\159\000\098\000\160\000\098\000\086\000\098\000\
\168\000\098\000\039\000\066\000\098\000\066\000\066\000\066\000\
\040\000\066\000\103\000\041\000\098\000\016\002\109\001\189\000\
\130\000\102\000\058\000\098\000\193\000\059\000\098\000\200\000\
\195\000\201\000\042\000\066\000\204\000\043\000\088\000\044\000\
\045\000\205\000\098\000\098\000\111\001\112\001\218\000\066\000\
\066\000\066\000\066\000\206\000\085\000\219\000\085\000\098\000\
\085\000\113\001\085\000\061\000\046\000\085\000\070\000\066\000\
\070\000\070\000\070\000\229\000\070\000\085\000\056\000\254\000\
\255\000\056\000\005\001\023\001\085\000\025\001\039\000\085\000\
\028\001\029\001\056\000\030\001\040\000\031\001\070\000\097\000\
\039\000\040\001\041\001\085\000\085\000\042\001\040\000\043\001\
\222\001\041\000\070\000\070\000\070\000\046\001\042\000\056\000\
\085\000\043\000\047\001\044\000\045\000\132\000\039\000\039\002\
\042\000\062\001\070\000\050\001\040\000\044\000\045\000\097\000\
\069\001\052\001\007\002\054\001\109\001\061\001\115\000\000\002\
\046\000\066\001\086\000\134\000\135\000\112\000\042\000\109\001\
\068\001\118\000\046\000\044\000\045\000\086\000\072\001\113\000\
\136\000\076\001\111\001\112\001\086\000\071\001\034\002\086\000\
\135\000\091\001\087\001\114\000\115\000\111\001\112\001\113\001\
\046\000\112\000\084\001\086\000\086\000\112\000\086\001\088\001\
\116\000\089\001\113\001\113\000\125\000\090\001\093\001\113\000\
\086\000\098\001\004\000\005\000\006\000\007\000\095\001\114\000\
\115\000\008\000\009\000\114\000\115\000\010\000\011\000\012\000\
\013\000\014\000\096\001\015\000\116\000\103\001\099\001\104\001\
\116\000\017\002\016\000\018\002\114\001\017\000\018\000\019\000\
\020\000\109\001\021\000\109\001\028\002\022\000\023\000\024\000\
\025\000\055\002\109\001\026\000\109\001\108\001\027\000\028\000\
\069\000\115\001\069\000\069\000\069\000\116\001\069\000\111\001\
\112\001\111\001\112\001\056\002\109\001\119\001\121\001\134\001\
\111\001\112\001\111\001\112\001\113\001\126\001\113\001\109\001\
\069\000\182\001\044\001\109\001\136\001\113\001\135\001\113\001\
\132\000\139\001\111\001\112\001\069\000\069\000\069\000\137\001\
\142\001\140\001\147\001\149\001\152\001\111\001\112\001\113\001\
\150\001\111\001\112\001\240\000\241\000\242\000\134\000\135\000\
\112\000\164\001\113\001\156\001\151\001\132\000\113\001\073\001\
\132\000\158\001\113\000\136\000\133\000\162\001\109\001\078\001\
\190\000\163\001\169\001\109\001\166\001\110\001\114\000\115\000\
\176\001\065\000\013\002\134\000\135\000\171\001\134\000\135\000\
\065\000\191\000\175\001\116\000\111\001\112\001\134\000\135\000\
\136\000\111\001\112\001\136\000\112\000\179\001\180\001\065\000\
\065\000\113\001\132\000\136\000\109\001\183\001\113\001\134\000\
\135\000\184\001\193\001\198\001\065\000\194\001\199\001\202\001\
\191\001\215\001\114\000\115\000\136\000\203\001\218\001\219\001\
\134\000\135\000\111\001\112\001\228\001\011\001\230\001\116\000\
\012\001\231\001\239\001\144\000\112\001\136\000\013\001\113\001\
\245\000\014\001\246\000\247\001\254\001\015\001\001\002\002\002\
\247\000\003\002\009\002\248\000\010\002\011\002\015\002\091\000\
\016\001\091\000\019\002\091\000\012\002\091\000\017\001\022\002\
\091\000\095\000\249\000\095\000\053\002\095\000\031\002\095\000\
\250\000\036\002\095\000\061\002\037\000\037\000\103\000\091\000\
\075\000\099\000\091\000\099\000\048\000\099\000\060\000\099\000\
\081\000\095\000\099\000\119\000\122\000\119\000\122\000\119\000\
\122\000\119\000\122\000\079\000\119\000\122\000\054\000\116\000\
\101\000\099\000\073\000\128\000\062\000\126\000\171\000\037\000\
\109\000\045\000\197\000\179\000\195\000\177\000\214\001\010\001\
\134\000\107\000\105\000\132\000\130\000\118\001\083\001\024\001\
\230\000\143\001\056\001\232\001\253\001\237\001\059\002\000\000\
\000\000\000\000\000\000\000\000\168\001"
let yycheck = "\002\000\
\037\000\000\000\009\000\010\000\103\000\012\000\013\000\096\000\
\177\000\016\000\031\001\005\000\147\000\000\000\246\000\004\001\
\004\001\009\001\000\000\020\001\009\000\015\000\000\000\012\000\
\093\000\012\001\033\000\033\001\013\001\006\001\003\001\009\001\
\013\001\020\001\000\000\004\001\004\001\074\000\004\001\004\001\
\139\000\042\001\043\001\030\001\004\001\013\000\255\000\020\001\
\054\001\027\001\027\001\020\001\000\000\122\000\153\000\042\001\
\043\001\030\001\026\001\018\000\026\001\004\001\021\000\022\000\
\067\000\042\001\034\000\035\000\055\001\042\001\043\001\000\000\
\064\001\061\001\063\001\056\001\044\001\000\000\055\001\044\001\
\169\000\000\000\055\001\104\001\044\001\003\001\175\000\005\001\
\157\000\007\001\159\000\009\001\042\001\004\001\012\001\131\001\
\000\000\052\001\066\000\168\000\000\000\044\001\063\001\016\001\
\140\001\055\001\000\000\009\001\111\000\027\001\005\001\087\001\
\030\001\007\002\009\001\136\001\000\000\120\000\139\001\108\000\
\123\000\110\000\125\000\112\000\113\000\114\000\115\000\116\000\
\117\000\000\000\133\000\012\001\091\000\044\001\000\000\124\000\
\089\001\055\001\005\001\020\001\034\002\043\001\037\001\003\001\
\000\000\148\000\149\000\150\000\003\001\000\000\013\001\154\000\
\008\001\020\001\004\001\158\000\013\001\160\000\126\000\000\000\
\020\001\042\001\043\001\131\000\132\000\000\000\134\000\135\000\
\136\000\137\000\177\000\027\001\027\001\156\000\055\001\042\001\
\043\001\027\001\000\000\138\000\025\001\005\001\042\001\043\001\
\020\001\013\001\053\002\012\001\055\001\000\000\193\000\013\001\
\195\000\176\000\061\002\055\001\020\001\004\001\008\001\000\000\
\028\001\037\001\239\000\010\001\011\001\012\001\013\001\013\001\
\015\001\016\001\017\001\044\001\037\001\166\001\013\001\204\000\
\000\000\027\001\042\001\043\001\000\000\028\001\190\000\191\000\
\009\001\005\001\033\001\008\001\003\001\028\001\005\001\055\001\
\007\001\000\000\009\001\044\001\009\001\012\001\047\001\046\001\
\020\001\009\001\027\001\001\000\002\000\002\001\205\000\206\000\
\009\001\243\000\030\001\009\001\027\001\060\001\027\001\030\001\
\013\001\014\001\005\001\027\001\067\001\068\001\042\001\043\001\
\013\001\016\001\027\001\042\001\000\000\027\001\027\001\052\001\
\240\000\241\000\242\000\055\001\023\001\008\001\121\001\005\001\
\055\001\003\001\025\001\005\001\197\001\007\001\008\001\110\000\
\015\001\132\001\012\001\000\000\171\001\126\001\117\000\005\001\
\018\001\019\001\008\001\142\001\013\001\027\001\029\001\030\001\
\137\001\027\001\020\001\004\001\055\001\019\001\037\001\062\001\
\009\002\005\001\147\001\012\002\008\001\027\001\000\000\008\001\
\042\001\064\001\069\001\020\001\071\001\013\001\073\001\019\001\
\042\001\043\001\019\001\078\001\005\001\055\001\004\001\027\001\
\003\001\180\001\005\001\036\002\007\001\008\001\005\001\013\001\
\008\001\012\001\093\001\013\001\095\001\096\001\020\001\018\001\
\019\001\020\001\027\001\019\001\019\001\005\001\201\001\013\001\
\027\001\005\001\074\001\110\001\027\001\003\001\199\001\005\001\
\115\001\007\001\008\001\019\001\099\001\013\001\012\001\042\001\
\043\001\005\001\020\001\027\001\018\001\108\001\109\001\027\001\
\111\001\112\001\113\001\004\001\055\001\027\001\005\001\019\001\
\218\000\008\001\013\001\005\001\013\001\144\001\008\001\027\001\
\042\001\043\001\228\000\020\001\019\001\005\001\013\001\134\001\
\135\001\019\001\005\001\158\001\027\001\008\001\247\001\162\001\
\149\001\027\001\251\001\019\001\005\001\154\001\255\001\000\002\
\019\001\027\001\005\001\027\001\175\001\013\001\102\000\212\001\
\027\001\013\001\019\001\107\000\016\001\109\000\013\001\005\001\
\019\001\043\001\027\001\008\001\191\001\004\001\193\001\194\001\
\027\001\176\001\005\001\028\002\006\001\019\001\019\001\182\001\
\183\001\184\001\003\001\009\001\005\001\027\001\007\001\008\001\
\019\001\013\001\003\001\012\001\215\001\196\001\047\002\048\002\
\027\001\018\001\019\001\003\001\003\001\005\001\005\001\007\001\
\008\001\005\001\027\001\020\001\012\001\218\001\001\001\026\001\
\027\001\004\001\018\001\019\001\003\001\013\001\005\001\019\001\
\007\001\008\001\013\001\027\001\007\001\012\001\009\001\027\001\
\004\001\042\001\043\001\018\001\019\001\236\001\055\001\005\001\
\042\001\043\001\008\001\006\002\027\001\003\001\055\001\034\001\
\027\001\003\001\013\002\013\001\003\001\055\001\016\001\003\001\
\019\002\005\001\004\001\007\001\008\001\005\001\020\001\004\001\
\012\001\044\001\020\001\027\001\031\002\020\001\018\001\019\001\
\015\002\016\002\017\002\018\002\020\001\009\001\003\001\027\001\
\005\001\009\001\007\001\008\001\042\001\043\001\029\002\012\001\
\042\001\043\001\027\001\042\001\043\001\018\001\019\001\001\002\
\002\002\055\001\042\001\043\001\003\001\055\001\027\001\009\001\
\055\001\003\001\003\001\005\001\004\001\007\001\008\001\055\001\
\055\002\056\002\012\001\021\002\022\002\006\001\009\001\004\001\
\018\001\019\001\004\001\020\001\006\001\010\001\011\001\012\001\
\013\001\027\001\015\001\016\001\017\001\003\001\003\001\005\001\
\006\001\007\001\003\001\009\001\005\001\020\001\007\001\028\001\
\009\001\042\001\043\001\012\001\033\001\044\001\020\001\030\001\
\013\001\013\001\015\001\020\001\009\001\027\001\055\001\027\001\
\027\001\046\001\027\001\042\001\043\001\030\001\003\001\044\001\
\005\001\039\001\040\001\041\001\042\001\043\001\020\001\060\001\
\055\001\042\001\043\001\119\000\027\001\121\000\067\001\068\001\
\027\001\055\001\004\001\003\001\027\001\005\001\055\001\007\001\
\004\001\009\001\004\001\003\001\012\001\005\001\006\001\007\001\
\010\001\009\001\044\001\013\001\020\001\019\001\020\001\009\001\
\004\001\004\001\001\001\027\001\027\001\004\001\030\001\005\001\
\027\001\006\001\028\001\027\001\020\001\031\001\013\001\033\001\
\034\001\044\001\042\001\043\001\042\001\043\001\004\001\039\001\
\040\001\041\001\042\001\044\001\003\001\013\001\005\001\055\001\
\007\001\055\001\009\001\034\001\054\001\012\001\003\001\055\001\
\005\001\006\001\007\001\005\001\009\001\020\001\001\001\015\001\
\020\001\004\001\027\001\064\001\027\001\009\001\004\001\030\001\
\013\001\004\001\013\001\004\001\010\001\004\001\027\001\013\001\
\004\001\013\001\015\001\042\001\043\001\013\001\010\001\013\001\
\007\001\013\001\039\001\040\001\041\001\005\001\028\001\034\001\
\055\001\031\001\003\001\033\001\034\001\020\001\004\001\007\001\
\028\001\003\001\055\001\007\001\010\001\033\001\034\001\013\001\
\027\001\005\001\009\001\005\001\020\001\005\001\043\001\012\001\
\054\001\005\001\009\001\042\001\043\001\020\001\028\001\020\001\
\005\001\047\001\054\001\033\001\034\001\020\001\005\001\030\001\
\055\001\005\001\042\001\043\001\027\001\027\001\009\001\030\001\
\043\001\005\001\004\001\042\001\043\001\042\001\043\001\055\001\
\054\001\020\001\013\001\042\001\043\001\020\001\013\001\013\001\
\055\001\004\001\055\001\030\001\027\001\003\001\027\001\030\001\
\055\001\005\001\021\001\022\001\023\001\024\001\027\001\042\001\
\043\001\028\001\029\001\042\001\043\001\032\001\033\001\034\001\
\035\001\036\001\027\001\038\001\055\001\013\001\006\001\004\001\
\055\001\012\001\045\001\012\001\005\001\048\001\049\001\050\001\
\051\001\020\001\053\001\020\001\012\001\056\001\057\001\058\001\
\059\001\019\001\020\001\062\001\020\001\004\001\065\001\066\001\
\003\001\064\001\005\001\006\001\007\001\005\001\009\001\042\001\
\043\001\042\001\043\001\019\001\020\001\005\001\044\001\037\001\
\042\001\043\001\042\001\043\001\055\001\018\001\055\001\020\001\
\027\001\018\001\008\001\020\001\004\001\055\001\020\001\055\001\
\020\001\004\001\042\001\043\001\039\001\040\001\041\001\009\001\
\044\001\004\001\020\001\020\001\046\001\042\001\043\001\055\001\
\015\001\042\001\043\001\039\001\040\001\041\001\042\001\043\001\
\020\001\006\001\055\001\005\001\007\001\020\001\055\001\027\001\
\020\001\027\001\030\001\055\001\027\001\027\001\020\001\027\001\
\020\001\005\001\046\001\020\001\020\001\027\001\042\001\043\001\
\020\001\020\001\027\001\042\001\043\001\012\001\042\001\043\001\
\027\001\020\001\027\001\055\001\042\001\043\001\042\001\043\001\
\055\001\042\001\043\001\055\001\020\001\005\001\044\001\042\001\
\043\001\055\001\020\001\055\001\020\001\037\001\055\001\042\001\
\043\001\020\001\027\001\005\001\055\001\027\001\013\001\003\001\
\063\001\027\001\042\001\043\001\055\001\005\001\020\001\015\001\
\042\001\043\001\042\001\043\001\005\001\001\001\005\001\055\001\
\004\001\015\001\005\001\026\001\043\001\055\001\010\001\055\001\
\002\001\013\001\004\001\009\001\005\001\017\001\005\001\005\001\
\010\001\005\001\020\001\013\001\015\001\007\001\009\001\003\001\
\028\001\005\001\064\001\007\001\012\001\009\001\034\001\005\001\
\012\001\003\001\028\001\005\001\009\001\007\001\027\001\009\001\
\034\001\020\001\012\001\009\001\025\001\000\000\005\001\027\001\
\005\001\003\001\030\001\005\001\027\001\007\001\005\001\009\001\
\027\001\027\001\012\001\003\001\003\001\005\001\005\001\007\001\
\007\001\009\001\009\001\027\001\012\001\012\001\005\001\027\001\
\005\001\027\001\005\001\005\001\005\001\005\001\020\001\064\001\
\007\001\064\001\005\001\005\001\005\001\005\001\145\001\155\000\
\007\001\007\001\007\001\007\001\007\001\025\001\243\000\166\000\
\130\000\047\001\199\000\170\001\202\001\178\001\050\002\255\255\
\255\255\255\255\255\255\255\255\090\001"
let yynames_const = "\
CHOICE\000\
STAR\000\
COMMA\000\
LPAREN\000\
RPAREN\000\
LBRACKET\000\
RBRACKET\000\
BAR\000\
SEMI\000\
NEW\000\
OUT\000\
IN\000\
REPL\000\
IF\000\
THEN\000\
ELSE\000\
EQUAL\000\
FUN\000\
EQUATION\000\
REDUCTION\000\
PREDICATE\000\
PROCESS\000\
SLASH\000\
DOT\000\
EOF\000\
LET\000\
QUERY\000\
BEFORE\000\
PUTBEGIN\000\
NONINTERF\000\
EVENT\000\
NOT\000\
ELIMTRUE\000\
FREE\000\
SUCHTHAT\000\
CLAUSES\000\
RED\000\
EQUIV\000\
EQUIVEQ\000\
WEDGE\000\
DIFF\000\
COLON\000\
NOUNIF\000\
PHASE\000\
AMONG\000\
WEAKSECRET\000\
PARAM\000\
TYPE\000\
SET\000\
FORALL\000\
CONST\000\
INJEVENT\000\
OR\000\
CHANNEL\000\
LETFUN\000\
DEFINE\000\
EXPAND\000\
YIELD\000\
LEQ\000\
PROBA\000\
LBRACE\000\
RBRACE\000\
PROOF\000\
TABLE\000\
INSERT\000\
GET\000\
"
let yynames_block = "\
IDENT\000\
STRING\000\
INT\000\
"
let yyact = [|
(fun _ -> failwith "parser")
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 132 "pitparser.mly"
Options are ignored , they are supported for compatibility with
CryptoVerif only
CryptoVerif only *)
TTypeDecl(_2) :: _5 )
# 851 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 8 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 6 : 'typeidseq) in
let _7 = (Parsing.peek_val __caml_parser_env 3 : 'typeid) in
let _8 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _10 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 136 "pitparser.mly"
( (TFunDecl(_2, _4, _7, _8)) :: _10 )
# 862 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : 'neidentseq) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'typeid) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 138 "pitparser.mly"
( (List.map (fun x -> TConstDecl(x, _4, _5)) _2) @ _7 )
# 872 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : 'forallvartype) in
let _3 = (Parsing.peek_val __caml_parser_env 4 : 'term) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 140 "pitparser.mly"
( (TEquation(_2, _3, _5)) :: _7 )
# 882 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : 'treduc) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 142 "pitparser.mly"
( (TReduc(_2,_3)) :: _5 )
# 891 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 144 "pitparser.mly"
( (TEventDecl(_2, [])) :: _4 )
# 899 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'typeidseq) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 146 "pitparser.mly"
( (TEventDecl(_2, _4)) :: _7 )
# 908 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : 'typeidseq) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 148 "pitparser.mly"
( (TPredDecl(_2, _4, _6)) :: _8 )
# 918 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 150 "pitparser.mly"
( (TPredDecl(_2, [], _3)) :: _5 )
# 927 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'typeidseq) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 152 "pitparser.mly"
( (TTableDecl(_2, _4)) :: _7 )
# 936 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'tprocess) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 154 "pitparser.mly"
( (TPDef(_2,[],_4)) :: _6 )
# 945 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 7 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 5 : 'vartype) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : 'tprocess) in
let _9 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 156 "pitparser.mly"
( (TPDef(_2,_4,_7)) :: _9 )
# 955 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 158 "pitparser.mly"
( (TLetFun(_2,[],_4)) :: _6 )
# 964 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 7 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 5 : 'vartype) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _9 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 160 "pitparser.mly"
( (TLetFun(_2,_4,_7)) :: _9 )
# 974 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 162 "pitparser.mly"
( (TSet(_2,S _4)) :: _6 )
# 983 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : int) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 164 "pitparser.mly"
( (TSet(_2,I _4)) :: _6 )
# 992 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'tfnebindingseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 166 "pitparser.mly"
( (TNoUnif (_2, _4)) :: _6 )
# 1001 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tfnebindingseq) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 168 "pitparser.mly"
( (TNoUnif ([], _2)) :: _4 )
# 1009 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'tqueryseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 170 "pitparser.mly"
( (TQuery(_2,_4)) :: _6 )
# 1018 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tqueryseq) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 172 "pitparser.mly"
( (TQuery([],_2)) :: _4 )
# 1026 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'niseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 174 "pitparser.mly"
( (TNoninterf(_2, _4)) :: _6 )
# 1035 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'niseq) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 176 "pitparser.mly"
( (TNoninterf([], _2)) :: _4 )
# 1043 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 178 "pitparser.mly"
( (TWeaksecret(_2)) :: _4 )
# 1051 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 180 "pitparser.mly"
( (TNot(_2, _4)) :: _6 )
# 1060 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 182 "pitparser.mly"
( (TNot([], _2)) :: _4 )
# 1068 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'neidentseq) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 184 "pitparser.mly"
Supported for compatility with CryptoVerif only
_4 )
# 1077 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 187 "pitparser.mly"
Supported for compatility with CryptoVerif only
_4 )
# 1086 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'proof) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 190 "pitparser.mly"
Supported for compatility with CryptoVerif only
_5 )
# 1095 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 193 "pitparser.mly"
( (TElimtrue (_2,_4)) :: _6 )
# 1104 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 195 "pitparser.mly"
( (TElimtrue ([],_2)) :: _4 )
# 1112 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'neidentseq) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 197 "pitparser.mly"
For compatibility with CryptoVerif , allow
channel c1 ... cn .
as a synonym for
free c1 ... cn : channel .
channel c1...cn.
as a synonym for
free c1...cn:channel. *)
(List.map (fun x -> TFree(x, ("channel", dummy_ext), [])) _2) @ _4 )
# 1124 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : 'neidentseq) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'typeid) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 203 "pitparser.mly"
( (List.map (fun x -> TFree(x, _4, _5)) _2) @ _7 )
# 1134 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tclauses) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 205 "pitparser.mly"
( (TClauses(_2)) :: _3 )
# 1142 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.tdecl list) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 207 "pitparser.mly"
( (TDefine(_2, [], _4)) :: _6 )
# 1151 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 7 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 5 : 'typeidseq) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.tdecl list) in
let _9 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 209 "pitparser.mly"
( (TDefine(_2, _4, _7)) :: _9 )
# 1161 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'typeidseq) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.tdecl list) in
Obj.repr(
# 211 "pitparser.mly"
( (TExpand(_2, _4)) :: _7 )
# 1170 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
Obj.repr(
# 213 "pitparser.mly"
( [] )
# 1176 "pitparser.ml"
: Pitptree.tdecl list))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.tdecl list) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'tprocess) in
Obj.repr(
# 217 "pitparser.mly"
( _1, _3 )
# 1184 "pitparser.ml"
: Pitptree.tdecl list * Pitptree.tprocess))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 223 "pitparser.mly"
( _1 )
# 1191 "pitparser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 225 "pitparser.mly"
( _1 )
# 1198 "pitparser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 227 "pitparser.mly"
( "*", parse_extent() )
# 1204 "pitparser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 229 "pitparser.mly"
( ".", parse_extent() )
# 1210 "pitparser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'prooftoken) in
Obj.repr(
# 233 "pitparser.mly"
( [_1] )
# 1217 "pitparser.ml"
: 'proofcommand))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 1 : 'prooftoken) in
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'proofcommand) in
Obj.repr(
# 235 "pitparser.mly"
( _1 :: _2 )
# 1225 "pitparser.ml"
: 'proofcommand))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'proofcommand) in
Obj.repr(
# 239 "pitparser.mly"
( [_1] )
# 1232 "pitparser.ml"
: 'proof))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'proofcommand) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'proof) in
Obj.repr(
# 241 "pitparser.mly"
( _1 :: _3 )
# 1240 "pitparser.ml"
: 'proof))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'neidentseq) in
Obj.repr(
# 247 "pitparser.mly"
( _2 )
# 1247 "pitparser.ml"
: 'options))
; (fun __caml_parser_env ->
Obj.repr(
# 249 "pitparser.mly"
( [] )
# 1253 "pitparser.ml"
: 'options))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'neidentseq) in
Obj.repr(
# 253 "pitparser.mly"
( _1 :: _3 )
# 1261 "pitparser.ml"
: 'neidentseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 255 "pitparser.mly"
( [_1] )
# 1268 "pitparser.ml"
: 'neidentseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'typeid) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'nevartype) in
Obj.repr(
# 259 "pitparser.mly"
( (_1,_3)::_5 )
# 1277 "pitparser.ml"
: 'nevartype))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'typeid) in
Obj.repr(
# 262 "pitparser.mly"
( [(_1,_3)] )
# 1285 "pitparser.ml"
: 'nevartype))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'nevartype) in
Obj.repr(
# 266 "pitparser.mly"
( _1 )
# 1292 "pitparser.ml"
: 'vartype))
; (fun __caml_parser_env ->
Obj.repr(
# 268 "pitparser.mly"
( [] )
# 1298 "pitparser.ml"
: 'vartype))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'nevartype) in
Obj.repr(
# 272 "pitparser.mly"
( _2 )
# 1305 "pitparser.ml"
: 'forallvartype))
; (fun __caml_parser_env ->
Obj.repr(
# 274 "pitparser.mly"
( [] )
# 1311 "pitparser.ml"
: 'forallvartype))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 278 "pitparser.mly"
( _1 )
# 1318 "pitparser.ml"
: 'typeid))
; (fun __caml_parser_env ->
Obj.repr(
# 280 "pitparser.mly"
"channel", parse_extent() )
# 1325 "pitparser.ml"
: 'typeid))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'netypeidseq) in
Obj.repr(
# 285 "pitparser.mly"
( _1 )
# 1332 "pitparser.ml"
: 'typeidseq))
; (fun __caml_parser_env ->
Obj.repr(
# 287 "pitparser.mly"
( [] )
# 1338 "pitparser.ml"
: 'typeidseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'typeid) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'netypeidseq) in
Obj.repr(
# 291 "pitparser.mly"
( _1 :: _3 )
# 1346 "pitparser.ml"
: 'netypeidseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'typeid) in
Obj.repr(
# 293 "pitparser.mly"
( [_1] )
# 1353 "pitparser.ml"
: 'netypeidseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 299 "pitparser.mly"
( PFunApp (_1, _3), parse_extent() )
# 1361 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'term) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'term) in
Obj.repr(
# 301 "pitparser.mly"
( Param.has_choice := true;
PFunApp(("choice", parse_extent()), [_3; _5]), parse_extent() )
# 1370 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 304 "pitparser.mly"
( PIdent (_1), parse_extent() )
# 1377 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 306 "pitparser.mly"
( PFunApp(("=", parse_extent()), [_1; _3]), parse_extent() )
# 1385 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 308 "pitparser.mly"
( PFunApp(("<>", parse_extent()), [_1; _3]), parse_extent() )
# 1393 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'term) in
Obj.repr(
# 310 "pitparser.mly"
( PFunApp(("not", parse_extent()), [_3]), parse_extent() )
# 1400 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 312 "pitparser.mly"
( PFunApp(("||", parse_extent()), [_1; _3]), parse_extent() )
# 1408 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 314 "pitparser.mly"
( PFunApp(("&&", parse_extent()), [_1; _3]), parse_extent() )
# 1416 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 316 "pitparser.mly"
( match _2 with
Allow parentheses for priorities of infix operators ;
Tuples can not have one element .
Tuples cannot have one element. *)
| l -> PTuple (l), parse_extent() )
# 1426 "pitparser.ml"
: 'term))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'netermseq) in
Obj.repr(
# 323 "pitparser.mly"
( _1 :: _3 )
# 1434 "pitparser.ml"
: 'netermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 325 "pitparser.mly"
( [_1] )
# 1441 "pitparser.ml"
: 'netermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'netermseq) in
Obj.repr(
# 329 "pitparser.mly"
( _1 )
# 1448 "pitparser.ml"
: 'termseq))
; (fun __caml_parser_env ->
Obj.repr(
# 331 "pitparser.mly"
( [] )
# 1454 "pitparser.ml"
: 'termseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : 'netermseq) in
Obj.repr(
# 337 "pitparser.mly"
( (_1, Some _4) )
# 1462 "pitparser.ml"
: 'ni))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 339 "pitparser.mly"
( (_1, None) )
# 1469 "pitparser.ml"
: 'ni))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'ni) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'niseq) in
Obj.repr(
# 343 "pitparser.mly"
( _1 :: _3 )
# 1477 "pitparser.ml"
: 'niseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'ni) in
Obj.repr(
# 345 "pitparser.mly"
( [_1] )
# 1484 "pitparser.ml"
: 'niseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tquery) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tqueryseq) in
Obj.repr(
# 351 "pitparser.mly"
( _1 :: _3 )
# 1492 "pitparser.ml"
: 'tqueryseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tquery) in
Obj.repr(
# 353 "pitparser.mly"
( [_1] )
# 1499 "pitparser.ml"
: 'tqueryseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 357 "pitparser.mly"
( PRealQuery(_1) )
# 1506 "pitparser.ml"
: 'tquery))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'neidentseq) in
Obj.repr(
# 359 "pitparser.mly"
( PPutBegin(false, _4) )
# 1513 "pitparser.ml"
: 'tquery))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'neidentseq) in
Obj.repr(
# 361 "pitparser.mly"
( PPutBegin(true, _4) )
# 1520 "pitparser.ml"
: 'tquery))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'gtermseq) in
Obj.repr(
# 365 "pitparser.mly"
( PGFunApp (_1, _3), parse_extent() )
# 1528 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 367 "pitparser.mly"
( PGIdent (_1), parse_extent() )
# 1535 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 5 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'gtermseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 369 "pitparser.mly"
( PGPhase(_1, _3, _6), parse_extent() )
# 1544 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 371 "pitparser.mly"
( PGFunApp(("=", parse_extent()), [_1; _3]), parse_extent() )
# 1552 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 373 "pitparser.mly"
( PGFunApp(("<>", parse_extent()), [_1; _3]), parse_extent() )
# 1560 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'gterm) in
Obj.repr(
# 375 "pitparser.mly"
( PGFunApp(("not", parse_extent()), [_3]), parse_extent() )
# 1567 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 377 "pitparser.mly"
( PGFunApp(("||", parse_extent()), [_1; _3]), parse_extent() )
# 1575 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 379 "pitparser.mly"
( PGFunApp(("&&", parse_extent()), [_1; _3]), parse_extent() )
# 1583 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'gtermseq) in
Obj.repr(
# 381 "pitparser.mly"
( PGFunApp(("event",parse_extent()), _3), parse_extent() )
# 1590 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'gtermseq) in
Obj.repr(
# 383 "pitparser.mly"
( PGFunApp(("inj-event",parse_extent()), _3), parse_extent() )
# 1597 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 385 "pitparser.mly"
( PGFunApp(("==>", parse_extent()), [_1;_3]), parse_extent() )
# 1605 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'gtermseq) in
Obj.repr(
# 387 "pitparser.mly"
( match _2 with
Allow parentheses for priorities of infix operators ;
Tuples can not have one element .
Tuples cannot have one element. *)
| l -> PGTuple (l), parse_extent() )
# 1615 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : 'bindingseq) in
Obj.repr(
# 392 "pitparser.mly"
( PGName (_2, _4), parse_extent() )
# 1623 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 394 "pitparser.mly"
( PGName (_2, []), parse_extent() )
# 1630 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 396 "pitparser.mly"
( PGLet(_2, _4, _6), parse_extent() )
# 1639 "pitparser.ml"
: 'gterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'negtermseq) in
Obj.repr(
# 400 "pitparser.mly"
( _1 :: _3 )
# 1647 "pitparser.ml"
: 'negtermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 402 "pitparser.mly"
( [_1] )
# 1654 "pitparser.ml"
: 'negtermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'negtermseq) in
Obj.repr(
# 406 "pitparser.mly"
( _1 )
# 1661 "pitparser.ml"
: 'gtermseq))
; (fun __caml_parser_env ->
Obj.repr(
# 408 "pitparser.mly"
( [] )
# 1667 "pitparser.ml"
: 'gtermseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : int) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'nesbindingseq) in
Obj.repr(
# 413 "pitparser.mly"
( (("!" ^ (string_of_int (_2)), parse_extent()), _4) :: _6 )
# 1676 "pitparser.ml"
: 'nesbindingseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : int) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 415 "pitparser.mly"
( [(("!" ^ (string_of_int (_2)), parse_extent()), _4)] )
# 1684 "pitparser.ml"
: 'nesbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'gterm) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'nesbindingseq) in
Obj.repr(
# 417 "pitparser.mly"
( (_1, _3) :: _5 )
# 1693 "pitparser.ml"
: 'nesbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gterm) in
Obj.repr(
# 419 "pitparser.mly"
( [(_1, _3)] )
# 1701 "pitparser.ml"
: 'nesbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'nesbindingseq) in
Obj.repr(
# 423 "pitparser.mly"
( _1 )
# 1708 "pitparser.ml"
: 'bindingseq))
; (fun __caml_parser_env ->
Obj.repr(
# 425 "pitparser.mly"
( [] )
# 1714 "pitparser.ml"
: 'bindingseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'gformat) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tfnebindingseq) in
Obj.repr(
# 431 "pitparser.mly"
( BFLet(_2, _4, _6) )
# 1723 "pitparser.ml"
: 'tfnebindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 5 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'gformatseq) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'optphase) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'optint) in
Obj.repr(
# 433 "pitparser.mly"
( BFNoUnif((_1,_3,_5), _6) )
# 1733 "pitparser.ml"
: 'tfnebindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 1 : Pitptree.ident) in
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'optint) in
Obj.repr(
# 435 "pitparser.mly"
( BFNoUnif((_1,[],-1),_2) )
# 1741 "pitparser.ml"
: 'tfnebindingseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 439 "pitparser.mly"
( _2 )
# 1748 "pitparser.ml"
: 'optphase))
; (fun __caml_parser_env ->
Obj.repr(
# 441 "pitparser.mly"
( -1 )
# 1754 "pitparser.ml"
: 'optphase))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 445 "pitparser.mly"
( _2 )
# 1761 "pitparser.ml"
: 'optint))
; (fun __caml_parser_env ->
Obj.repr(
# 447 "pitparser.mly"
( -1 )
# 1767 "pitparser.ml"
: 'optint))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'gformatseq) in
Obj.repr(
# 451 "pitparser.mly"
( PFGFunApp (_1, _3), parse_extent() )
# 1775 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'gformat) in
Obj.repr(
# 453 "pitparser.mly"
( PFGFunApp(("not", parse_extent()), [_3]), parse_extent() )
# 1782 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 455 "pitparser.mly"
( PFGIdent (_1), parse_extent() )
# 1789 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'gformatseq) in
Obj.repr(
# 457 "pitparser.mly"
( match _2 with
Allow parentheses for priorities of infix operators ;
Tuples can not have one element .
Tuples cannot have one element. *)
| l -> PFGTuple (_2), parse_extent() )
# 1799 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : 'fbindingseq) in
Obj.repr(
# 462 "pitparser.mly"
( PFGName (_2, _4), parse_extent() )
# 1807 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 464 "pitparser.mly"
( PFGName (_2, []), parse_extent() )
# 1814 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 466 "pitparser.mly"
( PFGAny (_2), parse_extent() )
# 1821 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'gformat) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'gformat) in
Obj.repr(
# 468 "pitparser.mly"
( PFGLet(_2, _4, _6), parse_extent() )
# 1830 "pitparser.ml"
: 'gformat))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'gformat) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'negformatseq) in
Obj.repr(
# 473 "pitparser.mly"
( _1 :: _3 )
# 1838 "pitparser.ml"
: 'negformatseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'gformat) in
Obj.repr(
# 475 "pitparser.mly"
( [_1] )
# 1845 "pitparser.ml"
: 'negformatseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'negformatseq) in
Obj.repr(
# 479 "pitparser.mly"
( _1 )
# 1852 "pitparser.ml"
: 'gformatseq))
; (fun __caml_parser_env ->
Obj.repr(
# 481 "pitparser.mly"
( [] )
# 1858 "pitparser.ml"
: 'gformatseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : int) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'gformat) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'fnesbindingseq) in
Obj.repr(
# 486 "pitparser.mly"
( (("!" ^ (string_of_int (_2)), parse_extent()), _4) :: _6 )
# 1867 "pitparser.ml"
: 'fnesbindingseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : int) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'gformat) in
Obj.repr(
# 488 "pitparser.mly"
( [(("!" ^ (string_of_int (_2)), parse_extent()), _4)] )
# 1875 "pitparser.ml"
: 'fnesbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'gformat) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'fnesbindingseq) in
Obj.repr(
# 490 "pitparser.mly"
( (_1, _3) :: _5 )
# 1884 "pitparser.ml"
: 'fnesbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'gformat) in
Obj.repr(
# 492 "pitparser.mly"
( [(_1, _3)] )
# 1892 "pitparser.ml"
: 'fnesbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fnesbindingseq) in
Obj.repr(
# 496 "pitparser.mly"
( _1 )
# 1899 "pitparser.ml"
: 'fbindingseq))
; (fun __caml_parser_env ->
Obj.repr(
# 498 "pitparser.mly"
( [] )
# 1905 "pitparser.ml"
: 'fbindingseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 5 : 'forallvartype) in
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'term) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'treduc) in
Obj.repr(
# 504 "pitparser.mly"
( (_1,_2,_4) :: _6 )
# 1915 "pitparser.ml"
: 'treduc))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : 'forallvartype) in
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 506 "pitparser.mly"
( [_1,_2,_4] )
# 1924 "pitparser.ml"
: 'treduc))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 512 "pitparser.mly"
( PClause(_1,_3) )
# 1932 "pitparser.ml"
: 'tclause))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 514 "pitparser.mly"
( PFact(_1) )
# 1939 "pitparser.ml"
: 'tclause))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 516 "pitparser.mly"
( PEquiv(_1,_3,true) )
# 1947 "pitparser.ml"
: 'tclause))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'term) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'term) in
Obj.repr(
# 518 "pitparser.mly"
( PEquiv(_1,_3,false) )
# 1955 "pitparser.ml"
: 'tclause))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : 'forallvartype) in
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tclause) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'tclauses) in
Obj.repr(
# 522 "pitparser.mly"
( (_1,_2) :: _4 )
# 1964 "pitparser.ml"
: 'tclauses))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'forallvartype) in
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tclause) in
Obj.repr(
# 524 "pitparser.mly"
( [_1,_2] )
# 1972 "pitparser.ml"
: 'tclauses))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tprocess) in
Obj.repr(
# 530 "pitparser.mly"
( _2 )
# 1979 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 532 "pitparser.mly"
( PLetDef (_1,[]) )
# 1986 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'ptermseq) in
Obj.repr(
# 534 "pitparser.mly"
( PLetDef (_1, _3) )
# 1994 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 536 "pitparser.mly"
( PRepl _2 )
# 2001 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : Pitptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 538 "pitparser.mly"
For convergence with CryptoVerif , we allow an identifier ( bound on the number of copies ) after a replication ; it is simply ignored in ProVerif .
PRepl _5 )
# 2011 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 541 "pitparser.mly"
( let x = _1 in
if x = 0 then PNil else
input_error ("The only integer in a process is 0 for the nil process") (parse_extent()) )
# 2020 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
Obj.repr(
# 545 "pitparser.mly"
For convergence with CryptoVerif , we allow yield instead of 0
PNil )
# 2027 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'typeid) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 548 "pitparser.mly"
( PRestr(_2, _4, _6) )
# 2036 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'tprocess) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 550 "pitparser.mly"
( PTest(_2,_4,_6) )
# 2045 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 552 "pitparser.mly"
( PTest(_2,_4,PNil) )
# 2053 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'tpattern) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : 'opttprocess) in
Obj.repr(
# 554 "pitparser.mly"
( PInput(_3,_5,_7) )
# 2062 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : 'opttprocess) in
Obj.repr(
# 556 "pitparser.mly"
( POutput(_3,_5,_7) )
# 2071 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'tpattern) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 558 "pitparser.mly"
( PLet(_2,_4,_6,PNil) )
# 2080 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'tpattern) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 560 "pitparser.mly"
( PLet(_2,_4,PNil,PNil) )
# 2088 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : 'tpattern) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : 'tprocess) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 562 "pitparser.mly"
( PLet(_2,_4,_6,_8) )
# 2098 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 564 "pitparser.mly"
( PLetFilter(_2,_4,_6,PNil) )
# 2107 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 566 "pitparser.mly"
( PLetFilter(_2,_4,PNil,PNil) )
# 2115 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : 'tprocess) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 568 "pitparser.mly"
PLetFilter(_2,_4,_6,_8) )
# 2127 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'ptermseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'opttprocess) in
Obj.repr(
# 572 "pitparser.mly"
( PInsert(_2, _4, _6) )
# 2136 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'tpatternseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'optinprocess) in
Obj.repr(
# 574 "pitparser.mly"
( PGet(_2, _4, (PPIdent ("true", parse_extent()), parse_extent()), _6) )
# 2145 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : 'tpatternseq) in
let _7 = (Parsing.peek_val __caml_parser_env 1 : 'pterm) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : 'optinprocess) in
Obj.repr(
# 576 "pitparser.mly"
( PGet(_2, _4, _7, _8) )
# 2155 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tprocess) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 578 "pitparser.mly"
( PPar(_1,_3) )
# 2163 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'ptermseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'opttprocess) in
Obj.repr(
# 580 "pitparser.mly"
( PEvent(_2, _4, _6) )
# 2172 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : int) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'opttprocess) in
Obj.repr(
# 582 "pitparser.mly"
( PPhase(_2, _3) )
# 2180 "pitparser.ml"
: 'tprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 586 "pitparser.mly"
( _2 )
# 2187 "pitparser.ml"
: 'opttprocess))
; (fun __caml_parser_env ->
Obj.repr(
# 588 "pitparser.mly"
( PNil )
# 2193 "pitparser.ml"
: 'opttprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'tprocess) in
Obj.repr(
# 592 "pitparser.mly"
( _2 )
# 2200 "pitparser.ml"
: 'optinprocess))
; (fun __caml_parser_env ->
Obj.repr(
# 594 "pitparser.mly"
( PNil )
# 2206 "pitparser.ml"
: 'optinprocess))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 598 "pitparser.mly"
( PPatVar(_1, None) )
# 2213 "pitparser.ml"
: 'tpattern))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'typeid) in
Obj.repr(
# 600 "pitparser.mly"
( PPatVar(_1, Some _3) )
# 2221 "pitparser.ml"
: 'tpattern))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'tpatternseq) in
Obj.repr(
# 602 "pitparser.mly"
( match _2 with
Allow parentheses for priorities of infix operators ;
Tuples can not have one element .
Tuples cannot have one element. *)
| l -> PPatTuple(_2) )
# 2231 "pitparser.ml"
: 'tpattern))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'tpatternseq) in
Obj.repr(
# 607 "pitparser.mly"
( PPatFunApp(_1,_3) )
# 2239 "pitparser.ml"
: 'tpattern))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 609 "pitparser.mly"
( PPatEqual(_2) )
# 2246 "pitparser.ml"
: 'tpattern))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'tpattern) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'nepatternseq) in
Obj.repr(
# 613 "pitparser.mly"
( _1 :: _3 )
# 2254 "pitparser.ml"
: 'nepatternseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'tpattern) in
Obj.repr(
# 615 "pitparser.mly"
( [_1] )
# 2261 "pitparser.ml"
: 'nepatternseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'nepatternseq) in
Obj.repr(
# 619 "pitparser.mly"
( _1 )
# 2268 "pitparser.ml"
: 'tpatternseq))
; (fun __caml_parser_env ->
Obj.repr(
# 621 "pitparser.mly"
( [] )
# 2274 "pitparser.ml"
: 'tpatternseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Pitptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'ptermseq) in
Obj.repr(
# 627 "pitparser.mly"
( PPFunApp (_1, _3), parse_extent() )
# 2282 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'pterm) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'pterm) in
Obj.repr(
# 629 "pitparser.mly"
( Param.has_choice := true;
PPFunApp(("choice", parse_extent()), [_3; _5]), parse_extent() )
# 2291 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Pitptree.ident) in
Obj.repr(
# 632 "pitparser.mly"
( PPIdent (_1), parse_extent() )
# 2298 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 634 "pitparser.mly"
( PPFunApp(("=", parse_extent()), [_1; _3]), parse_extent() )
# 2306 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 636 "pitparser.mly"
( PPFunApp(("<>", parse_extent()), [_1; _3]), parse_extent() )
# 2314 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'pterm) in
Obj.repr(
# 638 "pitparser.mly"
( PPFunApp(("not", parse_extent()), [_3]), parse_extent() )
# 2321 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 640 "pitparser.mly"
( PPFunApp(("||", parse_extent()), [_1; _3]), parse_extent() )
# 2329 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 642 "pitparser.mly"
( PPFunApp(("&&", parse_extent()), [_1; _3]), parse_extent() )
# 2337 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Pitptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'typeid) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 644 "pitparser.mly"
( PPRestr(_2, _4, _6), parse_extent() )
# 2346 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 646 "pitparser.mly"
( PPTest(_2,_4,_6), parse_extent() )
# 2355 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'tpattern) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 648 "pitparser.mly"
( PPLetIn(_2,_4,_6), parse_extent() )
# 2364 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : 'tpattern) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 650 "pitparser.mly"
( PPLet(_2,_4,_6,_8), parse_extent() )
# 2374 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : 'nevartype) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : 'pterm) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 652 "pitparser.mly"
( PPLetFilter(_2,_4,_6,_8), parse_extent() )
# 2384 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'ptermseq) in
Obj.repr(
# 654 "pitparser.mly"
( match _2 with
Allow parentheses for priorities of infix operators ;
Tuples can not have one element .
Tuples cannot have one element. *)
| l -> PPTuple (l), parse_extent() )
# 2394 "pitparser.ml"
: 'pterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'neptermseq) in
Obj.repr(
# 661 "pitparser.mly"
( _1 :: _3 )
# 2402 "pitparser.ml"
: 'neptermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pterm) in
Obj.repr(
# 663 "pitparser.mly"
( [_1] )
# 2409 "pitparser.ml"
: 'neptermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'neptermseq) in
Obj.repr(
# 667 "pitparser.mly"
( _1 )
# 2416 "pitparser.ml"
: 'ptermseq))
; (fun __caml_parser_env ->
Obj.repr(
# 669 "pitparser.mly"
( [] )
# 2422 "pitparser.ml"
: 'ptermseq))
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
|]
let yytables =
{ Parsing.actions=yyact;
Parsing.transl_const=yytransl_const;
Parsing.transl_block=yytransl_block;
Parsing.lhs=yylhs;
Parsing.len=yylen;
Parsing.defred=yydefred;
Parsing.dgoto=yydgoto;
Parsing.sindex=yysindex;
Parsing.rindex=yyrindex;
Parsing.gindex=yygindex;
Parsing.tablesize=yytablesize;
Parsing.table=yytable;
Parsing.check=yycheck;
Parsing.error_function=parse_error;
Parsing.names_const=yynames_const;
Parsing.names_block=yynames_block }
let all (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
(Parsing.yyparse yytables 1 lexfun lexbuf : Pitptree.tdecl list * Pitptree.tprocess)
let lib (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
(Parsing.yyparse yytables 2 lexfun lexbuf : Pitptree.tdecl list)
|
c970dbe9b5c7f329f8c27fa779681dcdd11247709d5c4567927820c605ae4b3c | larcenists/larceny | srfi-69-test.sps | Test suite for SRFI 69
;
$ Id$
(import (rnrs base)
(rnrs io simple)
(rnrs unicode)
(rnrs sorting)
(rnrs arithmetic fixnums)
(srfi :69 basic-hash-tables))
(define (writeln . xs)
(for-each display xs)
(newline))
(define (fail token . more)
(writeln "Error: test failed: " token)
#f)
(define ht1equal (make-hash-table))
(define ht2equal (make-hash-table equal?))
(define ht3equal (make-hash-table equal? hash))
(define ht2eqv (make-hash-table eqv?))
(define ht3eqv (make-hash-table eqv? hash))
(define ht2eq (make-hash-table eq?))
(define ht3eq (make-hash-table eq? hash))
(define ht3string= (make-hash-table string=? string-hash))
(define ht3string-ci= (make-hash-table string-ci=? string-ci-hash))
(define ht3fx= (make-hash-table fx=? values))
(define ht4equal (alist->hash-table '()))
(define ht5equal (alist->hash-table '() equal?))
(define ht6equal (alist->hash-table '() equal? hash))
(define ht5eqv (alist->hash-table '() eqv?))
(define ht6eqv (alist->hash-table '() eqv? hash))
(define ht5eq (alist->hash-table '() eq?))
(define ht6eq (alist->hash-table '() eq? hash))
(define ht6string= (alist->hash-table '() string=? string-hash))
(define ht6string-ci= (alist->hash-table '() string-ci=? string-ci-hash))
(define ht6fx= (alist->hash-table '() fx=? values))
(define (test-tables)
(list ht1equal ht2equal ht3equal
ht2eqv ht3eqv
ht2eq ht3eq
ht3string= ht3string-ci= ht3fx=
ht4equal ht5equal ht6equal
ht5eqv ht6eqv
ht5eq ht6eq
ht6string= ht6string-ci= ht6fx=))
(define (test-tables-general&nonempty)
(list ht4equal ht5equal ht6equal
ht5eqv ht6eqv
ht5eq ht6eq))
(or (equal? (map hash-table? (test-tables))
(map (lambda (x) #t) (test-tables)))
(fail 'hash-table?))
(or (equal? (map hash-table-size (test-tables))
(map (lambda (x) 0) (test-tables)))
(fail 'alist->hash-table:1))
(set! ht4equal (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))))
(set! ht5equal (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))
equal?))
(set! ht6equal (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))
equal? hash))
(set! ht5eqv (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))
eqv?))
(set! ht6eqv (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))
eqv? hash))
(set! ht5eq (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))
eq?))
(set! ht6eq (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))
eq? hash))
(set! ht6string=
(alist->hash-table '(("a" 11) ("b" 12) ("cee" 13) ("d" 14))
string=? string-hash))
(set! ht6string-ci=
(alist->hash-table '(("a" 11) ("b" 12) ("CeE" 13) ("d" 14))
string-ci=? string-ci-hash))
(set! ht6fx= (alist->hash-table '((101 201) (102 202) (103 203) (104 204))
fx=? values))
(or (equal? (map hash-table-size (test-tables))
'(0 0 0 0 0 0 0 0 0 0 4 4 4 4 4 4 4 4 4 4))
(fail 'alist->hash-table:2))
(or (equal? (map hash-table-equivalence-function (test-tables))
(list equal? equal? equal? eqv? eqv? eq? eq?
string=? string-ci=? fx=?
equal? equal? equal? eqv? eqv? eq? eq?
string=? string-ci=? fx=?))
(fail 'hash-table-equivalence-function:1))
(or (equal? (map hash-table-hash-function
(list ht1equal ht2equal ht3equal
ht3eqv ht3eq ht3string= ht3string-ci= ht3fx=))
(list hash hash hash hash hash
string-hash string-ci-hash values))
(fail 'hash-table-hash-function:1))
(or (equal? (map (lambda (ht)
(hash-table-ref ht 'cee))
(test-tables-general&nonempty))
'((13) (13) (13) (13) (13) (13) (13)))
(fail 'hash-table-ref:1))
(or (equal? (map (lambda (ht)
(hash-table-ref ht 47.8))
(list ht4equal ht5equal ht6equal ht5eqv ht6eqv))
'((14) (14) (14) (14) (14)))
(fail 'hash-table-ref:2))
(or (equal? (map (lambda (ht)
(hash-table-ref ht "cee" (lambda () #f)))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(#f #f #f #f #f #f #f (13) (13)))
(fail 'hash-table-ref:3))
(or (equal? (map (lambda (ht)
(hash-table-ref ht "CeE" (lambda () 99)))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(99 99 99 99 99 99 99 99 (13)))
(fail 'hash-table-ref:4))
(or (equal? (map (lambda (ht)
(hash-table-ref/default ht "CeE" 97))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(97 97 97 97 97 97 97 97 (13)))
(fail 'hash-table-ref:5))
(for-each (lambda (ht) (hash-table-set! ht "cee" 'see))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
(or (equal? (map hash-table-size
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(5 5 5 5 5 5 5 4 4))
(fail 'hash-table-set!:1))
(for-each (lambda (ht) (hash-table-delete! ht (string #\b)))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
(or (equal? (map hash-table-size
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(4 4 4 5 5 5 5 3 3))
(fail 'hash-table-delete!:1))
(or (equal? (map (lambda (ht) (hash-table-exists? ht "om"))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(#f #f #f #f #f #f #f #f #f))
(fail 'hash-table-exists?:1))
(or (equal? (map (lambda (ht) (hash-table-exists? ht "cee"))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(#t #t #t #f #f #f #f #t #t))
(fail 'hash-table-exists?:2))
(for-each (lambda (ht) (hash-table-update! ht 'a car))
(test-tables-general&nonempty))
(or (equal? (map (lambda (ht) (hash-table-ref/default ht 'a #f))
(test-tables-general&nonempty))
'(11 11 11 11 11 11 11))
(fail 'hash-table-update!:1))
(or (equal? (map hash-table-size (test-tables))
'(0 0 0 0 0 0 0 0 0 0 4 4 4 5 5 5 5 3 3 4))
(fail 'hash-table-size:1))
This is slightly flaky , because hash might hash two keys
;;; to the same value. In particular, a symbol might be hashed
;;; the same as its print string.
(define (canonical-order? x y)
(let ((i (hash x))
(j (hash y)))
(or (< i j)
(and (= i j) (symbol? x) (string? y)))))
(define (canonical-order lis)
(list-sort canonical-order? lis))
(or (equal? (map canonical-order
(map hash-table-keys (test-tables)))
(map canonical-order
'(() () () () () () () () () ()
(a cee 47.8 "cee")
(a cee 47.8 "cee")
(a cee 47.8 "cee")
(a "b" cee 47.8 "cee")
(a "b" cee 47.8 "cee")
(a "b" cee 47.8 "cee")
(a "b" cee 47.8 "cee")
("a" "cee" "d")
("a" "CeE" "d")
(101 102 103 104))))
(fail 'hash-table-keys:1))
(or (equal? (map canonical-order
(map hash-table-values (test-tables)))
(map canonical-order
'(() () () () () () () () () ()
(see 11 (13) (14))
(see 11 (13) (14))
(see 11 (13) (14))
(see 11 (12) (13) (14))
(see 11 (12) (13) (14))
(see 11 (12) (13) (14))
(see 11 (12) (13) (14))
(see (11) (14))
(see (11) (14))
((201) (202) (203) (204)))))
(fail 'hash-table-values:1))
(let ((keys '())
(vals '()))
(hash-table-walk ht4equal
(lambda (key val)
(set! keys (cons key keys))
(set! vals (cons val vals))))
(or (and (equal? (canonical-order keys)
(canonical-order (hash-table-keys ht4equal)))
(equal? (canonical-order vals)
(canonical-order (hash-table-values ht4equal))))
(fail 'hash-table-walk:1)))
(or (and (equal? (canonical-order
(hash-table-fold ht4equal
(lambda (key val x) (cons key x))
'()))
(canonical-order (hash-table-keys ht4equal)))
(equal? (canonical-order
(hash-table-fold ht4equal
(lambda (key val x) (cons val x))
'()))
(canonical-order (hash-table-values ht4equal))))
(fail 'hash-table-fold:1))
; Not yet tested:
;
; hash-table->alist
; hash-table-copy
; hash-table-merge!
;
; hash
; string-hash
; string-ci-hash
; hash-by-identity
(writeln "Done (but these tests are incomplete).")
| null | https://raw.githubusercontent.com/larcenists/larceny/fef550c7d3923deb7a5a1ccd5a628e54cf231c75/lib/SRFI/test/srfi-69-test.sps | scheme |
to the same value. In particular, a symbol might be hashed
the same as its print string.
Not yet tested:
hash-table->alist
hash-table-copy
hash-table-merge!
hash
string-hash
string-ci-hash
hash-by-identity | Test suite for SRFI 69
$ Id$
(import (rnrs base)
(rnrs io simple)
(rnrs unicode)
(rnrs sorting)
(rnrs arithmetic fixnums)
(srfi :69 basic-hash-tables))
(define (writeln . xs)
(for-each display xs)
(newline))
(define (fail token . more)
(writeln "Error: test failed: " token)
#f)
(define ht1equal (make-hash-table))
(define ht2equal (make-hash-table equal?))
(define ht3equal (make-hash-table equal? hash))
(define ht2eqv (make-hash-table eqv?))
(define ht3eqv (make-hash-table eqv? hash))
(define ht2eq (make-hash-table eq?))
(define ht3eq (make-hash-table eq? hash))
(define ht3string= (make-hash-table string=? string-hash))
(define ht3string-ci= (make-hash-table string-ci=? string-ci-hash))
(define ht3fx= (make-hash-table fx=? values))
(define ht4equal (alist->hash-table '()))
(define ht5equal (alist->hash-table '() equal?))
(define ht6equal (alist->hash-table '() equal? hash))
(define ht5eqv (alist->hash-table '() eqv?))
(define ht6eqv (alist->hash-table '() eqv? hash))
(define ht5eq (alist->hash-table '() eq?))
(define ht6eq (alist->hash-table '() eq? hash))
(define ht6string= (alist->hash-table '() string=? string-hash))
(define ht6string-ci= (alist->hash-table '() string-ci=? string-ci-hash))
(define ht6fx= (alist->hash-table '() fx=? values))
(define (test-tables)
(list ht1equal ht2equal ht3equal
ht2eqv ht3eqv
ht2eq ht3eq
ht3string= ht3string-ci= ht3fx=
ht4equal ht5equal ht6equal
ht5eqv ht6eqv
ht5eq ht6eq
ht6string= ht6string-ci= ht6fx=))
(define (test-tables-general&nonempty)
(list ht4equal ht5equal ht6equal
ht5eqv ht6eqv
ht5eq ht6eq))
(or (equal? (map hash-table? (test-tables))
(map (lambda (x) #t) (test-tables)))
(fail 'hash-table?))
(or (equal? (map hash-table-size (test-tables))
(map (lambda (x) 0) (test-tables)))
(fail 'alist->hash-table:1))
(set! ht4equal (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))))
(set! ht5equal (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))
equal?))
(set! ht6equal (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))
equal? hash))
(set! ht5eqv (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))
eqv?))
(set! ht6eqv (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))
eqv? hash))
(set! ht5eq (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))
eq?))
(set! ht6eq (alist->hash-table '((a 11) ("b" 12) (cee 13) (47.8 14))
eq? hash))
(set! ht6string=
(alist->hash-table '(("a" 11) ("b" 12) ("cee" 13) ("d" 14))
string=? string-hash))
(set! ht6string-ci=
(alist->hash-table '(("a" 11) ("b" 12) ("CeE" 13) ("d" 14))
string-ci=? string-ci-hash))
(set! ht6fx= (alist->hash-table '((101 201) (102 202) (103 203) (104 204))
fx=? values))
(or (equal? (map hash-table-size (test-tables))
'(0 0 0 0 0 0 0 0 0 0 4 4 4 4 4 4 4 4 4 4))
(fail 'alist->hash-table:2))
(or (equal? (map hash-table-equivalence-function (test-tables))
(list equal? equal? equal? eqv? eqv? eq? eq?
string=? string-ci=? fx=?
equal? equal? equal? eqv? eqv? eq? eq?
string=? string-ci=? fx=?))
(fail 'hash-table-equivalence-function:1))
(or (equal? (map hash-table-hash-function
(list ht1equal ht2equal ht3equal
ht3eqv ht3eq ht3string= ht3string-ci= ht3fx=))
(list hash hash hash hash hash
string-hash string-ci-hash values))
(fail 'hash-table-hash-function:1))
(or (equal? (map (lambda (ht)
(hash-table-ref ht 'cee))
(test-tables-general&nonempty))
'((13) (13) (13) (13) (13) (13) (13)))
(fail 'hash-table-ref:1))
(or (equal? (map (lambda (ht)
(hash-table-ref ht 47.8))
(list ht4equal ht5equal ht6equal ht5eqv ht6eqv))
'((14) (14) (14) (14) (14)))
(fail 'hash-table-ref:2))
(or (equal? (map (lambda (ht)
(hash-table-ref ht "cee" (lambda () #f)))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(#f #f #f #f #f #f #f (13) (13)))
(fail 'hash-table-ref:3))
(or (equal? (map (lambda (ht)
(hash-table-ref ht "CeE" (lambda () 99)))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(99 99 99 99 99 99 99 99 (13)))
(fail 'hash-table-ref:4))
(or (equal? (map (lambda (ht)
(hash-table-ref/default ht "CeE" 97))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(97 97 97 97 97 97 97 97 (13)))
(fail 'hash-table-ref:5))
(for-each (lambda (ht) (hash-table-set! ht "cee" 'see))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
(or (equal? (map hash-table-size
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(5 5 5 5 5 5 5 4 4))
(fail 'hash-table-set!:1))
(for-each (lambda (ht) (hash-table-delete! ht (string #\b)))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
(or (equal? (map hash-table-size
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(4 4 4 5 5 5 5 3 3))
(fail 'hash-table-delete!:1))
(or (equal? (map (lambda (ht) (hash-table-exists? ht "om"))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(#f #f #f #f #f #f #f #f #f))
(fail 'hash-table-exists?:1))
(or (equal? (map (lambda (ht) (hash-table-exists? ht "cee"))
(append (test-tables-general&nonempty)
(list ht6string= ht6string-ci=)))
'(#t #t #t #f #f #f #f #t #t))
(fail 'hash-table-exists?:2))
(for-each (lambda (ht) (hash-table-update! ht 'a car))
(test-tables-general&nonempty))
(or (equal? (map (lambda (ht) (hash-table-ref/default ht 'a #f))
(test-tables-general&nonempty))
'(11 11 11 11 11 11 11))
(fail 'hash-table-update!:1))
(or (equal? (map hash-table-size (test-tables))
'(0 0 0 0 0 0 0 0 0 0 4 4 4 5 5 5 5 3 3 4))
(fail 'hash-table-size:1))
This is slightly flaky , because hash might hash two keys
(define (canonical-order? x y)
(let ((i (hash x))
(j (hash y)))
(or (< i j)
(and (= i j) (symbol? x) (string? y)))))
(define (canonical-order lis)
(list-sort canonical-order? lis))
(or (equal? (map canonical-order
(map hash-table-keys (test-tables)))
(map canonical-order
'(() () () () () () () () () ()
(a cee 47.8 "cee")
(a cee 47.8 "cee")
(a cee 47.8 "cee")
(a "b" cee 47.8 "cee")
(a "b" cee 47.8 "cee")
(a "b" cee 47.8 "cee")
(a "b" cee 47.8 "cee")
("a" "cee" "d")
("a" "CeE" "d")
(101 102 103 104))))
(fail 'hash-table-keys:1))
(or (equal? (map canonical-order
(map hash-table-values (test-tables)))
(map canonical-order
'(() () () () () () () () () ()
(see 11 (13) (14))
(see 11 (13) (14))
(see 11 (13) (14))
(see 11 (12) (13) (14))
(see 11 (12) (13) (14))
(see 11 (12) (13) (14))
(see 11 (12) (13) (14))
(see (11) (14))
(see (11) (14))
((201) (202) (203) (204)))))
(fail 'hash-table-values:1))
(let ((keys '())
(vals '()))
(hash-table-walk ht4equal
(lambda (key val)
(set! keys (cons key keys))
(set! vals (cons val vals))))
(or (and (equal? (canonical-order keys)
(canonical-order (hash-table-keys ht4equal)))
(equal? (canonical-order vals)
(canonical-order (hash-table-values ht4equal))))
(fail 'hash-table-walk:1)))
(or (and (equal? (canonical-order
(hash-table-fold ht4equal
(lambda (key val x) (cons key x))
'()))
(canonical-order (hash-table-keys ht4equal)))
(equal? (canonical-order
(hash-table-fold ht4equal
(lambda (key val x) (cons val x))
'()))
(canonical-order (hash-table-values ht4equal))))
(fail 'hash-table-fold:1))
(writeln "Done (but these tests are incomplete).")
|
3e0bb157f3ec9f5f1189a7e29ad8e0e0820e8b5b22bd0ebfec92ded15b8c77ab | andreabedini/foliage | Pages.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingVia #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Foliage.Pages
( allPackagesPageTemplate,
allPackageVersionsPageTemplate,
packageVersionPageTemplate,
makeAllPackagesPage,
makePackageVersionPage,
makeAllPackageVersionsPage,
makeIndexPage,
)
where
import Data.Aeson (KeyValue ((.=)), ToJSON, object)
import Data.Function (on, (&))
import Data.List (sortOn)
import Data.List.NonEmpty qualified as NE
import Data.Maybe (fromMaybe, listToMaybe)
import Data.Ord (Down (Down), comparing)
import Data.Text.Lazy.IO.Utf8 qualified as TL
import Data.Time (UTCTime)
import Data.Time.Clock.POSIX (POSIXTime, utcTimeToPOSIXSeconds)
import Development.Shake (Action, traced)
import Distribution.Aeson (jsonGenericPackageDescription)
import Distribution.Package (PackageIdentifier (pkgName, pkgVersion))
import Distribution.Pretty (prettyShow)
import Foliage.Meta (PackageVersionSource)
import Foliage.Meta.Aeson ()
import Foliage.PreparePackageVersion (PreparedPackageVersion (..))
import Foliage.Utils.Aeson (MyAesonEncoding (..))
import GHC.Generics (Generic)
import System.Directory qualified as IO
import System.FilePath ((</>))
import Text.Mustache (Template)
import Text.Mustache.Compile.TH (compileMustacheDir)
import Text.Mustache.Render (renderMustache)
makeIndexPage :: FilePath -> Action ()
makeIndexPage outputDir =
traced "webpages / index" $ do
IO.createDirectoryIfMissing True outputDir
TL.writeFile (outputDir </> "index.html") $
renderMustache indexPageTemplate $
object []
data AllPackagesPageEntry = AllPackagesPageEntry
{ allPackagesPageEntryPkgId :: PackageIdentifier,
allPackagesPageEntryTimestamp :: UTCTime,
allPackagesPageEntryTimestampPosix :: POSIXTime,
allPackagesPageEntrySource :: PackageVersionSource,
allPackagesPageEntryLatestRevisionTimestamp :: Maybe UTCTime
}
deriving stock (Generic)
deriving (ToJSON) via MyAesonEncoding AllPackagesPageEntry
makeAllPackagesPage :: UTCTime -> FilePath -> [PreparedPackageVersion] -> Action ()
makeAllPackagesPage currentTime outputDir packageVersions =
traced "webpages / all-packages" $ do
IO.createDirectoryIfMissing True (outputDir </> "all-packages")
TL.writeFile (outputDir </> "all-packages" </> "index.html") $
renderMustache allPackagesPageTemplate $
object ["packages" .= packages]
where
packages =
packageVersions
-- group package versions by package name
& NE.groupBy ((==) `on` (pkgName . pkgId))
-- for each package name pick the most recent version
& map
( \group ->
group
-- sort them from the most recent version to the least recent
& NE.sortBy (comparing $ Down . pkgVersion . pkgId)
-- pick the most recent version
& NE.head
-- turn it into the template data
& ( \(PreparedPackageVersion {pkgId, pkgTimestamp, cabalFileRevisions, pkgVersionSource}) ->
AllPackagesPageEntry
{ allPackagesPageEntryPkgId = pkgId,
allPackagesPageEntryTimestamp = fromMaybe currentTime pkgTimestamp,
allPackagesPageEntryTimestampPosix = utcTimeToPOSIXSeconds (fromMaybe currentTime pkgTimestamp),
allPackagesPageEntrySource = pkgVersionSource,
allPackagesPageEntryLatestRevisionTimestamp = fst <$> listToMaybe cabalFileRevisions
}
)
)
-- sort packages by pkgId
& sortOn allPackagesPageEntryPkgId
data AllPackageVersionsPageEntry
= AllPackageVersionsPageEntryPackage
{ allPackageVersionsPageEntryPkgId :: PackageIdentifier,
allPackageVersionsPageEntryTimestamp :: UTCTime,
allPackageVersionsPageEntryTimestampPosix :: POSIXTime,
allPackageVersionsPageEntrySource :: PackageVersionSource
}
| AllPackageVersionsPageEntryRevision
{ allPackageVersionsPageEntryPkgId :: PackageIdentifier,
allPackageVersionsPageEntryTimestamp :: UTCTime,
allPackageVersionsPageEntryTimestampPosix :: POSIXTime
}
deriving stock (Generic)
deriving (ToJSON) via MyAesonEncoding AllPackageVersionsPageEntry
makeAllPackageVersionsPage :: UTCTime -> FilePath -> [PreparedPackageVersion] -> Action ()
makeAllPackageVersionsPage currentTime outputDir packageVersions =
traced "webpages / all-package-versions" $ do
IO.createDirectoryIfMissing True (outputDir </> "all-package-versions")
TL.writeFile (outputDir </> "all-package-versions" </> "index.html") $
renderMustache allPackageVersionsPageTemplate $
object ["entries" .= entries]
where
entries =
-- collect all cabal file revisions including the original cabal file
foldMap
( \PreparedPackageVersion {pkgId, pkgTimestamp, pkgVersionSource, cabalFileRevisions} ->
-- original cabal file
AllPackageVersionsPageEntryPackage
{ allPackageVersionsPageEntryPkgId = pkgId,
allPackageVersionsPageEntryTimestamp = fromMaybe currentTime pkgTimestamp,
allPackageVersionsPageEntryTimestampPosix = utcTimeToPOSIXSeconds (fromMaybe currentTime pkgTimestamp),
allPackageVersionsPageEntrySource = pkgVersionSource
}
-- list of revisions
: [ AllPackageVersionsPageEntryRevision
{ allPackageVersionsPageEntryPkgId = pkgId,
allPackageVersionsPageEntryTimestamp = revisionTimestamp,
allPackageVersionsPageEntryTimestampPosix = utcTimeToPOSIXSeconds revisionTimestamp
}
| (revisionTimestamp, _) <- cabalFileRevisions
]
)
packageVersions
-- sort them by timestamp
& sortOn (Down . allPackageVersionsPageEntryTimestamp)
makePackageVersionPage :: FilePath -> PreparedPackageVersion -> Action ()
makePackageVersionPage outputDir PreparedPackageVersion {pkgId, pkgTimestamp, pkgVersionSource, pkgDesc, cabalFileRevisions} = do
traced ("webpages / package / " ++ prettyShow pkgId) $ do
IO.createDirectoryIfMissing True (outputDir </> "package" </> prettyShow pkgId)
TL.writeFile (outputDir </> "package" </> prettyShow pkgId </> "index.html") $
renderMustache packageVersionPageTemplate $
object
[ "pkgVersionSource" .= pkgVersionSource,
"cabalFileRevisions" .= map fst cabalFileRevisions,
"pkgDesc" .= jsonGenericPackageDescription pkgDesc,
"pkgTimestamp" .= pkgTimestamp
]
indexPageTemplate :: Template
indexPageTemplate = $(compileMustacheDir "index" "templates")
allPackagesPageTemplate :: Template
allPackagesPageTemplate = $(compileMustacheDir "allPackages" "templates")
allPackageVersionsPageTemplate :: Template
allPackageVersionsPageTemplate = $(compileMustacheDir "allPackageVersions" "templates")
packageVersionPageTemplate :: Template
packageVersionPageTemplate = $(compileMustacheDir "packageVersion" "templates")
| null | https://raw.githubusercontent.com/andreabedini/foliage/20049b1e814692731b35f8aec1e3852934ca637a/app/Foliage/Pages.hs | haskell | # LANGUAGE OverloadedStrings #
group package versions by package name
for each package name pick the most recent version
sort them from the most recent version to the least recent
pick the most recent version
turn it into the template data
sort packages by pkgId
collect all cabal file revisions including the original cabal file
original cabal file
list of revisions
sort them by timestamp | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingVia #
# LANGUAGE TemplateHaskell #
module Foliage.Pages
( allPackagesPageTemplate,
allPackageVersionsPageTemplate,
packageVersionPageTemplate,
makeAllPackagesPage,
makePackageVersionPage,
makeAllPackageVersionsPage,
makeIndexPage,
)
where
import Data.Aeson (KeyValue ((.=)), ToJSON, object)
import Data.Function (on, (&))
import Data.List (sortOn)
import Data.List.NonEmpty qualified as NE
import Data.Maybe (fromMaybe, listToMaybe)
import Data.Ord (Down (Down), comparing)
import Data.Text.Lazy.IO.Utf8 qualified as TL
import Data.Time (UTCTime)
import Data.Time.Clock.POSIX (POSIXTime, utcTimeToPOSIXSeconds)
import Development.Shake (Action, traced)
import Distribution.Aeson (jsonGenericPackageDescription)
import Distribution.Package (PackageIdentifier (pkgName, pkgVersion))
import Distribution.Pretty (prettyShow)
import Foliage.Meta (PackageVersionSource)
import Foliage.Meta.Aeson ()
import Foliage.PreparePackageVersion (PreparedPackageVersion (..))
import Foliage.Utils.Aeson (MyAesonEncoding (..))
import GHC.Generics (Generic)
import System.Directory qualified as IO
import System.FilePath ((</>))
import Text.Mustache (Template)
import Text.Mustache.Compile.TH (compileMustacheDir)
import Text.Mustache.Render (renderMustache)
makeIndexPage :: FilePath -> Action ()
makeIndexPage outputDir =
traced "webpages / index" $ do
IO.createDirectoryIfMissing True outputDir
TL.writeFile (outputDir </> "index.html") $
renderMustache indexPageTemplate $
object []
data AllPackagesPageEntry = AllPackagesPageEntry
{ allPackagesPageEntryPkgId :: PackageIdentifier,
allPackagesPageEntryTimestamp :: UTCTime,
allPackagesPageEntryTimestampPosix :: POSIXTime,
allPackagesPageEntrySource :: PackageVersionSource,
allPackagesPageEntryLatestRevisionTimestamp :: Maybe UTCTime
}
deriving stock (Generic)
deriving (ToJSON) via MyAesonEncoding AllPackagesPageEntry
makeAllPackagesPage :: UTCTime -> FilePath -> [PreparedPackageVersion] -> Action ()
makeAllPackagesPage currentTime outputDir packageVersions =
traced "webpages / all-packages" $ do
IO.createDirectoryIfMissing True (outputDir </> "all-packages")
TL.writeFile (outputDir </> "all-packages" </> "index.html") $
renderMustache allPackagesPageTemplate $
object ["packages" .= packages]
where
packages =
packageVersions
& NE.groupBy ((==) `on` (pkgName . pkgId))
& map
( \group ->
group
& NE.sortBy (comparing $ Down . pkgVersion . pkgId)
& NE.head
& ( \(PreparedPackageVersion {pkgId, pkgTimestamp, cabalFileRevisions, pkgVersionSource}) ->
AllPackagesPageEntry
{ allPackagesPageEntryPkgId = pkgId,
allPackagesPageEntryTimestamp = fromMaybe currentTime pkgTimestamp,
allPackagesPageEntryTimestampPosix = utcTimeToPOSIXSeconds (fromMaybe currentTime pkgTimestamp),
allPackagesPageEntrySource = pkgVersionSource,
allPackagesPageEntryLatestRevisionTimestamp = fst <$> listToMaybe cabalFileRevisions
}
)
)
& sortOn allPackagesPageEntryPkgId
data AllPackageVersionsPageEntry
= AllPackageVersionsPageEntryPackage
{ allPackageVersionsPageEntryPkgId :: PackageIdentifier,
allPackageVersionsPageEntryTimestamp :: UTCTime,
allPackageVersionsPageEntryTimestampPosix :: POSIXTime,
allPackageVersionsPageEntrySource :: PackageVersionSource
}
| AllPackageVersionsPageEntryRevision
{ allPackageVersionsPageEntryPkgId :: PackageIdentifier,
allPackageVersionsPageEntryTimestamp :: UTCTime,
allPackageVersionsPageEntryTimestampPosix :: POSIXTime
}
deriving stock (Generic)
deriving (ToJSON) via MyAesonEncoding AllPackageVersionsPageEntry
makeAllPackageVersionsPage :: UTCTime -> FilePath -> [PreparedPackageVersion] -> Action ()
makeAllPackageVersionsPage currentTime outputDir packageVersions =
traced "webpages / all-package-versions" $ do
IO.createDirectoryIfMissing True (outputDir </> "all-package-versions")
TL.writeFile (outputDir </> "all-package-versions" </> "index.html") $
renderMustache allPackageVersionsPageTemplate $
object ["entries" .= entries]
where
entries =
foldMap
( \PreparedPackageVersion {pkgId, pkgTimestamp, pkgVersionSource, cabalFileRevisions} ->
AllPackageVersionsPageEntryPackage
{ allPackageVersionsPageEntryPkgId = pkgId,
allPackageVersionsPageEntryTimestamp = fromMaybe currentTime pkgTimestamp,
allPackageVersionsPageEntryTimestampPosix = utcTimeToPOSIXSeconds (fromMaybe currentTime pkgTimestamp),
allPackageVersionsPageEntrySource = pkgVersionSource
}
: [ AllPackageVersionsPageEntryRevision
{ allPackageVersionsPageEntryPkgId = pkgId,
allPackageVersionsPageEntryTimestamp = revisionTimestamp,
allPackageVersionsPageEntryTimestampPosix = utcTimeToPOSIXSeconds revisionTimestamp
}
| (revisionTimestamp, _) <- cabalFileRevisions
]
)
packageVersions
& sortOn (Down . allPackageVersionsPageEntryTimestamp)
makePackageVersionPage :: FilePath -> PreparedPackageVersion -> Action ()
makePackageVersionPage outputDir PreparedPackageVersion {pkgId, pkgTimestamp, pkgVersionSource, pkgDesc, cabalFileRevisions} = do
traced ("webpages / package / " ++ prettyShow pkgId) $ do
IO.createDirectoryIfMissing True (outputDir </> "package" </> prettyShow pkgId)
TL.writeFile (outputDir </> "package" </> prettyShow pkgId </> "index.html") $
renderMustache packageVersionPageTemplate $
object
[ "pkgVersionSource" .= pkgVersionSource,
"cabalFileRevisions" .= map fst cabalFileRevisions,
"pkgDesc" .= jsonGenericPackageDescription pkgDesc,
"pkgTimestamp" .= pkgTimestamp
]
indexPageTemplate :: Template
indexPageTemplate = $(compileMustacheDir "index" "templates")
allPackagesPageTemplate :: Template
allPackagesPageTemplate = $(compileMustacheDir "allPackages" "templates")
allPackageVersionsPageTemplate :: Template
allPackageVersionsPageTemplate = $(compileMustacheDir "allPackageVersions" "templates")
packageVersionPageTemplate :: Template
packageVersionPageTemplate = $(compileMustacheDir "packageVersion" "templates")
|
5b94b02f578866982fef1873249c2a3e9a9adda2a409a79451321fc1cae75f4c | xapi-project/xen-api | xapi_pif_helpers.ml |
* Copyright ( C ) Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
open API
module D = Debug.Make (struct let name = "xapi_pif_helpers" end)
open D
(* Any given PIF should belong to only one of the following types *)
type pif_type_t =
| Tunnel_access of ref_tunnel
| VLAN_untagged of ref_VLAN
| Network_sriov_logical of ref_network_sriov
| Bond_master of ref_Bond
| Physical of pIF_t
(** Human-readable name of the [pif_type_t] constructor, e.g. for logging. *)
let pif_type_to_string = function
  | Tunnel_access _ ->
      "Tunnel_access"
  | VLAN_untagged _ ->
      "VLAN_untagged"
  | Network_sriov_logical _ ->
      "Network_sriov_logical"
  | Bond_master _ ->
      "Bond_master"
  | Physical _ ->
      "Physical"
(* [is_*_pif] classifiers: each inspects one field of a PIF record and
   returns [Some pif_type] when the record is of that kind, [None]
   otherwise.  They are chained with [(>>=)] below, so the first match
   wins. *)

(* A PIF is a tunnel access PIF when at least one tunnel lists it as its
   access PIF. *)
let is_tunnel_access_pif pif_rec =
  match pif_rec.API.pIF_tunnel_access_PIF_of with
  | tunnel :: _ ->
      Some (Tunnel_access tunnel)
  | _ ->
      None

(* A PIF is a VLAN master when its [VLAN_master_of] reference is non-null. *)
let is_vlan_master_pif pif_rec =
  let vlan = pif_rec.API.pIF_VLAN_master_of in
  if vlan = Ref.null then None else Some (VLAN_untagged vlan)

(* A PIF is an SR-IOV logical PIF when some Network_sriov lists it as its
   logical PIF. *)
let is_sriov_logical_pif pif_rec =
  match pif_rec.API.pIF_sriov_logical_PIF_of with
  | sriov :: _ ->
      Some (Network_sriov_logical sriov)
  | _ ->
      None

(* A PIF is a bond master when some Bond lists it as its master. *)
let is_bond_master_pif pif_rec =
  match pif_rec.API.pIF_bond_master_of with
  | bond :: _ ->
      Some (Bond_master bond)
  | _ ->
      None

(* [Physical] carries the whole record so callers can inspect, e.g.,
   [pIF_bond_slave_of] without another DB lookup. *)
let is_physical_pif pif_rec =
  if pif_rec.API.pIF_physical then Some (Physical pif_rec) else None
(* Short-circuiting chain over classifiers: keep the first classification
   that succeeded, otherwise apply the next classifier [f] to the same
   record.  The record is threaded through unchanged. *)
let ( >>= ) (ret, pif_rec) f =
  match ret with
  | Some _ ->
      (ret, pif_rec)
  | None ->
      (f pif_rec, pif_rec)
(** Classify [pif_rec] into exactly one [pif_type_t].  Classifiers are
    tried in priority order (tunnel, VLAN, SR-IOV, bond, physical); raises
    an [internal_error] if none of them matches. *)
let get_pif_type pif_rec =
  match
    (None, pif_rec)
    >>= is_tunnel_access_pif
    >>= is_vlan_master_pif
    >>= is_sriov_logical_pif
    >>= is_bond_master_pif
    >>= is_physical_pif
  with
  | Some v, _ ->
      v
  | None, _ ->
      raise
        Api_errors.(
          Server_error
            ( internal_error
            , [
                Printf.sprintf "Cannot calculate PIF type of %s"
                  pif_rec.API.pIF_uuid
              ]
            )
        )
(* The root PIF underneath should be Physical or Bond_master.
 * This function aims to get a list of types of the PIFs underneath the given PIF *)
let get_pif_topo ~__context ~pif_rec =
  (* Walk from [pif_rec] down through its transport / tagged / physical
     PIFs, collecting each layer's type.  [Bond_master] and [Physical] are
     the terminating cases. *)
  let rec get_pif_type_till_root ret pif_rec =
    let pif_t = get_pif_type pif_rec in
    match pif_t with
    | Tunnel_access tunnel ->
        let tunnel_rec = Db.Tunnel.get_record ~__context ~self:tunnel in
        let pif_ref = tunnel_rec.API.tunnel_transport_PIF in
        let pif_rec = Db.PIF.get_record ~__context ~self:pif_ref in
        get_pif_type_till_root (pif_t :: ret) pif_rec
    | VLAN_untagged vlan ->
        let vlan_rec = Db.VLAN.get_record ~__context ~self:vlan in
        let pif_ref = vlan_rec.API.vLAN_tagged_PIF in
        let pif_rec = Db.PIF.get_record ~__context ~self:pif_ref in
        get_pif_type_till_root (pif_t :: ret) pif_rec
    | Network_sriov_logical sriov ->
        let sriov_rec = Db.Network_sriov.get_record ~__context ~self:sriov in
        let pif_ref = sriov_rec.API.network_sriov_physical_PIF in
        let pif_rec = Db.PIF.get_record ~__context ~self:pif_ref in
        get_pif_type_till_root (pif_t :: ret) pif_rec
    | Bond_master _ | Physical _ ->
        pif_t :: ret
  in
  (* The accumulator ends up root-first; reverse it so the result starts
     with the given PIF's own type. *)
  let pif_t_list = get_pif_type_till_root [] pif_rec in
  let pif_t_list = List.rev pif_t_list in
  pif_t_list
(** Raise if a VLAN may not be created on [tagged_PIF], judging by the
    pre-computed [pif_topo] (see {!get_pif_topo}): rejected on bond slaves,
    on existing VLAN PIFs and on tunnel access PIFs. *)
let vlan_is_allowed_on_pif ~__context ~tagged_PIF ~pif_rec:_ ~pif_topo ~tag:_ =
  match pif_topo with
  | Physical pif_rec :: _ when pif_rec.API.pIF_bond_slave_of <> Ref.null ->
      (* Disallow creating on bond slave *)
      (* Here we rely on the implementation to guarantee that `Physical` is a terminating case *)
      raise
        Api_errors.(
          Server_error
            (cannot_add_vlan_to_bond_slave, [Ref.string_of tagged_PIF])
        )
  | VLAN_untagged _ :: _ ->
      raise Api_errors.(Server_error (pif_is_vlan, [Ref.string_of tagged_PIF]))
  | Tunnel_access _ :: _ ->
      raise
        Api_errors.(
          Server_error (is_tunnel_access_pif, [Ref.string_of tagged_PIF])
        )
  | _ ->
      ()
(** Raise if a tunnel may not be created on [transport_PIF]: rejected on
    bond slaves, tunnel access PIFs, SR-IOV logical PIFs, and VLANs that
    sit on SR-IOV logical PIFs. *)
let tunnel_is_allowed_on_pif ~__context ~transport_PIF =
  let pif_rec = Db.PIF.get_record ~__context ~self:transport_PIF in
  match get_pif_topo ~__context ~pif_rec with
  | Physical pif_rec :: _ when pif_rec.API.pIF_bond_slave_of <> Ref.null ->
      (* Disallow creating on bond slave *)
      (* Here we rely on the implementation to guarantee that `Physical` is a terminating case *)
      raise
        Api_errors.(
          Server_error
            (cannot_add_tunnel_to_bond_slave, [Ref.string_of transport_PIF])
        )
  | Tunnel_access _ :: _ ->
      raise
        Api_errors.(
          Server_error (is_tunnel_access_pif, [Ref.string_of transport_PIF])
        )
  | Network_sriov_logical _ :: _ ->
      raise
        Api_errors.(
          Server_error
            (cannot_add_tunnel_to_sriov_logical, [Ref.string_of transport_PIF])
        )
  | VLAN_untagged _ :: Network_sriov_logical _ :: _ ->
      raise
        Api_errors.(
          Server_error
            ( cannot_add_tunnel_to_vlan_on_sriov_logical
            , [Ref.string_of transport_PIF]
            )
        )
  | _ ->
      ()
(** Raise if a bond may not be created on PIF [self]: rejected when [self]
    is already a slave of an existing bond, carries a VLAN, or is a tunnel
    access or SR-IOV logical PIF. *)
let bond_is_allowed_on_pif ~__context ~self =
  let pif_rec = Db.PIF.get_record ~__context ~self in
  match get_pif_topo ~__context ~pif_rec with
  | Physical pif_rec :: _ when pif_rec.API.pIF_bond_slave_of <> Ref.null ->
      (* Disallow creating on bond slave *)
      (* Here we rely on the implementation to guarantee that `Physical` is a terminating case *)
      let bond = pif_rec.API.pIF_bond_slave_of in
      (* The bond reference may be stale: only fail if it still resolves. *)
      let bonded =
        try
          ignore (Db.Bond.get_uuid ~__context ~self:bond) ;
          true
        with _ -> false
      in
      if bonded then
        raise
          Api_errors.(Server_error (pif_already_bonded, [Ref.string_of self]))
  | VLAN_untagged _ :: _ ->
      raise
        Api_errors.(
          Server_error
            (pif_vlan_exists, [Db.PIF.get_device_name ~__context ~self])
        )
  | Tunnel_access _ :: _ ->
      raise
        Api_errors.(Server_error (is_tunnel_access_pif, [Ref.string_of self]))
  | Network_sriov_logical _ :: _ ->
      raise
        Api_errors.(Server_error (pif_is_sriov_logical, [Ref.string_of self]))
  | _ ->
      ()
(* SR-IOV may only be enabled on a physical PIF that is not already
   SR-IOV-enabled and whose device advertises the "sriov" capability. *)
let sriov_is_allowed_on_pif ~__context ~physical_PIF ~pif_rec =
  (match get_pif_type pif_rec with
  | Physical _ ->
      ()
  | _ ->
      raise
        Api_errors.(
          Server_error (pif_is_not_physical, [Ref.string_of physical_PIF])
        )
  ) ;
  if pif_rec.API.pIF_sriov_physical_PIF_of <> [] then
    raise
      Api_errors.(
        Server_error
          (network_sriov_already_enabled, [Ref.string_of physical_PIF])
      ) ;
  if not (List.mem "sriov" pif_rec.API.pIF_capabilities) then
    raise
      Api_errors.(
        Server_error (pif_is_not_sriov_capable, [Ref.string_of physical_PIF])
      )
(** Raise [pif_unmanaged] unless [self] is a managed PIF.
    (Replaces the non-idiomatic [... <> true] boolean comparison.) *)
let assert_pif_is_managed ~__context ~self =
  if not (Db.PIF.get_managed ~__context ~self) then
    raise Api_errors.(Server_error (pif_unmanaged, [Ref.string_of self]))
(** Raise [pif_vlan_still_exists] if any VLAN still sits on top of [self]
    (i.e. [self] is the tagged PIF of some VLAN). *)
let assert_not_vlan_slave ~__context ~self =
  let vlans = Db.PIF.get_VLAN_slave_of ~__context ~self in
  debug "PIF %s assert_no_vlans = [ %s ]"
    (Db.PIF.get_uuid ~__context ~self)
    (String.concat "; " (List.map Ref.string_of vlans)) ;
  if vlans <> [] then (
    (* Log the UUIDs of the offending VLANs before failing *)
    List.map (fun self -> Db.VLAN.get_uuid ~__context ~self) vlans
    |> String.concat "; "
    |> debug "PIF has associated VLANs: [ %s ]" ;
    raise
      Api_errors.(Server_error (pif_vlan_still_exists, [Ref.string_of self]))
  )
(** [true] iff the PCI devices backing [pif1] and [pif2] have the same
    vendor id and device id. *)
let is_device_underneath_same_type ~__context pif1 pif2 =
  let get_device_info pif =
    let pci = Db.PIF.get_PCI ~__context ~self:pif in
    let pci_rec = Db.PCI.get_record_internal ~__context ~self:pci in
    (pci_rec.Db_actions.pCI_vendor_id, pci_rec.Db_actions.pCI_device_id)
  in
  get_device_info pif1 = get_device_info pif2
(** The primary IP address of [pif] as a string, according to its
    configured primary address type; [None] when no address is set. *)
let get_primary_address ~__context ~pif =
  match Db.PIF.get_primary_address_type ~__context ~self:pif with
  | `IPv4 -> (
    match Db.PIF.get_IP ~__context ~self:pif with "" -> None | ip -> Some ip
  )
  | `IPv6 ->
      (* An empty IPv6 list yields None *)
      List.nth_opt (Db.PIF.get_IPv6 ~__context ~self:pif) 0
| null | https://raw.githubusercontent.com/xapi-project/xen-api/fa6f118fb512a2c90159368a1a0bc1cc730c895b/ocaml/xapi/xapi_pif_helpers.ml | ocaml | Disallow creating on bond slave
Here we rely on the implementation to guarantee that `Physical` is a terminating case
Disallow creating on bond slave
Here we rely on the implementation to guarantee that `Physical` is a terminating case
Disallow creating on bond slave
Here we rely on the implementation to guarantee that `Physical` is a terminating case |
* Copyright ( C ) Citrix Systems Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation ; version 2.1 only . with the special
* exception on linking described in file LICENSE .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
* Copyright (C) Citrix Systems Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as published
* by the Free Software Foundation; version 2.1 only. with the special
* exception on linking described in file LICENSE.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*)
open API
module D = Debug.Make (struct let name = "xapi_pif_helpers" end)
open D
Any given PIF should belongs only one of the following types
type pif_type_t =
| Tunnel_access of ref_tunnel
| VLAN_untagged of ref_VLAN
| Network_sriov_logical of ref_network_sriov
| Bond_master of ref_Bond
| Physical of pIF_t
let pif_type_to_string = function
| Tunnel_access _ ->
"Tunnel_access"
| VLAN_untagged _ ->
"VLAN_untagged"
| Network_sriov_logical _ ->
"Network_sriov_logical"
| Bond_master _ ->
"Bond_master"
| Physical _ ->
"Physical"
let is_tunnel_access_pif pif_rec =
match pif_rec.API.pIF_tunnel_access_PIF_of with
| tunnel :: _ ->
Some (Tunnel_access tunnel)
| _ ->
None
let is_vlan_master_pif pif_rec =
let vlan = pif_rec.API.pIF_VLAN_master_of in
if vlan = Ref.null then None else Some (VLAN_untagged vlan)
let is_sriov_logical_pif pif_rec =
match pif_rec.API.pIF_sriov_logical_PIF_of with
| sriov :: _ ->
Some (Network_sriov_logical sriov)
| _ ->
None
let is_bond_master_pif pif_rec =
match pif_rec.API.pIF_bond_master_of with
| bond :: _ ->
Some (Bond_master bond)
| _ ->
None
let is_physical_pif pif_rec =
if pif_rec.API.pIF_physical then Some (Physical pif_rec) else None
let ( >>= ) (ret, pif_rec) f =
match (ret, pif_rec) with
| (Some _ as v), _ ->
(v, pif_rec)
| None, _ ->
(f pif_rec, pif_rec)
let get_pif_type pif_rec =
match
(None, pif_rec)
>>= is_tunnel_access_pif
>>= is_vlan_master_pif
>>= is_sriov_logical_pif
>>= is_bond_master_pif
>>= is_physical_pif
with
| Some v, _ ->
v
| None, _ ->
raise
Api_errors.(
Server_error
( internal_error
, [
Printf.sprintf "Cannot calculate PIF type of %s"
pif_rec.API.pIF_uuid
]
)
)
The root PIF underneath should be Physical or Bond_master
* This function aims to get a list of types of the PIFs underneath the given PIF
let get_pif_topo ~__context ~pif_rec =
let rec get_pif_type_till_root ret pif_rec =
let pif_t = get_pif_type pif_rec in
match pif_t with
| Tunnel_access tunnel ->
let tunnel_rec = Db.Tunnel.get_record ~__context ~self:tunnel in
let pif_ref = tunnel_rec.API.tunnel_transport_PIF in
let pif_rec = Db.PIF.get_record ~__context ~self:pif_ref in
get_pif_type_till_root (pif_t :: ret) pif_rec
| VLAN_untagged vlan ->
let vlan_rec = Db.VLAN.get_record ~__context ~self:vlan in
let pif_ref = vlan_rec.API.vLAN_tagged_PIF in
let pif_rec = Db.PIF.get_record ~__context ~self:pif_ref in
get_pif_type_till_root (pif_t :: ret) pif_rec
| Network_sriov_logical sriov ->
let sriov_rec = Db.Network_sriov.get_record ~__context ~self:sriov in
let pif_ref = sriov_rec.API.network_sriov_physical_PIF in
let pif_rec = Db.PIF.get_record ~__context ~self:pif_ref in
get_pif_type_till_root (pif_t :: ret) pif_rec
| Bond_master _ | Physical _ ->
pif_t :: ret
in
let pif_t_list = get_pif_type_till_root [] pif_rec in
let pif_t_list = List.rev pif_t_list in
pif_t_list
let vlan_is_allowed_on_pif ~__context ~tagged_PIF ~pif_rec:_ ~pif_topo ~tag:_ =
match pif_topo with
| Physical pif_rec :: _ when pif_rec.API.pIF_bond_slave_of <> Ref.null ->
raise
Api_errors.(
Server_error
(cannot_add_vlan_to_bond_slave, [Ref.string_of tagged_PIF])
)
| VLAN_untagged _ :: _ ->
raise Api_errors.(Server_error (pif_is_vlan, [Ref.string_of tagged_PIF]))
| Tunnel_access _ :: _ ->
raise
Api_errors.(
Server_error (is_tunnel_access_pif, [Ref.string_of tagged_PIF])
)
| _ ->
()
let tunnel_is_allowed_on_pif ~__context ~transport_PIF =
let pif_rec = Db.PIF.get_record ~__context ~self:transport_PIF in
match get_pif_topo ~__context ~pif_rec with
| Physical pif_rec :: _ when pif_rec.API.pIF_bond_slave_of <> Ref.null ->
raise
Api_errors.(
Server_error
(cannot_add_tunnel_to_bond_slave, [Ref.string_of transport_PIF])
)
| Tunnel_access _ :: _ ->
raise
Api_errors.(
Server_error (is_tunnel_access_pif, [Ref.string_of transport_PIF])
)
| Network_sriov_logical _ :: _ ->
raise
Api_errors.(
Server_error
(cannot_add_tunnel_to_sriov_logical, [Ref.string_of transport_PIF])
)
| VLAN_untagged _ :: Network_sriov_logical _ :: _ ->
raise
Api_errors.(
Server_error
( cannot_add_tunnel_to_vlan_on_sriov_logical
, [Ref.string_of transport_PIF]
)
)
| _ ->
()
let bond_is_allowed_on_pif ~__context ~self =
let pif_rec = Db.PIF.get_record ~__context ~self in
match get_pif_topo ~__context ~pif_rec with
| Physical pif_rec :: _ when pif_rec.API.pIF_bond_slave_of <> Ref.null ->
let bond = pif_rec.API.pIF_bond_slave_of in
let bonded =
try
ignore (Db.Bond.get_uuid ~__context ~self:bond) ;
true
with _ -> false
in
if bonded then
raise
Api_errors.(Server_error (pif_already_bonded, [Ref.string_of self]))
| VLAN_untagged _ :: _ ->
raise
Api_errors.(
Server_error
(pif_vlan_exists, [Db.PIF.get_device_name ~__context ~self])
)
| Tunnel_access _ :: _ ->
raise
Api_errors.(Server_error (is_tunnel_access_pif, [Ref.string_of self]))
| Network_sriov_logical _ :: _ ->
raise
Api_errors.(Server_error (pif_is_sriov_logical, [Ref.string_of self]))
| _ ->
()
let sriov_is_allowed_on_pif ~__context ~physical_PIF ~pif_rec =
let _ =
match get_pif_type pif_rec with
| Physical _ ->
()
| _ ->
raise
Api_errors.(
Server_error (pif_is_not_physical, [Ref.string_of physical_PIF])
)
in
if pif_rec.API.pIF_sriov_physical_PIF_of <> [] then
raise
Api_errors.(
Server_error
(network_sriov_already_enabled, [Ref.string_of physical_PIF])
) ;
if not (List.mem "sriov" pif_rec.API.pIF_capabilities) then
raise
Api_errors.(
Server_error (pif_is_not_sriov_capable, [Ref.string_of physical_PIF])
)
let assert_pif_is_managed ~__context ~self =
if Db.PIF.get_managed ~__context ~self <> true then
raise Api_errors.(Server_error (pif_unmanaged, [Ref.string_of self]))
let assert_not_vlan_slave ~__context ~self =
let vlans = Db.PIF.get_VLAN_slave_of ~__context ~self in
debug "PIF %s assert_no_vlans = [ %s ]"
(Db.PIF.get_uuid ~__context ~self)
(String.concat "; " (List.map Ref.string_of vlans)) ;
if vlans <> [] then (
List.map (fun self -> Db.VLAN.get_uuid ~__context ~self) vlans
|> String.concat "; "
|> debug "PIF has associated VLANs: [ %s ]" ;
raise
Api_errors.(Server_error (pif_vlan_still_exists, [Ref.string_of self]))
)
let is_device_underneath_same_type ~__context pif1 pif2 =
let get_device_info pif =
let pci = Db.PIF.get_PCI ~__context ~self:pif in
let pci_rec = Db.PCI.get_record_internal ~__context ~self:pci in
(pci_rec.Db_actions.pCI_vendor_id, pci_rec.Db_actions.pCI_device_id)
in
get_device_info pif1 = get_device_info pif2
let get_primary_address ~__context ~pif =
match Db.PIF.get_primary_address_type ~__context ~self:pif with
| `IPv4 -> (
match Db.PIF.get_IP ~__context ~self:pif with "" -> None | ip -> Some ip
)
| `IPv6 ->
List.nth_opt (Db.PIF.get_IPv6 ~__context ~self:pif) 0
|
19488cf140cdd2602092e72cb57b5e54474b5efe7425360e4c654e215c0711a3 | scicloj/wadogo | ordinal.clj | (ns wadogo.scale.ordinal
(:require [wadogo.common :refer [scale ->ScaleType strip-keys merge-params]]
[wadogo.utils :refer [ensure-seq-content]]))
(defmethod scale :ordinal
  ;; Build an ordinal scale: a bijection between the (deduplicated,
  ;; optionally sorted) domain values and the range values.
  ([_] (scale :ordinal {}))
  ([s params]
   (let [{:keys [domain range sort?] :as params} (merge-params s params)
         ;; Normalize the domain: drop duplicates, fall back to the range
         ;; when missing, and sort when requested.
         ndomain (as-> domain domain
                   (distinct domain)
                   (ensure-seq-content domain range)
                   (if sort? (sort domain) domain))
         ;; The range falls back to the normalized domain itself.
         nrange (ensure-seq-content range ndomain)]
     (->ScaleType :ordinal ndomain nrange (:ticks params) (:formatter params)
                  ;; forward and inverse lookup tables
                  (zipmap ndomain nrange)
                  (zipmap nrange ndomain)
                  (strip-keys params)))))
| null | https://raw.githubusercontent.com/scicloj/wadogo/ae293aa28d6da55719c3f2771649c2f2e56b692b/src/wadogo/scale/ordinal.clj | clojure | (ns wadogo.scale.ordinal
(:require [wadogo.common :refer [scale ->ScaleType strip-keys merge-params]]
[wadogo.utils :refer [ensure-seq-content]]))
(defmethod scale :ordinal
([_] (scale :ordinal {}))
([s params]
(let [{:keys [domain range sort?] :as params} (merge-params s params)
ndomain (as-> domain domain
(distinct domain)
(ensure-seq-content domain range)
(if sort? (sort domain) domain))
nrange (ensure-seq-content range ndomain)]
(->ScaleType :ordinal ndomain nrange (:ticks params) (:formatter params)
(zipmap ndomain nrange)
(zipmap nrange ndomain)
(strip-keys params)))))
| |
d8625a7ed742c504fc83d3b1867624caac7d8232614d42ec64a09d74cec75072 | anwarmamat/cmsc330fall18-public | data.ml | open Funs
(***********************)
(* Part 2: Integer BST *)
(***********************)
type int_tree =
| IntLeaf
| IntNode of int * int_tree * int_tree
let empty_int_tree = IntLeaf
(* Insert [x] into BST [t]; inserting an existing value returns [t]
   unchanged. *)
let rec int_insert x t =
  match t with
  | IntLeaf -> IntNode (x, IntLeaf, IntLeaf)
  | IntNode (y, l, r) ->
      if x > y then IntNode (y, l, int_insert x r)
      else if x < y then IntNode (y, int_insert x l, r)
      else t
(* Membership test: descend left or right using the BST ordering. *)
let rec int_mem x t =
  match t with
  | IntLeaf -> false
  | IntNode (y, l, r) ->
      if x = y then true
      else if x > y then int_mem x r
      else int_mem x l
(* Implement the functions below. *)
(* [int_size t] is the number of values stored in [t].
   Implemented; the skeleton raised Failure "unimplemented". *)
let rec int_size t =
  match t with
  | IntLeaf -> 0
  | IntNode (_, l, r) -> 1 + int_size l + int_size r
(* [int_max t] is the largest value in [t], i.e. the rightmost node of the
   BST.
   NOTE(review): raises Invalid_argument "int_max" on an empty tree —
   confirm this matches the error behaviour the assignment spec expects. *)
let rec int_max t =
  match t with
  | IntLeaf -> invalid_arg "int_max"
  | IntNode (y, _, IntLeaf) -> y
  | IntNode (_, _, r) -> int_max r
(* [int_common t x y] should return the closest common ancestor of [x] and
   [y] in BST [t]; still an unimplemented assignment stub. *)
let rec int_common t x y = failwith "unimplemented"
(***************************)
(* Part 3: Polymorphic BST *)
(***************************)
type 'a atree =
Leaf
| Node of 'a * 'a atree * 'a atree
type 'a compfn = 'a -> 'a -> int
type 'a ptree = 'a compfn * 'a atree
let empty_ptree f : 'a ptree = (f,Leaf)
(* Implement the functions below. *)
(* Assignment stubs for the polymorphic BST; each should use the tree's
   stored comparison function. *)
let pinsert x t = failwith "unimplemented" (* insert [x] into ptree [t] *)
let pmem x t = failwith "unimplemented" (* membership test *)
let pinsert_all lst t = failwith "unimplemented" (* insert every element of [lst] *)
let rec p_as_list t = failwith "unimplemented" (* in-order traversal as a list *)
let pmap f t = failwith "unimplemented" (* map [f] over the stored values *)
(*******************************)
(* Part 4: Shapes with Records *)
(*******************************)
type pt = { x: int; y: int }
type shape =
Circ of { radius: float; center: pt }
| Rect of { width: float; height: float; upper: pt }
(* Assignment stubs for the shapes part. *)
let area s = failwith "unimplemented" (* area of a [shape] *)
let filter f lst = failwith "unimplemented" (* keep elements satisfying [f] *)
let partition thresh lst = failwith "unimplemented" (* split [lst] around [thresh] *)
let rec qs lst = failwith "unimplemented" (* quicksort built on [partition] *)
| null | https://raw.githubusercontent.com/anwarmamat/cmsc330fall18-public/12585d98d45f954f75e2f78df3062444f5f97cf6/p2b/src/data.ml | ocaml | *********************
*********************
Implement the functions below.
*************************
Part 3: Polymorphic BST
*************************
Implement the functions below.
*****************************
***************************** | open Funs
Part 2 : Integer BST
type int_tree =
| IntLeaf
| IntNode of int * int_tree * int_tree
let empty_int_tree = IntLeaf
let rec int_insert x t =
match t with
| IntLeaf -> IntNode(x, IntLeaf, IntLeaf)
| IntNode (y, l, r) when x > y -> IntNode (y, l, int_insert x r)
| IntNode (y, l, r) when x = y -> t
| IntNode (y, l, r) -> IntNode (y, int_insert x l, r)
let rec int_mem x t =
match t with
| IntLeaf -> false
| IntNode (y, l, r) when x > y -> int_mem x r
| IntNode (y, l, r) when x = y -> true
| IntNode (y, l, r) -> int_mem x l
let rec int_size t = failwith "unimplemented"
let rec int_max t = failwith "unimplemented"
let rec int_common t x y = failwith "unimplemented"
type 'a atree =
Leaf
| Node of 'a * 'a atree * 'a atree
type 'a compfn = 'a -> 'a -> int
type 'a ptree = 'a compfn * 'a atree
let empty_ptree f : 'a ptree = (f,Leaf)
let pinsert x t = failwith "unimplemented"
let pmem x t = failwith "unimplemented"
let pinsert_all lst t = failwith "unimplemented"
let rec p_as_list t = failwith "unimplemented"
let pmap f t = failwith "unimplemented"
Part 4 : Shapes with Records
type pt = { x: int; y: int }
type shape =
Circ of { radius: float; center: pt }
| Rect of { width: float; height: float; upper: pt }
let area s = failwith "unimplemented"
let filter f lst = failwith "unimplemented"
let partition thresh lst = failwith "unimplemented"
let rec qs lst = failwith "unimplemented"
|
b7bcc5e017ee7cafda106433a4eba722cbbc15807d4c7cd56c08c6eb4ee31bde | tud-fop/vanda-haskell | Shuffle.hs | -----------------------------------------------------------------------------
-- |
-- Module : Data.List.Shuffle
-- Description : shuffling lists
-- Copyright   : (c) Technische Universität Dresden 2016
-- License : BSD-style
--
-- Maintainer :
-- Stability : unknown
-- Portability : portable
--
-- Shuffling list.
-----------------------------------------------------------------------------
module Data.List.Shuffle
( shuffle
) where
import Control.Monad (forM)
import Control.Monad.ST
import Data.Array.MArray.Safe (newListArray)
import Data.Array.ST.Safe
import Data.STRef
import System.Random (RandomGen, randomR)
-- | Shuffle a list using a given 'RandomGen'.
--
-- The solution is based on the
-- [Haskell Wiki](https://wiki.haskell.org/Random_shuffle#Imperative_algorithm).
shuffle :: RandomGen g => [a] -> g -> ([a], g)
shuffle xs g = runST $ do
    gST <- newSTRef g
    aST <- newListArrayST (0, hi) xs
    xs' <- forM [0 .. hi] $ \ i -> do
      -- Fisher-Yates step: pick a random index j in [i, hi], emit the
      -- element stored there, and move a!i into slot j so the
      -- not-yet-emitted elements stay in positions i..hi.
      j <- stateST gST $ randomR (i, hi)
      vi <- readArray aST i
      vj <- readArray aST j
      writeArray aST j vi
      return vj
    -- Return the generator state left after all the randomR draws.
    g' <- readSTRef gST
    return (xs', g')
  where
    hi = pred $ length xs
-- | Identical to 'newListArray', but with more specific type.
-- Pinning the result to 'STArray' resolves the otherwise ambiguous
-- 'MArray' instance at the call site in 'shuffle'.
newListArrayST :: Ix i => (i, i) -> [e] -> ST s (STArray s i e)
newListArrayST = newListArray
-- | Like 'Control.Monad.Trans.State.Lazy.state', but for "Control.Monad.ST"
-- and "Data.STRef".
stateST :: STRef s a -> (a -> (b, a)) -> ST s b
stateST s f = do
  -- Apply f to the current value, store the new state, return the result.
  (b, a) <- f <$> readSTRef s
  writeSTRef s a
  return b
| null | https://raw.githubusercontent.com/tud-fop/vanda-haskell/3214966361b6dbf178155950c94423eee7f9453e/library/Data/List/Shuffle.hs | haskell | ---------------------------------------------------------------------------
|
Module : Data.List.Shuffle
Description : shuffling lists
License : BSD-style
Maintainer :
Stability : unknown
Portability : portable
Shuffling list.
---------------------------------------------------------------------------
The solution is based on the
[Haskell Wiki](#Imperative_algorithm).
| Identical to 'newListArray', but with more specific type. | Copyright : ( c ) Technische Universität Dresden 2016
module Data.List.Shuffle
( shuffle
) where
import Control.Monad (forM)
import Control.Monad.ST
import Data.Array.MArray.Safe (newListArray)
import Data.Array.ST.Safe
import Data.STRef
import System.Random (RandomGen, randomR)
| Shuffle a list using a given ' RandomGen ' .
shuffle :: RandomGen g => [a] -> g -> ([a], g)
shuffle xs g = runST $ do
gST <- newSTRef g
aST <- newListArrayST (0, hi) xs
xs' <- forM [0 .. hi] $ \ i -> do
j <- stateST gST $ randomR (i, hi)
vi <- readArray aST i
vj <- readArray aST j
writeArray aST j vi
return vj
g' <- readSTRef gST
return (xs', g')
where
hi = pred $ length xs
newListArrayST :: Ix i => (i, i) -> [e] -> ST s (STArray s i e)
newListArrayST = newListArray
| Like ' Control.Monad.Trans.State.Lazy.state ' , but for " Control . Monad . ST "
and " Data . STRef " .
stateST :: STRef s a -> (a -> (b, a)) -> ST s b
stateST s f = do
(b, a) <- f <$> readSTRef s
writeSTRef s a
return b
|
812cc7614286b62d814d0cd6eca4b8ac8dde9f828988391fc3cc25e407ff8a93 | clash-lang/clash-compiler | Modelsim.hs | # LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
module Test.Tasty.Modelsim where
import Control.Monad (forM_)
import Data.Coerce (coerce)
import qualified Data.List as List
import Data.Proxy
import Data.Tagged
import qualified Data.Text as T
import System.Directory (copyFile)
import System.FilePath ((</>))
import System.FilePath.Glob (glob)
import Test.Tasty.Common
import Test.Tasty.Options
import Test.Tasty.Program
import Test.Tasty.Providers
-- | @--modelsim@ flag for enabling tests that use modelsim.
-- ModelSim True means "run ModelSim tests" (the default); the
-- @--no-modelsim@ flag flips it to False.
newtype ModelSim = ModelSim Bool
  deriving (Eq, Ord)

instance IsOption ModelSim where
  defaultValue = ModelSim True
  parseValue = fmap ModelSim . safeReadBool
  optionName = pure "no-modelsim"
  optionHelp = pure "Skip modelsim tests"
  optionCLParser = flagCLParser Nothing (ModelSim False)
data ModelsimVlibTest = ModelsimVlibTest
  { mvtParentDirectory :: IO FilePath
  -- ^ Shared temporary directory
  , mvtSourceDirectory :: IO FilePath
  -- ^ Directory to work from
  }

instance IsTest ModelsimVlibTest where
  run optionSet ModelsimVlibTest{..} progressCallback
    | ModelSim True <- lookupOption optionSet = do
        -- Populate the target dir, then create the "work" library there
        -- with vlib.
        buildTargetDir mvtParentDirectory mvtSourceDirectory
        src <- mvtSourceDirectory
        runVlib src ["work"]
    | otherwise =
        pure (testPassed "Ignoring test due to --no-modelsim")
    where
      vlib workDir args = TestProgram "vlib" args NoGlob PrintNeither False (Just workDir) []
      runVlib workDir args = run optionSet (vlib workDir args) progressCallback

  testOptions =
    coerce (coerce (testOptions @TestProgram) <> [Option (Proxy @ModelSim)])
data ModelsimVlogTest = ModelsimVlogTest
{ vlogSourceDirectory :: IO FilePath
  -- ^ Directory containing VHDL files produced by Clash
}
instance IsTest ModelsimVlogTest where
  run optionSet ModelsimVlogTest{vlogSourceDirectory} progressCallback
    | ModelSim True <- lookupOption optionSet = do
        src <- vlogSourceDirectory
        -- *_types.sv files are listed first so type packages compile
        -- before the files that use them.
        typeFiles <- glob (src </> "*" </> "*_types.sv")
        allFiles <- glob (src </> "*" </> "*.sv")
        runVlog src (["-sv", "-work", "work"] <> typeFiles <> allFiles)
    | otherwise =
        pure (testPassed "Ignoring test due to --no-modelsim")
    where
      vlog workDir args = TestProgram "vlog" args NoGlob PrintNeither False (Just workDir) []
      runVlog workDir args = run optionSet (vlog workDir args) progressCallback

  testOptions =
    coerce (coerce (testOptions @TestProgram) <> [Option (Proxy @ModelSim)])
data ModelsimSimTest = ModelsimSimTest
{ msimExpectFailure :: Maybe (TestExitCode, T.Text)
-- ^ Expected failure code and output (if any)
, msimSourceDirectory :: IO FilePath
  -- ^ Directory containing VHDL files produced by Clash
, msimTop :: String
-- ^ Entry point to simulate
}
instance IsTest ModelsimSimTest where
  run optionSet ModelsimSimTest{..} progressCallback
    | ModelSim True <- lookupOption optionSet = do
        src <- msimSourceDirectory
        -- See Note [copy data files hack]
        lists <- glob (src </> "*/memory.list")
        forM_ lists $ \memFile ->
          copyFile memFile (src </> "memory.list")
        -- TODO: remove -voptargs=+acc=p for a next release of questa intel edition
        let args = ["-voptargs=+acc=p","-batch", "-do", doScript, msimTop]
        -- Expected-failure tests check vsim's exit code/output instead.
        case msimExpectFailure of
          Nothing -> run optionSet (vsim src args) progressCallback
          Just exit -> run optionSet (failingVsim src args exit) progressCallback
    | otherwise =
        pure (testPassed "Ignoring test due to --no-modelsim")
    where
      vsim workDir args =
        TestProgram "vsim" args NoGlob PrintNeither False (Just workDir) []
      failingVsim workDir args (testExit, expectedErr) =
        TestFailingProgram
          (testExitCode testExit) "vsim" args NoGlob PrintNeither False
          (specificExitCode testExit) (ExpectEither expectedErr) (Just workDir) []
      -- Tcl batch script: run to completion, exit 0 when the simulator
      -- reports "ready", 1 otherwise, 2 if the status check never runs.
      doScript = List.intercalate ";"
        [ "run -all"
        , unwords
            ["if {[string equal ready [runStatus]]}"
            ,"then {quit -f}"
            ,"else {quit -code 1 -f}"
            ]
        , "quit -code 2 -f"
        ]

  testOptions =
    coerce (coerce (testOptions @TestProgram) <> [Option (Proxy @ModelSim)])
| null | https://raw.githubusercontent.com/clash-lang/clash-compiler/fe65a65b993f58b779b4be01340df441fa3996eb/tests/src/Test/Tasty/Modelsim.hs | haskell | | @--modelsim@ flag for enabling tests that use modelsim.
^ Shared temporary directory
^ Directory to work from
^ Expected failure code and output (if any)
^ Entry point to simulate
See Note [copy data files hack]
TODO: remove -voptargs=+acc=p for a next release of questa intel edition | # LANGUAGE NamedFieldPuns #
# LANGUAGE RecordWildCards #
# LANGUAGE TypeApplications #
module Test.Tasty.Modelsim where
import Control.Monad (forM_)
import Data.Coerce (coerce)
import qualified Data.List as List
import Data.Proxy
import Data.Tagged
import qualified Data.Text as T
import System.Directory (copyFile)
import System.FilePath ((</>))
import System.FilePath.Glob (glob)
import Test.Tasty.Common
import Test.Tasty.Options
import Test.Tasty.Program
import Test.Tasty.Providers
newtype ModelSim = ModelSim Bool
deriving (Eq, Ord)
instance IsOption ModelSim where
defaultValue = ModelSim True
parseValue = fmap ModelSim . safeReadBool
optionName = pure "no-modelsim"
optionHelp = pure "Skip modelsim tests"
optionCLParser = flagCLParser Nothing (ModelSim False)
data ModelsimVlibTest = ModelsimVlibTest
{ mvtParentDirectory :: IO FilePath
, mvtSourceDirectory :: IO FilePath
}
instance IsTest ModelsimVlibTest where
run optionSet ModelsimVlibTest{..} progressCallback
| ModelSim True <- lookupOption optionSet = do
buildTargetDir mvtParentDirectory mvtSourceDirectory
src <- mvtSourceDirectory
runVlib src ["work"]
| otherwise =
pure (testPassed "Ignoring test due to --no-modelsim")
where
vlib workDir args = TestProgram "vlib" args NoGlob PrintNeither False (Just workDir) []
runVlib workDir args = run optionSet (vlib workDir args) progressCallback
testOptions =
coerce (coerce (testOptions @TestProgram) <> [Option (Proxy @ModelSim)])
data ModelsimVlogTest = ModelsimVlogTest
{ vlogSourceDirectory :: IO FilePath
^ Directory containing VHDL files produced by Clash
}
instance IsTest ModelsimVlogTest where
run optionSet ModelsimVlogTest{vlogSourceDirectory} progressCallback
| ModelSim True <- lookupOption optionSet = do
src <- vlogSourceDirectory
typeFiles <- glob (src </> "*" </> "*_types.sv")
allFiles <- glob (src </> "*" </> "*.sv")
runVlog src (["-sv", "-work", "work"] <> typeFiles <> allFiles)
| otherwise =
pure (testPassed "Ignoring test due to --no-modelsim")
where
vlog workDir args = TestProgram "vlog" args NoGlob PrintNeither False (Just workDir) []
runVlog workDir args = run optionSet (vlog workDir args) progressCallback
testOptions =
coerce (coerce (testOptions @TestProgram) <> [Option (Proxy @ModelSim)])
data ModelsimSimTest = ModelsimSimTest
{ msimExpectFailure :: Maybe (TestExitCode, T.Text)
, msimSourceDirectory :: IO FilePath
^ Directory containing VHDL files produced by Clash
, msimTop :: String
}
instance IsTest ModelsimSimTest where
run optionSet ModelsimSimTest{..} progressCallback
| ModelSim True <- lookupOption optionSet = do
src <- msimSourceDirectory
lists <- glob (src </> "*/memory.list")
forM_ lists $ \memFile ->
copyFile memFile (src </> "memory.list")
let args = ["-voptargs=+acc=p","-batch", "-do", doScript, msimTop]
case msimExpectFailure of
Nothing -> run optionSet (vsim src args) progressCallback
Just exit -> run optionSet (failingVsim src args exit) progressCallback
| otherwise =
pure (testPassed "Ignoring test due to --no-modelsim")
where
vsim workDir args =
TestProgram "vsim" args NoGlob PrintNeither False (Just workDir) []
failingVsim workDir args (testExit, expectedErr) =
TestFailingProgram
(testExitCode testExit) "vsim" args NoGlob PrintNeither False
(specificExitCode testExit) (ExpectEither expectedErr) (Just workDir) []
doScript = List.intercalate ";"
[ "run -all"
, unwords
["if {[string equal ready [runStatus]]}"
,"then {quit -f}"
,"else {quit -code 1 -f}"
]
, "quit -code 2 -f"
]
testOptions =
coerce (coerce (testOptions @TestProgram) <> [Option (Proxy @ModelSim)])
|
bc71b70d4c52f636988ec758969f339c61c9efddf59f8273c009b6800851ebb7 | tezos/tezos-mirror | test_srs.ml | (*****************************************************************************)
(* *)
MIT License
Copyright ( c ) 2022 Nomadic Labs < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
(* Short aliases for the scalar field and the G1 group of BLS12-381. *)
module Fr = Bls12_381.Fr
module G1 = Bls12_381.G1
module Poly = Tezos_bls12_381_polynomial_internal.Polynomial

(* SRS over G1, constrained to the [S_unsafe] interface so that these tests
   can build an insecure SRS from a known secret (see [generate_insecure]). *)
module Srs :
  Tezos_bls12_381_polynomial_internal.Srs.S_unsafe
    with type elt = Bls12_381.G1.t
     and type polynomial = Poly.t =
  Tezos_bls12_381_polynomial_internal.Srs.Make
    (Tezos_bls12_381_polynomial_internal.Srs.Elt_g1)
(* An SRS generated from the secret [Fr.one] must expose [G1.one] as its
   first (and only) element. *)
let test_get () =
  let trivial_srs = Srs.generate_insecure 1 Fr.one in
  let first_elt = Srs.get trivial_srs 0 in
  assert (G1.eq G1.one first_elt)
(* [Srs.pippenger] on a random window [offset, offset + window) of a random
   SRS must agree with the reference multi-exponentiation of Bls12_381. *)
let test_pippenger () =
  let size = 1 lsl (1 + Random.int 4) in
  let offset = Random.int size in
  let window = 1 + Random.int (size - offset) in
  let points = Array.init size (fun _ -> G1.random ()) in
  let coeffs = Array.init size (fun _ -> Fr.random ()) in
  let expected =
    Bls12_381.G1.pippenger ~start:offset ~len:window points coeffs
  in
  let actual =
    Srs.pippenger ~offset ~len:window (Srs.of_array points) (Poly.of_dense coeffs)
  in
  assert (Bls12_381.G1.eq actual expected)
(* Round-trip: converting an array of points into a pippenger context and
   back must preserve every point. *)
let test_add_and_extract_srs_from_pippenger_ctxt () =
  let size = 1 lsl (1 + Random.int 3) in
  let points = Array.init size (fun _ -> G1.random ()) in
  let round_tripped = Srs.to_array (Srs.of_array points) in
  assert (Array.for_all2 Bls12_381.G1.eq points round_tripped)
(* Regression vectors for [Srs.pippenger]: each pair is (uncompressed G1
   points as hex, scalars as hex).  For every vector, [Srs.pippenger]
   starting at index 1 must agree with the reference
   [Bls12_381.G1.pippenger].  The later vectors reuse the same points with
   different scalars (and the last one extends the point list). *)
let test_vector_pippenger () =
  let vectors =
    [
      ( [
          "17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb08b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e1";
          "0626cc1d12a23b7886a4f0b0ef67959b7741bf3cfb2c4a020c7d4cc363796a0a7c0f6ea5ac673865de74a88058e9326e0b96b667165e7320c6190a083999375619571ffa66ebc175565e827666d40fa56bbbb30dbaf0a729436545ac7cfd7ac1";
          "18932b935642ee2b2672c870314197639820562829b8957224ad4ace94ffd1b86523a7ba55c84997e3f528c69f40b42608fdce18c7373edb89594f25c3cb5b8ebf2addb430c713f54b65dca8a3e6f8814dd1cd6cb29ab5c159a10dd992dc9c95";
          "0016c8fe3880c4e18d15fd2401912cf56ea4439c98ff93a78b21d0e8533158b96a575252af3e2fc75f23f642bc04b42805444f638fbbe3cfb6cdb4c234a81858b572d8f68d6081d20e9097dc8dc7953b4333714639507e856920905ed76d21d2";
        ],
        [
          "01000040ffffff3fffc4fe3f023bcefe0362390706626b26f61d365f7e3df256";
          "000000000040000000c0809d00c0003b34c1809d337354230000000000000000";
          "000000c0ffffffbfff96ffbf0069ef54017668020276ce0c525f67cad469fb1c";
          "01000000ffbffffffe9b7d6202e4bc18d116216cd464e50f487d9d2953a7ed73";
        ] );
      ( [
          "17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb08b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e1";
          "0626cc1d12a23b7886a4f0b0ef67959b7741bf3cfb2c4a020c7d4cc363796a0a7c0f6ea5ac673865de74a88058e9326e0b96b667165e7320c6190a083999375619571ffa66ebc175565e827666d40fa56bbbb30dbaf0a729436545ac7cfd7ac1";
          "18932b935642ee2b2672c870314197639820562829b8957224ad4ace94ffd1b86523a7ba55c84997e3f528c69f40b42608fdce18c7373edb89594f25c3cb5b8ebf2addb430c713f54b65dca8a3e6f8814dd1cd6cb29ab5c159a10dd992dc9c95";
          "0016c8fe3880c4e18d15fd2401912cf56ea4439c98ff93a78b21d0e8533158b96a575252af3e2fc75f23f642bc04b42805444f638fbbe3cfb6cdb4c234a81858b572d8f68d6081d20e9097dc8dc7953b4333714639507e856920905ed76d21d2";
          "184b377e14ed55e31b35f445cbf468b705e9c71a946a55522a3c85be25b393a2e67265cef7df8edd619002e2a3ad065b0af36ec22b8926ff733c9b7b5478843b0e3dc84e8d94c1d0b2b923cba275e78d8acce1b8537cb242778600e5af19ad46";
          "0ed4874249807cc2a1f940e15fd4eb119786194a48a158135a951f58b4ca3b80f916c02a405b9c040338d4da3adbedfa047fae6023b2fd812933d114c1437c749aac6376d0a601229f5f825626974ee8e7ece8eacc4d6b6d75e7f3fd7201947d";
        ],
        [
          "18f30c353f952ad33eb17e03f56cc000e6cfa38c9181341b74ca3635c08be372";
          "f66f4007409ab02839855ebe1fc20893446e161d0409bebbdac47e83fbec855c";
          "430be8668e0567ba3ff8cba1afc74c5095016b47c0fe0d6a678c54f065152a5f";
          "10ee9a0e7c8ef77cdf20164a67bd0ad9afe9ec33972cb7473af8753b092b1954";
          "73cbbc6a1b984fbaabd6e75c316e476ce3a44eccf812529d8d89aea90329c147";
          "6de9f9ee3c5bd6fbd7def12f8b0534dd35a202a7b73eef348f1d6ec41be5c751";
        ] );
      ( [
          "17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb08b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e1";
          "0626cc1d12a23b7886a4f0b0ef67959b7741bf3cfb2c4a020c7d4cc363796a0a7c0f6ea5ac673865de74a88058e9326e0b96b667165e7320c6190a083999375619571ffa66ebc175565e827666d40fa56bbbb30dbaf0a729436545ac7cfd7ac1";
          "18932b935642ee2b2672c870314197639820562829b8957224ad4ace94ffd1b86523a7ba55c84997e3f528c69f40b42608fdce18c7373edb89594f25c3cb5b8ebf2addb430c713f54b65dca8a3e6f8814dd1cd6cb29ab5c159a10dd992dc9c95";
          "0016c8fe3880c4e18d15fd2401912cf56ea4439c98ff93a78b21d0e8533158b96a575252af3e2fc75f23f642bc04b42805444f638fbbe3cfb6cdb4c234a81858b572d8f68d6081d20e9097dc8dc7953b4333714639507e856920905ed76d21d2";
          "184b377e14ed55e31b35f445cbf468b705e9c71a946a55522a3c85be25b393a2e67265cef7df8edd619002e2a3ad065b0af36ec22b8926ff733c9b7b5478843b0e3dc84e8d94c1d0b2b923cba275e78d8acce1b8537cb242778600e5af19ad46";
          "0ed4874249807cc2a1f940e15fd4eb119786194a48a158135a951f58b4ca3b80f916c02a405b9c040338d4da3adbedfa047fae6023b2fd812933d114c1437c749aac6376d0a601229f5f825626974ee8e7ece8eacc4d6b6d75e7f3fd7201947d";
        ],
        [
          "ea900cee19d8ba557b68418d3bbc413542c3c92a48317cb6c6b975dbd04d2b23";
          "c693db96c8cbbea290d505e731ed16376fa998e68ab4203f7af599c0f78f983f";
          "83120bdf88b335f254baf192094c883858aaa785b0bb4255da24d871193fe64e";
          "a69a181630f9d27c9dd91e91aade10e310b38cd0f73ee33b08df594fe0e65f1c";
          "78b0bd6bc451bd7d6b65e65ce4e81c8f25a5c74c4748777007f35f5a5983da09";
          "1025092f62588e2cf536645b238ce88402cec77a5902ab455aa1d4131eaa6f2c";
        ] );
      ( [
          "17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb08b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e1";
          "0626cc1d12a23b7886a4f0b0ef67959b7741bf3cfb2c4a020c7d4cc363796a0a7c0f6ea5ac673865de74a88058e9326e0b96b667165e7320c6190a083999375619571ffa66ebc175565e827666d40fa56bbbb30dbaf0a729436545ac7cfd7ac1";
          "18932b935642ee2b2672c870314197639820562829b8957224ad4ace94ffd1b86523a7ba55c84997e3f528c69f40b42608fdce18c7373edb89594f25c3cb5b8ebf2addb430c713f54b65dca8a3e6f8814dd1cd6cb29ab5c159a10dd992dc9c95";
          "0016c8fe3880c4e18d15fd2401912cf56ea4439c98ff93a78b21d0e8533158b96a575252af3e2fc75f23f642bc04b42805444f638fbbe3cfb6cdb4c234a81858b572d8f68d6081d20e9097dc8dc7953b4333714639507e856920905ed76d21d2";
          "184b377e14ed55e31b35f445cbf468b705e9c71a946a55522a3c85be25b393a2e67265cef7df8edd619002e2a3ad065b0af36ec22b8926ff733c9b7b5478843b0e3dc84e8d94c1d0b2b923cba275e78d8acce1b8537cb242778600e5af19ad46";
          "0ed4874249807cc2a1f940e15fd4eb119786194a48a158135a951f58b4ca3b80f916c02a405b9c040338d4da3adbedfa047fae6023b2fd812933d114c1437c749aac6376d0a601229f5f825626974ee8e7ece8eacc4d6b6d75e7f3fd7201947d";
          "0037ae4b3f2191c9df9e179727b6739474e1bc4d502de668d4985a82404d5317ae79ecc63e6f8042ab520a74e897ad720406a2cfc76490fb0fd28cb8e13c706de396c37d613631818b58ea9ecc3db30d86217cd85c0da3c5998927b3789dcff4";
          "0eadc0859b8076866c8322add1566589767b15ea33ff225338df15bdb2c196a23e94baf9bc04f85f1f88070a4280adaf0db1f9d1016e4e90a0ae9b6cc47bb93028396d39ce24edfefef0713ca28c6d25b0a211b2dd918fe9d460d9692b8ae235";
          "13be3a770003d560b33744d5a0d89dc4a492c78e30f0921e68a5d863bb656693ea947409dc41141bc489ac98e05c3401017107261b06441911b8f980496025697b67c1ccae1b9b019bbd6620722b432991f9db5b86d08a3caeac0bfebb65d0f3";
          "0f7017817c17b571a9622905e433400f744a2f4f81c3a6b31d4973ea12fa3f0735c5a7486c699d7abc5eb33b0f1386f60667140c432c00669eccad648ce4b5ac49173f3af926333af85adf0526c17ad9c3987aeb99c8bde2c95a6f11594f2d6a";
          "16e397128d39fb23a2ece3b628af491c640768dcea09347039a976d8621c010ff321f9a96b57600f29379452152b0c6c02957a9b42a70224cca6f022d436746ab64a9def1e2da6a3899556ecf37e0d1ab3647838204fa9f37dede4e2bb580fb1";
          "00b2c446c12f14a5c5d9b85f3127062aa19865b66b33be62d696fcbe02fd14f8237b94a15cd0241d9ad1a4007942672417149de0f1606edfedb04bfc59bbc103ba7d7d2f6b8eb84a462e115369e9ce654539a1f5891ba704e1f3d95ea74bc75b";
          "0c27a162d50f8911af173175e64ec8f04b7cc38e6b96f206fae90e41b65887e61430e7eb5c65396a4c8727c3493f358402800d0ccbd9fd71bcbb0e1cd88f7894078e1b8f8c6a9a5101d190f25675211ff5dd3a1cc1ece5cab23ddf00dbd836e0";
        ],
        [
          "ca7b2410082015a7b7819d2cde1b46f2f15b2980af0cb0662439286a3e853b70";
          "34e57070ec49b5d3d43ce6771c21c478b9af5d66a27a385acc895bde459b575b";
          "5bd281d23682c4cd2df6f83ae6af7cfc5f2e67423ffca271a3a1d5c35a7feb3c";
          "9acdfd591f1de1433fc94c56239cfb3df27a3c05f7b6c27a5c4fd1d5be52f045";
          "48c2e23f9007c5fa3e6378ed195165fdb35a2eede808f47df6c8685e2982ad5b";
          "0066dfd993b3fd05afae8e515f13ad97db1a91093e3bce6c2c77e76f3dc2e01f";
        ] );
    ]
  in
  List.iter
    (fun (srs_bytes, scalars_bs) ->
      (* Decode the hex-encoded G1 points. *)
      let srs =
        List.map
          (fun x -> Bls12_381.G1.of_bytes_exn (Hex.to_bytes (`Hex x)))
          srs_bytes
        |> Array.of_list
      in
      (* Decode the hex-encoded scalars. *)
      let scalars =
        List.map
          (fun x -> Bls12_381.Fr.of_bytes_exn (Hex.to_bytes (`Hex x)))
          scalars_bs
        |> Array.of_list
      in
      (* Both multi-exponentiations skip the first (point, scalar) pair. *)
      let start = 1 in
      let exp_output = Bls12_381.G1.pippenger ~start srs scalars in
      let pippenger_ctxt = Srs.of_array srs in
      let poly = Poly.of_dense scalars in
      let output = Srs.pippenger ~offset:start pippenger_ctxt poly in
      assert (Bls12_381.G1.eq output exp_output))
    vectors
(** [bigstring_of_file filename] memory-maps the whole content of
    [filename] (read-only) as a char bigarray.
    The file descriptor is closed once the mapping is established (the
    original version leaked one descriptor per call); per POSIX, closing
    the descriptor does not invalidate the mapping. *)
let bigstring_of_file filename =
  let fd = Unix.openfile filename [Unix.O_RDONLY] 0o440 in
  (* Close [fd] on every path, including when [map_file] raises. *)
  Fun.protect
    ~finally:(fun () -> Unix.close fd)
    (fun () ->
      Bigarray.array1_of_genarray
      @@ Unix.map_file
           fd
           Bigarray.char
           Bigarray.c_layout
           false
           (* [-1] means map the whole file *)
           [| -1 |])
(* Loading exactly 2^5 points from the test file succeeds; asking for one
   more point than the file contains must fail with [`End_of_file]. *)
let test_load_from_file () =
  let contents = bigstring_of_file "srs_zcash_g1_5" in
  let full = 1 lsl 5 in
  let srs = Result.get_ok (Srs.of_bigstring contents ~len:full) in
  assert (Srs.size srs = full) ;
  match Srs.of_bigstring contents ~len:(full + 1) with
  | Error (`End_of_file _) -> ()
  | _ -> assert false
(* The first three points of the srs_zcash_g1_5 file must match these known
   G1 hex encodings. *)
let test_load_from_file_vector () =
  let contents = bigstring_of_file "srs_zcash_g1_5" in
  let srs = Result.get_ok (Srs.of_bigstring contents ~len:(1 lsl 5)) in
  let expected_points =
    [
      "17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb08b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e1";
      "0626cc1d12a23b7886a4f0b0ef67959b7741bf3cfb2c4a020c7d4cc363796a0a7c0f6ea5ac673865de74a88058e9326e0b96b667165e7320c6190a083999375619571ffa66ebc175565e827666d40fa56bbbb30dbaf0a729436545ac7cfd7ac1";
      "18932b935642ee2b2672c870314197639820562829b8957224ad4ace94ffd1b86523a7ba55c84997e3f528c69f40b42608fdce18c7373edb89594f25c3cb5b8ebf2addb430c713f54b65dca8a3e6f8814dd1cd6cb29ab5c159a10dd992dc9c95";
    ]
  in
  let check index expected =
    let (`Hex actual) =
      Hex.of_bytes (Bls12_381.G1.to_bytes (Srs.get srs index))
    in
    assert (String.equal actual expected)
  in
  List.iteri check expected_points
(* Alcotest registration: every test case runs in [`Quick] mode. *)
let tests =
  let quick (name, f) = Alcotest.test_case name `Quick f in
  List.map
    quick
    [
      ("get_set", test_get);
      ("pippenger", test_pippenger);
      ("pippenger test vectors", test_vector_pippenger);
      ( "add and extract srs from pippenger ctxt",
        test_add_and_extract_srs_from_pippenger_ctxt );
      ("load_from_file", test_load_from_file);
      ("load_from_file_vector", test_load_from_file_vector);
    ]
| null | https://raw.githubusercontent.com/tezos/tezos-mirror/603edb19d5217fcf0cc1036effe617d78a6ba265/src/lib_bls12_381_polynomial/test/test_srs.ml | ocaml | ***************************************************************************
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
[-1] means read the whole file | MIT License
Copyright ( c ) 2022 Nomadic Labs < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
module Fr = Bls12_381.Fr
module G1 = Bls12_381.G1
module Poly = Tezos_bls12_381_polynomial_internal.Polynomial
module Srs :
Tezos_bls12_381_polynomial_internal.Srs.S_unsafe
with type elt = Bls12_381.G1.t
and type polynomial = Poly.t =
Tezos_bls12_381_polynomial_internal.Srs.Make
(Tezos_bls12_381_polynomial_internal.Srs.Elt_g1)
let test_get () =
let srs = Srs.generate_insecure 1 Fr.one in
assert (G1.eq G1.one (Srs.get srs 0))
let test_pippenger () =
let logn = 1 + Random.int 4 in
let n = 1 lsl logn in
let start = Random.int n in
let len = 1 + Random.int (n - start) in
let srs = Array.init n (fun _ -> G1.random ()) in
let scalars = Array.init n (fun _ -> Fr.random ()) in
let exp_output = Bls12_381.G1.pippenger ~start ~len srs scalars in
let pippenger_ctxt = Srs.of_array srs in
let poly = Poly.of_dense scalars in
let output = Srs.pippenger ~offset:start ~len pippenger_ctxt poly in
assert (Bls12_381.G1.eq output exp_output)
let test_add_and_extract_srs_from_pippenger_ctxt () =
let logn = 1 + Random.int 3 in
let n = 1 lsl logn in
let srs = Array.init n (fun _ -> G1.random ()) in
let pippenger_ctxt = Srs.of_array srs in
let extracted_srs = Srs.to_array pippenger_ctxt in
assert (Array.for_all2 Bls12_381.G1.eq srs extracted_srs)
let test_vector_pippenger () =
let vectors =
[
( [
"17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb08b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e1";
"0626cc1d12a23b7886a4f0b0ef67959b7741bf3cfb2c4a020c7d4cc363796a0a7c0f6ea5ac673865de74a88058e9326e0b96b667165e7320c6190a083999375619571ffa66ebc175565e827666d40fa56bbbb30dbaf0a729436545ac7cfd7ac1";
"18932b935642ee2b2672c870314197639820562829b8957224ad4ace94ffd1b86523a7ba55c84997e3f528c69f40b42608fdce18c7373edb89594f25c3cb5b8ebf2addb430c713f54b65dca8a3e6f8814dd1cd6cb29ab5c159a10dd992dc9c95";
"0016c8fe3880c4e18d15fd2401912cf56ea4439c98ff93a78b21d0e8533158b96a575252af3e2fc75f23f642bc04b42805444f638fbbe3cfb6cdb4c234a81858b572d8f68d6081d20e9097dc8dc7953b4333714639507e856920905ed76d21d2";
],
[
"01000040ffffff3fffc4fe3f023bcefe0362390706626b26f61d365f7e3df256";
"000000000040000000c0809d00c0003b34c1809d337354230000000000000000";
"000000c0ffffffbfff96ffbf0069ef54017668020276ce0c525f67cad469fb1c";
"01000000ffbffffffe9b7d6202e4bc18d116216cd464e50f487d9d2953a7ed73";
] );
( [
"17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb08b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e1";
"0626cc1d12a23b7886a4f0b0ef67959b7741bf3cfb2c4a020c7d4cc363796a0a7c0f6ea5ac673865de74a88058e9326e0b96b667165e7320c6190a083999375619571ffa66ebc175565e827666d40fa56bbbb30dbaf0a729436545ac7cfd7ac1";
"18932b935642ee2b2672c870314197639820562829b8957224ad4ace94ffd1b86523a7ba55c84997e3f528c69f40b42608fdce18c7373edb89594f25c3cb5b8ebf2addb430c713f54b65dca8a3e6f8814dd1cd6cb29ab5c159a10dd992dc9c95";
"0016c8fe3880c4e18d15fd2401912cf56ea4439c98ff93a78b21d0e8533158b96a575252af3e2fc75f23f642bc04b42805444f638fbbe3cfb6cdb4c234a81858b572d8f68d6081d20e9097dc8dc7953b4333714639507e856920905ed76d21d2";
"184b377e14ed55e31b35f445cbf468b705e9c71a946a55522a3c85be25b393a2e67265cef7df8edd619002e2a3ad065b0af36ec22b8926ff733c9b7b5478843b0e3dc84e8d94c1d0b2b923cba275e78d8acce1b8537cb242778600e5af19ad46";
"0ed4874249807cc2a1f940e15fd4eb119786194a48a158135a951f58b4ca3b80f916c02a405b9c040338d4da3adbedfa047fae6023b2fd812933d114c1437c749aac6376d0a601229f5f825626974ee8e7ece8eacc4d6b6d75e7f3fd7201947d";
],
[
"18f30c353f952ad33eb17e03f56cc000e6cfa38c9181341b74ca3635c08be372";
"f66f4007409ab02839855ebe1fc20893446e161d0409bebbdac47e83fbec855c";
"430be8668e0567ba3ff8cba1afc74c5095016b47c0fe0d6a678c54f065152a5f";
"10ee9a0e7c8ef77cdf20164a67bd0ad9afe9ec33972cb7473af8753b092b1954";
"73cbbc6a1b984fbaabd6e75c316e476ce3a44eccf812529d8d89aea90329c147";
"6de9f9ee3c5bd6fbd7def12f8b0534dd35a202a7b73eef348f1d6ec41be5c751";
] );
( [
"17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb08b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e1";
"0626cc1d12a23b7886a4f0b0ef67959b7741bf3cfb2c4a020c7d4cc363796a0a7c0f6ea5ac673865de74a88058e9326e0b96b667165e7320c6190a083999375619571ffa66ebc175565e827666d40fa56bbbb30dbaf0a729436545ac7cfd7ac1";
"18932b935642ee2b2672c870314197639820562829b8957224ad4ace94ffd1b86523a7ba55c84997e3f528c69f40b42608fdce18c7373edb89594f25c3cb5b8ebf2addb430c713f54b65dca8a3e6f8814dd1cd6cb29ab5c159a10dd992dc9c95";
"0016c8fe3880c4e18d15fd2401912cf56ea4439c98ff93a78b21d0e8533158b96a575252af3e2fc75f23f642bc04b42805444f638fbbe3cfb6cdb4c234a81858b572d8f68d6081d20e9097dc8dc7953b4333714639507e856920905ed76d21d2";
"184b377e14ed55e31b35f445cbf468b705e9c71a946a55522a3c85be25b393a2e67265cef7df8edd619002e2a3ad065b0af36ec22b8926ff733c9b7b5478843b0e3dc84e8d94c1d0b2b923cba275e78d8acce1b8537cb242778600e5af19ad46";
"0ed4874249807cc2a1f940e15fd4eb119786194a48a158135a951f58b4ca3b80f916c02a405b9c040338d4da3adbedfa047fae6023b2fd812933d114c1437c749aac6376d0a601229f5f825626974ee8e7ece8eacc4d6b6d75e7f3fd7201947d";
],
[
"ea900cee19d8ba557b68418d3bbc413542c3c92a48317cb6c6b975dbd04d2b23";
"c693db96c8cbbea290d505e731ed16376fa998e68ab4203f7af599c0f78f983f";
"83120bdf88b335f254baf192094c883858aaa785b0bb4255da24d871193fe64e";
"a69a181630f9d27c9dd91e91aade10e310b38cd0f73ee33b08df594fe0e65f1c";
"78b0bd6bc451bd7d6b65e65ce4e81c8f25a5c74c4748777007f35f5a5983da09";
"1025092f62588e2cf536645b238ce88402cec77a5902ab455aa1d4131eaa6f2c";
] );
( [
"17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb08b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e1";
"0626cc1d12a23b7886a4f0b0ef67959b7741bf3cfb2c4a020c7d4cc363796a0a7c0f6ea5ac673865de74a88058e9326e0b96b667165e7320c6190a083999375619571ffa66ebc175565e827666d40fa56bbbb30dbaf0a729436545ac7cfd7ac1";
"18932b935642ee2b2672c870314197639820562829b8957224ad4ace94ffd1b86523a7ba55c84997e3f528c69f40b42608fdce18c7373edb89594f25c3cb5b8ebf2addb430c713f54b65dca8a3e6f8814dd1cd6cb29ab5c159a10dd992dc9c95";
"0016c8fe3880c4e18d15fd2401912cf56ea4439c98ff93a78b21d0e8533158b96a575252af3e2fc75f23f642bc04b42805444f638fbbe3cfb6cdb4c234a81858b572d8f68d6081d20e9097dc8dc7953b4333714639507e856920905ed76d21d2";
"184b377e14ed55e31b35f445cbf468b705e9c71a946a55522a3c85be25b393a2e67265cef7df8edd619002e2a3ad065b0af36ec22b8926ff733c9b7b5478843b0e3dc84e8d94c1d0b2b923cba275e78d8acce1b8537cb242778600e5af19ad46";
"0ed4874249807cc2a1f940e15fd4eb119786194a48a158135a951f58b4ca3b80f916c02a405b9c040338d4da3adbedfa047fae6023b2fd812933d114c1437c749aac6376d0a601229f5f825626974ee8e7ece8eacc4d6b6d75e7f3fd7201947d";
"0037ae4b3f2191c9df9e179727b6739474e1bc4d502de668d4985a82404d5317ae79ecc63e6f8042ab520a74e897ad720406a2cfc76490fb0fd28cb8e13c706de396c37d613631818b58ea9ecc3db30d86217cd85c0da3c5998927b3789dcff4";
"0eadc0859b8076866c8322add1566589767b15ea33ff225338df15bdb2c196a23e94baf9bc04f85f1f88070a4280adaf0db1f9d1016e4e90a0ae9b6cc47bb93028396d39ce24edfefef0713ca28c6d25b0a211b2dd918fe9d460d9692b8ae235";
"13be3a770003d560b33744d5a0d89dc4a492c78e30f0921e68a5d863bb656693ea947409dc41141bc489ac98e05c3401017107261b06441911b8f980496025697b67c1ccae1b9b019bbd6620722b432991f9db5b86d08a3caeac0bfebb65d0f3";
"0f7017817c17b571a9622905e433400f744a2f4f81c3a6b31d4973ea12fa3f0735c5a7486c699d7abc5eb33b0f1386f60667140c432c00669eccad648ce4b5ac49173f3af926333af85adf0526c17ad9c3987aeb99c8bde2c95a6f11594f2d6a";
"16e397128d39fb23a2ece3b628af491c640768dcea09347039a976d8621c010ff321f9a96b57600f29379452152b0c6c02957a9b42a70224cca6f022d436746ab64a9def1e2da6a3899556ecf37e0d1ab3647838204fa9f37dede4e2bb580fb1";
"00b2c446c12f14a5c5d9b85f3127062aa19865b66b33be62d696fcbe02fd14f8237b94a15cd0241d9ad1a4007942672417149de0f1606edfedb04bfc59bbc103ba7d7d2f6b8eb84a462e115369e9ce654539a1f5891ba704e1f3d95ea74bc75b";
"0c27a162d50f8911af173175e64ec8f04b7cc38e6b96f206fae90e41b65887e61430e7eb5c65396a4c8727c3493f358402800d0ccbd9fd71bcbb0e1cd88f7894078e1b8f8c6a9a5101d190f25675211ff5dd3a1cc1ece5cab23ddf00dbd836e0";
],
[
"ca7b2410082015a7b7819d2cde1b46f2f15b2980af0cb0662439286a3e853b70";
"34e57070ec49b5d3d43ce6771c21c478b9af5d66a27a385acc895bde459b575b";
"5bd281d23682c4cd2df6f83ae6af7cfc5f2e67423ffca271a3a1d5c35a7feb3c";
"9acdfd591f1de1433fc94c56239cfb3df27a3c05f7b6c27a5c4fd1d5be52f045";
"48c2e23f9007c5fa3e6378ed195165fdb35a2eede808f47df6c8685e2982ad5b";
"0066dfd993b3fd05afae8e515f13ad97db1a91093e3bce6c2c77e76f3dc2e01f";
] );
]
in
List.iter
(fun (srs_bytes, scalars_bs) ->
let srs =
List.map
(fun x -> Bls12_381.G1.of_bytes_exn (Hex.to_bytes (`Hex x)))
srs_bytes
|> Array.of_list
in
let scalars =
List.map
(fun x -> Bls12_381.Fr.of_bytes_exn (Hex.to_bytes (`Hex x)))
scalars_bs
|> Array.of_list
in
let start = 1 in
let exp_output = Bls12_381.G1.pippenger ~start srs scalars in
let pippenger_ctxt = Srs.of_array srs in
let poly = Poly.of_dense scalars in
let output = Srs.pippenger ~offset:start pippenger_ctxt poly in
assert (Bls12_381.G1.eq output exp_output))
vectors
let bigstring_of_file filename =
let fd = Unix.openfile filename [Unix.O_RDONLY] 0o440 in
Bigarray.array1_of_genarray
@@ Unix.map_file
fd
Bigarray.char
Bigarray.c_layout
false
let test_load_from_file () =
let bs = bigstring_of_file "srs_zcash_g1_5" in
let max_size = 1 lsl 5 in
let srs = Srs.of_bigstring bs ~len:max_size |> Result.get_ok in
assert (Srs.size srs = max_size) ;
match Srs.of_bigstring bs ~len:(max_size + 1) with
| Error (`End_of_file _) -> ()
| _ -> assert false
let test_load_from_file_vector () =
let bs = bigstring_of_file "srs_zcash_g1_5" in
let srs = Srs.of_bigstring bs ~len:(1 lsl 5) |> Result.get_ok in
let v =
[
"17f1d3a73197d7942695638c4fa9ac0fc3688c4f9774b905a14e3a3f171bac586c55e83ff97a1aeffb3af00adb22c6bb08b3f481e3aaa0f1a09e30ed741d8ae4fcf5e095d5d00af600db18cb2c04b3edd03cc744a2888ae40caa232946c5e7e1";
"0626cc1d12a23b7886a4f0b0ef67959b7741bf3cfb2c4a020c7d4cc363796a0a7c0f6ea5ac673865de74a88058e9326e0b96b667165e7320c6190a083999375619571ffa66ebc175565e827666d40fa56bbbb30dbaf0a729436545ac7cfd7ac1";
"18932b935642ee2b2672c870314197639820562829b8957224ad4ace94ffd1b86523a7ba55c84997e3f528c69f40b42608fdce18c7373edb89594f25c3cb5b8ebf2addb430c713f54b65dca8a3e6f8814dd1cd6cb29ab5c159a10dd992dc9c95";
]
in
List.iteri
(fun i expected ->
let (`Hex e) = Bls12_381.G1.to_bytes (Srs.get srs i) |> Hex.of_bytes in
assert (e = expected))
v
let tests =
List.map
(fun (name, f) -> Alcotest.test_case name `Quick f)
[
("get_set", test_get);
("pippenger", test_pippenger);
("pippenger test vectors", test_vector_pippenger);
( "add and extract srs from pippenger ctxt",
test_add_and_extract_srs_from_pippenger_ctxt );
("load_from_file", test_load_from_file);
("load_from_file_vector", test_load_from_file_vector);
]
|
ee41b94a097951f654c58364df2b8c6cfe35a1c9697e61ea606a71b94020b440 | DaveWM/lobster-writer | editable_list.cljs | (ns lobster-writer.components.editable-list
(:require [reagent.core :as reagent]
[lobster-writer.utils :as utils]))
(defn editable-list []
(let [*input-text (reagent/atom "")]
(fn [{:keys [on-item-added on-item-removed on-item-moved-up on-item-moved-down label-fn items]
:or {label-fn identity}}]
[:div.editable-list
(when (seq items)
[:ul.uk-list.uk-list-striped
(->> items
(map-indexed (fn [idx item]
^{:key item}
[:li.uk-flex.uk-flex-row.uk-flex-between.uk-flex-middle
[:span.editable-list__label
(-> (label-fn item)
(utils/highlight-links (fn [url]
[:a
{:href url
:target "_blank"}
url])))]
[:span.editable-list__actions
(when on-item-removed
[:button.uk-button.uk-button-default.uk-button-rounded.uk-button-small
{:on-click (partial on-item-removed item)}
[:i.zmdi.zmdi-delete]])
(when on-item-moved-up
[:button.uk-button.uk-button-default.uk-button-rounded.uk-button-small
{:on-click (partial on-item-moved-up item)
:disabled (zero? idx)}
[:i.zmdi.zmdi-chevron-up]])
(when on-item-moved-down
[:button.uk-button.uk-button-default.uk-button-rounded.uk-button-small
{:on-click (partial on-item-moved-down item)
:disabled (= idx (dec (count items)))}
[:i.zmdi.zmdi-chevron-down]])]])))])
(when on-item-added
^{:key :input}
[:div.uk-flex.uk-flex-row
[:input.uk-input
{:value @*input-text
:on-change #(reset! *input-text (-> % .-target .-value))
:on-key-press #(when (= 13 (.-which %))
(do (on-item-added @*input-text)
(reset! *input-text "")))}]
[:button.uk-button.uk-button-default
{:on-click #(do (on-item-added @*input-text)
(reset! *input-text ""))}
"Add"]])])))
| null | https://raw.githubusercontent.com/DaveWM/lobster-writer/98eef942cc61725f0a2e00c34e6952c1e262f1ad/src/cljs/lobster_writer/components/editable_list.cljs | clojure | (ns lobster-writer.components.editable-list
(:require [reagent.core :as reagent]
[lobster-writer.utils :as utils]))
(defn editable-list []
(let [*input-text (reagent/atom "")]
(fn [{:keys [on-item-added on-item-removed on-item-moved-up on-item-moved-down label-fn items]
:or {label-fn identity}}]
[:div.editable-list
(when (seq items)
[:ul.uk-list.uk-list-striped
(->> items
(map-indexed (fn [idx item]
^{:key item}
[:li.uk-flex.uk-flex-row.uk-flex-between.uk-flex-middle
[:span.editable-list__label
(-> (label-fn item)
(utils/highlight-links (fn [url]
[:a
{:href url
:target "_blank"}
url])))]
[:span.editable-list__actions
(when on-item-removed
[:button.uk-button.uk-button-default.uk-button-rounded.uk-button-small
{:on-click (partial on-item-removed item)}
[:i.zmdi.zmdi-delete]])
(when on-item-moved-up
[:button.uk-button.uk-button-default.uk-button-rounded.uk-button-small
{:on-click (partial on-item-moved-up item)
:disabled (zero? idx)}
[:i.zmdi.zmdi-chevron-up]])
(when on-item-moved-down
[:button.uk-button.uk-button-default.uk-button-rounded.uk-button-small
{:on-click (partial on-item-moved-down item)
:disabled (= idx (dec (count items)))}
[:i.zmdi.zmdi-chevron-down]])]])))])
(when on-item-added
^{:key :input}
[:div.uk-flex.uk-flex-row
[:input.uk-input
{:value @*input-text
:on-change #(reset! *input-text (-> % .-target .-value))
:on-key-press #(when (= 13 (.-which %))
(do (on-item-added @*input-text)
(reset! *input-text "")))}]
[:button.uk-button.uk-button-default
{:on-click #(do (on-item-added @*input-text)
(reset! *input-text ""))}
"Add"]])])))
| |
a39915f81a9309927a8d4877354e867dc15a93b05c7c512146afb51837f4a7e6 | RefactoringTools/HaRe | BCpp.hs | # LANGUAGE FlexibleInstances #
# LANGUAGE CPP #
Check that we can parse a file which requires CPP
module BCpp where
bob :: Int -> Int -> Int
#if __GLASGOW_HASKELL__ > 704
bob x y = x + y
#else
bob x y = x + y * 2
#endif
| null | https://raw.githubusercontent.com/RefactoringTools/HaRe/ef5dee64c38fb104e6e5676095946279fbce381c/test/testdata/BCpp.hs | haskell | # LANGUAGE FlexibleInstances #
# LANGUAGE CPP #
Check that we can parse a file which requires CPP
module BCpp where
bob :: Int -> Int -> Int
#if __GLASGOW_HASKELL__ > 704
bob x y = x + y
#else
bob x y = x + y * 2
#endif
| |
9af67d7cbc02b511a67726d2d225b93bfc6663ca3c76340480990bbcfd07d749 | synduce/Synduce | Reduce.mli | open Term
(** Trees of conditionals: a leaf ([CBr]) carries a value, an internal node
    ([CIf]) guards its two subtrees with a boolean [term]. *)
module CondTree : sig
  type 'a t =
    | CBr of 'a
    | CIf of term * 'a t * 'a t

  (** Fold a tree of terms into a single nested if-then-else term. *)
  val to_term : term t -> term

  (** View a term as a tree of conditionals. *)
  val of_term : term -> term t

  (** Map [f] over the values stored at the leaves. *)
  val map : 'a t -> f:('a -> 'b) -> 'b t

  (** [Some] of the tree when every leaf is [Some], [None] otherwise. *)
  val all_or_none : 'a option t -> 'a t option
end
(* Result of resolving a function symbol: a plain function (argument
   patterns and body), a whole PMRS, a PMRS carried by [FRNonT]
   (NOTE(review): the name suggests a PMRS non-terminal — confirm), or
   unknown. *)
type func_resolution =
  | FRFun of fpattern list * term
  | FRPmrs of PMRS.t
  | FRNonT of PMRS.t
  | FRUnknown

(** Resolve the function definition of a term in an environment. *)
val resolve_func : PMRS.Functions.ctx -> Context.t -> term -> func_resolution
(** Project irreducible terms into tuples. *)
val project_irreducible_terms : Context.t -> term -> term

(**
  [until_irreducible f t] applies [f] to [t] until [t] is unchanged by
  application of [f] or we have reached the limit of rewrites Reduce._MAX.
  @param f A function that returns a pair of a term and a boolean. It should
  be written such that [f x = x', b => (b = (x != x'))]. The boolean
  indicates whether the term has been changed.
  @param t The term that will be rewritten using [f].
*)
val until_irreducible : (term -> term * bool) -> term -> term

(**
  [rule_lookup ctx rule_map f args] searches for a rewrite rule in
  [rule_map] whose head matches [f args]. For each matched rule, it returns
  the rhs of the rule with the substitutions corresponding to applying
  [f args]. If no rule is matched, returns an empty list.
*)
val rule_lookup
  :  Context.t
  -> ('a, variable * variable list * pattern option * term, 'b) Base.Map.t
  -> variable
  -> term list
  -> term list
(**
  [reduce_term ~fctx ~ctx t] reduces the term [t] using lambda-calculus
  reduction rules (let x = e in e' is equivalent to (λx.e') e).
  Function symbols in applications are resolved by looking up functions in
  the current function context [fctx] or context [ctx].
*)
val reduce_term
  :  ?projecting:bool
  -> ?unboxing:bool
  -> fctx:PMRS.Functions.ctx
  -> ctx:Context.t
  -> term
  -> term

(**
  [reduce_pmrs ~fctx ~ctx p t] is a shortcut for
  [reduce_term ~fctx ~ctx (mk_app (mk_var p.pmain_symb) [t])] when [p] is
  in the current environment.
*)
val reduce_pmrs : fctx:PMRS.Functions.ctx -> ctx:Context.t -> PMRS.t -> term -> term
(**
  Same as [reduce_term] but returns a list of terms, one for each reduction
  step using a pmrs rule.
*)
val calc_term : fctx:PMRS.Functions.ctx -> ctx:Context.t -> term -> term list

(**
  [instantiate_with_solution ~fctx ~ctx p defs] returns the PMRS [p], in
  which the unknowns have been replaced by their definitions given in
  [defs].
  @param fctx The function context.
  @param ctx The current context with types and names.
  @param p A PMRS with unknowns.
  @param defs Definitions for the unknowns in the PMRS, in the form of a
  list of triples (unknown function name, arguments, body of the unknown).
*)
val instantiate_with_solution
  :  fctx:PMRS.Functions.ctx
  -> ctx:Context.t
  -> PMRS.t
  -> (string * variable list * term) list
  -> PMRS.t

(** Returns [true] if the PMRS is equivalent to the identity function.
    Uses the reduction procedures to check whether [f x = x], symbolically. *)
val is_identity : fctx:PMRS.Functions.ctx -> ctx:Context.t -> PMRS.t -> bool
| null | https://raw.githubusercontent.com/synduce/Synduce/42d970faa863365f10531b19945cbb5cfb70f134/src/lang/Reduce.mli | ocaml | * Resolve the funcion definiton of a term in an environment.
* Project irreducible terms into tuples.
*
[until_irreducible f t] applies [f] to [t] until [t] is unchanged by application of [f] or we have
reached the limit of rewrites Reduce._MAX.
@param f A function that returns a pair of a term and a boolean. It should be written such that
[f x = x', b => (b = (x != x'))]. The boolean indicates whether the term has be changed.
@param t The term that will be rewritten using [f].
*
Same as `reduce_term` but returns a list of terms, one for each reduction step using
a pmrs rule.
* Returns [true] if the PMRS is equivalent to the identity function.
Uses the reduction procedures to check wether [f x = x], symbolically.
| open Term
module CondTree : sig
type 'a t =
| CBr of 'a
| CIf of term * 'a t * 'a t
val to_term : term t -> term
val of_term : term -> term t
val map : 'a t -> f:('a -> 'b) -> 'b t
val all_or_none : 'a option t -> 'a t option
end
type func_resolution =
| FRFun of fpattern list * term
| FRPmrs of PMRS.t
| FRNonT of PMRS.t
| FRUnknown
val resolve_func : PMRS.Functions.ctx -> Context.t -> term -> func_resolution
val project_irreducible_terms : Context.t -> term -> term
val until_irreducible : (term -> term * bool) -> term -> term
(** [rule_lookup ctx rule_map f args] searches for a rewrite rule in [rule_map] whose head matches
    [f args]. For each matched rule, it returns the rhs of the rule with the substitutions
    obtained by matching [f args] applied. If no rule is matched, returns an empty list.
*)
val rule_lookup
: Context.t
-> ('a, variable * variable list * pattern option * term, 'b) Base.Map.t
-> variable
-> term list
-> term list
(** [reduce_term ~fctx ~ctx t] reduces the term [t] using lambda-calculus reduction rules
    (let x = e in e' is equivalent to (λx.e') e).
    Function symbols in applications are resolved by looking up functions in the
    current function context [fctx] or context [ctx].
*)
val reduce_term
: ?projecting:bool
-> ?unboxing:bool
-> fctx:PMRS.Functions.ctx
-> ctx:Context.t
-> term
-> term
(** [reduce_pmrs ~fctx ~ctx p t] is a shortcut for
    [reduce_term ~fctx ~ctx (mk_app (mk_var p.pmain_symb) [t])]
    when [p] is in the current environment.
*)
val reduce_pmrs : fctx:PMRS.Functions.ctx -> ctx:Context.t -> PMRS.t -> term -> term
val calc_term : fctx:PMRS.Functions.ctx -> ctx:Context.t -> term -> term list
(** [instantiate_with_solution ~fctx ~ctx p defs] returns the PMRS [p], in which the unknowns have been
    replaced by their definitions given in [defs].
    @param fctx The function context.
    @param ctx The current context with types and names.
    @param p A PMRS with unknowns.
    @param defs Definitions for the unknowns in the PMRS, in the form of a list of triples
    (unknown function name, arguments, body of the unknown).
*)
val instantiate_with_solution
: fctx:PMRS.Functions.ctx
-> ctx:Context.t
-> PMRS.t
-> (string * variable list * term) list
-> PMRS.t
val is_identity : fctx:PMRS.Functions.ctx -> ctx:Context.t -> PMRS.t -> bool
|
630bc0414dfb5a28ab62b8f772a4ccb9d0b2dcabc1b228194b39e0b2633a4e8a | FlowForwarding/loom | tap_yaws.erl | %%------------------------------------------------------------------------------
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%
%%-----------------------------------------------------------------------------
%%
@author Infoblox Inc < >
2013 Infoblox Inc
%% @doc Starts yaws.
-module(tap_yaws).
-behavior(gen_server).
-export([start_link/1]).
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-define(STATE, tap_yaws_state).
-record(?STATE, {
supervisor
}).
%------------------------------------------------------------------------------
% API
%------------------------------------------------------------------------------
%% @doc Start the yaws manager process, registered locally as ?MODULE.
%% `Supervisor' is the supervisor under which the yaws child processes
%% will later be started (see the 'start' cast handled below).
start_link(Supervisor) ->
    gen_server:start_link({local, ?MODULE}, ?MODULE, [Supervisor], []).
%------------------------------------------------------------------------------
% gen_server callbacks
%------------------------------------------------------------------------------
%% @private Initialization. Yaws startup is deferred via an asynchronous
%% 'start' cast so that init/1 returns immediately and does not block the
%% supervisor while yaws is configured.
init([Supervisor]) ->
    gen_server:cast(?MODULE, start),
    {ok, #?STATE{supervisor = Supervisor}}.
%% @private No synchronous API is exposed; any call is a programming error.
handle_call(Msg, From, State) ->
    error({no_handle_call, ?MODULE}, [Msg, From, State]).
%% @private One-shot yaws startup requested from init/1. The `ok' match
%% crashes this process (and lets the supervisor react) if startup fails.
handle_cast(start, State = #?STATE{supervisor = Supervisor}) ->
    ok = start_yaws(Supervisor),
    {noreply, State};
handle_cast(Msg, State) ->
    error({no_handle_cast, ?MODULE}, [Msg, State]).
%% @private No info messages are expected.
handle_info(Msg, State) ->
    error({no_handle_info, ?MODULE}, [Msg, State]).
terminate(_Reason, _State) ->
    ok.
code_change(_OldVersion, State, _Extra) ->
    {ok, State}.
%------------------------------------------------------------------------------
% Local Functions
%------------------------------------------------------------------------------
%% @doc Configure and start yaws in embedded mode under `Supervisor'.
%% Listen port/address and log settings come from tap_config; static
%% content is served from the tapestry application's priv/www directory.
start_yaws(Supervisor) ->
    Port = tap_config:getconfig(web_port),
    IpAddr = tap_config:getconfig(web_address),
    LogDir = tap_config:getenv(web_log),
    Id = tap_config:getenv(web_id),
    % Document root: priv/www of the tapestry application.
    DocRoot = filename:join([code:priv_dir(tapestry), "www"]),
    % Global yaws configuration (only the log directory is customized).
    GL = [
        {logdir, LogDir}],
    % Server (listener) configuration for the single embedded server.
    SL = [
        {port, Port},
        {listen, IpAddr}],
    {ok, SCList, GC, ChildSpecs} =
        yaws_api:embedded_start_conf(DocRoot, SL, GL, Id),
    % NOTE(review): the start_child results are ignored here; an
    % {error, _} from a child would only surface later via setconf --
    % confirm this best-effort behavior is intended.
    [supervisor:start_child(Supervisor, CS) || CS <- ChildSpecs],
    yaws_api:setconf(GC, SCList),
    ok.
| null | https://raw.githubusercontent.com/FlowForwarding/loom/86a9c5aa8b7d4776062365716c9a3dbbf3330bc5/tapestry/apps/tapestry/src/tap_yaws.erl | erlang | ------------------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
-----------------------------------------------------------------------------
@doc Starts yaws.
------------------------------------------------------------------------------
API
------------------------------------------------------------------------------
------------------------------------------------------------------------------
gen_server callbacks
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Local Functions
------------------------------------------------------------------------------ | Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author Infoblox Inc < >
2013 Infoblox Inc
-module(tap_yaws).
-behavior(gen_server).
-export([start_link/1]).
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
-define(STATE, tap_yaws_state).
-record(?STATE, {
supervisor
}).
start_link(Supervisor) ->
gen_server:start_link({local, ?MODULE}, ?MODULE, [Supervisor], []).
init([Supervisor]) ->
gen_server:cast(?MODULE, start),
{ok, #?STATE{supervisor = Supervisor}}.
handle_call(Msg, From, State) ->
error({no_handle_call, ?MODULE}, [Msg, From, State]).
handle_cast(start, State = #?STATE{supervisor = Supervisor}) ->
ok = start_yaws(Supervisor),
{noreply, State};
handle_cast(Msg, State) ->
error({no_handle_cast, ?MODULE}, [Msg, State]).
handle_info(Msg, State) ->
error({no_handle_info, ?MODULE}, [Msg, State]).
terminate(_Reason, _State) ->
ok.
code_change(_OldVersion, State, _Extra) ->
{ok, State}.
start_yaws(Supervisor) ->
Port = tap_config:getconfig(web_port),
IpAddr = tap_config:getconfig(web_address),
LogDir = tap_config:getenv(web_log),
Id = tap_config:getenv(web_id),
DocRoot = filename:join([code:priv_dir(tapestry), "www"]),
GL = [
{logdir, LogDir}],
SL = [
{port, Port},
{listen, IpAddr}],
{ok, SCList, GC, ChildSpecs} =
yaws_api:embedded_start_conf(DocRoot, SL, GL, Id),
[supervisor:start_child(Supervisor, CS) || CS <- ChildSpecs],
yaws_api:setconf(GC, SCList),
ok.
|
f6449aef2505adf372d5a70c9425f569b63b8b47571f95b06a918c56123c0c63 | quark-lang/quark | Expression.hs | module Core.Parser.AST.Expression where
import Core.Parser.AST.Literal
import Core.Utility.Color (bold, bBlue)
-- | Parsed S-expression AST of the Quark language.
data Expression
  = Node Expression [Expression]  -- ^ Application: a head expression and its arguments.
  | List [Expression]             -- ^ List form, shown in square brackets.
  | Literal Literal               -- ^ Wrapped literal value (see "Core.Parser.AST.Literal").
  | Quoted Expression             -- ^ Quoted expression, shown with a leading @\@@.
  | Identifier String             -- ^ Bare identifier.
  deriving Eq
-- | Render expressions back into Quark's S-expression surface syntax.
-- Colors come from Core.Utility.Color (identifiers in bold blue, the
-- quote marker in bold).
instance Show Expression where
  -- Joining the head and arguments with a single 'unwords' avoids the
  -- trailing space the previous code printed for zero-argument nodes,
  -- i.e. "(f)" instead of "(f )".
  show (Node e es) = "(" ++ unwords (map show (e : es)) ++ ")"
  show (List es) = "[" ++ unwords (map show es) ++ "]"
  show (Literal l) = show l
  show (Quoted e) = bold "@" ++ show e
  show (Identifier s) = bBlue s
import Core.Parser.AST.Literal
import Core.Utility.Color (bold, bBlue)
data Expression
= Node Expression [Expression]
| List [Expression]
| Literal Literal
| Quoted Expression
| Identifier String
deriving Eq
instance Show Expression where
show (Node e es) = "(" ++ show e ++ " " ++ unwords (map show es) ++ ")"
show (List es) = "[" ++ unwords (map show es) ++ "]"
show (Literal l) = show l
show (Quoted e) = bold "@" ++ show e
show (Identifier s) = bBlue s | |
40485fc1d87f969349e5b8fe0049223f2acd5be943a65c28bc9019cb6826d936 | abarbu/haskell-torch | Inplace.hs | # LANGUAGE AllowAmbiguousTypes , CPP , ConstraintKinds , DataKinds , FlexibleContexts , FlexibleInstances , FunctionalDependencies , GADTs #
# LANGUAGE KindSignatures , MultiParamTypeClasses , OverloadedLabels , OverloadedStrings , PartialTypeSignatures , PolyKinds , RankNTypes #
# LANGUAGE ScopedTypeVariables , TemplateHaskell , TypeApplications , TypeFamilies , TypeFamilyDependencies , TypeInType , TypeOperators #
{-# LANGUAGE UndecidableInstances #-}
{-# OPTIONS_GHC -pgmP cc -optP -E -optP -undef -optP -std=c89 #-}
-- | Operations that mutate tensors.
module Torch.Inplace where
import Data.Coerce
import Data.Default
import Data.Maybe
import Data.Singletons
import Foreign.C.Types
import GHC.Int
import Prelude as P
import qualified Torch.C.Tensor as C
import Torch.Misc
import Torch.Tensor
import Torch.Types
-- * Mathematical operations
mul_ :: forall ty ki sz sz'.
(SingI (BroadcastSizes sz sz'))
=> Tensor ty ki sz -> Tensor ty ki sz' -> IO (Tensor ty ki sz)
mul_ x@Tensor{} y@Tensor{} = do
generic x y
pure x
where
generic x@(Tensor t _) y@(Tensor t' _) = do
let szExpanded = demoteNv @(BroadcastSizes sz sz')
t'e <- C.expand_mab t' szExpanded (boolc True)
C.mul__mt t t'e
mulScalar_ :: forall ty ki sz.
Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
mulScalar_ x@(Tensor t _) alpha = do
s <- toCScalar @ty @ki $ hsScalarToC alpha
C.mul__ms t s
pure x
addcmul_ :: forall ty ki sz sz' sz''.
(SingI (BroadcastSizes sz' sz''), sz ~ BroadcastSizes sz' sz'')
=> Tensor ty ki sz
-> TensorTyToHs ty
-> Tensor ty ki sz'
-> Tensor ty ki sz''
-> IO (Tensor ty ki sz)
addcmul_ (Tensor i _) val (Tensor t1 _) (Tensor t2 _) = do
let szExpanded = demoteNv @(BroadcastSizes sz (BroadcastSizes sz' sz''))
t1' <- C.expand_mab t1 szExpanded (boolc True)
t2' <- C.expand_mab t2 szExpanded (boolc True)
val' <- toCScalar @ty @ki $ hsScalarToC val
rt <- C.addcmul__mtts i t1' t2' val'
pure $ Tensor rt Nothing
div_ :: forall ty ki sz sz'.
(SingI (BroadcastSizes sz sz'))
=> Tensor ty ki sz -> Tensor ty ki sz' -> IO (Tensor ty ki sz)
div_ x@Tensor{} y@Tensor{} = do
generic x y
pure x
where
generic x@(Tensor t _) y@(Tensor t' _) = do
let szExpanded = demoteNv @(BroadcastSizes sz sz')
t'e <- C.expand_mab t' szExpanded (boolc True)
C.div__mt t t'e
divScalar_ :: forall ty ki sz.
Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
divScalar_ x@(Tensor t _) alpha = do
s <- toCScalar @ty @ki $ hsScalarToC alpha
C.div__ms t s
pure x
addcdiv_ :: forall ty ki sz sz' sz''.
(SingI (BroadcastSizes sz' sz''), sz ~ BroadcastSizes sz' sz'')
=> Tensor ty ki sz
-> TensorTyToHs ty
-> Tensor ty ki sz'
-> Tensor ty ki sz''
-> IO (Tensor ty ki sz)
addcdiv_ (Tensor i _) val (Tensor t1 _) (Tensor t2 _) = do
let szExpanded = demoteNv @(BroadcastSizes sz (BroadcastSizes sz' sz''))
t1' <- C.expand_mab t1 szExpanded (boolc True)
t2' <- C.expand_mab t2 szExpanded (boolc True)
val' <- toCScalar @ty @ki $ hsScalarToC val
rt <- C.addcdiv__mtts i t1' t2' val'
pure $ Tensor rt Nothing
add_ :: forall ty ki sz.
Num (TensorTyToHs ty) =>
Tensor ty ki sz -> Tensor ty ki sz -> IO (Tensor ty ki sz)
add_ x y = add_' x y 1
-- | x + alpha * y
add_' :: forall ty ki sz.
Tensor ty ki sz -> Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
add_' x@(Tensor p _) y@(Tensor p' _) alpha = do
s <- toCScalar @ty @ki $ hsScalarToC alpha
C.add__mts p p' s
pure x
addScalar_ :: forall ty ki sz.
Num (TensorTyToHs ty) =>
Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
addScalar_ x y = addScalar_' x y 1
-- | x + alpha * y
addScalar_' :: forall ty ki sz.
Tensor ty ki sz -> TensorTyToHs ty -> TensorTyToHs ty -> IO (Tensor ty ki sz)
addScalar_' x@(Tensor p _) y alpha = do
y' <- toCScalar @ty @ki $ hsScalarToC y
alpha' <- toCScalar @ty @ki $ hsScalarToC alpha
C.add__mss p y' alpha'
pure x
sub_ :: forall ty ki sz.
Num (TensorTyToHs ty) =>
Tensor ty ki sz -> Tensor ty ki sz -> IO (Tensor ty ki sz)
sub_ x y = sub_' x y 1
-- | x + alpha * y
sub_' :: forall ty ki sz.
Tensor ty ki sz -> Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
sub_' x@(Tensor p _) y@(Tensor p' _) alpha = do
s <- toCScalar @ty @ki $ hsScalarToC alpha
C.sub__mts p p' s
pure x
subScalar_ :: forall ty ki sz.
Num (TensorTyToHs ty) =>
Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
subScalar_ x y = subScalar_' x y 1
-- | x + alpha * y
subScalar_' :: forall ty ki sz.
Tensor ty ki sz -> TensorTyToHs ty -> TensorTyToHs ty -> IO (Tensor ty ki sz)
subScalar_' x@(Tensor p _) y alpha = do
y' <- toCScalar @ty @ki $ hsScalarToC y
alpha' <- toCScalar @ty @ki $ hsScalarToC alpha
C.sub__mss p y' alpha'
pure x
abs_ :: (SingI ty, SingI ki, SingI sz) =>
Tensor ty ki sz -> IO (Tensor ty ki sz)
abs_ x@(Tensor t a) = do
C.abs___t t
pure x
copy_ :: Tensor ty ki sz -> Tensor ty ki sz -> IO (Tensor ty ki sz)
copy_ t@(Tensor dst _) (Tensor src _) = do
C.copy__mtb dst src (boolc False)
pure t
sin_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
sin_ x@(Tensor t _) = C.sin___t t >> pure x
sinh_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
sinh_ x@(Tensor t _) = C.sinh___t t >> pure x
asin_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
asin_ x@(Tensor t _) = C.asin___t t >> pure x
cos_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
cos_ x@(Tensor t _) = C.cos___t t >> pure x
cosh_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
cosh_ x@(Tensor t _) = C.cosh___t t >> pure x
acos_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
acos_ x@(Tensor t _) = C.acos___t t >> pure x
tan_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
tan_ x@(Tensor t _) = C.tan___t t >> pure x
tanh_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
tanh_ x@(Tensor t _) = C.tanh___t t >> pure x
atan_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
atan_ x@(Tensor t _) = C.atan___t t >> pure x
ceil_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
ceil_ x@(Tensor t _) = C.ceil___t t >> pure x
floor_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
floor_ x@(Tensor t _) = C.floor___t t >> pure x
clamp_ :: forall ty ki sz. TensorTyToHs ty -> TensorTyToHs ty -> Tensor ty ki sz -> IO (Tensor ty ki sz)
clamp_ lower upper x@(Tensor t _) = do
l <- toCScalar @ty @ki (hsScalarToC lower)
u <- toCScalar @ty @ki (hsScalarToC upper)
C.clamp___tss t (Just l) (Just u)
pure x
clampMax_ :: forall ty ki sz. TensorTyToHs ty -> Tensor ty ki sz -> IO (Tensor ty ki sz)
clampMax_ upper x@(Tensor t _) = do
u <- toCScalar @ty @ki (hsScalarToC upper)
C.clamp_max___ts t u
pure x
clampMin_ :: forall ty ki sz. TensorTyToHs ty -> Tensor ty ki sz -> IO (Tensor ty ki sz)
clampMin_ lower x@(Tensor t _) = do
l <- toCScalar @ty @ki (hsScalarToC lower)
C.clamp_min___ts t l
pure x
atan2_ :: forall ty ki sz sz'.
Tensor ty ki sz -> Tensor ty ki sz -> IO (Tensor ty ki sz)
atan2_ x@(Tensor p _) (Tensor p' _) = do
C.atan2__mt p p'
pure x
digamma_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
digamma_ x@(Tensor t _) = C.digamma__m t >> pure x
erf_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
erf_ x@(Tensor t _) = C.erf__m t >> pure x
erfc_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
erfc_ x@(Tensor t _) = C.erfc__m t >> pure x
erfinv_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
erfinv_ x@(Tensor t _) = C.erfinv__m t >> pure x
exp_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
exp_ x@(Tensor t _) = C.exp__m t >> pure x
expm1_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
expm1_ x@(Tensor t _) = C.expm1__m t >> pure x
fmod_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
fmod_ x@(Tensor t _) div = do
d <- toCScalar @ty @ki (hsScalarToC div)
C.fmod__ms t d
pure x
frac_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
frac_ x@(Tensor t _) = C.frac__m t >> pure x
log_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
log_ x@(Tensor t _) = C.log__m t >> pure x
log10_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
log10_ x@(Tensor t _) = C.log10__m t >> pure x
log1p_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
log1p_ x@(Tensor t _) = C.log1p__m t >> pure x
log2_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
log2_ x@(Tensor t _) = C.log2__m t >> pure x
mvlgamma_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Int64 -> IO (Tensor ty ki sz)
mvlgamma_ t@(Tensor ptr _) dim = do
C.mvlgamma__m6 ptr dim
pure t
neg_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
neg_ x@(Tensor t _) = C.neg__m t >> pure x
reciprocal_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
reciprocal_ x@(Tensor t _) = C.reciprocal__m t >> pure x
remainder_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
remainder_ x@(Tensor t _) div = do
d <- toCScalar @ty @ki (hsScalarToC div)
C.remainder__ms t d >> pure x
round_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
round_ x@(Tensor t _) = C.round__m t >> pure x
sqrt_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
sqrt_ x@(Tensor t _) = C.sqrt__m t >> pure x
rsqrt_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
rsqrt_ x@(Tensor t _) = C.rsqrt__m t >> pure x
sign_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
sign_ x@(Tensor t _) = C.sign__m t >> pure x
trunc_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
trunc_ x@(Tensor t _) = C.trunc__m t >> pure x
-- * Non-linear activation functions
relu_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
relu_ x@(Tensor t a) = wrapTensorM (C.relu__m t) a
threshold_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> TensorTyToHs ty -> IO (Tensor ty ki sz)
threshold_ x@(Tensor t a) threshold value = do
x <- toCScalar @ty @ki (hsScalarToC threshold)
y <- toCScalar @ty @ki (hsScalarToC value)
wrapTensorM (C.threshold___tss t x y) a
hardtanh_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> TensorTyToHs ty -> IO (Tensor ty ki sz)
hardtanh_ x@(Tensor t a) min max = do
x <- toCScalar @ty @ki (hsScalarToC min)
y <- toCScalar @ty @ki (hsScalarToC max)
wrapTensorM (C.hardtanh___tss t x y) a
relu6_ :: Num (TensorTyToHs ty) => Tensor ty ki sz -> IO (Tensor ty ki sz)
relu6_ t = hardtanh_ t 0 6
elu_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> TensorTyToHs ty -> IO (Tensor ty ki sz)
elu_ x@(Tensor t a) alpha scale = do
x <- toCScalar @ty @ki (hsScalarToC alpha)
y <- toCScalar @ty @ki (hsScalarToC scale)
s <- toCScalar @ty @ki 1
wrapTensorM (C.elu___tsss t x y s) a
selu_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
selu_ x@(Tensor t a) = wrapTensorM (C.selu___t t) a
celu_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
celu_ x@(Tensor t a) alpha = do
x <- toCScalar @ty @ki (hsScalarToC alpha)
wrapTensorM (C.celu___ts t x) a
leakyRelu_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
leakyRelu_ x@(Tensor t a) negativeSlope = do
x <- toCScalar @ty @ki (hsScalarToC negativeSlope)
wrapTensorM (C.leaky_relu___ts t x) a
rrelu_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> TensorTyToHs ty -> DataPurpose -> IO (Tensor ty ki sz)
rrelu_ x@(Tensor t a) lower upper dataPurpose = do
x <- toCScalar @ty @ki (hsScalarToC lower)
y <- toCScalar @ty @ki (hsScalarToC upper)
gen <- generatorFor (demote @ki)
wrapTensorM (C.rrelu___tssbg t x y (boolc (dataPurpose == Train)) (Just gen)) a
sigmoid_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
sigmoid_ x@(Tensor t a) = wrapTensorM (C.sigmoid___t t) a
-- * Initialization
ones_ :: forall ty ki sz. (TensorConstraints ty ki sz)
=> Tensor ty ki sz -> IO (Tensor ty ki sz)
ones_ t@(Tensor ptr _) = do
C.ones_out__ta ptr (demoteNv @sz)
pure t
zeros_ :: forall ty ki sz. (TensorConstraints ty ki sz)
=> Tensor ty ki sz -> IO (Tensor ty ki sz)
zeros_ t@(Tensor ptr _) = do
C.zeros_out__ta ptr (demoteNv @sz)
pure t
eye_ :: forall ty ki sz0 sz1. (TensorConstraints ty ki sz0, SingI sz1)
=> Tensor ty ki '[sz0,sz1] -> IO (Tensor ty ki '[sz0,sz1])
eye_ t@(Tensor ptr _) = do
C.eye_out__t66 ptr (demoteN @sz0) (demoteN @sz1)
pure t
data FanMode = FanInMode | FanOutMode
deriving (Show, Eq)
instance Default FanMode where
def = FanInMode
-- | Compute @(fanIn, fanOut)@ for a weight tensor with the given dimension
-- sizes, mirroring PyTorch's @_calculate_fan_in_and_fan_out@. For rank >= 3
-- the layout is taken to be (outMaps : inMaps : kernel dims), and the
-- receptive-field size (product of the kernel dims) scales both fans.
calculateFanInOut :: [Int] -> (Int, Int)
-- Only the empty dimension list is actually an error; the previous message
-- claimed "fewer than two dimensions" even though rank-1 is handled below.
calculateFanInOut [] = error "Cannot compute fan in or out of a tensor with no dimensions"
-- The PyTorch code doesn't have this case, but it simplfies so much downstream code
calculateFanInOut [fin] = (fin,fin)
-- NOTE(review): for exactly two dimensions the first element is treated as
-- fan-in, while for three or more the first element is fan-out -- confirm the
-- intended 2-D weight layout (PyTorch stores Linear weights as [out, in]).
calculateFanInOut [fin,fout] = (fin,fout)
calculateFanInOut (outMaps:inMaps:rest) = (inMaps * product rest, outMaps * product rest)
-- | Select either the fan-in or the fan-out of a tensor with the given
-- dimension sizes, depending on the requested 'FanMode'.
-- (Adds the top-level type signature the original omitted, matching the
-- type GHC infers from 'calculateFanInOut'.)
calculateFan :: [Int] -> FanMode -> Int
calculateFan t FanInMode = fst $ calculateFanInOut t
calculateFan t FanOutMode = snd $ calculateFanInOut t
| PyTorch calls this a gain computed for a nonlinearity . That 's kind of
-- strange.
data GainNonlinearity = GainLinear
| GainConv
| GainSigmoid
| GainTanh
| GainRelu
| GainLeakyRelu
deriving (Show, Eq)
instance Default GainNonlinearity where
def = GainLeakyRelu
-- | Recommended scaling gain for a nonlinearity, following PyTorch's
-- @torch.nn.init.calculate_gain@. The 'Maybe' 'Double' parameter is used only
-- for 'GainLeakyRelu', where it is the negative slope (defaulting to 0.01).
calculateGain :: GainNonlinearity -> Maybe Double -> Double
calculateGain GainLinear _ = 1
calculateGain GainConv _ = 1
calculateGain GainSigmoid _ = 1
calculateGain GainTanh _ = 5/3
calculateGain GainRelu _ = P.sqrt 2
calculateGain GainLeakyRelu param = P.sqrt(2.0 / (1 + fromMaybe 0.01 param ** 2))
kaimingUniform_ :: forall sz ty ki. (TensorConstraints ty ki sz, SingI (IsFloatTy ty))
=> Tensor ty ki sz -> Maybe Double -> GainNonlinearity -> FanMode -> IO (Tensor ty ki sz)
kaimingUniform_ t a gainMode fanMode = do
let fan = calculateFan (demoteNs @sz) fanMode
let gain = calculateGain gainMode a
let std = gain / P.sqrt (fromIntegral fan)
let bound = P.sqrt 3 * std -- Calculate uniform bounds from standard deviation
withoutGrad $ uniform_ t (- bound) bound
kaimingUniformBias_ :: forall sz ty ki. (TensorConstraints ty ki sz, SingI (IsFloatTy ty))
=> Tensor ty ki sz -> IO (Tensor ty ki sz)
kaimingUniformBias_ t = do
let (fanIn,_) = calculateFanInOut (demoteNs @sz)
let bound = 1 / P.sqrt (fromIntegral fanIn)
withoutGrad $ uniform_ t (- bound) bound
kaimingNormal_ :: forall sz ty ki. (TensorConstraints ty ki sz, SingI (IsFloatTy ty))
=> Tensor ty ki sz -> Maybe Double -> GainNonlinearity -> FanMode -> IO (Tensor ty ki sz)
kaimingNormal_ t a gainMode fanMode = do
let fan = calculateFan (demoteNs @sz) fanMode
let gain = calculateGain gainMode a
let std = gain / P.sqrt (fromIntegral fan)
withoutGrad $ normal_ t 0 std
xavierUniform_ :: forall sz ty ki. (TensorConstraints ty ki sz, SingI (IsFloatTy ty))
=> Tensor ty ki sz -> Maybe Double -> IO (Tensor ty ki sz)
xavierUniform_ t gain = do
let (fanIn,fanOut) = calculateFanInOut (demoteNs @sz)
let std = fromMaybe 1 gain * P.sqrt (2.0 / fromIntegral (fanIn + fanOut))
let a = std * P.sqrt 3 -- Calculate uniform bounds from standard deviation
withoutGrad $ uniform_ t (- a) a
xavierNormal_ :: forall sz ty ki. (TensorConstraints ty ki sz, SingI (IsFloatTy ty))
=> Tensor ty ki sz -> Maybe Double -> IO (Tensor ty ki sz)
xavierNormal_ t gain = do
let (fanIn,fanOut) = calculateFanInOut (demoteNs @sz)
let std = fromMaybe 1 gain * P.sqrt (2.0 / fromIntegral (fanIn + fanOut))
withoutGrad $ normal_ t 0 std
-- * Random operations
uniform_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> Double -> IO (Tensor ty ki sz)
uniform_ t@(Tensor ptr _) l h = do
C.uniform__mddg ptr (coerce l) (coerce h) . pure =<< generatorFor (demote @ki)
pure t
random_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Int64 -> Int64 -> IO (Tensor ty ki sz)
random_ t@(Tensor ptr _) l h = do
C.random__m66g ptr (coerce l) (Just $ coerce h) . pure =<< generatorFor (demote @ki)
pure t
normal_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> Double -> IO (Tensor ty ki sz)
normal_ t@(Tensor ptr _) m v = do
C.normal__mddg ptr (CDouble m) (CDouble v) . pure =<< generatorFor (demote @ki)
pure t
bernoulli_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> IO (Tensor ty ki sz)
bernoulli_ t@(Tensor ptr _) p = do
C.bernoulli__mdg ptr (coerce p) . pure =<< generatorFor (demote @ki)
pure t
exponential_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> IO (Tensor ty ki sz)
exponential_ t@(Tensor ptr _) p = do
C.exponential__mdg ptr (coerce p) . pure =<< generatorFor (demote @ki)
pure t
geometric_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> IO (Tensor ty ki sz)
geometric_ t@(Tensor ptr _) p = do
C.geometric__mdg ptr (coerce p) . pure =<< generatorFor (demote @ki)
pure t
cauchy_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> Double -> IO (Tensor ty ki sz)
cauchy_ t@(Tensor ptr _) m s = do
C.cauchy__mddg ptr (coerce m) (coerce s) . pure =<< generatorFor (demote @ki)
pure t
logNormal_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> Double -> IO (Tensor ty ki sz)
logNormal_ t@(Tensor ptr _) m s = do
C.log_normal__mddg ptr (coerce m) (coerce s) . pure =<< generatorFor (demote @ki)
pure t
constant_ :: forall ty ki sz. (TensorConstraints ty ki sz)
=> Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
constant_ t@(Tensor ptr _) c = do
fill <- toCScalar @ty @ki (hsScalarToC c)
C.full_out__tas ptr (demoteNv @sz) fill
pure t
| null | https://raw.githubusercontent.com/abarbu/haskell-torch/e0afcaf81b78e9211ba120dcd247f9a6112b57ab/haskell-torch/src/Torch/Inplace.hs | haskell | # LANGUAGE UndecidableInstances #
# OPTIONS_GHC -pgmP cc -optP -E -optP -undef -optP -std=c89 #
| Operations that mutate tensors.
* Mathematical operations
| x + alpha * y
| x + alpha * y
| x + alpha * y
| x + alpha * y
* Non-linear activation functions
* Initialization
strange.
Calculate uniform bounds from standard deviation
Calculate uniform bounds from standard deviation
* Random operations | # LANGUAGE AllowAmbiguousTypes , CPP , ConstraintKinds , DataKinds , FlexibleContexts , FlexibleInstances , FunctionalDependencies , GADTs #
# LANGUAGE KindSignatures , MultiParamTypeClasses , OverloadedLabels , OverloadedStrings , PartialTypeSignatures , PolyKinds , RankNTypes #
# LANGUAGE ScopedTypeVariables , TemplateHaskell , TypeApplications , TypeFamilies , TypeFamilyDependencies , TypeInType , TypeOperators #
module Torch.Inplace where
import Data.Coerce
import Data.Default
import Data.Maybe
import Data.Singletons
import Foreign.C.Types
import GHC.Int
import Prelude as P
import qualified Torch.C.Tensor as C
import Torch.Misc
import Torch.Tensor
import Torch.Types
mul_ :: forall ty ki sz sz'.
(SingI (BroadcastSizes sz sz'))
=> Tensor ty ki sz -> Tensor ty ki sz' -> IO (Tensor ty ki sz)
mul_ x@Tensor{} y@Tensor{} = do
generic x y
pure x
where
generic x@(Tensor t _) y@(Tensor t' _) = do
let szExpanded = demoteNv @(BroadcastSizes sz sz')
t'e <- C.expand_mab t' szExpanded (boolc True)
C.mul__mt t t'e
mulScalar_ :: forall ty ki sz.
Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
mulScalar_ x@(Tensor t _) alpha = do
s <- toCScalar @ty @ki $ hsScalarToC alpha
C.mul__ms t s
pure x
addcmul_ :: forall ty ki sz sz' sz''.
(SingI (BroadcastSizes sz' sz''), sz ~ BroadcastSizes sz' sz'')
=> Tensor ty ki sz
-> TensorTyToHs ty
-> Tensor ty ki sz'
-> Tensor ty ki sz''
-> IO (Tensor ty ki sz)
addcmul_ (Tensor i _) val (Tensor t1 _) (Tensor t2 _) = do
let szExpanded = demoteNv @(BroadcastSizes sz (BroadcastSizes sz' sz''))
t1' <- C.expand_mab t1 szExpanded (boolc True)
t2' <- C.expand_mab t2 szExpanded (boolc True)
val' <- toCScalar @ty @ki $ hsScalarToC val
rt <- C.addcmul__mtts i t1' t2' val'
pure $ Tensor rt Nothing
div_ :: forall ty ki sz sz'.
(SingI (BroadcastSizes sz sz'))
=> Tensor ty ki sz -> Tensor ty ki sz' -> IO (Tensor ty ki sz)
div_ x@Tensor{} y@Tensor{} = do
generic x y
pure x
where
generic x@(Tensor t _) y@(Tensor t' _) = do
let szExpanded = demoteNv @(BroadcastSizes sz sz')
t'e <- C.expand_mab t' szExpanded (boolc True)
C.div__mt t t'e
divScalar_ :: forall ty ki sz.
Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
divScalar_ x@(Tensor t _) alpha = do
s <- toCScalar @ty @ki $ hsScalarToC alpha
C.div__ms t s
pure x
addcdiv_ :: forall ty ki sz sz' sz''.
(SingI (BroadcastSizes sz' sz''), sz ~ BroadcastSizes sz' sz'')
=> Tensor ty ki sz
-> TensorTyToHs ty
-> Tensor ty ki sz'
-> Tensor ty ki sz''
-> IO (Tensor ty ki sz)
addcdiv_ (Tensor i _) val (Tensor t1 _) (Tensor t2 _) = do
let szExpanded = demoteNv @(BroadcastSizes sz (BroadcastSizes sz' sz''))
t1' <- C.expand_mab t1 szExpanded (boolc True)
t2' <- C.expand_mab t2 szExpanded (boolc True)
val' <- toCScalar @ty @ki $ hsScalarToC val
rt <- C.addcdiv__mtts i t1' t2' val'
pure $ Tensor rt Nothing
add_ :: forall ty ki sz.
Num (TensorTyToHs ty) =>
Tensor ty ki sz -> Tensor ty ki sz -> IO (Tensor ty ki sz)
add_ x y = add_' x y 1
add_' :: forall ty ki sz.
Tensor ty ki sz -> Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
add_' x@(Tensor p _) y@(Tensor p' _) alpha = do
s <- toCScalar @ty @ki $ hsScalarToC alpha
C.add__mts p p' s
pure x
addScalar_ :: forall ty ki sz.
Num (TensorTyToHs ty) =>
Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
addScalar_ x y = addScalar_' x y 1
addScalar_' :: forall ty ki sz.
Tensor ty ki sz -> TensorTyToHs ty -> TensorTyToHs ty -> IO (Tensor ty ki sz)
addScalar_' x@(Tensor p _) y alpha = do
y' <- toCScalar @ty @ki $ hsScalarToC y
alpha' <- toCScalar @ty @ki $ hsScalarToC alpha
C.add__mss p y' alpha'
pure x
sub_ :: forall ty ki sz.
Num (TensorTyToHs ty) =>
Tensor ty ki sz -> Tensor ty ki sz -> IO (Tensor ty ki sz)
sub_ x y = sub_' x y 1
sub_' :: forall ty ki sz.
Tensor ty ki sz -> Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
sub_' x@(Tensor p _) y@(Tensor p' _) alpha = do
s <- toCScalar @ty @ki $ hsScalarToC alpha
C.sub__mts p p' s
pure x
subScalar_ :: forall ty ki sz.
Num (TensorTyToHs ty) =>
Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
subScalar_ x y = subScalar_' x y 1
subScalar_' :: forall ty ki sz.
Tensor ty ki sz -> TensorTyToHs ty -> TensorTyToHs ty -> IO (Tensor ty ki sz)
subScalar_' x@(Tensor p _) y alpha = do
y' <- toCScalar @ty @ki $ hsScalarToC y
alpha' <- toCScalar @ty @ki $ hsScalarToC alpha
C.sub__mss p y' alpha'
pure x
abs_ :: (SingI ty, SingI ki, SingI sz) =>
Tensor ty ki sz -> IO (Tensor ty ki sz)
abs_ x@(Tensor t a) = do
C.abs___t t
pure x
copy_ :: Tensor ty ki sz -> Tensor ty ki sz -> IO (Tensor ty ki sz)
copy_ t@(Tensor dst _) (Tensor src _) = do
C.copy__mtb dst src (boolc False)
pure t
sin_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
sin_ x@(Tensor t _) = C.sin___t t >> pure x
sinh_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
sinh_ x@(Tensor t _) = C.sinh___t t >> pure x
asin_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
asin_ x@(Tensor t _) = C.asin___t t >> pure x
cos_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
cos_ x@(Tensor t _) = C.cos___t t >> pure x
cosh_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
cosh_ x@(Tensor t _) = C.cosh___t t >> pure x
acos_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
acos_ x@(Tensor t _) = C.acos___t t >> pure x
tan_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
tan_ x@(Tensor t _) = C.tan___t t >> pure x
tanh_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
tanh_ x@(Tensor t _) = C.tanh___t t >> pure x
atan_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
atan_ x@(Tensor t _) = C.atan___t t >> pure x
ceil_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
ceil_ x@(Tensor t _) = C.ceil___t t >> pure x
floor_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
floor_ x@(Tensor t _) = C.floor___t t >> pure x
clamp_ :: forall ty ki sz. TensorTyToHs ty -> TensorTyToHs ty -> Tensor ty ki sz -> IO (Tensor ty ki sz)
clamp_ lower upper x@(Tensor t _) = do
l <- toCScalar @ty @ki (hsScalarToC lower)
u <- toCScalar @ty @ki (hsScalarToC upper)
C.clamp___tss t (Just l) (Just u)
pure x
clampMax_ :: forall ty ki sz. TensorTyToHs ty -> Tensor ty ki sz -> IO (Tensor ty ki sz)
clampMax_ upper x@(Tensor t _) = do
u <- toCScalar @ty @ki (hsScalarToC upper)
C.clamp_max___ts t u
pure x
clampMin_ :: forall ty ki sz. TensorTyToHs ty -> Tensor ty ki sz -> IO (Tensor ty ki sz)
clampMin_ lower x@(Tensor t _) = do
l <- toCScalar @ty @ki (hsScalarToC lower)
C.clamp_min___ts t l
pure x
atan2_ :: forall ty ki sz sz'.
Tensor ty ki sz -> Tensor ty ki sz -> IO (Tensor ty ki sz)
atan2_ x@(Tensor p _) (Tensor p' _) = do
C.atan2__mt p p'
pure x
digamma_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
digamma_ x@(Tensor t _) = C.digamma__m t >> pure x
erf_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
erf_ x@(Tensor t _) = C.erf__m t >> pure x
erfc_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
erfc_ x@(Tensor t _) = C.erfc__m t >> pure x
erfinv_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
erfinv_ x@(Tensor t _) = C.erfinv__m t >> pure x
exp_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
exp_ x@(Tensor t _) = C.exp__m t >> pure x
expm1_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
expm1_ x@(Tensor t _) = C.expm1__m t >> pure x
fmod_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
fmod_ x@(Tensor t _) div = do
d <- toCScalar @ty @ki (hsScalarToC div)
C.fmod__ms t d
pure x
frac_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
frac_ x@(Tensor t _) = C.frac__m t >> pure x
log_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
log_ x@(Tensor t _) = C.log__m t >> pure x
log10_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
log10_ x@(Tensor t _) = C.log10__m t >> pure x
log1p_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
log1p_ x@(Tensor t _) = C.log1p__m t >> pure x
log2_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
log2_ x@(Tensor t _) = C.log2__m t >> pure x
mvlgamma_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Int64 -> IO (Tensor ty ki sz)
mvlgamma_ t@(Tensor ptr _) dim = do
C.mvlgamma__m6 ptr dim
pure t
neg_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
neg_ x@(Tensor t _) = C.neg__m t >> pure x
reciprocal_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
reciprocal_ x@(Tensor t _) = C.reciprocal__m t >> pure x
remainder_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
remainder_ x@(Tensor t _) div = do
d <- toCScalar @ty @ki (hsScalarToC div)
C.remainder__ms t d >> pure x
round_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
round_ x@(Tensor t _) = C.round__m t >> pure x
sqrt_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
sqrt_ x@(Tensor t _) = C.sqrt__m t >> pure x
rsqrt_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
rsqrt_ x@(Tensor t _) = C.rsqrt__m t >> pure x
sign_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
sign_ x@(Tensor t _) = C.sign__m t >> pure x
trunc_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
trunc_ x@(Tensor t _) = C.trunc__m t >> pure x
relu_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
relu_ x@(Tensor t a) = wrapTensorM (C.relu__m t) a
threshold_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> TensorTyToHs ty -> IO (Tensor ty ki sz)
threshold_ x@(Tensor t a) threshold value = do
x <- toCScalar @ty @ki (hsScalarToC threshold)
y <- toCScalar @ty @ki (hsScalarToC value)
wrapTensorM (C.threshold___tss t x y) a
hardtanh_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> TensorTyToHs ty -> IO (Tensor ty ki sz)
hardtanh_ x@(Tensor t a) min max = do
x <- toCScalar @ty @ki (hsScalarToC min)
y <- toCScalar @ty @ki (hsScalarToC max)
wrapTensorM (C.hardtanh___tss t x y) a
relu6_ :: Num (TensorTyToHs ty) => Tensor ty ki sz -> IO (Tensor ty ki sz)
relu6_ t = hardtanh_ t 0 6
elu_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> TensorTyToHs ty -> IO (Tensor ty ki sz)
elu_ x@(Tensor t a) alpha scale = do
x <- toCScalar @ty @ki (hsScalarToC alpha)
y <- toCScalar @ty @ki (hsScalarToC scale)
s <- toCScalar @ty @ki 1
wrapTensorM (C.elu___tsss t x y s) a
selu_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
selu_ x@(Tensor t a) = wrapTensorM (C.selu___t t) a
celu_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
celu_ x@(Tensor t a) alpha = do
x <- toCScalar @ty @ki (hsScalarToC alpha)
wrapTensorM (C.celu___ts t x) a
leakyRelu_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
leakyRelu_ x@(Tensor t a) negativeSlope = do
x <- toCScalar @ty @ki (hsScalarToC negativeSlope)
wrapTensorM (C.leaky_relu___ts t x) a
rrelu_ :: forall ty ki sz. Tensor ty ki sz -> TensorTyToHs ty -> TensorTyToHs ty -> DataPurpose -> IO (Tensor ty ki sz)
rrelu_ x@(Tensor t a) lower upper dataPurpose = do
x <- toCScalar @ty @ki (hsScalarToC lower)
y <- toCScalar @ty @ki (hsScalarToC upper)
gen <- generatorFor (demote @ki)
wrapTensorM (C.rrelu___tssbg t x y (boolc (dataPurpose == Train)) (Just gen)) a
sigmoid_ :: Tensor ty ki sz -> IO (Tensor ty ki sz)
sigmoid_ x@(Tensor t a) = wrapTensorM (C.sigmoid___t t) a
ones_ :: forall ty ki sz. (TensorConstraints ty ki sz)
=> Tensor ty ki sz -> IO (Tensor ty ki sz)
ones_ t@(Tensor ptr _) = do
C.ones_out__ta ptr (demoteNv @sz)
pure t
zeros_ :: forall ty ki sz. (TensorConstraints ty ki sz)
=> Tensor ty ki sz -> IO (Tensor ty ki sz)
zeros_ t@(Tensor ptr _) = do
C.zeros_out__ta ptr (demoteNv @sz)
pure t
eye_ :: forall ty ki sz0 sz1. (TensorConstraints ty ki sz0, SingI sz1)
=> Tensor ty ki '[sz0,sz1] -> IO (Tensor ty ki '[sz0,sz1])
eye_ t@(Tensor ptr _) = do
C.eye_out__t66 ptr (demoteN @sz0) (demoteN @sz1)
pure t
data FanMode = FanInMode | FanOutMode
deriving (Show, Eq)
instance Default FanMode where
def = FanInMode
calculateFanInOut :: [Int] -> (Int, Int)
calculateFanInOut [] = error "Cannot compute fan in or out with fewer than two dimensions"
The PyTorch code does n't have this case , but it simplfies so much downstream code
calculateFanInOut [fin] = (fin,fin)
calculateFanInOut [fin,fout] = (fin,fout)
calculateFanInOut (outMaps:inMaps:rest) = (inMaps * product rest, outMaps * product rest)
calculateFan t FanInMode = fst $ calculateFanInOut t
calculateFan t FanOutMode = snd $ calculateFanInOut t
| PyTorch calls this a gain computed for a nonlinearity . That 's kind of
data GainNonlinearity = GainLinear
| GainConv
| GainSigmoid
| GainTanh
| GainRelu
| GainLeakyRelu
deriving (Show, Eq)
instance Default GainNonlinearity where
def = GainLeakyRelu
calculateGain :: GainNonlinearity -> Maybe Double -> Double
calculateGain GainLinear _ = 1
calculateGain GainConv _ = 1
calculateGain GainSigmoid _ = 1
calculateGain GainTanh _ = 5/3
calculateGain GainRelu _ = P.sqrt 2
calculateGain GainLeakyRelu param = P.sqrt(2.0 / (1 + fromMaybe 0.01 param ** 2))
kaimingUniform_ :: forall sz ty ki. (TensorConstraints ty ki sz, SingI (IsFloatTy ty))
=> Tensor ty ki sz -> Maybe Double -> GainNonlinearity -> FanMode -> IO (Tensor ty ki sz)
kaimingUniform_ t a gainMode fanMode = do
let fan = calculateFan (demoteNs @sz) fanMode
let gain = calculateGain gainMode a
let std = gain / P.sqrt (fromIntegral fan)
withoutGrad $ uniform_ t (- bound) bound
kaimingUniformBias_ :: forall sz ty ki. (TensorConstraints ty ki sz, SingI (IsFloatTy ty))
=> Tensor ty ki sz -> IO (Tensor ty ki sz)
kaimingUniformBias_ t = do
let (fanIn,_) = calculateFanInOut (demoteNs @sz)
let bound = 1 / P.sqrt (fromIntegral fanIn)
withoutGrad $ uniform_ t (- bound) bound
kaimingNormal_ :: forall sz ty ki. (TensorConstraints ty ki sz, SingI (IsFloatTy ty))
=> Tensor ty ki sz -> Maybe Double -> GainNonlinearity -> FanMode -> IO (Tensor ty ki sz)
kaimingNormal_ t a gainMode fanMode = do
let fan = calculateFan (demoteNs @sz) fanMode
let gain = calculateGain gainMode a
let std = gain / P.sqrt (fromIntegral fan)
withoutGrad $ normal_ t 0 std
xavierUniform_ :: forall sz ty ki. (TensorConstraints ty ki sz, SingI (IsFloatTy ty))
=> Tensor ty ki sz -> Maybe Double -> IO (Tensor ty ki sz)
xavierUniform_ t gain = do
let (fanIn,fanOut) = calculateFanInOut (demoteNs @sz)
let std = fromMaybe 1 gain * P.sqrt (2.0 / fromIntegral (fanIn + fanOut))
withoutGrad $ uniform_ t (- a) a
xavierNormal_ :: forall sz ty ki. (TensorConstraints ty ki sz, SingI (IsFloatTy ty))
=> Tensor ty ki sz -> Maybe Double -> IO (Tensor ty ki sz)
xavierNormal_ t gain = do
let (fanIn,fanOut) = calculateFanInOut (demoteNs @sz)
let std = fromMaybe 1 gain * P.sqrt (2.0 / fromIntegral (fanIn + fanOut))
withoutGrad $ normal_ t 0 std
uniform_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> Double -> IO (Tensor ty ki sz)
uniform_ t@(Tensor ptr _) l h = do
C.uniform__mddg ptr (coerce l) (coerce h) . pure =<< generatorFor (demote @ki)
pure t
random_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Int64 -> Int64 -> IO (Tensor ty ki sz)
random_ t@(Tensor ptr _) l h = do
C.random__m66g ptr (coerce l) (Just $ coerce h) . pure =<< generatorFor (demote @ki)
pure t
normal_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> Double -> IO (Tensor ty ki sz)
normal_ t@(Tensor ptr _) m v = do
C.normal__mddg ptr (CDouble m) (CDouble v) . pure =<< generatorFor (demote @ki)
pure t
bernoulli_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> IO (Tensor ty ki sz)
bernoulli_ t@(Tensor ptr _) p = do
C.bernoulli__mdg ptr (coerce p) . pure =<< generatorFor (demote @ki)
pure t
exponential_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> IO (Tensor ty ki sz)
exponential_ t@(Tensor ptr _) p = do
C.exponential__mdg ptr (coerce p) . pure =<< generatorFor (demote @ki)
pure t
geometric_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> IO (Tensor ty ki sz)
geometric_ t@(Tensor ptr _) p = do
C.geometric__mdg ptr (coerce p) . pure =<< generatorFor (demote @ki)
pure t
cauchy_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> Double -> IO (Tensor ty ki sz)
cauchy_ t@(Tensor ptr _) m s = do
C.cauchy__mddg ptr (coerce m) (coerce s) . pure =<< generatorFor (demote @ki)
pure t
logNormal_ :: forall ty ki sz. (SingI sz, TensorConstraints ty ki sz)
=> Tensor ty ki sz -> Double -> Double -> IO (Tensor ty ki sz)
logNormal_ t@(Tensor ptr _) m s = do
C.log_normal__mddg ptr (coerce m) (coerce s) . pure =<< generatorFor (demote @ki)
pure t
constant_ :: forall ty ki sz. (TensorConstraints ty ki sz)
=> Tensor ty ki sz -> TensorTyToHs ty -> IO (Tensor ty ki sz)
constant_ t@(Tensor ptr _) c = do
fill <- toCScalar @ty @ki (hsScalarToC c)
C.full_out__tas ptr (demoteNv @sz) fill
pure t
|
44b0880bcabc865814cbcd470a0208c2f19f07545ecc019db1378615da77adfd | FranklinChen/hugs98-plus-Sep2006 | Either.hs | # OPTIONS_GHC -fno - implicit - prelude #
-----------------------------------------------------------------------------
-- |
-- Module : Data.Either
Copyright : ( c ) The University of Glasgow 2001
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- The Either type, and associated operations.
--
-----------------------------------------------------------------------------
module Data.Either (
Either(..),
either -- :: (a -> c) -> (b -> c) -> Either a b -> c
) where
#ifdef __GLASGOW_HASKELL__
import GHC.Base
|
The ' Either ' type represents values with two possibilities : a value of
type @'Either ' a b@ is either @'Left ' a@ or ' b@.
The ' Either ' type is sometimes used to represent a value which is
either correct or an error ; by convention , the ' Left ' constructor is
used to hold an error value and the ' Right ' constructor is used to
hold a correct value ( mnemonic : \"right\ " also means " ) .
The 'Either' type represents values with two possibilities: a value of
type @'Either' a b@ is either @'Left' a@ or @'Right' b@.
The 'Either' type is sometimes used to represent a value which is
either correct or an error; by convention, the 'Left' constructor is
used to hold an error value and the 'Right' constructor is used to
hold a correct value (mnemonic: \"right\" also means \"correct\").
-}
data Either a b = Left a | Right b deriving (Eq, Ord )
-- | Case analysis for the 'Either' type.
If the value is @'Left ' a@ , apply the first function to @a@ ;
if it is ' b@ , apply the second function to @b@.
either :: (a -> c) -> (b -> c) -> Either a b -> c
either f _ (Left x) = f x
either _ g (Right y) = g y
#endif /* __GLASGOW_HASKELL__ */
| null | https://raw.githubusercontent.com/FranklinChen/hugs98-plus-Sep2006/54ab69bd6313adbbed1d790b46aca2a0305ea67e/packages/base/Data/Either.hs | haskell | ---------------------------------------------------------------------------
|
Module : Data.Either
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer :
Stability : experimental
Portability : portable
The Either type, and associated operations.
---------------------------------------------------------------------------
:: (a -> c) -> (b -> c) -> Either a b -> c
| Case analysis for the 'Either' type. | # OPTIONS_GHC -fno - implicit - prelude #
Copyright : ( c ) The University of Glasgow 2001
module Data.Either (
Either(..),
) where
#ifdef __GLASGOW_HASKELL__
import GHC.Base
|
The ' Either ' type represents values with two possibilities : a value of
type @'Either ' a b@ is either @'Left ' a@ or ' b@.
The ' Either ' type is sometimes used to represent a value which is
either correct or an error ; by convention , the ' Left ' constructor is
used to hold an error value and the ' Right ' constructor is used to
hold a correct value ( mnemonic : \"right\ " also means " ) .
The 'Either' type represents values with two possibilities: a value of
type @'Either' a b@ is either @'Left' a@ or @'Right' b@.
The 'Either' type is sometimes used to represent a value which is
either correct or an error; by convention, the 'Left' constructor is
used to hold an error value and the 'Right' constructor is used to
hold a correct value (mnemonic: \"right\" also means \"correct\").
-}
data Either a b = Left a | Right b deriving (Eq, Ord )
If the value is @'Left ' a@ , apply the first function to @a@ ;
if it is ' b@ , apply the second function to @b@.
either :: (a -> c) -> (b -> c) -> Either a b -> c
either f _ (Left x) = f x
either _ g (Right y) = g y
#endif /* __GLASGOW_HASKELL__ */
|
3377fdaf268c2b23ba7ad57014fa26e0f410e6e1651f02cc4a1c7d16a8556acb | jmlowenthal/staged-streams.agda | Stream.hs | {-# LANGUAGE BangPatterns #-}
{-# LANGUAGE MagicHash #-}
-- |
-- Module : Data.List.Stream
Copyright : ( c ) 2007
( c ) 2007 - 2013
-- License : BSD-style
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
A reimplementation of the standard list library to take advantage of
stream fusion , and new GHC optimisations . The fusion mechanism is
-- based on stream fusion for sequences. Described in:
--
-- * /Stream Fusion: From Lists to Streams to Nothing at All/, by
, and , ICFP 2007 .
-- </~dons/papers/CLS07.html>
--
* /Rewriting Haskell Strings/ , by , and
Roman Leshchinskiy , Practical Aspects of Declarative Languages
8th International Symposium , PADL 2007 , 2007 .
-- </~dons/papers/CSL06.html>
--
-- See the source for the complete story:
--
-- * </~dons/code/streams/list/Data/Stream.hs>
--
-- This library is a drop in replacement for "Data.List".
--
module Data.List.Stream (
$ fusion_intro
-- * Basic interface
(++), -- :: [a] -> [a] -> [a]
head, -- :: [a] -> a
last, -- :: [a] -> a
tail, -- :: [a] -> [a]
init, -- :: [a] -> [a]
null, -- :: [a] -> Bool
length, -- :: [a] -> Int
-- * List transformations
map, -- :: (a -> b) -> [a] -> [b]
reverse, -- :: [a] -> [a]
intersperse, -- :: a -> [a] -> [a]
intercalate, -- :: [a] -> [[a]] -> [a]
transpose, -- :: [[a]] -> [[a]]
-- * Reducing lists (folds)
foldl, -- :: (a -> b -> a) -> a -> [b] -> a
foldl', -- :: (a -> b -> a) -> a -> [b] -> a
foldl1, -- :: (a -> a -> a) -> [a] -> a
foldl1', -- :: (a -> a -> a) -> [a] -> a
foldr, -- :: (a -> b -> b) -> b -> [a] -> b
foldr1, -- :: (a -> a -> a) -> [a] -> a
-- ** Special folds
concat, -- :: [[a]] -> [a]
concatMap, -- :: (a -> [b]) -> [a] -> [b]
and, -- :: [Bool] -> Bool
or, -- :: [Bool] -> Bool
any, -- :: (a -> Bool) -> [a] -> Bool
all, -- :: (a -> Bool) -> [a] -> Bool
: : a = > [ a ] - > a
: : a = > [ a ] - > a
: : a = > [ a ] - > a
: : a = > [ a ] - > a
-- * Building lists
-- ** Scans
scanl, -- :: (a -> b -> a) -> a -> [b] -> [a]
scanl1, -- :: (a -> a -> a) -> [a] -> [a]
scanr, -- :: (a -> b -> b) -> b -> [a] -> [b]
scanr1, -- :: (a -> a -> a) -> [a] -> [a]
-- ** Accumulating maps
: : ( acc - > x - > ( acc , y ) ) - > acc - > [ x ] - > ( acc , [ y ] )
: : ( acc - > x - > ( acc , y ) ) - > acc - > [ x ] - > ( acc , [ y ] )
-- ** Infinite lists
iterate, -- :: (a -> a) -> a -> [a]
repeat, -- :: a -> [a]
replicate, -- :: Int -> a -> [a]
cycle, -- :: [a] -> [a]
-- ** Unfolding
unfoldr, -- :: (b -> Maybe (a, b)) -> b -> [a]
-- * Sublists
-- ** Extracting sublists
take, -- :: Int -> [a] -> [a]
drop, -- :: Int -> [a] -> [a]
splitAt, -- :: Int -> [a] -> ([a], [a])
takeWhile, -- :: (a -> Bool) -> [a] -> [a]
dropWhile, -- :: (a -> Bool) -> [a] -> [a]
span, -- :: (a -> Bool) -> [a] -> ([a], [a])
break, -- :: (a -> Bool) -> [a] -> ([a], [a])
group, -- :: Eq a => [a] -> [[a]]
inits, -- :: [a] -> [[a]]
tails, -- :: [a] -> [[a]]
-- * Predicates
isPrefixOf, -- :: Eq a => [a] -> [a] -> Bool
isSuffixOf, -- :: Eq a => [a] -> [a] -> Bool
isInfixOf, -- :: Eq a => [a] -> [a] -> Bool
-- * Searching lists
-- ** Searching by equality
elem, -- :: Eq a => a -> [a] -> Bool
notElem, -- :: Eq a => a -> [a] -> Bool
lookup, -- :: Eq a => a -> [(a, b)] -> Maybe b
-- ** Searching with a predicate
find, -- :: (a -> Bool) -> [a] -> Maybe a
filter, -- :: (a -> Bool) -> [a] -> [a]
partition, -- :: (a -> Bool) -> [a] -> ([a], [a])
-- * Indexing lists
-- | These functions treat a list @xs@ as a indexed collection,
-- with indices ranging from 0 to @'length' xs - 1@.
(!!), -- :: [a] -> Int -> a
elemIndex, -- :: Eq a => a -> [a] -> Maybe Int
elemIndices, -- :: Eq a => a -> [a] -> [Int]
findIndex, -- :: (a -> Bool) -> [a] -> Maybe Int
findIndices, -- :: (a -> Bool) -> [a] -> [Int]
-- * Zipping and unzipping lists
zip, -- :: [a] -> [b] -> [(a, b)]
zip3, -- :: [a] -> [b] -> [c] -> [(a, b, c)]
zip4,
zip5,
zip6,
zip7,
| The zipWith family generalises the zip family by zipping with the
function given as the first argument , instead of a tupling function .
zipWith, -- :: (a -> b -> c) -> [a] -> [b] -> [c]
zipWith3, -- :: (a -> b -> c -> d) -> [a] -> [b] -> [c] -> [d]
zipWith4,
zipWith5,
zipWith6,
zipWith7,
unzip, -- :: [(a, b)] -> ([a], [b])
unzip3, -- :: [(a, b, c)] -> ([a], [b], [c])
unzip4,
unzip5,
unzip6,
unzip7,
-- * Special lists
-- ** Functions on strings
lines, -- :: String -> [String]
words, -- :: String -> [String]
unlines, -- :: [String] -> String
unwords, -- :: [String] -> String
-- ** \"Set\" operations
nub, -- :: Eq a => [a] -> [a]
delete, -- :: Eq a => a -> [a] -> [a]
(\\), -- :: Eq a => [a] -> [a] -> [a]
union, -- :: Eq a => [a] -> [a] -> [a]
intersect, -- :: Eq a => [a] -> [a] -> [a]
-- ** Ordered lists
: : a = > [ a ] - > [ a ]
: : a = > a - > [ a ] - > [ a ]
-- * Generalized functions
-- ** The \"By\" operations
-- | By convention, overloaded functions have a non-overloaded
-- counterpart whose name is suffixed with \`@By@\'.
--
-- It is often convenient to use these functions together with
-- 'Data.Function.on', for instance @'sortBy' ('compare'
-- \`on\` 'fst')@.
* * * User - supplied equality ( replacing an Eq context )
-- | The predicate is assumed to define an equivalence.
nubBy, -- :: (a -> a -> Bool) -> [a] -> [a]
deleteBy, -- :: (a -> a -> Bool) -> a -> [a] -> [a]
deleteFirstsBy, -- :: (a -> a -> Bool) -> [a] -> [a] -> [a]
unionBy, -- :: (a -> a -> Bool) -> [a] -> [a] -> [a]
intersectBy, -- :: (a -> a -> Bool) -> [a] -> [a] -> [a]
groupBy, -- :: (a -> a -> Bool) -> [a] -> [[a]]
* * * User - supplied comparison ( replacing an context )
sortBy, -- :: (a -> a -> Ordering) -> [a] -> [a]
insertBy, -- :: (a -> a -> Ordering) -> a -> [a] -> [a]
maximumBy, -- :: (a -> a -> Ordering) -> [a] -> a
minimumBy, -- :: (a -> a -> Ordering) -> [a] -> a
-- * The \"generic\" operations
-- | The prefix \`@generic@\' indicates an overloaded function that
is a generalized version of a " Prelude " function .
: : i = > [ b ] - > i
genericTake, -- :: Integral i => i -> [a] -> [a]
genericDrop, -- :: Integral i => i -> [a] -> [a]
genericSplitAt, -- :: Integral i => i -> [a] -> ([a], [a])
genericIndex, -- :: Integral a => [b] -> a -> b
genericReplicate, -- :: Integral i => i -> a -> [a]
helper for GHC.List
errorEmptyList -- :: String -> a
) where
#ifndef EXTERNAL_PACKAGE
import {-# SOURCE #-} GHC.Err ( error )
import {-# SOURCE #-} GHC.Real (Integral)
import {-# SOURCE #-} GHC.Num (Num(..))
import {-# SOURCE #-} GHC.Unicode (isSpace)
import GHC.Base (Int, Eq(..), Ord(..), Ordering(..),
Bool(..), not, Ordering(..),
seq, otherwise, flip,
Monad(..),
Char, String,
Int(I#), Int#, (+#),
-- we just reuse these:
foldr, (++), map
)
import Data.Maybe (Maybe(..))
#else
import GHC.Exts (Int(I#), Int#, (+#))
import Prelude (Int,
Integral,
Num(..), Eq(..), Ord(..), Ordering(..),
Bool(..), not, Maybe(..), Char, String,
error, seq, otherwise, flip)
import Data.Char (isSpace)
#endif
import qualified Data.Stream as Stream
import Data.Stream (stream ,unstream)
-- -----------------------------------------------------------------------------
#ifdef EXTERNAL_PACKAGE
infixr 5 ++
#endif
comment to fool
infixl 9 !!
infix 4 `elem`, `notElem`
-- -----------------------------------------------------------------------------
$ fusion_intro
--
-- The functions in this library marked with /fusion/ are
-- (transparently) rewritten by the compiler to stream functions, using
the fusion framework described in /Rewriting Haskell Strings/.
--
-- For example:
--
-- > map f xs
--
-- is transformed via rewrite rules to:
--
> ( unstream . mapS f . ) xs
--
-- The 'unstream' and 'stream' functions identify the allocation points
-- for each function.
--
When two or more fusible functions are in close proximity ( i.e.
-- directly composed, or with only intermediate lets and cases), the
-- fusion rule will fire, removing the intermediate structures.
--
-- Consider:
--
> map f . map
--
-- The rewrite engine will transform this code to:
--
> unstream . mapS f . . unstream . . stream
--
-- The fusion rule will then fire:
--
> unstream . mapS f . . stream
--
-- Removing the intermeidate list that is allocated. The compiler then
-- optimises the result.
--
-- Functions that fail to fuse are not left in stream form. In the final
-- simplifier phase any remaining unfused functions of the form:
--
-- > unstream . g . stream
--
-- Will be transformed back to their original list implementation.
--
--
Notes on simplifer phasing
--
-- * api functions should be rewritten to fusible forms as soon as possble
* This implies a NOINLINE [ 1 ] on the top level functions , so if ghc wants
-- to inline them they'll only have their bodies inlined at the end.
-- * These rewrite rules can then fire in any but the last phase:
-- "++ -> fusible" [~1] forall xs ys.
-- * Finally, if we reach the final phase, rewrite back to best effort [a] forms:
" + + - > unfused " [ 1 ] forall xs ys .
-- * And then inline the result.
--
-- If fusion occurs though, hang on to those 'stream' and 'unstream' pairs:
-- {-# INLINE [0] unstream #-} -- hmm?
--
-- Todo: notes on the phasing of Streams
--
-- -----------------------------------------------------------------------------
-- Fusion for the constructors:
--
-- We do not enable fusion for (:), as it leads to a massive massive
-- slow down in compilation time.
--
RULES
" (: ) - > fusible " [ ~1 ] forall x xs .
x : xs = unstream ( Stream.cons x ( stream xs ) )
" (: ) - > unfused " [ 1 ] forall x xs .
unstream ( Stream.cons x ( stream xs ) ) = x : xs
"(:) -> fusible" [~1] forall x xs.
x : xs = unstream (Stream.cons x (stream xs))
"(:) -> unfused" [1] forall x xs.
unstream (Stream.cons x (stream xs)) = x : xs
-}
-- -----------------------------------------------------------------------------
-- Basic interface
| /O(n)/ , /fusion/. Append two lists , i.e. ,
--
-- > [x1, ..., xm] ++ [y1, ..., yn] == [x1, ..., xm, y1, ..., yn]
-- > [x1, ..., xm] ++ [y1, ...] == [x1, ..., xm, y1, ...]
--
If the first list is not finite , the result is the first list .
The spine of the first list argument must be copied .
#ifdef EXTERNAL_PACKAGE
(++) :: [a] -> [a] -> [a]
(++) [] ys = ys
(++) (x:xs) ys = x : xs ++ ys
# NOINLINE [ 1 ] ( + + ) #
#endif
-- NOTE: This is quite subtle as we do not want to copy the last list in
--
-- xs1 ++ xs2 ++ ... ++ xsn
--
-- Indeed, we don't really want to fuse the above at all unless at least
-- one of the arguments has the form (unstream s) or the result of the
-- concatenation is streamed. The rules below do precisely that. Note they
-- really fuse instead of just rewriting things into a fusible form so there
-- is no need to rewrite back.
# RULES
" + + - > fused on 1st arg " [ ~1 ] forall xs ys .
unstream xs + + ys = " + + - > fused on 2nd arg " [ ~1 ] forall xs ys .
( unstream ys ) = unstream ( Stream.append xs ys )
" + + - > fused ( 1 ) " [ ~1 ] forall xs ys .
stream ( xs + + ys ) = Stream.append ( stream xs ) ( stream ys )
" + + - > fused ( 2 ) " [ ~1 ] forall xs ys .
stream ( ) = Stream.append xs ( stream ys )
" + + - > 1st arg empty " forall xs .
[ ] + + xs = xs
" + + - > 2nd arg empty " forall xs .
xs + + [ ] = xs
" + + / : " forall x xs ys .
( x : xs ) + + ys = x : ( xs + + ys )
#
"++ -> fused on 1st arg" [~1] forall xs ys.
unstream xs ++ ys = Stream.append1 xs ys
"++ -> fused on 2nd arg" [~1] forall xs ys.
Stream.append1 xs (unstream ys) = unstream (Stream.append xs ys)
"++ -> fused (1)" [~1] forall xs ys.
stream (xs ++ ys) = Stream.append (stream xs) (stream ys)
"++ -> fused (2)" [~1] forall xs ys.
stream (Stream.append1 xs ys) = Stream.append xs (stream ys)
"++ -> 1st arg empty" forall xs.
[] ++ xs = xs
"++ -> 2nd arg empty" forall xs.
xs ++ [] = xs
"++ / :" forall x xs ys.
(x:xs) ++ ys = x : (xs ++ ys)
#-}
| /O(1)/ , /fusion/. Extract the first element of a list , which must be
-- non-empty.
head :: [a] -> a
head (x:_) = x
head [] = errorEmptyList "head"
# NOINLINE [ 1 ] head #
# RULES
" head - > fusible " [ ~1 ] forall xs .
head xs = Stream.head ( stream xs )
--"head - > unfused " [ 1 ] forall xs .
-- Stream.head ( stream xs ) = head xs
#
"head -> fusible" [~1] forall xs.
head xs = Stream.head (stream xs)
--"head -> unfused" [1] forall xs.
-- Stream.head (stream xs) = head xs
#-}
-- | /O(n)/, /fusion/. Extract the last element of a list, which must be finite
-- and non-empty.
last :: [a] -> a
last [] = errorEmptyList "last"
last (x:xs) = last' x xs
where
last' y [] = y
last' _ (y:ys) = last' y ys
# NOINLINE [ 1 ] last #
# RULES
" last - > fusible " [ ~1 ] forall xs .
last xs = Stream.last ( stream xs )
--"last - > unfused " [ 1 ] forall xs .
-- Stream.last ( stream xs ) = last xs
#
"last -> fusible" [~1] forall xs.
last xs = Stream.last (stream xs)
--"last -> unfused" [1] forall xs.
-- Stream.last (stream xs) = last xs
#-}
-- | /O(1)/, /fusion/. Extract the elements after the head of a list, which
-- must be non-empty.
tail :: [a] -> [a]
tail (_:xs) = xs
tail [] = errorEmptyList "tail"
# NOINLINE [ 1 ] tail #
# RULES
" tail - > fusible " [ ~1 ] forall xs .
tail xs = unstream ( Stream.tail ( stream xs ) )
--"tail - > unfused " [ 1 ] forall xs .
-- unstream ( Stream.tail ( stream xs ) ) = tail xs
#
"tail -> fusible" [~1] forall xs.
tail xs = unstream (Stream.tail (stream xs))
--"tail -> unfused" [1] forall xs.
-- unstream (Stream.tail (stream xs)) = tail xs
#-}
-- | /O(n)/, /fusion/. Return all the elements of a list except the last one.
-- The list must be finite and non-empty.
init :: [a] -> [a]
init [] = errorEmptyList "init"
init (x:xs) = init' x xs
where
init' _ [] = []
init' y (z:zs) = y : init' z zs
# NOINLINE [ 1 ] init #
# RULES
" init - > fusible " [ ~1 ] forall xs .
init xs = unstream ( Stream.init ( stream xs ) )
--"init - > unfused " [ 1 ] forall xs .
-- unstream ( Stream.init ( stream xs ) ) = init xs
#
"init -> fusible" [~1] forall xs.
init xs = unstream (Stream.init (stream xs))
--"init -> unfused" [1] forall xs.
-- unstream (Stream.init (stream xs)) = init xs
#-}
-- | /O(1)/, /fusion/. Test whether a list is empty.
null :: [a] -> Bool
null [] = True
null (_:_) = False
# NOINLINE [ 1 ] null #
# RULES
" null - > fusible " [ ~1 ] forall xs .
null xs = Stream.null ( stream xs )
--"null - > unfused " [ 1 ] forall xs .
-- Stream.null ( stream xs ) = null xs
#
"null -> fusible" [~1] forall xs.
null xs = Stream.null (stream xs)
--"null -> unfused" [1] forall xs.
-- Stream.null (stream xs) = null xs
#-}
-- | /O(n)/, /fusion/. 'length' returns the length of a finite list as an 'Int'.
-- It is an instance of the more general 'Data.List.genericLength',
-- the result type of which may be any kind of number.
-- Counts cons cells into an unboxed Int# accumulator so the loop does no
-- boxing; the count is boxed exactly once at the end (I#).
length :: [a] -> Int
length xs0 = len xs0 0#
#ifndef __HADDOCK__
  where
    -- Worker hidden from Haddock: its type mentions the unboxed Int#.
    len :: [a] -> Int# -> Int
    len [] a# = I# a#
    len (_:xs) a# = len xs (a# +# 1#)
#endif
# NOINLINE [ 1 ] length #
# RULES
" length - > fusible " [ ~1 ] forall xs .
length xs = Stream.length ( stream xs )
--"length - > unfused " [ 1 ] forall xs .
-- Stream.length ( stream xs ) = length xs
#
"length -> fusible" [~1] forall xs.
length xs = Stream.length (stream xs)
--"length -> unfused" [1] forall xs.
-- Stream.length (stream xs) = length xs
#-}
-- ---------------------------------------------------------------------
-- List transformations
-- | /O(n)/, /fusion/. 'map' @f xs@ is the list obtained by applying @f@ to each element
-- of @xs@, i.e.,
--
-- > map f [x1, x2, ..., xn] == [f x1, f x2, ..., f xn]
-- > map f [x1, x2, ...] == [f x1, f x2, ...]
--
-- Properties:
--
-- > map f . map g = map (f . g)
-- > map f (repeat x) = repeat (f x)
-- > map f (replicate n x) = replicate n (f x)
#ifdef EXTERNAL_PACKAGE
map :: (a -> b) -> [a] -> [b]
map g ys = case ys of
    []   -> []
    z:zs -> g z : map g zs
# NOINLINE [ 1 ] map #
#endif
# RULES
" map - > fusible " [ ~1 ] forall f xs .
map f xs = unstream ( Stream.map f ( stream xs ) )
--"map - > unfused " [ 1 ] forall f xs .
-- unstream ( Stream.map f ( stream xs ) ) = map f xs
#
"map -> fusible" [~1] forall f xs.
map f xs = unstream (Stream.map f (stream xs))
--"map -> unfused" [1] forall f xs.
-- unstream (Stream.map f (stream xs)) = map f xs
#-}
-- | /O(n)/, /fusion/. 'reverse' @xs@ returns the elements of @xs@ in reverse order.
-- @xs@ must be finite. Will fuse as a consumer only.
reverse :: [a] -> [a]
reverse ys = foldl' (\acc y -> y : acc) [] ys   -- strict left fold: consumer-only fusion
# INLINE reverse #
{-
reverse l = rev l []
where
rev [] a = a
rev (x:xs) a = rev xs (x:a)
-}
--TODO : I 'm sure there are some cunning things we can do with optimising
-- reverse . Of course if we try and fuse we may need to still force the
-- sping of the list : eg reverse . reverse = forceSpine
forceSpine : : [ a ] - > [ a ]
forceSpine xs = forceSpine ' xs ` seq ` xs
{ - # INLINE forceSpine #
--TODO: I'm sure there are some cunning things we can do with optimising
-- reverse. Of course if we try and fuse we may need to still force the
-- sping of the list: eg reverse . reverse = forceSpine
forceSpine :: [a] -> [a]
forceSpine xs = forceSpine' xs `seq` xs
{-# INLINE forceSpine #-}
-- The idea of this slightly odd construction is that we inline the above form
-- and in the context we may then be able to use xs directly and just keep
-- around the fact that xs must be forced at some point. Remember, seq does not
-- imply any evaluation order.
forceSpine' :: [a] -> ()
forceSpine' [] = ()
forceSpine' (_:xs') = forceSpine' xs'
# NOINLINE forceSpine ' #
-}
-- | /O(n)/, /fusion/. The 'intersperse' function takes an element and a list and
-- \`intersperses\' that element between the elements of the list.
-- For example,
--
-- > intersperse ',' "abcde" == "a,b,c,d,e"
--
intersperse :: a -> [a] -> [a]
intersperse _   []       = []
intersperse sep (y0:ys0) = y0 : prepend ys0
  where
    -- Put the separator before every remaining element.
    prepend []     = []
    prepend (z:zs) = sep : z : prepend zs
# NOINLINE [ 1 ] intersperse #
RULES
" intersperse - > fusible " [ ~1 ] forall x xs .
intersperse x xs = unstream ( Stream.intersperse x ( stream xs ) )
" intersperse - > unfused " [ 1 ] forall x xs .
unstream ( Stream.intersperse x ( stream xs ) ) = intersperse x xs
"intersperse -> fusible" [~1] forall x xs.
intersperse x xs = unstream (Stream.intersperse x (stream xs))
"intersperse -> unfused" [1] forall x xs.
unstream (Stream.intersperse x (stream xs)) = intersperse x xs
-}
-- | /O(n)/, /fusion/. 'intercalate' @xs xss@ is equivalent to @('concat' ('intersperse' xs xss))@.
-- It inserts the list @xs@ in between the lists in @xss@ and concatenates the
-- result.
--
-- > intercalate = concat . intersperse
--
intercalate :: [a] -> [[a]] -> [a]
intercalate sep xss = flatten (intersperse sep xss)
  where
    -- Equivalent to 'concat' on the interspersed list.
    flatten []       = []
    flatten (ys:yss) = ys ++ flatten yss
# NOINLINE [ 1 ] intercalate #
intercalate _ [ ] = [ ]
intercalate sep ( xs0 : ) = go xs0 where
go [ ] xss = to xss
go ( x : xs ) xss = x : go xs xss
to [ ] = [ ]
to ( xs : xss ) = go ' sep xs xss
go ' [ ] xs xss = go xs xss
go ' ( s : ss ) xs xss = s : go ' ss xs xss
{ - # NOINLINE [ 1 ] intercalate #
intercalate _ [] = []
intercalate sep (xs0:xss0) = go xs0 xss0
where
go [] xss = to xss
go (x:xs) xss = x : go xs xss
to [] = []
to (xs:xss) = go' sep xs xss
go' [] xs xss = go xs xss
go' (s:ss) xs xss = s : go' ss xs xss
{-# NOINLINE [1] intercalate #-}
-}
-- fusion rule based on:
-- intercalate = concat . intersperse
--
RULES
" intercalate - > fusible " [ ~1 ] forall x xs .
intercalate x xs = Stream.concat ( Stream.intersperse x ( stream xs ) )
" intercalate - > unfused " [ 1 ] forall x xs .
Stream.concat ( Stream.intersperse x ( stream xs ) ) = intercalate x xs
"intercalate -> fusible" [~1] forall x xs.
intercalate x xs = Stream.concat (Stream.intersperse x (stream xs))
"intercalate -> unfused" [1] forall x xs.
Stream.concat (Stream.intersperse x (stream xs)) = intercalate x xs
-}
-- | The 'transpose' function transposes the rows and columns of its argument.
-- For example,
--
-- > transpose [[1,2,3],[4,5,6]] == [[1,4],[2,5],[3,6]]
--
transpose :: [[a]] -> [[a]]
transpose []              = []
transpose ([]     : rows) = transpose rows    -- exhausted rows vanish
transpose ((y:ys) : rows) =
    (y : [h | (h:_) <- rows]) : transpose (ys : [t | (_:t) <- rows])
TODO fuse
-- ---------------------------------------------------------------------
-- Reducing lists (folds)
-- | /O(n)/, /fusion/. 'foldl', applied to a binary operator, a starting value (typically
-- the left-identity of the operator), and a list, reduces the list
-- using the binary operator, from left to right:
--
-- > foldl f z [x1, x2, ..., xn] == (...((z `f` x1) `f` x2) `f`...) `f` xn
--
-- The list must be finite.
--
foldl :: (a -> b -> a) -> a -> [b] -> a
foldl f z0 ys0 = loop z0 ys0
  where
    -- Lazy accumulator; see foldl' for the strict variant.
    loop acc []     = acc
    loop acc (y:ys) = loop (f acc y) ys
# INLINE [ 1 ] foldl #
# RULES
" foldl - > fusible " [ ~1 ] forall f z xs .
foldl f z xs = Stream.foldl f z ( stream xs )
--"foldl - > unfused " [ 1 ] forall f z xs .
-- Stream.foldl f z ( stream xs ) = foldl f z xs
#
"foldl -> fusible" [~1] forall f z xs.
foldl f z xs = Stream.foldl f z (stream xs)
--"foldl -> unfused" [1] forall f z xs.
-- Stream.foldl f z (stream xs) = foldl f z xs
#-}
-- | /O(n)/, /fusion/. A strict version of 'foldl'.
foldl' :: (a -> b -> a) -> a -> [b] -> a
foldl' f z0 xs0 = go z0 xs0
#ifndef __HADDOCK__
  where
    -- Bang patterns force the accumulator at each step, preventing a
    -- chain of thunks from building up on long lists.
    go !z [] = z
    go !z (x:xs) = go (f z x) xs
#endif
# INLINE [ 1 ] foldl ' #
# RULES
" foldl ' - > fusible " [ ~1 ] forall f z xs .
foldl ' f z xs = Stream.foldl ' f z ( stream xs )
--"foldl ' - > unfused " [ 1 ] forall f z xs .
-- Stream.foldl ' f z ( stream xs ) = foldl ' f z xs
#
"foldl' -> fusible" [~1] forall f z xs.
foldl' f z xs = Stream.foldl' f z (stream xs)
--"foldl' -> unfused" [1] forall f z xs.
-- Stream.foldl' f z (stream xs) = foldl' f z xs
#-}
-- | /O(n)/, /fusion/. 'foldl1' is a variant of 'foldl' that has no starting value argument,
-- and thus must be applied to non-empty lists.
foldl1 :: (a -> a -> a) -> [a] -> a
foldl1 _ []       = errorEmptyList "foldl1"
foldl1 f (y0:ys0) = loop y0 ys0
  where
    -- The first element seeds the (lazy) accumulator.
    loop acc []     = acc
    loop acc (y:ys) = loop (f acc y) ys
# INLINE [ 1 ] foldl1 #
# RULES
" foldl1 - > fusible " [ ~1 ] forall f xs .
foldl1 f xs = Stream.foldl1 f ( stream xs )
--"foldl1 - > unfused " [ 1 ] forall f xs .
-- Stream.foldl1 f ( stream xs ) = foldl1 f xs
#
"foldl1 -> fusible" [~1] forall f xs.
foldl1 f xs = Stream.foldl1 f (stream xs)
--"foldl1 -> unfused" [1] forall f xs.
-- Stream.foldl1 f (stream xs) = foldl1 f xs
#-}
| /O(n)/ , /fusion/. A strict version of ' foldl1 '
foldl1' :: (a -> a -> a) -> [a] -> a
foldl1' _ [] = errorEmptyList "foldl1'"
foldl1' f (x0:xs0) = go x0 xs0
#ifndef __HADDOCK__
  where
    -- Strict accumulator (bang patterns): no thunk chain on long lists.
    go !z [] = z
    go !z (x:xs) = go (f z x) xs
#endif
# INLINE [ 1 ] foldl1 ' #
# RULES
" foldl1 ' - > fusible " [ ~1 ] forall f xs .
foldl1 ' f xs = Stream.foldl1 ' f ( stream xs )
--"foldl1 - > unfused " [ 1 ] forall f xs .
-- Stream.foldl1 ' f ( stream xs ) = foldl1 ' f xs
#
"foldl1' -> fusible" [~1] forall f xs.
foldl1' f xs = Stream.foldl1' f (stream xs)
--"foldl1 -> unfused" [1] forall f xs.
-- Stream.foldl1' f (stream xs) = foldl1' f xs
#-}
-- | /O(n)/, /fusion/. 'foldr', applied to a binary operator, a starting value (typically
-- the right-identity of the operator), and a list, reduces the list
-- using the binary operator, from right to left:
--
-- > foldr f z [x1, x2, ..., xn] == x1 `f` (x2 `f` ... (xn `f` z)...)
#ifdef EXTERNAL_PACKAGE
foldr :: (a -> b -> b) -> b -> [a] -> b
foldr k z ys0 = walk ys0
  where
    walk []     = z
    walk (y:ys) = k y (walk ys)
{-# INLINE [0] foldr #-}
#endif
# RULES
" foldr - > fusible " [ ~1 ] forall f z xs .
foldr f z xs = Stream.foldr f z ( stream xs )
--"foldr - > unfused " [ 1 ] forall f z xs .
-- Stream.foldr f z ( stream xs ) = foldr f z xs
#
"foldr -> fusible" [~1] forall f z xs.
foldr f z xs = Stream.foldr f z (stream xs)
--"foldr -> unfused" [1] forall f z xs.
-- Stream.foldr f z (stream xs) = foldr f z xs
#-}
-- | /O(n)/, /fusion/. 'foldr1' is a variant of 'foldr' that has no starting value argument,
-- and thus must be applied to non-empty lists.
foldr1 :: (a -> a -> a) -> [a] -> a
foldr1 _ []       = errorEmptyList "foldr1"
foldr1 k (y0:ys0) = walk y0 ys0
  where
    -- Carry the current element so the last one is returned unmodified.
    walk y []     = y
    walk y (z:zs) = k y (walk z zs)
{-# INLINE [1] foldr1 #-}
# RULES
" foldr1 - > fusible " [ ~1 ] forall f xs .
foldr1 f xs = Stream.foldr1 f ( stream xs )
--"foldr1 - > unfused " [ 1 ] forall f xs .
-- Stream.foldr1 f ( stream xs ) = foldr1 f xs
#
"foldr1 -> fusible" [~1] forall f xs.
foldr1 f xs = Stream.foldr1 f (stream xs)
--"foldr1 -> unfused" [1] forall f xs.
-- Stream.foldr1 f (stream xs) = foldr1 f xs
#-}
-- ---------------------------------------------------------------------
-- Special folds
-- | /O(n)/, /fusion/. Concatenate a list of lists.
concat :: [[a]] -> [a]
concat yss0 = next yss0
  where
    -- next: advance to the next sublist; walk: emit its elements.
    next []       = []
    next (ys:yss) = walk ys yss
    walk []     yss = next yss
    walk (y:ys) yss = y : walk ys yss
# NOINLINE [ 1 ] concat #
--
-- fuse via concatMap, as the Stream (Stream a) is too hard to construct
--
-- or via foldr (++) ?
--
# RULES
" concat - > fused " [ ~1 ] forall xs .
concat xs = Stream.concat ( stream xs )
--"concat - > unfused " [ 1 ] forall xs .
-- Stream.concat ( stream xs ) = concat xs
#
"concat -> fused" [~1] forall xs.
concat xs = Stream.concat (stream xs)
--"concat -> unfused" [1] forall xs.
-- Stream.concat (stream xs) = concat xs
#-}
-- | /O(n)/, /fusion/. Map a function over a list and concatenate the results.
concatMap :: (a -> [b]) -> [a] -> [b]
concatMap f = foldr (\a acc -> f a ++ acc) []   -- via foldr, so at least it will fuse
# INLINE concatMap #
concatMap f as0 = to as0
where
go [ ] as = to as
go ( b : bs ) as = b : go bs as
to [ ] = [ ]
to ( a : as ) = go ( f a ) as
{ - # NOINLINE [ 1 ] concatMap #
concatMap f as0 = to as0
where
go [] as = to as
go (b:bs) as = b : go bs as
to [] = []
to (a:as) = go (f a) as
{-# NOINLINE [1] concatMap #-}
-}
RULES
" concatMap - > fusible " [ ~1 ] forall f xs .
concatMap f xs = Stream.concatMap f ( stream xs )
" concatMap - > unfused " [ 1 ] forall f xs .
Stream.concatMap f ( stream xs ) = concatMap f xs
"concatMap -> fusible" [~1] forall f xs.
concatMap f xs = Stream.concatMap f (stream xs)
"concatMap -> unfused" [1] forall f xs.
Stream.concatMap f (stream xs) = concatMap f xs
-}
-- | /O(n)/, /fusion/. 'and' returns the conjunction of a Boolean list. For the result to be
-- 'True', the list must be finite; 'False', however, results from a 'False'
-- value at a finite index of a finite or infinite list.
--
and :: [Bool] -> Bool
and []     = True
and (b:bs) = b && and bs   -- short-circuits on the first False
# NOINLINE [ 1 ] and #
# RULES
" and - > fused " [ ~1 ] forall xs .
and xs = Stream.and ( stream xs )
--"and - > unfused " [ 1 ] forall xs .
-- Stream.and ( stream xs ) = and xs
#
"and -> fused" [~1] forall xs.
and xs = Stream.and (stream xs)
--"and -> unfused" [1] forall xs.
-- Stream.and (stream xs) = and xs
#-}
| /O(n)/ , /fusion/. ' or ' returns the disjunction of a Boolean list . For the result to be
-- 'False', the list must be finite; 'True', however, results from a 'True'
-- value at a finite index of a finite or infinite list.
or :: [Bool] -> Bool
or []     = False
or (b:bs) = b || or bs   -- short-circuits on the first True
# NOINLINE [ 1 ] or #
# RULES
" or - > fused " [ ~1 ] forall xs .
or xs = Stream.or ( stream xs )
--"or - > unfused " [ 1 ] forall xs .
-- Stream.or ( stream xs ) = or xs
#
"or -> fused" [~1] forall xs.
or xs = Stream.or (stream xs)
--"or -> unfused" [1] forall xs.
-- Stream.or (stream xs) = or xs
#-}
-- | /O(n)/, /fusion/. Applied to a predicate and a list, 'any' determines if any element
-- of the list satisfies the predicate.
any :: (a -> Bool) -> [a] -> Bool
any p ys0 = walk ys0
  where
    walk []     = False
    walk (y:ys)
      | p y       = True      -- stop at the first witness
      | otherwise = walk ys
# NOINLINE [ 1 ] any #
TODO : check if being lazy in p is a cost ,
should we do [ ] as a special case and then strictly evaluate p ?
# RULES
" any - > fusible " [ ~1 ] forall f xs .
any f xs = Stream.any f ( stream xs )
--"any - > unfused " [ 1 ] forall f xs .
-- Stream.any f ( stream xs ) = any f xs
#
"any -> fusible" [~1] forall f xs.
any f xs = Stream.any f (stream xs)
--"any -> unfused" [1] forall f xs.
-- Stream.any f (stream xs) = any f xs
#-}
-- | Applied to a predicate and a list, 'all' determines if all elements
-- of the list satisfy the predicate.
all :: (a -> Bool) -> [a] -> Bool
all p ys0 = walk ys0
  where
    walk []     = True
    walk (y:ys)
      | p y       = walk ys
      | otherwise = False     -- stop at the first counterexample
# NOINLINE [ 1 ] all #
# RULES
" all - > fusible " [ ~1 ] forall f xs .
all f xs = Stream.all f ( stream xs )
--"all - > unfused " [ 1 ] forall f xs .
-- Stream.all f ( stream xs ) = all f xs
#
"all -> fusible" [~1] forall f xs.
all f xs = Stream.all f (stream xs)
--"all -> unfused" [1] forall f xs.
-- Stream.all f (stream xs) = all f xs
#-}
-- | /O(n)/, /fusion/. The 'sum' function computes the sum of a finite list of numbers.
sum :: Num a => [a] -> a
sum l = sum' l 0
#ifndef __HADDOCK__
  where
    -- Accumulator is deliberately NOT forced here (no bang), unlike
    -- 'sumInt' below: the overloaded version must stay lazy to preserve
    -- semantics for non-strict 'Num' instances.
    sum' [] a = a
    sum' (x:xs) a = sum' xs (a+x)
#endif
# NOINLINE [ 1 ] sum #
-- Specialised, strict version of 'sum' for [Int]; installed in place of
-- 'sum' by the "sum spec Int" rewrite rule below.
sumInt :: [Int] -> Int
sumInt l = sum' l 0
#ifndef __HADDOCK__
  where
    sum' [] a = a
    -- Bang forces the Int accumulator, keeping the loop constant-space.
    sum' (x:xs) !a = sum' xs (a+x)
#endif
# NOINLINE [ 1 ] sumInt #
{-# RULES
"sum spec Int" sum = sumInt :: [Int] -> Int
#-}
# RULES
" sum - > fusible " [ ~1 ] forall xs .
sum xs = Stream.sum ( stream xs )
--"sum - > unfused " [ 1 ] forall xs .
-- Stream.sum ( stream xs ) = sum xs
#
"sum -> fusible" [~1] forall xs.
sum xs = Stream.sum (stream xs)
--"sum -> unfused" [1] forall xs.
-- Stream.sum (stream xs) = sum xs
#-}
# RULES
" sumInt - > fusible " [ ~1 ] forall ( xs : : [ Int ] ) .
sumInt xs = Stream.sum ( stream xs )
--"sumInt - > unfused " [ 1 ] forall ( xs : : [ Int ] ) .
-- Stream.sum ( stream xs ) = sumInt xs
#
"sumInt -> fusible" [~1] forall (xs :: [Int]).
sumInt xs = Stream.sum (stream xs)
--"sumInt -> unfused" [1] forall (xs :: [Int]).
-- Stream.sum (stream xs) = sumInt xs
#-}
-- | /O(n)/,/fusion/. The 'product' function computes the product of a finite list of numbers.
product :: Num a => [a] -> a
product l = prod l 1
#ifndef __HADDOCK__
  where
    -- Lazy accumulator seeded with the multiplicative identity 1.
    prod [] a = a
    prod (x:xs) a = prod xs (a*x)
#endif
# NOINLINE [ 1 ] product #
-- Specialised, strict version of 'product' for [Int]; installed in place
-- of 'product' by the "product spec Int" rewrite rule below.
productInt :: [Int] -> Int
-- BUG FIX: the accumulator must start at 1 (the multiplicative identity,
-- as in the overloaded 'product' above); it previously started at 0,
-- which forced every result to 0.
productInt l = product' l 1
#ifndef __HADDOCK__
  where
    product' [] a = a
    -- Bang forces the Int accumulator, keeping the loop constant-space.
    product' (x:xs) !a = product' xs (a*x)
#endif
# NOINLINE [ 1 ] productInt #
{-# RULES
"product spec Int" product = productInt :: [Int] -> Int
#-}
# RULES
" product - > fused " [ ~1 ] forall xs .
product xs = Stream.product ( stream xs )
--"product - > unfused " [ 1 ] forall xs .
-- ( stream xs ) = product xs
#
"product -> fused" [~1] forall xs.
product xs = Stream.product (stream xs)
--"product -> unfused" [1] forall xs.
-- Stream.product (stream xs) = product xs
#-}
# RULES
" productInt - > fusible " [ ~1 ] forall ( xs : : [ Int ] ) .
productInt xs = Stream.product ( stream xs )
--"productInt - > unfused " [ 1 ] forall ( xs : : [ Int ] ) .
-- ( stream xs ) = productInt xs
#
"productInt -> fusible" [~1] forall (xs :: [Int]).
productInt xs = Stream.product (stream xs)
--"productInt -> unfused" [1] forall (xs :: [Int]).
-- Stream.product (stream xs) = productInt xs
#-}
-- | /O(n)/,/fusion/. 'maximum' returns the maximum value from a list,
-- which must be non-empty, finite, and of an ordered type.
-- It is a special case of 'Data.List.maximumBy', which allows the
-- programmer to supply their own comparison function.
maximum :: Ord a => [a] -> a
maximum ys = case ys of
    [] -> errorEmptyList "maximum"
    _  -> foldl1 max ys     -- lazy fold; see strictMaximum below
# NOINLINE [ 1 ] maximum #
# RULES
" maximum - > fused " [ ~1 ] forall xs .
maximum xs = Stream.maximum ( stream xs )
--"maximum - > unfused " [ 1 ] forall xs .
-- Stream.maximum ( stream xs ) = maximum xs
#
"maximum -> fused" [~1] forall xs.
maximum xs = Stream.maximum (stream xs)
--"maximum -> unfused" [1] forall xs.
-- Stream.maximum (stream xs) = maximum xs
#-}
-- We can't make the overloaded version of maximum strict without
changing its semantics ( might not be strict ) , but we can for
-- the version specialised to 'Int'.
# RULES
" maximumInt " maximum = ( strictMaximum : : [ Int ] - > Int ) ;
" maximumChar " maximum = ( strictMaximum : : [ )
#
"maximumInt" maximum = (strictMaximum :: [Int] -> Int);
"maximumChar" maximum = (strictMaximum :: [Char] -> Char)
#-}
strictMaximum :: (Ord a) => [a] -> a
strictMaximum ys = case ys of
    [] -> errorEmptyList "maximum"
    _  -> foldl1' max ys    -- strict fold: safe only at monomorphic types
# NOINLINE [ 1 ] strictMaximum #
# RULES
" strictMaximum - > fused " [ ~1 ] forall xs .
strictMaximum xs = Stream.strictMaximum ( stream xs )
--"strictMaximum - > unfused " [ 1 ] forall xs .
-- Stream.strictMaximum ( stream xs ) = strictMaximum xs
#
"strictMaximum -> fused" [~1] forall xs.
strictMaximum xs = Stream.strictMaximum (stream xs)
--"strictMaximum -> unfused" [1] forall xs.
-- Stream.strictMaximum (stream xs) = strictMaximum xs
#-}
-- | /O(n)/,/fusion/. 'minimum' returns the minimum value from a list,
-- which must be non-empty, finite, and of an ordered type.
-- It is a special case of 'Data.List.minimumBy', which allows the
-- programmer to supply their own comparison function.
minimum :: Ord a => [a] -> a
minimum ys = case ys of
    [] -> errorEmptyList "minimum"
    _  -> foldl1 min ys     -- lazy fold; see strictMinimum below
# NOINLINE [ 1 ] minimum #
# RULES
" minimum - > fused " [ ~1 ] forall xs .
minimum xs = Stream.minimum ( stream xs )
--"minimum - > unfused " [ 1 ] forall xs .
-- Stream.minimum ( stream xs ) = minimum xs
#
"minimum -> fused" [~1] forall xs.
minimum xs = Stream.minimum (stream xs)
--"minimum -> unfused" [1] forall xs.
-- Stream.minimum (stream xs) = minimum xs
#-}
# RULES
" minimumInt " minimum = ( strictMinimum : : [ Int ] - > Int ) ;
" minimumChar " minimum = ( strictMinimum : : [ )
#
"minimumInt" minimum = (strictMinimum :: [Int] -> Int);
"minimumChar" minimum = (strictMinimum :: [Char] -> Char)
#-}
strictMinimum :: (Ord a) => [a] -> a
-- BUG FIX: the empty-list error previously reported "maximum" (copy-paste
-- from strictMaximum); it must name this function.
strictMinimum [] = errorEmptyList "minimum"
strictMinimum xs = foldl1' min xs
# NOINLINE [ 1 ] strictMinimum #
# RULES
" strictMinimum - > fused " [ ~1 ] forall xs .
strictMinimum xs = Stream.strictMinimum ( stream xs )
--"strictMinimum - > unfused " [ 1 ] forall xs .
-- Stream.strictMinimum ( stream xs ) = strictMinimum xs
#
"strictMinimum -> fused" [~1] forall xs.
strictMinimum xs = Stream.strictMinimum (stream xs)
--"strictMinimum -> unfused" [1] forall xs.
-- Stream.strictMinimum (stream xs) = strictMinimum xs
#-}
-- ---------------------------------------------------------------------
-- * Building lists
-- ** Scans
-- | /O(n)/, /fusion/. 'scanl' is similar to 'foldl', but returns a list of successive
-- reduced values from the left:
--
-- > scanl f z [x1, x2, ...] == [z, z `f` x1, (z `f` x1) `f` x2, ...]
--
-- Properties:
--
-- > last (scanl f z xs) == foldl f z xs
--
scanl :: (a -> b -> a) -> a -> [b] -> [a]
scanl f acc ys = acc : rest
  where
    -- The accumulator is emitted before the list is inspected, so scanl
    -- stays productive even on an undefined tail.
    rest = case ys of
        []   -> []
        z:zs -> scanl f (f acc z) zs
# INLINE [ 1 ] scanl #
or perhaps :
f q xs0 = q : go q xs0
where go q [ ] = [ ]
go q ( x : xs ) = let q ' = f q x
in q ' : go q ' xs
scanl f q xs0 = q : go q xs0
where go q [] = []
go q (x:xs) = let q' = f q x
in q' : go q' xs
-}
--
note : 's ' scan ' is a bit weird , as it always puts the initial
-- state as a prefix. this complicates the rules.
--
# RULES
" - > fusible " [ ~1 ] forall f z xs .
f z xs = unstream ( Stream.scanl f z ( Stream.snoc ( stream xs ) bottom ) )
--"scanl - > unfused " [ 1 ] forall f z xs .
-- unstream ( Stream.scanl f z ( Stream.snoc ( stream xs ) bottom ) ) = f z xs
#
"scanl -> fusible" [~1] forall f z xs.
scanl f z xs = unstream (Stream.scanl f z (Stream.snoc (stream xs) bottom))
--"scanl -> unfused" [1] forall f z xs.
-- unstream (Stream.scanl f z (Stream.snoc (stream xs) bottom)) = scanl f z xs
#-}
-- | /O(n)/, /fusion/. 'scanl1' is a variant of 'scanl' that has no starting value argument:
--
-- > scanl1 f [x1, x2, ...] == [x1, x1 `f` x2, ...]
--
scanl1 :: (a -> a -> a) -> [a] -> [a]
scanl1 f ys = case ys of
    []   -> []
    z:zs -> scanl f z zs    -- the head seeds the scan
{-# INLINE [1] scanl1 #-}
# RULES
" scanl1 - > fusible " [ ~1 ] forall f xs .
scanl1 f xs = unstream ( Stream.scanl1 f ( Stream.snoc ( stream xs ) bottom ) )
--"scanl1 - > unfused " [ 1 ] forall f xs .
-- unstream ( Stream.scanl1 f ( Stream.snoc ( stream xs ) bottom ) ) = scanl1 f xs
#
"scanl1 -> fusible" [~1] forall f xs.
scanl1 f xs = unstream (Stream.scanl1 f (Stream.snoc (stream xs) bottom))
--"scanl1 -> unfused" [1] forall f xs.
-- unstream (Stream.scanl1 f (Stream.snoc (stream xs) bottom)) = scanl1 f xs
#-}
-- | /O(n)/. 'scanr' is the right-to-left dual of 'scanl'.
-- Properties:
--
-- > head (scanr f z xs) == foldr f z xs
--
scanr :: (a -> b -> b) -> b -> [a] -> [b]
scanr _ z []     = [z]
scanr f z (y:ys) = f y r : rest
  where
    -- rest is the scan of the tail; its head r is the running result.
    rest@(r:_) = scanr f z ys
# INLINE [ 1 ] scanr #
RULES
" scanr - > fusible " [ ~1 ] forall f z xs .
scanr f z xs = unstream ( Stream.scanr f z ( Stream.cons bottom ( stream xs ) ) )
" scanr - > unfused " [ 1 ] forall f z xs .
unstream ( Stream.scanr f z ( Stream.cons bottom ( stream xs ) ) ) = scanr f z xs
"scanr -> fusible" [~1] forall f z xs.
scanr f z xs = unstream (Stream.scanr f z (Stream.cons bottom (stream xs)))
"scanr -> unfused" [1] forall f z xs.
unstream (Stream.scanr f z (Stream.cons bottom (stream xs))) = scanr f z xs
-}
-- | 'scanr1' is a variant of 'scanr' that has no starting value argument.
scanr1 :: (a -> a -> a) -> [a] -> [a]
scanr1 _ []     = []
scanr1 _ [y]    = [y]
scanr1 f (y:ys) = f y r : rest
  where
    -- rest is the scan of the tail; its head r is the running result.
    rest@(r:_) = scanr1 f ys
TODO fuse
-- ---------------------------------------------------------------------
-- ** Accumulating maps
-- | The 'mapAccumL' function behaves like a combination of 'map' and
-- 'foldl'; it applies a function to each element of a list, passing
-- an accumulating parameter from left to right, and returning a final
-- value of this accumulator together with the new list.
--
mapAccumL :: (acc -> x -> (acc, y)) -> acc -> [x] -> (acc, [y])
mapAccumL _ s []     = (s, [])
mapAccumL f s (x:xs) =
    let (s1, y)  = f s x              -- accumulator threaded left-to-right
        (s2, ys) = mapAccumL f s1 xs
    in  (s2, y:ys)
TODO fuse
-- | The 'mapAccumR' function behaves like a combination of 'map' and
-- 'foldr'; it applies a function to each element of a list, passing
-- an accumulating parameter from right to left, and returning a final
-- value of this accumulator together with the new list.
--
mapAccumR :: (acc -> x -> (acc, y)) -> acc -> [x] -> (acc, [y])
mapAccumR _ s []     = (s, [])
mapAccumR f s (x:xs) =
    let (s1, ys) = mapAccumR f s xs   -- accumulator threaded right-to-left
        (s2, y)  = f s1 x
    in  (s2, y:ys)
TODO fuse
------------------------------------------------------------------------
-- ** Infinite lists
| /fusion/. ' iterate ' @f returns an infinite list of repeated applications
of to @x@ :
--
-- > iterate f x == [x, f x, f (f x), ...]
iterate :: (a -> a) -> a -> [a]
iterate f x0 = go x0
  where go y = y : go (f y)
# NOINLINE [ 1 ] iterate #
# RULES
" iterate - > fusible " [ ~1 ] forall f x.
iterate f x = unstream ( Stream.iterate f x )
--"iterate - > unfused " [ 1 ] forall f x.
-- unstream ( Stream.iterate f x ) = iterate f x
#
"iterate -> fusible" [~1] forall f x.
iterate f x = unstream (Stream.iterate f x)
--"iterate -> unfused" [1] forall f x.
-- unstream (Stream.iterate f x) = iterate f x
#-}
-- | /fusion/. 'repeat' @x@ is an infinite list, with @x@ the value of every element.
repeat :: a -> [a]
repeat x = cyclic
  where cyclic = x : cyclic   -- a single, self-referential cons cell
# INLINE [ 1 ] repeat #
# RULES
" repeat - > fusible " [ ~1 ] forall x.
repeat x = unstream ( Stream.repeat x )
--"repeat - > unfused " [ 1 ] forall x.
-- unstream ( Stream.repeat x ) = repeat x
#
"repeat -> fusible" [~1] forall x.
repeat x = unstream (Stream.repeat x)
--"repeat -> unfused" [1] forall x.
-- unstream (Stream.repeat x) = repeat x
#-}
-- | /O(n)/, /fusion/. 'replicate' @n x@ is a list of length @n@ with @x@ the value of
-- every element.
-- It is an instance of the more general 'Data.List.genericReplicate',
-- in which @n@ may be of any integral type.
--
replicate :: Int -> a -> [a]
replicate n0 x
  | n0 <= 0   = []            -- non-positive counts give the empty list
  | otherwise = build n0
  where
    build 0 = []
    build k = x : build (k-1)
# NOINLINE [ 1 ] replicate #
# RULES
" replicate - > fusible " [ ~1 ]
replicate = - > unstream ( Stream.replicate n x )
--"replicate - > unfused " [ 1 ] forall n x.
-- unstream ( Stream.replicate n x ) = replicate n x
#
"replicate -> fusible" [~1]
replicate = \n x -> unstream (Stream.replicate n x)
--"replicate -> unfused" [1] forall n x.
-- unstream (Stream.replicate n x) = replicate n x
#-}
-- | /fusion/. 'cycle' ties a finite list into a circular one, or equivalently,
-- the infinite repetition of the original list. It is the identity
-- on infinite lists.
--
cycle :: [a] -> [a]
cycle []  = error "Prelude.cycle: empty list"
cycle ys0 = loop ys0
  where
    loop []     = loop ys0    -- start over from the full list
    loop (y:ys) = y : loop ys
# NOINLINE [ 1 ] cycle #
# RULES
" cycle - > fusible " [ ~1 ] forall xs .
cycle xs = unstream ( Stream.cycle ( stream xs ) )
--"cycle - > unfused " [ 1 ] forall xs .
-- unstream ( Stream.cycle ( stream xs ) ) = cycle xs
#
"cycle -> fusible" [~1] forall xs.
cycle xs = unstream (Stream.cycle (stream xs))
--"cycle -> unfused" [1] forall xs.
-- unstream (Stream.cycle (stream xs)) = cycle xs
#-}
-- ---------------------------------------------------------------------
-- ** Unfolding
-- | /fusion/. The 'unfoldr' function is a \`dual\' to 'foldr': while 'foldr'
-- reduces a list to a summary value, 'unfoldr' builds a list from
-- a seed value. The function takes the element and returns 'Nothing'
-- if it is done producing the list or returns 'Just' @(a,b)@, in which
case , @a@ is a prepended to the list and @b@ is used as the next
-- element in a recursive call. For example,
--
> iterate f = = unfoldr ( \x - > Just ( x , f x ) )
--
-- In some cases, 'unfoldr' can undo a 'foldr' operation:
--
-- > unfoldr f' (foldr f z xs) == xs
--
-- if the following holds:
--
-- > f' (f x y) = Just (x,y)
-- > f' z = Nothing
--
-- A simple use of unfoldr:
--
> unfoldr ( \b - > if b = = 0 then Nothing else Just ( b , b-1 ) ) 10
-- > [10,9,8,7,6,5,4,3,2,1]
--
unfoldr :: (b -> Maybe (a, b)) -> b -> [a]
unfoldr f seed0 = expand seed0
  where
    expand seed = case f seed of
        Nothing        -> []
        Just (a, next) -> a : expand next
# INLINE [ 1 ] unfoldr #
# RULES
" unfoldr - > fusible " [ ~1 ] forall f x.
unfoldr f x = unstream ( Stream.unfoldr f x )
--"unfoldr - > unfused " [ 1 ] forall f x.
-- unstream ( Stream.unfoldr f x ) = unfoldr f x
#
"unfoldr -> fusible" [~1] forall f x.
unfoldr f x = unstream (Stream.unfoldr f x)
--"unfoldr -> unfused" [1] forall f x.
-- unstream (Stream.unfoldr f x) = unfoldr f x
#-}
------------------------------------------------------------------------
-- * Sublists
-- ** Extracting sublists
-- | /O(n)/,/fusion/. 'take' @n@, applied to a list @xs@, returns the prefix of @xs@
of length @n@ , or @xs@ itself if @n > ' length ' xs@ :
--
> take 5 " Hello World ! " = = " Hello "
> take 3 [ 1,2,3,4,5 ] = = [ 1,2,3 ]
> take 3 [ 1,2 ] = = [ 1,2 ]
> take 3 [ ] = = [ ]
-- > take (-1) [1,2] == []
> take 0 [ 1,2 ] = = [ ]
--
It is an instance of the more general ' Data . List.genericTake ' ,
in which @n@ may be of any integral type .
--
take :: Int -> [a] -> [a]
take n ys
  | n <= 0    = []            -- non-positive counts take nothing
  | otherwise = go n ys
  where
    go :: Int -> [a] -> [a]
    go 0 _      = []
    go _ []     = []
    go k (z:zs) = z : go (k-1) zs
# NOINLINE [ 1 ] take #
# RULES
" take - > fusible " [ ~1 ] forall n x.
take n x = unstream ( Stream.take n ( stream x ) )
--"take - > unfused " [ 1 ] forall n x.
-- unstream ( Stream.take n ( stream x ) ) = take n x
#
"take -> fusible" [~1] forall n x.
take n x = unstream (Stream.take n (stream x))
--"take -> unfused" [1] forall n x.
-- unstream (Stream.take n (stream x)) = take n x
#-}
take : : Int - > [ a ] - > [ a ]
take ( I # n # ) xs = takeUInt n # xs
takeUInt : : Int # - > [ b ] - > [ b ]
takeUInt n xs
| n > = # 0 # = take_unsafe_UInt n xs
| otherwise = [ ]
take_unsafe_UInt : : Int # - > [ b ] - > [ b ]
take_unsafe_UInt 0 # _ = [ ]
take_unsafe_UInt m ls =
case ls of
[ ] - > [ ]
( x : xs ) - > x : take_unsafe_UInt ( m - # 1 # ) xs
take :: Int -> [a] -> [a]
take (I# n#) xs = takeUInt n# xs
takeUInt :: Int# -> [b] -> [b]
takeUInt n xs
| n >=# 0# = take_unsafe_UInt n xs
| otherwise = []
take_unsafe_UInt :: Int# -> [b] -> [b]
take_unsafe_UInt 0# _ = []
take_unsafe_UInt m ls =
case ls of
[] -> []
(x:xs) -> x : take_unsafe_UInt (m -# 1#) xs
-}
-- | /O(n)/, /fusion/. 'drop' @n xs@ returns the suffix of @xs@
-- after the first @n@ elements, or @[]@ if @n > 'length' xs@:
--
> drop 6 " Hello World ! " = = " World ! "
> drop 3 [ 1,2,3,4,5 ] = = [ 4,5 ]
> drop 3 [ 1,2 ] = = [ ]
> drop 3 [ ] = = [ ]
-- > drop (-1) [1,2] == [1,2]
> drop 0 [ 1,2 ] = = [ 1,2 ]
--
-- It is an instance of the more general 'Data.List.genericDrop',
in which @n@ may be of any integral type .
--
drop :: Int -> [a] -> [a]
drop n ys
  | n < 0     = ys            -- negative counts drop nothing
  | otherwise = skip n ys
  where
    skip :: Int -> [a] -> [a]
    skip 0 zs     = zs
    skip _ []     = []
    skip m (_:zs) = skip (m-1) zs
# NOINLINE [ 1 ] drop #
# RULES
" drop - > fusible " [ ~1 ] forall n x.
drop n x = unstream ( Stream.drop n ( stream x ) )
--"drop - > unfused " [ 1 ] forall n x.
-- unstream ( Stream.drop n ( stream x ) ) = drop n x
#
"drop -> fusible" [~1] forall n x.
drop n x = unstream (Stream.drop n (stream x))
--"drop -> unfused" [1] forall n x.
-- unstream (Stream.drop n (stream x)) = drop n x
#-}
| ' splitAt ' @n returns a tuple where first element is @xs@ prefix of
length @n@ and second element is the remainder of the list :
--
> splitAt 6 " Hello World ! " = = ( " Hello " , " World ! " )
> splitAt 3 [ 1,2,3,4,5 ] = = ( [ 1,2,3],[4,5 ] )
> splitAt 1 [ 1,2,3 ] = = ( [ 1],[2,3 ] )
> splitAt 3 [ 1,2,3 ] = = ( [ 1,2,3 ] , [ ] )
> splitAt 4 [ 1,2,3 ] = = ( [ 1,2,3 ] , [ ] )
-- > splitAt 0 [1,2,3] == ([],[1,2,3])
-- > splitAt (-1) [1,2,3] == ([],[1,2,3])
--
-- It is equivalent to @('take' n xs, 'drop' n xs)@.
-- 'splitAt' is an instance of the more general 'Data.List.genericSplitAt',
in which @n@ may be of any integral type .
--
splitAt :: Int -> [a] -> ([a], [a])
splitAt n ys
  | n < 0     = ([], ys)      -- negative counts split before the list
  | otherwise = divide n ys
  where
    divide :: Int -> [a] -> ([a], [a])
    divide 0 zs     = ([], zs)
    divide _ []     = ([], [])
    divide m (z:zs) = let (front, back) = divide (m-1) zs
                      in  (z:front, back)
# NOINLINE [ 1 ] splitAt #
{-
splitAt n xs | n <= 0 = ([], xs)
splitAt _ [] = ([], [])
splitAt n (x:xs) = (x:xs', xs'')
where
(xs', xs'') = splitAt (n-1) xs
-}
# RULES
" splitAt - > fusible " [ ~1 ] forall n xs .
splitAt n xs = Stream.splitAt n ( stream xs )
--"splitAt - > unfused " [ 1 ] forall n xs .
-- Stream.splitAt n ( stream xs ) = splitAt n xs
#
"splitAt -> fusible" [~1] forall n xs.
splitAt n xs = Stream.splitAt n (stream xs)
--"splitAt -> unfused" [1] forall n xs.
-- Stream.splitAt n (stream xs) = splitAt n xs
#-}
-- | /O(n)/,/fusion/. 'takeWhile', applied to a predicate @p@ and a list @xs@, returns the
-- longest prefix (possibly empty) of @xs@ of elements that satisfy @p@:
--
> ( < 3 ) [ 1,2,3,4,1,2,3,4 ] = = [ 1,2 ]
> ( < 9 ) [ 1,2,3 ] = = [ 1,2,3 ]
> ( < 0 ) [ 1,2,3 ] = = [ ]
--
takeWhile :: (a -> Bool) -> [a] -> [a]
takeWhile _ []  = []
takeWhile p ys0 = walk ys0
  where
    walk []     = []
    walk (y:ys) = if p y then y : walk ys else []
# NOINLINE [ 1 ] takeWhile #
# RULES
" takeWhile - > fusible " [ ~1 ] forall f xs .
takeWhile f xs = unstream ( Stream.takeWhile f ( stream xs ) )
--"takeWhile - > unfused " [ 1 ] forall f xs .
-- unstream ( Stream.takeWhile f ( stream xs ) ) = takeWhile f xs
#
"takeWhile -> fusible" [~1] forall f xs.
takeWhile f xs = unstream (Stream.takeWhile f (stream xs))
--"takeWhile -> unfused" [1] forall f xs.
-- unstream (Stream.takeWhile f (stream xs)) = takeWhile f xs
#-}
| /O(n)/,/fusion/. ' dropWhile ' @p xs@ returns the suffix remaining after ' takeWhile ' @p xs@ :
--
> dropWhile ( < 3 ) [ 1,2,3,4,5,1,2,3 ] = = [ 3,4,5,1,2,3 ]
> dropWhile ( < 9 ) [ 1,2,3 ] = = [ ]
-- > dropWhile (< 0) [1,2,3] == [1,2,3]
--
-- | Remainder of the list after the longest prefix satisfying the predicate.
dropWhile :: (a -> Bool) -> [a] -> [a]
dropWhile _ [] = []
dropWhile skip input = walk input
  where
    walk []           = []
    walk rest@(y:ys)  = if skip y then walk ys else rest
# NOINLINE [ 1 ] dropWhile #
# RULES
" dropWhile - > fusible " [ ~1 ] forall f xs .
dropWhile f xs = unstream ( Stream.dropWhile f ( stream xs ) )
--"dropWhile - > unfused " [ 1 ] forall f xs .
-- unstream ( Stream.dropWhile f ( stream xs ) ) = dropWhile f xs
#
"dropWhile -> fusible" [~1] forall f xs.
dropWhile f xs = unstream (Stream.dropWhile f (stream xs))
--"dropWhile -> unfused" [1] forall f xs.
-- unstream (Stream.dropWhile f (stream xs)) = dropWhile f xs
#-}
-- | 'span', applied to a predicate @p@ and a list @xs@, returns a tuple where
first element is longest prefix ( possibly empty ) of @xs@ of elements that
satisfy @p@ and second element is the remainder of the list :
--
> span ( < 3 ) [ 1,2,3,4,1,2,3,4 ] = = ( [ 1,2],[3,4,1,2,3,4 ] )
> span ( < 9 ) [ 1,2,3 ] = = ( [ 1,2,3 ] , [ ] )
-- > span (< 0) [1,2,3] == ([],[1,2,3])
--
' span ' @p xs@ is equivalent to @('takeWhile ' p xs , ' dropWhile ' p xs)@
-- Worker/wrapper form; the lazy 'let'-bound pair keeps the result
-- productive before the recursive call is forced.
span :: (a -> Bool) -> [a] -> ([a], [a])
span _ [] = ([], [])
span p xs0 = go xs0
  where
    go [] = ([], [])
    go xs@(x:xs')
      | p x       = let (ys,zs) = go xs'
                    in (x:ys,zs)
      | otherwise = ([],xs)
TODO fuse
-- Hmm, these do a lot of sharing, but is it worth it?
-- | 'break', applied to a predicate @p@ and a list @xs@, returns a tuple where
first element is longest prefix ( possibly empty ) of @xs@ of elements that
/do not satisfy/ @p@ and second element is the remainder of the list :
--
> break ( > 3 ) [ 1,2,3,4,1,2,3,4 ] = = ( [ 1,2,3],[4,1,2,3,4 ] )
> break ( < 9 ) [ 1,2,3 ] = = ( [ ] , [ 1,2,3 ] )
> break ( > 9 ) [ 1,2,3 ] = = ( [ 1,2,3 ] , [ ] )
--
' break ' @p@ is equivalent to @'span ' ( ' not ' . p)@.
--
-- Dual of 'span': splits at the first element that satisfies the predicate.
-- The lazy 'let'-bound pair mirrors 'span' above.
break :: (a -> Bool) -> [a] -> ([a], [a])
break _ [] = ([], [])
break p xs0 = go xs0
  where
    go [] = ([], [])
    go xs@(x:xs')
      | p x       = ([],xs)
      | otherwise = let (ys,zs) = go xs'
                    in (x:ys,zs)
TODO fuse
-- | The 'group' function takes a list and returns a list of lists such
-- that the concatenation of the result is equal to the argument. Moreover,
-- each sublist in the result contains only equal elements. For example,
--
> group " Mississippi " = [ " M","i","ss","i","ss","i","pp","i " ]
--
-- It is a special case of 'groupBy', which allows the programmer to supply
-- their own equality test.
-- Groups equal adjacent elements using this module's 'span'.
group :: Eq a => [a] -> [[a]]
group []     = []
group (x:xs) = (x:ys) : group zs
  where (ys,zs) = span (x ==) xs
TODO fuse
-- | The 'inits' function returns all initial segments of the argument,
shortest first . For example ,
--
> inits " abc " = = [ " " , " a","ab","abc " ]
--
-- | All initial segments, shortest ([]) first.
inits :: [a] -> [[a]]
inits = foldr (\y acc -> [] : map (y:) acc) ([] : [])
TODO fuse
-- | The 'tails' function returns all final segments of the argument,
-- longest first. For example,
--
> tails " abc " = = [ " abc " , " bc " , " c " , " " ]
--
-- All final segments, longest (the whole list) first; always ends with [].
tails :: [a] -> [[a]]
tails []         = [] : []
tails xxs@(_:xs) = xxs : tails xs
TODO fuse
------------------------------------------------------------------------
-- * Predicates
| /O(n)/,/fusion/. The ' isPrefixOf ' function takes two lists and
returns ' True ' iff the first list is a prefix of the second .
--
-- | True iff the first list is a prefix of the second.
isPrefixOf :: Eq a => [a] -> [a] -> Bool
isPrefixOf []     _      = True
isPrefixOf _      []     = False
isPrefixOf (x:xs) (y:ys) = x == y && isPrefixOf xs ys
{-# NOINLINE [1] isPrefixOf #-}
{-# RULES
"isPrefixOf -> fusible" [~1] forall xs ys.
    isPrefixOf xs ys = Stream.isPrefixOf (stream xs) (stream ys)
--"isPrefixOf -> unfused" [1] forall xs ys.
--    Stream.isPrefixOf (stream xs) (stream ys) = isPrefixOf xs ys
  #-}
| The ' isSuffixOf ' function takes two lists and returns ' True '
iff the first list is a suffix of the second .
-- Both lists must be finite.
-- Suffix test via double reverse; both lists must be finite.
isSuffixOf :: Eq a => [a] -> [a] -> Bool
isSuffixOf x y = reverse x `isPrefixOf` reverse y
TODO fuse
| The ' isInfixOf ' function takes two lists and returns ' True '
iff the first list is contained , wholly and intact ,
anywhere within the second .
--
-- Example:
--
> isInfixOf " Haskell " " I really like . " - > True
> isInfixOf " Ial " " I really like . " - > False
--
-- Substring test: a needle is infix iff it prefixes some tail of the haystack.
isInfixOf :: Eq a => [a] -> [a] -> Bool
isInfixOf needle haystack = any (isPrefixOf needle) (tails haystack)
TODO fuse
-- ---------------------------------------------------------------------
-- * Searching lists
-- ** Searching by equality
-- | /O(n)/, /fusion/. 'elem' is the list membership predicate, usually written
-- in infix form, e.g., @x `elem` xs@.
--
-- | List membership predicate.
elem :: Eq a => a -> [a] -> Bool
elem needle = search
  where
    search []     = False
    search (y:ys) = needle == y || search ys
# NOINLINE [ 1 ] elem #
# RULES
" elem - > fusible " [ ~1 ] forall x xs .
elem x xs = Stream.elem x ( stream xs )
--"elem - > unfused " [ 1 ] forall x xs .
-- Stream.elem x ( stream xs ) = elem x xs
#
"elem -> fusible" [~1] forall x xs.
elem x xs = Stream.elem x (stream xs)
--"elem -> unfused" [1] forall x xs.
-- Stream.elem x (stream xs) = elem x xs
#-}
-- | /O(n)/, /fusion/. 'notElem' is the negation of 'elem'.
-- Negation of this module's 'elem'.
notElem :: Eq a => a -> [a] -> Bool
notElem x xs = not (elem x xs)
{-# INLINE notElem #-}
{- RULES
-- We don't provide an explicitly fusible version, since not . elem is
-- just as good.
"notElem -> fusible" [~1] forall x xs.
    notElem x xs = Stream.notElem x (stream xs)
"notElem -> unfused" [1] forall x xs.
    Stream.notElem x (stream xs) = notElem x xs
  -}
-- | /O(n)/,/fusion/. 'lookup' @key assocs@ looks up a key in an association list.
-- | /O(n)/,/fusion/. 'lookup' @key assocs@ looks up a key in an
-- association list, returning the first matching value, if any.
lookup :: Eq a => a -> [(a, b)] -> Maybe b
lookup _ [] = Nothing
lookup key xys0 = go xys0
  where
    -- Worker loop: recurse through 'go', not the top-level 'lookup',
    -- so the wrapper's empty-case and 'key' argument are not
    -- re-threaded on every step (the original recursed via 'lookup',
    -- defeating the worker/wrapper split it set up).
    go []          = Nothing
    go ((x,y):xys)
      | key == x  = Just y
      | otherwise = go xys
# NOINLINE [ 1 ] lookup #
# RULES
" lookup - > fusible " [ ~1 ] forall x xs .
lookup x xs = Stream.lookup x ( stream xs )
--"lookup - > unfused " [ 1 ] forall x xs .
-- Stream.lookup x ( stream xs ) = lookup x xs
#
"lookup -> fusible" [~1] forall x xs.
lookup x xs = Stream.lookup x (stream xs)
--"lookup -> unfused" [1] forall x xs.
-- Stream.lookup x (stream xs) = lookup x xs
#-}
-- | /O(n)/,/fusion/. 'filter', applied to a predicate and a list, returns the list of
-- those elements that satisfy the predicate; i.e.,
--
-- > filter p xs = [ x | x <- xs, p x]
--
-- Properties:
--
> filter p ( filter q s ) = filter ( \x - > q x & & p x ) s
--
-- | Keep exactly the elements satisfying the predicate.
filter :: (a -> Bool) -> [a] -> [a]
filter _ [] = []
filter keep input = walk input
  where
    walk []     = []
    walk (y:ys) = if keep y then y : walk ys else walk ys
# NOINLINE [ 1 ] filter #
# RULES
" filter - > fusible " [ ~1 ] forall f xs .
filter f xs = unstream ( Stream.filter f ( stream xs ) )
--"filter - > unfused " [ 1 ] forall f xs .
-- unstream ( Stream.filter f ( stream xs ) ) = filter f xs
#
"filter -> fusible" [~1] forall f xs.
filter f xs = unstream (Stream.filter f (stream xs))
--"filter -> unfused" [1] forall f xs.
-- unstream (Stream.filter f (stream xs)) = filter f xs
#-}
------------------------------------------------------------------------
-- ** Searching with a predicate
-- | /O(n)/,/fusion/. The 'find' function takes a predicate and a list and returns the
first element in the list matching the predicate , or ' Nothing ' if
-- there is no such element.
-- | First element satisfying the predicate, or 'Nothing'.
find :: (a -> Bool) -> [a] -> Maybe a
find _ [] = Nothing
find wanted input = search input
  where
    search []     = Nothing
    search (y:ys) = if wanted y then Just y else search ys
# NOINLINE [ 1 ] find #
# RULES
" find - > fusible " [ ~1 ] forall f xs .
find f xs = Stream.find f ( stream xs )
--"find - > unfused " [ 1 ] forall f xs .
-- Stream.find f ( stream xs ) = find f xs
#
"find -> fusible" [~1] forall f xs.
find f xs = Stream.find f (stream xs)
--"find -> unfused" [1] forall f xs.
-- Stream.find f (stream xs) = find f xs
#-}
-- | The 'partition' function takes a predicate a list and returns
-- the pair of lists of elements which do and do not satisfy the
-- predicate, respectively; i.e.,
--
-- > partition p xs == (filter p xs, filter (not . p) xs)
-- Single-pass split into (satisfying, not-satisfying) via 'select'.
partition :: (a -> Bool) -> [a] -> ([a], [a])
partition p xs = foldr (select p) ([],[]) xs
# INLINE partition #
TODO fuse
-- foldr worker for 'partition'; the irrefutable ~(ts,fs) keeps the
-- accumulator lazy so the fold stays productive.
select :: (a -> Bool) -> a -> ([a], [a]) -> ([a], [a])
select p x ~(ts,fs) | p x       = (x:ts,fs)
                    | otherwise = (ts, x:fs)
------------------------------------------------------------------------
-- * Indexing lists
-- | /O(n)/,/fusion/. List index (subscript) operator, starting from 0.
-- It is an instance of the more general 'Data.List.genericIndex',
-- which takes an index of any integral type.
-- List indexing from 0; errors on negative or out-of-range indices.
-- The CPP guard hides the 'where' clause from haddock only.
(!!) :: [a] -> Int -> a
xs0 !! n0
  | n0 < 0    = error "Prelude.(!!): negative index"
  | otherwise = index xs0 n0
#ifndef __HADDOCK__
  where
    index []     _ = error "Prelude.(!!): index too large"
    index (y:ys) n = if n == 0 then y else index ys (n-1)
#endif
# NOINLINE [ 1 ] ( ! ! ) #
# RULES
" ! ! - > fusible " [ ~1 ] forall xs n.
xs ! ! n = Stream.index ( stream xs ) n
-- " ! ! - > unfused " [ 1 ] forall -- Stream.index ( stream xs ) n = xs ! ! n
#
"!! -> fusible" [~1] forall xs n.
xs !! n = Stream.index (stream xs) n
-- "!! -> unfused" [1] forall xs n.
-- Stream.index (stream xs) n = xs !! n
#-}
| The ' elemIndex ' function returns the index of the first element
-- in the given list which is equal (by '==') to the query element,
-- or 'Nothing' if there is no such element.
--
-- Properties:
--
-- > elemIndex x xs = listToMaybe [ n | (n,a) <- zip [0..] xs, a == x ]
-- > elemIndex x xs = findIndex (x==) xs
--
-- Index of the first element equal to x, via this module's 'findIndex'.
elemIndex :: Eq a => a -> [a] -> Maybe Int
elemIndex x = findIndex (x==)
{-# INLINE elemIndex #-}
{-
elemIndex :: Eq a => a -> [a] -> Maybe Int
elemIndex y xs0 = loop_elemIndex xs0 0
#ifndef __HADDOCK__
  where
    loop_elemIndex []     !_ = Nothing
    loop_elemIndex (x:xs) !n
      | p x       = Just n
      | otherwise = loop_elemIndex xs (n + 1)
    p = (y ==)
#endif
{-# NOINLINE [1] elemIndex #-}
-}
RULES
" elemIndex - > fusible " [ ~1 ] forall x xs .
elemIndex x xs = Stream.elemIndex x ( stream xs )
" elemIndex - > unfused " [ 1 ] forall x xs .
Stream.elemIndex x ( stream xs ) = elemIndex x xs
"elemIndex -> fusible" [~1] forall x xs.
elemIndex x xs = Stream.elemIndex x (stream xs)
"elemIndex -> unfused" [1] forall x xs.
Stream.elemIndex x (stream xs) = elemIndex x xs
-}
-- | /O(n)/,/fusion/. The 'elemIndices' function extends 'elemIndex', by
-- returning the indices of all elements equal to the query element, in
-- ascending order.
--
-- Properties:
--
> length ( filter (= = a ) xs ) = length ( elemIndices a xs )
--
-- Indices of all elements equal to x, ascending, via 'findIndices'.
elemIndices :: Eq a => a -> [a] -> [Int]
elemIndices x = findIndices (x==)
{-# INLINE elemIndices #-}
{-
elemIndices :: Eq a => a -> [a] -> [Int]
elemIndices y xs0 = loop_elemIndices xs0 0
#ifndef __HADDOCK__
  where
    loop_elemIndices []     !_ = []
    loop_elemIndices (x:xs) !n
      | p x       = n : loop_elemIndices xs (n + 1)
      | otherwise = loop_elemIndices xs (n + 1)
    p = (y ==)
#endif
{-# NOINLINE [1] elemIndices #-}
-}
RULES
" elemIndices - > fusible " [ ~1 ] forall x xs .
elemIndices x xs = unstream ( Stream.elemIndices x ( stream xs ) )
" elemIndices - > unfused " [ 1 ] forall x xs .
unstream ( Stream.elemIndices x ( stream xs ) ) = elemIndices x xs
"elemIndices -> fusible" [~1] forall x xs.
elemIndices x xs = unstream (Stream.elemIndices x (stream xs))
"elemIndices -> unfused" [1] forall x xs.
unstream (Stream.elemIndices x (stream xs)) = elemIndices x xs
-}
-- | The 'findIndex' function takes a predicate and a list and returns
the index of the first element in the list satisfying the predicate ,
-- or 'Nothing' if there is no such element.
--
-- Properties:
--
-- > findIndex p xs = listToMaybe [ n | (n,x) <- zip [0..] xs, p x ]
--
-- Index of first element satisfying p. The counter is an unboxed
-- Int# (MagicHash), boxed with I# only on success.
findIndex :: (a -> Bool) -> [a] -> Maybe Int
findIndex p ls = loop_findIndex ls 0#
  where
    loop_findIndex []     _ = Nothing
    loop_findIndex (x:xs) n
      | p x       = Just (I# n)
      | otherwise = loop_findIndex xs (n +# 1#)
# NOINLINE [ 1 ] findIndex #
# RULES
" findIndex - > fusible " [ ~1 ] forall f xs .
findIndex f xs = Stream.findIndex f ( stream xs )
-- " findIndex - > unfused " [ 1 ] forall f xs .
-- Stream.findIndex f ( stream xs ) = findIndex f xs
#
"findIndex -> fusible" [~1] forall f xs.
findIndex f xs = Stream.findIndex f (stream xs)
-- "findIndex -> unfused" [1] forall f xs.
-- Stream.findIndex f (stream xs) = findIndex f xs
#-}
-- | /O(n)/,/fusion/. The 'findIndices' function extends 'findIndex', by
-- returning the indices of all elements satisfying the predicate, in
-- ascending order.
--
-- Properties:
--
> length ( filter p xs ) = length ( findIndices p xs )
--
-- All indices whose elements satisfy p, ascending; unboxed Int# counter
-- as in 'findIndex'.
findIndices :: (a -> Bool) -> [a] -> [Int]
findIndices p ls = loop_findIndices ls 0#
  where
    loop_findIndices []     _ = []
    loop_findIndices (x:xs) n
      | p x       = I# n : loop_findIndices xs (n +# 1#)
      | otherwise = loop_findIndices xs (n +# 1#)
# NOINLINE [ 1 ] findIndices #
# RULES
" findIndices - > fusible " [ ~1 ] forall p xs .
findIndices p xs = unstream ( Stream.findIndices p ( stream xs ) )
-- " findIndices - > unfused " [ 1 ] forall p xs .
-- unstream ( Stream.findIndices p ( stream xs ) ) = findIndices p xs
#
"findIndices -> fusible" [~1] forall p xs.
findIndices p xs = unstream (Stream.findIndices p (stream xs))
-- "findIndices -> unfused" [1] forall p xs.
-- unstream (Stream.findIndices p (stream xs)) = findIndices p xs
#-}
------------------------------------------------------------------------
-- * Zipping and unzipping lists
| /O(n)/,/fusion/. ' zip ' takes two lists and returns a list of
corresponding pairs . If one input list is short , excess elements of
-- the longer list are discarded.
--
-- Properties:
--
-- > zip a b = zipWith (,) a b
--
-- | Pair up two lists; stops at the shorter one.
zip :: [a] -> [b] -> [(a, b)]
zip xs ys =
  case (xs, ys) of
    (a:as, b:bs) -> (a, b) : zip as bs
    _            -> []
# NOINLINE [ 1 ] zip #
# RULES
" zip - > fusible " [ ~1 ] forall xs ys .
zip xs ys = unstream ( Stream.zip ( stream xs ) ( stream ys ) )
-- " zip - > unfused " [ 1 ] forall xs ys .
-- unstream ( Stream.zip ( stream xs ) ( stream ys ) ) = zip xs ys
#
"zip -> fusible" [~1] forall xs ys.
zip xs ys = unstream (Stream.zip (stream xs) (stream ys))
-- "zip -> unfused" [1] forall xs ys.
-- unstream (Stream.zip (stream xs) (stream ys)) = zip xs ys
#-}
| /O(n)/,/fusion/. ' zip3 ' takes three lists and returns a list of
-- triples, analogous to 'zip'.
--
-- Properties:
--
-- > zip3 a b c = zipWith (,,) a b c
--
-- Three-way zip; stops at the shortest list.
zip3 :: [a] -> [b] -> [c] -> [(a, b, c)]
zip3 (a:as) (b:bs) (c:cs) = (a,b,c) : zip3 as bs cs
zip3 _ _ _                = []
{-# NOINLINE [1] zip3 #-}
{-# RULES
"zip3 -> fusible" [~1] forall xs ys zs.
    zip3 xs ys zs = unstream (Stream.zipWith3 (,,) (stream xs) (stream ys) (stream zs))
-- "zip3 -> unfused" [1] forall xs ys zs.
--    unstream (Stream.zipWith3 (,,) (stream xs) (stream ys) (stream zs)) = zip3 xs ys zs
  #-}
| /O(n)/,/fusion/. The ' zip4 ' function takes four lists and returns a list of
-- quadruples, analogous to 'zip'.
-- Four-way zip via 'zipWith4'.
zip4 :: [a] -> [b] -> [c] -> [d] -> [(a, b, c, d)]
zip4 = zipWith4 (,,,)
# INLINE zip4 #
| The ' zip5 ' function takes five lists and returns a list of
five - tuples , analogous to ' zip ' .
-- Five-way zip via 'zipWith5'.
zip5 :: [a] -> [b] -> [c] -> [d] -> [e] -> [(a, b, c, d, e)]
zip5 = zipWith5 (,,,,)
| The ' ' function takes six lists and returns a list of six - tuples ,
-- analogous to 'zip'.
-- Six-way zip via 'zipWith6'.
zip6 :: [a] -> [b] -> [c] -> [d] -> [e] -> [f] -> [(a, b, c, d, e, f)]
zip6 = zipWith6 (,,,,,)
| The ' zip7 ' function takes seven lists and returns a list of
seven - tuples , analogous to ' zip ' .
-- Seven-way zip via 'zipWith7'.
zip7 :: [a] -> [b] -> [c] -> [d] -> [e] -> [f] -> [g] -> [(a, b, c, d, e, f, g)]
zip7 = zipWith7 (,,,,,,)
-- | /O(n)/,/fusion/. 'zipWith' generalises 'zip' by zipping with the
function given as the first argument , instead of a tupling function .
For example , @'zipWith ' ( + ) @ is applied to two lists to produce the
-- list of corresponding sums.
-- Properties:
--
-- > zipWith (,) = zip
--
-- | Combine two lists point-wise with the given function; stops at the
-- shorter list.
zipWith :: (a -> b -> c) -> [a] -> [b] -> [c]
zipWith combine = loop
  where
    loop (a:as) (b:bs) = combine a b : loop as bs
    loop _      _      = []
# INLINE [ 1 ] zipWith #
FIXME : If we change the above INLINE to NOINLINE then ghc goes into
-- a loop, why? Do we have some dodgy recursive rules somewhere?
# RULES
" zipWith - > fusible " [ ~1 ] forall f xs ys .
zipWith f xs ys = unstream ( Stream.zipWith f ( stream xs ) ( stream ys ) )
-- " zipWith - > unfused " [ 1 ] forall f xs ys .
-- unstream ( Stream.zipWith f ( stream xs ) ( stream ys ) ) = zipWith f xs ys
#
"zipWith -> fusible" [~1] forall f xs ys.
zipWith f xs ys = unstream (Stream.zipWith f (stream xs) (stream ys))
-- "zipWith -> unfused" [1] forall f xs ys.
-- unstream (Stream.zipWith f (stream xs) (stream ys)) = zipWith f xs ys
#-}
-- | /O(n)/,/fusion/. The 'zipWith3' function takes a function which
combines three elements , as well as three lists and returns a list of
-- their point-wise combination, analogous to 'zipWith'.
--
-- Properties:
--
-- > zipWith3 (,,) = zip3
--
-- Point-wise combination of three lists; stops at the shortest.
zipWith3 :: (a -> b -> c -> d) -> [a] -> [b] -> [c] -> [d]
zipWith3 z (a:as) (b:bs) (c:cs) = z a b c : zipWith3 z as bs cs
zipWith3 _ _ _ _                = []
# NOINLINE [ 1 ] zipWith3 #
# RULES
" zipWith3 - > fusible " [ ~1 ] forall f xs ys zs .
zipWith3 f xs ys zs = unstream ( Stream.zipWith3 f ( stream xs ) ( stream ys ) ( stream zs ) )
-- " zipWith3 - > unfused " [ 1 ] forall f xs ys zs .
-- unstream ( Stream.zipWith3 f ( stream xs ) ( stream ys ) ( stream zs ) ) = zipWith3 f xs ys zs
#
"zipWith3 -> fusible" [~1] forall f xs ys zs.
zipWith3 f xs ys zs = unstream (Stream.zipWith3 f (stream xs) (stream ys) (stream zs))
-- "zipWith3 -> unfused" [1] forall f xs ys zs.
-- unstream (Stream.zipWith3 f (stream xs) (stream ys) (stream zs)) = zipWith3 f xs ys zs
#-}
| /O(n)/,/fusion/. The ' zipWith4 ' function takes a function which combines four
elements , as well as four lists and returns a list of their point - wise
-- combination, analogous to 'zipWith'.
-- Point-wise combination of four lists; stops at the shortest.
zipWith4 :: (a -> b -> c -> d -> e) -> [a] -> [b] -> [c] -> [d] -> [e]
zipWith4 z (a:as) (b:bs) (c:cs) (d:ds)
  = z a b c d : zipWith4 z as bs cs ds
zipWith4 _ _ _ _ _ = []
# NOINLINE [ 1 ] zipWith4 #
# RULES
" zipWith4 - > fusible " [ ~1 ] forall f ws xs ys zs .
zipWith4 f ws xs ys zs = unstream ( Stream.zipWith4 f ( stream ws ) ( stream xs ) ( stream ys ) ( stream zs ) )
-- " zipWith4 - > unfused " [ 1 ] forall f ws xs ys zs .
-- unstream ( Stream.zipWith4 f ( stream ws ) ( stream xs ) ( stream ys ) ( stream zs ) ) =
#
"zipWith4 -> fusible" [~1] forall f ws xs ys zs.
zipWith4 f ws xs ys zs = unstream (Stream.zipWith4 f (stream ws) (stream xs) (stream ys) (stream zs))
-- "zipWith4 -> unfused" [1] forall f ws xs ys zs.
-- unstream (Stream.zipWith4 f (stream ws) (stream xs) (stream ys) (stream zs)) = zipWith4 f ws xs ys zs
#-}
| The ' zipWith5 ' function takes a function which combines five
elements , as well as five lists and returns a list of their point - wise
-- combination, analogous to 'zipWith'.
-- Point-wise combination of five lists; stops at the shortest.
zipWith5 :: (a -> b -> c -> d -> e -> f)
         -> [a] -> [b] -> [c] -> [d] -> [e] -> [f]
zipWith5 z (a:as) (b:bs) (c:cs) (d:ds) (e:es)
  = z a b c d e : zipWith5 z as bs cs ds es
zipWith5 _ _ _ _ _ _ = []
TODO fuse
| The ' zipWith6 ' function takes a function which combines six
elements , as well as six lists and returns a list of their point - wise
-- combination, analogous to 'zipWith'.
-- Point-wise combination of six lists; stops at the shortest.
zipWith6 :: (a -> b -> c -> d -> e -> f -> g)
         -> [a] -> [b] -> [c] -> [d] -> [e] -> [f] -> [g]
zipWith6 z (a:as) (b:bs) (c:cs) (d:ds) (e:es) (f:fs)
  = z a b c d e f : zipWith6 z as bs cs ds es fs
zipWith6 _ _ _ _ _ _ _ = []
TODO fuse
| The ' zipWith7 ' function takes a function which combines seven
elements , as well as seven lists and returns a list of their point - wise
-- combination, analogous to 'zipWith'.
-- Point-wise combination of seven lists; stops at the shortest.
zipWith7 :: (a -> b -> c -> d -> e -> f -> g -> h)
         -> [a] -> [b] -> [c] -> [d] -> [e] -> [f] -> [g] -> [h]
zipWith7 z (a:as) (b:bs) (c:cs) (d:ds) (e:es) (f:fs) (g:gs)
  = z a b c d e f g : zipWith7 z as bs cs ds es fs gs
zipWith7 _ _ _ _ _ _ _ _ = []
TODO fuse
------------------------------------------------------------------------
-- unzips
| ' unzip ' transforms a list of pairs into a list of first components
and a list of second components .
-- Split a list of pairs; irrefutable ~(as,bs) keeps the fold lazy.
unzip :: [(a, b)] -> ([a], [b])
unzip = foldr (\(a,b) ~(as,bs) -> (a:as,b:bs)) ([],[])
TODO fuse
| The ' unzip3 ' function takes a list of triples and returns three
-- lists, analogous to 'unzip'.
-- Split a list of triples; lazy accumulator as in 'unzip'.
unzip3 :: [(a, b, c)] -> ([a], [b], [c])
unzip3 = foldr (\(a,b,c) ~(as,bs,cs) -> (a:as,b:bs,c:cs)) ([],[],[])
TODO fuse
| The ' ' function takes a list of quadruples and returns four
-- lists, analogous to 'unzip'.
-- Split a list of quadruples; lazy accumulator as in 'unzip'.
unzip4 :: [(a, b, c, d)] -> ([a], [b], [c], [d])
unzip4 = foldr (\(a,b,c,d) ~(as,bs,cs,ds) ->
                 (a:as,b:bs,c:cs,d:ds))
               ([],[],[],[])
TODO fuse
| The ' ' function takes a list of five - tuples and returns five
-- lists, analogous to 'unzip'.
-- Split a list of five-tuples; lazy accumulator as in 'unzip'.
unzip5 :: [(a, b, c, d, e)] -> ([a], [b], [c], [d], [e])
unzip5 = foldr (\(a,b,c,d,e) ~(as,bs,cs,ds,es) ->
                 (a:as,b:bs,c:cs,d:ds,e:es))
               ([],[],[],[],[])
TODO fuse
| The ' unzip6 ' function takes a list of six - tuples and returns six
-- lists, analogous to 'unzip'.
-- Split a list of six-tuples; lazy accumulator as in 'unzip'.
unzip6 :: [(a, b, c, d, e, f)] -> ([a], [b], [c], [d], [e], [f])
unzip6 = foldr (\(a,b,c,d,e,f) ~(as,bs,cs,ds,es,fs) ->
                 (a:as,b:bs,c:cs,d:ds,e:es,f:fs))
               ([],[],[],[],[],[])
TODO fuse
| The ' ' function takes a list of seven - tuples and returns
seven lists , analogous to ' unzip ' .
-- Split a list of seven-tuples; lazy accumulator as in 'unzip'.
unzip7 :: [(a, b, c, d, e, f, g)] -> ([a], [b], [c], [d], [e], [f], [g])
unzip7 = foldr (\(a,b,c,d,e,f,g) ~(as,bs,cs,ds,es,fs,gs) ->
                 (a:as,b:bs,c:cs,d:ds,e:es,f:fs,g:gs))
               ([],[],[],[],[],[],[])
TODO fuse
------------------------------------------------------------------------
-- * Special lists
-- ** Functions on strings
-- | /O(O)/,/fusion/. 'lines' breaks a string up into a list of strings
-- at newline characters. The resulting strings do not contain
-- newlines.
-- Split on '\n' using this module's 'break'; the resulting strings do
-- not contain the newline.
lines :: String -> [String]
lines [] = []
lines s  = let (l, s') = break (== '\n') s
           in l : case s' of
                    []      -> []
                    (_:s'') -> lines s''
--TODO: can we do better than this and preserve the same strictness?
{-
-- This implementation is fast but too strict :-(
-- it doesn't yield each line until it has seen the ending '\n'
lines :: String -> [String]
lines [] = []
lines cs0 = go [] cs0
where
go l [] = reverse l : []
go l ('\n':cs) = reverse l : case cs of
[] -> []
_ -> go [] cs
go l ( c :cs) = go (c:l) cs
-}
# INLINE [ 1 ] lines #
RULES
" lines - > fusible " [ ~1 ] forall xs .
lines xs = unstream ( Stream.lines ( stream xs ) )
" lines - > unfused " [ 1 ] forall xs .
unstream ( Stream.lines ( stream xs ) ) = lines xs
"lines -> fusible" [~1] forall xs.
lines xs = unstream (Stream.lines (stream xs))
"lines -> unfused" [1] forall xs.
unstream (Stream.lines (stream xs)) = lines xs
-}
-- | 'words' breaks a string up into a list of words, which were delimited
-- by white space.
-- Split on whitespace runs using this module's 'dropWhile' and 'break'.
words :: String -> [String]
words s = case dropWhile isSpace s of
            "" -> []
            s' -> w : words s''
              where (w, s'') = break isSpace s'
TODO fuse
--TODO: can we do better than this and preserve the same strictness?
{-
-- This implementation is fast but too strict :-(
-- it doesn't yield each word until it has seen the ending space
words cs0 = dropSpaces cs0
where
dropSpaces :: String -> [String]
dropSpaces [] = []
dropSpaces (c:cs)
| isSpace c = dropSpaces cs
| otherwise = munchWord [c] cs
munchWord :: String -> String -> [String]
munchWord w [] = reverse w : []
munchWord w (c:cs)
| isSpace c = reverse w : dropSpaces cs
| otherwise = munchWord (c:w) cs
-}
-- | /O(n)/,/fusion/. 'unlines' is an inverse operation to 'lines'.
-- It joins lines, after appending a terminating newline to each.
--
-- > unlines xs = concatMap (++"\n")
--
-- Join lines, appending '\n' after each; mutual 'go'/'to' loop copies
-- each line then emits the terminating newline.
unlines :: [String] -> String
unlines css0 = to css0
  where go []     css = '\n' : to css
        go (c:cs) css = c : go cs css
        to []       = []
        to (cs:css) = go cs css
# NOINLINE [ 1 ] unlines #
--
-- fuse via:
-- unlines xs = concatMap (snoc xs '\n')
--
RULES
" unlines - > fusible " [ ~1 ] forall xs .
unlines xs = unstream ( Stream.concatMap ( \x - > Stream.snoc ( stream x ) ' \n ' ) ( stream xs ) )
" unlines - > unfused " [ 1 ] forall xs .
unstream ( Stream.concatMap ( \x - > Stream.snoc ( stream x ) ' \n ' ) ( stream xs ) ) = unlines xs
"unlines -> fusible" [~1] forall xs.
unlines xs = unstream (Stream.concatMap (\x -> Stream.snoc (stream x) '\n') (stream xs))
"unlines -> unfused" [1] forall xs.
unstream (Stream.concatMap (\x -> Stream.snoc (stream x) '\n') (stream xs)) = unlines xs
-}
-- | 'unwords' is an inverse operation to 'words'.
-- It joins words with separating spaces.
-- Join words with single spaces; no trailing space is emitted.
unwords :: [String] -> String
unwords []         = []
unwords (cs0:css0) = go cs0 css0
  where go []     css = to css
        go (c:cs) css = c : go cs css
        to []       = []
        to (cs:ccs) = ' ' : go cs ccs
TODO fuse
------------------------------------------------------------------------
-- ** \"Set\" operations
-- | The 'nub' function removes duplicate elements from a list.
In particular , it keeps only the first occurrence of each element .
( The name ' nub ' means \`essence\ ' . )
-- It is a special case of 'nubBy', which allows the programmer to supply
-- their own equality test.
--
-- | Remove duplicates, keeping the first occurrence of each element.
nub :: Eq a => [a] -> [a]
nub input = walk input []
  where
    -- second argument accumulates elements already emitted
    walk [] _ = []
    walk (y:ys) seen
      | y `elem` seen = walk ys seen
      | otherwise     = y : walk ys (y : seen)
{- RULES
-- ndm's optimisation
"sort/nub" forall xs. sort (nub xs) = map head (group (sort xs))
-}
TODO fuse
| ' delete ' @x@ removes the first occurrence of @x@ from its list argument .
-- For example,
--
> delete ' a ' " banana " = = " "
--
-- It is a special case of 'deleteBy', which allows the programmer to
-- supply their own equality test.
--
-- Remove the first occurrence of x, via 'deleteBy' with '=='.
delete :: Eq a => a -> [a] -> [a]
delete = deleteBy (==)
TODO fuse
-- | The '\\' function is list difference ((non-associative).
In the result of @xs@ ' \\ ' @ys@ , the first occurrence of each element of
-- @ys@ in turn (if any) has been removed from @xs@. Thus
--
-- > (xs ++ ys) \\ xs == ys.
--
-- It is a special case of 'deleteFirstsBy', which allows the programmer
-- to supply their own equality test.
-- List difference: remove the first occurrence of each element of the
-- second list from the first.
(\\) :: Eq a => [a] -> [a] -> [a]
(\\) = foldl (flip delete)
| The ' union ' function returns the list union of the two lists .
-- For example,
--
-- > "dog" `union` "cow" == "dogcw"
--
Duplicates , and elements of the first list , are removed from the
the second list , but if the first list contains duplicates , so will
-- the result.
-- It is a special case of 'unionBy', which allows the programmer to supply
-- their own equality test.
--
-- List union via 'unionBy' with '=='.
union :: Eq a => [a] -> [a] -> [a]
union = unionBy (==)
TODO fuse
| The ' intersect ' function takes the list intersection of two lists .
-- For example,
--
> [ 1,2,3,4 ] ` intersect ` [ 2,4,6,8 ] = = [ 2,4 ]
--
If the first list contains duplicates , so will the result .
-- It is a special case of 'intersectBy', which allows the programmer to
-- supply their own equality test.
--
-- List intersection via 'intersectBy' with '=='.
intersect :: Eq a => [a] -> [a] -> [a]
intersect = intersectBy (==)
TODO fuse
------------------------------------------------------------------------
-- ** Ordered lists
TODO stuff in Ord can use Map / IntMap
, an Ord constraint ! we could use a better structure .
-- | The 'sort' function implements a stable sorting algorithm.
-- It is a special case of 'sortBy', which allows the programmer to supply
-- their own comparison function.
--
-- Properties:
--
-- > not (null x) ==> (head . sort) x = minimum x
-- > not (null x) ==> (last . sort) x = maximum x
--
-- Stable sort via the bottom-up 'mergesort' below, using 'compare'.
sort :: Ord a => [a] -> [a]
sort l = mergesort compare l
TODO fuse , we have an Ord constraint !
-- | /O(n)/,/fusion/. The 'insert' function takes an element and a list and inserts the
-- element into the list at the last position where it is still less
-- than or equal to the next element. In particular, if the list
-- is sorted before the call, the result will also be sorted.
-- It is a special case of 'insertBy', which allows the programmer to
-- supply their own comparison function.
--
-- Ordered insert via 'insertBy' with 'compare'.
insert :: Ord a => a -> [a] -> [a]
insert e ls = insertBy (compare) e ls
# INLINE insert #
------------------------------------------------------------------------
-- * Generalized functions
-- ** The \"By\" operations
* * * User - supplied equality ( replacing an Eq context )
-- | The 'nubBy' function behaves just like 'nub', except it uses a
-- user-supplied equality predicate instead of the overloaded '=='
-- function.
-- 'nub' with a user-supplied equality; the second argument of the
-- worker accumulates elements already emitted.
nubBy :: (a -> a -> Bool) -> [a] -> [a]
nubBy eq l = nubBy' l []
  where
    nubBy' [] _ = []
    nubBy' (y:ys) xs
      | elem_by eq y xs = nubBy' ys xs
      | otherwise       = y : nubBy' ys (y:xs)
TODO fuse
-- Not exported:
-- Note that we keep the call to `eq` with arguments in the
-- same order as in the reference implementation
-- 'xs' is the list of things we've seen so far,
-- 'y' is the potential new element
--
-- Not exported. Membership under a user-supplied predicate; the seen
-- element is the LEFT argument of 'eq' (x `eq` y), matching the
-- reference implementation's argument order.
elem_by :: (a -> a -> Bool) -> a -> [a] -> Bool
elem_by _  _ []     = False
elem_by eq y (x:xs) = x `eq` y || elem_by eq y xs
-- | The 'deleteBy' function behaves like 'delete', but takes a
-- user-supplied equality predicate.
-- | 'delete' with a user-supplied equality predicate; removes only the
-- first match.
deleteBy :: (a -> a -> Bool) -> a -> [a] -> [a]
deleteBy _  _ []     = []
deleteBy eq x (y:ys)
  | x `eq` y  = ys
  | otherwise = y : deleteBy eq x ys
TODO fuse
-- Generalised list difference: fold 'deleteBy' over the second list.
deleteFirstsBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
deleteFirstsBy eq = foldl (flip (deleteBy eq))
-- | The 'unionBy' function is the non-overloaded version of 'union'.
-- Non-overloaded 'union': dedupe ys, then drop from it anything
-- already in xs, and append.
unionBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
unionBy eq xs ys = xs ++ foldl (flip (deleteBy eq)) (nubBy eq ys) xs
TODO fuse
-- | The 'intersectBy' function is the non-overloaded version of 'intersect'.
-- Non-overloaded 'intersect'; keeps duplicates from the first list.
intersectBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
intersectBy eq xs ys = [x | x <- xs, any (eq x) ys]
TODO fuse
-- | The 'groupBy' function is the non-overloaded version of 'group'.
-- Non-overloaded 'group', using this module's 'span'.
groupBy :: (a -> a -> Bool) -> [a] -> [[a]]
groupBy _  []     = []
groupBy eq (x:xs) = (x:ys) : groupBy eq zs
  where (ys,zs) = span (eq x) xs
TODO fuse
------------------------------------------------------------------------
* * * User - supplied comparison ( replacing an context )
-- | The 'sortBy' function is the non-overloaded version of 'sort'.
-- Non-overloaded 'sort', delegating to 'mergesort'.
sortBy :: (a -> a -> Ordering) -> [a] -> [a]
sortBy cmp l = mergesort cmp l
TODO fuse
mergesort :: (a -> a -> Ordering) -> [a] -> [a]
mergesort cmp xs = mergesort' cmp (map wrap xs)
mergesort' :: (a -> a -> Ordering) -> [[a]] -> [a]
mergesort' _ [] = []
mergesort' _ [xs] = xs
mergesort' cmp xss = mergesort' cmp (merge_pairs cmp xss)
-- One pass over the list of runs, merging them two at a time.
merge_pairs :: (a -> a -> Ordering) -> [[a]] -> [[a]]
merge_pairs cmp xss =
    case xss of
      (xs:ys:rest) -> merge cmp xs ys : merge_pairs cmp rest
      _            -> xss   -- zero or one run left: nothing to merge
-- Stable merge of two sorted runs: on EQ (or LT) the element from the
-- left run is emitted first.
merge :: (a -> a -> Ordering) -> [a] -> [a] -> [a]
merge _   xs []  = xs
merge _   []  ys = ys
merge cmp xs@(x:xs') ys@(y:ys')
    | x `cmp` y == GT = y : merge cmp xs  ys'
    | otherwise       = x : merge cmp xs' ys
-- Make a singleton run.
wrap :: a -> [a]
wrap = (:[])
-- | /O(n)/,/fusion/. The non-overloaded version of 'insert'.
-- Inserts the element before the first list element that compares
-- greater, so it is stable with respect to equal elements.
insertBy :: (a -> a -> Ordering) -> a -> [a] -> [a]
insertBy _   x []          = [x]
insertBy cmp x ys@(y:ys')
    | cmp x y == GT = y : insertBy cmp x ys'
    | otherwise     = x : ys
{-# NOINLINE [1] insertBy #-}
{-# RULES
"insertBy -> fusible" [~1] forall f x xs.
    insertBy f x xs = unstream (Stream.insertBy f x (stream xs))
--"insertBy -> unfused" [1] forall f x xs.
--    unstream (Stream.insertBy f x (stream xs)) = insertBy f x xs
  #-}
-- | /O(n)/,/fusion/. The 'maximumBy' function takes a comparison function
-- and a list and returns the greatest element of the list by that
-- comparison.  The list must be finite and non-empty.  On ties the
-- later element wins.
maximumBy :: (a -> a -> Ordering) -> [a] -> a
maximumBy _   [] = error "List.maximumBy: empty list"
maximumBy cmp xs = foldl1 pick xs
  where
    pick x y
        | cmp x y == GT = x
        | otherwise     = y
{-# NOINLINE [1] maximumBy #-}
{-# RULES
"maximumBy -> fused" [~1] forall p xs.
    maximumBy p xs = Stream.maximumBy p (stream xs)
--"maximumBy -> unfused" [1] forall p xs.
--    Stream.maximumBy p (stream xs) = maximumBy p xs
  #-}
-- | /O(n)/,/fusion/. The 'minimumBy' function takes a comparison function
-- and a list and returns the least element of the list by that
-- comparison.  The list must be finite and non-empty.  On ties the
-- earlier element wins.
minimumBy :: (a -> a -> Ordering) -> [a] -> a
minimumBy _   [] = error "List.minimumBy: empty list"
minimumBy cmp xs = foldl1 pick xs
  where
    pick x y
        | cmp x y == GT = y
        | otherwise     = x
{-# NOINLINE [1] minimumBy #-}
{-# RULES
"minimumBy -> fused" [~1] forall p xs.
    minimumBy p xs = Stream.minimumBy p (stream xs)
--"minimumBy -> unfused" [1] forall p xs.
--    Stream.minimumBy p (stream xs) = minimumBy p xs
  #-}
------------------------------------------------------------------------
-- * The \"generic\" operations
-- | The 'genericLength' function is an overloaded version of 'length'.  In
-- particular, instead of returning an 'Int', it returns any type which is
-- an instance of 'Num'.  It is, however, less efficient than 'length'.
--
genericLength :: Num i => [b] -> i
-- fold form of the recursion: [] -> 0, (_:l) -> 1 + genericLength l
genericLength = foldr (\_ n -> 1 + n) 0
{-# NOINLINE [1] genericLength #-}
{-# RULES
"genericLength -> fusible" [~1] forall xs.
    genericLength xs = Stream.genericLength (stream xs)
--"genericLength -> unfused" [1] forall xs.
--    Stream.genericLength (stream xs) = genericLength xs
  #-}
{-# RULES
"genericLength -> length/Int" genericLength = length :: [a] -> Int
  #-}
-- | /O(n)/,/fusion/. The 'genericTake' function is an overloaded version
-- of 'take', which accepts any 'Integral' value as the number of elements
-- to take.  A negative count on a non-empty list is an error.
genericTake :: Integral i => i -> [a] -> [a]
genericTake n _
    | n == 0    = []
genericTake _ [] = []
genericTake n (x:xs)
    | n > 0     = x : genericTake (n-1) xs
    | otherwise = error "List.genericTake: negative argument"
{-# NOINLINE [1] genericTake #-}
{-# RULES
"genericTake -> fusible" [~1] forall xs n.
    genericTake n xs = unstream (Stream.genericTake n (stream xs))
--"genericTake -> unfused" [1] forall xs n.
--    unstream (Stream.genericTake n (stream xs)) = genericTake n xs
  #-}
{-# RULES
"genericTake -> take/Int" genericTake = take :: Int -> [a] -> [a]
#-}
-- | /O(n)/,/fusion/. The 'genericDrop' function is an overloaded version
-- of 'drop', which accepts any 'Integral' value as the number of elements
-- to drop.  A negative count on a non-empty list is an error.
genericDrop :: Integral i => i -> [a] -> [a]
genericDrop n xs
    | n == 0       = xs
genericDrop _ []   = []
genericDrop n (_:xs)
    | n > 0        = genericDrop (n-1) xs
genericDrop _ _    = error "List.genericDrop: negative argument"
{-# NOINLINE [1] genericDrop #-}
{-# RULES
"genericDrop -> fusible" [~1] forall xs n.
    genericDrop n xs = unstream (Stream.genericDrop n (stream xs))
--"genericDrop -> unfused" [1] forall xs n.
--    unstream (Stream.genericDrop n (stream xs)) = genericDrop n xs
  #-}
{-# RULES
"genericDrop -> drop/Int" genericDrop = drop :: Int -> [a] -> [a]
#-}
-- | /O(n)/,/fusion/. The 'genericIndex' function is an overloaded version
-- of '!!', which accepts any 'Integral' value as the (zero-based) index.
genericIndex :: Integral a => [b] -> a -> b
genericIndex (x:xs) n
    | n == 0    = x
    | n > 0     = genericIndex xs (n-1)
    | otherwise = error "List.genericIndex: negative argument."
genericIndex _ _ = error "List.genericIndex: index too large."
{-# NOINLINE [1] genericIndex #-}
-- can we pull the n > 0 test out and do it just once?
-- probably not since we don't know what n-1 does!!
-- can only specialise it for sane Integral instances :-(
{-# RULES
"genericIndex -> fusible" [~1] forall xs n.
    genericIndex xs n = Stream.genericIndex (stream xs) n
--"genericIndex -> unfused" [1] forall xs n.
--    Stream.genericIndex (stream xs) n = genericIndex n xs
  #-}
{-# RULES
"genericIndex -> index/Int" genericIndex = (!!) :: [a] -> Int -> a
#-}
-- | /O(n)/,/fusion/. The 'genericSplitAt' function is an overloaded
-- version of 'splitAt', which accepts any 'Integral' value as the
-- position at which to split.  A negative position on a non-empty
-- list is an error.
genericSplitAt :: Integral i => i -> [a] -> ([a], [a])
genericSplitAt n xs
    | n == 0       = ([], xs)
genericSplitAt _ []    = ([], [])
genericSplitAt n (x:xs)
    | n > 0        = let (front, back) = genericSplitAt (n-1) xs
                     in (x:front, back)
genericSplitAt _ _     = error "List.genericSplitAt: negative argument"
{-# RULES
"genericSplitAt -> fusible" [~1] forall xs n.
    genericSplitAt n xs = Stream.genericSplitAt n (stream xs)
--"genericSplitAt -> unfused" [1] forall xs n.
--    Stream.genericSplitAt n (stream xs) = genericSplitAt n xs
  #-}
{-# RULES
"genericSplitAt -> splitAt/Int" genericSplitAt = splitAt :: Int -> [a] -> ([a], [a])
#-}
-- | /O(n)/,/fusion/. The 'genericReplicate' function is an overloaded
-- version of 'replicate', which accepts any 'Integral' value as the
-- number of repetitions to make.
genericReplicate :: Integral i => i -> a -> [a]
genericReplicate n = genericTake n . repeat
{-# INLINE genericReplicate #-}
{-# RULES
"genericReplicate -> replicate/Int" genericReplicate = replicate :: Int -> a -> [a]
  #-}
-- ---------------------------------------------------------------------
-- Internal utilities
-- Common up near identical calls to `error' to reduce the number
-- constant strings created when compiled:
-- | Raise the standard \"empty list\" error for the named function,
-- via 'moduleError' so the message is prefixed with @Data.List.@.
errorEmptyList :: String -> a
errorEmptyList fun = moduleError fun "empty list"
{-# NOINLINE errorEmptyList #-}
-- | Raise an error whose message is @\"Data.List.\" ++ fun ++ \": \" ++ msg@.
moduleError :: String -> String -> a
moduleError fun msg = error (concat ["Data.List.", fun, ": ", msg])
{-# NOINLINE moduleError #-}
-- A deliberately-failing value, referenced by the fusion RULES (e.g. the
-- scanl/scanl1 rules use it as a 'Stream.snoc' sentinel); it must never
-- be forced at run time.
bottom :: a
bottom = error "Data.List.Stream: bottom"
{-# NOINLINE bottom #-}
| null | https://raw.githubusercontent.com/jmlowenthal/staged-streams.agda/3ac5fbff18808a505185c88000a817046de35d0d/lib/stream-fusion-0.1.2.5/Data/List/Stream.hs | haskell | # LANGUAGE BangPatterns #
# LANGUAGE MagicHash #
|
Module : Data.List.Stream
License : BSD-style
Maintainer :
Stability : experimental
Portability : portable
based on stream fusion for sequences. Described in:
* /Stream Fusion: From Lists to Streams to Nothing at All/, by
</~dons/papers/CLS07.html>
</~dons/papers/CSL06.html>
See the source for the complete story:
* </~dons/code/streams/list/Data/Stream.hs>
This library is a drop in replacement for "Data.List".
* Basic interface
:: [a] -> [a] -> [a]
:: [a] -> a
:: [a] -> a
:: [a] -> [a]
:: [a] -> [a]
:: [a] -> Bool
:: [a] -> Int
* List transformations
:: (a -> b) -> [a] -> [b]
:: [a] -> [a]
:: a -> [a] -> [a]
:: [a] -> [[a]] -> [a]
:: [[a]] -> [[a]]
* Reducing lists (folds)
:: (a -> b -> a) -> a -> [b] -> a
:: (a -> b -> a) -> a -> [b] -> a
:: (a -> a -> a) -> [a] -> a
:: (a -> a -> a) -> [a] -> a
:: (a -> b -> b) -> b -> [a] -> b
:: (a -> a -> a) -> [a] -> a
** Special folds
:: [[a]] -> [a]
:: (a -> [b]) -> [a] -> [b]
:: [Bool] -> Bool
:: [Bool] -> Bool
:: (a -> Bool) -> [a] -> Bool
:: (a -> Bool) -> [a] -> Bool
* Building lists
** Scans
:: (a -> b -> a) -> a -> [b] -> [a]
:: (a -> a -> a) -> [a] -> [a]
:: (a -> b -> b) -> b -> [a] -> [b]
:: (a -> a -> a) -> [a] -> [a]
** Accumulating maps
** Infinite lists
:: (a -> a) -> a -> [a]
:: a -> [a]
:: Int -> a -> [a]
:: [a] -> [a]
** Unfolding
:: (b -> Maybe (a, b)) -> b -> [a]
* Sublists
** Extracting sublists
:: Int -> [a] -> [a]
:: Int -> [a] -> [a]
:: Int -> [a] -> ([a], [a])
:: (a -> Bool) -> [a] -> [a]
:: (a -> Bool) -> [a] -> [a]
:: (a -> Bool) -> [a] -> ([a], [a])
:: (a -> Bool) -> [a] -> ([a], [a])
:: Eq a => [a] -> [[a]]
:: [a] -> [[a]]
:: [a] -> [[a]]
* Predicates
:: Eq a => [a] -> [a] -> Bool
:: Eq a => [a] -> [a] -> Bool
:: Eq a => [a] -> [a] -> Bool
* Searching lists
** Searching by equality
:: Eq a => a -> [a] -> Bool
:: Eq a => a -> [a] -> Bool
:: Eq a => a -> [(a, b)] -> Maybe b
** Searching with a predicate
:: (a -> Bool) -> [a] -> Maybe a
:: (a -> Bool) -> [a] -> [a]
:: (a -> Bool) -> [a] -> ([a], [a])
* Indexing lists
| These functions treat a list @xs@ as a indexed collection,
with indices ranging from 0 to @'length' xs - 1@.
:: [a] -> Int -> a
:: Eq a => a -> [a] -> Maybe Int
:: Eq a => a -> [a] -> [Int]
:: (a -> Bool) -> [a] -> Maybe Int
:: (a -> Bool) -> [a] -> [Int]
* Zipping and unzipping lists
:: [a] -> [b] -> [(a, b)]
:: [a] -> [b] -> [c] -> [(a, b, c)]
:: (a -> b -> c) -> [a] -> [b] -> [c]
:: (a -> b -> c -> d) -> [a] -> [b] -> [c] -> [d]
:: [(a, b)] -> ([a], [b])
:: [(a, b, c)] -> ([a], [b], [c])
* Special lists
** Functions on strings
:: String -> [String]
:: String -> [String]
:: [String] -> String
:: [String] -> String
** \"Set\" operations
:: Eq a => [a] -> [a]
:: Eq a => a -> [a] -> [a]
:: Eq a => [a] -> [a] -> [a]
:: Eq a => [a] -> [a] -> [a]
:: Eq a => [a] -> [a] -> [a]
** Ordered lists
* Generalized functions
** The \"By\" operations
| By convention, overloaded functions have a non-overloaded
counterpart whose name is suffixed with \`@By@\'.
It is often convenient to use these functions together with
'Data.Function.on', for instance @'sortBy' ('compare'
\`on\` 'fst')@.
| The predicate is assumed to define an equivalence.
:: (a -> a -> Bool) -> [a] -> [a]
:: (a -> a -> Bool) -> a -> [a] -> [a]
:: (a -> a -> Bool) -> [a] -> [a] -> [a]
:: (a -> a -> Bool) -> [a] -> [a] -> [a]
:: (a -> a -> Bool) -> [a] -> [a] -> [a]
:: (a -> a -> Bool) -> [a] -> [[a]]
:: (a -> a -> Ordering) -> [a] -> [a]
:: (a -> a -> Ordering) -> a -> [a] -> [a]
:: (a -> a -> Ordering) -> [a] -> a
:: (a -> a -> Ordering) -> [a] -> a
* The \"generic\" operations
| The prefix \`@generic@\' indicates an overloaded function that
:: Integral i => i -> [a] -> [a]
:: Integral i => i -> [a] -> [a]
:: Integral i => i -> [a] -> ([a], [a])
:: Integral a => [b] -> a -> b
:: Integral i => i -> a -> [a]
:: String -> a
# SOURCE #
# SOURCE #
# SOURCE #
# SOURCE #
we just reuse these:
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
The functions in this library marked with /fusion/ are
(transparently) rewritten by the compiler to stream functions, using
For example:
> map f xs
is transformed via rewrite rules to:
The 'unstream' and 'stream' functions identify the allocation points
for each function.
directly composed, or with only intermediate lets and cases), the
fusion rule will fire, removing the intermediate structures.
Consider:
The rewrite engine will transform this code to:
The fusion rule will then fire:
Removing the intermeidate list that is allocated. The compiler then
optimises the result.
Functions that fail to fuse are not left in stream form. In the final
simplifier phase any remaining unfused functions of the form:
> unstream . g . stream
Will be transformed back to their original list implementation.
* api functions should be rewritten to fusible forms as soon as possble
to inline them they'll only have their bodies inlined at the end.
* These rewrite rules can then fire in any but the last phase:
"++ -> fusible" [~1] forall xs ys.
* Finally, if we reach the final phase, rewrite back to best effort [a] forms:
* And then inline the result.
If fusion occurs though, hang on to those 'stream' and 'unstream' pairs:
{-# INLINE [0] unstream #-} -- hmm?
Todo: notes on the phasing of Streams
-----------------------------------------------------------------------------
Fusion for the constructors:
We do not enable fusion for (:), as it leads to a massive massive
slow down in compilation time.
-----------------------------------------------------------------------------
Basic interface
> [x1, ..., xm] ++ [y1, ..., yn] == [x1, ..., xm, y1, ..., yn]
> [x1, ..., xm] ++ [y1, ...] == [x1, ..., xm, y1, ...]
NOTE: This is quite subtle as we do not want to copy the last list in
xs1 ++ xs2 ++ ... ++ xsn
Indeed, we don't really want to fuse the above at all unless at least
one of the arguments has the form (unstream s) or the result of the
concatenation is streamed. The rules below do precisely that. Note they
really fuse instead of just rewriting things into a fusible form so there
is no need to rewrite back.
non-empty.
"head - > unfused " [ 1 ] forall xs .
Stream.head ( stream xs ) = head xs
"head -> unfused" [1] forall xs.
Stream.head (stream xs) = head xs
| /O(n)/, /fusion/. Extract the last element of a list, which must be finite
and non-empty.
"last - > unfused " [ 1 ] forall xs .
Stream.last ( stream xs ) = last xs
"last -> unfused" [1] forall xs.
Stream.last (stream xs) = last xs
| /O(1)/, /fusion/. Extract the elements after the head of a list, which
must be non-empty.
"tail - > unfused " [ 1 ] forall xs .
unstream ( Stream.tail ( stream xs ) ) = tail xs
"tail -> unfused" [1] forall xs.
unstream (Stream.tail (stream xs)) = tail xs
| /O(n)/, /fusion/. Return all the elements of a list except the last one.
The list must be finite and non-empty.
"init - > unfused " [ 1 ] forall xs .
unstream ( Stream.init ( stream xs ) ) = init xs
"init -> unfused" [1] forall xs.
unstream (Stream.init (stream xs)) = init xs
| /O(1)/, /fusion/. Test whether a list is empty.
"null - > unfused " [ 1 ] forall xs .
Stream.null ( stream xs ) = null xs
"null -> unfused" [1] forall xs.
Stream.null (stream xs) = null xs
| /O(n)/, /fusion/. 'length' returns the length of a finite list as an 'Int'.
It is an instance of the more general 'Data.List.genericLength',
the result type of which may be any kind of number.
"length - > unfused " [ 1 ] forall xs .
Stream.length ( stream xs ) = length xs
"length -> unfused" [1] forall xs.
Stream.length (stream xs) = length xs
---------------------------------------------------------------------
List transformations
> map f [x1, x2, ..., xn] == [f x1, f x2, ..., f xn]
> map f [x1, x2, ...] == [f x1, f x2, ...]
Properties:
> map f (repeat x) = repeat (f x)
> map f (replicate n x) = replicate n (f x)
"map - > unfused " [ 1 ] forall f xs .
unstream ( Stream.map f ( stream xs ) ) = map f xs
"map -> unfused" [1] forall f xs.
unstream (Stream.map f (stream xs)) = map f xs
| /O(n)/, /fusion/. 'reverse' @xs@ returns the elements of @xs@ in reverse order.
@xs@ must be finite. Will fuse as a consumer only.
reverse l = rev l []
where
rev [] a = a
rev (x:xs) a = rev xs (x:a)
TODO : I 'm sure there are some cunning things we can do with optimising
reverse . Of course if we try and fuse we may need to still force the
sping of the list : eg reverse . reverse = forceSpine
TODO: I'm sure there are some cunning things we can do with optimising
reverse. Of course if we try and fuse we may need to still force the
sping of the list: eg reverse . reverse = forceSpine
# INLINE forceSpine #
The idea of this slightly odd construction is that we inline the above form
and in the context we may then be able to use xs directly and just keep
around the fact that xs must be forced at some point. Remember, seq does not
imply any evaluation order.
| /O(n)/, /fusion/. The 'intersperse' function takes an element and a list and
For example,
> intersperse ',' "abcde" == "a,b,c,d,e"
It inserts the list @xs@ in between the lists in @xss@ and concatenates the
result.
> intercalate = concat . intersperse
# NOINLINE [1] intercalate #
fusion rule based on:
intercalate = concat . intersperse
| The 'transpose' function transposes the rows and columns of its argument.
For example,
> transpose [[1,2,3],[4,5,6]] == [[1,4],[2,5],[3,6]]
---------------------------------------------------------------------
Reducing lists (folds)
| /O(n)/, /fusion/. 'foldl', applied to a binary operator, a starting value (typically
the left-identity of the operator), and a list, reduces the list
using the binary operator, from left to right:
> foldl f z [x1, x2, ..., xn] == (...((z `f` x1) `f` x2) `f`...) `f` xn
The list must be finite.
"foldl - > unfused " [ 1 ] forall f z xs .
Stream.foldl f z ( stream xs ) = foldl f z xs
"foldl -> unfused" [1] forall f z xs.
Stream.foldl f z (stream xs) = foldl f z xs
| /O(n)/, /fusion/. A strict version of 'foldl'.
"foldl ' - > unfused " [ 1 ] forall f z xs .
Stream.foldl ' f z ( stream xs ) = foldl ' f z xs
"foldl' -> unfused" [1] forall f z xs.
Stream.foldl' f z (stream xs) = foldl' f z xs
and thus must be applied to non-empty lists.
"foldl1 - > unfused " [ 1 ] forall f xs .
Stream.foldl1 f ( stream xs ) = foldl1 f xs
"foldl1 -> unfused" [1] forall f xs.
Stream.foldl1 f (stream xs) = foldl1 f xs
"foldl1 - > unfused " [ 1 ] forall f xs .
Stream.foldl1 ' f ( stream xs ) = foldl1 ' f xs
"foldl1 -> unfused" [1] forall f xs.
Stream.foldl1' f (stream xs) = foldl1' f xs
| /O(n)/, /fusion/. 'foldr', applied to a binary operator, a starting value (typically
the right-identity of the operator), and a list, reduces the list
using the binary operator, from right to left:
> foldr f z [x1, x2, ..., xn] == x1 `f` (x2 `f` ... (xn `f` z)...)
# INLINE [0] foldr #
"foldr - > unfused " [ 1 ] forall f z xs .
Stream.foldr f z ( stream xs ) = foldr f z xs
"foldr -> unfused" [1] forall f z xs.
Stream.foldr f z (stream xs) = foldr f z xs
and thus must be applied to non-empty lists.
# INLINE [1] foldr1 #
"foldr1 - > unfused " [ 1 ] forall f xs .
Stream.foldr1 f ( stream xs ) = foldr1 f xs
"foldr1 -> unfused" [1] forall f xs.
Stream.foldr1 f (stream xs) = foldr1 f xs
---------------------------------------------------------------------
Special folds
hmm, this is slower than the old concat?
fuse via concatMap, as the Stream (Stream a) is too hard to construct
or via foldr (++) ?
"concat - > unfused " [ 1 ] forall xs .
Stream.concat ( stream xs ) = concat xs
"concat -> unfused" [1] forall xs.
Stream.concat (stream xs) = concat xs
| /O(n)/, /fusion/. Map a function over a list and concatenate the results.
at least it will fuse.
# NOINLINE [1] concatMap #
'True', the list must be finite; 'False', however, results from a 'False'
value at a finite index of a finite or infinite list.
"and - > unfused " [ 1 ] forall xs .
Stream.and ( stream xs ) = and xs
"and -> unfused" [1] forall xs.
Stream.and (stream xs) = and xs
'False', the list must be finite; 'True', however, results from a 'True'
value at a finite index of a finite or infinite list.
"or - > unfused " [ 1 ] forall xs .
Stream.or ( stream xs ) = or xs
"or -> unfused" [1] forall xs.
Stream.or (stream xs) = or xs
| /O(n)/, /fusion/. Applied to a predicate and a list, 'any' determines if any element
of the list satisfies the predicate.
"any - > unfused " [ 1 ] forall f xs .
Stream.any f ( stream xs ) = any f xs
"any -> unfused" [1] forall f xs.
Stream.any f (stream xs) = any f xs
| Applied to a predicate and a list, 'all' determines if all elements
of the list satisfy the predicate.
"all - > unfused " [ 1 ] forall f xs .
Stream.all f ( stream xs ) = all f xs
"all -> unfused" [1] forall f xs.
Stream.all f (stream xs) = all f xs
| /O(n)/, /fusion/. The 'sum' function computes the sum of a finite list of numbers.
# RULES
"sum spec Int" sum = sumInt :: [Int] -> Int
#
"sum - > unfused " [ 1 ] forall xs .
Stream.sum ( stream xs ) = sum xs
"sum -> unfused" [1] forall xs.
Stream.sum (stream xs) = sum xs
"sumInt - > unfused " [ 1 ] forall ( xs : : [ Int ] ) .
Stream.sum ( stream xs ) = sumInt xs
"sumInt -> unfused" [1] forall (xs :: [Int]).
Stream.sum (stream xs) = sumInt xs
| /O(n)/,/fusion/. The 'product' function computes the product of a finite list of numbers.
# RULES
"product spec Int" product = productInt :: [Int] -> Int
#
"product - > unfused " [ 1 ] forall xs .
( stream xs ) = product xs
"product -> unfused" [1] forall xs.
Stream.product (stream xs) = product xs
"productInt - > unfused " [ 1 ] forall ( xs : : [ Int ] ) .
( stream xs ) = productInt xs
"productInt -> unfused" [1] forall (xs :: [Int]).
Stream.product (stream xs) = productInt xs
| /O(n)/,/fusion/. 'maximum' returns the maximum value from a list,
which must be non-empty, finite, and of an ordered type.
It is a special case of 'Data.List.maximumBy', which allows the
programmer to supply their own comparison function.
"maximum - > unfused " [ 1 ] forall xs .
Stream.maximum ( stream xs ) = maximum xs
"maximum -> unfused" [1] forall xs.
Stream.maximum (stream xs) = maximum xs
We can't make the overloaded version of maximum strict without
the version specialised to 'Int'.
"strictMaximum - > unfused " [ 1 ] forall xs .
Stream.strictMaximum ( stream xs ) = strictMaximum xs
"strictMaximum -> unfused" [1] forall xs.
Stream.strictMaximum (stream xs) = strictMaximum xs
| /O(n)/,/fusion/. 'minimum' returns the minimum value from a list,
which must be non-empty, finite, and of an ordered type.
It is a special case of 'Data.List.minimumBy', which allows the
programmer to supply their own comparison function.
"minimum - > unfused " [ 1 ] forall xs .
Stream.minimum ( stream xs ) = minimum xs
"minimum -> unfused" [1] forall xs.
Stream.minimum (stream xs) = minimum xs
"strictMinimum - > unfused " [ 1 ] forall xs .
Stream.strictMinimum ( stream xs ) = strictMinimum xs
"strictMinimum -> unfused" [1] forall xs.
Stream.strictMinimum (stream xs) = strictMinimum xs
---------------------------------------------------------------------
* Building lists
** Scans
reduced values from the left:
Properties:
state as a prefix. this complicates the rules.
"scanl - > unfused " [ 1 ] forall f z xs .
unstream ( Stream.scanl f z ( Stream.snoc ( stream xs ) bottom ) ) = f z xs
"scanl -> unfused" [1] forall f z xs.
unstream (Stream.scanl f z (Stream.snoc (stream xs) bottom)) = scanl f z xs
# INLINE [1] scanl1 #
"scanl1 - > unfused " [ 1 ] forall f xs .
unstream ( Stream.scanl1 f ( Stream.snoc ( stream xs ) bottom ) ) = scanl1 f xs
"scanl1 -> unfused" [1] forall f xs.
unstream (Stream.scanl1 f (Stream.snoc (stream xs) bottom)) = scanl1 f xs
Properties:
---------------------------------------------------------------------
** Accumulating maps
| The 'mapAccumL' function behaves like a combination of 'map' and
'foldl'; it applies a function to each element of a list, passing
an accumulating parameter from left to right, and returning a final
value of this accumulator together with the new list.
| The 'mapAccumR' function behaves like a combination of 'map' and
'foldr'; it applies a function to each element of a list, passing
an accumulating parameter from right to left, and returning a final
value of this accumulator together with the new list.
----------------------------------------------------------------------
** Infinite lists
> iterate f x == [x, f x, f (f x), ...]
"iterate - > unfused " [ 1 ] forall f x.
unstream ( Stream.iterate f x ) = iterate f x
"iterate -> unfused" [1] forall f x.
unstream (Stream.iterate f x) = iterate f x
| /fusion/. 'repeat' @x@ is an infinite list, with @x@ the value of every element.
"repeat - > unfused " [ 1 ] forall x.
unstream ( Stream.repeat x ) = repeat x
"repeat -> unfused" [1] forall x.
unstream (Stream.repeat x) = repeat x
every element.
"replicate - > unfused " [ 1 ] forall n x.
unstream ( Stream.replicate n x ) = replicate n x
"replicate -> unfused" [1] forall n x.
unstream (Stream.replicate n x) = replicate n x
| /fusion/. 'cycle' ties a finite list into a circular one, or equivalently,
the infinite repetition of the original list. It is the identity
on infinite lists.
"cycle - > unfused " [ 1 ] forall xs .
unstream ( Stream.cycle ( stream xs ) ) = cycle xs
"cycle -> unfused" [1] forall xs.
unstream (Stream.cycle (stream xs)) = cycle xs
---------------------------------------------------------------------
** Unfolding
| /fusion/. The 'unfoldr' function is a \`dual\' to 'foldr': while 'foldr'
reduces a list to a summary value, 'unfoldr' builds a list from
a seed value. The function takes the element and returns 'Nothing'
if it is done producing the list or returns 'Just' @(a,b)@, in which
element in a recursive call. For example,
In some cases, 'unfoldr' can undo a 'foldr' operation:
> unfoldr f' (foldr f z xs) == xs
if the following holds:
> f' (f x y) = Just (x,y)
> f' z = Nothing
A simple use of unfoldr:
> [10,9,8,7,6,5,4,3,2,1]
"unfoldr - > unfused " [ 1 ] forall f x.
unstream ( Stream.unfoldr f x ) = unfoldr f x
"unfoldr -> unfused" [1] forall f x.
unstream (Stream.unfoldr f x) = unfoldr f x
----------------------------------------------------------------------
* Sublists
** Extracting sublists
| /O(n)/,/fusion/. 'take' @n@, applied to a list @xs@, returns the prefix of @xs@
> take (-1) [1,2] == []
"take - > unfused " [ 1 ] forall n x.
unstream ( Stream.take n ( stream x ) ) = take n x
"take -> unfused" [1] forall n x.
unstream (Stream.take n (stream x)) = take n x
> drop (-1) [1,2] == [1,2]
It is an instance of the more general 'Data.List.genericDrop',
"drop - > unfused " [ 1 ] forall n x.
unstream ( Stream.drop n ( stream x ) ) = drop n x
"drop -> unfused" [1] forall n x.
unstream (Stream.drop n (stream x)) = drop n x
> splitAt 0 [1,2,3] == ([],[1,2,3])
> splitAt (-1) [1,2,3] == ([],[1,2,3])
It is equivalent to @('take' n xs, 'drop' n xs)@.
'splitAt' is an instance of the more general 'Data.List.genericSplitAt',
splitAt n xs | n <= 0 = ([], xs)
splitAt _ [] = ([], [])
splitAt n (x:xs) = (x:xs', xs'')
where
(xs', xs'') = splitAt (n-1) xs
"splitAt - > unfused " [ 1 ] forall n xs .
Stream.splitAt n ( stream xs ) = splitAt n xs
"splitAt -> unfused" [1] forall n xs.
Stream.splitAt n (stream xs) = splitAt n xs
| /O(n)/,/fusion/. 'takeWhile', applied to a predicate @p@ and a list @xs@, returns the
longest prefix (possibly empty) of @xs@ of elements that satisfy @p@:
"takeWhile - > unfused " [ 1 ] forall f xs .
unstream ( Stream.takeWhile f ( stream xs ) ) = takeWhile f xs
"takeWhile -> unfused" [1] forall f xs.
unstream (Stream.takeWhile f (stream xs)) = takeWhile f xs
> dropWhile (< 0) [1,2,3] == [1,2,3]
"dropWhile - > unfused " [ 1 ] forall f xs .
unstream ( Stream.dropWhile f ( stream xs ) ) = dropWhile f xs
"dropWhile -> unfused" [1] forall f xs.
unstream (Stream.dropWhile f (stream xs)) = dropWhile f xs
| 'span', applied to a predicate @p@ and a list @xs@, returns a tuple where
> span (< 0) [1,2,3] == ([],[1,2,3])
Hmm, these do a lot of sharing, but is it worth it?
| 'break', applied to a predicate @p@ and a list @xs@, returns a tuple where
| The 'group' function takes a list and returns a list of lists such
that the concatenation of the result is equal to the argument. Moreover,
each sublist in the result contains only equal elements. For example,
It is a special case of 'groupBy', which allows the programmer to supply
their own equality test.
| The 'inits' function returns all initial segments of the argument,
| The 'tails' function returns all final segments of the argument,
longest first. For example,
----------------------------------------------------------------------
* Predicates
"isPrefixOf - > unfused " [ 1 ] forall xs ys .
Stream.isPrefixOf ( stream xs ) ( stream ys ) = #
"isPrefixOf -> unfused" [1] forall xs ys.
Stream.isPrefixOf (stream xs) (stream ys) = isPrefixOf xs ys
Both lists must be finite.
Example:
---------------------------------------------------------------------
* Searching lists
** Searching by equality
| /O(n)/, /fusion/. 'elem' is the list membership predicate, usually written
in infix form, e.g., @x `elem` xs@.
"elem - > unfused " [ 1 ] forall x xs .
Stream.elem x ( stream xs ) = elem x xs
"elem -> unfused" [1] forall x xs.
Stream.elem x (stream xs) = elem x xs
| /O(n)/, /fusion/. 'notElem' is the negation of 'elem'.
We do n't provide an expicilty fusible version , since not . elem is
just as good .
We don't provide an expicilty fusible version, since not . elem is
just as good.
| /O(n)/,/fusion/. 'lookup' @key assocs@ looks up a key in an association list.
"lookup - > unfused " [ 1 ] forall x xs .
Stream.lookup x ( stream xs ) = lookup x xs
"lookup -> unfused" [1] forall x xs.
Stream.lookup x (stream xs) = lookup x xs
| /O(n)/,/fusion/. 'filter', applied to a predicate and a list, returns the list of
those elements that satisfy the predicate; i.e.,
> filter p xs = [ x | x <- xs, p x]
Properties:
"filter - > unfused " [ 1 ] forall f xs .
unstream ( Stream.filter f ( stream xs ) ) = filter f xs
"filter -> unfused" [1] forall f xs.
unstream (Stream.filter f (stream xs)) = filter f xs
----------------------------------------------------------------------
** Searching with a predicate
| /O(n)/,/fusion/. The 'find' function takes a predicate and a list and returns the
there is no such element.
"find - > unfused " [ 1 ] forall f xs .
Stream.find f ( stream xs ) = find f xs
"find -> unfused" [1] forall f xs.
Stream.find f (stream xs) = find f xs
| The 'partition' function takes a predicate a list and returns
the pair of lists of elements which do and do not satisfy the
predicate, respectively; i.e.,
> partition p xs == (filter p xs, filter (not . p) xs)
----------------------------------------------------------------------
* Indexing lists
| /O(n)/,/fusion/. List index (subscript) operator, starting from 0.
It is an instance of the more general 'Data.List.genericIndex',
which takes an index of any integral type.
" ! ! - > unfused " [ 1 ] forall -- Stream.index ( stream xs ) n = xs ! ! n
"!! -> unfused" [1] forall xs n.
Stream.index (stream xs) n = xs !! n
in the given list which is equal (by '==') to the query element,
or 'Nothing' if there is no such element.
Properties:
> elemIndex x xs = listToMaybe [ n | (n,a) <- zip [0..] xs, a == x ]
> elemIndex x xs = findIndex (x==) xs
# INLINE elemIndex #
# NOINLINE [1] elemIndex #
| /O(n)/,/fusion/. The 'elemIndices' function extends 'elemIndex', by
returning the indices of all elements equal to the query element, in
ascending order.
Properties:
# NOINLINE [1] elemIndices #
| The 'findIndex' function takes a predicate and a list and returns
or 'Nothing' if there is no such element.
Properties:
> findIndex p xs = listToMaybe [ n | (n,x) <- zip [0..] xs, p x ]
" findIndex - > unfused " [ 1 ] forall f xs .
Stream.findIndex f ( stream xs ) = findIndex f xs
"findIndex -> unfused" [1] forall f xs.
Stream.findIndex f (stream xs) = findIndex f xs
| /O(n)/,/fusion/. The 'findIndices' function extends 'findIndex', by
returning the indices of all elements satisfying the predicate, in
ascending order.
Properties:
" findIndices - > unfused " [ 1 ] forall p xs .
unstream ( Stream.findIndices p ( stream xs ) ) = findIndices p xs
"findIndices -> unfused" [1] forall p xs.
unstream (Stream.findIndices p (stream xs)) = findIndices p xs
----------------------------------------------------------------------
* Zipping and unzipping lists
the longer list are discarded.
Properties:
> zip a b = zipWith (,) a b
" zip - > unfused " [ 1 ] forall xs ys .
unstream ( Stream.zip ( stream xs ) ( stream ys ) ) = zip xs ys
"zip -> unfused" [1] forall xs ys.
unstream (Stream.zip (stream xs) (stream ys)) = zip xs ys
triples, analogous to 'zip'.
Properties:
> zip3 a b c = zipWith (,,) a b c
" zip3 - > unfused " [ 1 ] forall xs ys zs .
unstream ( Stream.zipWith3 ( , , ) ( stream xs ) ( stream ys ) ( stream zs ) ) = zip3 xs ys zs
"zip3 -> unfused" [1] forall xs ys zs.
unstream (Stream.zipWith3 (,,) (stream xs) (stream ys) (stream zs)) = zip3 xs ys zs
quadruples, analogous to 'zip'.
analogous to 'zip'.
| /O(n)/,/fusion/. 'zipWith' generalises 'zip' by zipping with the
list of corresponding sums.
Properties:
> zipWith (,) = zip
a loop, why? Do we have some dodgy recursive rules somewhere?
" zipWith - > unfused " [ 1 ] forall f xs ys .
unstream ( Stream.zipWith f ( stream xs ) ( stream ys ) ) = zipWith f xs ys
"zipWith -> unfused" [1] forall f xs ys.
unstream (Stream.zipWith f (stream xs) (stream ys)) = zipWith f xs ys
| /O(n)/,/fusion/. The 'zipWith3' function takes a function which
their point-wise combination, analogous to 'zipWith'.
Properties:
> zipWith3 (,,) = zip3
" zipWith3 - > unfused " [ 1 ] forall f xs ys zs .
unstream ( Stream.zipWith3 f ( stream xs ) ( stream ys ) ( stream zs ) ) = zipWith3 f xs ys zs
"zipWith3 -> unfused" [1] forall f xs ys zs.
unstream (Stream.zipWith3 f (stream xs) (stream ys) (stream zs)) = zipWith3 f xs ys zs
combination, analogous to 'zipWith'.
" zipWith4 - > unfused " [ 1 ] forall f ws xs ys zs .
unstream ( Stream.zipWith4 f ( stream ws ) ( stream xs ) ( stream ys ) ( stream zs ) ) =
"zipWith4 -> unfused" [1] forall f ws xs ys zs.
unstream (Stream.zipWith4 f (stream ws) (stream xs) (stream ys) (stream zs)) = zipWith4 f ws xs ys zs
combination, analogous to 'zipWith'.
combination, analogous to 'zipWith'.
combination, analogous to 'zipWith'.
----------------------------------------------------------------------
unzips
lists, analogous to 'unzip'.
lists, analogous to 'unzip'.
lists, analogous to 'unzip'.
lists, analogous to 'unzip'.
----------------------------------------------------------------------
* Special lists
** Functions on strings
| /O(n)/,/fusion/. 'lines' breaks a string up into a list of strings
at newline characters. The resulting strings do not contain
newlines.
TODO: can we do better than this and preserve the same strictness?
-- This implementation is fast but too strict :-(
-- it doesn't yield each line until it has seen the ending '\n'
-- | Break a string into a list of lines, splitting at newline
-- characters; the '\n' separators are not included in the results.
-- NOTE(review): each line is built in an accumulator and reversed at
-- its '\n' (or end of input), so a line is not produced until its
-- terminator has been seen -- stricter than the Prelude version.
lines :: String -> [String]
lines [] = []
lines cs0 = go [] cs0
  where
    -- 'l' holds the characters of the current line, in reverse order.
    go l [] = reverse l : []
    go l ('\n':cs) = reverse l : case cs of
                       [] -> []
                       _ -> go [] cs
    go l ( c :cs) = go (c:l) cs
| 'words' breaks a string up into a list of words, which were delimited
by white space.
TODO: can we do better than this and preserve the same strictness?
-- This implementation is fast but too strict :-(
-- it doesn't yield each word until it has seen the ending space
-- | Break a string into a list of words, delimited by white space.
-- NOTE(review): a word is accumulated in reverse and emitted only when
-- the space after it (or end of input) is seen -- stricter than the
-- Prelude version.
words cs0 = dropSpaces cs0
  where
    -- Skip white space until the start of the next word.
    dropSpaces :: String -> [String]
    dropSpaces [] = []
    dropSpaces (c:cs)
        | isSpace c = dropSpaces cs
        | otherwise = munchWord [c] cs
    -- 'w' holds the characters of the current word, in reverse order.
    munchWord :: String -> String -> [String]
    munchWord w [] = reverse w : []
    munchWord w (c:cs)
        | isSpace c = reverse w : dropSpaces cs
        | otherwise = munchWord (c:w) cs
| /O(n)/,/fusion/. 'unlines' is an inverse operation to 'lines'.
It joins lines, after appending a terminating newline to each.
> unlines xs = concatMap (++"\n")
fuse via:
unlines xs = concatMap (snoc xs '\n')
| 'unwords' is an inverse operation to 'words'.
It joins words with separating spaces.
----------------------------------------------------------------------
** \"Set\" operations
| The 'nub' function removes duplicate elements from a list.
It is a special case of 'nubBy', which allows the programmer to supply
their own equality test.
RULES
-- ndm's optimisation
"sort/nub" forall xs. sort (nub xs) = map head (group (sort xs))
For example,
It is a special case of 'deleteBy', which allows the programmer to
supply their own equality test.
| The '\\' function is list difference (non-associative).
@ys@ in turn (if any) has been removed from @xs@. Thus
> (xs ++ ys) \\ xs == ys.
It is a special case of 'deleteFirstsBy', which allows the programmer
to supply their own equality test.
For example,
> "dog" `union` "cow" == "dogcw"
the result.
It is a special case of 'unionBy', which allows the programmer to supply
their own equality test.
For example,
It is a special case of 'intersectBy', which allows the programmer to
supply their own equality test.
----------------------------------------------------------------------
** Ordered lists
| The 'sort' function implements a stable sorting algorithm.
It is a special case of 'sortBy', which allows the programmer to supply
their own comparison function.
Properties:
> not (null x) ==> (head . sort) x = minimum x
> not (null x) ==> (last . sort) x = maximum x
| /O(n)/,/fusion/. The 'insert' function takes an element and a list and inserts the
element into the list at the last position where it is still less
than or equal to the next element. In particular, if the list
is sorted before the call, the result will also be sorted.
It is a special case of 'insertBy', which allows the programmer to
supply their own comparison function.
----------------------------------------------------------------------
* Generalized functions
** The \"By\" operations
| The 'nubBy' function behaves just like 'nub', except it uses a
user-supplied equality predicate instead of the overloaded '=='
function.
Not exported:
Note that we keep the call to `eq` with arguments in the
same order as in the reference implementation
'xs' is the list of things we've seen so far,
'y' is the potential new element
| The 'deleteBy' function behaves like 'delete', but takes a
user-supplied equality predicate.
| The 'unionBy' function is the non-overloaded version of 'union'.
| The 'intersectBy' function is the non-overloaded version of 'intersect'.
| The 'groupBy' function is the non-overloaded version of 'group'.
----------------------------------------------------------------------
| The 'sortBy' function is the non-overloaded version of 'sort'.
| /O(n)/,/fusion/. The non-overloaded version of 'insert'.
" insertBy - > unfused " [ 1 ] forall f x xs .
unstream ( Stream.insertBy f x ( stream xs ) ) = insertBy f x xs
"insertBy -> unfused" [1] forall f x xs.
unstream (Stream.insertBy f x (stream xs)) = insertBy f x xs
| /O(n)/,/fusion/. The 'maximumBy' function takes a comparison function and a list
and returns the greatest element of the list by the comparison function.
The list must be finite and non-empty.
" maximumBy - > unfused " [ 1 ] forall p xs .
Stream.maximumBy p ( stream xs ) = xs
"maximumBy -> unfused" [1] forall p xs.
Stream.maximumBy p (stream xs) = maximumBy p xs
| /O(n)/,/fusion/. The 'minimumBy' function takes a comparison function and a list
and returns the least element of the list by the comparison function.
The list must be finite and non-empty.
" minimumBy - > unfused " [ 1 ] forall p xs .
( stream xs ) = minimumBy p xs
"minimumBy -> unfused" [1] forall p xs.
Stream.minimumBy p (stream xs) = minimumBy p xs
----------------------------------------------------------------------
* The \"generic\" operations
particular, instead of returning an 'Int', it returns any type which is
" genericLength - > unfused " [ 1 ] forall xs .
Stream.genericLength ( stream xs ) = genericLength xs
"genericLength -> unfused" [1] forall xs.
Stream.genericLength (stream xs) = genericLength xs
| /O(n)/,/fusion/. The 'genericTake' function is an overloaded version of 'take', which
accepts any 'Integral' value as the number of elements to take.
" genericTake - > unfused " [ 1 ] forall -- unstream ( Stream.genericTake n ( stream xs ) ) = genericTake n xs
"genericTake -> unfused" [1] forall xs n.
unstream (Stream.genericTake n (stream xs)) = genericTake n xs
# RULES
"genericTake -> take/Int" genericTake = take :: Int -> [a] -> [a]
#
| /O(n)/,/fusion/. The 'genericDrop' function is an overloaded version of 'drop', which
accepts any 'Integral' value as the number of elements to drop.
" genericDrop - > unfused " [ 1 ] forall -- unstream ( Stream.genericDrop n ( stream xs ) ) = genericDrop n xs
"genericDrop -> unfused" [1] forall xs n.
unstream (Stream.genericDrop n (stream xs)) = genericDrop n xs
# RULES
"genericDrop -> drop/Int" genericDrop = drop :: Int -> [a] -> [a]
#
| /O(n)/,/fusion/. The 'genericIndex' function is an overloaded version of '!!', which
accepts any 'Integral' value as the index.
can we pull the n > 0 test out and do it just once?
probably not since we don't know what n-1 does!!
can only specialise it for sane Integral instances :-(
" genericIndex - > unfused " [ 1 ] forall -- ( stream xs ) n = genericIndex n xs
"genericIndex -> unfused" [1] forall xs n.
Stream.genericIndex (stream xs) n = genericIndex n xs
# RULES
"genericIndex -> index/Int" genericIndex = (!!) :: [a] -> Int -> a
#
| /O(n)/,/fusion/. The 'genericSplitAt' function is an overloaded
version of 'splitAt', which accepts any 'Integral' value as the
position at which to split.
" genericSplitAt - > unfused " [ 1 ] forall -- Stream.genericSplitAt n ( stream xs ) = genericSplitAt n xs
"genericSplitAt -> unfused" [1] forall xs n.
Stream.genericSplitAt n (stream xs) = genericSplitAt n xs
# RULES
"genericSplitAt -> splitAt/Int" genericSplitAt = splitAt :: Int -> [a] -> ([a], [a])
#
| /O(n)/,/fusion/. The 'genericReplicate' function is an overloaded version of 'replicate',
which accepts any 'Integral' value as the number of repetitions to make.
---------------------------------------------------------------------
Common up near identical calls to `error' to reduce the number
constant strings created when compiled: | Copyright : ( c ) 2007
( c ) 2007 - 2013
A reimplementation of the standard list library to take advantage of
stream fusion , and new GHC optimisations . The fusion mechanism is
, and , ICFP 2007 .
* /Rewriting Haskell Strings/ , by , and
Roman Leshchinskiy , Practical Aspects of Declarative Languages
8th International Symposium , PADL 2007 , 2007 .
module Data.List.Stream (
$ fusion_intro
: : a = > [ a ] - > a
: : a = > [ a ] - > a
: : a = > [ a ] - > a
: : a = > [ a ] - > a
: : ( acc - > x - > ( acc , y ) ) - > acc - > [ x ] - > ( acc , [ y ] )
: : ( acc - > x - > ( acc , y ) ) - > acc - > [ x ] - > ( acc , [ y ] )
zip4,
zip5,
zip6,
zip7,
| The zipWith family generalises the zip family by zipping with the
function given as the first argument , instead of a tupling function .
zipWith4,
zipWith5,
zipWith6,
zipWith7,
unzip4,
unzip5,
unzip6,
unzip7,
: : a = > [ a ] - > [ a ]
: : a = > a - > [ a ] - > [ a ]
* * * User - supplied equality ( replacing an Eq context )
* * * User - supplied comparison ( replacing an context )
is a generalized version of a " Prelude " function .
: : i = > [ b ] - > i
helper for GHC.List
) where
#ifndef EXTERNAL_PACKAGE
import GHC.Base (Int, Eq(..), Ord(..), Ordering(..),
Bool(..), not, Ordering(..),
seq, otherwise, flip,
Monad(..),
Char, String,
Int(I#), Int#, (+#),
foldr, (++), map
)
import Data.Maybe (Maybe(..))
#else
import GHC.Exts (Int(I#), Int#, (+#))
import Prelude (Int,
Integral,
Num(..), Eq(..), Ord(..), Ordering(..),
Bool(..), not, Maybe(..), Char, String,
error, seq, otherwise, flip)
import Data.Char (isSpace)
#endif
import qualified Data.Stream as Stream
import Data.Stream (stream ,unstream)
#ifdef EXTERNAL_PACKAGE
infixr 5 ++
#endif
comment to fool
infixl 9 !!
infix 4 `elem`, `notElem`
$ fusion_intro
the fusion framework described in /Rewriting Haskell Strings/.
> ( unstream . mapS f . ) xs
When two or more fusible functions are in close proximity ( i.e.
> map f . map
> unstream . mapS f . . unstream . . stream
> unstream . mapS f . . stream
Notes on simplifer phasing
* This implies a NOINLINE [ 1 ] on the top level functions , so if ghc wants
" + + - > unfused " [ 1 ] forall xs ys .
RULES
" (: ) - > fusible " [ ~1 ] forall x xs .
x : xs = unstream ( Stream.cons x ( stream xs ) )
" (: ) - > unfused " [ 1 ] forall x xs .
unstream ( Stream.cons x ( stream xs ) ) = x : xs
"(:) -> fusible" [~1] forall x xs.
x : xs = unstream (Stream.cons x (stream xs))
"(:) -> unfused" [1] forall x xs.
unstream (Stream.cons x (stream xs)) = x : xs
-}
| /O(n)/ , /fusion/. Append two lists , i.e. ,
If the first list is not finite , the result is the first list .
The spine of the first list argument must be copied .
#ifdef EXTERNAL_PACKAGE
-- | Append two lists.  If the first list is infinite, the result is
-- the first list; the spine of the first argument is copied.
(++) :: [a] -> [a] -> [a]
(++) xs0 ys = go xs0
  where
    go []     = ys
    go (x:xs) = x : go xs
# NOINLINE [ 1 ] ( + + ) #
#endif
# RULES
" + + - > fused on 1st arg " [ ~1 ] forall xs ys .
unstream xs + + ys = " + + - > fused on 2nd arg " [ ~1 ] forall xs ys .
( unstream ys ) = unstream ( Stream.append xs ys )
" + + - > fused ( 1 ) " [ ~1 ] forall xs ys .
stream ( xs + + ys ) = Stream.append ( stream xs ) ( stream ys )
" + + - > fused ( 2 ) " [ ~1 ] forall xs ys .
stream ( ) = Stream.append xs ( stream ys )
" + + - > 1st arg empty " forall xs .
[ ] + + xs = xs
" + + - > 2nd arg empty " forall xs .
xs + + [ ] = xs
" + + / : " forall x xs ys .
( x : xs ) + + ys = x : ( xs + + ys )
#
"++ -> fused on 1st arg" [~1] forall xs ys.
unstream xs ++ ys = Stream.append1 xs ys
"++ -> fused on 2nd arg" [~1] forall xs ys.
Stream.append1 xs (unstream ys) = unstream (Stream.append xs ys)
"++ -> fused (1)" [~1] forall xs ys.
stream (xs ++ ys) = Stream.append (stream xs) (stream ys)
"++ -> fused (2)" [~1] forall xs ys.
stream (Stream.append1 xs ys) = Stream.append xs (stream ys)
"++ -> 1st arg empty" forall xs.
[] ++ xs = xs
"++ -> 2nd arg empty" forall xs.
xs ++ [] = xs
"++ / :" forall x xs ys.
(x:xs) ++ ys = x : (xs ++ ys)
#-}
| /O(1)/ , /fusion/. Extract the first element of a list , which must be
-- | Extract the first element of a list, which must be non-empty.
head :: [a] -> a
head xs = case xs of
            (x:_) -> x
            []    -> errorEmptyList "head"
# NOINLINE [ 1 ] head #
# RULES
" head - > fusible " [ ~1 ] forall xs .
head xs = Stream.head ( stream xs )
#
"head -> fusible" [~1] forall xs.
head xs = Stream.head (stream xs)
#-}
-- | Extract the last element of a list, which must be finite and
-- non-empty.
last :: [a] -> a
last []     = errorEmptyList "last"
last (x:xs) = go x xs
  where
    -- Carry the most recently seen element until the list runs out.
    go y []     = y
    go _ (z:zs) = go z zs
# NOINLINE [ 1 ] last #
# RULES
" last - > fusible " [ ~1 ] forall xs .
last xs = Stream.last ( stream xs )
#
"last -> fusible" [~1] forall xs.
last xs = Stream.last (stream xs)
#-}
-- | Extract the elements after the head of a list, which must be
-- non-empty.
tail :: [a] -> [a]
tail xs = case xs of
            (_:rest) -> rest
            []       -> errorEmptyList "tail"
# NOINLINE [ 1 ] tail #
# RULES
" tail - > fusible " [ ~1 ] forall xs .
tail xs = unstream ( Stream.tail ( stream xs ) )
#
"tail -> fusible" [~1] forall xs.
tail xs = unstream (Stream.tail (stream xs))
#-}
-- | Return all the elements of a list except the last one; the list
-- must be non-empty.
init :: [a] -> [a]
init []     = errorEmptyList "init"
init (x:xs) = go x xs
  where
    -- Emit each element only once a successor is known to exist.
    go _ []     = []
    go y (z:zs) = y : go z zs
# NOINLINE [ 1 ] init #
# RULES
" init - > fusible " [ ~1 ] forall xs .
init xs = unstream ( Stream.init ( stream xs ) )
#
"init -> fusible" [~1] forall xs.
init xs = unstream (Stream.init (stream xs))
#-}
-- | Test whether a list is empty.
null :: [a] -> Bool
null xs = case xs of
            []    -> True
            (_:_) -> False
# NOINLINE [ 1 ] null #
# RULES
" null - > fusible " [ ~1 ] forall xs .
null xs = Stream.null ( stream xs )
#
"null -> fusible" [~1] forall xs.
null xs = Stream.null (stream xs)
#-}
-- | The number of elements in a finite list, counted with an unboxed
-- 'Int#' accumulator so the loop itself allocates nothing.
length :: [a] -> Int
length xs0 = len xs0 0#
#ifndef __HADDOCK__
  where
    -- 'a#' is the count of elements seen so far; boxed only at the end.
    len :: [a] -> Int# -> Int
    len [] a# = I# a#
    len (_:xs) a# = len xs (a# +# 1#)
#endif
# NOINLINE [ 1 ] length #
# RULES
" length - > fusible " [ ~1 ] forall xs .
length xs = Stream.length ( stream xs )
#
"length -> fusible" [~1] forall xs.
length xs = Stream.length (stream xs)
#-}
| /O(n)/ , /fusion/. ' map ' is the list obtained by applying @f@ to each element
of @xs@ , i.e. ,
> map f . map = map ( f . )
#ifdef EXTERNAL_PACKAGE
-- | @map f xs@ is the list obtained by applying @f@ to each element
-- of @xs@.
map :: (a -> b) -> [a] -> [b]
map f xs0 = go xs0
  where
    go []     = []
    go (y:ys) = f y : go ys
# NOINLINE [ 1 ] map #
#endif
# RULES
" map - > fusible " [ ~1 ] forall f xs .
map f xs = unstream ( Stream.map f ( stream xs ) )
#
"map -> fusible" [~1] forall f xs.
map f xs = unstream (Stream.map f (stream xs))
#-}
-- | Reverse the elements of a finite list, accumulating strictly.
reverse :: [a] -> [a]
reverse xs0 = foldl' (\acc x -> x : acc) [] xs0
# INLINE reverse #
forceSpine : : [ a ] - > [ a ]
forceSpine xs = forceSpine ' xs ` seq ` xs
{ - # INLINE forceSpine #
forceSpine :: [a] -> [a]
forceSpine xs = forceSpine' xs `seq` xs
forceSpine' :: [a] -> ()
forceSpine' [] = ()
forceSpine' (_:xs') = forceSpine' xs'
# NOINLINE forceSpine ' #
-}
' that element between the elements of the list .
-- | Intersperse a separator element between the elements of a list.
intersperse :: a -> [a] -> [a]
intersperse _   []       = []
intersperse sep (y0:ys0) = y0 : go ys0
  where
    go []     = []
    go (y:ys) = sep : y : go ys
# NOINLINE [ 1 ] intersperse #
RULES
" intersperse - > fusible " [ ~1 ] forall x xs .
intersperse x xs = unstream ( Stream.intersperse x ( stream xs ) )
" intersperse - > unfused " [ 1 ] forall x xs .
unstream ( Stream.intersperse x ( stream xs ) ) = intersperse x xs
"intersperse -> fusible" [~1] forall x xs.
intersperse x xs = unstream (Stream.intersperse x (stream xs))
"intersperse -> unfused" [1] forall x xs.
unstream (Stream.intersperse x (stream xs)) = intersperse x xs
-}
| /O(n)/ , /fusion/. ' intercalate ' @xs xss@ is equivalent to @('concat ' ( ' intersperse ' xs xss))@.
-- | @intercalate xs xss@ is equivalent to
-- @('concat' ('intersperse' xs xss))@: it inserts @xs@ between the
-- lists in @xss@ and flattens the result.
intercalate :: [a] -> [[a]] -> [a]
intercalate sep xss = foldr (++) [] (intersperse sep xss)
# NOINLINE [ 1 ] intercalate #
intercalate _ [ ] = [ ]
intercalate sep ( xs0 : ) = go xs0 where
go [ ] xss = to xss
go ( x : xs ) xss = x : go xs xss
to [ ] = [ ]
to ( xs : xss ) = go ' sep xs xss
go ' [ ] xs xss = go xs xss
go ' ( s : ss ) xs xss = s : go ' ss xs xss
{ - # NOINLINE [ 1 ] intercalate #
intercalate _ [] = []
intercalate sep (xs0:xss0) = go xs0 xss0
where
go [] xss = to xss
go (x:xs) xss = x : go xs xss
to [] = []
to (xs:xss) = go' sep xs xss
go' [] xs xss = go xs xss
go' (s:ss) xs xss = s : go' ss xs xss
-}
RULES
" intercalate - > fusible " [ ~1 ] forall x xs .
intercalate x xs = Stream.concat ( Stream.intersperse x ( stream xs ) )
" intercalate - > unfused " [ 1 ] forall x xs .
Stream.concat ( Stream.intersperse x ( stream xs ) ) = intercalate x xs
"intercalate -> fusible" [~1] forall x xs.
intercalate x xs = Stream.concat (Stream.intersperse x (stream xs))
"intercalate -> unfused" [1] forall x xs.
Stream.concat (Stream.intersperse x (stream xs)) = intercalate x xs
-}
-- | Transpose the rows and columns of its argument.  Empty rows are
-- skipped, and ragged rows are tolerated: a column keeps only the
-- heads of the rows that still reach it.
transpose :: [[a]] -> [[a]]
transpose [] = []
transpose ([] : xss) = transpose xss
-- First output row: the heads of every non-empty row; then recurse on
-- the tails, which form the remaining rows.
transpose ((x:xs) : xss) = (x : [h | (h:_t) <- xss])
                         : transpose (xs : [ t | (_h:t) <- xss])
TODO fuse
-- | Left fold: reduce the list with @f@, threading an accumulator
-- from the left starting at @z0@.  Lazy in the accumulator.
foldl :: (a -> b -> a) -> a -> [b] -> a
foldl f z0 xs0 = go z0 xs0
  where
    go acc []     = acc
    go acc (x:xs) = go (f acc x) xs
# INLINE [ 1 ] foldl #
# RULES
" foldl - > fusible " [ ~1 ] forall f z xs .
foldl f z xs = Stream.foldl f z ( stream xs )
#
"foldl -> fusible" [~1] forall f z xs.
foldl f z xs = Stream.foldl f z (stream xs)
#-}
-- | A strict version of 'foldl': the bang patterns force the
-- accumulator at every step, so no chain of thunks is built.
foldl' :: (a -> b -> a) -> a -> [b] -> a
foldl' f z0 xs0 = go z0 xs0
#ifndef __HADDOCK__
  where
    go !z [] = z
    go !z (x:xs) = go (f z x) xs
#endif
# INLINE [ 1 ] foldl ' #
# RULES
" foldl ' - > fusible " [ ~1 ] forall f z xs .
foldl ' f z xs = Stream.foldl ' f z ( stream xs )
#
"foldl' -> fusible" [~1] forall f z xs.
foldl' f z xs = Stream.foldl' f z (stream xs)
#-}
| /O(n)/ , /fusion/. ' foldl1 ' is a variant of ' foldl ' that has no starting value argument ,
-- | A variant of 'foldl' that has no starting value argument and
-- uses the first element instead; the list must be non-empty.
foldl1 :: (a -> a -> a) -> [a] -> a
foldl1 _ []       = errorEmptyList "foldl1"
foldl1 f (x0:xs0) = go x0 xs0
  where
    go acc []     = acc
    go acc (x:xs) = go (f acc x) xs
# INLINE [ 1 ] foldl1 #
# RULES
" foldl1 - > fusible " [ ~1 ] forall f xs .
foldl1 f xs = Stream.foldl1 f ( stream xs )
#
"foldl1 -> fusible" [~1] forall f xs.
foldl1 f xs = Stream.foldl1 f (stream xs)
#-}
| /O(n)/ , /fusion/. A strict version of ' foldl1 '
-- | A strict version of 'foldl1': the accumulator is forced at every
-- step (bang patterns); the list must be non-empty.
foldl1' :: (a -> a -> a) -> [a] -> a
foldl1' _ [] = errorEmptyList "foldl1'"
foldl1' f (x0:xs0) = go x0 xs0
#ifndef __HADDOCK__
  where
    go !z [] = z
    go !z (x:xs) = go (f z x) xs
#endif
# INLINE [ 1 ] foldl1 ' #
# RULES
" foldl1 ' - > fusible " [ ~1 ] forall f xs .
foldl1 ' f xs = Stream.foldl1 ' f ( stream xs )
#
"foldl1' -> fusible" [~1] forall f xs.
foldl1' f xs = Stream.foldl1' f (stream xs)
#-}
#ifdef EXTERNAL_PACKAGE
-- | Right fold: replace each (:) in the list with @k@ and the final
-- [] with @z@.
foldr :: (a -> b -> b) -> b -> [a] -> b
foldr k z xs0 = go xs0
  where
    go []     = z
    go (y:ys) = k y (go ys)
#endif
# RULES
" foldr - > fusible " [ ~1 ] forall f z xs .
foldr f z xs = Stream.foldr f z ( stream xs )
#
"foldr -> fusible" [~1] forall f z xs.
foldr f z xs = Stream.foldr f z (stream xs)
#-}
| /O(n)/ , /fusion/. ' ' is a variant of ' foldr ' that has no starting value argument ,
-- | A variant of 'foldr' that has no starting value argument and
-- uses the last element instead; the list must be non-empty.
foldr1 :: (a -> a -> a) -> [a] -> a
foldr1 _ []       = errorEmptyList "foldr1"
foldr1 k (x0:xs0) = go x0 xs0
  where
    go y []     = y
    go y (z:zs) = k y (go z zs)
# RULES
" foldr1 - > fusible " [ ~1 ] forall f xs .
foldr1 f xs = Stream.foldr1 f ( stream xs )
#
"foldr1 -> fusible" [~1] forall f xs.
foldr1 f xs = Stream.foldr1 f (stream xs)
#-}
| /O(n)/ , /fusion/. a list of lists .
-- | Concatenate a list of lists.
concat :: [[a]] -> [a]
concat xss0 = to xss0
  where
    -- Emit the elements of the current inner list, then move on.
    go [] xss = to xss
    go (x:xs) xss = x : go xs xss
    -- Advance to the next inner list.  BUG FIX: the non-empty clause
    -- was missing, making 'to' partial (pattern-match failure on any
    -- non-empty input); restored to mirror the 'concatMap' helper.
    to [] = []
    to (xs:xss) = go xs xss
# NOINLINE [ 1 ] concat #
# RULES
" concat - > fused " [ ~1 ] forall xs .
concat xs = Stream.concat ( stream xs )
#
"concat -> fused" [~1] forall xs.
concat xs = Stream.concat (stream xs)
#-}
concatMap :: (a -> [b]) -> [a] -> [b]
# INLINE concatMap #
concatMap f as0 = to as0
where
go [ ] as = to as
go ( b : bs ) as = b : go bs as
to [ ] = [ ]
to ( a : as ) = go ( f a ) as
{ - # NOINLINE [ 1 ] concatMap #
concatMap f as0 = to as0
where
go [] as = to as
go (b:bs) as = b : go bs as
to [] = []
to (a:as) = go (f a) as
-}
RULES
" concatMap - > fusible " [ ~1 ] forall f xs .
concatMap f xs = Stream.concatMap f ( stream xs )
" concatMap - > unfused " [ 1 ] forall f xs .
Stream.concatMap f ( stream xs ) = concatMap f xs
"concatMap -> fusible" [~1] forall f xs.
concatMap f xs = Stream.concatMap f (stream xs)
"concatMap -> unfused" [1] forall f xs.
Stream.concatMap f (stream xs) = concatMap f xs
-}
| /O(n)/ , /fusion/. ' and ' returns the conjunction of a Boolean list . For the result to be
-- | The conjunction of a Boolean list.  For the result to be 'True'
-- the list must be finite; a 'False' element short-circuits.
and :: [Bool] -> Bool
and xs0 = go xs0
  where
    go []     = True
    go (b:bs) = if b then go bs else False
# NOINLINE [ 1 ] and #
# RULES
" and - > fused " [ ~1 ] forall xs .
and xs = Stream.and ( stream xs )
#
"and -> fused" [~1] forall xs.
and xs = Stream.and (stream xs)
#-}
| /O(n)/ , /fusion/. ' or ' returns the disjunction of a Boolean list . For the result to be
-- | The disjunction of a Boolean list.  For the result to be 'False'
-- the list must be finite; a 'True' element short-circuits.
or :: [Bool] -> Bool
or xs0 = go xs0
  where
    go []     = False
    go (b:bs) = if b then True else go bs
# NOINLINE [ 1 ] or #
# RULES
" or - > fused " [ ~1 ] forall xs .
or xs = Stream.or ( stream xs )
#
"or -> fused" [~1] forall xs.
or xs = Stream.or (stream xs)
#-}
-- | Determine whether any element of the list satisfies the
-- predicate; short-circuits at the first match.
any :: (a -> Bool) -> [a] -> Bool
any p xs0 = go xs0
  where
    go []     = False
    go (x:xs) = if p x then True else go xs
# NOINLINE [ 1 ] any #
TODO : check if being lazy in p is a cost ,
should we do [ ] as a special case and then strictly evaluate p ?
# RULES
" any - > fusible " [ ~1 ] forall f xs .
any f xs = Stream.any f ( stream xs )
#
"any -> fusible" [~1] forall f xs.
any f xs = Stream.any f (stream xs)
#-}
-- | Determine whether every element of the list satisfies the
-- predicate; short-circuits at the first failure.
all :: (a -> Bool) -> [a] -> Bool
all p xs0 = go xs0
  where
    go []     = True
    go (x:xs) = if p x then go xs else False
# NOINLINE [ 1 ] all #
# RULES
" all - > fusible " [ ~1 ] forall f xs .
all f xs = Stream.all f ( stream xs )
#
"all -> fusible" [~1] forall f xs.
all f xs = Stream.all f (stream xs)
#-}
-- | The sum of a finite list of numbers, accumulated from the left
-- starting at 0.  NOTE(review): the accumulator is lazy here (no bang,
-- compare 'sumInt'), so a thunk chain can build up.
sum :: Num a => [a] -> a
sum l = sum' l 0
#ifndef __HADDOCK__
  where
    sum' [] a = a
    sum' (x:xs) a = sum' xs (a+x)
#endif
# NOINLINE [ 1 ] sum #
-- | 'sum' specialised to @[Int]@, with a strict accumulator (bang
-- pattern) so no thunk chain is built.
sumInt :: [Int] -> Int
sumInt l = sum' l 0
#ifndef __HADDOCK__
  where
    sum' [] a = a
    sum' (x:xs) !a = sum' xs (a+x)
#endif
# NOINLINE [ 1 ] sumInt #
# RULES
" sum - > fusible " [ ~1 ] forall xs .
sum xs = Stream.sum ( stream xs )
#
"sum -> fusible" [~1] forall xs.
sum xs = Stream.sum (stream xs)
#-}
# RULES
" sumInt - > fusible " [ ~1 ] forall ( xs : : [ Int ] ) .
sumInt xs = Stream.sum ( stream xs )
#
"sumInt -> fusible" [~1] forall (xs :: [Int]).
sumInt xs = Stream.sum (stream xs)
#-}
-- | The product of a finite list of numbers, accumulated from the
-- left starting at 1.  NOTE(review): the accumulator is lazy here
-- (compare 'productInt').
product :: Num a => [a] -> a
product l = prod l 1
#ifndef __HADDOCK__
  where
    prod [] a = a
    prod (x:xs) a = prod xs (a*x)
#endif
# NOINLINE [ 1 ] product #
-- | 'product' specialised to @[Int]@, with a strict accumulator.
-- BUG FIX: the accumulator previously started at 0, which makes every
-- product 0; the multiplicative identity is 1 (as in 'product' above).
productInt :: [Int] -> Int
productInt l = product' l 1
#ifndef __HADDOCK__
  where
    product' [] a = a
    product' (x:xs) !a = product' xs (a*x)
#endif
# NOINLINE [ 1 ] productInt #
# RULES
" product - > fused " [ ~1 ] forall xs .
product xs = Stream.product ( stream xs )
#
"product -> fused" [~1] forall xs.
product xs = Stream.product (stream xs)
#-}
# RULES
" productInt - > fusible " [ ~1 ] forall ( xs : : [ Int ] ) .
productInt xs = Stream.product ( stream xs )
#
"productInt -> fusible" [~1] forall (xs :: [Int]).
productInt xs = Stream.product (stream xs)
#-}
-- | The largest element of a list, which must be finite and
-- non-empty.
maximum :: Ord a => [a] -> a
maximum xs = case xs of
               [] -> errorEmptyList "maximum"
               _  -> foldl1 max xs
# NOINLINE [ 1 ] maximum #
# RULES
" maximum - > fused " [ ~1 ] forall xs .
maximum xs = Stream.maximum ( stream xs )
#
"maximum -> fused" [~1] forall xs.
maximum xs = Stream.maximum (stream xs)
#-}
changing its semantics ( might not be strict ) , but we can for
# RULES
" maximumInt " maximum = ( strictMaximum : : [ Int ] - > Int ) ;
" maximumChar " maximum = ( strictMaximum : : [ )
#
"maximumInt" maximum = (strictMaximum :: [Int] -> Int);
"maximumChar" maximum = (strictMaximum :: [Char] -> Char)
#-}
-- | 'maximum' computed with a strict fold, for element types where
-- forcing the running maximum cannot change the result.
strictMaximum :: (Ord a) => [a] -> a
strictMaximum xs = case xs of
                     [] -> errorEmptyList "maximum"
                     _  -> foldl1' max xs
# NOINLINE [ 1 ] strictMaximum #
# RULES
" strictMaximum - > fused " [ ~1 ] forall xs .
strictMaximum xs = Stream.strictMaximum ( stream xs )
#
"strictMaximum -> fused" [~1] forall xs.
strictMaximum xs = Stream.strictMaximum (stream xs)
#-}
-- | The least element of a list, which must be finite and non-empty.
minimum :: Ord a => [a] -> a
minimum xs = case xs of
               [] -> errorEmptyList "minimum"
               _  -> foldl1 min xs
# NOINLINE [ 1 ] minimum #
# RULES
" minimum - > fused " [ ~1 ] forall xs .
minimum xs = Stream.minimum ( stream xs )
#
"minimum -> fused" [~1] forall xs.
minimum xs = Stream.minimum (stream xs)
#-}
# RULES
" minimumInt " minimum = ( strictMinimum : : [ Int ] - > Int ) ;
" minimumChar " minimum = ( strictMinimum : : [ )
#
"minimumInt" minimum = (strictMinimum :: [Int] -> Int);
"minimumChar" minimum = (strictMinimum :: [Char] -> Char)
#-}
-- | 'minimum' computed with a strict fold.
-- BUG FIX: the empty-list error previously reported "maximum"
-- (copy-paste from 'strictMaximum'); it now names the right function.
strictMinimum :: (Ord a) => [a] -> a
strictMinimum [] = errorEmptyList "minimum"
strictMinimum xs = foldl1' min xs
# NOINLINE [ 1 ] strictMinimum #
# RULES
" strictMinimum - > fused " [ ~1 ] forall xs .
strictMinimum xs = Stream.strictMinimum ( stream xs )
#
"strictMinimum -> fused" [~1] forall xs.
strictMinimum xs = Stream.strictMinimum (stream xs)
#-}
| /O(n)/ , /fusion/. ' ' is similar to ' foldl ' , but returns a list of successive
> f z [ x1 , x2 , ... ] = = [ z , z ` f ` x1 , ( z ` f ` x1 ) ` f ` x2 , ... ]
> last ( f z xs ) = = foldl f z x
-- | 'scanl' is similar to 'foldl', but returns the list of successive
-- reduced values from the left:
-- @last (scanl f z xs) == foldl f z xs@.
-- NOTE(review): @q@ is emitted before the list is inspected, so the
-- first element is available even for an undefined tail.
scanl :: (a -> b -> a) -> a -> [b] -> [a]
scanl f q ls = q : case ls of
                     [] -> []
                     x:xs -> scanl f (f q x) xs
# INLINE [ 1 ] scanl #
or perhaps :
f q xs0 = q : go q xs0
where go q [ ] = [ ]
go q ( x : xs ) = let q ' = f q x
in q ' : go q ' xs
scanl f q xs0 = q : go q xs0
where go q [] = []
go q (x:xs) = let q' = f q x
in q' : go q' xs
-}
note : 's ' scan ' is a bit weird , as it always puts the initial
# RULES
" - > fusible " [ ~1 ] forall f z xs .
f z xs = unstream ( Stream.scanl f z ( Stream.snoc ( stream xs ) bottom ) )
#
"scanl -> fusible" [~1] forall f z xs.
scanl f z xs = unstream (Stream.scanl f z (Stream.snoc (stream xs) bottom))
#-}
| /O(n)/,/fusion/. ' ' is a variant of ' ' that has no starting value argument :
> f [ x1 , x2 , ... ] = = [ x1 , x1 ` f ` x2 , ... ]
-- | A variant of 'scanl' that uses the first element of the list as
-- the starting value; yields @[]@ on @[]@.
scanl1 :: (a -> a -> a) -> [a] -> [a]
scanl1 _ []     = []
scanl1 f (x:xs) = scanl f x xs
# RULES
" scanl1 - > fusible " [ ~1 ] forall f xs .
scanl1 f xs = unstream ( Stream.scanl1 f ( Stream.snoc ( stream xs ) bottom ) )
#
"scanl1 -> fusible" [~1] forall f xs.
scanl1 f xs = unstream (Stream.scanl1 f (Stream.snoc (stream xs) bottom))
#-}
| /O(n)/. ' scanr ' is the right - to - left dual of ' ' .
> head ( scanr f z xs ) = = foldr f z xs
-- | The right-to-left dual of 'scanl':
-- @head (scanr f z xs) == foldr f z xs@.
scanr :: (a -> b -> b) -> b -> [a] -> [b]
scanr _ q0 [] = [q0]
scanr f q0 (x:xs) = f x q : qs
  -- Lazy pattern on the recursive result: 'q' is its head, and 'qs'
  -- is shared as the tail of the output.
  where qs@(q:_) = scanr f q0 xs
# INLINE [ 1 ] scanr #
RULES
" scanr - > fusible " [ ~1 ] forall f z xs .
scanr f z xs = unstream ( Stream.scanr f z ( Stream.cons bottom ( stream xs ) ) )
" scanr - > unfused " [ 1 ] forall f z xs .
unstream ( Stream.scanr f z ( Stream.cons bottom ( stream xs ) ) ) = scanr f z xs
"scanr -> fusible" [~1] forall f z xs.
scanr f z xs = unstream (Stream.scanr f z (Stream.cons bottom (stream xs)))
"scanr -> unfused" [1] forall f z xs.
unstream (Stream.scanr f z (Stream.cons bottom (stream xs))) = scanr f z xs
-}
| ' ' is a variant of ' scanr ' that has no starting value argument .
-- | A variant of 'scanr' that has no starting value argument;
-- yields @[]@ on @[]@.
scanr1 :: (a -> a -> a) -> [a] -> [a]
scanr1 _ [] = []
scanr1 _ [x] = [x]
scanr1 f (x:xs) = f x q : qs
  -- 'qs' is the scan of the tail; its head 'q' combines with 'x'.
  where qs@(q:_) = scanr1 f xs
TODO fuse
-- | A combination of 'map' and 'foldl': apply a function to each
-- element, threading an accumulator from left to right, and return
-- the final accumulator together with the new list.
mapAccumL :: (acc -> x -> (acc, y)) -> acc -> [x] -> (acc, [y])
mapAccumL _ s [] = (s, [])
mapAccumL f s (x:xs) = (s'',y:ys)
  -- s' is the accumulator after this element; s'' after the rest.
  where (s', y ) = f s x
        (s'',ys) = mapAccumL f s' xs
TODO fuse
-- | A combination of 'map' and 'foldr': apply a function to each
-- element, threading an accumulator from right to left, and return
-- the final accumulator together with the new list.
mapAccumR :: (acc -> x -> (acc, y)) -> acc -> [x] -> (acc, [y])
mapAccumR _ s [] = (s, [])
mapAccumR f s (x:xs) = (s'', y:ys)
  -- s' is the accumulator from the rest of the list; the current
  -- element is processed last.
  where (s'',y ) = f s' x
        (s', ys) = mapAccumR f s xs
TODO fuse
| /fusion/. ' iterate ' @f returns an infinite list of repeated applications
of to @x@ :
-- | @iterate f x@ returns the infinite list @[x, f x, f (f x), ...]@.
iterate :: (a -> a) -> a -> [a]
iterate f = go
  where
    go x = x : go (f x)
# NOINLINE [ 1 ] iterate #
# RULES
" iterate - > fusible " [ ~1 ] forall f x.
iterate f x = unstream ( Stream.iterate f x )
#
"iterate -> fusible" [~1] forall f x.
iterate f x = unstream (Stream.iterate f x)
#-}
-- | @repeat x@ is an infinite list with @x@ the value of every
-- element.  The where-binding ties the knot, so the whole list is one
-- shared, self-referential cons cell.
repeat :: a -> [a]
repeat x = xs where xs = x : xs
# INLINE [ 1 ] repeat #
# RULES
" repeat - > fusible " [ ~1 ] forall x.
repeat x = unstream ( Stream.repeat x )
#
"repeat -> fusible" [~1] forall x.
repeat x = unstream (Stream.repeat x)
#-}
| /O(n)/ , /fusion/. ' replicate ' @n is a list of length @n@ with @x@ the value of
It is an instance of the more general ' Data . List.genericReplicate ' ,
in which @n@ may be of any integral type .
-- | @replicate n x@ is a list of length @n@ with @x@ the value of
-- every element; yields @[]@ for @n <= 0@.
replicate :: Int -> a -> [a]
replicate n0 _ | n0 <= 0 = []
replicate n0 x = build n0
  where
    build 0 = []
    build k = x : build (k-1)
# NOINLINE [ 1 ] replicate #
# RULES
" replicate - > fusible " [ ~1 ]
replicate = - > unstream ( Stream.replicate n x )
#
"replicate -> fusible" [~1]
replicate = \n x -> unstream (Stream.replicate n x)
#-}
-- | @cycle xs@ is the infinite repetition of the finite list @xs@.
-- It is an error on the empty list.
cycle :: [a] -> [a]
cycle [] = error "Prelude.cycle: empty list"
cycle xs0 = go xs0
  where
    -- Restart from the original list whenever the walk runs out.
    go [] = go xs0
    go (x:xs) = x : go xs
# NOINLINE [ 1 ] cycle #
# RULES
" cycle - > fusible " [ ~1 ] forall xs .
cycle xs = unstream ( Stream.cycle ( stream xs ) )
#
"cycle -> fusible" [~1] forall xs.
cycle xs = unstream (Stream.cycle (stream xs))
#-}
case , @a@ is a prepended to the list and @b@ is used as the next
> iterate f = = unfoldr ( \x - > Just ( x , f x ) )
> unfoldr ( \b - > if b = = 0 then Nothing else Just ( b , b-1 ) ) 10
-- | Build a list from a seed: @f@ yields either 'Nothing' (stop) or
-- @Just (a, b')@, where @a@ is prepended to the list and @b'@ is the
-- next seed.
unfoldr :: (b -> Maybe (a, b)) -> b -> [a]
unfoldr f b0 = go b0
  where
    go b = case f b of
             Nothing      -> []
             Just (a, b') -> a : go b'
# INLINE [ 1 ] unfoldr #
# RULES
" unfoldr - > fusible " [ ~1 ] forall f x.
unfoldr f x = unstream ( Stream.unfoldr f x )
#
"unfoldr -> fusible" [~1] forall f x.
unfoldr f x = unstream (Stream.unfoldr f x)
#-}
of length @n@ , or @xs@ itself if @n > ' length ' xs@ :
> take 5 " Hello World ! " = = " Hello "
> take 3 [ 1,2,3,4,5 ] = = [ 1,2,3 ]
> take 3 [ 1,2 ] = = [ 1,2 ]
> take 3 [ ] = = [ ]
> take 0 [ 1,2 ] = = [ ]
It is an instance of the more general ' Data . List.genericTake ' ,
in which @n@ may be of any integral type .
-- | @take n xs@ is the prefix of @xs@ of length @n@, or @xs@ itself if
-- @n > length xs@; non-positive @n@ yields @[]@.
take :: Int -> [a] -> [a]
take i _ | i <= 0 = []
take i ls = take' i ls
  where
    -- Worker assumes its count argument is positive on entry.
    take' :: Int -> [a] -> [a]
    take' 0 _ = []
    take' _ [] = []
    take' n (x:xs) = x : take' (n-1) xs
# NOINLINE [ 1 ] take #
# RULES
" take - > fusible " [ ~1 ] forall n x.
take n x = unstream ( Stream.take n ( stream x ) )
#
"take -> fusible" [~1] forall n x.
take n x = unstream (Stream.take n (stream x))
#-}
take : : Int - > [ a ] - > [ a ]
take ( I # n # ) xs = takeUInt n # xs
takeUInt : : Int # - > [ b ] - > [ b ]
takeUInt n xs
| n > = # 0 # = take_unsafe_UInt n xs
| otherwise = [ ]
take_unsafe_UInt : : Int # - > [ b ] - > [ b ]
take_unsafe_UInt 0 # _ = [ ]
take_unsafe_UInt m ls =
case ls of
[ ] - > [ ]
( x : xs ) - > x : take_unsafe_UInt ( m - # 1 # ) xs
take :: Int -> [a] -> [a]
take (I# n#) xs = takeUInt n# xs
takeUInt :: Int# -> [b] -> [b]
takeUInt n xs
| n >=# 0# = take_unsafe_UInt n xs
| otherwise = []
take_unsafe_UInt :: Int# -> [b] -> [b]
take_unsafe_UInt 0# _ = []
take_unsafe_UInt m ls =
case ls of
[] -> []
(x:xs) -> x : take_unsafe_UInt (m -# 1#) xs
-}
| /O(n)/,/fusion/. ' drop ' @n returns the suffix of @xs@
after the first @n@ elements , or @[]@ if @n > ' length ' xs@ :
> drop 6 " Hello World ! " = = " World ! "
> drop 3 [ 1,2,3,4,5 ] = = [ 4,5 ]
> drop 3 [ 1,2 ] = = [ ]
> drop 3 [ ] = = [ ]
> drop 0 [ 1,2 ] = = [ 1,2 ]
in which @n@ may be of any integral type .
-- | @drop n xs@ is the suffix of @xs@ after the first @n@ elements, or
-- @[]@ if @n > length xs@; negative @n@ returns @xs@ unchanged.
drop :: Int -> [a] -> [a]
drop n ls
  | n < 0 = ls
  | otherwise = drop' n ls
  where
    -- Worker assumes a non-negative count.
    drop' :: Int -> [a] -> [a]
    drop' 0 xs = xs
    drop' _ xs@[] = xs
    drop' m (_:xs) = drop' (m-1) xs
# NOINLINE [ 1 ] drop #
# RULES
" drop - > fusible " [ ~1 ] forall n x.
drop n x = unstream ( Stream.drop n ( stream x ) )
#
"drop -> fusible" [~1] forall n x.
drop n x = unstream (Stream.drop n (stream x))
#-}
| ' splitAt ' @n returns a tuple where first element is @xs@ prefix of
length @n@ and second element is the remainder of the list :
> splitAt 6 " Hello World ! " = = ( " Hello " , " World ! " )
> splitAt 3 [ 1,2,3,4,5 ] = = ( [ 1,2,3],[4,5 ] )
> splitAt 1 [ 1,2,3 ] = = ( [ 1],[2,3 ] )
> splitAt 3 [ 1,2,3 ] = = ( [ 1,2,3 ] , [ ] )
> splitAt 4 [ 1,2,3 ] = = ( [ 1,2,3 ] , [ ] )
in which @n@ may be of any integral type .
-- | @splitAt n xs@ is @(take n xs, drop n xs)@ computed in one pass;
-- negative @n@ yields @([], xs)@.
splitAt :: Int -> [a] -> ([a], [a])
splitAt n ls
  | n < 0 = ([], ls)
  | otherwise = splitAt' n ls
  where
    splitAt' :: Int -> [a] -> ([a], [a])
    splitAt' 0 xs = ([], xs)
    splitAt' _ xs@[] = (xs, xs)   -- both halves empty; shares the [] node
    splitAt' m (x:xs) = (x:xs', xs'')
      where
        (xs', xs'') = splitAt' (m-1) xs
# NOINLINE [ 1 ] splitAt #
# RULES
" splitAt - > fusible " [ ~1 ] forall n xs .
splitAt n xs = Stream.splitAt n ( stream xs )
#
"splitAt -> fusible" [~1] forall n xs.
splitAt n xs = Stream.splitAt n (stream xs)
#-}
> ( < 3 ) [ 1,2,3,4,1,2,3,4 ] = = [ 1,2 ]
> ( < 9 ) [ 1,2,3 ] = = [ 1,2,3 ]
> ( < 0 ) [ 1,2,3 ] = = [ ]
-- | @takeWhile p xs@ is the longest prefix of @xs@ whose elements all
-- satisfy @p@.
takeWhile :: (a -> Bool) -> [a] -> [a]
takeWhile _ [] = []
takeWhile p xs0 = go xs0
  where
    go [] = []
    go (x:xs)
      | p x = x : go xs
      | otherwise = []       -- stop at the first element failing p
# NOINLINE [ 1 ] takeWhile #
# RULES
" takeWhile - > fusible " [ ~1 ] forall f xs .
takeWhile f xs = unstream ( Stream.takeWhile f ( stream xs ) )
#
"takeWhile -> fusible" [~1] forall f xs.
takeWhile f xs = unstream (Stream.takeWhile f (stream xs))
#-}
| /O(n)/,/fusion/. ' dropWhile ' @p xs@ returns the suffix remaining after ' takeWhile ' @p xs@ :
> dropWhile ( < 3 ) [ 1,2,3,4,5,1,2,3 ] = = [ 3,4,5,1,2,3 ]
> dropWhile ( < 9 ) [ 1,2,3 ] = = [ ]
-- | @dropWhile p xs@ is the suffix remaining after @takeWhile p xs@.
dropWhile :: (a -> Bool) -> [a] -> [a]
dropWhile _ [] = []
dropWhile p xs0 = go xs0
  where
    go [] = []
    go xs@(x:xs')
      | p x = go xs'
      | otherwise = xs       -- return the shared suffix, no rebuilding
# NOINLINE [ 1 ] dropWhile #
# RULES
" dropWhile - > fusible " [ ~1 ] forall f xs .
dropWhile f xs = unstream ( Stream.dropWhile f ( stream xs ) )
#
"dropWhile -> fusible" [~1] forall f xs.
dropWhile f xs = unstream (Stream.dropWhile f (stream xs))
#-}
first element is longest prefix ( possibly empty ) of @xs@ of elements that
satisfy @p@ and second element is the remainder of the list :
> span ( < 3 ) [ 1,2,3,4,1,2,3,4 ] = = ( [ 1,2],[3,4,1,2,3,4 ] )
> span ( < 9 ) [ 1,2,3 ] = = ( [ 1,2,3 ] , [ ] )
' span ' @p xs@ is equivalent to @('takeWhile ' p xs , ' dropWhile ' p xs)@
-- | @span p xs@ is @(takeWhile p xs, dropWhile p xs)@ computed in one pass.
span :: (a -> Bool) -> [a] -> ([a], [a])
span _ [] = ([], [])
span p xs0 = go xs0
  where
    go [] = ([], [])
    go xs@(x:xs')
      | p x = let (ys,zs) = go xs'
              in (x:ys,zs)
      | otherwise = ([],xs)  -- remainder shares the original suffix
TODO fuse
first element is longest prefix ( possibly empty ) of @xs@ of elements that
/do not satisfy/ @p@ and second element is the remainder of the list :
> break ( > 3 ) [ 1,2,3,4,1,2,3,4 ] = = ( [ 1,2,3],[4,1,2,3,4 ] )
> break ( < 9 ) [ 1,2,3 ] = = ( [ ] , [ 1,2,3 ] )
> break ( > 9 ) [ 1,2,3 ] = = ( [ 1,2,3 ] , [ ] )
' break ' @p@ is equivalent to @'span ' ( ' not ' . p)@.
-- | @break p xs@ splits @xs@ at the first element satisfying @p@;
-- equivalent to @span (not . p) xs@.
break :: (a -> Bool) -> [a] -> ([a], [a])
break _ [] = ([], [])
break p xs0 = go xs0
  where
    go [] = ([], [])
    go xs@(x:xs')
      | p x = ([],xs)        -- first match: prefix ends, suffix is shared
      | otherwise = let (ys,zs) = go xs'
                    in (x:ys,zs)
TODO fuse
> group " Mississippi " = [ " M","i","ss","i","ss","i","pp","i " ]
-- | 'group' splits a list into runs of equal adjacent elements, e.g.
-- @group "Mississippi" == ["M","i","ss","i","ss","i","pp","i"]@.
group :: Eq a => [a] -> [[a]]
group [] = []
group (x:xs) = (x:ys) : group zs
    where (ys,zs) = span (x ==) xs
TODO fuse
shortest first . For example ,
> inits " abc " = = [ " " , " a","ab","abc " ]
-- | All initial segments of a list, shortest first:
-- @inits "abc" == ["","a","ab","abc"]@.
inits :: [a] -> [[a]]
inits ys =
  case ys of
    []     -> [[]]
    x : xs -> [] : map (x:) (inits xs)
TODO fuse
> tails " abc " = = [ " abc " , " bc " , " c " , " " ]
-- | All final segments of a list, longest first:
-- @tails "abc" == ["abc","bc","c",""]@.
tails :: [a] -> [[a]]
tails ys =
  case ys of
    []     -> [[]]
    _ : xs -> ys : tails xs
TODO fuse
| /O(n)/,/fusion/. The ' isPrefixOf ' function takes two lists and
returns ' True ' iff the first list is a prefix of the second .
-- | @isPrefixOf xs ys@ is 'True' iff @xs@ is a prefix of @ys@.
isPrefixOf :: Eq a => [a] -> [a] -> Bool
isPrefixOf []     _      = True
isPrefixOf _      []     = False
isPrefixOf (x:xs) (y:ys) = x == y && isPrefixOf xs ys
# NOINLINE [ 1 ] isPrefixOf #
# RULES
" isPrefixOf - > fusible " [ ~1 ] forall xs ys .
isPrefixOf xs ys = Stream.isPrefixOf ( stream xs ) ( stream ys )
"isPrefixOf -> fusible" [~1] forall xs ys.
isPrefixOf xs ys = Stream.isPrefixOf (stream xs) (stream ys)
#-}
| The ' isSuffixOf ' function takes two lists and returns ' True '
iff the first list is a suffix of the second .
-- | @isSuffixOf x y@ is 'True' iff @x@ is a suffix of @y@; implemented by
-- reversing both lists and testing the prefix (forces both lists fully).
isSuffixOf :: Eq a => [a] -> [a] -> Bool
isSuffixOf x y = reverse x `isPrefixOf` reverse y
TODO fuse
| The ' isInfixOf ' function takes two lists and returns ' True '
iff the first list is contained , wholly and intact ,
anywhere within the second .
> isInfixOf " Haskell " " I really like . " - > True
> isInfixOf " Ial " " I really like . " - > False
-- | @isInfixOf needle haystack@ is 'True' iff @needle@ occurs contiguously
-- anywhere within @haystack@.
isInfixOf :: Eq a => [a] -> [a] -> Bool
isInfixOf needle haystack = any (isPrefixOf needle) (tails haystack)
TODO fuse
-- | List membership: @elem x xs@ is 'True' iff @x@ occurs in @xs@.
elem :: Eq a => a -> [a] -> Bool
elem _ [] = False
elem x (y:ys)
  | x == y = True
  | otherwise = elem x ys
# NOINLINE [ 1 ] elem #
# RULES
" elem - > fusible " [ ~1 ] forall x xs .
elem x xs = Stream.elem x ( stream xs )
#
"elem -> fusible" [~1] forall x xs.
elem x xs = Stream.elem x (stream xs)
#-}
-- | Negated list membership: @notElem x xs = not (elem x xs)@.
notElem :: Eq a => a -> [a] -> Bool
notElem x xs = not (x `elem` xs)
{-# INLINE notElem #-}
{- RULES
"notElem -> fusible" [~1] forall x xs.
    notElem x xs = Stream.notElem x (stream xs)
"notElem -> unfused" [1] forall x xs.
    Stream.notElem x (stream xs) = notElem x xs
  -}
-- | @lookup key assocs@ looks up @key@ in an association list, returning
-- the value paired with its first occurrence, or 'Nothing'.
lookup :: Eq a => a -> [(a, b)] -> Maybe b
lookup _ [] = Nothing
lookup key xys0 = go xys0
  where
    -- Worker loop: 'key' is free here, so it need not be re-passed.
    go [] = Nothing
    go ((x,y):xys)
      | key == x = Just y
      | otherwise = go xys   -- was 'lookup key xys': re-entered the outer
                             -- wrapper each step, defeating the worker pattern
# NOINLINE [ 1 ] lookup #
# RULES
" lookup - > fusible " [ ~1 ] forall x xs .
lookup x xs = Stream.lookup x ( stream xs )
#
"lookup -> fusible" [~1] forall x xs.
lookup x xs = Stream.lookup x (stream xs)
#-}
> filter p ( filter q s ) = filter ( \x - > q x & & p x ) s
-- | @filter p xs@ keeps exactly the elements of @xs@ satisfying @p@,
-- in their original order.
filter :: (a -> Bool) -> [a] -> [a]
filter _ [] = []
filter p xs0 = go xs0
  where
    go [] = []
    go (x:xs)
      | p x = x : go xs
      | otherwise = go xs
# NOINLINE [ 1 ] filter #
# RULES
" filter - > fusible " [ ~1 ] forall f xs .
filter f xs = unstream ( Stream.filter f ( stream xs ) )
#
"filter -> fusible" [~1] forall f xs.
filter f xs = unstream (Stream.filter f (stream xs))
#-}
first element in the list matching the predicate , or ' Nothing ' if
-- | @find p xs@ is @Just@ the first element of @xs@ satisfying @p@,
-- or 'Nothing' if there is none.
find :: (a -> Bool) -> [a] -> Maybe a
find _ [] = Nothing
find p xs0 = go xs0
  where
    go [] = Nothing
    go (x:xs) | p x = Just x
              | otherwise = go xs
# NOINLINE [ 1 ] find #
# RULES
" find - > fusible " [ ~1 ] forall f xs .
find f xs = Stream.find f ( stream xs )
#
"find -> fusible" [~1] forall f xs.
find f xs = Stream.find f (stream xs)
#-}
-- | @partition p xs@ is @(filter p xs, filter (not . p) xs)@ in one pass,
-- built by folding 'select' over the list.
partition :: (a -> Bool) -> [a] -> ([a], [a])
partition p xs = foldr (select p) ([],[]) xs
# INLINE partition #
TODO fuse
-- | Step function for 'partition': route one element into the pass/fail
-- pair. The lazy (~) pattern keeps the fold productive on long lists.
select :: (a -> Bool) -> a -> ([a], [a]) -> ([a], [a])
select p x ~(ts,fs) | p x = (x:ts,fs)
                    | otherwise = (ts, x:fs)
-- | Zero-based list indexing. Errors on a negative index or one past the
-- end of the list.
(!!) :: [a] -> Int -> a
xs0 !! n0
  | n0 < 0 = error "Prelude.(!!): negative index"
  | otherwise = index xs0 n0
#ifndef __HADDOCK__
  -- CPP guard: the where-clause is hidden from Haddock's parser.
  where
    index [] _ = error "Prelude.(!!): index too large"
    index (y:ys) n = if n == 0 then y else index ys (n-1)
#endif
# NOINLINE [ 1 ] ( ! ! ) #
# RULES
" ! ! - > fusible " [ ~1 ] forall xs n.
xs ! ! n = Stream.index ( stream xs ) n
#
"!! -> fusible" [~1] forall xs n.
xs !! n = Stream.index (stream xs) n
#-}
| The ' elemIndex ' function returns the index of the first element
-- | @elemIndex x xs@ is the index of the first occurrence of @x@ in @xs@,
-- or 'Nothing'. Defined via 'findIndex' with an equality predicate.
elemIndex :: Eq a => a -> [a] -> Maybe Int
elemIndex x = findIndex (x==)
elemIndex : : Eq a = > a - > [ a ] - > Maybe Int
elemIndex y xs0 = loop_elemIndex xs0 0
# ifndef _ _ HADDOCK _ _
where
loop_elemIndex [ ] ! _ = Nothing
loop_elemIndex ( x : xs ) ! n
| p x = Just n
| otherwise = loop_elemIndex xs ( n + 1 )
p = ( y =
{ - # NOINLINE [ 1 ] elemIndex #
elemIndex :: Eq a => a -> [a] -> Maybe Int
elemIndex y xs0 = loop_elemIndex xs0 0
#ifndef __HADDOCK__
where
loop_elemIndex [] !_ = Nothing
loop_elemIndex (x:xs) !n
| p x = Just n
| otherwise = loop_elemIndex xs (n + 1)
p = (y ==)
#endif
-}
RULES
" elemIndex - > fusible " [ ~1 ] forall x xs .
elemIndex x xs = Stream.elemIndex x ( stream xs )
" elemIndex - > unfused " [ 1 ] forall x xs .
Stream.elemIndex x ( stream xs ) = elemIndex x xs
"elemIndex -> fusible" [~1] forall x xs.
elemIndex x xs = Stream.elemIndex x (stream xs)
"elemIndex -> unfused" [1] forall x xs.
Stream.elemIndex x (stream xs) = elemIndex x xs
-}
> length ( filter (= = a ) xs ) = length ( elemIndices a xs )
-- | @elemIndices x xs@ is the list of all indices at which @x@ occurs in
-- @xs@, in ascending order. Defined via 'findIndices'.
elemIndices :: Eq a => a -> [a] -> [Int]
elemIndices x = findIndices (x==)
# INLINE elemIndices #
elemIndices : : Eq a = > a - > [ a ] - > [ Int ]
elemIndices y xs0 = loop_elemIndices xs0 0
# ifndef _ _ HADDOCK _ _
where
loop_elemIndices [ ] ! _ = [ ]
loop_elemIndices ( x : xs ) ! n
| p x = n : loop_elemIndices xs ( n + 1 )
| otherwise = loop_elemIndices xs ( n + 1 )
p = ( y =
{ - # NOINLINE [ 1 ] elemIndices #
elemIndices :: Eq a => a -> [a] -> [Int]
elemIndices y xs0 = loop_elemIndices xs0 0
#ifndef __HADDOCK__
where
loop_elemIndices [] !_ = []
loop_elemIndices (x:xs) !n
| p x = n : loop_elemIndices xs (n + 1)
| otherwise = loop_elemIndices xs (n + 1)
p = (y ==)
#endif
-}
RULES
" elemIndices - > fusible " [ ~1 ] forall x xs .
elemIndices x xs = unstream ( Stream.elemIndices x ( stream xs ) )
" elemIndices - > unfused " [ 1 ] forall x xs .
unstream ( Stream.elemIndices x ( stream xs ) ) = elemIndices x xs
"elemIndices -> fusible" [~1] forall x xs.
elemIndices x xs = unstream (Stream.elemIndices x (stream xs))
"elemIndices -> unfused" [1] forall x xs.
unstream (Stream.elemIndices x (stream xs)) = elemIndices x xs
-}
the index of the first element in the list satisfying the predicate ,
-- | @findIndex p xs@ is the index of the first element satisfying @p@,
-- or 'Nothing'. The counter is an unboxed GHC 'Int#' (MagicHash syntax),
-- boxed with 'I#' only on success.
findIndex :: (a -> Bool) -> [a] -> Maybe Int
findIndex p ls = loop_findIndex ls 0#
  where
    loop_findIndex [] _ = Nothing
    loop_findIndex (x:xs) n
      | p x = Just (I# n)
      | otherwise = loop_findIndex xs (n +# 1#)
# NOINLINE [ 1 ] findIndex #
# RULES
" findIndex - > fusible " [ ~1 ] forall f xs .
findIndex f xs = Stream.findIndex f ( stream xs )
#
"findIndex -> fusible" [~1] forall f xs.
findIndex f xs = Stream.findIndex f (stream xs)
#-}
> length ( filter p xs ) = length ( findIndices p xs )
-- | @findIndices p xs@ is the ascending list of indices of all elements
-- satisfying @p@. Uses an unboxed 'Int#' counter like 'findIndex'.
findIndices :: (a -> Bool) -> [a] -> [Int]
findIndices p ls = loop_findIndices ls 0#
  where
    loop_findIndices [] _ = []
    loop_findIndices (x:xs) n
      | p x = I# n : loop_findIndices xs (n +# 1#)
      | otherwise = loop_findIndices xs (n +# 1#)
# NOINLINE [ 1 ] findIndices #
# RULES
" findIndices - > fusible " [ ~1 ] forall p xs .
findIndices p xs = unstream ( Stream.findIndices p ( stream xs ) )
#
"findIndices -> fusible" [~1] forall p xs.
findIndices p xs = unstream (Stream.findIndices p (stream xs))
#-}
| /O(n)/,/fusion/. ' zip ' takes two lists and returns a list of
corresponding pairs . If one input list is short , excess elements of
-- | 'zip' pairs up two lists element-wise, truncating at the shorter one.
zip :: [a] -> [b] -> [(a, b)]
zip (a:as) (b:bs) = (a,b) : zip as bs
zip _ _ = []
# NOINLINE [ 1 ] zip #
# RULES
" zip - > fusible " [ ~1 ] forall xs ys .
zip xs ys = unstream ( Stream.zip ( stream xs ) ( stream ys ) )
#
"zip -> fusible" [~1] forall xs ys.
zip xs ys = unstream (Stream.zip (stream xs) (stream ys))
#-}
| /O(n)/,/fusion/. ' zip3 ' takes three lists and returns a list of
-- | 'zip3' zips three lists into triples, truncating at the shortest.
zip3 :: [a] -> [b] -> [c] -> [(a, b, c)]
zip3 (a:as) (b:bs) (c:cs) = (a,b,c) : zip3 as bs cs
zip3 _ _ _ = []
# NOINLINE [ 1 ] zip3 #
# RULES
" zip3 - > fusible " [ ~1 ] forall xs ys zs .
= unstream ( Stream.zipWith3 ( , , ) ( stream xs ) ( stream ys ) ( stream zs ) )
#
"zip3 -> fusible" [~1] forall xs ys zs.
zip3 xs ys zs = unstream (Stream.zipWith3 (,,) (stream xs) (stream ys) (stream zs))
#-}
| /O(n)/,/fusion/. The ' zip4 ' function takes four lists and returns a list of
-- | Zip four lists into 4-tuples, truncating at the shortest input.
zip4 :: [a] -> [b] -> [c] -> [d] -> [(a, b, c, d)]
zip4 = zipWith4 (,,,)
# INLINE zip4 #
| The ' zip5 ' function takes five lists and returns a list of
five - tuples , analogous to ' zip ' .
-- | Zip five lists into 5-tuples, truncating at the shortest input.
zip5 :: [a] -> [b] -> [c] -> [d] -> [e] -> [(a, b, c, d, e)]
zip5 = zipWith5 (,,,,)
| The ' ' function takes six lists and returns a list of six - tuples ,
-- | Zip six lists into 6-tuples, truncating at the shortest input.
zip6 :: [a] -> [b] -> [c] -> [d] -> [e] -> [f] -> [(a, b, c, d, e, f)]
zip6 = zipWith6 (,,,,,)
| The ' zip7 ' function takes seven lists and returns a list of
seven - tuples , analogous to ' zip ' .
-- | Zip seven lists into 7-tuples, truncating at the shortest input.
zip7 :: [a] -> [b] -> [c] -> [d] -> [e] -> [f] -> [g] -> [(a, b, c, d, e, f, g)]
zip7 = zipWith7 (,,,,,,)
function given as the first argument , instead of a tupling function .
For example , @'zipWith ' ( + ) @ is applied to two lists to produce the
-- | 'zipWith' generalises 'zip' with a combining function instead of
-- tupling; truncates at the shorter input.
zipWith :: (a -> b -> c) -> [a] -> [b] -> [c]
zipWith f (a:as) (b:bs) = f a b : zipWith f as bs
zipWith _ _ _ = []
# INLINE [ 1 ] zipWith #
FIXME : If we change the above INLINE to NOINLINE then ghc goes into
# RULES
" zipWith - > fusible " [ ~1 ] forall f xs ys .
zipWith f xs ys = unstream ( Stream.zipWith f ( stream xs ) ( stream ys ) )
#
"zipWith -> fusible" [~1] forall f xs ys.
zipWith f xs ys = unstream (Stream.zipWith f (stream xs) (stream ys))
#-}
combines three elements , as well as three lists and returns a list of
-- | Three-list 'zipWith'; truncates at the shortest input.
zipWith3 :: (a -> b -> c -> d) -> [a] -> [b] -> [c] -> [d]
zipWith3 z (a:as) (b:bs) (c:cs) = z a b c : zipWith3 z as bs cs
zipWith3 _ _ _ _ = []
# NOINLINE [ 1 ] zipWith3 #
# RULES
" zipWith3 - > fusible " [ ~1 ] forall f xs ys zs .
zipWith3 f xs ys zs = unstream ( Stream.zipWith3 f ( stream xs ) ( stream ys ) ( stream zs ) )
#
"zipWith3 -> fusible" [~1] forall f xs ys zs.
zipWith3 f xs ys zs = unstream (Stream.zipWith3 f (stream xs) (stream ys) (stream zs))
#-}
| /O(n)/,/fusion/. The ' zipWith4 ' function takes a function which combines four
elements , as well as four lists and returns a list of their point - wise
-- | Four-list 'zipWith'; truncates at the shortest input.
zipWith4 :: (a -> b -> c -> d -> e) -> [a] -> [b] -> [c] -> [d] -> [e]
zipWith4 z (a:as) (b:bs) (c:cs) (d:ds)
  = z a b c d : zipWith4 z as bs cs ds
zipWith4 _ _ _ _ _ = []
# NOINLINE [ 1 ] zipWith4 #
# RULES
" zipWith4 - > fusible " [ ~1 ] forall f ws xs ys zs .
zipWith4 f ws xs ys zs = unstream ( Stream.zipWith4 f ( stream ws ) ( stream xs ) ( stream ys ) ( stream zs ) )
#
"zipWith4 -> fusible" [~1] forall f ws xs ys zs.
zipWith4 f ws xs ys zs = unstream (Stream.zipWith4 f (stream ws) (stream xs) (stream ys) (stream zs))
#-}
| The ' zipWith5 ' function takes a function which combines five
elements , as well as five lists and returns a list of their point - wise
-- | Five-list 'zipWith'; truncates at the shortest input.
zipWith5 :: (a -> b -> c -> d -> e -> f)
         -> [a] -> [b] -> [c] -> [d] -> [e] -> [f]
zipWith5 z (a:as) (b:bs) (c:cs) (d:ds) (e:es)
  = z a b c d e : zipWith5 z as bs cs ds es
zipWith5 _ _ _ _ _ _ = []
TODO fuse
| The ' zipWith6 ' function takes a function which combines six
elements , as well as six lists and returns a list of their point - wise
-- | Six-list 'zipWith'; truncates at the shortest input.
zipWith6 :: (a -> b -> c -> d -> e -> f -> g)
         -> [a] -> [b] -> [c] -> [d] -> [e] -> [f] -> [g]
zipWith6 z (a:as) (b:bs) (c:cs) (d:ds) (e:es) (f:fs)
  = z a b c d e f : zipWith6 z as bs cs ds es fs
zipWith6 _ _ _ _ _ _ _ = []
TODO fuse
| The ' zipWith7 ' function takes a function which combines seven
elements , as well as seven lists and returns a list of their point - wise
-- | Seven-list 'zipWith'; truncates at the shortest input.
zipWith7 :: (a -> b -> c -> d -> e -> f -> g -> h)
         -> [a] -> [b] -> [c] -> [d] -> [e] -> [f] -> [g] -> [h]
zipWith7 z (a:as) (b:bs) (c:cs) (d:ds) (e:es) (f:fs) (g:gs)
  = z a b c d e f g : zipWith7 z as bs cs ds es fs gs
zipWith7 _ _ _ _ _ _ _ _ = []
TODO fuse
| ' unzip ' transforms a list of pairs into a list of first components
and a list of second components .
-- | 'unzip' splits a list of pairs into a pair of lists. The lazy (~)
-- pattern in the fold keeps it productive on lazy input.
unzip :: [(a, b)] -> ([a], [b])
unzip = foldr (\(a,b) ~(as,bs) -> (a:as,b:bs)) ([],[])
TODO fuse
| The ' unzip3 ' function takes a list of triples and returns three
-- | 'unzip3' splits a list of triples into three lists.
unzip3 :: [(a, b, c)] -> ([a], [b], [c])
unzip3 = foldr (\(a,b,c) ~(as,bs,cs) -> (a:as,b:bs,c:cs)) ([],[],[])
TODO fuse
| The ' ' function takes a list of quadruples and returns four
-- | 'unzip4' splits a list of 4-tuples into four lists.
unzip4 :: [(a, b, c, d)] -> ([a], [b], [c], [d])
unzip4 = foldr (\(a,b,c,d) ~(as,bs,cs,ds) ->
                    (a:as,b:bs,c:cs,d:ds))
               ([],[],[],[])
TODO fuse
| The ' ' function takes a list of five - tuples and returns five
-- | 'unzip5' splits a list of 5-tuples into five lists.
unzip5 :: [(a, b, c, d, e)] -> ([a], [b], [c], [d], [e])
unzip5 = foldr (\(a,b,c,d,e) ~(as,bs,cs,ds,es) ->
                    (a:as,b:bs,c:cs,d:ds,e:es))
               ([],[],[],[],[])
TODO fuse
| The ' unzip6 ' function takes a list of six - tuples and returns six
-- | 'unzip6' splits a list of 6-tuples into six lists.
unzip6 :: [(a, b, c, d, e, f)] -> ([a], [b], [c], [d], [e], [f])
unzip6 = foldr (\(a,b,c,d,e,f) ~(as,bs,cs,ds,es,fs) ->
                    (a:as,b:bs,c:cs,d:ds,e:es,f:fs))
               ([],[],[],[],[],[])
TODO fuse
| The ' ' function takes a list of seven - tuples and returns
seven lists , analogous to ' unzip ' .
-- | 'unzip7' splits a list of 7-tuples into seven lists.
unzip7 :: [(a, b, c, d, e, f, g)] -> ([a], [b], [c], [d], [e], [f], [g])
unzip7 = foldr (\(a,b,c,d,e,f,g) ~(as,bs,cs,ds,es,fs,gs) ->
                    (a:as,b:bs,c:cs,d:ds,e:es,f:fs,g:gs))
               ([],[],[],[],[],[],[])
TODO fuse
-- | 'lines' breaks a string at newline characters. The '\n' terminators
-- are not included in the resulting strings.
lines :: String -> [String]
lines [] = []
lines s = let (l, s') = break (== '\n') s
          in l : case s' of
                   [] -> []
                   (_:s'') -> lines s''   -- drop the '\n' and continue
# INLINE [ 1 ] lines #
RULES
" lines - > fusible " [ ~1 ] forall xs .
lines xs = unstream ( Stream.lines ( stream xs ) )
" lines - > unfused " [ 1 ] forall xs .
unstream ( Stream.lines ( stream xs ) ) = lines xs
"lines -> fusible" [~1] forall xs.
lines xs = unstream (Stream.lines (stream xs))
"lines -> unfused" [1] forall xs.
unstream (Stream.lines (stream xs)) = lines xs
-}
-- | 'words' breaks a string into maximal runs of non-whitespace
-- characters, discarding whitespace. (isSpace presumably comes from
-- Data.Char via this module's imports — not visible in this chunk.)
words :: String -> [String]
words s = case dropWhile isSpace s of
            "" -> []
            s' -> w : words s''
                  where (w, s'') = break isSpace s'
TODO fuse
-- | 'unlines' joins strings, appending a '\n' after each one.
unlines :: [String] -> String
unlines css0 = to css0
  where -- 'go' copies one string, then emits its terminating newline.
        go [] css = '\n' : to css
        go (c:cs) css = c : go cs css
        -- 'to' starts the next string (or ends the output).
        to [] = []
        to (cs:css) = go cs css
# NOINLINE [ 1 ] unlines #
RULES
" unlines - > fusible " [ ~1 ] forall xs .
unlines xs = unstream ( Stream.concatMap ( \x - > Stream.snoc ( stream x ) ' \n ' ) ( stream xs ) )
" unlines - > unfused " [ 1 ] forall xs .
unstream ( Stream.concatMap ( \x - > Stream.snoc ( stream x ) ' \n ' ) ( stream xs ) ) = unlines xs
"unlines -> fusible" [~1] forall xs.
unlines xs = unstream (Stream.concatMap (\x -> Stream.snoc (stream x) '\n') (stream xs))
"unlines -> unfused" [1] forall xs.
unstream (Stream.concatMap (\x -> Stream.snoc (stream x) '\n') (stream xs)) = unlines xs
-}
-- | 'unwords' joins strings with single spaces between them (no trailing
-- space).
unwords :: [String] -> String
unwords [] = []
unwords (cs0:css0) = go cs0 css0
  where -- 'go' copies one word, then hands over to 'to'.
        go [] css = to css
        go (c:cs) css = c : go cs css
        -- 'to' emits the separating space only if another word follows.
        to [] = []
        to (cs:ccs) = ' ' : go cs ccs
TODO fuse
In particular , it keeps only the first occurrence of each element .
( The name ' nub ' means \`essence\ ' . )
-- | 'nub' removes duplicates, keeping the first occurrence of each
-- element. Quadratic: each element is checked against the seen-list.
nub :: Eq a => [a] -> [a]
nub l = nub' l []
  where
    -- Second argument accumulates the elements already emitted.
    nub' [] _ = []
    nub' (x:xs) ls
      | x `elem` ls = nub' xs ls
      | otherwise = x : nub' xs (x:ls)
TODO fuse
| ' delete ' @x@ removes the first occurrence of @x@ from its list argument .
> delete ' a ' " banana " = = " "
-- | @delete x xs@ removes the first occurrence of @x@ from @xs@.
delete :: Eq a => a -> [a] -> [a]
delete = deleteBy (==)
TODO fuse
In the result of @xs@ ' \\ ' @ys@ , the first occurrence of each element of
-- | List difference: each element of the second list removes the first
-- matching occurrence from the first list.
(\\) :: Eq a => [a] -> [a] -> [a]
(\\) = foldl (flip delete)
| The ' union ' function returns the list union of the two lists .
Duplicates , and elements of the first list , are removed from the
the second list , but if the first list contains duplicates , so will
-- | List union under (==); see 'unionBy' for the duplicate-handling rules.
union :: Eq a => [a] -> [a] -> [a]
union = unionBy (==)
TODO fuse
| The ' intersect ' function takes the list intersection of two lists .
> [ 1,2,3,4 ] ` intersect ` [ 2,4,6,8 ] = = [ 2,4 ]
If the first list contains duplicates , so will the result .
-- | List intersection under (==); duplicates in the first list survive.
intersect :: Eq a => [a] -> [a] -> [a]
intersect = intersectBy (==)
TODO fuse
TODO stuff in Ord can use Map / IntMap
, an Ord constraint ! we could use a better structure .
-- | Stable sort via bottom-up 'mergesort' with the default 'compare'.
sort :: Ord a => [a] -> [a]
sort l = mergesort compare l
TODO fuse , we have an Ord constraint !
-- | Insert an element before the first element that compares greater,
-- preserving sortedness of an already-sorted list.
insert :: Ord a => a -> [a] -> [a]
insert e ls = insertBy (compare) e ls
# INLINE insert #
* * * User - supplied equality ( replacing an Eq context )
-- | 'nub' with a user-supplied equality predicate instead of an Eq
-- context; keeps the first of each equivalence class.
nubBy :: (a -> a -> Bool) -> [a] -> [a]
nubBy eq l = nubBy' l []
  where
    -- Second argument accumulates the representatives already emitted.
    nubBy' [] _ = []
    nubBy' (y:ys) xs
      | elem_by eq y xs = nubBy' ys xs
      | otherwise = y : nubBy' ys (y:xs)
TODO fuse
-- | Membership test under a caller-supplied equality predicate; the list
-- element is passed as the first argument of @eq@.
elem_by :: (a -> a -> Bool) -> a -> [a] -> Bool
elem_by eq y = go
  where
    go []     = False
    go (x:xs) = x `eq` y || go xs
-- | Remove the first list element equal (under @eq@) to the given value;
-- the value is the first argument of @eq@, the element the second.
deleteBy :: (a -> a -> Bool) -> a -> [a] -> [a]
deleteBy _ _ [] = []
deleteBy eq x (y:ys)
  | x `eq` y  = ys
  | otherwise = y : deleteBy eq x ys
TODO fuse
-- | List difference under a user-supplied equality: each element of the
-- second list deletes one matching occurrence from the first.
deleteFirstsBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
deleteFirstsBy eq = foldl (flip (deleteBy eq))
-- | List union under a user-supplied equality: the second list is
-- deduplicated and purged of elements already in the first.
unionBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
unionBy eq xs ys = xs ++ foldl (flip (deleteBy eq)) (nubBy eq ys) xs
TODO fuse
-- | List intersection under a user-supplied equality; duplicates in the
-- first list survive.
intersectBy :: (a -> a -> Bool) -> [a] -> [a] -> [a]
intersectBy eq xs ys = [x | x <- xs, any (eq x) ys]
TODO fuse
-- | 'group' with a user-supplied equality predicate; each run is headed
-- by its first element, against which the rest of the run was compared.
groupBy :: (a -> a -> Bool) -> [a] -> [[a]]
groupBy _ [] = []
groupBy eq (x:xs) = (x:ys) : groupBy eq zs
    where (ys,zs) = span (eq x) xs
TODO fuse
* * * User - supplied comparison ( replacing an context )
-- | Stable sort with a user-supplied comparison, via 'mergesort'.
sortBy :: (a -> a -> Ordering) -> [a] -> [a]
sortBy cmp l = mergesort cmp l
TODO fuse
-- | Bottom-up merge sort: wrap each element in a singleton run, then
-- repeatedly merge runs pairwise (see 'mergesort'').
mergesort :: (a -> a -> Ordering) -> [a] -> [a]
mergesort cmp xs = mergesort' cmp (map wrap xs)
-- | Merge a list of sorted runs down to a single sorted list by
-- repeated pairwise merging; halves the run count each pass.
mergesort' :: (a -> a -> Ordering) -> [[a]] -> [a]
mergesort' _ [] = []
mergesort' _ [xs] = xs
mergesort' cmp xss = mergesort' cmp (merge_pairs cmp xss)
-- | One pass of bottom-up merge sort: merge adjacent runs pairwise;
-- an odd run at the end is passed through unchanged.
merge_pairs :: (a -> a -> Ordering) -> [[a]] -> [[a]]
merge_pairs _ [] = []
merge_pairs _ [xs] = [xs]
merge_pairs cmp (xs:ys:xss) = merge cmp xs ys : merge_pairs cmp xss
-- | Merge two sorted lists into one. Stable: on a tie (anything but GT)
-- the element from the left list is emitted first.
merge :: (a -> a -> Ordering) -> [a] -> [a] -> [a]
merge cmp = go
  where
    go xs [] = xs
    go [] ys = ys
    go xs@(x:xs') ys@(y:ys')
      | x `cmp` y == GT = y : go xs  ys'
      | otherwise       = x : go xs' ys
-- | Lift a value into a singleton run (seed for bottom-up merge sort).
wrap :: a -> [a]
wrap v = [v]
-- | Insert an element before the first position where the comparison is
-- not GT; on an already-sorted list this preserves sortedness.
insertBy :: (a -> a -> Ordering) -> a -> [a] -> [a]
insertBy _ x [] = [x]
insertBy cmp x ys@(y:ys')
  | cmp x y == GT = y : insertBy cmp x ys'
  | otherwise     = x : ys
# NOINLINE [ 1 ] insertBy #
# RULES
" insertBy - > fusible " [ ~1 ] forall f x xs .
insertBy f x xs = unstream ( Stream.insertBy f x ( stream xs ) )
#
"insertBy -> fusible" [~1] forall f x xs.
insertBy f x xs = unstream (Stream.insertBy f x (stream xs))
#-}
-- | Largest element under a user-supplied comparison; on ties the later
-- element wins (the fold keeps @y@ unless @cmp x y == GT@).
-- Errors on the empty list.
maximumBy :: (a -> a -> Ordering) -> [a] -> a
maximumBy _ [] = error "List.maximumBy: empty list"
maximumBy cmp xs = foldl1 max' xs
  where
    max' x y = case cmp x y of
                 GT -> x
                 _ -> y
# NOINLINE [ 1 ] maximumBy #
# RULES
" maximumBy - > fused " [ ~1 ] forall p xs .
( stream xs )
#
"maximumBy -> fused" [~1] forall p xs.
maximumBy p xs = Stream.maximumBy p (stream xs)
#-}
-- | Smallest element under a user-supplied comparison; on ties the
-- earlier element wins. Errors on the empty list.
minimumBy :: (a -> a -> Ordering) -> [a] -> a
minimumBy _ [] = error "List.minimumBy: empty list"
minimumBy cmp xs = foldl1 min' xs
  where
    min' x y = case cmp x y of
                 GT -> y
                 _ -> x
# NOINLINE [ 1 ] minimumBy #
# RULES
" minimumBy - > fused " [ ~1 ] forall p xs .
minimumBy p xs = ( stream xs )
#
"minimumBy -> fused" [~1] forall p xs.
minimumBy p xs = Stream.minimumBy p (stream xs)
#-}
| The ' genericLength ' function is an overloaded version of ' length ' . In
an instance of ' ' . It is , however , less efficient than ' length ' .
-- | 'length' generalised to any 'Num' result type; builds the count with
-- (+1) per element rather than a machine-Int accumulator.
genericLength :: Num i => [b] -> i
genericLength [] = 0
genericLength (_:l) = 1 + genericLength l
# NOINLINE [ 1 ] genericLength #
# RULES
" genericLength - > fusible " [ ~1 ] forall xs .
genericLength xs = Stream.genericLength ( stream xs )
#
"genericLength -> fusible" [~1] forall xs.
genericLength xs = Stream.genericLength (stream xs)
#-}
# RULES
" genericLength - > length / Int " genericLength = length : : [ a ] - > Int
#
"genericLength -> length/Int" genericLength = length :: [a] -> Int
#-}
-- | 'take' generalised to any 'Integral' count. Unlike 'take', a
-- negative count is an error here rather than yielding [].
genericTake :: Integral i => i -> [a] -> [a]
genericTake 0 _ = []
genericTake _ [] = []
genericTake n (x:xs)
  | n > 0 = x : genericTake (n-1) xs
  | otherwise = error "List.genericTake: negative argument"
# NOINLINE [ 1 ] genericTake #
# RULES
" genericTake - > fusible " [ ~1 ] forall xs n.
genericTake n xs = unstream ( Stream.genericTake n ( stream xs ) )
#
"genericTake -> fusible" [~1] forall xs n.
genericTake n xs = unstream (Stream.genericTake n (stream xs))
#-}
-- | 'drop' generalised to any 'Integral' count. Unlike 'drop', a
-- negative count is an error here rather than a no-op.
genericDrop :: Integral i => i -> [a] -> [a]
genericDrop 0 xs = xs
genericDrop _ [] = []
genericDrop n (_:xs) | n > 0 = genericDrop (n-1) xs
genericDrop _ _ = error "List.genericDrop: negative argument"
# NOINLINE [ 1 ] genericDrop #
# RULES
" genericDrop - > fusible " [ ~1 ] forall xs n.
genericDrop n xs = unstream ( Stream.genericDrop n ( stream xs ) )
#
"genericDrop -> fusible" [~1] forall xs n.
genericDrop n xs = unstream (Stream.genericDrop n (stream xs))
#-}
-- | '(!!)' generalised to any 'Integral' index; errors on a negative or
-- out-of-range index.
genericIndex :: Integral a => [b] -> a -> b
genericIndex (x:_) 0 = x
genericIndex (_:xs) n
  | n > 0 = genericIndex xs (n-1)
  | otherwise = error "List.genericIndex: negative argument."
genericIndex _ _ = error "List.genericIndex: index too large."
# NOINLINE [ 1 ] genericIndex #
# RULES
" genericIndex - > fusible " [ ~1 ] forall xs n.
genericIndex xs n = Stream.genericIndex ( stream xs ) n
#
"genericIndex -> fusible" [~1] forall xs n.
genericIndex xs n = Stream.genericIndex (stream xs) n
#-}
-- | 'splitAt' generalised to any 'Integral' count. Unlike 'splitAt',
-- a negative count is an error here.
genericSplitAt :: Integral i => i -> [a] -> ([a], [a])
genericSplitAt 0 xs = ([],xs)
genericSplitAt _ [] = ([],[])
genericSplitAt n (x:xs) | n > 0 = (x:xs',xs'')
    where (xs',xs'') = genericSplitAt (n-1) xs
genericSplitAt _ _ = error "List.genericSplitAt: negative argument"
# RULES
" genericSplitAt - > fusible " [ ~1 ] forall xs n.
genericSplitAt n xs = Stream.genericSplitAt n ( stream xs )
#
"genericSplitAt -> fusible" [~1] forall xs n.
genericSplitAt n xs = Stream.genericSplitAt n (stream xs)
#-}
-- | 'replicate' generalised to any 'Integral' count, via
-- @genericTake n (repeat x)@.
genericReplicate :: Integral i => i -> a -> [a]
genericReplicate n x = genericTake n (repeat x)
# INLINE genericReplicate #
# RULES
" genericReplicate - > replicate / Int " genericReplicate = replicate : : Int - > a - > [ a ]
#
"genericReplicate -> replicate/Int" genericReplicate = replicate :: Int -> a -> [a]
#-}
Internal utilities
-- | Raise the standard "empty list" error for the named function.
errorEmptyList :: String -> a
errorEmptyList fun = moduleError fun "empty list"
# NOINLINE errorEmptyList #
-- | Raise an error tagged with this module's name, the offending
-- function, and a message: @Data.List.<fun>: <msg>@.
moduleError :: String -> String -> a
moduleError fun msg = error ("Data.List." ++ fun ++ ':':' ':msg)
# NOINLINE moduleError #
-- | A named bottom value; evaluating it raises an error identifying
-- this module.
bottom :: a
bottom = error "Data.List.Stream: bottom"
# NOINLINE bottom #
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.