_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
e98ba9bc8ad55a739037c52b8385ecbbecfcacd7a5bc7ea5e4ccaba7dc36de34 | fukamachi/lack | builder.lisp | (in-package :cl-user)
(defpackage lack.builder
(:use :cl)
(:import-from :lack.component
:to-app)
(:import-from :lack.util
:find-middleware)
(:export :builder))
(in-package :lack.builder)
(defun clack-middleware-symbol-p (symbol)
(and (symbolp symbol)
(find-package :clack.middleware)
(find-class symbol nil)
(subtypep (find-class symbol)
(intern (string :<middleware>)
:clack.middleware))))
(defun convert-to-middleware-form (mw)
(let ((app (gensym "APP"))
(res-mw (gensym "RES-MW")))
(etypecase mw
(null)
(function mw)
(keyword `(find-middleware ,mw))
;; for old Clack middlewares
(symbol (if (clack-middleware-symbol-p mw)
`(lambda (,app)
(funcall (intern (string :wrap) :clack.middleware)
(make-instance ',mw)
,app))
mw))
(cons
(typecase (car mw)
(keyword `(lambda (,app)
(funcall (find-middleware ,(car mw)) ,app
,@(cdr mw))))
(symbol
;; for old Clack middlewares
(if (clack-middleware-symbol-p (car mw))
`(lambda (,app)
(funcall (intern (string :wrap) :clack.middleware)
(make-instance ',(car mw) ,@(cdr mw))
,app))
;; Normal form
(let ((res (gensym "RES")))
;; reconvert the result of the form
`(let ((,res ,mw))
(typecase ,res
(keyword (find-middleware ,res))
(cons (if (keywordp (car ,res))
(let ((,res-mw (find-middleware (car ,res))))
(lambda (,app)
(apply ,res-mw ,app (cdr ,res))))
,res))
(standard-object
(lambda (,app)
(funcall (intern (string :wrap) :clack.middleware) ,res ,app)))
(otherwise ,res))))))
(otherwise mw))))))
(defmacro builder (&rest app-or-middlewares)
(let ((middlewares (butlast app-or-middlewares)))
`(reduce #'funcall
(remove-if
#'null
(list
,@(loop for mw in middlewares
when mw
collect (convert-to-middleware-form mw))))
:initial-value (to-app ,(car (last app-or-middlewares)))
:from-end t)))
| null | https://raw.githubusercontent.com/fukamachi/lack/1f155216aeea36291b325c519f041e469262a399/src/builder.lisp | lisp | for old Clack middlewares
for old Clack middlewares
Normal form
reconvert the result of the form | (in-package :cl-user)
(defpackage lack.builder
(:use :cl)
(:import-from :lack.component
:to-app)
(:import-from :lack.util
:find-middleware)
(:export :builder))
(in-package :lack.builder)
(defun clack-middleware-symbol-p (symbol)
(and (symbolp symbol)
(find-package :clack.middleware)
(find-class symbol nil)
(subtypep (find-class symbol)
(intern (string :<middleware>)
:clack.middleware))))
(defun convert-to-middleware-form (mw)
(let ((app (gensym "APP"))
(res-mw (gensym "RES-MW")))
(etypecase mw
(null)
(function mw)
(keyword `(find-middleware ,mw))
(symbol (if (clack-middleware-symbol-p mw)
`(lambda (,app)
(funcall (intern (string :wrap) :clack.middleware)
(make-instance ',mw)
,app))
mw))
(cons
(typecase (car mw)
(keyword `(lambda (,app)
(funcall (find-middleware ,(car mw)) ,app
,@(cdr mw))))
(symbol
(if (clack-middleware-symbol-p (car mw))
`(lambda (,app)
(funcall (intern (string :wrap) :clack.middleware)
(make-instance ',(car mw) ,@(cdr mw))
,app))
(let ((res (gensym "RES")))
`(let ((,res ,mw))
(typecase ,res
(keyword (find-middleware ,res))
(cons (if (keywordp (car ,res))
(let ((,res-mw (find-middleware (car ,res))))
(lambda (,app)
(apply ,res-mw ,app (cdr ,res))))
,res))
(standard-object
(lambda (,app)
(funcall (intern (string :wrap) :clack.middleware) ,res ,app)))
(otherwise ,res))))))
(otherwise mw))))))
(defmacro builder (&rest app-or-middlewares)
(let ((middlewares (butlast app-or-middlewares)))
`(reduce #'funcall
(remove-if
#'null
(list
,@(loop for mw in middlewares
when mw
collect (convert-to-middleware-form mw))))
:initial-value (to-app ,(car (last app-or-middlewares)))
:from-end t)))
|
e196956ba926567454045d70c06ab9f71fd7461a7518eb16b98e5f12470300c7 | reasonml/reason | eol_convert.ml | open Eol_detect
let lf_to_crlf s =
let rec loop sz =
match String.index sz '\n' with
| exception Not_found -> sz
| idx ->
let l = (String.sub sz 0 idx) ^ "\r\n" in
let length = String.length sz in
l ^ (loop (String.sub sz (idx + 1) ((length - idx) - 1))) in
loop s
let get_formatter output_channel eol =
let f = Format.formatter_of_out_channel output_channel in
let out_functions = Format.pp_get_formatter_out_functions f () in
let out_string s p n =
match eol with
| LF -> out_functions.out_string s p n
| CRLF ->
let str = String.sub s p n in
let str = lf_to_crlf str in
out_functions.out_string str 0 (String.length str) in
let new_functions = { out_functions with out_string } in
Format.pp_set_formatter_out_functions f new_functions; f
| null | https://raw.githubusercontent.com/reasonml/reason/daa11255cb4716ce1c370925251021bd6e3bd974/src/refmt/eol_convert.ml | ocaml | open Eol_detect
let lf_to_crlf s =
let rec loop sz =
match String.index sz '\n' with
| exception Not_found -> sz
| idx ->
let l = (String.sub sz 0 idx) ^ "\r\n" in
let length = String.length sz in
l ^ (loop (String.sub sz (idx + 1) ((length - idx) - 1))) in
loop s
let get_formatter output_channel eol =
let f = Format.formatter_of_out_channel output_channel in
let out_functions = Format.pp_get_formatter_out_functions f () in
let out_string s p n =
match eol with
| LF -> out_functions.out_string s p n
| CRLF ->
let str = String.sub s p n in
let str = lf_to_crlf str in
out_functions.out_string str 0 (String.length str) in
let new_functions = { out_functions with out_string } in
Format.pp_set_formatter_out_functions f new_functions; f
| |
e3de3796cf333da2d9f9acf84940161762b0cffa7084683ebfda7ac0a8f9a132 | craigl64/clim-ccl | activities.lisp | -*- Mode : Lisp ; Syntax : ANSI - Common - Lisp ; Package : CLIM - INTERNALS ; Base : 10 ; Lowercase : Yes -*-
;; See the file LICENSE for the full license governing this code.
;;
(in-package :clim-internals)
" Copyright ( c ) 1992 Symbolics , Inc. All rights reserved .
Portions copyright ( c ) 1992 Franz , Inc. All rights reserved . "
;;; Activities
;; An activity acts like both an application frame and a frame manager
;;--- Implement other relevant frame and frame-manager methods
;;--- What about methods to choose the default values for the standard streams?
(defclass activity (application-frame)
((frames :initform nil :accessor frame-manager-frames)
(pretty-name :accessor frame-pretty-name)
(top-level-process :initform nil)
(active-frame :initform nil :initarg :initial-frame :accessor activity-active-frame)
(input-buffer :initform (make-locking-queue) :accessor frame-input-buffer)
(auto-select :initform nil :initarg :auto-select :accessor activity-auto-select)
(frame-manager :initform nil :initarg :frame-manager
:accessor frame-manager)
(actual-pointer-documentation-pane :initform nil :accessor frame-actual-pointer-documentation-pane)))
(defmethod initialize-instance :after ((activity activity) &key pretty-name)
(setf (frame-pretty-name activity)
(or pretty-name (title-capitalize (string (frame-name activity)))))
(unless (frame-manager activity)
(setf (frame-manager activity) (find-frame-manager))))
(defmethod frame-name ((activity activity))
(type-of activity))
(defmethod run-frame-top-level :around ((activity activity) &key)
(handler-bind ((frame-exit
#'(lambda (condition)
(let ((exit-frame (frame-exit-frame condition)))
(when (eq activity exit-frame)
(return-from run-frame-top-level nil))))))
(loop
(with-simple-restart (nil "~A top level" (frame-pretty-name activity))
(loop
#-CCL-2
(return-from run-frame-top-level (call-next-method))
#+CCL-2
(let ((results (multiple-value-list (call-next-method))))
(return-from run-frame-top-level (values-list results))))))))
(defmethod run-frame-top-level ((activity activity) &key)
(with-slots (top-level-process) activity
(when top-level-process
(cerror "Bludgeon ahead, assuming the risk"
"The process ~S is already running the top-level function for frame ~S"
top-level-process activity))
(unwind-protect
(progn
(setq top-level-process (current-process))
(default-frame-top-level activity))
(setq top-level-process nil))))
(defmethod frame-exit ((activity activity) &rest keys)
(signal 'frame-exit :frame activity :options keys))
(defmethod default-frame-top-level ((activity activity) &key &allow-other-keys)
(if (frame-manager-frames activity)
(progn
(setf (activity-active-frame activity)
(select-activity-initial-frame activity))
(enable-activity-frames activity))
(setf (activity-active-frame activity)
(prog1 (start-initial-application-frame activity)
(start-other-application-frames activity))))
(unwind-protect
(loop
(let ((*activity* activity)
(*original-stream* nil)
(*input-wait-test* nil)
(*input-wait-handler* nil)
(*pointer-button-press-handler* nil)
(*numeric-argument* nil)
(*delimiter-gestures* nil)
(*activation-gestures* nil)
(*accelerator-gestures* nil)
(*accelerator-numeric-argument* nil)
(*input-context* nil)
(*accept-help* nil)
(*assume-all-commands-enabled* nil)
(*command-parser* 'command-line-command-parser)
(*command-unparser* 'command-line-command-unparser)
(*partial-command-parser*
'command-line-read-remaining-arguments-for-partial-command)
;; Start these out nowhere
;;--- Surely these are bound by the frame top level
;; (*standard-output* nil)
;; (*standard-input* nil)
;; (*query-io* nil)
(*pointer-documentation-output* nil))
(loop
(with-simple-restart (nil "~A top level" (frame-pretty-name activity))
(catch 'window-resynchronize
(unless (activity-active-frame activity)
(setf (activity-active-frame activity)
(select-next-active-frame activity)))
(unless (activity-active-frame activity)
(frame-exit activity))
(let* ((frame (activity-active-frame activity))
(*application-frame* frame)
(*standard-output* (or (frame-standard-output frame)
*standard-output*))
(*standard-input* (or (frame-standard-input frame)
*standard-input*))
(top-level (frame-top-level frame)))
(unwind-protect
(loop
(catch 'layout-changed
(with-frame-manager ((frame-manager frame))
(setf (slot-value frame 'top-level-process)
(slot-value activity 'top-level-process))
(if (atom top-level)
(funcall top-level frame)
(apply (first top-level) frame (rest top-level))))
;;--- Well, what *are* we supposed to do here?
(break "do something")))
(setf (slot-value frame 'top-level-process) nil))))))))
(disable-activity-frames activity)))
(defmethod select-activity-initial-frame ((activity activity))
(first (frame-manager-frames activity)))
(defmethod select-next-active-frame ((activity activity))
(and (activity-auto-select activity)
(first (frame-manager-frames activity))))
(defmethod disable-activity-frames ((activity activity))
(mapc #'disable-frame (frame-manager-frames activity)))
(defmethod enable-activity-frames ((activity activity))
(mapc #'enable-frame (frame-manager-frames activity)))
;; Closes all of the frames in the activity and exits the activity's
;; top level loop
(defmethod activity-quit ((activity activity))
(dolist (frame (frame-manager-frames activity))
(stop-application-frame activity frame)))
;;; Application frames within an activity
;; An application frame that participates in an activity
(defclass activity-frame (standard-application-frame)
((activity :initform nil :accessor frame-activity :initarg :activity)))
(defmethod frame-actual-pointer-documentation-pane ((frame activity-frame))
(let ((act (frame-activity frame)))
(and act (frame-actual-pointer-documentation-pane act))))
(defmethod (setf frame-actual-pointer-documentation-pane) (value (frame activity-frame))
(let ((act (frame-activity frame)))
(when act (setf (frame-actual-pointer-documentation-pane act) value)))
value)
(defmethod frame-top-level-process ((frame activity-frame))
(let ((act (frame-activity frame)))
(and act (slot-value act 'top-level-process))))
(defmethod initialize-instance :after ((frame activity-frame) &key activity)
(assert activity () "The activity frame ~S requires an activity" frame))
(defclass secondary-activity-frame (activity-frame)
())
(defmethod initialize-instance :after ((frame secondary-activity-frame) &key activity)
(assert (frame-manager-frames activity) ()
"Other frames must be created before secondary activity frames"))
;; Starts an application frame and registers it in the activity
(defmethod start-application-frame ((activity activity) frame-name &rest frame-options)
(declare (dynamic-extent frame-options))
(let* ((frame (apply #'make-application-frame frame-name
:input-buffer (frame-input-buffer activity)
:activity activity
:frame-manager (frame-manager activity)
frame-options)))
(enable-frame frame)
(push frame (frame-manager-frames activity))
frame))
Closes the application - frame frame and un - registers it from the activity .
(defmethod stop-application-frame ((activity activity) (frame activity-frame))
;; Do we have to take care of the input buffer that it will not be
;; reused by some magic resource mechanism?
(disable-frame frame)
(setf (frame-manager-frames activity)
(delete frame (frame-manager-frames activity)))
(setf (frame-activity frame) nil)
(when (eq frame (activity-active-frame activity))
(if (frame-manager-frames activity)
(setf (activity-active-frame activity) nil)
(frame-exit activity)))
(throw 'window-resynchronize :window-resynchronize))
;; Starts the initial application, when the activity is started up.
;; There is no default method, so subclasses must implement this.
(defgeneric start-initial-application-frame (activity))
(defmethod start-other-application-frames ((activity activity))
nil)
;;; Callbacks from the window manager
;; This function is called when the window manager of the host's display
;; server selects the top-level window of the frame activity-frame.
;; Methods can specialize on this callback to provide customized behavior
-- e.g. dependent from the state of the CP throw out of the command
;; loop or not (see example code). The default method sets activity-frame
;; as the active frame and throws to the tag 'window-resynchronize to
;; restart the command loop.
(defmethod activity-frame-window-select ((frame activity-frame))
;;--- We should think about exporting the cp-state and so
;;--- give the the application more of a chance.
(let ((activity (frame-activity frame)))
(when (and (frame-activity frame)
(activity-auto-select activity)
*input-buffer-empty*
*reading-frame-command*)
(select-activity-active-frame activity frame))))
(defmethod activity-frame-window-select :around ((activity-frame activity-frame))
(unless (let ((activity (frame-activity activity-frame)))
(and activity
(eq activity-frame (activity-active-frame activity))))
(call-next-method)))
(defun select-activity-active-frame (activity frame)
--- current frame in need of throwing
(setf (activity-active-frame activity) frame)
(throw 'window-resynchronize :window-resynchronize))
(defmethod receive-gesture :after
((stream top-level-sheet) (gesture pointer-enter-event))
(unless (eq (pointer-boundary-event-kind gesture) :inferior)
(let ((frame (pane-frame stream)))
(when (and (typep frame 'activity-frame)
(typep *application-frame* 'activity-frame))
(activity-frame-window-select frame)))))
;; This function is called when the window manager of the host's display
;; server closes the top-level window of the frame activity-frame. Methods
;; can specialize on this callback to provide customized behavior. The
;; default method calls STOP-APPLICATION-FRAME on the frame's activity and
;; the frame as arguments.
(defmethod activity-frame-window-close ((frame activity-frame))
(when (frame-activity frame)
(stop-application-frame (frame-activity frame) frame)
;; we dont want throw out from here, dont we?
--- current frame in need of throwing
#+++ignore (throw 'window-resynchronize :window-resynchronize)))
Exit from just this frame , not the whole activity
(defmethod frame-exit ((frame activity-frame) &rest keys)
(declare (ignore keys))
(activity-frame-window-close frame))
(defmethod redisplay-frame-panes ((frame activity-frame) &key force-p)
First display all the : accept - values panes , then display the rest .
;; We do this to ensure that all side-effects from :accept-values panes
;; have taken place.
(let ((activity (frame-activity frame)))
(dolist (frame (frame-manager-frames activity))
(map-over-sheets #'(lambda (sheet)
(when (typep sheet 'accept-values-pane)
(redisplay-frame-pane frame sheet :force-p force-p)))
(frame-top-level-sheet frame)))
(dolist (frame (frame-manager-frames activity))
(map-over-sheets #'(lambda (sheet)
(when (and (typep sheet 'clim-stream-pane)
(not (typep sheet 'accept-values-pane)))
(redisplay-frame-pane frame sheet :force-p force-p)))
(frame-top-level-sheet frame))))
;; Once we've redisplayed everything, the layout is done changing
(setq *frame-layout-changing-p* nil))
(defmethod disable-frame :after ((frame activity-frame))
(let ((activity (frame-activity frame)))
(when (and activity (eq (activity-active-frame activity) frame))
(setf (activity-active-frame activity) nil)
;;--- Need to resynchronize at this point
;;--- We should set flag in frame so that the default top level
;;--- notices this and exits or throws
)))
(defmethod destroy-activity ((activity activity))
;;-- Need it do anything else?
(mapc #'destroy-frame (frame-manager-frames activity)))
| null | https://raw.githubusercontent.com/craigl64/clim-ccl/301efbd770745b429f2b00b4e8ca6624de9d9ea9/clim/activities.lisp | lisp | Syntax : ANSI - Common - Lisp ; Package : CLIM - INTERNALS ; Base : 10 ; Lowercase : Yes -*-
See the file LICENSE for the full license governing this code.
Activities
An activity acts like both an application frame and a frame manager
--- Implement other relevant frame and frame-manager methods
--- What about methods to choose the default values for the standard streams?
Start these out nowhere
--- Surely these are bound by the frame top level
(*standard-output* nil)
(*standard-input* nil)
(*query-io* nil)
--- Well, what *are* we supposed to do here?
Closes all of the frames in the activity and exits the activity's
top level loop
Application frames within an activity
An application frame that participates in an activity
Starts an application frame and registers it in the activity
Do we have to take care of the input buffer that it will not be
reused by some magic resource mechanism?
Starts the initial application, when the activity is started up.
There is no default method, so subclasses must implement this.
Callbacks from the window manager
This function is called when the window manager of the host's display
server selects the top-level window of the frame activity-frame.
Methods can specialize on this callback to provide customized behavior
loop or not (see example code). The default method sets activity-frame
as the active frame and throws to the tag 'window-resynchronize to
restart the command loop.
--- We should think about exporting the cp-state and so
--- give the the application more of a chance.
This function is called when the window manager of the host's display
server closes the top-level window of the frame activity-frame. Methods
can specialize on this callback to provide customized behavior. The
default method calls STOP-APPLICATION-FRAME on the frame's activity and
the frame as arguments.
we dont want throw out from here, dont we?
We do this to ensure that all side-effects from :accept-values panes
have taken place.
Once we've redisplayed everything, the layout is done changing
--- Need to resynchronize at this point
--- We should set flag in frame so that the default top level
--- notices this and exits or throws
-- Need it do anything else? |
(in-package :clim-internals)
" Copyright ( c ) 1992 Symbolics , Inc. All rights reserved .
Portions copyright ( c ) 1992 Franz , Inc. All rights reserved . "
(defclass activity (application-frame)
((frames :initform nil :accessor frame-manager-frames)
(pretty-name :accessor frame-pretty-name)
(top-level-process :initform nil)
(active-frame :initform nil :initarg :initial-frame :accessor activity-active-frame)
(input-buffer :initform (make-locking-queue) :accessor frame-input-buffer)
(auto-select :initform nil :initarg :auto-select :accessor activity-auto-select)
(frame-manager :initform nil :initarg :frame-manager
:accessor frame-manager)
(actual-pointer-documentation-pane :initform nil :accessor frame-actual-pointer-documentation-pane)))
(defmethod initialize-instance :after ((activity activity) &key pretty-name)
(setf (frame-pretty-name activity)
(or pretty-name (title-capitalize (string (frame-name activity)))))
(unless (frame-manager activity)
(setf (frame-manager activity) (find-frame-manager))))
(defmethod frame-name ((activity activity))
(type-of activity))
(defmethod run-frame-top-level :around ((activity activity) &key)
(handler-bind ((frame-exit
#'(lambda (condition)
(let ((exit-frame (frame-exit-frame condition)))
(when (eq activity exit-frame)
(return-from run-frame-top-level nil))))))
(loop
(with-simple-restart (nil "~A top level" (frame-pretty-name activity))
(loop
#-CCL-2
(return-from run-frame-top-level (call-next-method))
#+CCL-2
(let ((results (multiple-value-list (call-next-method))))
(return-from run-frame-top-level (values-list results))))))))
(defmethod run-frame-top-level ((activity activity) &key)
(with-slots (top-level-process) activity
(when top-level-process
(cerror "Bludgeon ahead, assuming the risk"
"The process ~S is already running the top-level function for frame ~S"
top-level-process activity))
(unwind-protect
(progn
(setq top-level-process (current-process))
(default-frame-top-level activity))
(setq top-level-process nil))))
(defmethod frame-exit ((activity activity) &rest keys)
(signal 'frame-exit :frame activity :options keys))
(defmethod default-frame-top-level ((activity activity) &key &allow-other-keys)
(if (frame-manager-frames activity)
(progn
(setf (activity-active-frame activity)
(select-activity-initial-frame activity))
(enable-activity-frames activity))
(setf (activity-active-frame activity)
(prog1 (start-initial-application-frame activity)
(start-other-application-frames activity))))
(unwind-protect
(loop
(let ((*activity* activity)
(*original-stream* nil)
(*input-wait-test* nil)
(*input-wait-handler* nil)
(*pointer-button-press-handler* nil)
(*numeric-argument* nil)
(*delimiter-gestures* nil)
(*activation-gestures* nil)
(*accelerator-gestures* nil)
(*accelerator-numeric-argument* nil)
(*input-context* nil)
(*accept-help* nil)
(*assume-all-commands-enabled* nil)
(*command-parser* 'command-line-command-parser)
(*command-unparser* 'command-line-command-unparser)
(*partial-command-parser*
'command-line-read-remaining-arguments-for-partial-command)
(*pointer-documentation-output* nil))
(loop
(with-simple-restart (nil "~A top level" (frame-pretty-name activity))
(catch 'window-resynchronize
(unless (activity-active-frame activity)
(setf (activity-active-frame activity)
(select-next-active-frame activity)))
(unless (activity-active-frame activity)
(frame-exit activity))
(let* ((frame (activity-active-frame activity))
(*application-frame* frame)
(*standard-output* (or (frame-standard-output frame)
*standard-output*))
(*standard-input* (or (frame-standard-input frame)
*standard-input*))
(top-level (frame-top-level frame)))
(unwind-protect
(loop
(catch 'layout-changed
(with-frame-manager ((frame-manager frame))
(setf (slot-value frame 'top-level-process)
(slot-value activity 'top-level-process))
(if (atom top-level)
(funcall top-level frame)
(apply (first top-level) frame (rest top-level))))
(break "do something")))
(setf (slot-value frame 'top-level-process) nil))))))))
(disable-activity-frames activity)))
(defmethod select-activity-initial-frame ((activity activity))
(first (frame-manager-frames activity)))
(defmethod select-next-active-frame ((activity activity))
(and (activity-auto-select activity)
(first (frame-manager-frames activity))))
(defmethod disable-activity-frames ((activity activity))
(mapc #'disable-frame (frame-manager-frames activity)))
(defmethod enable-activity-frames ((activity activity))
(mapc #'enable-frame (frame-manager-frames activity)))
(defmethod activity-quit ((activity activity))
(dolist (frame (frame-manager-frames activity))
(stop-application-frame activity frame)))
(defclass activity-frame (standard-application-frame)
((activity :initform nil :accessor frame-activity :initarg :activity)))
(defmethod frame-actual-pointer-documentation-pane ((frame activity-frame))
(let ((act (frame-activity frame)))
(and act (frame-actual-pointer-documentation-pane act))))
(defmethod (setf frame-actual-pointer-documentation-pane) (value (frame activity-frame))
(let ((act (frame-activity frame)))
(when act (setf (frame-actual-pointer-documentation-pane act) value)))
value)
(defmethod frame-top-level-process ((frame activity-frame))
(let ((act (frame-activity frame)))
(and act (slot-value act 'top-level-process))))
(defmethod initialize-instance :after ((frame activity-frame) &key activity)
(assert activity () "The activity frame ~S requires an activity" frame))
(defclass secondary-activity-frame (activity-frame)
())
(defmethod initialize-instance :after ((frame secondary-activity-frame) &key activity)
(assert (frame-manager-frames activity) ()
"Other frames must be created before secondary activity frames"))
(defmethod start-application-frame ((activity activity) frame-name &rest frame-options)
(declare (dynamic-extent frame-options))
(let* ((frame (apply #'make-application-frame frame-name
:input-buffer (frame-input-buffer activity)
:activity activity
:frame-manager (frame-manager activity)
frame-options)))
(enable-frame frame)
(push frame (frame-manager-frames activity))
frame))
Closes the application - frame frame and un - registers it from the activity .
(defmethod stop-application-frame ((activity activity) (frame activity-frame))
(disable-frame frame)
(setf (frame-manager-frames activity)
(delete frame (frame-manager-frames activity)))
(setf (frame-activity frame) nil)
(when (eq frame (activity-active-frame activity))
(if (frame-manager-frames activity)
(setf (activity-active-frame activity) nil)
(frame-exit activity)))
(throw 'window-resynchronize :window-resynchronize))
(defgeneric start-initial-application-frame (activity))
(defmethod start-other-application-frames ((activity activity))
nil)
-- e.g. dependent from the state of the CP throw out of the command
(defmethod activity-frame-window-select ((frame activity-frame))
(let ((activity (frame-activity frame)))
(when (and (frame-activity frame)
(activity-auto-select activity)
*input-buffer-empty*
*reading-frame-command*)
(select-activity-active-frame activity frame))))
(defmethod activity-frame-window-select :around ((activity-frame activity-frame))
(unless (let ((activity (frame-activity activity-frame)))
(and activity
(eq activity-frame (activity-active-frame activity))))
(call-next-method)))
(defun select-activity-active-frame (activity frame)
--- current frame in need of throwing
(setf (activity-active-frame activity) frame)
(throw 'window-resynchronize :window-resynchronize))
(defmethod receive-gesture :after
((stream top-level-sheet) (gesture pointer-enter-event))
(unless (eq (pointer-boundary-event-kind gesture) :inferior)
(let ((frame (pane-frame stream)))
(when (and (typep frame 'activity-frame)
(typep *application-frame* 'activity-frame))
(activity-frame-window-select frame)))))
(defmethod activity-frame-window-close ((frame activity-frame))
(when (frame-activity frame)
(stop-application-frame (frame-activity frame) frame)
--- current frame in need of throwing
#+++ignore (throw 'window-resynchronize :window-resynchronize)))
Exit from just this frame , not the whole activity
(defmethod frame-exit ((frame activity-frame) &rest keys)
(declare (ignore keys))
(activity-frame-window-close frame))
(defmethod redisplay-frame-panes ((frame activity-frame) &key force-p)
First display all the : accept - values panes , then display the rest .
(let ((activity (frame-activity frame)))
(dolist (frame (frame-manager-frames activity))
(map-over-sheets #'(lambda (sheet)
(when (typep sheet 'accept-values-pane)
(redisplay-frame-pane frame sheet :force-p force-p)))
(frame-top-level-sheet frame)))
(dolist (frame (frame-manager-frames activity))
(map-over-sheets #'(lambda (sheet)
(when (and (typep sheet 'clim-stream-pane)
(not (typep sheet 'accept-values-pane)))
(redisplay-frame-pane frame sheet :force-p force-p)))
(frame-top-level-sheet frame))))
(setq *frame-layout-changing-p* nil))
(defmethod disable-frame :after ((frame activity-frame))
(let ((activity (frame-activity frame)))
(when (and activity (eq (activity-active-frame activity) frame))
(setf (activity-active-frame activity) nil)
)))
(defmethod destroy-activity ((activity activity))
(mapc #'destroy-frame (frame-manager-frames activity)))
|
20e3da71345c5740532ea8a74ae134e089758781c473c349961379c04aed7aab | graninas/Functional-Design-and-Architecture | ConsoleLogger1.hs | module Framework.Logging.Logger.Impl.ConsoleLogger1 where
import qualified Data.Text.IO as T
import qualified Data.Text as T
import Control.Monad.Free.Church
import Control.Monad
import Framework.Logging.Types
import Framework.Logging.Logger.Language
interpretLoggerMethod
:: LoggerF a
-> IO a
interpretLoggerMethod (LogMessage lvl msg next) = do
T.putStrLn $ "[" <> T.pack (show lvl) <> "] " <> msg
pure $ next ()
runLogger
:: LoggerL a
-> IO a
runLogger logAction = foldF interpretLoggerMethod logAction
| null | https://raw.githubusercontent.com/graninas/Functional-Design-and-Architecture/17c555a35dd884415a4aeedf1e1ba1416dc35cb1/Second-Edition-Manning-Publications/BookSamples/CH10/Section10p1/src/Framework/Logging/Logger/Impl/ConsoleLogger1.hs | haskell | module Framework.Logging.Logger.Impl.ConsoleLogger1 where
import qualified Data.Text.IO as T
import qualified Data.Text as T
import Control.Monad.Free.Church
import Control.Monad
import Framework.Logging.Types
import Framework.Logging.Logger.Language
interpretLoggerMethod
:: LoggerF a
-> IO a
interpretLoggerMethod (LogMessage lvl msg next) = do
T.putStrLn $ "[" <> T.pack (show lvl) <> "] " <> msg
pure $ next ()
runLogger
:: LoggerL a
-> IO a
runLogger logAction = foldF interpretLoggerMethod logAction
| |
2f2b24fac983277bcd305472c19f48fb96359362e76b736a5e9f58ebab538fc5 | bhauman/figwheel-main | core.cljs | (ns exproj.core
(:require
[goog.events]
[goog.object :as gobj]
[clojure.string :as string]
#_[exproj.other]
#_[react]
[cljs.test :refer [deftest is]]))
#_(js/console.log react/Component)
#_(defn)
#_(prn "ouchy")
(defn hello []
"hello exproj")
(defn ^:after-load after-hook []
(js/console.log "Called the AFTER hook!!!"))
(defn ^:before-load befor-hook [& args]
(js/console.log "Called the before hook!!!"))
(deftest this-is-a-test
(prn "hello")
(is false))
#_(d)
;; stable reference
#_(defonce after-load (fn [e] (prn :after (.. e -data))))
;; idempotent with stable reference
#_(.addEventListener js/document.body "figwheel.after-load" after-load)
#_(cljs.pprint/pprint (deref js/figwheel.core.state))
#_(defonce before-load (fn [e] (prn :before (.. e -data))))
;; idempotent with stable reference
#_(.addEventListener js/document.body "figwheel.before-load" before-load)
#_(defonce after-css-load (fn [e] (prn :after-css-load (.. e -data))))
;; idempotent with stable reference
#_(.addEventListener js/document.body "figwheel.after-css-load" after-css-load)
(defn -main [& args]
(prn 35)
35)
#_(defn)
#_(d d d d d d d)
| null | https://raw.githubusercontent.com/bhauman/figwheel-main/ead06b94b1b2747ed37e5d8c37118d6e7ae77193/devel/exproj/core.cljs | clojure | stable reference
idempotent with stable reference
idempotent with stable reference
idempotent with stable reference | (ns exproj.core
(:require
[goog.events]
[goog.object :as gobj]
[clojure.string :as string]
#_[exproj.other]
#_[react]
[cljs.test :refer [deftest is]]))
#_(js/console.log react/Component)
#_(defn)
#_(prn "ouchy")
(defn hello []
"hello exproj")
(defn ^:after-load after-hook []
(js/console.log "Called the AFTER hook!!!"))
(defn ^:before-load befor-hook [& args]
(js/console.log "Called the before hook!!!"))
(deftest this-is-a-test
(prn "hello")
(is false))
#_(d)
#_(defonce after-load (fn [e] (prn :after (.. e -data))))
#_(.addEventListener js/document.body "figwheel.after-load" after-load)
#_(cljs.pprint/pprint (deref js/figwheel.core.state))
#_(defonce before-load (fn [e] (prn :before (.. e -data))))
#_(.addEventListener js/document.body "figwheel.before-load" before-load)
#_(defonce after-css-load (fn [e] (prn :after-css-load (.. e -data))))
#_(.addEventListener js/document.body "figwheel.after-css-load" after-css-load)
(defn -main [& args]
(prn 35)
35)
#_(defn)
#_(d d d d d d d)
|
da0740f63c565840c3b968219428cbe7ea9cae7d1af062f080955d546a202f12 | nubank/selvage | flow_test.clj | (ns selvage.midje.flow-test
(:require [matcher-combinators.midje :refer [match]]
[matcher-combinators.matchers :as m]
[selvage.core :as core]
[selvage.midje.flow :as f :refer [*flow* *world* flow tabular-flow]]
[midje.emission.api :as emission.api]
[midje.emission.state :as emission.states]
[midje.repl :refer [last-fact-checked]]
[midje.sweet :refer :all]
[taoensso.timbre :as timbre])
(:import [clojure.lang Atom]))
(defn step1 [world] (assoc world :1 1))
(defn step2 [world] (assoc world :2 2))
(defn step3 [world] (assoc world :3 3))
(defn step4 [world] (assoc world :4 4))
(defn step5 [world] (assoc world :5 5))
(defn step6 [world] (assoc world :6 6))
(fact "flow passes the world through transition functions"
(flow) => true
(flow step1) => true
(provided (step1 {}) => {:1 1})
(flow step1 step2) => true
(provided (step1 {}) => {:1 1}
(step2 {:1 1}) => {:1 1 :2 2})
(flow "world goes through" step1 step2) => true
(provided (step1 {}) => {:1 1}
(step2 {:1 1}) => {:1 1 :2 2}))
(fact "flow has the CID used"
(flow (fact "test" (+ 1 1) => 2)) => true
(fact "flow meta contains the CID"
(meta (last-fact-checked)) => (match {:selvage true
:flow/cid #(re-find #"FLOW.*" %)})))
(fact "it exposes flow information"
(flow
(fact *flow* => (match {:name #(re-find #"selvage.midje.flow-test\:\d+" %)
:title nil}))) => true
(flow "title"
(fact *flow* => (match {:name #(re-find #"selvage.midje.flow-test\:\d+" %)
:title "title"}))) => true)
(fact "embedding tests"
(flow (fact 1 => 1)) => truthy)
(fact "flow interleaves world-transition functions and facts"
(flow (fact 1 => 1) step1) => truthy
(flow step1
(fact *world* => {:1 1})) => truthy
(flow step1
(fact *world* => {:1 1})
step2) => true
(provided (step1 {}) => {:1 1}
(step2 {:1 1}) => {:1 1 :2 2})
(flow step1
step2
(fact *world* => (match (m/equals {:1 1 :2 2})))
step3
step4
(fact *world* => (match (m/equals {:1 1 :2 2 :3 3 :4 4})))
step5
step6) => true
(provided (step1 {}) => {:1 1}
(step2 {:1 1}) => {:1 1 :2 2}
(step3 {:1 1 :2 2}) => {:1 1 :2 2 :3 3}
(step4 {:1 1 :2 2 :3 3}) => {:1 1 :2 2 :3 3 :4 4}
(step5 {:1 1 :2 2 :3 3 :4 4}) => {:1 1 :2 2 :3 3 :4 4 :5 5}
(step6 {:1 1 :2 2 :3 3 :4 4 :5 5}) => {:1 1 :2 2 :3 3 :4 4 :5 5 :6 6}))
(facts "handles non-homoiconic data"
(flow
#(assoc % :atom (atom 1))
(fact *world* => (match {:atom #(instance? Atom %)}))
#(assoc % :function (constantly 42))
(fact *world* => (match {:function fn?}))
#(assoc % :byte-array (byte-array 1))
(fact *world* => (match {:byte-array anything})))
=> truthy)
(fact "flow fails when a step throws an exception"
(emission.api/silently
(timbre/with-level :fatal
(flow step1
(fn [_] (throw (ex-info "Some exception" {:a "a"})))
step2)))
=> falsey
(provided
(step1 anything) => {}
(step2 anything) => irrelevant :times 0))
(fact "flow should fail if 'transition' step doesn't return a valid world"
(emission.api/silently
(flow step1
(fn [_] :not-a-valid-world)
step2))
=> falsey
(provided
(step1 anything) => {}
(step2 anything) => irrelevant :times 0))
(fact "flow accepts a string as the first form"
(flow "2 + 2 = 4" (fact (+ 2 2) => 4)) => truthy)
(defmacro world-fn [& body]
`(fn [world#] (do ~@body) world#))
(binding [f/*probe-timeout* 10]
(emission.api/silently
(def fact-when-step-succeeds
(fact "this will succeed"
(flow step1 (fact "passes" 1 => 1) step2) => truthy))
(def fact-when-step-fails
(fact "this will fail because a check fails"
(flow step1 (fact "fails" 1 => 2) step2) => truthy))
(def last-called (atom 0))
(def stops-at-failure
(fact "flow doesn't execute steps post failure"
(flow (world-fn (reset! last-called 1))
(fact "nope" 1 => 2)
(world-fn (reset! last-called 2))) => truthy))
(def step-throwing-exception-is-a-failure
(fact "step throwing exception is also a test failure"
(timbre/with-level :fatal
(flow (fn [_] (throw (ex-info "expected exception" {:a "a"})))))
=> truthy))))
(facts "checking for success and failure"
fact-when-step-succeeds => truthy
fact-when-step-fails => falsey
@last-called => 1
step-throwing-exception-is-a-failure => falsey)
(facts "checks are retried"
(let [counter (atom -1)]
(def fails-first-run-then-succeeds
(fact "this will succeed by retrying the fact (which increments the atom until it's pos?)"
(flow (fact (swap! counter inc) => pos?)) => truthy))
(facts "every check is retried until it passes"
fails-first-run-then-succeeds => truthy)))
(def defnq-counts (atom {:step-1 0 :step-2 0 :step-3 0}))
(f/defnq query-step-1 [w]
(swap! defnq-counts update-in [:step-1] inc))
(f/defnq query-step-3 [w]
(swap! defnq-counts update-in [:step-3] inc))
(f/defnq factory-for-queries [key]
(fn [world]
(let [calls (swap! (:calls world) inc)]
(if (> calls 2)
(assoc world key ::finally-ok)
world))))
(f/defnq query-taking-args [key world]
(let [calls (swap! (:calls world) inc)]
(if (> calls 2)
(assoc world key ::finally-ok)
world)))
(facts
(let [query-count (atom 0)]
(fact "query steps preceeding checks are also retried"
(def succeeds-on-third-step-execution
(fact
(flow (f/fnq [w]
{:x (swap! query-count inc)})
(fact *world* => (match {:x 3}))) => truthy))))
(let [counts (atom {:step-1 0 :step-2 0 :step-3 0})]
(fact "retries several query steps preceeding a check until it passes"
(def preceeding-queries-succeed-on-third-step-execution
(fact
(flow (f/fnq [w]
(swap! counts update-in [:step-1] inc))
(f/fnq [w]
(swap! counts update-in [:step-2] inc))
(f/fnq [w]
(swap! counts update-in [:step-3] inc))
(fact *world* => (match {:step-1 3 :step-2 3 :step-3 3}))) => truthy))))
(fact "retries several query steps preceeding a check until it passes"
(let [counts (atom {:non-query-step 0 :step-2 0 :step-3 0})]
(fact "positive test"
(def non-query-steps-are-not-retried-positive
(fact
(flow (fn [w]
(swap! counts update-in [:non-query-step] inc))
(f/fnq [w]
(swap! counts update-in [:step-2] inc))
(f/fnq [w]
(swap! counts update-in [:step-3] inc))
(fact *world* => (match {:step-2 3 :step-3 3}))) => truthy))))
(let [counts (atom {:non-query-step 0 :step-2 0 :step-3 0})]
(binding [f/*probe-timeout* 30 f/*probe-sleep-period* 1]
(emission.api/silently
(fact "negative test"
(def non-query-steps-are-not-retried-negative
(fact
(flow (fn [w]
(swap! counts update-in [:non-query-step] inc))
(f/fnq [w]
(swap! counts update-in [:step-2] inc))
(f/fnq [w]
(swap! counts update-in [:step-3] inc))
(fact *world* => (match {:non-query-step 3}))) => truthy)))))))
(fact "only query steps immediately preceeding a check are retried"
(let [counts (atom {:not-immediately-preceeding 0 :step-2 0 :step-3 0})]
(fact "positive test"
(def only-immediately-preceeding-query-steps-are-retried-positive
(fact
(flow (fn [w]
(swap! counts update-in [:not-immediately-preceeding] inc))
(f/fnq [w]
(swap! counts update-in [:step-2] inc))
(f/fnq [w]
(swap! counts update-in [:step-3] inc))
(fact *world* => (match {:step-2 3 :step-3 3}))) => truthy))))
(let [counts (atom {:not-immediately-preceeding 0 :step-2 0 :step-3 0})]
(binding [f/*probe-timeout* 10 f/*probe-sleep-period* 1]
(emission.api/silently
(fact "negative test, inserting a regular - perhaps imperative - step in-between query steps"
(def only-immediately-preceeding-query-steps-are-retried-negative
(fact
(flow (f/fnq [w]
(swap! counts update-in [:not-immediately-preceeding] inc))
step1
(f/fnq [w]
(swap! counts update-in [:step-2] inc))
(f/fnq [w]
(swap! counts update-in [:step-3] inc))
(fact *world* => (match {:not-immediately-preceeding 3}))) => truthy)))))))
(fact "retries query steps marked via f/defnq"
(def retries-with-defnq
(fact
(flow query-step-1
(f/fnq [w]
(swap! defnq-counts update-in [:step-2] inc))
query-step-3
(facts
(:step-1 *world*) => #(> % 3)
(:step-2 *world*) => #(> % 3)
(:step-3 *world*) => #(> % 3))) => truthy)))
(fact "retries queries returned by factory functions"
(def retries-factory-queries
(fact
(flow
(fn [_] {:calls (atom 0)})
(factory-for-queries :foo)
(fact "fnq was retried 2 times until this test passed"
*world* => (match {:foo ::finally-ok}))) => truthy)))
(fact "retries steps built by partially applying query functions"
(def retries-partially-applied-queries
(fact
(flow
(fn [_] {:calls (atom 0)})
(partial query-taking-args :foo)
(fact "fnq was retried 2 times until this test passed"
*world* => (match {:foo ::finally-ok}))) => truthy)))
(fact "retries steps built by composing a query function with other functions"
(def retries-comp-queries
(fact
(flow
(fn [_] {:calls (atom 0)})
(comp (partial query-taking-args :foo) identity)
(fact "fnq was retried 2 times until this test passed"
*world* => (match {:foo ::finally-ok}))) => truthy)))
(facts "checks and query steps are retried"
succeeds-on-third-step-execution => truthy
preceeding-queries-succeed-on-third-step-execution => truthy
non-query-steps-are-not-retried-positive => truthy
non-query-steps-are-not-retried-negative => falsey
only-immediately-preceeding-query-steps-are-retried-positive => truthy
only-immediately-preceeding-query-steps-are-retried-negative => falsey
retries-with-defnq => truthy
retries-factory-queries => truthy
retries-partially-applied-queries => truthy
retries-comp-queries => truthy))
(binding [f/*probe-timeout* 10
f/*probe-sleep-period* 1]
(facts "on the impact on a test run:"
(fact "when a test passes, midje records no failures"
(emission.api/silently
(flow (fact true => truthy)) => truthy
(emission.states/output-counters))
=> (match {:midje-failures 0
:midje-passes 1}))
(fact "when a probe times out and fails, midje records that failure"
(emission.api/silently
(flow (fact false => truthy)) => falsey
(emission.states/output-counters))
=> (match {:midje-failures 1}))
(def counter2 (atom -2))
(fact "when a test passes after a few tries, midje still records no failures"
(emission.api/silently
(flow (fact (swap! counter2 inc) => pos?)) => truthy
(emission.states/output-counters))
=> (match {:midje-failures 0}))))
(facts "it logs ns and line number on flow"
(fact "when a test description is given"
(flow "test flow log" (fact 1 => 1)) => irrelevant
(provided
(core/emit-debug-ln #"Running flow: selvage.midje.flow-test:\d+ test flow log" anything) => irrelevant
(core/emit-debug-ln anything anything) => irrelevant :times 3))
(fact "when no test description is given"
(flow (fact 1 => 1)) => irrelevant
(provided
(core/emit-debug-ln #"Running flow: selvage.midje.flow-test:\d+" anything) => irrelevant
(core/emit-debug-ln anything anything) => irrelevant :times 3)))
(fact "wrap flow forms inside fact with metadata"
(macroexpand-1 '(flow "rataria" (fact 1 => 1)))
=> (match
(list 'schema.core/with-fn-validation
(m/embeds
(list 'midje.sweet/facts
:selvage
#(re-find #"selvage.midje.flow-test:[0-9]+ rataria" %))))))
(facts "Tabular works as expected"
(emission.api/silently
(tabular-flow
(flow "Simple check"
(fact ?a => ?b))
?a ?b
1 1
2 2
2 1)
;; All checks are doubled, because we need to wrap the flow in a fact.
(emission.states/output-counters)) => {:midje-failures 2, :midje-passes 4})
(facts "future-fact"
(let [future-check (atom [])]
(fact "core flow with future-fact"
(flow
(fact "First valid assertion"
(swap! future-check conj :first) => [:first])
(future-fact "Second ignored assertion"
(swap! future-check conj :second) => [:second])
(fact "Third valid assertion"
(swap! future-check conj :third) => [:first :third])) => true)
(fact "check future-fact pass through"
@future-check => [:first :third])))
(fact "binding *verbose* works"
(with-out-str
(binding [f/*verbose* false]
(flow "any name" (fact 1 => 1))))
=> empty?
(with-out-str
(binding [f/*verbose* true]
(flow "any name" (fact 1 => 1))))
=> (complement empty?))
| null | https://raw.githubusercontent.com/nubank/selvage/3b2da9c95e82c7f2cbacb6cae3ed3af131d8a5e4/test/selvage/midje/flow_test.clj | clojure | All checks are doubled, because we need to wrap the flow in a fact. | (ns selvage.midje.flow-test
(:require [matcher-combinators.midje :refer [match]]
[matcher-combinators.matchers :as m]
[selvage.core :as core]
[selvage.midje.flow :as f :refer [*flow* *world* flow tabular-flow]]
[midje.emission.api :as emission.api]
[midje.emission.state :as emission.states]
[midje.repl :refer [last-fact-checked]]
[midje.sweet :refer :all]
[taoensso.timbre :as timbre])
(:import [clojure.lang Atom]))
(defn step1 [world] (assoc world :1 1))
(defn step2 [world] (assoc world :2 2))
(defn step3 [world] (assoc world :3 3))
(defn step4 [world] (assoc world :4 4))
(defn step5 [world] (assoc world :5 5))
(defn step6 [world] (assoc world :6 6))
(fact "flow passes the world through transition functions"
(flow) => true
(flow step1) => true
(provided (step1 {}) => {:1 1})
(flow step1 step2) => true
(provided (step1 {}) => {:1 1}
(step2 {:1 1}) => {:1 1 :2 2})
(flow "world goes through" step1 step2) => true
(provided (step1 {}) => {:1 1}
(step2 {:1 1}) => {:1 1 :2 2}))
(fact "flow has the CID used"
(flow (fact "test" (+ 1 1) => 2)) => true
(fact "flow meta contains the CID"
(meta (last-fact-checked)) => (match {:selvage true
:flow/cid #(re-find #"FLOW.*" %)})))
(fact "it exposes flow information"
(flow
(fact *flow* => (match {:name #(re-find #"selvage.midje.flow-test\:\d+" %)
:title nil}))) => true
(flow "title"
(fact *flow* => (match {:name #(re-find #"selvage.midje.flow-test\:\d+" %)
:title "title"}))) => true)
(fact "embedding tests"
(flow (fact 1 => 1)) => truthy)
(fact "flow interleaves world-transition functions and facts"
(flow (fact 1 => 1) step1) => truthy
(flow step1
(fact *world* => {:1 1})) => truthy
(flow step1
(fact *world* => {:1 1})
step2) => true
(provided (step1 {}) => {:1 1}
(step2 {:1 1}) => {:1 1 :2 2})
(flow step1
step2
(fact *world* => (match (m/equals {:1 1 :2 2})))
step3
step4
(fact *world* => (match (m/equals {:1 1 :2 2 :3 3 :4 4})))
step5
step6) => true
(provided (step1 {}) => {:1 1}
(step2 {:1 1}) => {:1 1 :2 2}
(step3 {:1 1 :2 2}) => {:1 1 :2 2 :3 3}
(step4 {:1 1 :2 2 :3 3}) => {:1 1 :2 2 :3 3 :4 4}
(step5 {:1 1 :2 2 :3 3 :4 4}) => {:1 1 :2 2 :3 3 :4 4 :5 5}
(step6 {:1 1 :2 2 :3 3 :4 4 :5 5}) => {:1 1 :2 2 :3 3 :4 4 :5 5 :6 6}))
(facts "handles non-homoiconic data"
(flow
#(assoc % :atom (atom 1))
(fact *world* => (match {:atom #(instance? Atom %)}))
#(assoc % :function (constantly 42))
(fact *world* => (match {:function fn?}))
#(assoc % :byte-array (byte-array 1))
(fact *world* => (match {:byte-array anything})))
=> truthy)
(fact "flow fails when a step throws an exception"
(emission.api/silently
(timbre/with-level :fatal
(flow step1
(fn [_] (throw (ex-info "Some exception" {:a "a"})))
step2)))
=> falsey
(provided
(step1 anything) => {}
(step2 anything) => irrelevant :times 0))
(fact "flow should fail if 'transition' step doesn't return a valid world"
(emission.api/silently
(flow step1
(fn [_] :not-a-valid-world)
step2))
=> falsey
(provided
(step1 anything) => {}
(step2 anything) => irrelevant :times 0))
(fact "flow accepts a string as the first form"
(flow "2 + 2 = 4" (fact (+ 2 2) => 4)) => truthy)
(defmacro world-fn [& body]
`(fn [world#] (do ~@body) world#))
(binding [f/*probe-timeout* 10]
(emission.api/silently
(def fact-when-step-succeeds
(fact "this will succeed"
(flow step1 (fact "passes" 1 => 1) step2) => truthy))
(def fact-when-step-fails
(fact "this will fail because a check fails"
(flow step1 (fact "fails" 1 => 2) step2) => truthy))
(def last-called (atom 0))
(def stops-at-failure
(fact "flow doesn't execute steps post failure"
(flow (world-fn (reset! last-called 1))
(fact "nope" 1 => 2)
(world-fn (reset! last-called 2))) => truthy))
(def step-throwing-exception-is-a-failure
(fact "step throwing exception is also a test failure"
(timbre/with-level :fatal
(flow (fn [_] (throw (ex-info "expected exception" {:a "a"})))))
=> truthy))))
(facts "checking for success and failure"
fact-when-step-succeeds => truthy
fact-when-step-fails => falsey
@last-called => 1
step-throwing-exception-is-a-failure => falsey)
(facts "checks are retried"
(let [counter (atom -1)]
(def fails-first-run-then-succeeds
(fact "this will succeed by retrying the fact (which increments the atom until it's pos?)"
(flow (fact (swap! counter inc) => pos?)) => truthy))
(facts "every check is retried until it passes"
fails-first-run-then-succeeds => truthy)))
(def defnq-counts (atom {:step-1 0 :step-2 0 :step-3 0}))
(f/defnq query-step-1 [w]
(swap! defnq-counts update-in [:step-1] inc))
(f/defnq query-step-3 [w]
(swap! defnq-counts update-in [:step-3] inc))
(f/defnq factory-for-queries [key]
(fn [world]
(let [calls (swap! (:calls world) inc)]
(if (> calls 2)
(assoc world key ::finally-ok)
world))))
(f/defnq query-taking-args [key world]
(let [calls (swap! (:calls world) inc)]
(if (> calls 2)
(assoc world key ::finally-ok)
world)))
(facts
(let [query-count (atom 0)]
(fact "query steps preceeding checks are also retried"
(def succeeds-on-third-step-execution
(fact
(flow (f/fnq [w]
{:x (swap! query-count inc)})
(fact *world* => (match {:x 3}))) => truthy))))
(let [counts (atom {:step-1 0 :step-2 0 :step-3 0})]
(fact "retries several query steps preceeding a check until it passes"
(def preceeding-queries-succeed-on-third-step-execution
(fact
(flow (f/fnq [w]
(swap! counts update-in [:step-1] inc))
(f/fnq [w]
(swap! counts update-in [:step-2] inc))
(f/fnq [w]
(swap! counts update-in [:step-3] inc))
(fact *world* => (match {:step-1 3 :step-2 3 :step-3 3}))) => truthy))))
(fact "retries several query steps preceeding a check until it passes"
(let [counts (atom {:non-query-step 0 :step-2 0 :step-3 0})]
(fact "positive test"
(def non-query-steps-are-not-retried-positive
(fact
(flow (fn [w]
(swap! counts update-in [:non-query-step] inc))
(f/fnq [w]
(swap! counts update-in [:step-2] inc))
(f/fnq [w]
(swap! counts update-in [:step-3] inc))
(fact *world* => (match {:step-2 3 :step-3 3}))) => truthy))))
(let [counts (atom {:non-query-step 0 :step-2 0 :step-3 0})]
(binding [f/*probe-timeout* 30 f/*probe-sleep-period* 1]
(emission.api/silently
(fact "negative test"
(def non-query-steps-are-not-retried-negative
(fact
(flow (fn [w]
(swap! counts update-in [:non-query-step] inc))
(f/fnq [w]
(swap! counts update-in [:step-2] inc))
(f/fnq [w]
(swap! counts update-in [:step-3] inc))
(fact *world* => (match {:non-query-step 3}))) => truthy)))))))
(fact "only query steps immediately preceeding a check are retried"
(let [counts (atom {:not-immediately-preceeding 0 :step-2 0 :step-3 0})]
(fact "positive test"
(def only-immediately-preceeding-query-steps-are-retried-positive
(fact
(flow (fn [w]
(swap! counts update-in [:not-immediately-preceeding] inc))
(f/fnq [w]
(swap! counts update-in [:step-2] inc))
(f/fnq [w]
(swap! counts update-in [:step-3] inc))
(fact *world* => (match {:step-2 3 :step-3 3}))) => truthy))))
(let [counts (atom {:not-immediately-preceeding 0 :step-2 0 :step-3 0})]
(binding [f/*probe-timeout* 10 f/*probe-sleep-period* 1]
(emission.api/silently
(fact "negative test, inserting a regular - perhaps imperative - step in-between query steps"
(def only-immediately-preceeding-query-steps-are-retried-negative
(fact
(flow (f/fnq [w]
(swap! counts update-in [:not-immediately-preceeding] inc))
step1
(f/fnq [w]
(swap! counts update-in [:step-2] inc))
(f/fnq [w]
(swap! counts update-in [:step-3] inc))
(fact *world* => (match {:not-immediately-preceeding 3}))) => truthy)))))))
(fact "retries query steps marked via f/defnq"
(def retries-with-defnq
(fact
(flow query-step-1
(f/fnq [w]
(swap! defnq-counts update-in [:step-2] inc))
query-step-3
(facts
(:step-1 *world*) => #(> % 3)
(:step-2 *world*) => #(> % 3)
(:step-3 *world*) => #(> % 3))) => truthy)))
(fact "retries queries returned by factory functions"
(def retries-factory-queries
(fact
(flow
(fn [_] {:calls (atom 0)})
(factory-for-queries :foo)
(fact "fnq was retried 2 times until this test passed"
*world* => (match {:foo ::finally-ok}))) => truthy)))
(fact "retries steps built by partially applying query functions"
(def retries-partially-applied-queries
(fact
(flow
(fn [_] {:calls (atom 0)})
(partial query-taking-args :foo)
(fact "fnq was retried 2 times until this test passed"
*world* => (match {:foo ::finally-ok}))) => truthy)))
(fact "retries steps built by composing a query function with other functions"
(def retries-comp-queries
(fact
(flow
(fn [_] {:calls (atom 0)})
(comp (partial query-taking-args :foo) identity)
(fact "fnq was retried 2 times until this test passed"
*world* => (match {:foo ::finally-ok}))) => truthy)))
(facts "checks and query steps are retried"
succeeds-on-third-step-execution => truthy
preceeding-queries-succeed-on-third-step-execution => truthy
non-query-steps-are-not-retried-positive => truthy
non-query-steps-are-not-retried-negative => falsey
only-immediately-preceeding-query-steps-are-retried-positive => truthy
only-immediately-preceeding-query-steps-are-retried-negative => falsey
retries-with-defnq => truthy
retries-factory-queries => truthy
retries-partially-applied-queries => truthy
retries-comp-queries => truthy))
(binding [f/*probe-timeout* 10
f/*probe-sleep-period* 1]
(facts "on the impact on a test run:"
(fact "when a test passes, midje records no failures"
(emission.api/silently
(flow (fact true => truthy)) => truthy
(emission.states/output-counters))
=> (match {:midje-failures 0
:midje-passes 1}))
(fact "when a probe times out and fails, midje records that failure"
(emission.api/silently
(flow (fact false => truthy)) => falsey
(emission.states/output-counters))
=> (match {:midje-failures 1}))
(def counter2 (atom -2))
(fact "when a test passes after a few tries, midje still records no failures"
(emission.api/silently
(flow (fact (swap! counter2 inc) => pos?)) => truthy
(emission.states/output-counters))
=> (match {:midje-failures 0}))))
(facts "it logs ns and line number on flow"
(fact "when a test description is given"
(flow "test flow log" (fact 1 => 1)) => irrelevant
(provided
(core/emit-debug-ln #"Running flow: selvage.midje.flow-test:\d+ test flow log" anything) => irrelevant
(core/emit-debug-ln anything anything) => irrelevant :times 3))
(fact "when no test description is given"
(flow (fact 1 => 1)) => irrelevant
(provided
(core/emit-debug-ln #"Running flow: selvage.midje.flow-test:\d+" anything) => irrelevant
(core/emit-debug-ln anything anything) => irrelevant :times 3)))
(fact "wrap flow forms inside fact with metadata"
(macroexpand-1 '(flow "rataria" (fact 1 => 1)))
=> (match
(list 'schema.core/with-fn-validation
(m/embeds
(list 'midje.sweet/facts
:selvage
#(re-find #"selvage.midje.flow-test:[0-9]+ rataria" %))))))
(facts "Tabular works as expected"
(emission.api/silently
(tabular-flow
(flow "Simple check"
(fact ?a => ?b))
?a ?b
1 1
2 2
2 1)
(emission.states/output-counters)) => {:midje-failures 2, :midje-passes 4})
(facts "future-fact"
(let [future-check (atom [])]
(fact "core flow with future-fact"
(flow
(fact "First valid assertion"
(swap! future-check conj :first) => [:first])
(future-fact "Second ignored assertion"
(swap! future-check conj :second) => [:second])
(fact "Third valid assertion"
(swap! future-check conj :third) => [:first :third])) => true)
(fact "check future-fact pass through"
@future-check => [:first :third])))
(fact "binding *verbose* works"
(with-out-str
(binding [f/*verbose* false]
(flow "any name" (fact 1 => 1))))
=> empty?
(with-out-str
(binding [f/*verbose* true]
(flow "any name" (fact 1 => 1))))
=> (complement empty?))
|
8ca30ba5e9262cb6682168c205095dee32f7c11543dfe8ca617f886bc802ec64 | tonyrog/bt | bt_send_wav.erl | @author < >
( C ) 2021 ,
%%% @doc
Send wav file to bluetooth headset
%%% @end
Created : 20 Dec 2021 by < >
-module(bt_send_wav).
-export([file/2]).
-export([test_jbl/0, test_jabra/0]).
-define(PAYLOAD_TYPE , 96 ) . % % SBC payload ? what number 96 ? ? ? ?
%% -define(SOURCE_NUM, 1).
-include("../include/avdtp.hrl").
test_jbl() ->
Sounds = filename:join(code:lib_dir(alsa), "sounds"),
File = filename:join(Sounds, "Front_Left.wav"),
bt_send_wav:file("FC:A8:9A:A9:10:30", File).
test_jabra() ->
Sounds = filename:join(code:lib_dir(alsa), "sounds"),
File = filename:join(Sounds, "Front_Left.wav"),
bt_send_wav:file("50:C2:ED:5A:06:ED", File).
file(Address, WavFile) ->
case file:open(WavFile, [read, raw, binary]) of
{ok,Fd} ->
case alsa_wav:read_header(Fd) of
{ok, Wav} ->
io:format("wav header = ~p\n", [Wav]),
{ok,PortList} = bt_sdp:protocol_port(Address, "AudioSink"),
{value,{_,L2Port}} = lists:keysearch("L2CAP", 1, PortList),
{value,{_,AVDTP}} = lists:keysearch("AVDTP", 1, PortList),
io:format("L2Port=~w, AVDTP=~w\n", [L2Port,AVDTP]),
{ok, S} = l2cap:connect(Address, L2Port),
%% Get protocol info
Trans = 13,
{ok,[Info|_]} = bt_avdtp:discover(S, Trans),
ACP = Info#seid_info.seid,
{ok,Caps} = bt_avdtp:get_capabilities(S, Trans, ACP),
io:format("Caps = ~w\n", [Caps]),
{ok,_Open} = bt_avdtp:open(S,Trans,ACP),
{ok,_Start} = bt_avdtp:start(S,Trans,ACP),
send_(Fd, Wav, S, Info),
file:close(Fd),
l2cap:close(S),
ok;
Error ->
file:close(Fd),
Error
end;
Error ->
Error
end.
send_(Fd, Wav, L2CAP, Info) ->
Rate = maps:get(rate, Wav),
Channels = maps:get(channels, Wav),
s16_le = maps:get(format, Wav), %% assert!
A2DP_Conf = [{rate, Rate},
{channel_mode,
case Channels of
2 -> stereo;
1 -> mono
end}],
io:format("a2dp conf = ~w\n", [A2DP_Conf]),
%% {ok,Enc} = alsa_sbc:new(msbc),
{ok,Enc} = alsa_sbc:new(a2dp, A2DP_Conf),
PayloadType = Info#seid_info.media_type,
SSRC = Info#seid_info.seid, %% ??
RtpSource = rtp:init_source(PayloadType, SSRC, []),
{ok, SBCFrameLength} = alsa_sbc:get_frame_length(Enc),
{ok, FrameDurationUS} = alsa_sbc:get_frame_duration(Enc),
{ok, CodeSize} = alsa_sbc:get_codesize(Enc),
{ok, Mtu} = bt_l2cap:get_mtu(L2CAP),
FramesPerSBCFrame = CodeSize div (Channels*2),
DurationPerSBCFrameUS = trunc((FramesPerSBCFrame*1000000)/Rate),
io:format("FrameLength ~w\n", [SBCFrameLength]),
io:format("FrameDuration ~wus\n", [FrameDurationUS]),
io:format("CodeSize ~w\n", [CodeSize]),
io:format("Mtu ~w\n", [Mtu]),
io:format("FramesPerSBCFrame=~w\n", [FramesPerSBCFrame]),
io:format("DurationPerSBCFrame=~wus\n", [DurationPerSBCFrameUS]),
send_(Fd, L2CAP, Enc, CodeSize, FramesPerSBCFrame,
DurationPerSBCFrameUS, RtpSource, <<>>).
send_(Fd, L2CAP, Enc, CodeSize, FramesPerSBCFrame,
DurationPerSBCFrameUS, RtpSource, Data0) ->
case file:read(Fd, CodeSize) of
{ok, Data1} ->
{ok,Fs,Data2} = alsa_sbc:encode(Enc, <<Data0/binary,Data1/binary>>),
FRAG = 2#00000000,
NSBCFrames = length(Fs),
NFrames = FramesPerSBCFrame*NSBCFrames,
SBCPacket = iolist_to_binary([(FRAG+NSBCFrames),
lists:reverse(Fs)]),
{L2CapPacket,RtpSource1} = rtp:next(RtpSource,NFrames,SBCPacket),
io:format("#frames = ~w, packet size = ~p\n",
[NFrames, byte_size(L2CapPacket)]),
l2cap:send(L2CAP, L2CapPacket),
timer:sleep(DurationPerSBCFrameUS div 1000),
send_(Fd,L2CAP,Enc,CodeSize,FramesPerSBCFrame,
DurationPerSBCFrameUS,RtpSource1,Data2);
eof ->
ok
end.
| null | https://raw.githubusercontent.com/tonyrog/bt/d7cbea62c114c74de38ce19137abd18f49d736fa/src/bt_send_wav.erl | erlang | @doc
@end
% SBC payload ? what number 96 ? ? ? ?
-define(SOURCE_NUM, 1).
Get protocol info
assert!
{ok,Enc} = alsa_sbc:new(msbc),
?? | @author < >
( C ) 2021 ,
Send wav file to bluetooth headset
Created : 20 Dec 2021 by < >
-module(bt_send_wav).
-export([file/2]).
-export([test_jbl/0, test_jabra/0]).
-include("../include/avdtp.hrl").
test_jbl() ->
Sounds = filename:join(code:lib_dir(alsa), "sounds"),
File = filename:join(Sounds, "Front_Left.wav"),
bt_send_wav:file("FC:A8:9A:A9:10:30", File).
test_jabra() ->
Sounds = filename:join(code:lib_dir(alsa), "sounds"),
File = filename:join(Sounds, "Front_Left.wav"),
bt_send_wav:file("50:C2:ED:5A:06:ED", File).
file(Address, WavFile) ->
case file:open(WavFile, [read, raw, binary]) of
{ok,Fd} ->
case alsa_wav:read_header(Fd) of
{ok, Wav} ->
io:format("wav header = ~p\n", [Wav]),
{ok,PortList} = bt_sdp:protocol_port(Address, "AudioSink"),
{value,{_,L2Port}} = lists:keysearch("L2CAP", 1, PortList),
{value,{_,AVDTP}} = lists:keysearch("AVDTP", 1, PortList),
io:format("L2Port=~w, AVDTP=~w\n", [L2Port,AVDTP]),
{ok, S} = l2cap:connect(Address, L2Port),
Trans = 13,
{ok,[Info|_]} = bt_avdtp:discover(S, Trans),
ACP = Info#seid_info.seid,
{ok,Caps} = bt_avdtp:get_capabilities(S, Trans, ACP),
io:format("Caps = ~w\n", [Caps]),
{ok,_Open} = bt_avdtp:open(S,Trans,ACP),
{ok,_Start} = bt_avdtp:start(S,Trans,ACP),
send_(Fd, Wav, S, Info),
file:close(Fd),
l2cap:close(S),
ok;
Error ->
file:close(Fd),
Error
end;
Error ->
Error
end.
send_(Fd, Wav, L2CAP, Info) ->
Rate = maps:get(rate, Wav),
Channels = maps:get(channels, Wav),
A2DP_Conf = [{rate, Rate},
{channel_mode,
case Channels of
2 -> stereo;
1 -> mono
end}],
io:format("a2dp conf = ~w\n", [A2DP_Conf]),
{ok,Enc} = alsa_sbc:new(a2dp, A2DP_Conf),
PayloadType = Info#seid_info.media_type,
RtpSource = rtp:init_source(PayloadType, SSRC, []),
{ok, SBCFrameLength} = alsa_sbc:get_frame_length(Enc),
{ok, FrameDurationUS} = alsa_sbc:get_frame_duration(Enc),
{ok, CodeSize} = alsa_sbc:get_codesize(Enc),
{ok, Mtu} = bt_l2cap:get_mtu(L2CAP),
FramesPerSBCFrame = CodeSize div (Channels*2),
DurationPerSBCFrameUS = trunc((FramesPerSBCFrame*1000000)/Rate),
io:format("FrameLength ~w\n", [SBCFrameLength]),
io:format("FrameDuration ~wus\n", [FrameDurationUS]),
io:format("CodeSize ~w\n", [CodeSize]),
io:format("Mtu ~w\n", [Mtu]),
io:format("FramesPerSBCFrame=~w\n", [FramesPerSBCFrame]),
io:format("DurationPerSBCFrame=~wus\n", [DurationPerSBCFrameUS]),
send_(Fd, L2CAP, Enc, CodeSize, FramesPerSBCFrame,
DurationPerSBCFrameUS, RtpSource, <<>>).
send_(Fd, L2CAP, Enc, CodeSize, FramesPerSBCFrame,
DurationPerSBCFrameUS, RtpSource, Data0) ->
case file:read(Fd, CodeSize) of
{ok, Data1} ->
{ok,Fs,Data2} = alsa_sbc:encode(Enc, <<Data0/binary,Data1/binary>>),
FRAG = 2#00000000,
NSBCFrames = length(Fs),
NFrames = FramesPerSBCFrame*NSBCFrames,
SBCPacket = iolist_to_binary([(FRAG+NSBCFrames),
lists:reverse(Fs)]),
{L2CapPacket,RtpSource1} = rtp:next(RtpSource,NFrames,SBCPacket),
io:format("#frames = ~w, packet size = ~p\n",
[NFrames, byte_size(L2CapPacket)]),
l2cap:send(L2CAP, L2CapPacket),
timer:sleep(DurationPerSBCFrameUS div 1000),
send_(Fd,L2CAP,Enc,CodeSize,FramesPerSBCFrame,
DurationPerSBCFrameUS,RtpSource1,Data2);
eof ->
ok
end.
|
d689936b9ded3b56dec1413056393b0a4673d38412410a869e74d2ad5e46999f | aws-beam/aws-erlang | aws_codecommit.erl | %% WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
See -beam/aws-codegen for more details .
%% @doc AWS CodeCommit
%%
%% This is the AWS CodeCommit API Reference.
%%
%% This reference provides descriptions of the operations and data types for
%% AWS CodeCommit API along with usage examples.
%%
%% You can use the AWS CodeCommit API to work with the following objects:
%%
%% Repositories, by calling the following:
%%
< ul > < li > ` BatchGetRepositories ' , which returns information about one
%% or more repositories associated with your AWS account.
%%
< /li > < li > ` CreateRepository ' , which creates an AWS CodeCommit
%% repository.
%%
< /li > < li > ` DeleteRepository ' , which deletes an AWS CodeCommit
%% repository.
%%
< /li > < li > ` GetRepository ' , which returns information about a
%% specified repository.
%%
< /li > < li > ` ListRepositories ' , which lists all AWS CodeCommit
%% repositories associated with your AWS account.
%%
< /li > < li > ` UpdateRepositoryDescription ' , which sets or updates the
%% description of the repository.
%%
%% </li> <li> `UpdateRepositoryName', which changes the name of the
%% repository. If you change the name of a repository, no other users of that
repository can access it until you send them the new HTTPS or SSH URL to
%% use.
%%
%% </li> </ul> Branches, by calling the following:
%%
%% <ul> <li> `CreateBranch', which creates a branch in a specified
%% repository.
%%
< /li > < li > ` DeleteBranch ' , which deletes the specified branch in a
%% repository unless it is the default branch.
%%
< /li > < li > ` GetBranch ' , which returns information about a specified
%% branch.
%%
< /li > < li > ` ListBranches ' , which lists all branches for a specified
%% repository.
%%
%% </li> <li> `UpdateDefaultBranch', which changes the default branch for
%% a repository.
%%
%% </li> </ul> Files, by calling the following:
%%
< ul > < li > ` DeleteFile ' , which deletes the content of a specified file
%% from a specified branch.
%%
< /li > < li > ` GetBlob ' , which returns the base-64 encoded content of an
%% individual Git blob object in a repository.
%%
< /li > < li > ` GetFile ' , which returns the base-64 encoded content of a
%% specified file.
%%
< /li > < li > ` GetFolder ' , which returns the contents of a specified
%% folder or directory.
%%
%% </li> <li> `PutFile', which adds or modifies a single file in a
%% specified repository and branch.
%%
%% </li> </ul> Commits, by calling the following:
%%
< ul > < li > ` BatchGetCommits ' , which returns information about one or
%% more commits in a repository.
%%
%% </li> <li> `CreateCommit', which creates a commit for changes to a
%% repository.
%%
%% </li> <li> `GetCommit', which returns information about a commit,
%% including commit messages and author and committer information.
%%
%% </li> <li> `GetDifferences', which returns information about the
differences in a valid commit specifier ( such as a branch , tag , HEAD ,
%% commit ID, or other fully qualified reference).
%%
%% </li> </ul> Merges, by calling the following:
%%
%% <ul> <li> `BatchDescribeMergeConflicts', which returns information
%% about conflicts in a merge between commits in a repository.
%%
%% </li> <li> `CreateUnreferencedMergeCommit', which creates an
unreferenced commit between two branches or commits for the purpose of
%% comparing them and identifying any potential conflicts.
%%
%% </li> <li> `DescribeMergeConflicts', which returns information about
%% merge conflicts between the base, source, and destination versions of a
%% file in a potential merge.
%%
%% </li> <li> `GetMergeCommit', which returns information about the merge
%% between a source and destination commit.
%%
%% </li> <li> `GetMergeConflicts', which returns information about merge
%% conflicts between the source and destination branch in a pull request.
%%
%% </li> <li> `GetMergeOptions', which returns information about the
available merge options between two branches or commit specifiers .
%%
< /li > < li > ` MergeBranchesByFastForward ' , which merges two branches
%% using the fast-forward merge option.
%%
< /li > < li > ` MergeBranchesBySquash ' , which merges two branches using
%% the squash merge option.
%%
< /li > < li > ` MergeBranchesByThreeWay ' , which merges two branches using
the three - way merge option .
%%
%% </li> </ul> Pull requests, by calling the following:
%%
%% <ul> <li> `CreatePullRequest', which creates a pull request in a
%% specified repository.
%%
%% </li> <li> `CreatePullRequestApprovalRule', which creates an approval
%% rule for a specified pull request.
%%
%% </li> <li> `DeletePullRequestApprovalRule', which deletes an approval
%% rule for a specified pull request.
%%
%% </li> <li> `DescribePullRequestEvents', which returns information
about one or more pull request events .
%%
< /li > < li > ` EvaluatePullRequestApprovalRules ' , which evaluates whether
%% a pull request has met all the conditions specified in its associated
%% approval rules.
%%
%% </li> <li> `GetCommentsForPullRequest', which returns information
%% about comments on a specified pull request.
%%
%% </li> <li> `GetPullRequest', which returns information about a
%% specified pull request.
%%
< /li > < li > ` GetPullRequestApprovalStates ' , which returns information
%% about the approval states for a specified pull request.
%%
%% </li> <li> `GetPullRequestOverrideState', which returns information
%% about whether approval rules have been set aside (overriden) for a pull
request , and if so , the Amazon Resource Name ( ARN ) of the user or identity
%% that overrode the rules and their requirements for the pull request.
%%
%% </li> <li> `ListPullRequests', which lists all pull requests for a
%% repository.
%%
%% </li> <li> `MergePullRequestByFastForward', which merges the source
%% destination branch of a pull request into the specified destination branch
%% for that pull request using the fast-forward merge option.
%%
%% </li> <li> `MergePullRequestBySquash', which merges the source
%% destination branch of a pull request into the specified destination branch
%% for that pull request using the squash merge option.
%%
< /li > < li > ` MergePullRequestByThreeWay ' . which merges the source
%% destination branch of a pull request into the specified destination branch
for that pull request using the three - way merge option .
%%
%% </li> <li> `OverridePullRequestApprovalRules', which sets aside all
%% approval rule requirements for a pull request.
%%
%% </li> <li> `PostCommentForPullRequest', which posts a comment to a
%% pull request at the specified line, file, or request.
%%
%% </li> <li> `UpdatePullRequestApprovalRuleContent', which updates the
%% structure of an approval rule for a pull request.
%%
< /li > < li > ` UpdatePullRequestApprovalState ' , which updates the state
%% of an approval on a pull request.
%%
%% </li> <li> `UpdatePullRequestDescription', which updates the
%% description of a pull request.
%%
< /li > < li > ` UpdatePullRequestStatus ' , which updates the status of a
%% pull request.
%%
< /li > < li > ` UpdatePullRequestTitle ' , which updates the title of a pull
%% request.
%%
%% </li> </ul> Approval rule templates, by calling the following:
%%
%% <ul> <li> `AssociateApprovalRuleTemplateWithRepository', which
%% associates a template with a specified repository. After the template is
%% associated with a repository, AWS CodeCommit creates approval rules that
%% match the template conditions on every pull request created in the
%% specified repository.
%%
%% </li> <li> `BatchAssociateApprovalRuleTemplateWithRepositories', which
associates a template with one or more specified repositories . After the
%% template is associated with a repository, AWS CodeCommit creates approval
%% rules that match the template conditions on every pull request created in
%% the specified repositories.
%%
%% </li> <li> `BatchDisassociateApprovalRuleTemplateFromRepositories',
%% which removes the association between a template and specified
%% repositories so that approval rules based on the template are not
%% automatically created when pull requests are created in those
%% repositories.
%%
%% </li> <li> `CreateApprovalRuleTemplate', which creates a template for
approval rules that can then be associated with one or more repositories
%% in your AWS account.
%%
%% </li> <li> `DeleteApprovalRuleTemplate', which deletes the specified
%% template. It does not remove approval rules on pull requests already
%% created with the template.
%%
%% </li> <li> `DisassociateApprovalRuleTemplateFromRepository', which
%% removes the association between a template and a repository so that
%% approval rules based on the template are not automatically created when
%% pull requests are created in the specified repository.
%%
%% </li> <li> `GetApprovalRuleTemplate', which returns information about
%% an approval rule template.
%%
%% </li> <li> `ListApprovalRuleTemplates', which lists all approval rule
templates in the AWS Region in your AWS account .
%%
%% </li> <li> `ListAssociatedApprovalRuleTemplatesForRepository', which
%% lists all approval rule templates that are associated with a specified
%% repository.
%%
%% </li> <li> `ListRepositoriesForApprovalRuleTemplate', which lists all
%% repositories associated with the specified approval rule template.
%%
%% </li> <li> `UpdateApprovalRuleTemplateDescription', which updates the
%% description of an approval rule template.
%%
%% </li> <li> `UpdateApprovalRuleTemplateName', which updates the name of
%% an approval rule template.
%%
%% </li> <li> `UpdateApprovalRuleTemplateContent', which updates the
%% content of an approval rule template.
%%
%% </li> </ul> Comments in a repository, by calling the following:
%%
< ul > < li > ` DeleteCommentContent ' , which deletes the content of a
%% comment on a commit in a repository.
%%
< /li > < li > ` GetComment ' , which returns information about a comment on
%% a commit.
%%
%% </li> <li> `GetCommentReactions', which returns information about
%% emoji reactions to comments.
%%
%% </li> <li> `GetCommentsForComparedCommit', which returns information
about comments on the comparison between two commit specifiers in a
%% repository.
%%
%% </li> <li> `PostCommentForComparedCommit', which creates a comment on
the comparison between two commit specifiers in a repository .
%%
%% </li> <li> `PostCommentReply', which creates a reply to a comment.
%%
%% </li> <li> `PutCommentReaction', which creates or updates an emoji
%% reaction to a comment.
%%
%% </li> <li> `UpdateComment', which updates the content of a comment on
%% a commit in a repository.
%%
%% </li> </ul> Tags used to tag resources in AWS CodeCommit (not Git tags),
%% by calling the following:
%%
%% <ul> <li> `ListTagsForResource', which gets information about AWS tags
for a specified Amazon Resource Name ( ARN ) in AWS CodeCommit .
%%
< /li > < li > ` TagResource ' , which adds or updates tags for a resource in
%% AWS CodeCommit.
%%
< /li > < li > ` UntagResource ' , which removes tags for a resource in AWS
CodeCommit .
%%
%% </li> </ul> Triggers, by calling the following:
%%
%% <ul> <li> `GetRepositoryTriggers', which returns information about
%% triggers configured for a repository.
%%
< /li > < li > ` PutRepositoryTriggers ' , which replaces all triggers for a
%% repository and can be used to create or delete triggers.
%%
%% </li> <li> `TestRepositoryTriggers', which tests the functionality of
%% a repository trigger by sending data to the trigger target.
%%
%% </li> </ul> For information about how to use AWS CodeCommit, see the AWS
%% CodeCommit User Guide.
-module(aws_codecommit).
-export([associate_approval_rule_template_with_repository/2,
associate_approval_rule_template_with_repository/3,
batch_associate_approval_rule_template_with_repositories/2,
batch_associate_approval_rule_template_with_repositories/3,
batch_describe_merge_conflicts/2,
batch_describe_merge_conflicts/3,
batch_disassociate_approval_rule_template_from_repositories/2,
batch_disassociate_approval_rule_template_from_repositories/3,
batch_get_commits/2,
batch_get_commits/3,
batch_get_repositories/2,
batch_get_repositories/3,
create_approval_rule_template/2,
create_approval_rule_template/3,
create_branch/2,
create_branch/3,
create_commit/2,
create_commit/3,
create_pull_request/2,
create_pull_request/3,
create_pull_request_approval_rule/2,
create_pull_request_approval_rule/3,
create_repository/2,
create_repository/3,
create_unreferenced_merge_commit/2,
create_unreferenced_merge_commit/3,
delete_approval_rule_template/2,
delete_approval_rule_template/3,
delete_branch/2,
delete_branch/3,
delete_comment_content/2,
delete_comment_content/3,
delete_file/2,
delete_file/3,
delete_pull_request_approval_rule/2,
delete_pull_request_approval_rule/3,
delete_repository/2,
delete_repository/3,
describe_merge_conflicts/2,
describe_merge_conflicts/3,
describe_pull_request_events/2,
describe_pull_request_events/3,
disassociate_approval_rule_template_from_repository/2,
disassociate_approval_rule_template_from_repository/3,
evaluate_pull_request_approval_rules/2,
evaluate_pull_request_approval_rules/3,
get_approval_rule_template/2,
get_approval_rule_template/3,
get_blob/2,
get_blob/3,
get_branch/2,
get_branch/3,
get_comment/2,
get_comment/3,
get_comment_reactions/2,
get_comment_reactions/3,
get_comments_for_compared_commit/2,
get_comments_for_compared_commit/3,
get_comments_for_pull_request/2,
get_comments_for_pull_request/3,
get_commit/2,
get_commit/3,
get_differences/2,
get_differences/3,
get_file/2,
get_file/3,
get_folder/2,
get_folder/3,
get_merge_commit/2,
get_merge_commit/3,
get_merge_conflicts/2,
get_merge_conflicts/3,
get_merge_options/2,
get_merge_options/3,
get_pull_request/2,
get_pull_request/3,
get_pull_request_approval_states/2,
get_pull_request_approval_states/3,
get_pull_request_override_state/2,
get_pull_request_override_state/3,
get_repository/2,
get_repository/3,
get_repository_triggers/2,
get_repository_triggers/3,
list_approval_rule_templates/2,
list_approval_rule_templates/3,
list_associated_approval_rule_templates_for_repository/2,
list_associated_approval_rule_templates_for_repository/3,
list_branches/2,
list_branches/3,
list_pull_requests/2,
list_pull_requests/3,
list_repositories/2,
list_repositories/3,
list_repositories_for_approval_rule_template/2,
list_repositories_for_approval_rule_template/3,
list_tags_for_resource/2,
list_tags_for_resource/3,
merge_branches_by_fast_forward/2,
merge_branches_by_fast_forward/3,
merge_branches_by_squash/2,
merge_branches_by_squash/3,
merge_branches_by_three_way/2,
merge_branches_by_three_way/3,
merge_pull_request_by_fast_forward/2,
merge_pull_request_by_fast_forward/3,
merge_pull_request_by_squash/2,
merge_pull_request_by_squash/3,
merge_pull_request_by_three_way/2,
merge_pull_request_by_three_way/3,
override_pull_request_approval_rules/2,
override_pull_request_approval_rules/3,
post_comment_for_compared_commit/2,
post_comment_for_compared_commit/3,
post_comment_for_pull_request/2,
post_comment_for_pull_request/3,
post_comment_reply/2,
post_comment_reply/3,
put_comment_reaction/2,
put_comment_reaction/3,
put_file/2,
put_file/3,
put_repository_triggers/2,
put_repository_triggers/3,
tag_resource/2,
tag_resource/3,
test_repository_triggers/2,
test_repository_triggers/3,
untag_resource/2,
untag_resource/3,
update_approval_rule_template_content/2,
update_approval_rule_template_content/3,
update_approval_rule_template_description/2,
update_approval_rule_template_description/3,
update_approval_rule_template_name/2,
update_approval_rule_template_name/3,
update_comment/2,
update_comment/3,
update_default_branch/2,
update_default_branch/3,
update_pull_request_approval_rule_content/2,
update_pull_request_approval_rule_content/3,
update_pull_request_approval_state/2,
update_pull_request_approval_state/3,
update_pull_request_description/2,
update_pull_request_description/3,
update_pull_request_status/2,
update_pull_request_status/3,
update_pull_request_title/2,
update_pull_request_title/3,
update_repository_description/2,
update_repository_description/3,
update_repository_name/2,
update_repository_name/3]).
-include_lib("hackney/include/hackney_lib.hrl").
%%====================================================================
%% API
%%====================================================================
%% @doc Creates an association between an approval rule template and a
%% specified repository.
%%
%% Then, the next time a pull request is created in the repository where the
%% destination reference (if specified) matches the destination reference
%% (branch) for the pull request, an approval rule that matches the template
%% conditions is automatically created for that pull request. If no
%% destination references are specified in the template, an approval rule
%% that matches the template contents is created for all pull requests in
%% that repository.
associate_approval_rule_template_with_repository(Client, Input)
when is_map(Client), is_map(Input) ->
associate_approval_rule_template_with_repository(Client, Input, []).
associate_approval_rule_template_with_repository(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"AssociateApprovalRuleTemplateWithRepository">>, Input, Options).
@doc Creates an association between an approval rule template and one or
%% more specified repositories.
batch_associate_approval_rule_template_with_repositories(Client, Input)
when is_map(Client), is_map(Input) ->
batch_associate_approval_rule_template_with_repositories(Client, Input, []).
batch_associate_approval_rule_template_with_repositories(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"BatchAssociateApprovalRuleTemplateWithRepositories">>, Input, Options).
@doc Returns information about one or more merge conflicts in the
attempted merge of two commit specifiers using the squash or three - way
%% merge strategy.
batch_describe_merge_conflicts(Client, Input)
when is_map(Client), is_map(Input) ->
batch_describe_merge_conflicts(Client, Input, []).
batch_describe_merge_conflicts(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"BatchDescribeMergeConflicts">>, Input, Options).
@doc Removes the association between an approval rule template and one or
%% more specified repositories.
batch_disassociate_approval_rule_template_from_repositories(Client, Input)
when is_map(Client), is_map(Input) ->
batch_disassociate_approval_rule_template_from_repositories(Client, Input, []).
batch_disassociate_approval_rule_template_from_repositories(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"BatchDisassociateApprovalRuleTemplateFromRepositories">>, Input, Options).
@doc Returns information about the contents of one or more commits in a
%% repository.
batch_get_commits(Client, Input)
when is_map(Client), is_map(Input) ->
batch_get_commits(Client, Input, []).
batch_get_commits(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"BatchGetCommits">>, Input, Options).
@doc Returns information about one or more repositories .
%%
%% The description field for a repository accepts all HTML characters and all
%% valid Unicode characters. Applications that do not HTML-encode the
%% description and display it in a webpage can expose users to potentially
%% malicious code. Make sure that you HTML-encode the description field in
%% any application that uses this API to display the repository description
%% on a webpage.
batch_get_repositories(Client, Input)
when is_map(Client), is_map(Input) ->
batch_get_repositories(Client, Input, []).
batch_get_repositories(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"BatchGetRepositories">>, Input, Options).
%% @doc Creates a template for approval rules that can then be associated
with one or more repositories in your AWS account .
%%
%% When you associate a template with a repository, AWS CodeCommit creates an
%% approval rule that matches the conditions of the template for all pull
%% requests that meet the conditions of the template. For more information,
%% see `AssociateApprovalRuleTemplateWithRepository'.
create_approval_rule_template(Client, Input)
when is_map(Client), is_map(Input) ->
create_approval_rule_template(Client, Input, []).
create_approval_rule_template(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateApprovalRuleTemplate">>, Input, Options).
%% @doc Creates a branch in a repository and points the branch to a commit.
%%
%% Calling the create branch operation does not set a repository's
%% default branch. To do this, call the update default branch operation.
create_branch(Client, Input)
when is_map(Client), is_map(Input) ->
create_branch(Client, Input, []).
create_branch(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateBranch">>, Input, Options).
%% @doc Creates a commit for a repository on the tip of a specified branch.
create_commit(Client, Input)
when is_map(Client), is_map(Input) ->
create_commit(Client, Input, []).
create_commit(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateCommit">>, Input, Options).
%% @doc Creates a pull request in the specified repository.
create_pull_request(Client, Input)
when is_map(Client), is_map(Input) ->
create_pull_request(Client, Input, []).
create_pull_request(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreatePullRequest">>, Input, Options).
%% @doc Creates an approval rule for a pull request.
create_pull_request_approval_rule(Client, Input)
when is_map(Client), is_map(Input) ->
create_pull_request_approval_rule(Client, Input, []).
create_pull_request_approval_rule(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreatePullRequestApprovalRule">>, Input, Options).
%% @doc Creates a new, empty repository.
create_repository(Client, Input)
when is_map(Client), is_map(Input) ->
create_repository(Client, Input, []).
create_repository(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateRepository">>, Input, Options).
%% @doc Creates an unreferenced commit that represents the result of merging
two branches using a specified merge strategy .
%%
%% This can help you determine the outcome of a potential merge. This API
%% cannot be used with the fast-forward merge strategy because that strategy
%% does not create a merge commit.
%%
This unreferenced merge commit can only be accessed using the GetCommit
%% API or through git commands such as git fetch. To retrieve this commit,
%% you must specify its commit ID or otherwise reference it.
create_unreferenced_merge_commit(Client, Input)
when is_map(Client), is_map(Input) ->
create_unreferenced_merge_commit(Client, Input, []).
create_unreferenced_merge_commit(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"CreateUnreferencedMergeCommit">>, Input, Options).
%% @doc Deletes a specified approval rule template.
%%
%% Deleting a template does not remove approval rules on pull requests
%% already created with the template.
delete_approval_rule_template(Client, Input)
when is_map(Client), is_map(Input) ->
delete_approval_rule_template(Client, Input, []).
delete_approval_rule_template(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteApprovalRuleTemplate">>, Input, Options).
%% @doc Deletes a branch from a repository, unless that branch is the default
%% branch for the repository.
delete_branch(Client, Input)
when is_map(Client), is_map(Input) ->
delete_branch(Client, Input, []).
delete_branch(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteBranch">>, Input, Options).
%% @doc Deletes the content of a comment made on a change, file, or commit in
%% a repository.
delete_comment_content(Client, Input)
when is_map(Client), is_map(Input) ->
delete_comment_content(Client, Input, []).
delete_comment_content(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteCommentContent">>, Input, Options).
%% @doc Deletes a specified file from a specified branch.
%%
%% A commit is created on the branch that contains the revision. The file
%% still exists in the commits earlier to the commit that contains the
%% deletion.
delete_file(Client, Input)
when is_map(Client), is_map(Input) ->
delete_file(Client, Input, []).
delete_file(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteFile">>, Input, Options).
%% @doc Deletes an approval rule from a specified pull request.
%%
%% Approval rules can be deleted from a pull request only if the pull request
%% is open, and if the approval rule was created specifically for a pull
%% request and not generated from an approval rule template associated with
%% the repository where the pull request was created. You cannot delete an
%% approval rule from a merged or closed pull request.
delete_pull_request_approval_rule(Client, Input)
when is_map(Client), is_map(Input) ->
delete_pull_request_approval_rule(Client, Input, []).
delete_pull_request_approval_rule(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeletePullRequestApprovalRule">>, Input, Options).
%% @doc Deletes a repository.
%%
%% If a specified repository was already deleted, a null repository ID is
%% returned.
%%
%% Deleting a repository also deletes all associated objects and metadata.
%% After a repository is deleted, all future push calls to the deleted
%% repository fail.
delete_repository(Client, Input)
when is_map(Client), is_map(Input) ->
delete_repository(Client, Input, []).
delete_repository(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DeleteRepository">>, Input, Options).
@doc Returns information about one or more merge conflicts in the
attempted merge of two commit specifiers using the squash or three - way
%% merge strategy.
%%
%% If the merge option for the attempted merge is specified as
%% FAST_FORWARD_MERGE, an exception is thrown.
describe_merge_conflicts(Client, Input)
when is_map(Client), is_map(Input) ->
describe_merge_conflicts(Client, Input, []).
describe_merge_conflicts(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribeMergeConflicts">>, Input, Options).
@doc Returns information about one or more pull request events .
describe_pull_request_events(Client, Input)
when is_map(Client), is_map(Input) ->
describe_pull_request_events(Client, Input, []).
describe_pull_request_events(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DescribePullRequestEvents">>, Input, Options).
%% @doc Removes the association between a template and a repository so that
%% approval rules based on the template are not automatically created when
%% pull requests are created in the specified repository.
%%
%% This does not delete any approval rules previously created for pull
%% requests through the template association.
disassociate_approval_rule_template_from_repository(Client, Input)
when is_map(Client), is_map(Input) ->
disassociate_approval_rule_template_from_repository(Client, Input, []).
disassociate_approval_rule_template_from_repository(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"DisassociateApprovalRuleTemplateFromRepository">>, Input, Options).
%% @doc Evaluates whether a pull request has met all the conditions specified
%% in its associated approval rules.
evaluate_pull_request_approval_rules(Client, Input)
when is_map(Client), is_map(Input) ->
evaluate_pull_request_approval_rules(Client, Input, []).
evaluate_pull_request_approval_rules(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"EvaluatePullRequestApprovalRules">>, Input, Options).
%% @doc Returns information about a specified approval rule template.
get_approval_rule_template(Client, Input)
when is_map(Client), is_map(Input) ->
get_approval_rule_template(Client, Input, []).
get_approval_rule_template(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"GetApprovalRuleTemplate">>, Input, Options).
%% @doc Returns the base-64 encoded content of an individual blob in a
%% repository.
get_blob(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_blob(Client, Input, []).

get_blob(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"GetBlob">>, Input, Options).
%% @doc Returns information about a repository branch, including its name
%% and the ID of its last commit.
get_branch(Client, Input) when is_map(Client), is_map(Input) ->
    get_branch(Client, Input, []).

get_branch(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"GetBranch">>, Input, Opts).

%% @doc Returns the content of a comment made on a change, file, or commit
%% in a repository.
%%
%% Reaction counts might include numbers from user identities who were
%% deleted after the reaction was made. For a count of reactions from
%% active identities, use GetCommentReactions.
get_comment(Client, Input) when is_map(Client), is_map(Input) ->
    get_comment(Client, Input, []).

get_comment(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"GetComment">>, Input, Opts).

%% @doc Returns information about reactions to a specified comment ID.
%%
%% Reactions from users who have since been deleted are excluded from the
%% count.
get_comment_reactions(Client, Input) when is_map(Client), is_map(Input) ->
    get_comment_reactions(Client, Input, []).

get_comment_reactions(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"GetCommentReactions">>, Input, Opts).
%% @doc Returns information about comments made on the comparison between
%% two commits.
%%
%% Reaction counts might include numbers from user identities who were
%% deleted after the reaction was made. For a count of reactions from active
%% identities, use GetCommentReactions.
get_comments_for_compared_commit(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_comments_for_compared_commit(Client, Input, []).

get_comments_for_compared_commit(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"GetCommentsForComparedCommit">>, Input, Options).
%% @doc Returns comments made on a pull request.
%%
%% Reaction counts might include numbers from user identities who were
%% deleted after the reaction was made. For a count of reactions from
%% active identities, use GetCommentReactions.
get_comments_for_pull_request(Client, Input) when is_map(Client), is_map(Input) ->
    get_comments_for_pull_request(Client, Input, []).

get_comments_for_pull_request(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"GetCommentsForPullRequest">>, Input, Opts).

%% @doc Returns information about a commit, including its message plus
%% committer information.
get_commit(Client, Input) when is_map(Client), is_map(Input) ->
    get_commit(Client, Input, []).

get_commit(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"GetCommit">>, Input, Opts).
%% @doc Returns information about the differences in a valid commit specifier
%% (such as a branch, tag, HEAD, commit ID, or other fully qualified
%% reference).
%%
%% Results can be limited to a specified path.
get_differences(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_differences(Client, Input, []).

get_differences(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"GetDifferences">>, Input, Options).

%% @doc Returns the base-64 encoded contents of a specified file and its
%% metadata.
get_file(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_file(Client, Input, []).

get_file(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"GetFile">>, Input, Options).
%% @doc Returns the contents of a specified folder in a repository.
get_folder(Client, Input) when is_map(Client), is_map(Input) ->
    get_folder(Client, Input, []).

get_folder(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"GetFolder">>, Input, Opts).

%% @doc Returns information about a specified merge commit.
get_merge_commit(Client, Input) when is_map(Client), is_map(Input) ->
    get_merge_commit(Client, Input, []).

get_merge_commit(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"GetMergeCommit">>, Input, Opts).

%% @doc Returns information about merge conflicts between the before and
%% after commit IDs for a pull request in a repository.
get_merge_conflicts(Client, Input) when is_map(Client), is_map(Input) ->
    get_merge_conflicts(Client, Input, []).

get_merge_conflicts(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"GetMergeConflicts">>, Input, Opts).
%% @doc Returns information about the merge options available for merging
%% two specified branches.
%%
%% For details about why a merge option is not available, use
%% GetMergeConflicts or DescribeMergeConflicts.
get_merge_options(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_merge_options(Client, Input, []).

get_merge_options(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"GetMergeOptions">>, Input, Options).
%% @doc Gets information about a pull request in a specified repository.
get_pull_request(Client, Input) when is_map(Client), is_map(Input) ->
    get_pull_request(Client, Input, []).

get_pull_request(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"GetPullRequest">>, Input, Opts).
%% @doc Gets information about the approval states for a specified pull
%% request.
%%
%% Approval states only apply to pull requests that have one or more
%% approval rules applied to them.
get_pull_request_approval_states(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_pull_request_approval_states(Client, Input, []).

get_pull_request_approval_states(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"GetPullRequestApprovalStates">>, Input, Options).

%% @doc Returns information about whether approval rules have been set aside
%% (overridden) for a pull request, and if so, the Amazon Resource Name (ARN)
%% of the user or identity that overrode the rules and their requirements for
%% the pull request.
get_pull_request_override_state(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_pull_request_override_state(Client, Input, []).

get_pull_request_override_state(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"GetPullRequestOverrideState">>, Input, Options).
%% @doc Returns information about a repository.
%%
%% The description field for a repository accepts all HTML characters and
%% all valid Unicode characters. Applications that do not HTML-encode the
%% description and display it in a webpage can expose users to potentially
%% malicious code. Make sure that you HTML-encode the description field in
%% any application that uses this API to display the repository description
%% on a webpage.
get_repository(Client, Input) when is_map(Client), is_map(Input) ->
    get_repository(Client, Input, []).

get_repository(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"GetRepository">>, Input, Opts).

%% @doc Gets information about triggers configured for a repository.
get_repository_triggers(Client, Input) when is_map(Client), is_map(Input) ->
    get_repository_triggers(Client, Input, []).

get_repository_triggers(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"GetRepositoryTriggers">>, Input, Opts).
%% @doc Lists all approval rule templates in the specified AWS Region in
%% your AWS account.
%%
%% If an AWS Region is not specified, the AWS Region where you are signed in
%% is used.
list_approval_rule_templates(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_approval_rule_templates(Client, Input, []).

list_approval_rule_templates(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListApprovalRuleTemplates">>, Input, Options).
%% @doc Lists every approval rule template associated with a specified
%% repository.
list_associated_approval_rule_templates_for_repository(Client, Input) when is_map(Client), is_map(Input) ->
    list_associated_approval_rule_templates_for_repository(Client, Input, []).

list_associated_approval_rule_templates_for_repository(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"ListAssociatedApprovalRuleTemplatesForRepository">>, Input, Opts).
%% @doc Gets information about one or more branches in a repository.
list_branches(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_branches(Client, Input, []).

list_branches(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListBranches">>, Input, Options).

%% @doc Returns a list of pull requests for a specified repository.
%%
%% The return list can be refined by pull request status or pull request
%% author ARN.
list_pull_requests(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_pull_requests(Client, Input, []).

list_pull_requests(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListPullRequests">>, Input, Options).

%% @doc Gets information about one or more repositories.
list_repositories(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_repositories(Client, Input, []).

list_repositories(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListRepositories">>, Input, Options).
%% @doc Lists every repository associated with the specified approval rule
%% template.
list_repositories_for_approval_rule_template(Client, Input) when is_map(Client), is_map(Input) ->
    list_repositories_for_approval_rule_template(Client, Input, []).

list_repositories_for_approval_rule_template(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"ListRepositoriesForApprovalRuleTemplate">>, Input, Opts).
%% @doc Gets information about AWS tags for a specified Amazon Resource Name
%% (ARN) in AWS CodeCommit.
%%
%% For a list of valid resources in AWS CodeCommit, see CodeCommit Resources
%% and Operations in the AWS CodeCommit User Guide.
list_tags_for_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    list_tags_for_resource(Client, Input, []).

list_tags_for_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"ListTagsForResource">>, Input, Options).

%% @doc Merges two branches using the fast-forward merge strategy.
merge_branches_by_fast_forward(Client, Input)
  when is_map(Client), is_map(Input) ->
    merge_branches_by_fast_forward(Client, Input, []).

merge_branches_by_fast_forward(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"MergeBranchesByFastForward">>, Input, Options).

%% @doc Merges two branches using the squash merge strategy.
merge_branches_by_squash(Client, Input)
  when is_map(Client), is_map(Input) ->
    merge_branches_by_squash(Client, Input, []).

merge_branches_by_squash(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"MergeBranchesBySquash">>, Input, Options).

%% @doc Merges two specified branches using the three-way merge strategy.
merge_branches_by_three_way(Client, Input)
  when is_map(Client), is_map(Input) ->
    merge_branches_by_three_way(Client, Input, []).

merge_branches_by_three_way(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"MergeBranchesByThreeWay">>, Input, Options).
%% @doc Attempts to merge the source commit of a pull request into the
%% specified destination branch for that pull request at the specified
%% commit, using the fast-forward merge strategy.
%%
%% A successful merge closes the pull request.
merge_pull_request_by_fast_forward(Client, Input) when is_map(Client), is_map(Input) ->
    merge_pull_request_by_fast_forward(Client, Input, []).

merge_pull_request_by_fast_forward(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"MergePullRequestByFastForward">>, Input, Opts).

%% @doc Attempts to merge the source commit of a pull request into the
%% specified destination branch for that pull request at the specified
%% commit, using the squash merge strategy.
%%
%% A successful merge closes the pull request.
merge_pull_request_by_squash(Client, Input) when is_map(Client), is_map(Input) ->
    merge_pull_request_by_squash(Client, Input, []).

merge_pull_request_by_squash(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"MergePullRequestBySquash">>, Input, Opts).
%% @doc Attempts to merge the source commit of a pull request into the
%% specified destination branch for that pull request at the specified commit
%% using the three-way merge strategy.
%%
%% If the merge is successful, it closes the pull request.
merge_pull_request_by_three_way(Client, Input)
  when is_map(Client), is_map(Input) ->
    merge_pull_request_by_three_way(Client, Input, []).

merge_pull_request_by_three_way(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"MergePullRequestByThreeWay">>, Input, Options).
%% @doc Sets aside (overrides) every approval rule requirement for a
%% specified pull request.
override_pull_request_approval_rules(Client, Input) when is_map(Client), is_map(Input) ->
    override_pull_request_approval_rules(Client, Input, []).

override_pull_request_approval_rules(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"OverridePullRequestApprovalRules">>, Input, Opts).
%% @doc Posts a comment on the comparison between two commits.
post_comment_for_compared_commit(Client, Input)
  when is_map(Client), is_map(Input) ->
    post_comment_for_compared_commit(Client, Input, []).

post_comment_for_compared_commit(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"PostCommentForComparedCommit">>, Input, Options).
%% @doc Posts a comment on a pull request.
post_comment_for_pull_request(Client, Input) when is_map(Client), is_map(Input) ->
    post_comment_for_pull_request(Client, Input, []).

post_comment_for_pull_request(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"PostCommentForPullRequest">>, Input, Opts).

%% @doc Posts a comment in reply to an existing comment on a comparison
%% between commits or a pull request.
post_comment_reply(Client, Input) when is_map(Client), is_map(Input) ->
    post_comment_reply(Client, Input, []).

post_comment_reply(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"PostCommentReply">>, Input, Opts).

%% @doc Adds or updates a reaction to a specified comment for the user
%% whose identity is used to make the request.
%%
%% You can only add or update a reaction for yourself. You cannot add,
%% modify, or delete a reaction for another user.
put_comment_reaction(Client, Input) when is_map(Client), is_map(Input) ->
    put_comment_reaction(Client, Input, []).

put_comment_reaction(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"PutCommentReaction">>, Input, Opts).

%% @doc Adds or updates a file in a branch in an AWS CodeCommit repository,
%% generating a commit for the addition in the specified branch.
put_file(Client, Input) when is_map(Client), is_map(Input) ->
    put_file(Client, Input, []).

put_file(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"PutFile">>, Input, Opts).

%% @doc Replaces all triggers for a repository.
%%
%% Used to create or delete triggers.
put_repository_triggers(Client, Input) when is_map(Client), is_map(Input) ->
    put_repository_triggers(Client, Input, []).

put_repository_triggers(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"PutRepositoryTriggers">>, Input, Opts).
%% @doc Adds or updates tags for a resource in AWS CodeCommit.
%%
%% For a list of valid resources in AWS CodeCommit, see CodeCommit Resources
%% and Operations in the AWS CodeCommit User Guide.
tag_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    tag_resource(Client, Input, []).

tag_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"TagResource">>, Input, Options).
%% @doc Tests the functionality of repository triggers by sending
%% information to the trigger target.
%%
%% If real data is available in the repository, the test sends data from
%% the last commit. If no data is available, sample data is generated.
test_repository_triggers(Client, Input) when is_map(Client), is_map(Input) ->
    test_repository_triggers(Client, Input, []).

test_repository_triggers(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"TestRepositoryTriggers">>, Input, Opts).
%% @doc Removes tags for a resource in AWS CodeCommit.
%%
%% For a list of valid resources in AWS CodeCommit, see CodeCommit Resources
%% and Operations in the AWS CodeCommit User Guide.
untag_resource(Client, Input)
  when is_map(Client), is_map(Input) ->
    untag_resource(Client, Input, []).

untag_resource(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"UntagResource">>, Input, Options).
%% @doc Updates the content of an approval rule template.
%%
%% You can change the number of required approvals, the membership of the
%% approval rule, and whether an approval pool is defined.
update_approval_rule_template_content(Client, Input) when is_map(Client), is_map(Input) ->
    update_approval_rule_template_content(Client, Input, []).

update_approval_rule_template_content(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"UpdateApprovalRuleTemplateContent">>, Input, Opts).

%% @doc Updates the description for a specified approval rule template.
update_approval_rule_template_description(Client, Input) when is_map(Client), is_map(Input) ->
    update_approval_rule_template_description(Client, Input, []).

update_approval_rule_template_description(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"UpdateApprovalRuleTemplateDescription">>, Input, Opts).

%% @doc Updates the name of a specified approval rule template.
update_approval_rule_template_name(Client, Input) when is_map(Client), is_map(Input) ->
    update_approval_rule_template_name(Client, Input, []).

update_approval_rule_template_name(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"UpdateApprovalRuleTemplateName">>, Input, Opts).

%% @doc Replaces the contents of a comment.
update_comment(Client, Input) when is_map(Client), is_map(Input) ->
    update_comment(Client, Input, []).

update_comment(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"UpdateComment">>, Input, Opts).

%% @doc Sets or changes the default branch name for the specified
%% repository.
%%
%% If you use this operation to change the default branch name to the
%% current default branch name, a success message is returned even though
%% the default branch did not change.
update_default_branch(Client, Input) when is_map(Client), is_map(Input) ->
    update_default_branch(Client, Input, []).

update_default_branch(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"UpdateDefaultBranch">>, Input, Opts).
%% @doc Updates the structure of an approval rule created specifically for
%% a pull request.
%%
%% For example, you can change the number of required approvers and the
%% approval pool for approvers.
update_pull_request_approval_rule_content(Client, Input) when is_map(Client), is_map(Input) ->
    update_pull_request_approval_rule_content(Client, Input, []).

update_pull_request_approval_rule_content(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"UpdatePullRequestApprovalRuleContent">>, Input, Opts).

%% @doc Updates the state of a user's approval on a pull request.
%%
%% The user is derived from the signed-in account when the request is made.
update_pull_request_approval_state(Client, Input) when is_map(Client), is_map(Input) ->
    update_pull_request_approval_state(Client, Input, []).

update_pull_request_approval_state(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"UpdatePullRequestApprovalState">>, Input, Opts).

%% @doc Replaces the contents of the description of a pull request.
update_pull_request_description(Client, Input) when is_map(Client), is_map(Input) ->
    update_pull_request_description(Client, Input, []).

update_pull_request_description(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"UpdatePullRequestDescription">>, Input, Opts).

%% @doc Updates the status of a pull request.
update_pull_request_status(Client, Input) when is_map(Client), is_map(Input) ->
    update_pull_request_status(Client, Input, []).

update_pull_request_status(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"UpdatePullRequestStatus">>, Input, Opts).

%% @doc Replaces the title of a pull request.
update_pull_request_title(Client, Input) when is_map(Client), is_map(Input) ->
    update_pull_request_title(Client, Input, []).

update_pull_request_title(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"UpdatePullRequestTitle">>, Input, Opts).

%% @doc Sets or changes the comment or description for a repository.
%%
%% The description field for a repository accepts all HTML characters and
%% all valid Unicode characters. Applications that do not HTML-encode the
%% description and display it in a webpage can expose users to potentially
%% malicious code. Make sure that you HTML-encode the description field in
%% any application that uses this API to display the repository description
%% on a webpage.
update_repository_description(Client, Input) when is_map(Client), is_map(Input) ->
    update_repository_description(Client, Input, []).

update_repository_description(Client, Input, Opts) when is_map(Client), is_map(Input), is_list(Opts) ->
    request(Client, <<"UpdateRepositoryDescription">>, Input, Opts).
%% @doc Renames a repository.
%%
%% The repository name must be unique across the calling AWS account.
%% Repository names are limited to 100 alphanumeric, dash, and underscore
%% characters, and cannot include certain characters. The suffix .git is
%% prohibited. For more information about the limits on repository names, see
%% Limits in the AWS CodeCommit User Guide.
update_repository_name(Client, Input)
  when is_map(Client), is_map(Input) ->
    update_repository_name(Client, Input, []).

update_repository_name(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    request(Client, <<"UpdateRepositoryName">>, Input, Options).
%%====================================================================
%% Internal functions
%%====================================================================
-spec request(aws_client:aws_client(), binary(), map(), list()) ->
    {ok, Result, {integer(), list(), hackney:client()}} |
    {error, Error, {integer(), list(), hackney:client()}} |
    {error, term()} when
    Result :: map() | undefined,
    Error :: map().
%% Wrap the HTTP call in a thunk so aws_request:request/2 can drive it
%% (e.g. apply any retry behavior configured via Options).
request(Client, Action, Input, Options) ->
    Fun = fun() -> do_request(Client, Action, Input, Options) end,
    aws_request:request(Fun, Options).
%% Build, sign, and send one CodeCommit API call using the
%% application/x-amz-json-1.1 protocol. The operation name is carried in the
%% X-Amz-Target header ("CodeCommit_20150413.<Action>"), not in the URL path.
do_request(Client, Action, Input0, Options) ->
    Client1 = Client#{service => <<"codecommit">>},
    Host = build_host(<<"codecommit">>, Client1),
    URL = build_url(Host, Client1),
    Headers = [
        {<<"Host">>, Host},
        {<<"Content-Type">>, <<"application/x-amz-json-1.1">>},
        {<<"X-Amz-Target">>, <<"CodeCommit_20150413.", Action/binary>>}
    ],
    %% This protocol sends the input map unchanged as the JSON payload.
    Input = Input0,
    Payload = jsx:encode(Input),
    %% SigV4 signing; the returned headers include the Authorization header.
    SignedHeaders = aws_request:sign_request(Client1, <<"POST">>, URL, Headers, Payload),
    Response = hackney:request(post, URL, SignedHeaders, Payload, Options),
    handle_response(Response).
%% 200 OK: an empty body means success with no result document; otherwise
%% the body is decoded from JSON into a map.
handle_response({ok, 200, ResponseHeaders, Client}) ->
    case hackney:body(Client) of
        {ok, <<>>} ->
            {ok, undefined, {200, ResponseHeaders, Client}};
        {ok, Body} ->
            Result = jsx:decode(Body),
            {ok, Result, {200, ResponseHeaders, Client}}
    end;
%% Any non-200 status carries a JSON-encoded AWS error document in the body.
handle_response({ok, StatusCode, ResponseHeaders, Client}) ->
    {ok, Body} = hackney:body(Client),
    Error = jsx:decode(Body),
    {error, Error, {StatusCode, ResponseHeaders, Client}};
%% Transport-level failure (connect error, timeout, etc.) from hackney.
handle_response({error, Reason}) ->
    {error, Reason}.
%% The special <<"local">> region targets a local test endpoint: use the
%% explicitly configured endpoint when present, otherwise "localhost".
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := Endpoint}) ->
    Endpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
    <<"localhost">>;
%% Normal case: "<service>.<region>.<endpoint>",
%% e.g. "codecommit.us-east-1.amazonaws.com".
build_host(EndpointPrefix, #{region := Region, endpoint := Endpoint}) ->
    aws_util:binary_join([EndpointPrefix, Region, Endpoint], <<".">>).
%% Compose the full request URL ("<proto>://<host>:<port>/") from the
%% client's configured scheme and port.
build_url(Host, Client) ->
    Scheme = aws_client:proto(Client),
    PortBin = aws_client:port(Client),
    aws_util:binary_join([Scheme, <<"://">>, Host, <<":">>, PortBin, <<"/">>], <<"">>).
| null | https://raw.githubusercontent.com/aws-beam/aws-erlang/699287cee7dfc9dc8c08ced5f090dcc192c9cba8/src/aws_codecommit.erl | erlang | WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
@doc AWS CodeCommit
This is the AWS CodeCommit API Reference.
This reference provides descriptions of the operations and data types for
AWS CodeCommit API along with usage examples.
You can use the AWS CodeCommit API to work with the following objects:
Repositories, by calling the following:
or more repositories associated with your AWS account.
repository.
repository.
specified repository.
repositories associated with your AWS account.
description of the repository.
</li> <li> `UpdateRepositoryName', which changes the name of the
repository. If you change the name of a repository, no other users of that
use.
</li> </ul> Branches, by calling the following:
<ul> <li> `CreateBranch', which creates a branch in a specified
repository.
repository unless it is the default branch.
branch.
repository.
</li> <li> `UpdateDefaultBranch', which changes the default branch for
a repository.
</li> </ul> Files, by calling the following:
from a specified branch.
individual Git blob object in a repository.
specified file.
folder or directory.
</li> <li> `PutFile', which adds or modifies a single file in a
specified repository and branch.
</li> </ul> Commits, by calling the following:
more commits in a repository.
</li> <li> `CreateCommit', which creates a commit for changes to a
repository.
</li> <li> `GetCommit', which returns information about a commit,
including commit messages and author and committer information.
</li> <li> `GetDifferences', which returns information about the
commit ID, or other fully qualified reference).
</li> </ul> Merges, by calling the following:
<ul> <li> `BatchDescribeMergeConflicts', which returns information
about conflicts in a merge between commits in a repository.
</li> <li> `CreateUnreferencedMergeCommit', which creates an
comparing them and identifying any potential conflicts.
</li> <li> `DescribeMergeConflicts', which returns information about
merge conflicts between the base, source, and destination versions of a
file in a potential merge.
</li> <li> `GetMergeCommit', which returns information about the merge
between a source and destination commit.
</li> <li> `GetMergeConflicts', which returns information about merge
conflicts between the source and destination branch in a pull request.
</li> <li> `GetMergeOptions', which returns information about the
using the fast-forward merge option.
the squash merge option.
</li> </ul> Pull requests, by calling the following:
<ul> <li> `CreatePullRequest', which creates a pull request in a
specified repository.
</li> <li> `CreatePullRequestApprovalRule', which creates an approval
rule for a specified pull request.
</li> <li> `DeletePullRequestApprovalRule', which deletes an approval
rule for a specified pull request.
</li> <li> `DescribePullRequestEvents', which returns information
a pull request has met all the conditions specified in its associated
approval rules.
</li> <li> `GetCommentsForPullRequest', which returns information
about comments on a specified pull request.
</li> <li> `GetPullRequest', which returns information about a
specified pull request.
about the approval states for a specified pull request.
</li> <li> `GetPullRequestOverrideState', which returns information
about whether approval rules have been set aside (overriden) for a pull
that overrode the rules and their requirements for the pull request.
</li> <li> `ListPullRequests', which lists all pull requests for a
repository.
</li> <li> `MergePullRequestByFastForward', which merges the source
destination branch of a pull request into the specified destination branch
for that pull request using the fast-forward merge option.
</li> <li> `MergePullRequestBySquash', which merges the source
destination branch of a pull request into the specified destination branch
for that pull request using the squash merge option.
destination branch of a pull request into the specified destination branch
</li> <li> `OverridePullRequestApprovalRules', which sets aside all
approval rule requirements for a pull request.
</li> <li> `PostCommentForPullRequest', which posts a comment to a
pull request at the specified line, file, or request.
</li> <li> `UpdatePullRequestApprovalRuleContent', which updates the
structure of an approval rule for a pull request.
of an approval on a pull request.
</li> <li> `UpdatePullRequestDescription', which updates the
description of a pull request.
pull request.
request.
</li> </ul> Approval rule templates, by calling the following:
<ul> <li> `AssociateApprovalRuleTemplateWithRepository', which
associates a template with a specified repository. After the template is
associated with a repository, AWS CodeCommit creates approval rules that
match the template conditions on every pull request created in the
specified repository.
</li> <li> `BatchAssociateApprovalRuleTemplateWithRepositories', which
template is associated with a repository, AWS CodeCommit creates approval
rules that match the template conditions on every pull request created in
the specified repositories.
</li> <li> `BatchDisassociateApprovalRuleTemplateFromRepositories',
which removes the association between a template and specified
repositories so that approval rules based on the template are not
automatically created when pull requests are created in those
repositories.
</li> <li> `CreateApprovalRuleTemplate', which creates a template for
in your AWS account.
</li> <li> `DeleteApprovalRuleTemplate', which deletes the specified
template. It does not remove approval rules on pull requests already
created with the template.
</li> <li> `DisassociateApprovalRuleTemplateFromRepository', which
removes the association between a template and a repository so that
approval rules based on the template are not automatically created when
pull requests are created in the specified repository.
</li> <li> `GetApprovalRuleTemplate', which returns information about
an approval rule template.
</li> <li> `ListApprovalRuleTemplates', which lists all approval rule
</li> <li> `ListAssociatedApprovalRuleTemplatesForRepository', which
lists all approval rule templates that are associated with a specified
repository.
</li> <li> `ListRepositoriesForApprovalRuleTemplate', which lists all
repositories associated with the specified approval rule template.
</li> <li> `UpdateApprovalRuleTemplateDescription', which updates the
description of an approval rule template.
</li> <li> `UpdateApprovalRuleTemplateName', which updates the name of
an approval rule template.
</li> <li> `UpdateApprovalRuleTemplateContent', which updates the
content of an approval rule template.
</li> </ul> Comments in a repository, by calling the following:
comment on a commit in a repository.
a commit.
</li> <li> `GetCommentReactions', which returns information about
emoji reactions to comments.
</li> <li> `GetCommentsForComparedCommit', which returns information
repository.
</li> <li> `PostCommentForComparedCommit', which creates a comment on
</li> <li> `PostCommentReply', which creates a reply to a comment.
</li> <li> `PutCommentReaction', which creates or updates an emoji
reaction to a comment.
</li> <li> `UpdateComment', which updates the content of a comment on
a commit in a repository.
</li> </ul> Tags used to tag resources in AWS CodeCommit (not Git tags),
by calling the following:
<ul> <li> `ListTagsForResource', which gets information about AWS tags
AWS CodeCommit.
</li> </ul> Triggers, by calling the following:
<ul> <li> `GetRepositoryTriggers', which returns information about
triggers configured for a repository.
repository and can be used to create or delete triggers.
</li> <li> `TestRepositoryTriggers', which tests the functionality of
a repository trigger by sending data to the trigger target.
</li> </ul> For information about how to use AWS CodeCommit, see the AWS
CodeCommit User Guide.
====================================================================
API
====================================================================
@doc Creates an association between an approval rule template and a
specified repository.
Then, the next time a pull request is created in the repository where the
destination reference (if specified) matches the destination reference
(branch) for the pull request, an approval rule that matches the template
conditions is automatically created for that pull request. If no
destination references are specified in the template, an approval rule
that matches the template contents is created for all pull requests in
that repository.
more specified repositories.
merge strategy.
more specified repositories.
repository.
The description field for a repository accepts all HTML characters and all
valid Unicode characters. Applications that do not HTML-encode the
description and display it in a webpage can expose users to potentially
malicious code. Make sure that you HTML-encode the description field in
any application that uses this API to display the repository description
on a webpage.
@doc Creates a template for approval rules that can then be associated
When you associate a template with a repository, AWS CodeCommit creates an
approval rule that matches the conditions of the template for all pull
requests that meet the conditions of the template. For more information,
see `AssociateApprovalRuleTemplateWithRepository'.
@doc Creates a branch in a repository and points the branch to a commit.
Calling the create branch operation does not set a repository's
default branch. To do this, call the update default branch operation.
@doc Creates a commit for a repository on the tip of a specified branch.
@doc Creates a pull request in the specified repository.
@doc Creates an approval rule for a pull request.
@doc Creates a new, empty repository.
@doc Creates an unreferenced commit that represents the result of merging
This can help you determine the outcome of a potential merge. This API
cannot be used with the fast-forward merge strategy because that strategy
does not create a merge commit.
API or through git commands such as git fetch. To retrieve this commit,
you must specify its commit ID or otherwise reference it.
@doc Deletes a specified approval rule template.
Deleting a template does not remove approval rules on pull requests
already created with the template.
@doc Deletes a branch from a repository, unless that branch is the default
branch for the repository.
@doc Deletes the content of a comment made on a change, file, or commit in
a repository.
@doc Deletes a specified file from a specified branch.
A commit is created on the branch that contains the revision. The file
still exists in the commits earlier to the commit that contains the
deletion.
@doc Deletes an approval rule from a specified pull request.
Approval rules can be deleted from a pull request only if the pull request
is open, and if the approval rule was created specifically for a pull
request and not generated from an approval rule template associated with
the repository where the pull request was created. You cannot delete an
approval rule from a merged or closed pull request.
@doc Deletes a repository.
If a specified repository was already deleted, a null repository ID is
returned.
Deleting a repository also deletes all associated objects and metadata.
After a repository is deleted, all future push calls to the deleted
repository fail.
merge strategy.
If the merge option for the attempted merge is specified as
FAST_FORWARD_MERGE, an exception is thrown.
@doc Removes the association between a template and a repository so that
approval rules based on the template are not automatically created when
pull requests are created in the specified repository.
This does not delete any approval rules previously created for pull
requests through the template association.
@doc Evaluates whether a pull request has met all the conditions specified
in its associated approval rules.
@doc Returns information about a specified approval rule template.
repository.
@doc Returns information about a repository branch, including its name and
the last commit ID.
@doc Returns the content of a comment made on a change, file, or commit in
a repository.
Reaction counts might include numbers from user identities who were
deleted after the reaction was made. For a count of reactions from active
identities, use GetCommentReactions.
@doc Returns information about reactions to a specified comment ID.
Reactions from users who have been deleted will not be included in the
count.
commits.
Reaction counts might include numbers from user identities who were
deleted after the reaction was made. For a count of reactions from active
identities, use GetCommentReactions.
@doc Returns comments made on a pull request.
Reaction counts might include numbers from user identities who were
deleted after the reaction was made. For a count of reactions from active
identities, use GetCommentReactions.
@doc Returns information about a commit, including commit message and
committer information.
@doc Returns information about the differences in a valid commit specifier
reference).
Results can be limited to a specified path.
metadata.
@doc Returns the contents of a specified folder in a repository.
@doc Returns information about a specified merge commit.
@doc Returns information about merge conflicts between the before and
after commit IDs for a pull request in a repository.
specified branches.
For details about why a merge option is not available, use
GetMergeConflicts or DescribeMergeConflicts.
@doc Gets information about a pull request in a specified repository.
@doc Gets information about the approval states for a specified pull
request.
rules applied to them.
@doc Returns information about whether approval rules have been set aside
of the user or identity that overrode the rules and their requirements for
the pull request.
@doc Returns information about a repository.
The description field for a repository accepts all HTML characters and all
valid Unicode characters. Applications that do not HTML-encode the
description and display it in a webpage can expose users to potentially
malicious code. Make sure that you HTML-encode the description field in
any application that uses this API to display the repository description
on a webpage.
@doc Gets information about triggers configured for a repository.
@doc Lists all approval rule templates in the specified AWS Region in your
AWS account.
is used.
@doc Lists all approval rule templates that are associated with a
specified repository.
@doc Returns a list of pull requests for a specified repository.
The return list can be refined by pull request status or pull request
@doc Lists all repositories associated with the specified approval rule
template.
@doc Attempts to merge the source commit of a pull request into the
specified destination branch for that pull request at the specified commit
using the fast-forward merge strategy.
If the merge is successful, it closes the pull request.
@doc Attempts to merge the source commit of a pull request into the
specified destination branch for that pull request at the specified commit
using the squash merge strategy.
If the merge is successful, it closes the pull request.
@doc Attempts to merge the source commit of a pull request into the
specified destination branch for that pull request at the specified commit
If the merge is successful, it closes the pull request.
@doc Sets aside (overrides) all approval rule requirements for a specified
pull request.
@doc Posts a comment on a pull request.
@doc Posts a comment in reply to an existing comment on a comparison
between commits or a pull request.
@doc Adds or updates a reaction to a specified comment for the user whose
identity is used to make the request.
You can only add or update a reaction for yourself. You cannot add,
modify, or delete a reaction for another user.
@doc Adds or updates a file in a branch in an AWS CodeCommit repository,
and generates a commit for the addition in the specified branch.
@doc Replaces all triggers for a repository.
Used to create or delete triggers.
@doc Adds or updates tags for a resource in AWS CodeCommit.
@doc Tests the functionality of repository triggers by sending information
to the trigger target.
If real data is available in the repository, the test sends data from the
last commit. If no data is available, sample data is generated.
@doc Removes tags for a resource in AWS CodeCommit.
@doc Updates the content of an approval rule template.
You can change the number of required approvals, the membership of the
approval rule, and whether an approval pool is defined.
@doc Updates the description for a specified approval rule template.
@doc Updates the name of a specified approval rule template.
@doc Replaces the contents of a comment.
@doc Sets or changes the default branch name for the specified repository.
If you use this operation to change the default branch name to the current
default branch name, a success message is returned even though the default
branch did not change.
@doc Updates the structure of an approval rule created specifically for a
pull request.
For example, you can change the number of required approvers and the
approval pool for approvers.
@doc Updates the state of a user's approval on a pull request.
The user is derived from the signed-in account when the request is made.
@doc Replaces the contents of the description of a pull request.
@doc Updates the status of a pull request.
@doc Replaces the title of a pull request.
@doc Sets or changes the comment or description for a repository.
The description field for a repository accepts all HTML characters and all
valid Unicode characters. Applications that do not HTML-encode the
description and display it in a webpage can expose users to potentially
malicious code. Make sure that you HTML-encode the description field in
any application that uses this API to display the repository description
on a webpage.
The repository name must be unique across the calling AWS account.
characters, and cannot include certain characters. The suffix .git is
prohibited. For more information about the limits on repository names, see
Limits in the AWS CodeCommit User Guide.
%% ====================================================================
%% ====================================================================
%% See https://github.com/aws-beam/aws-codegen for more details.
%% <ul> <li> `BatchGetRepositories', which returns information about one
%% </li> <li> `CreateRepository', which creates an AWS CodeCommit
%% </li> <li> `DeleteRepository', which deletes an AWS CodeCommit
%% </li> <li> `GetRepository', which returns information about a
%% </li> <li> `ListRepositories', which lists all AWS CodeCommit
%% </li> <li> `UpdateRepositoryDescription', which sets or updates the
%% repository can access it until you send them the new HTTPS or SSH URL to
%% </li> <li> `DeleteBranch', which deletes the specified branch in a
%% </li> <li> `GetBranch', which returns information about a specified
%% </li> <li> `ListBranches', which lists all branches for a specified
%% <ul> <li> `DeleteFile', which deletes the content of a specified file
%% </li> <li> `GetBlob', which returns the base-64 encoded content of an
%% </li> <li> `GetFile', which returns the base-64 encoded content of a
%% </li> <li> `GetFolder', which returns the contents of a specified
%% <ul> <li> `BatchGetCommits', which returns information about one or
%% differences in a valid commit specifier (such as a branch, tag, HEAD,
%% unreferenced commit between two branches or commits for the purpose of
%% available merge options between two branches or commit specifiers.
%% </li> <li> `MergeBranchesByFastForward', which merges two branches
%% </li> <li> `MergeBranchesBySquash', which merges two branches using
%% </li> <li> `MergeBranchesByThreeWay', which merges two branches using
%% the three-way merge option.
%% about one or more pull request events.
%% </li> <li> `EvaluatePullRequestApprovalRules', which evaluates whether
%% </li> <li> `GetPullRequestApprovalStates', which returns information
%% request, and if so, the Amazon Resource Name (ARN) of the user or identity
%% </li> <li> `MergePullRequestByThreeWay', which merges the source
%% for that pull request using the three-way merge option.
%% </li> <li> `UpdatePullRequestApprovalState', which updates the state
%% </li> <li> `UpdatePullRequestStatus', which updates the status of a
%% </li> <li> `UpdatePullRequestTitle', which updates the title of a pull
%% associates a template with one or more specified repositories. After the
%% approval rules that can then be associated with one or more repositories
%% templates in the AWS Region in your AWS account.
%% <ul> <li> `DeleteCommentContent', which deletes the content of a
%% </li> <li> `GetComment', which returns information about a comment on
%% about comments on the comparison between two commit specifiers in a
%% the comparison between two commit specifiers in a repository.
%% for a specified Amazon Resource Name (ARN) in AWS CodeCommit.
%% </li> <li> `TagResource', which adds or updates tags for a resource in
%% </li> <li> `UntagResource', which removes tags for a resource in AWS
%% CodeCommit.
%% </li> <li> `PutRepositoryTriggers', which replaces all triggers for a
-module(aws_codecommit).
-export([associate_approval_rule_template_with_repository/2,
associate_approval_rule_template_with_repository/3,
batch_associate_approval_rule_template_with_repositories/2,
batch_associate_approval_rule_template_with_repositories/3,
batch_describe_merge_conflicts/2,
batch_describe_merge_conflicts/3,
batch_disassociate_approval_rule_template_from_repositories/2,
batch_disassociate_approval_rule_template_from_repositories/3,
batch_get_commits/2,
batch_get_commits/3,
batch_get_repositories/2,
batch_get_repositories/3,
create_approval_rule_template/2,
create_approval_rule_template/3,
create_branch/2,
create_branch/3,
create_commit/2,
create_commit/3,
create_pull_request/2,
create_pull_request/3,
create_pull_request_approval_rule/2,
create_pull_request_approval_rule/3,
create_repository/2,
create_repository/3,
create_unreferenced_merge_commit/2,
create_unreferenced_merge_commit/3,
delete_approval_rule_template/2,
delete_approval_rule_template/3,
delete_branch/2,
delete_branch/3,
delete_comment_content/2,
delete_comment_content/3,
delete_file/2,
delete_file/3,
delete_pull_request_approval_rule/2,
delete_pull_request_approval_rule/3,
delete_repository/2,
delete_repository/3,
describe_merge_conflicts/2,
describe_merge_conflicts/3,
describe_pull_request_events/2,
describe_pull_request_events/3,
disassociate_approval_rule_template_from_repository/2,
disassociate_approval_rule_template_from_repository/3,
evaluate_pull_request_approval_rules/2,
evaluate_pull_request_approval_rules/3,
get_approval_rule_template/2,
get_approval_rule_template/3,
get_blob/2,
get_blob/3,
get_branch/2,
get_branch/3,
get_comment/2,
get_comment/3,
get_comment_reactions/2,
get_comment_reactions/3,
get_comments_for_compared_commit/2,
get_comments_for_compared_commit/3,
get_comments_for_pull_request/2,
get_comments_for_pull_request/3,
get_commit/2,
get_commit/3,
get_differences/2,
get_differences/3,
get_file/2,
get_file/3,
get_folder/2,
get_folder/3,
get_merge_commit/2,
get_merge_commit/3,
get_merge_conflicts/2,
get_merge_conflicts/3,
get_merge_options/2,
get_merge_options/3,
get_pull_request/2,
get_pull_request/3,
get_pull_request_approval_states/2,
get_pull_request_approval_states/3,
get_pull_request_override_state/2,
get_pull_request_override_state/3,
get_repository/2,
get_repository/3,
get_repository_triggers/2,
get_repository_triggers/3,
list_approval_rule_templates/2,
list_approval_rule_templates/3,
list_associated_approval_rule_templates_for_repository/2,
list_associated_approval_rule_templates_for_repository/3,
list_branches/2,
list_branches/3,
list_pull_requests/2,
list_pull_requests/3,
list_repositories/2,
list_repositories/3,
list_repositories_for_approval_rule_template/2,
list_repositories_for_approval_rule_template/3,
list_tags_for_resource/2,
list_tags_for_resource/3,
merge_branches_by_fast_forward/2,
merge_branches_by_fast_forward/3,
merge_branches_by_squash/2,
merge_branches_by_squash/3,
merge_branches_by_three_way/2,
merge_branches_by_three_way/3,
merge_pull_request_by_fast_forward/2,
merge_pull_request_by_fast_forward/3,
merge_pull_request_by_squash/2,
merge_pull_request_by_squash/3,
merge_pull_request_by_three_way/2,
merge_pull_request_by_three_way/3,
override_pull_request_approval_rules/2,
override_pull_request_approval_rules/3,
post_comment_for_compared_commit/2,
post_comment_for_compared_commit/3,
post_comment_for_pull_request/2,
post_comment_for_pull_request/3,
post_comment_reply/2,
post_comment_reply/3,
put_comment_reaction/2,
put_comment_reaction/3,
put_file/2,
put_file/3,
put_repository_triggers/2,
put_repository_triggers/3,
tag_resource/2,
tag_resource/3,
test_repository_triggers/2,
test_repository_triggers/3,
untag_resource/2,
untag_resource/3,
update_approval_rule_template_content/2,
update_approval_rule_template_content/3,
update_approval_rule_template_description/2,
update_approval_rule_template_description/3,
update_approval_rule_template_name/2,
update_approval_rule_template_name/3,
update_comment/2,
update_comment/3,
update_default_branch/2,
update_default_branch/3,
update_pull_request_approval_rule_content/2,
update_pull_request_approval_rule_content/3,
update_pull_request_approval_state/2,
update_pull_request_approval_state/3,
update_pull_request_description/2,
update_pull_request_description/3,
update_pull_request_status/2,
update_pull_request_status/3,
update_pull_request_title/2,
update_pull_request_title/3,
update_repository_description/2,
update_repository_description/3,
update_repository_name/2,
update_repository_name/3]).
-include_lib("hackney/include/hackney_lib.hrl").
%% @doc Creates an association between an approval rule template and a
%% specified repository.
associate_approval_rule_template_with_repository(Client, Input)
  when is_map(Client), is_map(Input) ->
    associate_approval_rule_template_with_repository(Client, Input, []).

associate_approval_rule_template_with_repository(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"AssociateApprovalRuleTemplateWithRepository">>,
    request(Client, Action, Input, Options).
%% @doc Creates an association between an approval rule template and one or
%% more specified repositories.
batch_associate_approval_rule_template_with_repositories(Client, Input)
  when is_map(Client), is_map(Input) ->
    batch_associate_approval_rule_template_with_repositories(Client, Input, []).

batch_associate_approval_rule_template_with_repositories(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"BatchAssociateApprovalRuleTemplateWithRepositories">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about one or more merge conflicts in the
%% attempted merge of two commit specifiers using the squash or three-way
%% merge strategy.
batch_describe_merge_conflicts(Client, Input)
  when is_map(Client), is_map(Input) ->
    batch_describe_merge_conflicts(Client, Input, []).

batch_describe_merge_conflicts(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"BatchDescribeMergeConflicts">>,
    request(Client, Action, Input, Options).
%% @doc Removes the association between an approval rule template and one or
%% more specified repositories.
batch_disassociate_approval_rule_template_from_repositories(Client, Input)
  when is_map(Client), is_map(Input) ->
    batch_disassociate_approval_rule_template_from_repositories(Client, Input, []).

batch_disassociate_approval_rule_template_from_repositories(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"BatchDisassociateApprovalRuleTemplateFromRepositories">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about the contents of one or more commits in a
%% repository.
batch_get_commits(Client, Input)
  when is_map(Client), is_map(Input) ->
    batch_get_commits(Client, Input, []).

batch_get_commits(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"BatchGetCommits">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about one or more repositories.
batch_get_repositories(Client, Input)
  when is_map(Client), is_map(Input) ->
    batch_get_repositories(Client, Input, []).

batch_get_repositories(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"BatchGetRepositories">>,
    request(Client, Action, Input, Options).
%% @doc Creates a template for approval rules that can then be associated
%% with one or more repositories in your AWS account.
create_approval_rule_template(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_approval_rule_template(Client, Input, []).

create_approval_rule_template(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"CreateApprovalRuleTemplate">>,
    request(Client, Action, Input, Options).
%% @doc Creates a branch in a repository and points the branch to a commit.
%%
%% Calling the create branch operation does not set a repository's default
%% branch; use the update default branch operation for that.
create_branch(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_branch(Client, Input, []).

create_branch(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"CreateBranch">>,
    request(Client, Action, Input, Options).
%% @doc Creates a commit for a repository on the tip of a specified branch.
create_commit(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_commit(Client, Input, []).

create_commit(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"CreateCommit">>,
    request(Client, Action, Input, Options).
%% @doc Creates a pull request in the specified repository.
create_pull_request(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_pull_request(Client, Input, []).

create_pull_request(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"CreatePullRequest">>,
    request(Client, Action, Input, Options).
%% @doc Creates an approval rule for a pull request.
create_pull_request_approval_rule(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_pull_request_approval_rule(Client, Input, []).

create_pull_request_approval_rule(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"CreatePullRequestApprovalRule">>,
    request(Client, Action, Input, Options).
%% @doc Creates a new, empty repository.
create_repository(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_repository(Client, Input, []).

create_repository(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"CreateRepository">>,
    request(Client, Action, Input, Options).
%% @doc Creates an unreferenced commit that represents the result of merging
%% two branches using a specified merge strategy.
%%
%% This unreferenced merge commit can only be accessed using the GetCommit
%% API or through git commands such as git fetch.
create_unreferenced_merge_commit(Client, Input)
  when is_map(Client), is_map(Input) ->
    create_unreferenced_merge_commit(Client, Input, []).

create_unreferenced_merge_commit(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"CreateUnreferencedMergeCommit">>,
    request(Client, Action, Input, Options).
%% @doc Deletes a specified approval rule template.
%%
%% Deleting a template does not remove approval rules on pull requests
%% already created with the template.
delete_approval_rule_template(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_approval_rule_template(Client, Input, []).

delete_approval_rule_template(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"DeleteApprovalRuleTemplate">>,
    request(Client, Action, Input, Options).
%% @doc Deletes a branch from a repository, unless that branch is the
%% default branch for the repository.
delete_branch(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_branch(Client, Input, []).

delete_branch(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"DeleteBranch">>,
    request(Client, Action, Input, Options).
%% @doc Deletes the content of a comment made on a change, file, or commit
%% in a repository.
delete_comment_content(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_comment_content(Client, Input, []).

delete_comment_content(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"DeleteCommentContent">>,
    request(Client, Action, Input, Options).
%% @doc Deletes a specified file from a specified branch.
%%
%% A commit is created on the branch that contains the revision; the file
%% still exists in earlier commits.
delete_file(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_file(Client, Input, []).

delete_file(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"DeleteFile">>,
    request(Client, Action, Input, Options).
%% @doc Deletes an approval rule from a specified pull request.
delete_pull_request_approval_rule(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_pull_request_approval_rule(Client, Input, []).

delete_pull_request_approval_rule(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"DeletePullRequestApprovalRule">>,
    request(Client, Action, Input, Options).
%% @doc Deletes a repository.
%%
%% If a specified repository was already deleted, a null repository ID is
%% returned.
delete_repository(Client, Input)
  when is_map(Client), is_map(Input) ->
    delete_repository(Client, Input, []).

delete_repository(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"DeleteRepository">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about one or more merge conflicts in the
%% attempted merge of two commit specifiers using the squash or three-way
%% merge strategy.
describe_merge_conflicts(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_merge_conflicts(Client, Input, []).

describe_merge_conflicts(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"DescribeMergeConflicts">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about one or more pull request events.
describe_pull_request_events(Client, Input)
  when is_map(Client), is_map(Input) ->
    describe_pull_request_events(Client, Input, []).

describe_pull_request_events(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"DescribePullRequestEvents">>,
    request(Client, Action, Input, Options).
%% @doc Removes the association between a template and a repository so that
%% approval rules based on the template are not automatically created when
%% pull requests are created in the specified repository.
disassociate_approval_rule_template_from_repository(Client, Input)
  when is_map(Client), is_map(Input) ->
    disassociate_approval_rule_template_from_repository(Client, Input, []).

disassociate_approval_rule_template_from_repository(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"DisassociateApprovalRuleTemplateFromRepository">>,
    request(Client, Action, Input, Options).
%% @doc Evaluates whether a pull request has met all the conditions
%% specified in its associated approval rules.
evaluate_pull_request_approval_rules(Client, Input)
  when is_map(Client), is_map(Input) ->
    evaluate_pull_request_approval_rules(Client, Input, []).

evaluate_pull_request_approval_rules(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"EvaluatePullRequestApprovalRules">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about a specified approval rule template.
get_approval_rule_template(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_approval_rule_template(Client, Input, []).

get_approval_rule_template(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetApprovalRuleTemplate">>,
    request(Client, Action, Input, Options).
%% @doc Returns the base-64 encoded content of an individual blob in a
%% repository.
get_blob(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_blob(Client, Input, []).

get_blob(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetBlob">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about a repository branch, including its name
%% and the last commit ID.
get_branch(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_branch(Client, Input, []).

get_branch(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetBranch">>,
    request(Client, Action, Input, Options).
%% @doc Returns the content of a comment made on a change, file, or commit
%% in a repository.
get_comment(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_comment(Client, Input, []).

get_comment(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetComment">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about reactions to a specified comment ID.
%%
%% Reactions from users who have been deleted will not be included in the
%% count.
get_comment_reactions(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_comment_reactions(Client, Input, []).

get_comment_reactions(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetCommentReactions">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about comments made on the comparison between
%% two commits.
get_comments_for_compared_commit(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_comments_for_compared_commit(Client, Input, []).

get_comments_for_compared_commit(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetCommentsForComparedCommit">>,
    request(Client, Action, Input, Options).
%% @doc Returns comments made on a pull request.
get_comments_for_pull_request(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_comments_for_pull_request(Client, Input, []).

get_comments_for_pull_request(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetCommentsForPullRequest">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about a commit, including commit message and
%% committer information.
get_commit(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_commit(Client, Input, []).

get_commit(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetCommit">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about the differences in a valid commit
%% specifier (such as a branch, tag, HEAD, commit ID, or other fully
%% qualified reference).
%%
%% Results can be limited to a specified path.
get_differences(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_differences(Client, Input, []).

get_differences(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetDifferences">>,
    request(Client, Action, Input, Options).
%% @doc Returns the base-64 encoded contents of a specified file and its
%% metadata.
get_file(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_file(Client, Input, []).

get_file(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetFile">>,
    request(Client, Action, Input, Options).
%% @doc Returns the contents of a specified folder in a repository.
get_folder(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_folder(Client, Input, []).

get_folder(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetFolder">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about a specified merge commit.
get_merge_commit(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_merge_commit(Client, Input, []).

get_merge_commit(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetMergeCommit">>,
    request(Client, Action, Input, Options).
%% @doc Returns information about merge conflicts between the before and
%% after commit IDs for a pull request in a repository.
get_merge_conflicts(Client, Input)
  when is_map(Client), is_map(Input) ->
    get_merge_conflicts(Client, Input, []).

get_merge_conflicts(Client, Input, Options)
  when is_map(Client), is_map(Input), is_list(Options) ->
    Action = <<"GetMergeConflicts">>,
    request(Client, Action, Input, Options).
@doc Returns information about the merge options available for merging two
get_merge_options(Client, Input)
when is_map(Client), is_map(Input) ->
get_merge_options(Client, Input, []).
get_merge_options(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"GetMergeOptions">>, Input, Options).
get_pull_request(Client, Input)
when is_map(Client), is_map(Input) ->
get_pull_request(Client, Input, []).
get_pull_request(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"GetPullRequest">>, Input, Options).
Approval states only apply to pull requests that have one or more approval
get_pull_request_approval_states(Client, Input)
when is_map(Client), is_map(Input) ->
get_pull_request_approval_states(Client, Input, []).
get_pull_request_approval_states(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"GetPullRequestApprovalStates">>, Input, Options).
( overridden ) for a pull request , and if so , the Amazon Resource Name ( ARN )
get_pull_request_override_state(Client, Input)
when is_map(Client), is_map(Input) ->
get_pull_request_override_state(Client, Input, []).
get_pull_request_override_state(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"GetPullRequestOverrideState">>, Input, Options).
get_repository(Client, Input)
when is_map(Client), is_map(Input) ->
get_repository(Client, Input, []).
get_repository(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"GetRepository">>, Input, Options).
get_repository_triggers(Client, Input)
when is_map(Client), is_map(Input) ->
get_repository_triggers(Client, Input, []).
get_repository_triggers(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"GetRepositoryTriggers">>, Input, Options).
If an AWS Region is not specified , the AWS Region where you are signed in
list_approval_rule_templates(Client, Input)
when is_map(Client), is_map(Input) ->
list_approval_rule_templates(Client, Input, []).
list_approval_rule_templates(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListApprovalRuleTemplates">>, Input, Options).
list_associated_approval_rule_templates_for_repository(Client, Input)
when is_map(Client), is_map(Input) ->
list_associated_approval_rule_templates_for_repository(Client, Input, []).
list_associated_approval_rule_templates_for_repository(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListAssociatedApprovalRuleTemplatesForRepository">>, Input, Options).
%% @doc Gets information about one or more branches in a repository.
list_branches(Client, Input)
when is_map(Client), is_map(Input) ->
list_branches(Client, Input, []).
list_branches(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListBranches">>, Input, Options).
author ARN .
list_pull_requests(Client, Input)
when is_map(Client), is_map(Input) ->
list_pull_requests(Client, Input, []).
list_pull_requests(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListPullRequests">>, Input, Options).
%% @doc Gets information about one or more repositories.
list_repositories(Client, Input)
when is_map(Client), is_map(Input) ->
list_repositories(Client, Input, []).
list_repositories(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListRepositories">>, Input, Options).
list_repositories_for_approval_rule_template(Client, Input)
when is_map(Client), is_map(Input) ->
list_repositories_for_approval_rule_template(Client, Input, []).
list_repositories_for_approval_rule_template(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListRepositoriesForApprovalRuleTemplate">>, Input, Options).
%% @doc Gets information about AWS tags for a specified Amazon Resource Name
%% (ARN) in AWS CodeCommit.
%% For a list of valid resources in AWS CodeCommit, see CodeCommit Resources
%% and Operations in the AWS CodeCommit User Guide.
list_tags_for_resource(Client, Input)
when is_map(Client), is_map(Input) ->
list_tags_for_resource(Client, Input, []).
list_tags_for_resource(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"ListTagsForResource">>, Input, Options).
%% @doc Merges two branches using the fast-forward merge strategy.
merge_branches_by_fast_forward(Client, Input)
when is_map(Client), is_map(Input) ->
merge_branches_by_fast_forward(Client, Input, []).
merge_branches_by_fast_forward(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"MergeBranchesByFastForward">>, Input, Options).
%% @doc Merges two branches using the squash merge strategy.
merge_branches_by_squash(Client, Input)
when is_map(Client), is_map(Input) ->
merge_branches_by_squash(Client, Input, []).
merge_branches_by_squash(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"MergeBranchesBySquash">>, Input, Options).
%% @doc Merges two specified branches using the three-way merge strategy.
merge_branches_by_three_way(Client, Input)
when is_map(Client), is_map(Input) ->
merge_branches_by_three_way(Client, Input, []).
merge_branches_by_three_way(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"MergeBranchesByThreeWay">>, Input, Options).
merge_pull_request_by_fast_forward(Client, Input)
when is_map(Client), is_map(Input) ->
merge_pull_request_by_fast_forward(Client, Input, []).
merge_pull_request_by_fast_forward(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"MergePullRequestByFastForward">>, Input, Options).
merge_pull_request_by_squash(Client, Input)
when is_map(Client), is_map(Input) ->
merge_pull_request_by_squash(Client, Input, []).
merge_pull_request_by_squash(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"MergePullRequestBySquash">>, Input, Options).
using the three - way merge strategy .
merge_pull_request_by_three_way(Client, Input)
when is_map(Client), is_map(Input) ->
merge_pull_request_by_three_way(Client, Input, []).
merge_pull_request_by_three_way(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"MergePullRequestByThreeWay">>, Input, Options).
override_pull_request_approval_rules(Client, Input)
when is_map(Client), is_map(Input) ->
override_pull_request_approval_rules(Client, Input, []).
override_pull_request_approval_rules(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"OverridePullRequestApprovalRules">>, Input, Options).
%% @doc Posts a comment on the comparison between two commits.
post_comment_for_compared_commit(Client, Input)
when is_map(Client), is_map(Input) ->
post_comment_for_compared_commit(Client, Input, []).
post_comment_for_compared_commit(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"PostCommentForComparedCommit">>, Input, Options).
post_comment_for_pull_request(Client, Input)
when is_map(Client), is_map(Input) ->
post_comment_for_pull_request(Client, Input, []).
post_comment_for_pull_request(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"PostCommentForPullRequest">>, Input, Options).
post_comment_reply(Client, Input)
when is_map(Client), is_map(Input) ->
post_comment_reply(Client, Input, []).
post_comment_reply(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"PostCommentReply">>, Input, Options).
put_comment_reaction(Client, Input)
when is_map(Client), is_map(Input) ->
put_comment_reaction(Client, Input, []).
put_comment_reaction(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"PutCommentReaction">>, Input, Options).
put_file(Client, Input)
when is_map(Client), is_map(Input) ->
put_file(Client, Input, []).
put_file(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"PutFile">>, Input, Options).
put_repository_triggers(Client, Input)
when is_map(Client), is_map(Input) ->
put_repository_triggers(Client, Input, []).
put_repository_triggers(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"PutRepositoryTriggers">>, Input, Options).
For a list of valid resources in AWS CodeCommit , see CodeCommit Resources
and Operations in the AWS CodeCommit User Guide .
tag_resource(Client, Input)
when is_map(Client), is_map(Input) ->
tag_resource(Client, Input, []).
tag_resource(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"TagResource">>, Input, Options).
test_repository_triggers(Client, Input)
when is_map(Client), is_map(Input) ->
test_repository_triggers(Client, Input, []).
test_repository_triggers(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"TestRepositoryTriggers">>, Input, Options).
For a list of valid resources in AWS CodeCommit , see CodeCommit Resources
and Operations in the AWS CodeCommit User Guide .
untag_resource(Client, Input)
when is_map(Client), is_map(Input) ->
untag_resource(Client, Input, []).
untag_resource(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UntagResource">>, Input, Options).
update_approval_rule_template_content(Client, Input)
when is_map(Client), is_map(Input) ->
update_approval_rule_template_content(Client, Input, []).
update_approval_rule_template_content(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdateApprovalRuleTemplateContent">>, Input, Options).
update_approval_rule_template_description(Client, Input)
when is_map(Client), is_map(Input) ->
update_approval_rule_template_description(Client, Input, []).
update_approval_rule_template_description(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdateApprovalRuleTemplateDescription">>, Input, Options).
update_approval_rule_template_name(Client, Input)
when is_map(Client), is_map(Input) ->
update_approval_rule_template_name(Client, Input, []).
update_approval_rule_template_name(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdateApprovalRuleTemplateName">>, Input, Options).
update_comment(Client, Input)
when is_map(Client), is_map(Input) ->
update_comment(Client, Input, []).
update_comment(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdateComment">>, Input, Options).
update_default_branch(Client, Input)
when is_map(Client), is_map(Input) ->
update_default_branch(Client, Input, []).
update_default_branch(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdateDefaultBranch">>, Input, Options).
update_pull_request_approval_rule_content(Client, Input)
when is_map(Client), is_map(Input) ->
update_pull_request_approval_rule_content(Client, Input, []).
update_pull_request_approval_rule_content(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdatePullRequestApprovalRuleContent">>, Input, Options).
update_pull_request_approval_state(Client, Input)
when is_map(Client), is_map(Input) ->
update_pull_request_approval_state(Client, Input, []).
update_pull_request_approval_state(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdatePullRequestApprovalState">>, Input, Options).
update_pull_request_description(Client, Input)
when is_map(Client), is_map(Input) ->
update_pull_request_description(Client, Input, []).
update_pull_request_description(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdatePullRequestDescription">>, Input, Options).
update_pull_request_status(Client, Input)
when is_map(Client), is_map(Input) ->
update_pull_request_status(Client, Input, []).
update_pull_request_status(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdatePullRequestStatus">>, Input, Options).
update_pull_request_title(Client, Input)
when is_map(Client), is_map(Input) ->
update_pull_request_title(Client, Input, []).
update_pull_request_title(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdatePullRequestTitle">>, Input, Options).
update_repository_description(Client, Input)
when is_map(Client), is_map(Input) ->
update_repository_description(Client, Input, []).
update_repository_description(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdateRepositoryDescription">>, Input, Options).
%% @doc Renames a repository.
%% Repository names are limited to 100 alphanumeric, dash, and underscore
update_repository_name(Client, Input)
when is_map(Client), is_map(Input) ->
update_repository_name(Client, Input, []).
update_repository_name(Client, Input, Options)
when is_map(Client), is_map(Input), is_list(Options) ->
request(Client, <<"UpdateRepositoryName">>, Input, Options).
%%% Internal functions
-spec request(aws_client:aws_client(), binary(), map(), list()) ->
{ok, Result, {integer(), list(), hackney:client()}} |
{error, Error, {integer(), list(), hackney:client()}} |
{error, term()} when
Result :: map() | undefined,
Error :: map().
%% @doc Dispatch one CodeCommit action through the shared request wrapper.
%% The actual HTTP round trip is deferred in a fun so that
%% aws_request:request/2 can apply its retry policy from Options.
request(Client, Action, Input, Options) ->
RequestFun = fun() -> do_request(Client, Action, Input, Options) end,
aws_request:request(RequestFun, Options).
%% @doc Perform a single signed HTTP POST for Action against the
%% CodeCommit endpoint derived from the client's region/endpoint config.
do_request(Client, Action, Input0, Options) ->
Client1 = Client#{service => <<"codecommit">>},
Host = build_host(<<"codecommit">>, Client1),
URL = build_url(Host, Client1),
%% AWS JSON 1.1 protocol: the operation is selected via X-Amz-Target.
Headers = [
{<<"Host">>, Host},
{<<"Content-Type">>, <<"application/x-amz-json-1.1">>},
{<<"X-Amz-Target">>, <<"CodeCommit_20150413.", Action/binary>>}
],
Input = Input0,
Payload = jsx:encode(Input),
%% sign_request adds the authorization/date headers (presumably SigV4;
%% confirm against aws_request).
SignedHeaders = aws_request:sign_request(Client1, <<"POST">>, URL, Headers, Payload),
Response = hackney:request(post, URL, SignedHeaders, Payload, Options),
handle_response(Response).
%% @doc Normalise hackney results:
%% 200 with an empty body -> {ok, undefined, ...} (action returns no data);
%% 200 with a JSON body -> {ok, DecodedMap, ...};
%% any other status code -> {error, DecodedErrorBody, ...};
%% transport-level failure -> {error, Reason}.
handle_response({ok, 200, ResponseHeaders, Client}) ->
case hackney:body(Client) of
{ok, <<>>} ->
{ok, undefined, {200, ResponseHeaders, Client}};
{ok, Body} ->
Result = jsx:decode(Body),
{ok, Result, {200, ResponseHeaders, Client}}
end;
handle_response({ok, StatusCode, ResponseHeaders, Client}) ->
{ok, Body} = hackney:body(Client),
Error = jsx:decode(Body),
{error, Error, {StatusCode, ResponseHeaders, Client}};
handle_response({error, Reason}) ->
{error, Reason}.
%% @doc Compute the request host name.
%% The special region <<"local">> targets a test stack: an explicitly
%% configured endpoint wins, otherwise "localhost". For a real region the
%% host is "<service-prefix>.<region>.<endpoint>".
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := Endpoint}) ->
Endpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
<<"localhost">>;
build_host(EndpointPrefix, #{region := Region, endpoint := Endpoint}) ->
aws_util:binary_join([EndpointPrefix, Region, Endpoint], <<".">>).
%% @doc Assemble "<proto>://<host>:<port>/" from the client configuration.
build_url(Host, Client) ->
Proto = aws_client:proto(Client),
Port = aws_client:port(Client),
aws_util:binary_join([Proto, <<"://">>, Host, <<":">>, Port, <<"/">>], <<"">>).
|
b456736272eb1a8e7a417d55d8824854b5a6fc1d5dd5eb29e3eedd43499e5cda | mzp/coq-ruby | envars.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
val coqlib : unit -> string
val coqbin : unit -> string
val camlbin : unit -> string
val camlp4bin : unit -> string
val camllib : unit -> string
val camlp4lib : unit -> string
| null | https://raw.githubusercontent.com/mzp/coq-ruby/99b9f87c4397f705d1210702416176b13f8769c1/lib/envars.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
********************************************************************** | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
val coqlib : unit -> string
val coqbin : unit -> string
val camlbin : unit -> string
val camlp4bin : unit -> string
val camllib : unit -> string
val camlp4lib : unit -> string
|
9a3232b5633c1d4b199de4eaea996254310c0e95033956e065a53c9632c78eab | kosmikus/multirec | HFunctor.hs | # LANGUAGE GADTs #
{-# LANGUAGE RankNTypes #-}
{-# LANGUAGE TypeOperators #-}
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
-----------------------------------------------------------------------------
-- |
-- Module : Generics.MultiRec.HFunctor
Copyright : ( c ) 2008 - -2010 Universiteit Utrecht
-- License : BSD3
--
Maintainer :
-- Stability : experimental
-- Portability : non-portable
--
-- The definition of functorial map.
--
-----------------------------------------------------------------------------
module Generics.MultiRec.HFunctor where
import Control.Applicative (Applicative(..), (<$>), (<*>), WrappedMonad(..))
import Data.Traversable (Traversable(..))
import Generics.MultiRec.Base
-- * Generic map
-- We define a general 'hmapA' that works on applicative functors.
The simpler ' hmap ' is a special case .
class HFunctor phi f where
hmapA :: (Applicative a) =>
(forall ix. phi ix -> r ix -> a (r' ix)) ->
phi ix -> f r ix -> a (f r' ix)
instance El phi xi => HFunctor phi (I xi) where
hmapA f _ (I x) = I <$> f proof x
instance HFunctor phi (K x) where
hmapA _ _ (K x) = pure (K x)
instance HFunctor phi U where
hmapA _ _ U = pure U
instance (HFunctor phi f, HFunctor phi g) => HFunctor phi (f :+: g) where
hmapA f p (L x) = L <$> hmapA f p x
hmapA f p (R y) = R <$> hmapA f p y
instance (HFunctor phi f, HFunctor phi g) => HFunctor phi (f :*: g) where
hmapA f p (x :*: y) = (:*:) <$> hmapA f p x <*> hmapA f p y
instance HFunctor phi f => HFunctor phi (f :>: ix) where
hmapA f p (Tag x) = Tag <$> hmapA f p x
instance (Traversable f, HFunctor phi g) => HFunctor phi (f :.: g) where
hmapA f p (D x) = D <$> traverse (hmapA f p) x
instance (Constructor c, HFunctor phi f) => HFunctor phi (C c f) where
hmapA f p (C x) = C <$> hmapA f p x
-- | The function 'hmap' takes a functor @f@. All the recursive instances
-- in that functor are wrapped by an application of @r@. The argument to
-- 'hmap' takes a function that transforms @r@ occurrences into @r'@
-- occurrences, for every @ix@. In order to associate the index
-- with the correct family @phi@, the argument to @hmap@ is additionally
-- parameterized by a witness of type @phi ix@.
hmap :: (HFunctor phi f) =>
(forall ix. phi ix -> r ix -> r' ix) ->
phi ix -> f r ix -> f r' ix
hmap f p x = unI0 (hmapA (\ ix x -> I0 (f ix x)) p x)
-- | Monadic version of 'hmap'.
hmapM :: (HFunctor phi f, Monad m) =>
(forall ix. phi ix -> r ix -> m (r' ix)) ->
phi ix -> f r ix -> m (f r' ix)
hmapM f p x = unwrapMonad (hmapA (\ ix x -> WrapMonad (f ix x)) p x)
| null | https://raw.githubusercontent.com/kosmikus/multirec/10ea901f0e1067bbe3632b1fab34103c83eeff0f/src/Generics/MultiRec/HFunctor.hs | haskell | # LANGUAGE RankNTypes #
# LANGUAGE TypeOperators #
---------------------------------------------------------------------------
|
Module : Generics.MultiRec.HFunctor
License : BSD3
Stability : experimental
Portability : non-portable
The definition of functorial map.
---------------------------------------------------------------------------
* Generic map
We define a general 'hmapA' that works on applicative functors.
with the correct family @phi@, the argument to @hmap@ is additionally
parameterized by a witness of type @phi ix@. | # LANGUAGE GADTs #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
Copyright : ( c ) 2008 - -2010 Universiteit Utrecht
Maintainer :
module Generics.MultiRec.HFunctor where
import Control.Applicative (Applicative(..), (<$>), (<*>), WrappedMonad(..))
import Data.Traversable (Traversable(..))
import Generics.MultiRec.Base
The simpler ' hmap ' is a special case .
class HFunctor phi f where
hmapA :: (Applicative a) =>
(forall ix. phi ix -> r ix -> a (r' ix)) ->
phi ix -> f r ix -> a (f r' ix)
instance El phi xi => HFunctor phi (I xi) where
hmapA f _ (I x) = I <$> f proof x
instance HFunctor phi (K x) where
hmapA _ _ (K x) = pure (K x)
instance HFunctor phi U where
hmapA _ _ U = pure U
instance (HFunctor phi f, HFunctor phi g) => HFunctor phi (f :+: g) where
hmapA f p (L x) = L <$> hmapA f p x
hmapA f p (R y) = R <$> hmapA f p y
instance (HFunctor phi f, HFunctor phi g) => HFunctor phi (f :*: g) where
hmapA f p (x :*: y) = (:*:) <$> hmapA f p x <*> hmapA f p y
instance HFunctor phi f => HFunctor phi (f :>: ix) where
hmapA f p (Tag x) = Tag <$> hmapA f p x
instance (Traversable f, HFunctor phi g) => HFunctor phi (f :.: g) where
hmapA f p (D x) = D <$> traverse (hmapA f p) x
instance (Constructor c, HFunctor phi f) => HFunctor phi (C c f) where
hmapA f p (C x) = C <$> hmapA f p x
| The function ' hmap ' takes a functor @f@. All the recursive instances
in that functor are wrapped by an application of @r@. The argument to
' hmap ' takes a function that transformes @r@ occurrences into @r'@
occurrences , for every In order to associate the index
hmap :: (HFunctor phi f) =>
(forall ix. phi ix -> r ix -> r' ix) ->
phi ix -> f r ix -> f r' ix
hmap f p x = unI0 (hmapA (\ ix x -> I0 (f ix x)) p x)
| Monadic version of ' hmap ' .
hmapM :: (HFunctor phi f, Monad m) =>
(forall ix. phi ix -> r ix -> m (r' ix)) ->
phi ix -> f r ix -> m (f r' ix)
hmapM f p x = unwrapMonad (hmapA (\ ix x -> WrapMonad (f ix x)) p x)
|
e945fcfcfb8ec5b797cf311b76a59b9ce96b1c7b9fd7dffe9d61feb32c2bd8da | lispbuilder/lispbuilder | objects.lisp | ;;;;; Converted from the "Objects" Processing example at:
;;;;; ""
;;;;; (C)2006 Luke J Crook
(in-package #:sdl-gfx-examples)
(defvar *width* 200)
(defvar *height* 200)
(defclass m-rect ()
((w :accessor w :initform 0 :initarg :w)
(xpos :accessor xpos :initform 0 :initarg :xpos)
(h :accessor h :initform 0 :initarg :h)
(ypos :accessor ypos :initform 0 :initarg :ypos)
(d :accessor d :initform 0 :initarg :d)
(tt :accessor tt :initform 0 :initarg :tt)))
(defvar *r1* (make-instance 'm-rect :w 1 :xpos 134.0 :h 0.532 :ypos (* 0.083 *height*) :d 10.0 :tt 60))
(defvar *r2* (make-instance 'm-rect :w 2 :xpos 44.0 :h 0.166 :ypos (* 0.332 *height*) :d 5.0 :tt 50))
(defvar *r3* (make-instance 'm-rect :w 2 :xpos 58.0 :h 0.332 :ypos (* 0.4482 *height*) :d 10.0 :tt 35))
(defvar *r4* (make-instance 'm-rect :w 1 :xpos 120.0 :h 0.0498 :ypos (* 0.913 *height*) :d 15.0 :tt 60))
(defmethod move-to-y ((rect m-rect) posy damping)
(let ((dif (- (ypos rect) posy)))
(if (> (abs dif) 1)
(decf (ypos rect) (/ dif damping)))))
(defmethod move-to-x ((rect m-rect) posx damping)
(let ((dif (- (xpos rect) posx)))
(if (> (abs dif) 1)
(decf (xpos rect) (/ dif damping)))))
(defmethod display ((rect m-rect) &key (surface sdl:*default-display*))
(dotimes (i (tt rect))
(sdl-gfx:draw-box-* (sdl-base:to-int (sdl-base::clamp-to-sshort (+ (xpos rect)
(* i (+ (d rect)
(w rect))))))
(sdl-base:to-int (sdl-base::clamp-to-sshort (ypos rect)))
(sdl-base:to-int (sdl-base::clamp-to-sshort (w rect)))
(sdl-base:to-int (sdl-base::clamp-to-sshort (* (h rect)
(sdl:height surface))))
:surface surface
:color sdl:*white*)))
(defun objects ()
(let ((mouse-x 0) (mouse-y 0)
(100-frames-p (every-n-frames 100)))
(sdl:with-init ()
(sdl:window *width* *height* :title-caption "Objects, from Processing.org")
(setf (sdl:frame-rate) 60)
(sdl:clear-display (sdl:color))
(sdl-gfx:initialise-default-font)
(draw-fps "Calculating FPS....." 10 150 sdl:*default-font* sdl:*default-display* t)
(sdl:with-events ()
(:quit-event () t)
(:video-expose-event () (sdl:update-display))
(:mouse-motion-event (:x x :y y)
(setf mouse-x x
mouse-y y))
(:idle ()
(sdl:clear-display (sdl:color))
(display *r1*)
(display *r2*)
(display *r3*)
(display *r4*)
(move-to-x *r1* (- mouse-x (/ *width* 2)) 30)
(move-to-x *r2* (mod (+ mouse-x (* *width* 0.05)) *width*) 20)
(move-to-x *r3* (/ mouse-x 4) 40)
(move-to-x *r4* (- mouse-x (/ *width* 2)) 50)
(move-to-y *r1* (+ mouse-y (* *height* 0.1)) 30)
(move-to-y *r2* (+ mouse-y (* *height* 0.025)) 20)
(move-to-y *r3* (- mouse-y (* *height* 0.025)) 40)
(move-to-y *r4* (- *height* mouse-y) 50)
(draw-fps (format nil "FPS : ~2$" (sdl:average-fps))
10 150 sdl:*default-font* sdl:*default-display*
(funcall 100-frames-p))
(sdl:update-display))))))
| null | https://raw.githubusercontent.com/lispbuilder/lispbuilder/589b3c6d552bbec4b520f61388117d6c7b3de5ab/lispbuilder-sdl-gfx/examples/objects.lisp | lisp | Converted from the "Objects" Processing example at:
""
(C)2006 Luke J Crook |
(in-package #:sdl-gfx-examples)
(defvar *width* 200)
(defvar *height* 200)
;; A horizontal bar made of TT equal boxes: (XPOS, YPOS) locates the first
;; box, W is the box width, D the gap between boxes, and H the box height
;; expressed as a fraction of the window height (see DISPLAY below).
(defclass m-rect ()
((w :accessor w :initform 0 :initarg :w)        ; box width, pixels
(xpos :accessor xpos :initform 0 :initarg :xpos) ; x of first box
(h :accessor h :initform 0 :initarg :h)          ; height fraction of window
(ypos :accessor ypos :initform 0 :initarg :ypos) ; y of the bar
(d :accessor d :initform 0 :initarg :d)          ; gap between boxes, pixels
(tt :accessor tt :initform 0 :initarg :tt)))     ; number of boxes drawn
(defvar *r1* (make-instance 'm-rect :w 1 :xpos 134.0 :h 0.532 :ypos (* 0.083 *height*) :d 10.0 :tt 60))
(defvar *r2* (make-instance 'm-rect :w 2 :xpos 44.0 :h 0.166 :ypos (* 0.332 *height*) :d 5.0 :tt 50))
(defvar *r3* (make-instance 'm-rect :w 2 :xpos 58.0 :h 0.332 :ypos (* 0.4482 *height*) :d 10.0 :tt 35))
(defvar *r4* (make-instance 'm-rect :w 1 :xpos 120.0 :h 0.0498 :ypos (* 0.913 *height*) :d 15.0 :tt 60))
(defmethod move-to-y ((rect m-rect) posy damping)
  "Ease RECT vertically toward POSY by 1/DAMPING of the remaining gap,
stopping once the gap is within one pixel."
  (let ((gap (- (ypos rect) posy)))
    (when (> (abs gap) 1)
      (decf (ypos rect) (/ gap damping)))))
(defmethod move-to-x ((rect m-rect) posx damping)
  "Ease RECT horizontally toward POSX by 1/DAMPING of the remaining gap,
stopping once the gap is within one pixel."
  (let ((gap (- (xpos rect) posx)))
    (when (> (abs gap) 1)
      (decf (xpos rect) (/ gap damping)))))
;; Draw RECT onto SURFACE as TT white filled boxes, each W wide and
;; (H * surface height) tall, spaced (D + W) apart starting at XPOS.
;; Every coordinate is clamped into SDL's signed-short range and converted
;; to an integer before the gfx call.
(defmethod display ((rect m-rect) &key (surface sdl:*default-display*))
(dotimes (i (tt rect))
(sdl-gfx:draw-box-* (sdl-base:to-int (sdl-base::clamp-to-sshort (+ (xpos rect)
(* i (+ (d rect)
(w rect))))))
(sdl-base:to-int (sdl-base::clamp-to-sshort (ypos rect)))
(sdl-base:to-int (sdl-base::clamp-to-sshort (w rect)))
(sdl-base:to-int (sdl-base::clamp-to-sshort (* (h rect)
(sdl:height surface))))
:surface surface
:color sdl:*white*)))
;; Main loop: opens a *width* x *height* window at 60 fps and, on every
;; idle frame, redraws the four bars (*r1*..*r4*) while easing each toward
;; a target derived from the current mouse position. The FPS readout is
;; refreshed every 100 frames via the every-n-frames closure.
(defun objects ()
(let ((mouse-x 0) (mouse-y 0)
(100-frames-p (every-n-frames 100)))
(sdl:with-init ()
(sdl:window *width* *height* :title-caption "Objects, from Processing.org")
(setf (sdl:frame-rate) 60)
(sdl:clear-display (sdl:color))
(sdl-gfx:initialise-default-font)
(draw-fps "Calculating FPS....." 10 150 sdl:*default-font* sdl:*default-display* t)
(sdl:with-events ()
(:quit-event () t)
(:video-expose-event () (sdl:update-display))
;; Track the pointer; targets below are functions of (mouse-x, mouse-y).
(:mouse-motion-event (:x x :y y)
(setf mouse-x x
mouse-y y))
(:idle ()
(sdl:clear-display (sdl:color))
(display *r1*)
(display *r2*)
(display *r3*)
(display *r4*)
;; Each bar eases toward its own mouse-derived target with its own
;; damping factor, giving the staggered-follow effect.
(move-to-x *r1* (- mouse-x (/ *width* 2)) 30)
(move-to-x *r2* (mod (+ mouse-x (* *width* 0.05)) *width*) 20)
(move-to-x *r3* (/ mouse-x 4) 40)
(move-to-x *r4* (- mouse-x (/ *width* 2)) 50)
(move-to-y *r1* (+ mouse-y (* *height* 0.1)) 30)
(move-to-y *r2* (+ mouse-y (* *height* 0.025)) 20)
(move-to-y *r3* (- mouse-y (* *height* 0.025)) 40)
(move-to-y *r4* (- *height* mouse-y) 50)
(draw-fps (format nil "FPS : ~2$" (sdl:average-fps))
10 150 sdl:*default-font* sdl:*default-display*
(funcall 100-frames-p))
(sdl:update-display))))))
|
7a536963eef12a66bca4136331a7a6e894de6bde206139345f963125bf400a0c | bhauman/advent-of-clojure | day24.clj | (ns advent-2017.day24
(:require
[clojure.java.io :as io]
[clojure.string :as string]))
(def data
(->> (io/resource "2017/day24")
slurp))
(defn make-index [raw-data]
(->> (string/split-lines raw-data)
(map #(string/replace % "/" " "))
(map #(format "[%s]" %))
(mapv read-string)
(reduce (fn [accum [h t :as part]]
(-> accum
(update h (fnil conj #{}) part)
(update t (fnil conj #{}) part)))
{})))
;; an indexed version of the data for constant time look ups
(def index (make-index data))
(defn remove-part [parts-index [h t :as part]]
(-> parts-index
(update h disj part)
(update t disj part)))
(defn strength [parts]
(reduce + 0 (flatten parts)))
(defn other-member [h? [h t]]
(if (= h? h) t h))
;; ignores partial briges as they don't matter for solution
(defn all-bridges [parts-index main-part tail-part]
(if (empty? (parts-index tail-part))
[[main-part]]
(->> (parts-index tail-part)
(mapcat #(all-bridges (remove-part parts-index %) % (other-member tail-part %)))
(mapv #(cons main-part %)))))
part 1
#_(with-redefs [all-bridges (memoize all-bridges)]
(time (->> (all-bridges index [0 0] 0)
(map strength)
(reduce max))))
= > 1906
Elapsed time : 21145.979859 msecs
part 2
#_(with-redefs [all-bridges (memoize all-bridges)]
(time
(let [bridges (all-bridges index [0 0] 0)
max-length (reduce max (map count bridges))]
(->> bridges
(filter #(= (count %) max-length))
(map strength)
(reduce max)))))
= > 1824
Elapsed time : 3697.887612 msec
;; A tree-seq based solution
;; it collects the needed totals as it travels
(defn branch? [[parts-index available-pin strength-total length]]
(parts-index available-pin))
(defn children [[parts-index available-pin strength-total length]]
(when-let [childs (parts-index available-pin)]
(map
(fn [next-part]
[(remove-part parts-index next-part)
(other-member available-pin next-part)
(+ strength-total (apply + next-part))
(inc length)])
childs)))
part 1
#_(->> (tree-seq branch? children [index 0 0 0])
(map #(nth % 2))
(reduce max)
time)
= > 1906
Elapsed time : 4472.040553 msecs
part 2
#_(time
(let [bridges (tree-seq branch? children [index 0 0 0])
max-length (reduce max (map last bridges))]
(->> bridges
(filter #(= (last %) max-length))
(map #(nth % 3))
(reduce max))))
= > 1824
Elapsed time : 6435.686022 msecs
| null | https://raw.githubusercontent.com/bhauman/advent-of-clojure/856763baf45bf7bf452ffd304dc1b89f9bc879a6/src/advent-2017/day24.clj | clojure | an indexed version of the data for constant time look ups
ignores partial briges as they don't matter for solution
A tree-seq based solution
it collects the needed totals as it travels | (ns advent-2017.day24
(:require
[clojure.java.io :as io]
[clojure.string :as string]))
(def data
(->> (io/resource "2017/day24")
slurp))
;; Parse the puzzle input (one "a/b" part per line) into a map from pin
;; value to the set of parts [a b] carrying that pin, giving constant-time
;; lookup of the candidate parts for any open pin.
(defn make-index [raw-data]
(->> (string/split-lines raw-data)
(map #(string/replace % "/" " "))   ; "3/4" -> "3 4"
(map #(format "[%s]" %))            ; -> "[3 4]"
(mapv read-string)                  ; -> [3 4]
(reduce (fn [accum [h t :as part]]
(-> accum
(update h (fnil conj #{}) part)
(update t (fnil conj #{}) part)))
{})))
(def index (make-index data))
(defn remove-part
  "Return PARTS-INDEX with PART removed from the entry of each of its pins."
  [parts-index [h t :as part]]
  (reduce (fn [idx pin] (update idx pin disj part))
          parts-index
          [h t]))
(defn strength
  "Total of all port values in a bridge (a seq of [a b] parts)."
  [parts]
  (apply + 0 (flatten parts)))
(defn other-member
  "Given one pin of a part [h t], return the part's other pin."
  [pin [h t]]
  (if (= pin h) t h))
;; All maximal bridges buildable from PARTS-INDEX with MAIN-PART already
;; placed and TAIL-PART as its open pin. Partial bridges are deliberately
;; not returned ("ignores partial bridges" above): every result is extended
;; until no remaining part fits the open pin.
(defn all-bridges [parts-index main-part tail-part]
(if (empty? (parts-index tail-part))
[[main-part]]
(->> (parts-index tail-part)
(mapcat #(all-bridges (remove-part parts-index %) % (other-member tail-part %)))
(mapv #(cons main-part %)))))
part 1
#_(with-redefs [all-bridges (memoize all-bridges)]
(time (->> (all-bridges index [0 0] 0)
(map strength)
(reduce max))))
= > 1906
Elapsed time : 21145.979859 msecs
part 2
#_(with-redefs [all-bridges (memoize all-bridges)]
(time
(let [bridges (all-bridges index [0 0] 0)
max-length (reduce max (map count bridges))]
(->> bridges
(filter #(= (count %) max-length))
(map strength)
(reduce max)))))
= > 1824
Elapsed time : 3697.887612 msec
(defn branch? [[parts-index available-pin strength-total length]]
(parts-index available-pin))
(defn children [[parts-index available-pin strength-total length]]
(when-let [childs (parts-index available-pin)]
(map
(fn [next-part]
[(remove-part parts-index next-part)
(other-member available-pin next-part)
(+ strength-total (apply + next-part))
(inc length)])
childs)))
part 1
#_(->> (tree-seq branch? children [index 0 0 0])
(map #(nth % 2))
(reduce max)
time)
= > 1906
Elapsed time : 4472.040553 msecs
part 2
#_(time
(let [bridges (tree-seq branch? children [index 0 0 0])
max-length (reduce max (map last bridges))]
(->> bridges
(filter #(= (last %) max-length))
(map #(nth % 3))
(reduce max))))
= > 1824
Elapsed time : 6435.686022 msecs
|
45233827ba53576ca87e287fe3908472c77f8e9b2e6aaad9b61e615a67ea5faf | tuura/plato | Celement_with_env_3.hs | module Concept where
import Tuura.Concept.STG
-- C-element with environment circuit described using protocol-level concepts
component a b c = handshake00 a c <> handshake00 b c
<> inputs [a, b] <> outputs [c]
| null | https://raw.githubusercontent.com/tuura/plato/4b528f73ad677cf634dde7644a0ec5c759114baf/examples/Celement_with_env_3.hs | haskell | C-element with environment circuit described using protocol-level concepts | module Concept where
import Tuura.Concept.STG
component a b c = handshake00 a c <> handshake00 b c
<> inputs [a, b] <> outputs [c]
|
ef18d27056beacd9eda3267e4faaf14fd09ab7d5d0bef66ba48daa10b8b701f6 | fredrikt/yxa | sipsocket_test.erl | %%%-------------------------------------------------------------------
%%% File : sipsocket_test.erl
@author < >
%%% @doc Sipsocket test module, fakes network communication to
%%% make it possible to test other modules.
%%%
@since 18 Jul 2005 by < >
%%% @end
%%%-------------------------------------------------------------------
-module(sipsocket_test).
%%-compile(export_all).
-behaviour(sipsocket).
%%--------------------------------------------------------------------
%% External exports
%%--------------------------------------------------------------------
-export([
start_link/0,
send/5,
is_reliable_transport/1,
get_socket/1,
get_specific_socket/1,
get_raw_socket/1,
get_remote_peer/1,
close_socket/1
]).
%%--------------------------------------------------------------------
%% Include files
%%--------------------------------------------------------------------
-include("sipsocket.hrl").
%%====================================================================
%% External functions
%%====================================================================
%%--------------------------------------------------------------------
%% @spec () -> ignore
%%
%% @doc Would've done some useful initialization if this was not
%% merely a test module.
%% @end
%%--------------------------------------------------------------------
start_link() ->
ignore.
%%====================================================================
Interface functions
%%====================================================================
%%--------------------------------------------------------------------
@spec ( SipSocket , Proto , Host , Port , Message ) - >
%% ok |
%% {error, Reason}
%%
%% SipSocket = #sipsocket{}
Proto = atom ( ) " yxa_test "
%% Host = string()
Port = integer ( )
%% Message = term()
%%
%% Reason = string()
%%
%% @doc Fake sending Message to Host:Port. Return failure or
%% success based on process dictionary.
%% @end
%%--------------------------------------------------------------------
send(SipSocket, Proto, _Host, _Port, _Message)
when is_record(SipSocket, sipsocket), SipSocket#sipsocket.proto /= Proto ->
{error, "Protocol mismatch"};
send(SipSocket, yxa_test, Host, Port, Message) when is_record(SipSocket, sipsocket) ->
Proto = SipSocket#sipsocket.proto,
self() ! {sipsocket_test, send, {Proto, Host, Port}, Message},
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, send_result}) of
{true, {error, Reason}} ->
{error, Reason};
false ->
ok
end.
%%--------------------------------------------------------------------
%% @spec (Dst) -> #sipsocket{} | term()
%%
Dst = # sipdst { }
%%
%% @doc Return a fake socket or a term based on process dict.
%% @end
%%--------------------------------------------------------------------
get_socket(#sipdst{proto = yxa_test}) ->
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, get_socket}) of
{true, Res} ->
Res;
false ->
#sipsocket{module = ?MODULE,
proto = yxa_test,
pid = self()
}
end.
%%--------------------------------------------------------------------
%% @spec (Id) -> #sipsocket{} | term()
%%
%% Id = #ob_id{}
%%
%% @doc Return a fake socket or a term based on process dict.
%% @end
%%--------------------------------------------------------------------
get_specific_socket(#ob_id{proto = yxa_test}) ->
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, get_specific_socket}) of
{true, Res} ->
Res;
false ->
#sipsocket{module = ?MODULE,
proto = yxa_test,
pid = self()
}
end.
%%--------------------------------------------------------------------
@spec ( SipSocket ) - > # sipsocket { } | term ( )
%%
%% SipSocket = #sipsocket{}
%%
%% @doc Return a fake raw socket or a term based on process dict.
%% @end
%%--------------------------------------------------------------------
get_raw_socket(#sipsocket{proto = yxa_test}) ->
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, get_raw_socket}) of
{true, Res} ->
Res;
false ->
{sipsocket_test, fake_raw_socket}
end.
%%--------------------------------------------------------------------
%% @spec (SipSocket) ->
{ ok , Proto , Addr , Port } | term ( )
%%
%% SipSocket = #sipsocket{}
%%
%% Proto = yxa_test
= string ( ) " \"192.0.2.242\ " "
%% Port = integer()
%%
%% @doc Return fake remote peer info based on process dictionary.
%% @end
%%--------------------------------------------------------------------
get_remote_peer(#sipsocket{proto = yxa_test}) ->
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, get_remote_peer}) of
{true, Res} ->
Res;
false ->
{ok, yxa_test, "192.0.2.242", sipsocket:get_listenport(yxa_test)}
end.
%%--------------------------------------------------------------------
%% @spec (SipSocket) -> true | false
%%
%% SipSocket = #sipsocket{}
%%
%% @doc Fake response based on process dictionary.
%% @end
%%--------------------------------------------------------------------
is_reliable_transport(#sipsocket{proto = yxa_test}) ->
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, is_reliable_transport}) of
{true, Res} ->
Res;
false ->
false
end.
%%--------------------------------------------------------------------
%% @spec (SipSocket) ->
%% ok |
%% {error, Reason}
%%
%% SipSocket = #sipsocket{}
%%
%% Reason = not_applicable | term()
%%
%% @doc Fake response based on process dictionary.
%% @end
%%--------------------------------------------------------------------
close_socket(#sipsocket{proto = yxa_test}) ->
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, close_socket}) of
{true, Res} ->
Res;
false ->
ok
end.
%%====================================================================
Internal functions
%%====================================================================
%%--------------------------------------------------------------------
%% Function:
. :
%% Returns :
%%--------------------------------------------------------------------
| null | https://raw.githubusercontent.com/fredrikt/yxa/85da46a999d083e6f00b5f156a634ca9be65645b/src/transportlayer/sipsocket_test.erl | erlang | -------------------------------------------------------------------
File : sipsocket_test.erl
@doc Sipsocket test module, fakes network communication to
make it possible to test other modules.
@end
-------------------------------------------------------------------
-compile(export_all).
--------------------------------------------------------------------
External exports
--------------------------------------------------------------------
--------------------------------------------------------------------
Include files
--------------------------------------------------------------------
====================================================================
External functions
====================================================================
--------------------------------------------------------------------
@spec () -> ignore
@doc Would've done some useful initialization if this was not
merely a test module.
@end
--------------------------------------------------------------------
====================================================================
====================================================================
--------------------------------------------------------------------
ok |
{error, Reason}
SipSocket = #sipsocket{}
Host = string()
Message = term()
Reason = string()
@doc Fake sending Message to Host:Port. Return failure or
success based on process dictionary.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@spec (Dst) -> #sipsocket{} | term()
@doc Return a fake socket or a term based on process dict.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@spec (Id) -> #sipsocket{} | term()
Id = #ob_id{}
@doc Return a fake socket or a term based on process dict.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
SipSocket = #sipsocket{}
@doc Return a fake raw socket or a term based on process dict.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@spec (SipSocket) ->
SipSocket = #sipsocket{}
Proto = yxa_test
Port = integer()
@doc Return fake remote peer info based on process dictionary.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@spec (SipSocket) -> true | false
SipSocket = #sipsocket{}
@doc Fake response based on process dictionary.
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@spec (SipSocket) ->
ok |
{error, Reason}
SipSocket = #sipsocket{}
Reason = not_applicable | term()
@doc Fake response based on process dictionary.
@end
--------------------------------------------------------------------
====================================================================
====================================================================
--------------------------------------------------------------------
Function:
Returns :
-------------------------------------------------------------------- | @author < >
@since 18 Jul 2005 by < >
-module(sipsocket_test).
-behaviour(sipsocket).
-export([
start_link/0,
send/5,
is_reliable_transport/1,
get_socket/1,
get_specific_socket/1,
get_raw_socket/1,
get_remote_peer/1,
close_socket/1
]).
-include("sipsocket.hrl").
start_link() ->
ignore.
Interface functions
@spec ( SipSocket , Proto , Host , Port , Message ) - >
Proto = atom ( ) " yxa_test "
Port = integer ( )
send(SipSocket, Proto, _Host, _Port, _Message)
when is_record(SipSocket, sipsocket), SipSocket#sipsocket.proto /= Proto ->
{error, "Protocol mismatch"};
send(SipSocket, yxa_test, Host, Port, Message) when is_record(SipSocket, sipsocket) ->
Proto = SipSocket#sipsocket.proto,
self() ! {sipsocket_test, send, {Proto, Host, Port}, Message},
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, send_result}) of
{true, {error, Reason}} ->
{error, Reason};
false ->
ok
end.
Dst = # sipdst { }
get_socket(#sipdst{proto = yxa_test}) ->
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, get_socket}) of
{true, Res} ->
Res;
false ->
#sipsocket{module = ?MODULE,
proto = yxa_test,
pid = self()
}
end.
get_specific_socket(#ob_id{proto = yxa_test}) ->
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, get_specific_socket}) of
{true, Res} ->
Res;
false ->
#sipsocket{module = ?MODULE,
proto = yxa_test,
pid = self()
}
end.
@spec ( SipSocket ) - > # sipsocket { } | term ( )
get_raw_socket(#sipsocket{proto = yxa_test}) ->
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, get_raw_socket}) of
{true, Res} ->
Res;
false ->
{sipsocket_test, fake_raw_socket}
end.
{ ok , Proto , Addr , Port } | term ( )
= string ( ) " \"192.0.2.242\ " "
get_remote_peer(#sipsocket{proto = yxa_test}) ->
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, get_remote_peer}) of
{true, Res} ->
Res;
false ->
{ok, yxa_test, "192.0.2.242", sipsocket:get_listenport(yxa_test)}
end.
is_reliable_transport(#sipsocket{proto = yxa_test}) ->
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, is_reliable_transport}) of
{true, Res} ->
Res;
false ->
false
end.
close_socket(#sipsocket{proto = yxa_test}) ->
case autotest_util:is_unit_testing(?MODULE, {sipsocket_test, close_socket}) of
{true, Res} ->
Res;
false ->
ok
end.
Internal functions
. :
|
4cb82f2ac4371a77c4192106b3fd9b69d39ff9d98aae71f3da97f70de8357a18 | roelvandijk/numerals | TestData.hs | |
[ @ISO639 - 1@ ] -
[ @ISO639 - 2@ ] -
[ @ISO639 - 3@ ] xpq
[ @Native name@ ] -
[ @English name@ ] Mohegan - Pequot
[@ISO639-1@] -
[@ISO639-2@] -
[@ISO639-3@] xpq
[@Native name@] -
[@English name@] Mohegan-Pequot
-}
module Text.Numeral.Language.XPQ.TestData (cardinals) where
--------------------------------------------------------------------------------
-- Imports
--------------------------------------------------------------------------------
import "numerals" Text.Numeral.Grammar ( defaultInflection )
import "this" Text.Numeral.Test ( TestData )
--------------------------------------------------------------------------------
-- Test data
--------------------------------------------------------------------------------
{-
Sources:
-to-count-in-mohegan-pequot/en/xpq/
-}
cardinals :: (Num i) => TestData i
cardinals =
[ ( "default"
, defaultInflection
, [ (1, "nuqut")
, (2, "nis")
, (3, "shwi")
, (4, "yáw")
, (5, "nupáw")
, (6, "qutôsk")
, (7, "nisôsk")
, (8, "shwôsk")
, (9, "pásukokun")
, (10, "páyaq")
, (11, "páyaq napni nuqut")
, (12, "páyaq napni nis")
, (13, "páyaq napni shwi")
, (14, "páyaq napni yáw")
, (15, "páyaq napni nupáw")
, (16, "páyaq napni qutôsk")
, (17, "páyaq napni nisôsk")
, (18, "páyaq napni shwôsk")
, (19, "páyaq napni pásukokun")
, (20, "nisuncák")
, (21, "nisuncák napni nuqut")
, (22, "nisuncák napni nis")
, (23, "nisuncák napni shwi")
, (24, "nisuncák napni yáw")
, (25, "nisuncák napni nupáw")
, (26, "nisuncák napni qutôsk")
, (27, "nisuncák napni nisôsk")
, (28, "nisuncák napni shwôsk")
, (29, "nisuncák napni pásukokun")
, (30, "swuncák")
, (31, "swuncák napni nuqut")
, (32, "swuncák napni nis")
, (33, "swuncák napni shwi")
, (34, "swuncák napni yáw")
, (35, "swuncák napni nupáw")
, (36, "swuncák napni qutôsk")
, (37, "swuncák napni nisôsk")
, (38, "swuncák napni shwôsk")
, (39, "swuncák napni pásukokun")
, (40, "yáwuncák")
, (41, "yáwuncák napni nuqut")
, (42, "yáwuncák napni nis")
, (43, "yáwuncák napni shwi")
, (44, "yáwuncák napni yáw")
, (45, "yáwuncák napni nupáw")
, (46, "yáwuncák napni qutôsk")
, (47, "yáwuncák napni nisôsk")
, (48, "yáwuncák napni shwôsk")
, (49, "yáwuncák napni pásukokun")
, (50, "nupáw-cahshuncák")
, (51, "nupáw-cahshuncák napni nuqut")
, (52, "nupáw-cahshuncák napni nis")
, (53, "nupáw-cahshuncák napni shwi")
, (54, "nupáw-cahshuncák napni yáw")
, (55, "nupáw-cahshuncák napni nupáw")
, (56, "nupáw-cahshuncák napni qutôsk")
, (57, "nupáw-cahshuncák napni nisôsk")
, (58, "nupáw-cahshuncák napni shwôsk")
, (59, "nupáw-cahshuncák napni pásukokun")
, (60, "qutôsk-cahshuncák")
, (61, "qutôsk-cahshuncák napni nuqut")
, (62, "qutôsk-cahshuncák napni nis")
, (63, "qutôsk-cahshuncák napni shwi")
, (64, "qutôsk-cahshuncák napni yáw")
, (65, "qutôsk-cahshuncák napni nupáw")
, (66, "qutôsk-cahshuncák napni qutôsk")
, (67, "qutôsk-cahshuncák napni nisôsk")
, (68, "qutôsk-cahshuncák napni shwôsk")
, (69, "qutôsk-cahshuncák napni pásukokun")
, (70, "nisôsk-cahshuncák")
, (71, "nisôsk-cahshuncák napni nuqut")
, (72, "nisôsk-cahshuncák napni nis")
, (73, "nisôsk-cahshuncák napni shwi")
, (74, "nisôsk-cahshuncák napni yáw")
, (75, "nisôsk-cahshuncák napni nupáw")
, (76, "nisôsk-cahshuncák napni qutôsk")
, (77, "nisôsk-cahshuncák napni nisôsk")
, (78, "nisôsk-cahshuncák napni shwôsk")
, (79, "nisôsk-cahshuncák napni pásukokun")
, (80, "shwôsk-cahshuncák")
, (81, "shwôsk-cahshuncák napni nuqut")
, (82, "shwôsk-cahshuncák napni nis")
, (83, "shwôsk-cahshuncák napni shwi")
, (84, "shwôsk-cahshuncák napni yáw")
, (85, "shwôsk-cahshuncák napni nupáw")
, (86, "shwôsk-cahshuncák napni qutôsk")
, (87, "shwôsk-cahshuncák napni nisôsk")
, (88, "shwôsk-cahshuncák napni shwôsk")
, (89, "shwôsk-cahshuncák napni pásukokun")
, (90, "pásukokun-cahshuncák")
, (91, "pásukokun-cahshuncák napni nuqut")
, (92, "pásukokun-cahshuncák napni nis")
, (93, "pásukokun-cahshuncák napni shwi")
, (94, "pásukokun-cahshuncák napni yáw")
, (95, "pásukokun-cahshuncák napni nupáw")
, (96, "pásukokun-cahshuncák napni qutôsk")
, (97, "pásukokun-cahshuncák napni nisôsk")
, (98, "pásukokun-cahshuncák napni shwôsk")
, (99, "pásukokun-cahshuncák napni pásukokun")
, (100, "pásuq")
]
)
]
| null | https://raw.githubusercontent.com/roelvandijk/numerals/b1e4121e0824ac0646a3230bd311818e159ec127/src-test/Text/Numeral/Language/XPQ/TestData.hs | haskell | ------------------------------------------------------------------------------
Imports
------------------------------------------------------------------------------
------------------------------------------------------------------------------
Test data
------------------------------------------------------------------------------
Sources:
-to-count-in-mohegan-pequot/en/xpq/
| |
[ @ISO639 - 1@ ] -
[ @ISO639 - 2@ ] -
[ @ISO639 - 3@ ] xpq
[ @Native name@ ] -
[ @English name@ ] Mohegan - Pequot
[@ISO639-1@] -
[@ISO639-2@] -
[@ISO639-3@] xpq
[@Native name@] -
[@English name@] Mohegan-Pequot
-}
module Text.Numeral.Language.XPQ.TestData (cardinals) where
import "numerals" Text.Numeral.Grammar ( defaultInflection )
import "this" Text.Numeral.Test ( TestData )
cardinals :: (Num i) => TestData i
cardinals =
[ ( "default"
, defaultInflection
, [ (1, "nuqut")
, (2, "nis")
, (3, "shwi")
, (4, "yáw")
, (5, "nupáw")
, (6, "qutôsk")
, (7, "nisôsk")
, (8, "shwôsk")
, (9, "pásukokun")
, (10, "páyaq")
, (11, "páyaq napni nuqut")
, (12, "páyaq napni nis")
, (13, "páyaq napni shwi")
, (14, "páyaq napni yáw")
, (15, "páyaq napni nupáw")
, (16, "páyaq napni qutôsk")
, (17, "páyaq napni nisôsk")
, (18, "páyaq napni shwôsk")
, (19, "páyaq napni pásukokun")
, (20, "nisuncák")
, (21, "nisuncák napni nuqut")
, (22, "nisuncák napni nis")
, (23, "nisuncák napni shwi")
, (24, "nisuncák napni yáw")
, (25, "nisuncák napni nupáw")
, (26, "nisuncák napni qutôsk")
, (27, "nisuncák napni nisôsk")
, (28, "nisuncák napni shwôsk")
, (29, "nisuncák napni pásukokun")
, (30, "swuncák")
, (31, "swuncák napni nuqut")
, (32, "swuncák napni nis")
, (33, "swuncák napni shwi")
, (34, "swuncák napni yáw")
, (35, "swuncák napni nupáw")
, (36, "swuncák napni qutôsk")
, (37, "swuncák napni nisôsk")
, (38, "swuncák napni shwôsk")
, (39, "swuncák napni pásukokun")
, (40, "yáwuncák")
, (41, "yáwuncák napni nuqut")
, (42, "yáwuncák napni nis")
, (43, "yáwuncák napni shwi")
, (44, "yáwuncák napni yáw")
, (45, "yáwuncák napni nupáw")
, (46, "yáwuncák napni qutôsk")
, (47, "yáwuncák napni nisôsk")
, (48, "yáwuncák napni shwôsk")
, (49, "yáwuncák napni pásukokun")
, (50, "nupáw-cahshuncák")
, (51, "nupáw-cahshuncák napni nuqut")
, (52, "nupáw-cahshuncák napni nis")
, (53, "nupáw-cahshuncák napni shwi")
, (54, "nupáw-cahshuncák napni yáw")
, (55, "nupáw-cahshuncák napni nupáw")
, (56, "nupáw-cahshuncák napni qutôsk")
, (57, "nupáw-cahshuncák napni nisôsk")
, (58, "nupáw-cahshuncák napni shwôsk")
, (59, "nupáw-cahshuncák napni pásukokun")
, (60, "qutôsk-cahshuncák")
, (61, "qutôsk-cahshuncák napni nuqut")
, (62, "qutôsk-cahshuncák napni nis")
, (63, "qutôsk-cahshuncák napni shwi")
, (64, "qutôsk-cahshuncák napni yáw")
, (65, "qutôsk-cahshuncák napni nupáw")
, (66, "qutôsk-cahshuncák napni qutôsk")
, (67, "qutôsk-cahshuncák napni nisôsk")
, (68, "qutôsk-cahshuncák napni shwôsk")
, (69, "qutôsk-cahshuncák napni pásukokun")
, (70, "nisôsk-cahshuncák")
, (71, "nisôsk-cahshuncák napni nuqut")
, (72, "nisôsk-cahshuncák napni nis")
, (73, "nisôsk-cahshuncák napni shwi")
, (74, "nisôsk-cahshuncák napni yáw")
, (75, "nisôsk-cahshuncák napni nupáw")
, (76, "nisôsk-cahshuncák napni qutôsk")
, (77, "nisôsk-cahshuncák napni nisôsk")
, (78, "nisôsk-cahshuncák napni shwôsk")
, (79, "nisôsk-cahshuncák napni pásukokun")
, (80, "shwôsk-cahshuncák")
, (81, "shwôsk-cahshuncák napni nuqut")
, (82, "shwôsk-cahshuncák napni nis")
, (83, "shwôsk-cahshuncák napni shwi")
, (84, "shwôsk-cahshuncák napni yáw")
, (85, "shwôsk-cahshuncák napni nupáw")
, (86, "shwôsk-cahshuncák napni qutôsk")
, (87, "shwôsk-cahshuncák napni nisôsk")
, (88, "shwôsk-cahshuncák napni shwôsk")
, (89, "shwôsk-cahshuncák napni pásukokun")
, (90, "pásukokun-cahshuncák")
, (91, "pásukokun-cahshuncák napni nuqut")
, (92, "pásukokun-cahshuncák napni nis")
, (93, "pásukokun-cahshuncák napni shwi")
, (94, "pásukokun-cahshuncák napni yáw")
, (95, "pásukokun-cahshuncák napni nupáw")
, (96, "pásukokun-cahshuncák napni qutôsk")
, (97, "pásukokun-cahshuncák napni nisôsk")
, (98, "pásukokun-cahshuncák napni shwôsk")
, (99, "pásukokun-cahshuncák napni pásukokun")
, (100, "pásuq")
]
)
]
|
024ab3cf2c5b6f227d17bd4ef4a10dce44d381bb1ee7e3bf39106a83ed7d976d | emqx/emqx | emqx_gateway_api_authn.erl | %%--------------------------------------------------------------------
Copyright ( c ) 2021 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_gateway_api_authn).
-behaviour(minirest_api).
-include("emqx_gateway_http.hrl").
-include_lib("hocon/include/hoconsc.hrl").
-include_lib("typerefl/include/types.hrl").
-import(hoconsc, [mk/2, ref/2]).
-import(emqx_dashboard_swagger, [error_codes/2]).
-import(
emqx_gateway_http,
[
return_http_error/2,
with_gateway/2,
with_authn/2
]
).
minirest / dashbaord_swagger behaviour callbacks
-export([
api_spec/0,
paths/0,
schema/1
]).
%% http handlers
-export([
authn/2,
users/2,
users_insta/2
]).
%% internal export for emqx_gateway_api_listeners module
-export([schema_authn/0]).
-define(TAGS, [<<"Gateway Authentication">>]).
%%--------------------------------------------------------------------
minirest behaviour callbacks
%%--------------------------------------------------------------------
api_spec() ->
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
paths() ->
emqx_gateway_utils:make_deprecated_paths([
"/gateways/:name/authentication",
"/gateways/:name/authentication/users",
"/gateways/:name/authentication/users/:uid"
]).
%%--------------------------------------------------------------------
%% http handlers
authn(get, #{bindings := #{name := Name0}}) ->
with_gateway(Name0, fun(GwName, _) ->
try emqx_gateway_http:authn(GwName) of
Authn -> {200, Authn}
catch
error:{config_not_found, _} ->
FIXME : should return 404 ?
{204}
end
end);
authn(put, #{
bindings := #{name := Name0},
body := Body
}) ->
with_gateway(Name0, fun(GwName, _) ->
{ok, Authn} = emqx_gateway_http:update_authn(GwName, Body),
{200, Authn}
end);
authn(post, #{
bindings := #{name := Name0},
body := Body
}) ->
with_gateway(Name0, fun(GwName, _) ->
{ok, Authn} = emqx_gateway_http:add_authn(GwName, Body),
{201, Authn}
end);
authn(delete, #{bindings := #{name := Name0}}) ->
with_gateway(Name0, fun(GwName, _) ->
ok = emqx_gateway_http:remove_authn(GwName),
{204}
end).
users(get, #{bindings := #{name := Name0}, query_string := Qs}) ->
with_authn(Name0, fun(
_GwName,
#{
id := AuthId,
chain_name := ChainName
}
) ->
emqx_authn_api:list_users(ChainName, AuthId, parse_qstring(Qs))
end);
users(post, #{
bindings := #{name := Name0},
body := Body
}) ->
with_authn(Name0, fun(
_GwName,
#{
id := AuthId,
chain_name := ChainName
}
) ->
emqx_authn_api:add_user(ChainName, AuthId, Body)
end).
users_insta(get, #{bindings := #{name := Name0, uid := UserId}}) ->
with_authn(Name0, fun(
_GwName,
#{
id := AuthId,
chain_name := ChainName
}
) ->
emqx_authn_api:find_user(ChainName, AuthId, UserId)
end);
users_insta(put, #{
bindings := #{name := Name0, uid := UserId},
body := Body
}) ->
with_authn(Name0, fun(
_GwName,
#{
id := AuthId,
chain_name := ChainName
}
) ->
emqx_authn_api:update_user(ChainName, AuthId, UserId, Body)
end);
users_insta(delete, #{bindings := #{name := Name0, uid := UserId}}) ->
with_authn(Name0, fun(
_GwName,
#{
id := AuthId,
chain_name := ChainName
}
) ->
emqx_authn_api:delete_user(ChainName, AuthId, UserId)
end).
%%--------------------------------------------------------------------
Utils
parse_qstring(Qs) ->
maps:with(
[
<<"page">>,
<<"limit">>,
<<"like_user_id">>,
<<"is_superuser">>
],
Qs
).
%%--------------------------------------------------------------------
%% Swagger defines
%%--------------------------------------------------------------------
schema("/gateways/:name/authentication") ->
#{
'operationId' => authn,
get =>
#{
tags => ?TAGS,
desc => ?DESC(get_authn),
summary => <<"Get Authenticator Configuration">>,
parameters => params_gateway_name_in_path(),
responses =>
?STANDARD_RESP(
#{
200 => schema_authn(),
204 => <<"Authenticator doesn't initiated">>
}
)
},
put =>
#{
tags => ?TAGS,
desc => ?DESC(update_authn),
summary => <<"Update Authenticator Configuration">>,
parameters => params_gateway_name_in_path(),
'requestBody' => schema_authn(),
responses =>
?STANDARD_RESP(#{200 => schema_authn()})
},
post =>
#{
tags => ?TAGS,
desc => ?DESC(add_authn),
summary => <<"Create an Authenticator for a Gateway">>,
parameters => params_gateway_name_in_path(),
'requestBody' => schema_authn(),
responses =>
?STANDARD_RESP(#{201 => schema_authn()})
},
delete =>
#{
tags => ?TAGS,
desc => ?DESC(delete_authn),
summary => <<"Delete the Gateway Authenticator">>,
parameters => params_gateway_name_in_path(),
responses =>
?STANDARD_RESP(#{204 => <<"Deleted">>})
}
};
schema("/gateways/:name/authentication/users") ->
#{
'operationId' => users,
get =>
#{
tags => ?TAGS,
desc => ?DESC(list_users),
summary => <<"List users for a Gateway Authenticator">>,
parameters => params_gateway_name_in_path() ++
params_paging_in_qs() ++
params_fuzzy_in_qs(),
responses =>
?STANDARD_RESP(
#{
200 => emqx_dashboard_swagger:schema_with_example(
ref(emqx_authn_api, response_users),
emqx_authn_api:response_users_example()
)
}
)
},
post =>
#{
tags => ?TAGS,
desc => ?DESC(add_user),
summary => <<"Add User for a Gateway Authenticator">>,
parameters => params_gateway_name_in_path(),
'requestBody' => emqx_dashboard_swagger:schema_with_examples(
ref(emqx_authn_api, request_user_create),
emqx_authn_api:request_user_create_examples()
),
responses =>
?STANDARD_RESP(
#{
201 => emqx_dashboard_swagger:schema_with_example(
ref(emqx_authn_api, response_user),
emqx_authn_api:response_user_examples()
)
}
)
}
};
schema("/gateways/:name/authentication/users/:uid") ->
#{
'operationId' => users_insta,
get =>
#{
tags => ?TAGS,
desc => ?DESC(get_user),
summary => <<"Get User Info for a Gateway Authenticator">>,
parameters => params_gateway_name_in_path() ++
params_userid_in_path(),
responses =>
?STANDARD_RESP(
#{
200 => emqx_dashboard_swagger:schema_with_example(
ref(emqx_authn_api, response_user),
emqx_authn_api:response_user_examples()
)
}
)
},
put =>
#{
tags => ?TAGS,
desc => ?DESC(update_user),
summary => <<"Update User Info for a Gateway Authenticator">>,
parameters => params_gateway_name_in_path() ++
params_userid_in_path(),
'requestBody' => emqx_dashboard_swagger:schema_with_examples(
ref(emqx_authn_api, request_user_update),
emqx_authn_api:request_user_update_examples()
),
responses =>
?STANDARD_RESP(
#{
200 => emqx_dashboard_swagger:schema_with_example(
ref(emqx_authn_api, response_user),
emqx_authn_api:response_user_examples()
)
}
)
},
delete =>
#{
tags => ?TAGS,
desc => ?DESC(delete_user),
summary => <<"Delete User for a Gateway Authenticator">>,
parameters => params_gateway_name_in_path() ++
params_userid_in_path(),
responses =>
?STANDARD_RESP(#{204 => <<"User Deleted">>})
}
};
schema(Path) ->
emqx_gateway_utils:make_compatible_schema(Path, fun schema/1).
%%--------------------------------------------------------------------
%% params defines
params_gateway_name_in_path() ->
[
{name,
mk(
binary(),
#{
in => path,
desc => ?DESC(emqx_gateway_api, gateway_name_in_qs),
example => <<"stomp">>
}
)}
].
params_userid_in_path() ->
[
{uid,
mk(
binary(),
#{
in => path,
desc => ?DESC(user_id),
example => <<"test_username">>
}
)}
].
params_paging_in_qs() ->
emqx_dashboard_swagger:fields(page) ++
emqx_dashboard_swagger:fields(limit).
params_fuzzy_in_qs() ->
[
{like_user_id,
mk(
binary(),
#{
in => query,
required => false,
desc => ?DESC(like_user_id),
example => <<"test_">>
}
)},
{is_superuser,
mk(
boolean(),
#{
in => query,
required => false,
desc => ?DESC(is_superuser)
}
)}
].
%%--------------------------------------------------------------------
%% schemas
schema_authn() ->
emqx_dashboard_swagger:schema_with_examples(
emqx_authn_schema:authenticator_type_without_scram(),
emqx_authn_api:authenticator_examples()
).
| null | https://raw.githubusercontent.com/emqx/emqx/dbc10c2eed3df314586c7b9ac6292083204f1f68/apps/emqx_gateway/src/emqx_gateway_api_authn.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
http handlers
internal export for emqx_gateway_api_listeners module
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
http handlers
--------------------------------------------------------------------
--------------------------------------------------------------------
Swagger defines
--------------------------------------------------------------------
--------------------------------------------------------------------
params defines
--------------------------------------------------------------------
schemas | Copyright ( c ) 2021 - 2023 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_gateway_api_authn).
-behaviour(minirest_api).
-include("emqx_gateway_http.hrl").
-include_lib("hocon/include/hoconsc.hrl").
-include_lib("typerefl/include/types.hrl").
-import(hoconsc, [mk/2, ref/2]).
-import(emqx_dashboard_swagger, [error_codes/2]).
-import(
emqx_gateway_http,
[
return_http_error/2,
with_gateway/2,
with_authn/2
]
).
minirest / dashbaord_swagger behaviour callbacks
-export([
api_spec/0,
paths/0,
schema/1
]).
-export([
authn/2,
users/2,
users_insta/2
]).
-export([schema_authn/0]).
-define(TAGS, [<<"Gateway Authentication">>]).
minirest behaviour callbacks
api_spec() ->
emqx_dashboard_swagger:spec(?MODULE, #{check_schema => true}).
paths() ->
emqx_gateway_utils:make_deprecated_paths([
"/gateways/:name/authentication",
"/gateways/:name/authentication/users",
"/gateways/:name/authentication/users/:uid"
]).
authn(get, #{bindings := #{name := Name0}}) ->
with_gateway(Name0, fun(GwName, _) ->
try emqx_gateway_http:authn(GwName) of
Authn -> {200, Authn}
catch
error:{config_not_found, _} ->
FIXME : should return 404 ?
{204}
end
end);
authn(put, #{
bindings := #{name := Name0},
body := Body
}) ->
with_gateway(Name0, fun(GwName, _) ->
{ok, Authn} = emqx_gateway_http:update_authn(GwName, Body),
{200, Authn}
end);
authn(post, #{
bindings := #{name := Name0},
body := Body
}) ->
with_gateway(Name0, fun(GwName, _) ->
{ok, Authn} = emqx_gateway_http:add_authn(GwName, Body),
{201, Authn}
end);
authn(delete, #{bindings := #{name := Name0}}) ->
with_gateway(Name0, fun(GwName, _) ->
ok = emqx_gateway_http:remove_authn(GwName),
{204}
end).
users(get, #{bindings := #{name := Name0}, query_string := Qs}) ->
with_authn(Name0, fun(
_GwName,
#{
id := AuthId,
chain_name := ChainName
}
) ->
emqx_authn_api:list_users(ChainName, AuthId, parse_qstring(Qs))
end);
users(post, #{
bindings := #{name := Name0},
body := Body
}) ->
with_authn(Name0, fun(
_GwName,
#{
id := AuthId,
chain_name := ChainName
}
) ->
emqx_authn_api:add_user(ChainName, AuthId, Body)
end).
users_insta(get, #{bindings := #{name := Name0, uid := UserId}}) ->
with_authn(Name0, fun(
_GwName,
#{
id := AuthId,
chain_name := ChainName
}
) ->
emqx_authn_api:find_user(ChainName, AuthId, UserId)
end);
users_insta(put, #{
bindings := #{name := Name0, uid := UserId},
body := Body
}) ->
with_authn(Name0, fun(
_GwName,
#{
id := AuthId,
chain_name := ChainName
}
) ->
emqx_authn_api:update_user(ChainName, AuthId, UserId, Body)
end);
users_insta(delete, #{bindings := #{name := Name0, uid := UserId}}) ->
with_authn(Name0, fun(
_GwName,
#{
id := AuthId,
chain_name := ChainName
}
) ->
emqx_authn_api:delete_user(ChainName, AuthId, UserId)
end).
Utils
parse_qstring(Qs) ->
maps:with(
[
<<"page">>,
<<"limit">>,
<<"like_user_id">>,
<<"is_superuser">>
],
Qs
).
schema("/gateways/:name/authentication") ->
#{
'operationId' => authn,
get =>
#{
tags => ?TAGS,
desc => ?DESC(get_authn),
summary => <<"Get Authenticator Configuration">>,
parameters => params_gateway_name_in_path(),
responses =>
?STANDARD_RESP(
#{
200 => schema_authn(),
204 => <<"Authenticator doesn't initiated">>
}
)
},
put =>
#{
tags => ?TAGS,
desc => ?DESC(update_authn),
summary => <<"Update Authenticator Configuration">>,
parameters => params_gateway_name_in_path(),
'requestBody' => schema_authn(),
responses =>
?STANDARD_RESP(#{200 => schema_authn()})
},
post =>
#{
tags => ?TAGS,
desc => ?DESC(add_authn),
summary => <<"Create an Authenticator for a Gateway">>,
parameters => params_gateway_name_in_path(),
'requestBody' => schema_authn(),
responses =>
?STANDARD_RESP(#{201 => schema_authn()})
},
delete =>
#{
tags => ?TAGS,
desc => ?DESC(delete_authn),
summary => <<"Delete the Gateway Authenticator">>,
parameters => params_gateway_name_in_path(),
responses =>
?STANDARD_RESP(#{204 => <<"Deleted">>})
}
};
schema("/gateways/:name/authentication/users") ->
#{
'operationId' => users,
get =>
#{
tags => ?TAGS,
desc => ?DESC(list_users),
summary => <<"List users for a Gateway Authenticator">>,
parameters => params_gateway_name_in_path() ++
params_paging_in_qs() ++
params_fuzzy_in_qs(),
responses =>
?STANDARD_RESP(
#{
200 => emqx_dashboard_swagger:schema_with_example(
ref(emqx_authn_api, response_users),
emqx_authn_api:response_users_example()
)
}
)
},
post =>
#{
tags => ?TAGS,
desc => ?DESC(add_user),
summary => <<"Add User for a Gateway Authenticator">>,
parameters => params_gateway_name_in_path(),
'requestBody' => emqx_dashboard_swagger:schema_with_examples(
ref(emqx_authn_api, request_user_create),
emqx_authn_api:request_user_create_examples()
),
responses =>
?STANDARD_RESP(
#{
201 => emqx_dashboard_swagger:schema_with_example(
ref(emqx_authn_api, response_user),
emqx_authn_api:response_user_examples()
)
}
)
}
};
schema("/gateways/:name/authentication/users/:uid") ->
#{
'operationId' => users_insta,
get =>
#{
tags => ?TAGS,
desc => ?DESC(get_user),
summary => <<"Get User Info for a Gateway Authenticator">>,
parameters => params_gateway_name_in_path() ++
params_userid_in_path(),
responses =>
?STANDARD_RESP(
#{
200 => emqx_dashboard_swagger:schema_with_example(
ref(emqx_authn_api, response_user),
emqx_authn_api:response_user_examples()
)
}
)
},
put =>
#{
tags => ?TAGS,
desc => ?DESC(update_user),
summary => <<"Update User Info for a Gateway Authenticator">>,
parameters => params_gateway_name_in_path() ++
params_userid_in_path(),
'requestBody' => emqx_dashboard_swagger:schema_with_examples(
ref(emqx_authn_api, request_user_update),
emqx_authn_api:request_user_update_examples()
),
responses =>
?STANDARD_RESP(
#{
200 => emqx_dashboard_swagger:schema_with_example(
ref(emqx_authn_api, response_user),
emqx_authn_api:response_user_examples()
)
}
)
},
delete =>
#{
tags => ?TAGS,
desc => ?DESC(delete_user),
summary => <<"Delete User for a Gateway Authenticator">>,
parameters => params_gateway_name_in_path() ++
params_userid_in_path(),
responses =>
?STANDARD_RESP(#{204 => <<"User Deleted">>})
}
};
schema(Path) ->
emqx_gateway_utils:make_compatible_schema(Path, fun schema/1).
params_gateway_name_in_path() ->
[
{name,
mk(
binary(),
#{
in => path,
desc => ?DESC(emqx_gateway_api, gateway_name_in_qs),
example => <<"stomp">>
}
)}
].
params_userid_in_path() ->
[
{uid,
mk(
binary(),
#{
in => path,
desc => ?DESC(user_id),
example => <<"test_username">>
}
)}
].
params_paging_in_qs() ->
emqx_dashboard_swagger:fields(page) ++
emqx_dashboard_swagger:fields(limit).
params_fuzzy_in_qs() ->
[
{like_user_id,
mk(
binary(),
#{
in => query,
required => false,
desc => ?DESC(like_user_id),
example => <<"test_">>
}
)},
{is_superuser,
mk(
boolean(),
#{
in => query,
required => false,
desc => ?DESC(is_superuser)
}
)}
].
schema_authn() ->
emqx_dashboard_swagger:schema_with_examples(
emqx_authn_schema:authenticator_type_without_scram(),
emqx_authn_api:authenticator_examples()
).
|
dec23ab52d9185eaa92081b6c4a5ea96d903c7fe73a5bbc4b9cb9fe34e113583 | helium/blockchain-node | bn_wallets.erl | -module(bn_wallets).
-include("bn_jsonrpc.hrl").
-behavior(bn_jsonrpc_handler).
-behavior(gen_server).
%% gen_server
-export([start_link/1, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]).
%% jsonrpc_handler
-export([handle_rpc/2]).
-define(DB_FILE, "wallets.db").
-define(SERVER, ?MODULE).
-define(KEY_TIMEOUT, 60000).
-define(UNLOCK_TIMEOUT, 30000).
-record(state, {
dir :: file:filename_all(),
db :: rocksdb:db_handle(),
default :: rocksdb:cf_handle(),
wallets :: rocksdb:cf_handle(),
keys = #{} :: #{libp2p_crypto:pubkey_bin() => libp2p_crypto:key_map()}
}).
-spec unlock(libp2p_crypto:pubkey_bin(), binary()) -> ok | {error, term()}.
unlock(Address, Password) ->
gen_server:call(?SERVER, {unlock, Address, Password}, ?UNLOCK_TIMEOUT).
-spec sign(libp2p_crypto:pubkey_bin(), blockchain_txn:txn()) ->
{ok, blockchain_txn:txn()} | {error, term()}.
sign(Address, Txn) ->
gen_server:call(?SERVER, {sign, Address, Txn}).
-spec keys(libp2p_crypto:pubkey_bin()) -> {ok, lib2p_crypto:key_map()} | {error, term()}.
keys(Address) ->
gen_server:call(?SERVER, {keys, Address}).
-spec lock(libp2p_crypto:pubkey_bin()) -> ok.
lock(Address) ->
gen_server:call(?SERVER, {lock, Address}).
-spec is_locked(libp2p_crypto:pubkey_bin()) -> boolean().
is_locked(Address) ->
gen_server:call(?SERVER, {is_locked, Address}).
restore(Path, BackupID) ->
gen_server:call(?SERVER, {restore, Path, BackupID}).
%%
%% gen_server
%%
start_link(Args) ->
gen_server:start_link({local, ?SERVER}, ?MODULE, Args, []).
init(Args) ->
Dir = filename:join(proplists:get_value(base_dir, Args, "data"), ?DB_FILE),
case load_db(Dir) of
{ok, State} ->
persistent_term:put(?MODULE, State),
{ok, State};
Error ->
Error
end.
handle_call({unlock, Address, Password}, _From, State) ->
case maps:is_key(Address, State#state.keys) of
true ->
{reply, ok, State};
false ->
case get_wallet(Address, State) of
{error, Error} ->
{reply, {error, Error}, State};
{ok, Wallet} ->
case wallet:decrypt(Password, Wallet) of
{error, Error} ->
{reply, {error, Error}, State};
{ok, KeyMap} ->
timer:send_after(?KEY_TIMEOUT, self(), {key_timeout, Address}),
{reply, ok, State#state{
keys = maps:put(Address, KeyMap, State#state.keys)
}}
end
end
end;
handle_call({lock, Address}, _From, State) ->
{reply, ok, State#state{keys = maps:remove(Address, State#state.keys)}};
handle_call({is_locked, Address}, _From, State) ->
{reply, not maps:is_key(Address, State#state.keys), State};
handle_call({keys, Address}, _From, State) ->
case maps:get(Address, State#state.keys, false) of
false ->
{reply, {error, not_found}, State};
KeyMap ->
{reply, {ok, KeyMap}, State}
end;
handle_call({sign, Address, Txn}, _From, State) ->
case maps:get(Address, State#state.keys, false) of
false ->
{reply, {error, not_found}, State};
#{secret := PrivKey} ->
SigFun = libp2p_crypto:mk_sig_fun(PrivKey),
{reply, {ok, blockchain_txn:sign(Txn, SigFun)}, State}
end;
handle_call({restore, Path, BackupID}, _From, State) ->
{ok, Engine} = rocksdb:open_backup_engine(Path),
case rocksdb:verify_backup(Engine, BackupID) of
{error, Error} ->
{reply, {error, Error}, State};
ok ->
rocksdb:close(State#state.db),
case rocksdb:restore_db_from_backup(Engine, BackupID, State#state.dir) of
ok ->
case load_db(State#state.dir) of
{ok, NewState} ->
persistent_term:put(?MODULE, NewState),
{reply, ok, NewState};
Error ->
{reply, Error, State}
end
end
end;
handle_call(Request, _From, State) ->
lager:notice("Unhandled call ~p", [Request]),
{reply, ok, State}.
handle_cast(Msg, State) ->
lager:notice("Unhandled cast ~p", [Msg]),
{noreply, State}.
handle_info({key_timeout, Address}, State) ->
{noreply, State#state{keys = maps:remove(Address, State#state.keys)}};
handle_info(Info, State) ->
lager:notice("Unhandled info ~p", [Info]),
{noreply, State}.
terminate(_Reason, #state{db = DB}) ->
rocksdb:close(DB).
%%
%% jsonrpc_handler
%%
handle_rpc(<<"wallet_create">>, {Param}) ->
KeyMap = libp2p_crypto:generate_keys(ed25519),
Password =
case ?jsonrpc_get_param(<<"password">>, Param) of
V when is_binary(V) andalso byte_size(V) > 0 -> V;
_ -> ?jsonrpc_error(invalid_params)
end,
{ok, State} = get_state(),
{ok, Wallet} = wallet:encrypt(KeyMap, Password),
ok = save_wallet(Wallet, State),
?BIN_TO_B58(wallet:pubkey_bin(Wallet));
handle_rpc(<<"wallet_delete">>, {Param}) ->
Address = ?jsonrpc_b58_to_bin(<<"address">>, Param),
{ok, State} = get_state(),
case delete_wallet(Address, State) of
{error, _} = Error ->
?jsonrpc_error(Error);
ok ->
true
end;
handle_rpc(<<"wallet_list">>, _Params) ->
{ok, State} = get_state(),
[?BIN_TO_B58(Addr) || Addr <- get_wallet_list(State)];
handle_rpc(<<"wallet_unlock">>, {Param}) ->
Address = ?jsonrpc_b58_to_bin(<<"address">>, Param),
Password = ?jsonrpc_get_param(<<"password">>, Param),
case unlock(Address, Password) of
{error, not_found} ->
?jsonrpc_error({not_found, "Wallet not found"});
{error, decrypt} ->
?jsonrpc_error(invalid_password);
ok ->
true
end;
handle_rpc(<<"wallet_lock">>, {Param}) ->
Address = ?jsonrpc_b58_to_bin(<<"address">>, Param),
ok = lock(Address),
true;
handle_rpc(<<"wallet_is_locked">>, {Param}) ->
Address = ?jsonrpc_b58_to_bin(<<"address">>, Param),
is_locked(Address);
handle_rpc(<<"wallet_pay">>, {Param}) ->
Payer = ?jsonrpc_b58_to_bin(<<"address">>, Param),
Payee = ?jsonrpc_b58_to_bin(<<"payee">>, Param),
Amount = ?jsonrpc_get_param(<<"bones">>, Param, undefined),
Max = ?jsonrpc_get_param(<<"max">>, Param, false),
TokenBin = ?jsonrpc_get_param(<<"token_type">>, Param, <<"hnt">>),
Token = jsonrpc_binary_to_token_type(TokenBin),
Chain = blockchain_worker:blockchain(),
Nonce = jsonrpc_nonce_param(Param, Payer, balance, Chain),
case mk_payment_txn_v2(Payer, [{Payee, Token, Amount, Max}], Nonce, Chain) of
{ok, Txn} ->
case sign(Payer, Txn) of
{ok, SignedTxn} ->
{ok, _} = bn_pending_txns:submit_txn(SignedTxn),
blockchain_txn:to_json(SignedTxn, []);
{error, not_found} ->
?jsonrpc_error({not_found, "Wallet is locked"})
end;
{error, invalid_payment} ->
?jsonrpc_error({invalid_params, "Missing or invalid payment amount"})
end;
handle_rpc(<<"wallet_pay_multi">>, {Param}) ->
Payer = ?jsonrpc_b58_to_bin(<<"address">>, Param),
Payments =
case ?jsonrpc_get_param(<<"payments">>, Param, false) of
L when is_list(L) andalso length(L) > 0 ->
lists:map(
fun({Entry}) ->
Payee = ?jsonrpc_b58_to_bin(<<"payee">>, Entry),
Amount = ?jsonrpc_get_param(<<"bones">>, Entry, undefined),
Max = ?jsonrpc_get_param(<<"max">>, Entry, false),
TokenBin = ?jsonrpc_get_param(<<"token_type">>, Entry, <<"hnt">>),
Token = jsonrpc_binary_to_token_type(TokenBin),
{Payee, Token, Amount, Max}
end,
L
);
_ ->
?jsonrpc_error({invalid_params, "Missing or empty payment list"})
end,
Chain = blockchain_worker:blockchain(),
Nonce = jsonrpc_nonce_param(Param, Payer, balance, Chain),
case mk_payment_txn_v2(Payer, Payments, Nonce, Chain) of
{ok, Txn} ->
case sign(Payer, Txn) of
{ok, SignedTxn} ->
{ok, _} = bn_pending_txns:submit_txn(SignedTxn),
blockchain_txn:to_json(SignedTxn, []);
{error, not_found} ->
?jsonrpc_error({not_found, "Wallet is locked"})
end;
{error, invalid_payment} ->
?jsonrpc_error({invalid_params, "Missing or invalid payment(s)"})
end;
handle_rpc(<<"wallet_import">>, {Param}) ->
Password = ?jsonrpc_get_param(<<"password">>, Param),
Path = ?jsonrpc_get_param(<<"path">>, Param),
{ok, State} = get_state(),
case file:read_file(Path) of
{error, enoent} ->
?jsonrpc_error({not_found, "Path not found"});
{error, _} = Error ->
?jsonrpc_error(Error);
{ok, FileBin} ->
case wallet:from_binary(FileBin) of
{error, _} = Error ->
?jsonrpc_error(Error);
{ok, Wallet} ->
case wallet:decrypt(Password, Wallet) of
{error, decrypt} ->
?jsonrpc_error(invalid_password);
{ok, _} ->
ok = save_wallet(Wallet, State),
?BIN_TO_B58(wallet:pubkey_bin(Wallet))
end
end
end;
handle_rpc(<<"wallet_export">>, {Param}) ->
Address = ?jsonrpc_b58_to_bin(<<"address">>, Param),
Path = ?jsonrpc_get_param(<<"path">>, Param),
{ok, State} = get_state(),
case get_wallet(Address, State) of
{error, not_found} ->
?jsonrpc_error({not_found, "Wallet not found"});
{ok, Wallet} ->
WalletBin = wallet:to_binary(Wallet),
case file:write_file(Path, WalletBin) of
ok -> true;
{error, _} = Error -> ?jsonrpc_error(Error)
end
end;
handle_rpc(<<"wallet_export_secret">>, {Param}) ->
Address = ?jsonrpc_b58_to_bin(<<"address">>, Param),
Path = ?jsonrpc_get_param(<<"path">>, Param),
case keys(Address) of
{error, not_found} ->
?jsonrpc_error({not_found, "Wallet not found"});
{ok, #{secret := {ed25519, <<Secret/binary>>}}} ->
case jsone:try_encode(binary:bin_to_list(Secret), []) of
{ok, Json} ->
case file:write_file(Path, Json) of
ok -> true;
{error, _} = Error -> ?jsonrpc_error(Error)
end;
{error, _} = Error -> ?jsonrpc_error(Error)
end;
{ok, _} ->
?jsonrpc_error({not_supported, "Wallet not ed25519"})
end;
handle_rpc(<<"wallet_backup_list">>, {Param}) ->
Path = ?jsonrpc_get_param(<<"path">>, Param),
{ok, Engine} = rocksdb:open_backup_engine(binary_to_list(Path)),
{ok, Info} = rocksdb:get_backup_info(Engine),
Info;
handle_rpc(<<"wallet_backup_create">>, {Param}) ->
Path = ?jsonrpc_get_param(<<"path">>, Param),
NumBackupToKeep = ?jsonrpc_get_param(<<"max_backups">>, Param),
{ok, Engine} = rocksdb:open_backup_engine(binary_to_list(Path)),
{ok, #state{db = DB}} = get_state(),
ok = rocksdb:create_new_backup(Engine, DB),
ok = rocksdb:purge_old_backup(Engine, NumBackupToKeep),
{ok, Info} = rocksdb:get_backup_info(Engine),
LastBackup = hd(Info),
LastBackup;
handle_rpc(<<"wallet_backup_delete">>, {Param}) ->
Path = ?jsonrpc_get_param(<<"path">>, Param),
BackupID = ?jsonrpc_get_param(<<"backup_id">>, Param),
{ok, Engine} = rocksdb:open_backup_engine(binary_to_list(Path)),
case rocksdb:delete_backup(Engine, BackupID) of
ok ->
true;
{error, not_found} ->
?jsonrpc_error({not_found, "Backup not found: ~p", [BackupID]});
{error, _} = Error ->
?jsonrpc_error(Error)
end;
handle_rpc(<<"wallet_backup_restore">>, {Param}) ->
Path = ?jsonrpc_get_param(<<"path">>, Param),
BackupID = ?jsonrpc_get_param(<<"backup_id">>, Param),
case restore(binary_to_list(Path), BackupID) of
ok ->
true;
{error, not_found} ->
?jsonrpc_error({not_found, "Backup not found: ~p", [BackupID]});
{error, _} = Error ->
?jsonrpc_error(Error)
end;
handle_rpc(_, _) ->
?jsonrpc_error(method_not_found).
%%
Internal
%%
%% Gets a nonce from a given jsonrpc parameter list. If not present it gets the
speculative nonce for the given account address and adds one to construct a new
%% nonce.
-spec jsonrpc_nonce_param(
[term()],
libp2p_crypto:pubkey_bin(),
bn_pending_txns:nonce_type(),
blockchain:chain()
) ->
non_neg_integer().
jsonrpc_nonce_param(Param, Address, NonceType, Chain) ->
case ?jsonrpc_get_param(<<"nonce">>, Param, false) of
false ->
bn_accounts:get_speculative_nonce(
Address,
NonceType,
blockchain:ledger(Chain)
) + 1;
V when is_integer(V) ->
V;
_ ->
?jsonrpc_error({invalid_params, Param})
end.
-spec mk_payment_txn_v2(
Payer :: libp2p_crypto:pubkey_bin(),
[
{
Payee :: libp2p_crypto:pubkey_bin(),
Token :: atom(),
Bones :: pos_integer() | undefined,
Max :: boolean()
}
],
Nonce :: non_neg_integer(),
Chain :: blockchain:blockchain()
) ->
{ok, blockchain_txn:txn()} | {error, term()}.
mk_payment_txn_v2(Payer, PaymentList, Nonce, Chain) ->
try
Payments = [
mk_payment(Payee, Token, Bones, Max)
|| {Payee, Token, Bones, Max} <- PaymentList
],
Txn = blockchain_txn_payment_v2:new(Payer, Payments, Nonce),
TxnFee = blockchain_txn_payment_v2:calculate_fee(Txn, Chain),
{ok, blockchain_txn_payment_v2:fee(Txn, TxnFee)}
catch
_:_ -> {error, invalid_payment}
end.
-spec mk_payment(
Payee :: libp2p_crypto:pubkey_bin(),
Token :: blockchain_token_v1:type(),
Bones :: (undefined | non_neg_integer()),
PayMaximum :: boolean()
) -> blockchain_payment_v2:payment().
mk_payment(Payee, Token, undefined, true) ->
blockchain_payment_v2:new(Payee, max, 0, Token);
mk_payment(Payee, Token, Bones, false) ->
blockchain_payment_v2:new(Payee, Bones, 0, Token).
get_state() ->
case persistent_term:get(?MODULE, false) of
false ->
{error, {no_database, ?MODULE}};
State ->
{ok, State}
end.
-spec get_wallet(libp2p_crypto:pubkey_bin(), #state{}) ->
{ok, wallet:wallet()} | {error, term()}.
get_wallet(Address, #state{db = DB, wallets = WalletCF}) ->
case rocksdb:get(DB, WalletCF, Address, []) of
not_found ->
{error, not_found};
{ok, BinWallet} ->
wallet:from_binary(BinWallet);
Error ->
Error
end.
get_wallet_list(#state{db = DB, wallets = WalletCF}) ->
{ok, Itr} = rocksdb:iterator(DB, WalletCF, []),
Wallets = get_wallet_list(Itr, rocksdb:iterator_move(Itr, first), []),
catch rocksdb:iterator_close(Itr),
Wallets.
get_wallet_list(_Itr, {error, _Error}, Acc) ->
lists:reverse(Acc);
get_wallet_list(Itr, {ok, Addr, _}, Acc) ->
get_wallet_list(Itr, rocksdb:iterator_move(Itr, next), [Addr | Acc]).
-spec save_wallet(wallet:wallet(), #state{}) -> ok | {error, term()}.
save_wallet(Wallet, #state{db = DB, wallets = WalletCF}) ->
PubKeyBin = wallet:pubkey_bin(Wallet),
WalletBin = wallet:to_binary(Wallet),
rocksdb:put(DB, WalletCF, PubKeyBin, WalletBin, [{sync, true}]).
-spec delete_wallet(Address :: libp2p_crypto:pubkey_bin(), #state{}) ->
ok | {error, term()}.
delete_wallet(Address, #state{db = DB, wallets = WalletCF}) ->
rocksdb:delete(DB, WalletCF, Address, [{sync, true}]).
-spec load_db(file:filename_all()) -> {ok, #state{}} | {error, any()}.
load_db(Dir) ->
case bn_db:open_db(Dir, ["default", "wallets"]) of
{error, _Reason} = Error ->
Error;
{ok, DB, [DefaultCF, WalletCF]} ->
State = #state{
dir = Dir,
db = DB,
default = DefaultCF,
wallets = WalletCF
},
compact_db(State),
{ok, State}
end.
compact_db(#state{db = DB, default = Default, wallets = WalletCF}) ->
rocksdb:compact_range(DB, Default, undefined, undefined, []),
rocksdb:compact_range(DB, WalletCF, undefined, undefined, []),
ok.
-spec jsonrpc_binary_to_token_type(
TokenBin :: binary()
) -> atom().
jsonrpc_binary_to_token_type(Token) ->
case catch binary_to_existing_atom(Token) of
TokenType when is_atom(TokenType) -> TokenType;
_ -> ?jsonrpc_error({invalid_params, "Invalid token type found"})
end.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
token_test() ->
?assertEqual(jsonrpc_binary_to_token_type(<<"hnt">>), hnt),
?assertThrow({invalid_params, _}, jsonrpc_binary_to_token_type(<<" NOT EVER A TOKEN">>)).
-endif.
| null | https://raw.githubusercontent.com/helium/blockchain-node/de938c8cede2eb3013b7c7459feae0574a5ca3eb/src/bn_wallets.erl | erlang | gen_server
jsonrpc_handler
gen_server
jsonrpc_handler
Gets a nonce from a given jsonrpc parameter list. If not present it gets the
nonce. | -module(bn_wallets).
-include("bn_jsonrpc.hrl").
-behavior(bn_jsonrpc_handler).
-behavior(gen_server).
-export([start_link/1, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2]).
-export([handle_rpc/2]).
-define(DB_FILE, "wallets.db").
-define(SERVER, ?MODULE).
-define(KEY_TIMEOUT, 60000).
-define(UNLOCK_TIMEOUT, 30000).
-record(state, {
dir :: file:filename_all(),
db :: rocksdb:db_handle(),
default :: rocksdb:cf_handle(),
wallets :: rocksdb:cf_handle(),
keys = #{} :: #{libp2p_crypto:pubkey_bin() => libp2p_crypto:key_map()}
}).
-spec unlock(libp2p_crypto:pubkey_bin(), binary()) -> ok | {error, term()}.
unlock(Address, Password) ->
gen_server:call(?SERVER, {unlock, Address, Password}, ?UNLOCK_TIMEOUT).
-spec sign(libp2p_crypto:pubkey_bin(), blockchain_txn:txn()) ->
{ok, blockchain_txn:txn()} | {error, term()}.
sign(Address, Txn) ->
gen_server:call(?SERVER, {sign, Address, Txn}).
-spec keys(libp2p_crypto:pubkey_bin()) -> {ok, lib2p_crypto:key_map()} | {error, term()}.
keys(Address) ->
gen_server:call(?SERVER, {keys, Address}).
-spec lock(libp2p_crypto:pubkey_bin()) -> ok.
lock(Address) ->
gen_server:call(?SERVER, {lock, Address}).
-spec is_locked(libp2p_crypto:pubkey_bin()) -> boolean().
is_locked(Address) ->
gen_server:call(?SERVER, {is_locked, Address}).
restore(Path, BackupID) ->
gen_server:call(?SERVER, {restore, Path, BackupID}).
start_link(Args) ->
gen_server:start_link({local, ?SERVER}, ?MODULE, Args, []).
init(Args) ->
Dir = filename:join(proplists:get_value(base_dir, Args, "data"), ?DB_FILE),
case load_db(Dir) of
{ok, State} ->
persistent_term:put(?MODULE, State),
{ok, State};
Error ->
Error
end.
handle_call({unlock, Address, Password}, _From, State) ->
case maps:is_key(Address, State#state.keys) of
true ->
{reply, ok, State};
false ->
case get_wallet(Address, State) of
{error, Error} ->
{reply, {error, Error}, State};
{ok, Wallet} ->
case wallet:decrypt(Password, Wallet) of
{error, Error} ->
{reply, {error, Error}, State};
{ok, KeyMap} ->
timer:send_after(?KEY_TIMEOUT, self(), {key_timeout, Address}),
{reply, ok, State#state{
keys = maps:put(Address, KeyMap, State#state.keys)
}}
end
end
end;
handle_call({lock, Address}, _From, State) ->
{reply, ok, State#state{keys = maps:remove(Address, State#state.keys)}};
handle_call({is_locked, Address}, _From, State) ->
{reply, not maps:is_key(Address, State#state.keys), State};
handle_call({keys, Address}, _From, State) ->
case maps:get(Address, State#state.keys, false) of
false ->
{reply, {error, not_found}, State};
KeyMap ->
{reply, {ok, KeyMap}, State}
end;
handle_call({sign, Address, Txn}, _From, State) ->
case maps:get(Address, State#state.keys, false) of
false ->
{reply, {error, not_found}, State};
#{secret := PrivKey} ->
SigFun = libp2p_crypto:mk_sig_fun(PrivKey),
{reply, {ok, blockchain_txn:sign(Txn, SigFun)}, State}
end;
handle_call({restore, Path, BackupID}, _From, State) ->
{ok, Engine} = rocksdb:open_backup_engine(Path),
case rocksdb:verify_backup(Engine, BackupID) of
{error, Error} ->
{reply, {error, Error}, State};
ok ->
rocksdb:close(State#state.db),
case rocksdb:restore_db_from_backup(Engine, BackupID, State#state.dir) of
ok ->
case load_db(State#state.dir) of
{ok, NewState} ->
persistent_term:put(?MODULE, NewState),
{reply, ok, NewState};
Error ->
{reply, Error, State}
end
end
end;
handle_call(Request, _From, State) ->
lager:notice("Unhandled call ~p", [Request]),
{reply, ok, State}.
handle_cast(Msg, State) ->
lager:notice("Unhandled cast ~p", [Msg]),
{noreply, State}.
handle_info({key_timeout, Address}, State) ->
{noreply, State#state{keys = maps:remove(Address, State#state.keys)}};
handle_info(Info, State) ->
lager:notice("Unhandled info ~p", [Info]),
{noreply, State}.
terminate(_Reason, #state{db = DB}) ->
rocksdb:close(DB).
handle_rpc(<<"wallet_create">>, {Param}) ->
KeyMap = libp2p_crypto:generate_keys(ed25519),
Password =
case ?jsonrpc_get_param(<<"password">>, Param) of
V when is_binary(V) andalso byte_size(V) > 0 -> V;
_ -> ?jsonrpc_error(invalid_params)
end,
{ok, State} = get_state(),
{ok, Wallet} = wallet:encrypt(KeyMap, Password),
ok = save_wallet(Wallet, State),
?BIN_TO_B58(wallet:pubkey_bin(Wallet));
handle_rpc(<<"wallet_delete">>, {Param}) ->
Address = ?jsonrpc_b58_to_bin(<<"address">>, Param),
{ok, State} = get_state(),
case delete_wallet(Address, State) of
{error, _} = Error ->
?jsonrpc_error(Error);
ok ->
true
end;
handle_rpc(<<"wallet_list">>, _Params) ->
{ok, State} = get_state(),
[?BIN_TO_B58(Addr) || Addr <- get_wallet_list(State)];
handle_rpc(<<"wallet_unlock">>, {Param}) ->
Address = ?jsonrpc_b58_to_bin(<<"address">>, Param),
Password = ?jsonrpc_get_param(<<"password">>, Param),
case unlock(Address, Password) of
{error, not_found} ->
?jsonrpc_error({not_found, "Wallet not found"});
{error, decrypt} ->
?jsonrpc_error(invalid_password);
ok ->
true
end;
handle_rpc(<<"wallet_lock">>, {Param}) ->
Address = ?jsonrpc_b58_to_bin(<<"address">>, Param),
ok = lock(Address),
true;
handle_rpc(<<"wallet_is_locked">>, {Param}) ->
Address = ?jsonrpc_b58_to_bin(<<"address">>, Param),
is_locked(Address);
handle_rpc(<<"wallet_pay">>, {Param}) ->
Payer = ?jsonrpc_b58_to_bin(<<"address">>, Param),
Payee = ?jsonrpc_b58_to_bin(<<"payee">>, Param),
Amount = ?jsonrpc_get_param(<<"bones">>, Param, undefined),
Max = ?jsonrpc_get_param(<<"max">>, Param, false),
TokenBin = ?jsonrpc_get_param(<<"token_type">>, Param, <<"hnt">>),
Token = jsonrpc_binary_to_token_type(TokenBin),
Chain = blockchain_worker:blockchain(),
Nonce = jsonrpc_nonce_param(Param, Payer, balance, Chain),
case mk_payment_txn_v2(Payer, [{Payee, Token, Amount, Max}], Nonce, Chain) of
{ok, Txn} ->
case sign(Payer, Txn) of
{ok, SignedTxn} ->
{ok, _} = bn_pending_txns:submit_txn(SignedTxn),
blockchain_txn:to_json(SignedTxn, []);
{error, not_found} ->
?jsonrpc_error({not_found, "Wallet is locked"})
end;
{error, invalid_payment} ->
?jsonrpc_error({invalid_params, "Missing or invalid payment amount"})
end;
handle_rpc(<<"wallet_pay_multi">>, {Param}) ->
Payer = ?jsonrpc_b58_to_bin(<<"address">>, Param),
Payments =
case ?jsonrpc_get_param(<<"payments">>, Param, false) of
L when is_list(L) andalso length(L) > 0 ->
lists:map(
fun({Entry}) ->
Payee = ?jsonrpc_b58_to_bin(<<"payee">>, Entry),
Amount = ?jsonrpc_get_param(<<"bones">>, Entry, undefined),
Max = ?jsonrpc_get_param(<<"max">>, Entry, false),
TokenBin = ?jsonrpc_get_param(<<"token_type">>, Entry, <<"hnt">>),
Token = jsonrpc_binary_to_token_type(TokenBin),
{Payee, Token, Amount, Max}
end,
L
);
_ ->
?jsonrpc_error({invalid_params, "Missing or empty payment list"})
end,
Chain = blockchain_worker:blockchain(),
Nonce = jsonrpc_nonce_param(Param, Payer, balance, Chain),
case mk_payment_txn_v2(Payer, Payments, Nonce, Chain) of
{ok, Txn} ->
case sign(Payer, Txn) of
{ok, SignedTxn} ->
{ok, _} = bn_pending_txns:submit_txn(SignedTxn),
blockchain_txn:to_json(SignedTxn, []);
{error, not_found} ->
?jsonrpc_error({not_found, "Wallet is locked"})
end;
{error, invalid_payment} ->
?jsonrpc_error({invalid_params, "Missing or invalid payment(s)"})
end;
handle_rpc(<<"wallet_import">>, {Param}) ->
Password = ?jsonrpc_get_param(<<"password">>, Param),
Path = ?jsonrpc_get_param(<<"path">>, Param),
{ok, State} = get_state(),
case file:read_file(Path) of
{error, enoent} ->
?jsonrpc_error({not_found, "Path not found"});
{error, _} = Error ->
?jsonrpc_error(Error);
{ok, FileBin} ->
case wallet:from_binary(FileBin) of
{error, _} = Error ->
?jsonrpc_error(Error);
{ok, Wallet} ->
case wallet:decrypt(Password, Wallet) of
{error, decrypt} ->
?jsonrpc_error(invalid_password);
{ok, _} ->
ok = save_wallet(Wallet, State),
?BIN_TO_B58(wallet:pubkey_bin(Wallet))
end
end
end;
handle_rpc(<<"wallet_export">>, {Param}) ->
Address = ?jsonrpc_b58_to_bin(<<"address">>, Param),
Path = ?jsonrpc_get_param(<<"path">>, Param),
{ok, State} = get_state(),
case get_wallet(Address, State) of
{error, not_found} ->
?jsonrpc_error({not_found, "Wallet not found"});
{ok, Wallet} ->
WalletBin = wallet:to_binary(Wallet),
case file:write_file(Path, WalletBin) of
ok -> true;
{error, _} = Error -> ?jsonrpc_error(Error)
end
end;
handle_rpc(<<"wallet_export_secret">>, {Param}) ->
Address = ?jsonrpc_b58_to_bin(<<"address">>, Param),
Path = ?jsonrpc_get_param(<<"path">>, Param),
case keys(Address) of
{error, not_found} ->
?jsonrpc_error({not_found, "Wallet not found"});
{ok, #{secret := {ed25519, <<Secret/binary>>}}} ->
case jsone:try_encode(binary:bin_to_list(Secret), []) of
{ok, Json} ->
case file:write_file(Path, Json) of
ok -> true;
{error, _} = Error -> ?jsonrpc_error(Error)
end;
{error, _} = Error -> ?jsonrpc_error(Error)
end;
{ok, _} ->
?jsonrpc_error({not_supported, "Wallet not ed25519"})
end;
handle_rpc(<<"wallet_backup_list">>, {Param}) ->
Path = ?jsonrpc_get_param(<<"path">>, Param),
{ok, Engine} = rocksdb:open_backup_engine(binary_to_list(Path)),
{ok, Info} = rocksdb:get_backup_info(Engine),
Info;
handle_rpc(<<"wallet_backup_create">>, {Param}) ->
Path = ?jsonrpc_get_param(<<"path">>, Param),
NumBackupToKeep = ?jsonrpc_get_param(<<"max_backups">>, Param),
{ok, Engine} = rocksdb:open_backup_engine(binary_to_list(Path)),
{ok, #state{db = DB}} = get_state(),
ok = rocksdb:create_new_backup(Engine, DB),
ok = rocksdb:purge_old_backup(Engine, NumBackupToKeep),
{ok, Info} = rocksdb:get_backup_info(Engine),
LastBackup = hd(Info),
LastBackup;
handle_rpc(<<"wallet_backup_delete">>, {Param}) ->
Path = ?jsonrpc_get_param(<<"path">>, Param),
BackupID = ?jsonrpc_get_param(<<"backup_id">>, Param),
{ok, Engine} = rocksdb:open_backup_engine(binary_to_list(Path)),
case rocksdb:delete_backup(Engine, BackupID) of
ok ->
true;
{error, not_found} ->
?jsonrpc_error({not_found, "Backup not found: ~p", [BackupID]});
{error, _} = Error ->
?jsonrpc_error(Error)
end;
handle_rpc(<<"wallet_backup_restore">>, {Param}) ->
Path = ?jsonrpc_get_param(<<"path">>, Param),
BackupID = ?jsonrpc_get_param(<<"backup_id">>, Param),
case restore(binary_to_list(Path), BackupID) of
ok ->
true;
{error, not_found} ->
?jsonrpc_error({not_found, "Backup not found: ~p", [BackupID]});
{error, _} = Error ->
?jsonrpc_error(Error)
end;
handle_rpc(_, _) ->
?jsonrpc_error(method_not_found).
Internal
speculative nonce for the given account address and adds one to construct a new
-spec jsonrpc_nonce_param(
[term()],
libp2p_crypto:pubkey_bin(),
bn_pending_txns:nonce_type(),
blockchain:chain()
) ->
non_neg_integer().
jsonrpc_nonce_param(Param, Address, NonceType, Chain) ->
case ?jsonrpc_get_param(<<"nonce">>, Param, false) of
false ->
bn_accounts:get_speculative_nonce(
Address,
NonceType,
blockchain:ledger(Chain)
) + 1;
V when is_integer(V) ->
V;
_ ->
?jsonrpc_error({invalid_params, Param})
end.
-spec mk_payment_txn_v2(
Payer :: libp2p_crypto:pubkey_bin(),
[
{
Payee :: libp2p_crypto:pubkey_bin(),
Token :: atom(),
Bones :: pos_integer() | undefined,
Max :: boolean()
}
],
Nonce :: non_neg_integer(),
Chain :: blockchain:blockchain()
) ->
{ok, blockchain_txn:txn()} | {error, term()}.
mk_payment_txn_v2(Payer, PaymentList, Nonce, Chain) ->
try
Payments = [
mk_payment(Payee, Token, Bones, Max)
|| {Payee, Token, Bones, Max} <- PaymentList
],
Txn = blockchain_txn_payment_v2:new(Payer, Payments, Nonce),
TxnFee = blockchain_txn_payment_v2:calculate_fee(Txn, Chain),
{ok, blockchain_txn_payment_v2:fee(Txn, TxnFee)}
catch
_:_ -> {error, invalid_payment}
end.
-spec mk_payment(
Payee :: libp2p_crypto:pubkey_bin(),
Token :: blockchain_token_v1:type(),
Bones :: (undefined | non_neg_integer()),
PayMaximum :: boolean()
) -> blockchain_payment_v2:payment().
mk_payment(Payee, Token, undefined, true) ->
blockchain_payment_v2:new(Payee, max, 0, Token);
mk_payment(Payee, Token, Bones, false) ->
blockchain_payment_v2:new(Payee, Bones, 0, Token).
get_state() ->
case persistent_term:get(?MODULE, false) of
false ->
{error, {no_database, ?MODULE}};
State ->
{ok, State}
end.
-spec get_wallet(libp2p_crypto:pubkey_bin(), #state{}) ->
{ok, wallet:wallet()} | {error, term()}.
get_wallet(Address, #state{db = DB, wallets = WalletCF}) ->
case rocksdb:get(DB, WalletCF, Address, []) of
not_found ->
{error, not_found};
{ok, BinWallet} ->
wallet:from_binary(BinWallet);
Error ->
Error
end.
get_wallet_list(#state{db = DB, wallets = WalletCF}) ->
{ok, Itr} = rocksdb:iterator(DB, WalletCF, []),
Wallets = get_wallet_list(Itr, rocksdb:iterator_move(Itr, first), []),
catch rocksdb:iterator_close(Itr),
Wallets.
get_wallet_list(_Itr, {error, _Error}, Acc) ->
lists:reverse(Acc);
get_wallet_list(Itr, {ok, Addr, _}, Acc) ->
get_wallet_list(Itr, rocksdb:iterator_move(Itr, next), [Addr | Acc]).
-spec save_wallet(wallet:wallet(), #state{}) -> ok | {error, term()}.
save_wallet(Wallet, #state{db = DB, wallets = WalletCF}) ->
PubKeyBin = wallet:pubkey_bin(Wallet),
WalletBin = wallet:to_binary(Wallet),
rocksdb:put(DB, WalletCF, PubKeyBin, WalletBin, [{sync, true}]).
-spec delete_wallet(Address :: libp2p_crypto:pubkey_bin(), #state{}) ->
ok | {error, term()}.
delete_wallet(Address, #state{db = DB, wallets = WalletCF}) ->
rocksdb:delete(DB, WalletCF, Address, [{sync, true}]).
-spec load_db(file:filename_all()) -> {ok, #state{}} | {error, any()}.
load_db(Dir) ->
case bn_db:open_db(Dir, ["default", "wallets"]) of
{error, _Reason} = Error ->
Error;
{ok, DB, [DefaultCF, WalletCF]} ->
State = #state{
dir = Dir,
db = DB,
default = DefaultCF,
wallets = WalletCF
},
compact_db(State),
{ok, State}
end.
compact_db(#state{db = DB, default = Default, wallets = WalletCF}) ->
rocksdb:compact_range(DB, Default, undefined, undefined, []),
rocksdb:compact_range(DB, WalletCF, undefined, undefined, []),
ok.
-spec jsonrpc_binary_to_token_type(
TokenBin :: binary()
) -> atom().
jsonrpc_binary_to_token_type(Token) ->
case catch binary_to_existing_atom(Token) of
TokenType when is_atom(TokenType) -> TokenType;
_ -> ?jsonrpc_error({invalid_params, "Invalid token type found"})
end.
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
token_test() ->
?assertEqual(jsonrpc_binary_to_token_type(<<"hnt">>), hnt),
?assertThrow({invalid_params, _}, jsonrpc_binary_to_token_type(<<" NOT EVER A TOKEN">>)).
-endif.
|
091ca3cea55ce0eb33b9644aadc68af83e526d957dc16bb892c78c8f33f6bdb4 | nuprl/gradual-typing-performance | scope2.rkt | The first three lines of this file were inserted by . They record metadata
;; about the language level of this file in a form that our tools can easily process.
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname scope2) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ())))
;; f computes n^2 + 25.
(define (f n)
  (+ (* n n) 25))

;; h applies f to twice its argument.
(define (h n)
  (f (* 2 n)))

;; g sums f over the two neighbours of n.
(define (g n)
  (+ (f (+ n 1)) (f (- n 1))))
| null | https://raw.githubusercontent.com/nuprl/gradual-typing-performance/35442b3221299a9cadba6810573007736b0d65d4/pre-benchmark/htdp/base/Images/scope2.rkt | racket | about the language level of this file in a form that our tools can easily process.
| The first three lines of this file were inserted by . They record metadata
#reader(lib "htdp-intermediate-lambda-reader.ss" "lang")((modname scope2) (read-case-sensitive #t) (teachpacks ()) (htdp-settings #(#t constructor repeating-decimal #f #t none #f ())))
(define (h x)
(f (* 2 x)))
(define (f x)
(+ (* x x) 25))
(define (g x)
(+ (f (+ x 1)) (f (- x 1))))
|
b7177c56e494d1b010deb87d9afce4859cc5bd9f6266061539ac0c07341efe19 | racket/libs | pack-all.rkt | #lang racket/base
;; SPDX-License-Identifier: (Apache-2.0 OR MIT)
(require racket/cmdline
racket/file
racket/port
racket/string
racket/list
racket/path
file/zip
openssl/sha1
net/url
pkg/strip
pkg/lib
setup/getinfo)
;; `pack-all` is the only public entry point of this module.
(provide pack-all)
;; Increment this number if something about the way packages are
;; generated changes, so that previously generated packages are
;; reliably replaced:
(define package-format-version 2)
;; Generate ".zip" files for all native packages at `src-dir`,
;; generating the output to `dest-dir`. The output includes a
;; "catalog" dir with information about the generated packages,
;; and the catalog entries point to a "pkgs" directory
;; that has the packages sorted by checksum.
;; There's more generality interally in the function than needed,
;; because it's based on code that packed all main-distribution
;; packages back when they were in a single source repository.
;; Pack every package found under `src-dir`, writing ".zip" archives,
;; ".srcsum" source checksums, and catalog entries under `work-dir`.
;; Catalog entries point into a "pkgs" directory keyed by the archive's
;; SHA1 checksum.
;; Fixes: the 'author entry used (format "~" r), a format string with no
;; directive but one argument, which raises at runtime — restored to "~a".
(define (pack-all #:src-dir src-dir #:work-dir work-dir)
  (define pack-dest-dir work-dir)
  (define catalog-dirs (list (build-path work-dir "catalog")))
  (define native? #f)
  (define relative? #t)
  (define get-modules? #f)
  (define checksum-dir (build-path work-dir "pkgs"))
  (define source-checksums? #f)
  (define src-dirs (list src-dir))
  (when pack-dest-dir
    (make-directory* pack-dest-dir))
  (for ([catalog-dir (in-list catalog-dirs)])
    (make-directory* catalog-dir))
  (define metadata-ns (make-base-namespace))
  (define (status fmt . args)
    (apply printf fmt args)
    (flush-output))
  ;; Stream a directory tree (names and file contents) through a pipe so
  ;; it can be checksummed without building one big string in memory:
  (define (stream-directory d)
    (define-values (i o) (make-pipe (* 100 4096)))
    (write package-format-version o)
    (define (skip-path? p)
      (let-values ([(base name dir?) (split-path p)])
        (define s (path->string name))
        (or (member s '("compiled"))
            (regexp-match? #rx#"^(?:[.]git.*|[.]svn|.*~|#.*#)$" s))))
    (thread (lambda ()
              (let loop ([d d])
                (for ([f (directory-list d #:build? #t)])
                  (cond
                    [(skip-path? f) (void)]
                    [(directory-exists? f)
                     (write (filter-not skip-path? (directory-list f)) o)
                     (loop f)]
                    [(file-exists? f)
                     (call-with-input-file*
                      f
                      (lambda (i) (copy-port i o)))])))
              (close-output-port o)))
    i)
  (define (do-package src-dir pkg-name)
    (define zip-file (path-add-suffix pkg-name #".zip"))
    (define dest-zip (and pack-dest-dir
                          (build-path (path->complete-path pack-dest-dir)
                                      zip-file)))
    (define pkg-src-dir (build-path src-dir pkg-name))
    (when pack-dest-dir
      (define sum-file (path-add-suffix pkg-name #".srcsum"))
      (status "summing ~a\n" pkg-src-dir)
      (define src-sha1 (sha1 (stream-directory pkg-src-dir)))
      (define dest-sum (build-path (path->complete-path pack-dest-dir) sum-file))
      ;; Re-pack only when the sources (or the Racket version) changed:
      (unless (and (file-exists? dest-zip)
                   (file-exists? dest-sum)
                   (equal? (list (version) src-sha1)
                           (call-with-input-file* dest-sum read)))
        (status "packing ~a\n" zip-file)
        (define tmp-dir (make-temporary-file "~a-pkg" 'directory))
        (parameterize ([strip-binary-compile-info #f]) ; for deterministic checksum
          (generate-stripped-directory (if native? 'binary 'source)
                                       pkg-src-dir
                                       tmp-dir))
        (parameterize ([current-directory tmp-dir])
          (when (file-exists? dest-zip) (delete-file dest-zip))
          (apply zip dest-zip (directory-list)
                 ;; Use a constant timestamp so that the checksum does
                 ;; not depend on timestamps:
                 #:timestamp 1359788400
                 #:utc-timestamps? #t
                 #:system-type 'unix))
        (delete-directory/files tmp-dir)
        (call-with-output-file*
         dest-sum
         #:exists 'truncate/replace
         (lambda (o)
           (write (list (version) src-sha1) o)
           (newline o)))))
    (define info-path (build-path src-dir pkg-name))
    (define i (get-info/full info-path))
    (define (get key)
      (i key (lambda ()
               (error 'catalog-local
                      "missing `~a'\n path: ~a"
                      key
                      (build-path info-path "info.rkt")))))
    (define (write-catalog-entry catalog-dir)
      (define catalog-dir/normal (simplify-path (path->complete-path catalog-dir)))
      (define catalog-pkg-dir (build-path catalog-dir "pkg"))
      (define checksum (if dest-zip
                           (call-with-input-file* dest-zip sha1)
                           (if source-checksums?
                               (begin
                                 (status "summing ~a\n" pkg-src-dir)
                                 (sha1 (stream-directory pkg-src-dir)))
                               "0")))
      (define orig-dest (if dest-zip
                            (build-path pack-dest-dir zip-file)
                            #f))
      (define checksum-dest (if checksum-dir
                                (build-path checksum-dir checksum zip-file)
                                orig-dest))
      (define pkg-dir (build-path src-dir pkg-name))
      (define info (and get-modules?
                        (get-info/full pkg-dir
                                       #:namespace metadata-ns
                                       #:bootstrap? #t)))
      (when dest-zip
        (when checksum-dir
          (make-directory* (build-path checksum-dir checksum))
          (copy-file orig-dest checksum-dest #t))
        (call-with-output-file*
         (build-path (path-replace-suffix checksum-dest #".zip.CHECKSUM"))
         #:exists 'truncate/replace
         (lambda (o)
           (display checksum o))))
      (make-directory* catalog-pkg-dir)
      (call-with-output-file*
       (build-path catalog-pkg-dir pkg-name)
       #:exists 'truncate
       (lambda (o)
         (write (hash 'source (path->string
                               (let ([p (path->complete-path
                                         (if dest-zip
                                             checksum-dest
                                             (path->directory-path pkg-dir)))])
                                 (if relative?
                                     (find-relative-path catalog-dir/normal
                                                         (simplify-path p))
                                     p)))
                      'checksum checksum
                      'name (path->string pkg-name)
                      ;; `pkg-authors` entries may be symbols or strings:
                      'author (string-join (for/list ([r (get 'pkg-authors)])
                                             (if (symbol? r)
                                                 (format "~a" r)
                                                 r))
                                           " ")
                      'description (get 'pkg-desc)
                      'tags '()
                      'dependencies (if get-modules?
                                        (append
                                         (info 'deps (lambda () null))
                                         (info 'build-deps (lambda () null)))
                                        '())
                      'modules (if get-modules?
                                   (pkg-directory->module-paths
                                    pkg-dir
                                    (path->string pkg-name)
                                    #:namespace metadata-ns)
                                   '()))
                o)
         (newline o))))
    (for ([catalog-dir (in-list catalog-dirs)])
      (write-catalog-entry catalog-dir)))
  (define found (make-hash))
  ;; Recur through directory tree, and treat each directory
  ;; that has an "info.rkt" file as a package (and don't recur
  ;; further into the package)
  (for ([src-dir (in-list src-dirs)])
    (let loop ([src-dir src-dir])
      (for ([f (in-list (directory-list src-dir))])
        (define src-f (build-path src-dir f))
        (cond
          [(file-exists? (build-path src-f "info.rkt"))
           (when (hash-ref found f #f)
             (error 'pack-local
                    "found packages multiple times: ~a and ~a"
                    (hash-ref found f)
                    src-f))
           (hash-set! found f src-f)
           (do-package src-dir f)]
          [(directory-exists? src-f)
           (loop src-f)])))))
| null | https://raw.githubusercontent.com/racket/libs/ebcea119197dc0cb86be1ccbbfbe5806f7280976/pack-all.rkt | racket | Increment this number if something about the way packages are
generated changes, so that previously generated packages are
reliably replaced:
Generate ".zip" files for all native packages at `src-dir`,
generating the output to `dest-dir`. The output includes a
"catalog" dir with information about the generated packages,
and the catalog entries point to a "pkgs" directory
that has the packages sorted by checksum.
There's more generality interally in the function than needed,
because it's based on code that packed all main-distribution
packages back when they were in a single source repository.
for deterministic checksum
Use a constant timestamp so that the checksum does
not depend on timestamps:
that has an "info.rkt" file as a package (and don't recur
further into the package) | #lang racket/base
SPDX - License - Identifier : ( Apache-2.0 OR MIT )
(require racket/cmdline
racket/file
racket/port
racket/string
racket/list
racket/path
file/zip
openssl/sha1
net/url
pkg/strip
pkg/lib
setup/getinfo)
(provide pack-all)
(define package-format-version 2)
(define (pack-all #:src-dir src-dir #:work-dir work-dir)
(define pack-dest-dir work-dir)
(define catalog-dirs (list (build-path work-dir "catalog")))
(define native? #f)
(define relative? #t)
(define get-modules? #f)
(define checksum-dir (build-path work-dir "pkgs"))
(define source-checksums? #f)
(define src-dirs (list src-dir))
(when pack-dest-dir
(make-directory* pack-dest-dir))
(for ([catalog-dir (in-list catalog-dirs)])
(make-directory* catalog-dir))
(define metadata-ns (make-base-namespace))
(define (status fmt . args)
(apply printf fmt args)
(flush-output))
(define (stream-directory d)
(define-values (i o) (make-pipe (* 100 4096)))
(write package-format-version o)
(define (skip-path? p)
(let-values ([(base name dir?) (split-path p)])
(define s (path->string name))
(or (member s '("compiled"))
(regexp-match? #rx#"^(?:[.]git.*|[.]svn|.*~|#.*#)$" s))))
(thread (lambda ()
(let loop ([d d])
(for ([f (directory-list d #:build? #t)])
(cond
[(skip-path? f) (void)]
[(directory-exists? f)
(write (filter-not skip-path? (directory-list f)) o)
(loop f)]
[(file-exists? f)
(call-with-input-file*
f
(lambda (i) (copy-port i o)))])))
(close-output-port o)))
i)
(define (do-package src-dir pkg-name)
(define zip-file (path-add-suffix pkg-name #".zip"))
(define dest-zip (and pack-dest-dir
(build-path (path->complete-path pack-dest-dir)
zip-file)))
(define pkg-src-dir (build-path src-dir pkg-name))
(when pack-dest-dir
(define sum-file (path-add-suffix pkg-name #".srcsum"))
(status "summing ~a\n" pkg-src-dir)
(define src-sha1 (sha1 (stream-directory pkg-src-dir)))
(define dest-sum (build-path (path->complete-path pack-dest-dir) sum-file))
(unless (and (file-exists? dest-zip)
(file-exists? dest-sum)
(equal? (list (version) src-sha1)
(call-with-input-file* dest-sum read)))
(status "packing ~a\n" zip-file)
(define tmp-dir (make-temporary-file "~a-pkg" 'directory))
(generate-stripped-directory (if native? 'binary 'source)
pkg-src-dir
tmp-dir))
(parameterize ([current-directory tmp-dir])
(when (file-exists? dest-zip) (delete-file dest-zip))
(apply zip dest-zip (directory-list)
#:timestamp 1359788400
#:utc-timestamps? #t
#:system-type 'unix))
(delete-directory/files tmp-dir)
(call-with-output-file*
dest-sum
#:exists 'truncate/replace
(lambda (o)
(write (list (version) src-sha1) o)
(newline o)))))
(define info-path (build-path src-dir pkg-name))
(define i (get-info/full info-path))
(define (get key)
(i key (lambda ()
(error 'catalog-local
"missing `~a'\n path: ~a"
key
(build-path info-path "info.rkt")))))
(define (write-catalog-entry catalog-dir)
(define catalog-dir/normal (simplify-path (path->complete-path catalog-dir)))
(define catalog-pkg-dir (build-path catalog-dir "pkg"))
(define checksum (if dest-zip
(call-with-input-file* dest-zip sha1)
(if source-checksums?
(begin
(status "summing ~a\n" pkg-src-dir)
(sha1 (stream-directory pkg-src-dir)))
"0")))
(define orig-dest (if dest-zip
(build-path pack-dest-dir zip-file)
#f))
(define checksum-dest (if checksum-dir
(build-path checksum-dir checksum zip-file)
orig-dest))
(define pkg-dir (build-path src-dir pkg-name))
(define info (and get-modules?
(get-info/full pkg-dir
#:namespace metadata-ns
#:bootstrap? #t)))
(when dest-zip
(when checksum-dir
(make-directory* (build-path checksum-dir checksum))
(copy-file orig-dest checksum-dest #t))
(call-with-output-file*
(build-path (path-replace-suffix checksum-dest #".zip.CHECKSUM"))
#:exists 'truncate/replace
(lambda (o)
(display checksum o))))
(make-directory* catalog-pkg-dir)
(call-with-output-file*
(build-path catalog-pkg-dir pkg-name)
#:exists 'truncate
(lambda (o)
(write (hash 'source (path->string
(let ([p (path->complete-path
(if dest-zip
checksum-dest
(path->directory-path pkg-dir)))])
(if relative?
(find-relative-path catalog-dir/normal
(simplify-path p))
p)))
'checksum checksum
'name (path->string pkg-name)
'author (string-join (for/list ([r (get 'pkg-authors)])
(if (symbol? r)
(format "~" r)
r))
" ")
'description (get 'pkg-desc)
'tags '()
'dependencies (if get-modules?
(append
(info 'deps (lambda () null))
(info 'build-deps (lambda () null)))
'())
'modules (if get-modules?
(pkg-directory->module-paths
pkg-dir
(path->string pkg-name)
#:namespace metadata-ns)
'()))
o)
(newline o))))
(for ([catalog-dir (in-list catalog-dirs)])
(write-catalog-entry catalog-dir)))
(define found (make-hash))
Recur through directory tree , and treat each directory
(for ([src-dir (in-list src-dirs)])
(let loop ([src-dir src-dir])
(for ([f (in-list (directory-list src-dir))])
(define src-f (build-path src-dir f))
(cond
[(file-exists? (build-path src-f "info.rkt"))
(when (hash-ref found f #f)
(error 'pack-local
"found packages multiple times: ~a and ~a"
(hash-ref found f)
src-f))
(hash-set! found f src-f)
(do-package src-dir f)]
[(directory-exists? src-f)
(loop src-f)])))))
|
f59523c16b9fd97ea9747db388529cb8e78e6110c2b41d514b341ffd8c83c1e1 | egraphdb/egraphdb | egraph_reindexing_server.erl | %%%-------------------------------------------------------------------
%%% @author neerajsharma
%%% (C) 2018
%%% @doc
%%%
%%% @end
%%%
%%% %CopyrightBegin%
%%%
%%% Copyright <> 2017.
%%% All Rights Reserved.
%%%
%%% Licensed under the Apache License, Version 2.0 (the "License");
%%% you may not use this file except in compliance with the License.
%%% You may obtain a copy of the License at
%%%
%%%     http://www.apache.org/licenses/LICENSE-2.0
%%%
%%% Unless required by applicable law or agreed to in writing, software
%%% distributed under the License is distributed on an "AS IS" BASIS,
%%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%%% See the License for the specific language governing permissions and
%%% limitations under the License.
%%%
%%% %CopyrightEnd%
%%%-------------------------------------------------------------------
-module(egraph_reindexing_server).
-behaviour(gen_server).
-include("egraph_constants.hrl").
%% API
-export([start_link/1]).
-ignore_xref([start_link/1]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
         terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
-define(LAGER_ATTRS, [{type, reindex_server}]).
%% Server state:
%%   ref          - timer reference for the periodic tick (unused while
%%                  the tick timer is disabled; see init/1)
%%   timeout_msec - tick period in milliseconds
%%   shard_id     - shard this reindexer is responsible for
%%   dbinfo       - last reindexing job description handled (a map
%%                  carrying a <<"version">> used to discard stale events)
-record(state, {
          ref = undefined :: reference(),
          timeout_msec :: pos_integer(),
          shard_id :: non_neg_integer(),
          dbinfo = #{} :: map()
         }).
%%%===================================================================
%%% API
%%%===================================================================
%%--------------------------------------------------------------------
%% @doc
%% Starts the server
%%
%% @spec start_link(Opts :: list()) -> {ok, Pid} | ignore | {error, Reason}
%% @end
%%--------------------------------------------------------------------
%% Start and locally register the reindexing server under the `name`
%% given in the option proplist.
start_link(Args) ->
    RegName = proplists:get_value(name, Args),
    gen_server:start_link({local, RegName}, ?MODULE, Args, []).
%%%===================================================================
%%% gen_server callbacks
%%%===================================================================
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Initializes the server
%%
%% @spec init(Args) -> {ok, State} |
%%                     {ok, State, Timeout} |
%%                     ignore |
%%                     {stop, Reason}
%% @end
%%--------------------------------------------------------------------
%% Read the tick period and shard id from the option proplist.
%% The periodic tick timer is currently disabled (commented out below),
%% so the server only reacts to explicit refresh calls/casts.
%% Fix: the two commented-out lines had lost their `%%` markers during
%% extraction, which made the function a syntax error; markers restored.
init(Opts) ->
    TimeoutMsec = proplists:get_value(
                    timeout_msec,
                    Opts,
                    ?DEFAULT_REINDEXER_SERVER_TIMEOUT_MSEC),
    lager:debug("TimeoutMsec = ~p", [TimeoutMsec]),
    ShardId = proplists:get_value(shard_id, Opts),
    %% Ref = erlang:start_timer(TimeoutMsec, self(), tick),
    {ok,
     #state{timeout_msec = TimeoutMsec, shard_id = ShardId}}.
    %% #state{ref = Ref, timeout_msec = TimeoutMsec, shard_id = ShardId}}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling call messages
%%
%% @spec handle_call(Request, From, State) ->
%%                                   {reply, Reply, State} |
%%                                   {reply, Reply, State, Timeout} |
%%                                   {noreply, State} |
%%                                   {noreply, State, Timeout} |
%%                                   {stop, Reason, Reply, State} |
%%                                   {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
%% Synchronous refresh: run the reindexing job unless the incoming job
%% description is older than the one we already hold.
handle_call({refresh, DbInfo}, _From, #state{dbinfo = KnownDbInfo} = State) ->
    KnownVersion = maps:get(<<"version">>, KnownDbInfo, -1),
    IncomingVersion = maps:get(<<"version">>, DbInfo),
    NewState =
        if
            IncomingVersion >= KnownVersion ->
                work_sync(DbInfo, State);
            true ->
                %% stale event: ignore
                State
        end,
    {reply, ok, NewState};
handle_call(_Request, _From, State) ->
    {reply, ok, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling cast messages
%%
%% @spec handle_cast(Msg, State) -> {noreply, State} |
%%                                  {noreply, State, Timeout} |
%%                                  {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
%% Asynchronous refresh: same version gate as the call variant.
handle_cast({refresh, DbInfo}, #state{dbinfo = KnownDbInfo} = State) ->
    lager:debug("DbInfo = ~p, State = ~p", [DbInfo, State]),
    KnownVersion = maps:get(<<"version">>, KnownDbInfo, -1),
    IncomingVersion = maps:get(<<"version">>, DbInfo),
    NewState =
        if
            IncomingVersion >= KnownVersion ->
                lager:debug("[~p >= ~p] DbInfo = ~p, LocalVersion = ~p",
                            [IncomingVersion, KnownVersion, DbInfo, KnownVersion]),
                work_sync(DbInfo, State);
            true ->
                %% stale event: ignore
                State
        end,
    {noreply, NewState};
handle_cast(_Msg, State) ->
    {noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Handling all non call/cast messages
%%
%% @spec handle_info(Info, State) -> {noreply, State} |
%%                                   {noreply, State, Timeout} |
%%                                   {stop, Reason, State}
%% @end
%%--------------------------------------------------------------------
%% Periodic tick handler. The tick-driven path is currently disabled
%% (process_tick/2 is commented out below), so a tick only logs.
%% Fix: the commented-out call on the line before {noreply, State} had
%% lost its `%%` marker during extraction, which made this clause a
%% syntax error (and referenced the unbound TimeoutMsec); restored.
handle_info({timeout, _R, tick},
            #state{ref = _R, timeout_msec = _TimeoutMsec}
            = State) ->
    lager:debug(?LAGER_ATTRS, "[~p] ~p refreshing", [self(), ?MODULE]),
    %% process_tick(TimeoutMsec, State);
    {noreply, State};
handle_info(_Info, State) ->
    {noreply, State}.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% This function is called by a gen_server when it is about to
%% terminate. It should be the opposite of Module:init/1 and do any
%% necessary cleaning up. When it returns, the gen_server terminates
%% with Reason. The return value is ignored.
%%
%% @spec terminate(Reason, State) -> void()
%% @end
%%--------------------------------------------------------------------
%% Nothing to clean up here; no owned resources are held in the state.
terminate(_Reason, _State) ->
    ok.
%%--------------------------------------------------------------------
%% @private
%% @doc
%% Convert process state when code is changed
%%
%% @spec code_change(OldVsn, State, Extra) -> {ok, NewState}
%% @end
%%--------------------------------------------------------------------
%% Hot code upgrade: state layout unchanged, pass the state through.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%%%===================================================================
%%% Internal functions
%%%===================================================================
%% Run process_db_info/2 in a linked worker process and wait (without a
%% timeout) for its result. trap_exit is enabled for the duration so a
%% crashing worker delivers an {'EXIT', ...} message instead of killing
%% this server; the previous trap_exit flag is restored before returning.
work_sync(DbInfo, State) ->
    CallbackPid = self(),
    OldTrapExitFlag = erlang:process_flag(trap_exit, true),
    WorkerPid = erlang:spawn_link(fun() ->
        %% the worker computes the new state and mails it back
        State2 = process_db_info(DbInfo, State),
        CallbackPid ! {ok, State2}
    end),
    %% infinitely wait for worker process
    State4 = work_sync_receive_result(WorkerPid, DbInfo, State),
    erlang:process_flag(trap_exit, OldTrapExitFlag),
    State4.
%% Wait for the worker spawned by work_sync/2. Three outcomes:
%%   * {ok, State3}: worker finished; drain its pending 'EXIT' message
%%     (if already delivered) and return the new state;
%%   * a queued refresh cast whose version is not newer than the job
%%     currently running: consume it (it is subsumed) and keep waiting;
%%   * 'EXIT' from the worker: it crashed; keep the old state.
work_sync_receive_result(WorkerPid, DbInfo, State) ->
    %% infinitely wait for worker process
    LocalVersion = maps:get(<<"version">>, DbInfo, -1),
    receive
        {ok, State3} ->
            %% consume worker process exit message
            receive
                {'EXIT', WorkerPid, _Msg} -> ok
            after 0 -> ok
            end,
            State3;
        {'$gen_cast', {refresh,
                       #{<<"version">> := ReceivedVersion} = _DbInfo2}
        } when ReceivedVersion =< LocalVersion ->
            work_sync_receive_result(WorkerPid, DbInfo, State);
        {'EXIT', WorkerPid, _Msg} ->
            State
    end.
%% -spec process_tick(TimeoutMsec :: pos_integer(),
%%                    State :: term()) -> {noreply, State :: term()}.
%% process_tick(TimeoutMsec, #state{shard_id = ShardId} = State) ->
%%     %% TODO
%%     case egraph_reindex_model:read_resource(ShardId) of
%%         {ok, []} ->
%%             process_db_info(DbInfo, State);
%%         TableError ->
%%             lager:debug("Cannot read from table, TableError = ~p", [TableError]),
%%             ok
%%     end,
%%     Ref = erlang:start_timer(TimeoutMsec, self(), tick),
%%     {noreply, State#state{ref = Ref}}.
%% Execute one batch of the reindexing job described by DbInfo for this
%% shard: walk detail rows backwards by updated_datetime, reindex each,
%% then persist the new low-water mark and bump the job's version.
%% When no rows remain, mark the job finished. Returns the new state.
%% Fix: two comment lines (the duplicate-entry example and "TODO mark
%% done") had lost their `%%` markers during extraction, which made the
%% function a syntax error; markers restored. No logic changed.
process_db_info(DbInfo, #state{shard_id = ShardId} = State) ->
    lager:debug("ShardId = ~p, DbInfo = ~p", [ShardId, DbInfo]),
    case maps:get(<<"is_reindexing">>, DbInfo, 0) of
        1 ->
            %% TODO: enable or continue if running already
            ReindexDetails = maps:get(<<"details">>, DbInfo),
            MaxDataUpdatedDatetime = egraph_util:convert_binary_to_datetime(
                maps:get(<<"max_data_updated_datetime">>, ReindexDetails)),
            MinDataUpdatedDatetime = egraph_util:convert_binary_to_datetime(
                maps:get(<<"min_data_updated_datetime">>, ReindexDetails)),
            %% Resume from the saved low-water mark, else start at the top.
            ReindexingDataUpdatedDatetime = case maps:get(
                                                   <<"reindexing_data_updated_datetime">>,
                                                   ReindexDetails,
                                                   undefined) of
                                                undefined ->
                                                    MaxDataUpdatedDatetime;
                                                ReindexDatetimeBin ->
                                                    egraph_util:convert_binary_to_datetime(
                                                        ReindexDatetimeBin)
                                            end,
            NumRowsPerRun = maps:get(<<"num_rows_per_run">>, ReindexDetails),
            case egraph_detail_model:search_resource(
                   ShardId,
                   previous,
                   ReindexingDataUpdatedDatetime,
                   MinDataUpdatedDatetime,
                   NumRowsPerRun) of
                {ok, Records} ->
                    %% {error,{1062,<<"23000">>,
                    %%   <<"Duplicate entry 'abc-zY&\\xE6\\xA9\\x13' for key 'PRIMARY'">>}}
                    %%
                    %% IMPORTANT: the records is in ascending order, so reverse it to get min
                    %% datetime use lists:foldr/3
                    %% NOTE(review): if Records =:= [], R below is 'ok' and the
                    %% datetime conversion will fail — confirm search_resource
                    %% never returns {ok, []}.
                    R = lists:foldr(fun(_E, error) ->
                                            error;
                                       (E, _AccIn) ->
                                            Details = maps:get(<<"details">>, E),
                                            Indexes = maps:get(<<"indexes">>, E, null),
                                            case egraph_detail_model:reindex_key(
                                                   Details, Indexes) of
                                                [] ->
                                                    maps:get(<<"updated_datetime">>, E);
                                                _ ->
                                                    error
                                            end
                                    end, ok, Records),
                    case R of
                        error ->
                            %% failed.
                            lager:error("Reindex failed, will retry again later"),
                            State;
                        LeastDatetimeBin ->
                            %% move ahead
                            %% NOTE: The leastDatetimeBin is used as-is, so next time
                            %% the same record
                            LeastDatetime = egraph_util:convert_binary_to_datetime(LeastDatetimeBin),
                            NextLowerDatetime = qdate:to_date(qdate:add_seconds(-1, LeastDatetime)),
                            lager:debug("LeastDatetime = ~p, NextLowerDatetime = ~p", [LeastDatetime, NextLowerDatetime]),
                            UpdatedReindexDetails = ReindexDetails#{
                                <<"reindexing_data_updated_datetime">> => egraph_util:convert_datetime_to_binary(NextLowerDatetime)},
                            UpdatedDbInfo = DbInfo#{<<"details">> => UpdatedReindexDetails},
                            egraph_reindex_model:create(undefined, UpdatedDbInfo, []),
                            OldVersion = maps:get(<<"version">>, UpdatedDbInfo),
                            State#state{dbinfo = UpdatedDbInfo#{<<"version">> => OldVersion + 1}}
                    end;
                {error, not_found} ->
                    %% TODO mark done
                    UpdatedDbInfo = DbInfo#{<<"is_reindexing">> => 0},
                    egraph_reindex_model:create(undefined, UpdatedDbInfo, []),
                    OldVersion = maps:get(<<"version">>, UpdatedDbInfo),
                    State#state{dbinfo = UpdatedDbInfo#{<<"version">> => OldVersion + 1}};
                _ ->
                    %% temporary database error, so retry later
                    State
            end;
        _ ->
            %% TODO: no reindexing running
            State
    end.
| null | https://raw.githubusercontent.com/egraphdb/egraphdb/41a0131be227f7f0a35ba0e2c1cb23d70cd86b03/src/egraph_reindexing_server.erl | erlang | -------------------------------------------------------------------
@doc
@end
%CopyrightBegin%
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
%CopyrightEnd%
-------------------------------------------------------------------
API
gen_server callbacks
===================================================================
API
===================================================================
--------------------------------------------------------------------
@doc
Starts the server
@end
--------------------------------------------------------------------
===================================================================
gen_server callbacks
===================================================================
--------------------------------------------------------------------
@doc
Initializes the server
ignore |
{stop, Reason}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Handling call messages
{reply, Reply, State} |
{stop, Reason, Reply, State} |
{stop, Reason, State}
@end
--------------------------------------------------------------------
ignore older event
--------------------------------------------------------------------
@doc
Handling cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
ignore older event
--------------------------------------------------------------------
@doc
Handling all non call/cast messages
{stop, Reason, State}
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
This function is called by a gen_server when it is about to
terminate. It should be the opposite of Module:init/1 and do any
necessary cleaning up. When it returns, the gen_server terminates
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Convert process state when code is changed
@end
--------------------------------------------------------------------
===================================================================
===================================================================
infinitely wait for worker process
infinitely wait for worker process
consume worker process exit message
case egraph_reindex_model:read_resource(ShardId) of
ok
end,
TODO: enable or continue if running already
{error,{1062,<<"23000">>,
IMPORTANT: the records is in ascending order, so reverse it to get min
datetime use lists:foldr/3
failed.
move ahead
NOTE: The leastDatetimeBin is used as-is, so next time
the same record
temporary database error, so retry later
TODO: no reindexing running | @author neerajsharma
( C ) 2018 ,
Copyright < > 2017 .
All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(egraph_reindexing_server).
-behaviour(gen_server).
-include("egraph_constants.hrl").
-export([start_link/1]).
-ignore_xref([start_link/1]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
-define(LAGER_ATTRS, [{type, reindex_server}]).
-record(state, {
ref = undefined :: reference(),
timeout_msec :: pos_integer(),
shard_id :: non_neg_integer(),
dbinfo = #{} :: map()
}).
@spec start_link(Opts : : list ( ) ) - > { ok , Pid } | ignore | { error , Reason }
start_link(Opts) ->
Name = proplists:get_value(name, Opts),
gen_server:start_link({local, Name}, ?MODULE, Opts, []).
@private
) - > { ok , State } |
{ ok , State , Timeout } |
init(Opts) ->
TimeoutMsec = proplists:get_value(
timeout_msec,
Opts,
?DEFAULT_REINDEXER_SERVER_TIMEOUT_MSEC),
lager:debug("TimeoutMsec = ~p", [TimeoutMsec]),
ShardId = proplists:get_value(shard_id, Opts),
Ref = erlang : start_timer(TimeoutMsec , self ( ) , tick ) ,
{ok,
#state{timeout_msec = TimeoutMsec, shard_id = ShardId}}.
# state{ref = Ref , timeout_msec = TimeoutMsec , shard_id = } } .
@private
, From , State ) - >
{ reply , Reply , State , Timeout } |
{ noreply , State } |
{ noreply , State , Timeout } |
handle_call({refresh, DbInfo}, _From, #state{dbinfo = LocalDbInfo} = State) ->
LocalVersion = maps:get(<<"version">>, LocalDbInfo, -1),
ReceivedVersion = maps:get(<<"version">>, DbInfo),
State2 = case ReceivedVersion >= LocalVersion of
true ->
work_sync(DbInfo, State);
false ->
State
end,
{reply, ok, State2};
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
@private
@spec handle_cast(Msg , State ) - > { noreply , State } |
{ noreply , State , Timeout } |
handle_cast({refresh, DbInfo}, #state{dbinfo = LocalDbInfo} = State) ->
lager:debug("DbInfo = ~p, State = ~p", [DbInfo, State]),
LocalVersion = maps:get(<<"version">>, LocalDbInfo, -1),
ReceivedVersion = maps:get(<<"version">>, DbInfo),
State2 = case ReceivedVersion >= LocalVersion of
true ->
lager:debug("[~p >= ~p] DbInfo = ~p, LocalVersion = ~p",
[ReceivedVersion, LocalVersion, DbInfo, LocalVersion]),
work_sync(DbInfo, State);
false ->
State
end,
{noreply, State2};
handle_cast(_Msg, State) ->
{noreply, State}.
@private
, State ) - > { noreply , State } |
{ noreply , State , Timeout } |
handle_info({timeout, _R, tick},
#state{ref = _R, timeout_msec = _TimeoutMsec}
= State) ->
lager:debug(?LAGER_ATTRS, "[~p] ~p refreshing", [self(), ?MODULE]),
process_tick(TimeoutMsec , State ) ;
{noreply, State};
handle_info(_Info, State) ->
{noreply, State}.
@private
with . The return value is ignored .
, State ) - > void ( )
terminate(_Reason, _State) ->
ok.
@private
, State , Extra ) - > { ok , NewState }
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
work_sync(DbInfo, State) ->
CallbackPid = self(),
OldTrapExitFlag = erlang:process_flag(trap_exit, true),
WorkerPid = erlang:spawn_link(fun() ->
State2 = process_db_info(DbInfo, State),
CallbackPid ! {ok, State2}
end),
State4 = work_sync_receive_result(WorkerPid, DbInfo, State),
erlang:process_flag(trap_exit, OldTrapExitFlag),
State4.
work_sync_receive_result(WorkerPid, DbInfo, State) ->
LocalVersion = maps:get(<<"version">>, DbInfo, -1),
receive
{ok, State3} ->
receive
{'EXIT', WorkerPid, _Msg} -> ok
after 0 -> ok
end,
State3;
{'$gen_cast', {refresh,
#{<<"version">> := ReceivedVersion} = _DbInfo2}
} when ReceivedVersion =< LocalVersion ->
work_sync_receive_result(WorkerPid, DbInfo, State);
{'EXIT', WorkerPid, _Msg} ->
State
end.
-spec process_tick(TimeoutMsec : : ( ) ,
State : : term ( ) ) - > { noreply , State : : term ( ) } .
process_tick(TimeoutMsec , # state{shard_id = ShardId } = State ) - >
TODO
{ ok , [ ] } - >
process_db_info(DbInfo , State ) ;
TableError - >
lager : debug("Cannot read from table , TableError = ~p " , [ TableError ] ) ,
Ref = erlang : start_timer(TimeoutMsec , self ( ) , tick ) ,
{ noreply , State#state{ref = Ref } } .
process_db_info(DbInfo, #state{shard_id = ShardId} = State) ->
lager:debug("ShardId = ~p, DbInfo = ~p", [ShardId, DbInfo]),
case maps:get(<<"is_reindexing">>, DbInfo, 0) of
1 ->
ReindexDetails = maps:get(<<"details">>, DbInfo),
MaxDataUpdatedDatetime = egraph_util:convert_binary_to_datetime(
maps:get(<<"max_data_updated_datetime">>, ReindexDetails)),
MinDataUpdatedDatetime = egraph_util:convert_binary_to_datetime(
maps:get(<<"min_data_updated_datetime">>, ReindexDetails)),
ReindexingDataUpdatedDatetime = case maps:get(
<<"reindexing_data_updated_datetime">>,
ReindexDetails,
undefined) of
undefined ->
MaxDataUpdatedDatetime;
ReindexDatetimeBin ->
egraph_util:convert_binary_to_datetime(
ReindexDatetimeBin)
end,
NumRowsPerRun = maps:get(<<"num_rows_per_run">>, ReindexDetails),
case egraph_detail_model:search_resource(
ShardId,
previous,
ReindexingDataUpdatedDatetime,
MinDataUpdatedDatetime,
NumRowsPerRun) of
{ok, Records} ->
< < " Duplicate entry ' abc - zY&\\xE6\\xA9\\x13 ' for key ' PRIMARY ' " > > } }
R = lists:foldr(fun(_E, error) ->
error;
(E, _AccIn) ->
Details = maps:get(<<"details">>, E),
Indexes = maps:get(<<"indexes">>, E, null),
case egraph_detail_model:reindex_key(
Details, Indexes) of
[] ->
maps:get(<<"updated_datetime">>, E);
_ ->
error
end
end, ok, Records),
case R of
error ->
lager:error("Reindex failed, will retry again later"),
State;
LeastDatetimeBin ->
LeastDatetime = egraph_util:convert_binary_to_datetime(LeastDatetimeBin),
NextLowerDatetime = qdate:to_date(qdate:add_seconds(-1, LeastDatetime)),
lager:debug("LeastDatetime = ~p, NextLowerDatetime = ~p", [LeastDatetime, NextLowerDatetime]),
UpdatedReindexDetails = ReindexDetails#{
<<"reindexing_data_updated_datetime">> => egraph_util:convert_datetime_to_binary(NextLowerDatetime)},
UpdatedDbInfo = DbInfo#{<<"details">> => UpdatedReindexDetails},
egraph_reindex_model:create(undefined, UpdatedDbInfo, []),
OldVersion = maps:get(<<"version">>, UpdatedDbInfo),
State#state{dbinfo = UpdatedDbInfo#{<<"version">> => OldVersion + 1}}
end;
{error, not_found} ->
TODO mark done
UpdatedDbInfo = DbInfo#{<<"is_reindexing">> => 0},
egraph_reindex_model:create(undefined, UpdatedDbInfo, []),
OldVersion = maps:get(<<"version">>, UpdatedDbInfo),
State#state{dbinfo = UpdatedDbInfo#{<<"version">> => OldVersion + 1}};
_ ->
State
end;
_ ->
State
end.
|
3dfa411a199466bfc3dd51a3705192999a2917c92d4891c24870cf1f9e2291a4 | Kappa-Dev/KappaTools | result_util.ml | (******************************************************************************)
(* _ __ * The Kappa Language *)
(* | |/ / * Copyright 2010-2020 CNRS - Harvard Medical School - INRIA - IRIF *)
(* | ' / *********************************************************************)
(* | . \ * This file is distributed under the terms of the *)
(* |_|\_\ * GNU Lesser General Public License Version 3 *)
(******************************************************************************)
type status =
[ `OK | `Accepted | `Created |
`Bad_request | `Conflict | `Not_found | `Request_timeout ]
(** The subset of [Cohttp.Code.status] we need *)
type message = {
severity : Logs.level;
text : string ; (*should be an algebraic type*)
range : Locality.t option;
}
type ('a,'b) t = {
value : ('a,'b) Result.result;
status : status;
messages : message list;
}
(* Serialize a [Logs.level] as a JSON string, e.g. "info". *)
let write_severity ob x =
  Buffer.add_char ob '"';
  Buffer.add_string ob (Logs.level_to_string (Some x));
  Buffer.add_char ob '"'

(* Read a severity back from a JSON string; absent or unknown levels
   are reported as [Yojson.Json_error]. *)
let read_severity p lb =
  let parsed = Logs.level_of_string (Yojson.Basic.read_string p lb) in
  match parsed with
  | Result.Ok (Some level) -> level
  | Result.Ok None -> raise (Yojson.Json_error "Message of no severity")
  | Result.Error (`Msg reason) ->
    raise (Yojson.Json_error ("While reading severity: "^reason))
(* HTTP status -> its JSON integer literal. *)
let write_status ob status =
  let code =
    match status with
    | `OK -> "200"
    | `Accepted -> "202"
    | `Created -> "201"
    | `Bad_request -> "400"
    | `Conflict -> "409"
    | `Not_found -> "404"
    | `Request_timeout -> "408" in
  Buffer.add_string ob code

(* JSON integer -> HTTP status; any code outside the subset raises. *)
let read_status p lb =
  let code = Yojson.Basic.read_int p lb in
  match code with
  | 200 -> `OK
  | 202 -> `Accepted
  | 201 -> `Created
  | 400 -> `Bad_request
  | 409 -> `Conflict
  | 404 -> `Not_found
  | 408 -> `Request_timeout
  | x -> raise (Yojson.Json_error
                  ("Status "^string_of_int x^" is out of the scope of Kappa"))
(* Emit a message as a JSON object; the "range" field is written only
   when a location is attached. *)
let write_message ob { severity; text; range } =
  Buffer.add_char ob '{';
  JsonUtil.write_field "severity" write_severity ob severity;
  JsonUtil.write_comma ob;
  JsonUtil.write_field "text" Yojson.Basic.write_string ob text;
  (match range with
   | None -> ()
   | Some r ->
     JsonUtil.write_comma ob;
     JsonUtil.write_field "range" Locality.write_range ob r);
  Buffer.add_char ob '}'
(* Inverse of [write_message]: fold over the object's fields, threading a
   (severity, text, range) triple as accumulator.  Fields may appear in any
   order; an unexpected key raises.  Defaults when a field is absent:
   severity [Logs.App], empty text, no range. *)
let read_message p lb =
  let (severity,text,range) =
    Yojson.Basic.read_fields
      (fun (s,t,r) key p lb ->
         if key = "severity" then (read_severity p lb,t,r)
         else if key = "text" then (s,Yojson.Basic.read_string p lb,r)
         else if key = "range" then (s,t,Some (Locality.read_range p lb))
         else raise (Yojson.Json_error ("No field "^key^" expected in message")))
      (Logs.App,"",None) p lb in
  { severity; text; range }
(* Pretty-print the message text, annotated with its source location
   when one is attached. *)
let print_message f { range; text; _ } =
  match range with
  | None -> Format.pp_print_string f text
  | Some range ->
    Locality.print_annot Format.pp_print_string f (text,range)
(* Serialize a result as ["Ok"|"Error", payload, status, messages].
   The tail shared by both variants (status + messages) is factored out. *)
let write_t write__ok write__error = fun ob -> function
  | { value; status; messages } ->
    (match value with
     | Result.Ok x ->
       Buffer.add_string ob "[\"Ok\",";
       write__ok ob x
     | Result.Error x ->
       Buffer.add_string ob "[\"Error\",";
       write__error ob x);
    Buffer.add_char ob ',';
    write_status ob status;
    Buffer.add_char ob ',';
    JsonUtil.write_list write_message ob messages;
    Buffer.add_char ob ']'

(* Same serialization, returned as a fresh string. *)
let string_of_t write__ok write__error ?(len = 1024) x =
  let buffer = Buffer.create len in
  let () = write_t write__ok write__error buffer x in
  Buffer.contents buffer
(* Read the tail common to both variants: payload, ',', status, ',',
   message list.  [f] parses the payload (ok or error side). *)
let read_t_content f p lb =
  let v = f p lb in
  let () = JsonUtil.read_between_spaces
      Yojson.Basic.read_comma p lb in
  let s = read_status p lb in
  let () = JsonUtil.read_between_spaces
      Yojson.Basic.read_comma p lb in
  (v,s,Yojson.Basic.read_list read_message p lb)
(* Parse a [t] written by [write_t].  Accepts both Yojson's edgy-bracket
   variant syntax (<"Ok": ...>) and the plain JSON list form (["Ok", ...]);
   [closing] consumes the matching closing delimiter. *)
let read_t read__ok read__error = fun p lb ->
  let aux_read_t closing p lb =
    Yojson.Basic.read_space p lb;
    (* Dispatch on the variant tag to pick the payload parser. *)
    let out = Yojson.Basic.map_ident p
        (fun s pos len ->
           Yojson.Basic.read_space p lb;
           Yojson.Basic.read_comma p lb;
           Yojson.Basic.read_space p lb;
           match String.sub s pos len with
           | "Ok" ->
             let (v,status,messages) = read_t_content read__ok p lb in
             { value = Result.Ok v; status; messages }
           | "Error" ->
             let (v,status,messages) = read_t_content read__error p lb in
             { value = Result.Error v; status; messages }
           | x -> raise (Yojson.Json_error ("Field \""^x^
                                            "\" does not belong to the result type"))
        ) lb in
    Yojson.Basic.read_space p lb;
    closing p lb;
    Yojson.Basic.read_space p lb;
    out in
  match Yojson.Basic.start_any_variant p lb with
  | `Edgy_bracket -> aux_read_t Yojson.Basic.read_gt p lb
  | `Double_quote ->
    raise (Yojson.Json_error "Not of result type")
  | `Square_bracket -> aux_read_t Yojson.Basic.read_rbr p lb
(* Parse a [t] directly from a string. *)
let t_of_string read__ok read__error s =
  let lexbuf = Lexing.from_string s in
  read_t read__ok read__error (Yojson.Safe.init_lexer ()) lexbuf

(* Promote a plain [Result.result] into a [t] with no messages. *)
let lift ?(ok_status = `OK) ?(error_status = `Bad_request) = function
  | Result.Error _ as value -> { value; status = error_status; messages = [] }
  | Result.Ok _ as value -> { value; status = ok_status; messages = [] }

(* Case analysis on the carried value; status and messages are dropped. *)
let fold
    ~(ok:'ok -> 'a)
    ~(error:'error -> 'a) : ('ok,'error) t -> 'a
  = fun r ->
    match r.value with
    | Result.Ok o -> ok o
    | Result.Error e -> error e
(* Monadic bind on the Ok side.
   - An Error input passes through untouched (status and messages kept).
   - If [ok] fails, the result takes [error_status] but keeps the messages.
   - If it succeeds, [overwrite_status] (when given) replaces the status. *)
let bind:
  type ok a err. ?overwrite_status:status -> ?error_status: status ->
  (ok -> (a,err) Result.result) -> (ok,err) t -> (a,err) t =
  fun ?overwrite_status ?(error_status = `Bad_request) ok -> function
    | { value = Result.Error _; _ } as e -> e
    | { value = Result.Ok o; status; messages } ->
      match ok o with
      | Result.Error _ as value -> { value; status = error_status; messages }
      | Result.Ok _ as value -> match overwrite_status with
        | None -> { value; status; messages }
        | Some status -> { value; status; messages }
(* Apply [f] on the Ok side; status and messages are preserved. *)
let map: type ok a err. (ok -> a) -> (ok,err) t -> (a,err) t =
  fun f -> function
    | { value = Result.Error _; _ } as e -> e
    | { value = Result.Ok o; status; messages } ->
      { value = Result.Ok (f o); status; messages }
(* Combine two Ok results with [f].  The status comes from the SECOND
   argument, and the message lists are concatenated with the second's
   messages first.  If either side is an Error it is returned as-is, so
   the other side's messages are dropped.
   NOTE(review): the status/message asymmetry looks deliberate but is
   worth confirming against the callers. *)
let map2:
  type a b ok err. (a -> b -> ok) -> (a,err) t -> (b,err) t -> (ok,err) t =
  fun f a b -> match a,b with
    | { value = Result.Ok a; messages; _ },
      { value = Result.Ok b; status; messages = m' } -> {
        value = Result.Ok (f a b);
        status;
        messages = List.rev_append (List.rev m') messages;
      }
    | { value = Result.Error _; _ } as e, _ -> e
    | { value = Result.Ok _; _ }, ({ value = Result.Error _; _ } as e) -> e
(* Build an error result carrying no messages. *)
let error ?(status=`Bad_request) (error : 'error) : ('ok,'error) t =
  { messages = []; status; value = Result.Error error }

(* Build a success result carrying no messages. *)
let ok ?(status=`OK) (ok : 'ok) : ('ok,'error) t =
  { messages = []; status; value = Result.Ok ok }
| null | https://raw.githubusercontent.com/Kappa-Dev/KappaTools/777835b82f449d3d379713df76ff25fd5926b762/core/dataStructures/result_util.ml | ocaml | ****************************************************************************
_ __ * The Kappa Language
| ' / ********************************************************************
| . \ * This file is distributed under the terms of the
|_|\_\ * GNU Lesser General Public License Version 3
****************************************************************************
* The subset of [Cohttp.Code.status] we need
should be an algebraic type | | |/ / * Copyright 2010 - 2020 CNRS - Harvard Medical School - INRIA - IRIF
type status =
[ `OK | `Accepted | `Created |
`Bad_request | `Conflict | `Not_found | `Request_timeout ]
type message = {
severity : Logs.level;
range : Locality.t option;
}
type ('a,'b) t = {
value : ('a,'b) Result.result;
status : status;
messages : message list;
}
let write_severity ob x =
let () = Buffer.add_char ob '"' in
let () = Buffer.add_string ob (Logs.level_to_string (Some x)) in
Buffer.add_char ob '"'
let read_severity p lb =
match Logs.level_of_string (Yojson.Basic.read_string p lb) with
| Result.Ok (Some x) -> x
| Result.Ok None -> raise (Yojson.Json_error "Message of no severity")
| Result.Error (`Msg x) ->
raise (Yojson.Json_error ("While reading severity: "^x))
let write_status ob = function
| `OK -> Buffer.add_string ob "200"
| `Accepted -> Buffer.add_string ob "202"
| `Created -> Buffer.add_string ob "201"
| `Bad_request -> Buffer.add_string ob "400"
| `Conflict -> Buffer.add_string ob "409"
| `Not_found -> Buffer.add_string ob "404"
| `Request_timeout -> Buffer.add_string ob "408"
let read_status p lb =
match Yojson.Basic.read_int p lb with
| 200 -> `OK
| 202 -> `Accepted
| 201 -> `Created
| 400 -> `Bad_request
| 409 -> `Conflict
| 404 -> `Not_found
| 408 -> `Request_timeout
| x -> raise (Yojson.Json_error
("Status "^string_of_int x^" is out of the scope of Kappa"))
let write_message ob { severity; text; range } =
let () = Buffer.add_char ob '{' in
let () = JsonUtil.write_field "severity" write_severity ob severity in
let () = JsonUtil.write_comma ob in
let () = JsonUtil.write_field "text" Yojson.Basic.write_string ob text in
let () = match range with
| None -> ()
| Some r ->
let () = JsonUtil.write_comma ob in
JsonUtil.write_field "range" Locality.write_range ob r in
Buffer.add_char ob '}'
let read_message p lb =
let (severity,text,range) =
Yojson.Basic.read_fields
(fun (s,t,r) key p lb ->
if key = "severity" then (read_severity p lb,t,r)
else if key = "text" then (s,Yojson.Basic.read_string p lb,r)
else if key = "range" then (s,t,Some (Locality.read_range p lb))
else raise (Yojson.Json_error ("No field "^key^" expected in message")))
(Logs.App,"",None) p lb in
{ severity; text; range }
let print_message f { range; text; _ } =
match range with
| Some range ->
Locality.print_annot Format.pp_print_string f (text,range)
| None -> Format.pp_print_string f text
let write_t write__ok write__error = fun ob -> function
| { value = Result.Ok x; status; messages } ->
Buffer.add_string ob "[\"Ok\",";
write__ok ob x;
Buffer.add_char ob ',';
write_status ob status;
Buffer.add_char ob ',';
JsonUtil.write_list write_message ob messages;
Buffer.add_char ob ']'
| { value = Result.Error x; status; messages } ->
Buffer.add_string ob "[\"Error\",";
write__error ob x;
Buffer.add_char ob ',';
write_status ob status;
Buffer.add_char ob ',';
JsonUtil.write_list write_message ob messages;
Buffer.add_char ob ']'
let string_of_t write__ok write__error ?(len = 1024) x =
let ob = Buffer.create len in
write_t write__ok write__error ob x;
Buffer.contents ob
let read_t_content f p lb =
let v = f p lb in
let () = JsonUtil.read_between_spaces
Yojson.Basic.read_comma p lb in
let s = read_status p lb in
let () = JsonUtil.read_between_spaces
Yojson.Basic.read_comma p lb in
(v,s,Yojson.Basic.read_list read_message p lb)
let read_t read__ok read__error = fun p lb ->
let aux_read_t closing p lb =
Yojson.Basic.read_space p lb;
let out = Yojson.Basic.map_ident p
(fun s pos len ->
Yojson.Basic.read_space p lb;
Yojson.Basic.read_comma p lb;
Yojson.Basic.read_space p lb;
match String.sub s pos len with
| "Ok" ->
let (v,status,messages) = read_t_content read__ok p lb in
{ value = Result.Ok v; status; messages }
| "Error" ->
let (v,status,messages) = read_t_content read__error p lb in
{ value = Result.Error v; status; messages }
| x -> raise (Yojson.Json_error ("Field \""^x^
"\" does not belong to the result type"))
) lb in
Yojson.Basic.read_space p lb;
closing p lb;
Yojson.Basic.read_space p lb;
out in
match Yojson.Basic.start_any_variant p lb with
| `Edgy_bracket -> aux_read_t Yojson.Basic.read_gt p lb
| `Double_quote ->
raise (Yojson.Json_error "Not of result type")
| `Square_bracket -> aux_read_t Yojson.Basic.read_rbr p lb
let t_of_string read__ok read__error s =
read_t read__ok read__error (Yojson.Safe.init_lexer ()) (Lexing.from_string s)
let lift ?(ok_status = `OK) ?(error_status = `Bad_request) = function
| Result.Ok _ as value -> { value; status = ok_status; messages = [] }
| Result.Error _ as value -> { value; status = error_status; messages = [] }
let fold
~(ok:'ok -> 'a)
~(error:'error -> 'a) : ('ok,'error) t -> 'a
=
function
| { value = Result.Ok o; _ } -> ok o
| { value = Result.Error e; _ } -> error e
let bind:
type ok a err. ?overwrite_status:status -> ?error_status: status ->
(ok -> (a,err) Result.result) -> (ok,err) t -> (a,err) t =
fun ?overwrite_status ?(error_status = `Bad_request) ok -> function
| { value = Result.Error _; _ } as e -> e
| { value = Result.Ok o; status; messages } ->
match ok o with
| Result.Error _ as value -> { value; status = error_status; messages }
| Result.Ok _ as value -> match overwrite_status with
| None -> { value; status; messages }
| Some status -> { value; status; messages }
let map: type ok a err. (ok -> a) -> (ok,err) t -> (a,err) t =
fun ok -> function
| { value = Result.Ok o; status; messages } ->
{ value = Result.Ok (ok o); status; messages }
| { value = Result.Error _; _ } as e -> e
let map2:
type a b ok err. (a -> b -> ok) -> (a,err) t -> (b,err) t -> (ok,err) t =
fun f a b -> match a,b with
| { value = Result.Ok a; messages; _ },
{ value = Result.Ok b; status; messages = m' } -> {
value = Result.Ok (f a b);
status;
messages = List.rev_append (List.rev m') messages;
}
| { value = Result.Error _; _ } as e, _ -> e
| { value = Result.Ok _; _ }, ({ value = Result.Error _; _ } as e) -> e
let error ?(status=`Bad_request) (error:'error ) : ('ok,'error) t =
{ value = Result.Error error; status; messages = []; }
let ok ?(status=`OK) (ok : 'ok) : ('ok,'error) t =
{ value = Result.Ok ok; status; messages = []; }
|
6d14748ee3cb31a5df4f1e6282d4d051ce293344fc6019e62029c6ee2075c35c | riemann/riemann | slack_test.clj | (ns riemann.slack-test
(:require [riemann.logging :as logging]
[riemann.slack :as slack]
[cheshire.core :as json]
[clj-http.client :as client]
[clojure.test :refer :all]))
(def api-key (System/getenv "SLACK_API_KEY"))
(def room (System/getenv "SLACK_ALERT_ROOM"))
(def account (System/getenv "SLACK_ALERT_ACCOUNT"))
(def user "Riemann_Slack_Test")
(when-not api-key
(println "export SLACK_API_KEY=\"...\" to run these tests."))
(when-not room
(println "export SLACK_ALERT_ROOM=\"...\" to run these tests."))
(when-not account
(println "export SLACK_ALERT_ACCOUNT=\"...\" to run these tests."))
(logging/init)
;; Integration test (needs the three env vars above, plus the
;; :slack/:integration selectors): pushes one real event through a
;; freshly-built slack stream.
(deftest ^:slack ^:integration test_event
  (let [send-event (slack/slack account api-key user room)]
    (send-event {:host "localhost"
                 :service "good event test"
                 :description "Testing slack.com alerts from riemann"
                 :metric 42
                 :state "ok"})))
(defn- capture-post
  "Stand-in for clj-http's post: records the target url and the outgoing
  payload string into result-atom for later inspection."
  [result-atom url params]
  (reset! result-atom {:url url
                       :body (-> params :form-params :payload)}))
(def ^:private any-account {:account "any", :token "any"})
(defn- with-formatter
  "Options map with placeholder username/channel and the given formatter."
  [fmt]
  {:username "any"
   :channel "any"
   :formatter fmt})
;; Unit tests: client/post is redefined to capture-post, so nothing touches
;; the network; each case inspects the URL and JSON payload the slack
;; stream would have POSTed.
;; NOTE(review): some URL/regex literals below look truncated by text
;; extraction (missing scheme/host) -- compare with upstream before
;; relying on them; they are reproduced here unchanged.
(deftest slack
  (let [captured (atom {})]
    (with-redefs [client/post (partial capture-post captured)]
      (testing "forms correct slack URL"
        (let [notify (slack/slack "test-account" "test-token" "any" "any")]
          (notify {})
          (is (= (:url @captured)
                 "-account.slack.com/services/hooks/incoming-webhook?token=test-token"))))
      (testing "formats event by default with default formatter"
        (let [notify (slack/slack "any" "any" "test-user" "#test-channel")]
          (notify {:host "localhost" :service "mailer" :state "error"
                   :description "Mailer failed" :metric 42 :tags ["first" "second"]})
          (is (= (json/parse-string (:body @captured))
                 {"attachments" [{"fields" [{"title" "Riemann Event"
                                             "value" "Host: localhost\nService: mailer\nState: error\nDescription: Mailer failed\nMetric: 42\nTags: [\"first\" \"second\"]\n" "short" true}]
                                  "fallback" "*Host:* localhost *Service:* mailer *State:* error *Description:* Mailer failed *Metric:* 42"}]
                  "channel" "#test-channel"
                  "username" "test-user"
                  "icon_emoji" ":warning:"}))))
      (testing "formats event with bundled extended formatter"
        (let [notify (slack/slack any-account (with-formatter slack/extended-formatter))]
          (notify {:host "localhost" :service "mailer" :state "error"
                   :description "Mailer failed" :metric 42 :tags ["first" "second"]})
          (is (= (json/parse-string (:body @captured))
                 {"text" "This event requires your attention!"
                  "attachments" [{"text" "Mailer failed"
                                  "pretext" "Event Details:"
                                  "color" "warning"
                                  "fields" [{"title" "Host" "value" "localhost" "short" true}
                                            {"title" "Service" "value" "mailer" "short" true}
                                            {"title" "Metric" "value" 42 "short" true}
                                            {"title" "State" "value" "error" "short" true}
                                            {"title" "Description" "value" "Mailer failed" "short" true}
                                            {"title" "Tags" "value" "[\"first\" \"second\"]" "short" true}]
                                  "fallback" "*Host:* localhost *Service:* mailer *State:* error *Description:* Mailer failed *Metric:* 42"}]
                  "channel" "any"
                  "username" "any"
                  "icon_emoji" ":warning:"}))))
      (testing "allows formatting characters in main message text with custom formatter"
        (let [fmt (fn [e] {:text (str "</" (:service e) "|" (:service e) ">")})
              notify (slack/slack any-account (with-formatter fmt))]
          (notify {:service "my-service"})
          (is (seq (re-seq #"<\.check\.api/my-service\|my-service>" (:body @captured))))))
      (testing "allows for empty message text"
        (let [notify (slack/slack any-account (with-formatter (constantly {})))]
          (notify {:host "empty"})
          (is (= (:body @captured)
                 (str "{\"channel\":\"any\","
                      "\"username\":\"any\","
                      "\"icon_emoji\":\":warning:\"}")))))
      (testing "specifies username, channel and icon when initializing slacker"
        (let [notify (slack/slack any-account
                                  {:username "test-user" :channel "#test-channel" :icon ":ogre:"
                                   :formatter (constantly {})})]
          (notify [{:host "localhost" :service "mailer"}])
          (is (= (json/parse-string (:body @captured))
                 {"channel" "#test-channel"
                  "username" "test-user"
                  "icon_emoji" ":ogre:"}))))
      (testing "formats multiple events with a custom formatter"
        (let [notify (slack/slack
                       {:account "any" :token "any"}
                       {:username "test-user" :channel "#test-channel" :icon ":ogre:"
                        :formatter (fn [events]
                                     {:text (apply str (map #(str (:tags %)) events))
                                      :icon ":ship:"
                                      :username "another-user"
                                      :channel "#another-channel"
                                      :attachments [{:pretext "pretext"}]})})]
          (notify [{:host "localhost" :service "mailer" :tags ["first" "second"]}
                   {:host "localhost" :service "mailer" :tags ["third" "fourth"]}])
          (is (= (json/parse-string (:body @captured))
                 {"attachments" [{"pretext" "pretext"}]
                  "text" "[\"first\" \"second\"][\"third\" \"fourth\"]"
                  "channel" "#another-channel"
                  "username" "another-user"
                  "icon_emoji" ":ship:"})))))))
| null | https://raw.githubusercontent.com/riemann/riemann/1649687c0bd913c378701ee0b964a9863bde7c7c/test/riemann/slack_test.clj | clojure | (ns riemann.slack-test
(:require [riemann.logging :as logging]
[riemann.slack :as slack]
[cheshire.core :as json]
[clj-http.client :as client]
[clojure.test :refer :all]))
(def api-key (System/getenv "SLACK_API_KEY"))
(def room (System/getenv "SLACK_ALERT_ROOM"))
(def account (System/getenv "SLACK_ALERT_ACCOUNT"))
(def user "Riemann_Slack_Test")
(when-not api-key
(println "export SLACK_API_KEY=\"...\" to run these tests."))
(when-not room
(println "export SLACK_ALERT_ROOM=\"...\" to run these tests."))
(when-not account
(println "export SLACK_ALERT_ACCOUNT=\"...\" to run these tests."))
(logging/init)
(deftest ^:slack ^:integration test_event
(let [slack_connect (slack/slack account api-key user room)]
(slack_connect {:host "localhost"
:service "good event test"
:description "Testing slack.com alerts from riemann"
:metric 42
:state "ok"})))
(defn- capture-post [result-atom url params]
(reset! result-atom {:url url, :body (get-in params [:form-params :payload])}))
(def ^:private any-account {:account "any", :token "any"})
(defn- with-formatter [formatter-fn]
{:username "any", :channel "any", :formatter formatter-fn})
(deftest slack
(let [post-request (atom {})]
(with-redefs [client/post (partial capture-post post-request)]
(testing "forms correct slack URL"
(let [slacker (slack/slack "test-account" "test-token" "any" "any")]
(slacker {})
(is (= (:url @post-request)
"-account.slack.com/services/hooks/incoming-webhook?token=test-token"))))
(testing "formats event by default with default formatter"
(let [slacker (slack/slack "any" "any" "test-user" "#test-channel")]
(slacker {:host "localhost", :service "mailer", :state "error",
:description "Mailer failed", :metric 42, :tags ["first", "second"]})
(is (= (json/parse-string (:body @post-request))
{"attachments" [{"fields" [{"title" "Riemann Event"
"value" "Host: localhost\nService: mailer\nState: error\nDescription: Mailer failed\nMetric: 42\nTags: [\"first\" \"second\"]\n" "short" true}]
"fallback" "*Host:* localhost *Service:* mailer *State:* error *Description:* Mailer failed *Metric:* 42"}]
"channel" "#test-channel"
"username" "test-user"
"icon_emoji" ":warning:"}))))
(testing "formats event with bundled extended formatter"
(let [slacker (slack/slack any-account (with-formatter slack/extended-formatter))]
(slacker {:host "localhost", :service "mailer", :state "error",
:description "Mailer failed", :metric 42, :tags ["first", "second"]})
(is (= (json/parse-string (:body @post-request))
{"text" "This event requires your attention!"
"attachments" [{"text" "Mailer failed"
"pretext" "Event Details:"
"color" "warning"
"fields" [{"title" "Host" "value" "localhost" "short" true}
{"title" "Service" "value" "mailer" "short" true}
{"title" "Metric" "value" 42 "short" true}
{"title" "State" "value" "error" "short" true}
{"title" "Description" "value" "Mailer failed" "short" true}
{"title" "Tags" "value" "[\"first\" \"second\"]" "short" true}]
"fallback" "*Host:* localhost *Service:* mailer *State:* error *Description:* Mailer failed *Metric:* 42"}]
"channel" "any"
"username" "any"
"icon_emoji" ":warning:"}))))
(testing "allows formatting characters in main message text with custom formatter"
(let [formatter (fn [e] {:text (str "</" (:service e) "|" (:service e) ">")})
slacker (slack/slack any-account (with-formatter formatter))]
(slacker {:service "my-service"})
(is (seq (re-seq #"<\.check\.api/my-service\|my-service>" (:body @post-request))))))
(testing "allows for empty message text"
(let [slacker (slack/slack any-account (with-formatter (constantly {})))]
(slacker {:host "empty"})
(is (= (:body @post-request)
(str "{\"channel\":\"any\","
"\"username\":\"any\","
"\"icon_emoji\":\":warning:\"}")))))
(testing "specifies username, channel and icon when initializing slacker"
(let [slacker (slack/slack any-account
{:username "test-user", :channel "#test-channel", :icon ":ogre:"
:formatter (constantly {})})]
(slacker [{:host "localhost", :service "mailer"}])
(is (= (json/parse-string (:body @post-request))
{"channel" "#test-channel"
"username" "test-user"
"icon_emoji" ":ogre:"}))))
(testing "formats multiple events with a custom formatter"
(let [slacker (slack/slack
{:account "any", :token "any"}
{:username "test-user", :channel "#test-channel", :icon ":ogre:"
:formatter (fn [events]
{:text (apply str (map #(str (:tags %)) events))
:icon ":ship:"
:username "another-user"
:channel "#another-channel"
:attachments [{:pretext "pretext"}]})})]
(slacker [{:host "localhost", :service "mailer", :tags ["first" "second"]}
{:host "localhost", :service "mailer", :tags ["third" "fourth"]}])
(is (= (json/parse-string (:body @post-request))
{"attachments" [{"pretext" "pretext"}]
"text" "[\"first\" \"second\"][\"third\" \"fourth\"]"
"channel" "#another-channel"
"username" "another-user"
"icon_emoji" ":ship:"})))))))
| |
7b357064a0d555f8e3c29dd272aae301504a55575b392421a90ea667cddd3313 | deadpendency/deadpendency | PublishComponentResult.hs | # LANGUAGE TemplateHaskell #
-- | Effect declaration for publishing a component's processing result.
module Common.Effect.PublishComponentResult.PublishComponentResult
( PublishComponentResult (..),
publishComponentResult,
)
where
import Common.Effect.PublishComponentResult.Model.ComponentResult
import Control.Effect.TH
-- Higher-order effect GADT (fused-effects' Control.Effect.TH style): one
-- operation that publishes a 'ComponentResult' and yields unit.
-- NOTE(review): 'Type' (Data.Kind) is not imported in this file;
-- presumably a custom prelude provides it -- confirm.
data PublishComponentResult (p :: Type) (m :: Type -> Type) k where
PublishComponentResult :: ComponentResult p -> PublishComponentResult p m ()
-- Template Haskell: generates the smart constructor 'publishComponentResult'.
makeSmartConstructors ''PublishComponentResult
| null | https://raw.githubusercontent.com/deadpendency/deadpendency/170d6689658f81842168b90aa3d9e235d416c8bd/apps/common/src/Common/Effect/PublishComponentResult/PublishComponentResult.hs | haskell | # LANGUAGE TemplateHaskell #
module Common.Effect.PublishComponentResult.PublishComponentResult
( PublishComponentResult (..),
publishComponentResult,
)
where
import Common.Effect.PublishComponentResult.Model.ComponentResult
import Control.Effect.TH
data PublishComponentResult (p :: Type) (m :: Type -> Type) k where
PublishComponentResult :: ComponentResult p -> PublishComponentResult p m ()
makeSmartConstructors ''PublishComponentResult
| |
7a5bd8693b26722a7d78bff21b3020c53d413e036aa6e1f80f8b4d881c2aec4d | gsakkas/rite | 0107.ml | CaseG VarG [(VarPatG,Nothing,IteG EmptyG EmptyG EmptyG)]
match helper with
| f -> if f = b
then (f , false)
else (f , true)
| null | https://raw.githubusercontent.com/gsakkas/rite/958a0ad2460e15734447bc07bd181f5d35956d3b/data/sp14/clusters/0107.ml | ocaml | CaseG VarG [(VarPatG,Nothing,IteG EmptyG EmptyG EmptyG)]
match helper with
| f -> if f = b
then (f , false)
else (f , true)
| |
f05acddb551c0696175f58bd4ebaf4e5cf52b5b2297c20c03d5fb8a840cc9a64 | mauny/the-functional-approach-to-programming | arith.ml | (* +type_big_int+ *)
type sign = Neg | Pos;;
type big_int = {big_sign:sign; big_val:nat};;
(* +type_big_int+ *)
(* +big_int_utilities+ *)
(* Sign of a product: like signs give Pos, unlike signs give Neg. *)
let prod_sign (s1,s2) =
  if s1 = s2 then Pos else Neg;;
(* Embed an OCaml int: sign taken from the int, magnitude as a nat. *)
let big_int_of_int n =
  {big_sign= if n >= 0 then Pos else Neg;
   big_val= nat_of_int (abs n)};;
(* A nat is non-negative, so the sign is always Pos. *)
let big_int_of_nat n = {big_sign=Pos; big_val=n};;
(* Strict order: mixed signs are decided by sign alone; for two negatives
   the nat comparison is flipped.
   NOTE(review): a Neg-signed zero would compare strictly below a
   Pos-signed zero -- confirm zeros are always built with sign Pos
   (as add_big/sub_big/big_int_of_int do). *)
let lt_big n1 n2 =
  match (n1.big_sign,n2.big_sign) with
    (Neg,Pos) -> true
  | (Pos,Neg) -> false
  | (Neg,Neg) -> lt_nat n2.big_val n1.big_val
  | (Pos,Pos) -> lt_nat n1.big_val n2.big_val;;
(* Non-strict counterpart of lt_big. *)
let le_big n1 n2 =
  match (n1.big_sign,n2.big_sign) with
    (Neg,Pos) -> true
  | (Pos,Neg) -> false
  | (Neg,Neg) -> le_nat n2.big_val n1.big_val
  | (Pos,Pos) -> le_nat n1.big_val n2.big_val;;
(* Zero is represented by the empty digit list. *)
let zero_big n = n.big_val=[];;
let pos_big n= n.big_sign=Pos;;
let neg_big n= n.big_sign=Neg;;
(* Signed addition on magnitudes: same signs add the nats; mixed signs
   subtract the smaller magnitude from the larger and take the sign of
   the larger.  Exact cancellation yields the canonical Pos zero.
   NOTE(review): the mixed-sign branches compare with ilt_nat while
   sub_big below uses lt_nat for the same test -- confirm the two
   predicates agree (possible typo). *)
let add_big n1 n2 =
  let (l1,l2) = n1.big_val,n2.big_val in
  match (n1.big_sign, n2.big_sign) with
    (Neg,Pos) -> if l1=l2
      then {big_sign=Pos;big_val=[]}
      else if ilt_nat l1 l2
      then {big_sign=Pos;big_val=sub_nat l2 l1}
      else {big_sign=Neg;big_val=sub_nat l1 l2}
  | (Pos,Neg) -> if l1=l2
      then {big_sign=Pos;big_val=[]}
      else if ilt_nat l1 l2
      then {big_sign=Neg;big_val=sub_nat l2 l1}
      else {big_sign=Pos;big_val=sub_nat l1 l2}
  | (Neg,Neg) -> {big_sign=Neg;big_val=add_nat l1 l2}
  | (Pos,Pos) -> {big_sign=Pos;big_val=add_nat l1 l2};;
(* Signed subtraction: mixed signs add magnitudes; same signs subtract
   the smaller magnitude and pick the sign accordingly.  Equal operands
   give the canonical Pos zero. *)
let sub_big n1 n2 =
  let (l1,l2) = n1.big_val,n2.big_val in
  match (n1.big_sign, n2.big_sign) with
    (Neg,Pos) -> {big_sign=Neg;big_val=add_nat l1 l2}
  | (Pos,Neg) -> {big_sign=Pos;big_val=add_nat l1 l2}
  | (Neg,Neg) -> if l1=l2
      then {big_sign=Pos;big_val=[]}
      else if lt_nat l1 l2
      then {big_sign=Pos;big_val=sub_nat l2 l1}
      else {big_sign=Neg;big_val=sub_nat l1 l2}
  | (Pos,Pos) -> if l1=l2
      then {big_sign=Pos;big_val=[]}
      else if lt_nat l1 l2
      then {big_sign=Neg;big_val=sub_nat l2 l1}
      else {big_sign=Pos;big_val=sub_nat l1 l2};;
(* Product: sign by the rule of signs, magnitudes multiplied as nats. *)
let mult_big n1 n2 =
  {big_sign=prod_sign (n1.big_sign,n2.big_sign);
   big_val= mult_nat n1.big_val n2.big_val};;
(* Quotient and remainder.  div_nat works on digit lists in the reverse
   order from big_val, hence the rev on the way in and out.  Quotient sign
   follows the rule of signs; remainder sign follows the dividend
   (truncated division). *)
let div_big n1 n2 =
  let dvd = rev n1.big_val and dvs= rev n2.big_val in
  let quot,rmd = div_nat dvd dvs in
  match n1.big_sign, n2.big_sign with
    Pos,Pos -> {big_sign=Pos; big_val= rev quot},
               {big_sign=Pos; big_val= rev rmd}
  | Pos,Neg -> {big_sign=Neg; big_val= rev quot},
               {big_sign=Pos; big_val= rev rmd}
  | Neg,Pos -> {big_sign=Neg; big_val= rev quot},
               {big_sign=Neg; big_val= rev rmd}
  | Neg,Neg -> {big_sign=Pos; big_val= rev quot},
               {big_sign=Neg; big_val= rev rmd};;
(* Exponentiation, defined only for two non-negative arguments. *)
let exp_big = fun
    {big_sign=s1; big_val=n1} {big_sign=s2; big_val=n2}
    -> if s1=Pos & s2=Pos
       then {big_sign=Pos; big_val= rev(exp_nat (rev n1) (rev n2))}
       else failwith "exp_big: args must be positive";;
(* Integer square root, defined only for a non-negative argument. *)
let sqrt_big = fun
    {big_sign=s; big_val=n}
    -> if s=Pos
       then {big_sign=Pos; big_val=rev(sqrt_nat(rev n))}
       else failwith "sqrt_big: arg must be positive";;
(* +big_int_utilities+ *)
(* +big_int_parser+ *)
(* Value of one decimal digit character; anything else fails. *)
let int_of_digit = function
    `0`..`9` as c -> (int_of_char c)-(int_of_char `0`)
  | _ -> failwith "Not a Digit";;
(* Stream parser: consume decimal digits, accumulating the value into the
   nat [n] by multiply-by-10 then add-digit; stops at the first non-digit. *)
let rec nat_parser n = function
    [< ' `0`..`9` as c;
       (nat_parser (add_digit_nat (int_of_digit c)
                      (mult_digit_nat 10 n) )) r>]
      -> r
  | [<>] -> n;;
(* Parse a nat from a whole string. *)
let parse_nat s =
  (function [<(nat_parser []) n >] -> n) (stream_of_string s);;
let nat_of_string = parse_nat;;
(* A big_int literal: optional '-' followed by decimal digits. *)
let rec big_parser = function
    [<'`-`; (nat_parser []) n >] -> {big_sign=Neg ;big_val=n}
  | [<(nat_parser []) n >] -> {big_sign=Pos ;big_val=n};;
let parse_big_int s =
  big_parser (stream_of_string s);;
let big_int_of_string = parse_big_int;;
(* +big_int_parser+ *)
(* +big_int_printer+ *)
(* Print one internal digit left-padded to exactly four decimal characters;
   the 4-character limit shows the internal limbs are base 10000. *)
let print_digit_nat n =
  let s= string_of_int n in
  match string_length s with
    1 -> print_string ("000" ^ s)
  | 2 -> print_string ("00" ^ s)
  | 3 -> print_string ("0" ^ s)
  | 4 -> print_string s
  | _ -> failwith "print_digit_nat: wrong digit";;
(* Print a nat: the list is reversed first (so big_val stores limbs
   least-significant first), the leading limb is printed without padding,
   the rest zero-padded.  The empty list prints as 0. *)
let rec print_nat l =
  let rec print_rec = fun
      [] -> ()
    | (a::l) -> print_digit_nat a ; print_rec l in
  match rev l with
    [] -> print_int 0
  | [a] -> print_int a
  | (a::l) -> print_int a ; print_rec l;;
(* Toplevel printer; note it emits a newline BEFORE the number. *)
let print_big_int {big_sign=s; big_val=l}=
  print_newline ();
  (match s with Neg -> print_string "-" | Pos -> ()) ; print_nat l;;
(* +big_int_printer+ *)
new_printer "big_int" print_big_int;;
let n= parse_big_int "999999999999999"
in mult_big n n;;
(* n! as a big_int, by naive recursion on the int argument.
   NOTE(review): diverges for n < 0 -- callers must pass n >= 0. *)
let rec fact n =
  if n = 0 then big_int_of_int 1
  else mult_big (big_int_of_int n) (fact (n-1));;
fact 50;;
(* +type_rat+ *)
type rat = {rat_sign:sign; rat_num:nat ; rat_den:nat};;
(* +type_rat+ *)
(* +rat_utilities+ *)
(* Embed an int as a rational n/1.
   Fixed: the original always used sign Pos, so negative ints were
   silently turned positive (and nat_of_int received a negative value);
   this now mirrors big_int_of_int above. *)
let rat_of_int n = {rat_sign= if n >= 0 then Pos else Neg;
                    rat_num= nat_of_int (abs n);
                    rat_den= [1]};;
(* Embed a nat as a (non-negative) rational n/1. *)
let rat_of_nat n = {rat_sign=Pos ;rat_num=n; rat_den=[1]};;
(* Embed a big_int as a rational n/1, keeping its sign. *)
let rat_of_big_int {big_sign=s; big_val=n} =
  {rat_sign=s ;rat_num=n; rat_den=[1]};;
(* A rational is null iff its numerator is the empty nat. *)
let null_rat {rat_sign=s ;rat_num=n; rat_den=d} =
  n=[];;
(* Equality by cross-multiplication.
   NOTE(review): signs are compared first, so a zero tagged Neg would not
   equal a zero tagged Pos -- confirm zero rationals are always Pos. *)
let eq_rat = fun
    ({rat_sign=s1 ;rat_num=n1; rat_den=d1} as r1)
    ({rat_sign=s2 ;rat_num=n2; rat_den=d2} as r2)
    -> s1=s2 & mult_nat n1 d2 = mult_nat n2 d1 ;;
(* Strict order by cross-multiplication; for two negatives the
   comparison is flipped. *)
let lt_rat = fun
    ({rat_sign=s1 ;rat_num=n1; rat_den=d1} as r1)
    ({rat_sign=s2 ;rat_num=n2; rat_den=d2} as r2)
    -> match (s1,s2) with
         Neg,Pos -> true
       | Pos,Neg -> false
       | Pos,Pos -> lt_nat (mult_nat n1 d2) (mult_nat n2 d1)
       | Neg,Neg -> lt_nat (mult_nat n2 d1) (mult_nat n1 d2) ;;
(* Addition over a common denominator d1*d2 (results are NOT reduced;
   see [reduce]).  Same signs: add the cross products.  Mixed signs with
   r1 positive: subtract the smaller cross product from the larger and
   take the matching sign.  Mixed signs with r1 negative: recurse with
   the arguments swapped (addition is commutative). *)
let rec add_rat = fun
    ({rat_sign=s1 ;rat_num=n1; rat_den=d1} as r1)
    ({rat_sign=s2 ;rat_num=n2; rat_den=d2} as r2)
    -> if s1=s2 then {rat_sign=s1 ;
                      rat_num= add_nat (mult_nat n1 d2) (mult_nat n2 d1);
                      rat_den= mult_nat d1 d2}
       else
       if s1=Pos then
         let x= mult_nat n1 d2
         and y= mult_nat n2 d1 in
         if lt_nat x y then
           {rat_sign=Neg ;
            rat_num= sub_nat y x;
            rat_den= mult_nat d1 d2}
         else
           {rat_sign=Pos;
            rat_num= sub_nat x y;
            rat_den= mult_nat d1 d2}
       else
         add_rat r2 r1;;
(* Subtraction = addition of the operand with its sign flipped. *)
let sub_rat = fun
    r1 {rat_sign=s2 ;rat_num=n2; rat_den=d2}
    -> let s = if s2=Pos then Neg else Pos in
       add_rat r1 {rat_sign=s ;rat_num=n2; rat_den=d2};;
(* Product: rule of signs on signs, nat products on num/den. *)
let mult_rat = fun
    ({rat_sign=s1 ;rat_num=n1; rat_den=d1} as r1)
    ({rat_sign=s2 ;rat_num=n2; rat_den=d2} as r2)
    -> {rat_sign=prod_sign (s1,s2) ;
        rat_num=mult_nat n1 n2;
        rat_den=mult_nat d1 d2};;
(* Division by multiplication with the reciprocal.
   NOTE(review): no check that the divisor is non-null; a null r2 yields
   a rational with an empty denominator -- confirm callers avoid this. *)
let div_rat = fun
    r1 {rat_sign=s2 ;rat_num=n2; rat_den=d2} ->
    mult_rat r1 {rat_sign=s2 ;rat_num=d2; rat_den=n2};;
(* +rat_utilities+ *)
(* +pgcd+ *)
(* Euclid's algorithm on nats, in the digit order expected by div_nat
   (callers in [reduce] pass reversed big_val lists).
   NOTE(review): assumes both arguments are non-null; a null second
   argument would be passed straight to div_nat -- confirm callers
   guarantee non-zero operands. *)
let rec pgcd l1 l2 =
  if l1=l2 then l1 else
  if lt_nat l1 l2 then pgcd l2 l1
  else
    let (q,r) = div_nat l1 l2 in
    if r=[] then l2 else pgcd l2 r ;;
(* +pgcd+ *)
(* +reduce+ *)
(* Put a rational in lowest terms: divide numerator and denominator by
   their gcd.  The rev's convert between big_val order and the order
   div_nat/pgcd work in.  The sign is preserved unchanged. *)
let reduce {rat_sign=s; rat_num=num; rat_den=den}=
  let rnum= rev num and rden= rev den in
  let p= pgcd rnum rden in
  {rat_sign=s; rat_num=rev(fst (div_nat rnum p));
   rat_den=rev(fst(div_nat rden p))};;
(* +reduce+ *)
(* Tests
reduce
{ rat_sign = Pos ; rat_num=(fact 40).big_val ; rat_den=(fact 50).big_val } ; ;
reduce
{ rat_sign = Pos ; rat_num=(fact 38).big_val ; rat_den=(fact 40).big_val } ; ;
reduce
{ rat_sign = Pos ; rat_num=(fact 40).big_val ; rat_den=(fact 50).big_val } ; ;
let add_rrat r1 r2= reduce ( add_rat r1 r2 ) ; ;
let sub_rrat r1 reduce ( sub_rat r1 r2 ) ; ;
let mult_rrat r1 r2= reduce ( mult_rat r1 r2 ) ; ;
let div_rrat r1 reduce ( div_rat r1 r2 ) ; ;
reduce
{rat_sign=Pos; rat_num=(fact 40).big_val; rat_den=(fact 50).big_val};;
reduce
{rat_sign=Pos; rat_num=(fact 38).big_val; rat_den=(fact 40).big_val};;
reduce
{rat_sign=Pos; rat_num=(fact 40).big_val; rat_den=(fact 50).big_val};;
let add_rrat r1 r2= reduce (add_rat r1 r2);;
let sub_rrat r1 r2= reduce (sub_rat r1 r2);;
let mult_rrat r1 r2= reduce (mult_rat r1 r2);;
let div_rrat r1 r2= reduce (div_rat r1 r2);;
*)
(* +rat_parser+ *)
let rec lexer = function
[<'`-`; (nat []) n; rest_lexer d >]
-> {rat_sign=Neg ;rat_num= n; rat_den= d}
| [<(nat []) n; rest_lexer d >]
-> {rat_sign=Pos ;rat_num= n; rat_den= d}
and rest_lexer =
function [<'`/`; (nat []) d >] -> d
| [< >] -> [1] ;;
let parse_rat s = reduce(lexer (stream_of_string s));;
let rat_of_string = parse_rat;;
(* +rat_parser+ *)
(* +rat_printer+ *)
let max_int m n = if m>n then m else n;;
let print_rat = fun
{rat_sign=s; rat_num=num; rat_den=den}
-> (match s with Neg -> print_string "-" | Pos -> ()) ;
print_nat num;
print_string "/";
print_nat den;;
new_printer "rat" print_rat;;
(* +rat_printer+ *)
Tests
parse_rat " 4446464/32 " ; ;
parse_rat " 4446464/3257 " ; ;
parse_rat " 4446464/3256 " ; ;
parse_rat "4446464/32";;
parse_rat "4446464/3257";;
parse_rat "4446464/3256";;
*)
(* +rat_printer+ *)
let default_frac_length= ref 3;;
let set_frac_length n = default_frac_length:= n;;
let print_frac_rat = fun
{rat_sign=s; rat_num=num; rat_den=den}
-> (match s with Neg -> print_string "-" | Pos -> ()) ;
let q,r= div_nat (rev num) (rev den) in
let frac_part= compute_frac_part r (rev den) !default_frac_length in
begin print_nat (rev q); print_string "."; print_nat (rev frac_part) end;;
new_printer "rat" print_frac_rat;;
(* +rat_printer+ *)
let parse_rat " 86400 " ; ;
let year= mult_rat day ( parse_rat " 365 " ) ; ;
let years= mult_rat ( parse_rat " 114 " ) year ; ;
let yearb= mult_rat day ( parse_rat " 366 " ) ; ;
let yearsb= mult_rat ( parse_rat " 28 " ) ; ;
let year94= mult_rat day ( parse_rat " 243 " ) ; ;
let total= add_rat years ( add_rat ) ; ;
let res= div_rat ( parse_rat " 25000000000 " ) total ; ;
CONCOURS CACHOUS LAJAUNIE
let day= parse_rat "86400";;
let year= mult_rat day (parse_rat "365");;
let years= mult_rat (parse_rat "114") year;;
let yearb= mult_rat day (parse_rat "366");;
let yearsb= mult_rat (parse_rat "28") yearb;;
let year94= mult_rat day (parse_rat "243");;
let total= add_rat years (add_rat yearsb year94);;
let res= div_rat (parse_rat "25000000000") total;;
*)
(* +nombre_e+ *)
let rat0= {rat_sign=Pos; rat_num=[]; rat_den=[1]};;
let rec sigma f (a,b)=
if a>b then rat0
else add_rat (f a) (sigma f (a+1,b));;
let inv_fact n =
{rat_sign=Pos; rat_num=[1]; rat_den=(fact n).big_val};;
let e n = sigma inv_fact (0,n);;
(* +nombre_e+ *)
Calcul de E
a ) sans effectuer les reductions a chaque operation
let rat0= { rat_sign = Pos ; rat_num= [ ] ; rat_den=[1 ] } ; ;
let rec sigma f ( a , b)=
if a > b then rat0
else add_rat ( f a ) ( sigma f ( a+1,b ) ) ; ;
let inv_fact n = { rat_sign = Pos ; rat_num=[1 ] ;
rat_den=(fact n).big_val } ; ;
let E n = sigma inv_fact ( 0,n ) ; ;
b ) en effectuant les reductions
let rec sigma f ( a , b)=
if a > b then rat0
else add_rrat ( f a ) ( sigma f ( a+1,b ) ) ; ;
let E n = reduce ( sigma ( fun n - > { rat_sign = Pos ; rat_num=[1 ] ;
rat_den=(fact n).big_val } )
( 0,n ) ) ; ;
C'est en fait plus long : 43s au lieu de 39s pour E 20 sur PwB
a) sans effectuer les reductions a chaque operation
let rat0= {rat_sign=Pos; rat_num=[]; rat_den=[1]};;
let rec sigma f (a,b)=
if a>b then rat0
else add_rat (f a) (sigma f (a+1,b));;
let inv_fact n = {rat_sign=Pos; rat_num=[1];
rat_den=(fact n).big_val};;
let E n = sigma inv_fact (0,n);;
b) en effectuant les reductions
let rec sigma f (a,b)=
if a>b then rat0
else add_rrat (f a) (sigma f (a+1,b));;
let E n = reduce (sigma (fun n -> {rat_sign=Pos; rat_num=[1];
rat_den=(fact n).big_val})
(0,n));;
C'est en fait plus long: 43s au lieu de 39s pour E 20 sur PwB
*)
let sqrt640320 digits =
let pow = rev (exp_nat [10] [digits]) in
(rev(sqrt_nat (rev (mult_nat (mult_nat [320;64] pow) pow))), pow);;
let size_nat = list_length;;
let test (x,y,z,t) = size_nat x + size_nat y + z > size_nat t;;
let mult_nat_list = it_list mult_nat [1];;
let approx_pi digits =
let prod = [12]
and sum = [1409;1359]
and D= [320;64]
and N= mult_nat [1409;1359] [12]
and sn = []
and binom= [1]
and pown3 = []
(* and sqrt,pow = sqrt640320 (digits-2) *)
and pow3= rev(exp_nat (rev [320;64]) [3]) in
let sizeB= 1+ size_nat pow in
approx_rec prod sum pown3 D N sn binom true
where rec approx_rec prod sum pown3 D N sn binom pos=
if test(prod,sum,sizeB,D)
then let prod = mult_nat_list [[8];
add_digit_nat 1 sn;
add_digit_nat 3 sn;
add_digit_nat 5 sn;
prod] in
let sum = add_nat [134;4514;5] sum in
let pown3 = add_nat binom pown3 in
let D = mult_nat_list [pown3;pow3;D] in
let N = (if pos then sub_nat else add_nat)
(mult_nat_list [pown3;pow3;N])
(mult_nat_list [prod; sum]) in
let sn = add_digit_nat 6 sn in
let binom = add_nat sn binom in
approx_rec prod sum pown3 D N sn binom (not pos)
else div_rat (rat_of_nat (mult_nat sqrt D))
(rat_of_nat (mult_nat N pow));;
let sqrt640320' digits=
let pow = exp_big (big_int_of_nat [10]) (big_int_of_nat [digits]) in
(sqrt_big (mult_big (mult_big (big_int_of_string "640320") pow) pow), pow);;
let size_big n = list_length n.big_val;;
let test' (x,y,z,t) = size_big x + size_big y + z > size_big t;;
let mult_big_list = it_list mult_big (big_int_of_nat [1]);;
let approx_pi' digits =
let prod = big_int_of_string "12"
and sum = big_int_of_string "13591409"
and D = big_int_of_string "640320"
and N = mult_big (big_int_of_string "13591409")
(big_int_of_string "12")
and sn = big_int_of_string "0"
and binom = big_int_of_string "1"
and pown3 = big_int_of_string "0"
and (sqrt,pow) = sqrt640320' (digits-2)
and pow3 = exp_big (big_int_of_string "640320") (big_int_of_string "3") in
let sizeB = 1+ size_big pow in
approx_rec prod sum pown3 D N sn binom
where rec approx_rec prod sum pown3 D N sn binom =
if test'(prod,sum,sizeB,D)
then let prod= mult_big_list [big_int_of_string "-8";
add_big (big_int_of_string "1") sn;
add_big (big_int_of_string "3") sn;
add_big (big_int_of_string "5") sn;
prod] in
let sum = add_big (big_int_of_string "545140134") sum in
let pown3 = add_big binom pown3 in
let D = mult_big_list [pown3;pow3;D] in
let N = add_big (mult_big_list [pown3;pow3;N])
(mult_big_list [prod; sum]) in
let sn = add_big (big_int_of_string "6") sn in
let binom = add_big sn binom in
approx_rec prod sum pown3 D N sn binom
else div_rat (rat_of_big_int (mult_big sqrt D))
(rat_of_big_int (mult_big N pow));;
| null | https://raw.githubusercontent.com/mauny/the-functional-approach-to-programming/1ec8bed5d33d3a67bbd67d09afb3f5c3c8978838/cl-75/Arith/arith.ml | ocaml | +type_big_int+
+type_big_int+
+big_int_utilities+
+big_int_utilities+
+big_int_parser+
+big_int_parser+
+big_int_printer+
+big_int_printer+
+type_rat+
+type_rat+
+rat_utilities+
+rat_utilities+
+pgcd+
+pgcd+
+reduce+
+reduce+
+rat_parser+
+rat_parser+
+rat_printer+
+rat_printer+
+rat_printer+
+rat_printer+
+nombre_e+
+nombre_e+
and sqrt,pow = sqrt640320 (digits-2) |
type sign = Neg | Pos;;
type big_int = {big_sign:sign; big_val:nat};;
let prod_sign = fun
(Neg,Neg) -> Pos
| (Neg,Pos) -> Neg
| (Pos,Neg) -> Neg
| (Pos,Pos) -> Pos;;
let big_int_of_int n =
{big_sign= if n >= 0 then Pos else Neg;
big_val= nat_of_int (abs n)};;
let big_int_of_nat n = {big_sign=Pos; big_val=n};;
let lt_big n1 n2 =
match (n1.big_sign,n2.big_sign) with
(Neg,Pos) -> true
| (Pos,Neg) -> false
| (Neg,Neg) -> lt_nat n2.big_val n1.big_val
| (Pos,Pos) -> lt_nat n1.big_val n2.big_val;;
let le_big n1 n2 =
match (n1.big_sign,n2.big_sign) with
(Neg,Pos) -> true
| (Pos,Neg) -> false
| (Neg,Neg) -> le_nat n2.big_val n1.big_val
| (Pos,Pos) -> le_nat n1.big_val n2.big_val;;
let zero_big n = n.big_val=[];;
let pos_big n= n.big_sign=Pos;;
let neg_big n= n.big_sign=Neg;;
let add_big n1 n2 =
let (l1,l2) = n1.big_val,n2.big_val in
match (n1.big_sign, n2.big_sign) with
(Neg,Pos) -> if l1=l2
then {big_sign=Pos;big_val=[]}
else if ilt_nat l1 l2
then {big_sign=Pos;big_val=sub_nat l2 l1}
else {big_sign=Neg;big_val=sub_nat l1 l2}
| (Pos,Neg) -> if l1=l2
then {big_sign=Pos;big_val=[]}
else if ilt_nat l1 l2
then {big_sign=Neg;big_val=sub_nat l2 l1}
else {big_sign=Pos;big_val=sub_nat l1 l2}
| (Neg,Neg) -> {big_sign=Neg;big_val=add_nat l1 l2}
| (Pos,Pos) -> {big_sign=Pos;big_val=add_nat l1 l2};;
let sub_big n1 n2 =
let (l1,l2) = n1.big_val,n2.big_val in
match (n1.big_sign, n2.big_sign) with
(Neg,Pos) -> {big_sign=Neg;big_val=add_nat l1 l2}
| (Pos,Neg) -> {big_sign=Pos;big_val=add_nat l1 l2}
| (Neg,Neg) -> if l1=l2
then {big_sign=Pos;big_val=[]}
else if lt_nat l1 l2
then {big_sign=Pos;big_val=sub_nat l2 l1}
else {big_sign=Neg;big_val=sub_nat l1 l2}
| (Pos,Pos) -> if l1=l2
then {big_sign=Pos;big_val=[]}
else if lt_nat l1 l2
then {big_sign=Neg;big_val=sub_nat l2 l1}
else {big_sign=Pos;big_val=sub_nat l1 l2};;
let mult_big n1 n2 =
{big_sign=prod_sign (n1.big_sign,n2.big_sign);
big_val= mult_nat n1.big_val n2.big_val};;
let div_big n1 n2 =
let dvd = rev n1.big_val and dvs= rev n2.big_val in
let quot,rmd = div_nat dvd dvs in
match n1.big_sign, n2.big_sign with
Pos,Pos -> {big_sign=Pos; big_val= rev quot},
{big_sign=Pos; big_val= rev rmd}
| Pos,Neg -> {big_sign=Neg; big_val= rev quot},
{big_sign=Pos; big_val= rev rmd}
| Neg,Pos -> {big_sign=Neg; big_val= rev quot},
{big_sign=Neg; big_val= rev rmd}
| Neg,Neg -> {big_sign=Pos; big_val= rev quot},
{big_sign=Neg; big_val= rev rmd};;
let exp_big = fun
{big_sign=s1; big_val=n1} {big_sign=s2; big_val=n2}
-> if s1=Pos & s2=Pos
then {big_sign=Pos; big_val= rev(exp_nat (rev n1) (rev n2))}
else failwith "exp_big: args must be positive";;
let sqrt_big = fun
{big_sign=s; big_val=n}
-> if s=Pos
then {big_sign=Pos; big_val=rev(sqrt_nat(rev n))}
else failwith "sqrt_big: arg must be positive";;
let int_of_digit = function
`0`..`9` as c -> (int_of_char c)-(int_of_char `0`)
| _ -> failwith "Not a Digit";;
let rec nat_parser n = function
[< ' `0`..`9` as c;
(nat_parser (add_digit_nat (int_of_digit c)
(mult_digit_nat 10 n) )) r>]
-> r
| [<>] -> n;;
let parse_nat s =
(function [<(nat_parser []) n >] -> n) (stream_of_string s);;
let nat_of_string = parse_nat;;
let rec big_parser = function
[<'`-`; (nat_parser []) n >] -> {big_sign=Neg ;big_val=n}
| [<(nat_parser []) n >] -> {big_sign=Pos ;big_val=n};;
let parse_big_int s =
big_parser (stream_of_string s);;
let big_int_of_string = parse_big_int;;
let print_digit_nat n =
let s= string_of_int n in
match string_length s with
1 -> print_string ("000" ^ s)
| 2 -> print_string ("00" ^ s)
| 3 -> print_string ("0" ^ s)
| 4 -> print_string s
| _ -> failwith "print_digit_nat: wrong digit";;
let rec print_nat l =
let rec print_rec = fun
[] -> ()
| (a::l) -> print_digit_nat a ; print_rec l in
match rev l with
[] -> print_int 0
| [a] -> print_int a
| (a::l) -> print_int a ; print_rec l;;
let print_big_int {big_sign=s; big_val=l}=
print_newline ();
(match s with Neg -> print_string "-" | Pos -> ()) ; print_nat l;;
new_printer "big_int" print_big_int;;
let n= parse_big_int "999999999999999"
in mult_big n n;;
let rec fact n =
if n = 0 then big_int_of_int 1
else mult_big (big_int_of_int n) (fact (n-1));;
fact 50;;
type rat = {rat_sign:sign; rat_num:nat ; rat_den:nat};;
let rat_of_int n = {rat_sign=Pos ;
rat_num=nat_of_int n;
rat_den= [1]};;
let rat_of_nat n = {rat_sign=Pos ;rat_num=n; rat_den=[1]};;
let rat_of_big_int {big_sign=s; big_val=n} =
{rat_sign=s ;rat_num=n; rat_den=[1]};;
let null_rat {rat_sign=s ;rat_num=n; rat_den=d} =
n=[];;
let eq_rat = fun
({rat_sign=s1 ;rat_num=n1; rat_den=d1} as r1)
({rat_sign=s2 ;rat_num=n2; rat_den=d2} as r2)
-> s1=s2 & mult_nat n1 d2 = mult_nat n2 d1 ;;
let lt_rat = fun
({rat_sign=s1 ;rat_num=n1; rat_den=d1} as r1)
({rat_sign=s2 ;rat_num=n2; rat_den=d2} as r2)
-> match (s1,s2) with
Neg,Pos -> true
| Pos,Neg -> false
| Pos,Pos -> lt_nat (mult_nat n1 d2) (mult_nat n2 d1)
| Neg,Neg -> lt_nat (mult_nat n2 d1) (mult_nat n1 d2) ;;
let rec add_rat = fun
({rat_sign=s1 ;rat_num=n1; rat_den=d1} as r1)
({rat_sign=s2 ;rat_num=n2; rat_den=d2} as r2)
-> if s1=s2 then {rat_sign=s1 ;
rat_num= add_nat (mult_nat n1 d2) (mult_nat n2 d1);
rat_den= mult_nat d1 d2}
else
if s1=Pos then
let x= mult_nat n1 d2
and y= mult_nat n2 d1 in
if lt_nat x y then
{rat_sign=Neg ;
rat_num= sub_nat y x;
rat_den= mult_nat d1 d2}
else
{rat_sign=Pos;
rat_num= sub_nat x y;
rat_den= mult_nat d1 d2}
else
add_rat r2 r1;;
let sub_rat = fun
r1 {rat_sign=s2 ;rat_num=n2; rat_den=d2}
-> let s = if s2=Pos then Neg else Pos in
add_rat r1 {rat_sign=s ;rat_num=n2; rat_den=d2};;
let mult_rat = fun
({rat_sign=s1 ;rat_num=n1; rat_den=d1} as r1)
({rat_sign=s2 ;rat_num=n2; rat_den=d2} as r2)
-> {rat_sign=prod_sign (s1,s2) ;
rat_num=mult_nat n1 n2;
rat_den=mult_nat d1 d2};;
let div_rat = fun
r1 {rat_sign=s2 ;rat_num=n2; rat_den=d2} ->
mult_rat r1 {rat_sign=s2 ;rat_num=d2; rat_den=n2};;
let rec pgcd l1 l2 =
if l1=l2 then l1 else
if lt_nat l1 l2 then pgcd l2 l1
else
let (q,r) = div_nat l1 l2 in
if r=[] then l2 else pgcd l2 r ;;
let reduce {rat_sign=s; rat_num=num; rat_den=den}=
let rnum= rev num and rden= rev den in
let p= pgcd rnum rden in
{rat_sign=s; rat_num=rev(fst (div_nat rnum p));
rat_den=rev(fst(div_nat rden p))};;
Tests
reduce
{ rat_sign = Pos ; rat_num=(fact 40).big_val ; rat_den=(fact 50).big_val } ; ;
reduce
{ rat_sign = Pos ; rat_num=(fact 38).big_val ; rat_den=(fact 40).big_val } ; ;
reduce
{ rat_sign = Pos ; rat_num=(fact 40).big_val ; rat_den=(fact 50).big_val } ; ;
let add_rrat r1 r2= reduce ( add_rat r1 r2 ) ; ;
let sub_rrat r1 reduce ( sub_rat r1 r2 ) ; ;
let mult_rrat r1 r2= reduce ( mult_rat r1 r2 ) ; ;
let div_rrat r1 reduce ( div_rat r1 r2 ) ; ;
reduce
{rat_sign=Pos; rat_num=(fact 40).big_val; rat_den=(fact 50).big_val};;
reduce
{rat_sign=Pos; rat_num=(fact 38).big_val; rat_den=(fact 40).big_val};;
reduce
{rat_sign=Pos; rat_num=(fact 40).big_val; rat_den=(fact 50).big_val};;
let add_rrat r1 r2= reduce (add_rat r1 r2);;
let sub_rrat r1 r2= reduce (sub_rat r1 r2);;
let mult_rrat r1 r2= reduce (mult_rat r1 r2);;
let div_rrat r1 r2= reduce (div_rat r1 r2);;
*)
let rec lexer = function
[<'`-`; (nat []) n; rest_lexer d >]
-> {rat_sign=Neg ;rat_num= n; rat_den= d}
| [<(nat []) n; rest_lexer d >]
-> {rat_sign=Pos ;rat_num= n; rat_den= d}
and rest_lexer =
function [<'`/`; (nat []) d >] -> d
| [< >] -> [1] ;;
let parse_rat s = reduce(lexer (stream_of_string s));;
let rat_of_string = parse_rat;;
let max_int m n = if m>n then m else n;;
let print_rat = fun
{rat_sign=s; rat_num=num; rat_den=den}
-> (match s with Neg -> print_string "-" | Pos -> ()) ;
print_nat num;
print_string "/";
print_nat den;;
new_printer "rat" print_rat;;
Tests
parse_rat " 4446464/32 " ; ;
parse_rat " 4446464/3257 " ; ;
parse_rat " 4446464/3256 " ; ;
parse_rat "4446464/32";;
parse_rat "4446464/3257";;
parse_rat "4446464/3256";;
*)
let default_frac_length= ref 3;;
let set_frac_length n = default_frac_length:= n;;
let print_frac_rat = fun
{rat_sign=s; rat_num=num; rat_den=den}
-> (match s with Neg -> print_string "-" | Pos -> ()) ;
let q,r= div_nat (rev num) (rev den) in
let frac_part= compute_frac_part r (rev den) !default_frac_length in
begin print_nat (rev q); print_string "."; print_nat (rev frac_part) end;;
new_printer "rat" print_frac_rat;;
let parse_rat " 86400 " ; ;
let year= mult_rat day ( parse_rat " 365 " ) ; ;
let years= mult_rat ( parse_rat " 114 " ) year ; ;
let yearb= mult_rat day ( parse_rat " 366 " ) ; ;
let yearsb= mult_rat ( parse_rat " 28 " ) ; ;
let year94= mult_rat day ( parse_rat " 243 " ) ; ;
let total= add_rat years ( add_rat ) ; ;
let res= div_rat ( parse_rat " 25000000000 " ) total ; ;
CONCOURS CACHOUS LAJAUNIE
let day= parse_rat "86400";;
let year= mult_rat day (parse_rat "365");;
let years= mult_rat (parse_rat "114") year;;
let yearb= mult_rat day (parse_rat "366");;
let yearsb= mult_rat (parse_rat "28") yearb;;
let year94= mult_rat day (parse_rat "243");;
let total= add_rat years (add_rat yearsb year94);;
let res= div_rat (parse_rat "25000000000") total;;
*)
let rat0= {rat_sign=Pos; rat_num=[]; rat_den=[1]};;
let rec sigma f (a,b)=
if a>b then rat0
else add_rat (f a) (sigma f (a+1,b));;
let inv_fact n =
{rat_sign=Pos; rat_num=[1]; rat_den=(fact n).big_val};;
let e n = sigma inv_fact (0,n);;
Calcul de E
a ) sans effectuer les reductions a chaque operation
let rat0= { rat_sign = Pos ; rat_num= [ ] ; rat_den=[1 ] } ; ;
let rec sigma f ( a , b)=
if a > b then rat0
else add_rat ( f a ) ( sigma f ( a+1,b ) ) ; ;
let inv_fact n = { rat_sign = Pos ; rat_num=[1 ] ;
rat_den=(fact n).big_val } ; ;
let E n = sigma inv_fact ( 0,n ) ; ;
b ) en effectuant les reductions
let rec sigma f ( a , b)=
if a > b then rat0
else add_rrat ( f a ) ( sigma f ( a+1,b ) ) ; ;
let E n = reduce ( sigma ( fun n - > { rat_sign = Pos ; rat_num=[1 ] ;
rat_den=(fact n).big_val } )
( 0,n ) ) ; ;
C'est en fait plus long : 43s au lieu de 39s pour E 20 sur PwB
a) sans effectuer les reductions a chaque operation
let rat0= {rat_sign=Pos; rat_num=[]; rat_den=[1]};;
let rec sigma f (a,b)=
if a>b then rat0
else add_rat (f a) (sigma f (a+1,b));;
let inv_fact n = {rat_sign=Pos; rat_num=[1];
rat_den=(fact n).big_val};;
let E n = sigma inv_fact (0,n);;
b) en effectuant les reductions
let rec sigma f (a,b)=
if a>b then rat0
else add_rrat (f a) (sigma f (a+1,b));;
let E n = reduce (sigma (fun n -> {rat_sign=Pos; rat_num=[1];
rat_den=(fact n).big_val})
(0,n));;
C'est en fait plus long: 43s au lieu de 39s pour E 20 sur PwB
*)
let sqrt640320 digits =
let pow = rev (exp_nat [10] [digits]) in
(rev(sqrt_nat (rev (mult_nat (mult_nat [320;64] pow) pow))), pow);;
let size_nat = list_length;;
let test (x,y,z,t) = size_nat x + size_nat y + z > size_nat t;;
let mult_nat_list = it_list mult_nat [1];;
let approx_pi digits =
let prod = [12]
and sum = [1409;1359]
and D= [320;64]
and N= mult_nat [1409;1359] [12]
and sn = []
and binom= [1]
and pown3 = []
and pow3= rev(exp_nat (rev [320;64]) [3]) in
let sizeB= 1+ size_nat pow in
approx_rec prod sum pown3 D N sn binom true
where rec approx_rec prod sum pown3 D N sn binom pos=
if test(prod,sum,sizeB,D)
then let prod = mult_nat_list [[8];
add_digit_nat 1 sn;
add_digit_nat 3 sn;
add_digit_nat 5 sn;
prod] in
let sum = add_nat [134;4514;5] sum in
let pown3 = add_nat binom pown3 in
let D = mult_nat_list [pown3;pow3;D] in
let N = (if pos then sub_nat else add_nat)
(mult_nat_list [pown3;pow3;N])
(mult_nat_list [prod; sum]) in
let sn = add_digit_nat 6 sn in
let binom = add_nat sn binom in
approx_rec prod sum pown3 D N sn binom (not pos)
else div_rat (rat_of_nat (mult_nat sqrt D))
(rat_of_nat (mult_nat N pow));;
let sqrt640320' digits=
let pow = exp_big (big_int_of_nat [10]) (big_int_of_nat [digits]) in
(sqrt_big (mult_big (mult_big (big_int_of_string "640320") pow) pow), pow);;
let size_big n = list_length n.big_val;;
let test' (x,y,z,t) = size_big x + size_big y + z > size_big t;;
let mult_big_list = it_list mult_big (big_int_of_nat [1]);;
let approx_pi' digits =
let prod = big_int_of_string "12"
and sum = big_int_of_string "13591409"
and D = big_int_of_string "640320"
and N = mult_big (big_int_of_string "13591409")
(big_int_of_string "12")
and sn = big_int_of_string "0"
and binom = big_int_of_string "1"
and pown3 = big_int_of_string "0"
and (sqrt,pow) = sqrt640320' (digits-2)
and pow3 = exp_big (big_int_of_string "640320") (big_int_of_string "3") in
let sizeB = 1+ size_big pow in
approx_rec prod sum pown3 D N sn binom
where rec approx_rec prod sum pown3 D N sn binom =
if test'(prod,sum,sizeB,D)
then let prod= mult_big_list [big_int_of_string "-8";
add_big (big_int_of_string "1") sn;
add_big (big_int_of_string "3") sn;
add_big (big_int_of_string "5") sn;
prod] in
let sum = add_big (big_int_of_string "545140134") sum in
let pown3 = add_big binom pown3 in
let D = mult_big_list [pown3;pow3;D] in
let N = add_big (mult_big_list [pown3;pow3;N])
(mult_big_list [prod; sum]) in
let sn = add_big (big_int_of_string "6") sn in
let binom = add_big sn binom in
approx_rec prod sum pown3 D N sn binom
else div_rat (rat_of_big_int (mult_big sqrt D))
(rat_of_big_int (mult_big N pow));;
|
40f5e1cf966ec80f277f1edea11dbd0668c946e0fbf147b244ae4c4d1dbb26da | smanek/trivial-lisp-webapp | util.lisp | -*- Mode : LISP ; Syntax : COMMON - LISP ; Package : HUNCHENTOOT ; Base : 10 -*-
$ Header : /usr / local / cvsrep / hunchentoot / util.lisp , v 1.35 2008/04/08 14:39:18 edi Exp $
Copyright ( c ) 2004 - 2009 , Dr. . All rights reserved .
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;; * Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;; * Redistributions in binary form must reproduce the above
;;; copyright notice, this list of conditions and the following
;;; disclaimer in the documentation and/or other materials
;;; provided with the distribution.
;;; THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
;;; OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
;;; WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
;;; ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
;;; DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
;;; GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
;;; WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
(in-package :hunchentoot)
(defun starts-with-p (seq subseq &key (test 'eql))
"Tests whether the sequence SEQ starts with the sequence
SUBSEQ. Individual elements are compared with TEST."
(let* ((length (length subseq))
(mismatch (mismatch subseq seq
:test test)))
(or (null mismatch)
(<= length mismatch))))
(defun starts-with-one-of-p (seq subseq-list &key (test 'eql))
"Tests whether the sequence SEQ starts with one of the
sequences in SUBSEQ-LIST. Individual elements are compared with
TEST."
(some (lambda (subseq)
(starts-with-p seq subseq :test test))
subseq-list))
(defun create-random-string (&optional (n 10) (base 16))
"Returns a random number \(as a string) with base BASE and N
digits."
(with-output-to-string (s)
(dotimes (i n)
(format s "~VR" base
(random base *the-random-state*)))))
(defun reason-phrase (return-code)
"Returns a reason phrase for the HTTP return code RETURN-CODE
\(which should be an integer) or NIL for return codes Hunchentoot
doesn't know."
(gethash return-code *http-reason-phrase-map*))
(defgeneric assoc* (thing alist)
(:documentation "Similar to CL:ASSOC, but 'does the right thing' if
THING is a string or a symbol.")
(:method ((thing symbol) alist)
(assoc thing alist :test #'eq))
(:method ((thing string) alist)
(assoc thing alist :test #'string-equal))
(:method (thing alist)
(assoc thing alist :test #'eql)))
(defun md5-hex (string)
"Calculates the md5 sum of the string STRING and returns it as a hex string."
(with-output-to-string (s)
(loop for code across (md5:md5sum-sequence (coerce string 'simple-string))
do (format s "~2,'0x" code))))
(defun escape-for-html (string)
"Escapes the characters #\\<, #\\>, #\\', #\\\", and #\\& for HTML output."
(with-output-to-string (out)
(with-input-from-string (in string)
(loop for char = (read-char in nil nil)
while char
do (case char
((#\<) (write-string "<" out))
((#\>) (write-string ">" out))
((#\") (write-string """ out))
((#\') (write-string "'" out))
((#\&) (write-string "&" out))
(otherwise (write-char char out)))))))
(defun http-token-p (token)
"Tests whether TOKEN is a string which is a valid 'token'
according to HTTP/1.1 \(RFC 2068)."
(and (stringp token)
(plusp (length token))
(every (lambda (char)
CHAR is US - ASCII but not control character or ESC
(< 31 (char-code char) 127)
;; CHAR is not 'tspecial'
(not (find char "()<>@,;:\\\"/[]?={} " :test #'char=))))
token)))
(defun rfc-1123-date (&optional (time (get-universal-time)))
"Generates a time string according to RFC 1123. Default is current time."
(multiple-value-bind
(second minute hour date month year day-of-week)
(decode-universal-time time 0)
(format nil "~A, ~2,'0d ~A ~4d ~2,'0d:~2,'0d:~2,'0d GMT"
(svref +day-names+ day-of-week)
date
(svref +month-names+ (1- month))
year
hour
minute
second)))
(defun iso-time (&optional (time (get-universal-time)))
"Returns the universal time TIME as a string in full ISO format."
(multiple-value-bind (second minute hour date month year)
(decode-universal-time time)
(format nil "~4,'0d-~2,'0d-~2,'0d ~2,'0d:~2,'0d:~2,'0d"
year month date hour minute second)))
(let ((counter 0))
(declare (ignorable counter))
(defun make-tmp-file-name (&optional (prefix "hunchentoot"))
"Generates a unique name for a temporary file. This function is
called from the RFC2388 library when a file is uploaded."
(let ((tmp-file-name
#+:allegro
(pathname (system:make-temp-file-name prefix *tmp-directory*))
#-:allegro
(loop for pathname = (make-pathname :name (format nil "~A-~A"
prefix (incf counter))
:type nil
:defaults *tmp-directory*)
unless (probe-file pathname)
return pathname)))
(push tmp-file-name *tmp-files*)
;; maybe call hook for file uploads
(when *file-upload-hook*
(funcall *file-upload-hook* tmp-file-name))
tmp-file-name)))
(defun quote-string (string)
"Quotes string according to RFC 2616's definition of `quoted-string'."
(with-output-to-string (out)
(with-input-from-string (in string)
(loop for char = (read-char in nil nil)
while char
unless (or (char< char #\Space)
(char= char #\Rubout))
do (case char
((#\\) (write-string "\\\\" out))
((#\") (write-string "\\\"" out))
(otherwise (write-char char out)))))))
(defmacro upgrade-vector (vector new-type &key converter)
"Returns a vector with the same length and the same elements as
VECTOR \(a variable holding a vector) but having element type
NEW-TYPE. If CONVERTER is not NIL, it should designate a function
which will be applied to each element of VECTOR before the result is
stored in the new vector. The resulting vector will have a fill
pointer set to its end.
The macro also uses SETQ to store the new vector in VECTOR."
`(setq ,vector
(loop with length = (length ,vector)
with new-vector = (make-array length
:element-type ,new-type
:fill-pointer length)
for i below length
do (setf (aref new-vector i) ,(if converter
`(funcall ,converter (aref ,vector i))
`(aref ,vector i)))
finally (return new-vector))))
(defun url-decode (string &optional (external-format *hunchentoot-default-external-format*))
"Decodes a URL-encoded STRING which is assumed to be encoded using
the external format EXTERNAL-FORMAT."
(when (zerop (length string))
(return-from url-decode ""))
(let ((vector (make-array (length string) :element-type 'octet :fill-pointer 0))
(i 0)
unicodep)
(loop
(unless (< i (length string))
(return))
(let ((char (aref string i)))
(labels ((decode-hex (length)
(prog1
(parse-integer string :start i :end (+ i length) :radix 16)
(incf i length)))
(push-integer (integer)
(vector-push integer vector))
(peek ()
(aref string i))
(advance ()
(setq char (peek))
(incf i)))
(cond
((char= #\% char)
(advance)
(cond
((char= #\u (peek))
(unless unicodep
(setq unicodep t)
(upgrade-vector vector '(integer 0 65535)))
(advance)
(push-integer (decode-hex 4)))
(t
(push-integer (decode-hex 2)))))
(t
(push-integer (char-code (case char
((#\+) #\Space)
(otherwise char))))
(advance))))))
(cond (unicodep
(upgrade-vector vector 'character :converter #'code-char))
(t (octets-to-string vector :external-format external-format)))))
(defun form-url-encoded-list-to-alist (form-url-encoded-list
&optional (external-format *hunchentoot-default-external-format*))
"Converts a list FORM-URL-ENCODED-LIST of name/value pairs into an
alist. Both names and values are url-decoded while doing this."
(mapcar #'(lambda (entry)
(destructuring-bind (name &optional value)
(split "=" entry :limit 2)
(cons (string-trim " " (url-decode name external-format))
(url-decode (or value "") external-format))))
form-url-encoded-list))
(defun url-encode (string &optional (external-format *hunchentoot-default-external-format*))
"URL-encodes a string using the external format EXTERNAL-FORMAT."
(with-output-to-string (s)
(loop for c across string
for index from 0
do (cond ((or (char<= #\0 c #\9)
(char<= #\a c #\z)
(char<= #\A c #\Z)
;; note that there's no comma in there - because of cookies
(find c "$-_.!*'()" :test #'char=))
(write-char c s))
(t (loop for octet across (string-to-octets string
:start index
:end (1+ index)
:external-format external-format)
do (format s "%~2,'0x" octet)))))))
(defun parse-content-type (content-type-header)
"Reads and parses a `Content-Type' header and returns it as three
values - the type, the subtype, and the requests' character set as
specified in the 'charset' parameter in the header, if there is one
and if the content type is \"text\". CONTENT-TYPE-HEADER is supposed
to be the corresponding header value as a string."
(with-input-from-sequence (stream (map 'list 'char-code content-type-header))
(with-character-stream-semantics
(let* ((*current-error-message* "Corrupted Content-Type header:")
(type (read-token stream))
(subtype (if (eql #\/ (read-char* stream nil))
(read-token stream)
(return-from parse-content-type
;; try to return something meaningful
(values "application" "octet-stream" nil))))
(parameters (read-name-value-pairs stream))
(charset (cdr (assoc "charset" parameters :test #'string=)))
(charset
(when (string-equal type "text")
charset)))
(values type subtype charset)))))
(defun keep-alive-p (request)
"Returns a true value unless the incoming request's headers or the
server's PERSISTENT-CONNECTIONS-P setting obviate a keep-alive reply.
The second return value denotes whether the client has explicitly
asked for a persistent connection."
(let ((connection-values
;; the header might consist of different values separated by commas
(when-let (connection-header (header-in :connection request))
(split "\\s*,\\s*" connection-header))))
(flet ((connection-value-p (value)
"Checks whether the string VALUE is one of the
values of the `Connection' header."
(member value connection-values :test #'string-equal)))
(let ((keep-alive-requested-p (connection-value-p "keep-alive")))
(values (and (acceptor-persistent-connections-p *acceptor*)
(or (and (eq (server-protocol request) :http/1.1)
(not (connection-value-p "close")))
(and (eq (server-protocol request) :http/1.0)
keep-alive-requested-p)))
keep-alive-requested-p)))))
(defun address-string ()
"Returns a string with information about Hunchentoot suitable for
inclusion in HTML output."
(format nil "<address><a href='/'>Hunchentoot ~A</a> <a href='~A'>(~A ~A)</a>~@[ at ~A~:[ (port ~D)~;~]~]</address>"
*hunchentoot-version*
+implementation-link+
(escape-for-html (lisp-implementation-type))
(escape-for-html (lisp-implementation-version))
(or (host *request*) (acceptor-address *acceptor*))
(scan ":\\d+$" (or (host *request*) ""))
(acceptor-port *acceptor*)))
(defun server-name-header ()
"Returns a string which can be used for 'Server' headers."
(format nil "Hunchentoot ~A" *hunchentoot-version*))
(defun input-chunking-p ()
"Whether input chunking is currently switched on for
*HUNCHENTOOT-STREAM* - note that this will return NIL if the stream
not a chunked stream."
(chunked-stream-input-chunking-p *hunchentoot-stream*))
(defun ssl-p (&optional (acceptor *acceptor*))
"Whether the current connection to the client is secure."
(acceptor-ssl-p acceptor))
(defmacro with-mapped-conditions (() &body body)
"Run BODY with usocket condition mapping in effect, i.e. platform specific network errors will be
signalled as usocket conditions. For Lispworks, no mapping is performed."
#+:lispworks
`(progn ,@body)
#-:lispworks
`(usocket:with-mapped-conditions ()
,@body)) | null | https://raw.githubusercontent.com/smanek/trivial-lisp-webapp/36816c17ea378822e02a123c1be960fd9ce3e29d/aux/hunchentoot/util.lisp | lisp | Syntax : COMMON - LISP ; Package : HUNCHENTOOT ; Base : 10 -*-
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials
provided with the distribution.
THIS SOFTWARE IS PROVIDED BY THE AUTHOR 'AS IS' AND ANY EXPRESSED
OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
" out))
CHAR is not 'tspecial'
maybe call hook for file uploads
note that there's no comma in there - because of cookies
try to return something meaningful
the header might consist of different values separated by commas | $ Header : /usr / local / cvsrep / hunchentoot / util.lisp , v 1.35 2008/04/08 14:39:18 edi Exp $
Copyright ( c ) 2004 - 2009 , Dr. . All rights reserved .
DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR CONSEQUENTIAL
INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
(in-package :hunchentoot)
(defun starts-with-p (seq subseq &key (test 'eql))
"Tests whether the sequence SEQ starts with the sequence
SUBSEQ. Individual elements are compared with TEST."
(let* ((length (length subseq))
(mismatch (mismatch subseq seq
:test test)))
(or (null mismatch)
(<= length mismatch))))
(defun starts-with-one-of-p (seq subseq-list &key (test 'eql))
"Tests whether the sequence SEQ starts with one of the
sequences in SUBSEQ-LIST. Individual elements are compared with
TEST."
(some (lambda (subseq)
(starts-with-p seq subseq :test test))
subseq-list))
(defun create-random-string (&optional (n 10) (base 16))
"Returns a random number \(as a string) with base BASE and N
digits."
(with-output-to-string (s)
(dotimes (i n)
(format s "~VR" base
(random base *the-random-state*)))))
(defun reason-phrase (return-code)
"Returns a reason phrase for the HTTP return code RETURN-CODE
\(which should be an integer) or NIL for return codes Hunchentoot
doesn't know."
(gethash return-code *http-reason-phrase-map*))
(defgeneric assoc* (thing alist)
(:documentation "Similar to CL:ASSOC, but 'does the right thing' if
THING is a string or a symbol.")
(:method ((thing symbol) alist)
(assoc thing alist :test #'eq))
(:method ((thing string) alist)
(assoc thing alist :test #'string-equal))
(:method (thing alist)
(assoc thing alist :test #'eql)))
(defun md5-hex (string)
"Calculates the md5 sum of the string STRING and returns it as a hex string."
(with-output-to-string (s)
(loop for code across (md5:md5sum-sequence (coerce string 'simple-string))
do (format s "~2,'0x" code))))
(defun escape-for-html (string)
"Escapes the characters #\\<, #\\>, #\\', #\\\", and #\\& for HTML output."
(with-output-to-string (out)
(with-input-from-string (in string)
(loop for char = (read-char in nil nil)
while char
do (case char
((#\<) (write-string "<" out))
((#\>) (write-string ">" out))
((#\') (write-string "'" out))
((#\&) (write-string "&" out))
(otherwise (write-char char out)))))))
(defun http-token-p (token)
"Tests whether TOKEN is a string which is a valid 'token'
according to HTTP/1.1 \(RFC 2068)."
(and (stringp token)
(plusp (length token))
(every (lambda (char)
CHAR is US - ASCII but not control character or ESC
(< 31 (char-code char) 127)
(not (find char "()<>@,;:\\\"/[]?={} " :test #'char=))))
token)))
(defun rfc-1123-date (&optional (time (get-universal-time)))
"Generates a time string according to RFC 1123. Default is current time."
(multiple-value-bind
(second minute hour date month year day-of-week)
(decode-universal-time time 0)
(format nil "~A, ~2,'0d ~A ~4d ~2,'0d:~2,'0d:~2,'0d GMT"
(svref +day-names+ day-of-week)
date
(svref +month-names+ (1- month))
year
hour
minute
second)))
(defun iso-time (&optional (time (get-universal-time)))
"Returns the universal time TIME as a string in full ISO format."
(multiple-value-bind (second minute hour date month year)
(decode-universal-time time)
(format nil "~4,'0d-~2,'0d-~2,'0d ~2,'0d:~2,'0d:~2,'0d"
year month date hour minute second)))
(let ((counter 0))
(declare (ignorable counter))
(defun make-tmp-file-name (&optional (prefix "hunchentoot"))
"Generates a unique name for a temporary file. This function is
called from the RFC2388 library when a file is uploaded."
(let ((tmp-file-name
#+:allegro
(pathname (system:make-temp-file-name prefix *tmp-directory*))
#-:allegro
(loop for pathname = (make-pathname :name (format nil "~A-~A"
prefix (incf counter))
:type nil
:defaults *tmp-directory*)
unless (probe-file pathname)
return pathname)))
(push tmp-file-name *tmp-files*)
(when *file-upload-hook*
(funcall *file-upload-hook* tmp-file-name))
tmp-file-name)))
(defun quote-string (string)
"Quotes string according to RFC 2616's definition of `quoted-string'."
(with-output-to-string (out)
(with-input-from-string (in string)
(loop for char = (read-char in nil nil)
while char
unless (or (char< char #\Space)
(char= char #\Rubout))
do (case char
((#\\) (write-string "\\\\" out))
((#\") (write-string "\\\"" out))
(otherwise (write-char char out)))))))
(defmacro upgrade-vector (vector new-type &key converter)
"Returns a vector with the same length and the same elements as
VECTOR \(a variable holding a vector) but having element type
NEW-TYPE. If CONVERTER is not NIL, it should designate a function
which will be applied to each element of VECTOR before the result is
stored in the new vector. The resulting vector will have a fill
pointer set to its end.
The macro also uses SETQ to store the new vector in VECTOR."
`(setq ,vector
(loop with length = (length ,vector)
with new-vector = (make-array length
:element-type ,new-type
:fill-pointer length)
for i below length
do (setf (aref new-vector i) ,(if converter
`(funcall ,converter (aref ,vector i))
`(aref ,vector i)))
finally (return new-vector))))
(defun url-decode (string &optional (external-format *hunchentoot-default-external-format*))
"Decodes a URL-encoded STRING which is assumed to be encoded using
the external format EXTERNAL-FORMAT."
(when (zerop (length string))
(return-from url-decode ""))
(let ((vector (make-array (length string) :element-type 'octet :fill-pointer 0))
(i 0)
unicodep)
(loop
(unless (< i (length string))
(return))
(let ((char (aref string i)))
(labels ((decode-hex (length)
(prog1
(parse-integer string :start i :end (+ i length) :radix 16)
(incf i length)))
(push-integer (integer)
(vector-push integer vector))
(peek ()
(aref string i))
(advance ()
(setq char (peek))
(incf i)))
(cond
((char= #\% char)
(advance)
(cond
((char= #\u (peek))
(unless unicodep
(setq unicodep t)
(upgrade-vector vector '(integer 0 65535)))
(advance)
(push-integer (decode-hex 4)))
(t
(push-integer (decode-hex 2)))))
(t
(push-integer (char-code (case char
((#\+) #\Space)
(otherwise char))))
(advance))))))
(cond (unicodep
(upgrade-vector vector 'character :converter #'code-char))
(t (octets-to-string vector :external-format external-format)))))
(defun form-url-encoded-list-to-alist (form-url-encoded-list
&optional (external-format *hunchentoot-default-external-format*))
"Converts a list FORM-URL-ENCODED-LIST of name/value pairs into an
alist. Both names and values are url-decoded while doing this."
(mapcar #'(lambda (entry)
(destructuring-bind (name &optional value)
(split "=" entry :limit 2)
(cons (string-trim " " (url-decode name external-format))
(url-decode (or value "") external-format))))
form-url-encoded-list))
(defun url-encode (string &optional (external-format *hunchentoot-default-external-format*))
"URL-encodes a string using the external format EXTERNAL-FORMAT."
(with-output-to-string (s)
(loop for c across string
for index from 0
do (cond ((or (char<= #\0 c #\9)
(char<= #\a c #\z)
(char<= #\A c #\Z)
(find c "$-_.!*'()" :test #'char=))
(write-char c s))
(t (loop for octet across (string-to-octets string
:start index
:end (1+ index)
:external-format external-format)
do (format s "%~2,'0x" octet)))))))
(defun parse-content-type (content-type-header)
"Reads and parses a `Content-Type' header and returns it as three
values - the type, the subtype, and the requests' character set as
specified in the 'charset' parameter in the header, if there is one
and if the content type is \"text\". CONTENT-TYPE-HEADER is supposed
to be the corresponding header value as a string."
(with-input-from-sequence (stream (map 'list 'char-code content-type-header))
(with-character-stream-semantics
(let* ((*current-error-message* "Corrupted Content-Type header:")
(type (read-token stream))
(subtype (if (eql #\/ (read-char* stream nil))
(read-token stream)
(return-from parse-content-type
(values "application" "octet-stream" nil))))
(parameters (read-name-value-pairs stream))
(charset (cdr (assoc "charset" parameters :test #'string=)))
(charset
(when (string-equal type "text")
charset)))
(values type subtype charset)))))
(defun keep-alive-p (request)
"Returns a true value unless the incoming request's headers or the
server's PERSISTENT-CONNECTIONS-P setting obviate a keep-alive reply.
The second return value denotes whether the client has explicitly
asked for a persistent connection."
(let ((connection-values
(when-let (connection-header (header-in :connection request))
(split "\\s*,\\s*" connection-header))))
(flet ((connection-value-p (value)
"Checks whether the string VALUE is one of the
values of the `Connection' header."
(member value connection-values :test #'string-equal)))
(let ((keep-alive-requested-p (connection-value-p "keep-alive")))
(values (and (acceptor-persistent-connections-p *acceptor*)
(or (and (eq (server-protocol request) :http/1.1)
(not (connection-value-p "close")))
(and (eq (server-protocol request) :http/1.0)
keep-alive-requested-p)))
keep-alive-requested-p)))))
(defun address-string ()
"Returns a string with information about Hunchentoot suitable for
inclusion in HTML output."
(format nil "<address><a href='/'>Hunchentoot ~A</a> <a href='~A'>(~A ~A)</a>~@[ at ~A~:[ (port ~D)~;~]~]</address>"
*hunchentoot-version*
+implementation-link+
(escape-for-html (lisp-implementation-type))
(escape-for-html (lisp-implementation-version))
(or (host *request*) (acceptor-address *acceptor*))
(scan ":\\d+$" (or (host *request*) ""))
(acceptor-port *acceptor*)))
(defun server-name-header ()
"Returns a string which can be used for 'Server' headers."
(format nil "Hunchentoot ~A" *hunchentoot-version*))
(defun input-chunking-p ()
"Whether input chunking is currently switched on for
*HUNCHENTOOT-STREAM* - note that this will return NIL if the stream
not a chunked stream."
(chunked-stream-input-chunking-p *hunchentoot-stream*))
(defun ssl-p (&optional (acceptor *acceptor*))
"Whether the current connection to the client is secure."
(acceptor-ssl-p acceptor))
(defmacro with-mapped-conditions (() &body body)
"Run BODY with usocket condition mapping in effect, i.e. platform specific network errors will be
signalled as usocket conditions. For Lispworks, no mapping is performed."
#+:lispworks
`(progn ,@body)
#-:lispworks
`(usocket:with-mapped-conditions ()
,@body)) |
1c1bd2a1b7bf20b3c318289468ac9badb910f60c7de77543a2e4e40c990520b7 | stevenvar/OMicroB | solilet.ml | (*************************************************************************)
(* *)
(* OMicrob *)
(* *)
(* *)
This file is distributed under the terms of the CeCILL license .
(* See file ../../LICENSE-en. *)
(* *)
(*************************************************************************)
open Avr
let ignore x = () ;;
type peg = Out | Empty | Peg ;;
exception Found;;
let board = [|
[| Out; Out; Out; Out; Out; Out; Out; Out; Out|];
[| Out; Out; Out; Peg; Peg; Peg; Out; Out; Out|];
[| Out; Out; Out; Peg; Peg; Peg; Out; Out; Out|];
[| Out; Peg; Peg; Peg; Peg; Peg; Peg; Peg; Out|];
[| Out; Peg; Peg; Peg; Empty; Peg; Peg; Peg; Out|];
[| Out; Peg; Peg; Peg; Peg; Peg; Peg; Peg; Out|];
[| Out; Out; Out; Peg; Peg; Peg; Out; Out; Out|];
[| Out; Out; Out; Peg; Peg; Peg; Out; Out; Out|];
[| Out; Out; Out; Out; Out; Out; Out; Out; Out|]
|]
;;
let moves = Array.make 31 ([||] : int array array);;
let dir = [| [|0;1|]; [|1;0|];[|0;0-1|];[|0-1;0|] |];;
let counter = ref 0
let rec solve m =
incr counter;
if m = 31 then (
match board.(4).(4) with
| Peg -> true
| _ -> false
)
else
try
for i=1 to 7 do
for j=1 to 7 do
match board.(i).(j) with
Peg ->
for k=0 to 3 do
let d1 = dir.(k).(0) in
let d2 = dir.(k).(1) in
let i1 = i+d1 in
let i2 = i1+d1 in
let j1 = j+d2 in
let j2 = j1+d2 in
match board.(i1).(j1) with
Peg ->
begin match board.(i2).(j2) with
Empty ->
board.(i).(j) <- Empty;
board.(i1).(j1) <- Empty;
board.(i2).(j2) <- Peg;
if solve(m+1) then begin
moves.(m) <- [| [| i; j |]; [| i2; j2 |] |];
raise Found
end;
board.(i).(j) <- Peg;
board.(i1).(j1) <- Peg;
board.(i2).(j2) <- Empty;
()
| _ -> ()
end
| _ -> ()
done
| _ -> ()
done
done;
false
with Found ->
true
;;
(*
let main () =
digital_write PIN1 LOW; (* D3 *)
begin
try
let i = ref 0 in
while (!i) < 1 do
ignore (solve 0);
incr i;
done;
digital_write PIN2 LOW (* D1 *)
with
| Stack_overflow -> digital_write PIN3 LOW (* D0 *)
| Out_of_memory -> digital_write PIN3 LOW
| _ -> digital_write PIN3 LOW
end
;;
*)
let check() =
let cpt = ref 0 in
Array.iter (fun v -> cpt := !cpt + v.(0).(0) + v.(0).(1) + v.(1).(0) + v.(1).(1))
moves ;
!cpt
;;
let init_pins () =
pin_mode PIN9 OUTPUT; digital_write PIN9 LOW; (* VERT *)
ROUGE
BLEU
digital_write PIN9 HIGH;
digital_write PIN10 HIGH;
digital_write PIN11 HIGH;
()
;;
let _ =
init_pins();
if solve 0 then digital_write PIN9 LOW else digital_write PIN10 LOW;
let r = check() in
digital_write PIN4 LOW ( * D4
;;
(*
.........
... ...
... ...
. .
. $ .
. .
... ...
... ...
.........
*)
| null | https://raw.githubusercontent.com/stevenvar/OMicroB/e4324d0736ac677b3086741dfdefb0e46775642b/tests/tuto_6_solilet/solilet.ml | ocaml | ***********************************************************************
OMicrob
See file ../../LICENSE-en.
***********************************************************************
let main () =
digital_write PIN1 LOW; (* D3
D1
D0
VERT
.........
... ...
... ...
. .
. $ .
. .
... ...
... ...
.........
| This file is distributed under the terms of the CeCILL license .
open Avr
let ignore x = () ;;
type peg = Out | Empty | Peg ;;
exception Found;;
let board = [|
[| Out; Out; Out; Out; Out; Out; Out; Out; Out|];
[| Out; Out; Out; Peg; Peg; Peg; Out; Out; Out|];
[| Out; Out; Out; Peg; Peg; Peg; Out; Out; Out|];
[| Out; Peg; Peg; Peg; Peg; Peg; Peg; Peg; Out|];
[| Out; Peg; Peg; Peg; Empty; Peg; Peg; Peg; Out|];
[| Out; Peg; Peg; Peg; Peg; Peg; Peg; Peg; Out|];
[| Out; Out; Out; Peg; Peg; Peg; Out; Out; Out|];
[| Out; Out; Out; Peg; Peg; Peg; Out; Out; Out|];
[| Out; Out; Out; Out; Out; Out; Out; Out; Out|]
|]
;;
let moves = Array.make 31 ([||] : int array array);;
let dir = [| [|0;1|]; [|1;0|];[|0;0-1|];[|0-1;0|] |];;
let counter = ref 0
let rec solve m =
incr counter;
if m = 31 then (
match board.(4).(4) with
| Peg -> true
| _ -> false
)
else
try
for i=1 to 7 do
for j=1 to 7 do
match board.(i).(j) with
Peg ->
for k=0 to 3 do
let d1 = dir.(k).(0) in
let d2 = dir.(k).(1) in
let i1 = i+d1 in
let i2 = i1+d1 in
let j1 = j+d2 in
let j2 = j1+d2 in
match board.(i1).(j1) with
Peg ->
begin match board.(i2).(j2) with
Empty ->
board.(i).(j) <- Empty;
board.(i1).(j1) <- Empty;
board.(i2).(j2) <- Peg;
if solve(m+1) then begin
moves.(m) <- [| [| i; j |]; [| i2; j2 |] |];
raise Found
end;
board.(i).(j) <- Peg;
board.(i1).(j1) <- Peg;
board.(i2).(j2) <- Empty;
()
| _ -> ()
end
| _ -> ()
done
| _ -> ()
done
done;
false
with Found ->
true
;;
begin
try
let i = ref 0 in
while (!i) < 1 do
ignore (solve 0);
incr i;
done;
with
| Out_of_memory -> digital_write PIN3 LOW
| _ -> digital_write PIN3 LOW
end
;;
*)
let check() =
let cpt = ref 0 in
Array.iter (fun v -> cpt := !cpt + v.(0).(0) + v.(0).(1) + v.(1).(0) + v.(1).(1))
moves ;
!cpt
;;
let init_pins () =
ROUGE
BLEU
digital_write PIN9 HIGH;
digital_write PIN10 HIGH;
digital_write PIN11 HIGH;
()
;;
let _ =
init_pins();
if solve 0 then digital_write PIN9 LOW else digital_write PIN10 LOW;
let r = check() in
digital_write PIN4 LOW ( * D4
;;
|
2f4a91dc13f015499113901a569b7c58a699f3dc1f81c4b4a78cd71e12949f9a | shortishly/pgec | pgec_h.erl | Copyright ( c ) 2022 < >
%%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
-module(pgec_h).
-define(JSON, <<"application/json">>).
-export([init/2]).
-include_lib("kernel/include/logger.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
init(Req, Opts) ->
?FUNCTION_NAME(Req, Opts, cowboy_req:path_info(Req)).
init(#{bindings := #{publication := Publication,
table := Table}} = Req,
Opts,
[] = Keys) ->
?LOG_DEBUG(#{req => Req,
opts => Opts,
keys => Keys}),
ContentType = negotiate_content_type(Req),
case ets:lookup(
pgec_metadata,
{Publication, Table}) of
[{_, Metadata}] ->
{ok,
cowboy_req:reply(
200,
headers(ContentType),
[encode(
ContentType,
#{rows => ets:foldl(
pgec_kv:row(Metadata, ContentType),
[],
table(Metadata, Req))}),
"\n"],
Req),
Opts};
[] ->
{ok, not_found(Req), Opts}
end;
init(#{bindings := #{publication := Publication,
table := Table}} = Req,
Opts,
Keys) ->
?LOG_DEBUG(#{req => Req, opts => Opts, keys => Keys}),
case ets:lookup(
pgec_metadata,
{Publication, Table}) of
[{_, Metadata}] ->
?LOG_DEBUG(#{metadata => Metadata}),
try lookup(Metadata, Req) of
[_] = Row ->
ContentType = negotiate_content_type(Req),
{ok,
cowboy_req:reply(
200,
headers(ContentType),
[encode(
ContentType,
hd(lists:foldl(
pgec_kv:row(Metadata, ContentType),
[],
Row))),
"\n"],
Req),
Opts};
[] ->
{ok,
not_found(Req,
#{publication => Publication,
keys => Keys,
table => Table}),
Opts}
catch error:badarg ->
{ok,
not_found(Req,
#{publication => Publication,
keys => Keys,
table => Table}),
Opts}
end;
[] ->
?LOG_DEBUG(#{metadata => not_found}),
{ok,
not_found(Req,
#{publication => Publication,
keys => Keys,
table => Table}),
Opts}
end;
init(#{bindings := #{publication := Publication}} = Req,
Opts,
_) ->
ContentType = negotiate_content_type(Req),
{ok,
cowboy_req:reply(
200,
headers(ContentType),
[encode(
ContentType,
#{tables => ets:select(
pgec_metadata,
ets:fun2ms(
fun
({{Pub, Table}, _}) when Publication == Pub ->
Table
end))}),
"\n"],
Req),
Opts};
init(Req, Opts, _) ->
?LOG_DEBUG(#{req => Req, opts => Opts}),
{ok,
cowboy_req:reply(
200,
headers(),
[jsx:encode(#{publications => pgmp_config:replication(
logical,
publication_names)}),
"\n"],
Req),
Opts}.
%% Not much negotiation here, we only support JSON right now.
%%
negotiate_content_type(#{headers := #{}}) ->
?JSON.
headers() ->
?FUNCTION_NAME(?JSON).
headers(ContentType) ->
#{<<"content-type">> => ContentType}.
not_found(Req, Body) ->
cowboy_req:reply(404, headers(), [jsx:encode(Body), "\n"], Req).
not_found(Req) ->
cowboy_req:reply(404, #{}, <<>>, Req).
lookup(Metadata, Req) ->
?LOG_DEBUG(#{metadata => Metadata, req => Req}),
ets:lookup(table(Metadata, Req), key(Metadata, Req)).
table(Metadata, #{bindings := #{table := Table}}) ->
?LOG_DEBUG(#{metadata => Metadata, table => Table}),
binary_to_existing_atom(Table).
key(Metadata, Req) ->
?LOG_DEBUG(#{metadata => Metadata, req => Req}),
pgec_kv:key(Metadata, cowboy_req:path_info(Req)).
encode(?JSON, Content) ->
jsx:encode(Content).
| null | https://raw.githubusercontent.com/shortishly/pgec/9b665f68379eef5b06304b739542fc19338a6c56/src/pgec_h.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Not much negotiation here, we only support JSON right now.
| Copyright ( c ) 2022 < >
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(pgec_h).
-define(JSON, <<"application/json">>).
-export([init/2]).
-include_lib("kernel/include/logger.hrl").
-include_lib("stdlib/include/ms_transform.hrl").
init(Req, Opts) ->
?FUNCTION_NAME(Req, Opts, cowboy_req:path_info(Req)).
init(#{bindings := #{publication := Publication,
table := Table}} = Req,
Opts,
[] = Keys) ->
?LOG_DEBUG(#{req => Req,
opts => Opts,
keys => Keys}),
ContentType = negotiate_content_type(Req),
case ets:lookup(
pgec_metadata,
{Publication, Table}) of
[{_, Metadata}] ->
{ok,
cowboy_req:reply(
200,
headers(ContentType),
[encode(
ContentType,
#{rows => ets:foldl(
pgec_kv:row(Metadata, ContentType),
[],
table(Metadata, Req))}),
"\n"],
Req),
Opts};
[] ->
{ok, not_found(Req), Opts}
end;
init(#{bindings := #{publication := Publication,
table := Table}} = Req,
Opts,
Keys) ->
?LOG_DEBUG(#{req => Req, opts => Opts, keys => Keys}),
case ets:lookup(
pgec_metadata,
{Publication, Table}) of
[{_, Metadata}] ->
?LOG_DEBUG(#{metadata => Metadata}),
try lookup(Metadata, Req) of
[_] = Row ->
ContentType = negotiate_content_type(Req),
{ok,
cowboy_req:reply(
200,
headers(ContentType),
[encode(
ContentType,
hd(lists:foldl(
pgec_kv:row(Metadata, ContentType),
[],
Row))),
"\n"],
Req),
Opts};
[] ->
{ok,
not_found(Req,
#{publication => Publication,
keys => Keys,
table => Table}),
Opts}
catch error:badarg ->
{ok,
not_found(Req,
#{publication => Publication,
keys => Keys,
table => Table}),
Opts}
end;
[] ->
?LOG_DEBUG(#{metadata => not_found}),
{ok,
not_found(Req,
#{publication => Publication,
keys => Keys,
table => Table}),
Opts}
end;
init(#{bindings := #{publication := Publication}} = Req,
Opts,
_) ->
ContentType = negotiate_content_type(Req),
{ok,
cowboy_req:reply(
200,
headers(ContentType),
[encode(
ContentType,
#{tables => ets:select(
pgec_metadata,
ets:fun2ms(
fun
({{Pub, Table}, _}) when Publication == Pub ->
Table
end))}),
"\n"],
Req),
Opts};
init(Req, Opts, _) ->
?LOG_DEBUG(#{req => Req, opts => Opts}),
{ok,
cowboy_req:reply(
200,
headers(),
[jsx:encode(#{publications => pgmp_config:replication(
logical,
publication_names)}),
"\n"],
Req),
Opts}.
negotiate_content_type(#{headers := #{}}) ->
?JSON.
headers() ->
?FUNCTION_NAME(?JSON).
headers(ContentType) ->
#{<<"content-type">> => ContentType}.
not_found(Req, Body) ->
cowboy_req:reply(404, headers(), [jsx:encode(Body), "\n"], Req).
not_found(Req) ->
cowboy_req:reply(404, #{}, <<>>, Req).
lookup(Metadata, Req) ->
?LOG_DEBUG(#{metadata => Metadata, req => Req}),
ets:lookup(table(Metadata, Req), key(Metadata, Req)).
table(Metadata, #{bindings := #{table := Table}}) ->
?LOG_DEBUG(#{metadata => Metadata, table => Table}),
binary_to_existing_atom(Table).
key(Metadata, Req) ->
?LOG_DEBUG(#{metadata => Metadata, req => Req}),
pgec_kv:key(Metadata, cowboy_req:path_info(Req)).
encode(?JSON, Content) ->
jsx:encode(Content).
|
637f42841aaf6eecfbfae1d7fb90167a33b68118c2d0c05310807102601b1496 | LeventErkok/hArduino | LCD.hs | -------------------------------------------------------------------------------------------------
-- |
-- Module : System.Hardware.Arduino.Parts.LCD
Copyright : ( c )
-- License : BSD3
-- Maintainer :
-- Stability : experimental
--
LCD ( Liquid Crystal Display ) parts supported by hArduino . The code
below has partly been implemented following the Arduino LiquidCrystal project
-- source code: </>
--
The Hitachi44780 data sheet is at : < -linux.sourceforge.net/pdfdocs/hd44780.pdf >
--
For an example program using this library , see " System . Hardware . Arduino . SamplePrograms . LCD " .
-------------------------------------------------------------------------------------------------
# LANGUAGE NamedFieldPuns #
# OPTIONS_GHC -Wno - incomplete - uni - patterns #
module System.Hardware.Arduino.Parts.LCD(
-- * LCD types and registration
LCD, LCDController(..), lcdRegister
-- * Writing text on the LCD
, lcdClear, lcdWrite
-- * Moving the cursor
, lcdHome, lcdSetCursor
-- * Scrolling
, lcdAutoScrollOn, lcdAutoScrollOff
, lcdScrollDisplayLeft, lcdScrollDisplayRight
-- * Display properties
, lcdLeftToRight, lcdRightToLeft
, lcdBlinkOn, lcdBlinkOff
, lcdCursorOn, lcdCursorOff
, lcdDisplayOn, lcdDisplayOff
-- * Accessing internal symbols,
, LCDSymbol, lcdInternalSymbol, lcdWriteSymbol
-- Creating custom symbols
, lcdCreateSymbol
-- * Misc helpers
, lcdFlash
) where
import Control.Concurrent (modifyMVar, withMVar)
import Control.Monad (when)
import Control.Monad.State (gets, liftIO)
import Data.Bits (testBit, (.|.), (.&.), setBit, clearBit, shiftL, bit)
import Data.Char (ord, isSpace)
import Data.Maybe (fromMaybe)
import Data.Word (Word8)
import qualified Data.Map as M
import System.Hardware.Arduino.Data
import System.Hardware.Arduino.Firmata
import qualified System.Hardware.Arduino.Utils as U
import System.Exit (exitFailure)
---------------------------------------------------------------------------------------
-- Low level interface, not available to the user
---------------------------------------------------------------------------------------
| Commands understood by Hitachi
data Cmd = LCD_INITIALIZE
| LCD_INITIALIZE_END
| LCD_FUNCTIONSET
| LCD_DISPLAYCONTROL Word8
| LCD_CLEARDISPLAY
| LCD_ENTRYMODESET Word8
| LCD_RETURNHOME
| LCD_SETDDRAMADDR Word8
| LCD_CURSORSHIFT Word8
| LCD_SETCGRAMADDR Word8
-- | Convert a command to a data-word
getCmdVal :: LCDController -> Cmd -> Word8
getCmdVal Hitachi44780{lcdRows, dotMode5x10} = get
bit 3
| lcdRows > 1 = 0x08 :: Word8
| True = 0x00 :: Word8
bit 2
| dotMode5x10 = 0x04 :: Word8
| True = 0x00 :: Word8
displayFunction = multiLine .|. dotMode
get LCD_INITIALIZE = 0x33
get LCD_INITIALIZE_END = 0x32
get LCD_FUNCTIONSET = 0x20 .|. displayFunction
get (LCD_DISPLAYCONTROL w) = 0x08 .|. w
get LCD_CLEARDISPLAY = 0x01
get (LCD_ENTRYMODESET w) = 0x04 .|. w
get LCD_RETURNHOME = 0x02
get (LCD_SETDDRAMADDR w) = 0x80 .|. w
get (LCD_CURSORSHIFT w) = 0x10 .|. 0x08 .|. w -- NB. LCD_DISPLAYMOVE (0x08) hard coded here
get (LCD_SETCGRAMADDR w) = 0x40 .|. w `shiftL` 3
-- | Initialize the LCD. Follows the data sheet <-linux.sourceforge.net/pdfdocs/hd44780.pdf>,
page 46 ; figure 24 .
initLCD :: LCD -> LCDController -> Arduino ()
initLCD lcd c@Hitachi44780{lcdRS, lcdEN, lcdD4, lcdD5, lcdD6, lcdD7} = do
debug "Starting the LCD initialization sequence"
mapM_ (`setPinMode` OUTPUT) [lcdRS, lcdEN, lcdD4, lcdD5, lcdD6, lcdD7]
Wait for 50ms , data - sheet says at least 40ms for 2.7V version , so be safe
delay 50
sendCmd c LCD_INITIALIZE
delay 5
sendCmd c LCD_INITIALIZE_END
sendCmd c LCD_FUNCTIONSET
lcdCursorOff lcd
lcdBlinkOff lcd
lcdLeftToRight lcd
lcdAutoScrollOff lcd
lcdHome lcd
lcdClear lcd
lcdDisplayOn lcd
-- | Get the controller associated with the LCD
getController :: LCD -> Arduino LCDController
getController lcd = do
bs <- gets boardState
err <- gets bailOut
liftIO $ withMVar bs $ \bst -> case lcd `M.lookup` lcds bst of
Nothing -> do err ("hArduino: Cannot locate " ++ show lcd) []
exitFailure
Just ld -> return $ lcdController ld
-- | Send a command to the LCD controller
sendCmd :: LCDController -> Cmd -> Arduino ()
sendCmd c = transmit False c . getCmdVal c
| Send 4 - bit data to the LCD controller
sendData :: LCDController -> Word8 -> Arduino ()
sendData lcd n = do debug $ "Transmitting LCD data: " ++ U.showByte n
transmit True lcd n
-- | By controlling the enable-pin, indicate to the controller that
-- the data is ready for it to process.
pulseEnable :: LCDController -> Arduino ()
pulseEnable Hitachi44780{lcdEN} = do
debug "Sending LCD pulseEnable"
digitalWrite lcdEN False
delay 1
digitalWrite lcdEN True
delay 1
digitalWrite lcdEN False
delay 1
-- | Transmit data down to the LCD
transmit :: Bool -> LCDController -> Word8 -> Arduino ()
transmit mode c@Hitachi44780{lcdRS, lcdEN, lcdD4, lcdD5, lcdD6, lcdD7} val = do
digitalWrite lcdRS mode
digitalWrite lcdEN False
let [b7, b6, b5, b4, b3, b2, b1, b0] = [val `testBit` i | i <- [7, 6 .. 0]]
Send down the first 4 bits
digitalWrite lcdD4 b4
digitalWrite lcdD5 b5
digitalWrite lcdD6 b6
digitalWrite lcdD7 b7
pulseEnable c
-- Send down the remaining batch
digitalWrite lcdD4 b0
digitalWrite lcdD5 b1
digitalWrite lcdD6 b2
digitalWrite lcdD7 b3
pulseEnable c
-- | Helper function to simplify library programming, not exposed to the user.
withLCD :: LCD -> String -> (LCDController -> Arduino a) -> Arduino a
withLCD lcd what action = do
debug what
c <- getController lcd
action c
---------------------------------------------------------------------------------------
-- High level interface, exposed to the user
---------------------------------------------------------------------------------------
-- | Register an LCD controller. When registration is complete, the LCD will be initialized so that:
--
-- * Set display ON (Use 'lcdDisplayOn' / 'lcdDisplayOff' to change.)
--
-- * Set cursor OFF (Use 'lcdCursorOn' / 'lcdCursorOff' to change.)
--
-- * Set blink OFF (Use 'lcdBlinkOn' / 'lcdBlinkOff' to change.)
--
-- * Clear display (Use 'lcdClear' to clear, 'lcdWrite' to display text.)
--
-- * Set entry mode left to write (Use 'lcdLeftToRight' / 'lcdRightToLeft' to control.)
--
-- * Set autoscrolling OFF (Use 'lcdAutoScrollOff' / 'lcdAutoScrollOn' to control.)
--
-- * Put the cursor into home position (Use 'lcdSetCursor' or 'lcdHome' to move around.)
lcdRegister :: LCDController -> Arduino LCD
lcdRegister controller = do
    bs <- gets boardState
    -- Atomically allocate the next LCD slot and store fresh bookkeeping data
    lcd <- liftIO $ modifyMVar bs $ \bst -> do
                let n = M.size $ lcds bst
                    ld = LCDData { lcdDisplayMode = 0
                                 , lcdDisplayControl = 0
                                 , lcdGlyphCount = 0
                                 , lcdController = controller
                                 }
                return (bst {lcds = M.insert (LCD n) ld (lcds bst)}, LCD n)
    -- Run the controller-specific initialization sequence
    case controller of
      Hitachi44780{} -> initLCD lcd controller
    return lcd
-- | Write a string on the LCD at the current cursor position
lcdWrite :: LCD -> String -> Arduino ()
lcdWrite lcd m = withLCD lcd ("Writing " ++ show m ++ " to LCD") $ \c -> mapM_ (sendData c) m'
  where m' = map (\ch -> fromIntegral (ord ch) .&. 0xFF) m  -- each char truncated to a single byte
-- | Clear the LCD
lcdClear :: LCD -> Arduino ()
lcdClear lcd = withLCD lcd "Sending clearLCD" $ \c ->
                  do sendCmd c LCD_CLEARDISPLAY
                     delay 2 -- give some time to make sure LCD is really cleared
-- | Send the cursor to home position
lcdHome :: LCD -> Arduino ()
lcdHome lcd = withLCD lcd "Sending the cursor home" $ \c ->
                  do sendCmd c LCD_RETURNHOME
                     delay 2 -- the return-home command needs extra time to complete
-- | Set the cursor location. The pair of arguments is the new column and row numbers
-- respectively:
--
-- * The first value is the column, the second is the row. (This is counter-intuitive, but
-- is in line with what the standard Arduino programmers do, so we follow the same convention.)
--
-- * Counting starts at 0 (both for column and row no)
--
-- * If the new location is out-of-bounds of your LCD, we will put it the cursor to the closest
-- possible location on the LCD.
lcdSetCursor :: LCD -> (Int, Int) -> Arduino ()
lcdSetCursor lcd (givenCol, givenRow) = withLCD lcd ("Sending the cursor to Row: " ++ show givenRow ++ " Col: " ++ show givenCol) set
  where set c@Hitachi44780{lcdRows, lcdCols} = sendCmd c (LCD_SETDDRAMADDR offset)
          where -- clamp an index into the valid range [0, m-1]
                align :: Int -> Int -> Word8
                align i m
                 | i < 0  = 0
                 | i >= m = fromIntegral $ m-1
                 | True   = fromIntegral i
                col = align givenCol lcdCols
                row = align givenRow lcdRows
                -- The magic row-offsets come from various web sources
                -- I don't follow the logic in these numbers, but it seems to work
                rowOffsets = [(0, 0), (1, 0x40), (2, 0x14), (3, 0x54)]
                offset = col + fromMaybe 0x54 (row `lookup` rowOffsets)
-- | Scroll the display to the left by 1 character. Project idea: Using a tilt sensor, scroll the contents of the display
-- left/right depending on the tilt.
lcdScrollDisplayLeft :: LCD -> Arduino ()
lcdScrollDisplayLeft lcd = withLCD lcd "Scrolling display to the left by 1" $ \c -> sendCmd c (LCD_CURSORSHIFT lcdMoveLeft)
  where lcdMoveLeft = 0x00
-- | Scroll the display to the right by 1 character
lcdScrollDisplayRight :: LCD -> Arduino ()
lcdScrollDisplayRight lcd = withLCD lcd "Scrolling display to the right by 1" $ \c -> sendCmd c (LCD_CURSORSHIFT lcdMoveRight)
  where lcdMoveRight = 0x04
-- | Display characteristics helper, set the new control/mode and send
-- appropriate commands if anything changed
updateDisplayData :: String -> (Word8 -> Word8, Word8 -> Word8) -> LCD -> Arduino ()
updateDisplayData what (f, g) lcd = do
    debug what
    bs <- gets boardState
    err <- gets bailOut
    -- Atomically apply f to the stored display-control word and g to the
    -- entry-mode word, obtaining both the old and new bookkeeping records
    ( LCDData {lcdDisplayControl = oldC, lcdDisplayMode = oldM}
     , LCDData {lcdDisplayControl = newC, lcdDisplayMode = newM, lcdController = c})
        <- liftIO $ modifyMVar bs $ \bst ->
               case lcd `M.lookup` lcds bst of
                 Nothing -> do err ("hArduino: Cannot locate " ++ show lcd) []
                               exitFailure
                 Just ld@LCDData{lcdDisplayControl, lcdDisplayMode}
                    -> do let ld' = ld { lcdDisplayControl = f lcdDisplayControl
                                       , lcdDisplayMode = g lcdDisplayMode
                                       }
                          return (bst{lcds = M.insert lcd ld' (lcds bst)}, (ld, ld'))
    -- Only talk to the hardware if a word actually changed
    when (oldC /= newC) $ sendCmd c (LCD_DISPLAYCONTROL newC)
    when (oldM /= newM) $ sendCmd c (LCD_ENTRYMODESET newM)
-- | Update the display control word (leaves the entry-mode word untouched)
updateDisplayControl :: String -> (Word8 -> Word8) -> LCD -> Arduino ()
updateDisplayControl what f = updateDisplayData what (f, id)
-- | Update the display mode word (leaves the display-control word untouched)
updateDisplayMode :: String -> (Word8 -> Word8) -> LCD -> Arduino ()
updateDisplayMode what g = updateDisplayData what (id, g)
-- | Various control masks for the Hitachi44780
data Hitachi44780Mask = LCD_BLINKON             -- ^ bit @0@ Controls whether cursor blinks
                      | LCD_CURSORON            -- ^ bit @1@ Controls whether cursor is on
                      | LCD_DISPLAYON           -- ^ bit @2@ Controls whether display is on
                      | LCD_ENTRYSHIFTINCREMENT -- ^ bit @0@ Controls left/right scroll
                      | LCD_ENTRYLEFT           -- ^ bit @1@ Controls left/right entry mode
-- | Convert the mask value to the bit no. Bit positions repeat because the
-- first three masks index the display-control word (used via
-- 'updateDisplayControl') while the last two index the entry-mode word
-- (used via 'updateDisplayMode').
maskBit :: Hitachi44780Mask -> Int
maskBit LCD_BLINKON = 0
maskBit LCD_CURSORON = 1
maskBit LCD_DISPLAYON = 2
maskBit LCD_ENTRYSHIFTINCREMENT = 0
maskBit LCD_ENTRYLEFT = 1
-- | Clear the bit selected by the mask
clearMask :: Hitachi44780Mask -> Word8 -> Word8
clearMask m w = clearBit w (maskBit m)
-- | Set the bit selected by the mask
setMask :: Hitachi44780Mask -> Word8 -> Word8
setMask m w = setBit w (maskBit m)
-- | Do not blink the cursor
lcdBlinkOff :: LCD -> Arduino ()
lcdBlinkOff = updateDisplayControl "Turning blinking off" (clearMask LCD_BLINKON)
-- | Blink the cursor
lcdBlinkOn :: LCD -> Arduino ()
lcdBlinkOn = updateDisplayControl "Turning blinking on" (setMask LCD_BLINKON)
-- | Hide the cursor. Note that a blinking cursor can not be hidden, you must first
-- turn off blinking.
lcdCursorOff :: LCD -> Arduino ()
lcdCursorOff = updateDisplayControl "Not showing the cursor" (clearMask LCD_CURSORON)
-- | Show the cursor
lcdCursorOn :: LCD -> Arduino ()
lcdCursorOn = updateDisplayControl "Showing the cursor" (setMask LCD_CURSORON)
-- | Turn the display off. Note that turning the display off does not mean you are
-- powering it down. It simply means that the characters will not be shown until
-- you turn it back on using 'lcdDisplayOn'. (Also, the contents will /not/ be
-- forgotten when you call this function.) Therefore, this function is useful
-- for temporarily hiding the display contents.
lcdDisplayOff :: LCD -> Arduino ()
lcdDisplayOff = updateDisplayControl "Turning display off" (clearMask LCD_DISPLAYON)
-- | Turn the display on
lcdDisplayOn :: LCD -> Arduino ()
lcdDisplayOn = updateDisplayControl "Turning display on" (setMask LCD_DISPLAYON)
-- | Set writing direction: Left to Right
lcdLeftToRight :: LCD -> Arduino ()
lcdLeftToRight = updateDisplayMode "Setting left-to-right entry mode" (setMask LCD_ENTRYLEFT)
-- | Set writing direction: Right to Left
lcdRightToLeft :: LCD -> Arduino ()
lcdRightToLeft = updateDisplayMode "Setting right-to-left entry mode" (clearMask LCD_ENTRYLEFT)
-- | Turn on auto-scrolling. In the context of the Hitachi44780 controller, this means that
-- each time a letter is added, all the text is moved one space to the left. This can be
-- confusing at first: It does /not/ mean that your strings will continuously scroll:
-- It just means that if you write a string whose length exceeds the column-count
-- of your LCD, then you'll see the tail-end of it. (Of course, this will create a scrolling
-- effect as the string is being printed character by character.)
--
-- Having said that, it is easy to program a scrolling string program: Simply write your string
-- by calling 'lcdWrite', and then use the 'lcdScrollDisplayLeft' and 'lcdScrollDisplayRight' functions
-- with appropriate delays to simulate the scrolling.
lcdAutoScrollOn :: LCD -> Arduino ()
lcdAutoScrollOn = updateDisplayMode "Setting auto-scroll ON" (setMask LCD_ENTRYSHIFTINCREMENT)
-- | Turn off auto-scrolling. See the comments for 'lcdAutoScrollOn' for details. When turned
-- off (which is the default), you will /not/ see the characters at the end of your strings that
-- do not fit into the display.
lcdAutoScrollOff :: LCD -> Arduino ()
lcdAutoScrollOff = updateDisplayMode "Setting auto-scroll OFF" (clearMask LCD_ENTRYSHIFTINCREMENT)
-- | Flash contents of the LCD screen
lcdFlash :: LCD
         -> Int -- ^ Flash count
         -> Int -- ^ Delay amount (in milli-seconds)
         -> Arduino ()
lcdFlash lcd n d = sequence_ $ concat $ replicate n [lcdDisplayOff lcd, delay d, lcdDisplayOn lcd, delay d]
-- | An abstract symbol type for user created symbols; wraps the character
-- code under which the glyph is stored on the controller.
newtype LCDSymbol = LCDSymbol Word8
-- | Create a custom symbol for later display. Note that controllers
-- have limited capability for such symbols, typically storing no more
-- than 8. The behavior is undefined if you create more symbols than your
-- LCD can handle.
--
-- The input is a simple description of the glyph, as a list of precisely 8
-- strings, each of which must have 5 characters. Any space character is
-- interpreted as a empty pixel, any non-space is a full pixel, corresponding
-- to the pixel in the 5x8 characters we have on the LCD. For instance, here's
-- a happy-face glyph you can use:
--
-- >
-- >   [ "     "
-- >   , "@   @"
-- >   , "     "
-- >   , "     "
-- >   , "@   @"
-- >   , " @@@ "
-- >   , "     "
-- >   , "     "
-- >   ]
-- >
lcdCreateSymbol :: LCD -> [String] -> Arduino LCDSymbol
lcdCreateSymbol lcd glyph
  | length glyph /= 8 || any ((/= 5) . length) glyph
  = die "hArduino: lcdCreateSymbol: Invalid glyph description: must be 8x5!" ("Received:" : glyph)
  | True
  = do bs <- gets boardState
       err <- gets bailOut
       -- Atomically grab the next free glyph slot and the controller
       (i, c) <- liftIO $ modifyMVar bs $ \bst ->
                    case lcd `M.lookup` lcds bst of
                      Nothing -> do err ("hArduino: Cannot locate " ++ show lcd) []
                                    exitFailure
                      Just ld@LCDData{lcdGlyphCount, lcdController}
                        -> do let ld' = ld { lcdGlyphCount = lcdGlyphCount + 1 }
                              return (bst{lcds = M.insert lcd ld' (lcds bst)}, (lcdGlyphCount, lcdController))
       sendCmd c (LCD_SETCGRAMADDR i)
       -- Each 5-character row becomes one byte: non-space pixels set the
       -- corresponding bit, rightmost character being bit 0
       let cvt :: String -> Word8
           cvt s = foldr (.|.) 0 [bit p | (ch, p) <- zip (reverse s) [0..], not (isSpace ch)]
       mapM_ (sendData c . cvt) glyph
       return $ LCDSymbol i
-- | Display a user created symbol on the LCD. (See 'lcdCreateSymbol' for details.)
lcdWriteSymbol :: LCD -> LCDSymbol -> Arduino ()
lcdWriteSymbol lcd (LCDSymbol i) = withLCD lcd ("Writing custom symbol " ++ show i ++ " to LCD") $ \c -> sendData c i
-- | Access an internally stored symbol, one that is not available via its ASCII equivalent. See
-- the Hitachi datasheet for possible values: <http://lcd-linux.sourceforge.net/pdfdocs/hd44780.pdf>, Table 4 on page 17.
--
-- For instance, to access the symbol right-arrow:
--
-- * Locate it in the above table: Right-arrow is at the second-to-last row, 7th character from left.
--
-- * Check the upper/higher bits as specified in the table: For Right-arrow, upper bits are @0111@ and the
-- lower bits are @1110@; which gives us the code @01111110@, or @0x7E@.
--
-- * So, right-arrow can be accessed by symbol code 'lcdInternalSymbol' @0x7E@, which will give us a 'LCDSymbol' value
-- that can be passed to the 'lcdWriteSymbol' function. The code would look like this: @lcdWriteSymbol (lcdInternalSymbol 0x7E)@.
lcdInternalSymbol :: Word8 -> LCDSymbol
lcdInternalSymbol = LCDSymbol
| null | https://raw.githubusercontent.com/LeventErkok/hArduino/b568087b51b75babb1dc90d3f7a6e7b3c8d134b9/System/Hardware/Arduino/Parts/LCD.hs | haskell | -----------------------------------------------------------------------------------------------
|
Module : System.Hardware.Arduino.Parts.LCD
License : BSD3
Maintainer :
Stability : experimental
source code: </>
-----------------------------------------------------------------------------------------------
* LCD types and registration
* Writing text on the LCD
* Moving the cursor
* Scrolling
* Display properties
* Accessing internal symbols,
Creating custom symbols
* Misc helpers
-------------------------------------------------------------------------------------
Low level interface, not available to the user
-------------------------------------------------------------------------------------
| Convert a command to a data-word
NB. LCD_DISPLAYMOVE (0x08) hard coded here
| Initialize the LCD. Follows the data sheet <-linux.sourceforge.net/pdfdocs/hd44780.pdf>,
| Get the controller associated with the LCD
| Send a command to the LCD controller
| By controlling the enable-pin, indicate to the controller that
the data is ready for it to process.
| Transmit data down to the LCD
Send down the remaining batch
| Helper function to simplify library programming, not exposed to the user.
-------------------------------------------------------------------------------------
High level interface, exposed to the user
-------------------------------------------------------------------------------------
| Register an LCD controller. When registration is complete, the LCD will be initialized so that:
* Set display ON (Use 'lcdDisplayOn' / 'lcdDisplayOff' to change.)
* Set cursor OFF (Use 'lcdCursorOn' / 'lcdCursorOff' to change.)
* Clear display (Use 'lcdClear' to clear, 'lcdWrite' to display text.)
* Set entry mode left to write (Use 'lcdLeftToRight' / 'lcdRightToLeft' to control.)
* Set autoscrolling OFF (Use 'lcdAutoScrollOff' / 'lcdAutoScrollOn' to control.)
* Put the cursor into home position (Use 'lcdSetCursor' or 'lcdHome' to move around.)
| Write a string on the LCD at the current cursor position
| Clear the LCD
give some time to make sure LCD is really cleared
| Send the cursor to home position
| Set the cursor location. The pair of arguments is the new column and row numbers
respectively:
is in line with what the standard Arduino programmers do, so we follow the same convention.)
* Counting starts at 0 (both for column and row no)
* If the new location is out-of-bounds of your LCD, we will put it the cursor to the closest
possible location on the LCD.
The magic row-offsets come from various web sources
I don't follow the logic in these numbers, but it seems to work
left/right depending on the tilt.
| Display characteristics helper, set the new control/mode and send
appropriate commands if anything changed
| Update the display control word
| Update the display mode word
^ bit @1@ Controls whether cursor is on
^ bit @1@ Controls left/right entry mode
| Convert the mask value to the bit no
| Clear by the mask
| Set by the mask
| Do not blink the cursor
| Blink the cursor
turn off blinking.
| Show the cursor
| Turn the display off. Note that turning the display off does not mean you are
powering it down. It simply means that the characters will not be shown until
forgotten when you call this function.) Therefore, this function is useful
for temporarily hiding the display contents.
| Turn the display on
| Set writing direction: Left to Right
| Set writing direction: Right to Left
It just means that if you write a string whose length exceeds the column-count
of your LCD, then you'll see the tail-end of it. (Of course, this will create a scrolling
effect as the string is being printed character by character.)
Having said that, it is easy to program a scrolling string program: Simply write your string
by calling 'lcdWrite', and then use the 'lcdScrollDisplayLeft' and 'lcdScrollDisplayRight' functions
with appropriate delays to simulate the scrolling.
| Turn off auto-scrolling. See the comments for 'lcdAutoScrollOn' for details. When turned
do not fit into the display.
| Flash contents of the LCD screen
^ Flash count
| An abstract symbol type for user created symbols
| Create a custom symbol for later display. Note that controllers
have limited capability for such symbols, typically storing no more
than 8. The behavior is undefined if you create more symbols than your
LCD can handle.
interpreted as a empty pixel, any non-space is a full pixel, corresponding
a happy-face glyph you can use:
>
> [ " "
> , "@ @"
> , " "
> , " "
> , "@ @"
> , " @@@ "
> , " "
> , " "
> ]
>
| Display a user created symbol on the LCD. (See 'lcdCreateSymbol' for details.)
| Access an internally stored symbol, one that is not available via its ASCII equivalent. See
For instance, to access the symbol right-arrow:
* Check the upper/higher bits as specified in the table: For Right-arrow, upper bits are @0111@ and the
* So, right-arrow can be accessed by symbol code 'lcdInternalSymbol' @0x7E@, which will give us a 'LCDSymbol' value | Copyright : ( c )
LCD ( Liquid Crystal Display ) parts supported by hArduino . The code
below has partly been implemented following the Arduino LiquidCrystal project
The Hitachi44780 data sheet is at : < -linux.sourceforge.net/pdfdocs/hd44780.pdf >
For an example program using this library , see " System . Hardware . Arduino . SamplePrograms . LCD " .
# LANGUAGE NamedFieldPuns #
# OPTIONS_GHC -Wno - incomplete - uni - patterns #
module System.Hardware.Arduino.Parts.LCD(
LCD, LCDController(..), lcdRegister
, lcdClear, lcdWrite
, lcdHome, lcdSetCursor
, lcdAutoScrollOn, lcdAutoScrollOff
, lcdScrollDisplayLeft, lcdScrollDisplayRight
, lcdLeftToRight, lcdRightToLeft
, lcdBlinkOn, lcdBlinkOff
, lcdCursorOn, lcdCursorOff
, lcdDisplayOn, lcdDisplayOff
, LCDSymbol, lcdInternalSymbol, lcdWriteSymbol
, lcdCreateSymbol
, lcdFlash
) where
import Control.Concurrent (modifyMVar, withMVar)
import Control.Monad (when)
import Control.Monad.State (gets, liftIO)
import Data.Bits (testBit, (.|.), (.&.), setBit, clearBit, shiftL, bit)
import Data.Char (ord, isSpace)
import Data.Maybe (fromMaybe)
import Data.Word (Word8)
import qualified Data.Map as M
import System.Hardware.Arduino.Data
import System.Hardware.Arduino.Firmata
import qualified System.Hardware.Arduino.Utils as U
import System.Exit (exitFailure)
| Commands understood by Hitachi
data Cmd = LCD_INITIALIZE
| LCD_INITIALIZE_END
| LCD_FUNCTIONSET
| LCD_DISPLAYCONTROL Word8
| LCD_CLEARDISPLAY
| LCD_ENTRYMODESET Word8
| LCD_RETURNHOME
| LCD_SETDDRAMADDR Word8
| LCD_CURSORSHIFT Word8
| LCD_SETCGRAMADDR Word8
getCmdVal :: LCDController -> Cmd -> Word8
getCmdVal Hitachi44780{lcdRows, dotMode5x10} = get
bit 3
| lcdRows > 1 = 0x08 :: Word8
| True = 0x00 :: Word8
bit 2
| dotMode5x10 = 0x04 :: Word8
| True = 0x00 :: Word8
displayFunction = multiLine .|. dotMode
get LCD_INITIALIZE = 0x33
get LCD_INITIALIZE_END = 0x32
get LCD_FUNCTIONSET = 0x20 .|. displayFunction
get (LCD_DISPLAYCONTROL w) = 0x08 .|. w
get LCD_CLEARDISPLAY = 0x01
get (LCD_ENTRYMODESET w) = 0x04 .|. w
get LCD_RETURNHOME = 0x02
get (LCD_SETDDRAMADDR w) = 0x80 .|. w
get (LCD_SETCGRAMADDR w) = 0x40 .|. w `shiftL` 3
page 46 ; figure 24 .
initLCD :: LCD -> LCDController -> Arduino ()
initLCD lcd c@Hitachi44780{lcdRS, lcdEN, lcdD4, lcdD5, lcdD6, lcdD7} = do
debug "Starting the LCD initialization sequence"
mapM_ (`setPinMode` OUTPUT) [lcdRS, lcdEN, lcdD4, lcdD5, lcdD6, lcdD7]
Wait for 50ms , data - sheet says at least 40ms for 2.7V version , so be safe
delay 50
sendCmd c LCD_INITIALIZE
delay 5
sendCmd c LCD_INITIALIZE_END
sendCmd c LCD_FUNCTIONSET
lcdCursorOff lcd
lcdBlinkOff lcd
lcdLeftToRight lcd
lcdAutoScrollOff lcd
lcdHome lcd
lcdClear lcd
lcdDisplayOn lcd
getController :: LCD -> Arduino LCDController
getController lcd = do
bs <- gets boardState
err <- gets bailOut
liftIO $ withMVar bs $ \bst -> case lcd `M.lookup` lcds bst of
Nothing -> do err ("hArduino: Cannot locate " ++ show lcd) []
exitFailure
Just ld -> return $ lcdController ld
sendCmd :: LCDController -> Cmd -> Arduino ()
sendCmd c = transmit False c . getCmdVal c
| Send 4 - bit data to the LCD controller
sendData :: LCDController -> Word8 -> Arduino ()
sendData lcd n = do debug $ "Transmitting LCD data: " ++ U.showByte n
transmit True lcd n
pulseEnable :: LCDController -> Arduino ()
pulseEnable Hitachi44780{lcdEN} = do
debug "Sending LCD pulseEnable"
digitalWrite lcdEN False
delay 1
digitalWrite lcdEN True
delay 1
digitalWrite lcdEN False
delay 1
transmit :: Bool -> LCDController -> Word8 -> Arduino ()
transmit mode c@Hitachi44780{lcdRS, lcdEN, lcdD4, lcdD5, lcdD6, lcdD7} val = do
digitalWrite lcdRS mode
digitalWrite lcdEN False
let [b7, b6, b5, b4, b3, b2, b1, b0] = [val `testBit` i | i <- [7, 6 .. 0]]
Send down the first 4 bits
digitalWrite lcdD4 b4
digitalWrite lcdD5 b5
digitalWrite lcdD6 b6
digitalWrite lcdD7 b7
pulseEnable c
digitalWrite lcdD4 b0
digitalWrite lcdD5 b1
digitalWrite lcdD6 b2
digitalWrite lcdD7 b3
pulseEnable c
withLCD :: LCD -> String -> (LCDController -> Arduino a) -> Arduino a
withLCD lcd what action = do
debug what
c <- getController lcd
action c
* Set blink OFF ( Use ' lcdBlinkOn ' / ' lcdBlinkOff ' to change . )
lcdRegister :: LCDController -> Arduino LCD
lcdRegister controller = do
bs <- gets boardState
lcd <- liftIO $ modifyMVar bs $ \bst -> do
let n = M.size $ lcds bst
ld = LCDData { lcdDisplayMode = 0
, lcdDisplayControl = 0
, lcdGlyphCount = 0
, lcdController = controller
}
return (bst {lcds = M.insert (LCD n) ld (lcds bst)}, LCD n)
case controller of
Hitachi44780{} -> initLCD lcd controller
return lcd
lcdWrite :: LCD -> String -> Arduino ()
lcdWrite lcd m = withLCD lcd ("Writing " ++ show m ++ " to LCD") $ \c -> mapM_ (sendData c) m'
where m' = map (\ch -> fromIntegral (ord ch) .&. 0xFF) m
lcdClear :: LCD -> Arduino ()
lcdClear lcd = withLCD lcd "Sending clearLCD" $ \c ->
do sendCmd c LCD_CLEARDISPLAY
lcdHome :: LCD -> Arduino ()
lcdHome lcd = withLCD lcd "Sending the cursor home" $ \c ->
do sendCmd c LCD_RETURNHOME
delay 2
* The first value is the column , the second is the row . ( This is counter - intuitive , but
lcdSetCursor :: LCD -> (Int, Int) -> Arduino ()
lcdSetCursor lcd (givenCol, givenRow) = withLCD lcd ("Sending the cursor to Row: " ++ show givenRow ++ " Col: " ++ show givenCol) set
where set c@Hitachi44780{lcdRows, lcdCols} = sendCmd c (LCD_SETDDRAMADDR offset)
where align :: Int -> Int -> Word8
align i m
| i < 0 = 0
| i >= m = fromIntegral $ m-1
| True = fromIntegral i
col = align givenCol lcdCols
row = align givenRow lcdRows
rowOffsets = [(0, 0), (1, 0x40), (2, 0x14), (3, 0x54)]
offset = col + fromMaybe 0x54 (row `lookup` rowOffsets)
| Scroll the display to the left by 1 character . Project idea : Using a tilt sensor , scroll the contents of the display
lcdScrollDisplayLeft :: LCD -> Arduino ()
lcdScrollDisplayLeft lcd = withLCD lcd "Scrolling display to the left by 1" $ \c -> sendCmd c (LCD_CURSORSHIFT lcdMoveLeft)
where lcdMoveLeft = 0x00
| Scroll the display to the right by 1 character
lcdScrollDisplayRight :: LCD -> Arduino ()
lcdScrollDisplayRight lcd = withLCD lcd "Scrolling display to the right by 1" $ \c -> sendCmd c (LCD_CURSORSHIFT lcdMoveRight)
where lcdMoveRight = 0x04
updateDisplayData :: String -> (Word8 -> Word8, Word8 -> Word8) -> LCD -> Arduino ()
updateDisplayData what (f, g) lcd = do
debug what
bs <- gets boardState
err <- gets bailOut
( LCDData {lcdDisplayControl = oldC, lcdDisplayMode = oldM}
, LCDData {lcdDisplayControl = newC, lcdDisplayMode = newM, lcdController = c})
<- liftIO $ modifyMVar bs $ \bst ->
case lcd `M.lookup` lcds bst of
Nothing -> do err ("hArduino: Cannot locate " ++ show lcd) []
exitFailure
Just ld@LCDData{lcdDisplayControl, lcdDisplayMode}
-> do let ld' = ld { lcdDisplayControl = f lcdDisplayControl
, lcdDisplayMode = g lcdDisplayMode
}
return (bst{lcds = M.insert lcd ld' (lcds bst)}, (ld, ld'))
when (oldC /= newC) $ sendCmd c (LCD_DISPLAYCONTROL newC)
when (oldM /= newM) $ sendCmd c (LCD_ENTRYMODESET newM)
updateDisplayControl :: String -> (Word8 -> Word8) -> LCD -> Arduino ()
updateDisplayControl what f = updateDisplayData what (f, id)
updateDisplayMode :: String -> (Word8 -> Word8) -> LCD -> Arduino ()
updateDisplayMode what g = updateDisplayData what (id, g)
| Various control masks for the Hitachi44780
^ bit @0@ Controls whether cursor blinks
^ bit Controls whether display is on
^ bit @0@ Controls left / right scroll
maskBit :: Hitachi44780Mask -> Int
maskBit LCD_BLINKON = 0
maskBit LCD_CURSORON = 1
maskBit LCD_DISPLAYON = 2
maskBit LCD_ENTRYSHIFTINCREMENT = 0
maskBit LCD_ENTRYLEFT = 1
clearMask :: Hitachi44780Mask -> Word8 -> Word8
clearMask m w = w `clearBit` maskBit m
setMask :: Hitachi44780Mask -> Word8 -> Word8
setMask m w = w `setBit` maskBit m
lcdBlinkOff :: LCD -> Arduino ()
lcdBlinkOff = updateDisplayControl "Turning blinking off" (clearMask LCD_BLINKON)
lcdBlinkOn :: LCD -> Arduino ()
lcdBlinkOn = updateDisplayControl "Turning blinking on" (setMask LCD_BLINKON)
| Hide the cursor . Note that a blinking cursor can not be hidden , you must first
lcdCursorOff :: LCD -> Arduino ()
lcdCursorOff = updateDisplayControl "Not showing the cursor" (clearMask LCD_CURSORON)
lcdCursorOn :: LCD -> Arduino ()
lcdCursorOn = updateDisplayControl "Showing the cursor" (setMask LCD_CURSORON)
you turn it back on using ' lcdDisplayOn ' . ( Also , the contents will /not/ be
lcdDisplayOff :: LCD -> Arduino ()
lcdDisplayOff = updateDisplayControl "Turning display off" (clearMask LCD_DISPLAYON)
lcdDisplayOn :: LCD -> Arduino ()
lcdDisplayOn = updateDisplayControl "Turning display on" (setMask LCD_DISPLAYON)
lcdLeftToRight :: LCD -> Arduino ()
lcdLeftToRight = updateDisplayMode "Setting left-to-right entry mode" (setMask LCD_ENTRYLEFT)
lcdRightToLeft :: LCD -> Arduino ()
lcdRightToLeft = updateDisplayMode "Setting right-to-left entry mode" (clearMask LCD_ENTRYLEFT)
| Turn on auto - scrolling . In the context of the Hitachi44780 controller , this means that
each time a letter is added , all the text is moved one space to the left . This can be
confusing at first : It does /not/ mean that your strings will continuously scroll :
lcdAutoScrollOn :: LCD -> Arduino ()
lcdAutoScrollOn = updateDisplayMode "Setting auto-scroll ON" (setMask LCD_ENTRYSHIFTINCREMENT)
off ( which is the default ) , you will /not/ see the characters at the end of your strings that
lcdAutoScrollOff :: LCD -> Arduino ()
lcdAutoScrollOff = updateDisplayMode "Setting auto-scroll OFF" (clearMask LCD_ENTRYSHIFTINCREMENT)
lcdFlash :: LCD
^ Delay amount ( in milli - seconds )
-> Arduino ()
lcdFlash lcd n d = sequence_ $ concat $ replicate n [lcdDisplayOff lcd, delay d, lcdDisplayOn lcd, delay d]
newtype LCDSymbol = LCDSymbol Word8
The input is a simple description of the glyph , as a list of precisely 8
strings , each of which must have 5 characters . Any space character is
to the pixel in the 5x8 characters we have on the LCD . For instance , here 's
lcdCreateSymbol :: LCD -> [String] -> Arduino LCDSymbol
lcdCreateSymbol lcd glyph
| length glyph /= 8 || any ((/= 5) . length) glyph
= die "hArduino: lcdCreateSymbol: Invalid glyph description: must be 8x5!" ("Received:" : glyph)
| True
= do bs <- gets boardState
err <- gets bailOut
(i, c) <- liftIO $ modifyMVar bs $ \bst ->
case lcd `M.lookup` lcds bst of
Nothing -> do err ("hArduino: Cannot locate " ++ show lcd) []
exitFailure
Just ld@LCDData{lcdGlyphCount, lcdController}
-> do let ld' = ld { lcdGlyphCount = lcdGlyphCount + 1 }
return (bst{lcds = M.insert lcd ld' (lcds bst)}, (lcdGlyphCount, lcdController))
sendCmd c (LCD_SETCGRAMADDR i)
let cvt :: String -> Word8
cvt s = foldr (.|.) 0 [bit p | (ch, p) <- zip (reverse s) [0..], not (isSpace ch)]
mapM_ (sendData c . cvt) glyph
return $ LCDSymbol i
lcdWriteSymbol :: LCD -> LCDSymbol -> Arduino ()
lcdWriteSymbol lcd (LCDSymbol i) = withLCD lcd ("Writing custom symbol " ++ show i ++ " to LCD") $ \c -> sendData c i
the Hitachi datasheet for possible values : < -linux.sourceforge.net/pdfdocs/hd44780.pdf > , Table 4 on page 17 .
* Locate it in the above table : Right - arrow is at the second - to - last row , 7th character from left .
lower bits are @1110@ ; which gives us the code @01111110@ , or @0x7E@.
that can be passed to the ' lcdWriteSymbol ' function . The code would look like this : @lcdWriteSymbol ( lcdInternalSymbol 0x7E)@.
lcdInternalSymbol :: Word8 -> LCDSymbol
lcdInternalSymbol = LCDSymbol
|
4f8d5f2874e754e1a963f34f4ab0b58728988a48ffdb9c0cc0e73120516f4227 | yoshihiro503/ocamltter | conf.ml | let oauth_signature_method = `Hmac_sha1
let oauth_callback = Some None (* oob *)
let host = "www.flickr.com"
let request_path = "/services/oauth/request_token"
let access_path = "/services/oauth/access_token"
let authorize_url = "="
let app = App.app
| null | https://raw.githubusercontent.com/yoshihiro503/ocamltter/be7ac68c8076bc2ca8ccec216d6647c94ec9f814/flickr/conf.ml | ocaml | oob | let oauth_signature_method = `Hmac_sha1
let host = "www.flickr.com"
let request_path = "/services/oauth/request_token"
let access_path = "/services/oauth/access_token"
let authorize_url = "="
let app = App.app
|
361d28a23bc95753a5407b25cfbefe83fa7888e8b0d7075094877294db14b89f | chetmurthy/pa_ppx | surveil.ml | (* camlp5r *)
(* surveil.ml,v *)
Copyright ( c ) INRIA 2007 - 2017
#load "q_MLast.cmo";
#load "pa_extfun.cmo";
open Asttools;
open MLast;
open Pa_ppx_utils ;
open Pa_ppx_base ;
open Pa_passthru ;
open Ppxutil ;
open Pa_deriving ;
value debug = Pa_passthru.debug ;
(** Attributes and extensions in deriving come in three forms:

    @opaque, %printer -- short-form
    @show.opaque, %show.printer -- medium-form
    @deriving.show.opaque, @deriving.show.printer -- long-form

    Surveil computes which form to demand of @@deriving plugins.  The
    rule is that if any extension or attribute registered to more
    than one plugin is used in short-form, then ALL plugins must use
    medium-form.

    It is ALWAYS permitted to use long-form attributes/extensions.
*)
(** Surveil scans the entire file, looking for:

    (a) @@deriving -- and records every plugin that gets invoked
    (b) %extension -- and records every one
    (c) @attr -- and records every one, and specifically for those that appear within
        an @@deriving, it records which @@deriving they were beneath

    After scanning, Surveil computes the following:

    (1) if there are any plugins invoked, but not loaded -- and for these,
        if they're NOT invoked with @optional=true in which case an error is raised
    (2) if there are any invoked short-form extensions that are registered to more than
        one plugin: again this is an error
*)
(* Per-file state gathered by the surveil pass, shared with the pa_deriving
   rewriting pass via the context's scratch-data. *)
module DerivingConfig = struct
(* Add [s] to the list ref [r] unless already present (set semantics). *)
value addset r s =
  if not (List.mem s r.val) then Std.push r s else ()
;
value addsetl r l = List.iter (addset r) l ;
(* Which form of attribute/extension names the file is allowed to use:
   Short (@opaque), Medium (@show.opaque), Long (@deriving.show.opaque). *)
type form_t = [ Short | Medium | Long ] ;
type t =
  {
    all_plugins : ref (list string)
  ; all_attributes : ref (list string)
  ; current_plugins : ref (list string)
  ; current_attributes : ref (list string)
  ; allowed_form : ref (option (Ploc.t * form_t))
  }
;
value mk () = {
  all_plugins = ref []
; all_attributes = ref []
; current_plugins = ref []
; current_attributes = ref []
; allowed_form = ref None
} ;
type scratchdata_t += [ Pa_deriving of t ] ;
(* Fetch the surveil state stashed in the context under "deriving". *)
value get arg =
  match Ctxt.refscratchdata arg "deriving" with [
    Pa_deriving dc -> dc
  | _ -> assert False
  ]
;
value init arg =
  Ctxt.init_refscratchdata arg "deriving" (Pa_deriving (mk()))
;
(* A plugin reference is legitimate when the plugin is registered and every
   option passed is one the plugin declares, OR when it is unregistered but
   explicitly marked optional=True. *)
value legitimate_plugin_reference dc (na, options) =
  match Registry.get na with [
    pi ->
      List.for_all (fun (oname,_) -> List.mem oname pi.PI.options) options
  | exception Not_found ->
      List.exists (fun [ ("optional", <:expr< True >>) -> True | _ -> False ]) options
  ]
;
(* Called at the start of a @@deriving-decorated decl: checks every plugin
   reference is well-formed, then records (and returns) the registered ones. *)
value start_decl loc dc plugins = do {
  assert ([] = dc.current_plugins.val) ;
  assert ([] = dc.current_attributes.val) ;
  List.iter (fun ((na, _) as r) ->
    if not (legitimate_plugin_reference dc r) then
      Ploc.raise loc (Failure (Printf.sprintf "ill-formed plugin reference %s" na))
    else ()) plugins ;
  let plugins = Std.filter (fun (na,_) -> Registry.mem na) plugins in
  dc.current_plugins.val := List.map fst plugins ;
  plugins
}
;
(* Called at the end of the decl: resets the "current" state and returns the
   attributes that were seen under it. *)
value end_decl dc = do {
  let attributes = dc.current_attributes.val in
  dc.current_plugins.val := [] ;
  dc.current_attributes.val := [] ;
  attributes
}
;
(* Fix the allowed form for the whole file; raises if a different form was
   already established at some earlier location. *)
value set_form loc dc f =
  match dc.allowed_form.val with [
    None ->
      dc.allowed_form.val := Some (loc, f)
  | Some (loc', f') when f = f' -> ()
  | Some (loc', f') ->
      Ploc.raise loc (Failure (Printf.sprintf "DC.set_form: form of attributes/extensions already set; trying to set it to different value: previously set at %s" (Ploc.string_of_location loc')))
  ]
;
value get_form dc =
  match dc.allowed_form.val with [ None -> Short | Some (_, f) -> f ] ;
(* Debug pretty-printer for the state. *)
value (dump : Fmt.t t) ofmt dc =
  let ssl = Fmt.(list ~{sep=semi} string) in
  let ppform ppf = fun [
    (_, Short) -> Fmt.(const string "Short" ppf ())
  | (_, Medium) -> Fmt.(const string "Medium" ppf ())
  | (_, Long) -> Fmt.(const string "Lon" ppf ()) ] in
  Fmt.(pf ofmt "<dc< {@[ @[all_plugins = [ %a ];@]@, @[all_attributes = [ %a ];@]@, @[current_plugins = [ %a ]@] @[current_attributes = [ %a ];@]@, @[allowed_form = %a@] } >>@.%!"
    ssl dc.all_plugins.val
    ssl dc.all_attributes.val
    ssl dc.current_plugins.val
    ssl dc.current_attributes.val
    (option ppform) dc.allowed_form.val
  )
;
(* If [attrname] is an algebraic attribute of plugin [piname], return the
   name the file must use for it under the currently-allowed form. *)
value allowed_attribute dc piname attrname = do {
  if not (List.mem attrname Registry.((get piname).alg_attributes)) then
    None
  else
    match dc.allowed_form.val with [
      (None | Some (_, Short)) -> Some attrname
    | Some (_, Medium) -> Some (Printf.sprintf "%s.%s" piname attrname)
    | Some (_, Long) -> Some (Printf.sprintf "deriving.%s.%s" piname attrname)
    ]
}
;
(* True when [attr]'s id is exactly the allowed spelling of [attrname]. *)
value is_allowed_attribute dc piname attrname attr =
  let wantid = allowed_attribute dc piname attrname in
  wantid = Some (attr_id attr)
;
end ;
module DC = DerivingConfig ;
value implem arg fallback x = do {
DC.init arg ;
Some (fallback arg x)
}
;
value interf arg fallback x = do {
DC.init arg ;
Some (fallback arg x)
}
;
value top_phrase arg fallback x = do {
DC.init arg ;
Some (fallback arg x)
}
;
value use_file arg fallback x = do {
DC.init arg ;
Some (fallback arg x)
}
;
value add_current_attribute arg id =
let dc = DC.get arg in
DC.addset dc.current_attributes id
;
value add_deriving_attributes ctxt attrs = do {
let dc = DC.get ctxt in
let attrs = Std.filter is_deriving_attribute attrs in
let plugins = extract_deriving0 (List.hd attrs) in
if plugins = [] then failwith "Surveil.str_item: @@deriving with no plugins"
else DC.addsetl dc.all_plugins (List.map fst plugins) ;
plugins
}
;
value sig_item arg fallback = fun [
<:sig_item:< type $_flag:_$ $list:tdl$ >> as z -> do {
let td = fst (sep_last tdl) in
let plugins = add_deriving_attributes arg (uv td.tdAttributes) in
let dc = DC.get arg in
let plugins = DC.start_decl loc dc plugins in
let rv = fallback arg z in
let attributes = DC.end_decl dc in
let reg_short_form_attributes =
plugins
|> List.map fst
|> List.map Registry.get
|> List.map PI.attributes
|> List.concat in
let reg_short_form_duplicated = duplicated reg_short_form_attributes in
let reg_medium_form_attributes =
plugins
|> List.map fst
|> List.map Registry.get
|> List.map PI.medium_form_attributes
|> List.concat in
let reg_long_form_attributes =
plugins
|> List.map fst
|> List.map Registry.get
|> List.map PI.long_form_attributes
|> List.concat in
let short_form_attributes = Std.intersect attributes reg_short_form_attributes in
let medium_form_attributes = Std.intersect attributes reg_medium_form_attributes in
let long_form_attributes = Std.intersect attributes reg_long_form_attributes in
if not (match (short_form_attributes<>[], medium_form_attributes<>[], long_form_attributes<>[]) with [
(True, False, False) -> True
| (False, True, False) -> True
| (False, False, True) -> True
| (False, False, False) -> True
| _ -> False
]) then Ploc.raise loc (Failure "mixed short/medium/long-form attributes")
else () ;
if short_form_attributes <> [] && reg_short_form_duplicated then
Ploc.raise loc (Failure "short-form attributes used, but some apply to more than one plugin")
else () ;
if [] <> long_form_attributes then DC.(set_form loc dc Long)
else if [] <> medium_form_attributes then DC.(set_form loc dc Medium)
else if [] <> short_form_attributes then DC.(set_form loc dc Short)
else () ;
rv
}
| _ -> assert False
]
;
value str_item arg fallback = fun [
<:str_item:< type $_flag:_$ $list:tdl$ >> as z -> do {
let td = fst (sep_last tdl) in
let plugins = add_deriving_attributes arg (uv td.tdAttributes) in
let dc = DC.get arg in
let plugins = DC.start_decl loc dc plugins in
let rv = fallback arg z in
let attributes = DC.end_decl dc in
let reg_short_form_attributes =
plugins
|> List.map fst
|> List.map Registry.get
|> List.map PI.attributes
|> List.concat in
let reg_medium_form_attributes =
plugins
|> List.map fst
|> List.map Registry.get
|> List.map PI.medium_form_attributes
|> List.concat in
let reg_long_form_attributes =
plugins
|> List.map fst
|> List.map Registry.get
|> List.map PI.long_form_attributes
|> List.concat in
let used_short_form_attributes = Std.filter (fun s -> List.mem s attributes) reg_short_form_attributes in
let used_medium_form_attributes = Std.filter (fun s -> List.mem s attributes) reg_medium_form_attributes in
let used_long_form_attributes = Std.filter (fun s -> List.mem s attributes) reg_long_form_attributes in
if not (match (used_short_form_attributes<>[],
used_medium_form_attributes<>[],
used_long_form_attributes<>[]) with [
(True, False, False) -> True
| (False, True, False) -> True
| (False, False, True) -> True
| (False, False, False) -> True
| _ -> False
]) then Ploc.raise loc (Failure "mixed short/medium/long-form attributes")
else () ;
if duplicated used_short_form_attributes then
Ploc.raise loc (Failure "short-form attributes used, but some apply to more than one plugin")
else () ;
if [] <> used_long_form_attributes then DC.(set_form loc dc Long)
else if [] <> used_medium_form_attributes then DC.(set_form loc dc Medium)
else if [] <> used_short_form_attributes then DC.(set_form loc dc Short)
else () ;
rv
}
| _ -> assert False
]
;
value install () =
let ef = EF.mk() in
let ef = EF.{ (ef) with
str_item = extfun ef.str_item with [
<:str_item:< type $_flag:_$ $list:tdl$ >> as z
when 1 = count is_deriving_attribute (uv (fst (sep_last tdl)).tdAttributes) ->
fun arg fallback -> Some (str_item arg fallback z)
] } in
let ef = EF.{ (ef) with
sig_item = extfun ef.sig_item with [
<:sig_item:< type $_flag:_$ $list:tdl$ >> as z
when 1 = count is_deriving_attribute (uv (fst (sep_last tdl)).tdAttributes) ->
fun arg fallback -> Some (sig_item arg fallback z)
] } in
let ef = EF.{ (ef) with
ctyp = extfun ef.ctyp with [
<:ctyp:< $_$ [@ $_attribute:attr$ ] >> ->
fun arg _ -> do {
add_current_attribute arg (attr_id attr) ;
None
}
| <:ctyp:< [ $list:l$ ] >> ->
fun arg _ -> do {
List.iter (fun [
(loc, cid, tyl, <:vala< None >>, attrs) ->
List.iter (fun a -> add_current_attribute arg (attr_id a)) (uv attrs)
| _ -> ()
]) l ;
None
}
] } in
let ef = EF.{ (ef) with
implem = extfun ef.implem with [
z ->
fun arg fallback ->
let rv = implem arg fallback z in do {
if debug.val then Fmt.(DC.dump stderr (DC.get arg)) else () ;
rv }
] } in
let ef = EF.{ (ef) with
interf = extfun ef.interf with [
z ->
fun arg fallback ->
let rv = interf arg fallback z in do {
if debug.val then Fmt.(DC.dump stderr (DC.get arg)) else () ;
rv }
] } in
let ef = EF.{ (ef) with
top_phrase = extfun ef.top_phrase with [
z ->
fun arg fallback ->
let rv = top_phrase arg fallback z in do {
if debug.val then Fmt.(DC.dump stderr (DC.get arg)) else () ;
rv }
] } in
let ef = EF.{ (ef) with
use_file = extfun ef.use_file with [
z ->
fun arg fallback ->
let rv = use_file arg fallback z in do {
if debug.val then Fmt.(DC.dump stderr (DC.get arg)) else () ;
rv }
] } in
Pa_passthru.(install { name = "surveil" ; ef = ef ; pass = None ; before = [] ; after = ["pa_import"] })
;
| null | https://raw.githubusercontent.com/chetmurthy/pa_ppx/7c662fcf4897c978ae8a5ea230af0e8b2fa5858b/pa_deriving/surveil.ml | ocaml | camlp5r
surveil.ml,v | Copyright ( c ) INRIA 2007 - 2017
#load "q_MLast.cmo";
#load "pa_extfun.cmo";
open Asttools;
open MLast;
open Pa_ppx_utils ;
open Pa_ppx_base ;
open Pa_passthru ;
open Ppxutil ;
open Pa_deriving ;
value debug = Pa_passthru.debug ;
* Attributes and extensions in deriving come in three forms :
@opaque , % printer -- short - form
@show.opaque , % show.printer -- medium - form
@deriving.show.opaque , @deriving.show.printer -- long - form
computes which form to demand of @@deriving plugins . The
rule is that if any extension or attribute registered to more
than one plugin is used in short - form , then ALL plugins must use
medium - form .
It is ALWAYS permitted to use long - form attributes / extensions .
@opaque, %printer -- short-form
@show.opaque, %show.printer -- medium-form
@deriving.show.opaque, @deriving.show.printer -- long-form
Surveil computes which form to demand of @@deriving plugins. The
rule is that if any extension or attribute registered to more
than one plugin is used in short-form, then ALL plugins must use
medium-form.
It is ALWAYS permitted to use long-form attributes/extensions.
*)
* scans the entire file , looking for :
( a ) @@driving -- and records eery plugin that gets invoked
( b ) % extension -- and records every one
( c ) @attr -- and records every one , and specifically for those that appear within
an @@deriving , it records which @@deriving they were beneath
After scanning , computes the following :
( 1 ) if there are any plugins invoked , but not loaded -- and for these ,
if they 're NOT invoked with @optional = true in which case an error is raised
( 2 ) if there are any invoked short - form extensions that are registered to more than
one plugin : again this is an error
(a) @@driving -- and records eery plugin that gets invoked
(b) %extension -- and records every one
(c) @attr -- and records every one, and specifically for those that appear within
an @@deriving, it records which @@deriving they were beneath
After scanning, Surveil computes the following:
(1) if there are any plugins invoked, but not loaded -- and for these,
if they're NOT invoked with @optional=true in which case an error is raised
(2) if there are any invoked short-form extensions that are registered to more than
one plugin: again this is an error
*)
module DerivingConfig = struct
value addset r s =
if not (List.mem s r.val) then Std.push r s else ()
;
value addsetl r l = List.iter (addset r) l ;
type form_t = [ Short | Medium | Long ] ;
type t =
{
all_plugins : ref (list string)
; all_attributes : ref (list string)
; current_plugins : ref (list string)
; current_attributes : ref (list string)
; allowed_form : ref (option (Ploc.t * form_t))
}
;
value mk () = {
all_plugins = ref []
; all_attributes = ref []
; current_plugins = ref []
; current_attributes = ref []
; allowed_form = ref None
} ;
type scratchdata_t += [ Pa_deriving of t ] ;
value get arg =
match Ctxt.refscratchdata arg "deriving" with [
Pa_deriving dc -> dc
| _ -> assert False
]
;
value init arg =
Ctxt.init_refscratchdata arg "deriving" (Pa_deriving (mk()))
;
value legitimate_plugin_reference dc (na, options) =
match Registry.get na with [
pi ->
List.for_all (fun (oname,_) -> List.mem oname pi.PI.options) options
| exception Not_found ->
List.exists (fun [ ("optional", <:expr< True >>) -> True | _ -> False ]) options
]
;
value start_decl loc dc plugins = do {
assert ([] = dc.current_plugins.val) ;
assert ([] = dc.current_attributes.val) ;
List.iter (fun ((na, _) as r) ->
if not (legitimate_plugin_reference dc r) then
Ploc.raise loc (Failure (Printf.sprintf "ill-formed plugin reference %s" na))
else ()) plugins ;
let plugins = Std.filter (fun (na,_) -> Registry.mem na) plugins in
dc.current_plugins.val := List.map fst plugins ;
plugins
}
;
value end_decl dc = do {
let attributes = dc.current_attributes.val in
dc.current_plugins.val := [] ;
dc.current_attributes.val := [] ;
attributes
}
;
value set_form loc dc f =
match dc.allowed_form.val with [
None ->
dc.allowed_form.val := Some (loc, f)
| Some (loc', f') when f = f' -> ()
| Some (loc', f') ->
Ploc.raise loc (Failure (Printf.sprintf "DC.set_form: form of attributes/extensions already set; trying to set it to different value: previously set at %s" (Ploc.string_of_location loc')))
]
;
value get_form dc =
match dc.allowed_form.val with [ None -> Short | Some (_, f) -> f ] ;
value (dump : Fmt.t t) ofmt dc =
let ssl = Fmt.(list ~{sep=semi} string) in
let ppform ppf = fun [
(_, Short) -> Fmt.(const string "Short" ppf ())
| (_, Medium) -> Fmt.(const string "Medium" ppf ())
| (_, Long) -> Fmt.(const string "Lon" ppf ()) ] in
Fmt.(pf ofmt "<dc< {@[ @[all_plugins = [ %a ];@]@, @[all_attributes = [ %a ];@]@, @[current_plugins = [ %a ]@] @[current_attributes = [ %a ];@]@, @[allowed_form = %a@] } >>@.%!"
ssl dc.all_plugins.val
ssl dc.all_attributes.val
ssl dc.current_plugins.val
ssl dc.current_attributes.val
(option ppform) dc.allowed_form.val
)
;
value allowed_attribute dc piname attrname = do {
if not (List.mem attrname Registry.((get piname).alg_attributes)) then
None
else
match dc.allowed_form.val with [
(None | Some (_, Short)) -> Some attrname
| Some (_, Medium) -> Some (Printf.sprintf "%s.%s" piname attrname)
| Some (_, Long) -> Some (Printf.sprintf "deriving.%s.%s" piname attrname)
]
}
;
value is_allowed_attribute dc piname attrname attr =
let wantid = allowed_attribute dc piname attrname in
wantid = Some (attr_id attr)
;
end ;
module DC = DerivingConfig ;
value implem arg fallback x = do {
DC.init arg ;
Some (fallback arg x)
}
;
value interf arg fallback x = do {
DC.init arg ;
Some (fallback arg x)
}
;
value top_phrase arg fallback x = do {
DC.init arg ;
Some (fallback arg x)
}
;
value use_file arg fallback x = do {
DC.init arg ;
Some (fallback arg x)
}
;
value add_current_attribute arg id =
let dc = DC.get arg in
DC.addset dc.current_attributes id
;
value add_deriving_attributes ctxt attrs = do {
let dc = DC.get ctxt in
let attrs = Std.filter is_deriving_attribute attrs in
let plugins = extract_deriving0 (List.hd attrs) in
if plugins = [] then failwith "Surveil.str_item: @@deriving with no plugins"
else DC.addsetl dc.all_plugins (List.map fst plugins) ;
plugins
}
;
value sig_item arg fallback = fun [
<:sig_item:< type $_flag:_$ $list:tdl$ >> as z -> do {
let td = fst (sep_last tdl) in
let plugins = add_deriving_attributes arg (uv td.tdAttributes) in
let dc = DC.get arg in
let plugins = DC.start_decl loc dc plugins in
let rv = fallback arg z in
let attributes = DC.end_decl dc in
let reg_short_form_attributes =
plugins
|> List.map fst
|> List.map Registry.get
|> List.map PI.attributes
|> List.concat in
let reg_short_form_duplicated = duplicated reg_short_form_attributes in
let reg_medium_form_attributes =
plugins
|> List.map fst
|> List.map Registry.get
|> List.map PI.medium_form_attributes
|> List.concat in
let reg_long_form_attributes =
plugins
|> List.map fst
|> List.map Registry.get
|> List.map PI.long_form_attributes
|> List.concat in
let short_form_attributes = Std.intersect attributes reg_short_form_attributes in
let medium_form_attributes = Std.intersect attributes reg_medium_form_attributes in
let long_form_attributes = Std.intersect attributes reg_long_form_attributes in
if not (match (short_form_attributes<>[], medium_form_attributes<>[], long_form_attributes<>[]) with [
(True, False, False) -> True
| (False, True, False) -> True
| (False, False, True) -> True
| (False, False, False) -> True
| _ -> False
]) then Ploc.raise loc (Failure "mixed short/medium/long-form attributes")
else () ;
if short_form_attributes <> [] && reg_short_form_duplicated then
Ploc.raise loc (Failure "short-form attributes used, but some apply to more than one plugin")
else () ;
if [] <> long_form_attributes then DC.(set_form loc dc Long)
else if [] <> medium_form_attributes then DC.(set_form loc dc Medium)
else if [] <> short_form_attributes then DC.(set_form loc dc Short)
else () ;
rv
}
| _ -> assert False
]
;
value str_item arg fallback = fun [
<:str_item:< type $_flag:_$ $list:tdl$ >> as z -> do {
let td = fst (sep_last tdl) in
let plugins = add_deriving_attributes arg (uv td.tdAttributes) in
let dc = DC.get arg in
let plugins = DC.start_decl loc dc plugins in
let rv = fallback arg z in
let attributes = DC.end_decl dc in
let reg_short_form_attributes =
plugins
|> List.map fst
|> List.map Registry.get
|> List.map PI.attributes
|> List.concat in
let reg_medium_form_attributes =
plugins
|> List.map fst
|> List.map Registry.get
|> List.map PI.medium_form_attributes
|> List.concat in
let reg_long_form_attributes =
plugins
|> List.map fst
|> List.map Registry.get
|> List.map PI.long_form_attributes
|> List.concat in
let used_short_form_attributes = Std.filter (fun s -> List.mem s attributes) reg_short_form_attributes in
let used_medium_form_attributes = Std.filter (fun s -> List.mem s attributes) reg_medium_form_attributes in
let used_long_form_attributes = Std.filter (fun s -> List.mem s attributes) reg_long_form_attributes in
if not (match (used_short_form_attributes<>[],
used_medium_form_attributes<>[],
used_long_form_attributes<>[]) with [
(True, False, False) -> True
| (False, True, False) -> True
| (False, False, True) -> True
| (False, False, False) -> True
| _ -> False
]) then Ploc.raise loc (Failure "mixed short/medium/long-form attributes")
else () ;
if duplicated used_short_form_attributes then
Ploc.raise loc (Failure "short-form attributes used, but some apply to more than one plugin")
else () ;
if [] <> used_long_form_attributes then DC.(set_form loc dc Long)
else if [] <> used_medium_form_attributes then DC.(set_form loc dc Medium)
else if [] <> used_short_form_attributes then DC.(set_form loc dc Short)
else () ;
rv
}
| _ -> assert False
]
;
value install () =
let ef = EF.mk() in
let ef = EF.{ (ef) with
str_item = extfun ef.str_item with [
<:str_item:< type $_flag:_$ $list:tdl$ >> as z
when 1 = count is_deriving_attribute (uv (fst (sep_last tdl)).tdAttributes) ->
fun arg fallback -> Some (str_item arg fallback z)
] } in
let ef = EF.{ (ef) with
sig_item = extfun ef.sig_item with [
<:sig_item:< type $_flag:_$ $list:tdl$ >> as z
when 1 = count is_deriving_attribute (uv (fst (sep_last tdl)).tdAttributes) ->
fun arg fallback -> Some (sig_item arg fallback z)
] } in
let ef = EF.{ (ef) with
ctyp = extfun ef.ctyp with [
<:ctyp:< $_$ [@ $_attribute:attr$ ] >> ->
fun arg _ -> do {
add_current_attribute arg (attr_id attr) ;
None
}
| <:ctyp:< [ $list:l$ ] >> ->
fun arg _ -> do {
List.iter (fun [
(loc, cid, tyl, <:vala< None >>, attrs) ->
List.iter (fun a -> add_current_attribute arg (attr_id a)) (uv attrs)
| _ -> ()
]) l ;
None
}
] } in
let ef = EF.{ (ef) with
implem = extfun ef.implem with [
z ->
fun arg fallback ->
let rv = implem arg fallback z in do {
if debug.val then Fmt.(DC.dump stderr (DC.get arg)) else () ;
rv }
] } in
let ef = EF.{ (ef) with
interf = extfun ef.interf with [
z ->
fun arg fallback ->
let rv = interf arg fallback z in do {
if debug.val then Fmt.(DC.dump stderr (DC.get arg)) else () ;
rv }
] } in
let ef = EF.{ (ef) with
top_phrase = extfun ef.top_phrase with [
z ->
fun arg fallback ->
let rv = top_phrase arg fallback z in do {
if debug.val then Fmt.(DC.dump stderr (DC.get arg)) else () ;
rv }
] } in
let ef = EF.{ (ef) with
use_file = extfun ef.use_file with [
z ->
fun arg fallback ->
let rv = use_file arg fallback z in do {
if debug.val then Fmt.(DC.dump stderr (DC.get arg)) else () ;
rv }
] } in
Pa_passthru.(install { name = "surveil" ; ef = ef ; pass = None ; before = [] ; after = ["pa_import"] })
;
|
dbb4459921d988bbf0cff16b3234b92b1939b7dbb9f22bc8c6032cfeea662b8d | viercc/kitchen-sink-hs | balanced-tree.hs | #!/usr/bin/env cabal
{- cabal:
build-depends: base
-}
module Main where
main :: IO ()
main = ppr $ buildBalanced [1..23 :: Int]
data Tree a = Node (Tree a) (Tree a) | Leaf a | Empty
deriving (Show, Read, Eq, Ord)
ppr :: Show a => Tree a -> IO ()
ppr = putStrLn . prettyTree
prettyTree :: Show a => Tree a -> String
prettyTree = go 0
where
go indent t = case t of
Empty -> "Empty"
Leaf a -> show a
Node t1 t2 -> "* " ++ go (indent+2) t1 ++ "\n" ++
replicate (indent+2) ' ' ++ go (indent+2) t2
buildBalanced :: [a] -> Tree a
buildBalanced = fromPart . toPart
" Part " is a list of complete binary tree ( 2^d elements ) with following
constraint :
* The first element is depth 0 tree ( 2 ^ 0 = 1 element ) , which is Leaf
* Depth of i - th element is i ( full node ) or i-1 ( half node )
Example :
# of elements | List of depth | Is full ?
1 | [ 0 ] | [ 1 ]
2 | [ 0,0 ] | [ 1,0 ]
3 | [ 0,1 ] | [ 1,1 ]
4 | [ 0,0,1 ] | [ 1,0,0 ]
5 | [ 0,1,1 ] | [ 1,1,0 ]
6 | [ 0,0,2 ] | [ 1,0,1 ]
7 | [ 0,1,2 ] | [ 1,1,1 ]
8 | [ 0,0,1,2 ] | [ 1,0,0,0 ]
9 | [ 0,1,1,2 ] | [ 1,1,0,0 ]
10 | [ 0,0,2,2 ] | [ 1,0,1,0 ]
11 | [ 0,1,2,2 ] | [ 1,1,1,0 ]
12 | [ 0,0,1,3 ] | [ 1,0,0,1 ]
"Part" is a list of complete binary tree (2^d elements) with following
constraint:
* The first element is depth 0 tree (2^0 = 1 element), which is Leaf
* Depth of i-th element is i (full node) or i-1 (half node)
Example:
# of elements | List of depth | Is full?
1 | [0] | [1]
2 | [0,0] | [1,0]
3 | [0,1] | [1,1]
4 | [0,0,1] | [1,0,0]
5 | [0,1,1] | [1,1,0]
6 | [0,0,2] | [1,0,1]
7 | [0,1,2] | [1,1,1]
8 | [0,0,1,2] | [1,0,0,0]
9 | [0,1,1,2] | [1,1,0,0]
10 | [0,0,2,2] | [1,0,1,0]
11 | [0,1,2,2] | [1,1,1,0]
12 | [0,0,1,3] | [1,0,0,1]
-}
toPart :: [a] -> [(Bool, Tree a)]
toPart = foldr insertPart []
insertPart :: a -> [(Bool, Tree a)] -> [(Bool, Tree a)]
insertPart = incr0
where
incr0 a [] = [(True, Leaf a)]
incr0 a ((_, t) : rest) = (True, Leaf a) : incr t rest
incr t [] = [(False, t)]
incr t ((d_i, t_i) : rest) =
if d_i then (False, t) : incr t_i rest
else (True, Node t t_i) : rest
fromPart :: [(Bool, Tree a)] -> Tree a
fromPart [] = Empty
fromPart ((_,t0) : rest) = foldl (\t (_,t') -> Node t t') t0 rest
| null | https://raw.githubusercontent.com/viercc/kitchen-sink-hs/5038b17a39e4e6f19e6fb4779a7c8aaddf64d922/scripts/balanced-tree.hs | haskell | cabal:
build-depends: base
| #!/usr/bin/env cabal
module Main where
main :: IO ()
main = ppr $ buildBalanced [1..23 :: Int]
data Tree a = Node (Tree a) (Tree a) | Leaf a | Empty
deriving (Show, Read, Eq, Ord)
ppr :: Show a => Tree a -> IO ()
ppr = putStrLn . prettyTree
prettyTree :: Show a => Tree a -> String
prettyTree = go 0
where
go indent t = case t of
Empty -> "Empty"
Leaf a -> show a
Node t1 t2 -> "* " ++ go (indent+2) t1 ++ "\n" ++
replicate (indent+2) ' ' ++ go (indent+2) t2
buildBalanced :: [a] -> Tree a
buildBalanced = fromPart . toPart
" Part " is a list of complete binary tree ( 2^d elements ) with following
constraint :
* The first element is depth 0 tree ( 2 ^ 0 = 1 element ) , which is Leaf
* Depth of i - th element is i ( full node ) or i-1 ( half node )
Example :
# of elements | List of depth | Is full ?
1 | [ 0 ] | [ 1 ]
2 | [ 0,0 ] | [ 1,0 ]
3 | [ 0,1 ] | [ 1,1 ]
4 | [ 0,0,1 ] | [ 1,0,0 ]
5 | [ 0,1,1 ] | [ 1,1,0 ]
6 | [ 0,0,2 ] | [ 1,0,1 ]
7 | [ 0,1,2 ] | [ 1,1,1 ]
8 | [ 0,0,1,2 ] | [ 1,0,0,0 ]
9 | [ 0,1,1,2 ] | [ 1,1,0,0 ]
10 | [ 0,0,2,2 ] | [ 1,0,1,0 ]
11 | [ 0,1,2,2 ] | [ 1,1,1,0 ]
12 | [ 0,0,1,3 ] | [ 1,0,0,1 ]
"Part" is a list of complete binary tree (2^d elements) with following
constraint:
* The first element is depth 0 tree (2^0 = 1 element), which is Leaf
* Depth of i-th element is i (full node) or i-1 (half node)
Example:
# of elements | List of depth | Is full?
1 | [0] | [1]
2 | [0,0] | [1,0]
3 | [0,1] | [1,1]
4 | [0,0,1] | [1,0,0]
5 | [0,1,1] | [1,1,0]
6 | [0,0,2] | [1,0,1]
7 | [0,1,2] | [1,1,1]
8 | [0,0,1,2] | [1,0,0,0]
9 | [0,1,1,2] | [1,1,0,0]
10 | [0,0,2,2] | [1,0,1,0]
11 | [0,1,2,2] | [1,1,1,0]
12 | [0,0,1,3] | [1,0,0,1]
-}
toPart :: [a] -> [(Bool, Tree a)]
toPart = foldr insertPart []
insertPart :: a -> [(Bool, Tree a)] -> [(Bool, Tree a)]
insertPart = incr0
where
incr0 a [] = [(True, Leaf a)]
incr0 a ((_, t) : rest) = (True, Leaf a) : incr t rest
incr t [] = [(False, t)]
incr t ((d_i, t_i) : rest) =
if d_i then (False, t) : incr t_i rest
else (True, Node t t_i) : rest
fromPart :: [(Bool, Tree a)] -> Tree a
fromPart [] = Empty
fromPart ((_,t0) : rest) = foldl (\t (_,t') -> Node t t') t0 rest
|
dca607956462e3d7d431f6b7ffec4e99d159fd190b8bc541d126c18a5d0b644e | potapenko/playphraseme-site | view.cljs | (ns playphraseme.views.not-found.view
(:require [reagent.core :as r]))
(defn page []
(r/create-class
{:component-did-mount
(fn [])
:reagent-render
(fn []
[:div.page-container
[:h1 "Page not found."]])}))
| null | https://raw.githubusercontent.com/potapenko/playphraseme-site/d50a62a6bc8f463e08365dca96b3a6e5dde4fb12/src/cljs/playphraseme/views/not_found/view.cljs | clojure | (ns playphraseme.views.not-found.view
(:require [reagent.core :as r]))
(defn page []
(r/create-class
{:component-did-mount
(fn [])
:reagent-render
(fn []
[:div.page-container
[:h1 "Page not found."]])}))
| |
be8b0aaca033b1b8453af5e9898f77f07bb181a055da3ea7d265dc7694e8e137 | disteph/cdsat | arrays.ml | open Top
module Known = struct
let known =
let open Symbols in
function
| Eq _ | Select _ | Store _ | Diff _ -> true
| _ -> false
end
include Generic.Make(Known)
| null | https://raw.githubusercontent.com/disteph/cdsat/1b569f3eae59802148f4274186746a9ed3e667ed/src/kernel/kernel.mld/termstructures.mld/VarSet.mld/arrays.ml | ocaml | open Top
module Known = struct
let known =
let open Symbols in
function
| Eq _ | Select _ | Store _ | Diff _ -> true
| _ -> false
end
include Generic.Make(Known)
| |
0b2664feb65a8687f4c555b2e1857e7b0a996743b3258484c641c180d67d948b | fnumatic/messaging-client | reagent_hooks.cljs | (ns mailclient.tools.reagent-hooks
(:require [reagent.core :as r]))
(defn use-state [value]
(let [r (r/atom value)]
[r #(reset! r %)]))
(defn use-ref []
(let [a (volatile! nil)]
(reify
IDeref
(-deref [_]
@a)
IFn
(-invoke [this value]
(vreset! a value)))))
(defn use-effect [f]
(let [current-component ^js (r/current-component)
did-mount (.-componentDidMount current-component)
will-unmount (.-componentWillUnmount current-component)
handler (volatile! nil)]
(set! (.-componentDidMount current-component)
(fn [& args]
(let [h (f)]
(when (fn? h)
(vreset! handler h)))
(when (fn? did-mount)
(apply did-mount args))))
(set! (.-componentWillUnmount current-component)
(fn [& args]
(when (fn? @handler) (@handler))
(when (fn? will-unmount)
(apply will-unmount args))))))
(defn use-reducer [reducer initial-state]
(let [val (r/atom initial-state)]
[val #(swap! val reducer %)])) | null | https://raw.githubusercontent.com/fnumatic/messaging-client/bcdc351d064b6c3a06250086a5f8e7d544a5d1b1/src/mailclient/tools/reagent_hooks.cljs | clojure | (ns mailclient.tools.reagent-hooks
(:require [reagent.core :as r]))
(defn use-state [value]
(let [r (r/atom value)]
[r #(reset! r %)]))
(defn use-ref []
(let [a (volatile! nil)]
(reify
IDeref
(-deref [_]
@a)
IFn
(-invoke [this value]
(vreset! a value)))))
(defn use-effect [f]
(let [current-component ^js (r/current-component)
did-mount (.-componentDidMount current-component)
will-unmount (.-componentWillUnmount current-component)
handler (volatile! nil)]
(set! (.-componentDidMount current-component)
(fn [& args]
(let [h (f)]
(when (fn? h)
(vreset! handler h)))
(when (fn? did-mount)
(apply did-mount args))))
(set! (.-componentWillUnmount current-component)
(fn [& args]
(when (fn? @handler) (@handler))
(when (fn? will-unmount)
(apply will-unmount args))))))
(defn use-reducer [reducer initial-state]
(let [val (r/atom initial-state)]
[val #(swap! val reducer %)])) | |
67fc4ffb2f242eba91e36c4635d64f7a256282eabeaf4efa732a1f9765209eb4 | hspec/hspec-expectations | Spec.hs | module Main where
import Test.Hspec
import qualified Test.Hspec.ExpectationsSpec
import qualified Test.Hspec.Expectations.MatcherSpec
spec :: Spec
spec = do
describe "Test.Hspec.ExpectationsSpec" Test.Hspec.ExpectationsSpec.spec
describe "Test.Hspec.Expectations.MatcherSpec" Test.Hspec.Expectations.MatcherSpec.spec
main :: IO ()
main = hspec spec
| null | https://raw.githubusercontent.com/hspec/hspec-expectations/55f00d0fd98421cf67c4726e8272ec1d33f2c6f2/test/Spec.hs | haskell | module Main where
import Test.Hspec
import qualified Test.Hspec.ExpectationsSpec
import qualified Test.Hspec.Expectations.MatcherSpec
spec :: Spec
spec = do
describe "Test.Hspec.ExpectationsSpec" Test.Hspec.ExpectationsSpec.spec
describe "Test.Hspec.Expectations.MatcherSpec" Test.Hspec.Expectations.MatcherSpec.spec
main :: IO ()
main = hspec spec
| |
daa8594f86eadaec7337ee0c514de5166dddb05c97ef80ea33241c292cf42306 | GaloisInc/mistral | DeadCode.hs | # LANGUAGE FlexibleInstances #
-- | Eliminate dead code from a Mistral program.
module Mistral.CodeGen.DeadCode (
elimDeadCode
) where
import Mistral.Driver
import Mistral.TypeCheck.AST
import Mistral.Utils.Names
import Mistral.Utils.PP
import Mistral.Utils.SCC
import qualified Data.Foldable as F
import Data.List ( partition )
import Data.Maybe ( catMaybes )
import qualified Data.Set as Set
-- Dead Code Removal -----------------------------------------------------------
-- | Remove declarations that aren't in the transitive closure of any task
-- declaration.  The tasks attached to the program's nodes are the roots
-- of the liveness analysis; everything a live binding mentions stays.
elimDeadCode :: Program -> Driver Program
elimDeadCode prog = phase "dc" $ do
  -- Roots: every task mentioned by any node of the program.
  let rootTasks       = concatMap (nTasks . nValue) (progNodes prog)
      -- Pair each top-level binding with its free-variable set once,
      -- so 'prune' never recomputes it.
      annotated       = map (\d -> (freeVars d, d))
                            (concatMap F.toList (progBinds prog))
      (live, removed) = prune (freeVars rootTasks) annotated
  traceMsg $ hang (text "initial task sets:")
                2 (commas (map (pp . nName) rootTasks))
          $$ hang (text "removing:")
                2 (commas (map (pp . bName) removed))
          $$ hang (text "keeping:")
                2 (commas (map (pp . bName) live))
  -- Re-group the survivors, then clean up dead let-bound locals.
  pruned <- elimDeadLocalVars prog { progBinds = scc live }
  traceMsg (pp pruned)
  return pruned
-- | Eliminate dead local (let-bound) variables throughout the program.
-- Pure rewrite lifted into the 'Driver' monad.
elimDeadLocalVars :: Program -> Driver Program
elimDeadLocalVars = return . elimUnused
-- | Clean a statement sequence: a bound name is kept only when some
-- later action in the sequence mentions it; otherwise the binder is
-- dropped (the bound expression itself is retained for its effect).
elimBinds :: [Action] -> [Action]
elimBinds [] = []
elimBinds (act : rest) = rewrite act : elimBinds rest
  where
  rewrite (ABind mb e t)        = ABind (keepName mb) (elimUnused e) t
  rewrite (AReceive r fs tm df) =
    AReceive r (elimUnused fs) (elimUnused tm) (elimUnused df)

  -- Retain the binder only if a later action refers to it.
  keepName (Just n) | n `Set.member` freeVars rest = Just n
  keepName _                                       = Nothing
-- Let statements
-- | Bottom-up syntax traversal that removes unused local bindings:
-- every 'ELet' encountered is rewritten by 'elimELet'; all other
-- structure is copied with its sub-terms cleaned.
class ElimUnused a where
  elimUnused :: a -> a
-- Only the top-level binding groups are traversed; other program
-- fields are left untouched.
instance ElimUnused Program where
  elimUnused p = p { progBinds = elimUnused (progBinds p) }
-- Mirrors the 'Program' instance: clean the module's binding groups.
instance ElimUnused Module where
  elimUnused m = m { modBinds = elimUnused (modBinds m) }
-- Clean each declaration inside a (recursive or non-recursive) group.
instance ElimUnused (Group Decl) where
  elimUnused = groupMap elimUnused
-- A binding is cleaned by cleaning its right-hand side.
instance ElimUnused (Bind Expr) where
  elimUnused b = b { bBody = elimUnused (bBody b) }
-- Lists: apply the cleanup pointwise.
instance ElimUnused a => ElimUnused [a] where
  elimUnused = map elimUnused
-- Traverse the two statement forms, cleaning their sub-expressions.
instance ElimUnused Action where
  elimUnused (ABind n e t)       = ABind n (elimUnused e) t
  elimUnused (AReceive r fs t d) =
    AReceive r (elimUnused fs) (fmap elimUnused t) (fmap elimUnused d)
-- Optional values: clean the payload when present.
instance ElimUnused a => ElimUnused (Maybe a) where
  elimUnused = fmap elimUnused
-- Expression traversal.  'ELet' is the interesting case (handed to
-- 'elimELet'); statement blocks go through 'elimBinds' so unused
-- binders inside them are dropped; everything else just recurses.
-- NOTE(review): the first argument of 'ECase' is not traversed here --
-- confirm it is a scrutinee source rather than an 'Expr'.
instance ElimUnused Expr where
  elimUnused (ELet b e t) = elimELet b e t
  elimUnused (EApp e1 e2) = EApp (elimUnused e1) (elimUnused e2)
  elimUnused (ECase s m t) = ECase s (elimUnused m) t
  elimUnused (EStmts t1 t2 as) = EStmts t1 t2 (elimBinds as)
  elimUnused (ETApp e1 ts) = ETApp (elimUnused e1) ts
  elimUnused (ECApp e es) = ECApp (elimUnused e) (elimUnused es)
  elimUnused (EMkTuple te) = EMkTuple (map (\(t,e) -> (t, elimUnused e)) te)
  elimUnused (EMkList t es) = EMkList t (elimUnused es)
  -- Atoms (variables, literals, ...) carry no sub-expressions to clean.
  elimUnused e = e
-- Traverse a pattern-match tree, cleaning embedded expressions and
-- sub-matches.  NOTE(review): the guard of 'MGuard' and the pattern of
-- 'MPat' are left untouched -- confirm they cannot contain let-bindings.
instance ElimUnused (Match Pattern) where
  elimUnused m =
    case m of
      MCase e t m' -> MCase (elimUnused e) t (elimUnused m')
      MRename n e t m' -> MRename n (elimUnused e) t (elimUnused m')
      MGuard g m' -> MGuard g (elimUnused m')
      MPat p m' -> MPat p (elimUnused m')
      MSplit l r -> MSplit (elimUnused l) (elimUnused r)
      MExpr e -> MExpr (elimUnused e)
      MFail -> MFail
-- A timeout handler is cleaned by cleaning its body.
instance ElimUnused Timeout where
  elimUnused t = t { toBody = elimUnused (toBody t) }
-- A receive arm is cleaned by cleaning its body.
instance ElimUnused From where
  elimUnused f = f { fBody = elimUnused (fBody f) }
-- | Prune the binding groups of a let-expression, keeping only bindings
-- that are (transitively) referenced from the body.
--
-- The groups are in dependency order: a later group may refer to
-- bindings of an earlier group, but not vice versa.  We therefore walk
-- the groups from last to first, growing the live-name set with the
-- free variables of every group we keep.  (Previously each group was
-- pruned against the free variables of the body alone, which could
-- delete a binding still referenced by a surviving later group, e.g.
-- @let x = 5; y = x + 1 in y@ would drop @x@.)
elimELet :: [Group Decl] -> Expr -> Type -> Expr
elimELet gd e t
  | null keepGroups = e'
  | otherwise       = ELet keepGroups e' t
  where
  e' = elimUnused e

  -- Right-to-left fold: the accumulator carries the names live so far
  -- (body first, then each kept group) and the kept groups in order.
  (_, keepGroups) = foldr step (freeVars e', []) gd

  step g (liveVars, acc) =
    case pruneGroup liveVars g of
      Nothing          -> (liveVars, acc)
      Just (g', gVars) -> (gVars `Set.union` liveVars, g' : acc)

  -- Keep the reachable members of one group, also returning their free
  -- variables so earlier groups they mention stay live.
  pruneGroup :: Set.Set Name -> Group Decl -> Maybe (Group Decl, Set.Set Name)
  pruneGroup liveVars g =
    case g of
      NonRecursive r ->
        case fst (prune liveVars [(freeVars r, r)]) of
          [x] -> Just (NonRecursive x, freeVars x)
          _   -> Nothing
      Recursive es ->
        case fst (prune liveVars [ (freeVars b, b) | b <- es ]) of
          [] -> Nothing
          xs -> Just (Recursive xs, Set.unions (map freeVars xs))
-- | Partition declarations by whether or not they are reachable from
-- the initial name set given.  The first component holds the reachable
-- declarations (in input order, discovered level by level), the second
-- the dead ones.
prune :: Set.Set Name -> [(Set.Set Name, Decl)] -> ([Decl],[Decl])
prune refs ds =
  case partition isLive ds of
    -- Fixpoint: nothing new became reachable, the rest is dead.
    ([], rest)   -> ([], map snd rest)
    -- Recurse with the free variables of the newly reachable level.
    (live, rest) ->
      let (more, dead) = prune (Set.unions (map fst live)) rest
      in  (map snd live ++ more, dead)
  where
  isLive (_, d) = bName d `Set.member` refs
| null | https://raw.githubusercontent.com/GaloisInc/mistral/3464ab332d73c608e64512e822fe2b8a619ec8f3/src/Mistral/CodeGen/DeadCode.hs | haskell | | Eliminate dead code from a Mistral program.
Dead Code Removal -----------------------------------------------------------
| Remove declarations that aren't in the transitive closure of any task
declaration.
Eliminate dead local variables.
Let statements
| Partition declarations by whether or not they are reachable from the
initial set given. | # LANGUAGE FlexibleInstances #
module Mistral.CodeGen.DeadCode (
elimDeadCode
) where
import Mistral.Driver
import Mistral.TypeCheck.AST
import Mistral.Utils.Names
import Mistral.Utils.PP
import Mistral.Utils.SCC
import qualified Data.Foldable as F
import Data.List ( partition )
import Data.Maybe ( catMaybes )
import qualified Data.Set as Set
elimDeadCode :: Program -> Driver Program
elimDeadCode prog = phase "dc" $
do let tasks = concatMap (nTasks . nValue) (progNodes prog)
(keep,dead) = prune (freeVars tasks) [ (freeVars l,l)
| l <- concatMap F.toList (progBinds prog) ]
traceMsg $ hang (text "initial task sets:")
2 (commas (map (pp . nName) tasks))
$$ hang (text "removing:")
2 (commas (map (pp . bName) dead))
$$ hang (text "keeping:")
2 (commas (map (pp . bName) keep))
let prog' = prog { progBinds = scc keep }
progFinal <- elimDeadLocalVars prog'
traceMsg (pp progFinal)
return progFinal
elimDeadLocalVars :: Program -> Driver Program
elimDeadLocalVars prog = return (elimUnused prog)
elimBinds :: [Action] -> [Action]
elimBinds [] = []
elimBinds (ABind Nothing e t : rest ) = ABind Nothing (elimUnused e) t : elimBinds rest
elimBinds (ABind (Just nm) e t : rest ) =
if nm `Set.member` freeVars rest
then ABind (Just nm) (elimUnused e) t : elimBinds rest
else ABind Nothing (elimUnused e) t : elimBinds rest
elimBinds (AReceive rst fs tm df : rest) =
AReceive rst (elimUnused fs) (elimUnused tm) (elimUnused df) : elimBinds rest
class ElimUnused a where
elimUnused :: a -> a
instance ElimUnused Program where
elimUnused p = p { progBinds = elimUnused (progBinds p) }
instance ElimUnused Module where
elimUnused m = m { modBinds = elimUnused (modBinds m) }
instance ElimUnused (Group Decl) where
elimUnused = groupMap elimUnused
instance ElimUnused (Bind Expr) where
elimUnused b = b { bBody = elimUnused (bBody b) }
instance ElimUnused a => ElimUnused [a] where
elimUnused xs = map elimUnused xs
instance ElimUnused Action where
elimUnused a =
case a of
ABind n e t -> ABind n (elimUnused e) t
AReceive r fs t d -> AReceive r (elimUnused fs) (fmap elimUnused t) (fmap elimUnused d)
instance ElimUnused a => ElimUnused (Maybe a) where
elimUnused = fmap elimUnused
instance ElimUnused Expr where
elimUnused (ELet b e t) = elimELet b e t
elimUnused (EApp e1 e2) = EApp (elimUnused e1) (elimUnused e2)
elimUnused (ECase s m t) = ECase s (elimUnused m) t
elimUnused (EStmts t1 t2 as) = EStmts t1 t2 (elimBinds as)
elimUnused (ETApp e1 ts) = ETApp (elimUnused e1) ts
elimUnused (ECApp e es) = ECApp (elimUnused e) (elimUnused es)
elimUnused (EMkTuple te) = EMkTuple (map (\(t,e) -> (t, elimUnused e)) te)
elimUnused (EMkList t es) = EMkList t (elimUnused es)
elimUnused e = e
instance ElimUnused (Match Pattern) where
elimUnused m =
case m of
MCase e t m' -> MCase (elimUnused e) t (elimUnused m')
MRename n e t m' -> MRename n (elimUnused e) t (elimUnused m')
MGuard g m' -> MGuard g (elimUnused m')
MPat p m' -> MPat p (elimUnused m')
MSplit l r -> MSplit (elimUnused l) (elimUnused r)
MExpr e -> MExpr (elimUnused e)
MFail -> MFail
instance ElimUnused Timeout where
elimUnused t = t { toBody = elimUnused (toBody t) }
instance ElimUnused From where
elimUnused f = f { fBody = elimUnused (fBody f) }
elimELet :: [Group Decl] -> Expr -> Type -> Expr
elimELet gd e t =
let e' = elimUnused e
fv = freeVars e'
pruneGroup :: Group Decl -> Maybe (Group Decl)
pruneGroup g =
case g of
NonRecursive r -> case fst (prune fv [(freeVars r, r)]) of
[x] -> Just (NonRecursive x)
_ -> Nothing
Recursive es -> case fst (prune fv [(freeVars b, b) | b <- es]) of
[] -> Nothing
xs -> Just (Recursive xs)
keepGroups = catMaybes [pruneGroup g | g <- gd]
in if null keepGroups
then e'
else ELet keepGroups e' t
prune :: Set.Set Name -> [(Set.Set Name, Decl)] -> ([Decl],[Decl])
prune refs ds
| null reachable = ([], map snd next)
| otherwise = (map snd reachable ++ rs, dead)
where
isReachable (_,d) = bName d `Set.member` refs
(reachable,next) = partition isReachable ds
(rs,dead) = prune (Set.unions (map fst reachable)) next
|
d370d84425881c6b5309111e97795348efda1d1d2b5a434460d415bb815772ca | ocsigen/js_of_ocaml | test.ml | (* This is demo input for wiki. It will be loaded automatically into window on startup *)
let test1 =
"\n\n\
====this is h4\n\n\
# number list el 1\n\
# number list e2 2 //with italic text\n\n\n\
//with italic\n\n\
* bullet list el1 ** with bold text\n\
* bullet list el2 ** with bold // and italic text\n\n\
<<youtube 1XNTjVScm_8>>\n\n\
[[|Link to Yandex]]\n\n\
[[]]\n\n\
{{-search.com/img/yellowicon/firefox_win.zip/Firefox_Thunderbird_Win-icons-Firefox.ico-128x128.png|mail \
icon}}\n\n\
{{{\n\
== [[Nowiki]]:\n\
//**don't** format//\n\
}}}\n\n\n"
| null | https://raw.githubusercontent.com/ocsigen/js_of_ocaml/58210fabc947c4839b6e71ffbbf353a4ede0dbb7/examples/wiki/test.ml | ocaml | This is demo input for wiki. It will be loaded automatically into window on startup |
let test1 =
"\n\n\
====this is h4\n\n\
# number list el 1\n\
# number list e2 2 //with italic text\n\n\n\
//with italic\n\n\
* bullet list el1 ** with bold text\n\
* bullet list el2 ** with bold // and italic text\n\n\
<<youtube 1XNTjVScm_8>>\n\n\
[[|Link to Yandex]]\n\n\
[[]]\n\n\
{{-search.com/img/yellowicon/firefox_win.zip/Firefox_Thunderbird_Win-icons-Firefox.ico-128x128.png|mail \
icon}}\n\n\
{{{\n\
== [[Nowiki]]:\n\
//**don't** format//\n\
}}}\n\n\n"
|
0ea310a6a7e1483be908fd8161652aef19ea6d627444567fcc22b0f5d4a715d1 | jeanparpaillon/erlang-dbus | dbus_remote_service.erl | %%
@author < >
Copyright 2014
%% @doc Implements a remote service ...
%%
%% @todo Remember what this module does ;)
%% @end
-module(dbus_remote_service).
-behaviour(gen_server).
-include("dbus.hrl").
%% api
-export([
start_link/3,
get_object/2,
release_object/2
]).
-export([
init/1,
code_change/3,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2
]).
-record(state, {
name,
bus,
conn,
objects
}).
start_link(Bus, Conn, ServiceName) ->
gen_server:start_link(?MODULE, [Bus, Conn, ServiceName], []).
-spec get_object(dbus_name(), dbus_path()) -> {ok, pid()} | {error, term()}.
get_object(Service, Path) ->
gen_server:call(Service, {get_object, Path}).
release_object(Service, Object) ->
gen_server:call(Service, {release_object, Object}).
%%
%% gen_server callbacks
%%
init([Bus, Conn, ServiceName]) ->
Reg = ets:new(objects, [set, private]),
{ok, #state{name=ServiceName, bus=Bus, conn=Conn, objects=Reg}}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
handle_call({get_object, Path}, {Pid, _Tag},
#state{objects=Reg, conn=Conn, name=Name}=State) ->
case ets:lookup(Reg, Path) of
[{Path, Object, Pids}] ->
ets:insert(Reg, {Path, Object, sets:add_element(Pid, Pids)}),
{reply, {ok, Object}, State};
[] ->
case dbus_proxy:start_link(Conn, Name, Path) of
{ok, Object} ->
ets:insert(Reg, {Path, Object, sets:from_list([Pid])}),
{reply, {ok, Object}, State};
{error, Err} ->
?error("Error starting object ~p: ~p~n", [Path, Err]),
{reply, {error, Err}, State}
end
end;
handle_call({release_object, Object}, {Pid, _}, State) ->
case handle_release_object(Object, Pid, State) of
{ok, State1} ->
{reply, ok, State1};
{error, Reason, State1} ->
{reply, Reason, State1};
{stop, State1} ->
{stop, normal, ok, State1}
end;
handle_call(Request, _From, State) ->
?error("Unhandled call in ~p: ~p~n", [?MODULE, Request]),
{reply, ok, State}.
handle_cast(stop, State) ->
{stop, normal, State};
handle_cast(Request, State) ->
?error("Unhandled cast in ~p: ~p~n", [?MODULE, Request]),
{noreply, State}.
handle_info(setup, State) ->
{noreply, State};
handle_info({'EXIT', Pid, Reason}, State) ->
case handle_release_all_objects(Pid, State) of
{ok, State1} ->
{noreply, State1};
{stop, State1} ->
{stop, normal, State1};
{error, not_registered, State1} ->
if
Reason /= normal ->
{stop, Reason, State1};
true ->
{noreply, State1}
end
end;
handle_info({proxy, ok, From, Obj}, State) ->
gen_server:reply(From, {ok, Obj}),
{noreply, State};
handle_info({proxy, Result, From, _Obj}, State) ->
gen_server:reply(From, Result),
{noreply, State};
handle_info(Info, State) ->
?error("Unhandled info in ~p: ~p~n", [?MODULE, Info]),
{noreply, State}.
terminate(_Reason, _State) ->
terminated.
handle_release_object(Object, Pid, #state{objects=Reg}=State) ->
?debug("~p: ~p handle_release_object ~p~n", [?MODULE, self(), Object]),
case ets:match_object(Reg, {'_', Object, '_'}) of
[{Path, _, Pids}] ->
case sets:is_element(Pid, Pids) of
true ->
true = unlink(Pid),
Pids2 = sets:del_element(Pid, Pids),
case sets:size(Pids2) of
0 ->
% No more pids, remove object
?debug("object terminated ~p ~p~n", [Object, Path]),
ets:delete(Reg, Path),
case ets:info(Reg, size) of
0 ->
?debug("No more object in service, stopping service ~p~n", [State#state.name]),
{stop, State};
_ ->
{ok, State}
end;
_ ->
% Update registry entry
ets:insert(Reg, {Path, Object, Pids2}),
{ok, State}
end;
false ->
Pid was not in Pids
{error, not_resgitered, State}
end;
[] ->
{error, not_registered, State}
end.
handle_release_all_objects(_Pid, _State) ->
throw(unimplemented).
| null | https://raw.githubusercontent.com/jeanparpaillon/erlang-dbus/a1feab01d2cdf07baf09d3761ab1175a8ce56435/src/dbus_remote_service.erl | erlang |
@doc Implements a remote service ...
@todo Remember what this module does ;)
@end
api
gen_server callbacks
No more pids, remove object
Update registry entry | @author < >
Copyright 2014
-module(dbus_remote_service).
-behaviour(gen_server).
-include("dbus.hrl").
-export([
start_link/3,
get_object/2,
release_object/2
]).
-export([
init/1,
code_change/3,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2
]).
-record(state, {
name,
bus,
conn,
objects
}).
start_link(Bus, Conn, ServiceName) ->
gen_server:start_link(?MODULE, [Bus, Conn, ServiceName], []).
-spec get_object(dbus_name(), dbus_path()) -> {ok, pid()} | {error, term()}.
get_object(Service, Path) ->
gen_server:call(Service, {get_object, Path}).
release_object(Service, Object) ->
gen_server:call(Service, {release_object, Object}).
init([Bus, Conn, ServiceName]) ->
Reg = ets:new(objects, [set, private]),
{ok, #state{name=ServiceName, bus=Bus, conn=Conn, objects=Reg}}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
handle_call({get_object, Path}, {Pid, _Tag},
#state{objects=Reg, conn=Conn, name=Name}=State) ->
case ets:lookup(Reg, Path) of
[{Path, Object, Pids}] ->
ets:insert(Reg, {Path, Object, sets:add_element(Pid, Pids)}),
{reply, {ok, Object}, State};
[] ->
case dbus_proxy:start_link(Conn, Name, Path) of
{ok, Object} ->
ets:insert(Reg, {Path, Object, sets:from_list([Pid])}),
{reply, {ok, Object}, State};
{error, Err} ->
?error("Error starting object ~p: ~p~n", [Path, Err]),
{reply, {error, Err}, State}
end
end;
handle_call({release_object, Object}, {Pid, _}, State) ->
case handle_release_object(Object, Pid, State) of
{ok, State1} ->
{reply, ok, State1};
{error, Reason, State1} ->
{reply, Reason, State1};
{stop, State1} ->
{stop, normal, ok, State1}
end;
handle_call(Request, _From, State) ->
?error("Unhandled call in ~p: ~p~n", [?MODULE, Request]),
{reply, ok, State}.
handle_cast(stop, State) ->
{stop, normal, State};
handle_cast(Request, State) ->
?error("Unhandled cast in ~p: ~p~n", [?MODULE, Request]),
{noreply, State}.
handle_info(setup, State) ->
{noreply, State};
handle_info({'EXIT', Pid, Reason}, State) ->
case handle_release_all_objects(Pid, State) of
{ok, State1} ->
{noreply, State1};
{stop, State1} ->
{stop, normal, State1};
{error, not_registered, State1} ->
if
Reason /= normal ->
{stop, Reason, State1};
true ->
{noreply, State1}
end
end;
handle_info({proxy, ok, From, Obj}, State) ->
gen_server:reply(From, {ok, Obj}),
{noreply, State};
handle_info({proxy, Result, From, _Obj}, State) ->
gen_server:reply(From, Result),
{noreply, State};
handle_info(Info, State) ->
?error("Unhandled info in ~p: ~p~n", [?MODULE, Info]),
{noreply, State}.
terminate(_Reason, _State) ->
terminated.
handle_release_object(Object, Pid, #state{objects=Reg}=State) ->
?debug("~p: ~p handle_release_object ~p~n", [?MODULE, self(), Object]),
case ets:match_object(Reg, {'_', Object, '_'}) of
[{Path, _, Pids}] ->
case sets:is_element(Pid, Pids) of
true ->
true = unlink(Pid),
Pids2 = sets:del_element(Pid, Pids),
case sets:size(Pids2) of
0 ->
?debug("object terminated ~p ~p~n", [Object, Path]),
ets:delete(Reg, Path),
case ets:info(Reg, size) of
0 ->
?debug("No more object in service, stopping service ~p~n", [State#state.name]),
{stop, State};
_ ->
{ok, State}
end;
_ ->
ets:insert(Reg, {Path, Object, Pids2}),
{ok, State}
end;
false ->
Pid was not in Pids
{error, not_resgitered, State}
end;
[] ->
{error, not_registered, State}
end.
handle_release_all_objects(_Pid, _State) ->
throw(unimplemented).
|
0665831a2ce5583822a645802a32e08c3ce6cca33fe009fc1345fc2688065b0b | vikram/lisplibraries | boolean.lisp |
(in-package :weblocks)
(export '(predicate predicate-presentation
predicate-presentation-false-string
predicate-presentation-true-string
checkbox checkbox-presentation
predicate-parser))
;;; Data presentation
(defclass predicate-presentation (text-presentation)
((false-string :initform "No"
:accessor predicate-presentation-false-string
:initarg :false-string
:documentation "A string to be printed when the
predicate is false.")
(true-string :initform "Yes"
:accessor predicate-presentation-true-string
:initarg :true-string
:documentation "A string to be printed when the
predicate is true."))
(:documentation "A default presentation that renders values as
predicates, where nil is treated as false, and any other value is
treated as true."))
(defmethod render-view-field-value ((value null) (presentation predicate-presentation)
field view widget obj &rest args)
(apply #'call-next-method value presentation field view widget obj
:ignore-nulls-p t
args))
(defmethod print-view-field-value (value (presentation predicate-presentation)
field view widget obj &rest args)
(declare (ignore args))
(if value
(predicate-presentation-true-string presentation)
(predicate-presentation-false-string presentation)))
;;; Form presentation
(defclass checkbox-presentation (form-presentation)
()
(:documentation "Treats values as predicates and presents them as a
checkbox."))
(defmethod render-view-field-value (value (presentation checkbox-presentation)
(field form-view-field) (view form-view) widget obj
&rest args &key intermediate-values &allow-other-keys)
(declare (ignore args))
(multiple-value-bind (intermediate-value intermediate-value-p)
(form-field-intermediate-value field intermediate-values)
(render-checkbox (view-field-slot-name field)
(if intermediate-value-p
intermediate-value
value))))
Parser
(defclass predicate-parser (parser)
((error-message :initform "checked or unchecked"))
(:documentation "A parser designed to parse strings into
predicates."))
(defmethod parse-view-field-value ((parser predicate-parser) value obj
(view form-view) (field form-view-field) &rest args)
(declare (ignore args))
(cond
((member value '("t" "f") :test #'string-equal) (values t t t))
((null value) (values t t nil))
(t nil)))
;;; Scaffolding magic
(defmethod typespec->view-field-presentation ((scaffold scaffold)
(typespec (eql 'boolean)) args)
(values t (make-instance 'predicate-presentation)))
(defmethod typespec->view-field-presentation ((scaffold-type form-scaffold)
(typespec (eql 'boolean)) args)
(values t (make-instance 'checkbox-presentation)))
(defmethod typespec->form-view-field-parser ((scaffold-type form-scaffold)
(typespec (eql 'boolean)) args)
(values t (make-instance 'predicate-parser)))
| null | https://raw.githubusercontent.com/vikram/lisplibraries/105e3ef2d165275eb78f36f5090c9e2cdd0754dd/site/weblocks-stable/src/views/types/boolean.lisp | lisp | Data presentation
Form presentation
Scaffolding magic |
(in-package :weblocks)
(export '(predicate predicate-presentation
predicate-presentation-false-string
predicate-presentation-true-string
checkbox checkbox-presentation
predicate-parser))
(defclass predicate-presentation (text-presentation)
((false-string :initform "No"
:accessor predicate-presentation-false-string
:initarg :false-string
:documentation "A string to be printed when the
predicate is false.")
(true-string :initform "Yes"
:accessor predicate-presentation-true-string
:initarg :true-string
:documentation "A string to be printed when the
predicate is true."))
(:documentation "A default presentation that renders values as
predicates, where nil is treated as false, and any other value is
treated as true."))
(defmethod render-view-field-value ((value null) (presentation predicate-presentation)
field view widget obj &rest args)
(apply #'call-next-method value presentation field view widget obj
:ignore-nulls-p t
args))
(defmethod print-view-field-value (value (presentation predicate-presentation)
field view widget obj &rest args)
(declare (ignore args))
(if value
(predicate-presentation-true-string presentation)
(predicate-presentation-false-string presentation)))
(defclass checkbox-presentation (form-presentation)
()
(:documentation "Treats values as predicates and presents them as a
checkbox."))
(defmethod render-view-field-value (value (presentation checkbox-presentation)
(field form-view-field) (view form-view) widget obj
&rest args &key intermediate-values &allow-other-keys)
(declare (ignore args))
(multiple-value-bind (intermediate-value intermediate-value-p)
(form-field-intermediate-value field intermediate-values)
(render-checkbox (view-field-slot-name field)
(if intermediate-value-p
intermediate-value
value))))
Parser
(defclass predicate-parser (parser)
((error-message :initform "checked or unchecked"))
(:documentation "A parser designed to parse strings into
predicates."))
(defmethod parse-view-field-value ((parser predicate-parser) value obj
(view form-view) (field form-view-field) &rest args)
(declare (ignore args))
(cond
((member value '("t" "f") :test #'string-equal) (values t t t))
((null value) (values t t nil))
(t nil)))
(defmethod typespec->view-field-presentation ((scaffold scaffold)
(typespec (eql 'boolean)) args)
(values t (make-instance 'predicate-presentation)))
(defmethod typespec->view-field-presentation ((scaffold-type form-scaffold)
(typespec (eql 'boolean)) args)
(values t (make-instance 'checkbox-presentation)))
(defmethod typespec->form-view-field-parser ((scaffold-type form-scaffold)
(typespec (eql 'boolean)) args)
(values t (make-instance 'predicate-parser)))
|
7dae9fe89d247d2068946217d7e26923f063a467e5cea1dd0dea5366e71f82f3 | pcarbonn/H-Calc | B_Add.hs | module Interpreter.B_Add where
-- this module adds the following language construct to the DSL
( α i )
( Add α ( i1,i2 ) )
-------------------------------------------------------
import Interpreter.A_Nucleus
import Interpreter.Transfos
import Fmt
import Haskus.Utils.EADT
import Haskus.Utils.EADT.TH
import Text.Megaparsec
import Text.Megaparsec.Char as M
import Text.Show
-- define nodes
--------------------------------------------------------
data ValF e = ValF e Int deriving (Functor)
data FloatValF e = FloatValF e Float deriving (Functor)
data AddF e = AddF e (e, e) deriving (Functor)
-- define patterns, for creation and pattern matching
eadtPattern 'ValF "Val"
eadtPattern 'FloatValF "FloatVal"
eadtPattern 'AddF "Add"
-- syntactic sugar for embedded DSL
--------------------------------------------------------
fromInteger :: ('[EmptyNoteF, ValF] :<<: xs) => Integer -> EADT xs
fromInteger i = Val EmptyNote $ fromIntegral i
fromRational :: ('[EmptyNoteF, FloatValF] :<<: xs) => Rational -> EADT xs
fromRational i = FloatVal EmptyNote $ realToFrac i
(.+) :: ('[EmptyNoteF, AddF] :<<: xs) => EADT xs -> EADT xs -> EADT xs
(.+) a b = Add EmptyNote (a,b)
neg :: ('[HErrorF, EmptyNoteF, ValF] :<<: xs, Functor (VariantF xs), AlgVariantF Algebra Text xs)
=> EADT xs -> EADT xs
neg (Val α i) = Val α (-i)
neg v = HError EmptyNote $ format "can't negate {}" (showAST v)
-- parser
--------------------------------------------------------
valParser :: ('[EmptyNoteF, ValF] :<<: xs) => MParser (EADT xs)
valParser = Val EmptyNote . toInt <$> do
s <- option "+" (string "-")
i <- some M.digitChar
_ <- option () spaceConsumer
return (s,i)
where toInt :: (Text, [Char]) -> Int
toInt (s, cs) = s' * (foldl' (\a i -> a * 10 + digitToInt i) 0 cs)
where s' = if s == "+" then 1 else -1
digitToInt :: Char -> Int
digitToInt c = ord c - ord '0'
floatValParser :: ('[EmptyNoteF, FloatValF] :<<: xs) => MParser (EADT xs)
floatValParser = FloatVal EmptyNote . toFloat <$> do
s <- option "+" (string "-")
i1 <- some M.digitChar
_ <- string "."
i2 <- some M.digitChar
_ <- option () spaceConsumer
return (s, i1, i2)
where toFloat :: (Text, [Char], [Char]) -> Float
toFloat (s, i1,i2)
= s' * (foldl' (\a i -> a * 10.0 + realToFrac (digitToInt i)) 0.0 i1
+ (foldl' (\a i -> a * 10.0 + realToFrac (digitToInt i)) 0.0 i2)
/ (10.0 ^ (length i2))
)
where s' = if s == "+" then 1 else -1
digitToInt :: Char -> Int
digitToInt c = ord c - ord '0'
addParser :: ('[EmptyNoteF, AddF] :<<: xs) => MParser (EADT xs) -> MParser (EADT xs)
addParser termP = Add EmptyNote <$> do
i1 <- termP
_ <- symbol "+"
i2 <- termP
return (i1,i2)
-- Algebra
--------------------------------------------------------
instance Algebra ValF where
showAST' (ValF α i) = format "{}{}" i α
instance Algebra FloatValF where
showAST' (FloatValF α f) = format "{}{}" f α
instance Algebra AddF where
showAST' (AddF α (v1,v2)) = format "({} + {}){}" v1 v2 α -- no recursive call
-- Isomorphism
--------------------------------------------------------
instance ('[TypF, ValF] :<<: xs)
=> Isomorphism xs ValF where
getAnnotation (ValF α _) = α
setType' (ValF α i) = Val (Typ α TInt) i
instance ('[TypF, FloatValF] :<<: xs)
=> Isomorphism xs FloatValF where
getAnnotation (FloatValF α _) = α
setType' (FloatValF α f) = FloatVal (Typ α TFloat) f
instance ('[HErrorF, EmptyNoteF, TypF, AddF] :<<: xs
, Functor (VariantF xs), Algebra (VariantF xs)
, AlgVariantF (Isomorphism xs) (EADT xs) xs, Isomorphism xs (VariantF xs))
=> Isomorphism xs AddF where
getAnnotation (AddF α _) = α
setType' (AddF α (v1, v2)) =
case (v1,v2) of
(HError _ _, _) -> v1
(_, HError _ _) -> v2
_ -> case (getType v1, getType v2) of
(Just TInt , Just TInt ) -> Add (Typ α TInt) (v1,v2)
(Just TFloat, Just TFloat) -> Add (Typ α TFloat) (v1,v2)
(Just t1 , Just t2 ) ->
HError α $ format "can't add `{}` whose type is {} with `{}` whose type is "
(showAST v1) (show t1) (showAST v2) (show t2)
(_,_) -> HError α "Missing type info in addition"
| null | https://raw.githubusercontent.com/pcarbonn/H-Calc/00f6d8d4344f43a3356d29fd45c9ec3c23ccce65/src/Interpreter/B_Add.hs | haskell | this module adds the following language construct to the DSL
-----------------------------------------------------
define nodes
------------------------------------------------------
define patterns, for creation and pattern matching
syntactic sugar for embedded DSL
------------------------------------------------------
parser
------------------------------------------------------
Algebra
------------------------------------------------------
no recursive call
Isomorphism
------------------------------------------------------ | module Interpreter.B_Add where
( α i )
( Add α ( i1,i2 ) )
import Interpreter.A_Nucleus
import Interpreter.Transfos
import Fmt
import Haskus.Utils.EADT
import Haskus.Utils.EADT.TH
import Text.Megaparsec
import Text.Megaparsec.Char as M
import Text.Show
data ValF e = ValF e Int deriving (Functor)
data FloatValF e = FloatValF e Float deriving (Functor)
data AddF e = AddF e (e, e) deriving (Functor)
eadtPattern 'ValF "Val"
eadtPattern 'FloatValF "FloatVal"
eadtPattern 'AddF "Add"
fromInteger :: ('[EmptyNoteF, ValF] :<<: xs) => Integer -> EADT xs
fromInteger i = Val EmptyNote $ fromIntegral i
fromRational :: ('[EmptyNoteF, FloatValF] :<<: xs) => Rational -> EADT xs
fromRational i = FloatVal EmptyNote $ realToFrac i
(.+) :: ('[EmptyNoteF, AddF] :<<: xs) => EADT xs -> EADT xs -> EADT xs
(.+) a b = Add EmptyNote (a,b)
neg :: ('[HErrorF, EmptyNoteF, ValF] :<<: xs, Functor (VariantF xs), AlgVariantF Algebra Text xs)
=> EADT xs -> EADT xs
neg (Val α i) = Val α (-i)
neg v = HError EmptyNote $ format "can't negate {}" (showAST v)
valParser :: ('[EmptyNoteF, ValF] :<<: xs) => MParser (EADT xs)
valParser = Val EmptyNote . toInt <$> do
s <- option "+" (string "-")
i <- some M.digitChar
_ <- option () spaceConsumer
return (s,i)
where toInt :: (Text, [Char]) -> Int
toInt (s, cs) = s' * (foldl' (\a i -> a * 10 + digitToInt i) 0 cs)
where s' = if s == "+" then 1 else -1
digitToInt :: Char -> Int
digitToInt c = ord c - ord '0'
floatValParser :: ('[EmptyNoteF, FloatValF] :<<: xs) => MParser (EADT xs)
floatValParser = FloatVal EmptyNote . toFloat <$> do
s <- option "+" (string "-")
i1 <- some M.digitChar
_ <- string "."
i2 <- some M.digitChar
_ <- option () spaceConsumer
return (s, i1, i2)
where toFloat :: (Text, [Char], [Char]) -> Float
toFloat (s, i1,i2)
= s' * (foldl' (\a i -> a * 10.0 + realToFrac (digitToInt i)) 0.0 i1
+ (foldl' (\a i -> a * 10.0 + realToFrac (digitToInt i)) 0.0 i2)
/ (10.0 ^ (length i2))
)
where s' = if s == "+" then 1 else -1
digitToInt :: Char -> Int
digitToInt c = ord c - ord '0'
addParser :: ('[EmptyNoteF, AddF] :<<: xs) => MParser (EADT xs) -> MParser (EADT xs)
addParser termP = Add EmptyNote <$> do
i1 <- termP
_ <- symbol "+"
i2 <- termP
return (i1,i2)
instance Algebra ValF where
showAST' (ValF α i) = format "{}{}" i α
instance Algebra FloatValF where
showAST' (FloatValF α f) = format "{}{}" f α
instance Algebra AddF where
instance ('[TypF, ValF] :<<: xs)
=> Isomorphism xs ValF where
getAnnotation (ValF α _) = α
setType' (ValF α i) = Val (Typ α TInt) i
instance ('[TypF, FloatValF] :<<: xs)
=> Isomorphism xs FloatValF where
getAnnotation (FloatValF α _) = α
setType' (FloatValF α f) = FloatVal (Typ α TFloat) f
instance ('[HErrorF, EmptyNoteF, TypF, AddF] :<<: xs
, Functor (VariantF xs), Algebra (VariantF xs)
, AlgVariantF (Isomorphism xs) (EADT xs) xs, Isomorphism xs (VariantF xs))
=> Isomorphism xs AddF where
getAnnotation (AddF α _) = α
setType' (AddF α (v1, v2)) =
case (v1,v2) of
(HError _ _, _) -> v1
(_, HError _ _) -> v2
_ -> case (getType v1, getType v2) of
(Just TInt , Just TInt ) -> Add (Typ α TInt) (v1,v2)
(Just TFloat, Just TFloat) -> Add (Typ α TFloat) (v1,v2)
(Just t1 , Just t2 ) ->
HError α $ format "can't add `{}` whose type is {} with `{}` whose type is "
(showAST v1) (show t1) (showAST v2) (show t2)
(_,_) -> HError α "Missing type info in addition"
|
fd43fdb1d0fc35da39e83674ddd396004f30e207d3a43fc93c24ee7e075d568b | exercism/clojure | restructure.clj | (import java.io.File)
(require '[cheshire.core :as json]
'[clojure.java.io :as io]
'[clojure.java.shell :refer [sh with-sh-dir]]
'[clojure.string :as string])
(doseq [problem ((json/parse-string (slurp "config.json")) "problems")]
(with-sh-dir problem
(sh "mkdir" "-p" "src" "test")
(sh "sh" "-c" "mv *_test.clj test")
(sh "sh" "-c" "mv example.clj src"))
(sh "sh" "-c" "gsed -i '/\\(source\\|test\\)-paths/d' */project.clj"))
| null | https://raw.githubusercontent.com/exercism/clojure/7ed96a5ae3c471c37db2602baf3db2be3b5a2d1a/bin/restructure.clj | clojure | (import java.io.File)
(require '[cheshire.core :as json]
'[clojure.java.io :as io]
'[clojure.java.shell :refer [sh with-sh-dir]]
'[clojure.string :as string])
(doseq [problem ((json/parse-string (slurp "config.json")) "problems")]
(with-sh-dir problem
(sh "mkdir" "-p" "src" "test")
(sh "sh" "-c" "mv *_test.clj test")
(sh "sh" "-c" "mv example.clj src"))
(sh "sh" "-c" "gsed -i '/\\(source\\|test\\)-paths/d' */project.clj"))
| |
667878b2ab4630a4056090717f434d9e2602eed73a270c358395066419b2a361 | bcc32/advent-of-code | b.ml | open! Core
open! Async
open! Import
let try_ elf_attack_power lines =
let battlefield = Battlefield.create lines ~elf_attack_power in
let rec loop rounds =
match Battlefield.perform_round battlefield with
| () -> loop (rounds + 1)
| exception Battlefield.End_of_combat { winning_team = Elf }
when Battlefield.kill_count battlefield Elf = 0 -> Some (battlefield, rounds)
| exception Battlefield.End_of_combat _ -> None
in
loop 0
;;
let main input =
let lines = input |> String.strip |> String.split_lines in
let rec loop elf_attack_power =
match try_ elf_attack_power lines with
| None -> loop (elf_attack_power + 1)
| Some (battlefield, full_rounds) ->
let outcome = full_rounds * Battlefield.sum_of_hit_points battlefield in
printf
!"%d\n%{Battlefield#hum}\n%d rounds, %d hit points"
outcome
battlefield
full_rounds
(Battlefield.sum_of_hit_points battlefield)
in
loop 4;
return ()
;;
let%expect_test "b" =
let%bind lines = Reader.file_contents "input" in
let%bind () = main lines in
[%expect
{|
57820
################################
###################..###########
####################.....#######
####################...#.#######
####################..##...#####
#################..........#####
########..########........######
########..#.##..###.......######
#######.....#..........E.....###
#######...#...............E....#
#########.##.......E.....E....##
###########.............E...#.##
#####..####...#####........E.E.#
####....###..#######.....E#....#
####....###.#########......##..#
####........#########........E.#
###.........#########......E..##
##..#.......#########.........##
#...#.......#########.........##
#..#.....#...#######........####
#.............#####.....#...####
#.#.........................####
#........#..................####
###...##....................####
###...##....................####
####..#####..#............######
##########...#.##....##...######
##########.....##...############
###########.#......#############
#############...################
###############.################
################################
59 rounds, 980 hit points |}];
return ()
;;
| null | https://raw.githubusercontent.com/bcc32/advent-of-code/86a9387c3d6be2afe07d2657a0607749217b1b77/2018/15/b.ml | ocaml | open! Core
open! Async
open! Import
let try_ elf_attack_power lines =
let battlefield = Battlefield.create lines ~elf_attack_power in
let rec loop rounds =
match Battlefield.perform_round battlefield with
| () -> loop (rounds + 1)
| exception Battlefield.End_of_combat { winning_team = Elf }
when Battlefield.kill_count battlefield Elf = 0 -> Some (battlefield, rounds)
| exception Battlefield.End_of_combat _ -> None
in
loop 0
;;
(* Search upward from attack power 4 for the smallest power that lets
   the elves win without casualties, then print the puzzle outcome
   (full rounds * remaining hit points) followed by the final
   battlefield rendering and a summary line. *)
let main input =
  let lines = input |> String.strip |> String.split_lines in
  let rec loop elf_attack_power =
    match try_ elf_attack_power lines with
    | None -> loop (elf_attack_power + 1)
    | Some (battlefield, full_rounds) ->
      let outcome = full_rounds * Battlefield.sum_of_hit_points battlefield in
      printf
        !"%d\n%{Battlefield#hum}\n%d rounds, %d hit points"
        outcome
        battlefield
        full_rounds
        (Battlefield.sum_of_hit_points battlefield)
  in
  loop 4;
  return ()
let%expect_test "b" =
let%bind lines = Reader.file_contents "input" in
let%bind () = main lines in
[%expect
{|
57820
################################
###################..###########
####################.....#######
####################...#.#######
####################..##...#####
#################..........#####
########..########........######
########..#.##..###.......######
#######.....#..........E.....###
#######...#...............E....#
#########.##.......E.....E....##
###########.............E...#.##
#####..####...#####........E.E.#
####....###..#######.....E#....#
####....###.#########......##..#
####........#########........E.#
###.........#########......E..##
##..#.......#########.........##
#...#.......#########.........##
#..#.....#...#######........####
#.............#####.....#...####
#.#.........................####
#........#..................####
###...##....................####
###...##....................####
####..#####..#............######
##########...#.##....##...######
##########.....##...############
###########.#......#############
#############...################
###############.################
################################
59 rounds, 980 hit points |}];
return ()
;;
| |
ac2ef2c40292308eaf0ef92463cd797dcbe4d19bf22eccec0524345b8c9ad7ad | tamarit/edd | mergesort.erl | -module(mergesort).
-export([mergesort/1, mergesort/2, mergesort/3, comp/2]).
%% @doc Sort L with the default strictly-less-than ordering (comp/2)
%% and no parent process (the sorted list is returned directly).
mergesort(L) ->
    mergesort(L, fun ?MODULE:comp/2, none).
%% @doc Sort L with a caller-supplied comparison fun.
%% Comp(A, B) must return true when A should come before B.
mergesort(L, Comp) ->
    mergesort(L, Comp, none).
%% @doc Concurrent merge sort.  Each half of the list is sorted in a
%% freshly spawned worker process that reports back with a
%% `{result, SortedHalf}' message.  Parent is `none' at the top level
%% (return the result) or a pid to send the result to (see
%% send_return/2).
mergesort([], _Comp, Parent) -> send_return([],Parent);
mergesort([X], _Comp, Parent) -> send_return([X],Parent);
mergesort(L, Comp, Parent) ->
    Half = length(L) div 2,
    L1 = take(Half, L),
    L2 = last(length(L) - Half, L),
    spawn(?MODULE, mergesort , [L1, Comp, self()]),
    spawn(?MODULE, mergesort , [L2, Comp, self()]),
    %% NOTE(review): both receives match any {result, _}, so the two
    %% halves may arrive swapped.  The merged output is still sorted,
    %% but the relative order of equal elements is nondeterministic --
    %% confirm whether stability matters to callers.
    LOrd1 =
        receive
            {result, LOrd1_} ->
                LOrd1_
        end,
    LOrd2 =
        receive
            {result, LOrd2_} ->
                LOrd2_
        end,
    send_return(merge(LOrd1, LOrd2, Comp), Parent).
%% @doc Deliver a (sub-)sort result: at the top level (Parent = none)
%% just return it; otherwise send it to the parent process wrapped as
%% {result, Result}.
send_return(Result, Parent) ->
    case Parent of
        none -> Result;
        Pid -> Pid ! {result, Result}
    end.
%% @doc Merge two lists, each already sorted w.r.t. Comp, into one
%% sorted list.  Comp(A, B) true means A goes first.  The commented
%% "Incorrect" alternative is kept on purpose: this module is a
%% declarative-debugging exercise with a switchable bug.
merge([], [], _Comp) ->
    [];
merge([], S2, _Comp) ->
    S2;
merge(S1, [], _Comp) ->
    S1;
merge([H1 | T1], [H2 | T2], Comp) ->
    case Comp(H1,H2) of
        false -> [H2 | merge([H1 | T1], T2, Comp)]; % Correct
        %false -> [H1 | merge([H2 | T1], T2, Comp)]; % Incorrect
        true -> [H1 | merge(T1, [H2 | T2], Comp)]
    end.
%% @doc Default ordering predicate: strictly less than.
comp(A, B) when A < B -> true;
comp(_, _) -> false.
%% @doc Return the first N elements of List (or all of List when it
%% has fewer than N elements).
%%
%% Fix: the transcribed source had lost the general recursive clause
%% (a stray bare token `Incorrect' remained, a syntax error), so
%% take(N, L) had no clause for N >= 2 on a non-empty list.  Restored
%% the standard recursion.
take(0, _) -> [];
take(_, []) -> [];
take(N, [H|T]) -> [H | take(N-1, T)].
%% @doc Return the last N elements of List: reverse, take N from the
%% (now leading) tail end, and reverse back.
last(N, List) ->
    lists:reverse(take(N, lists:reverse(List))).
| null | https://raw.githubusercontent.com/tamarit/edd/867f287efe951bec6a8213743a218b86e4f5bbf7/examples/Concurrency/mergesort/mergesort.erl | erlang | Correct
false -> [H1 | merge([H2 | T1], T2, Comp)]; % Incorrect
take(N,[H|T])->[H | take(N-1, T)]. % Correct | -module(mergesort).
-export([mergesort/1, mergesort/2, mergesort/3, comp/2]).
mergesort(L) ->
mergesort(L, fun ?MODULE:comp/2, none).
mergesort(L, Comp) ->
mergesort(L, Comp, none).
mergesort([], _Comp, Parent) -> send_return([],Parent);
mergesort([X], _Comp, Parent) -> send_return([X],Parent);
mergesort(L, Comp, Parent) ->
Half = length(L) div 2,
L1 = take(Half, L),
L2 = last(length(L) - Half, L),
spawn(?MODULE, mergesort , [L1, Comp, self()]),
spawn(?MODULE, mergesort , [L2, Comp, self()]),
LOrd1 =
receive
{result, LOrd1_} ->
LOrd1_
end,
LOrd2 =
receive
{result, LOrd2_} ->
LOrd2_
end,
send_return(merge(LOrd1, LOrd2, Comp), Parent).
send_return(Result,none) ->
Result;
send_return(Result,Pid) ->
Pid!{result, Result}.
merge([], [], _Comp) ->
[];
merge([], S2, _Comp) ->
S2;
merge(S1, [], _Comp) ->
S1;
merge([H1 | T1], [H2 | T2], Comp) ->
case Comp(H1,H2) of
true -> [H1 | merge(T1, [H2 | T2], Comp)]
end.
comp(X,Y) ->
X < Y.
take(0,_) -> [];
take(1,[H|_])->[H];
take(_,[])->[];
Incorrect
last(N, List) ->
lists:reverse(take(N, lists:reverse(List))).
|
49df27ece1740fc06ac6d158e6ebee310ba774cbe1190c39533bd7db2c7615c0 | thufschmitt/tix | of_onix.ml | *
Conversion between [ . Ast.t ] and [ Ast.t ]
Conversion between [Parse.Ast.t] and [Ast.t]
*)
module A = Common.Type_annotations
module O = Parse.Ast
module N = Ast
module Loc = Common.Location
module W = Common.Writer.Make (Common.Warning.List)
module WL = Loc.With_loc
open W.Infix
let map_loc = WL.map
(* [partition_binop op l] groups the elements of [l] into classes
   under the binary predicate [op]: each pass extracts every element
   related (by [op]) to the current head, then recurses on what is
   left.  Terminates because the head is always related to itself, so
   each class is non-empty and the remainder strictly shrinks. *)
let rec partition_binop op = function
  | [] -> []
  | hd::_ as l ->
    let (partition_elt, rest) =
      CCList.partition (op hd) l
    in partition_elt :: partition_binop op rest
(* Split a record's fields into plain definitions (Left: the
   access-path/annotation pair with its value, location preserved)
   and [inherit] clauses (Right: optional base expression plus the
   inherited field names). *)
let filter_inherit fields =
  CCList.partition_map
    (function
      | { WL.description = O.Finherit (e, fields); _ } -> `Right (e, fields)
      | { WL.description = O.Fdef (ap, value); location} ->
        `Left { WL.description = (ap, value); location; })
    fields
(* Flatten nested access-path field definitions (`a.b.c = e`) into
   single-segment fields whose values are nested (non-recursive)
   records.  Fields are first grouped by their leading path segment;
   each group either becomes a plain field (single one-segment path)
   or a synthesized record built from the recursively flattened
   tails.  A group mixing `a = e` with `a.b = e'` is reported as a
   duplicate-field warning.

   Fix: the dump had lost the `| [] -> assert false` match arm (only
   its comment text survived as a bare, unparseable line); restored it.
   partition_binop never produces an empty group, so the arm is
   unreachable in practice. *)
let rec flatten (fields : ((O.access_path * A.t option) * O.expr) WL.t list) :
  ((O.ap_field * A.t option) * O.expr) WL.t list W.t =
  (* Wrap already-flattened sub-fields into a non-recursive record. *)
  let flattened_record fields =
    O.(Erecord {
        recursive = false;
        fields =
          List.map
            (WL.map
               (fun ((apf, annot), expr) ->
                  Fdef (([apf], annot), expr))) fields;
      })
  in
  (* Invariant: all the access paths are non-empty *)
  let partitionned_by_first_element = partition_binop
      (CCFun.compose_binop (fun f ->
           (CCList.hd @@ fst @@ fst f.WL.description).WL.description)
          (=))
      fields
  in
  W.map_l
    (function
      | { WL.description = (([], _), _); _ } :: _ -> assert false
      (* The access-path can't be empty *)
      | [] -> assert false
      (* A record must have at least one field *)
      | [ { WL.description = (([ident], annot),e); location = _ } as field ] ->
        W.return { field with WL.description = ((ident, annot), e) }
      | { WL.description = ((ident::_, annot), _); location = loc } :: _
        as fields ->
        let module E = struct
          exception MultipleField of Common.Location.t * O.expr
        end in
        begin try
            (* Drop the shared leading segment from every path; a
               one-segment path here means the field is defined both
               as a value and as a nested record -> duplicate. *)
            let sub_fields =
              W.map_l
                (fun { WL.description; location } ->
                   let description = match description with
                     | ((_::(_::_ as tl), annot), e) ->
                       W.return ((tl, annot), e)
                     | ((_::_, _), e) ->
                       raise (E.MultipleField (location, e))
                     | (([],_),_) -> assert false (* This shouldn't happen *)
                   in
                   W.append
                     (W.log description)
                     (W.return
                        { WL.description = W.value description; location; }))
                fields
            in
            sub_fields >>= fun sub_fields ->
            flatten sub_fields >|= fun flattened ->
            {WL.description = ((ident, None),
                               WL.mk loc @@ flattened_record flattened);
             location = loc;
            }
          with
            E.MultipleField (loc, e) ->
            W.append
              [ Common.Warning.make loc @@
                Format.sprintf
                  "The field %s is defined several times"
                  (Parse.Pp.pp_ap_field Format.str_formatter ident;
                   Format.flush_str_formatter ())]
              (W.return
                 { WL.description = ((ident, annot), e); location = loc })
        end
    )
    partitionned_by_first_element
(* Map surface-syntax binary operators onto the simplified AST.
   [OnonEq] and [Oimplies] have no direct counterpart: expr_desc
   desugars them (into not/= and or/not respectively) before this
   function is consulted, hence the assert. *)
let binop : O.binop -> N.binop = function
  | O.Ocons -> N.Ocons
  | O.Oeq -> N.Oeq
  | O.Oplus -> N.Oplus
  | O.Ominus-> N.Ominus
  | O.Oand-> N.Oand
  | O.Oor-> N.Oor
  | O.Omerge -> N.Omerge
  | O.Oconcat -> N.Oconcat
  | O.OnonEq
  | O.Oimplies -> assert false (* treated separately *)
(* Unary operators translate one-to-one. *)
let monop : O.monop -> N.monop = function
  | O.Onot-> N.Onot
  | O.Oneg -> N.Oneg
let rec expr_desc : O.expr_desc -> N.expr_desc W.t = function
| O.Evar s -> W.return @@ N.Evar s
| O.Econstant c -> W.return @@ N.Econstant (constant c)
| O.Elambda (pat, e) -> lambda pat e
| O.EfunApp (e1, e2) ->
funApp e1 e2
| O.EtyAnnot (e, t) -> expr e >|= fun e -> N.EtyAnnot (e, t)
| O.Ebinop (O.OnonEq, e1, e2) ->
expr e1 >>= fun e1 ->
expr e2 >|= fun e2 ->
N.Emonop (N.Onot, WL.mk
(WL.loc e1)
(N.Ebinop (N.Oeq, e1, e2)))
| O.Ebinop (O.Oimplies, e1, e2) ->
let e1_loc = WL.loc e1 in
expr e1 >>= fun e1 ->
expr e2 >|= fun e2 ->
N.Ebinop (N.Oor, e2, WL.mk e1_loc (N.Emonop (N.Onot, e1)))
| O.Ebinop (o, e1, e2) ->
expr e1 >>= fun e1 ->
expr e2 >|= fun e2 ->
N.Ebinop (binop o, e1, e2)
| O.Emonop (o, e) ->
expr e >|= fun e ->
N.Emonop (monop o, e)
| O.Elet (binds, e) ->
bindings binds >>= fun binds ->
expr e >|= fun e ->
N.Elet (binds, e)
| O.Eite (e0, e1, e2) ->
expr e0 >>= fun e0 ->
expr e1 >>= fun e1 ->
expr e2 >|= fun e2 ->
N.Eite (e0, e1, e2)
(* TODO: smarter compilation of some form of if-then-else *)
| O.Epragma (p, e) -> expr e >|= fun e -> N.Epragma (p, e)
| O.Erecord r -> record r
| O.Eaccess (e, ap, default) ->
expr e >>= fun e ->
access_path ap >>= fun ap ->
W.map_opt expr default >|= fun default ->
N.EaccessPath (e, ap, default)
| O.EtestMember (e, ap) ->
expr e >>= fun e ->
(access_path ap
|> if List.length ap > 1 then
W.append
[Common.Warning.(make ~kind:Warning (WL.loc (CCList.hd ap))
"The tail of this access_path has been dropped")]
else fun x -> x)
>|= fun ap ->
N.Ebinop (N.OrecordMember, e, List.hd ap)
(* FIXME: don't drop the tail of the access_path *)
| O.Ewith (e1, e2) ->
expr e1 >>= fun e1 ->
expr e2 >|= fun e2 ->
N.Ewith (e1, e2)
and access_path ap = W.map_l ap_field ap
and apf_to_expr = function
| O.AFexpr e -> e.WL.description
| O.AFidentifier s -> O.Econstant (O.Cstring s)
and ap_field f = expr @@ map_loc apf_to_expr f
and bindings b =
let non_inherit_fields, _ = filter_inherit b in
flatten non_inherit_fields >>= fun b ->
W.map_l binding b
and binding b =
let ((apf, annot), e) = WL.description b in
expr e >>= fun e ->
match WL.description apf with
| O.AFidentifier s ->
W.return ((s, annot), e)
| O.AFexpr e' ->
W.append [Common.Warning.make
~kind:Common.Warning.Error
(WL.loc e')
"Dynamic let-bindings are not allowed"] @@
W.return (("%%INVALID_LHS%%", annot), e)
and expr e =
expr_desc (WL.description e) >|= fun description ->
{ e with WL.description }
and open_flag = function
| O.Open -> N.Open
| O.Closed -> N.Closed
and pattern_record_field { O.field_name; default_value; type_annot } =
W.map_opt (fun e ->
expr e >|= fun e ->
(field_name, type_annot), e)
default_value
>|= fun value ->
((field_name, (CCOpt.is_some default_value, type_annot)), value)
and nontrivial_pattern loc :
O.nontrivial_pattern -> (N.nontrivial_pattern * N.binding list) W.t
= function
| O.NPrecord (fields, flag) ->
W.map_l pattern_record_field fields >|=
List.split >>= fun (new_fields, default_values) ->
let default_values = CCList.flat_map CCOpt.to_list default_values
in
begin
try W.return @@ Record.of_list_uniq new_fields
with Invalid_argument _ ->
W.append
[Common.Warning.make loc "Duplicate element in pattern"]
(W.return Record.empty)
end
>|= fun fields ->
N.NPrecord (fields, open_flag flag), default_values
and pattern_desc loc : O.pattern_desc -> (N.pattern_desc * N.binding list) W.t
= function
| O.Pvar (s, mt) -> W.return (N.Pvar (s, mt), [])
| O.Pnontrivial (sub_pat, alias) ->
nontrivial_pattern loc sub_pat >|= fun (sub_pat, default_values) ->
N.Pnontrivial (sub_pat, alias), default_values
and pattern p =
let loc = WL.loc p in
pattern_desc loc @@ WL.description p >|= fun (new_pat, default_values) ->
(WL.mk loc new_pat, default_values)
and constant = function
| O.Cint i -> N.Cint i
| O.Cbool b -> N.Cbool b
| O.Cstring s -> N.Cstring s
| O.Cpath s -> N.Cpath s
| O.Cbracketed s -> N.Cbracketed s
and record r =
let { O.fields; recursive } = r in
if recursive then
let loc = List.hd fields
|> WL.loc
in
bindings fields >|= fun created_bindings ->
let new_record =
N.Erecord (List.map (fun ((var, annot), { WL.location = loc; _}) ->
(WL.mk loc @@ N.Econstant (N.Cstring var),
annot,
WL.mk loc @@ N.Evar var))
created_bindings)
in
N.Elet (created_bindings, WL.mk loc new_record)
else
let non_inherit_fields, inherit_fields = filter_inherit fields
in
W.map_l inherit_to_classic inherit_fields >>= fun inherit_fields ->
flatten non_inherit_fields >>= fun flattened ->
W.map_l
(fun { WL.description = ((apf, annot), e); location } ->
apf_to_expr (WL.description apf)
|> WL.mk location
|> expr
>>= fun label_expr ->
expr e >|= fun rhs_expr ->
(label_expr, annot, rhs_expr))
flattened
>|= fun new_record ->
N.Erecord (new_record @ CCList.flatten inherit_fields)
and lambda pat e =
pattern pat >>= fun (new_pat, default_values) ->
let loc = WL.loc e in
let mangle_name = (^) "%%" in
let mangled_values_def =
List.map
(* [let %%x /*: t | %%undef */ = x ] *)
(fun ((var, annot), e) ->
let annot = CCOpt.map
(fun a ->
let loc = WL.loc a in
WL.mk loc
A.(Infix (
Infix_constructors.Or, WL.mk loc (Var "%%undef"), a)))
annot
in
((mangle_name var, annot),
WL.mk (WL.loc e) @@ N.Evar var))
default_values
in
let substitute_values =
List.map
(fun ((var, annot), e) ->
let loc = WL.loc e in
let al = WL.mk loc in
let new_expr =
(* [if isUndef [%e %%var] then [%e e] else [%e %%var]] *)
al @@ N.Eite
(al @@ N.EfunApp
(al @@ N.Evar "%%isUndef", al @@ N.Evar (mangle_name var)),
e,
al @@ N.Evar (mangle_name var))
in
((var, annot), new_expr))
default_values
in
expr e >|= fun body ->
let body =
if default_values = [] then body else
WL.mk loc
(N.Elet (mangled_values_def,
(WL.mk loc
(N.Elet (substitute_values, body)))))
in
N.Elambda (new_pat, body)
and funApp e1 e2 =
expr e1 >>= fun e1 ->
expr e2 >|= fun e2 ->
match WL.description e1 with
| N.Evar "import" -> N.Eimport e2
| _ -> N.EfunApp (e1, e2)
and inherit_to_classic ((base_expr, fields) : O.inherit_)
: N.field list W.t =
let mk_classic { WL.description = name; location = loc } =
let value = match base_expr with
| None -> W.return @@ WL.mk loc @@ N.Evar name
| Some e ->
expr e >|= fun e ->
WL.mk loc
@@ N.EaccessPath (e, [WL.mk loc @@ N.Econstant (N.Cstring name)], None)
in
value >|= fun value ->
(WL.mk loc @@ N.Econstant (N.Cstring name), None, value)
in
W.map_l mk_classic fields
| null | https://raw.githubusercontent.com/thufschmitt/tix/bfbf7b6de9160bae8af3e0686b783bbc75c79fd7/lib/simple/of_onix.ml | ocaml | Invariant: all the access paths are non-empty
The access-path can't be empty
This shouldn't happen
treated separately
TODO: smarter compilation of some form of if-then-else
FIXME: don't drop the tail of the access_path
[let %%x /*: t | %%undef */ = x ]
[if isUndef [%e %%var] then [%e e] else [%e %%var]] | *
Conversion between [ . Ast.t ] and [ Ast.t ]
Conversion between [Parse.Ast.t] and [Ast.t]
*)
module A = Common.Type_annotations
module O = Parse.Ast
module N = Ast
module Loc = Common.Location
module W = Common.Writer.Make (Common.Warning.List)
module WL = Loc.With_loc
open W.Infix
let map_loc = WL.map
let rec partition_binop op = function
| [] -> []
| hd::_ as l ->
let (partition_elt, rest) =
CCList.partition (op hd) l
in partition_elt :: partition_binop op rest
let filter_inherit fields =
CCList.partition_map
(function
| { WL.description = O.Finherit (e, fields); _ } -> `Right (e, fields)
| { WL.description = O.Fdef (ap, value); location} ->
`Left { WL.description = (ap, value); location; })
fields
let rec flatten (fields : ((O.access_path * A.t option) * O.expr) WL.t list) :
((O.ap_field * A.t option) * O.expr) WL.t list W.t =
let flattened_record fields =
O.(Erecord {
recursive = false;
fields =
List.map
(WL.map
(fun ((apf, annot), expr) ->
Fdef (([apf], annot), expr))) fields;
})
in
let partitionned_by_first_element = partition_binop
(CCFun.compose_binop (fun f ->
(CCList.hd @@ fst @@ fst f.WL.description).WL.description)
(=))
fields
in
W.map_l
(function
| { WL.description = (([], _), _); _ } :: _ -> assert false
A record must have at least one field
| [ { WL.description = (([ident], annot),e); location = _ } as field ] ->
W.return { field with WL.description = ((ident, annot), e) }
| { WL.description = ((ident::_, annot), _); location = loc } :: _
as fields ->
let module E = struct
exception MultipleField of Common.Location.t * O.expr
end in
begin try
let sub_fields =
W.map_l
(fun { WL.description; location } ->
let description = match description with
| ((_::(_::_ as tl), annot), e) ->
W.return ((tl, annot), e)
| ((_::_, _), e) ->
raise (E.MultipleField (location, e))
in
W.append
(W.log description)
(W.return
{ WL.description = W.value description; location; }))
fields
in
sub_fields >>= fun sub_fields ->
flatten sub_fields >|= fun flattened ->
{WL.description = ((ident, None),
WL.mk loc @@ flattened_record flattened);
location = loc;
}
with
E.MultipleField (loc, e) ->
W.append
[ Common.Warning.make loc @@
Format.sprintf
"The field %s is defined several times"
(Parse.Pp.pp_ap_field Format.str_formatter ident;
Format.flush_str_formatter ())]
(W.return
{ WL.description = ((ident, annot), e); location = loc })
end
)
partitionned_by_first_element
let binop : O.binop -> N.binop = function
| O.Ocons -> N.Ocons
| O.Oeq -> N.Oeq
| O.Oplus -> N.Oplus
| O.Ominus-> N.Ominus
| O.Oand-> N.Oand
| O.Oor-> N.Oor
| O.Omerge -> N.Omerge
| O.Oconcat -> N.Oconcat
| O.OnonEq
let monop : O.monop -> N.monop = function
| O.Onot-> N.Onot
| O.Oneg -> N.Oneg
let rec expr_desc : O.expr_desc -> N.expr_desc W.t = function
| O.Evar s -> W.return @@ N.Evar s
| O.Econstant c -> W.return @@ N.Econstant (constant c)
| O.Elambda (pat, e) -> lambda pat e
| O.EfunApp (e1, e2) ->
funApp e1 e2
| O.EtyAnnot (e, t) -> expr e >|= fun e -> N.EtyAnnot (e, t)
| O.Ebinop (O.OnonEq, e1, e2) ->
expr e1 >>= fun e1 ->
expr e2 >|= fun e2 ->
N.Emonop (N.Onot, WL.mk
(WL.loc e1)
(N.Ebinop (N.Oeq, e1, e2)))
| O.Ebinop (O.Oimplies, e1, e2) ->
let e1_loc = WL.loc e1 in
expr e1 >>= fun e1 ->
expr e2 >|= fun e2 ->
N.Ebinop (N.Oor, e2, WL.mk e1_loc (N.Emonop (N.Onot, e1)))
| O.Ebinop (o, e1, e2) ->
expr e1 >>= fun e1 ->
expr e2 >|= fun e2 ->
N.Ebinop (binop o, e1, e2)
| O.Emonop (o, e) ->
expr e >|= fun e ->
N.Emonop (monop o, e)
| O.Elet (binds, e) ->
bindings binds >>= fun binds ->
expr e >|= fun e ->
N.Elet (binds, e)
| O.Eite (e0, e1, e2) ->
expr e0 >>= fun e0 ->
expr e1 >>= fun e1 ->
expr e2 >|= fun e2 ->
N.Eite (e0, e1, e2)
| O.Epragma (p, e) -> expr e >|= fun e -> N.Epragma (p, e)
| O.Erecord r -> record r
| O.Eaccess (e, ap, default) ->
expr e >>= fun e ->
access_path ap >>= fun ap ->
W.map_opt expr default >|= fun default ->
N.EaccessPath (e, ap, default)
| O.EtestMember (e, ap) ->
expr e >>= fun e ->
(access_path ap
|> if List.length ap > 1 then
W.append
[Common.Warning.(make ~kind:Warning (WL.loc (CCList.hd ap))
"The tail of this access_path has been dropped")]
else fun x -> x)
>|= fun ap ->
N.Ebinop (N.OrecordMember, e, List.hd ap)
| O.Ewith (e1, e2) ->
expr e1 >>= fun e1 ->
expr e2 >|= fun e2 ->
N.Ewith (e1, e2)
and access_path ap = W.map_l ap_field ap
and apf_to_expr = function
| O.AFexpr e -> e.WL.description
| O.AFidentifier s -> O.Econstant (O.Cstring s)
and ap_field f = expr @@ map_loc apf_to_expr f
and bindings b =
let non_inherit_fields, _ = filter_inherit b in
flatten non_inherit_fields >>= fun b ->
W.map_l binding b
and binding b =
let ((apf, annot), e) = WL.description b in
expr e >>= fun e ->
match WL.description apf with
| O.AFidentifier s ->
W.return ((s, annot), e)
| O.AFexpr e' ->
W.append [Common.Warning.make
~kind:Common.Warning.Error
(WL.loc e')
"Dynamic let-bindings are not allowed"] @@
W.return (("%%INVALID_LHS%%", annot), e)
and expr e =
expr_desc (WL.description e) >|= fun description ->
{ e with WL.description }
and open_flag = function
| O.Open -> N.Open
| O.Closed -> N.Closed
and pattern_record_field { O.field_name; default_value; type_annot } =
W.map_opt (fun e ->
expr e >|= fun e ->
(field_name, type_annot), e)
default_value
>|= fun value ->
((field_name, (CCOpt.is_some default_value, type_annot)), value)
and nontrivial_pattern loc :
O.nontrivial_pattern -> (N.nontrivial_pattern * N.binding list) W.t
= function
| O.NPrecord (fields, flag) ->
W.map_l pattern_record_field fields >|=
List.split >>= fun (new_fields, default_values) ->
let default_values = CCList.flat_map CCOpt.to_list default_values
in
begin
try W.return @@ Record.of_list_uniq new_fields
with Invalid_argument _ ->
W.append
[Common.Warning.make loc "Duplicate element in pattern"]
(W.return Record.empty)
end
>|= fun fields ->
N.NPrecord (fields, open_flag flag), default_values
and pattern_desc loc : O.pattern_desc -> (N.pattern_desc * N.binding list) W.t
= function
| O.Pvar (s, mt) -> W.return (N.Pvar (s, mt), [])
| O.Pnontrivial (sub_pat, alias) ->
nontrivial_pattern loc sub_pat >|= fun (sub_pat, default_values) ->
N.Pnontrivial (sub_pat, alias), default_values
and pattern p =
let loc = WL.loc p in
pattern_desc loc @@ WL.description p >|= fun (new_pat, default_values) ->
(WL.mk loc new_pat, default_values)
and constant = function
| O.Cint i -> N.Cint i
| O.Cbool b -> N.Cbool b
| O.Cstring s -> N.Cstring s
| O.Cpath s -> N.Cpath s
| O.Cbracketed s -> N.Cbracketed s
and record r =
let { O.fields; recursive } = r in
if recursive then
let loc = List.hd fields
|> WL.loc
in
bindings fields >|= fun created_bindings ->
let new_record =
N.Erecord (List.map (fun ((var, annot), { WL.location = loc; _}) ->
(WL.mk loc @@ N.Econstant (N.Cstring var),
annot,
WL.mk loc @@ N.Evar var))
created_bindings)
in
N.Elet (created_bindings, WL.mk loc new_record)
else
let non_inherit_fields, inherit_fields = filter_inherit fields
in
W.map_l inherit_to_classic inherit_fields >>= fun inherit_fields ->
flatten non_inherit_fields >>= fun flattened ->
W.map_l
(fun { WL.description = ((apf, annot), e); location } ->
apf_to_expr (WL.description apf)
|> WL.mk location
|> expr
>>= fun label_expr ->
expr e >|= fun rhs_expr ->
(label_expr, annot, rhs_expr))
flattened
>|= fun new_record ->
N.Erecord (new_record @ CCList.flatten inherit_fields)
and lambda pat e =
pattern pat >>= fun (new_pat, default_values) ->
let loc = WL.loc e in
let mangle_name = (^) "%%" in
let mangled_values_def =
List.map
(fun ((var, annot), e) ->
let annot = CCOpt.map
(fun a ->
let loc = WL.loc a in
WL.mk loc
A.(Infix (
Infix_constructors.Or, WL.mk loc (Var "%%undef"), a)))
annot
in
((mangle_name var, annot),
WL.mk (WL.loc e) @@ N.Evar var))
default_values
in
let substitute_values =
List.map
(fun ((var, annot), e) ->
let loc = WL.loc e in
let al = WL.mk loc in
let new_expr =
al @@ N.Eite
(al @@ N.EfunApp
(al @@ N.Evar "%%isUndef", al @@ N.Evar (mangle_name var)),
e,
al @@ N.Evar (mangle_name var))
in
((var, annot), new_expr))
default_values
in
expr e >|= fun body ->
let body =
if default_values = [] then body else
WL.mk loc
(N.Elet (mangled_values_def,
(WL.mk loc
(N.Elet (substitute_values, body)))))
in
N.Elambda (new_pat, body)
and funApp e1 e2 =
expr e1 >>= fun e1 ->
expr e2 >|= fun e2 ->
match WL.description e1 with
| N.Evar "import" -> N.Eimport e2
| _ -> N.EfunApp (e1, e2)
and inherit_to_classic ((base_expr, fields) : O.inherit_)
: N.field list W.t =
let mk_classic { WL.description = name; location = loc } =
let value = match base_expr with
| None -> W.return @@ WL.mk loc @@ N.Evar name
| Some e ->
expr e >|= fun e ->
WL.mk loc
@@ N.EaccessPath (e, [WL.mk loc @@ N.Econstant (N.Cstring name)], None)
in
value >|= fun value ->
(WL.mk loc @@ N.Econstant (N.Cstring name), None, value)
in
W.map_l mk_classic fields
|
ba6ae9456dd170ae5a9b85aa69e074a4d29cabaa18fcaccd3d50a785f6e5339c | SquidDev/illuaminate | parser.ml | open IlluaminateCore
(* [parse ~name contents] runs the Illuaminate Lua parser over
   [contents].  On success the pretty-printed syntax tree is
   returned; on a parse error the rendered error report is returned
   instead -- either way the golden test captures plain text. *)
let parse ~name contents =
  let lexbuf = Lexing.from_string contents in
  let errs = Error.make () in
  let name = Span.Filename.mk name in
  match IlluaminateParser.program name lexbuf with
  | Error err ->
    let buffer = Buffer.create 128 in
    IlluaminateParser.Error.report errs err.span err.value;
    Error.display_of_string ~out:(Format.formatter_of_buffer buffer) (fun _ -> Some contents) errs;
    Buffer.contents buffer
  | Ok parsed -> Syntax.show_program parsed
(* Golden tests: parse every data/parser/*.lua file and compare the
   output against the recorded expectation. *)
let tests =
  OmnomnomGolden.of_directory parse ~group:"The parser" ~directory:"data/parser" ~extension:".lua"
    ()
| null | https://raw.githubusercontent.com/SquidDev/illuaminate/da18b101b4710881b71c42554d70a3a7d17c3cd6/test/parser.ml | ocaml | open IlluaminateCore
let parse ~name contents =
let lexbuf = Lexing.from_string contents in
let errs = Error.make () in
let name = Span.Filename.mk name in
match IlluaminateParser.program name lexbuf with
| Error err ->
let buffer = Buffer.create 128 in
IlluaminateParser.Error.report errs err.span err.value;
Error.display_of_string ~out:(Format.formatter_of_buffer buffer) (fun _ -> Some contents) errs;
Buffer.contents buffer
| Ok parsed -> Syntax.show_program parsed
let tests =
OmnomnomGolden.of_directory parse ~group:"The parser" ~directory:"data/parser" ~extension:".lua"
()
| |
b78cb9a4c67598b0e943c67bb8bd2322aa4f23e802035214eba32d49549fd7e2 | samrushing/irken-compiler | t12.scm |
(include "lib/core.scm")

;; Each thingN ignores two of its three arguments and returns the Nth
;; one plus 1; together they exercise positional argument passing.
(define (thing1 a b c) (+ a 1))
(define (thing2 a b c) (+ b 1))
(define (thing3 a b c) (+ c 1))

;; Expected output: 11, 12, 13.
(print (thing1 10 11 12))
(print (thing2 10 11 12))
(print (thing3 10 11 12))
| null | https://raw.githubusercontent.com/samrushing/irken-compiler/690da48852d55497f873738df54f14e8e135d006/vm/tests/t12.scm | scheme |
(include "lib/core.scm")
(define (thing1 a b c) (+ a 1))
(define (thing2 a b c) (+ b 1))
(define (thing3 a b c) (+ c 1))
(print (thing1 10 11 12))
(print (thing2 10 11 12))
(print (thing3 10 11 12))
| |
207623c794de257f8e5a8cefd38bc909b33dc23412ce98f1ded5372bb2922aa4 | goranmoomin/cl-cowsay | default.lisp | ;;;; default.lisp
(defpackage #:cl-cowsay.default
(:use #:cl
#:alexandria)
(:export #:*defaults*
#:defaults))
(in-package #:cl-cowsay.default)
;; Mode keyword -> cow face plist (:eyes, :tongue, :thoughts -- the
;; character connecting the cow to the speech bubble).  Keys absent
;; here fall back to the plain face in DEFAULTS below.
(defparameter *defaults*
  (alist-hash-table
   '((:borg . (:eyes "==" :tongue " " :thoughts "\\"))
     (:dead . (:eyes "xx" :tongue "U " :thoughts "\\"))
     (:greedy . (:eyes "$$" :tongue " " :thoughts "\\"))
     (:paranoia . (:eyes "@@" :tongue " " :thoughts "\\"))
     (:stoned . (:eyes "**" :tongue "U " :thoughts "\\"))
     (:tired . (:eyes "--" :tongue " " :thoughts "\\"))
     (:wired . (:eyes "OO" :tongue " " :thoughts "\\"))
     (:youthful . (:eyes ".." :tongue " " :thoughts "\\")))))
;; Return the face plist for MODE, falling back to the plain cow
;; ("oo" eyes) when MODE has no entry in *DEFAULTS*.
(defun defaults (mode)
  (gethash mode *defaults* '(:eyes "oo" :tongue " " :thoughts "\\")))
| null | https://raw.githubusercontent.com/goranmoomin/cl-cowsay/36b1123cdbefeb9bcde4572f992705cc7da5acaf/default.lisp | lisp | default.lisp |
(defpackage #:cl-cowsay.default
(:use #:cl
#:alexandria)
(:export #:*defaults*
#:defaults))
(in-package #:cl-cowsay.default)
(defparameter *defaults*
(alist-hash-table
'((:borg . (:eyes "==" :tongue " " :thoughts "\\"))
(:dead . (:eyes "xx" :tongue "U " :thoughts "\\"))
(:greedy . (:eyes "$$" :tongue " " :thoughts "\\"))
(:paranoia . (:eyes "@@" :tongue " " :thoughts "\\"))
(:stoned . (:eyes "**" :tongue "U " :thoughts "\\"))
(:tired . (:eyes "--" :tongue " " :thoughts "\\"))
(:wired . (:eyes "OO" :tongue " " :thoughts "\\"))
(:youthful . (:eyes ".." :tongue " " :thoughts "\\")))))
(defun defaults (mode)
(gethash mode *defaults* '(:eyes "oo" :tongue " " :thoughts "\\")))
|
fff1748b38c83c76f78c8850455d4cb01fdd84e88d6f3eb1db2076a27e9a3d8a | CloudI/CloudI | ct_expand.erl | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
%% --------------------------------------------------
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%% --------------------------------------------------
%% File : ct_expand.erl
@author :
%% @end
Created : 7 Apr 2010 by < >
%%-------------------------------------------------------------------
%% @doc Compile-time expansion utility
%%
%% This module serves as an example of parse_trans-based transforms,
%% but might also be a useful utility in its own right.
%% The transform searches for calls to the pseudo-function
` ct_expand : ) ' , and then replaces the call site with the
result of evaluating ' at compile - time .
%%
%% For example, the line
%%
%% `ct_expand:term(lists:sort([3,5,2,1,4]))'
%%
would be expanded at compile - time to ` [ 1,2,3,4,5 ] ' .
%%
%% ct_expand has now been extended to also evaluate calls to local functions.
%% See examples/ct_expand_test.erl for some examples.
%%
%% A debugging facility exists: passing the option {ct_expand_trace, Flags} as an option,
%% or adding a compiler attribute -ct_expand_trace(Flags) will enable a form of call trace.
%%
%% `Flags' can be `[]' (no trace) or `[F]', where `F' is `c' (call trace),
%% `r' (return trace), or `x' (exception trace)'.
%%
%% @end
-module(ct_expand).
-export([parse_transform/2]).
-export([extract_fun/3,
lfun_rewrite/2]).
-type form() :: any().
-type forms() :: [form()].
-type options() :: [{atom(), any()}].
%% @doc Parse-transform entry point.
%%
%% Walks the module's forms depth-first, expanding every call to the
%% pseudo-function `ct_expand:term/1' into the abstract form of its
%% compile-time value (see xform_fun/6).  On failure the error forms
%% are prepended to the original forms so the compiler reports them.
-spec parse_transform(forms(), options()) ->
    forms().
parse_transform(Forms, Options) ->
    %% Trace flags come from compiler options or a module attribute.
    Trace = ct_trace_opt(Options, Forms),
    case parse_trans:depth_first(fun(T,F,C,A) ->
                                         xform_fun(T,F,C,A,Forms, Trace)
                                 end, [], Forms, Options) of
        {error, Es} ->
            Es ++ Forms;
        {NewForms, _} ->
            parse_trans:revert(NewForms)
    end.
%% @doc Resolve the ct_expand trace flags for this compilation.
%%
%% A `{ct_expand_trace, Flags}' compiler option wins; when absent, the
%% last `-ct_expand_trace(Flags).' attribute in the module is used,
%% defaulting to `[]' (tracing off).
ct_trace_opt(Options, Forms) ->
    case proplists:get_value(ct_expand_trace, Options) of
        Flags when is_list(Flags) ->
            Flags;
        undefined ->
            Attrs = [Opt || {attribute, _, ct_expand_trace, Opt} <- Forms],
            case Attrs of
                [] -> [];
                _  -> lists:last(Attrs)
            end
    end.
%% @doc depth_first callback.  When visiting an application node that
%% analyzes as `ct_expand:term/1', evaluate its argument at compile
%% time (resolving local calls against Forms) and replace the whole
%% call site with the abstract form of the resulting value.  Any
%% other form passes through unchanged.
xform_fun(application, Form, _Ctxt, Acc, Forms, Trace) ->
    MFA = erl_syntax_lib:analyze_application(Form),
    case MFA of
        {?MODULE, {term, 1}} ->
            %% Local-function handler: lets the expression under
            %% evaluation call other functions defined in this module.
            LFH = fun(Name, Args, Bs) ->
                          eval_lfun(
                            extract_fun(Name, length(Args), Forms),
                            Args, Bs, Forms, Trace)
                  end,
            Args = erl_syntax:application_arguments(Form),
            RevArgs = parse_trans:revert(Args),
            case erl_eval:exprs(RevArgs, [], {eval, LFH}) of
                {value, Value,[]} ->
                    %% Successful evaluation: splice in the value.
                    {abstract(Value), Acc};
                Other ->
                    parse_trans:error(cannot_evaluate,?LINE,
                                      [{expr, RevArgs},
                                       {error, Other}])
            end;
        _ ->
            {Form, Acc}
    end;
xform_fun(_, Form, _Ctxt, Acc, _, _) ->
    {Form, Acc}.
%% @doc Look up the abstract form of local function Name/Arity among
%% the module's forms.  Fails with `{undef, [{Name, Arity}]}' when no
%% such function is defined.
extract_fun(Name, Arity, Forms) ->
    Matches = [Form || {function, _, N, A, _Cs} = Form <- Forms,
                       N == Name, A == Arity],
    case Matches of
        [FForm] -> FForm;
        []      -> erlang:error({undef, [{Name, Arity}]})
    end.
%% @doc Evaluate a call to a local function (given by its abstract
%% form) at compile time.
%%
%% The argument expressions are evaluated left-to-right, threading the
%% caller's bindings, then re-abstracted so the call can be run as a
%% constructed `fun' application in fresh bindings.  Trace flags:
%% any non-empty Trace logs calls; `r' or `x' logs returns; `x' also
%% logs exceptions, which are re-raised for the caller.
eval_lfun({function,L,F,_,Clauses}, Args, Bs, Forms, Trace) ->
    try
        {ArgsV, Bs1} = lists:mapfoldl(
                         fun(A, Bs_) ->
                                 {value,AV,Bs1_} =
                                     erl_eval:expr(A, Bs_, lfh(Forms, Trace)),
                                 {abstract(AV), Bs1_}
                         end, Bs, Args),
        Expr = {call, L, {'fun', L, {clauses, lfun_rewrite(Clauses, Forms)}}, ArgsV},
        call_trace(Trace =/= [], L, F, ArgsV),
        {value, Ret, _} =
            erl_eval:expr(Expr, erl_eval:new_bindings(), lfh(Forms, Trace)),
        ret_trace(lists:member(r, Trace) orelse lists:member(x, Trace),
                  L, F, Args, Ret),
        %% restore bindings: the callee's own bindings are discarded;
        %% only those produced while evaluating the arguments survive.
        {value, Ret, Bs1}
    catch
        error:Err ->
            exception_trace(lists:member(x, Trace), L, F, Args, Err),
            error(Err)
    end.
%% @doc Build the local-function handler handed to erl_eval, so that
%% calls to functions defined in this module are themselves evaluated
%% from their abstract forms (the module is not compiled yet).
lfh(Forms, Trace) ->
    {eval, fun(Name, As, Bs1) ->
                   eval_lfun(
                     extract_fun(Name, length(As), Forms),
                     As, Bs1, Forms, Trace)
           end}.
%% @doc Emit a call-trace line when tracing is enabled, else do
%% nothing.  Returns ok either way.
call_trace(Enabled, L, F, As) ->
    case Enabled of
        true ->
            io:fwrite("ct_expand (~w): call ~s~n", [L, pp_function(F, As)]);
        false ->
            ok
    end.
%% @doc Render a call as "name(Arg1,Arg2,...)" for trace output.  The
%% arguments arrive as abstract terms and are normalised back to
%% plain values before formatting.
pp_function(Name, AbsArgs) ->
    Rendered = [io_lib:fwrite("~w", [erl_parse:normalise(A)])
                || A <- AbsArgs],
    lists:flatten([atom_to_list(Name), "(", lists:join(",", Rendered), ")"]).
%% @doc Emit a return-trace line when return tracing is enabled.
ret_trace(true, L, F, Args, Res) ->
    Arity = length(Args),
    io:fwrite("ct_expand (~w): returned from ~w/~w: ~w~n",
              [L, F, Arity, Res]);
ret_trace(false, _L, _F, _Args, _Res) ->
    ok.
%% @doc Emit an exception-trace line when exception tracing (`x') is
%% enabled.
exception_trace(Enabled, L, F, Args, Err) ->
    case Enabled of
        false ->
            ok;
        true ->
            io:fwrite("ct_expand (~w): exception from ~w/~w: ~p~n",
                      [L, F, length(Args), Err])
    end.
%% @doc Rewrite `fun Name/Arity' references inside the clauses being
%% evaluated into explicit `fun (...) -> ... end' expressions built
%% from the local function's own clauses, so erl_eval can apply them
%% even though the module is not compiled yet.
lfun_rewrite(Exprs, Forms) ->
    parse_trans:plain_transform(
      fun({'fun',L,{function,F,A}}) ->
              {function,_,_,_,Cs} = extract_fun(F, A, Forms),
              {'fun',L,{clauses, Cs}};
         (_) ->
              continue
      end, Exprs).
%% abstract/1 - modified from erl_eval:abstract/1:
%%
%% Turns a runtime term into its abstract-syntax representation (all
%% annotations at line 0).  Unlike erl_parse:abstract/1 it also
%% accepts interpreted funs (erl_eval closures), embedding their
%% clauses directly as a 'fun' expression.
%%
%% Fix: the dump had garbled the `false ->' branch of the fun_data
%% case (only comment text remained where code should be); restored
%% `erlang:error(function_clause)', mirroring erl_parse:abstract/1,
%% which cannot represent compiled funs either.
-type abstract_expr() :: term().
-spec abstract(Data) -> AbsTerm when
      Data :: term(),
      AbsTerm :: abstract_expr().
abstract(T) when is_function(T) ->
    case erlang:fun_info(T, module) of
        {module, erl_eval} ->
            case erl_eval:fun_data(T) of
                {fun_data, _Imports, Clauses} ->
                    {'fun', 0, {clauses, Clauses}};
                false ->
                    %% mimicking erl_parse:abstract(T)
                    erlang:error(function_clause)
            end;
        _ ->
            erlang:error(function_clause)
    end;
abstract(T) when is_integer(T) -> {integer,0,T};
abstract(T) when is_float(T) -> {float,0,T};
abstract(T) when is_atom(T) -> {atom,0,T};
abstract([]) -> {nil,0};
abstract(B) when is_bitstring(B) ->
    {bin, 0, [abstract_byte(Byte, 0) || Byte <- bitstring_to_list(B)]};
abstract([C|T]) when is_integer(C), 0 =< C, C < 256 ->
    %% Looks like the start of a printable string; abstract_string/2
    %% falls back to a cons chain if a non-byte element shows up.
    abstract_string(T, [C]);
abstract([H|T]) ->
    {cons,0,abstract(H),abstract(T)};
abstract(Map) when is_map(Map) ->
    {map,0,abstract_map(Map)};
abstract(Tuple) when is_tuple(Tuple) ->
    {tuple,0,abstract_list(tuple_to_list(Tuple))}.
%% @doc Continue abstracting a list that has so far looked like a
%% string; String accumulates the bytes seen, in reverse.  A non-byte
%% element aborts string mode: the accumulated prefix is rebuilt as a
%% cons chain on top of the abstracted remainder (see not_string/2).
abstract_string([C|T], String) when is_integer(C), 0 =< C, C < 256 ->
    abstract_string(T, [C|String]);
abstract_string([], String) ->
    {string, 0, lists:reverse(String)};
abstract_string(T, String) ->
    not_string(String, abstract(T)).
%% @doc Rebuild a reversed prefix of byte values as a cons chain on
%% top of Result (used when a list stopped looking like a string).
not_string(RevBytes, Result) ->
    lists:foldl(
      fun(C, Acc) -> {cons, 0, {integer, 0, C}, Acc} end,
      Result, RevBytes).
%% @doc Abstract each element of a proper list.  Note the result is a
%% plain Erlang list of forms (used for tuple contents), not a
%% cons-chain form.
abstract_list([H|T]) ->
    [abstract(H)|abstract_list(T)];
abstract_list([]) ->
    [].
%% @doc Abstract a map's contents: every key/value pair becomes a
%% `map_field_assoc' (the `=>' association) field form.
abstract_map(Map) ->
    [{map_field_assoc,0,abstract(K),abstract(V)}
     || {K,V} <- maps:to_list(Map)
    ].
%% @doc Abstract one element of a bitstring: whole bytes get the
%% default size/type specifiers, while a trailing partial byte (a
%% sub-byte bitstring) carries its exact bit size.
abstract_byte(Elem, Line) ->
    case Elem of
        Byte when is_integer(Byte) ->
            {bin_element, Line, {integer, Line, Byte}, default, default};
        Bits ->
            Size = bit_size(Bits),
            <<Value:Size>> = Bits,
            {bin_element, Line, {integer, Line, Value},
             {integer, Line, Size}, default}
    end.
| null | https://raw.githubusercontent.com/CloudI/CloudI/3e45031c7ee3e974ead2612ea7dd06c9edf973c9/src/external/cloudi_x_parse_trans/src/ct_expand.erl | erlang | --------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
--------------------------------------------------
File : ct_expand.erl
@end
-------------------------------------------------------------------
@doc Compile-time expansion utility
This module serves as an example of parse_trans-based transforms,
but might also be a useful utility in its own right.
The transform searches for calls to the pseudo-function
For example, the line
`ct_expand:term(lists:sort([3,5,2,1,4]))'
ct_expand has now been extended to also evaluate calls to local functions.
See examples/ct_expand_test.erl for some examples.
A debugging facility exists: passing the option {ct_expand_trace, Flags} as an option,
or adding a compiler attribute -ct_expand_trace(Flags) will enable a form of call trace.
`Flags' can be `[]' (no trace) or `[F]', where `F' is `c' (call trace),
`r' (return trace), or `x' (exception trace)'.
@end
restore bindings
abstract/1 - modified from erl_eval:abstract/1: | -*- erlang - indent - level : 4;indent - tabs - mode : nil -*-
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@author :
Created : 7 Apr 2010 by < >
` ct_expand : ) ' , and then replaces the call site with the
result of evaluating ' at compile - time .
would be expanded at compile - time to ` [ 1,2,3,4,5 ] ' .
-module(ct_expand).
-export([parse_transform/2]).
-export([extract_fun/3,
lfun_rewrite/2]).
-type form() :: any().
-type forms() :: [form()].
-type options() :: [{atom(), any()}].
-spec parse_transform(forms(), options()) ->
forms().
parse_transform(Forms, Options) ->
Trace = ct_trace_opt(Options, Forms),
case parse_trans:depth_first(fun(T,F,C,A) ->
xform_fun(T,F,C,A,Forms, Trace)
end, [], Forms, Options) of
{error, Es} ->
Es ++ Forms;
{NewForms, _} ->
parse_trans:revert(NewForms)
end.
ct_trace_opt(Options, Forms) ->
case proplists:get_value(ct_expand_trace, Options) of
undefined ->
case [Opt || {attribute,_,ct_expand_trace,Opt} <- Forms] of
[] ->
[];
[_|_] = L ->
lists:last(L)
end;
Flags when is_list(Flags) ->
Flags
end.
xform_fun(application, Form, _Ctxt, Acc, Forms, Trace) ->
MFA = erl_syntax_lib:analyze_application(Form),
case MFA of
{?MODULE, {term, 1}} ->
LFH = fun(Name, Args, Bs) ->
eval_lfun(
extract_fun(Name, length(Args), Forms),
Args, Bs, Forms, Trace)
end,
Args = erl_syntax:application_arguments(Form),
RevArgs = parse_trans:revert(Args),
case erl_eval:exprs(RevArgs, [], {eval, LFH}) of
{value, Value,[]} ->
{abstract(Value), Acc};
Other ->
parse_trans:error(cannot_evaluate,?LINE,
[{expr, RevArgs},
{error, Other}])
end;
_ ->
{Form, Acc}
end;
xform_fun(_, Form, _Ctxt, Acc, _, _) ->
{Form, Acc}.
extract_fun(Name, Arity, Forms) ->
case [F_ || {function,_,N_,A_,_Cs} = F_ <- Forms,
N_ == Name, A_ == Arity] of
[] ->
erlang:error({undef, [{Name, Arity}]});
[FForm] ->
FForm
end.
eval_lfun({function,L,F,_,Clauses}, Args, Bs, Forms, Trace) ->
try
{ArgsV, Bs1} = lists:mapfoldl(
fun(A, Bs_) ->
{value,AV,Bs1_} =
erl_eval:expr(A, Bs_, lfh(Forms, Trace)),
{abstract(AV), Bs1_}
end, Bs, Args),
Expr = {call, L, {'fun', L, {clauses, lfun_rewrite(Clauses, Forms)}}, ArgsV},
call_trace(Trace =/= [], L, F, ArgsV),
{value, Ret, _} =
erl_eval:expr(Expr, erl_eval:new_bindings(), lfh(Forms, Trace)),
ret_trace(lists:member(r, Trace) orelse lists:member(x, Trace),
L, F, Args, Ret),
{value, Ret, Bs1}
catch
error:Err ->
exception_trace(lists:member(x, Trace), L, F, Args, Err),
error(Err)
end.
lfh(Forms, Trace) ->
{eval, fun(Name, As, Bs1) ->
eval_lfun(
extract_fun(Name, length(As), Forms),
As, Bs1, Forms, Trace)
end}.
call_trace(false, _, _, _) -> ok;
call_trace(true, L, F, As) ->
io:fwrite("ct_expand (~w): call ~s~n", [L, pp_function(F, As)]).
pp_function(F, []) ->
atom_to_list(F) ++ "()";
pp_function(F, [A|As]) ->
lists:flatten([atom_to_list(F), "(",
[io_lib:fwrite("~w", [erl_parse:normalise(A)]) |
[[",", io_lib:fwrite("~w", [erl_parse:normalise(A_)])] || A_ <- As]],
")"]).
ret_trace(false, _, _, _, _) -> ok;
ret_trace(true, L, F, Args, Res) ->
io:fwrite("ct_expand (~w): returned from ~w/~w: ~w~n",
[L, F, length(Args), Res]).
exception_trace(false, _, _, _, _) -> ok;
exception_trace(true, L, F, Args, Err) ->
io:fwrite("ct_expand (~w): exception from ~w/~w: ~p~n", [L, F, length(Args), Err]).
lfun_rewrite(Exprs, Forms) ->
parse_trans:plain_transform(
fun({'fun',L,{function,F,A}}) ->
{function,_,_,_,Cs} = extract_fun(F, A, Forms),
{'fun',L,{clauses, Cs}};
(_) ->
continue
end, Exprs).
-type abstract_expr() :: term().
-spec abstract(Data) -> AbsTerm when
Data :: term(),
AbsTerm :: abstract_expr().
abstract(T) when is_function(T) ->
case erlang:fun_info(T, module) of
{module, erl_eval} ->
case erl_eval:fun_data(T) of
{fun_data, _Imports, Clauses} ->
{'fun', 0, {clauses, Clauses}};
false ->
mimicking erl_parse : )
end;
_ ->
erlang:error(function_clause)
end;
abstract(T) when is_integer(T) -> {integer,0,T};
abstract(T) when is_float(T) -> {float,0,T};
abstract(T) when is_atom(T) -> {atom,0,T};
abstract([]) -> {nil,0};
abstract(B) when is_bitstring(B) ->
{bin, 0, [abstract_byte(Byte, 0) || Byte <- bitstring_to_list(B)]};
abstract([C|T]) when is_integer(C), 0 =< C, C < 256 ->
abstract_string(T, [C]);
abstract([H|T]) ->
{cons,0,abstract(H),abstract(T)};
abstract(Map) when is_map(Map) ->
{map,0,abstract_map(Map)};
abstract(Tuple) when is_tuple(Tuple) ->
{tuple,0,abstract_list(tuple_to_list(Tuple))}.
abstract_string([C|T], String) when is_integer(C), 0 =< C, C < 256 ->
abstract_string(T, [C|String]);
abstract_string([], String) ->
{string, 0, lists:reverse(String)};
abstract_string(T, String) ->
not_string(String, abstract(T)).
not_string([C|T], Result) ->
not_string(T, {cons, 0, {integer, 0, C}, Result});
not_string([], Result) ->
Result.
abstract_list([H|T]) ->
[abstract(H)|abstract_list(T)];
abstract_list([]) ->
[].
abstract_map(Map) ->
[{map_field_assoc,0,abstract(K),abstract(V)}
|| {K,V} <- maps:to_list(Map)
].
abstract_byte(Byte, Line) when is_integer(Byte) ->
{bin_element, Line, {integer, Line, Byte}, default, default};
abstract_byte(Bits, Line) ->
Sz = bit_size(Bits),
<<Val:Sz>> = Bits,
{bin_element, Line, {integer, Line, Val}, {integer, Line, Sz}, default}.
|
16f2490ddf85158029f5170081434ae20529c66caa9fee4692c03576881e5b6e | mokus0/junkbox | Moku.hs | module Math.Moku
where
x = 5
y = (6, "Hello")
z = x * fst y
roots [a, b, c] =
[
(-b + det) / (2 * a),
(-b - det) / (2 * a)
]
where
det = sqrt (b*b - 4*a*c)
fib 0 = 0
fib 1 = 1
fib n = if n > 1
then fib (n - 1) + fib (n - 2)
else fib (n + 2) - fib (n + 1)
xor True = not
xor False = id
join :: [a] -> [[a]] -> [a]
join _ [x] = x
join j (x:xs) = x ++ j ++ (join j xs)
join _ [] = []
fibs = 0 : 1 : zipWith (+) fibs (tail fibs)
first _ [] = Nothing
first p (x:xs) = if p x
then Just x
else first p xs
unmaybe x Nothing = x
unmaybe _ Just x = x
primes = 2 : map primeafter primes
where primeafter x = unmaybe 0 first isPrime [ x+1 .. ]
-- isPrime x = | null | https://raw.githubusercontent.com/mokus0/junkbox/151014bbef9db2b9205209df66c418d6d58b0d9e/Haskell/Math/Moku.hs | haskell | isPrime x = | module Math.Moku
where
x = 5
y = (6, "Hello")
z = x * fst y
roots [a, b, c] =
[
(-b + det) / (2 * a),
(-b - det) / (2 * a)
]
where
det = sqrt (b*b - 4*a*c)
fib 0 = 0
fib 1 = 1
fib n = if n > 1
then fib (n - 1) + fib (n - 2)
else fib (n + 2) - fib (n + 1)
xor True = not
xor False = id
join :: [a] -> [[a]] -> [a]
join _ [x] = x
join j (x:xs) = x ++ j ++ (join j xs)
join _ [] = []
fibs = 0 : 1 : zipWith (+) fibs (tail fibs)
first _ [] = Nothing
first p (x:xs) = if p x
then Just x
else first p xs
unmaybe x Nothing = x
unmaybe _ Just x = x
primes = 2 : map primeafter primes
where primeafter x = unmaybe 0 first isPrime [ x+1 .. ] |
71be2f11be072e66d31c6475c1deef577938d6c6082e8c0dbf43dd4b64e6ffbb | haskell/cabal | CabalGH5213Other.hs | module CabalGH5213Other where
bar :: Int
bar = 42
| null | https://raw.githubusercontent.com/haskell/cabal/496d6fcc26779e754523a6cc7576aea49ef8056e/cabal-testsuite/PackageTests/Regression/T5213ExeCoverage/src/CabalGH5213Other.hs | haskell | module CabalGH5213Other where
bar :: Int
bar = 42
| |
26615bb071acafa749e76a4f68be4de8a2e684709df0a6fdb3cde8a453c8a1b2 | KMahoney/squee | StdLib.hs | module Squee.StdLib (stdLib) where
import qualified Data.Text as T
import qualified Data.Map as M
import qualified Squee.Types.Type as Type
import qualified Database.Schema as Schema
import qualified Squee.QueryBuilder as QB
import Squee.Types.CommonType
import Squee.AST (Symbol(..))
import Squee.Eval
type EnvEntry = (Value, Type.TypeSchema)
stdLib :: M.Map Symbol EnvEntry
stdLib =
M.fromList
[ (Symbol "map", stdMap)
, (Symbol "filter", stdFilter)
, (Symbol "order", stdOrder)
, (Symbol "natjoin", stdNatJoin)
, (Symbol "join", stdJoin)
, (Symbol "|", stdPipe)
, (Symbol "aggregate", stdAggregate)
, (Symbol "sum", stdSum)
, (Symbol "count", stdCount)
, stdBinOp "=" stdEqT
, stdBinOp "+" stdNumOpT
, stdBinOp "-" stdNumOpT
, stdBinOp "*" stdNumOpT
, stdBinOp "/" stdNumOpT
, stdBinOp "<" stdCompareT
, stdBinOp ">" stdCompareT
]
-- Helpers
fnValue :: ([Value] -> Value) -> Int -> Value
fnValue f arity = VFn (FnValue f arity [])
tv :: Int -> Type.Type
tv = Type.TypeVar
dbRow :: Int -> Type.Pred
dbRow = Type.ValuesInClass Type.DbValue . tv
schema :: [Int] -> Type.Type -> Type.TypeSchema
schema i = Type.TypeSchema i . Type.Qual []
schemaQual :: [Int] -> [Type.Pred] -> Type.Type -> Type.TypeSchema
schemaQual i q t = Type.TypeSchema i (Type.Qual q t)
queryToRowValue :: QB.Query -> Value
queryToRowValue =
VRow . M.fromList . map (\(Schema.ColumnName c) -> (Symbol c, VSqlExpr (QB.EField c))) . QB.columnNames
queryToQualRowValue :: Schema.TableName -> QB.Query -> Value
queryToQualRowValue (Schema.TableName table) =
VRow . M.fromList . map (\(Schema.ColumnName c) -> (Symbol c, VSqlExpr (QB.EQualifiedField table c))) . QB.columnNames
(-->) :: Type.Type -> Type.Type -> Type.Type
> ) = tFn
infixr -->
-- Filter
stdFilter :: EnvEntry
stdFilter = (fnValue impl 2, ty)
where
impl [VFn fn, VQuery q] =
case fnEval fn [queryToRowValue q] of
VSqlExpr e ->
VQuery (QB.applyFilter e q)
_ ->
error "expecting sql expression"
impl _ = undefined
ty = schemaQual [0] [dbRow 0] $ ((tRow (tv 0)) --> tBool) --> tQuery (tRow (tv 0)) --> tQuery (tRow (tv 0))
-- Order
stdOrder :: EnvEntry
stdOrder = (fnValue impl 2, ty)
where
impl [VFn fn, VQuery q] =
case evalFn fn [queryToRowValue q] of
VSqlExpr e ->
VQuery (QB.applyOrder e q)
_ ->
error "expecting sql expression"
impl _ = undefined
ty = schemaQual [0, 1] [dbRow 0, Type.InClass Type.Comparable (tv 1)] $
> ( tv 1 ) ) -- > tQuery ( tRow ( tv 0 ) ) -- > tQuery ( tRow ( tv 0 ) )
-- Map
stdMap :: EnvEntry
stdMap = (fnValue impl 2, ty)
where
impl [VFn fn, VQuery q] =
case evalFn fn [queryToRowValue q] of
VRow rowExprs ->
let rowExprs' = M.fromList $ map (\(Symbol k, VSqlExpr expr) -> (k, expr)) $ M.toList rowExprs in
VQuery (QB.applyMap rowExprs' q)
_ ->
error "expecting row"
impl _ = undefined
> tRow ( tv 1 ) ) -- > tQuery ( tRow ( tv 0 ) ) -- > tQuery ( tRow ( tv 1 ) )
-- Natural Join
stdNatJoin :: EnvEntry
stdNatJoin = (fnValue impl 2, ty)
where
impl [VQuery a, VQuery b] =
VQuery $ QB.applyNatJoin a b
impl _ = undefined
ty = schemaQual [0, 1, 2] [dbRow 0, dbRow 1, dbRow 2, Type.NatJoin (tv 2) (tv 0) (tv 1)] $
> tQuery ( tRow ( tv 1 ) ) -- > tQuery ( tRow ( tv 2 ) )
-- Join
stdJoin :: EnvEntry
stdJoin = (fnValue impl 4, ty)
where
impl [VFn condFn, VFn mergeFn, VQuery a, VQuery b] =
let rowA = queryToQualRowValue (Schema.TableName "_t") a
rowB = queryToQualRowValue (Schema.TableName "_j0") b
VSqlExpr cond = fnEval condFn [rowA, rowB]
VRow merge = fnEval mergeFn [rowA, rowB]
merge' = M.fromList $ map (\(Symbol k, VSqlExpr expr) -> (k, expr)) $ M.toList merge
in
VQuery $ QB.applyJoin cond merge' a b
impl values = error $ "invalid join application: " <> show values
rA = tRow (tv 0)
rB = tRow (tv 1)
rC = tRow (tv 2)
qA = tQuery rA
qB = tQuery rB
qC = tQuery rC
ty = schemaQual [0, 1, 2] [dbRow 0, dbRow 1, dbRow 2] $
(rA --> rB --> tBool) --> (rA --> rB --> rC) --> qA --> qB --> qC
-- Pipe
stdPipe :: EnvEntry
stdPipe = (fnValue impl 2, ty)
where
impl [x, VFn fn] = evalFn fn [x]
impl _ = undefined
> ( tv 0 -- > tv 1 ) -- > tv 1
-- Aggregate
stdAggregate :: EnvEntry
stdAggregate = (fnValue impl 2, ty)
where
impl [VFn fn, VQuery q] =
case evalFn fn [queryToRowValue q] of
VRow rowExprs ->
let rowExprs' = M.fromList $ map (\(Symbol k, VSqlExpr expr) -> (k, expr)) $ M.toList rowExprs in
VQuery (QB.applyMap rowExprs' q)
_ ->
error "expecting row"
impl _ = undefined
> tRow ( tv 1 ) ) -- > tQuery ( tRow ( tv 0 ) ) -- > tQuery ( tRow ( tv 2 ) )
-- Sum
stdSum :: EnvEntry
stdSum = (fnValue impl 1, ty)
where
impl [VSqlExpr x] = VSqlExpr $ QB.EFn "sum" [x]
impl _ = undefined
ty = schemaQual [0] [Type.InClass Type.Num (tv 0)] $ tv 0 --> tAgg (tv 0)
-- Count
stdCount :: EnvEntry
stdCount = (VSqlExpr (QB.ERaw "count(*)"), ty)
where
ty = schema [] $ tAgg tInt4
-- Binary operations
stdBinOp :: T.Text -> Type.TypeSchema -> (Symbol, EnvEntry)
stdBinOp op t =
(Symbol op, (fnValue (stdBinOpImpl op) 2, t))
stdBinOpImpl :: T.Text -> [Value] -> Value
stdBinOpImpl op [a, b] = case (a, b) of
(VSqlExpr a', VSqlExpr b') ->
VSqlExpr (QB.EBinOp op a' b')
_ ->
undefined
stdBinOpImpl _ _ = undefined
-- Numeric operations
stdEqT :: Type.TypeSchema
stdEqT = schemaQual [0] [Type.InClass Type.Comparable (tv 0)] $ (tv 0) --> (tv 0) --> tBool
stdNumOpT :: Type.TypeSchema
stdNumOpT = schemaQual [0] [Type.InClass Type.Num (tv 0)] $ (tv 0) --> (tv 0) --> (tv 0)
stdCompareT :: Type.TypeSchema
stdCompareT = schemaQual [0] [Type.InClass Type.Comparable (tv 0)] $ (tv 0) --> (tv 0) --> tBool
| null | https://raw.githubusercontent.com/KMahoney/squee/b2ebad2ec6f3c866398600793d0a231edc8c23e4/src/Squee/StdLib.hs | haskell | Helpers
>) :: Type.Type -> Type.Type -> Type.Type
>
Filter
> tBool) --> tQuery (tRow (tv 0)) --> tQuery (tRow (tv 0))
Order
> tQuery ( tRow ( tv 0 ) ) -- > tQuery ( tRow ( tv 0 ) )
Map
> tQuery ( tRow ( tv 0 ) ) -- > tQuery ( tRow ( tv 1 ) )
Natural Join
> tQuery ( tRow ( tv 2 ) )
Join
> rB --> tBool) --> (rA --> rB --> rC) --> qA --> qB --> qC
Pipe
> tv 1 ) -- > tv 1
Aggregate
> tQuery ( tRow ( tv 0 ) ) -- > tQuery ( tRow ( tv 2 ) )
Sum
> tAgg (tv 0)
Count
Binary operations
Numeric operations
> (tv 0) --> tBool
> (tv 0) --> (tv 0)
> (tv 0) --> tBool | module Squee.StdLib (stdLib) where
import qualified Data.Text as T
import qualified Data.Map as M
import qualified Squee.Types.Type as Type
import qualified Database.Schema as Schema
import qualified Squee.QueryBuilder as QB
import Squee.Types.CommonType
import Squee.AST (Symbol(..))
import Squee.Eval
type EnvEntry = (Value, Type.TypeSchema)
stdLib :: M.Map Symbol EnvEntry
stdLib =
M.fromList
[ (Symbol "map", stdMap)
, (Symbol "filter", stdFilter)
, (Symbol "order", stdOrder)
, (Symbol "natjoin", stdNatJoin)
, (Symbol "join", stdJoin)
, (Symbol "|", stdPipe)
, (Symbol "aggregate", stdAggregate)
, (Symbol "sum", stdSum)
, (Symbol "count", stdCount)
, stdBinOp "=" stdEqT
, stdBinOp "+" stdNumOpT
, stdBinOp "-" stdNumOpT
, stdBinOp "*" stdNumOpT
, stdBinOp "/" stdNumOpT
, stdBinOp "<" stdCompareT
, stdBinOp ">" stdCompareT
]
fnValue :: ([Value] -> Value) -> Int -> Value
fnValue f arity = VFn (FnValue f arity [])
tv :: Int -> Type.Type
tv = Type.TypeVar
dbRow :: Int -> Type.Pred
dbRow = Type.ValuesInClass Type.DbValue . tv
schema :: [Int] -> Type.Type -> Type.TypeSchema
schema i = Type.TypeSchema i . Type.Qual []
schemaQual :: [Int] -> [Type.Pred] -> Type.Type -> Type.TypeSchema
schemaQual i q t = Type.TypeSchema i (Type.Qual q t)
queryToRowValue :: QB.Query -> Value
queryToRowValue =
VRow . M.fromList . map (\(Schema.ColumnName c) -> (Symbol c, VSqlExpr (QB.EField c))) . QB.columnNames
queryToQualRowValue :: Schema.TableName -> QB.Query -> Value
queryToQualRowValue (Schema.TableName table) =
VRow . M.fromList . map (\(Schema.ColumnName c) -> (Symbol c, VSqlExpr (QB.EQualifiedField table c))) . QB.columnNames
> ) = tFn
stdFilter :: EnvEntry
stdFilter = (fnValue impl 2, ty)
where
impl [VFn fn, VQuery q] =
case fnEval fn [queryToRowValue q] of
VSqlExpr e ->
VQuery (QB.applyFilter e q)
_ ->
error "expecting sql expression"
impl _ = undefined
stdOrder :: EnvEntry
stdOrder = (fnValue impl 2, ty)
where
impl [VFn fn, VQuery q] =
case evalFn fn [queryToRowValue q] of
VSqlExpr e ->
VQuery (QB.applyOrder e q)
_ ->
error "expecting sql expression"
impl _ = undefined
ty = schemaQual [0, 1] [dbRow 0, Type.InClass Type.Comparable (tv 1)] $
stdMap :: EnvEntry
stdMap = (fnValue impl 2, ty)
where
impl [VFn fn, VQuery q] =
case evalFn fn [queryToRowValue q] of
VRow rowExprs ->
let rowExprs' = M.fromList $ map (\(Symbol k, VSqlExpr expr) -> (k, expr)) $ M.toList rowExprs in
VQuery (QB.applyMap rowExprs' q)
_ ->
error "expecting row"
impl _ = undefined
stdNatJoin :: EnvEntry
stdNatJoin = (fnValue impl 2, ty)
where
impl [VQuery a, VQuery b] =
VQuery $ QB.applyNatJoin a b
impl _ = undefined
ty = schemaQual [0, 1, 2] [dbRow 0, dbRow 1, dbRow 2, Type.NatJoin (tv 2) (tv 0) (tv 1)] $
stdJoin :: EnvEntry
stdJoin = (fnValue impl 4, ty)
where
impl [VFn condFn, VFn mergeFn, VQuery a, VQuery b] =
let rowA = queryToQualRowValue (Schema.TableName "_t") a
rowB = queryToQualRowValue (Schema.TableName "_j0") b
VSqlExpr cond = fnEval condFn [rowA, rowB]
VRow merge = fnEval mergeFn [rowA, rowB]
merge' = M.fromList $ map (\(Symbol k, VSqlExpr expr) -> (k, expr)) $ M.toList merge
in
VQuery $ QB.applyJoin cond merge' a b
impl values = error $ "invalid join application: " <> show values
rA = tRow (tv 0)
rB = tRow (tv 1)
rC = tRow (tv 2)
qA = tQuery rA
qB = tQuery rB
qC = tQuery rC
ty = schemaQual [0, 1, 2] [dbRow 0, dbRow 1, dbRow 2] $
stdPipe :: EnvEntry
stdPipe = (fnValue impl 2, ty)
where
impl [x, VFn fn] = evalFn fn [x]
impl _ = undefined
stdAggregate :: EnvEntry
stdAggregate = (fnValue impl 2, ty)
where
impl [VFn fn, VQuery q] =
case evalFn fn [queryToRowValue q] of
VRow rowExprs ->
let rowExprs' = M.fromList $ map (\(Symbol k, VSqlExpr expr) -> (k, expr)) $ M.toList rowExprs in
VQuery (QB.applyMap rowExprs' q)
_ ->
error "expecting row"
impl _ = undefined
stdSum :: EnvEntry
stdSum = (fnValue impl 1, ty)
where
impl [VSqlExpr x] = VSqlExpr $ QB.EFn "sum" [x]
impl _ = undefined
stdCount :: EnvEntry
stdCount = (VSqlExpr (QB.ERaw "count(*)"), ty)
where
ty = schema [] $ tAgg tInt4
stdBinOp :: T.Text -> Type.TypeSchema -> (Symbol, EnvEntry)
stdBinOp op t =
(Symbol op, (fnValue (stdBinOpImpl op) 2, t))
stdBinOpImpl :: T.Text -> [Value] -> Value
stdBinOpImpl op [a, b] = case (a, b) of
(VSqlExpr a', VSqlExpr b') ->
VSqlExpr (QB.EBinOp op a' b')
_ ->
undefined
stdBinOpImpl _ _ = undefined
stdEqT :: Type.TypeSchema
stdNumOpT :: Type.TypeSchema
stdCompareT :: Type.TypeSchema
|
3280a593107b6aa61d31f6f0fff7f2cbf05a90248aa46ef477f8fd6c84422d62 | mirage/mirage | functoria.mli |
* Copyright ( c ) 2013 - 2020 < >
* Copyright ( c ) 2013 - 2020 Anil Madhavapeddy < >
* Copyright ( c ) 2015 - 2020 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2013-2020 Thomas Gazagnaire <>
* Copyright (c) 2013-2020 Anil Madhavapeddy <>
* Copyright (c) 2015-2020 Gabriel Radanne <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
* { 1 The Functoria DSL }
* Functoria is a DSL to describe a set of modules and functors , their types
and how to apply them in order to produce a complete application .
The main use case is mirage . See the [ Mirage ] documentation for details .
Functoria is a DSL to write configuration files for functor - heavy
applications . Such configuration files ( imaginatively called [ config.ml ] )
usually contains three parts : one for defining toplevel modules , one for
defining configuration kyes and one for defining applications using these
modules and keys .
{ 2 Defining toplevel modules }
To define toplevel modules , use the { ! main } function . Among its various
arguments , it takes the module name and its signature . The type is assembled
with the { ! Type } combinators , like the [ @- > ] operator , which represents a
functor arrow .
{ [
let main = main " Unikernel . Main " ( m @- > job )
] }
This declares that the functor [ Unikernel . Main ] takes a module of type [ m ]
and returns a module of type { ! } . [ job ] has a specific meaning
for functoria : it is a module which defines at least a function [ start ] ,
which should have one argument per functor argument and should return
[ unit ] .
It is up to the user to ensure that the declaration matches the
implementation , or be rewarded by a compiler error later on . If the
declaration is correct , everything that follows will be .
{ 2 Defining configuration keys }
A configuration key is composed of :
- { i name } : The name of the value in the program .
- { i description } : How it should be displayed / serialized .
- { i stage } : Is the key available only at runtime , at configure time or
both ?
- { i documentation } : It is not optional so you should really write it .
Consider a multilingual application : we want to pass the default language as
a parameter . We will use a simple string , so we can use the predefined
description { ! Key.Arg.string } . We want to be able to define it both at
configure and run time , so we use the stage [ Both ] . This gives us the
following code :
{ [
let lang_key =
let doc =
Key.Arg.info ~doc:"The default language for the application . "
[ " l " ; " lang " ]
in
Key.create " language " @@ Key . Arg.(opt string " en " doc )
] }
Here , we defined both a long option [ " --lang " ] and a short one [ " -l " ] ( the
format is similar to the one used by
{ { : } Cmdliner } . In the application
code , the value is retrieved with [ Key_gen.language ( ) ] .
The option is also documented in the [ " --help " ] option for both the
[ configure ] subcommand ( at configure time ) and [ ./app.exe ] ( at startup
time ) .
{ v
-l VAL , = VAL ( absent = en ) The default language for the application .
v }
{ 2 Defining applications }
To register a new application , use [ register ] :
{ [
let ( ) = register " app " [ main $ impl ]
] }
This function ( which should only be called once ) takes as argument the name
of the application and a list of jobs . The jobs are defined using the
{ ! Impl } DSL ; for instance the operator [ $ ] is used to apply the functor
[ main ] ( aka [ Unikernel . Main ] ) to the default console .
Once an application is registered , it can be configured and built using
command - line arguments .
Configuration keys we can use be used to switch implementation at configure
time . This is done by using the { ! Key } DSL , for instance to check whether
[ lang_key ] is instanciated with a given string :
{ [
let lang_is " s " = Key.(pure ( ( = ) s ) $ value lang_key )
] }
Then by using the { ! if_impl } combinator to choose between two
implementations depending on the value of the key :
{ [
let impl = if_impl ( is " fi " ) finnish_impl not_finnish_implementation
] }
and how to apply them in order to produce a complete application.
The main use case is mirage. See the [Mirage] documentation for details.
Functoria is a DSL to write configuration files for functor-heavy
applications. Such configuration files (imaginatively called [config.ml])
usually contains three parts: one for defining toplevel modules, one for
defining configuration kyes and one for defining applications using these
modules and keys.
{2 Defining toplevel modules}
To define toplevel modules, use the {!main} function. Among its various
arguments, it takes the module name and its signature. The type is assembled
with the {!Type} combinators, like the [@->] operator, which represents a
functor arrow.
{[
let main = main "Unikernel.Main" (m @-> job)
]}
This declares that the functor [Unikernel.Main] takes a module of type [m]
and returns a module of type {!module-DSL.job}. [job] has a specific meaning
for functoria: it is a module which defines at least a function [start],
which should have one argument per functor argument and should return
[unit].
It is up to the user to ensure that the declaration matches the
implementation, or be rewarded by a compiler error later on. If the
declaration is correct, everything that follows will be.
{2 Defining configuration keys}
A configuration key is composed of:
- {i name} : The name of the value in the program.
- {i description} : How it should be displayed/serialized.
- {i stage} : Is the key available only at runtime, at configure time or
both?
- {i documentation} : It is not optional so you should really write it.
Consider a multilingual application: we want to pass the default language as
a parameter. We will use a simple string, so we can use the predefined
description {!Key.Arg.string}. We want to be able to define it both at
configure and run time, so we use the stage [Both]. This gives us the
following code:
{[
let lang_key =
let doc =
Key.Arg.info ~doc:"The default language for the application."
[ "l"; "lang" ]
in
Key.create "language" @@ Key.Arg.(opt ~stage:`Both string "en" doc)
]}
Here, we defined both a long option ["--lang"] and a short one ["-l"] (the
format is similar to the one used by
{{:} Cmdliner}. In the application
code, the value is retrieved with [Key_gen.language ()].
The option is also documented in the ["--help"] option for both the
[configure] subcommand (at configure time) and [./app.exe] (at startup
time).
{v
-l VAL, --lang=VAL (absent=en) The default language for the application.
v}
{2 Defining applications}
To register a new application, use [register]:
{[
let () = register "app" [ main $ impl ]
]}
This function (which should only be called once) takes as argument the name
of the application and a list of jobs. The jobs are defined using the
{!Impl} DSL; for instance the operator [$] is used to apply the functor
[main] (aka [Unikernel.Main]) to the default console.
Once an application is registered, it can be configured and built using
command-line arguments.
Configuration keys we can use be used to switch implementation at configure
time. This is done by using the {!Key} DSL, for instance to check whether
[lang_key] is instanciated with a given string:
{[
let lang_is "s" = Key.(pure (( = ) s) $ value lang_key)
]}
Then by using the {!if_impl} combinator to choose between two
implementations depending on the value of the key:
{[
let impl = if_impl (is "fi") finnish_impl not_finnish_implementation
]} *)
module type DSL = module type of DSL
include DSL
(** The signature for run-time and configure-time command-line keys. *)
module type KEY =
module type of Key
with type 'a Arg.converter = 'a Key.Arg.converter
and type 'a Arg.t = 'a Key.Arg.t
and type Arg.info = Key.Arg.info
and type 'a value = 'a Key.value
and type 'a key = 'a Key.key
and type t = Key.t
and type Set.t = Key.Set.t
and type 'a Alias.t = 'a Key.Alias.t
and type context = Key.context
module Package = Package
module Info = Info
module Install = Install
module Device = Device
* { 1 Useful module implementations }
val job : job typ
(** [job] is the signature for user's application main module. *)
val noop : job impl
(** [noop] is an implementation of {!type-job} that holds no state, does nothing
and has no dependency. *)
type argv = Argv.t
(** The type for command-line arguments, similar to the usual [Sys.argv]. *)
val argv : argv typ
(** [argv] is a value representing {!type-argv} module types. *)
val sys_argv : argv impl
(** [sys_argv] is a device providing command-line arguments by using [Sys.argv]. *)
val keys :
?runtime_package:string -> ?runtime_modname:string -> argv impl -> job impl
(** [keys a] is an implementation of {!type-job} that holds the parsed
command-line arguments. By default [runtime_package] is
["functoria-runtime"] and [runtime_modname] is ["Functoria_runtime"]. *)
val info : info typ
(** [info] is a value representing {!type-info} module types. *)
val app_info :
?runtime_package:string ->
?build_info:(string * string) list ->
?gen_modname:string ->
?modname:string ->
unit ->
info impl
* [ app_info ] is the module implementation whose state contains all the
information available at configure - time .
- The value is stored into a generated module name [ gen_modname ] : if not
set , it is [ Info_gen ] .
- [ modname ] is the name of the runtime module defining values of type
[ info ] . By default it 's [ Functoria_runtime ] .
information available at configure-time.
- The value is stored into a generated module name [gen_modname]: if not
set, it is [Info_gen].
- [modname] is the name of the runtime module defining values of type
[info]. By default it's [Functoria_runtime]. *)
module Type = Type
module Impl = Impl
module Key = Key
module Opam = Opam
module Lib = Lib
module Tool = Tool
module Engine = Engine
module DSL = DSL
module Cli = Cli
module Action = Action
module Dune = Dune
| null | https://raw.githubusercontent.com/mirage/mirage/df34501c4203fba59718bcee7d6ebd1198926838/lib/functoria/functoria.mli | ocaml | * The signature for run-time and configure-time command-line keys.
* [job] is the signature for user's application main module.
* [noop] is an implementation of {!type-job} that holds no state, does nothing
and has no dependency.
* The type for command-line arguments, similar to the usual [Sys.argv].
* [argv] is a value representing {!type-argv} module types.
* [sys_argv] is a device providing command-line arguments by using [Sys.argv].
* [keys a] is an implementation of {!type-job} that holds the parsed
command-line arguments. By default [runtime_package] is
["functoria-runtime"] and [runtime_modname] is ["Functoria_runtime"].
* [info] is a value representing {!type-info} module types. |
* Copyright ( c ) 2013 - 2020 < >
* Copyright ( c ) 2013 - 2020 Anil Madhavapeddy < >
* Copyright ( c ) 2015 - 2020 < >
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
* Copyright (c) 2013-2020 Thomas Gazagnaire <>
* Copyright (c) 2013-2020 Anil Madhavapeddy <>
* Copyright (c) 2015-2020 Gabriel Radanne <>
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*)
* { 1 The Functoria DSL }
* Functoria is a DSL to describe a set of modules and functors , their types
and how to apply them in order to produce a complete application .
The main use case is mirage . See the [ Mirage ] documentation for details .
Functoria is a DSL to write configuration files for functor - heavy
applications . Such configuration files ( imaginatively called [ config.ml ] )
usually contains three parts : one for defining toplevel modules , one for
defining configuration kyes and one for defining applications using these
modules and keys .
{ 2 Defining toplevel modules }
To define toplevel modules , use the { ! main } function . Among its various
arguments , it takes the module name and its signature . The type is assembled
with the { ! Type } combinators , like the [ @- > ] operator , which represents a
functor arrow .
{ [
let main = main " Unikernel . Main " ( m @- > job )
] }
This declares that the functor [ Unikernel . Main ] takes a module of type [ m ]
and returns a module of type { ! } . [ job ] has a specific meaning
for functoria : it is a module which defines at least a function [ start ] ,
which should have one argument per functor argument and should return
[ unit ] .
It is up to the user to ensure that the declaration matches the
implementation , or be rewarded by a compiler error later on . If the
declaration is correct , everything that follows will be .
{ 2 Defining configuration keys }
A configuration key is composed of :
- { i name } : The name of the value in the program .
- { i description } : How it should be displayed / serialized .
- { i stage } : Is the key available only at runtime , at configure time or
both ?
- { i documentation } : It is not optional so you should really write it .
Consider a multilingual application : we want to pass the default language as
a parameter . We will use a simple string , so we can use the predefined
description { ! Key.Arg.string } . We want to be able to define it both at
configure and run time , so we use the stage [ Both ] . This gives us the
following code :
{ [
let lang_key =
let doc =
Key.Arg.info ~doc:"The default language for the application . "
[ " l " ; " lang " ]
in
Key.create " language " @@ Key . Arg.(opt string " en " doc )
] }
Here , we defined both a long option [ " --lang " ] and a short one [ " -l " ] ( the
format is similar to the one used by
{ { : } Cmdliner } . In the application
code , the value is retrieved with [ Key_gen.language ( ) ] .
The option is also documented in the [ " --help " ] option for both the
[ configure ] subcommand ( at configure time ) and [ ./app.exe ] ( at startup
time ) .
{ v
-l VAL , = VAL ( absent = en ) The default language for the application .
v }
{ 2 Defining applications }
To register a new application , use [ register ] :
{ [
let ( ) = register " app " [ main $ impl ]
] }
This function ( which should only be called once ) takes as argument the name
of the application and a list of jobs . The jobs are defined using the
{ ! Impl } DSL ; for instance the operator [ $ ] is used to apply the functor
[ main ] ( aka [ Unikernel . Main ] ) to the default console .
Once an application is registered , it can be configured and built using
command - line arguments .
Configuration keys we can use be used to switch implementation at configure
time . This is done by using the { ! Key } DSL , for instance to check whether
[ lang_key ] is instanciated with a given string :
{ [
let lang_is " s " = Key.(pure ( ( = ) s ) $ value lang_key )
] }
Then by using the { ! if_impl } combinator to choose between two
implementations depending on the value of the key :
{ [
let impl = if_impl ( is " fi " ) finnish_impl not_finnish_implementation
] }
and how to apply them in order to produce a complete application.
The main use case is mirage. See the [Mirage] documentation for details.
Functoria is a DSL to write configuration files for functor-heavy
applications. Such configuration files (imaginatively called [config.ml])
usually contains three parts: one for defining toplevel modules, one for
defining configuration kyes and one for defining applications using these
modules and keys.
{2 Defining toplevel modules}
To define toplevel modules, use the {!main} function. Among its various
arguments, it takes the module name and its signature. The type is assembled
with the {!Type} combinators, like the [@->] operator, which represents a
functor arrow.
{[
let main = main "Unikernel.Main" (m @-> job)
]}
This declares that the functor [Unikernel.Main] takes a module of type [m]
and returns a module of type {!module-DSL.job}. [job] has a specific meaning
for functoria: it is a module which defines at least a function [start],
which should have one argument per functor argument and should return
[unit].
It is up to the user to ensure that the declaration matches the
implementation, or be rewarded by a compiler error later on. If the
declaration is correct, everything that follows will be.
{2 Defining configuration keys}
A configuration key is composed of:
- {i name} : The name of the value in the program.
- {i description} : How it should be displayed/serialized.
- {i stage} : Is the key available only at runtime, at configure time or
both?
- {i documentation} : It is not optional so you should really write it.
Consider a multilingual application: we want to pass the default language as
a parameter. We will use a simple string, so we can use the predefined
description {!Key.Arg.string}. We want to be able to define it both at
configure and run time, so we use the stage [Both]. This gives us the
following code:
{[
let lang_key =
let doc =
Key.Arg.info ~doc:"The default language for the application."
[ "l"; "lang" ]
in
Key.create "language" @@ Key.Arg.(opt ~stage:`Both string "en" doc)
]}
Here, we defined both a long option ["--lang"] and a short one ["-l"] (the
format is similar to the one used by
{{:} Cmdliner}. In the application
code, the value is retrieved with [Key_gen.language ()].
The option is also documented in the ["--help"] option for both the
[configure] subcommand (at configure time) and [./app.exe] (at startup
time).
{v
-l VAL, --lang=VAL (absent=en) The default language for the application.
v}
{2 Defining applications}
To register a new application, use [register]:
{[
let () = register "app" [ main $ impl ]
]}
This function (which should only be called once) takes as argument the name
of the application and a list of jobs. The jobs are defined using the
{!Impl} DSL; for instance the operator [$] is used to apply the functor
[main] (aka [Unikernel.Main]) to the default console.
Once an application is registered, it can be configured and built using
command-line arguments.
Configuration keys we can use be used to switch implementation at configure
time. This is done by using the {!Key} DSL, for instance to check whether
[lang_key] is instanciated with a given string:
{[
let lang_is "s" = Key.(pure (( = ) s) $ value lang_key)
]}
Then by using the {!if_impl} combinator to choose between two
implementations depending on the value of the key:
{[
let impl = if_impl (is "fi") finnish_impl not_finnish_implementation
]} *)
module type DSL = module type of DSL
include DSL
module type KEY =
module type of Key
with type 'a Arg.converter = 'a Key.Arg.converter
and type 'a Arg.t = 'a Key.Arg.t
and type Arg.info = Key.Arg.info
and type 'a value = 'a Key.value
and type 'a key = 'a Key.key
and type t = Key.t
and type Set.t = Key.Set.t
and type 'a Alias.t = 'a Key.Alias.t
and type context = Key.context
module Package = Package
module Info = Info
module Install = Install
module Device = Device
* { 1 Useful module implementations }
val job : job typ
val noop : job impl
type argv = Argv.t
val argv : argv typ
val sys_argv : argv impl
val keys :
?runtime_package:string -> ?runtime_modname:string -> argv impl -> job impl
val info : info typ
val app_info :
?runtime_package:string ->
?build_info:(string * string) list ->
?gen_modname:string ->
?modname:string ->
unit ->
info impl
* [ app_info ] is the module implementation whose state contains all the
information available at configure - time .
- The value is stored into a generated module name [ gen_modname ] : if not
set , it is [ Info_gen ] .
- [ modname ] is the name of the runtime module defining values of type
[ info ] . By default it 's [ Functoria_runtime ] .
information available at configure-time.
- The value is stored into a generated module name [gen_modname]: if not
set, it is [Info_gen].
- [modname] is the name of the runtime module defining values of type
[info]. By default it's [Functoria_runtime]. *)
module Type = Type
module Impl = Impl
module Key = Key
module Opam = Opam
module Lib = Lib
module Tool = Tool
module Engine = Engine
module DSL = DSL
module Cli = Cli
module Action = Action
module Dune = Dune
|
d4a36b3fa5f456a3fcf450505fa01abcd170e14c939cf552a3e32e348b02df0b | typeclasses/haskell-phrasebook | test-dynamic.hs | main = propertyMain $ withTests 1 $ property do
x <- exeStdout $ phrasebook "dynamic"
strLines x ===
[ "The answer is yes", "5 is an integer", "Unrecognized type: [Char]" ]
| null | https://raw.githubusercontent.com/typeclasses/haskell-phrasebook/2b0aa44ef6f6e9745c51ed47b4e59ff704346c87/tests/test-dynamic.hs | haskell | main = propertyMain $ withTests 1 $ property do
x <- exeStdout $ phrasebook "dynamic"
strLines x ===
[ "The answer is yes", "5 is an integer", "Unrecognized type: [Char]" ]
| |
96420776e24b07ccf1e437496159508c4a8797025a8add3320f57a2ab79a635c | RDTK/generator | util.lisp | ;;;; util.lisp --- Utilities used by the target.dockerfile module.
;;;;
Copyright ( C ) 2018 , 2019 Jan Moringen
;;;;
Author : < >
(cl:in-package #:build-generator.deployment.dockerfile)
;;; Commands
(defun trim-command (command)
(string-trim '(#\Space #\Tab #\Newline) command))
| null | https://raw.githubusercontent.com/RDTK/generator/8d9e6e47776f2ccb7b5ed934337d2db50ecbe2f5/src/deployment/dockerfile/util.lisp | lisp | util.lisp --- Utilities used by the target.dockerfile module.
Commands | Copyright ( C ) 2018 , 2019 Jan Moringen
Author : < >
(cl:in-package #:build-generator.deployment.dockerfile)
(defun trim-command (command)
(string-trim '(#\Space #\Tab #\Newline) command))
|
ce6ab86c4cc6a527f4699eddc698936a98f06c7b42e70387730f2a91c7de64dc | mirage/ocaml-launchd | example.ml |
* Copyright ( c ) 2015 Unikernel Systems
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* Copyright (c) 2015 Unikernel Systems
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
let _ =
match Launchd.(error_to_msg (activate_socket "Listener")) with
| Ok fds ->
while true do
let ready_fds, _, _ = Unix.select fds [] [] (-1.) in
List.iter (fun fd ->
let client, _ = Unix.accept fd in
let message = Bytes.of_string "Hello there!\n" in
let (_: int) = Unix.write client message 0 (Bytes.length message) in
Unix.close client
) ready_fds
done
| Error (`Msg m) ->
Printf.fprintf stderr "%s\n%!" m;
exit (-1)
| null | https://raw.githubusercontent.com/mirage/ocaml-launchd/b605b27c9f3e4ab4223a37bb50b454c068897b28/example/example.ml | ocaml |
* Copyright ( c ) 2015 Unikernel Systems
*
* Permission to use , copy , modify , and distribute this software for any
* purpose with or without fee is hereby granted , provided that the above
* copyright notice and this permission notice appear in all copies .
*
* THE SOFTWARE IS PROVIDED " AS IS " AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS . IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL , DIRECT , INDIRECT , OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE , DATA OR PROFITS , WHETHER IN AN
* ACTION OF CONTRACT , NEGLIGENCE OR OTHER TORTIOUS ACTION , ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE .
*
* Copyright (c) 2015 Unikernel Systems
*
* Permission to use, copy, modify, and distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
*)
let _ =
match Launchd.(error_to_msg (activate_socket "Listener")) with
| Ok fds ->
while true do
let ready_fds, _, _ = Unix.select fds [] [] (-1.) in
List.iter (fun fd ->
let client, _ = Unix.accept fd in
let message = Bytes.of_string "Hello there!\n" in
let (_: int) = Unix.write client message 0 (Bytes.length message) in
Unix.close client
) ready_fds
done
| Error (`Msg m) ->
Printf.fprintf stderr "%s\n%!" m;
exit (-1)
| |
419d2cae148703272c5c091c2d8be6ca2f6d224af9b4702f688532440b0732ba | elastic/eui-cljs | property_sort.cljs | (ns eui.services.property-sort
(:require ["@elastic/eui/lib/services/sort/property_sort.js" :as eui]))
(def PropertySortType eui/PropertySortType)
| null | https://raw.githubusercontent.com/elastic/eui-cljs/ad60b57470a2eb8db9bca050e02f52dd964d9f8e/src/eui/services/property_sort.cljs | clojure | (ns eui.services.property-sort
(:require ["@elastic/eui/lib/services/sort/property_sort.js" :as eui]))
(def PropertySortType eui/PropertySortType)
| |
b643053bfc425c7e973f2f2b91de6c80be688c60f7fb9f4bdac3e98fdabf1d1c | let-def/lwd | main.ml | open Brr
open Brr_lwd
let ui =
let values = Lwd_table.make () in
let items = Lwd.var Lwd_seq.empty in
let shuffle () =
let all = Lwd_seq.to_array (Lwd.peek items) in
for i = Array.length all - 1 downto 1 do
let i' = Random.int (i + 1) in
let x = all.(i) in
let x' = all.(i') in
all.(i') <- x;
all.(i) <- x';
done;
Lwd.set items (Lwd_seq.of_array all)
in
let edit _ =
let row = Lwd_table.append values in
Lwd.map (Elwd.input ()) ~f:(fun el ->
ignore (
Ev.listen Ev.input (fun _ ->
let txt = Jstr.to_string (El.prop El.Prop.value el) in
Console.log ["shuffle"; txt];
Lwd_table.set row txt;
shuffle ()
) (El.as_target el)
);
el
)
in
Lwd.set items (Lwd_seq.of_array (Array.init 10 edit));
let values =
Lwd_table.map_reduce
(fun _row txt -> Lwd_seq.element (txt ^ "\n"))
(Lwd_seq.monoid)
values
|> Lwd_seq.sort_uniq String.compare
in
Elwd.div [
`P (El.txt' "In this test, typing in one of the input field should \
shuffle them. The test succeeds if focus and selections are \
preserved after shuffling.");
`P (El.br ());
`S (Lwd_seq.lift (Lwd.get items));
`S (Lwd_seq.map El.txt' values);
]
let () =
let ui = Lwd.observe ui in
let on_invalidate _ =
Console.(log [str "on invalidate"]);
let _ : int =
G.request_animation_frame @@ fun _ ->
let _ui = Lwd.quick_sample ui in
(*El.set_children (Document.body G.document) [ui]*)
()
in
()
in
let on_load _ =
Console.(log [str "onload"]);
El.append_children (Document.body G.document) [Lwd.quick_sample ui];
Lwd.set_on_invalidate ui on_invalidate
in
ignore (Ev.listen Ev.dom_content_loaded on_load (Window.as_target G.window));
()
| null | https://raw.githubusercontent.com/let-def/lwd/9ebcf4cefef92de6bb9faf82a28cedcf05d1b59f/examples/focustest-brr/main.ml | ocaml | El.set_children (Document.body G.document) [ui] | open Brr
open Brr_lwd
let ui =
let values = Lwd_table.make () in
let items = Lwd.var Lwd_seq.empty in
let shuffle () =
let all = Lwd_seq.to_array (Lwd.peek items) in
for i = Array.length all - 1 downto 1 do
let i' = Random.int (i + 1) in
let x = all.(i) in
let x' = all.(i') in
all.(i') <- x;
all.(i) <- x';
done;
Lwd.set items (Lwd_seq.of_array all)
in
let edit _ =
let row = Lwd_table.append values in
Lwd.map (Elwd.input ()) ~f:(fun el ->
ignore (
Ev.listen Ev.input (fun _ ->
let txt = Jstr.to_string (El.prop El.Prop.value el) in
Console.log ["shuffle"; txt];
Lwd_table.set row txt;
shuffle ()
) (El.as_target el)
);
el
)
in
Lwd.set items (Lwd_seq.of_array (Array.init 10 edit));
let values =
Lwd_table.map_reduce
(fun _row txt -> Lwd_seq.element (txt ^ "\n"))
(Lwd_seq.monoid)
values
|> Lwd_seq.sort_uniq String.compare
in
Elwd.div [
`P (El.txt' "In this test, typing in one of the input field should \
shuffle them. The test succeeds if focus and selections are \
preserved after shuffling.");
`P (El.br ());
`S (Lwd_seq.lift (Lwd.get items));
`S (Lwd_seq.map El.txt' values);
]
let () =
let ui = Lwd.observe ui in
let on_invalidate _ =
Console.(log [str "on invalidate"]);
let _ : int =
G.request_animation_frame @@ fun _ ->
let _ui = Lwd.quick_sample ui in
()
in
()
in
let on_load _ =
Console.(log [str "onload"]);
El.append_children (Document.body G.document) [Lwd.quick_sample ui];
Lwd.set_on_invalidate ui on_invalidate
in
ignore (Ev.listen Ev.dom_content_loaded on_load (Window.as_target G.window));
()
|
d7f55e02f2d2739a78df40d522e0e25c9d628404d9e3a312db656bfb7fbbbe13 | haskell/lsp | DiagnosticsSpec.hs | {-# LANGUAGE OverloadedStrings #-}
module DiagnosticsSpec where
import qualified Data.Map as Map
import qualified Data.HashMap.Strict as HM
import qualified Data.SortedList as SL
import Data.Text (Text)
import Language.LSP.Diagnostics
import qualified Language.LSP.Types as J
import Test.Hspec
# ANN module ( " HLint : ignore Redundant do " : : String ) #
-- ---------------------------------------------------------------------
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "Diagnostics functions" diagnosticsSpec
-- -- |Used when running from ghci, and it sets the current directory to ./tests
-- tt :: IO ()
-- tt = do
-- cd ".."
hspec spec
-- ---------------------------------------------------------------------
mkDiagnostic :: Maybe J.DiagnosticSource -> Text -> J.Diagnostic
mkDiagnostic ms str =
let
rng = J.Range (J.Position 0 1) (J.Position 3 0)
loc = J.Location (J.Uri "file") rng
in
J.Diagnostic rng Nothing Nothing ms str Nothing (Just (J.List [J.DiagnosticRelatedInformation loc str]))
mkDiagnostic2 :: Maybe J.DiagnosticSource -> Text -> J.Diagnostic
mkDiagnostic2 ms str =
let
rng = J.Range (J.Position 4 1) (J.Position 5 0)
loc = J.Location (J.Uri "file") rng
in J.Diagnostic rng Nothing Nothing ms str Nothing (Just (J.List [J.DiagnosticRelatedInformation loc str]))
-- ---------------------------------------------------------------------
diagnosticsSpec :: Spec
diagnosticsSpec = do
describe "constructs a new store" $ do
it "constructs a store with no doc version and a single source" $ do
let
diags =
[ mkDiagnostic (Just "hlint") "a"
, mkDiagnostic (Just "hlint") "b"
]
uri = J.toNormalizedUri $ J.Uri "uri"
(updateDiagnostics HM.empty uri Nothing (partitionBySource diags)) `shouldBe`
HM.fromList
[ (uri,StoreItem Nothing $ Map.fromList [(Just "hlint", SL.toSortedList diags) ] )
]
-- ---------------------------------
it "constructs a store with no doc version and multiple sources" $ do
let
diags =
[ mkDiagnostic (Just "hlint") "a"
, mkDiagnostic (Just "ghcmod") "b"
]
uri = J.toNormalizedUri $ J.Uri "uri"
(updateDiagnostics HM.empty uri Nothing (partitionBySource diags)) `shouldBe`
HM.fromList
[ (uri,StoreItem Nothing $ Map.fromList
[(Just "hlint", SL.singleton (mkDiagnostic (Just "hlint") "a"))
,(Just "ghcmod", SL.singleton (mkDiagnostic (Just "ghcmod") "b"))
])
]
-- ---------------------------------
it "constructs a store with doc version and multiple sources" $ do
let
diags =
[ mkDiagnostic (Just "hlint") "a"
, mkDiagnostic (Just "ghcmod") "b"
]
uri = J.toNormalizedUri $ J.Uri "uri"
(updateDiagnostics HM.empty uri (Just 1) (partitionBySource diags)) `shouldBe`
HM.fromList
[ (uri,StoreItem (Just 1) $ Map.fromList
[(Just "hlint", SL.singleton (mkDiagnostic (Just "hlint") "a"))
,(Just "ghcmod", SL.singleton (mkDiagnostic (Just "ghcmod") "b"))
])
]
-- ---------------------------------
describe "updates a store for same document version" $ do
it "updates a store without a document version, single source only" $ do
let
diags1 =
[ mkDiagnostic (Just "hlint") "a1"
, mkDiagnostic (Just "hlint") "b1"
]
diags2 =
[ mkDiagnostic (Just "hlint") "a2"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let origStore = updateDiagnostics HM.empty uri Nothing (partitionBySource diags1)
(updateDiagnostics origStore uri Nothing (partitionBySource diags2)) `shouldBe`
HM.fromList
[ (uri,StoreItem Nothing $ Map.fromList [(Just "hlint", SL.toSortedList diags2) ] )
]
-- ---------------------------------
it "updates just one source of a 2 source store" $ do
let
diags1 =
[ mkDiagnostic (Just "hlint") "a1"
, mkDiagnostic (Just "ghcmod") "b1"
]
diags2 =
[ mkDiagnostic (Just "hlint") "a2"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let origStore = updateDiagnostics HM.empty uri Nothing (partitionBySource diags1)
(updateDiagnostics origStore uri Nothing (partitionBySource diags2)) `shouldBe`
HM.fromList
[ (uri,StoreItem Nothing $ Map.fromList
[(Just "hlint", SL.singleton (mkDiagnostic (Just "hlint") "a2"))
,(Just "ghcmod", SL.singleton (mkDiagnostic (Just "ghcmod") "b1"))
] )
]
-- ---------------------------------
it "updates just one source of a 2 source store, with empty diags" $ do
let
diags1 =
[ mkDiagnostic (Just "hlint") "a1"
, mkDiagnostic (Just "ghcmod") "b1"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let origStore = updateDiagnostics HM.empty uri Nothing (partitionBySource diags1)
(updateDiagnostics origStore uri Nothing (Map.fromList [(Just "ghcmod",SL.toSortedList [])])) `shouldBe`
HM.fromList
[ (uri,StoreItem Nothing $ Map.fromList
[(Just "ghcmod", SL.toSortedList [])
,(Just "hlint", SL.singleton (mkDiagnostic (Just "hlint") "a1"))
] )
]
-- ---------------------------------
describe "updates a store for a new document version" $ do
it "updates a store without a document version, single source only" $ do
let
diags1 =
[ mkDiagnostic (Just "hlint") "a1"
, mkDiagnostic (Just "hlint") "b1"
]
diags2 =
[ mkDiagnostic (Just "hlint") "a2"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let origStore = updateDiagnostics HM.empty uri (Just 1) (partitionBySource diags1)
(updateDiagnostics origStore uri (Just 2) (partitionBySource diags2)) `shouldBe`
HM.fromList
[ (uri,StoreItem (Just 2) $ Map.fromList [(Just "hlint", SL.toSortedList diags2) ] )
]
-- ---------------------------------
it "updates a store for a new doc version, removing all priot sources" $ do
let
diags1 =
[ mkDiagnostic (Just "hlint") "a1"
, mkDiagnostic (Just "ghcmod") "b1"
]
diags2 =
[ mkDiagnostic (Just "hlint") "a2"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let origStore = updateDiagnostics HM.empty uri (Just 1) (partitionBySource diags1)
(updateDiagnostics origStore uri (Just 2) (partitionBySource diags2)) `shouldBe`
HM.fromList
[ (uri,StoreItem (Just 2) $ Map.fromList
[(Just "hlint", SL.singleton (mkDiagnostic (Just "hlint") "a2"))
] )
]
-- ---------------------------------
describe "retrieves all the diagnostics for a given uri" $ do
it "gets diagnostics for multiple sources" $ do
let
diags =
[ mkDiagnostic (Just "hlint") "a"
, mkDiagnostic (Just "ghcmod") "b"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let ds = updateDiagnostics HM.empty uri (Just 1) (partitionBySource diags)
getDiagnosticParamsFor 10 ds uri `shouldBe`
Just (J.PublishDiagnosticsParams (J.fromNormalizedUri uri) (Just 1) (J.List $ reverse diags))
-- ---------------------------------
describe "limits the number of diagnostics retrieved, in order" $ do
it "gets diagnostics for multiple sources" $ do
let
diags =
[ mkDiagnostic2 (Just "hlint") "a"
, mkDiagnostic2 (Just "ghcmod") "b"
, mkDiagnostic (Just "hlint") "c"
, mkDiagnostic (Just "ghcmod") "d"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let ds = updateDiagnostics HM.empty uri (Just 1) (partitionBySource diags)
getDiagnosticParamsFor 2 ds uri `shouldBe`
Just (J.PublishDiagnosticsParams (J.fromNormalizedUri uri) (Just 1)
(J.List [
mkDiagnostic (Just "ghcmod") "d"
, mkDiagnostic (Just "hlint") "c"
]))
getDiagnosticParamsFor 1 ds uri `shouldBe`
Just (J.PublishDiagnosticsParams (J.fromNormalizedUri uri) (Just 1)
(J.List [
mkDiagnostic (Just "ghcmod") "d"
]))
-- ---------------------------------
describe "flushes the diagnostics for a given source" $ do
it "gets diagnostics for multiple sources" $ do
let
diags =
[ mkDiagnostic2 (Just "hlint") "a"
, mkDiagnostic2 (Just "ghcmod") "b"
, mkDiagnostic (Just "hlint") "c"
, mkDiagnostic (Just "ghcmod") "d"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let ds = updateDiagnostics HM.empty uri (Just 1) (partitionBySource diags)
getDiagnosticParamsFor 100 ds uri `shouldBe`
Just (J.PublishDiagnosticsParams (J.fromNormalizedUri uri) (Just 1)
(J.List [
mkDiagnostic (Just "ghcmod") "d"
, mkDiagnostic (Just "hlint") "c"
, mkDiagnostic2 (Just "ghcmod") "b"
, mkDiagnostic2 (Just "hlint") "a"
]))
let ds' = flushBySource ds (Just "hlint")
getDiagnosticParamsFor 100 ds' uri `shouldBe`
Just (J.PublishDiagnosticsParams (J.fromNormalizedUri uri) (Just 1)
(J.List [
mkDiagnostic (Just "ghcmod") "d"
, mkDiagnostic2 (Just "ghcmod") "b"
]))
-- ---------------------------------
| null | https://raw.githubusercontent.com/haskell/lsp/5422dd13f0362917e5981e07d60617ca6e233833/lsp/test/DiagnosticsSpec.hs | haskell | # LANGUAGE OverloadedStrings #
---------------------------------------------------------------------
-- |Used when running from ghci, and it sets the current directory to ./tests
tt :: IO ()
tt = do
cd ".."
---------------------------------------------------------------------
---------------------------------------------------------------------
---------------------------------
---------------------------------
---------------------------------
---------------------------------
---------------------------------
---------------------------------
---------------------------------
---------------------------------
---------------------------------
---------------------------------
--------------------------------- | module DiagnosticsSpec where
import qualified Data.Map as Map
import qualified Data.HashMap.Strict as HM
import qualified Data.SortedList as SL
import Data.Text (Text)
import Language.LSP.Diagnostics
import qualified Language.LSP.Types as J
import Test.Hspec
# ANN module ( " HLint : ignore Redundant do " : : String ) #
main :: IO ()
main = hspec spec
spec :: Spec
spec = describe "Diagnostics functions" diagnosticsSpec
hspec spec
mkDiagnostic :: Maybe J.DiagnosticSource -> Text -> J.Diagnostic
mkDiagnostic ms str =
let
rng = J.Range (J.Position 0 1) (J.Position 3 0)
loc = J.Location (J.Uri "file") rng
in
J.Diagnostic rng Nothing Nothing ms str Nothing (Just (J.List [J.DiagnosticRelatedInformation loc str]))
mkDiagnostic2 :: Maybe J.DiagnosticSource -> Text -> J.Diagnostic
mkDiagnostic2 ms str =
let
rng = J.Range (J.Position 4 1) (J.Position 5 0)
loc = J.Location (J.Uri "file") rng
in J.Diagnostic rng Nothing Nothing ms str Nothing (Just (J.List [J.DiagnosticRelatedInformation loc str]))
diagnosticsSpec :: Spec
diagnosticsSpec = do
describe "constructs a new store" $ do
it "constructs a store with no doc version and a single source" $ do
let
diags =
[ mkDiagnostic (Just "hlint") "a"
, mkDiagnostic (Just "hlint") "b"
]
uri = J.toNormalizedUri $ J.Uri "uri"
(updateDiagnostics HM.empty uri Nothing (partitionBySource diags)) `shouldBe`
HM.fromList
[ (uri,StoreItem Nothing $ Map.fromList [(Just "hlint", SL.toSortedList diags) ] )
]
it "constructs a store with no doc version and multiple sources" $ do
let
diags =
[ mkDiagnostic (Just "hlint") "a"
, mkDiagnostic (Just "ghcmod") "b"
]
uri = J.toNormalizedUri $ J.Uri "uri"
(updateDiagnostics HM.empty uri Nothing (partitionBySource diags)) `shouldBe`
HM.fromList
[ (uri,StoreItem Nothing $ Map.fromList
[(Just "hlint", SL.singleton (mkDiagnostic (Just "hlint") "a"))
,(Just "ghcmod", SL.singleton (mkDiagnostic (Just "ghcmod") "b"))
])
]
it "constructs a store with doc version and multiple sources" $ do
let
diags =
[ mkDiagnostic (Just "hlint") "a"
, mkDiagnostic (Just "ghcmod") "b"
]
uri = J.toNormalizedUri $ J.Uri "uri"
(updateDiagnostics HM.empty uri (Just 1) (partitionBySource diags)) `shouldBe`
HM.fromList
[ (uri,StoreItem (Just 1) $ Map.fromList
[(Just "hlint", SL.singleton (mkDiagnostic (Just "hlint") "a"))
,(Just "ghcmod", SL.singleton (mkDiagnostic (Just "ghcmod") "b"))
])
]
describe "updates a store for same document version" $ do
it "updates a store without a document version, single source only" $ do
let
diags1 =
[ mkDiagnostic (Just "hlint") "a1"
, mkDiagnostic (Just "hlint") "b1"
]
diags2 =
[ mkDiagnostic (Just "hlint") "a2"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let origStore = updateDiagnostics HM.empty uri Nothing (partitionBySource diags1)
(updateDiagnostics origStore uri Nothing (partitionBySource diags2)) `shouldBe`
HM.fromList
[ (uri,StoreItem Nothing $ Map.fromList [(Just "hlint", SL.toSortedList diags2) ] )
]
it "updates just one source of a 2 source store" $ do
let
diags1 =
[ mkDiagnostic (Just "hlint") "a1"
, mkDiagnostic (Just "ghcmod") "b1"
]
diags2 =
[ mkDiagnostic (Just "hlint") "a2"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let origStore = updateDiagnostics HM.empty uri Nothing (partitionBySource diags1)
(updateDiagnostics origStore uri Nothing (partitionBySource diags2)) `shouldBe`
HM.fromList
[ (uri,StoreItem Nothing $ Map.fromList
[(Just "hlint", SL.singleton (mkDiagnostic (Just "hlint") "a2"))
,(Just "ghcmod", SL.singleton (mkDiagnostic (Just "ghcmod") "b1"))
] )
]
it "updates just one source of a 2 source store, with empty diags" $ do
let
diags1 =
[ mkDiagnostic (Just "hlint") "a1"
, mkDiagnostic (Just "ghcmod") "b1"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let origStore = updateDiagnostics HM.empty uri Nothing (partitionBySource diags1)
(updateDiagnostics origStore uri Nothing (Map.fromList [(Just "ghcmod",SL.toSortedList [])])) `shouldBe`
HM.fromList
[ (uri,StoreItem Nothing $ Map.fromList
[(Just "ghcmod", SL.toSortedList [])
,(Just "hlint", SL.singleton (mkDiagnostic (Just "hlint") "a1"))
] )
]
describe "updates a store for a new document version" $ do
it "updates a store without a document version, single source only" $ do
let
diags1 =
[ mkDiagnostic (Just "hlint") "a1"
, mkDiagnostic (Just "hlint") "b1"
]
diags2 =
[ mkDiagnostic (Just "hlint") "a2"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let origStore = updateDiagnostics HM.empty uri (Just 1) (partitionBySource diags1)
(updateDiagnostics origStore uri (Just 2) (partitionBySource diags2)) `shouldBe`
HM.fromList
[ (uri,StoreItem (Just 2) $ Map.fromList [(Just "hlint", SL.toSortedList diags2) ] )
]
it "updates a store for a new doc version, removing all priot sources" $ do
let
diags1 =
[ mkDiagnostic (Just "hlint") "a1"
, mkDiagnostic (Just "ghcmod") "b1"
]
diags2 =
[ mkDiagnostic (Just "hlint") "a2"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let origStore = updateDiagnostics HM.empty uri (Just 1) (partitionBySource diags1)
(updateDiagnostics origStore uri (Just 2) (partitionBySource diags2)) `shouldBe`
HM.fromList
[ (uri,StoreItem (Just 2) $ Map.fromList
[(Just "hlint", SL.singleton (mkDiagnostic (Just "hlint") "a2"))
] )
]
describe "retrieves all the diagnostics for a given uri" $ do
it "gets diagnostics for multiple sources" $ do
let
diags =
[ mkDiagnostic (Just "hlint") "a"
, mkDiagnostic (Just "ghcmod") "b"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let ds = updateDiagnostics HM.empty uri (Just 1) (partitionBySource diags)
getDiagnosticParamsFor 10 ds uri `shouldBe`
Just (J.PublishDiagnosticsParams (J.fromNormalizedUri uri) (Just 1) (J.List $ reverse diags))
describe "limits the number of diagnostics retrieved, in order" $ do
it "gets diagnostics for multiple sources" $ do
let
diags =
[ mkDiagnostic2 (Just "hlint") "a"
, mkDiagnostic2 (Just "ghcmod") "b"
, mkDiagnostic (Just "hlint") "c"
, mkDiagnostic (Just "ghcmod") "d"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let ds = updateDiagnostics HM.empty uri (Just 1) (partitionBySource diags)
getDiagnosticParamsFor 2 ds uri `shouldBe`
Just (J.PublishDiagnosticsParams (J.fromNormalizedUri uri) (Just 1)
(J.List [
mkDiagnostic (Just "ghcmod") "d"
, mkDiagnostic (Just "hlint") "c"
]))
getDiagnosticParamsFor 1 ds uri `shouldBe`
Just (J.PublishDiagnosticsParams (J.fromNormalizedUri uri) (Just 1)
(J.List [
mkDiagnostic (Just "ghcmod") "d"
]))
describe "flushes the diagnostics for a given source" $ do
it "gets diagnostics for multiple sources" $ do
let
diags =
[ mkDiagnostic2 (Just "hlint") "a"
, mkDiagnostic2 (Just "ghcmod") "b"
, mkDiagnostic (Just "hlint") "c"
, mkDiagnostic (Just "ghcmod") "d"
]
uri = J.toNormalizedUri $ J.Uri "uri"
let ds = updateDiagnostics HM.empty uri (Just 1) (partitionBySource diags)
getDiagnosticParamsFor 100 ds uri `shouldBe`
Just (J.PublishDiagnosticsParams (J.fromNormalizedUri uri) (Just 1)
(J.List [
mkDiagnostic (Just "ghcmod") "d"
, mkDiagnostic (Just "hlint") "c"
, mkDiagnostic2 (Just "ghcmod") "b"
, mkDiagnostic2 (Just "hlint") "a"
]))
let ds' = flushBySource ds (Just "hlint")
getDiagnosticParamsFor 100 ds' uri `shouldBe`
Just (J.PublishDiagnosticsParams (J.fromNormalizedUri uri) (Just 1)
(J.List [
mkDiagnostic (Just "ghcmod") "d"
, mkDiagnostic2 (Just "ghcmod") "b"
]))
|
970ac6d953f27856576a405eed3110bb1e366d3a811710671c8d43a6e1ea4184 | hcarty/ezlogs-cli | ezlogs_cli.ml | module Json = Yojson.Basic
(* String-keyed map, used below to accumulate JSON fields by name. *)
module String_map = Map.Make (String)
(* Render the ECS timestamp carried in [tags] when one is present;
   otherwise format the current wall-clock time. *)
let timestamp_of_tags_or_now (tags : Logs.Tag.set) =
  (* First look for the ECS field set tag, then for a timestamp inside it. *)
  let recorded =
    match Logs.Tag.find Ecs.tag tags with
    | Some ecs -> Ecs.Fields.find (Base (Timestamp Ptime.epoch)) ecs
    | None -> None
  in
  match recorded with
  | Some timestamp -> Fmt.str "%a" Ecs.pp timestamp
  | None -> Ecs.Epoch.to_timestamp (Ptime_clock.now ())
(* Human-readable, single-line log output. *)
module Line_output = struct
  (* Build a Logs reporter printing "<header><timestamp> [LEVEL] <message>"
     to [ppf], one line per log message. *)
  let reporter ppf =
    let report _src level ~over k msgf =
      (* Runs after the line is emitted: release Logs' hold on the
         message, then resume the caller. *)
      let continuation _ =
        over ();
        k ()
      in
      (* NB: [k] and [ppf] deliberately shadow the outer bindings;
         [write] is continuation-passing, like Fmt.kpf. *)
      let write header tags k ppf fmt =
        Fmt.kstr
          (fun message ->
            let timestamp = timestamp_of_tags_or_now tags in
            let level =
              String.uppercase_ascii (Logs.level_to_string (Some level))
            in
            Fmt.kpf k ppf "%s%s [%s] %s@." header timestamp level message)
          fmt
      in
      msgf @@ fun ?(header = "") ?(tags = Logs.Tag.empty) fmt ->
      write header tags continuation ppf fmt
    in
    { Logs.report }

  (* Install the line reporter on stderr and set the global log level. *)
  let setup style_renderer level =
    Fmt_tty.setup_std_outputs ?style_renderer ();
    Logs.set_level level;
    Logs.set_reporter (reporter Fmt.stderr);
    ()

  (* Cmdliner term wiring [setup] to the style-renderer and log-level
     command-line options. *)
  let logging =
    Cmdliner.Term.(const setup $ Fmt_cli.style_renderer () $ Logs_cli.level ())
end
(* JSON (ECS-flavoured) log output.
   Fix: the "Always include the ECS version ..." line had lost its comment
   delimiters, leaving bare text in the middle of an expression pipeline;
   restored as a proper comment. *)
module Json_output = struct
  (* Render every tag as a JSON string keyed by the tag's name. *)
  let labels_of_tags (tags : Logs.Tag.set) : Json.t String_map.t =
    Logs.Tag.fold
      (fun tag map ->
        match tag with
        | V (tag_definition, tag_value) ->
          let name = Logs.Tag.name tag_definition in
          let tag_string =
            Fmt.str "%a" (Logs.Tag.printer tag_definition) tag_value
          in
          String_map.update name (fun _v -> Some (`String tag_string)) map)
      tags String_map.empty

  (* ECS fields from the dedicated ECS tag, plus any remaining tags
     grouped under a "labels" object. *)
  let json_fields_of_tags (tags : Logs.Tag.set) : Json.t String_map.t =
    let fields = Ecs.fields_of_tags tags in
    let tags = Logs.Tag.rem Ecs.tag tags in
    let labels = labels_of_tags tags |> String_map.bindings in
    match labels with
    | [] -> fields
    | _ -> String_map.add "labels" (`Assoc labels) fields

  (* Fill in the mandatory ECS fields, keeping caller-supplied values for
     @timestamp, log.level and log.logger when already present. *)
  let add_basic_fields (fields : Json.t String_map.t) level src message =
    let add_if_new name thunk map =
      if String_map.mem name map then
        map
      else
        String_map.add name (thunk ()) map
    in
    let replace key value map =
      String_map.remove key map |> String_map.add key value
    in
    add_if_new "@timestamp"
      (fun () -> `String (Ecs.Epoch.to_timestamp (Ptime_clock.now ())))
      fields
    |> add_if_new "log.level" (fun () ->
           `String (Logs.level_to_string (Some level)))
    |> add_if_new "log.logger" (fun () -> `String (Logs.Src.name src))
    (* Always include the log message *)
    |> replace "message" (`String message)
    (* Always include the ECS version we're targeting *)
    |> replace "ecs.version" (`String Ecs.ecs_version)

  (* Logs reporter printing one JSON object per log line to [ppf]. *)
  let reporter ppf =
    let report src level ~over k msgf =
      let continuation _ =
        over ();
        k ()
      in
      (* [header] is ignored in JSON mode; the message and tags carry
         everything that is serialized. *)
      let as_json _header tags k ppf fmt =
        Fmt.kstr
          (fun message ->
            let fields =
              let ecs_fields = json_fields_of_tags tags in
              add_basic_fields ecs_fields level src message
            in
            let json : Json.t = `Assoc (String_map.bindings fields) in
            Fmt.kpf k ppf "%s@." (Json.to_string json))
          fmt
      in
      msgf @@ fun ?header ?(tags = Logs.Tag.empty) fmt ->
      as_json header tags continuation ppf fmt
    in
    { Logs.report }

  (* Install the JSON reporter on stderr and set the global log level. *)
  let setup level =
    Logs.set_level level;
    Logs.set_reporter (reporter Fmt.stderr)

  (* Cmdliner term wiring [setup] to the log-level option. *)
  let logging = Cmdliner.Term.(const setup $ Logs_cli.level ())
end
(* Output format selector for the installed log reporter. *)
type format =
  | Line (* human-readable single-line output *)
  | Json (* ECS-compatible JSON output *)
(* Install the reporter selected by [format] on the global Logs state.
   The style renderer is only meaningful for line output. *)
let setup (format : format) style_renderer level =
  match format with
  | Json -> Json_output.setup level
  | Line -> Line_output.setup style_renderer level
(* Cmdliner converter mapping "line"/"json" option values to [format]. *)
let format_conv : format Cmdliner.Arg.conv =
  Cmdliner.Arg.enum [ ("line", Line); ("json", Json) ]
(* The --log-format command-line option; [default] is the initial format. *)
let log_format default =
  let doc = "Log format" in
  let docv = "LOG_FORMAT" in
  Cmdliner.Arg.(
    value & opt format_conv default & info [ "log-format" ] ~doc ~docv)
(* Combined Cmdliner term: configures logging from the --log-format,
   style-renderer and log-level options when evaluated. *)
let logging ~default =
  let format = log_format default in
  Cmdliner.Term.(
    const setup $ format $ Fmt_cli.style_renderer () $ Logs_cli.level ())
| null | https://raw.githubusercontent.com/hcarty/ezlogs-cli/aa6079a279c93d8ac1e437ade34ec81835f6e121/src/ezlogs_cli.ml | ocaml | Always include the log message | module Json = Yojson.Basic
module String_map = Map.Make (String)
let timestamp_of_tags_or_now (tags : Logs.Tag.set) =
match Logs.Tag.find Ecs.tag tags with
| None -> Ecs.Epoch.to_timestamp (Ptime_clock.now ())
| Some ecs ->
( match Ecs.Fields.find (Base (Timestamp Ptime.epoch)) ecs with
| None -> Ecs.Epoch.to_timestamp (Ptime_clock.now ())
| Some timestamp -> Fmt.str "%a" Ecs.pp timestamp
)
module Line_output = struct
let reporter ppf =
let report _src level ~over k msgf =
let continuation _ =
over ();
k ()
in
let write header tags k ppf fmt =
Fmt.kstr
(fun message ->
let timestamp = timestamp_of_tags_or_now tags in
let level =
String.uppercase_ascii (Logs.level_to_string (Some level))
in
Fmt.kpf k ppf "%s%s [%s] %s@." header timestamp level message)
fmt
in
msgf @@ fun ?(header = "") ?(tags = Logs.Tag.empty) fmt ->
write header tags continuation ppf fmt
in
{ Logs.report }
let setup style_renderer level =
Fmt_tty.setup_std_outputs ?style_renderer ();
Logs.set_level level;
Logs.set_reporter (reporter Fmt.stderr);
()
let logging =
Cmdliner.Term.(const setup $ Fmt_cli.style_renderer () $ Logs_cli.level ())
end
module Json_output = struct
let labels_of_tags (tags : Logs.Tag.set) : Json.t String_map.t =
Logs.Tag.fold
(fun tag map ->
match tag with
| V (tag_definition, tag_value) ->
let name = Logs.Tag.name tag_definition in
let tag_string =
Fmt.str "%a" (Logs.Tag.printer tag_definition) tag_value
in
String_map.update name (fun _v -> Some (`String tag_string)) map)
tags String_map.empty
let json_fields_of_tags (tags : Logs.Tag.set) : Json.t String_map.t =
let fields = Ecs.fields_of_tags tags in
let tags = Logs.Tag.rem Ecs.tag tags in
let labels = labels_of_tags tags |> String_map.bindings in
match labels with
| [] -> fields
| _ -> String_map.add "labels" (`Assoc labels) fields
let add_basic_fields (fields : Json.t String_map.t) level src message =
let add_if_new name thunk map =
if String_map.mem name map then
map
else
String_map.add name (thunk ()) map
in
let replace key value map =
String_map.remove key map |> String_map.add key value
in
add_if_new "@timestamp"
(fun () -> `String (Ecs.Epoch.to_timestamp (Ptime_clock.now ())))
fields
|> add_if_new "log.level" (fun () ->
`String (Logs.level_to_string (Some level)))
|> add_if_new "log.logger" (fun () -> `String (Logs.Src.name src))
|> replace "message" (`String message)
    (* Always include the ECS version we're targeting *)
|> replace "ecs.version" (`String Ecs.ecs_version)
let reporter ppf =
let report src level ~over k msgf =
let continuation _ =
over ();
k ()
in
let as_json _header tags k ppf fmt =
Fmt.kstr
(fun message ->
let fields =
let ecs_fields = json_fields_of_tags tags in
add_basic_fields ecs_fields level src message
in
let json : Json.t = `Assoc (String_map.bindings fields) in
Fmt.kpf k ppf "%s@." (Json.to_string json))
fmt
in
msgf @@ fun ?header ?(tags = Logs.Tag.empty) fmt ->
as_json header tags continuation ppf fmt
in
{ Logs.report }
let setup level =
Logs.set_level level;
Logs.set_reporter (reporter Fmt.stderr)
let logging = Cmdliner.Term.(const setup $ Logs_cli.level ())
end
type format =
| Line
| Json
let setup (format : format) style_renderer level =
match format with
| Line -> Line_output.setup style_renderer level
| Json -> Json_output.setup level
let format_conv : format Cmdliner.Arg.conv =
Cmdliner.Arg.enum [ ("line", Line); ("json", Json) ]
let log_format default =
let doc = "Log format" in
let docv = "LOG_FORMAT" in
Cmdliner.Arg.(
value & opt format_conv default & info [ "log-format" ] ~doc ~docv)
let logging ~default =
let format = log_format default in
Cmdliner.Term.(
const setup $ format $ Fmt_cli.style_renderer () $ Logs_cli.level ())
|
c7d5872589dfde78b4f0d63886ee58a7f03a49352a6b4749e22dc3a96035fe97 | clojure-interop/aws-api | AmazonPinpointAsyncClientBuilder.clj | (ns com.amazonaws.services.pinpoint.AmazonPinpointAsyncClientBuilder
"Fluent builder for AmazonPinpointAsync. Use of the builder is preferred over
using constructors of the client class."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.pinpoint AmazonPinpointAsyncClientBuilder]))
;; Thin wrapper over the Java static factory
;; AmazonPinpointAsyncClientBuilder.standard().
(defn *standard
  "returns: Create new instance of builder with all defaults set. - `com.amazonaws.services.pinpoint.AmazonPinpointAsyncClientBuilder`"
  (^com.amazonaws.services.pinpoint.AmazonPinpointAsyncClientBuilder []
    (AmazonPinpointAsyncClientBuilder/standard )))
;; Thin wrapper over AmazonPinpointAsyncClientBuilder.defaultClient():
;; builds a client with default credential and region provider chains.
(defn *default-client
  "returns: Default async client using the DefaultAWSCredentialsProviderChain and
  DefaultAwsRegionProviderChain chain - `com.amazonaws.services.pinpoint.AmazonPinpointAsync`"
  (^com.amazonaws.services.pinpoint.AmazonPinpointAsync []
    (AmazonPinpointAsyncClientBuilder/defaultClient )))
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.pinpoint/src/com/amazonaws/services/pinpoint/AmazonPinpointAsyncClientBuilder.clj | clojure | (ns com.amazonaws.services.pinpoint.AmazonPinpointAsyncClientBuilder
"Fluent builder for AmazonPinpointAsync. Use of the builder is preferred over
using constructors of the client class."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.pinpoint AmazonPinpointAsyncClientBuilder]))
(defn *standard
"returns: Create new instance of builder with all defaults set. - `com.amazonaws.services.pinpoint.AmazonPinpointAsyncClientBuilder`"
(^com.amazonaws.services.pinpoint.AmazonPinpointAsyncClientBuilder []
(AmazonPinpointAsyncClientBuilder/standard )))
(defn *default-client
"returns: Default async client using the DefaultAWSCredentialsProviderChain and
DefaultAwsRegionProviderChain chain - `com.amazonaws.services.pinpoint.AmazonPinpointAsync`"
(^com.amazonaws.services.pinpoint.AmazonPinpointAsync []
(AmazonPinpointAsyncClientBuilder/defaultClient )))
| |
fb21a0eabcf0e231f1f9274a4a56a3cb426baf55e02719557f147ee1b806df92 | aryx/ocamltarzan | expr2.ml | type ptype =
| BoolTy
| IntTy
| DoubleTy
| StringTy
| ArrayTy
| ObjectTy
(* with sexp *)
| null | https://raw.githubusercontent.com/aryx/ocamltarzan/4140f5102cee83a2ca7be996ca2d92e9cb035f9c/tests/expr2.ml | ocaml | with sexp | type ptype =
| BoolTy
| IntTy
| DoubleTy
| StringTy
| ArrayTy
| ObjectTy
|
10ba925c0612b8dc87986d2442b80f86145151e07bac847723c4d119a062fc37 | byorgey/AoC | 05.hs | #!/usr/bin/env stack
stack --resolver lts-19.28 script --package containers --package split --package mtl --package lens
import Control.Arrow ((>>>))
import Control.Lens (ix, use, (%=))
import Control.Monad.State
import Data.Char (isAlpha, isDigit, isSpace)
import Data.List (transpose)
import Data.List.Split (splitOn)
-- Entry point: read the puzzle input from stdin, solve both parts,
-- and print one answer per line.
main = interact $
  readInput >>> applyAll [solveA,solveB] >>> unlines
-- Apply every function in the list to the same argument, collecting results.
applyAll :: [a -> b] -> a -> [b]
applyAll fs a = [ f a | f <- fs ]
-- Input ---------------------------------------------------
-- Parsed puzzle input: the initial crate stacks plus the move procedure.
data Input = Input { stacks :: [[Char]], procedure :: [Move] } deriving (Eq, Show)
-- One rearrangement step: move 'number' crates from stack 'from' to stack
-- 'to' (stack numbers are 1-based, as in the puzzle text).
data Move = Move { number :: Int, from :: Int, to :: Int } deriving (Eq, Show)
-- Parse the raw input: the blank line separates the crate drawing from the
-- list of move instructions.
readInput :: String -> Input
readInput raw =
  case splitOn [""] (lines raw) of
    [stackLines, moveLines] -> Input (readStacks stackLines) (readMoves moveLines)
-- Turn the crate drawing (minus its trailing label row) into one string per
-- stack, topmost crate first.
readStacks :: [String] -> [[Char]]
readStacks rows =
  [ dropWhile isSpace column
  | column <- transpose (init rows)
  , any isAlpha column
  ]
-- Parse each "move N from F to T" line by keeping only its numeric words.
readMoves :: [String] -> [Move]
readMoves ls = [ toMove [ read w | w <- words l, all isDigit w ] | l <- ls ]
  where
    toMove [n, f, t] = Move n f t
-- Solving -------------------------------------------------
-- The answer string: the top crate of each stack, left to right.
type Output = String

solveA, solveB :: Input -> Output
solveA = solve True   -- part A: crates move one at a time (order reverses)
solveB = solve False  -- part B: crates move as one group (order preserved)

-- Run the whole move procedure over the initial stacks in the State monad,
-- then read off the head (top crate) of every stack.
solve :: Bool -> Input -> Output
solve shouldReverse (Input stks ms) = map head $ execState (mapM_ (execMove shouldReverse) ms) stks
-- Apply one move to the stacks state. The lens 'ix' targets convert the
-- 1-based stack numbers to 0-based list indices. When shouldReverse is True
-- the lifted crates are reversed before being pushed (one-at-a-time
-- semantics); otherwise they keep their order (moved as a group).
execMove :: Bool -> Move -> State [[Char]] ()
execMove shouldReverse (Move n f t) = do
  cs <- take n <$> use (ix (f-1))
  ix (f-1) %= drop n
  ix (t-1) %= ((if shouldReverse then reverse else id) cs ++)
| null | https://raw.githubusercontent.com/byorgey/AoC/a7fb36686ca3d7e07df26ff8183ffc526d091de0/2022/05/05.hs | haskell | resolver lts-19.28 script --package containers --package split --package mtl --package lens
Input ---------------------------------------------------
Solving ------------------------------------------------- | #!/usr/bin/env stack
import Control.Arrow ((>>>))
import Control.Lens (ix, use, (%=))
import Control.Monad.State
import Data.Char (isAlpha, isDigit, isSpace)
import Data.List (transpose)
import Data.List.Split (splitOn)
main = interact $
readInput >>> applyAll [solveA,solveB] >>> unlines
applyAll :: [a -> b] -> a -> [b]
applyAll fs a = map ($ a) fs
data Input = Input { stacks :: [[Char]], procedure :: [Move] } deriving (Eq, Show)
data Move = Move { number :: Int, from :: Int, to :: Int } deriving (Eq, Show)
readInput :: String -> Input
readInput = lines >>> splitOn [""] >>> \[ss,ms] -> Input (readStacks ss) (readMoves ms)
readStacks :: [String] -> [[Char]]
readStacks = init >>> transpose >>> filter (any isAlpha) >>> map (dropWhile isSpace)
readMoves :: [String] -> [Move]
readMoves = map (words >>> filter (all isDigit) >>> map read >>> mkMove)
where
mkMove [n,f,t] = Move n f t
type Output = String
solveA, solveB :: Input -> Output
solveA = solve True
solveB = solve False
solve :: Bool -> Input -> Output
solve shouldReverse (Input stks ms) = map head $ execState (mapM_ (execMove shouldReverse) ms) stks
execMove :: Bool -> Move -> State [[Char]] ()
execMove shouldReverse (Move n f t) = do
cs <- take n <$> use (ix (f-1))
ix (f-1) %= drop n
ix (t-1) %= ((if shouldReverse then reverse else id) cs ++)
|
6208e0f4533f116f5723347543288ce24cb6fbc59951dac7cfc0438921d4f6c5 | cl-rabbit/cl-bunny | basic.ack.lisp | (in-package :cl-bunny.test)
(plan 1)
(subtest "Basic.ack tests"
(subtest "Acknowledges a message with a valid (known) delivery tag"
(with-connection ()
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t))
(x (exchange.default)))
(publish x "bunneth" :routing-key q)
(sleep 0.5)
(is (queue.message-count q) 1)
(let ((message (queue.get)))
(isnt message nil)
(message.ack message))))
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t)))
(is (queue.message-count q) 0)))))
(subtest "Acknowledge multiple messages with a valid (known) delivery tag"
(with-connection ()
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t))
(x (exchange.default)))
(publish x "bunneth" :routing-key q)
(publish x "bunneth" :routing-key q)
(sleep 0.5)
(is (queue.message-count q) 2)
(queue.get)
(let ((message (queue.get)))
(isnt message nil)
(message.ack message :multiple t))))
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t)))
(is (queue.message-count q) 0)))))
(subtest "Acknowledges a message with a valid (known) delivery tag (:multiple nil)"
(with-connection ()
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t))
(x (exchange.default)))
(publish x "bunneth" :routing-key q)
(publish x "bunneth" :routing-key q)
(sleep 0.5)
(is (queue.message-count q) 2)
(queue.get)
(let ((message (queue.get)))
(isnt message nil)
(message.ack message))))
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t)))
(is (queue.message-count q) 1)))))
(subtest "Acknowledges a messages with a valid (known) delivery tag and auto-ack"
(with-connection ()
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t))
(x (exchange.default)))
(subscribe-sync q :no-ack t)
(publish x "bunneth" :routing-key q)
(let ((message (consume :one-shot t)))
(isnt message nil "message received")
(message.ack message :multiple t))
(sleep 1)
(is-error (queue.message-count q) 'channel-closed-error "causes a channel-level error")
(is (channel-open-p) nil "channel closed")))))
(subtest "Acknowledges a message with a invalid (random) delivery tag"
(with-connection ()
(let ((error))
(with-channel (:on-error (lambda (e)
(setf error e)))
(let ((q (queue.declare :name "cl-bunny.basic.ack.unknown-delivery-tag" :exclusive t))
(x (exchange.default)))
(publish x "bunneth" :routing-key q)
(sleep 0.5)
(is (queue.message-count q) 1)
(let ((message (queue.get)))
(isnt message nil)
(message.ack 82 :multiple t))
(sleep 1)))
(is-type error 'amqp:amqp-error-precondition-failed)))))
(finalize)
| null | https://raw.githubusercontent.com/cl-rabbit/cl-bunny/6da7fe161efc8d6bb0b8b09ac8efad03553d765c/t/integration/amqp/basic.ack.lisp | lisp | (in-package :cl-bunny.test)
(plan 1)
(subtest "Basic.ack tests"
(subtest "Acknowledges a message with a valid (known) delivery tag"
(with-connection ()
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t))
(x (exchange.default)))
(publish x "bunneth" :routing-key q)
(sleep 0.5)
(is (queue.message-count q) 1)
(let ((message (queue.get)))
(isnt message nil)
(message.ack message))))
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t)))
(is (queue.message-count q) 0)))))
(subtest "Acknowledge multiple messages with a valid (known) delivery tag"
(with-connection ()
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t))
(x (exchange.default)))
(publish x "bunneth" :routing-key q)
(publish x "bunneth" :routing-key q)
(sleep 0.5)
(is (queue.message-count q) 2)
(queue.get)
(let ((message (queue.get)))
(isnt message nil)
(message.ack message :multiple t))))
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t)))
(is (queue.message-count q) 0)))))
(subtest "Acknowledges a message with a valid (known) delivery tag (:multiple nil)"
(with-connection ()
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t))
(x (exchange.default)))
(publish x "bunneth" :routing-key q)
(publish x "bunneth" :routing-key q)
(sleep 0.5)
(is (queue.message-count q) 2)
(queue.get)
(let ((message (queue.get)))
(isnt message nil)
(message.ack message))))
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t)))
(is (queue.message-count q) 1)))))
(subtest "Acknowledges a messages with a valid (known) delivery tag and auto-ack"
(with-connection ()
(with-channel ()
(let ((q (queue.declare :name "cl-bunny.basic.ack.manual-acks" :exclusive t))
(x (exchange.default)))
(subscribe-sync q :no-ack t)
(publish x "bunneth" :routing-key q)
(let ((message (consume :one-shot t)))
(isnt message nil "message received")
(message.ack message :multiple t))
(sleep 1)
(is-error (queue.message-count q) 'channel-closed-error "causes a channel-level error")
(is (channel-open-p) nil "channel closed")))))
(subtest "Acknowledges a message with a invalid (random) delivery tag"
(with-connection ()
(let ((error))
(with-channel (:on-error (lambda (e)
(setf error e)))
(let ((q (queue.declare :name "cl-bunny.basic.ack.unknown-delivery-tag" :exclusive t))
(x (exchange.default)))
(publish x "bunneth" :routing-key q)
(sleep 0.5)
(is (queue.message-count q) 1)
(let ((message (queue.get)))
(isnt message nil)
(message.ack 82 :multiple t))
(sleep 1)))
(is-type error 'amqp:amqp-error-precondition-failed)))))
(finalize)
| |
8a510b71c50910a388bdc57492097a45af808f2f7cb0dafc92b90c1b8f9ceb84 | bpiel/guildsman | box.cljs | (ns re-com.box
(:require [clojure.string :as string]
[re-com.validate :refer [justify-style? justify-options-list align-style? align-options-list scroll-style?
scroll-options-list string-or-hiccup? css-style? html-attr?] :refer-macros [validate-args-macro]]))
;; When true, each component paints a distinctive background colour so the
;; flexbox layout can be inspected visually (see colours noted per component).
(def debug false)
;; ------------------------------------------------------------------------------------
;; Private Helper functions
;; ------------------------------------------------------------------------------------
(defn flex-child-style
  "Determines the value for the 'flex' attribute (which has grow, shrink and basis), based on the :size parameter.
   IMPORTANT: The term 'size' means width of the item in the case of flex-direction 'row' OR height of the item in the case of flex-direction 'column'.
   Flex property explanation:
    - grow   Integer ratio (used with other siblings) to determine how a flex item grows its size if there is extra space to distribute. 0 for no growing.
    - shrink Integer ratio (used with other siblings) to determine how a flex item shrinks its size if space needs to be removed. 0 for no shrinking.
    - basis  Initial size (width, actually) of item before any growing or shrinking. Can be any size value, e.g. 60%, 100px, auto
             Note: auto will cause the initial size to be calculated to take up as much space as possible, in conjunction with its siblings :flex settings.
   Supported values:
    - initial            '0 1 auto'  - Use item's width/height for dimensions (or content dimensions if w/h not specified). Never grow. Shrink (to min-size) if necessary.
                                       Good for creating boxes with fixed maximum size, but that can shrink to a fixed smaller size (min-width/height) if space becomes tight.
                                       NOTE: When using initial, you should also set a width/height value (depending on flex-direction) to specify its default size
                                             and an optional min-width/height value to specify the size it can shrink to.
    - auto               '1 1 auto'  - Use item's width/height for dimensions. Grow if necessary. Shrink (to min-size) if necessary.
                                       Good for creating really flexible boxes that will gobble as much available space as they are allowed or shrink as much as they are forced to.
    - none               '0 0 auto'  - Use item's width/height for dimensions (or content dimensions if not specified). Never grow. Never shrink.
                                       Good for creating rigid boxes that stick to their width/height if specified, otherwise their content size.
    - 100px              '0 0 100px' - Non flexible 100px size (in the flex direction) box.
                                       Good for fixed headers/footers and side bars of an exact size.
    - 60%                '60 1 0px'  - Set the item's size (its width/height depending on flex-direction) to be 60% of the parent container's width/height.
                                       NOTE: If you use this, then all siblings with percentage values must add up to 100%.
    - 60                 '60 1 0px'  - Same as percentage above.
    - grow shrink basis  'grow shrink basis' - If none of the common values above meet your needs, this gives you precise control.
   If number of words is not 1 or 3, an exception is thrown.
   Reference: https://css-tricks.com/snippets/css/a-guide-to-flexbox/
   Regex101 testing: ^(initial|auto|none)|(\\d+)(px|%|em)|(\\d+)\\w(\\d+)\\w(.*) - remove double backslashes"
  [size]
  ;; Fix: the 'size-only' and 'basis' bindings had been lost (only their
  ;; trailing comments survived), leaving unresolved symbols; restored below
  ;; consistently with the docstring's examples.
  ;; TODO: Could make initial/auto/none into keywords???
  (let [split-size      (string/split (string/trim size) #"\s+")               ;; Split into words separated by whitespace
        split-count     (count split-size)
        _               (assert (contains? #{1 3} split-count) "Must pass either 1 or 3 words to flex-child-style")
        size-only       (when (= split-count 1) (first split-size))            ;; Contains value when only one word passed (e.g. auto, 60px)
        split-size-only (when size-only (string/split size-only #"(\d+)(.*)")) ;; Split into number + string
        [_ num units]   (when size-only split-size-only)                       ;; grab number and units
        pass-through?   (nil? num)                                             ;; If we can't split, then we'll pass this straight through
        grow-ratio?     (or (= units "%") (= units "") (nil? units))           ;; Determine case for using grow ratio
        grow            (if grow-ratio? num "0")                               ;; Set grow based on percent or integer, otherwise no grow
        shrink          (if grow-ratio? "1" "0")                               ;; If grow set, then set shrink to even shrinkage as well
        basis           (if grow-ratio? "0px" size)                            ;; If grow set, basis is 0px, otherwise use the passed in size (e.g. 100px, 5em)
        flex            (if (and size-only (not pass-through?))
                          (str grow " " shrink " " basis)
                          size)]
    {:-webkit-flex flex
     :flex flex}))
(defn flex-flow-style
  "Returns a style map setting flex-flow to the given value, including the
   -webkit prefixed variant for older browsers."
  [flex-flow]
  (zipmap [:-webkit-flex-flow :flex-flow] (repeat flex-flow)))
(defn justify-style
  "Maps the :justify keyword to a style map for the flex 'justify-content'
   attribute (plus its -webkit prefixed twin), which controls how children
   are aligned along the main axis. Throws on an unrecognised keyword.
   Reference: https://css-tricks.com/snippets/css/a-guide-to-flexbox/#justify-content"
  [justify]
  (let [value (case justify
                :start   "flex-start"
                :end     "flex-end"
                :center  "center"
                :between "space-between"
                :around  "space-around")]
    {:-webkit-justify-content value
     :justify-content         value}))
(defn align-style
  "Builds a style map for one of the flex align attributes (e.g. :align-items,
   :align-self), pairing it with its -webkit prefixed twin. These attributes
   control alignment of children on the cross axis. Throws on an
   unrecognised align keyword.
   Reference: https://css-tricks.com/snippets/css/a-guide-to-flexbox/#align-items"
  [attribute align]
  (let [prefixed (keyword (str "-webkit-" (name attribute)))
        value    (case align
                   :start    "flex-start"
                   :end      "flex-end"
                   :center   "center"
                   :baseline "baseline"
                   :stretch  "stretch")]
    {prefixed  value
     attribute value}))
(defn scroll-style
  "Translates a scroll keyword (:auto :off :on :spill) into a style map for
   the given overflow attribute (:overflow, :overflow-x or :overflow-y).
   The keywords intentionally differ from the raw CSS overflow values."
  [attribute scroll]
  (let [value (case scroll
                :auto  "auto"
                :off   "hidden"
                :on    "scroll"
                :spill "visible")]
    {attribute value}))
;; ------------------------------------------------------------------------------------
;; Private Component: box-base (debug color: lightblue)
;; ------------------------------------------------------------------------------------
;; Shared implementation: merges all the optional layout parameters into a
;; single style map and renders a flex [:div] around child.
;; Debug colour: lightblue.
(defn- box-base
  "This should generally NOT be used as it is the basis for the box, scroller and border components"
  [& {:keys [size scroll h-scroll v-scroll width height min-width min-height max-width max-height justify align align-self
             margin padding border l-border r-border t-border b-border radius bk-color child class-name class style attr]}]
  ;; nil-valued options produce nil entries which merge ignores, so only the
  ;; parameters actually supplied contribute to the final style map.
  (let [s (merge
            (flex-flow-style "inherit")
            (flex-child-style size)
            (when scroll (scroll-style :overflow scroll))
            (when h-scroll (scroll-style :overflow-x h-scroll))
            (when v-scroll (scroll-style :overflow-y v-scroll))
            (when width {:width width})
            (when height {:height height})
            (when min-width {:min-width min-width})
            (when min-height {:min-height min-height})
            (when max-width {:max-width max-width})
            (when max-height {:max-height max-height})
            (when justify (justify-style justify))
            (when align (align-style :align-items align))
            (when align-self (align-style :align-self align-self))
            (when margin {:margin margin}) ;; margin and padding: "all" OR "top&bottom right&left" OR "top right bottom left"
            (when padding {:padding padding})
            (when border {:border border})
            (when l-border {:border-left l-border})
            (when r-border {:border-right r-border})
            (when t-border {:border-top t-border})
            (when b-border {:border-bottom b-border})
            (when radius {:border-radius radius})
            (if bk-color
              {:background-color bk-color}
              (if debug {:background-color "lightblue"} {}))
            style)]
    [:div
     (merge
       ;; NOTE(review): class-name is concatenated directly before
       ;; "display-flex " — callers appear to pass a trailing space
       ;; (e.g. "rc-box ") — confirm against the calling components.
       {:class (str class-name "display-flex " class) :style s}
       attr)
     child]))
;; ------------------------------------------------------------------------------------
;; Component: gap (debug color: chocolate)
;; ------------------------------------------------------------------------------------
;; Argument spec for [[gap]], consumed by validate-args-macro in dev builds.
(def gap-args-desc
  [{:name :size   :required true  :type "string"        :validate-fn string?    :description "the length of the whitespace. Typically, an absolute CSS length like 10px or 10em, but can be a stretchy proportional amount like 2"}
   {:name :width  :required false :type "string"        :validate-fn string?    :description "a CSS width style"}
   {:name :height :required false :type "string"        :validate-fn string?    :description "a CSS height style"}
   {:name :class  :required false :type "string"        :validate-fn string?    :description "CSS class names, space separated"}
   {:name :style  :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr   :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
;; Debug colour: chocolate.
(defn gap
  "Returns a component which produces a gap between children in a v-box/h-box along the main axis"
  [& {:keys [size width height class style attr]
      :as args}]
  {:pre [(validate-args-macro gap-args-desc args "gap")]}
  ;; The gap is just an empty div whose flex size creates the whitespace.
  (let [s (merge
            (when size (flex-child-style size))
            (when width {:width width})
            (when height {:height height})
            (when debug {:background-color "chocolate"})
            style)]
    [:div
     (merge
       {:class (str "rc-gap " class) :style s}
       attr)]))
;; ------------------------------------------------------------------------------------
;; Component: line
;; ------------------------------------------------------------------------------------
;; Argument spec for [[line]], consumed by validate-args-macro in dev builds.
(def line-args-desc
  [{:name :size  :required false :default "1px"       :type "string"        :validate-fn string?    :description "a CSS style for the thickness of the line. Usually px, % or em"}
   {:name :color :required false :default "lightgray" :type "string"        :validate-fn string?    :description "a CSS color"}
   {:name :class :required false                      :type "string"        :validate-fn string?    :description "CSS class names, space separated"}
   {:name :style :required false                      :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr  :required false                      :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
(defn line
  "Returns a component which produces a line between children in a v-box/h-box along the main axis.
   Specify size in pixels and a standard CSS color. Defaults to a 1px lightgray line"
  [& {:keys [size color class style attr]
      :or {size "1px" color "lightgray"}
      :as args}]
  {:pre [(validate-args-macro line-args-desc args "line")]}
  ;; A non-growing, non-shrinking flex child of thickness `size`, filled
  ;; with `color`.
  (let [s (merge
            (flex-child-style (str "0 0 " size))
            {:background-color color}
            style)]
    [:div
     (merge
       {:class (str "rc-line " class) :style s}
       attr)]))
;; ------------------------------------------------------------------------------------
;; Component: h-box (debug color: gold)
;; ------------------------------------------------------------------------------------
;; Argument contract for [h-box]; checked at call time by validate-args-macro.
(def h-box-args-desc
  [{:name :children :required true :type "vector" :validate-fn sequential? :description "a vector (or list) of components"}
   {:name :size :required false :default "none" :type "string" :validate-fn string? :description [:span "equivalent to CSS style " [:span.bold "flex"] "." [:br] "Examples: " [:code "initial"] ", " [:code "auto"] ", " [:code "none"]", " [:code "100px"] ", " [:code "2"] " or a generic triple of " [:code "grow shrink basis"]]}
   {:name :width :required false :type "string" :validate-fn string? :description "a CSS width style"}
   {:name :height :required false :type "string" :validate-fn string? :description "a CSS height style"}
   {:name :min-width :required false :type "string" :validate-fn string? :description "a CSS width style. The minimum width to which the box can shrink"}
   {:name :min-height :required false :type "string" :validate-fn string? :description "a CSS height style. The minimum height to which the box can shrink"}
   {:name :max-width :required false :type "string" :validate-fn string? :description "a CSS width style. The maximum width to which the box can grow"}
   {:name :max-height :required false :type "string" :validate-fn string? :description "a CSS height style. The maximum height to which the box can grow"}
   {:name :justify :required false :default :start :type "keyword" :validate-fn justify-style? :description [:span "equivalent to CSS style " [:span.bold "justify-content"] "." [:br] "One of " justify-options-list]}
   {:name :align :required false :default :stretch :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-items"] "." [:br] " One of " align-options-list]}
   {:name :align-self :required false :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-self"] "." [:br] "Used when a child must override the parent's align-items setting."]}
   {:name :margin :required false :type "string" :validate-fn string? :description "a CSS margin style"}
   {:name :padding :required false :type "string" :validate-fn string? :description "a CSS padding style"}
   {:name :gap :required false :type "string" :validate-fn string? :description "the amount of whitespace to put between each child. Typically, an absolute CSS length like 10px or 10em, but can be a stretchy proportional amount like 2"}
   {:name :class :required false :type "string" :validate-fn string? :description "CSS class names, space separated"}
   {:name :style :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
(defn h-box
  "Returns hiccup which produces a horizontal box.
   It's primary role is to act as a container for components and lays it's children from left to right.
   By default, it also acts as a child under it's parent.
   See h-box-args-desc for the full argument contract."
  [& {:keys [size width height min-width min-height max-width max-height justify align align-self margin padding gap children class style attr]
      :or {size "none" justify :start align :stretch}
      :as args}]
  {:pre [(validate-args-macro h-box-args-desc args "h-box")]}
  (let [s (merge
            (flex-flow-style "row nowrap")
            (flex-child-style size)
            (when width {:width width})
            (when height {:height height})
            (when min-width {:min-width min-width})
            (when min-height {:min-height min-height})
            (when max-width {:max-width max-width})
            (when max-height {:max-height max-height})
            (justify-style justify)
            (align-style :align-items align)
            (when align-self (align-style :align-self align-self))
            (when margin {:margin margin}) ;; margin and padding: "all" OR "top&bottom right&left" OR "top right bottom left"
            (when padding {:padding padding})
            (when debug {:background-color "gold"})
            style)
        ;; The explicit :width on the gap was (per the original, now-mangled TODO comment)
        ;; required to get around a Chrome bug; remove once fixed. NOTE(review): the
        ;; closing bracket of this vector was missing in the corrupted source - restored here.
        gap-form (when gap [re-com.box/gap
                            :size gap
                            :width gap])
        children (if gap
                   (interpose gap-form (filter identity children)) ;; filter is to remove possible nils so we don't add unwanted gaps
                   children)]
    (into [:div
           (merge
             {:class (str "rc-h-box display-flex " class) :style s}
             attr)]
          children)))
;; ------------------------------------------------------------------------------------
;; Component: v-box (debug color: antiquewhite)
;; ------------------------------------------------------------------------------------
;; Argument contract for [v-box]; checked at call time by validate-args-macro.
(def v-box-args-desc
  [{:name :children :required true :type "vector" :validate-fn sequential? :description "a vector (or list) of components"}
   {:name :size :required false :default "none" :type "string" :validate-fn string? :description [:span "equivalent to CSS style " [:span.bold "flex"] "." [:br] "Examples: " [:code "initial"] ", " [:code "auto"] ", " [:code "none"]", " [:code "100px"] ", " [:code "2"] " or a generic triple of " [:code "grow shrink basis"]]}
   {:name :width :required false :type "string" :validate-fn string? :description "a CSS width style"}
   {:name :height :required false :type "string" :validate-fn string? :description "a CSS height style"}
   {:name :min-width :required false :type "string" :validate-fn string? :description "a CSS width style. The minimum width to which the box can shrink"}
   {:name :min-height :required false :type "string" :validate-fn string? :description "a CSS height style. The minimum height to which the box can shrink"}
   {:name :max-width :required false :type "string" :validate-fn string? :description "a CSS width style. The maximum width to which the box can grow"}
   {:name :max-height :required false :type "string" :validate-fn string? :description "a CSS height style. The maximum height to which the box can grow"}
   {:name :justify :required false :default :start :type "keyword" :validate-fn justify-style? :description [:span "equivalent to CSS style " [:span.bold "justify-content"] "." [:br] "One of " justify-options-list]}
   {:name :align :required false :default :stretch :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-items"] "." [:br] " One of " align-options-list]}
   {:name :align-self :required false :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-self"] "." [:br] "Used when a child must override the parent's align-items setting."]}
   {:name :margin :required false :type "string" :validate-fn string? :description "a CSS margin style"}
   {:name :padding :required false :type "string" :validate-fn string? :description "a CSS padding style"}
   {:name :gap :required false :type "string" :validate-fn string? :description "the amount of whitespace to put between each child. Typically, an absolute CSS length like 10px or 10em, but can be a stretchy proportional amount like 2"}
   {:name :class :required false :type "string" :validate-fn string? :description "CSS class names, space separated"}
   {:name :style :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
(defn v-box
  "Returns hiccup which produces a vertical box.
   It's primary role is to act as a container for components and lays it's children from top to bottom.
   By default, it also acts as a child under it's parent.
   See v-box-args-desc for the full argument contract."
  [& {:keys [size width height min-width min-height max-width max-height justify align align-self margin padding gap children class style attr]
      :or {size "none" justify :start align :stretch}
      :as args}]
  {:pre [(validate-args-macro v-box-args-desc args "v-box")]}
  (let [s (merge
            (flex-flow-style "column nowrap")
            (flex-child-style size)
            (when width {:width width})
            (when height {:height height})
            (when min-width {:min-width min-width})
            (when min-height {:min-height min-height})
            (when max-width {:max-width max-width})
            (when max-height {:max-height max-height})
            (justify-style justify)
            (align-style :align-items align)
            (when align-self (align-style :align-self align-self))
            (when margin {:margin margin}) ;; margin and padding: "all" OR "top&bottom right&left" OR "top right bottom left"
            (when padding {:padding padding})
            (when debug {:background-color "antiquewhite"})
            style)
        ;; The explicit :height on the gap was (per the original, now-mangled TODO comment)
        ;; required to get around a Chrome bug; remove once fixed. NOTE(review): the
        ;; closing bracket of this vector was missing in the corrupted source - restored here.
        gap-form (when gap [re-com.box/gap
                            :size gap
                            :height gap])
        children (if gap
                   (interpose gap-form (filter identity children)) ;; filter is to remove possible nils so we don't add unwanted gaps
                   children)]
    (into [:div
           (merge
             {:class (str "rc-v-box display-flex " class) :style s}
             attr)]
          children)))
;; ------------------------------------------------------------------------------------
;; Component: box
;; ------------------------------------------------------------------------------------
;; Argument contract for [box]; checked at call time by validate-args-macro.
(def box-args-desc
  [{:name :child :required true :type "string | hiccup" :validate-fn string-or-hiccup? :description "a component (or string)"}
   {:name :size :required false :default "none" :type "string" :validate-fn string? :description [:span "equivalent to CSS style " [:span.bold "flex"] "." [:br] "Examples: " [:code "initial"] ", " [:code "auto"] ", " [:code "none"]", " [:code "100px"] ", " [:code "2"] " or a generic triple of " [:code "grow shrink basis"]]}
   {:name :width :required false :type "string" :validate-fn string? :description "a CSS width style"}
   {:name :height :required false :type "string" :validate-fn string? :description "a CSS height style"}
   {:name :min-width :required false :type "string" :validate-fn string? :description "a CSS width style. The minimum width to which the box can shrink"}
   {:name :min-height :required false :type "string" :validate-fn string? :description "a CSS height style. The minimum height to which the box can shrink"}
   {:name :max-width :required false :type "string" :validate-fn string? :description "a CSS width style. The maximum width to which the box can grow"}
   {:name :max-height :required false :type "string" :validate-fn string? :description "a CSS height style. The maximum height to which the box can grow"}
   {:name :justify :required false :default :start :type "keyword" :validate-fn justify-style? :description [:span "equivalent to CSS style " [:span.bold "justify-content"] "." [:br] "One of " justify-options-list]}
   {:name :align :required false :default :stretch :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-items"] "." [:br] " One of " align-options-list]}
   {:name :align-self :required false :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-self"] "." [:br] "Used when a child must override the parent's align-items setting."]}
   {:name :margin :required false :type "string" :validate-fn string? :description "a CSS margin style"}
   {:name :padding :required false :type "string" :validate-fn string? :description "a CSS padding style"}
   {:name :class :required false :type "string" :validate-fn string? :description "CSS class names, space separated"}
   {:name :style :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
(defn box
  "Returns hiccup which produces a box, generally used as a child of a v-box or an h-box.
   By default it also acts as a container for further child components, or another h-box or v-box.
   Thin wrapper: forwards everything to the private box-base with the \"rc-box \" class name."
  [& {:keys [size width height min-width min-height max-width max-height justify align align-self margin padding child class style attr]
      :or {size "none"}
      :as args}]
  {:pre [(validate-args-macro box-args-desc args "box")]}
  (box-base :class-name "rc-box "
            :child      child
            :size       size
            :width      width
            :height     height
            :min-width  min-width
            :min-height min-height
            :max-width  max-width
            :max-height max-height
            :justify    justify
            :align      align
            :align-self align-self
            :margin     margin
            :padding    padding
            :class      class
            :style      style
            :attr       attr))
;; ------------------------------------------------------------------------------------
;; Component: scroller
;; ------------------------------------------------------------------------------------
;; Argument contract for [scroller]; checked at call time by validate-args-macro.
(def scroller-args-desc
  [{:name :child :required true :type "string | hiccup" :validate-fn string-or-hiccup? :description "a component (or string)"}
   {:name :size :required false :default "auto" :type "string" :validate-fn string? :description [:span "equivalent to CSS style " [:span.bold "flex"] "." [:br] "Examples: " [:code "initial"] ", " [:code "auto"] ", " [:code "none"]", " [:code "100px"] ", " [:code "2"] " or a generic triple of " [:code "grow shrink basis"]]}
   {:name :scroll :required false :default "auto" :type "keyword" :validate-fn scroll-style? :description [:span "Sets both h-scroll and v-scroll at once: " [:br]
                                                                                                          [:code ":auto"] ": only show scroll bar(s) if the content is larger than the scroller" [:br]
                                                                                                          [:code ":on"] ": always show scroll bars" [:br]
                                                                                                          [:code ":off"] ": never show scroll bar(s). Content which is not in the bounds of the scroller can not be seen" [:br]
                                                                                                          [:code ":spill"] ": never show scroll bar(s). Content which is not in the bounds of the scroller spills all over the place"]}
   {:name :h-scroll :required false :type "keyword" :validate-fn scroll-style? :description [:span "see " [:code ":scroll"] ". Overrides that setting"]}
   {:name :v-scroll :required false :type "keyword" :validate-fn scroll-style? :description [:span "see " [:code ":scroll"] ". Overrides that setting"]}
   {:name :width :required false :type "string" :validate-fn string? :description "initial width"}
   {:name :height :required false :type "string" :validate-fn string? :description "initial height"}
   {:name :min-width :required false :type "string" :validate-fn string? :description "a CSS width style. The minimum width to which the box can shrink"}
   {:name :min-height :required false :type "string" :validate-fn string? :description "a CSS height style. The minimum height to which the box can shrink"}
   {:name :max-width :required false :type "string" :validate-fn string? :description "a CSS width style. The maximum width to which the box can grow"}
   {:name :max-height :required false :type "string" :validate-fn string? :description "a CSS height style. The maximum height to which the box can grow"}
   {:name :justify :required false :default :start :type "keyword" :validate-fn justify-style? :description [:span "equivalent to CSS style " [:span.bold "justify-content"] "." [:br] "One of " justify-options-list]}
   {:name :align :required false :default :stretch :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-items"] "." [:br] " One of " align-options-list]}
   {:name :align-self :required false :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-self"] "." [:br] "Used when a child must override the parent's align-items setting."]}
   {:name :margin :required false :type "string" :validate-fn string? :description "a CSS margin style"}
   {:name :padding :required false :type "string" :validate-fn string? :description "a CSS padding style"}
   {:name :class :required false :type "string" :validate-fn string? :description "CSS class names, space separated"}
   {:name :style :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
(defn scroller
  "Returns hiccup which produces a scroller component - the re-com way of adding
   scroll bars to a box (rather than styling the box itself).
   IMPORTANT: because this component becomes the flex child in place of the component
   it wraps, you must copy the wrapped component's size attributes onto this one.
   Scroll behaviour is controlled via three keyword arguments:
     - :h-scroll  horizontal scroll bar behaviour
     - :v-scroll  vertical scroll bar behaviour
     - :scroll    sets both at once (a :h-scroll/:v-scroll value overrides it)
   Each accepts:
     :auto  [DEFAULT] only show scroll bar(s) when the content is larger than the scroller
     :on    always show scroll bar(s)
     :off   never show scroll bar(s); overflowing content is clipped and can not be seen
     :spill never show scroll bar(s); overflowing content spills all over the place"
  [& {:keys [size scroll h-scroll v-scroll width height min-width min-height max-width max-height justify align align-self margin padding child class style attr]
      :or {size "auto"}
      :as args}]
  {:pre [(validate-args-macro scroller-args-desc args "scroller")]}
  ;; Default :scroll to :auto only when the caller specified none of the three scroll args.
  (let [scroll (if (nil? scroll)
                 (when (and (nil? v-scroll) (nil? h-scroll)) :auto)
                 scroll)]
    (box-base :class-name "rc-scroller "
              :child      child
              :size       size
              :scroll     scroll
              :h-scroll   h-scroll
              :v-scroll   v-scroll
              :width      width
              :height     height
              :min-width  min-width
              :min-height min-height
              :max-width  max-width
              :max-height max-height
              :justify    justify
              :align      align
              :align-self align-self
              :margin     margin
              :padding    padding
              :class      class
              :style      style
              :attr       attr)))
;; ------------------------------------------------------------------------------------
;; Component: border
;; ------------------------------------------------------------------------------------
;; Argument contract for [border]; checked at call time by validate-args-macro.
(def border-args-desc
  [{:name :child :required true :type "string | hiccup" :validate-fn string-or-hiccup? :description "a component (or string)"}
   {:name :border :required false :default "1px solid lightgrey" :type "string" :validate-fn string? :description "a CSS border style. A convenience to describe all borders in one parameter"}
   {:name :l-border :required false :type "string" :validate-fn string? :description [:span "a CSS border style for the left border. Overrides " [:code ":border"]]}
   {:name :r-border :required false :type "string" :validate-fn string? :description [:span "a CSS border style for the right border. Overrides " [:code ":border"]]}
   {:name :t-border :required false :type "string" :validate-fn string? :description [:span "a CSS border style for the top border. Overrides " [:code ":border"]]}
   {:name :b-border :required false :type "string" :validate-fn string? :description [:span "a CSS border style for the bottom. Overrides " [:code ":border"]]}
   {:name :radius :required false :type "string" :validate-fn string? :description "a CSS radius style eg.\"2px\""}
   {:name :size :required false :default "none" :type "string" :validate-fn string? :description [:span "equivalent to CSS style " [:span.bold "flex"] "." [:br] "Examples: " [:code "initial"] ", " [:code "auto"] ", " [:code "none"]", " [:code "100px"] ", " [:code "2"] " or a generic triple of " [:code "grow shrink basis"]]}
   {:name :width :required false :type "string" :validate-fn string? :description "a CSS style describing the initial width"}
   {:name :height :required false :type "string" :validate-fn string? :description "a CSS style describing the initial height"}
   {:name :min-width :required false :type "string" :validate-fn string? :description "a CSS width style. The minimum width to which the box can shrink"}
   {:name :min-height :required false :type "string" :validate-fn string? :description "a CSS height style. The minimum height to which the box can shrink"}
   {:name :max-width :required false :type "string" :validate-fn string? :description "a CSS width style. The maximum width to which the box can grow"}
   {:name :max-height :required false :type "string" :validate-fn string? :description "a CSS height style. The maximum height to which the box can grow"}
   {:name :margin :required false :type "string" :validate-fn string? :description "a CSS margin style"}
   {:name :padding :required false :type "string" :validate-fn string? :description "a CSS padding style"}
   {:name :class :required false :type "string" :validate-fn string? :description "CSS class names, space separated"}
   {:name :style :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
(defn border
  "Returns hiccup which produces a border component - the re-com way of putting a
   border around a box (rather than styling the box itself).
   border property syntax: '<border-width> || <border-style> || <color>'
     - border-width: thin, medium, thick or standard CSS size (e.g. 2px, 0.5em)
     - border-style: none, hidden, dotted, dashed, solid, double, groove, ridge, inset, outset
     - color: standard CSS color (e.g. grey #88ffee)
   If no border argument at all is supplied, '1px solid lightgrey' is used."
  [& {:keys [size width height min-width min-height max-width max-height margin padding border l-border r-border t-border b-border radius child class style attr]
      :or {size "none"}
      :as args}]
  {:pre [(validate-args-macro border-args-desc args "border")]}
  ;; Fall back to the default border only when every border argument is absent.
  (let [border (if (every? nil? [border l-border r-border t-border b-border])
                 "1px solid lightgrey"
                 border)]
    (box-base :class-name "rc-border "
              :child      child
              :size       size
              :width      width
              :height     height
              :min-width  min-width
              :min-height min-height
              :max-width  max-width
              :max-height max-height
              :margin     margin
              :padding    padding
              :border     border
              :l-border   l-border
              :r-border   r-border
              :t-border   t-border
              :b-border   b-border
              :radius     radius
              :class      class
              :style      style
              :attr       attr)))
Private Helper functions
------------------------------------------------------------------------------------
TODO: Could make initial/auto/none into keywords???
Split into words separated by whitespace
Split into number + string
grab number and units
If we can't split, then we'll pass this straign through
Determine case for using grow ratio
Set grow based on percent or integer, otherwise no grow
If grow set, then set shrink to even shrinkage as well
------------------------------------------------------------------------------------
Private Component: box-base (debug color: lightblue)
------------------------------------------------------------------------------------
margin and padding: "all" OR "top&bottom right&left" OR "top right bottom left"
------------------------------------------------------------------------------------
Component: gap (debug color: chocolate)
------------------------------------------------------------------------------------
------------------------------------------------------------------------------------
Component: line
------------------------------------------------------------------------------------
------------------------------------------------------------------------------------
Component: h-box (debug color: gold)
------------------------------------------------------------------------------------
margin and padding: "all" OR "top&bottom right&left" OR "top right bottom left"
filter is to remove possible nils so we don't add unwanted gaps
------------------------------------------------------------------------------------
------------------------------------------------------------------------------------
margin and padding: "all" OR "top&bottom right&left" OR "top right bottom left"
filter is to remove possible nils so we don't add unwanted gaps
------------------------------------------------------------------------------------
Component: box
------------------------------------------------------------------------------------
------------------------------------------------------------------------------------
Component: scroller
------------------------------------------------------------------------------------
------------------------------------------------------------------------------------
Component: border
------------------------------------------------------------------------------------
| (ns re-com.box
(:require [clojure.string :as string]
[re-com.validate :refer [justify-style? justify-options-list align-style? align-options-list scroll-style?
scroll-options-list string-or-hiccup? css-style? html-attr?] :refer-macros [validate-args-macro]]))
;; When true, box components paint distinctive background colors (h-box gold,
;; v-box antiquewhite, gap chocolate, box-base lightblue) to visualise the flex layout.
(def debug false)
(defn flex-child-style
  "Determines the value for the 'flex' attribute (which has grow, shrink and basis), based on the :size parameter.
   IMPORTANT: 'size' means width of the item for flex-direction 'row', or height for 'column'.
   Supported values (resulting 'grow shrink basis'):
    - initial            '0 1 auto'  - use the item's width/height; never grow; shrink (to min-size) if necessary
    - auto               '1 1 auto'  - use the item's width/height; grow and shrink as required
    - none               '0 0 auto'  - rigid; never grow, never shrink
    - 100px              '0 0 100px' - non-flexible fixed size (px/em) in the flex direction
    - 60% or 60          '60 1 0px'  - proportional ratio of the container (percentage siblings should sum to 100%)
    - grow shrink basis  passed through verbatim for precise control
   Exactly 1 or 3 words must be supplied, otherwise an assertion fires.
   Reference: https://css-tricks.com/snippets/css/a-guide-to-flexbox/#flexibility"
  [size]
  ;; NOTE(review): the original binding lines were lost in this copy of the file; this
  ;; body was reconstructed from the behaviour spelled out in the docstring above and
  ;; the surviving bindings - confirm against upstream re-com before relying on it.
  (let [split-size    (string/split (string/trim size) #"\s+")          ;; Split into words separated by whitespace
        split-count   (count split-size)
        _             (assert (contains? #{1 3} split-count) "Must pass either 1 or 3 words to flex-child-style")
        size-only     (when (= split-count 1) (first split-size))       ;; the single word (e.g. auto, 60px) when one word passed
        [_ num units] (when size-only
                        (re-matches #"(\d+)(px|%|em)?" size-only))      ;; grab number and units
        pass-through? (nil? num)                                        ;; no leading number (initial/auto/none) => pass straight through
        grow-ratio?   (or (= units "%") (nil? units) (= units ""))      ;; bare number or percentage => use it as the grow ratio
        grow          (if grow-ratio? num "0")
        shrink        (if grow-ratio? "1" "0")                          ;; when growing, allow even shrinkage as well
        basis         (if grow-ratio? "0px" size)                       ;; otherwise the passed size becomes the fixed basis
        flex          (if (and size-only (not pass-through?))
                        (str grow " " shrink " " basis)
                        size)]
    {:-webkit-flex flex
     :flex flex}))
(defn flex-flow-style
  "Cross-browser helper: emits the flex-flow CSS attribute along with its webkit-prefixed twin."
  [flex-flow]
  (zipmap [:-webkit-flex-flow :flex-flow] (repeat flex-flow)))
(defn justify-style
  "Maps the :justify keyword to the flex 'justify-content' attribute (plus webkit prefix).
   This determines how children are aligned along the main axis.
   Reference: css-tricks a-guide-to-flexbox, justify-content property"
  [justify]
  (let [justify-content (case justify
                          :start   "flex-start"
                          :end     "flex-end"
                          :center  "center"
                          :between "space-between"
                          :around  "space-around")]
    {:justify-content justify-content
     :-webkit-justify-content justify-content}))
(defn align-style
  "Maps an :align keyword to the given flex alignment attribute (e.g. :align-items,
   :align-self), emitting both the plain and the webkit-prefixed attribute.
   This determines how children are aligned on the cross axis.
   Reference: css-tricks a-guide-to-flexbox, align-items property"
  [attribute align]
  (let [value (case align
                :start    "flex-start"
                :end      "flex-end"
                :center   "center"
                :baseline "baseline"
                :stretch  "stretch")
        webkit-attribute (keyword (str "-webkit-" (name attribute)))]
    {webkit-attribute value
     attribute value}))
(defn scroll-style
  "Maps a :scroll keyword to a CSS overflow value under the given attribute
   (e.g. :overflow, :overflow-x, :overflow-y). Because scroll keywords are
   translated into overflow terms, the keyword doesn't match the attribute value."
  [attribute scroll]
  (let [overflow (case scroll
                   :auto  "auto"
                   :on    "scroll"
                   :off   "hidden"
                   :spill "visible")]
    {attribute overflow}))
(defn- box-base
  "Private workhorse behind box, scroller and border - generally NOT for direct use.
   Builds a single style map (later entries win on key collision; the caller's :style
   merges last so it overrides everything) and wraps child in a flex div."
  ;; NOTE(review): margin is destructured but never merged into the style map below -
  ;; possibly a dropped line; confirm against upstream re-com.
  [& {:keys [size scroll h-scroll v-scroll width height min-width min-height max-width max-height justify align align-self
             margin padding border l-border r-border t-border b-border radius bk-color child class-name class style attr]}]
  (let [s (merge
            (flex-flow-style "inherit")
            (flex-child-style size)
            (when scroll (scroll-style :overflow scroll))
            (when h-scroll (scroll-style :overflow-x h-scroll))
            (when v-scroll (scroll-style :overflow-y v-scroll))
            (when width {:width width})
            (when height {:height height})
            (when min-width {:min-width min-width})
            (when min-height {:min-height min-height})
            (when max-width {:max-width max-width})
            (when max-height {:max-height max-height})
            (when justify (justify-style justify))
            (when align (align-style :align-items align))
            (when align-self (align-style :align-self align-self))
            (when padding {:padding padding})
            (when border {:border border})
            (when l-border {:border-left l-border})
            (when r-border {:border-right r-border})
            (when t-border {:border-top t-border})
            (when b-border {:border-bottom b-border})
            (when radius {:border-radius radius})
            ;; explicit bk-color wins; otherwise the debug flag paints lightblue
            (if bk-color
              {:background-color bk-color}
              (if debug {:background-color "lightblue"} {}))
            style)]
    [:div
     (merge
       {:class (str class-name "display-flex " class) :style s}
       attr)
     child]))
;; Argument contract for [gap]; checked at call time by validate-args-macro.
(def gap-args-desc
  [{:name :size :required true :type "string" :validate-fn string? :description "the length of the whitespace. Typically, an absolute CSS length like 10px or 10em, but can be a stretchy proportional amount like 2"}
   {:name :width :required false :type "string" :validate-fn string? :description "a CSS width style"}
   {:name :height :required false :type "string" :validate-fn string? :description "a CSS height style"}
   {:name :class :required false :type "string" :validate-fn string? :description "CSS class names, space separated"}
   {:name :style :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
(defn gap
  "Returns a component which produces a gap between children in a v-box/h-box along the main axis"
  [& {:keys [size width height class style attr]
      :as args}]
  {:pre [(validate-args-macro gap-args-desc args "gap")]}
  ;; Accumulate only the styles whose arguments were supplied; caller :style merges last.
  (let [gap-style (cond-> {}
                    size   (merge (flex-child-style size))
                    width  (assoc :width width)
                    height (assoc :height height)
                    debug  (assoc :background-color "chocolate")
                    style  (merge style))]
    [:div (merge {:class (str "rc-gap " class)
                  :style gap-style}
                 attr)]))
;; Argument contract for [line]; checked at call time by validate-args-macro.
(def line-args-desc
  [{:name :size :required false :default "1px" :type "string" :validate-fn string? :description "a CSS style for the thickness of the line. Usually px, % or em"}
   {:name :color :required false :default "lightgray" :type "string" :validate-fn string? :description "a CSS color"}
   {:name :class :required false :type "string" :validate-fn string? :description "CSS class names, space separated"}
   {:name :style :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
(defn line
  "Returns a component which produces a line between children in a v-box/h-box along the main axis.
  Specify size in pixels and a standard CSS color. Defaults to a 1px lightgray line"
  [& {:keys [size color class style attr]
      :or {size "1px" color "lightgray"}
      :as args}]
  {:pre [(validate-args-macro line-args-desc args "line")]}
  ;; "0 0 <size>" = no grow, no shrink, fixed basis; color fills the line.
  (let [flex (flex-child-style (str "0 0 " size))
        s    (merge flex {:background-color color} style)]
    [:div
     (merge {:class (str "rc-line " class) :style s}
            attr)]))
;; Argument spec for [[h-box]] (name / required? / default / type /
;; validation predicate / description), checked by validate-args-macro.
(def h-box-args-desc
  [{:name :children :required true :type "vector" :validate-fn sequential? :description "a vector (or list) of components"}
   {:name :size :required false :default "none" :type "string" :validate-fn string? :description [:span "equivalent to CSS style " [:span.bold "flex"] "." [:br] "Examples: " [:code "initial"] ", " [:code "auto"] ", " [:code "none"]", " [:code "100px"] ", " [:code "2"] " or a generic triple of " [:code "grow shrink basis"]]}
   {:name :width :required false :type "string" :validate-fn string? :description "a CSS width style"}
   {:name :height :required false :type "string" :validate-fn string? :description "a CSS height style"}
   {:name :min-width :required false :type "string" :validate-fn string? :description "a CSS width style. The minimum width to which the box can shrink"}
   {:name :min-height :required false :type "string" :validate-fn string? :description "a CSS height style. The minimum height to which the box can shrink"}
   {:name :max-width :required false :type "string" :validate-fn string? :description "a CSS width style. The maximum width to which the box can grow"}
   {:name :max-height :required false :type "string" :validate-fn string? :description "a CSS height style. The maximum height to which the box can grow"}
   {:name :justify :required false :default :start :type "keyword" :validate-fn justify-style? :description [:span "equivalent to CSS style " [:span.bold "justify-content"] "." [:br] "One of " justify-options-list]}
   {:name :align :required false :default :stretch :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-items"] "." [:br] " One of " align-options-list]}
   {:name :align-self :required false :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-self"] "." [:br] "Used when a child must override the parent's align-items setting."]}
   {:name :margin :required false :type "string" :validate-fn string? :description "a CSS margin style"}
   {:name :padding :required false :type "string" :validate-fn string? :description "a CSS padding style"}
   {:name :gap :required false :type "string" :validate-fn string? :description "the amount of whitespace to put between each child. Typically, an absolute CSS length like 10px or 10em, but can be a stretchy proportional amount like 2"}
   {:name :class :required false :type "string" :validate-fn string? :description "CSS class names, space separated"}
   {:name :style :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
(defn h-box
  "Returns hiccup which produces a horizontal box.
  Its primary role is to act as a container for components; it lays its children out from left to right.
  By default, it also acts as a child under its parent"
  [& {:keys [size width height min-width min-height max-width max-height justify align align-self margin padding gap children class style attr]
      :or {size "none" justify :start align :stretch}
      :as args}]
  {:pre [(validate-args-macro h-box-args-desc args "h-box")]}
  (let [s (merge
           (flex-flow-style "row nowrap")
           (flex-child-style size)
           (when width {:width width})
           (when height {:height height})
           (when min-width {:min-width min-width})
           (when min-height {:min-height min-height})
           (when max-width {:max-width max-width})
           (when max-height {:max-height max-height})
           (justify-style justify)
           (align-style :align-items align)
           (when align-self (align-style :align-self align-self))
           ;; :margin is documented in h-box-args-desc and destructured above
           ;; but was never applied to the style map — restore it.
           (when margin {:margin margin})
           (when padding {:padding padding})
           (when debug {:background-color "gold"})
           style)
        ;; Explicit :width on the gap works around a Chrome flexbox bug.
        ;; NOTE(review): the TODO comment and two code lines here were lost in
        ;; extraction; reconstructed from the upstream re-com source — verify.
        gap-form (when gap [re-com.box/gap
                            :size gap
                            :width gap])
        ;; When a gap is requested, weave a gap component between children.
        children (if gap
                   (interpose gap-form children)
                   children)]
    (into [:div
           (merge
            {:class (str "rc-h-box display-flex " class) :style s}
            attr)]
          children)))
Component : v - box ( debug color : )
;; Argument spec for [[v-box]] — identical in shape to h-box-args-desc;
;; checked by validate-args-macro in the :pre condition of [[v-box]].
(def v-box-args-desc
  [{:name :children :required true :type "vector" :validate-fn sequential? :description "a vector (or list) of components"}
   {:name :size :required false :default "none" :type "string" :validate-fn string? :description [:span "equivalent to CSS style " [:span.bold "flex"] "." [:br] "Examples: " [:code "initial"] ", " [:code "auto"] ", " [:code "none"]", " [:code "100px"] ", " [:code "2"] " or a generic triple of " [:code "grow shrink basis"]]}
   {:name :width :required false :type "string" :validate-fn string? :description "a CSS width style"}
   {:name :height :required false :type "string" :validate-fn string? :description "a CSS height style"}
   {:name :min-width :required false :type "string" :validate-fn string? :description "a CSS width style. The minimum width to which the box can shrink"}
   {:name :min-height :required false :type "string" :validate-fn string? :description "a CSS height style. The minimum height to which the box can shrink"}
   {:name :max-width :required false :type "string" :validate-fn string? :description "a CSS width style. The maximum width to which the box can grow"}
   {:name :max-height :required false :type "string" :validate-fn string? :description "a CSS height style. The maximum height to which the box can grow"}
   {:name :justify :required false :default :start :type "keyword" :validate-fn justify-style? :description [:span "equivalent to CSS style " [:span.bold "justify-content"] "." [:br] "One of " justify-options-list]}
   {:name :align :required false :default :stretch :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-items"] "." [:br] " One of " align-options-list]}
   {:name :align-self :required false :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-self"] "." [:br] "Used when a child must override the parent's align-items setting."]}
   {:name :margin :required false :type "string" :validate-fn string? :description "a CSS margin style"}
   {:name :padding :required false :type "string" :validate-fn string? :description "a CSS padding style"}
   {:name :gap :required false :type "string" :validate-fn string? :description "the amount of whitespace to put between each child. Typically, an absolute CSS length like 10px or 10em, but can be a stretchy proportional amount like 2"}
   {:name :class :required false :type "string" :validate-fn string? :description "CSS class names, space separated"}
   {:name :style :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
(defn v-box
  "Returns hiccup which produces a vertical box.
  Its primary role is to act as a container for components; it lays its children out from top to bottom.
  By default, it also acts as a child under its parent"
  [& {:keys [size width height min-width min-height max-width max-height justify align align-self margin padding gap children class style attr]
      :or {size "none" justify :start align :stretch}
      :as args}]
  {:pre [(validate-args-macro v-box-args-desc args "v-box")]}
  (let [s (merge
           (flex-flow-style "column nowrap")
           (flex-child-style size)
           (when width {:width width})
           (when height {:height height})
           (when min-width {:min-width min-width})
           (when min-height {:min-height min-height})
           (when max-width {:max-width max-width})
           (when max-height {:max-height max-height})
           (justify-style justify)
           (align-style :align-items align)
           (when align-self (align-style :align-self align-self))
           ;; :margin is documented in v-box-args-desc and destructured above
           ;; but was never applied to the style map — restore it.
           (when margin {:margin margin})
           (when padding {:padding padding})
           (when debug {:background-color "antiquewhite"})
           style)
        ;; Explicit :height on the gap works around a Chrome flexbox bug.
        ;; NOTE(review): the TODO comment and two code lines here were lost in
        ;; extraction; reconstructed from the upstream re-com source — verify.
        gap-form (when gap [re-com.box/gap
                            :size gap
                            :height gap])
        ;; When a gap is requested, weave a gap component between children.
        children (if gap
                   (interpose gap-form children)
                   children)]
    (into [:div
           (merge
            {:class (str "rc-v-box display-flex " class) :style s}
            attr)]
          children)))
;; Argument spec for [[box]]; checked by validate-args-macro in its :pre.
(def box-args-desc
  [{:name :child :required true :type "string | hiccup" :validate-fn string-or-hiccup? :description "a component (or string)"}
   {:name :size :required false :default "none" :type "string" :validate-fn string? :description [:span "equivalent to CSS style " [:span.bold "flex"] "." [:br] "Examples: " [:code "initial"] ", " [:code "auto"] ", " [:code "none"]", " [:code "100px"] ", " [:code "2"] " or a generic triple of " [:code "grow shrink basis"]]}
   {:name :width :required false :type "string" :validate-fn string? :description "a CSS width style"}
   {:name :height :required false :type "string" :validate-fn string? :description "a CSS height style"}
   {:name :min-width :required false :type "string" :validate-fn string? :description "a CSS width style. The minimum width to which the box can shrink"}
   {:name :min-height :required false :type "string" :validate-fn string? :description "a CSS height style. The minimum height to which the box can shrink"}
   {:name :max-width :required false :type "string" :validate-fn string? :description "a CSS width style. The maximum width to which the box can grow"}
   {:name :max-height :required false :type "string" :validate-fn string? :description "a CSS height style. The maximum height to which the box can grow"}
   {:name :justify :required false :default :start :type "keyword" :validate-fn justify-style? :description [:span "equivalent to CSS style " [:span.bold "justify-content"] "." [:br] "One of " justify-options-list]}
   {:name :align :required false :default :stretch :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-items"] "." [:br] " One of " align-options-list]}
   {:name :align-self :required false :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-self"] "." [:br] "Used when a child must override the parent's align-items setting."]}
   {:name :margin :required false :type "string" :validate-fn string? :description "a CSS margin style"}
   {:name :padding :required false :type "string" :validate-fn string? :description "a CSS padding style"}
   {:name :class :required false :type "string" :validate-fn string? :description "CSS class names, space separated"}
   {:name :style :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
(defn box
  "Returns hiccup which produces a box, which is generally used as a child of a v-box or an h-box.
  By default, it also acts as a container for further child components, or another h-box or v-box"
  [& {:keys [size width height min-width min-height max-width max-height justify align align-self margin padding child class style attr]
      :or {size "none"}
      :as args}]
  {:pre [(validate-args-macro box-args-desc args "box")]}
  ;; Pure pass-through: box-base builds the style map and hiccup shared by
  ;; all box-like components; "rc-box " tags the emitted div's CSS class.
  (box-base :size size
            :width width
            :height height
            :min-width min-width
            :min-height min-height
            :max-width max-width
            :max-height max-height
            :justify justify
            :align align
            :align-self align-self
            :margin margin
            :padding padding
            :child child
            :class-name "rc-box "
            :class class
            :style style
            :attr attr))
;; Argument spec for [[scroller]]; checked by validate-args-macro in its :pre.
(def scroller-args-desc
  [{:name :child :required true :type "string | hiccup" :validate-fn string-or-hiccup? :description "a component (or string)"}
   {:name :size :required false :default "auto" :type "string" :validate-fn string? :description [:span "equivalent to CSS style " [:span.bold "flex"] "." [:br] "Examples: " [:code "initial"] ", " [:code "auto"] ", " [:code "none"]", " [:code "100px"] ", " [:code "2"] " or a generic triple of " [:code "grow shrink basis"]]}
   {:name :scroll :required false :default "auto" :type "keyword" :validate-fn scroll-style? :description [:span "Sets both h-scroll and v-scroll at once: " [:br]
                                                                                                          [:code ":auto"] ": only show scroll bar(s) if the content is larger than the scroller" [:br]
                                                                                                          [:code ":on"] ": always show scroll bars" [:br]
                                                                                                          [:code ":off"] ": never show scroll bar(s). Content which is not in the bounds of the scroller can not be seen" [:br]
                                                                                                          [:code ":spill"] ": never show scroll bar(s). Content which is not in the bounds of the scroller spills all over the place"]}
   {:name :h-scroll :required false :type "keyword" :validate-fn scroll-style? :description [:span "see " [:code ":scroll"] ". Overrides that setting"]}
   {:name :v-scroll :required false :type "keyword" :validate-fn scroll-style? :description [:span "see " [:code ":scroll"] ". Overrides that setting"]}
   {:name :width :required false :type "string" :validate-fn string? :description "initial width"}
   {:name :height :required false :type "string" :validate-fn string? :description "initial height"}
   {:name :min-width :required false :type "string" :validate-fn string? :description "a CSS width style. The minimum width to which the box can shrink"}
   {:name :min-height :required false :type "string" :validate-fn string? :description "a CSS height style. The minimum height to which the box can shrink"}
   {:name :max-width :required false :type "string" :validate-fn string? :description "a CSS width style. The maximum width to which the box can grow"}
   {:name :max-height :required false :type "string" :validate-fn string? :description "a CSS height style. The maximum height to which the box can grow"}
   {:name :justify :required false :default :start :type "keyword" :validate-fn justify-style? :description [:span "equivalent to CSS style " [:span.bold "justify-content"] "." [:br] "One of " justify-options-list]}
   {:name :align :required false :default :stretch :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-items"] "." [:br] " One of " align-options-list]}
   {:name :align-self :required false :type "keyword" :validate-fn align-style? :description [:span "equivalent to CSS style " [:span.bold "align-self"] "." [:br] "Used when a child must override the parent's align-items setting."]}
   {:name :margin :required false :type "string" :validate-fn string? :description "a CSS margin style"}
   {:name :padding :required false :type "string" :validate-fn string? :description "a CSS padding style"}
   {:name :class :required false :type "string" :validate-fn string? :description "CSS class names, space separated"}
   {:name :style :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
(defn scroller
  "Returns hiccup which produces a scroller component — the way scroll bars are
  added to boxes, in favour of adding scroll attributes directly to the boxes.
  IMPORTANT: because this component becomes the flex child in place of the
  component it wraps, you must copy the size attributes onto this component.
  Three scroll settings (:scroll sets both axes; :h-scroll/:v-scroll override it):
    :auto  [DEFAULT] show scroll bar(s) only if content is larger than the scroller
    :on    always show scroll bar(s)
    :off   never show scroll bar(s); overflowing content cannot be seen
    :spill never show scroll bar(s); overflowing content spills all over the place"
  [& {:keys [size scroll h-scroll v-scroll width height min-width min-height max-width max-height justify align align-self margin padding child class style attr]
      :or {size "auto"}
      :as args}]
  {:pre [(validate-args-macro scroller-args-desc args "scroller")]}
  ;; Default :scroll to :auto only when neither per-axis override is given;
  ;; an explicit :scroll value is always left untouched.
  (let [scroll (if (nil? scroll)
                 (when (and (nil? v-scroll) (nil? h-scroll)) :auto)
                 scroll)]
    (box-base :size size
              :scroll scroll
              :h-scroll h-scroll
              :v-scroll v-scroll
              :width width
              :height height
              :min-width min-width
              :min-height min-height
              :max-width max-width
              :max-height max-height
              :justify justify
              :align align
              :align-self align-self
              :margin margin
              :padding padding
              :child child
              :class-name "rc-scroller "
              :class class
              :style style
              :attr attr)))
;; Argument spec for [[border]]; checked by validate-args-macro in its :pre.
(def border-args-desc
  [{:name :child :required true :type "string | hiccup" :validate-fn string-or-hiccup? :description "a component (or string)"}
   {:name :border :required false :default "1px solid lightgrey" :type "string" :validate-fn string? :description "a CSS border style. A convenience to describe all borders in one parameter"}
   {:name :l-border :required false :type "string" :validate-fn string? :description [:span "a CSS border style for the left border. Overrides " [:code ":border"]]}
   {:name :r-border :required false :type "string" :validate-fn string? :description [:span "a CSS border style for the right border. Overrides " [:code ":border"]]}
   {:name :t-border :required false :type "string" :validate-fn string? :description [:span "a CSS border style for the top border. Overrides " [:code ":border"]]}
   {:name :b-border :required false :type "string" :validate-fn string? :description [:span "a CSS border style for the bottom. Overrides " [:code ":border"]]}
   {:name :radius :required false :type "string" :validate-fn string? :description "a CSS radius style eg.\"2px\""}
   {:name :size :required false :default "none" :type "string" :validate-fn string? :description [:span "equivalent to CSS style " [:span.bold "flex"] "." [:br] "Examples: " [:code "initial"] ", " [:code "auto"] ", " [:code "none"]", " [:code "100px"] ", " [:code "2"] " or a generic triple of " [:code "grow shrink basis"]]}
   {:name :width :required false :type "string" :validate-fn string? :description "a CSS style describing the initial width"}
   {:name :height :required false :type "string" :validate-fn string? :description "a CSS style describing the initial height"}
   {:name :min-width :required false :type "string" :validate-fn string? :description "a CSS width style. The minimum width to which the box can shrink"}
   {:name :min-height :required false :type "string" :validate-fn string? :description "a CSS height style. The minimum height to which the box can shrink"}
   {:name :max-width :required false :type "string" :validate-fn string? :description "a CSS width style. The maximum width to which the box can grow"}
   {:name :max-height :required false :type "string" :validate-fn string? :description "a CSS height style. The maximum height to which the box can grow"}
   {:name :margin :required false :type "string" :validate-fn string? :description "a CSS margin style"}
   {:name :padding :required false :type "string" :validate-fn string? :description "a CSS padding style"}
   {:name :class :required false :type "string" :validate-fn string? :description "CSS class names, space separated"}
   {:name :style :required false :type "CSS style map" :validate-fn css-style? :description "CSS styles to add or override"}
   {:name :attr :required false :type "HTML attr map" :validate-fn html-attr? :description [:span "HTML attributes, like " [:code ":on-mouse-move"] [:br] "No " [:code ":class"] " or " [:code ":style"] "allowed"]}])
;; NOTE(review): the stray " |" after the closing parens below appears to be a
;; dataset field separator left over from extraction, not part of the code.
(defn border
  "Returns hiccup which produces a border component.
  This is the way borders are added to boxes, in favour of adding the border attributes directly to the boxes themselves.
  border property syntax: '<border-width> || <border-style> || <color>'
  - border-width: thin, medium, thick or standard CSS size (e.g. 2px, 0.5em)
  - border-style: none, hidden, dotted, dashed, solid, double, groove, ridge, inset, outset
  - color: standard CSS color (e.g. grey #88ffee)"
  [& {:keys [size width height min-width min-height max-width max-height margin padding border l-border r-border t-border b-border radius child class style attr]
      :or {size "none"}
      :as args}]
  {:pre [(validate-args-macro border-args-desc args "border")]}
  ;; When no border parameter of any kind is supplied, fall back to the
  ;; documented default of a thin light-grey border on all four sides.
  (let [no-border (every? nil? [border l-border r-border t-border b-border])
        default-border "1px solid lightgrey"]
    (box-base :size size
              :width width
              :height height
              :min-width min-width
              :min-height min-height
              :max-width max-width
              :max-height max-height
              :margin margin
              :padding padding
              :border (if no-border default-border border)
              :l-border l-border
              :r-border r-border
              :t-border t-border
              :b-border b-border
              :radius radius
              :child child
              :class-name "rc-border "
              :class class
              :style style
              :attr attr))) |
b49d75d16b83ab19619a7134f86ed3cf9f1b5cfcf761286c572893f4f6bf1430 | paypal/seazme-sources | es.clj | (ns seazme.common.es
(:require
[clojurewerkz.elastisch.rest :as esr]
[clojurewerkz.elastisch.rest.index :as esi]
[clojurewerkz.elastisch.rest.document :as esd])
)
(defn mk-connection [profile & {:keys [debug debug-body] :or {debug false debug-body false}}]
  ;; Connect to the Elasticsearch host named in the profile, with generous
  ;; one-minute timeouts and TLS verification disabled (:insecure? true).
  (let [opts {:socket-timeout 60000
              :conn-timeout 60000
              :insecure? true
              :basic-auth (profile :basic-auth)
              :debug debug
              :debug-body debug-body}]
    (esr/connect (profile :host) opts)))
;; Shorthand field mapping: "s"tring, "n"ot "a"nalyzed (exact-match keyword field).
(def sna {:type "string" :index "not_analyzed"})
(defn reinit! [{:keys [index kind]} conn]
  ;; Create `index` with a single mapping type `kind`: mostly exact-match
  ;; string fields, plus snowball-analyzed :text and an integer :text-size.
  (let [props {:url sna
               :kind-name sna
               :bu-name sna
               :instance-name sna
               :level0 sna
               :level1 sna
               :parent-id sna
               :last-author sna
               :last-ts sna
               :text {:type "string" :analyzer "snowball"}
               :text-size {:type "integer"}}]
    (esi/create conn index {:mappings {kind {:properties props}}})))
(defn reinit-datasources! [conn]
  ;; Drop and recreate the "datasources" index; returns the vector of the
  ;; two ES responses [delete-result create-result].
  (let [indx "datasources"
        props {:current_status sna
               :name sna
               :owners sna
               :business_unit sna
               :last_updated_time {:type "integer"}
               :tag sna
               :notes sna}
        mappings {"datasources" {:properties props}}]
    [(esi/delete conn indx)
     (esi/create conn indx {:mappings mappings})]))
;; NOTE(review): the two bare "TODO can we create two same docs ? test it"
;; lines below were ";;" comments in the original file; extraction stripped
;; their markers, and the body of put-doc! was lost entirely, leaving the
;; defn unbalanced. Restore from the upstream seazme-sources repo before use.
TODO can we create two same docs ? test it
(defn put-doc![conn indx ttype doc]
TODO can we create two same docs ? test it
;; exists? reports whether the given index is present on the cluster.
(defn exists?[conn indx]
  (esi/exists? conn indx))
| null | https://raw.githubusercontent.com/paypal/seazme-sources/57e5b7579f5e475a908b2318a00549dd131f7745/src/main/clojure/seazme/common/es.clj | clojure | (ns seazme.common.es
(:require
[clojurewerkz.elastisch.rest :as esr]
[clojurewerkz.elastisch.rest.index :as esi]
[clojurewerkz.elastisch.rest.document :as esd])
)
(defn mk-connection[profile & {:keys [debug debug-body] :or {debug false debug-body false}}]
(esr/connect (profile :host) {:socket-timeout 60000 :conn-timeout 60000 :insecure? true :basic-auth (profile :basic-auth) :debug debug :debug-body debug-body}))
(def sna {:type "string" :index "not_analyzed"})
(defn reinit![{:keys [index kind]} conn]
(let [mapping-types {kind
{:properties
{:url sna
:kind-name sna
:bu-name sna
:instance-name sna
:level0 sna
:level1 sna
:parent-id sna
:last-author sna
:last-ts sna
:text {:type "string" :analyzer "snowball"}
:text-size {:type "integer"}
}}}]
(esi/create conn index {:mappings mapping-types})))
(defn reinit-datasources![conn]
(let [indx "datasources"
mapping-types {"datasources"
{:properties
{:current_status sna
:name sna
:owners sna
:business_unit sna
:last_updated_time {:type "integer"}
:tag sna
:notes sna
}}}]
[(esi/delete conn indx)
(esi/create conn indx {:mappings mapping-types})]))
TODO can we create two same docs ? test it
(defn put-doc![conn indx ttype doc]
TODO can we create two same docs ? test it
(defn exists?[conn indx]
(esi/exists? conn indx))
| |
9601ce45b79bcb37884c74d15c5016ae4b30a74af990a927e7a47ca3df699ab0 | ericfinster/opetopictt | lexer.ml | (*****************************************************************************)
(* *)
(* *)
(*****************************************************************************)
open Syntax
open Parser
(* Character classes for the sedlex-based lexer below. *)
let space = [%sedlex.regexp? ' ' | '\t' | '\r']
let digit = [%sedlex.regexp? '0'..'9']
let number = [%sedlex.regexp? Plus digit]
(* lower lambda is reserved ... *)
let upper = [%sedlex.regexp? 'A'..'Z']
let lower = [%sedlex.regexp? 'a'..'z']
(* Greek letter ranges by Unicode code point; lowercase skips U+03BB (λ),
   which is lexed as LAMBDA instead of an identifier character. *)
let greek_lower = [%sedlex.regexp? 0x3B1 .. 0x3BA | 0x3BC .. 0x3C9]
let greek_upper = [%sedlex.regexp? 0x391 .. 0x3A9]
let subscripts = [%sedlex.regexp? 0x2080 .. 0x208E | 0x2090 .. 0x209C ]
let letter = [%sedlex.regexp? lower|upper|greek_lower|greek_upper]
(* Identifiers start with a letter; module names are CamelCase ASCII. *)
let ident = [%sedlex.regexp? letter, Star (letter | subscripts | '_' | '-' | digit)]
let module_name = [%sedlex.regexp? upper, Star(lower | upper)]
(* Raised with an optional (line, column) and a message. *)
exception Lexing_error of ((int * int) option * string)
(* Current start position of [lexbuf] as a 1-based (line, column) pair. *)
let get_lexing_position lexbuf =
  let (start_pos, _) = Sedlexing.lexing_positions lexbuf in
  let line = start_pos.Lexing.pos_lnum
  and column = start_pos.Lexing.pos_cnum - start_pos.Lexing.pos_bol + 1 in
  (line, column)
(* Raise Lexing_error carrying the current position of [lexbuf] and [msg]. *)
let lexing_error lexbuf msg =
  let pos = get_lexing_position lexbuf in
  raise (Lexing_error (Some pos, msg))
(* Parse a dotted qualified name: zero or more "Module." prefixes (building
   nested Qual nodes) followed by a plain identifier (a Name leaf).
   Any other input raises Lexing_error via lexing_error. *)
let rec qname buf =
  match%sedlex buf with
  | module_name , '.' ->
    (* Strip the trailing '.' from the matched lexeme, then recurse for the rest. *)
    let mndot = Sedlexing.Utf8.lexeme buf in
    let mn = String.sub mndot 0 (String.length mndot - 1) in
    let qn = qname buf in
    Qual (mn,qn)
  | ident -> Name (Sedlexing.Utf8.lexeme buf)
  | _ -> lexing_error buf (Printf.sprintf "Unexpected character: %s" (Sedlexing.Utf8.lexeme buf))
(* Main tokenizer. Keywords and punctuation map to their Parser tokens;
   several symbols accept a Unicode and an ASCII spelling (→/"->", λ/"\\",
   ×). Whitespace, "#..." line comments and newlines are skipped by
   recursing. A CamelCase prefix ending in '.' starts a qualified name. *)
let rec token buf =
  match%sedlex buf with
  | "import" -> IMPORT
  | "shape" -> SHAPE
  | "def" -> DEF
  | "let" -> LET
  | "in" -> IN
  | "module" -> MODULE
  | "where" -> WHERE
  | "end" -> END
  | 0x2192 -> ARROW
  | "->" -> ARROW
  | "(" -> LPAR
  | ")" -> RPAR
  | "{" -> LBR
  | "}" -> RBR
  | "[" -> LBRKT
  | "]" -> RBRKT
  | "@" -> AT
  | ":" -> COLON
  | "=" -> EQUAL
  | "\\" -> LAMBDA
  | 0x03bb -> LAMBDA
  | "U" -> TYPE
  | "lf" -> LF
  | "nd" -> ND
  | "tt" -> UNIT
  | "|" -> VBAR
  | 0xd7 -> TIMES
  | "," -> COMMA
  | "fst" -> FST
  | "snd" -> SND
  (* tokens for commands *)
  | "quit" -> QUIT
  | "infer" -> INFER
  | "normalize" -> NORMALIZE
  | "assume" -> ASSUME
  | "load" -> LOAD
  | ";" -> ENDCMD
  | ident -> IDENT (Sedlexing.Utf8.lexeme buf)
  | module_name , '.' ->
    let mndot = Sedlexing.Utf8.lexeme buf in
    let mn = String.sub mndot 0 (String.length mndot - 1) in
    let qn = qname buf in
    QNAME (Qual (mn,qn))
  | Plus space -> token buf
  | "#",Star (Compl '\n') -> token buf
  | "\n" -> token buf
  | eof -> EOF
  | _ -> lexing_error buf (Printf.sprintf "Unexpected character: %s" (Sedlexing.Utf8.lexeme buf))
***************************************************************************
lower lambda is reserved ...
tokens for commands |
open Syntax
open Parser
let space = [%sedlex.regexp? ' ' | '\t' | '\r']
let digit = [%sedlex.regexp? '0'..'9']
let number = [%sedlex.regexp? Plus digit]
let upper = [%sedlex.regexp? 'A'..'Z']
let lower = [%sedlex.regexp? 'a'..'z']
let greek_lower = [%sedlex.regexp? 0x3B1 .. 0x3BA | 0x3BC .. 0x3C9]
let greek_upper = [%sedlex.regexp? 0x391 .. 0x3A9]
let subscripts = [%sedlex.regexp? 0x2080 .. 0x208E | 0x2090 .. 0x209C ]
let letter = [%sedlex.regexp? lower|upper|greek_lower|greek_upper]
let ident = [%sedlex.regexp? letter, Star (letter | subscripts | '_' | '-' | digit)]
let module_name = [%sedlex.regexp? upper, Star(lower | upper)]
exception Lexing_error of ((int * int) option * string)
let get_lexing_position lexbuf =
let (p,_) = Sedlexing.lexing_positions lexbuf in
let line_number = p.Lexing.pos_lnum in
let column = p.Lexing.pos_cnum - p.Lexing.pos_bol + 1 in
(line_number, column)
let lexing_error lexbuf msg =
let line, column = get_lexing_position lexbuf in
raise (Lexing_error (Some (line, column), msg))
let rec qname buf =
match%sedlex buf with
| module_name , '.' ->
let mndot = Sedlexing.Utf8.lexeme buf in
let mn = String.sub mndot 0 (String.length mndot - 1) in
let qn = qname buf in
Qual (mn,qn)
| ident -> Name (Sedlexing.Utf8.lexeme buf)
| _ -> lexing_error buf (Printf.sprintf "Unexpected character: %s" (Sedlexing.Utf8.lexeme buf))
let rec token buf =
match%sedlex buf with
| "import" -> IMPORT
| "shape" -> SHAPE
| "def" -> DEF
| "let" -> LET
| "in" -> IN
| "module" -> MODULE
| "where" -> WHERE
| "end" -> END
| 0x2192 -> ARROW
| "->" -> ARROW
| "(" -> LPAR
| ")" -> RPAR
| "{" -> LBR
| "}" -> RBR
| "[" -> LBRKT
| "]" -> RBRKT
| "@" -> AT
| ":" -> COLON
| "=" -> EQUAL
| "\\" -> LAMBDA
| 0x03bb -> LAMBDA
| "U" -> TYPE
| "lf" -> LF
| "nd" -> ND
| "tt" -> UNIT
| "|" -> VBAR
| 0xd7 -> TIMES
| "," -> COMMA
| "fst" -> FST
| "snd" -> SND
| "quit" -> QUIT
| "infer" -> INFER
| "normalize" -> NORMALIZE
| "assume" -> ASSUME
| "load" -> LOAD
| ";" -> ENDCMD
| ident -> IDENT (Sedlexing.Utf8.lexeme buf)
| module_name , '.' ->
let mndot = Sedlexing.Utf8.lexeme buf in
let mn = String.sub mndot 0 (String.length mndot - 1) in
let qn = qname buf in
QNAME (Qual (mn,qn))
| Plus space -> token buf
| "#",Star (Compl '\n') -> token buf
| "\n" -> token buf
| eof -> EOF
| _ -> lexing_error buf (Printf.sprintf "Unexpected character: %s" (Sedlexing.Utf8.lexeme buf))
|
28937d6a22e6903d02f64aecbc570cf6ec38a73bf6c53f6bcb9ae88910db88ea | TristeFigure/shuriken | string_test.clj | (ns shuriken.string-test
(:require [clojure.test :refer :all]
[shuriken.core :refer :all]))
;; Tests for shuriken.core's string helpers (words, tabulate, truncate,
;; and output suppression).
(deftest test-words
  ;; words splits on any whitespace (including newlines) and keeps
  ;; non-ASCII characters intact.
  (is (= ["abc" "def" "xyz" "12'uçé"]
         (words "abc def \n xyz 12'uçé"))))
(deftest test-tabulate
  ;; tabulate prefixes every line; a trailing newline in the input does not
  ;; yield a trailing empty entry.
  (is (= (str "- aaa\n"
              "- aaa\n"
              "- aaa") ;; terminal namespaces are dropped
         (tabulate
          (str "aaa\n"
               "aaa\n"
               "aaa\n")
          "- "))))
(deftest test-truncate
  ;; truncate appends "..." only when the string exceeds the limit.
  (is (= "abc..." (truncate "abcd" 3)))
  (is (= "abc" (truncate "abc" 3)))
  (is (= "a" (truncate "a" 3))))
(deftest test-no-print
  ;; Rebinding *out* swallows printed output; the body's value is returned.
  (is (= 2
         (binding [*out* (new java.io.StringWriter)]
           (println "something")
           (inc 1)))))
| null | https://raw.githubusercontent.com/TristeFigure/shuriken/cd36dd2a4005c85260125d89d5a3f475d248e6e4/test/shuriken/string_test.clj | clojure | terminal namespaces are dropped | (ns shuriken.string-test
(:require [clojure.test :refer :all]
[shuriken.core :refer :all]))
(deftest test-words
(is (= ["abc" "def" "xyz" "12'uçé"]
(words "abc def \n xyz 12'uçé"))))
(deftest test-tabulate
(is (= (str "- aaa\n"
"- aaa\n"
(tabulate
(str "aaa\n"
"aaa\n"
"aaa\n")
"- "))))
(deftest test-truncate
(is (= "abc..." (truncate "abcd" 3)))
(is (= "abc" (truncate "abc" 3)))
(is (= "a" (truncate "a" 3))))
(deftest test-no-print
(is (= 2
(binding [*out* (new java.io.StringWriter)]
(println "something")
(inc 1)))))
|
7bcf51270ec7ca6829de1ed89f4af310dbc74a15236cdbe21aa5f3795789dd55 | blindglobe/clocc | symbol10.lisp | ;;; based on v1.5 -*- mode: lisp -*-
;; Regression tests are expressed as (check-for-bug <tag> <form> <expected> [msg]).
(in-package :cl-user)
(check-for-bug :symbol10-legacy-4
  (progn (in-package :cl-user) nil)
  nil
  "in-package expects a 'string designator'
 this is or a character, a symbol or a string.")
;; test of the new value cell
;; NOTE(review): the bare line below was a comment ("1. ungebundenes symbol",
;; i.e. "1. unbound symbol") whose ";;" marker was stripped by extraction.
1 . ungebundenes symbol
;; NOTE(review): this block was damaged by comment-stripping during
;; extraction — the bare tokens "specvar" and "like clisp" below were the
;; tails of reader-conditional expressions plus ";" comments in the original
;; (e.g. "(eq ... sys::specdecl) ; specvar"), so the form as it stands does
;; not read correctly. Restore from the upstream clocc test suite.
;; testvar reports, for a symbol: boundness, value, special-variable-ness
;; (per implementation), function/macro/special-form status, plist presence
;; and the i1/i2/i3 indicators.
(check-for-bug :symbol10-legacy-14
 (defun testvar (var)
   (list (boundp var) ; bound?
         (if (boundp var)
             (symbol-value var)
           nil) ; value/nil
         #+xcl
         (eq (sys::%p-get-cdr var 0)
         specvar
         #+clisp
         (and (sys::special-variable-p var)
         specvar
         #+allegro
         (and (not (constantp var))
              (eval `(let ((,var (list nil)))
                       (and (boundp ',var)
                            (eq (symbol-value ',var)
                                ,var)))))
         #+cmu
         (eq (ext:info variable kind var)
         like clisp
         specvar
         #+ecls
         (si::specialp var)
         #+sbcl
         (eq (sb-int::info variable kind var)
         like clisp
         (and (fboundp var) t) ; functional property?
         (and (fboundp var) (macro-function var) t) ; macro?
         (and (fboundp var)
              (special-operator-p var)
              t) ; special form?
         #-(or clisp ecl)
         (and (symbol-plist var) t) ; plist?
         #+(or clisp ecl)
         (and (or (get var 'i1)
                  (get var 'i2)
                  (get var 'i3))
              t) ; plist?
         (get var 'i1) ; i1
         (get var 'i2) ; i2
         (get var 'i3) ; i3
         ) )
 testvar)
;; clrvar resets a symbol to a pristine state: unbound, no function, empty
;; plist, and (where the implementation tracks it) no longer globally special.
(check-for-bug :symbol10-legacy-59
 (defun clrvar (var)
   #+xcl
   (subr 84 ;sys::%p-set-cdr-content
         var 0 (sys::%p-get-content 'sys::%void-value 0) 0)
   #-xcl
   (progn (makunbound var) (fmakunbound var)
          (setf (symbol-plist var) '()))
   #+allegro
   (setf (excl::symbol-bit var 'excl::.globally-special.) nil)
   #+cmu
   (setf (ext:info variable kind var) ':global)
   #+sbcl
   (setf (sb-int::info variable kind var) ':global)
   var)
 clrvar)
#+(or xcl clisp allegro cmu sbcl)
(check-for-bug :symbol10-legacy-77
(progn (setf (symbol-function 'setf-get)
(symbol-function #+xcl 'sys::setf-get
#+clisp 'sys::%put
#+allegro 'excl::.inv-get
#+(or cmu sbcl) 'cl::%put)) t)
t)
begin breitentest
(check-for-bug :symbol10-legacy-87
(clrvar 'v1)
v1)
;;;; value - umbinden - macro - umbinden - props - umbinden
;;; value
(check-for-bug :symbol10-legacy-95
(testvar 'v1)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-100
(setq v1 'val)
val)
(check-for-bug :symbol10-legacy-104
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val nil nil nil nil nil nil nil nil nil))
;;; umbinden
(check-for-bug :symbol10-legacy-111
(makunbound 'v1)
v1)
(check-for-bug :symbol10-legacy-115
(testvar 'v1)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-120
(setq v1 'val2)
val2)
(check-for-bug :symbol10-legacy-124
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val2 nil nil nil nil nil nil nil nil nil))
;;; macro
(check-for-bug :symbol10-legacy-131
(defmacro v1 (x) (list 'quote x))
v1)
(check-for-bug :symbol10-legacy-135
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val2 nil nil t t nil nil nil nil nil))
;;; umbinden
(check-for-bug :symbol10-legacy-142
(fmakunbound 'v1)
v1)
(check-for-bug :symbol10-legacy-146
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val2 nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-151
(defmacro v1 (x) (list 'quote (list x x)))
v1)
(check-for-bug :symbol10-legacy-155
(v1 33)
(33 33))
(check-for-bug :symbol10-legacy-159
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val2 nil nil t t nil nil nil nil nil))
(check-for-bug :symbol10-legacy-164
(makunbound 'v1)
v1)
(check-for-bug :symbol10-legacy-168
(testvar 'v1)
geb val konst svar func mac spec plist i3
(nil nil nil nil t t nil nil nil nil nil))
(check-for-bug :symbol10-legacy-173
(setq v1 'val3)
val3)
(check-for-bug :symbol10-legacy-177
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val3 nil nil t t nil nil nil nil nil))
;;; props
(check-for-bug :symbol10-legacy-184
(setf-get 'v1 'i1 11)
11)
(check-for-bug :symbol10-legacy-188
(setf-get 'v1 'i2 22)
22)
(check-for-bug :symbol10-legacy-192
(setf-get 'v1 'i3 33)
33)
(check-for-bug :symbol10-legacy-196
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val3 nil nil t t nil t 11 22 33))
;;; umbinden
(check-for-bug :symbol10-legacy-203
(not (null (remprop 'v1 'i2)))
t)
(check-for-bug :symbol10-legacy-206
(not (null (remprop 'v1 'i1)))
t)
(check-for-bug :symbol10-legacy-209
(not (null (remprop 'v1 'i3)))
t)
(check-for-bug :symbol10-legacy-212
(fmakunbound 'v1)
v1)
(check-for-bug :symbol10-legacy-215
(makunbound 'v1)
v1)
(check-for-bug :symbol10-legacy-219
(testvar 'v1)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-224
(setf-get 'v1 'i1 99)
99)
(check-for-bug :symbol10-legacy-227
(defmacro v1 (x) (list 'quote (list x x x)))
v1)
(check-for-bug :symbol10-legacy-230
(v1 a)
(a a a))
(check-for-bug :symbol10-legacy-233
(setq v1 'val4)
val4)
(check-for-bug :symbol10-legacy-237
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val4 nil nil t t nil t 99 nil nil))
--- ende -----
(check-for-bug :symbol10-legacy-244
(clrvar 'v2)
v2)
;;; specvar - props - rebind - function
(check-for-bug :symbol10-legacy-250
(defvar v2 'v2a)
v2)
(check-for-bug :symbol10-legacy-254
(testvar 'v2)
geb val konst svar func mac spec plist i3
(t v2a nil t nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-259
(setf-get 'v2 'i3 33)
33)
(check-for-bug :symbol10-legacy-262
(setf-get 'v2 'i2 22)
22)
(check-for-bug :symbol10-legacy-265
(setf-get 'v2 'i1 11)
11)
(check-for-bug :symbol10-legacy-269
(testvar 'v2)
geb val konst svar func mac spec plist i3
(t v2a nil t nil nil nil t 11 22 33))
;;; rebind
(check-for-bug :symbol10-legacy-276
(makunbound 'v2)
v2)
(check-for-bug :symbol10-legacy-279
(not (null (remprop 'v2 'i1)))
t)
(check-for-bug :symbol10-legacy-282
(not (null (remprop 'v2 'i2)))
t)
(check-for-bug :symbol10-legacy-285
(not (null (remprop 'v2 'i3)))
t)
(check-for-bug :symbol10-legacy-289
(testvar 'v2)
geb val konst svar func mac spec plist i3
#+xcl
(nil nil nil nil nil nil nil nil nil nil nil)
#-xcl
(nil nil nil t nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-297
(defvar v2 'v2b)
v2)
(check-for-bug :symbol10-legacy-300
(setf-get 'v2 'i1 111)
111)
(check-for-bug :symbol10-legacy-303
(setf-get 'v2 'i2 222)
222)
(check-for-bug :symbol10-legacy-306
(setf-get 'v2 'i3 333)
333)
(check-for-bug :symbol10-legacy-310
(testvar 'v2)
geb val konst svar func mac spec plist i3
(t v2b nil t nil nil nil t 111 222 333))
;;; function
(check-for-bug :symbol10-legacy-317
(defun v2 (x) (list x x))
v2)
(check-for-bug :symbol10-legacy-320
(v2 44)
(44 44))
(check-for-bug :symbol10-legacy-324
(testvar 'v2)
geb val konst svar func mac spec plist i3
(t v2b nil t t nil nil t 111 222 333 ))
(check-for-bug :symbol10-legacy-330
(clrvar 'v3)
v3)
;;;;; function - con - rebind - prop
;;; function
(check-for-bug :symbol10-legacy-338
(defun v3 (x y) (list x y))
v3)
(check-for-bug :symbol10-legacy-342
(testvar 'v3)
geb val konst svar func mac spec plist i3
(nil nil nil nil t nil nil nil nil nil nil))
;;; constant
(check-for-bug :symbol10-legacy-349
(defconstant v3 99)
v3)
(check-for-bug :symbol10-legacy-353
v3
99)
(check-for-bug :symbol10-legacy-356
(v3 'a 'b)
(a b))
(check-for-bug :symbol10-legacy-360
(testvar 'v3)
geb val konst svar func mac spec plist i3
(t 99 t nil t nil nil nil nil nil nil))
;;; rebind
(check-for-bug :symbol10-legacy-367
(makunbound 'v3)
#+(or xcl allegro cmu sbcl) v3
#+(or clisp ecls) error
#-(or xcl allegro cmu sbcl clisp ecls) unknown)
(check-for-bug :symbol10-legacy-372
(fmakunbound 'v3)
v3)
#+xcl
(check-for-bug :symbol10-legacy-377
(testvar 'v3)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-382
(defconstant v3 999)
v3)
(check-for-bug :symbol10-legacy-386
(defun v3 (x) (list x x))
v3)
(check-for-bug :symbol10-legacy-390
(v3 'c)
(c c))
(check-for-bug :symbol10-legacy-394
v3
999)
(check-for-bug :symbol10-legacy-398
(testvar 'v3)
geb val konst svar func mac spec plist i3
(t 999 t nil t nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-405
(defparameter var33)
error)
(check-for-bug :symbol10-legacy-409
(defparameter var3 99)
var3)
(check-for-bug :symbol10-legacy-413
var3
99)
(check-for-bug :symbol10-legacy-417
(testvar 'var3)
geb val konst svar func mac spec plist i3
(t 99 nil t nil nil nil nil nil nil nil))
;;; rebind
(check-for-bug :symbol10-legacy-424
(makunbound 'var3)
var3)
(check-for-bug :symbol10-legacy-428
(testvar 'var3)
geb val konst svar func mac spec plist i3
#+xcl
(nil nil nil nil nil nil nil nil nil nil nil)
#-xcl
(nil nil nil t nil nil nil nil nil nil nil))
;;; props
(check-for-bug :symbol10-legacy-438
(setf-get 'v3 'i2 222)
222)
(check-for-bug :symbol10-legacy-442
(setf-get 'v3 'i1 111)
111)
(check-for-bug :symbol10-legacy-446
(testvar 'v3)
geb val konst svar func mac spec plist i3
(t 999 t nil t nil nil t 111 222 nil))
(check-for-bug :symbol10-legacy-452
(clrvar 'v4)
v4)
;;;; function - rebind - prop - rebind - specvar
(check-for-bug :symbol10-legacy-458
(defun v4 (x) x)
v4)
(check-for-bug :symbol10-legacy-462
(v4 55)
55)
(check-for-bug :symbol10-legacy-466
(testvar 'v4)
geb val konst svar func mac spec plist i3
(nil nil nil nil t nil nil nil nil nil nil))
;;; rebind
(check-for-bug :symbol10-legacy-473
(fmakunbound 'v4)
v4)
(check-for-bug :symbol10-legacy-477
(testvar 'v4)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-482
(defun v4 (x) (list x))
v4)
(check-for-bug :symbol10-legacy-486
(v4 88)
(88))
(check-for-bug :symbol10-legacy-490
(testvar 'v4)
geb val konst svar func mac spec plist i3
(nil nil nil nil t nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-495
(setf-get 'v4 'i1 11)
11)
(check-for-bug :symbol10-legacy-499
(setf-get 'v4 'i2 22)
22)
(check-for-bug :symbol10-legacy-503
(testvar 'v4)
geb val konst svar func mac spec plist i3
(nil nil nil nil t nil nil t 11 22 nil))
;;; rebind
(check-for-bug :symbol10-legacy-510
(fmakunbound 'v4)
v4)
(check-for-bug :symbol10-legacy-513
(not (null (remprop 'v4 'i1)))
t)
(check-for-bug :symbol10-legacy-516
(not (null (remprop 'v4 'i2)))
t)
(check-for-bug :symbol10-legacy-519
(testvar 'v4)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-524
(defun v4 (x) (list x x x))
v4)
(check-for-bug :symbol10-legacy-528
(v4 44)
(44 44 44))
(check-for-bug :symbol10-legacy-532
(setf-get 'v4 'i2 222)
222)
(check-for-bug :symbol10-legacy-536
(setf-get 'v4 'i3 333)
333)
(check-for-bug :symbol10-legacy-540
(testvar 'v4)
geb val konst svar func mac spec plist i3
(nil nil nil nil t nil nil t nil 222 333))
(check-for-bug :symbol10-legacy-545
(defvar v4 'v4-value)
v4)
(check-for-bug :symbol10-legacy-549
(testvar 'v4)
geb val func mac spec plist i3
(t v4-value nil t t nil nil t nil 222 333))
(check-for-bug :symbol10-legacy-554
(clrvar 'v5)
v5)
;;;;; prop - rebind - con - rebind - fun
(check-for-bug :symbol10-legacy-560
(setf-get 'v5 'i1 1)
1)
(check-for-bug :symbol10-legacy-563
(setf-get 'v5 'i2 2)
2)
(check-for-bug :symbol10-legacy-567
(testvar 'v5)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil t 1 2 nil))
;;; rebind
(check-for-bug :symbol10-legacy-574
(not (null (remprop 'v5 'i1)))
t)
(check-for-bug :symbol10-legacy-577
(not (null (remprop 'v5 'i2)))
t)
(check-for-bug :symbol10-legacy-581
(testvar 'v5)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-586
(setf-get 'v5 'i1 11)
11)
(check-for-bug :symbol10-legacy-589
(setf-get 'v5 'i2 22)
22)
(check-for-bug :symbol10-legacy-593
(testvar 'v5)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil t 11 22 nil))
;;; con
(check-for-bug :symbol10-legacy-600
(defconstant v5 '123)
v5)
(check-for-bug :symbol10-legacy-604
(testvar 'v5)
geb val konst svar func mac spec plist i3
(t 123 t nil nil nil nil t 11 22 nil))
;;; rebind
(check-for-bug :symbol10-legacy-611
(makunbound 'v5)
#+(or xcl allegro cmu sbcl) v5
#+(or clisp ecls) error
#-(or xcl allegro cmu sbcl clisp ecls) unknown)
(check-for-bug :symbol10-legacy-616
(not (null (remprop 'v5 'i2)))
t)
(check-for-bug :symbol10-legacy-620
(not (null (remprop 'v5 'i1)))
t)
#+xcl
(check-for-bug :symbol10-legacy-625
(testvar 'v5)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
;;; das ging schief !!
(check-for-bug :symbol10-legacy-632
(defconstant v5 321)
v5)
(check-for-bug :symbol10-legacy-636
(setf-get 'v5 'i3 333)
333)
(check-for-bug :symbol10-legacy-640
(setf-get 'v5 'i2 222)
222)
(check-for-bug :symbol10-legacy-644
(testvar 'v5)
geb val konst svar func mac spec plist i3
(t 321 t nil nil nil nil t nil 222 333))
(check-for-bug :symbol10-legacy-649
(defun v5 (x) x)
v5)
(check-for-bug :symbol10-legacy-653
(v5 666)
666)
(check-for-bug :symbol10-legacy-657
(testvar 'v5)
geb val konst svar func mac spec plist i3
(t 321 t nil t nil nil t nil 222 333))
(check-for-bug :symbol10-legacy-662
(clrvar 'v6)
v6)
prop mac con
(check-for-bug :symbol10-legacy-668
(setf-get 'v6 'i1 1)
1)
(check-for-bug :symbol10-legacy-672
(setf-get 'v6 'i3 3)
3)
(check-for-bug :symbol10-legacy-676
(testvar 'v6)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil t 1 nil 3))
(check-for-bug :symbol10-legacy-681
(defmacro v6 (x) (list 'quote x))
v6)
(check-for-bug :symbol10-legacy-685
(v6 a)
a)
(check-for-bug :symbol10-legacy-689
(testvar 'v6)
geb val konst svar func mac spec plist i3
(nil nil nil nil t t nil t 1 nil 3))
(check-for-bug :symbol10-legacy-694
(defconstant v6 234)
v6)
(check-for-bug :symbol10-legacy-698
(testvar 'v6)
geb val konst svar func mac spec plist i3
(t 234 t nil t t nil t 1 nil 3))
;; aufraeumen
(mapc #'unintern '(v1 v2 v3 v4 v5 v6))
| null | https://raw.githubusercontent.com/blindglobe/clocc/a50bb75edb01039b282cf320e4505122a59c59a7/src/tools/ansi-test/symbol10.lisp | lisp | based on v1.5 -*- mode: lisp -*-
test der neuen valuezelle
gebunden
wert/nil
funktion. eigenschaft
macro?
spezialform?
p-liste?
p-liste?
i1
i2
i3
sys::%p-set-cdr-content
value - umbinden - macro - umbinden - props - umbinden
value
umbinden
macro
umbinden
props
umbinden
specvar - props - rebind - function
rebind
function
function - con - rebind - prop
function
constant
rebind
rebind
props
function - rebind - prop - rebind - specvar
rebind
rebind
prop - rebind - con - rebind - fun
rebind
con
rebind
das ging schief !!
aufraeumen | (in-package :cl-user)
(check-for-bug :symbol10-legacy-4
(progn (in-package :cl-user) nil)
nil
"in-package expects a 'string designator'
this is or a character, a symbol or a string.")
1 . ungebundenes symbol
(check-for-bug :symbol10-legacy-14
(defun testvar (var)
(if (boundp var)
(symbol-value var)
#+xcl
(eq (sys::%p-get-cdr var 0)
specvar
#+clisp
(and (sys::special-variable-p var)
specvar
#+allegro
(and (not (constantp var))
(eval `(let ((,var (list nil)))
(and (boundp ',var)
(eq (symbol-value ',var)
,var)))))
#+cmu
(eq (ext:info variable kind var)
like clisp
specvar
#+ecls
(si::specialp var)
#+sbcl
(eq (sb-int::info variable kind var)
like clisp
(and (fboundp var)
(special-operator-p var)
#-(or clisp ecl)
#+(or clisp ecl)
(and (or (get var 'i1)
(get var 'i2)
(get var 'i3))
) )
testvar)
(check-for-bug :symbol10-legacy-59
(defun clrvar (var)
#+xcl
var 0 (sys::%p-get-content 'sys::%void-value 0) 0)
#-xcl
(progn (makunbound var) (fmakunbound var)
(setf (symbol-plist var) '()))
#+allegro
(setf (excl::symbol-bit var 'excl::.globally-special.) nil)
#+cmu
(setf (ext:info variable kind var) ':global)
#+sbcl
(setf (sb-int::info variable kind var) ':global)
var)
clrvar)
#+(or xcl clisp allegro cmu sbcl)
(check-for-bug :symbol10-legacy-77
(progn (setf (symbol-function 'setf-get)
(symbol-function #+xcl 'sys::setf-get
#+clisp 'sys::%put
#+allegro 'excl::.inv-get
#+(or cmu sbcl) 'cl::%put)) t)
t)
begin breitentest
(check-for-bug :symbol10-legacy-87
(clrvar 'v1)
v1)
(check-for-bug :symbol10-legacy-95
(testvar 'v1)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-100
(setq v1 'val)
val)
(check-for-bug :symbol10-legacy-104
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-111
(makunbound 'v1)
v1)
(check-for-bug :symbol10-legacy-115
(testvar 'v1)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-120
(setq v1 'val2)
val2)
(check-for-bug :symbol10-legacy-124
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val2 nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-131
(defmacro v1 (x) (list 'quote x))
v1)
(check-for-bug :symbol10-legacy-135
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val2 nil nil t t nil nil nil nil nil))
(check-for-bug :symbol10-legacy-142
(fmakunbound 'v1)
v1)
(check-for-bug :symbol10-legacy-146
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val2 nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-151
(defmacro v1 (x) (list 'quote (list x x)))
v1)
(check-for-bug :symbol10-legacy-155
(v1 33)
(33 33))
(check-for-bug :symbol10-legacy-159
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val2 nil nil t t nil nil nil nil nil))
(check-for-bug :symbol10-legacy-164
(makunbound 'v1)
v1)
(check-for-bug :symbol10-legacy-168
(testvar 'v1)
geb val konst svar func mac spec plist i3
(nil nil nil nil t t nil nil nil nil nil))
(check-for-bug :symbol10-legacy-173
(setq v1 'val3)
val3)
(check-for-bug :symbol10-legacy-177
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val3 nil nil t t nil nil nil nil nil))
(check-for-bug :symbol10-legacy-184
(setf-get 'v1 'i1 11)
11)
(check-for-bug :symbol10-legacy-188
(setf-get 'v1 'i2 22)
22)
(check-for-bug :symbol10-legacy-192
(setf-get 'v1 'i3 33)
33)
(check-for-bug :symbol10-legacy-196
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val3 nil nil t t nil t 11 22 33))
(check-for-bug :symbol10-legacy-203
(not (null (remprop 'v1 'i2)))
t)
(check-for-bug :symbol10-legacy-206
(not (null (remprop 'v1 'i1)))
t)
(check-for-bug :symbol10-legacy-209
(not (null (remprop 'v1 'i3)))
t)
(check-for-bug :symbol10-legacy-212
(fmakunbound 'v1)
v1)
(check-for-bug :symbol10-legacy-215
(makunbound 'v1)
v1)
(check-for-bug :symbol10-legacy-219
(testvar 'v1)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-224
(setf-get 'v1 'i1 99)
99)
(check-for-bug :symbol10-legacy-227
(defmacro v1 (x) (list 'quote (list x x x)))
v1)
(check-for-bug :symbol10-legacy-230
(v1 a)
(a a a))
(check-for-bug :symbol10-legacy-233
(setq v1 'val4)
val4)
(check-for-bug :symbol10-legacy-237
(testvar 'v1)
geb val konst svar func mac spec plist i3
(t val4 nil nil t t nil t 99 nil nil))
--- ende -----
(check-for-bug :symbol10-legacy-244
(clrvar 'v2)
v2)
(check-for-bug :symbol10-legacy-250
(defvar v2 'v2a)
v2)
(check-for-bug :symbol10-legacy-254
(testvar 'v2)
geb val konst svar func mac spec plist i3
(t v2a nil t nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-259
(setf-get 'v2 'i3 33)
33)
(check-for-bug :symbol10-legacy-262
(setf-get 'v2 'i2 22)
22)
(check-for-bug :symbol10-legacy-265
(setf-get 'v2 'i1 11)
11)
(check-for-bug :symbol10-legacy-269
(testvar 'v2)
geb val konst svar func mac spec plist i3
(t v2a nil t nil nil nil t 11 22 33))
(check-for-bug :symbol10-legacy-276
(makunbound 'v2)
v2)
(check-for-bug :symbol10-legacy-279
(not (null (remprop 'v2 'i1)))
t)
(check-for-bug :symbol10-legacy-282
(not (null (remprop 'v2 'i2)))
t)
(check-for-bug :symbol10-legacy-285
(not (null (remprop 'v2 'i3)))
t)
(check-for-bug :symbol10-legacy-289
(testvar 'v2)
geb val konst svar func mac spec plist i3
#+xcl
(nil nil nil nil nil nil nil nil nil nil nil)
#-xcl
(nil nil nil t nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-297
(defvar v2 'v2b)
v2)
(check-for-bug :symbol10-legacy-300
(setf-get 'v2 'i1 111)
111)
(check-for-bug :symbol10-legacy-303
(setf-get 'v2 'i2 222)
222)
(check-for-bug :symbol10-legacy-306
(setf-get 'v2 'i3 333)
333)
(check-for-bug :symbol10-legacy-310
(testvar 'v2)
geb val konst svar func mac spec plist i3
(t v2b nil t nil nil nil t 111 222 333))
(check-for-bug :symbol10-legacy-317
(defun v2 (x) (list x x))
v2)
(check-for-bug :symbol10-legacy-320
(v2 44)
(44 44))
(check-for-bug :symbol10-legacy-324
(testvar 'v2)
geb val konst svar func mac spec plist i3
(t v2b nil t t nil nil t 111 222 333 ))
(check-for-bug :symbol10-legacy-330
(clrvar 'v3)
v3)
(check-for-bug :symbol10-legacy-338
(defun v3 (x y) (list x y))
v3)
(check-for-bug :symbol10-legacy-342
(testvar 'v3)
geb val konst svar func mac spec plist i3
(nil nil nil nil t nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-349
(defconstant v3 99)
v3)
(check-for-bug :symbol10-legacy-353
v3
99)
(check-for-bug :symbol10-legacy-356
(v3 'a 'b)
(a b))
(check-for-bug :symbol10-legacy-360
(testvar 'v3)
geb val konst svar func mac spec plist i3
(t 99 t nil t nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-367
(makunbound 'v3)
#+(or xcl allegro cmu sbcl) v3
#+(or clisp ecls) error
#-(or xcl allegro cmu sbcl clisp ecls) unknown)
(check-for-bug :symbol10-legacy-372
(fmakunbound 'v3)
v3)
#+xcl
(check-for-bug :symbol10-legacy-377
(testvar 'v3)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-382
(defconstant v3 999)
v3)
(check-for-bug :symbol10-legacy-386
(defun v3 (x) (list x x))
v3)
(check-for-bug :symbol10-legacy-390
(v3 'c)
(c c))
(check-for-bug :symbol10-legacy-394
v3
999)
(check-for-bug :symbol10-legacy-398
(testvar 'v3)
geb val konst svar func mac spec plist i3
(t 999 t nil t nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-405
(defparameter var33)
error)
(check-for-bug :symbol10-legacy-409
(defparameter var3 99)
var3)
(check-for-bug :symbol10-legacy-413
var3
99)
(check-for-bug :symbol10-legacy-417
(testvar 'var3)
geb val konst svar func mac spec plist i3
(t 99 nil t nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-424
(makunbound 'var3)
var3)
(check-for-bug :symbol10-legacy-428
(testvar 'var3)
geb val konst svar func mac spec plist i3
#+xcl
(nil nil nil nil nil nil nil nil nil nil nil)
#-xcl
(nil nil nil t nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-438
(setf-get 'v3 'i2 222)
222)
(check-for-bug :symbol10-legacy-442
(setf-get 'v3 'i1 111)
111)
(check-for-bug :symbol10-legacy-446
(testvar 'v3)
geb val konst svar func mac spec plist i3
(t 999 t nil t nil nil t 111 222 nil))
(check-for-bug :symbol10-legacy-452
(clrvar 'v4)
v4)
(check-for-bug :symbol10-legacy-458
(defun v4 (x) x)
v4)
(check-for-bug :symbol10-legacy-462
(v4 55)
55)
(check-for-bug :symbol10-legacy-466
(testvar 'v4)
geb val konst svar func mac spec plist i3
(nil nil nil nil t nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-473
(fmakunbound 'v4)
v4)
(check-for-bug :symbol10-legacy-477
(testvar 'v4)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-482
(defun v4 (x) (list x))
v4)
(check-for-bug :symbol10-legacy-486
(v4 88)
(88))
(check-for-bug :symbol10-legacy-490
(testvar 'v4)
geb val konst svar func mac spec plist i3
(nil nil nil nil t nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-495
(setf-get 'v4 'i1 11)
11)
(check-for-bug :symbol10-legacy-499
(setf-get 'v4 'i2 22)
22)
(check-for-bug :symbol10-legacy-503
(testvar 'v4)
geb val konst svar func mac spec plist i3
(nil nil nil nil t nil nil t 11 22 nil))
(check-for-bug :symbol10-legacy-510
(fmakunbound 'v4)
v4)
(check-for-bug :symbol10-legacy-513
(not (null (remprop 'v4 'i1)))
t)
(check-for-bug :symbol10-legacy-516
(not (null (remprop 'v4 'i2)))
t)
(check-for-bug :symbol10-legacy-519
(testvar 'v4)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-524
(defun v4 (x) (list x x x))
v4)
(check-for-bug :symbol10-legacy-528
(v4 44)
(44 44 44))
(check-for-bug :symbol10-legacy-532
(setf-get 'v4 'i2 222)
222)
(check-for-bug :symbol10-legacy-536
(setf-get 'v4 'i3 333)
333)
(check-for-bug :symbol10-legacy-540
(testvar 'v4)
geb val konst svar func mac spec plist i3
(nil nil nil nil t nil nil t nil 222 333))
(check-for-bug :symbol10-legacy-545
(defvar v4 'v4-value)
v4)
(check-for-bug :symbol10-legacy-549
(testvar 'v4)
geb val func mac spec plist i3
(t v4-value nil t t nil nil t nil 222 333))
(check-for-bug :symbol10-legacy-554
(clrvar 'v5)
v5)
(check-for-bug :symbol10-legacy-560
(setf-get 'v5 'i1 1)
1)
(check-for-bug :symbol10-legacy-563
(setf-get 'v5 'i2 2)
2)
(check-for-bug :symbol10-legacy-567
(testvar 'v5)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil t 1 2 nil))
(check-for-bug :symbol10-legacy-574
(not (null (remprop 'v5 'i1)))
t)
(check-for-bug :symbol10-legacy-577
(not (null (remprop 'v5 'i2)))
t)
(check-for-bug :symbol10-legacy-581
(testvar 'v5)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-586
(setf-get 'v5 'i1 11)
11)
(check-for-bug :symbol10-legacy-589
(setf-get 'v5 'i2 22)
22)
(check-for-bug :symbol10-legacy-593
(testvar 'v5)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil t 11 22 nil))
(check-for-bug :symbol10-legacy-600
(defconstant v5 '123)
v5)
(check-for-bug :symbol10-legacy-604
(testvar 'v5)
geb val konst svar func mac spec plist i3
(t 123 t nil nil nil nil t 11 22 nil))
(check-for-bug :symbol10-legacy-611
(makunbound 'v5)
#+(or xcl allegro cmu sbcl) v5
#+(or clisp ecls) error
#-(or xcl allegro cmu sbcl clisp ecls) unknown)
(check-for-bug :symbol10-legacy-616
(not (null (remprop 'v5 'i2)))
t)
(check-for-bug :symbol10-legacy-620
(not (null (remprop 'v5 'i1)))
t)
#+xcl
(check-for-bug :symbol10-legacy-625
(testvar 'v5)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil nil nil nil nil))
(check-for-bug :symbol10-legacy-632
(defconstant v5 321)
v5)
(check-for-bug :symbol10-legacy-636
(setf-get 'v5 'i3 333)
333)
(check-for-bug :symbol10-legacy-640
(setf-get 'v5 'i2 222)
222)
(check-for-bug :symbol10-legacy-644
(testvar 'v5)
geb val konst svar func mac spec plist i3
(t 321 t nil nil nil nil t nil 222 333))
(check-for-bug :symbol10-legacy-649
(defun v5 (x) x)
v5)
(check-for-bug :symbol10-legacy-653
(v5 666)
666)
(check-for-bug :symbol10-legacy-657
(testvar 'v5)
geb val konst svar func mac spec plist i3
(t 321 t nil t nil nil t nil 222 333))
(check-for-bug :symbol10-legacy-662
(clrvar 'v6)
v6)
prop mac con
(check-for-bug :symbol10-legacy-668
(setf-get 'v6 'i1 1)
1)
(check-for-bug :symbol10-legacy-672
(setf-get 'v6 'i3 3)
3)
(check-for-bug :symbol10-legacy-676
(testvar 'v6)
geb val konst svar func mac spec plist i3
(nil nil nil nil nil nil nil t 1 nil 3))
(check-for-bug :symbol10-legacy-681
(defmacro v6 (x) (list 'quote x))
v6)
(check-for-bug :symbol10-legacy-685
(v6 a)
a)
(check-for-bug :symbol10-legacy-689
(testvar 'v6)
geb val konst svar func mac spec plist i3
(nil nil nil nil t t nil t 1 nil 3))
(check-for-bug :symbol10-legacy-694
(defconstant v6 234)
v6)
(check-for-bug :symbol10-legacy-698
(testvar 'v6)
geb val konst svar func mac spec plist i3
(t 234 t nil t t nil t 1 nil 3))
(mapc #'unintern '(v1 v2 v3 v4 v5 v6))
|
4adf6124ed5bd1cb0c55b44a5f470b002583dfe912bf65b1f2e151efe881fcbe | LuisThiamNye/chic | style.clj | (ns chic.style
(:require
[babashka.fs :as fs]
[taoensso.encore :as enc]
[chic.util :as util])
(:import
[io.github.humbleui.skija FontMgr FontStyle Typeface Data Font Paint]))
(def input-font-path (let [s "/Volumes/Carbonator/csync/fonts/Input-Font/Input_Fonts/InputSans/InputSansCondensed/InputSansCondensed-Regular.ttf"]
(when (fs/exists? s) s)))
(def ^Typeface face-default
(.matchFamiliesStyle (FontMgr/getDefault)
(into-array String ["Roboto Slab", #_".SF NS" ;; slow?
"Helvetica Neue", "Arial"])
FontStyle/NORMAL))
(def ^Typeface face-code-default
(if input-font-path
(Typeface/makeFromFile input-font-path)
(try
(Typeface/makeFromData
(Data/makeFromBytes
(util/url->bytes (re-find #"https:.+\.ttf"
(slurp "+Code&display=swap")))))
(catch Exception _
face-default))))
(def context-default
(enc/memoize
(fn [{:keys [scale]}]
(let [font-ui (Font. face-default (float (* 14 scale)))
fill-text (doto (Paint.) (.setColor (unchecked-int 0xFF000000)))
font-code (Font. face-code-default (float (* 14 scale)))]
{:face-ui face-default
:font-ui font-ui
:face-code face-code-default
:font-code font-code
:fill-text fill-text}))))
| null | https://raw.githubusercontent.com/LuisThiamNye/chic/813633a689f9080731613f788a295604d4d9a510/src/chic/style.clj | clojure | slow? | (ns chic.style
(:require
[babashka.fs :as fs]
[taoensso.encore :as enc]
[chic.util :as util])
(:import
[io.github.humbleui.skija FontMgr FontStyle Typeface Data Font Paint]))
(def input-font-path (let [s "/Volumes/Carbonator/csync/fonts/Input-Font/Input_Fonts/InputSans/InputSansCondensed/InputSansCondensed-Regular.ttf"]
(when (fs/exists? s) s)))
(def ^Typeface face-default
(.matchFamiliesStyle (FontMgr/getDefault)
"Helvetica Neue", "Arial"])
FontStyle/NORMAL))
(def ^Typeface face-code-default
(if input-font-path
(Typeface/makeFromFile input-font-path)
(try
(Typeface/makeFromData
(Data/makeFromBytes
(util/url->bytes (re-find #"https:.+\.ttf"
(slurp "+Code&display=swap")))))
(catch Exception _
face-default))))
(def context-default
(enc/memoize
(fn [{:keys [scale]}]
(let [font-ui (Font. face-default (float (* 14 scale)))
fill-text (doto (Paint.) (.setColor (unchecked-int 0xFF000000)))
font-code (Font. face-code-default (float (* 14 scale)))]
{:face-ui face-default
:font-ui font-ui
:face-code face-code-default
:font-code font-code
:fill-text fill-text}))))
|
02533f114cd0173009bbdccad9c5cab00d1efbd452729b60461c00391f171f22 | charlieg/Sparser | moving.lisp | copyright ( c ) 2011 -- all rights reserved
$ Id:$
;;;
;;; File: "moving"
Module : " : places : "
version : August 2011
;; Intended for modeling movement in direction or w.r.t. some
;; spatial feature.
initated 8/4/11
(in-package :sparser)
;; drive to <location>
;; travel <measurement (distance)> <direction> on<path> to<location
;; turn <direction> onto<path>
;; cross <landmark ??>
;; follow <path> to<configuration> past<landmark> (<distance>)
(define-category move
:instantiates self
:specializes event ;; we'll just 'action', since the examples aren't tensed
:binds ((mover))) ;; the one that moves. Could be anything
;; A category generator based on these verbs, with the assumption
;; that we get the arguments by building up left-to-right (which
;; means we won't see any of them until we get to the forest level,
but if they do n't individually combine we 'll get there one adjunct
;; at a time in order.
(defun define-movement-verb (string &optional name)
(let* ((symbol (or name (name-to-use-for-category string)))
(word (define-word string))
(category (category-named symbol)))
(let ((form `(define-category ,symbol
:specializes move
:instantiates move
:binds ((to-location . location)
(via-path . path)
(for-distance . measurement) ;; refine to measurements of distance?
(in-direction . direction)
)
:realization ((:tree-family vp+adjunct
:mapping ((vg . :self)
(vp . move)
(adjunct . measurement)
(slot . for-distance)))
(:tree-family vp+adjunct
:mapping ((vg . :self)
(vp . move)
(adjunct . direction)
(slot . in-direction)))
(:main-verb ,string)))))
(setq category (eval form))
category)))
;;--- Cases (could go to a dossier)
(define-movement-verb "cross")
(define-movement-verb "drive")
(define-movement-verb "follow")
(define-movement-verb "travel")
(define-movement-verb "turn")
| null | https://raw.githubusercontent.com/charlieg/Sparser/b9bb7d01d2e40f783f3214fc104062db3d15e608/Sparser/code/s/grammar/model/core/places/moving.lisp | lisp |
File: "moving"
Intended for modeling movement in direction or w.r.t. some
spatial feature.
drive to <location>
travel <measurement (distance)> <direction> on<path> to<location
turn <direction> onto<path>
cross <landmark ??>
follow <path> to<configuration> past<landmark> (<distance>)
we'll just 'action', since the examples aren't tensed
the one that moves. Could be anything
A category generator based on these verbs, with the assumption
that we get the arguments by building up left-to-right (which
means we won't see any of them until we get to the forest level,
at a time in order.
refine to measurements of distance?
--- Cases (could go to a dossier) | copyright ( c ) 2011 -- all rights reserved
$ Id:$
Module : " : places : "
version : August 2011
initated 8/4/11
(in-package :sparser)
(define-category move
:instantiates self
but if they do n't individually combine we 'll get there one adjunct
(defun define-movement-verb (string &optional name)
(let* ((symbol (or name (name-to-use-for-category string)))
(word (define-word string))
(category (category-named symbol)))
(let ((form `(define-category ,symbol
:specializes move
:instantiates move
:binds ((to-location . location)
(via-path . path)
(in-direction . direction)
)
:realization ((:tree-family vp+adjunct
:mapping ((vg . :self)
(vp . move)
(adjunct . measurement)
(slot . for-distance)))
(:tree-family vp+adjunct
:mapping ((vg . :self)
(vp . move)
(adjunct . direction)
(slot . in-direction)))
(:main-verb ,string)))))
(setq category (eval form))
category)))
(define-movement-verb "cross")
(define-movement-verb "drive")
(define-movement-verb "follow")
(define-movement-verb "travel")
(define-movement-verb "turn")
|
dc47e23155825396774db42bdcbf9a216c80f9362c300a0822d9801e5020f7a9 | circleci/circleci.test | test_test.clj | Copyright ( c ) . All rights reserved .
; The use and distribution terms for this software are covered by the
; Eclipse Public License 1.0 (-1.0.php)
; which can be found in the file epl-v10.html at the root of this distribution.
; By using this software in any fashion, you are agreeing to be bound by
; the terms of this license.
; You must not remove this notice, or any other, from this software.
;;; circleci/test-test.clj: unit tests for test.clj
by
January 16 , 2009
Thanks to , , and for
;; contributions and suggestions.
(ns circleci.test-test
(:use circleci.test)
(:require [clojure.stacktrace :as stack]
[circleci.test :as t]
[circleci.test.report :as report]
[clojure.test :refer (deftest testing is are)]))
(deftest should-run-global-fixture
;; this is defined in dev-resources/circleci_test/config.clj
(is @(resolve 'user/*inside-global*) "Should pass"))
(deftest can-test-symbol
(let [x true]
(is x "Should pass"))
(let [x false]
(is x "Should fail")))
(deftest can-test-boolean
(is true "Should pass")
(is false "Should fail"))
(deftest can-test-nil
(is nil "Should fail"))
(deftest can-test-=
(is (= 2 (+ 1 1)) "Should pass")
(is (= 3 (+ 2 2)) "Should fail"))
(deftest can-test-instance
(is (instance? Long (+ 2 2)) "Should pass")
(is (instance? Float (+ 1 1)) "Should fail"))
(deftest can-test-thrown
(is (thrown? ArithmeticException (/ 1 0)) "Should pass")
;; No exception is thrown:
(is (thrown? Exception (+ 1 1)) "Should fail")
;; Wrong class of exception is thrown:
(is (thrown? ArithmeticException (throw (RuntimeException.))) "Should error"))
(deftest can-test-thrown-with-msg
(is (thrown-with-msg? ArithmeticException #"Divide by zero" (/ 1 0)) "Should pass")
;; Wrong message string:
(is (thrown-with-msg? ArithmeticException #"Something else" (/ 1 0)) "Should fail")
;; No exception is thrown:
(is (thrown? Exception (+ 1 1)) "Should fail")
;; Wrong class of exception is thrown:
(is (thrown-with-msg? IllegalArgumentException #"Divide by zero" (/ 1 0)) "Should error"))
(deftest can-catch-unexpected-exceptions
(is (= 1 (throw (Exception.))) "Should error"))
(deftest can-test-method-call
(is (.startsWith "abc" "a") "Should pass")
(is (.startsWith "abc" "d") "Should fail"))
(deftest can-test-anonymous-fn
(is (#(.startsWith % "a") "abc") "Should pass")
(is (#(.startsWith % "d") "abc") "Should fail"))
(deftest can-test-regexps
(is (re-matches #"^ab.*$" "abbabba") "Should pass")
(is (re-matches #"^cd.*$" "abbabba") "Should fail")
(is (re-find #"ab" "abbabba") "Should pass")
(is (re-find #"cd" "abbabba") "Should fail"))
(deftest clj-1102-empty-stack-trace-should-not-throw-exceptions
(let [empty-stack (into-array (Class/forName "java.lang.StackTraceElement")
[])
t (doto (Exception.) (.setStackTrace empty-stack))]
(is (map? (#'clojure.test/stacktrace-file-and-line empty-stack)) "Should pass")
(is (string? (with-out-str (stack/print-stack-trace t))) "Should pass")))
(deftest #^{:has-meta true} can-add-metadata-to-tests
(is (:has-meta (meta #'can-add-metadata-to-tests)) "Should pass"))
(deftest lookup-selector-supplies-default
(is (= identity
(#'t/lookup-selector {:selectors {:foo :integration}}
:default))
"Should pass")
(is (= :user-default
(#'t/lookup-selector {:selectors {:foo :integration :default :user-default}}
:default))
"Should pass"))
;; still have to declare the symbol before testing unbound symbols
(declare does-not-exist)
#_(deftest can-test-unbound-symbol
(is (= nil does-not-exist) "Should error"))
#_(deftest can-test-unbound-function
(is (does-not-exist) "Should error"))
;; Here, we create an alternate version of test/report, that
;; compares the event with the message, then calls the original
;; 'report' with modified arguments.
(declare ^:dynamic original-report)
(defn custom-report [data]
(let [event (:type data)
msg (:message data)
expected (:expected data)
actual (:actual data)
passed (cond
(= event :fail) (= msg "Should fail")
(= event :pass) (= msg "Should pass")
(= event :error) (= msg "Should error")
:else true)]
(if passed
(original-report {:type :pass, :message msg,
:expected expected, :actual actual})
(original-report {:type :fail, :message (str msg " but got " event)
:expected expected, :actual actual}))))
;; test-ns-hook will be used by test/test-ns to run tests in this
;; namespace.
(defn test-ns-hook []
(binding [original-report clojure.test/report
report/report custom-report]
(#'t/test-all-vars {} (find-ns 'circleci.test-test) (constantly true))))
(deftest clj-1588-symbols-in-are-isolated-from-test-clauses
(binding [clojure.test/report original-report]
(are [x y] (= x y)
((fn [x] (inc x)) 1) 2)))
(deftest dummy-test
(is 1 "Should pass"))
(defn tracking-report
[reports]
(fn [data]
(swap! reports conj data)))
(deftest nested-test-invocations-use-correct-test-var
(let [reports (atom [])]
(binding [clojure.test/report (tracking-report reports)]
(dummy-test))
(let [end-test-var-data (->> @reports
(filter #(-> % :type (= :end-test-var)))
first)]
(is (some? (:elapsed end-test-var-data)) "Should pass"))))
(defn counting-fixture
[counter]
(fn [f]
(swap! counter inc)
(f)))
(defn throwing-fixture
[exception]
(fn [f]
(throw exception)
(f)))
;; Dummy ns for testing once fixtures
(in-ns 'circleci.test.test-ns)
(clojure.core/require '[clojure.test :refer (deftest is)])
(deftest dummy-test
(is 1 "Should pass"))
(deftest nested-dummy-test
(dummy-test))
(deftest ^:integration test
(is false)) ; skipped by test selectors
;; And back to circleci.test-test
(in-ns 'circleci.test-test)
(deftest once-fixture-fns-run-exactly-once-for-test-var-invocations
(let [reports (atom [])
test-ns (find-ns 'circleci.test.test-ns)
once-fixture-counts (atom 0)
_ (alter-meta! test-ns
assoc ::clojure.test/once-fixtures
[(counting-fixture once-fixture-counts)])
test-fn (ns-resolve test-ns 'nested-dummy-test)]
(binding [clojure.test/report (tracking-report reports)]
(test-fn))
(is (= 1 @once-fixture-counts) "Should pass")
(is (= 2 (->> @reports
(filter #(-> % :type (= :begin-test-var)))
count))
"Should pass")
(is (= 2 (->> @reports
(filter #(-> % :type (= :end-test-var)))
count))
"Should pass")))
(deftest once-fixture-fns-run-exactly-once-for-test-ns-invocations
(let [reports (atom [])
test-ns (find-ns 'circleci.test.test-ns)
once-fixture-counts (atom 0)
_ (alter-meta! test-ns
assoc ::clojure.test/once-fixtures
[(counting-fixture once-fixture-counts)])]
(binding [report/report (tracking-report reports)]
(t/test-ns test-ns (complement :integration)))
(is (= 1 @once-fixture-counts) "Should pass")
(is (= 3 (->> @reports
(filter #(-> % :type (= :begin-test-var)))
count))
"Should pass")
(is (= 3 (->> @reports
(filter #(-> % :type (= :end-test-var)))
count))
"Should pass")
(is (= 1 (->> @reports
(filter #(-> % :type (= :begin-test-ns)))
count))
"Should pass")
(is (= 1 (->> @reports
(filter #(-> % :type (= :end-test-ns)))
count))
"Should pass")))
(deftest fixture-exceptions-are-reported-as-test-errors-from-test-ns
(testing "once fixtures"
(let [reports (atom [])
exception (doto (Exception. "test exception from fixture")
(.setStackTrace (into-array StackTraceElement [])))
test-ns (find-ns 'circleci.test.test-ns)
_ (alter-meta! test-ns
assoc ::clojure.test/once-fixtures
[(throwing-fixture exception)])]
(binding [report/report (tracking-report reports)]
(t/test-ns test-ns (complement :integration)))
(is (= #{{:type :error
:message "Exception thrown from test fixture."
:file nil
:line nil
:expected nil
:actual exception}}
(->> @reports
(filter #(-> % :type (= :error)))
set))
"Should pass"))))
| null | https://raw.githubusercontent.com/circleci/circleci.test/eb2e44fe94e430648c852f7f736419dc70a4e5a1/test/circleci/test_test.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
circleci/test-test.clj: unit tests for test.clj
contributions and suggestions.
this is defined in dev-resources/circleci_test/config.clj
No exception is thrown:
Wrong class of exception is thrown:
Wrong message string:
No exception is thrown:
Wrong class of exception is thrown:
still have to declare the symbol before testing unbound symbols
Here, we create an alternate version of test/report, that
compares the event with the message, then calls the original
'report' with modified arguments.
test-ns-hook will be used by test/test-ns to run tests in this
namespace.
Dummy ns for testing once fixtures
skipped by test selectors
And back to circleci.test-test | Copyright ( c ) . All rights reserved .
by
January 16 , 2009
Thanks to , , and for
(ns circleci.test-test
(:use circleci.test)
(:require [clojure.stacktrace :as stack]
[circleci.test :as t]
[circleci.test.report :as report]
[clojure.test :refer (deftest testing is are)]))
(deftest should-run-global-fixture
(is @(resolve 'user/*inside-global*) "Should pass"))
(deftest can-test-symbol
(let [x true]
(is x "Should pass"))
(let [x false]
(is x "Should fail")))
(deftest can-test-boolean
(is true "Should pass")
(is false "Should fail"))
(deftest can-test-nil
(is nil "Should fail"))
(deftest can-test-=
(is (= 2 (+ 1 1)) "Should pass")
(is (= 3 (+ 2 2)) "Should fail"))
(deftest can-test-instance
(is (instance? Long (+ 2 2)) "Should pass")
(is (instance? Float (+ 1 1)) "Should fail"))
(deftest can-test-thrown
(is (thrown? ArithmeticException (/ 1 0)) "Should pass")
(is (thrown? Exception (+ 1 1)) "Should fail")
(is (thrown? ArithmeticException (throw (RuntimeException.))) "Should error"))
(deftest can-test-thrown-with-msg
(is (thrown-with-msg? ArithmeticException #"Divide by zero" (/ 1 0)) "Should pass")
(is (thrown-with-msg? ArithmeticException #"Something else" (/ 1 0)) "Should fail")
(is (thrown? Exception (+ 1 1)) "Should fail")
(is (thrown-with-msg? IllegalArgumentException #"Divide by zero" (/ 1 0)) "Should error"))
(deftest can-catch-unexpected-exceptions
(is (= 1 (throw (Exception.))) "Should error"))
(deftest can-test-method-call
(is (.startsWith "abc" "a") "Should pass")
(is (.startsWith "abc" "d") "Should fail"))
(deftest can-test-anonymous-fn
(is (#(.startsWith % "a") "abc") "Should pass")
(is (#(.startsWith % "d") "abc") "Should fail"))
(deftest can-test-regexps
(is (re-matches #"^ab.*$" "abbabba") "Should pass")
(is (re-matches #"^cd.*$" "abbabba") "Should fail")
(is (re-find #"ab" "abbabba") "Should pass")
(is (re-find #"cd" "abbabba") "Should fail"))
(deftest clj-1102-empty-stack-trace-should-not-throw-exceptions
(let [empty-stack (into-array (Class/forName "java.lang.StackTraceElement")
[])
t (doto (Exception.) (.setStackTrace empty-stack))]
(is (map? (#'clojure.test/stacktrace-file-and-line empty-stack)) "Should pass")
(is (string? (with-out-str (stack/print-stack-trace t))) "Should pass")))
(deftest #^{:has-meta true} can-add-metadata-to-tests
(is (:has-meta (meta #'can-add-metadata-to-tests)) "Should pass"))
(deftest lookup-selector-supplies-default
(is (= identity
(#'t/lookup-selector {:selectors {:foo :integration}}
:default))
"Should pass")
(is (= :user-default
(#'t/lookup-selector {:selectors {:foo :integration :default :user-default}}
:default))
"Should pass"))
(declare does-not-exist)
#_(deftest can-test-unbound-symbol
(is (= nil does-not-exist) "Should error"))
#_(deftest can-test-unbound-function
(is (does-not-exist) "Should error"))
(declare ^:dynamic original-report)
(defn custom-report [data]
(let [event (:type data)
msg (:message data)
expected (:expected data)
actual (:actual data)
passed (cond
(= event :fail) (= msg "Should fail")
(= event :pass) (= msg "Should pass")
(= event :error) (= msg "Should error")
:else true)]
(if passed
(original-report {:type :pass, :message msg,
:expected expected, :actual actual})
(original-report {:type :fail, :message (str msg " but got " event)
:expected expected, :actual actual}))))
(defn test-ns-hook []
(binding [original-report clojure.test/report
report/report custom-report]
(#'t/test-all-vars {} (find-ns 'circleci.test-test) (constantly true))))
(deftest clj-1588-symbols-in-are-isolated-from-test-clauses
(binding [clojure.test/report original-report]
(are [x y] (= x y)
((fn [x] (inc x)) 1) 2)))
(deftest dummy-test
(is 1 "Should pass"))
(defn tracking-report
[reports]
(fn [data]
(swap! reports conj data)))
(deftest nested-test-invocations-use-correct-test-var
(let [reports (atom [])]
(binding [clojure.test/report (tracking-report reports)]
(dummy-test))
(let [end-test-var-data (->> @reports
(filter #(-> % :type (= :end-test-var)))
first)]
(is (some? (:elapsed end-test-var-data)) "Should pass"))))
(defn counting-fixture
[counter]
(fn [f]
(swap! counter inc)
(f)))
(defn throwing-fixture
[exception]
(fn [f]
(throw exception)
(f)))
(in-ns 'circleci.test.test-ns)
(clojure.core/require '[clojure.test :refer (deftest is)])
(deftest dummy-test
(is 1 "Should pass"))
(deftest nested-dummy-test
(dummy-test))
(deftest ^:integration test
(in-ns 'circleci.test-test)
(deftest once-fixture-fns-run-exactly-once-for-test-var-invocations
(let [reports (atom [])
test-ns (find-ns 'circleci.test.test-ns)
once-fixture-counts (atom 0)
_ (alter-meta! test-ns
assoc ::clojure.test/once-fixtures
[(counting-fixture once-fixture-counts)])
test-fn (ns-resolve test-ns 'nested-dummy-test)]
(binding [clojure.test/report (tracking-report reports)]
(test-fn))
(is (= 1 @once-fixture-counts) "Should pass")
(is (= 2 (->> @reports
(filter #(-> % :type (= :begin-test-var)))
count))
"Should pass")
(is (= 2 (->> @reports
(filter #(-> % :type (= :end-test-var)))
count))
"Should pass")))
(deftest once-fixture-fns-run-exactly-once-for-test-ns-invocations
(let [reports (atom [])
test-ns (find-ns 'circleci.test.test-ns)
once-fixture-counts (atom 0)
_ (alter-meta! test-ns
assoc ::clojure.test/once-fixtures
[(counting-fixture once-fixture-counts)])]
(binding [report/report (tracking-report reports)]
(t/test-ns test-ns (complement :integration)))
(is (= 1 @once-fixture-counts) "Should pass")
(is (= 3 (->> @reports
(filter #(-> % :type (= :begin-test-var)))
count))
"Should pass")
(is (= 3 (->> @reports
(filter #(-> % :type (= :end-test-var)))
count))
"Should pass")
(is (= 1 (->> @reports
(filter #(-> % :type (= :begin-test-ns)))
count))
"Should pass")
(is (= 1 (->> @reports
(filter #(-> % :type (= :end-test-ns)))
count))
"Should pass")))
(deftest fixture-exceptions-are-reported-as-test-errors-from-test-ns
(testing "once fixtures"
(let [reports (atom [])
exception (doto (Exception. "test exception from fixture")
(.setStackTrace (into-array StackTraceElement [])))
test-ns (find-ns 'circleci.test.test-ns)
_ (alter-meta! test-ns
assoc ::clojure.test/once-fixtures
[(throwing-fixture exception)])]
(binding [report/report (tracking-report reports)]
(t/test-ns test-ns (complement :integration)))
(is (= #{{:type :error
:message "Exception thrown from test fixture."
:file nil
:line nil
:expected nil
:actual exception}}
(->> @reports
(filter #(-> % :type (= :error)))
set))
"Should pass"))))
|
bcfc6bc334ac318c587fefe75753e921746a9364397cdabd589acac32e5d9c7f | markbastian/partsbin | system.clj | (ns partsbin.system
"Utility ns for creating a ns-global reloadable system"
(:require [integrant.core :as ig]))
(defmacro create-system
"Create a ns-global system. Takes code that evaluates to a system configuration.
The following items will be created:
* *system* dynamic variable to hold the system.
* 'system' function for viewing the current value of the system.
* Functions start, stop, and restart which will do those actions on the system."
[config]
`(do
(defonce ~(with-meta '*system* {:dynamic true}) nil)
(defn ~'system [] ~'*system*)
(defn ~'start []
(alter-var-root ~'#'*system* (fn [~'s] (if-not ~'s (ig/init ~config) ~'s))))
(defn ~'stop []
(alter-var-root ~'#'*system* (fn [~'s] (when ~'s (do (ig/halt! ~'s) nil)))))
(defn ~'restart [] (do (~'stop) (~'start)))))
(defmacro with-system [[bindings config] & body]
`(let [system# (ig/init ~config)
~bindings system#]
(try
~@body
(finally (ig/halt! system#))))) | null | https://raw.githubusercontent.com/markbastian/partsbin/8dc159327f296c9625d129b5943ec79433019e54/src/partsbin/system.clj | clojure | (ns partsbin.system
"Utility ns for creating a ns-global reloadable system"
(:require [integrant.core :as ig]))
(defmacro create-system
"Create a ns-global system. Takes code that evaluates to a system configuration.
The following items will be created:
* *system* dynamic variable to hold the system.
* 'system' function for viewing the current value of the system.
* Functions start, stop, and restart which will do those actions on the system."
[config]
`(do
(defonce ~(with-meta '*system* {:dynamic true}) nil)
(defn ~'system [] ~'*system*)
(defn ~'start []
(alter-var-root ~'#'*system* (fn [~'s] (if-not ~'s (ig/init ~config) ~'s))))
(defn ~'stop []
(alter-var-root ~'#'*system* (fn [~'s] (when ~'s (do (ig/halt! ~'s) nil)))))
(defn ~'restart [] (do (~'stop) (~'start)))))
(defmacro with-system [[bindings config] & body]
`(let [system# (ig/init ~config)
~bindings system#]
(try
~@body
(finally (ig/halt! system#))))) | |
648bac14441cfc2afa86eb33d719883499a9ee56c83266e1e3c58350c61ec206 | basho/machi | machi_flu_psup.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2007 - 2015 Basho Technologies , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
@doc Supervisor for Machi FLU servers and their related support
%% servers.
%%
Our parent supervisor , { @link machi_flu_sup } , is responsible for
managing FLUs as a single entity . However , the actual
implementation of a FLU includes three major Erlang processes ( not
including support / worker procs ): the FLU itself , the FLU 's
projection store , and the FLU 's local chain manager . This
supervisor is responsible for managing those three major services
%% as a single "package", to be started & stopped together.
%%
The illustration below shows the OTP process supervision tree for
the Machi application . Two FLUs are running , called ` a ' and ` b ' .
The chain is configured for a third FLU , ` c ' , which is not running
%% at this time.
%%
%% <img src="/machi/{@docRoot}/images/supervisor-2flus.png"></img>
%%
%% <ul>
< li > The FLU process itself is named ` a ' .
%% </li>
%% <li> The projection store process is named `a_pstore'.
%% </li>
< li > The chain manager process is named ` a_chmgr ' . The three
%% linked subprocesses are long-lived {@link
%% machi_proxy_flu1_client} processes for communicating to all
%% chain participants' projection stores (including the local
%% store `a_pstore').
%% </li>
< li > A fourth major process , ` a_listener ' , which is responsible for
%% listening on a TCP socket and creating new connections.
Currently , each listener has two processes handling incoming
requests , one from each chain manager proxy .
%% </li>
< li > Note that the sub - supervisor parent of ` a ' and ` a_listener ' does
%% not have a registered name.
%% </li>
%% </ul>
-module(machi_flu_psup).
-behaviour(supervisor).
-include("machi_projection.hrl").
-include("machi_verbose.hrl").
-ifdef(PULSE).
-compile({parse_transform, pulse_instrument}).
-include_lib("pulse_otp/include/pulse_otp.hrl").
-define(SHUTDOWN, infinity).
-else.
-define(SHUTDOWN, 5000).
-endif.
%% External API
-export([make_package_spec/1, make_package_spec/4,
start_flu_package/1, start_flu_package/4, stop_flu_package/1]).
%% Internal API
-export([start_link/4,
make_flu_regname/1, make_p_regname/1, make_mgr_supname/1,
make_proj_supname/1, make_fitness_regname/1]).
%% Supervisor callbacks
-export([init/1]).
make_package_spec(#p_srvr{name=FluName, port=TcpPort, props=Props}) when is_list(Props) ->
make_package_spec({FluName, TcpPort, Props});
make_package_spec({FluName, TcpPort, Props}) when is_list(Props) ->
FluDataDir = get_env(flu_data_dir, undefined_is_invalid),
MyDataDir = filename:join(FluDataDir, atom_to_list(FluName)),
make_package_spec(FluName, TcpPort, MyDataDir, Props).
make_package_spec(FluName, TcpPort, DataDir, Props) ->
{FluName, {machi_flu_psup, start_link,
[FluName, TcpPort, DataDir, Props]},
permanent, ?SHUTDOWN, supervisor, []}.
start_flu_package(#p_srvr{name=FluName, port=TcpPort, props=Props}) ->
DataDir = get_data_dir(FluName, Props),
start_flu_package(FluName, TcpPort, DataDir, Props).
start_flu_package(FluName, TcpPort, DataDir, Props) ->
Spec = make_package_spec(FluName, TcpPort, DataDir, Props),
{ok, _SupPid} = supervisor:start_child(machi_flu_sup, Spec).
stop_flu_package(FluName) ->
case supervisor:terminate_child(machi_flu_sup, FluName) of
ok ->
ok = supervisor:delete_child(machi_flu_sup, FluName);
Else ->
Else
end.
start_link(FluName, TcpPort, DataDir, Props) ->
supervisor:start_link({local, make_p_regname(FluName)}, ?MODULE,
[FluName, TcpPort, DataDir, Props]).
init([FluName, TcpPort, DataDir, Props0]) ->
RestartStrategy = one_for_all,
MaxRestarts = 1000,
MaxSecondsBetweenRestarts = 3600,
SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
ProjRegName = make_proj_supname(FluName),
Props = Props0 ++ [{projection_store_registered_name, ProjRegName},
{use_partition_simulator,false}],
ProjSpec = {ProjRegName,
{machi_projection_store, start_link,
[ProjRegName, DataDir, FluName]},
permanent, ?SHUTDOWN, worker, []},
FitnessRegName = make_fitness_regname(FluName),
FitnessSpec = {FitnessRegName,
{machi_fitness, start_link,
[ [{FluName}|Props] ]},
permanent, ?SHUTDOWN, worker, []},
MgrSpec = {make_mgr_supname(FluName),
{machi_chain_manager1, start_link,
[FluName, [], Props]},
permanent, ?SHUTDOWN, worker, []},
FNameMgrSpec = machi_flu_filename_mgr:child_spec(FluName, DataDir),
MetaMgrCnt = get_env(metadata_manager_count, 10),
MetaSupSpec = machi_flu_metadata_mgr_sup:child_spec(FluName, DataDir, MetaMgrCnt),
FProxySupSpec = machi_file_proxy_sup:child_spec(FluName),
Flu1SubSupSpec = {machi_flu1_subsup:subsup_name(FluName),
{machi_flu1_subsup, start_link, [FluName]},
permanent, ?SHUTDOWN, supervisor, []},
FluSpec = {FluName,
{machi_flu1, start_link,
[ [{FluName, TcpPort, DataDir}|Props] ]},
permanent, ?SHUTDOWN, worker, []},
{ok, {SupFlags, [
ProjSpec, FitnessSpec, MgrSpec,
FProxySupSpec, FNameMgrSpec, MetaSupSpec,
Flu1SubSupSpec, FluSpec]}}.
make_flu_regname(FluName) when is_atom(FluName) ->
FluName.
make_p_regname(FluName) when is_atom(FluName) ->
list_to_atom("flusup_" ++ atom_to_list(FluName)).
make_mgr_supname(MgrName) when is_atom(MgrName) ->
machi_chain_manager1:make_chmgr_regname(MgrName).
make_proj_supname(ProjName) when is_atom(ProjName) ->
list_to_atom(atom_to_list(ProjName) ++ "_pstore").
make_fitness_regname(FluName) when is_atom(FluName) ->
list_to_atom(atom_to_list(FluName) ++ "_fitness").
get_env(Setting, Default) ->
case application:get_env(machi, Setting) of
undefined -> Default;
{ok, V} -> V
end.
get_data_dir(FluName, Props) ->
case proplists:get_value(data_dir, Props) of
Path when is_list(Path) ->
Path;
undefined ->
{ok, Dir} = application:get_env(machi, flu_data_dir),
Dir ++ "/" ++ atom_to_list(FluName)
end.
| null | https://raw.githubusercontent.com/basho/machi/e87bd59a9777d805b00f9e9981467eb28e28390c/src/machi_flu_psup.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
servers.
as a single "package", to be started & stopped together.
at this time.
<img src="/machi/{@docRoot}/images/supervisor-2flus.png"></img>
<ul>
</li>
<li> The projection store process is named `a_pstore'.
</li>
linked subprocesses are long-lived {@link
machi_proxy_flu1_client} processes for communicating to all
chain participants' projection stores (including the local
store `a_pstore').
</li>
listening on a TCP socket and creating new connections.
</li>
not have a registered name.
</li>
</ul>
External API
Internal API
Supervisor callbacks | Copyright ( c ) 2007 - 2015 Basho Technologies , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
@doc Supervisor for Machi FLU servers and their related support
Our parent supervisor , { @link machi_flu_sup } , is responsible for
managing FLUs as a single entity . However , the actual
implementation of a FLU includes three major Erlang processes ( not
including support / worker procs ): the FLU itself , the FLU 's
projection store , and the FLU 's local chain manager . This
supervisor is responsible for managing those three major services
The illustration below shows the OTP process supervision tree for
the Machi application . Two FLUs are running , called ` a ' and ` b ' .
The chain is configured for a third FLU , ` c ' , which is not running
< li > The FLU process itself is named ` a ' .
< li > The chain manager process is named ` a_chmgr ' . The three
< li > A fourth major process , ` a_listener ' , which is responsible for
Currently , each listener has two processes handling incoming
requests , one from each chain manager proxy .
< li > Note that the sub - supervisor parent of ` a ' and ` a_listener ' does
-module(machi_flu_psup).
-behaviour(supervisor).
-include("machi_projection.hrl").
-include("machi_verbose.hrl").
-ifdef(PULSE).
-compile({parse_transform, pulse_instrument}).
-include_lib("pulse_otp/include/pulse_otp.hrl").
-define(SHUTDOWN, infinity).
-else.
-define(SHUTDOWN, 5000).
-endif.
-export([make_package_spec/1, make_package_spec/4,
start_flu_package/1, start_flu_package/4, stop_flu_package/1]).
-export([start_link/4,
make_flu_regname/1, make_p_regname/1, make_mgr_supname/1,
make_proj_supname/1, make_fitness_regname/1]).
-export([init/1]).
make_package_spec(#p_srvr{name=FluName, port=TcpPort, props=Props}) when is_list(Props) ->
make_package_spec({FluName, TcpPort, Props});
make_package_spec({FluName, TcpPort, Props}) when is_list(Props) ->
FluDataDir = get_env(flu_data_dir, undefined_is_invalid),
MyDataDir = filename:join(FluDataDir, atom_to_list(FluName)),
make_package_spec(FluName, TcpPort, MyDataDir, Props).
make_package_spec(FluName, TcpPort, DataDir, Props) ->
{FluName, {machi_flu_psup, start_link,
[FluName, TcpPort, DataDir, Props]},
permanent, ?SHUTDOWN, supervisor, []}.
start_flu_package(#p_srvr{name=FluName, port=TcpPort, props=Props}) ->
DataDir = get_data_dir(FluName, Props),
start_flu_package(FluName, TcpPort, DataDir, Props).
start_flu_package(FluName, TcpPort, DataDir, Props) ->
Spec = make_package_spec(FluName, TcpPort, DataDir, Props),
{ok, _SupPid} = supervisor:start_child(machi_flu_sup, Spec).
stop_flu_package(FluName) ->
case supervisor:terminate_child(machi_flu_sup, FluName) of
ok ->
ok = supervisor:delete_child(machi_flu_sup, FluName);
Else ->
Else
end.
start_link(FluName, TcpPort, DataDir, Props) ->
supervisor:start_link({local, make_p_regname(FluName)}, ?MODULE,
[FluName, TcpPort, DataDir, Props]).
init([FluName, TcpPort, DataDir, Props0]) ->
RestartStrategy = one_for_all,
MaxRestarts = 1000,
MaxSecondsBetweenRestarts = 3600,
SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
ProjRegName = make_proj_supname(FluName),
Props = Props0 ++ [{projection_store_registered_name, ProjRegName},
{use_partition_simulator,false}],
ProjSpec = {ProjRegName,
{machi_projection_store, start_link,
[ProjRegName, DataDir, FluName]},
permanent, ?SHUTDOWN, worker, []},
FitnessRegName = make_fitness_regname(FluName),
FitnessSpec = {FitnessRegName,
{machi_fitness, start_link,
[ [{FluName}|Props] ]},
permanent, ?SHUTDOWN, worker, []},
MgrSpec = {make_mgr_supname(FluName),
{machi_chain_manager1, start_link,
[FluName, [], Props]},
permanent, ?SHUTDOWN, worker, []},
FNameMgrSpec = machi_flu_filename_mgr:child_spec(FluName, DataDir),
MetaMgrCnt = get_env(metadata_manager_count, 10),
MetaSupSpec = machi_flu_metadata_mgr_sup:child_spec(FluName, DataDir, MetaMgrCnt),
FProxySupSpec = machi_file_proxy_sup:child_spec(FluName),
Flu1SubSupSpec = {machi_flu1_subsup:subsup_name(FluName),
{machi_flu1_subsup, start_link, [FluName]},
permanent, ?SHUTDOWN, supervisor, []},
FluSpec = {FluName,
{machi_flu1, start_link,
[ [{FluName, TcpPort, DataDir}|Props] ]},
permanent, ?SHUTDOWN, worker, []},
{ok, {SupFlags, [
ProjSpec, FitnessSpec, MgrSpec,
FProxySupSpec, FNameMgrSpec, MetaSupSpec,
Flu1SubSupSpec, FluSpec]}}.
make_flu_regname(FluName) when is_atom(FluName) ->
FluName.
make_p_regname(FluName) when is_atom(FluName) ->
list_to_atom("flusup_" ++ atom_to_list(FluName)).
make_mgr_supname(MgrName) when is_atom(MgrName) ->
machi_chain_manager1:make_chmgr_regname(MgrName).
make_proj_supname(ProjName) when is_atom(ProjName) ->
list_to_atom(atom_to_list(ProjName) ++ "_pstore").
make_fitness_regname(FluName) when is_atom(FluName) ->
list_to_atom(atom_to_list(FluName) ++ "_fitness").
get_env(Setting, Default) ->
case application:get_env(machi, Setting) of
undefined -> Default;
{ok, V} -> V
end.
get_data_dir(FluName, Props) ->
case proplists:get_value(data_dir, Props) of
Path when is_list(Path) ->
Path;
undefined ->
{ok, Dir} = application:get_env(machi, flu_data_dir),
Dir ++ "/" ++ atom_to_list(FluName)
end.
|
822535cc2edeb42e77c5f1dc814ef81b430fddfc2478463713b0bcb513e10b48 | elizabethsiegle/reddit-slack-bot | Applications.hs | # LANGUAGE MultiParamTypeClasses #
{-#LANGUAGE OverloadedStrings #-}
module Twilio.Applications
( -- * Resource
Applications(..)
, Twilio.Applications.get
) where
import Control.Applicative
import Control.Monad.Catch
import Data.Aeson
import Data.Maybe
import Control.Monad.Twilio
import Twilio.Application
import Twilio.Internal.Request
import Twilio.Internal.Resource as Resource
import Twilio.Types
Resource
data Applications = Applications
{ applicationsPagingInformation :: PagingInformation
, applicationList :: [Application]
} deriving (Show, Eq)
instance List Applications Application where
getListWrapper = wrap (Applications . fromJust)
getList = applicationList
getPlural = Const "applications"
instance FromJSON Applications where
parseJSON = parseJSONToList
instance Get0 Applications where
get0 = request parseJSONFromResponse =<< makeTwilioRequest "/Applications.json"
| Get the ' Applications ' for your account .
For example , you can fetch the ' Applications ' resource in the ' IO ' monad as follows :
> module Main where
>
> import Control . Monad . IO.Class ( liftIO )
> import System . Environment ( )
> import Twilio . Applications as Applications
> import Twilio . Types
>
> -- | Print applications .
> main : : IO ( )
> main = runTwilio ' ( getEnv " ACCOUNT_SID " )
> ( getEnv " " )
> $ Applications.get > > = liftIO . print
For example, you can fetch the 'Applications' resource in the 'IO' monad as follows:
>module Main where
>
>import Control.Monad.IO.Class (liftIO)
>import System.Environment (getEnv)
>import Twilio.Applications as Applications
>import Twilio.Types
>
>-- | Print applications.
>main :: IO ()
>main = runTwilio' (getEnv "ACCOUNT_SID")
> (getEnv "AUTH_TOKEN")
> $ Applications.get >>= liftIO . print
-}
get :: MonadThrow m => TwilioT m Applications
get = Resource.get
| null | https://raw.githubusercontent.com/elizabethsiegle/reddit-slack-bot/a52ab60dcaae8e16bee8cdba22fce627157a42d8/twilio-haskell-move-to-stack/src/Twilio/Applications.hs | haskell | #LANGUAGE OverloadedStrings #
* Resource
| Print applications .
| Print applications. | # LANGUAGE MultiParamTypeClasses #
module Twilio.Applications
Applications(..)
, Twilio.Applications.get
) where
import Control.Applicative
import Control.Monad.Catch
import Data.Aeson
import Data.Maybe
import Control.Monad.Twilio
import Twilio.Application
import Twilio.Internal.Request
import Twilio.Internal.Resource as Resource
import Twilio.Types
Resource
data Applications = Applications
{ applicationsPagingInformation :: PagingInformation
, applicationList :: [Application]
} deriving (Show, Eq)
instance List Applications Application where
getListWrapper = wrap (Applications . fromJust)
getList = applicationList
getPlural = Const "applications"
instance FromJSON Applications where
parseJSON = parseJSONToList
instance Get0 Applications where
get0 = request parseJSONFromResponse =<< makeTwilioRequest "/Applications.json"
| Get the ' Applications ' for your account .
For example , you can fetch the ' Applications ' resource in the ' IO ' monad as follows :
> module Main where
>
> import Control . Monad . IO.Class ( liftIO )
> import System . Environment ( )
> import Twilio . Applications as Applications
> import Twilio . Types
>
> main : : IO ( )
> main = runTwilio ' ( getEnv " ACCOUNT_SID " )
> ( getEnv " " )
> $ Applications.get > > = liftIO . print
For example, you can fetch the 'Applications' resource in the 'IO' monad as follows:
>module Main where
>
>import Control.Monad.IO.Class (liftIO)
>import System.Environment (getEnv)
>import Twilio.Applications as Applications
>import Twilio.Types
>
>main :: IO ()
>main = runTwilio' (getEnv "ACCOUNT_SID")
> (getEnv "AUTH_TOKEN")
> $ Applications.get >>= liftIO . print
-}
get :: MonadThrow m => TwilioT m Applications
get = Resource.get
|
e5eaa8978efa4cbf265fb181e142bf673cf07cad19684939630181abf2a67805 | jonase/eastwood | consumer.clj | (ns testcases.const.unused-namespaces.consumer
""
(:require [testcases.const.unused-namespaces.producer :as a]))
(defn project-info []
{:version a/version})
| null | https://raw.githubusercontent.com/jonase/eastwood/0d1cb5930775ccd6ab897b7b85d9e118149a24eb/cases/testcases/const/unused_namespaces/consumer.clj | clojure | (ns testcases.const.unused-namespaces.consumer
""
(:require [testcases.const.unused-namespaces.producer :as a]))
(defn project-info []
{:version a/version})
| |
fe55092ebc44b3c22c9d617be7889671a96690c7edff4861dc5fe89246b890cf | webyrd/untitled-relational-interpreter-book | files-to-be-ported.scm | slatex-src.scm
| null | https://raw.githubusercontent.com/webyrd/untitled-relational-interpreter-book/247e29afd224586c39c1983e042524c7cc9fe17b/latex/slatex/dialects/files-to-be-ported.scm | scheme | slatex-src.scm
| |
f1f24331508daff36d716b683b92bff834f9872edb36805b85889a60f07a1b9a | spurious/chibi-scheme-mirror | extract-case-offsets.scm | #!/usr/bin/env chibi-scheme
;; Extract sets of char case offsets.
;;
;; Usage:
;; extract-case-offsets.scm options CaseFolding.txt > out
;;
;; Recognized options are:
;;
;; -c <min-count> - the minimum required count to output a char-set
for an offset , default 32
;; -m <max-char-sets> - the maximum number of character sets to output
;; regardless of min-count, default umlimited
;; -n <name> - the name for char-sets, defaults to
;; "char-downcase-offsets"
;; -o <output-file> - the output file, defaults to stdout
(import (chibi) (srfi 1) (srfi 69) (srfi 95) (chibi io) (chibi string)
(chibi iset) (chibi iset optimize))
(define (warn . args)
(let ((err (current-error-port)))
(for-each (lambda (x) (display x err)) args)
(newline err)))
(define (write-offsets offset-map out min-count max-char-sets name)
(let lp ((ls (sort (hash-table->alist offset-map)
(lambda (a b) (> (iset-size (cdr a)) (iset-size (cdr b))))))
(i 0)
(res '()))
(cond
((and (pair? ls)
(or (not max-char-sets) (< i max-char-sets))
(or (not min-count) (>= (iset-size (cdar ls)) min-count)))
(lp (cdr ls)
(+ i 1)
(cons `(cons ,(iset->code (iset-balance (iset-optimize (cdar ls))))
,(caar ls))
res)))
(else
(write `(define ,(string->symbol name) (list ,@(reverse res))) out)
(newline out)
(newline out)
(let ((pairs
(sort
(append-map
(lambda (x)
(map (lambda (y) (list y (+ y (car x))))
(iset->list (cdr x))))
ls)
(lambda (a b) (< (car a) (car b))))))
(write `(define char-downcase-map
',(list->vector (append-map (lambda (x) x) pairs)))
out)
(newline out)
(newline out)
(write `(define char-upcase-map
',(list->vector
(append-map (lambda (x) (list (cadr x) (car x)))
(sort pairs
(lambda (a b) (< (cadr a) (cadr b)))))))
out)
(newline out))))))
(define (extract-case-folding in out min-count max-char-sets name)
(define (string-trim-comment str comment-ch)
(car (string-split str comment-ch 2)))
(let ((offset-map (make-hash-table eq?)))
(let lp ()
(let ((line (read-line in)))
(cond
((eof-object? line)
(write-offsets offset-map out min-count max-char-sets name))
((or (equal? line "") (eqv? #\# (string-ref line 0)))
(lp))
(else
(let ((ls (map string-trim
(string-split (string-trim-comment line #\#) #\;))))
(cond
((< (length ls) 3)
(warn "invalid CaseFolding.txt line: " line))
(else
(let ((upper (string->number (car ls) 16))
(status (string->symbol (cadr ls))))
(cond
((not upper)
(warn "invalid upper char in CaseFolding.txt: " line))
((eqv? 'C status)
(let ((lower (string->number (car (cddr ls)) 16)))
(if (not lower)
(warn "invalid lower char in CaseFolding.txt: " line)
(hash-table-update!
offset-map
(- lower upper)
(lambda (is) (iset-adjoin! is upper))
(lambda () (make-iset))))))))))
(lp))))))))
(let ((args (command-line)))
(let lp ((ls (cdr args))
(min-count 26)
(max-char-sets #f)
(name "char-downcase-offsets")
(out (current-output-port)))
(cond
((and (pair? ls) (not (equal? "" (car ls)))
(eqv? #\- (string-ref (car ls) 0)))
(cond
((member (car ls) '("-c" "--min-count"))
(lp (cddr ls) (cadr ls) max-char-sets name out))
((member (car ls) '("-m" "--max-char-sets"))
(lp (cddr ls) min-count (cadr ls) name out))
((member (car ls) '("-n" "--name"))
(lp (cddr ls) min-count max-char-sets (cadr ls) out))
((member (car ls) '("-o" "--output"))
(lp (cddr ls) min-count max-char-sets name
(open-output-file (cadr ls))))
(else
(error "unknown option: " (car ls)))))
((null? ls)
(error "usage: extract-case-offsets <CaseFolding.txt>"))
(else
(if (equal? "-" (car ls))
(extract-case-folding
(current-input-port) out min-count max-char-sets name)
(call-with-input-file (car ls)
(lambda (in)
(extract-case-folding in out min-count max-char-sets name))))
(close-output-port out)))))
| null | https://raw.githubusercontent.com/spurious/chibi-scheme-mirror/49168ab073f64a95c834b5f584a9aaea3469594d/tools/extract-case-offsets.scm | scheme | Extract sets of char case offsets.
Usage:
extract-case-offsets.scm options CaseFolding.txt > out
Recognized options are:
-c <min-count> - the minimum required count to output a char-set
-m <max-char-sets> - the maximum number of character sets to output
regardless of min-count, default umlimited
-n <name> - the name for char-sets, defaults to
"char-downcase-offsets"
-o <output-file> - the output file, defaults to stdout
)))) | #!/usr/bin/env chibi-scheme
for an offset , default 32
(import (chibi) (srfi 1) (srfi 69) (srfi 95) (chibi io) (chibi string)
(chibi iset) (chibi iset optimize))
(define (warn . args)
(let ((err (current-error-port)))
(for-each (lambda (x) (display x err)) args)
(newline err)))
(define (write-offsets offset-map out min-count max-char-sets name)
(let lp ((ls (sort (hash-table->alist offset-map)
(lambda (a b) (> (iset-size (cdr a)) (iset-size (cdr b))))))
(i 0)
(res '()))
(cond
((and (pair? ls)
(or (not max-char-sets) (< i max-char-sets))
(or (not min-count) (>= (iset-size (cdar ls)) min-count)))
(lp (cdr ls)
(+ i 1)
(cons `(cons ,(iset->code (iset-balance (iset-optimize (cdar ls))))
,(caar ls))
res)))
(else
(write `(define ,(string->symbol name) (list ,@(reverse res))) out)
(newline out)
(newline out)
(let ((pairs
(sort
(append-map
(lambda (x)
(map (lambda (y) (list y (+ y (car x))))
(iset->list (cdr x))))
ls)
(lambda (a b) (< (car a) (car b))))))
(write `(define char-downcase-map
',(list->vector (append-map (lambda (x) x) pairs)))
out)
(newline out)
(newline out)
(write `(define char-upcase-map
',(list->vector
(append-map (lambda (x) (list (cadr x) (car x)))
(sort pairs
(lambda (a b) (< (cadr a) (cadr b)))))))
out)
(newline out))))))
(define (extract-case-folding in out min-count max-char-sets name)
(define (string-trim-comment str comment-ch)
(car (string-split str comment-ch 2)))
(let ((offset-map (make-hash-table eq?)))
(let lp ()
(let ((line (read-line in)))
(cond
((eof-object? line)
(write-offsets offset-map out min-count max-char-sets name))
((or (equal? line "") (eqv? #\# (string-ref line 0)))
(lp))
(else
(let ((ls (map string-trim
(cond
((< (length ls) 3)
(warn "invalid CaseFolding.txt line: " line))
(else
(let ((upper (string->number (car ls) 16))
(status (string->symbol (cadr ls))))
(cond
((not upper)
(warn "invalid upper char in CaseFolding.txt: " line))
((eqv? 'C status)
(let ((lower (string->number (car (cddr ls)) 16)))
(if (not lower)
(warn "invalid lower char in CaseFolding.txt: " line)
(hash-table-update!
offset-map
(- lower upper)
(lambda (is) (iset-adjoin! is upper))
(lambda () (make-iset))))))))))
(lp))))))))
(let ((args (command-line)))
(let lp ((ls (cdr args))
(min-count 26)
(max-char-sets #f)
(name "char-downcase-offsets")
(out (current-output-port)))
(cond
((and (pair? ls) (not (equal? "" (car ls)))
(eqv? #\- (string-ref (car ls) 0)))
(cond
((member (car ls) '("-c" "--min-count"))
(lp (cddr ls) (cadr ls) max-char-sets name out))
((member (car ls) '("-m" "--max-char-sets"))
(lp (cddr ls) min-count (cadr ls) name out))
((member (car ls) '("-n" "--name"))
(lp (cddr ls) min-count max-char-sets (cadr ls) out))
((member (car ls) '("-o" "--output"))
(lp (cddr ls) min-count max-char-sets name
(open-output-file (cadr ls))))
(else
(error "unknown option: " (car ls)))))
((null? ls)
(error "usage: extract-case-offsets <CaseFolding.txt>"))
(else
(if (equal? "-" (car ls))
(extract-case-folding
(current-input-port) out min-count max-char-sets name)
(call-with-input-file (car ls)
(lambda (in)
(extract-case-folding in out min-count max-char-sets name))))
(close-output-port out)))))
|
a4796af2ae829d98b018a9ecb1075d61b321a336ac9e4fb77355729fd569ed6d | IUCompilerCourse/public-student-support-code | interp-Lvec-prime.rkt | #lang racket
(require "interp-Lvec.rkt")
(require "utilities.rkt")
(require (prefix-in runtime-config: "runtime-config.rkt"))
(provide interp-Lvec-prime interp-Lvec-prime-mixin interp-Lvec-prime-class)
(define (interp-Lvec-prime-mixin super-class)
(class super-class
(super-new)
;; The simulated global state of the program
;; define produces private fields
(define memory (box '()))
;; field is like define but public
(field [stack-size (runtime-config:rootstack-size)]
[heap-size (runtime-config:heap-size)]
[uninitialized 'uninitialized-value-from-memory]
[fromspace_begin (box uninitialized)]
[rootstack_end (box uninitialized)]
[free_ptr (box uninitialized)]
[fromspace_end (box uninitialized)]
[rootstack_begin (box uninitialized)]
[global-label-table
(make-immutable-hash
`((free_ptr . ,free_ptr)
(fromspace_begin . ,fromspace_begin)
(fromspace_end . ,fromspace_end)
(rootstack_begin . ,rootstack_begin)
(rootstack_end . ,rootstack_end)))])
(define/public (memory-read)
(lambda (addr)
(let-values ([(start stop name vect) (fetch-page addr)])
(let ([value (vector-ref vect (arithmetic-shift (- addr start) -3))])
(when (equal? value uninitialized)
(error 'interp-Lvec-class/memory-read
"read uninitialized memory at address ~s"
addr))
value))))
(define/public (memory-write!)
(lambda (addr value)
(let-values ([(start stop name vect) (fetch-page addr)])
(vector-set! vect (arithmetic-shift (- addr start) -3) value))))
(define/public (collect!)
(lambda (rootset bytes-requested)
(verbose "collect!" bytes-requested)
;; after a call to collect we must guarantee there is enough
;; memory to allocate the requested block of memory
(let double-heap ([hs heap-size])
(if (< hs bytes-requested)
(double-heap (* 2 hs))
(let ((h-begin (allocate-page! 'fromspace hs)))
;; I am only advancing the end of the heap because we
;; are not reclaiming memory
(set-box! fromspace_end (+ h-begin hs))
(set-box! free_ptr h-begin))))))
(define/public (initialize!)
(lambda (stack-length heap_length)
(verbose "initialize!")
(set-box! memory '())
(let* ([s-begin (allocate-page! 'rootstack stack-size)]
[h-begin (allocate-page! 'fromspace heap-size)])
(set-box! rootstack_begin s-begin)
(set-box! rootstack_end (+ s-begin stack-size))
(set-box! fromspace_begin h-begin)
(set-box! fromspace_end (+ h-begin heap-size))
(set-box! free_ptr h-begin))))
(define (allocate-page! name size)
(verbose "allocate-page!" name size)
(unless (and (fixnum? size)
(positive? size)
(= 0 (modulo size 8)))
(error 'allocate-page! "expected non-negative fixnum in ~a" size))
;; Find the last address
(define max-addr
(for/fold ([next 8])
([page (in-list (unbox memory))])
(match-let ([`(page ,_ ,stop ,_ ,_) page])
(max next stop))))
Allocate with a small pad 100 words so that it is n't likely to
;; accidentally use another region.
The randomness is to dispell any reliance on interp always allocating
;; the same way. -Andre
(define start-addr (+ max-addr 800))
;; The range is of valid addresses in memory are [start, stop)
(define stop-addr (+ start-addr size))
(define vect (make-vector (arithmetic-shift size -3) uninitialized))
(verbose "allocated" name start-addr stop-addr)
(set-box! memory (cons `(page ,start-addr ,stop-addr ,name ,vect)
(unbox memory)))
start-addr)
(define (free! addr)
(set-box! memory
(let loop ([memory (unbox memory)])
(match memory
[`() (error 'free "invalid address ~a, not currently allocated")]
[`(,(and page `(page ,ptr ,_ ,_ ,_)) . ,pages)
(if (= addr ptr)
pages
(cons page (loop pages)))]))))
(define (fetch-page addr)
;; Create a string containing
(define (fmt-err addr memory)
(apply
string-append
(cons (format "address ~a out of bounds\n\tcurrent memory regions:\n"
addr)
(for/list ([page (in-list (unbox memory))])
(match-let ([`(page ,start ,stop ,name ,_) page])
(format "\t\t~a\t\t[~a,~a)\n" name start stop))))))
(unless (fixnum? addr)
(error 'fetch-page "invalid address ~a, not a fixnum" addr))
(unless (positive? addr)
(error 'fetch-page "invalid address ~a, negative" addr))
(unless (= 0 (modulo addr 8))
(error 'fetch-page "invalid address ~a, not 8-byte aligned" addr))
(let search ([m (unbox memory)])
(match m
[`() (error 'fetch-page (fmt-err addr memory))]
[`((page ,min ,max ,name ,vect) . ,rest-memory)
( copious " / fetch page " addr min max name vect )
; vect is too large to print, makes things hard to read.
( copious " / fetch page " addr min max name )
(if (and (<= min addr) (< addr max))
(values min max name vect)
(search rest-memory))]
[other (error 'fetch-page "unmatched ~a" m)])))
(define/override (interp-exp env)
(lambda (ast)
(define recur (interp-exp env))
(verbose "interp-exp" ast)
(match ast
[(GlobalValue 'free_ptr)
(unbox free_ptr)]
[(GlobalValue 'fromspace_end)
(unbox fromspace_end)]
[(Allocate l ty) (build-vector l (lambda a uninitialized))]
[(AllocateClosure l ty arity)
(define vec (build-vector (add1 l) (lambda a uninitialized)))
(vector-set! vec l `(arity ,arity))
vec]
[(AllocateProxy ty) (build-vector 3 (lambda a uninitialized))]
[(Collect size)
(unless (exact-nonnegative-integer? size)
(error 'interp-exp "invalid argument to collect in ~a" ast))
(void)]
[else ((super interp-exp env) ast)]
)))
(define/override (interp-program ast)
(verbose "interp-program" ast)
(match ast
[(Program info e)
((initialize!) runtime-config:rootstack-size
runtime-config:heap-size)
((interp-exp '()) e)]
))
))
(define interp-Lvec-prime-class (interp-Lvec-prime-mixin interp-Lvec-class))
(define (interp-Lvec-prime p)
(send (new interp-Lvec-prime-class) interp-program p))
| null | https://raw.githubusercontent.com/IUCompilerCourse/public-student-support-code/7ccbcb4cba9ed11a3efffd7a6c8c3da2f28072fb/interp-Lvec-prime.rkt | racket | The simulated global state of the program
define produces private fields
field is like define but public
after a call to collect we must guarantee there is enough
memory to allocate the requested block of memory
I am only advancing the end of the heap because we
are not reclaiming memory
Find the last address
accidentally use another region.
the same way. -Andre
The range is of valid addresses in memory are [start, stop)
Create a string containing
vect is too large to print, makes things hard to read. | #lang racket
(require "interp-Lvec.rkt")
(require "utilities.rkt")
(require (prefix-in runtime-config: "runtime-config.rkt"))
(provide interp-Lvec-prime interp-Lvec-prime-mixin interp-Lvec-prime-class)
(define (interp-Lvec-prime-mixin super-class)
(class super-class
(super-new)
(define memory (box '()))
(field [stack-size (runtime-config:rootstack-size)]
[heap-size (runtime-config:heap-size)]
[uninitialized 'uninitialized-value-from-memory]
[fromspace_begin (box uninitialized)]
[rootstack_end (box uninitialized)]
[free_ptr (box uninitialized)]
[fromspace_end (box uninitialized)]
[rootstack_begin (box uninitialized)]
[global-label-table
(make-immutable-hash
`((free_ptr . ,free_ptr)
(fromspace_begin . ,fromspace_begin)
(fromspace_end . ,fromspace_end)
(rootstack_begin . ,rootstack_begin)
(rootstack_end . ,rootstack_end)))])
(define/public (memory-read)
(lambda (addr)
(let-values ([(start stop name vect) (fetch-page addr)])
(let ([value (vector-ref vect (arithmetic-shift (- addr start) -3))])
(when (equal? value uninitialized)
(error 'interp-Lvec-class/memory-read
"read uninitialized memory at address ~s"
addr))
value))))
(define/public (memory-write!)
(lambda (addr value)
(let-values ([(start stop name vect) (fetch-page addr)])
(vector-set! vect (arithmetic-shift (- addr start) -3) value))))
(define/public (collect!)
(lambda (rootset bytes-requested)
(verbose "collect!" bytes-requested)
(let double-heap ([hs heap-size])
(if (< hs bytes-requested)
(double-heap (* 2 hs))
(let ((h-begin (allocate-page! 'fromspace hs)))
(set-box! fromspace_end (+ h-begin hs))
(set-box! free_ptr h-begin))))))
(define/public (initialize!)
(lambda (stack-length heap_length)
(verbose "initialize!")
(set-box! memory '())
(let* ([s-begin (allocate-page! 'rootstack stack-size)]
[h-begin (allocate-page! 'fromspace heap-size)])
(set-box! rootstack_begin s-begin)
(set-box! rootstack_end (+ s-begin stack-size))
(set-box! fromspace_begin h-begin)
(set-box! fromspace_end (+ h-begin heap-size))
(set-box! free_ptr h-begin))))
(define (allocate-page! name size)
(verbose "allocate-page!" name size)
(unless (and (fixnum? size)
(positive? size)
(= 0 (modulo size 8)))
(error 'allocate-page! "expected non-negative fixnum in ~a" size))
(define max-addr
(for/fold ([next 8])
([page (in-list (unbox memory))])
(match-let ([`(page ,_ ,stop ,_ ,_) page])
(max next stop))))
Allocate with a small pad 100 words so that it is n't likely to
The randomness is to dispell any reliance on interp always allocating
(define start-addr (+ max-addr 800))
(define stop-addr (+ start-addr size))
(define vect (make-vector (arithmetic-shift size -3) uninitialized))
(verbose "allocated" name start-addr stop-addr)
(set-box! memory (cons `(page ,start-addr ,stop-addr ,name ,vect)
(unbox memory)))
start-addr)
(define (free! addr)
(set-box! memory
(let loop ([memory (unbox memory)])
(match memory
[`() (error 'free "invalid address ~a, not currently allocated")]
[`(,(and page `(page ,ptr ,_ ,_ ,_)) . ,pages)
(if (= addr ptr)
pages
(cons page (loop pages)))]))))
(define (fetch-page addr)
(define (fmt-err addr memory)
(apply
string-append
(cons (format "address ~a out of bounds\n\tcurrent memory regions:\n"
addr)
(for/list ([page (in-list (unbox memory))])
(match-let ([`(page ,start ,stop ,name ,_) page])
(format "\t\t~a\t\t[~a,~a)\n" name start stop))))))
(unless (fixnum? addr)
(error 'fetch-page "invalid address ~a, not a fixnum" addr))
(unless (positive? addr)
(error 'fetch-page "invalid address ~a, negative" addr))
(unless (= 0 (modulo addr 8))
(error 'fetch-page "invalid address ~a, not 8-byte aligned" addr))
(let search ([m (unbox memory)])
(match m
[`() (error 'fetch-page (fmt-err addr memory))]
[`((page ,min ,max ,name ,vect) . ,rest-memory)
( copious " / fetch page " addr min max name vect )
( copious " / fetch page " addr min max name )
(if (and (<= min addr) (< addr max))
(values min max name vect)
(search rest-memory))]
[other (error 'fetch-page "unmatched ~a" m)])))
(define/override (interp-exp env)
(lambda (ast)
(define recur (interp-exp env))
(verbose "interp-exp" ast)
(match ast
[(GlobalValue 'free_ptr)
(unbox free_ptr)]
[(GlobalValue 'fromspace_end)
(unbox fromspace_end)]
[(Allocate l ty) (build-vector l (lambda a uninitialized))]
[(AllocateClosure l ty arity)
(define vec (build-vector (add1 l) (lambda a uninitialized)))
(vector-set! vec l `(arity ,arity))
vec]
[(AllocateProxy ty) (build-vector 3 (lambda a uninitialized))]
[(Collect size)
(unless (exact-nonnegative-integer? size)
(error 'interp-exp "invalid argument to collect in ~a" ast))
(void)]
[else ((super interp-exp env) ast)]
)))
(define/override (interp-program ast)
(verbose "interp-program" ast)
(match ast
[(Program info e)
((initialize!) runtime-config:rootstack-size
runtime-config:heap-size)
((interp-exp '()) e)]
))
))
(define interp-Lvec-prime-class (interp-Lvec-prime-mixin interp-Lvec-class))
(define (interp-Lvec-prime p)
(send (new interp-Lvec-prime-class) interp-program p))
|
74192a0f3fbd14ac6fe344ab0ab7a1924d4f4cb8419af92035130d12909a79b9 | RefactoringTools/HaRe | LetIn1_TokOut.hs | module LetIn1 where
data Tree a = Leaf a | Branch (Tree a) (Tree a)
fringe_global x = let
fringe :: Tree a -> [a]
fringe (Leaf x) = [x]
fringe (Branch left@(Leaf b_1) right)
= (fringe left) ++ (fringe right)
fringe (Branch left@(Branch b_1 b_2) right)
= (fringe left) ++ (fringe right)
fringe (Branch left right)
= (fringe left) ++ (fringe right)
in fringe x | null | https://raw.githubusercontent.com/RefactoringTools/HaRe/ef5dee64c38fb104e6e5676095946279fbce381c/old/testing/subIntroPattern/LetIn1_TokOut.hs | haskell | module LetIn1 where
data Tree a = Leaf a | Branch (Tree a) (Tree a)
fringe_global x = let
fringe :: Tree a -> [a]
fringe (Leaf x) = [x]
fringe (Branch left@(Leaf b_1) right)
= (fringe left) ++ (fringe right)
fringe (Branch left@(Branch b_1 b_2) right)
= (fringe left) ++ (fringe right)
fringe (Branch left right)
= (fringe left) ++ (fringe right)
in fringe x | |
388fa52bcdfb01383b2b88626bce376ffb29a369e62b3c4882a721be657fc9c1 | expipiplus1/vulkan | VK_EXT_depth_range_unrestricted.hs | {-# language CPP #-}
-- | = Name
--
-- VK_EXT_depth_range_unrestricted - device extension
--
-- == VK_EXT_depth_range_unrestricted
--
-- [__Name String__]
-- @VK_EXT_depth_range_unrestricted@
--
-- [__Extension Type__]
-- Device extension
--
-- [__Registered Extension Number__]
14
--
-- [__Revision__]
1
--
-- [__Extension and Version Dependencies__]
--
- Requires support for Vulkan 1.0
--
-- [__Contact__]
--
-
< -Docs/issues/new?body=[VK_EXT_depth_range_unrestricted ] @pdaniell - nv%0A*Here describe the issue or question you have about the VK_EXT_depth_range_unrestricted extension * >
--
-- == Other Extension Metadata
--
-- [__Last Modified Date__]
2017 - 06 - 22
--
-- [__Contributors__]
--
- , NVIDIA
--
- , NVIDIA
--
-- == Description
--
This extension removes the ' Vulkan . Core10.Pipeline . Viewport ' @minDepth@
and @maxDepth@ restrictions that the values must be between @0.0@ and
-- @1.0@, inclusive. It also removes the same restriction on
' Vulkan . Core10.Pipeline . PipelineDepthStencilStateCreateInfo '
-- @minDepthBounds@ and @maxDepthBounds@. Finally it removes the
-- restriction on the @depth@ value in
' Vulkan . Core10.CommandBufferBuilding . ClearDepthStencilValue ' .
--
-- == New Enum Constants
--
-- - 'EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME'
--
-- - 'EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION'
--
-- == Issues
--
1 ) How do ' Vulkan . Core10.Pipeline . Viewport ' @minDepth@ and @maxDepth@
-- values outside of the @0.0@ to @1.0@ range interact with
-- <-extensions/html/vkspec.html#vertexpostproc-clipping Primitive Clipping>?
--
-- __RESOLVED__: The behavior described in
-- <-extensions/html/vkspec.html#vertexpostproc-clipping Primitive Clipping>
-- still applies. If depth clamping is disabled the depth values are still
clipped to 0 ≤ zc ≤ wc before the viewport transform . If depth clamping
-- is enabled the above equation is ignored and the depth values are
instead clamped to the ' Vulkan . Core10.Pipeline . Viewport ' @minDepth@ and
-- @maxDepth@ values, which in the case of this extension can be outside of
-- the @0.0@ to @1.0@ range.
--
2 ) What happens if a resulting depth fragment is outside of the @0.0@ to
-- @1.0@ range and the depth buffer is fixed-point rather than
-- floating-point?
--
-- __RESOLVED__: This situation can also arise without this extension (when
-- fragment shaders replace depth values, for example), and this extension
-- does not change the behaviour, which is defined in the
-- <-extensions/html/vkspec.html#fragops-depth Depth Test>
-- section of the Fragment Operations chapter.
--
-- == Version History
--
- Revision 1 , 2017 - 06 - 22 ( )
--
-- - Internal revisions
--
-- == See Also
--
-- No cross-references are available
--
-- == Document Notes
--
-- For more information, see the
< -extensions/html/vkspec.html#VK_EXT_depth_range_unrestricted Vulkan Specification >
--
-- This page is a generated document. Fixes and changes should be made to
-- the generator scripts, not directly.
module Vulkan.Extensions.VK_EXT_depth_range_unrestricted ( EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION
, pattern EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION
, EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME
, pattern EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME
) where
import Data.String (IsString)
type EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION = 1
No documentation found for TopLevel " VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION "
pattern EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION :: forall a . Integral a => a
pattern EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION = 1
type EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME = "VK_EXT_depth_range_unrestricted"
No documentation found for TopLevel " VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "
pattern EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME = "VK_EXT_depth_range_unrestricted"
| null | https://raw.githubusercontent.com/expipiplus1/vulkan/b1e33d1031779b4740c279c68879d05aee371659/src/Vulkan/Extensions/VK_EXT_depth_range_unrestricted.hs | haskell | # language CPP #
| = Name
VK_EXT_depth_range_unrestricted - device extension
== VK_EXT_depth_range_unrestricted
[__Name String__]
@VK_EXT_depth_range_unrestricted@
[__Extension Type__]
Device extension
[__Registered Extension Number__]
[__Revision__]
[__Extension and Version Dependencies__]
[__Contact__]
== Other Extension Metadata
[__Last Modified Date__]
[__Contributors__]
== Description
@1.0@, inclusive. It also removes the same restriction on
@minDepthBounds@ and @maxDepthBounds@. Finally it removes the
restriction on the @depth@ value in
== New Enum Constants
- 'EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME'
- 'EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION'
== Issues
values outside of the @0.0@ to @1.0@ range interact with
<-extensions/html/vkspec.html#vertexpostproc-clipping Primitive Clipping>?
__RESOLVED__: The behavior described in
<-extensions/html/vkspec.html#vertexpostproc-clipping Primitive Clipping>
still applies. If depth clamping is disabled the depth values are still
is enabled the above equation is ignored and the depth values are
@maxDepth@ values, which in the case of this extension can be outside of
the @0.0@ to @1.0@ range.
@1.0@ range and the depth buffer is fixed-point rather than
floating-point?
__RESOLVED__: This situation can also arise without this extension (when
fragment shaders replace depth values, for example), and this extension
does not change the behaviour, which is defined in the
<-extensions/html/vkspec.html#fragops-depth Depth Test>
section of the Fragment Operations chapter.
== Version History
- Internal revisions
== See Also
No cross-references are available
== Document Notes
For more information, see the
This page is a generated document. Fixes and changes should be made to
the generator scripts, not directly. | 14
1
- Requires support for Vulkan 1.0
-
< -Docs/issues/new?body=[VK_EXT_depth_range_unrestricted ] @pdaniell - nv%0A*Here describe the issue or question you have about the VK_EXT_depth_range_unrestricted extension * >
2017 - 06 - 22
- , NVIDIA
- , NVIDIA
This extension removes the ' Vulkan . Core10.Pipeline . Viewport ' @minDepth@
and @maxDepth@ restrictions that the values must be between @0.0@ and
' Vulkan . Core10.Pipeline . PipelineDepthStencilStateCreateInfo '
' Vulkan . Core10.CommandBufferBuilding . ClearDepthStencilValue ' .
1 ) How do ' Vulkan . Core10.Pipeline . Viewport ' @minDepth@ and @maxDepth@
clipped to 0 ≤ zc ≤ wc before the viewport transform . If depth clamping
instead clamped to the ' Vulkan . Core10.Pipeline . Viewport ' @minDepth@ and
2 ) What happens if a resulting depth fragment is outside of the @0.0@ to
- Revision 1 , 2017 - 06 - 22 ( )
< -extensions/html/vkspec.html#VK_EXT_depth_range_unrestricted Vulkan Specification >
module Vulkan.Extensions.VK_EXT_depth_range_unrestricted ( EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION
, pattern EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION
, EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME
, pattern EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME
) where
import Data.String (IsString)
type EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION = 1
No documentation found for TopLevel " VK_EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION "
pattern EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION :: forall a . Integral a => a
pattern EXT_DEPTH_RANGE_UNRESTRICTED_SPEC_VERSION = 1
type EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME = "VK_EXT_depth_range_unrestricted"
No documentation found for TopLevel " VK_EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME "
pattern EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME :: forall a . (Eq a, IsString a) => a
pattern EXT_DEPTH_RANGE_UNRESTRICTED_EXTENSION_NAME = "VK_EXT_depth_range_unrestricted"
|
69fcada3467a784162b1e99d3807988c055b87c2d56d58570c255ba76cf5ed96 | iu-parfunc/lvars | k-CFA-lvish.hs | {-# LANGUAGE TypeSynonymInstances, FlexibleInstances #-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE DataKinds #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE RankNTypes #-}
{-# OPTIONS_GHC -fwarn-incomplete-patterns #-}
module Main where
-- Translated from Matt Might's article:
-- http://matt.might.net/articles/implementation-of-kcfa-and-0cfa/k-CFA.scm
-- Extended with less ad-hoc support for halting
import Control.Applicative (liftA2, liftA3)
import qualified Control.Monad.State as State
import Control.Monad
import Control.Exception (evaluate)
import Control.Concurrent
import System.IO.Unsafe (unsafePerformIO)
import System.Mem.StableName (makeStableName, hashStableName)
import qualified Data.Map as M
import qualified Data.Set as S
import Data.List ((\\))
import Debug.Trace
import Control.LVish
import Control.LVish.DeepFrz
import Control.LVish.Internal (liftIO)
import Control.LVish.SchedIdempotent (dbgLvl)
#define NONSCALABLE
#ifdef NONSCALABLE
import Data.LVar.PureSet as IS
import Data.LVar.PureMap as IM
#elif defined(HYBRID)
#warning "Using lockfree map with plain set"
import Data.LVar.PureSet as IS
import Data.LVar.SLMap as IM
#else
#warning "Building with genuine lock-free structures."
import Data.LVar.SLSet as IS
import Data.LVar.SLMap as IM
#endif
import Text.PrettyPrint as PP
import Text.PrettyPrint.GenericPretty (Out(doc,docPrec), Generic)
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit (Test(..))
import CFA_Common
define INPLACE
--------------------------------------------------------------------------------
-- Abstract State Space for analysis
--------------------------------------------------------------------------------
-- Abstract state space
-- | One abstract machine configuration: the call under evaluation, its
-- binding environment, the shared (LVar) store, and the abstract time.
data State s = State Call BEnv (Store s) Time
  deriving (Show, Eq, Generic)
-- A binding environment maps variables to addresses
-- (In Matt's example, this mapped to Addr, but I found this a bit redundant
-- since the Var in the Addr can be inferred, so I map straight to Time)
type BEnv = M.Map Var Time
-- A store maps addresses to denotable values
type Store s = IM.IMap Addr s (Denotable s)
-- | An abstact denotable value is a set of possible values
type Denotable s = IS.ISet s Value
For pure CPS , closures are the only kind of value
type Value = Clo
-- Closures pair a lambda-term with a binding environment that determines
-- the values of its free variables
data Clo = Closure (Label, [Var], Call) BEnv | HaltClosure | Arbitrary
deriving (Eq, Ord, Show, Generic)
Addresses can point to values in the store . In pure CPS , the only kind of addresses are bindings
type Addr = Bind
-- A binding is minted each time a variable gets bound to a value
data Bind = Binding Var Time
deriving (Eq, Ord, Show, Generic)
In k - CFA , time is a bounded memory of program history .
-- In particular, it is the last k call sites through which
-- the program has traversed.
type Time = [Label]
instance Show (Store s) where
show _ = "<Store>"
instance Show (IS.ISet s a) where
show _ = "<ISet>"
-- State Call BEnv Store Time
instance Ord (State s) where
compare (State c1 be1 s1 t1)
(State c2 be2 s2 t2)
= compare c1 c2 `andthen`
compare be1 be2 `andthen`
compare t1 t2 `andthen`
if s1 == s2
then EQ
else error "Ord State: states are equivalent except for Store... FINISHME"
-- else (unsafeName s1) `compare` (unsafeName s2)
andthen :: Ordering -> Ordering -> Ordering
andthen EQ b = b
andthen a _ = a
instance Out Clo
instance Out Bind
instance Out (M.Map Var Time) where
doc = docPrec 0
docPrec _ mp = doc (M.toList mp)
instance Out (IS.ISet s a) where
doc = docPrec 0
docPrec _ s = PP.text (show s)
instance Out (State s) where
doc = docPrec 0
docPrec _ (State call benv _str time) =
PP.text "State" <+> doc call
<+> doc benv
<+> doc time
--------------------------------------------------------------------------------
| Mutate a store to increase the set of values that an Addr may bind to .
storeInsert :: Addr -> Value -> Store s -> Par d s ()
storeInsert a v s = IM.modify s a newEmptySet (IS.insert v)
-- k-CFA parameters
tick :: Label -> Time -> Time
tick l t = take k_param (l:t)
-- k-CFA abstract interpreter
atomEval :: BEnv -> Store s -> Exp -> Par d s (Denotable s)
atomEval benv store Halt = single HaltClosure
atomEval benv store (Ref x) = case M.lookup x benv of
Nothing -> error $ "Variable unbound in BEnv: " ++ show x
Just t -> IM.getKey (Binding x t) store
atomEval benv _ (Lam l v c) = single (Closure (l, v, c) benv)
single :: Ord a => a -> Par d s (ISet s a)
single x = do
s <- newEmptySet
IS.insert x s
return s
-- | Extend the search from the current state to adjacent states. This function
mutates the set of states ( first parameter ) , adding the new states .
Next states
next seen st0@(State (Call l fun args) benv store time)
= -- trace ("next " ++ show (doc st0)) $
do
logStrLn ("next " ++ show (doc st0))
procs <- atomEval benv store fun
paramss <- mapM (atomEval benv store) args
let time' = tick l time
This applies to all elements evr added to the set object :
IS.forEach procs $ \ clo -> do
case clo of
HaltClosure -> return ()
Closure (_, formals, call') benv' -> do
let benv'' = foldr (\formal benv' -> M.insert formal time benv') benv' formals
allParamConfs <- IS.cartesianProds paramss
IS.forEach allParamConfs $ \ params -> do
-- Hmm... we need to create a new store for the extended bindings
#ifdef INPLACE
#warning "Activating INPLACE LVar joining of stores."
let store' = store
#else
Simply REMOVE this to implicitly
#endif
forM_ (formals `zip` params) $ \(formal, params) ->
storeInsert (Binding formal time) params store'
let newST = State call' benv'' store' time'
IS.insert newST seen -- Extending the seen set should spawn more work.
return ()
return ()
Arbitrary -> do
allParamConfs <- IS.cartesianProds paramss
IS.forEach allParamConfs $ \ params -> do
forM_ params $ \ param -> do
ms <- escape param store
case ms of
Just state' -> IS.insert state' seen
Nothing -> return ()
return ()
return ()
return ()
Extension of my own design to allow CFA in the presence of arbitrary values .
-- Similar to "sub-0CFA" where locations are inferred to either have either a single
-- lambda flow to them, no lambdas, or all lambdas
escape :: Value -> Store s -> Par d s (Maybe (State s))
-- If an arbitrary value from outside escapes we don't care:
escape Arbitrary _ = return Nothing
escape HaltClosure _ = return Nothing
escape (Closure (_l, formals, call) benv) store = do
(benv', store') <- fvStuff formals store
return $ Just $
(State call (benv `M.union` benv') store' [])
-- | Create an environment and store with empty/Arbitrary bindings.
Second argument is an output parameter .
fvStuff :: [Var] -> Store s -> Par d s (BEnv, Store s)
fvStuff xs store = do
forM_ xs $ \x -> do
IM.modify store (Binding x []) newEmptySet $ IS.insert Arbitrary
return (M.fromList [(x, []) | x <- xs], store)
--------------------------------------------------------------------------------
State - space exploration
--------------------------------------------------------------------------------
-- | Kick off the state space exploration by setting up a handler.
explore :: State s -> Par d s (IS.ISet s (State s))
explore initial = do
allSeen <- newEmptySet
-- liftIO$ putStrLn$ "Kicking off with an initial state: "++show (doc initial)
IS.insert initial allSeen
-- Feedback: recursively feed back new states into allSeen in parallel:
IS.forEach allSeen (next allSeen)
return allSeen
NB : Might 's dissertation ( Section 5.3.5 ) explains how we can apply widening here to
-- improve the worst case runtime from exponential to cubic: for an new state from the
-- work list, we must extract all seen states which match in every element *except* the
-- store. Then, join those seen stores together. If the potential store is a subset
-- of the seen ones then we can just loop. Otherwise, union the new store onto a global
-- "widening" store, update the global store with this one, and do abstract evalution on the state with the new sotre.
--------------------------------------------------------------------------------
-- User interface
--------------------------------------------------------------------------------
-- summarize :: S.Set State -> Par Store
summarize :: IS.ISet s (State s) -> Par d s (Store s)
summarize states = do
storeFin <- newEmptyMap
-- Note: a generic union operation could also handle this:
void$ IS.forEach states $ \ (State _ _ store_n _) -> do
void$ IM.forEach store_n $ \ key val -> do
void$ IS.forEach val $ \ elem -> do
IM.modify storeFin key newEmptySet $ \ st -> do
IS.insert elem st
return storeFin
( " Monovariant " because it throws away information we know about what time things arrive at )
monovariantStore :: Store s -> Par d s (IM.IMap Var s (IS.ISet s Exp))
monovariantStore store = do
mp <- newEmptyMap
IM.forEach store $ \ (Binding vr _throwaway) d -> do
IS.forEach d $ \ elm -> do
let elm' = monovariantValue elm
IM.modify mp vr newEmptySet (IS.insert elm')
return ()
return mp
where
monovariantValue :: Value -> Exp
monovariantValue (Closure (l, v, c) _) = Lam l v c
monovariantValue HaltClosure = Halt
monovariantValue Arbitrary = Ref "unknown"
| Perform a complete , analysis .
analyse :: Call -> IMap Var Frzn (IS.ISet Frzn Exp)
analyse e = runParThenFreeze $ par
where
par :: forall d s . Par d s (IM.IMap Var s (IS.ISet s Exp))
par = do
logStrLn " [kcfa] Starting program..."
newStore <- newEmptyMap
(benv, store) <- fvStuff (S.toList (fvsCall e)) newStore
let initState = State e benv store []
allStates <- explore initState
-- logStrLn$ " [kcfa] all states explored: "++show (length allStates)
finStore <- summarize allStates
logStrLn $ "Got back finStore: "++show(unsafeName finStore)
{-
IM.forEach finStore $ \ k x ->
logStrLn $ "---Member of final store: "++show(doc (k,x))
IS.forEach allStates $ \ x ->
logStrLn $ "---Member of allStates: "++show(doc x)
-}
r <- monovariantStore finStore
return r
-- | Get the free vars of an expression
fvsExp :: Exp -> S.Set Var
fvsExp Halt = S.empty
fvsExp (Ref x) = S.singleton x
fvsExp (Lam _ xs c) = fvsCall c S.\\ S.fromList xs
fvsCall :: Call -> S.Set Var
fvsCall (Call _ fun args) = fvsExp fun `S.union` S.unions (map fvsExp args)
------------------------------------------------------------------------------------------
FIXME ( issue # 18 ): : This compiles and runs , but a bunch of tests
-- fail and I'm not sure what's up
main :: IO ()
main = makeMain runExample
runExample :: UniqM Call -> IO ()
runExample example = do
let mp = analyse (runUniqM example)
let res = M.toList (fromIMap mp)
len <- evaluate (length res)
putStrLn$ "===== #results = "++show len ++ ", K is "++show k_param
when (dbgLvl >= 1) $
forM _ res $ \(x , ) - > do
forM_ res $ \(x, es) -> do
putStrLn (x ++ ":")
mapM_ (putStrLn . (" " ++) . show) (S.toList (IS.fromISet es))
# NOINLINE unsafeName #
unsafeName :: a -> Int
unsafeName x = unsafePerformIO $ do
sn <- makeStableName x
return (hashStableName sn)
| null | https://raw.githubusercontent.com/iu-parfunc/lvars/78e73c96a929aa75aa4f991d42b2f677849e433a/src/lvish-apps/cfa/k-CFA-lvish.hs | haskell | # LANGUAGE TypeSynonymInstances, FlexibleInstances #
# LANGUAGE RankNTypes #
Extended with less ad-hoc support for halting
------------------------------------------------------------------------------
Abstract State Space for analysis
------------------------------------------------------------------------------
Abstract state space
A binding environment maps variables to addresses
A store maps addresses to denotable values
| An abstact denotable value is a set of possible values
Closures pair a lambda-term with a binding environment that determines
the values of its free variables
A binding is minted each time a variable gets bound to a value
In particular, it is the last k call sites through which
the program has traversed.
State Call BEnv Store Time
else (unsafeName s1) `compare` (unsafeName s2)
------------------------------------------------------------------------------
k-CFA parameters
k-CFA abstract interpreter
| Extend the search from the current state to adjacent states. This function
trace ("next " ++ show (doc st0)) $
Hmm... we need to create a new store for the extended bindings
Extending the seen set should spawn more work.
Similar to "sub-0CFA" where locations are inferred to either have either a single
lambda flow to them, no lambdas, or all lambdas
If an arbitrary value from outside escapes we don't care:
| Create an environment and store with empty/Arbitrary bindings.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Kick off the state space exploration by setting up a handler.
liftIO$ putStrLn$ "Kicking off with an initial state: "++show (doc initial)
Feedback: recursively feed back new states into allSeen in parallel:
improve the worst case runtime from exponential to cubic: for an new state from the
work list, we must extract all seen states which match in every element *except* the
store. Then, join those seen stores together. If the potential store is a subset
of the seen ones then we can just loop. Otherwise, union the new store onto a global
"widening" store, update the global store with this one, and do abstract evalution on the state with the new sotre.
------------------------------------------------------------------------------
User interface
------------------------------------------------------------------------------
summarize :: S.Set State -> Par Store
Note: a generic union operation could also handle this:
logStrLn$ " [kcfa] all states explored: "++show (length allStates)
IM.forEach finStore $ \ k x ->
logStrLn $ "---Member of final store: "++show(doc (k,x))
IS.forEach allStates $ \ x ->
logStrLn $ "---Member of allStates: "++show(doc x)
| Get the free vars of an expression
----------------------------------------------------------------------------------------
fail and I'm not sure what's up | # LANGUAGE CPP #
# LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# OPTIONS_GHC -fwarn - incomplete - patterns #
module Main where
Translated from article : -of-kcfa-and-0cfa/k-CFA.scm
import Control.Applicative (liftA2, liftA3)
import qualified Control.Monad.State as State
import Control.Monad
import Control.Exception (evaluate)
import Control.Concurrent
import System.IO.Unsafe (unsafePerformIO)
import System.Mem.StableName (makeStableName, hashStableName)
import qualified Data.Map as M
import qualified Data.Set as S
import Data.List ((\\))
import Debug.Trace
import Control.LVish
import Control.LVish.DeepFrz
import Control.LVish.Internal (liftIO)
import Control.LVish.SchedIdempotent (dbgLvl)
#define NONSCALABLE
#ifdef NONSCALABLE
import Data.LVar.PureSet as IS
import Data.LVar.PureMap as IM
#elif defined(HYBRID)
#warning "Using lockfree map with plain set"
import Data.LVar.PureSet as IS
import Data.LVar.SLMap as IM
#else
#warning "Building with genuine lock-free structures."
import Data.LVar.SLSet as IS
import Data.LVar.SLMap as IM
#endif
import Text.PrettyPrint as PP
import Text.PrettyPrint.GenericPretty (Out(doc,docPrec), Generic)
import Test.Framework
import Test.Framework.Providers.HUnit
import Test.HUnit (Test(..))
import CFA_Common
define INPLACE
data State s = State Call BEnv (Store s) Time
deriving (Show, Eq, Generic)
( In 's example , this mapped to Addr , but I found this a bit redundant
since the Var in the Addr can be inferred , so I map straight to Time )
type BEnv = M.Map Var Time
type Store s = IM.IMap Addr s (Denotable s)
type Denotable s = IS.ISet s Value
For pure CPS , closures are the only kind of value
type Value = Clo
data Clo = Closure (Label, [Var], Call) BEnv | HaltClosure | Arbitrary
deriving (Eq, Ord, Show, Generic)
Addresses can point to values in the store . In pure CPS , the only kind of addresses are bindings
type Addr = Bind
data Bind = Binding Var Time
deriving (Eq, Ord, Show, Generic)
In k - CFA , time is a bounded memory of program history .
type Time = [Label]
instance Show (Store s) where
show _ = "<Store>"
instance Show (IS.ISet s a) where
show _ = "<ISet>"
instance Ord (State s) where
compare (State c1 be1 s1 t1)
(State c2 be2 s2 t2)
= compare c1 c2 `andthen`
compare be1 be2 `andthen`
compare t1 t2 `andthen`
if s1 == s2
then EQ
else error "Ord State: states are equivalent except for Store... FINISHME"
andthen :: Ordering -> Ordering -> Ordering
andthen EQ b = b
andthen a _ = a
instance Out Clo
instance Out Bind
instance Out (M.Map Var Time) where
doc = docPrec 0
docPrec _ mp = doc (M.toList mp)
instance Out (IS.ISet s a) where
doc = docPrec 0
docPrec _ s = PP.text (show s)
instance Out (State s) where
doc = docPrec 0
docPrec _ (State call benv _str time) =
PP.text "State" <+> doc call
<+> doc benv
<+> doc time
| Mutate a store to increase the set of values that an Addr may bind to .
storeInsert :: Addr -> Value -> Store s -> Par d s ()
storeInsert a v s = IM.modify s a newEmptySet (IS.insert v)
tick :: Label -> Time -> Time
tick l t = take k_param (l:t)
atomEval :: BEnv -> Store s -> Exp -> Par d s (Denotable s)
atomEval benv store Halt = single HaltClosure
atomEval benv store (Ref x) = case M.lookup x benv of
Nothing -> error $ "Variable unbound in BEnv: " ++ show x
Just t -> IM.getKey (Binding x t) store
atomEval benv _ (Lam l v c) = single (Closure (l, v, c) benv)
single :: Ord a => a -> Par d s (ISet s a)
single x = do
s <- newEmptySet
IS.insert x s
return s
mutates the set of states ( first parameter ) , adding the new states .
Next states
next seen st0@(State (Call l fun args) benv store time)
do
logStrLn ("next " ++ show (doc st0))
procs <- atomEval benv store fun
paramss <- mapM (atomEval benv store) args
let time' = tick l time
This applies to all elements evr added to the set object :
IS.forEach procs $ \ clo -> do
case clo of
HaltClosure -> return ()
Closure (_, formals, call') benv' -> do
let benv'' = foldr (\formal benv' -> M.insert formal time benv') benv' formals
allParamConfs <- IS.cartesianProds paramss
IS.forEach allParamConfs $ \ params -> do
#ifdef INPLACE
#warning "Activating INPLACE LVar joining of stores."
let store' = store
#else
Simply REMOVE this to implicitly
#endif
forM_ (formals `zip` params) $ \(formal, params) ->
storeInsert (Binding formal time) params store'
let newST = State call' benv'' store' time'
return ()
return ()
Arbitrary -> do
allParamConfs <- IS.cartesianProds paramss
IS.forEach allParamConfs $ \ params -> do
forM_ params $ \ param -> do
ms <- escape param store
case ms of
Just state' -> IS.insert state' seen
Nothing -> return ()
return ()
return ()
return ()
Extension of my own design to allow CFA in the presence of arbitrary values .
escape :: Value -> Store s -> Par d s (Maybe (State s))
escape Arbitrary _ = return Nothing
escape HaltClosure _ = return Nothing
escape (Closure (_l, formals, call) benv) store = do
(benv', store') <- fvStuff formals store
return $ Just $
(State call (benv `M.union` benv') store' [])
Second argument is an output parameter .
fvStuff :: [Var] -> Store s -> Par d s (BEnv, Store s)
fvStuff xs store = do
forM_ xs $ \x -> do
IM.modify store (Binding x []) newEmptySet $ IS.insert Arbitrary
return (M.fromList [(x, []) | x <- xs], store)
State - space exploration
explore :: State s -> Par d s (IS.ISet s (State s))
explore initial = do
allSeen <- newEmptySet
IS.insert initial allSeen
IS.forEach allSeen (next allSeen)
return allSeen
NB : Might 's dissertation ( Section 5.3.5 ) explains how we can apply widening here to
summarize :: IS.ISet s (State s) -> Par d s (Store s)
summarize states = do
storeFin <- newEmptyMap
void$ IS.forEach states $ \ (State _ _ store_n _) -> do
void$ IM.forEach store_n $ \ key val -> do
void$ IS.forEach val $ \ elem -> do
IM.modify storeFin key newEmptySet $ \ st -> do
IS.insert elem st
return storeFin
( " Monovariant " because it throws away information we know about what time things arrive at )
monovariantStore :: Store s -> Par d s (IM.IMap Var s (IS.ISet s Exp))
monovariantStore store = do
mp <- newEmptyMap
IM.forEach store $ \ (Binding vr _throwaway) d -> do
IS.forEach d $ \ elm -> do
let elm' = monovariantValue elm
IM.modify mp vr newEmptySet (IS.insert elm')
return ()
return mp
where
monovariantValue :: Value -> Exp
monovariantValue (Closure (l, v, c) _) = Lam l v c
monovariantValue HaltClosure = Halt
monovariantValue Arbitrary = Ref "unknown"
| Perform a complete , analysis .
analyse :: Call -> IMap Var Frzn (IS.ISet Frzn Exp)
analyse e = runParThenFreeze $ par
where
par :: forall d s . Par d s (IM.IMap Var s (IS.ISet s Exp))
par = do
logStrLn " [kcfa] Starting program..."
newStore <- newEmptyMap
(benv, store) <- fvStuff (S.toList (fvsCall e)) newStore
let initState = State e benv store []
allStates <- explore initState
finStore <- summarize allStates
logStrLn $ "Got back finStore: "++show(unsafeName finStore)
r <- monovariantStore finStore
return r
fvsExp :: Exp -> S.Set Var
fvsExp Halt = S.empty
fvsExp (Ref x) = S.singleton x
fvsExp (Lam _ xs c) = fvsCall c S.\\ S.fromList xs
fvsCall :: Call -> S.Set Var
fvsCall (Call _ fun args) = fvsExp fun `S.union` S.unions (map fvsExp args)
FIXME ( issue # 18 ): : This compiles and runs , but a bunch of tests
main :: IO ()
main = makeMain runExample
runExample :: UniqM Call -> IO ()
runExample example = do
let mp = analyse (runUniqM example)
let res = M.toList (fromIMap mp)
len <- evaluate (length res)
putStrLn$ "===== #results = "++show len ++ ", K is "++show k_param
when (dbgLvl >= 1) $
forM _ res $ \(x , ) - > do
forM_ res $ \(x, es) -> do
putStrLn (x ++ ":")
mapM_ (putStrLn . (" " ++) . show) (S.toList (IS.fromISet es))
# NOINLINE unsafeName #
unsafeName :: a -> Int
unsafeName x = unsafePerformIO $ do
sn <- makeStableName x
return (hashStableName sn)
|
912da72cb8b6e2047450d75675d3a96ea77f841e34d786c4f99382fe2e2ef046 | actondev/s7-reaper | gen.scm | (ns rpr.actions.gen
:doc "Generating functions from the action list
The rpr.actions.item/track/time-selection are generated from here"
:require ((rpr)
(aod.c.string :as s)))
(define (gen-file the-ns file regex)
(call-with-output-file file
(lambda (out)
;; paredit freaks out with multi-line strings
(format out ";; auto-generated file. see gen.scm\n")
(format out "(ns ~A\n :require ((rpr) (rpr.common)))\n(define cmd rpr.common/cmd) ;; runs either native actions (with number id) or named actions (eg SWS)\n\n" the-ns)
(let loop ((i 1))
(let* ((id+name (rpr/CF_EnumerateActions 0 i))
(id (car id+name))
(name (cadr id+name)))
(if (> id 0)
(begin
(when (s/search name regex)
(let ((fun-name
;; TODO implement -> and ->> clojure macros
(s/lowercase
(s/replace
(s/replace
(s/replace (s/match-at 1) "[ /]" "-")
"[(]" "{")
"[)]" "}"))))
(if-let (named-id (rpr/ReverseNamedCommandLookup id))
;; named command: appending underscore before the returned name id
(format out "(define (~A) (cmd \"_~A\"))\n" fun-name named-id)
;; native action
(format out "(define (~A) (cmd ~A))\n" fun-name id)
)))
(loop (inc i)))))))))
(comment
(rpr/ReverseNamedCommandLookup 53001)
(gen-file
"rpr.actions.time-selection"
"src/scheme/rpr/actions/time_selection.scm"
"^Time selection: ([^0-9]+)$")
(gen-file
"rpr.actions.track"
"src/scheme/rpr/actions/track.scm"
"^Track: ([^0-9]+)$")
(gen-file
"rpr.actions.item"
"src/scheme/rpr/actions/item.scm"
"^Item: ([^0-9]+)$")
(gen-file
"rpr.actions.sws-track"
"src/scheme/rpr/actions/sws-track.scm"
"^SWS: ((?!.*[0-9']).*track.*)$")
(gen-file
"rpr.actions.sws-item"
"src/scheme/rpr/actions/sws-item.scm"
"^SWS: ((?!.*[0-9']).*item.*)$")
(comment
(s/lowercase
(s/replace
(s/replace
(s/replace "SWS: Set selected track(s)/item(s) to one random color"
"[ /]" "-")
"[(]" "{")
"[)]" "}"))
)
)
| null | https://raw.githubusercontent.com/actondev/s7-reaper/4e407e72c3e08b5a984b351fd2c355f0bb560e8e/src/scheme/rpr/actions/gen.scm | scheme | paredit freaks out with multi-line strings
TODO implement -> and ->> clojure macros
named command: appending underscore before the returned name id
native action | (ns rpr.actions.gen
:doc "Generating functions from the action list
The rpr.actions.item/track/time-selection are generated from here"
:require ((rpr)
(aod.c.string :as s)))
(define (gen-file the-ns file regex)
(call-with-output-file file
(lambda (out)
(format out ";; auto-generated file. see gen.scm\n")
(format out "(ns ~A\n :require ((rpr) (rpr.common)))\n(define cmd rpr.common/cmd) ;; runs either native actions (with number id) or named actions (eg SWS)\n\n" the-ns)
(let loop ((i 1))
(let* ((id+name (rpr/CF_EnumerateActions 0 i))
(id (car id+name))
(name (cadr id+name)))
(if (> id 0)
(begin
(when (s/search name regex)
(let ((fun-name
(s/lowercase
(s/replace
(s/replace
(s/replace (s/match-at 1) "[ /]" "-")
"[(]" "{")
"[)]" "}"))))
(if-let (named-id (rpr/ReverseNamedCommandLookup id))
(format out "(define (~A) (cmd \"_~A\"))\n" fun-name named-id)
(format out "(define (~A) (cmd ~A))\n" fun-name id)
)))
(loop (inc i)))))))))
(comment
(rpr/ReverseNamedCommandLookup 53001)
(gen-file
"rpr.actions.time-selection"
"src/scheme/rpr/actions/time_selection.scm"
"^Time selection: ([^0-9]+)$")
(gen-file
"rpr.actions.track"
"src/scheme/rpr/actions/track.scm"
"^Track: ([^0-9]+)$")
(gen-file
"rpr.actions.item"
"src/scheme/rpr/actions/item.scm"
"^Item: ([^0-9]+)$")
(gen-file
"rpr.actions.sws-track"
"src/scheme/rpr/actions/sws-track.scm"
"^SWS: ((?!.*[0-9']).*track.*)$")
(gen-file
"rpr.actions.sws-item"
"src/scheme/rpr/actions/sws-item.scm"
"^SWS: ((?!.*[0-9']).*item.*)$")
(comment
(s/lowercase
(s/replace
(s/replace
(s/replace "SWS: Set selected track(s)/item(s) to one random color"
"[ /]" "-")
"[(]" "{")
"[)]" "}"))
)
)
|
5a407c39919994ab699baff8718c93cd2814d42a17a2d6eeb3c90060a0a2f71c | c-cube/tiny_httpd | Tiny_httpd_camlzip.mli |
val middleware :
?compress_above:int ->
?buf_size:int -> unit ->
Tiny_httpd_server.Middleware.t
* Middleware responsible for deflate compression / decompression .
@since 0.11
@since 0.11 *)
val setup :
?compress_above:int ->
?buf_size:int -> Tiny_httpd_server.t -> unit
(** Install middleware for tiny_httpd to be able to encode/decode
compressed streams
@param compress_above threshold above with string responses are compressed
@param buf_size size of the underlying buffer for compression/decompression *)
| null | https://raw.githubusercontent.com/c-cube/tiny_httpd/533a42a661d458de5f4819773eb8133bc582da90/src/camlzip/Tiny_httpd_camlzip.mli | ocaml | * Install middleware for tiny_httpd to be able to encode/decode
compressed streams
@param compress_above threshold above with string responses are compressed
@param buf_size size of the underlying buffer for compression/decompression |
val middleware :
?compress_above:int ->
?buf_size:int -> unit ->
Tiny_httpd_server.Middleware.t
* Middleware responsible for deflate compression / decompression .
@since 0.11
@since 0.11 *)
val setup :
?compress_above:int ->
?buf_size:int -> Tiny_httpd_server.t -> unit
|
8bd9b32539aed75c3f2fa1bbded06fa40f28048307dd2b38926d6b2de7aa74b5 | jitlogic/micrometer-clj | newrelic.clj | (ns io.resonant.micrometer.newrelic
(:require
[io.resonant.micrometer :refer [create-registry to-duration]])
(:import (io.micrometer.newrelic NewRelicMeterRegistry NewRelicConfig ClientProviderType)
(io.micrometer.core.instrument.step StepRegistryConfig)
(io.micrometer.core.instrument Clock)))
(defmethod create-registry :newrelic [cfg]
(NewRelicMeterRegistry.
(reify
NewRelicConfig
(get [_ _] nil)
(meterNameEventTypeEnabled [_] (:meter-name-event-type-enabled? cfg false))
(eventType [_] (:event-type cfg "MicrometerSample"))
(clientProviderType [_] (ClientProviderType/valueOf (.toUpperCase (name (:client-provider-type cfg :INSIGHTS_API)))))
(apiKey [_] (:api-key cfg))
(accountId [_] (:account-id cfg))
(uri [_] (:url cfg "-collector.newrelic.com"))
StepRegistryConfig
(step [_] (to-duration (:step cfg 60000)))
(enabled [_] (:enabled? cfg true))
(numThreads [_] (:num-threads cfg 2))
(connectTimeout [_] (to-duration (:connect-timeout cfg 1000)))
(readTimeout [_] (to-duration (:read-timeout cfg 10000)))
(batchSize [_] (:batch-size cfg 10000)))
(Clock/SYSTEM)))
| null | https://raw.githubusercontent.com/jitlogic/micrometer-clj/16918a1ef67f7e7e422a58ccd58742a1496cba26/src/io/resonant/micrometer/newrelic.clj | clojure | (ns io.resonant.micrometer.newrelic
(:require
[io.resonant.micrometer :refer [create-registry to-duration]])
(:import (io.micrometer.newrelic NewRelicMeterRegistry NewRelicConfig ClientProviderType)
(io.micrometer.core.instrument.step StepRegistryConfig)
(io.micrometer.core.instrument Clock)))
(defmethod create-registry :newrelic [cfg]
(NewRelicMeterRegistry.
(reify
NewRelicConfig
(get [_ _] nil)
(meterNameEventTypeEnabled [_] (:meter-name-event-type-enabled? cfg false))
(eventType [_] (:event-type cfg "MicrometerSample"))
(clientProviderType [_] (ClientProviderType/valueOf (.toUpperCase (name (:client-provider-type cfg :INSIGHTS_API)))))
(apiKey [_] (:api-key cfg))
(accountId [_] (:account-id cfg))
(uri [_] (:url cfg "-collector.newrelic.com"))
StepRegistryConfig
(step [_] (to-duration (:step cfg 60000)))
(enabled [_] (:enabled? cfg true))
(numThreads [_] (:num-threads cfg 2))
(connectTimeout [_] (to-duration (:connect-timeout cfg 1000)))
(readTimeout [_] (to-duration (:read-timeout cfg 10000)))
(batchSize [_] (:batch-size cfg 10000)))
(Clock/SYSTEM)))
| |
58f0722e72e63e53c4eb7c39948bd2f3ff38f7f9f4a40483308c7a1b97db493f | bugsbio/lein-tern | tern.clj | (ns leiningen.tern
(:require [tern.config :as config]
[tern.commands :as c]
[tern.log :as log]
[tern.version :refer [tern-version]]
[leiningen.core.project :as project]
[leiningen.core.eval :refer [eval-in-project]]))
(defn
^{:subtasks
[#'c/init
#'c/config
#'c/version
#'c/migrate
#'c/pending
#'c/rollback
#'c/reset
#'c/new-migration]}
tern
"Create, run, and roll back database migrations.
For the lazy among you, the commands `migrate` and `new-migration` can be
called without using the `tern` prefix."
([project]
(log/info "The" (log/highlight "tern") "task requires a subcommand.")
(log/info "Run" (log/highlight "lein help tern") "for a list of available commands."))
([project cmd & args]
(let [user-config-fn (or (-> project :tern :init) 'tern.user/config)
tern-profile {:dependencies [['lein-tern tern-version]]}]
(eval-in-project
(project/merge-profiles project [tern-profile])
`(do
(require '[tern.commands :as c])
(require '[tern.config :as config])
(require '~(symbol (namespace user-config-fn)))
(let [config# (config/init ~(:tern project) (~user-config-fn))]
(case ~cmd
"init" (c/init config#)
"config" (c/config config#)
"version" (c/version config#)
"migrate" (c/migrate config# ~(first args))
"pending" (c/pending config# ~(first args))
"rollback" (c/rollback config#)
"reset" (c/reset config#)
"new-migration" (c/new-migration config# ~(first args))))
(System/exit 0))))))
| null | https://raw.githubusercontent.com/bugsbio/lein-tern/1496cc11fb343393416cc62fd874cd2fb634b2f3/src/leiningen/tern.clj | clojure | (ns leiningen.tern
(:require [tern.config :as config]
[tern.commands :as c]
[tern.log :as log]
[tern.version :refer [tern-version]]
[leiningen.core.project :as project]
[leiningen.core.eval :refer [eval-in-project]]))
(defn
^{:subtasks
[#'c/init
#'c/config
#'c/version
#'c/migrate
#'c/pending
#'c/rollback
#'c/reset
#'c/new-migration]}
tern
"Create, run, and roll back database migrations.
For the lazy among you, the commands `migrate` and `new-migration` can be
called without using the `tern` prefix."
([project]
(log/info "The" (log/highlight "tern") "task requires a subcommand.")
(log/info "Run" (log/highlight "lein help tern") "for a list of available commands."))
([project cmd & args]
(let [user-config-fn (or (-> project :tern :init) 'tern.user/config)
tern-profile {:dependencies [['lein-tern tern-version]]}]
(eval-in-project
(project/merge-profiles project [tern-profile])
`(do
(require '[tern.commands :as c])
(require '[tern.config :as config])
(require '~(symbol (namespace user-config-fn)))
(let [config# (config/init ~(:tern project) (~user-config-fn))]
(case ~cmd
"init" (c/init config#)
"config" (c/config config#)
"version" (c/version config#)
"migrate" (c/migrate config# ~(first args))
"pending" (c/pending config# ~(first args))
"rollback" (c/rollback config#)
"reset" (c/reset config#)
"new-migration" (c/new-migration config# ~(first args))))
(System/exit 0))))))
| |
5e5b5c79c1994def1014a246354339d2a1bcf44d16a7350aa4f0cbd1723827ef | walmartlabs/lacinia | no_unused_fragments.clj | Copyright ( c ) 2017 - present Walmart , Inc.
;
Licensed under the Apache License , Version 2.0 ( the " License " )
; you may not use this file except in compliance with the License.
; You may obtain a copy of the License at
;
; -2.0
;
; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
; See the License for the specific language governing permissions and
; limitations under the License.
(ns com.walmartlabs.lacinia.validation.no-unused-fragments
{:no-doc true}
(:require
[clojure.set :as set]
[com.walmartlabs.lacinia.internal-utils :refer [q cond-let]])
(:import (clojure.lang PersistentQueue)))
(defn ^:private all-fragments-used
[fragments root-selections]
(loop [result (transient #{})
queue (-> (PersistentQueue/EMPTY)
(into root-selections)
(into (vals fragments)))]
(cond-let
:let [selection (peek queue)]
(nil? selection)
(persistent! result)
:let [{:keys [fragment-name]} selection
queue' (pop queue)]
;; Named fragments do not, themselves, have sub-selections
fragment-name
(recur (conj! result fragment-name) queue')
:let [sub-selections (:selections selection)]
(seq sub-selections)
(recur result (into queue' sub-selections))
:else
(recur result queue'))))
(defn no-unused-fragments
"Validates if all fragment definitions are spread
within operations, or spread within other fragments
spread within operations."
[prepared-query]
(let [{:keys [fragments selections]} prepared-query
f-locations (into {} (map (fn [[f-name {location :location}]]
{f-name location})
fragments))
f-definitions (set (keys fragments))
f-names-used (all-fragments-used fragments selections)]
(for [unused-f-definition (set/difference f-definitions f-names-used)]
{:message (format "Fragment %s is never used."
(q unused-f-definition))
:locations [(unused-f-definition f-locations)]})))
| null | https://raw.githubusercontent.com/walmartlabs/lacinia/e2a0da69f1b31b947b1245d8071601ad9ee3d84b/src/com/walmartlabs/lacinia/validation/no_unused_fragments.clj | clojure |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
Named fragments do not, themselves, have sub-selections | Copyright ( c ) 2017 - present Walmart , Inc.
Licensed under the Apache License , Version 2.0 ( the " License " )
distributed under the License is distributed on an " AS IS " BASIS ,
(ns com.walmartlabs.lacinia.validation.no-unused-fragments
{:no-doc true}
(:require
[clojure.set :as set]
[com.walmartlabs.lacinia.internal-utils :refer [q cond-let]])
(:import (clojure.lang PersistentQueue)))
(defn ^:private all-fragments-used
[fragments root-selections]
(loop [result (transient #{})
queue (-> (PersistentQueue/EMPTY)
(into root-selections)
(into (vals fragments)))]
(cond-let
:let [selection (peek queue)]
(nil? selection)
(persistent! result)
:let [{:keys [fragment-name]} selection
queue' (pop queue)]
fragment-name
(recur (conj! result fragment-name) queue')
:let [sub-selections (:selections selection)]
(seq sub-selections)
(recur result (into queue' sub-selections))
:else
(recur result queue'))))
(defn no-unused-fragments
"Validates if all fragment definitions are spread
within operations, or spread within other fragments
spread within operations."
[prepared-query]
(let [{:keys [fragments selections]} prepared-query
f-locations (into {} (map (fn [[f-name {location :location}]]
{f-name location})
fragments))
f-definitions (set (keys fragments))
f-names-used (all-fragments-used fragments selections)]
(for [unused-f-definition (set/difference f-definitions f-names-used)]
{:message (format "Fragment %s is never used."
(q unused-f-definition))
:locations [(unused-f-definition f-locations)]})))
|
d6532623a2c9cc9549570264713d3a17ca8cd51565a68341d50c1d35d88d6c3d | wlitwin/graphv | Gv.ml | include Graphv_webgl
open Js_of_ocaml
let canvas : Dom_html.canvasElement Js.t =
Js.Unsafe.coerce (Dom_html.getElementById_exn "canvas")
;;
let ctx_webgl : WebGL.renderingContext Js.t =
WebGL.getContextWithAttributes canvas
(Js.Unsafe.coerce (object%js
val antialias = Js._false
val stencil = Js._true
end))
|> Js.Opt.to_option
|> function
| None -> failwith "Expected context"
| Some ctx -> ctx
;;
let _ =
Js.Unsafe.global##.wctx := ctx_webgl
| null | https://raw.githubusercontent.com/wlitwin/graphv/1416fe1daaedc411e8b54e5458d6005d2d3678b5/examples/web/Gv.ml | ocaml | include Graphv_webgl
open Js_of_ocaml
let canvas : Dom_html.canvasElement Js.t =
Js.Unsafe.coerce (Dom_html.getElementById_exn "canvas")
;;
let ctx_webgl : WebGL.renderingContext Js.t =
WebGL.getContextWithAttributes canvas
(Js.Unsafe.coerce (object%js
val antialias = Js._false
val stencil = Js._true
end))
|> Js.Opt.to_option
|> function
| None -> failwith "Expected context"
| Some ctx -> ctx
;;
let _ =
Js.Unsafe.global##.wctx := ctx_webgl
| |
78e402696b14a7615009ba1e7f3ba13a9a98cdbfee70d3556e0a60ad6b6a4ece | GillianPlatform/Gillian | LCmd.ml | module SSubst = Gillian.Symbolic.Subst
module Expr = Gillian.Gil_syntax.Expr
module Formula = Gillian.Gil_syntax.Formula
module Type = Gillian.Gil_syntax.Type
(***************************************************************)
(** Logic Commmands **)
(***************************************************************)
* { b JSIL logic commands } .
type t =
| If of Expr.t * t list * t list (** If-then-else *)
| Branch of Formula.t (** branching on a FO formual *)
| Macro of string * Expr.t list (** Macro *)
| Assert of Formula.t (** Assert *)
| Assume of Formula.t (** Assume *)
| AssumeType of Expr.t * Type.t (** Assume Type *)
| FreshSVar of string
| SL of SLCmd.t
let rec pp fmt lcmd =
let pp_list = Fmt.list ~sep:Fmt.semi pp in
let pp_params = Fmt.list ~sep:Fmt.comma Expr.pp in
match lcmd with
| If (le, then_lcmds, else_lcmds) ->
if List.length else_lcmds > 0 then
Fmt.pf fmt
"if (%a) @[<hov 2>then {@\n%a@]@\n@[<hov 2>} else {\n%a@]@\n}" Expr.pp
le pp_list then_lcmds pp_list else_lcmds
else
Fmt.pf fmt "if (%a) @[<hov 2>then {@\n%a@]@\n}" Expr.pp le pp_list
then_lcmds
| Branch fo -> Fmt.pf fmt "branch (%a)" Formula.pp fo
| Macro (name, lparams) -> Fmt.pf fmt "%s(%a)" name pp_params lparams
| Assert a -> Fmt.pf fmt "assert (%a)" Formula.pp a
| Assume a -> Fmt.pf fmt "assume (%a)" Formula.pp a
| FreshSVar x -> Fmt.pf fmt "%s := fresh_svar()" x
| SL sl_cmd -> SLCmd.pp fmt sl_cmd
| AssumeType (e, t) ->
Fmt.pf fmt "assume_type (%a, %s)" Expr.pp e (Type.str t)
| null | https://raw.githubusercontent.com/GillianPlatform/Gillian/6547ef923404309c1d709ff5debcabff2035f9ca/Gillian-JS/lib/JSIL/LCmd.ml | ocaml | *************************************************************
* Logic Commmands *
*************************************************************
* If-then-else
* branching on a FO formual
* Macro
* Assert
* Assume
* Assume Type | module SSubst = Gillian.Symbolic.Subst
module Expr = Gillian.Gil_syntax.Expr
module Formula = Gillian.Gil_syntax.Formula
module Type = Gillian.Gil_syntax.Type
* { b JSIL logic commands } .
type t =
| FreshSVar of string
| SL of SLCmd.t
let rec pp fmt lcmd =
let pp_list = Fmt.list ~sep:Fmt.semi pp in
let pp_params = Fmt.list ~sep:Fmt.comma Expr.pp in
match lcmd with
| If (le, then_lcmds, else_lcmds) ->
if List.length else_lcmds > 0 then
Fmt.pf fmt
"if (%a) @[<hov 2>then {@\n%a@]@\n@[<hov 2>} else {\n%a@]@\n}" Expr.pp
le pp_list then_lcmds pp_list else_lcmds
else
Fmt.pf fmt "if (%a) @[<hov 2>then {@\n%a@]@\n}" Expr.pp le pp_list
then_lcmds
| Branch fo -> Fmt.pf fmt "branch (%a)" Formula.pp fo
| Macro (name, lparams) -> Fmt.pf fmt "%s(%a)" name pp_params lparams
| Assert a -> Fmt.pf fmt "assert (%a)" Formula.pp a
| Assume a -> Fmt.pf fmt "assume (%a)" Formula.pp a
| FreshSVar x -> Fmt.pf fmt "%s := fresh_svar()" x
| SL sl_cmd -> SLCmd.pp fmt sl_cmd
| AssumeType (e, t) ->
Fmt.pf fmt "assume_type (%a, %s)" Expr.pp e (Type.str t)
|
903aa473af488b620913d3c1a67e427b878dca199c6896c0916735f362daf463 | yuriy-chumak/ol | window.scm | (define-library (lib gtk-3 window)
(export
GtkWindow*
gtk_window_set_title
gtk_window_set_default_size
gtk_window_get_size
gtk_window_get_default_size
gtk_window_resize
gtk_window_present
)
(import
(scheme core)
(otus ffi)
(lib gtk-3 gtk)
(lib gtk-3 widget))
(begin
(define GtkWindow* type-vptr)
(define gtk_window_set_title (GTK3 void "gtk_window_set_title" GtkWindow* type-string))
(define gtk_window_set_default_size (GTK3 void "gtk_window_set_default_size" GtkWindow* gint gint))
(define gtk_window_get_size (GTK3 void "gtk_window_get_size" GtkWindow* (fft& gint) (fft& gint)))
(define gtk_window_get_default_size (GTK3 void "gtk_window_get_default_size" GtkWindow* (fft& gint) (fft& gint)))
(define gtk_window_resize (GTK3 void "gtk_window_resize" GtkWindow* gint gint))
(define gtk_window_present (GTK3 void "gtk_window_present" GtkWindow*))
))
| null | https://raw.githubusercontent.com/yuriy-chumak/ol/7beb5e2a58bf25201cc34daa42efa1c179f893e2/libraries/lib/gtk-3/window.scm | scheme | (define-library (lib gtk-3 window)
(export
GtkWindow*
gtk_window_set_title
gtk_window_set_default_size
gtk_window_get_size
gtk_window_get_default_size
gtk_window_resize
gtk_window_present
)
(import
(scheme core)
(otus ffi)
(lib gtk-3 gtk)
(lib gtk-3 widget))
(begin
(define GtkWindow* type-vptr)
(define gtk_window_set_title (GTK3 void "gtk_window_set_title" GtkWindow* type-string))
(define gtk_window_set_default_size (GTK3 void "gtk_window_set_default_size" GtkWindow* gint gint))
(define gtk_window_get_size (GTK3 void "gtk_window_get_size" GtkWindow* (fft& gint) (fft& gint)))
(define gtk_window_get_default_size (GTK3 void "gtk_window_get_default_size" GtkWindow* (fft& gint) (fft& gint)))
(define gtk_window_resize (GTK3 void "gtk_window_resize" GtkWindow* gint gint))
(define gtk_window_present (GTK3 void "gtk_window_present" GtkWindow*))
))
| |
a1021f34b4b616323160306ad47e074197fe3c1b3bdf2d96b887c5efa65a5245 | muldvarp/CFGAnalyzer | cfg.ml | open Basics ;;
type symbol = Nonterminal of string
| Terminal of string
type pureCFG = (string * (symbol list list)) list
type alphabet = string list
type fullCFG = { cfg: pureCFG;
origcfg: pureCFG;
alphabet: string list;
nonterminals: string list;
start: string;
(* termprods: (string * (string list)) list; *)
nullable: StringSet.t;
ambnullable: StringIntSet.t;
ambnonterminals: StringSet.t;
ambproductions: StringStringSet.t;
;
closure : ;
ambclosure : StringIntSet.t StringMap.t
closure: StringSet.t StringMap.t;
ambclosure: StringIntSet.t StringMap.t *)
}
let showPureCFG =
List.fold_left
(fun s -> fun (nonterminal,rules) ->
s ^ nonterminal ^ " -> " ^
(String.concat " | " (List.map (fun rule -> String.concat " " (List.map (function Terminal s -> "\"" ^ s ^ "\""
| Nonterminal s -> s)
rule))
rules)) ^ " ;\n")
""
let alphabet cfg =
StringSet.fold
(fun s -> fun l -> s::l)
(List.fold_left (fun s -> fun (_,rules) ->
StringSet.union s (List.fold_left (fun s -> fun r -> StringSet.union s r)
StringSet.empty
(List.map (function Terminal s -> StringSet.singleton s
| Nonterminal _ -> StringSet.empty)
(List.concat rules))))
StringSet.empty
cfg)
[]
let nonterminals = List.map fst
let to2nf cfg gid =
let ncounter = ref 0 in
let acounter = ref 0 in
let newNvar _ = let s = "G" ^ string_of_int gid ^ ".AuxN" ^ string_of_int !ncounter in
incr ncounter; s
in
let newAvar _ = let s = "G" ^ string_of_int gid ^ ".AuxA" ^ string_of_int !acounter in
incr acounter; s
in
let newAvars = List.map (fun a -> (a,newAvar ())) (alphabet cfg) in
let getAvar a = List.assoc a newAvars in
let replaced = ref StringSet.empty in
let cfg = (List.map
(fun (n,rules) ->
let rules = List.map
(fun rule -> if List.length rule > 1
then List.map
(function Terminal s ->
replaced := StringSet.add s !replaced;
Nonterminal (getAvar s)
| t -> t)
rule
else rule)
rules
in
(n,rules))
cfg)
in
let cfg = cfg @ (StringSet.fold (fun t -> fun l -> ((getAvar t), [[Terminal t]])::l) !replaced [])
in
let rec divider racc newracc =
function [] -> (racc,newracc)
| ([]::rs) -> divider ([]::racc) newracc rs
| ([x]::rs) -> divider ([x]::racc) newracc rs
| ([x;y]::rs) -> divider ([x;y]::racc) newracc rs
| ((x::y::xs)::rs) -> let n = newNvar () in
divider racc ((n,[[x;y]])::newracc) (((Nonterminal n)::xs)::rs)
in
let rec transformer racc newracc =
function [] -> racc @ newracc
| (n,rs)::rls -> let (rs,nrs) = divider [] newracc (List.rev rs) in
transformer ((n,rs)::racc) nrs rls
in
transformer [] [] (List.rev cfg)
let appendPrefixes cfg gid =
let pre = "G" ^ string_of_int gid ^ "." in
List.map (fun (n,rs) -> (pre ^ n, List.map (fun r -> List.map (function Nonterminal v -> Nonterminal (pre ^ v)
| x -> x) r) rs)) cfg
let nullableSymbols cfg =
let s = ref (List.fold_left (fun s -> fun n -> StringSet.add n s)
StringSet.empty
(List.map fst
(List.filter (fun (n,rs) -> List.exists (function [] -> true | _ -> false) rs)
cfg)))
in
let s' = ref StringSet.empty in
while not (StringSet.equal !s !s') do
s' := !s;
s := List.fold_left
(fun s -> fun n -> StringSet.add n s)
!s
(List.map
fst
(List.filter (fun (n,rs) -> List.exists (fun r -> match r with
[Nonterminal b] -> StringSet.mem b !s
| [Nonterminal b;Nonterminal c] -> StringSet.mem b !s && StringSet.mem c !s
| _ -> false)
rs)
cfg))
done;
!s
let showUnitProdClosure m =
StringMap.fold
(fun n -> fun bs -> fun s ->
n ^ " ==>* {" ^ String.concat "," (StringSet.fold (fun x -> fun l -> x::l) bs []) ^ "}\n" ^ s)
m
""
let showAmbUnitProdClosure m =
StringMap.fold
(fun n -> fun bs -> fun s ->
n ^ " ==>* {" ^ String.concat "," (StringIntSet.fold (fun (x,d) -> fun l -> ("(" ^ x ^ "," ^ string_of_int d ^ ")")::l) bs []) ^ "}\n" ^ s)
m
""
let unitProductionClosure cfg =
let nullable = nullableSymbols cfg in
let m = ref (List.fold_left
(fun m -> fun (n,ns) -> StringMap.add n ns m)
StringMap.empty
(List.map
(fun (n,rs) ->
(n, List.fold_left
(fun s -> fun b -> StringSet.add b s)
StringSet.empty
(n::(List.concat
(List.map
(function [Nonterminal b] -> [b]
| [Nonterminal b; Nonterminal c] -> let nc = if StringSet.mem b nullable then [c] else [] in
if StringSet.mem c nullable then b::nc else nc
| _ -> [])
rs)))))
cfg))
in
let m' = ref StringMap.empty in
while not (StringMap.equal StringSet.equal !m !m') do
m' := !m;
m := StringMap.map
(fun bs -> StringSet.fold
(fun b -> fun s -> StringSet.union s (StringMap.find b !m))
bs
bs)
!m
done;
!m
let terminalProductions cfg =
List.map
( fun ( n , rules ) - >
( n , List.map
( function [ Terminal a ] - > a | _ - > failwith " Should only be applied to unit productions ! " )
( List.filter ( function [ Terminal a ] - > true | _ - > false ) rules ) ) )
cfg
let terminalProductions cfg =
List.map
(fun (n,rules) ->
(n, List.map
(function [Terminal a] -> a | _ -> failwith "Should only be applied to unit productions!")
(List.filter (function [Terminal a] -> true | _ -> false) rules)))
cfg
*)
let stringIntSetAdd b s =
match b with
(b,i) -> match StringIntSet.elements (StringIntSet.filter (fun (c,_) -> b=c) s) with
[] -> StringIntSet.add (b,i) s
| (c,j)::_ -> StringIntSet.add (b,2) (StringIntSet.remove (c,j) s)
let stringIntSetUnion s s' =
List.fold_left (fun s -> fun x -> stringIntSetAdd x s) s (StringIntSet.elements s')
let nullableSymbolsAmb cfg =
let s = ref (List.fold_left (fun s -> fun n -> stringIntSetAdd (n,1) s)
StringIntSet.empty
(List.map fst
(List.filter (fun (n,rs) -> List.exists (function [] -> true | _ -> false) rs)
cfg)))
in
let m = ref !s in
let s' = ref StringIntSet.empty in
while not (StringIntSet.equal !s !s') do
s' := !s;
m := List.fold_left
(fun s -> fun x -> stringIntSetAdd x s)
StringIntSet.empty
(List.concat
(List.map
(fun (n,rs) ->
List.concat
(List.map
(function [] -> []
| [Nonterminal b] -> if StringIntSet.mem (b,1) !m
then [(n,1)]
else if StringIntSet.mem (b,2) !m
then [(n,2)]
else []
| [Nonterminal b;Nonterminal c] ->
if StringIntSet.mem (b,1) !m
then (if StringIntSet.mem (c,1) !m
then [(n,1)]
else if StringIntSet.mem (c,2) !m
then [(n,2)]
else [])
else if StringIntSet.mem (b,2) !m
then (if StringIntSet.mem (c,1) !m ||
StringIntSet.mem (c,2) !m
then [(n,2)]
else [])
else []
| _ -> [])
rs))
cfg));
s := stringIntSetUnion !s !m
done;
!s
let stringMapUnion m m' =
StringMap.fold
(fun k -> fun s -> fun m -> let s' = try
StringMap.find k m
with Not_found -> StringIntSet.empty
in
StringMap.add k (stringIntSetUnion s' s) m)
m'
m
let makeAmbClosure cfg =
let nullable = nullableSymbolsAmb cfg in
let m = ref (List.fold_left
(fun m -> fun (n,ns) -> StringMap.add n ns m)
StringMap.empty
(List.map
(fun (n,rs) ->
(n, List.fold_left
(fun s -> fun b -> stringIntSetAdd b s)
StringIntSet.empty
(List.concat
(List.map
(function [Nonterminal b] -> [(b,1)]
| [Nonterminal b; Nonterminal c] ->
let nc = if StringIntSet.mem (b,1) nullable
then [(c,1)]
else if StringIntSet.mem (b,2) nullable
then [(c,2)]
else []
in
if StringIntSet.mem (c,1) nullable
then (b,1)::nc
else if StringIntSet.mem (c,2) nullable
then (b,2)::nc
else nc
| _ -> [])
rs))))
cfg))
in
let id = List.fold_left
(fun m -> fun n -> StringMap.add n (StringIntSet.singleton (n,1)) m)
StringMap.empty
(List.map fst cfg)
in
let s = ref (stringMapUnion id !m) in
let s' = ref StringMap.empty in
let m' = ref !m in
while not (StringMap.equal StringIntSet.equal !s !s') do
message 3 ( fun _ - > " Current m:\n " ^ showAmbUnitProdClosure ! m ) ;
message 3 ( fun _ - > " Current s:\n " ^ showAmbUnitProdClosure ! s ) ;
message 3 (fun _ -> "Current s:\n" ^ showAmbUnitProdClosure !s); *)
s' := !s;
m' := StringMap.map
(fun bs -> StringIntSet.fold
(fun (b,d) -> fun s -> stringIntSetUnion
s
(try
StringIntSet.fold
(fun (c,d') -> fun s' -> StringIntSet.add (c,min 2 (d*d')) s')
(StringMap.find b !m')
StringIntSet.empty
with Not_found -> failwith ("Can't find nonterminal " ^ b ^ "\n")))
bs
StringIntSet.empty)
!m;
s := stringMapUnion !s !m'
done;
!s
let equivalence_classes cfg =
let nulls = nullableSymbols cfg in
message 3 (fun _ -> " Nullable symbols: {" ^
String.concat "," (StringSet.fold (fun b -> fun l -> b::l) nulls []) ^ "}\n");
let l = List.length cfg in
let dfsnum = Hashtbl.create l in
let dfsnam = Array.make l "" in
(* let index = Array.make l (-1) in *)
let n = ref 0 in
let visited = ref (StringSet.empty) in
let rec dfs v =
message 3 (fun _ -> " Now visiting node " ^ v ^ "\n");
if not (StringSet.mem v !visited)
then (visited := StringSet.add v !visited;
let nexts = ref [] in
List.iter
(function [Nonterminal b] -> nexts := b :: !nexts
| [Nonterminal b; Nonterminal c] ->
if StringSet.mem b nulls then nexts := c :: !nexts;
if StringSet.mem c nulls then nexts := b :: !nexts
| _ -> ())
(List.assoc v cfg);
List.iter (fun w -> dfs w) !nexts;
Hashtbl.add dfsnum v !n;
dfsnam.(!n) <- v;
message 3 (fun _ -> " Node " ^ v ^ " has DFS finishing time " ^ string_of_int !n ^ "\n");
incr n)
in
message 3 (fun _ -> " Forwards DFS through CFG\n");
List.iter
(fun (v,_) -> if not (StringSet.mem v !visited) then dfs v)
cfg;
decr n;
message 3 (fun _ -> " Computing transposed graph\n");
let tcfg = ref [] in
List.iter
(fun (n,rules) -> List.iter
(function [Nonterminal b] -> tcfg := (b,StringSet.singleton n) :: !tcfg
| [Nonterminal b; Nonterminal c] ->
if StringSet.mem b nulls then tcfg := (c,StringSet.singleton n) :: !tcfg;
if StringSet.mem c nulls then tcfg := (b,StringSet.singleton n) :: !tcfg
| _ -> ())
rules)
cfg;
message 3 (fun _ -> " Sorting\n");
tcfg := List.sort (fun (b,_) -> fun (c,_) -> compare b c) !tcfg;
let rec retract =
function [] -> []
| [(b,bs)] -> [(b, StringSet.elements bs)]
| ((b,bs)::(c,cs)::xs) -> if b <> c
then (b,StringSet.elements bs) :: (retract ((c,cs)::xs))
else retract ((b,StringSet.union bs cs)::xs)
in
message 3 (fun _ -> " Retracting\n");
let tcfg = List.map
(fun (n,bs) -> (n, List.sort
(fun b -> fun c -> compare (Hashtbl.find dfsnum c) (Hashtbl.find dfsnum b))
bs))
(retract !tcfg)
in
visited := StringSet.empty;
let scc = ref [] in
let sccs = ref [] in
let rec dfs v =
message 3 (fun _ -> " Now visiting node " ^ v ^ "\n");
if not (StringSet.mem v !visited)
then (visited := StringSet.add v !visited;
message 3 (fun _ -> " Adding node " ^ v ^ "\n");
scc := v :: !scc;
let nexts = try
List.assoc v tcfg
with _ -> []
in
List.iter (fun w -> dfs w) nexts)
in
message 3 (fun _ -> " Backwards DFS through CFG\n");
while !n >= 0 do
let v = dfsnam.(!n) in
message 3 (fun _ -> " Starting new SCC with node with DFS finishing time " ^ string_of_int !n ^ "\n");
scc := [];
dfs v;
sccs := !scc :: !sccs;
while !n >= 0 && StringSet.mem (dfsnam.(!n)) !visited do
decr n
done
done;
!sccs
let tidy cfg =
let start = fst (List.hd cfg) in
let reachable = ref StringSet.empty in
let rec reach v =
message 3 (fun _ -> "Currently reachable nonterminals: " ^
String.concat "," (StringSet.elements !reachable) ^ "\n");
if not (StringSet.mem v !reachable)
then (reachable := StringSet.add v !reachable;
let rules = try
List.assoc v cfg
with
failwith ( " Cfg.tidy : nonterminal " ^ v ^ " seems to have no definition in its grammar.\n " )
in
List.iter
(fun rule -> List.iter
(function Nonterminal b -> reach b
| _ -> ())
rule)
rules)
in
reach (fst (List.hd cfg));
let cfg = List.filter
(fun (n,_) -> StringSet.mem n !reachable)
cfg
in
let productive = ref StringSet.empty in
List.iter (fun (n,rules) -> if List.exists
(fun r -> List.for_all (function Terminal _ -> true | _ -> false) r)
rules
then productive := StringSet.add n !productive)
cfg;
let p' = ref StringSet.empty in
while !productive <> !p' do
message 3 (fun _ -> "Currently productive nonterminals: " ^
String.concat "," (StringSet.elements !productive) ^ "\n");
p' := !productive;
productive := List.fold_left
(fun s -> fun b -> StringSet.add b s)
!productive
(List.map
fst
(List.filter
(fun (n,rules) ->
(List.exists
(fun rule -> List.for_all
(function Terminal _ -> true
| Nonterminal b -> StringSet.mem b !productive)
rule)
rules))
cfg))
done;
if StringSet.mem start !productive then
List.map (fun (n,prods) -> (n, List.filter (fun rule -> List.for_all (function Terminal _ -> true
| Nonterminal b -> StringSet.mem b !productive)
rule)
prods))
(List.filter (fun (n,_) -> StringSet.mem n !productive) cfg)
else
[ (start,[[Nonterminal start]]) ]
exception No_initial_nonterminal
let makeFullCFG cfg' number =
message 3 (fun _ -> " Pre-computing ...\n");
let cfg = tidy cfg' in
message 3 (fun _ -> " CFG after removal of unreachable and unproductive nonterminals:\n" ^
showPureCFG cfg ^ "\n");
let cfg = appendPrefixes cfg number in
message 3 (fun _ -> " CFG after renaming nonterminals uniquely:\n" ^ showPureCFG cfg ^"\n");
let cfg = to2nf cfg number in
message 3 (fun _ -> " CFG after transformation into 2NF:\n" ^ showPureCFG cfg ^ "\n");
let nullsamb = nullableSymbolsAmb cfg in
let nulls = StringIntSet.fold (fun (b,_) -> fun s -> StringSet.add b s) nullsamb StringSet.empty in
let sccs = equivalence_classes cfg in
message 3 (fun _ -> " Equivalence classes under <=*=> are:\n " ^
String.concat
"\n "
(List.map (fun scc -> "{" ^ String.concat "," scc ^ "}") sccs)
^ "\n");
let ambns = List.fold_left
(fun s -> fun b -> StringSet.add b s)
StringSet.empty
(List.concat
(List.filter
(function [b] -> List.exists
(fun rule -> match rule with
[Nonterminal c] -> b=c
| [Nonterminal c; Nonterminal d] -> b=c && StringSet.mem d nulls ||
b=d && StringSet.mem c nulls
| _ -> false)
(List.assoc b cfg)
| _::_::_ -> true
| _ -> false)
sccs))
in
let rec replace_rules n cfg rules =
match cfg with
[] -> []
| ((b,rs)::xs) -> if n=b
then (b,rules)::xs
else (b,rs)::(replace_rules n xs rules)
in
let rec replace_nonterminals cs b =
function [] -> []
| (n,rules)::xs -> (n, List.map
(fun rule -> List.map
(function (Nonterminal c) -> if List.mem c cs
then Nonterminal b
else Nonterminal c
| x -> x)
rule)
rules) :: (replace_nonterminals cs b xs)
in
let rec remove_nonterminals cs =
function [] -> []
| (n,rules)::xs -> if List.mem n cs
then remove_nonterminals cs xs
else (n,rules)::(remove_nonterminals cs xs)
in
let cfg = ref cfg in
List.iter
(fun scc -> match scc with
[] -> ()
| [b] -> ()
| bs -> let ns = List.map fst !cfg in
let bs = List.sort
(fun b -> fun c -> compare (listindex b ns) (listindex c ns))
bs
in
match bs with (b::bs) -> (
cfg := replace_rules
b
!cfg
(List.concat
(List.map
(fun c -> List.assoc c !cfg)
(b::bs)));
cfg := remove_nonterminals bs !cfg;
cfg := replace_nonterminals bs b !cfg)
| _ -> ())
sccs;
message 3 (fun _ -> " Removing duplicate productions\n");
cfg := List.map
(fun (n,rules) -> (n, remove_dups rules))
!cfg;
message 3 (fun _ -> showPureCFG !cfg);
message 3 (fun _ -> " Removing unproductive productions\n");
cfg := List.map
(fun (n,rules) -> (n, List.filter
(function [Nonterminal b] -> b <> n
| _ -> true)
rules))
!cfg;
message 3 (fun _ -> showPureCFG !cfg);
message 3 (fun _ -> " Removing epsilon productions\n");
cfg := List.map
(fun (n,rules) -> (n, List.filter
(function [] -> false | _ -> true)
rules))
!cfg;
message 3 (fun _ -> showPureCFG !cfg);
let cfg = !cfg in
let ns = nonterminals cfg in
{ cfg = cfg;
origcfg = cfg';
alphabet = alphabet cfg;
nonterminals = ns;
(* termprods = terminalProductions cfg; *)
start = (try
List.hd ns
with Failure _ -> raise No_initial_nonterminal);
nullable = nulls;
ambnullable = nullsamb;
ambnonterminals = ambns;
ambproductions = StringStringSet.empty;
id = number
;
closure = unitProductionClosure cfg ;
ambclosure = ( message 3 ( fun _ - > " Computing derivation relation between nonterminals with ambiguity information\n " ) ;
makeAmbClosure cfg )
closure = unitProductionClosure cfg;
ambclosure = (message 3 (fun _ -> " Computing derivation relation between nonterminals with ambiguity information\n");
makeAmbClosure cfg) *) }
let size cfg =
let epsilons = ref 0 in
let terminals = ref 0 in
let units = ref 0 in
let comps = ref 0 in
let z = ref 0 in
List.iter
(fun (n,rules) ->
List.iter (fun rule -> z := !z + (min 1 (List.length rule));
match rule with
[] -> incr epsilons
| [Terminal _] -> incr terminals
| [Nonterminal _] -> incr units
| _::_ -> incr comps)
rules)
cfg;
let r = !epsilons + !terminals + !units + !comps in
(!epsilons, !terminals, !units, !comps, r, !z, !z + (List.length cfg))
let number_of_productions cfg n =
List.length (List.assoc n cfg.cfg)
| null | https://raw.githubusercontent.com/muldvarp/CFGAnalyzer/d9dcde837a03fba04ac24aca469c4b86e1b9b021/src/cfg.ml | ocaml | termprods: (string * (string list)) list;
let index = Array.make l (-1) in
termprods = terminalProductions cfg; | open Basics ;;
type symbol = Nonterminal of string
| Terminal of string
type pureCFG = (string * (symbol list list)) list
type alphabet = string list
type fullCFG = { cfg: pureCFG;
origcfg: pureCFG;
alphabet: string list;
nonterminals: string list;
start: string;
nullable: StringSet.t;
ambnullable: StringIntSet.t;
ambnonterminals: StringSet.t;
ambproductions: StringStringSet.t;
;
closure : ;
ambclosure : StringIntSet.t StringMap.t
closure: StringSet.t StringMap.t;
ambclosure: StringIntSet.t StringMap.t *)
}
let showPureCFG =
List.fold_left
(fun s -> fun (nonterminal,rules) ->
s ^ nonterminal ^ " -> " ^
(String.concat " | " (List.map (fun rule -> String.concat " " (List.map (function Terminal s -> "\"" ^ s ^ "\""
| Nonterminal s -> s)
rule))
rules)) ^ " ;\n")
""
let alphabet cfg =
StringSet.fold
(fun s -> fun l -> s::l)
(List.fold_left (fun s -> fun (_,rules) ->
StringSet.union s (List.fold_left (fun s -> fun r -> StringSet.union s r)
StringSet.empty
(List.map (function Terminal s -> StringSet.singleton s
| Nonterminal _ -> StringSet.empty)
(List.concat rules))))
StringSet.empty
cfg)
[]
let nonterminals = List.map fst
let to2nf cfg gid =
let ncounter = ref 0 in
let acounter = ref 0 in
let newNvar _ = let s = "G" ^ string_of_int gid ^ ".AuxN" ^ string_of_int !ncounter in
incr ncounter; s
in
let newAvar _ = let s = "G" ^ string_of_int gid ^ ".AuxA" ^ string_of_int !acounter in
incr acounter; s
in
let newAvars = List.map (fun a -> (a,newAvar ())) (alphabet cfg) in
let getAvar a = List.assoc a newAvars in
let replaced = ref StringSet.empty in
let cfg = (List.map
(fun (n,rules) ->
let rules = List.map
(fun rule -> if List.length rule > 1
then List.map
(function Terminal s ->
replaced := StringSet.add s !replaced;
Nonterminal (getAvar s)
| t -> t)
rule
else rule)
rules
in
(n,rules))
cfg)
in
let cfg = cfg @ (StringSet.fold (fun t -> fun l -> ((getAvar t), [[Terminal t]])::l) !replaced [])
in
let rec divider racc newracc =
function [] -> (racc,newracc)
| ([]::rs) -> divider ([]::racc) newracc rs
| ([x]::rs) -> divider ([x]::racc) newracc rs
| ([x;y]::rs) -> divider ([x;y]::racc) newracc rs
| ((x::y::xs)::rs) -> let n = newNvar () in
divider racc ((n,[[x;y]])::newracc) (((Nonterminal n)::xs)::rs)
in
let rec transformer racc newracc =
function [] -> racc @ newracc
| (n,rs)::rls -> let (rs,nrs) = divider [] newracc (List.rev rs) in
transformer ((n,rs)::racc) nrs rls
in
transformer [] [] (List.rev cfg)
let appendPrefixes cfg gid =
let pre = "G" ^ string_of_int gid ^ "." in
List.map (fun (n,rs) -> (pre ^ n, List.map (fun r -> List.map (function Nonterminal v -> Nonterminal (pre ^ v)
| x -> x) r) rs)) cfg
let nullableSymbols cfg =
let s = ref (List.fold_left (fun s -> fun n -> StringSet.add n s)
StringSet.empty
(List.map fst
(List.filter (fun (n,rs) -> List.exists (function [] -> true | _ -> false) rs)
cfg)))
in
let s' = ref StringSet.empty in
while not (StringSet.equal !s !s') do
s' := !s;
s := List.fold_left
(fun s -> fun n -> StringSet.add n s)
!s
(List.map
fst
(List.filter (fun (n,rs) -> List.exists (fun r -> match r with
[Nonterminal b] -> StringSet.mem b !s
| [Nonterminal b;Nonterminal c] -> StringSet.mem b !s && StringSet.mem c !s
| _ -> false)
rs)
cfg))
done;
!s
let showUnitProdClosure m =
StringMap.fold
(fun n -> fun bs -> fun s ->
n ^ " ==>* {" ^ String.concat "," (StringSet.fold (fun x -> fun l -> x::l) bs []) ^ "}\n" ^ s)
m
""
let showAmbUnitProdClosure m =
StringMap.fold
(fun n -> fun bs -> fun s ->
n ^ " ==>* {" ^ String.concat "," (StringIntSet.fold (fun (x,d) -> fun l -> ("(" ^ x ^ "," ^ string_of_int d ^ ")")::l) bs []) ^ "}\n" ^ s)
m
""
let unitProductionClosure cfg =
let nullable = nullableSymbols cfg in
let m = ref (List.fold_left
(fun m -> fun (n,ns) -> StringMap.add n ns m)
StringMap.empty
(List.map
(fun (n,rs) ->
(n, List.fold_left
(fun s -> fun b -> StringSet.add b s)
StringSet.empty
(n::(List.concat
(List.map
(function [Nonterminal b] -> [b]
| [Nonterminal b; Nonterminal c] -> let nc = if StringSet.mem b nullable then [c] else [] in
if StringSet.mem c nullable then b::nc else nc
| _ -> [])
rs)))))
cfg))
in
let m' = ref StringMap.empty in
while not (StringMap.equal StringSet.equal !m !m') do
m' := !m;
m := StringMap.map
(fun bs -> StringSet.fold
(fun b -> fun s -> StringSet.union s (StringMap.find b !m))
bs
bs)
!m
done;
!m
let terminalProductions cfg =
List.map
( fun ( n , rules ) - >
( n , List.map
( function [ Terminal a ] - > a | _ - > failwith " Should only be applied to unit productions ! " )
( List.filter ( function [ Terminal a ] - > true | _ - > false ) rules ) ) )
cfg
let terminalProductions cfg =
List.map
(fun (n,rules) ->
(n, List.map
(function [Terminal a] -> a | _ -> failwith "Should only be applied to unit productions!")
(List.filter (function [Terminal a] -> true | _ -> false) rules)))
cfg
*)
let stringIntSetAdd b s =
match b with
(b,i) -> match StringIntSet.elements (StringIntSet.filter (fun (c,_) -> b=c) s) with
[] -> StringIntSet.add (b,i) s
| (c,j)::_ -> StringIntSet.add (b,2) (StringIntSet.remove (c,j) s)
let stringIntSetUnion s s' =
List.fold_left (fun s -> fun x -> stringIntSetAdd x s) s (StringIntSet.elements s')
let nullableSymbolsAmb cfg =
let s = ref (List.fold_left (fun s -> fun n -> stringIntSetAdd (n,1) s)
StringIntSet.empty
(List.map fst
(List.filter (fun (n,rs) -> List.exists (function [] -> true | _ -> false) rs)
cfg)))
in
let m = ref !s in
let s' = ref StringIntSet.empty in
while not (StringIntSet.equal !s !s') do
s' := !s;
m := List.fold_left
(fun s -> fun x -> stringIntSetAdd x s)
StringIntSet.empty
(List.concat
(List.map
(fun (n,rs) ->
List.concat
(List.map
(function [] -> []
| [Nonterminal b] -> if StringIntSet.mem (b,1) !m
then [(n,1)]
else if StringIntSet.mem (b,2) !m
then [(n,2)]
else []
| [Nonterminal b;Nonterminal c] ->
if StringIntSet.mem (b,1) !m
then (if StringIntSet.mem (c,1) !m
then [(n,1)]
else if StringIntSet.mem (c,2) !m
then [(n,2)]
else [])
else if StringIntSet.mem (b,2) !m
then (if StringIntSet.mem (c,1) !m ||
StringIntSet.mem (c,2) !m
then [(n,2)]
else [])
else []
| _ -> [])
rs))
cfg));
s := stringIntSetUnion !s !m
done;
!s
let stringMapUnion m m' =
StringMap.fold
(fun k -> fun s -> fun m -> let s' = try
StringMap.find k m
with Not_found -> StringIntSet.empty
in
StringMap.add k (stringIntSetUnion s' s) m)
m'
m
let makeAmbClosure cfg =
let nullable = nullableSymbolsAmb cfg in
let m = ref (List.fold_left
(fun m -> fun (n,ns) -> StringMap.add n ns m)
StringMap.empty
(List.map
(fun (n,rs) ->
(n, List.fold_left
(fun s -> fun b -> stringIntSetAdd b s)
StringIntSet.empty
(List.concat
(List.map
(function [Nonterminal b] -> [(b,1)]
| [Nonterminal b; Nonterminal c] ->
let nc = if StringIntSet.mem (b,1) nullable
then [(c,1)]
else if StringIntSet.mem (b,2) nullable
then [(c,2)]
else []
in
if StringIntSet.mem (c,1) nullable
then (b,1)::nc
else if StringIntSet.mem (c,2) nullable
then (b,2)::nc
else nc
| _ -> [])
rs))))
cfg))
in
let id = List.fold_left
(fun m -> fun n -> StringMap.add n (StringIntSet.singleton (n,1)) m)
StringMap.empty
(List.map fst cfg)
in
let s = ref (stringMapUnion id !m) in
let s' = ref StringMap.empty in
let m' = ref !m in
while not (StringMap.equal StringIntSet.equal !s !s') do
message 3 ( fun _ - > " Current m:\n " ^ showAmbUnitProdClosure ! m ) ;
message 3 ( fun _ - > " Current s:\n " ^ showAmbUnitProdClosure ! s ) ;
message 3 (fun _ -> "Current s:\n" ^ showAmbUnitProdClosure !s); *)
s' := !s;
m' := StringMap.map
(fun bs -> StringIntSet.fold
(fun (b,d) -> fun s -> stringIntSetUnion
s
(try
StringIntSet.fold
(fun (c,d') -> fun s' -> StringIntSet.add (c,min 2 (d*d')) s')
(StringMap.find b !m')
StringIntSet.empty
with Not_found -> failwith ("Can't find nonterminal " ^ b ^ "\n")))
bs
StringIntSet.empty)
!m;
s := stringMapUnion !s !m'
done;
!s
let equivalence_classes cfg =
let nulls = nullableSymbols cfg in
message 3 (fun _ -> " Nullable symbols: {" ^
String.concat "," (StringSet.fold (fun b -> fun l -> b::l) nulls []) ^ "}\n");
let l = List.length cfg in
let dfsnum = Hashtbl.create l in
let dfsnam = Array.make l "" in
let n = ref 0 in
let visited = ref (StringSet.empty) in
let rec dfs v =
message 3 (fun _ -> " Now visiting node " ^ v ^ "\n");
if not (StringSet.mem v !visited)
then (visited := StringSet.add v !visited;
let nexts = ref [] in
List.iter
(function [Nonterminal b] -> nexts := b :: !nexts
| [Nonterminal b; Nonterminal c] ->
if StringSet.mem b nulls then nexts := c :: !nexts;
if StringSet.mem c nulls then nexts := b :: !nexts
| _ -> ())
(List.assoc v cfg);
List.iter (fun w -> dfs w) !nexts;
Hashtbl.add dfsnum v !n;
dfsnam.(!n) <- v;
message 3 (fun _ -> " Node " ^ v ^ " has DFS finishing time " ^ string_of_int !n ^ "\n");
incr n)
in
message 3 (fun _ -> " Forwards DFS through CFG\n");
List.iter
(fun (v,_) -> if not (StringSet.mem v !visited) then dfs v)
cfg;
decr n;
message 3 (fun _ -> " Computing transposed graph\n");
let tcfg = ref [] in
List.iter
(fun (n,rules) -> List.iter
(function [Nonterminal b] -> tcfg := (b,StringSet.singleton n) :: !tcfg
| [Nonterminal b; Nonterminal c] ->
if StringSet.mem b nulls then tcfg := (c,StringSet.singleton n) :: !tcfg;
if StringSet.mem c nulls then tcfg := (b,StringSet.singleton n) :: !tcfg
| _ -> ())
rules)
cfg;
message 3 (fun _ -> " Sorting\n");
tcfg := List.sort (fun (b,_) -> fun (c,_) -> compare b c) !tcfg;
let rec retract =
function [] -> []
| [(b,bs)] -> [(b, StringSet.elements bs)]
| ((b,bs)::(c,cs)::xs) -> if b <> c
then (b,StringSet.elements bs) :: (retract ((c,cs)::xs))
else retract ((b,StringSet.union bs cs)::xs)
in
message 3 (fun _ -> " Retracting\n");
let tcfg = List.map
(fun (n,bs) -> (n, List.sort
(fun b -> fun c -> compare (Hashtbl.find dfsnum c) (Hashtbl.find dfsnum b))
bs))
(retract !tcfg)
in
visited := StringSet.empty;
let scc = ref [] in
let sccs = ref [] in
let rec dfs v =
message 3 (fun _ -> " Now visiting node " ^ v ^ "\n");
if not (StringSet.mem v !visited)
then (visited := StringSet.add v !visited;
message 3 (fun _ -> " Adding node " ^ v ^ "\n");
scc := v :: !scc;
let nexts = try
List.assoc v tcfg
with _ -> []
in
List.iter (fun w -> dfs w) nexts)
in
message 3 (fun _ -> " Backwards DFS through CFG\n");
while !n >= 0 do
let v = dfsnam.(!n) in
message 3 (fun _ -> " Starting new SCC with node with DFS finishing time " ^ string_of_int !n ^ "\n");
scc := [];
dfs v;
sccs := !scc :: !sccs;
while !n >= 0 && StringSet.mem (dfsnam.(!n)) !visited do
decr n
done
done;
!sccs
let tidy cfg =
let start = fst (List.hd cfg) in
let reachable = ref StringSet.empty in
let rec reach v =
message 3 (fun _ -> "Currently reachable nonterminals: " ^
String.concat "," (StringSet.elements !reachable) ^ "\n");
if not (StringSet.mem v !reachable)
then (reachable := StringSet.add v !reachable;
let rules = try
List.assoc v cfg
with
failwith ( " Cfg.tidy : nonterminal " ^ v ^ " seems to have no definition in its grammar.\n " )
in
List.iter
(fun rule -> List.iter
(function Nonterminal b -> reach b
| _ -> ())
rule)
rules)
in
reach (fst (List.hd cfg));
let cfg = List.filter
(fun (n,_) -> StringSet.mem n !reachable)
cfg
in
let productive = ref StringSet.empty in
List.iter (fun (n,rules) -> if List.exists
(fun r -> List.for_all (function Terminal _ -> true | _ -> false) r)
rules
then productive := StringSet.add n !productive)
cfg;
let p' = ref StringSet.empty in
while !productive <> !p' do
message 3 (fun _ -> "Currently productive nonterminals: " ^
String.concat "," (StringSet.elements !productive) ^ "\n");
p' := !productive;
productive := List.fold_left
(fun s -> fun b -> StringSet.add b s)
!productive
(List.map
fst
(List.filter
(fun (n,rules) ->
(List.exists
(fun rule -> List.for_all
(function Terminal _ -> true
| Nonterminal b -> StringSet.mem b !productive)
rule)
rules))
cfg))
done;
if StringSet.mem start !productive then
List.map (fun (n,prods) -> (n, List.filter (fun rule -> List.for_all (function Terminal _ -> true
| Nonterminal b -> StringSet.mem b !productive)
rule)
prods))
(List.filter (fun (n,_) -> StringSet.mem n !productive) cfg)
else
[ (start,[[Nonterminal start]]) ]
exception No_initial_nonterminal
let makeFullCFG cfg' number =
message 3 (fun _ -> " Pre-computing ...\n");
let cfg = tidy cfg' in
message 3 (fun _ -> " CFG after removal of unreachable and unproductive nonterminals:\n" ^
showPureCFG cfg ^ "\n");
let cfg = appendPrefixes cfg number in
message 3 (fun _ -> " CFG after renaming nonterminals uniquely:\n" ^ showPureCFG cfg ^"\n");
let cfg = to2nf cfg number in
message 3 (fun _ -> " CFG after transformation into 2NF:\n" ^ showPureCFG cfg ^ "\n");
let nullsamb = nullableSymbolsAmb cfg in
let nulls = StringIntSet.fold (fun (b,_) -> fun s -> StringSet.add b s) nullsamb StringSet.empty in
let sccs = equivalence_classes cfg in
message 3 (fun _ -> " Equivalence classes under <=*=> are:\n " ^
String.concat
"\n "
(List.map (fun scc -> "{" ^ String.concat "," scc ^ "}") sccs)
^ "\n");
let ambns = List.fold_left
(fun s -> fun b -> StringSet.add b s)
StringSet.empty
(List.concat
(List.filter
(function [b] -> List.exists
(fun rule -> match rule with
[Nonterminal c] -> b=c
| [Nonterminal c; Nonterminal d] -> b=c && StringSet.mem d nulls ||
b=d && StringSet.mem c nulls
| _ -> false)
(List.assoc b cfg)
| _::_::_ -> true
| _ -> false)
sccs))
in
let rec replace_rules n cfg rules =
match cfg with
[] -> []
| ((b,rs)::xs) -> if n=b
then (b,rules)::xs
else (b,rs)::(replace_rules n xs rules)
in
let rec replace_nonterminals cs b =
function [] -> []
| (n,rules)::xs -> (n, List.map
(fun rule -> List.map
(function (Nonterminal c) -> if List.mem c cs
then Nonterminal b
else Nonterminal c
| x -> x)
rule)
rules) :: (replace_nonterminals cs b xs)
in
let rec remove_nonterminals cs =
function [] -> []
| (n,rules)::xs -> if List.mem n cs
then remove_nonterminals cs xs
else (n,rules)::(remove_nonterminals cs xs)
in
let cfg = ref cfg in
List.iter
(fun scc -> match scc with
[] -> ()
| [b] -> ()
| bs -> let ns = List.map fst !cfg in
let bs = List.sort
(fun b -> fun c -> compare (listindex b ns) (listindex c ns))
bs
in
match bs with (b::bs) -> (
cfg := replace_rules
b
!cfg
(List.concat
(List.map
(fun c -> List.assoc c !cfg)
(b::bs)));
cfg := remove_nonterminals bs !cfg;
cfg := replace_nonterminals bs b !cfg)
| _ -> ())
sccs;
message 3 (fun _ -> " Removing duplicate productions\n");
cfg := List.map
(fun (n,rules) -> (n, remove_dups rules))
!cfg;
message 3 (fun _ -> showPureCFG !cfg);
message 3 (fun _ -> " Removing unproductive productions\n");
cfg := List.map
(fun (n,rules) -> (n, List.filter
(function [Nonterminal b] -> b <> n
| _ -> true)
rules))
!cfg;
message 3 (fun _ -> showPureCFG !cfg);
message 3 (fun _ -> " Removing epsilon productions\n");
cfg := List.map
(fun (n,rules) -> (n, List.filter
(function [] -> false | _ -> true)
rules))
!cfg;
message 3 (fun _ -> showPureCFG !cfg);
let cfg = !cfg in
let ns = nonterminals cfg in
{ cfg = cfg;
origcfg = cfg';
alphabet = alphabet cfg;
nonterminals = ns;
start = (try
List.hd ns
with Failure _ -> raise No_initial_nonterminal);
nullable = nulls;
ambnullable = nullsamb;
ambnonterminals = ambns;
ambproductions = StringStringSet.empty;
id = number
;
closure = unitProductionClosure cfg ;
ambclosure = ( message 3 ( fun _ - > " Computing derivation relation between nonterminals with ambiguity information\n " ) ;
makeAmbClosure cfg )
closure = unitProductionClosure cfg;
ambclosure = (message 3 (fun _ -> " Computing derivation relation between nonterminals with ambiguity information\n");
makeAmbClosure cfg) *) }
let size cfg =
let epsilons = ref 0 in
let terminals = ref 0 in
let units = ref 0 in
let comps = ref 0 in
let z = ref 0 in
List.iter
(fun (n,rules) ->
List.iter (fun rule -> z := !z + (min 1 (List.length rule));
match rule with
[] -> incr epsilons
| [Terminal _] -> incr terminals
| [Nonterminal _] -> incr units
| _::_ -> incr comps)
rules)
cfg;
let r = !epsilons + !terminals + !units + !comps in
(!epsilons, !terminals, !units, !comps, r, !z, !z + (List.length cfg))
let number_of_productions cfg n =
List.length (List.assoc n cfg.cfg)
|
c57393eaea7a26f9f7c8175d10fe969bafb5f152d225db0f4619a37a6ab56a72 | clj-commons/iapetos | fn.clj | (ns iapetos.collector.fn
(:require [iapetos.collector :as collector]
[iapetos.core :as prometheus]
[iapetos.metric :as metric]
[iapetos.collector.exceptions :as ex])
(:import [io.prometheus.client CollectorRegistry]))
# # Instrumentation
(defmacro ^:private wrap
[body f]
`(let [f# ~f]
(fn [& args#]
(->> (apply f# args#) ~body))))
(defmacro ^:private wrap->>
[v & pairs]
(->> (partition 2 pairs)
(mapcat
(fn [[condition body]]
(list condition `(wrap ~body))))
(list* `cond->> v)))
(defn wrap-instrumentation
"Wrap the given function to write a series of execution metrics to the given
registry. See [[initialize]]."
[f registry fn-name
{:keys [duration?
exceptions?
last-failure?
run-count?
labels]
:or {duration? true
exceptions? true
last-failure? true
run-count? true
labels {}}}]
(let [labels (into labels {:fn fn-name, :result "success"})
failure-labels (assoc labels :result "failure")]
(wrap->>
f
duration? (prometheus/with-duration
(registry :fn/duration-seconds labels))
exceptions? (ex/with-exceptions
(registry :fn/exceptions-total labels))
last-failure? (prometheus/with-failure-timestamp
(registry :fn/last-failure-unixtime labels))
run-count? (prometheus/with-failure-counter
(registry :fn/runs-total failure-labels))
run-count? (prometheus/with-success-counter
(registry :fn/runs-total labels)))))
(defn- instrument-function!
[registry fn-name fn-var options]
(let [f' (-> fn-var
(alter-meta! update ::original #(or % @fn-var))
(::original)
(wrap-instrumentation registry fn-name options))]
(alter-var-root fn-var (constantly f'))))
;; ## Collectors
(defn initialize
"Enable function instrumentalization by registering the metric collectors.
Metrics include:
- `fn_duration_seconds`: a histogram of execution duration,
- `fn_last_failure_unixtime`: a gauge with the last failure timestamp,
- `fn_runs_total`: a counter for fn runs, split by success/failure,
- `fn_exceptions_total`: a counter for fn exceptions, split by class.
"
[registry & [{:keys [labels]}]]
(->> (vector
(prometheus/histogram
:fn/duration-seconds
{:description "the time elapsed during execution of the observed function."
:labels (into [:fn] labels)})
(prometheus/gauge
:fn/last-failure-unixtime
{:description "the UNIX timestamp of the last time the observed function threw an exception."
:labels (into [:fn] labels)})
(prometheus/counter
:fn/runs-total
{:description "the total number of finished runs of the observed function."
:labels (into [:fn :result] labels)})
(ex/exception-counter
:fn/exceptions-total
{:description "the total number and type of exceptions for the observed function."
:labels (into [:fn] labels)}))
(reduce prometheus/register registry)))
# # Constructor
(defn- instrument!*
[registry fn-name fn-var options]
{:pre [(string? fn-name) (var? fn-var)]}
(instrument-function! registry fn-name fn-var options)
registry)
(defn instrument!
([registry fn-var]
(instrument! registry fn-var {}))
([registry fn-var
{:keys [fn-name
exceptions?
duration?
last-failure?
run-count?
labels]
:or {fn-name (subs (str fn-var) 2)}
:as options}]
(instrument!* registry fn-name fn-var options)))
(defn instrument-namespace!
([registry namespace] (instrument-namespace! registry namespace {}))
([registry namespace options]
(->> namespace
ns-publics vals
(filter #(fn? (var-get %)))
(map #(instrument! registry % options)))))
| null | https://raw.githubusercontent.com/clj-commons/iapetos/0fecedaf8454e17e41b05e0e14754a311b9f4ce2/src/iapetos/collector/fn.clj | clojure | ## Collectors | (ns iapetos.collector.fn
(:require [iapetos.collector :as collector]
[iapetos.core :as prometheus]
[iapetos.metric :as metric]
[iapetos.collector.exceptions :as ex])
(:import [io.prometheus.client CollectorRegistry]))
# # Instrumentation
(defmacro ^:private wrap
[body f]
`(let [f# ~f]
(fn [& args#]
(->> (apply f# args#) ~body))))
(defmacro ^:private wrap->>
[v & pairs]
(->> (partition 2 pairs)
(mapcat
(fn [[condition body]]
(list condition `(wrap ~body))))
(list* `cond->> v)))
(defn wrap-instrumentation
"Wrap the given function to write a series of execution metrics to the given
registry. See [[initialize]]."
[f registry fn-name
{:keys [duration?
exceptions?
last-failure?
run-count?
labels]
:or {duration? true
exceptions? true
last-failure? true
run-count? true
labels {}}}]
(let [labels (into labels {:fn fn-name, :result "success"})
failure-labels (assoc labels :result "failure")]
(wrap->>
f
duration? (prometheus/with-duration
(registry :fn/duration-seconds labels))
exceptions? (ex/with-exceptions
(registry :fn/exceptions-total labels))
last-failure? (prometheus/with-failure-timestamp
(registry :fn/last-failure-unixtime labels))
run-count? (prometheus/with-failure-counter
(registry :fn/runs-total failure-labels))
run-count? (prometheus/with-success-counter
(registry :fn/runs-total labels)))))
(defn- instrument-function!
[registry fn-name fn-var options]
(let [f' (-> fn-var
(alter-meta! update ::original #(or % @fn-var))
(::original)
(wrap-instrumentation registry fn-name options))]
(alter-var-root fn-var (constantly f'))))
(defn initialize
"Enable function instrumentalization by registering the metric collectors.
Metrics include:
- `fn_duration_seconds`: a histogram of execution duration,
- `fn_last_failure_unixtime`: a gauge with the last failure timestamp,
- `fn_runs_total`: a counter for fn runs, split by success/failure,
- `fn_exceptions_total`: a counter for fn exceptions, split by class.
"
[registry & [{:keys [labels]}]]
(->> (vector
(prometheus/histogram
:fn/duration-seconds
{:description "the time elapsed during execution of the observed function."
:labels (into [:fn] labels)})
(prometheus/gauge
:fn/last-failure-unixtime
{:description "the UNIX timestamp of the last time the observed function threw an exception."
:labels (into [:fn] labels)})
(prometheus/counter
:fn/runs-total
{:description "the total number of finished runs of the observed function."
:labels (into [:fn :result] labels)})
(ex/exception-counter
:fn/exceptions-total
{:description "the total number and type of exceptions for the observed function."
:labels (into [:fn] labels)}))
(reduce prometheus/register registry)))
# # Constructor
(defn- instrument!*
[registry fn-name fn-var options]
{:pre [(string? fn-name) (var? fn-var)]}
(instrument-function! registry fn-name fn-var options)
registry)
(defn instrument!
([registry fn-var]
(instrument! registry fn-var {}))
([registry fn-var
{:keys [fn-name
exceptions?
duration?
last-failure?
run-count?
labels]
:or {fn-name (subs (str fn-var) 2)}
:as options}]
(instrument!* registry fn-name fn-var options)))
(defn instrument-namespace!
([registry namespace] (instrument-namespace! registry namespace {}))
([registry namespace options]
(->> namespace
ns-publics vals
(filter #(fn? (var-get %)))
(map #(instrument! registry % options)))))
|
634d0dbc4116831cc58fc90ea66e682d67208d8609958ef7039c12dddbc8df84 | vibrato-team/vibrato-programming-language | Assembly.hs | module Backend.Assembly where
import Backend.TAC.TAC
import Backend.TAC.Monad
import Backend.FlowGraph.DSatur
import Data.Maybe
import Data.List
import qualified AST
tacToAssembly :: Instruction -> String
tacToAssembly (ThreeAddressCode Assign (Just x) (Just y@Constant{}) Nothing) = assemblyInst "li" (Just x) (Just y) Nothing
tacToAssembly (ThreeAddressCode Assign (Just x) (Just y) Nothing) = assemblyInst "add" (Just x) (Just y) (Just zeroReg)
TODO use floating point arithmetic if necessary
tacToAssembly (ThreeAddressCode Add (Just x) (Just y) (Just z@Constant{})) = assemblyInst "addi" (Just x) (Just y) (Just z)
tacToAssembly (ThreeAddressCode Add (Just x) (Just y@Constant{}) (Just z)) = assemblyInst "addi" (Just x) (Just z) (Just y)
tacToAssembly (ThreeAddressCode Add (Just x) (Just y) (Just z)) = assemblyInst "add" (Just x) (Just y) (Just z)
tacToAssembly (ThreeAddressCode Minus (Just x) (Just y) Nothing) = assemblyInst "sub" (Just x) (Just zeroReg) (Just y)
tacToAssembly (ThreeAddressCode Sub (Just x) (Just y) (Just z)) = assemblyInst "sub" (Just x) (Just y) (Just z)
TODO use floating point arithmetic if necessary
tacToAssembly (ThreeAddressCode Mult (Just x) (Just y) (Just z)) = assemblyInst "mult" (Just y) (Just z) Nothing ++ "\n" ++ assemblyInst "mflo" (Just x) Nothing Nothing
tacToAssembly (ThreeAddressCode Div (Just x) (Just y) (Just z)) = assemblyInst "div" (Just y) (Just z) Nothing ++ "\n" ++ assemblyInst "mflo" (Just x) Nothing Nothing
-- TODO: Cast between int and float
tacToAssembly (ThreeAddressCode (Cast _ toType) (Just x) (Just y) _) = tacToAssembly (ThreeAddressCode Assign (Just x) (Just y) Nothing)
tacToAssembly (ThreeAddressCode GoTo Nothing Nothing (Just label)) = assemblyInst "j" (Just label) Nothing Nothing
tacToAssembly (ThreeAddressCode If Nothing (Just b) (Just label)) = assemblyInst "bne" (Just b) (Just zeroReg) (Just label)
tacToAssembly (ThreeAddressCode IfFalse Nothing (Just b) (Just label)) = assemblyInst "beq" (Just b) (Just zeroReg) (Just label)
tacToAssembly (ThreeAddressCode Eq (Just x) (Just y) (Just label)) = assemblyInst "beq" (Just x) (Just y) (Just label)
tacToAssembly (ThreeAddressCode Neq (Just x) (Just y) (Just label)) = assemblyInst "bne" (Just x) (Just y) (Just label)
tacToAssembly (ThreeAddressCode Lez (Just x) Nothing (Just label)) = assemblyInst "blez" (Just x) Nothing (Just label)
tacToAssembly (ThreeAddressCode Get r@(Just reg@(Id Reg{})) v1@(Just addr) offset@(Just i@Constant{})) =
tacToMoveInstruction "l" r offset v1
transform ` $ x = $ i($y ) ` into ` $ x = $ y + $ i $ x = 0($x ) `
tacToAssembly (ThreeAddressCode Get r@(Just reg@(Id Reg{})) v1@(Just addr@(Id Reg{})) offset@(Just i)) =
assemblyInst "add" r v1 offset ++ "\n" ++
tacToMoveInstruction "l" r (Just zeroConstant) r
tacToAssembly (ThreeAddressCode Get v@(Just var) v1@(Just addr) offset@(Just i)) =
tacToMoveInstruction "s" v offset v1
tacToAssembly (ThreeAddressCode Set v1@(Just addr) offset@(Just i@Constant{}) r@(Just feg)) =
tacToMoveInstruction "s" r offset v1
tacToAssembly (ThreeAddressCode Get (Just x) (Just y) (Just i)) = error $ "Get " ++ show x ++ " := " ++ show y ++ "[" ++ show i ++ "]"
tacToAssembly (ThreeAddressCode Set (Just x) (Just i) (Just y)) = error $ "Set " ++ show x ++ "[" ++ show i ++ "] := " ++ show y
tacToAssembly (ThreeAddressCode NewLabel Nothing (Just label) Nothing) = show label ++ ":"
tacToAssembly (ThreeAddressCode Call Nothing (Just label) (Just newFrame)) =
assemblyInst "sub" (Just base) (Just base) (Just newFrame) ++ "\n" ++
assemblyInst "jal" (Just label) Nothing Nothing
tacToAssembly (ThreeAddressCode Call (Just t@(Id Reg{})) (Just label) (Just newFrame)) =
assemblyInst "sub" (Just base) (Just base) (Just newFrame) ++ "\n" ++
assemblyInst "jal" (Just label) Nothing Nothing ++ "\n" ++
-- TODO: Use floating point arithmetic if necessary
assemblyInst "add" (Just t) (Just $ returnReg (getType t)) (Just zeroReg)
tacToAssembly (ThreeAddressCode Call (Just t) (Just label) (Just newFrame)) =
let v0reg = returnReg (getType t) in
assemblyInst "sub" (Just base) (Just base) (Just newFrame) ++ "\n" ++
assemblyInst "jal" (Just label) Nothing Nothing ++ "\n" ++
-- TODO: Use floating point arithmetic if necessary
tacToMoveInstruction "s" (Just v0reg) (Just t) Nothing
tacToAssembly (ThreeAddressCode Return (Just base') maybeRet Nothing) =
maybe "" (\ret -> assemblyInst "add" (Just $ returnReg $ getType ret) maybeRet (Just zeroReg)) maybeRet ++ "\n" ++
assemblyInst "add" (Just base) (Just base') (Just zeroReg) ++ "\n" ++
returnInst
tacToAssembly (ThreeAddressCode Sbrk (Just t) (Just sz) Nothing) =
syscall 9 (Just sz) Nothing ++ "\n" ++
assemblyInst "add" (Just t) (Just $ returnReg (AST.Simple "eighth")) (Just zeroReg)
tacToAssembly (ThreeAddressCode Print Nothing (Just x) maybeSize)
-- Print integer
| getType x `elem` map AST.Simple ["whole", "quarter", "eighth"] =
syscall 1 (Just x) Nothing
-- Print float
| getType x == AST.Simple "32th" =
syscall 2 (Just x) Nothing
-- Print double
| getType x == AST.Simple "64th" =
syscall 3 (Just x) Nothing
-- Print character
| getType x == AST.Simple "half" =
syscall 11 (Just x) Nothing
-- Print string
| otherwise =
syscall 4 (Just x) maybeSize
tacToAssembly (ThreeAddressCode Read Nothing (Just x) maybeSize)
-- Read integer
| getType x `elem` map AST.Simple ["whole", "quarter", "eighth"] =
syscall 5 Nothing Nothing ++ "\n" ++
assemblyInst "add" (Just x) (Just $ returnReg $ getType x) (Just zeroReg)
-- Read float
| getType x == AST.Simple "32th" =
syscall 6 Nothing Nothing ++ "\n" ++
assemblyInst "add" (Just x) (Just $ returnReg $ getType x) (Just zeroReg)
-- Read double
| getType x == AST.Simple "64th" =
syscall 7 Nothing Nothing ++ "\n" ++
assemblyInst "add" (Just x) (Just $ returnReg $ getType x) (Just zeroReg)
-- Read character
| getType x == AST.Simple "half" =
syscall 12 Nothing Nothing
-- Read string
| otherwise =
syscall 8 Nothing maybeSize ++ "\n" ++
assemblyInst "add" (Just x) (Just $ returnReg $ getType x) (Just zeroReg)
tacToAssembly (ThreeAddressCode Entry Nothing Nothing Nothing) = ""
tacToAssembly (ThreeAddressCode Exit Nothing Nothing Nothing) = ""
tacToAssembly (ThreeAddressCode Comment Nothing (Just cmnt) Nothing) = "\t# " ++ show cmnt
tacToAssembly (ThreeAddressCode Load r@(Just reg) v1@(Just x) Nothing) =
tacToMoveInstruction "l" r v1 Nothing
transform ` $ x = $ i($y ) ` into ` $ x = $ y + $ i $ x = 0($x ) `
tacToAssembly (ThreeAddressCode Load r@(Just reg) v1@(Just addr) offset@(Just _)) =
tacToMoveInstruction "l" r offset v1
tacToAssembly (ThreeAddressCode Store r@(Just reg) v1@(Just x) Nothing) =
tacToMoveInstruction "s" r v1 Nothing
transform ` $ i($y ) = $ x ` into ` $ x = $ y + $ i $ 0($x ) = $ x `
tacToAssembly (ThreeAddressCode Store r@(Just reg) v1@(Just addr) offset@(Just _)) =
tacToMoveInstruction "s" r offset v1
tacToAssembly t = error $ show t
moveInstructions = ["lb", "lh", "lw", "lbu", "lhu", "sb", "sh", "sw"]
tacToMoveInstruction :: String -> Maybe Value -> Maybe Value -> Maybe Value -> String
tacToMoveInstruction move (Just reg) mayVal2 mayVal3
| getType reg `elem` [AST.Simple "whole", AST.Simple "half"] =
assemblyInst (move ++ "b") (Just reg) mayVal2 mayVal3
| getType reg == AST.Simple "quarter" =
assemblyInst (move ++ "h") (Just reg) mayVal2 mayVal3
| getType reg == AST.Simple "eighth" =
assemblyInst (move ++ "w") (Just reg) mayVal2 mayVal3
| otherwise =
assemblyInst (move ++ "w") (Just reg) mayVal2 mayVal3
returnInst = "\tjr $ra"
assemblyInst :: String -> Maybe Value -> Maybe Value -> Maybe Value -> String
assemblyInst op mayVal1 mayVal2 mayVal3
| op `elem` moveInstructions =
"\t" ++ op ++ " " ++ (intercalate ", " $ words $ justMaybeValue mayVal1 ++ " " ++ justMaybeValue mayVal2 ++ if isJust mayVal3 then "(" ++ justMaybeValue mayVal3 ++ ")" else "")
| otherwise =
"\t" ++ op ++ " " ++ (intercalate ", " $ words $ justMaybeValue mayVal1 ++ " " ++ justMaybeValue mayVal2 ++ " " ++ justMaybeValue mayVal3)
syscall :: Int -> Maybe Value -> Maybe Value -> String
syscall v0 maybeA0 maybeA1=
assemblyInst "li" (Just $ returnReg (AST.Simple "quarter")) (Just $ toEighthConstant v0) Nothing ++ "\n" ++
(if isJust maybeA0 then assemblyInst "add" (Just $ Id $ Reg "$a0" (AST.Simple "eighth")) maybeA0 (Just zeroReg) ++ "\n" else "") ++
(if isJust maybeA1 then assemblyInst "add" (Just $ Id $ Reg "$a1" (AST.Simple "eighth")) maybeA1 (Just zeroReg) ++ "\n" else "") ++
assemblyInst "syscall" Nothing Nothing Nothing
justMaybeValue = maybe "" show
generateAssembly :: [Instruction] -> String
generateAssembly tac =
".data\n\thead: .word 0\n.text\nmain:" ++ (unlines $ map tacToAssembly tac) | null | https://raw.githubusercontent.com/vibrato-team/vibrato-programming-language/f6d98c2686604a91cd99307bc3fdc06828c7f538/src/Backend/Assembly.hs | haskell | TODO: Cast between int and float
TODO: Use floating point arithmetic if necessary
TODO: Use floating point arithmetic if necessary
Print integer
Print float
Print double
Print character
Print string
Read integer
Read float
Read double
Read character
Read string | module Backend.Assembly where
import Backend.TAC.TAC
import Backend.TAC.Monad
import Backend.FlowGraph.DSatur
import Data.Maybe
import Data.List
import qualified AST
tacToAssembly :: Instruction -> String
tacToAssembly (ThreeAddressCode Assign (Just x) (Just y@Constant{}) Nothing) = assemblyInst "li" (Just x) (Just y) Nothing
tacToAssembly (ThreeAddressCode Assign (Just x) (Just y) Nothing) = assemblyInst "add" (Just x) (Just y) (Just zeroReg)
TODO use floating point arithmetic if necessary
tacToAssembly (ThreeAddressCode Add (Just x) (Just y) (Just z@Constant{})) = assemblyInst "addi" (Just x) (Just y) (Just z)
tacToAssembly (ThreeAddressCode Add (Just x) (Just y@Constant{}) (Just z)) = assemblyInst "addi" (Just x) (Just z) (Just y)
tacToAssembly (ThreeAddressCode Add (Just x) (Just y) (Just z)) = assemblyInst "add" (Just x) (Just y) (Just z)
tacToAssembly (ThreeAddressCode Minus (Just x) (Just y) Nothing) = assemblyInst "sub" (Just x) (Just zeroReg) (Just y)
tacToAssembly (ThreeAddressCode Sub (Just x) (Just y) (Just z)) = assemblyInst "sub" (Just x) (Just y) (Just z)
TODO use floating point arithmetic if necessary
tacToAssembly (ThreeAddressCode Mult (Just x) (Just y) (Just z)) = assemblyInst "mult" (Just y) (Just z) Nothing ++ "\n" ++ assemblyInst "mflo" (Just x) Nothing Nothing
tacToAssembly (ThreeAddressCode Div (Just x) (Just y) (Just z)) = assemblyInst "div" (Just y) (Just z) Nothing ++ "\n" ++ assemblyInst "mflo" (Just x) Nothing Nothing
tacToAssembly (ThreeAddressCode (Cast _ toType) (Just x) (Just y) _) = tacToAssembly (ThreeAddressCode Assign (Just x) (Just y) Nothing)
tacToAssembly (ThreeAddressCode GoTo Nothing Nothing (Just label)) = assemblyInst "j" (Just label) Nothing Nothing
tacToAssembly (ThreeAddressCode If Nothing (Just b) (Just label)) = assemblyInst "bne" (Just b) (Just zeroReg) (Just label)
tacToAssembly (ThreeAddressCode IfFalse Nothing (Just b) (Just label)) = assemblyInst "beq" (Just b) (Just zeroReg) (Just label)
tacToAssembly (ThreeAddressCode Eq (Just x) (Just y) (Just label)) = assemblyInst "beq" (Just x) (Just y) (Just label)
tacToAssembly (ThreeAddressCode Neq (Just x) (Just y) (Just label)) = assemblyInst "bne" (Just x) (Just y) (Just label)
tacToAssembly (ThreeAddressCode Lez (Just x) Nothing (Just label)) = assemblyInst "blez" (Just x) Nothing (Just label)
tacToAssembly (ThreeAddressCode Get r@(Just reg@(Id Reg{})) v1@(Just addr) offset@(Just i@Constant{})) =
tacToMoveInstruction "l" r offset v1
transform ` $ x = $ i($y ) ` into ` $ x = $ y + $ i $ x = 0($x ) `
tacToAssembly (ThreeAddressCode Get r@(Just reg@(Id Reg{})) v1@(Just addr@(Id Reg{})) offset@(Just i)) =
assemblyInst "add" r v1 offset ++ "\n" ++
tacToMoveInstruction "l" r (Just zeroConstant) r
tacToAssembly (ThreeAddressCode Get v@(Just var) v1@(Just addr) offset@(Just i)) =
tacToMoveInstruction "s" v offset v1
tacToAssembly (ThreeAddressCode Set v1@(Just addr) offset@(Just i@Constant{}) r@(Just feg)) =
tacToMoveInstruction "s" r offset v1
tacToAssembly (ThreeAddressCode Get (Just x) (Just y) (Just i)) = error $ "Get " ++ show x ++ " := " ++ show y ++ "[" ++ show i ++ "]"
tacToAssembly (ThreeAddressCode Set (Just x) (Just i) (Just y)) = error $ "Set " ++ show x ++ "[" ++ show i ++ "] := " ++ show y
tacToAssembly (ThreeAddressCode NewLabel Nothing (Just label) Nothing) = show label ++ ":"
tacToAssembly (ThreeAddressCode Call Nothing (Just label) (Just newFrame)) =
assemblyInst "sub" (Just base) (Just base) (Just newFrame) ++ "\n" ++
assemblyInst "jal" (Just label) Nothing Nothing
tacToAssembly (ThreeAddressCode Call (Just t@(Id Reg{})) (Just label) (Just newFrame)) =
assemblyInst "sub" (Just base) (Just base) (Just newFrame) ++ "\n" ++
assemblyInst "jal" (Just label) Nothing Nothing ++ "\n" ++
assemblyInst "add" (Just t) (Just $ returnReg (getType t)) (Just zeroReg)
tacToAssembly (ThreeAddressCode Call (Just t) (Just label) (Just newFrame)) =
let v0reg = returnReg (getType t) in
assemblyInst "sub" (Just base) (Just base) (Just newFrame) ++ "\n" ++
assemblyInst "jal" (Just label) Nothing Nothing ++ "\n" ++
tacToMoveInstruction "s" (Just v0reg) (Just t) Nothing
tacToAssembly (ThreeAddressCode Return (Just base') maybeRet Nothing) =
maybe "" (\ret -> assemblyInst "add" (Just $ returnReg $ getType ret) maybeRet (Just zeroReg)) maybeRet ++ "\n" ++
assemblyInst "add" (Just base) (Just base') (Just zeroReg) ++ "\n" ++
returnInst
tacToAssembly (ThreeAddressCode Sbrk (Just t) (Just sz) Nothing) =
syscall 9 (Just sz) Nothing ++ "\n" ++
assemblyInst "add" (Just t) (Just $ returnReg (AST.Simple "eighth")) (Just zeroReg)
tacToAssembly (ThreeAddressCode Print Nothing (Just x) maybeSize)
| getType x `elem` map AST.Simple ["whole", "quarter", "eighth"] =
syscall 1 (Just x) Nothing
| getType x == AST.Simple "32th" =
syscall 2 (Just x) Nothing
| getType x == AST.Simple "64th" =
syscall 3 (Just x) Nothing
| getType x == AST.Simple "half" =
syscall 11 (Just x) Nothing
| otherwise =
syscall 4 (Just x) maybeSize
tacToAssembly (ThreeAddressCode Read Nothing (Just x) maybeSize)
| getType x `elem` map AST.Simple ["whole", "quarter", "eighth"] =
syscall 5 Nothing Nothing ++ "\n" ++
assemblyInst "add" (Just x) (Just $ returnReg $ getType x) (Just zeroReg)
| getType x == AST.Simple "32th" =
syscall 6 Nothing Nothing ++ "\n" ++
assemblyInst "add" (Just x) (Just $ returnReg $ getType x) (Just zeroReg)
| getType x == AST.Simple "64th" =
syscall 7 Nothing Nothing ++ "\n" ++
assemblyInst "add" (Just x) (Just $ returnReg $ getType x) (Just zeroReg)
| getType x == AST.Simple "half" =
syscall 12 Nothing Nothing
| otherwise =
syscall 8 Nothing maybeSize ++ "\n" ++
assemblyInst "add" (Just x) (Just $ returnReg $ getType x) (Just zeroReg)
tacToAssembly (ThreeAddressCode Entry Nothing Nothing Nothing) = ""
tacToAssembly (ThreeAddressCode Exit Nothing Nothing Nothing) = ""
tacToAssembly (ThreeAddressCode Comment Nothing (Just cmnt) Nothing) = "\t# " ++ show cmnt
tacToAssembly (ThreeAddressCode Load r@(Just reg) v1@(Just x) Nothing) =
tacToMoveInstruction "l" r v1 Nothing
-- transform `$x = $i($y)` into `$x = $y + $i ; $x = 0($x)`
tacToAssembly (ThreeAddressCode Load r@(Just reg) v1@(Just addr) offset@(Just _)) =
tacToMoveInstruction "l" r offset v1
tacToAssembly (ThreeAddressCode Store r@(Just reg) v1@(Just x) Nothing) =
tacToMoveInstruction "s" r v1 Nothing
-- transform `$i($y) = $x` into `$x = $y + $i ; 0($x) = $x`
tacToAssembly (ThreeAddressCode Store r@(Just reg) v1@(Just addr) offset@(Just _)) =
tacToMoveInstruction "s" r offset v1
tacToAssembly t = error $ show t
-- | MIPS load/store mnemonics that take a @reg, offset(base)@ operand form;
-- 'assemblyInst' formats these differently from plain ALU instructions.
moveInstructions = ["lb", "lh", "lw", "lbu", "lhu", "sb", "sh", "sw"]
-- | Emit a load (@"l"@) or store (@"s"@) instruction whose width suffix is
-- derived from the source-language type of the first operand: per this
-- compiler's type mapping, "whole"/"half" use a byte access, "quarter" a
-- halfword, and everything else (including "eighth") a full word.
tacToMoveInstruction :: String -> Maybe Value -> Maybe Value -> Maybe Value -> String
tacToMoveInstruction move (Just reg) mayVal2 mayVal3 =
  assemblyInst (move ++ widthSuffix) (Just reg) mayVal2 mayVal3
  where
    regType = getType reg
    widthSuffix
      | regType `elem` [AST.Simple "whole", AST.Simple "half"] = "b"
      | regType == AST.Simple "quarter" = "h"
      | otherwise = "w"
-- | Jump to the caller via the return-address register; used by the
-- 'Return' case of 'tacToAssembly'.
returnInst = "\tjr $ra"
-- | Render a single assembly line: a tab, the mnemonic, then the operands.
-- Operands are rendered with 'justMaybeValue'; blanks from 'Nothing'
-- operands are dropped by 'words', and the survivors are joined with
-- @", "@.  For mnemonics in 'moveInstructions' the third operand is
-- wrapped in parentheses, producing the @reg, offset(base)@ addressing
-- form (note: the parenthesised part must attach to the second operand
-- without an intervening space, which is why no @" "@ is appended there).
assemblyInst :: String -> Maybe Value -> Maybe Value -> Maybe Value -> String
assemblyInst op mayVal1 mayVal2 mayVal3
  | op `elem` moveInstructions =
    "\t" ++ op ++ " " ++ (intercalate ", " $ words $ justMaybeValue mayVal1 ++ " " ++ justMaybeValue mayVal2 ++ if isJust mayVal3 then "(" ++ justMaybeValue mayVal3 ++ ")" else "")
  | otherwise =
    "\t" ++ op ++ " " ++ (intercalate ", " $ words $ justMaybeValue mayVal1 ++ " " ++ justMaybeValue mayVal2 ++ " " ++ justMaybeValue mayVal3)
-- | Emit a MIPS syscall sequence: load the service number into the return
-- register (via @li@), move each supplied argument into $a0 / $a1, then
-- issue the @syscall@ instruction itself.
syscall :: Int -> Maybe Value -> Maybe Value -> String
syscall serviceNo maybeA0 maybeA1 =
  concat (loadService : argMoves ++ [assemblyInst "syscall" Nothing Nothing Nothing])
  where
    -- Load the syscall service number.
    loadService =
      assemblyInst "li" (Just $ returnReg (AST.Simple "quarter")) (Just $ toEighthConstant serviceNo) Nothing ++ "\n"
    -- One "add rd, arg, $zero" move per argument that is present.
    argMoves =
      [ assemblyInst "add" (Just $ Id $ Reg regName (AST.Simple "eighth")) arg (Just zeroReg) ++ "\n"
      | (regName, arg) <- [("$a0", maybeA0), ("$a1", maybeA1)]
      , isJust arg
      ]
-- | Render a 'Maybe' operand: 'show' the value, or the empty string for 'Nothing'.
justMaybeValue = maybe "" show
generateAssembly :: [Instruction] -> String
generateAssembly tac =
".data\n\thead: .word 0\n.text\nmain:" ++ (unlines $ map tacToAssembly tac) |
a5d4fb5538d759a4eb03fd56cc07f56d279c71bcedb72038498e0b2fcd136742 | tnelson/Forge | handshake.rkt | #lang racket/base
;; RFC 6455 Handshaking.
;; Copyright (c) 2013
;;
;; This module is distributed under the GNU Lesser General Public
;; License (LGPL). This means that you can link it into proprietary
;; applications, provided you follow the rules stated in the LGPL. You
;; can also modify this module; if you distribute a modified version,
;; you must distribute it under the terms of the LGPL, which in
;; particular means that you must release the source code for the
;; modified software. See http://www.gnu.org/licenses/lgpl-3.0.txt for
;; more information.
(require rackunit)
(require file/sha1)
(require net/base64)
(provide key-digest)
;; RFC 6455, section 4.2.2: the Sec-WebSocket-Accept value is computed by
;; appending the fixed GUID to the client's key, SHA-1 hashing the result,
;; and base64-encoding the digest with no trailing line terminator.
(define (key-digest key)
  (let* ((guid #"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")
         (digest (sha1-bytes (open-input-bytes (bytes-append key guid)))))
    (base64-encode digest #"")))
;; Example from rfc6455, section 4.2.2
(check-equal? (key-digest #"dGhlIHNhbXBsZSBub25jZQ==") #"s3pPLMBiTxaQ9kYGzzhZRbK+xOo=")
| null | https://raw.githubusercontent.com/tnelson/Forge/1687cba0ebdb598c29c51845d43c98a459d0588f/forge/racket-rfc6455/net/rfc6455/rfc6455/handshake.rkt | racket | RFC 6455 Handshaking.
This module is distributed under the GNU Lesser General Public
License (LGPL). This means that you can link it into proprietary
applications, provided you follow the rules stated in the LGPL. You
can also modify this module; if you distribute a modified version,
you must distribute it under the terms of the LGPL, which in
particular means that you must release the source code for the
modified software. See -3.0.txt for
more information.
Example from rfc6455, section 4.2.2 | #lang racket/base
Copyright ( c ) 2013
(require rackunit)
(require file/sha1)
(require net/base64)
(provide key-digest)
(define (key-digest key)
(base64-encode (sha1-bytes (open-input-bytes
(bytes-append key #"258EAFA5-E914-47DA-95CA-C5AB0DC85B11")))
#""))
(check-equal? (key-digest #"dGhlIHNhbXBsZSBub25jZQ==") #"s3pPLMBiTxaQ9kYGzzhZRbK+xOo=")
|
4a73df84c273c10567605828159ba40b9d9309de860f9f2bd93eb22f4f803b0f | per-gron/blackhole | module-info.scm | ;;; -------------------------------------------------------------- ;;;
;;; ;;;
;;; Module info data structure ;;;
;;; ;;;
;;; -------------------------------------------------------------- ;;;
;; Record holding everything known about a module: its interface
;; (symbols, exports, imports, definitions), its dependencies, and its
;; build settings.  All fields are read-only; instances are built via
;; the keyword wrapper `make-module-info` below, not via the generated
;; constructor directly.
(define-type module-info
  id: 726DB40B-AB18-4396-A570-BB715B602DB9
  constructor: make-module-info/internal
  (symbols read-only:)
  (exports read-only:)
  (imports read-only:)
  (definitions read-only:)
  ;; A list of module references, possibly relative
  (runtime-dependencies read-only:)
  ;; A list of module references, possibly relative
  (compiletime-dependencies read-only:)
  ;; A list with Gambit compiler options
  (options read-only:)
  ;; A string with options for the C compiler
  (cc-options read-only:)
  ;; A string with options for the linker
  (ld-options-prelude read-only:)
  ;; A string with options for the linker
  (ld-options read-only:)
  ;; A boolean
  (force-compile read-only:)
  ;; A boolean
  (no-global-state read-only:)
  (namespace-string read-only:))
;; Public constructor: a keyword-argument wrapper around the generated
;; `make-module-info/internal`, supplying empty/neutral defaults for the
;; list- and string-valued fields.  `force-compile`, `no-global-state`
;; and `namespace-string` have no explicit default (Gambit's #!key rules
;; then apply -- presumably #f; confirm against callers).
(define (make-module-info #!key
                          (symbols '())
                          (exports '())
                          (imports '())
                          (definitions '())
                          (runtime-dependencies '())
                          (compiletime-dependencies '())
                          (options '())
                          (cc-options "")
                          (ld-options-prelude "")
                          (ld-options "")
                          force-compile
                          no-global-state
                          namespace-string)
  ;; Positional order must match the field order of the define-type.
  (make-module-info/internal symbols
                             exports
                             imports
                             definitions
                             runtime-dependencies
                             compiletime-dependencies
                             options
                             cc-options
                             ld-options-prelude
                             ld-options
                             force-compile
                             no-global-state
                             namespace-string))
;; All dependencies of a module as one list: runtime dependencies
;; first, then compile-time dependencies.
(define (module-info-dependencies info)
  (append (module-info-runtime-dependencies info)
          (module-info-compiletime-dependencies info)))
;; Replace the placeholder symbol 'self-reference in the fourth slot of
;; each export entry with the concrete module reference `ref`.  Entries
;; whose fourth slot is anything else are passed through unchanged.
(define (resolve-export-self-reference ref exports)
  (define (patch-entry export)
    (if (eq? 'self-reference (cadddr export))
        (list (car export) (cadr export) (caddr export) ref)
        export))
  (map patch-entry exports))
;; Reconstruct a module-info record from the serialized association
;; list stored alongside a compiled module.  `module-ref` must be an
;; absolute module reference: relative dependency references from the
;; alist are absolutized against it, and the placeholder
;; 'self-reference in the exports is resolved to it.  Raises an error
;; if the reference is not absolute or if the recorded namespace does
;; not match the reference's namespace.
(define (make-module-info-from-alist module-ref module-info-alist)
  (if (not (module-reference-absolute? module-ref))
      (error "Module reference must be absolute"))
  (let* ((tbl (list->table module-info-alist))
         (definitions (table-ref tbl 'definitions '()))
         (imports (table-ref tbl 'imports '()))
         ;; NOTE(review): no default is given for 'exports, so a missing
         ;; key errors here rather than yielding #f -- confirm intended.
         (exports (resolve-export-self-reference
                   module-ref
                   (table-ref tbl 'exports #f)))
         (namespace-string (table-ref tbl 'namespace-string))
         ;; Symbol table: (name . kind) pairs derived from definitions.
         (symbols (map (lambda (def)
                         (cons (car def)
                               (cadr def)))
                       definitions))
         (runtime-dependencies
          (map (lambda (ref)
                 (module-reference-absolutize ref module-ref))
               (table-ref tbl 'runtime-dependencies '())))
         (compiletime-dependencies
          (map (lambda (ref)
                 (module-reference-absolutize ref module-ref))
               (table-ref tbl 'compiletime-dependencies '()))))
    ;; Consistency check: the namespace recorded at compile time must
    ;; still match the module's current namespace.
    (if (not (equal? (module-reference-namespace module-ref)
                     namespace-string))
        (error "The compiled module's namespace and its current \
namespace don't match"
               namespace-string
               (module-reference-namespace module-ref)))
    (make-module-info
     symbols: symbols
     exports: exports
     imports: imports
     definitions: definitions
     runtime-dependencies: runtime-dependencies
     compiletime-dependencies: compiletime-dependencies
     options: (table-ref tbl 'options '())
     cc-options: (table-ref tbl 'cc-options "")
     ld-options-prelude: (table-ref tbl 'ld-options-prelude "")
     ld-options: (table-ref tbl 'ld-options "")
     force-compile: (table-ref tbl 'force-compile #f)
     no-global-state: (table-ref tbl 'no-global-state #f)
     namespace-string: namespace-string)))
| null | https://raw.githubusercontent.com/per-gron/blackhole/3f78eb726f85a6b8bddf757e6f28c785bc70e53d/src/module-info.scm | scheme | -------------------------------------------------------------- ;;;
;;;
Module info data structure ;;;
;;;
-------------------------------------------------------------- ;;;
A list of module references, possibly relative
A list of module references, possibly relative
A list with Gambit compiler options
A string with options for the C compiler
A string with options for the linker
A string with options for the linker
A boolean
A boolean |
(define-type module-info
id: 726DB40B-AB18-4396-A570-BB715B602DB9
constructor: make-module-info/internal
(symbols read-only:)
(exports read-only:)
(imports read-only:)
(definitions read-only:)
(runtime-dependencies read-only:)
(compiletime-dependencies read-only:)
(options read-only:)
(cc-options read-only:)
(ld-options-prelude read-only:)
(ld-options read-only:)
(force-compile read-only:)
(no-global-state read-only:)
(namespace-string read-only:))
(define (make-module-info #!key
(symbols '())
(exports '())
(imports '())
(definitions '())
(runtime-dependencies '())
(compiletime-dependencies '())
(options '())
(cc-options "")
(ld-options-prelude "")
(ld-options "")
force-compile
no-global-state
namespace-string)
(make-module-info/internal symbols
exports
imports
definitions
runtime-dependencies
compiletime-dependencies
options
cc-options
ld-options-prelude
ld-options
force-compile
no-global-state
namespace-string))
(define (module-info-dependencies info)
(append (module-info-runtime-dependencies info)
(module-info-compiletime-dependencies info)))
(define (resolve-export-self-reference ref exports)
(map (lambda (export)
(if (eq? 'self-reference
(cadddr export))
(list (car export)
(cadr export)
(caddr export)
ref)
export))
exports))
(define (make-module-info-from-alist module-ref module-info-alist)
(if (not (module-reference-absolute? module-ref))
(error "Module reference must be absolute"))
(let* ((tbl (list->table module-info-alist))
(definitions (table-ref tbl 'definitions '()))
(imports (table-ref tbl 'imports '()))
(exports (resolve-export-self-reference
module-ref
(table-ref tbl 'exports #f)))
(namespace-string (table-ref tbl 'namespace-string))
(symbols (map (lambda (def)
(cons (car def)
(cadr def)))
definitions))
(runtime-dependencies
(map (lambda (ref)
(module-reference-absolutize ref module-ref))
(table-ref tbl 'runtime-dependencies '())))
(compiletime-dependencies
(map (lambda (ref)
(module-reference-absolutize ref module-ref))
(table-ref tbl 'compiletime-dependencies '()))))
(if (not (equal? (module-reference-namespace module-ref)
namespace-string))
(error "The compiled module's namespace and its current \
namespace don't match"
namespace-string
(module-reference-namespace module-ref)))
(make-module-info
symbols: symbols
exports: exports
imports: imports
definitions: definitions
runtime-dependencies: runtime-dependencies
compiletime-dependencies: compiletime-dependencies
options: (table-ref tbl 'options '())
cc-options: (table-ref tbl 'cc-options "")
ld-options-prelude: (table-ref tbl 'ld-options-prelude "")
ld-options: (table-ref tbl 'ld-options "")
force-compile: (table-ref tbl 'force-compile #f)
no-global-state: (table-ref tbl 'no-global-state #f)
namespace-string: namespace-string)))
|
5d67eda937edaeddf7cf3ffd2dd4127d82e1a8294c9cda0a4b19845f3bc246d6 | hasktorch/hasktorch | GCrossAttention.hs | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE GADTs #-}
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
module Torch.GraduallyTyped.NN.Transformer.GCrossAttention where
import Control.Monad.Indexed (IxPointed (ireturn), (>>>=))
import Control.Monad.Indexed.State (IxStateT (..))
import Control.Monad.Indexed.Trans (IxMonadTrans (ilift))
import Data.Kind (Type)
import GHC.Generics (Generic)
import GHC.TypeLits (Nat, Symbol)
import Torch.GraduallyTyped.DType (DType (..), DataType, SDataType (..))
import Torch.GraduallyTyped.Device (Device (..), DeviceType (..), SDevice (..))
import Torch.GraduallyTyped.NN.Class (HasForward (..), HasInitialize (..), HasStateDict (..), ModelSpec, NamedModel (..))
import Torch.GraduallyTyped.NN.Dropout (Dropout (..))
import Torch.GraduallyTyped.NN.Normalization (LayerNorm (..), LayerNormSpec (..))
import Torch.GraduallyTyped.NN.Transformer.GMultiHeadAttention (GMultiHeadAttentionF, multiHeadAttentionSpec)
import Torch.GraduallyTyped.NN.Transformer.Type (STransformerStyle (..), TransformerStyle (..))
import Torch.GraduallyTyped.NN.Type (HasBias (..), HasDropout (..), SHasBias (..), SHasDropout (..))
import Torch.GraduallyTyped.Prelude (Catch, pattern (:|:))
import Torch.GraduallyTyped.Prelude.List (SList (..))
import Torch.GraduallyTyped.RequiresGradient (Gradient, RequiresGradient (..), SGradient (..))
import Torch.GraduallyTyped.Shape.Class (BroadcastShapesF)
import Torch.GraduallyTyped.Shape.Type (Dim (..), Name (..), SDim, SShape (..), Shape (..), Size (..))
import Torch.GraduallyTyped.Tensor.MathOperations.Pointwise (add)
import Torch.GraduallyTyped.Tensor.Type (Tensor)
import Torch.GraduallyTyped.Unify (type (<+>), type (<|>))
-- | Generic cross-attention layer data type.
--
-- The four fields are kept abstract so the same record shape can carry
-- either concrete layers or their 'ModelSpec's:
--
-- - @initialLayerNorm@: the initial layer normalization
-- - @mha@: the multi-headed attention layer
-- - @dropout@: the dropout layer
-- - @finalLayerNorm@: the final layer normalization
data
  GCrossAttention
    (initialLayerNorm :: Type)
    (mha :: Type)
    (dropout :: Type)
    (finalLayerNorm :: Type)
  where
  GCrossAttention ::
    forall initialLayerNorm mha dropout finalLayerNorm.
    { -- | initial layer normalization of the cross-attention layer.
      caInitialLayerNorm :: initialLayerNorm,
      -- | multi-headed attention layer specialized for cross-attention.
      caMultiHeadAttention :: mha,
      -- | dropout applied to the attention output before the residual add.
      caDropout :: dropout,
      -- | final layer normalization of the cross-attention layer.
      caFinalLayerNorm :: finalLayerNorm
    } ->
    GCrossAttention initialLayerNorm mha dropout finalLayerNorm
  deriving stock (Eq, Ord, Show, Generic)
-- The specification of a 'GCrossAttention' has the same record shape,
-- with every field replaced by that field's own 'ModelSpec'.
type instance
  ModelSpec (GCrossAttention initialLayerNorm mha dropout finalLayerNorm) =
    GCrossAttention (ModelSpec initialLayerNorm) (ModelSpec mha) (ModelSpec dropout) (ModelSpec finalLayerNorm)
-- | Maps a transformer @style@ and the tensor-level parameters to the
-- concrete 'GCrossAttention' instantiation used by that style, by
-- delegating each field to its own per-style type family below.
type family
  GCrossAttentionF
    (style :: TransformerStyle)
    (gradient :: Gradient RequiresGradient)
    (device :: Device (DeviceType Nat))
    (dataType :: DataType DType)
    (headDim :: Dim (Name Symbol) (Size Nat))
    (headEmbedDim :: Dim (Name Symbol) (Size Nat))
    (embedDim :: Dim (Name Symbol) (Size Nat))
    (queryEmbedDim :: Dim (Name Symbol) (Size Nat))
    (keyEmbedDim :: Dim (Name Symbol) (Size Nat))
    (hasDropout :: HasDropout) ::
    Type
  where
  GCrossAttentionF style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim hasDropout =
    GCrossAttention
      (CAInitialLayerNormF style gradient device dataType queryEmbedDim)
      (CAMultiheadAttentionF style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim hasDropout)
      (CADropoutF style hasDropout)
      (CAFinalLayerNormF style gradient device dataType queryEmbedDim)
-- | Specifies the initial layer normalization of the cross-attention layer.
-- T5/ByT5 use a bias-free pre-norm, Pegasus a biased one, and BART/MBART
-- have no initial normalization (unit).
type family
  CAInitialLayerNormF
    (style :: TransformerStyle)
    (gradient :: Gradient RequiresGradient)
    (device :: Device (DeviceType Nat))
    (dataType :: DataType DType)
    (queryEmbedDim :: Dim (Name Symbol) (Size Nat)) ::
    Type
  where
  CAInitialLayerNormF 'T5 gradient device dataType queryEmbedDim =
    NamedModel (LayerNorm 'WithoutBias gradient device dataType ('Shape '[queryEmbedDim]))
  -- ByT5 reuses the T5 layout.
  CAInitialLayerNormF 'ByT5 gradient device dataType queryEmbedDim =
    CAInitialLayerNormF 'T5 gradient device dataType queryEmbedDim
  CAInitialLayerNormF 'BART _ _ _ _ =
    ()
  -- MBART reuses the BART layout.
  CAInitialLayerNormF 'MBART gradient device dataType queryEmbedDim =
    CAInitialLayerNormF 'BART gradient device dataType queryEmbedDim
  CAInitialLayerNormF 'Pegasus gradient device dataType queryEmbedDim =
    NamedModel (LayerNorm 'WithBias gradient device dataType ('Shape '[queryEmbedDim]))
-- | Specifies the multi-headed attention layer specialized for
-- cross-attention.  Note the key embedding dimension is passed twice:
-- keys and values both come from the encoder side.
type family
  CAMultiheadAttentionF
    (style :: TransformerStyle)
    (gradient :: Gradient RequiresGradient)
    (device :: Device (DeviceType Nat))
    (dataType :: DataType DType)
    (headDim :: Dim (Name Symbol) (Size Nat))
    (headEmbedDim :: Dim (Name Symbol) (Size Nat))
    (embedDim :: Dim (Name Symbol) (Size Nat))
    (queryEmbedDim :: Dim (Name Symbol) (Size Nat))
    (keyEmbedDim :: Dim (Name Symbol) (Size Nat))
    (hasDropout :: HasDropout) ::
    Type
  where
  CAMultiheadAttentionF style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim hasDropout =
    NamedModel (GMultiHeadAttentionF style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim keyEmbedDim hasDropout)
-- | Specifies the dropout layer of the cross-attention layer:
-- a 'Dropout' module when dropout is enabled, unit otherwise.
type family
  CADropoutF
    (style :: TransformerStyle)
    (hasDropout :: HasDropout) ::
    Type
  where
  CADropoutF _ 'WithDropout = Dropout
  CADropoutF _ 'WithoutDropout = ()
-- | Specifies the final layer normalization of the cross-attention layer.
-- This mirrors 'CAInitialLayerNormF': styles that normalize before the
-- block (T5/ByT5/Pegasus) have unit here, while BART/MBART normalize after.
type family
  CAFinalLayerNormF
    (style :: TransformerStyle)
    (gradient :: Gradient RequiresGradient)
    (device :: Device (DeviceType Nat))
    (dataType :: DataType DType)
    (queryEmbedDim :: Dim (Name Symbol) (Size Nat)) ::
    Type
  where
  CAFinalLayerNormF 'T5 _ _ _ _ =
    ()
  -- ByT5 reuses the T5 layout.
  CAFinalLayerNormF 'ByT5 gradient device dataType queryEmbedDim =
    CAFinalLayerNormF 'T5 gradient device dataType queryEmbedDim
  CAFinalLayerNormF 'BART gradient device dataType queryEmbedDim =
    NamedModel (LayerNorm 'WithBias gradient device dataType ('Shape '[queryEmbedDim]))
  -- MBART reuses the BART layout.
  CAFinalLayerNormF 'MBART gradient device dataType queryEmbedDim =
    CAFinalLayerNormF 'BART gradient device dataType queryEmbedDim
  CAFinalLayerNormF 'Pegasus gradient device dataType queryEmbedDim =
    ()
-- | Specifies the parameters of a cross-attention layer.
--
-- - @style@: the style of the transformer stack, e.g. 'ST5', 'SByT5', etc.
-- - @gradient@: whether to compute the gradient of the stack's parameters.
-- - @device@: the computational device on which the stack is allocated.
-- - @dataType@: the data type of the stack's parameters.
-- - @headDim@: the dimension of all transformer heads in the stack.
-- - @headEmbedDim@: the dimension of the transformer head embeddings.
-- - @embedDim@: the dimension of the transformer embeddings.
-- - @queryEmbedDim@: the dimension of the transformer query embeddings.
-- - @keyEmbedDim@: the dimension of the transformer key embeddings.
-- - @dropoutP@: the dropout rate.
-- - @eps@: the epsilon value for numerical stability of the layer normalization.
-- Builds the 'ModelSpec' for a cross-attention layer.  The per-style
-- helpers below choose both the sub-layer layout and the state-dict key
-- prefixes ("EncDecAttention.", "encoder_attn.", ...) used when loading
-- pretrained weights.  Decoder-only/encoder-only styles (BERT, RoBERTa,
-- GPT-2) have no cross-attention, hence 'undefined'.
crossAttentionSpec ::
  forall style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim hasDropout.
  STransformerStyle style ->
  SGradient gradient ->
  SDevice device ->
  SDataType dataType ->
  SDim headDim ->
  SDim headEmbedDim ->
  SDim embedDim ->
  SDim queryEmbedDim ->
  SDim keyEmbedDim ->
  SHasDropout hasDropout ->
  Double ->
  Double ->
  ModelSpec (GCrossAttentionF style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim hasDropout)
crossAttentionSpec style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim hasDropout dropoutP eps =
  let -- Initial (pre-) layer norm per style; see 'CAInitialLayerNormF'.
      initialLayerNormSpec ST5 = NamedModel "layer_norm." layerNormWithoutBiasSpec
      initialLayerNormSpec SByT5 = NamedModel "layer_norm." layerNormWithoutBiasSpec
      initialLayerNormSpec SBART = ()
      initialLayerNormSpec SMBART = ()
      initialLayerNormSpec SPegasus = NamedModel "encoder_attn_layer_norm." layerNormWithBiasSpec
      initialLayerNormSpec SBERT = undefined
      initialLayerNormSpec SRoBERTa = undefined
      initialLayerNormSpec SGPT2 = undefined
      -- Multi-head attention spec wrapped with the style's weight prefix.
      mhaSpec ST5 = NamedModel "EncDecAttention." $ mhaSpec' ST5
      mhaSpec SByT5 = NamedModel "EncDecAttention." $ mhaSpec' SByT5
      mhaSpec SBART = NamedModel "encoder_attn." $ mhaSpec' SBART
      mhaSpec SMBART = NamedModel "encoder_attn." $ mhaSpec' SMBART
      mhaSpec SPegasus = NamedModel "encoder_attn." $ mhaSpec' SPegasus
      mhaSpec SBERT = undefined
      mhaSpec SRoBERTa = undefined
      mhaSpec SGPT2 = undefined
      dropoutSpec _ SWithDropout = Dropout dropoutP
      dropoutSpec _ SWithoutDropout = ()
      -- Final (post-) layer norm per style; see 'CAFinalLayerNormF'.
      finalLayerNormSpec ST5 = ()
      finalLayerNormSpec SByT5 = ()
      finalLayerNormSpec SBART = NamedModel "encoder_attn_layer_norm." layerNormWithBiasSpec
      finalLayerNormSpec SMBART = NamedModel "encoder_attn_layer_norm." layerNormWithBiasSpec
      finalLayerNormSpec SPegasus = ()
      finalLayerNormSpec SBERT = undefined
      finalLayerNormSpec SRoBERTa = undefined
      finalLayerNormSpec SGPT2 = undefined
   in GCrossAttention (initialLayerNormSpec style) (mhaSpec style) (dropoutSpec style hasDropout) (finalLayerNormSpec style)
  where
    -- Keys and values share keyEmbedDim, hence it is passed twice.
    mhaSpec' ::
      STransformerStyle style ->
      ModelSpec (GMultiHeadAttentionF style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim keyEmbedDim hasDropout)
    mhaSpec' style' = multiHeadAttentionSpec style' gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim keyEmbedDim hasDropout dropoutP
    layerNormWithoutBiasSpec = LayerNormSpec SWithoutBias gradient device dataType (SShape $ queryEmbedDim :|: SNil) eps
    layerNormWithBiasSpec = LayerNormSpec SWithBias gradient device dataType (SShape $ queryEmbedDim :|: SNil) eps
-- Initialization threads the random-generator device through the four
-- sub-layers in order (generatorDevice -> ... -> generatorOutputDevice).
instance
  ( HasInitialize initialLayerNorm generatorDevice initialLayerNorm' generatorDevice0,
    HasInitialize multiHeadAttention generatorDevice0 multiHeadAttention' generatorDevice1,
    HasInitialize dropout generatorDevice1 dropout' generatorDevice2,
    HasInitialize finalLayerNorm generatorDevice2 finalLayerNorm' generatorOutputDevice
  ) =>
  HasInitialize
    (GCrossAttention initialLayerNorm multiHeadAttention dropout finalLayerNorm)
    generatorDevice
    (GCrossAttention initialLayerNorm' multiHeadAttention' dropout' finalLayerNorm')
    generatorOutputDevice
-- State-dict (de)serialization is derived field-wise from the sub-layers.
instance
  ( HasStateDict initialLayerNorm,
    HasStateDict multiHeadAttention,
    HasStateDict dropout,
    HasStateDict finalLayerNorm
  ) =>
  HasStateDict (GCrossAttention initialLayerNorm multiHeadAttention dropout finalLayerNorm)
-- | 'HasForward' instance for 'GCrossAttention'.
--
-- @
-- ┌───────┐  ┌─────┐  ┌───────────────┐
-- │ query │  │ key │  │ attentionBias │
-- └───┬───┘  └──┬──┘  └───────┬───────┘
--     │        │             │
-- ┌───┤        │             │
-- │   │        │             │
-- │   ▼        │             │
-- │ (caInitialLayerNorm)     │
-- │   │        │             │
-- │   │     ┌──┴──┐          │
-- │   │     │     │          │
-- │   ▼     ▼     ▼          │
-- │ caMultiheadAttention◄────┘
-- │   │
-- │   ▼
-- │ caDropout
-- │   │
-- └──►add◄──┘
--     │
--     ▼
-- (caFinalLayerNorm)
--     │
--     ▼
-- ┌───────┐
-- │ query │
-- └───────┘
-- @
instance
  ( HasForward
      initialLayerNorm
      (Tensor queryGradient queryLayout queryDevice queryDataType queryShape)
      generatorDevice
      tensor0
      generatorDevice0,
    -- The attention layer receives the (possibly normalized) query plus
    -- the encoder key twice: once as keys, once as values.
    HasForward
      multiHeadAttention
      ( tensor0,
        Tensor keyGradient keyLayout keyDevice keyDataType keyShape,
        Tensor keyGradient keyLayout keyDevice keyDataType keyShape,
        Tensor attentionBiasGradient attentionBiasLayout attentionBiasDevice attentionBiasDataType attentionBiasShape
      )
      generatorDevice0
      tensor1
      generatorDevice1,
    HasForward
      dropout
      tensor1
      generatorDevice1
      (Tensor gradient2 layout2 device2 dataType2 shape2)
      generatorDevice2,
    -- The residual sum unifies the query's and the dropout output's
    -- gradient/layout/device/dtype and broadcasts their shapes.
    HasForward
      finalLayerNorm
      (Tensor (queryGradient <|> gradient2) (queryLayout <+> layout2) (queryDevice <+> device2) (queryDataType <+> dataType2) (BroadcastShapesF queryShape shape2))
      generatorDevice2
      output
      generatorOutputDevice,
    Catch (BroadcastShapesF queryShape shape2)
  ) =>
  HasForward
    (GCrossAttention initialLayerNorm multiHeadAttention dropout finalLayerNorm)
    ( Tensor queryGradient queryLayout queryDevice queryDataType queryShape,
      Tensor keyGradient keyLayout keyDevice keyDataType keyShape,
      Tensor attentionBiasGradient attentionBiasLayout attentionBiasDevice attentionBiasDataType attentionBiasShape
    )
    generatorDevice
    output
    generatorOutputDevice
  where
  -- Pipeline: (norm) -> attention(query', key, key, bias) -> dropout
  -- -> residual add with the ORIGINAL query -> (norm).
  forward GCrossAttention {..} (query, key, attentionBias) =
    runIxStateT $
      ireturn query
        >>>= IxStateT . forward caInitialLayerNorm
        >>>= (\query' -> IxStateT $ forward caMultiHeadAttention (query', key, key, attentionBias))
        >>>= IxStateT . forward caDropout
        >>>= ilift . (query `add`)
        >>>= IxStateT . forward caFinalLayerNorm
| null | https://raw.githubusercontent.com/hasktorch/hasktorch/cf0ed1aba9c41123ba46f7c0788a4df10fbfe1ef/experimental/gradually-typed/src/Torch/GraduallyTyped/NN/Transformer/GCrossAttention.hs | haskell | # LANGUAGE GADTs #
# LANGUAGE OverloadedStrings #
| Generic cross-attention layer data type.
- @initialLayerNorm@: the initial layer normalization
- @mha@: the multi-headed attention layer
- @dropout@: the dropout layer
- @finalLayerNorm@: the final layer normalization
| initial layer normalization of the cross-attention layer.
| multi-headed attention layer specialized for cross-attention.
| dropout
| final layer normalization of the cross-attention layer.
| Specifies the initial layer normalization of the cross-attention layer.
| Specifies the multi-headed attention layer specialized for cross-attention.
| Specifies the dropout layer of the cross-attention layer.
| Specifies the final layer normalization of the cross-attention layer.
| Specifies the parameters of a cross-attention layer.
- @style@: the style of the transformer stack, e.g. 'ST5', 'SByT5', etc.
- @gradient@: whether to compute the gradient of the stack's parameters.
- @device@: the computational device on which the stack is allocated.
- @dataType@: the data type of the stack's parameters.
- @headDim@: the dimension of all transformer heads in the stack.
- @headEmbedDim@: the dimension of the transformer head embeddings.
- @embedDim@: the dimension of the transformer embeddings.
- @queryEmbedDim@: the dimension of the transformer query embeddings.
- @keyEmbedDim@: the dimension of the transformer key embeddings.
- @dropoutP@: the dropout rate.
- @eps@: the epsilon value for numerical stability of the layer normalization.
@
│ query │ │ key │ │ attentionBias │
│ │ │
│ │ │ │
│ ▼ │ │
│ (caInitialLayerNorm) │ │
│ │ │ │
│ │ │ │ │
│ ▼ ▼ ▼ │
│ caMultiheadAttention◄─────────┘
│ │
│ ▼
│ caDropout
│ │
└──────►add◄───────┘
│
▼
(caFinalLayerNorm)
│
▼
│ query │
└───────┘
@ | # LANGUAGE DataKinds #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE PatternSynonyms #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeFamilies #
# LANGUAGE TypeOperators #
# LANGUAGE UndecidableInstances #
module Torch.GraduallyTyped.NN.Transformer.GCrossAttention where
import Control.Monad.Indexed (IxPointed (ireturn), (>>>=))
import Control.Monad.Indexed.State (IxStateT (..))
import Control.Monad.Indexed.Trans (IxMonadTrans (ilift))
import Data.Kind (Type)
import GHC.Generics (Generic)
import GHC.TypeLits (Nat, Symbol)
import Torch.GraduallyTyped.DType (DType (..), DataType, SDataType (..))
import Torch.GraduallyTyped.Device (Device (..), DeviceType (..), SDevice (..))
import Torch.GraduallyTyped.NN.Class (HasForward (..), HasInitialize (..), HasStateDict (..), ModelSpec, NamedModel (..))
import Torch.GraduallyTyped.NN.Dropout (Dropout (..))
import Torch.GraduallyTyped.NN.Normalization (LayerNorm (..), LayerNormSpec (..))
import Torch.GraduallyTyped.NN.Transformer.GMultiHeadAttention (GMultiHeadAttentionF, multiHeadAttentionSpec)
import Torch.GraduallyTyped.NN.Transformer.Type (STransformerStyle (..), TransformerStyle (..))
import Torch.GraduallyTyped.NN.Type (HasBias (..), HasDropout (..), SHasBias (..), SHasDropout (..))
import Torch.GraduallyTyped.Prelude (Catch, pattern (:|:))
import Torch.GraduallyTyped.Prelude.List (SList (..))
import Torch.GraduallyTyped.RequiresGradient (Gradient, RequiresGradient (..), SGradient (..))
import Torch.GraduallyTyped.Shape.Class (BroadcastShapesF)
import Torch.GraduallyTyped.Shape.Type (Dim (..), Name (..), SDim, SShape (..), Shape (..), Size (..))
import Torch.GraduallyTyped.Tensor.MathOperations.Pointwise (add)
import Torch.GraduallyTyped.Tensor.Type (Tensor)
import Torch.GraduallyTyped.Unify (type (<+>), type (<|>))
data
GCrossAttention
(initialLayerNorm :: Type)
(mha :: Type)
(dropout :: Type)
(finalLayerNorm :: Type)
where
GCrossAttention ::
forall initialLayerNorm mha dropout finalLayerNorm.
caInitialLayerNorm :: initialLayerNorm,
caMultiHeadAttention :: mha,
caDropout :: dropout,
caFinalLayerNorm :: finalLayerNorm
} ->
GCrossAttention initialLayerNorm mha dropout finalLayerNorm
deriving stock (Eq, Ord, Show, Generic)
type instance
ModelSpec (GCrossAttention initialLayerNorm mha dropout finalLayerNorm) =
GCrossAttention (ModelSpec initialLayerNorm) (ModelSpec mha) (ModelSpec dropout) (ModelSpec finalLayerNorm)
type family
GCrossAttentionF
(style :: TransformerStyle)
(gradient :: Gradient RequiresGradient)
(device :: Device (DeviceType Nat))
(dataType :: DataType DType)
(headDim :: Dim (Name Symbol) (Size Nat))
(headEmbedDim :: Dim (Name Symbol) (Size Nat))
(embedDim :: Dim (Name Symbol) (Size Nat))
(queryEmbedDim :: Dim (Name Symbol) (Size Nat))
(keyEmbedDim :: Dim (Name Symbol) (Size Nat))
(hasDropout :: HasDropout) ::
Type
where
GCrossAttentionF style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim hasDropout =
GCrossAttention
(CAInitialLayerNormF style gradient device dataType queryEmbedDim)
(CAMultiheadAttentionF style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim hasDropout)
(CADropoutF style hasDropout)
(CAFinalLayerNormF style gradient device dataType queryEmbedDim)
type family
CAInitialLayerNormF
(style :: TransformerStyle)
(gradient :: Gradient RequiresGradient)
(device :: Device (DeviceType Nat))
(dataType :: DataType DType)
(queryEmbedDim :: Dim (Name Symbol) (Size Nat)) ::
Type
where
CAInitialLayerNormF 'T5 gradient device dataType queryEmbedDim =
NamedModel (LayerNorm 'WithoutBias gradient device dataType ('Shape '[queryEmbedDim]))
CAInitialLayerNormF 'ByT5 gradient device dataType queryEmbedDim =
CAInitialLayerNormF 'T5 gradient device dataType queryEmbedDim
CAInitialLayerNormF 'BART _ _ _ _ =
()
CAInitialLayerNormF 'MBART gradient device dataType queryEmbedDim =
CAInitialLayerNormF 'BART gradient device dataType queryEmbedDim
CAInitialLayerNormF 'Pegasus gradient device dataType queryEmbedDim =
NamedModel (LayerNorm 'WithBias gradient device dataType ('Shape '[queryEmbedDim]))
-- | Multi-head attention component of the cross-attention block.
-- Note that 'keyEmbedDim' is passed twice to 'GMultiHeadAttentionF': the
-- key and the value embedding dimensions are both taken from the key input.
type family
  CAMultiheadAttentionF
    (style :: TransformerStyle)
    (gradient :: Gradient RequiresGradient)
    (device :: Device (DeviceType Nat))
    (dataType :: DataType DType)
    (headDim :: Dim (Name Symbol) (Size Nat))
    (headEmbedDim :: Dim (Name Symbol) (Size Nat))
    (embedDim :: Dim (Name Symbol) (Size Nat))
    (queryEmbedDim :: Dim (Name Symbol) (Size Nat))
    (keyEmbedDim :: Dim (Name Symbol) (Size Nat))
    (hasDropout :: HasDropout) ::
    Type
  where
  CAMultiheadAttentionF style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim hasDropout =
    NamedModel (GMultiHeadAttentionF style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim keyEmbedDim hasDropout)
-- | Dropout component: a real 'Dropout' when enabled, '()' otherwise.
-- The transformer style is ignored.
type family
  CADropoutF
    (style :: TransformerStyle)
    (hasDropout :: HasDropout) ::
    Type
  where
  CADropoutF _ 'WithDropout = Dropout
  CADropoutF _ 'WithoutDropout = ()
-- | Layer norm applied /after/ cross-attention — the mirror image of
-- 'CAInitialLayerNormF': none for T5\/ByT5\/Pegasus, layer norm with bias
-- for BART\/MBART.
type family
  CAFinalLayerNormF
    (style :: TransformerStyle)
    (gradient :: Gradient RequiresGradient)
    (device :: Device (DeviceType Nat))
    (dataType :: DataType DType)
    (queryEmbedDim :: Dim (Name Symbol) (Size Nat)) ::
    Type
  where
  CAFinalLayerNormF 'T5 _ _ _ _ =
    ()
  CAFinalLayerNormF 'ByT5 gradient device dataType queryEmbedDim =
    CAFinalLayerNormF 'T5 gradient device dataType queryEmbedDim
  CAFinalLayerNormF 'BART gradient device dataType queryEmbedDim =
    NamedModel (LayerNorm 'WithBias gradient device dataType ('Shape '[queryEmbedDim]))
  CAFinalLayerNormF 'MBART gradient device dataType queryEmbedDim =
    CAFinalLayerNormF 'BART gradient device dataType queryEmbedDim
  CAFinalLayerNormF 'Pegasus gradient device dataType queryEmbedDim =
    ()
-- | Build the 'ModelSpec' for a cross-attention block of the given
-- transformer style.
--
-- Style-specific choices (visible in the clauses below):
--
--   * T5\/ByT5: bias-free layer norm before attention, none after.
--   * BART\/MBART: no initial layer norm, layer norm with bias after.
--   * Pegasus: layer norm with bias before, none after.
--   * BERT\/RoBERTa\/GPT2: 'undefined' — these decoder styles are not
--     given a cross-attention spec here.
--
-- The two 'Double' arguments are the dropout probability and the
-- layer-norm epsilon, in that order.
crossAttentionSpec ::
  forall style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim hasDropout.
  STransformerStyle style ->
  SGradient gradient ->
  SDevice device ->
  SDataType dataType ->
  SDim headDim ->
  SDim headEmbedDim ->
  SDim embedDim ->
  SDim queryEmbedDim ->
  SDim keyEmbedDim ->
  SHasDropout hasDropout ->
  Double ->
  Double ->
  ModelSpec (GCrossAttentionF style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim hasDropout)
crossAttentionSpec style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim hasDropout dropoutP eps =
  let -- State-dict name prefixes below follow the upstream (HuggingFace)
      -- checkpoint layouts for each architecture.
      initialLayerNormSpec ST5 = NamedModel "layer_norm." layerNormWithoutBiasSpec
      initialLayerNormSpec SByT5 = NamedModel "layer_norm." layerNormWithoutBiasSpec
      initialLayerNormSpec SBART = ()
      initialLayerNormSpec SMBART = ()
      initialLayerNormSpec SPegasus = NamedModel "encoder_attn_layer_norm." layerNormWithBiasSpec
      initialLayerNormSpec SBERT = undefined
      initialLayerNormSpec SRoBERTa = undefined
      initialLayerNormSpec SGPT2 = undefined
      mhaSpec ST5 = NamedModel "EncDecAttention." $ mhaSpec' ST5
      mhaSpec SByT5 = NamedModel "EncDecAttention." $ mhaSpec' SByT5
      mhaSpec SBART = NamedModel "encoder_attn." $ mhaSpec' SBART
      mhaSpec SMBART = NamedModel "encoder_attn." $ mhaSpec' SMBART
      mhaSpec SPegasus = NamedModel "encoder_attn." $ mhaSpec' SPegasus
      mhaSpec SBERT = undefined
      mhaSpec SRoBERTa = undefined
      mhaSpec SGPT2 = undefined
      dropoutSpec _ SWithDropout = Dropout dropoutP
      dropoutSpec _ SWithoutDropout = ()
      finalLayerNormSpec ST5 = ()
      finalLayerNormSpec SByT5 = ()
      finalLayerNormSpec SBART = NamedModel "encoder_attn_layer_norm." layerNormWithBiasSpec
      finalLayerNormSpec SMBART = NamedModel "encoder_attn_layer_norm." layerNormWithBiasSpec
      finalLayerNormSpec SPegasus = ()
      finalLayerNormSpec SBERT = undefined
      finalLayerNormSpec SRoBERTa = undefined
      finalLayerNormSpec SGPT2 = undefined
   in GCrossAttention (initialLayerNormSpec style) (mhaSpec style) (dropoutSpec style hasDropout) (finalLayerNormSpec style)
  where
    -- Shared multi-head attention spec; note keyEmbedDim doubles as the
    -- value embedding dimension (passed twice).
    mhaSpec' ::
      STransformerStyle style ->
      ModelSpec (GMultiHeadAttentionF style gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim keyEmbedDim hasDropout)
    mhaSpec' style' = multiHeadAttentionSpec style' gradient device dataType headDim headEmbedDim embedDim queryEmbedDim keyEmbedDim keyEmbedDim hasDropout dropoutP
    -- Both layer norms normalize over the query embedding dimension only.
    layerNormWithoutBiasSpec = LayerNormSpec SWithoutBias gradient device dataType (SShape $ queryEmbedDim :|: SNil) eps
    layerNormWithBiasSpec = LayerNormSpec SWithBias gradient device dataType (SShape $ queryEmbedDim :|: SNil) eps
-- | Initialization threads the random-generator device through the four
-- components in order: initial layer norm, multi-head attention, dropout,
-- final layer norm.
instance
  ( HasInitialize initialLayerNorm generatorDevice initialLayerNorm' generatorDevice0,
    HasInitialize multiHeadAttention generatorDevice0 multiHeadAttention' generatorDevice1,
    HasInitialize dropout generatorDevice1 dropout' generatorDevice2,
    HasInitialize finalLayerNorm generatorDevice2 finalLayerNorm' generatorOutputDevice
  ) =>
  HasInitialize
    (GCrossAttention initialLayerNorm multiHeadAttention dropout finalLayerNorm)
    generatorDevice
    (GCrossAttention initialLayerNorm' multiHeadAttention' dropout' finalLayerNorm')
    generatorOutputDevice
-- | State-dict (de)serialization is derived component-wise.
instance
  ( HasStateDict initialLayerNorm,
    HasStateDict multiHeadAttention,
    HasStateDict dropout,
    HasStateDict finalLayerNorm
  ) =>
  HasStateDict (GCrossAttention initialLayerNorm multiHeadAttention dropout finalLayerNorm)
-- | 'HasForward' instance for 'GCrossAttention'.
--
-- (An ASCII data-flow diagram originally appeared here but was lost to text
-- extraction. The pipeline implemented by 'forward' below is:
-- initial layer norm -> multi-head attention -> dropout ->
-- residual add with the original query -> final layer norm.)
instance
  ( HasForward
      initialLayerNorm
      (Tensor queryGradient queryLayout queryDevice queryDataType queryShape)
      generatorDevice
      tensor0
      generatorDevice0,
    HasForward
      multiHeadAttention
      ( tensor0,
        Tensor keyGradient keyLayout keyDevice keyDataType keyShape,
        Tensor keyGradient keyLayout keyDevice keyDataType keyShape,
        Tensor attentionBiasGradient attentionBiasLayout attentionBiasDevice attentionBiasDataType attentionBiasShape
      )
      generatorDevice0
      tensor1
      generatorDevice1,
    HasForward
      dropout
      tensor1
      generatorDevice1
      (Tensor gradient2 layout2 device2 dataType2 shape2)
      generatorDevice2,
    HasForward
      finalLayerNorm
      (Tensor (queryGradient <|> gradient2) (queryLayout <+> layout2) (queryDevice <+> device2) (queryDataType <+> dataType2) (BroadcastShapesF queryShape shape2))
      generatorDevice2
      output
      generatorOutputDevice,
    -- The residual add requires the query and attention-output shapes to
    -- broadcast against each other.
    Catch (BroadcastShapesF queryShape shape2)
  ) =>
  HasForward
    (GCrossAttention initialLayerNorm multiHeadAttention dropout finalLayerNorm)
    ( Tensor queryGradient queryLayout queryDevice queryDataType queryShape,
      Tensor keyGradient keyLayout keyDevice keyDataType keyShape,
      Tensor attentionBiasGradient attentionBiasLayout attentionBiasDevice attentionBiasDataType attentionBiasShape
    )
    generatorDevice
    output
    generatorOutputDevice
  where
  forward GCrossAttention {..} (query, key, attentionBias) =
    runIxStateT $
      ireturn query
        -- optional pre-attention layer norm (identity for styles where the
        -- component is '()')
        >>>= IxStateT . forward caInitialLayerNorm
        -- 'key' is used as both the keys and the values of the attention
        >>>= (\query' -> IxStateT $ forward caMultiHeadAttention (query', key, key, attentionBias))
        >>>= IxStateT . forward caDropout
        -- residual connection with the *original* (pre-norm) query
        >>>= ilift . (query `add`)
        -- optional post-attention layer norm
        >>>= IxStateT . forward caFinalLayerNorm
|
c686a79cc709b64c038cced56afbac1740f6cc5ed032c7ba8e64b04ef19bcc0c | foreverbell/project-euler-solutions | 89.hs |
-- | Rewrite a Roman numeral into minimal form by collapsing the six
-- additive runs ("IIII", "VIIII", "XXXX", "LXXXX", "CCCC", "DCCCC")
-- into their subtractive equivalents ("IV", "IX", "XL", "XC", "CD", "CM").
roman :: String -> String
roman = go
  where
    -- Single left-to-right pass: substituted text is emitted as-is and
    -- never rescanned; if no rule applies, keep the character and move on.
    go [] = []
    go s = case applicable s of
      ((to, rest) : _) -> to ++ go rest
      []               -> head s : go (tail s)

    -- Rules that match a prefix of the remaining input, in the same order
    -- the rewrites are attempted.
    applicable s =
      [ (to, drop (length from) s)
      | (from, to) <- rules
      , take (length from) s == from
      ]

    rules =
      [ ("IIII", "IV")
      , ("VIIII", "IX")
      , ("XXXX", "XL")
      , ("LXXXX", "XC")
      , ("CCCC", "CD")
      , ("DCCCC", "CM")
      ]
-- | Total number of characters saved by rewriting every whitespace-separated
-- Roman numeral in the input into its minimal subtractive form.
solve :: String -> Int
solve input = totalLen originals - totalLen minimized
  where
    originals = words input
    minimized = map roman originals
    totalLen  = length . concat
-- Read the problem input (whitespace-separated Roman numerals) and print
-- the number of characters saved.  NOTE(review): assumes the file exists
-- at input/p089_roman.txt relative to the working directory.
main = (readFile "input/p089_roman.txt") >>= (print . solve)
| null | https://raw.githubusercontent.com/foreverbell/project-euler-solutions/c0bf2746aafce9be510892814e2d03e20738bf2b/src/89.hs | haskell |
roman :: String -> String
roman ('I':'I':'I':'I':r) = 'I':'V':roman r
roman ('V':'I':'I':'I':'I': r) = 'I':'X':roman r
roman ('X':'X':'X':'X':r) = 'X':'L':roman r
roman ('L':'X':'X':'X':'X':r) = 'X':'C':roman r
roman ('C':'C':'C':'C':r) = 'C':'D':roman r
roman ('D':'C':'C':'C':'C':r) = 'C':'M':roman r
roman (x:r) = x:roman r
roman [] = []
solve :: String -> Int
solve s = count r - count r' where
r = words s
r' = map roman r
count = sum . (map length)
main = (readFile "input/p089_roman.txt") >>= (print . solve)
| |
0671123b5bb889cc1544615c8059432e94c9925eddb6f4de927d102f037ae351 | elastic/eui-cljs | colors.cljs | (ns eui.themes.colors
(:require ["@elastic/eui/lib/themes/amsterdam/global_styling/variables/_colors.js" :as eui]))
(def special_colors eui/special_colors)
(def shade_colors eui/shade_colors)
(def dark_shades eui/dark_shades)
(def brand_colors eui/brand_colors)
(def dark_colors_ams eui/dark_colors_ams)
(def light_colors eui/light_colors)
(def brand_text_colors eui/brand_text_colors)
(def text_colors eui/text_colors)
(def colors eui/colors)
| null | https://raw.githubusercontent.com/elastic/eui-cljs/ad60b57470a2eb8db9bca050e02f52dd964d9f8e/src/eui/themes/colors.cljs | clojure | (ns eui.themes.colors
(:require ["@elastic/eui/lib/themes/amsterdam/global_styling/variables/_colors.js" :as eui]))
(def special_colors eui/special_colors)
(def shade_colors eui/shade_colors)
(def dark_shades eui/dark_shades)
(def brand_colors eui/brand_colors)
(def dark_colors_ams eui/dark_colors_ams)
(def light_colors eui/light_colors)
(def brand_text_colors eui/brand_text_colors)
(def text_colors eui/text_colors)
(def colors eui/colors)
| |
ec8dcb74f3a86eb342defceaaf8b80bff146b81ff2791cfcf62110f01366ca37 | geophf/1HaskellADay | Exercise.hs | # LANGUAGE OverloadedStrings , QuasiQuotes , TypeSynonymInstances #
module Y2017.M12.D26.Exercise where
{--
Yesterday, we were able to parse the HTML content of articles stored in JSON.
But we want to take a step back today, and go back to before then when we
simply ingested the JSON as blocks of (unprocessed articles). What we're going
to do today, instead of parsing these blocks (as we started yesterday), is
to store what we've scanned.
--}
-- below import available via 1HaskellADay git repository
import Y2017.M12.D20.Exercise
{--
Read in subset.json from D20 exercise, now: save each article block to the
PostgreSQL database.
--}
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToRow
import Store.SQL.Util.Indexed
-- | Exercise stub: convert a scanned article 'Block' into a database row
-- for 'insertArticleStgStmt'.  Left 'undefined' for the reader to implement.
instance ToRow Block where
  toRow art = undefined
-- The insert statement gives the Article structure
-- (also image attached from the Entity-relation diagram)
-- | Parameterized insert into the staging table; returns the id the
-- database assigns to the stored block.
insertArticleStgStmt :: Query
insertArticleStgStmt =
  [sql|INSERT INTO article_stg (block) VALUES (?) returning id|]
-- | Exercise stub: store a set of scanned article blocks via
-- 'insertArticleStgStmt' and return the database-assigned 'Index'es.
-- Left 'undefined' for the reader to implement.
insertStagedArt :: Connection -> [Block] -> IO [Index]
insertStagedArt conn arts = undefined
-- insertStagedArt stores a set of articles and returns the ids the database
-- assigns to these articles it stores
-- What are these stored article ids? How many articles did you store?
| null | https://raw.githubusercontent.com/geophf/1HaskellADay/514792071226cd1e2ba7640af942667b85601006/exercises/HAD/Y2017/M12/D26/Exercise.hs | haskell | }
-
Read in subset.json from D20 exercise, now: save each article block to the
PostgreSQL database.
-
(also image attached from the Entity-relation diagram)
insertStagedArt stores a set of articles and returns the ids the database
assigns to these articles it stores
What are these stored article ids? How many articles did you store? | # LANGUAGE OverloadedStrings , QuasiQuotes , TypeSynonymInstances #
module Y2017.M12.D26.Exercise where
-
Yesterday , we were able to parse the HTML content of articles stored in JSON .
But we want to take a step back today , and go back to before then when we
simply ingested the JSON as blocks of ( unprocessed articles ) . What we 're going
to do today , instead of parsing these blocks ( as we started yesterday ) , is
to store what we 've scanned .
-
Yesterday, we were able to parse the HTML content of articles stored in JSON.
But we want to take a step back today, and go back to before then when we
simply ingested the JSON as blocks of (unprocessed articles). What we're going
to do today, instead of parsing these blocks (as we started yesterday), is
to store what we've scanned.
below import available via 1HaskellADay git repository
import Y2017.M12.D20.Exercise
import Database.PostgreSQL.Simple
import Database.PostgreSQL.Simple.SqlQQ
import Database.PostgreSQL.Simple.ToRow
import Store.SQL.Util.Indexed
instance ToRow Block where
toRow art = undefined
The insert statement gives the Article structure
insertArticleStgStmt :: Query
insertArticleStgStmt =
[sql|INSERT INTO article_stg (block) VALUES (?) returning id|]
insertStagedArt :: Connection -> [Block] -> IO [Index]
insertStagedArt conn arts = undefined
|
2341862a510a6d4ffaa1a566bd1d023d14ce033d74f952997f3e153c293294d5 | helium/router | router_discovery_SUITE.erl | -module(router_discovery_SUITE).
-export([
all/0,
init_per_testcase/2,
end_per_testcase/2
]).
-export([discovery_test/1, fail_to_connect_test/1]).
-include_lib("helium_proto/include/blockchain_state_channel_v1_pb.hrl").
-include_lib("helium_proto/include/discovery_pb.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("router_device_worker.hrl").
-include("lorawan_vars.hrl").
-include("console_test.hrl").
-define(DECODE(A), jsx:decode(A, [return_maps])).
-define(APPEUI, <<0, 0, 0, 2, 0, 0, 0, 1>>).
-define(DEVEUI, <<0, 0, 0, 0, 0, 0, 0, 1>>).
-define(ETS, ?MODULE).
%%--------------------------------------------------------------------
%% COMMON TEST CALLBACK FUNCTIONS
%%--------------------------------------------------------------------
%%--------------------------------------------------------------------
%% @public
%% @doc
%% Running tests for this suite
%% @end
%%--------------------------------------------------------------------
%% NOTE(review): discovery_test/1 and fail_to_connect_test/1 are exported
%% above but not listed here, so the suite currently runs no test cases —
%% confirm whether disabling them is intentional.
all() ->
    [].
%%--------------------------------------------------------------------
%% TEST CASE SETUP
%%--------------------------------------------------------------------
%% Delegates per-testcase setup (swarm, keys, console mocks) to the shared
%% test_utils helper.
init_per_testcase(TestCase, Config) ->
    test_utils:init_per_testcase(TestCase, Config).
%%--------------------------------------------------------------------
%% TEST CASE TEARDOWN
%%--------------------------------------------------------------------
%% Delegates per-testcase teardown to the shared test_utils helper.
end_per_testcase(TestCase, Config) ->
    test_utils:end_per_testcase(TestCase, Config).
%%--------------------------------------------------------------------
%% TEST CASES
%%--------------------------------------------------------------------
%% Happy-path discovery flow: a websocket 'discovery' request causes Router
%% to dial the hotspot with a signed discovery_start message containing 10
%% packets; each packet the hotspot replays back is decoded and forwarded
%% to the configured HTTP channel with a payload carrying the transaction
%% id and error = 0.  A second discovery request (TxnID3) restarts the
%% flow and the device worker must reset the frame counter to 0.
discovery_test(Config) ->
    DiscoFrameTimeout = timer:seconds(3),
    application:set_env(router, disco_frame_timeout, DiscoFrameTimeout),
    %% Setup stream stuff
    HotspotSwarm = proplists:get_value(swarm, Config),
    libp2p_swarm:add_stream_handler(
        HotspotSwarm,
        router_discovery_handler:version(),
        {router_discovery_handler_test, server, [self()]}
    ),
    [HotspotListenAddress | _] = libp2p_swarm:listen_addrs(HotspotSwarm),
    {ok, _} = libp2p_swarm:connect(blockchain_swarm:swarm(), HotspotListenAddress),
    %% Wait until the router swarm can also dial the hotspot by p2p address.
    test_utils:wait_until(fun() ->
        case
            libp2p_swarm:connect(blockchain_swarm:swarm(), libp2p_swarm:p2p_address(HotspotSwarm))
        of
            {ok, _} -> true;
            _ -> false
        end
    end),
    #{secret := HotspotPrivKey, public := HotspotPubKey} = proplists:get_value(keys, Config),
    SigFun = libp2p_crypto:mk_sig_fun(HotspotPrivKey),
    HotspotPubKeyBin = libp2p_crypto:pubkey_to_bin(HotspotPubKey),
    HotspotB58Bin = erlang:list_to_binary(libp2p_crypto:bin_to_b58(HotspotPubKeyBin)),
    TxnID1 = 1,
    %% The hotspot signs its own b58 address; Router echoes the signature
    %% back in the discovery_start message.
    Sig = SigFun(HotspotB58Bin),
    EncodedSig = base64:encode(Sig),
    Map1 = #{
        <<"hotspot">> => HotspotB58Bin,
        <<"transaction_id">> => TxnID1,
        <<"device_id">> => ?CONSOLE_DEVICE_ID,
        <<"signature">> => EncodedSig
    },
    WSPid =
        receive
            {websocket_init, P} -> P
        after 2500 -> ct:fail(websocket_init_timeout)
        end,
    %% Start discovery process
    WSPid ! {discovery, Map1},
    %% Make sure that data is delivered to hotspot
    DiscoveryData1 =
        receive
            {router_discovery_handler_test, Bin1} ->
                #discovery_start_pb{
                    hotspot = HotspotPubKeyBin,
                    signature = EncodedSig
                } = discovery_pb:decode_msg(Bin1, discovery_start_pb)
        after 5000 -> ct:fail(router_discovery_handler_test_timeout)
        end,
    %% Setup to send rcved packet
    RouterSwarm = blockchain_swarm:swarm(),
    [RouterAddress | _] = libp2p_swarm:listen_addrs(RouterSwarm),
    {ok, Stream} = libp2p_swarm:dial_framed_stream(
        HotspotSwarm,
        RouterAddress,
        router_handler_test:version(),
        router_handler_test,
        [self()]
    ),
    ?assertEqual(10, erlang:length(DiscoveryData1#discovery_start_pb.packets)),
    [Payload1 | _] = DiscoveryData1#discovery_start_pb.packets,
    HeliumPacket1 = #packet_pb{
        type = lorawan,
        payload = Payload1,
        frequency = 923.3,
        datarate = <<"SF8BW125">>,
        signal_strength = 0.0,
        snr = 0.0,
        routing = undefined
    },
    Packet1 = #blockchain_state_channel_packet_v1_pb{
        packet = HeliumPacket1,
        hotspot = HotspotPubKeyBin,
        region = 'US915'
    },
    SCMsg1 = #blockchain_state_channel_message_v1_pb{msg = {packet, Packet1}},
    %% NOTE(review): result of this encode_msg call is discarded — dead code?
    blockchain_state_channel_v1_pb:encode_msg(SCMsg1),
    Stream ! {send, blockchain_state_channel_v1_pb:encode_msg(SCMsg1)},
    timer:sleep(DiscoFrameTimeout),
    %% Make sure that we got our txn id in the payload
    Body1 = jsx:encode(#{txn_id => TxnID1, error => 0}),
    test_utils:wait_channel_data(#{
        <<"type">> => <<"uplink">>,
        <<"replay">> => false,
        <<"uuid">> => fun erlang:is_binary/1,
        <<"id">> => ?CONSOLE_DEVICE_ID,
        <<"downlink_url">> =>
            <<?CONSOLE_URL/binary, "/api/v1/down/", ?CONSOLE_HTTP_CHANNEL_ID/binary, "/",
                ?CONSOLE_HTTP_CHANNEL_DOWNLINK_TOKEN/binary, "/", ?CONSOLE_DEVICE_ID/binary>>,
        <<"name">> => ?CONSOLE_DEVICE_NAME,
        <<"dev_eui">> => lorawan_utils:binary_to_hex(?DEVEUI),
        <<"app_eui">> => lorawan_utils:binary_to_hex(?APPEUI),
        <<"metadata">> => #{
            <<"labels">> => ?CONSOLE_LABELS,
            <<"organization_id">> => ?CONSOLE_ORG_ID,
            <<"multi_buy">> => 1,
            <<"adr_allowed">> => false,
            <<"cf_list_enabled">> => false,
            <<"rx_delay_state">> => fun erlang:is_binary/1,
            <<"rx_delay">> => 0,
            <<"preferred_hotspots">> => fun erlang:is_list/1
        },
        %% First uplink of the discovery session: frame counter is 0.
        <<"fcnt">> => 0,
        <<"reported_at">> => fun erlang:is_integer/1,
        <<"payload">> => base64:encode(Body1),
        <<"payload_size">> => erlang:byte_size(Body1),
        <<"raw_packet">> => fun erlang:is_binary/1,
        <<"port">> => 1,
        <<"devaddr">> => '_',
        <<"hotspots">> => [
            #{
                <<"id">> => erlang:list_to_binary(libp2p_crypto:bin_to_b58(HotspotPubKeyBin)),
                <<"name">> => erlang:list_to_binary(
                    blockchain_utils:addr2name(HotspotPubKeyBin)
                ),
                <<"reported_at">> => fun erlang:is_integer/1,
                <<"hold_time">> => 100,
                <<"status">> => <<"success">>,
                <<"rssi">> => 0.0,
                <<"snr">> => 0.0,
                <<"spreading">> => <<"SF8BW125">>,
                <<"frequency">> => fun erlang:is_float/1,
                <<"channel">> => fun erlang:is_number/1,
                <<"lat">> => fun erlang:is_float/1,
                <<"long">> => fun erlang:is_float/1
            }
        ],
        <<"dc">> => #{
            <<"balance">> => fun erlang:is_integer/1,
            <<"nonce">> => fun erlang:is_integer/1
        }
    }),
    %% Replay the second of the ten discovery packets.
    [_, Payload2 | _] = DiscoveryData1#discovery_start_pb.packets,
    HeliumPacket2 = #packet_pb{
        type = lorawan,
        payload = Payload2,
        frequency = 923.3,
        datarate = <<"SF8BW125">>,
        signal_strength = 0.0,
        snr = 0.0,
        routing = undefined
    },
    Packet2 = #blockchain_state_channel_packet_v1_pb{
        packet = HeliumPacket2,
        hotspot = HotspotPubKeyBin,
        region = 'US915'
    },
    SCMsg2 = #blockchain_state_channel_message_v1_pb{msg = {packet, Packet2}},
    %% NOTE(review): result of this encode_msg call is discarded — dead code?
    blockchain_state_channel_v1_pb:encode_msg(SCMsg2),
    Stream ! {send, blockchain_state_channel_v1_pb:encode_msg(SCMsg2)},
    timer:sleep(DiscoFrameTimeout),
    %% Make sure that we got our txn id in the payload
    test_utils:wait_channel_data(#{
        <<"type">> => <<"uplink">>,
        <<"replay">> => false,
        <<"uuid">> => fun erlang:is_binary/1,
        <<"id">> => ?CONSOLE_DEVICE_ID,
        <<"downlink_url">> =>
            <<?CONSOLE_URL/binary, "/api/v1/down/", ?CONSOLE_HTTP_CHANNEL_ID/binary, "/",
                ?CONSOLE_HTTP_CHANNEL_DOWNLINK_TOKEN/binary, "/", ?CONSOLE_DEVICE_ID/binary>>,
        <<"name">> => ?CONSOLE_DEVICE_NAME,
        <<"dev_eui">> => lorawan_utils:binary_to_hex(?DEVEUI),
        <<"app_eui">> => lorawan_utils:binary_to_hex(?APPEUI),
        <<"metadata">> => #{
            <<"labels">> => ?CONSOLE_LABELS,
            <<"organization_id">> => ?CONSOLE_ORG_ID,
            <<"multi_buy">> => 1,
            <<"adr_allowed">> => false,
            <<"cf_list_enabled">> => false,
            <<"rx_delay_state">> => fun erlang:is_binary/1,
            <<"rx_delay">> => 0,
            <<"preferred_hotspots">> => fun erlang:is_list/1
        },
        %% Second uplink of the same session: frame counter advanced to 1.
        <<"fcnt">> => 1,
        <<"reported_at">> => fun erlang:is_integer/1,
        <<"payload">> => base64:encode(Body1),
        <<"payload_size">> => erlang:byte_size(Body1),
        <<"raw_packet">> => fun erlang:is_binary/1,
        <<"port">> => 1,
        <<"devaddr">> => '_',
        <<"hotspots">> => [
            #{
                <<"id">> => erlang:list_to_binary(libp2p_crypto:bin_to_b58(HotspotPubKeyBin)),
                <<"name">> => erlang:list_to_binary(
                    blockchain_utils:addr2name(HotspotPubKeyBin)
                ),
                <<"reported_at">> => fun erlang:is_integer/1,
                <<"hold_time">> => 100,
                <<"status">> => <<"success">>,
                <<"rssi">> => 0.0,
                <<"snr">> => 0.0,
                <<"spreading">> => <<"SF8BW125">>,
                <<"frequency">> => fun erlang:is_float/1,
                <<"channel">> => fun erlang:is_number/1,
                <<"lat">> => fun erlang:is_float/1,
                <<"long">> => fun erlang:is_float/1
            }
        ],
        <<"dc">> => #{
            <<"balance">> => fun erlang:is_integer/1,
            <<"nonce">> => fun erlang:is_integer/1
        }
    }),
    %% Restart process one more to make sure device worker behaves correctly
    TxnID3 = 3,
    Map3 = #{
        <<"hotspot">> => HotspotB58Bin,
        <<"transaction_id">> => TxnID3,
        <<"device_id">> => ?CONSOLE_DEVICE_ID,
        <<"signature">> => EncodedSig
    },
    WSPid ! {discovery, Map3},
    DiscoveryData3 =
        receive
            {router_discovery_handler_test, Bin3} ->
                #discovery_start_pb{
                    hotspot = HotspotPubKeyBin,
                    signature = EncodedSig
                } = discovery_pb:decode_msg(Bin3, discovery_start_pb)
        after 5000 -> ct:fail(router_discovery_handler_test_timeout)
        end,
    [Payload3 | _] = DiscoveryData3#discovery_start_pb.packets,
    HeliumPacket3 = #packet_pb{
        type = lorawan,
        payload = Payload3,
        frequency = 923.3,
        datarate = <<"SF8BW125">>,
        signal_strength = 0.0,
        snr = 0.0,
        routing = undefined
    },
    Packet3 = #blockchain_state_channel_packet_v1_pb{
        packet = HeliumPacket3,
        hotspot = HotspotPubKeyBin,
        region = 'US915'
    },
    SCMsg3 = #blockchain_state_channel_message_v1_pb{msg = {packet, Packet3}},
    %% NOTE(review): result of this encode_msg call is discarded — dead code?
    blockchain_state_channel_v1_pb:encode_msg(SCMsg3),
    Stream ! {send, blockchain_state_channel_v1_pb:encode_msg(SCMsg3)},
    timer:sleep(DiscoFrameTimeout),
    Body3 = jsx:encode(#{txn_id => TxnID3, error => 0}),
    test_utils:wait_channel_data(#{
        <<"type">> => <<"uplink">>,
        <<"replay">> => false,
        <<"uuid">> => fun erlang:is_binary/1,
        <<"id">> => ?CONSOLE_DEVICE_ID,
        <<"downlink_url">> =>
            <<?CONSOLE_URL/binary, "/api/v1/down/", ?CONSOLE_HTTP_CHANNEL_ID/binary, "/",
                ?CONSOLE_HTTP_CHANNEL_DOWNLINK_TOKEN/binary, "/", ?CONSOLE_DEVICE_ID/binary>>,
        <<"name">> => ?CONSOLE_DEVICE_NAME,
        <<"dev_eui">> => lorawan_utils:binary_to_hex(?DEVEUI),
        <<"app_eui">> => lorawan_utils:binary_to_hex(?APPEUI),
        <<"metadata">> => #{
            <<"labels">> => ?CONSOLE_LABELS,
            <<"organization_id">> => ?CONSOLE_ORG_ID,
            <<"multi_buy">> => 1,
            <<"adr_allowed">> => false,
            <<"cf_list_enabled">> => false,
            <<"rx_delay_state">> => fun erlang:is_binary/1,
            <<"rx_delay">> => 0,
            <<"preferred_hotspots">> => fun erlang:is_list/1
        },
        %% New discovery session: frame counter reset to 0.
        <<"fcnt">> => 0,
        <<"reported_at">> => fun erlang:is_integer/1,
        <<"payload">> => base64:encode(Body3),
        <<"payload_size">> => erlang:byte_size(Body3),
        <<"raw_packet">> => fun erlang:is_binary/1,
        <<"port">> => 1,
        <<"devaddr">> => '_',
        <<"hotspots">> => [
            #{
                <<"id">> => erlang:list_to_binary(libp2p_crypto:bin_to_b58(HotspotPubKeyBin)),
                <<"name">> => erlang:list_to_binary(blockchain_utils:addr2name(HotspotPubKeyBin)),
                <<"reported_at">> => fun erlang:is_integer/1,
                <<"hold_time">> => 100,
                <<"status">> => <<"success">>,
                <<"rssi">> => 0.0,
                <<"snr">> => 0.0,
                <<"spreading">> => <<"SF8BW125">>,
                <<"frequency">> => fun erlang:is_float/1,
                <<"channel">> => fun erlang:is_number/1,
                <<"lat">> => fun erlang:is_float/1,
                <<"long">> => fun erlang:is_float/1
            }
        ],
        <<"dc">> => #{
            <<"balance">> => fun erlang:is_integer/1,
            <<"nonce">> => fun erlang:is_integer/1
        }
    }),
    ok.
%% Failure path: the discovery request names a hotspot Router never
%% connects to, so after the retry window the channel receives a payload
%% with error = 1 for the same transaction id.
fail_to_connect_test(Config) ->
    application:set_env(router, disco_frame_timeout, 400),
    HotspotSwarm = proplists:get_value(swarm, Config),
    libp2p_swarm:add_stream_handler(
        HotspotSwarm,
        router_discovery_handler:version(),
        {router_discovery_handler_test, server, [self()]}
    ),
    #{secret := HotspotPrivKey, public := HotspotPubKey} = proplists:get_value(keys, Config),
    SigFun = libp2p_crypto:mk_sig_fun(HotspotPrivKey),
    HotspotPubKeyBin = libp2p_crypto:pubkey_to_bin(HotspotPubKey),
    HotspotB58Bin = erlang:list_to_binary(libp2p_crypto:bin_to_b58(HotspotPubKeyBin)),
    TxnID1 = 1,
    Sig = SigFun(HotspotB58Bin),
    EncodedSig = base64:encode(Sig),
    Map1 = #{
        <<"hotspot">> => HotspotB58Bin,
        <<"transaction_id">> => TxnID1,
        <<"device_id">> => ?CONSOLE_DEVICE_ID,
        <<"signature">> => EncodedSig
    },
    WSPid =
        receive
            {websocket_init, P} -> P
        after 2500 -> ct:fail(websocket_init_timeout)
        end,
    %% Start discovery process
    WSPid ! {discovery, Map1},
    %% This can take time as we will retry 10 times
    timer:sleep(12000),
    %% error => 1 signals the discovery connection failure to the channel.
    Body1 = jsx:encode(#{txn_id => TxnID1, error => 1}),
    test_utils:wait_channel_data(#{
        <<"type">> => <<"uplink">>,
        <<"replay">> => false,
        <<"uuid">> => fun erlang:is_binary/1,
        <<"id">> => ?CONSOLE_DEVICE_ID,
        <<"downlink_url">> =>
            <<?CONSOLE_URL/binary, "/api/v1/down/", ?CONSOLE_HTTP_CHANNEL_ID/binary, "/",
                ?CONSOLE_HTTP_CHANNEL_DOWNLINK_TOKEN/binary, "/", ?CONSOLE_DEVICE_ID/binary>>,
        <<"name">> => ?CONSOLE_DEVICE_NAME,
        <<"dev_eui">> => lorawan_utils:binary_to_hex(?DEVEUI),
        <<"app_eui">> => lorawan_utils:binary_to_hex(?APPEUI),
        <<"metadata">> => #{
            <<"labels">> => ?CONSOLE_LABELS,
            <<"organization_id">> => ?CONSOLE_ORG_ID,
            <<"multi_buy">> => 1,
            <<"adr_allowed">> => false,
            <<"cf_list_enabled">> => false,
            <<"rx_delay_state">> => fun erlang:is_binary/1,
            <<"rx_delay">> => 0,
            <<"preferred_hotspots">> => fun erlang:is_list/1
        },
        <<"fcnt">> => 1,
        <<"reported_at">> => fun erlang:is_integer/1,
        <<"payload">> => base64:encode(Body1),
        <<"payload_size">> => erlang:byte_size(Body1),
        <<"raw_packet">> => fun erlang:is_binary/1,
        <<"port">> => 1,
        <<"devaddr">> => '_',
        <<"hotspots">> => [
            #{
                <<"id">> => erlang:list_to_binary(libp2p_crypto:bin_to_b58(HotspotPubKeyBin)),
                <<"name">> => erlang:list_to_binary(
                    blockchain_utils:addr2name(HotspotPubKeyBin)
                ),
                <<"reported_at">> => fun erlang:is_integer/1,
                <<"hold_time">> => 1,
                <<"status">> => <<"success">>,
                <<"rssi">> => 0.0,
                <<"snr">> => 0.0,
                <<"spreading">> => <<"SF8BW125">>,
                <<"frequency">> => fun erlang:is_float/1,
                <<"channel">> => fun erlang:is_number/1,
                <<"lat">> => fun erlang:is_float/1,
                <<"long">> => fun erlang:is_float/1
            }
        ],
        <<"dc">> => #{
            <<"balance">> => fun erlang:is_integer/1,
            <<"nonce">> => fun erlang:is_integer/1
        }
    }),
    ok.
%% ------------------------------------------------------------------
%% Helper functions
%% ------------------------------------------------------------------
| null | https://raw.githubusercontent.com/helium/router/0b553f0c7b192ed52387fe807760cdec779c9d74/test/router_discovery_SUITE.erl | erlang | --------------------------------------------------------------------
COMMON TEST CALLBACK FUNCTIONS
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
Running tests for this suite
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
TEST CASE SETUP
--------------------------------------------------------------------
--------------------------------------------------------------------
TEST CASE TEARDOWN
--------------------------------------------------------------------
--------------------------------------------------------------------
TEST CASES
--------------------------------------------------------------------
Setup stream stuff
Start discovery process
Setup to send rcved packet
Make sure that we got our txn id in the payload
Make sure that we got our txn id in the payload
Start discovery process
------------------------------------------------------------------
Helper functions
------------------------------------------------------------------ | -module(router_discovery_SUITE).
-export([
all/0,
init_per_testcase/2,
end_per_testcase/2
]).
-export([discovery_test/1, fail_to_connect_test/1]).
-include_lib("helium_proto/include/blockchain_state_channel_v1_pb.hrl").
-include_lib("helium_proto/include/discovery_pb.hrl").
-include_lib("common_test/include/ct.hrl").
-include_lib("eunit/include/eunit.hrl").
-include("router_device_worker.hrl").
-include("lorawan_vars.hrl").
-include("console_test.hrl").
-define(DECODE(A), jsx:decode(A, [return_maps])).
-define(APPEUI, <<0, 0, 0, 2, 0, 0, 0, 1>>).
-define(DEVEUI, <<0, 0, 0, 0, 0, 0, 0, 1>>).
-define(ETS, ?MODULE).
@public
all() ->
[].
init_per_testcase(TestCase, Config) ->
test_utils:init_per_testcase(TestCase, Config).
end_per_testcase(TestCase, Config) ->
test_utils:end_per_testcase(TestCase, Config).
discovery_test(Config) ->
DiscoFrameTimeout = timer:seconds(3),
application:set_env(router, disco_frame_timeout, DiscoFrameTimeout),
HotspotSwarm = proplists:get_value(swarm, Config),
libp2p_swarm:add_stream_handler(
HotspotSwarm,
router_discovery_handler:version(),
{router_discovery_handler_test, server, [self()]}
),
[HotspotListenAddress | _] = libp2p_swarm:listen_addrs(HotspotSwarm),
{ok, _} = libp2p_swarm:connect(blockchain_swarm:swarm(), HotspotListenAddress),
test_utils:wait_until(fun() ->
case
libp2p_swarm:connect(blockchain_swarm:swarm(), libp2p_swarm:p2p_address(HotspotSwarm))
of
{ok, _} -> true;
_ -> false
end
end),
#{secret := HotspotPrivKey, public := HotspotPubKey} = proplists:get_value(keys, Config),
SigFun = libp2p_crypto:mk_sig_fun(HotspotPrivKey),
HotspotPubKeyBin = libp2p_crypto:pubkey_to_bin(HotspotPubKey),
HotspotB58Bin = erlang:list_to_binary(libp2p_crypto:bin_to_b58(HotspotPubKeyBin)),
TxnID1 = 1,
Sig = SigFun(HotspotB58Bin),
EncodedSig = base64:encode(Sig),
Map1 = #{
<<"hotspot">> => HotspotB58Bin,
<<"transaction_id">> => TxnID1,
<<"device_id">> => ?CONSOLE_DEVICE_ID,
<<"signature">> => EncodedSig
},
WSPid =
receive
{websocket_init, P} -> P
after 2500 -> ct:fail(websocket_init_timeout)
end,
WSPid ! {discovery, Map1},
Make sure that data is deliverd to hotspot
DiscoveryData1 =
receive
{router_discovery_handler_test, Bin1} ->
#discovery_start_pb{
hotspot = HotspotPubKeyBin,
signature = EncodedSig
} = discovery_pb:decode_msg(Bin1, discovery_start_pb)
after 5000 -> ct:fail(router_discovery_handler_test_timeout)
end,
RouterSwarm = blockchain_swarm:swarm(),
[RouterAddress | _] = libp2p_swarm:listen_addrs(RouterSwarm),
{ok, Stream} = libp2p_swarm:dial_framed_stream(
HotspotSwarm,
RouterAddress,
router_handler_test:version(),
router_handler_test,
[self()]
),
?assertEqual(10, erlang:length(DiscoveryData1#discovery_start_pb.packets)),
[Payload1 | _] = DiscoveryData1#discovery_start_pb.packets,
HeliumPacket1 = #packet_pb{
type = lorawan,
payload = Payload1,
frequency = 923.3,
datarate = <<"SF8BW125">>,
signal_strength = 0.0,
snr = 0.0,
routing = undefined
},
Packet1 = #blockchain_state_channel_packet_v1_pb{
packet = HeliumPacket1,
hotspot = HotspotPubKeyBin,
region = 'US915'
},
SCMsg1 = #blockchain_state_channel_message_v1_pb{msg = {packet, Packet1}},
blockchain_state_channel_v1_pb:encode_msg(SCMsg1),
Stream ! {send, blockchain_state_channel_v1_pb:encode_msg(SCMsg1)},
timer:sleep(DiscoFrameTimeout),
Body1 = jsx:encode(#{txn_id => TxnID1, error => 0}),
test_utils:wait_channel_data(#{
<<"type">> => <<"uplink">>,
<<"replay">> => false,
<<"uuid">> => fun erlang:is_binary/1,
<<"id">> => ?CONSOLE_DEVICE_ID,
<<"downlink_url">> =>
<<?CONSOLE_URL/binary, "/api/v1/down/", ?CONSOLE_HTTP_CHANNEL_ID/binary, "/",
?CONSOLE_HTTP_CHANNEL_DOWNLINK_TOKEN/binary, "/", ?CONSOLE_DEVICE_ID/binary>>,
<<"name">> => ?CONSOLE_DEVICE_NAME,
<<"dev_eui">> => lorawan_utils:binary_to_hex(?DEVEUI),
<<"app_eui">> => lorawan_utils:binary_to_hex(?APPEUI),
<<"metadata">> => #{
<<"labels">> => ?CONSOLE_LABELS,
<<"organization_id">> => ?CONSOLE_ORG_ID,
<<"multi_buy">> => 1,
<<"adr_allowed">> => false,
<<"cf_list_enabled">> => false,
<<"rx_delay_state">> => fun erlang:is_binary/1,
<<"rx_delay">> => 0,
<<"preferred_hotspots">> => fun erlang:is_list/1
},
<<"fcnt">> => 0,
<<"reported_at">> => fun erlang:is_integer/1,
<<"payload">> => base64:encode(Body1),
<<"payload_size">> => erlang:byte_size(Body1),
<<"raw_packet">> => fun erlang:is_binary/1,
<<"port">> => 1,
<<"devaddr">> => '_',
<<"hotspots">> => [
#{
<<"id">> => erlang:list_to_binary(libp2p_crypto:bin_to_b58(HotspotPubKeyBin)),
<<"name">> => erlang:list_to_binary(
blockchain_utils:addr2name(HotspotPubKeyBin)
),
<<"reported_at">> => fun erlang:is_integer/1,
<<"hold_time">> => 100,
<<"status">> => <<"success">>,
<<"rssi">> => 0.0,
<<"snr">> => 0.0,
<<"spreading">> => <<"SF8BW125">>,
<<"frequency">> => fun erlang:is_float/1,
<<"channel">> => fun erlang:is_number/1,
<<"lat">> => fun erlang:is_float/1,
<<"long">> => fun erlang:is_float/1
}
],
<<"dc">> => #{
<<"balance">> => fun erlang:is_integer/1,
<<"nonce">> => fun erlang:is_integer/1
}
}),
[_, Payload2 | _] = DiscoveryData1#discovery_start_pb.packets,
HeliumPacket2 = #packet_pb{
type = lorawan,
payload = Payload2,
frequency = 923.3,
datarate = <<"SF8BW125">>,
signal_strength = 0.0,
snr = 0.0,
routing = undefined
},
Packet2 = #blockchain_state_channel_packet_v1_pb{
packet = HeliumPacket2,
hotspot = HotspotPubKeyBin,
region = 'US915'
},
SCMsg2 = #blockchain_state_channel_message_v1_pb{msg = {packet, Packet2}},
blockchain_state_channel_v1_pb:encode_msg(SCMsg2),
Stream ! {send, blockchain_state_channel_v1_pb:encode_msg(SCMsg2)},
timer:sleep(DiscoFrameTimeout),
test_utils:wait_channel_data(#{
<<"type">> => <<"uplink">>,
<<"replay">> => false,
<<"uuid">> => fun erlang:is_binary/1,
<<"id">> => ?CONSOLE_DEVICE_ID,
<<"downlink_url">> =>
<<?CONSOLE_URL/binary, "/api/v1/down/", ?CONSOLE_HTTP_CHANNEL_ID/binary, "/",
?CONSOLE_HTTP_CHANNEL_DOWNLINK_TOKEN/binary, "/", ?CONSOLE_DEVICE_ID/binary>>,
<<"name">> => ?CONSOLE_DEVICE_NAME,
<<"dev_eui">> => lorawan_utils:binary_to_hex(?DEVEUI),
<<"app_eui">> => lorawan_utils:binary_to_hex(?APPEUI),
<<"metadata">> => #{
<<"labels">> => ?CONSOLE_LABELS,
<<"organization_id">> => ?CONSOLE_ORG_ID,
<<"multi_buy">> => 1,
<<"adr_allowed">> => false,
<<"cf_list_enabled">> => false,
<<"rx_delay_state">> => fun erlang:is_binary/1,
<<"rx_delay">> => 0,
<<"preferred_hotspots">> => fun erlang:is_list/1
},
<<"fcnt">> => 1,
<<"reported_at">> => fun erlang:is_integer/1,
<<"payload">> => base64:encode(Body1),
<<"payload_size">> => erlang:byte_size(Body1),
<<"raw_packet">> => fun erlang:is_binary/1,
<<"port">> => 1,
<<"devaddr">> => '_',
<<"hotspots">> => [
#{
<<"id">> => erlang:list_to_binary(libp2p_crypto:bin_to_b58(HotspotPubKeyBin)),
<<"name">> => erlang:list_to_binary(
blockchain_utils:addr2name(HotspotPubKeyBin)
),
<<"reported_at">> => fun erlang:is_integer/1,
<<"hold_time">> => 100,
<<"status">> => <<"success">>,
<<"rssi">> => 0.0,
<<"snr">> => 0.0,
<<"spreading">> => <<"SF8BW125">>,
<<"frequency">> => fun erlang:is_float/1,
<<"channel">> => fun erlang:is_number/1,
<<"lat">> => fun erlang:is_float/1,
<<"long">> => fun erlang:is_float/1
}
],
<<"dc">> => #{
<<"balance">> => fun erlang:is_integer/1,
<<"nonce">> => fun erlang:is_integer/1
}
}),
    %% Restart process one more time to make sure device worker behaves correctly
TxnID3 = 3,
Map3 = #{
<<"hotspot">> => HotspotB58Bin,
<<"transaction_id">> => TxnID3,
<<"device_id">> => ?CONSOLE_DEVICE_ID,
<<"signature">> => EncodedSig
},
WSPid ! {discovery, Map3},
DiscoveryData3 =
receive
{router_discovery_handler_test, Bin3} ->
#discovery_start_pb{
hotspot = HotspotPubKeyBin,
signature = EncodedSig
} = discovery_pb:decode_msg(Bin3, discovery_start_pb)
after 5000 -> ct:fail(router_discovery_handler_test_timeout)
end,
[Payload3 | _] = DiscoveryData3#discovery_start_pb.packets,
HeliumPacket3 = #packet_pb{
type = lorawan,
payload = Payload3,
frequency = 923.3,
datarate = <<"SF8BW125">>,
signal_strength = 0.0,
snr = 0.0,
routing = undefined
},
Packet3 = #blockchain_state_channel_packet_v1_pb{
packet = HeliumPacket3,
hotspot = HotspotPubKeyBin,
region = 'US915'
},
SCMsg3 = #blockchain_state_channel_message_v1_pb{msg = {packet, Packet3}},
blockchain_state_channel_v1_pb:encode_msg(SCMsg3),
Stream ! {send, blockchain_state_channel_v1_pb:encode_msg(SCMsg3)},
timer:sleep(DiscoFrameTimeout),
Body3 = jsx:encode(#{txn_id => TxnID3, error => 0}),
test_utils:wait_channel_data(#{
<<"type">> => <<"uplink">>,
<<"replay">> => false,
<<"uuid">> => fun erlang:is_binary/1,
<<"id">> => ?CONSOLE_DEVICE_ID,
<<"downlink_url">> =>
<<?CONSOLE_URL/binary, "/api/v1/down/", ?CONSOLE_HTTP_CHANNEL_ID/binary, "/",
?CONSOLE_HTTP_CHANNEL_DOWNLINK_TOKEN/binary, "/", ?CONSOLE_DEVICE_ID/binary>>,
<<"name">> => ?CONSOLE_DEVICE_NAME,
<<"dev_eui">> => lorawan_utils:binary_to_hex(?DEVEUI),
<<"app_eui">> => lorawan_utils:binary_to_hex(?APPEUI),
<<"metadata">> => #{
<<"labels">> => ?CONSOLE_LABELS,
<<"organization_id">> => ?CONSOLE_ORG_ID,
<<"multi_buy">> => 1,
<<"adr_allowed">> => false,
<<"cf_list_enabled">> => false,
<<"rx_delay_state">> => fun erlang:is_binary/1,
<<"rx_delay">> => 0,
<<"preferred_hotspots">> => fun erlang:is_list/1
},
<<"fcnt">> => 0,
<<"reported_at">> => fun erlang:is_integer/1,
<<"payload">> => base64:encode(Body3),
<<"payload_size">> => erlang:byte_size(Body3),
<<"raw_packet">> => fun erlang:is_binary/1,
<<"port">> => 1,
<<"devaddr">> => '_',
<<"hotspots">> => [
#{
<<"id">> => erlang:list_to_binary(libp2p_crypto:bin_to_b58(HotspotPubKeyBin)),
<<"name">> => erlang:list_to_binary(blockchain_utils:addr2name(HotspotPubKeyBin)),
<<"reported_at">> => fun erlang:is_integer/1,
<<"hold_time">> => 100,
<<"status">> => <<"success">>,
<<"rssi">> => 0.0,
<<"snr">> => 0.0,
<<"spreading">> => <<"SF8BW125">>,
<<"frequency">> => fun erlang:is_float/1,
<<"channel">> => fun erlang:is_number/1,
<<"lat">> => fun erlang:is_float/1,
<<"long">> => fun erlang:is_float/1
}
],
<<"dc">> => #{
<<"balance">> => fun erlang:is_integer/1,
<<"nonce">> => fun erlang:is_integer/1
}
}),
ok.
%%--------------------------------------------------------------------
%% @doc
%% Verify that when the router cannot dial back to the hotspot for a
%% discovery request, the device worker still reports the failure as an
%% uplink with `error => 1' in the payload delivered to the channel.
%% NOTE(review): no stream is set up here on purpose, so the dial-back
%% presumably fails — confirm against router_discovery implementation.
%% @end
%%--------------------------------------------------------------------
fail_to_connect_test(Config) ->
    %% Shorten the discovery frame timeout so the test completes quickly.
    application:set_env(router, disco_frame_timeout, 400),
    HotspotSwarm = proplists:get_value(swarm, Config),
    %% Register a test stream handler so discovery dials reach this test process.
    libp2p_swarm:add_stream_handler(
        HotspotSwarm,
        router_discovery_handler:version(),
        {router_discovery_handler_test, server, [self()]}
    ),
    %% Build the hotspot identity and a signature over its b58 address.
    #{secret := HotspotPrivKey, public := HotspotPubKey} = proplists:get_value(keys, Config),
    SigFun = libp2p_crypto:mk_sig_fun(HotspotPrivKey),
    HotspotPubKeyBin = libp2p_crypto:pubkey_to_bin(HotspotPubKey),
    HotspotB58Bin = erlang:list_to_binary(libp2p_crypto:bin_to_b58(HotspotPubKeyBin)),
    TxnID1 = 1,
    Sig = SigFun(HotspotB58Bin),
    EncodedSig = base64:encode(Sig),
    Map1 = #{
        <<"hotspot">> => HotspotB58Bin,
        <<"transaction_id">> => TxnID1,
        <<"device_id">> => ?CONSOLE_DEVICE_ID,
        <<"signature">> => EncodedSig
    },
    %% Wait for the fake websocket to come up, then trigger discovery.
    WSPid =
        receive
            {websocket_init, P} -> P
        after 2500 -> ct:fail(websocket_init_timeout)
        end,
    WSPid ! {discovery, Map1},
    %% This can take time as we will retry 10 times
    timer:sleep(12000),
    %% The failure report carries the transaction id and a non-zero error.
    Body1 = jsx:encode(#{txn_id => TxnID1, error => 1}),
    test_utils:wait_channel_data(#{
        <<"type">> => <<"uplink">>,
        <<"replay">> => false,
        <<"uuid">> => fun erlang:is_binary/1,
        <<"id">> => ?CONSOLE_DEVICE_ID,
        <<"downlink_url">> =>
            <<?CONSOLE_URL/binary, "/api/v1/down/", ?CONSOLE_HTTP_CHANNEL_ID/binary, "/",
                ?CONSOLE_HTTP_CHANNEL_DOWNLINK_TOKEN/binary, "/", ?CONSOLE_DEVICE_ID/binary>>,
        <<"name">> => ?CONSOLE_DEVICE_NAME,
        <<"dev_eui">> => lorawan_utils:binary_to_hex(?DEVEUI),
        <<"app_eui">> => lorawan_utils:binary_to_hex(?APPEUI),
        <<"metadata">> => #{
            <<"labels">> => ?CONSOLE_LABELS,
            <<"organization_id">> => ?CONSOLE_ORG_ID,
            <<"multi_buy">> => 1,
            <<"adr_allowed">> => false,
            <<"cf_list_enabled">> => false,
            <<"rx_delay_state">> => fun erlang:is_binary/1,
            <<"rx_delay">> => 0,
            <<"preferred_hotspots">> => fun erlang:is_list/1
        },
        <<"fcnt">> => 1,
        <<"reported_at">> => fun erlang:is_integer/1,
        <<"payload">> => base64:encode(Body1),
        <<"payload_size">> => erlang:byte_size(Body1),
        <<"raw_packet">> => fun erlang:is_binary/1,
        <<"port">> => 1,
        <<"devaddr">> => '_',
        <<"hotspots">> => [
            #{
                <<"id">> => erlang:list_to_binary(libp2p_crypto:bin_to_b58(HotspotPubKeyBin)),
                <<"name">> => erlang:list_to_binary(
                    blockchain_utils:addr2name(HotspotPubKeyBin)
                ),
                <<"reported_at">> => fun erlang:is_integer/1,
                <<"hold_time">> => 1,
                <<"status">> => <<"success">>,
                <<"rssi">> => 0.0,
                <<"snr">> => 0.0,
                <<"spreading">> => <<"SF8BW125">>,
                <<"frequency">> => fun erlang:is_float/1,
                <<"channel">> => fun erlang:is_number/1,
                <<"lat">> => fun erlang:is_float/1,
                <<"long">> => fun erlang:is_float/1
            }
        ],
        <<"dc">> => #{
            <<"balance">> => fun erlang:is_integer/1,
            <<"nonce">> => fun erlang:is_integer/1
        }
    }),
    ok.
%% |
%% Subsets and Splits
%% No community queries yet
%% The top public SQL queries from the community will appear here once available.