_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
9b0044dd7fcd8e143de335730cab4eae76d3698706a3e83b3bb33bc9e97f77d2 | YoshikuniJujo/test_haskell | TryTypeLevelList.hs | {-# LANGUAGE ScopedTypeVariables, TypeApplications #-}
{-# LANGUAGE DataKinds, KindSignatures, TypeOperators #-}
# LANGUAGE MonoLocalBinds #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE AllowAmbiguousTypes #
# LANGUAGE FlexibleContexts , FlexibleInstances , UndecidableInstances #
# OPTIONS_GHC -Wall -fno - warn - tabs #
module TryTypeLevelList where
import Data.Kind
class PrefixOf (xs :: [Type]) (ys :: [Type])
instance PrefixOf '[] ys
instance PrefixOf xs ys => PrefixOf (x ': xs) (x ': ys)
class InfixIndex (xs :: [Type]) (ys :: [Type]) where infixIndex :: Int
instance PrefixOf (x ': xs) (x ': ys) => InfixIndex (x ': xs) (x ': ys) where
infixIndex = 0
instance {-# OVERLAPPABLE #-} InfixIndex xs ys => InfixIndex xs (y ': ys) where
infixIndex = infixIndex @xs @ys + 1
| null | https://raw.githubusercontent.com/YoshikuniJujo/test_haskell/871e64ffcf3e9b8c6eea0604e723dc421179f7fe/tribial/tribial-trials/src/TryTypeLevelList.hs | haskell | # LANGUAGE ScopedTypeVariables, TypeApplications #
# LANGUAGE DataKinds, KindSignatures, TypeOperators #
# OVERLAPPABLE # | # LANGUAGE MonoLocalBinds #
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE AllowAmbiguousTypes #
# LANGUAGE FlexibleContexts , FlexibleInstances , UndecidableInstances #
# OPTIONS_GHC -Wall -fno - warn - tabs #
module TryTypeLevelList where
import Data.Kind
class PrefixOf (xs :: [Type]) (ys :: [Type])
instance PrefixOf '[] ys
instance PrefixOf xs ys => PrefixOf (x ': xs) (x ': ys)
class InfixIndex (xs :: [Type]) (ys :: [Type]) where infixIndex :: Int
instance PrefixOf (x ': xs) (x ': ys) => InfixIndex (x ': xs) (x ': ys) where
infixIndex = 0
infixIndex = infixIndex @xs @ys + 1
|
052da81e755cd3e7fcbaff67bca2ea95295ef2bdcced79d55a693346644ddeee | stumpwm/stumpwm-contrib | bitcoin.lisp | bitcoin.lisp
(in-package :bitcoin)
;;; CODE:
Bitcoin formatter for the Stumpwm mode - line .
;;; There is no timestamp, so let's store up to size
;;; values got from url and calculate its average. Comparing
;;; actual value with this average, set a color format.
;;; Exported
(defparameter *modeline-use-colors* t
"Flag use color or not in price value.")
(defparameter *threshold* 0.001
"Magnitude that must be exceeded for a increasing or decreasing
color.")
(defparameter *time-delay* 30
"Time in seconds between calls to `*url*' to get price. Must be
positive because `*values-size*'.")
(defparameter *local-code* 2
"Localization code, `0' gives 1234.56, `1' gives 1,234.56, `2' gives
1.234,56, and `3' gives 1 234,56.")
;;; Get price
(defparameter *url* ""
"Location of price provider.")
(defvar *prev-time* 0
"Store previous time when got price.")
(defun get-value-from-url ()
"Get the actual USD-BTC value."
;; Just in case internet drops
(handler-case
(gethash "rate_float"
(gethash "USD"
(gethash "bpi"
(yason:parse
(babel:octets-to-string
(dexador:get *url* :keep-alive t)
:encoding :utf-8)))))
;; Return NIL in case some condition is triggered
(condition () nil)))
;;; Store prices
(defvar *values*
3 hours
:initial-element NIL)
"List of values got from `*url*'. Set size to a list of the last n
hours getting values: a new coin value is appended in `*values*' every
`*time-delay*', so it is divided the desired n time in seconds by the
time-delay in seconds.")
(defvar *values-average* 0.0
"Average of values in `*values*'.")
;;; Write on modeline
(defun comma-point (stream arg &rest args)
(declare (ignore args))
(format stream
"~,,',,:D.~A"
(truncate arg)
(let ((float-string (format nil "~,2F" arg)))
(subseq float-string (1+ (position #\. float-string))))))
(defun point-comma (stream arg &rest args)
(declare (ignore args))
(format stream
"~,,'.,:D,~A"
(truncate arg)
(let ((float-string (format nil "~,2F" arg)))
(subseq float-string (1+ (position #\. float-string))))))
(defun space-comma (stream arg &rest args)
(declare (ignore args))
(format stream
"~,,' ,:D,~A"
(truncate arg)
(let ((float-string (format nil "~,2F" arg)))
(subseq float-string (1+ (position #\. float-string))))))
(defun bitcoin-modeline (ml)
"Get the actual USD-BTC value, store value in list, preserve list size
popping first value, calculate average and set formatting depending on
value vs average. This function is evaluated on every modeline refresh."
(declare (ignore ml))
(let ((now (/ (get-internal-real-time) internal-time-units-per-second)))
(when (> (- now *prev-time*) *time-delay*)
(progn (setf *prev-time* now)
;; Add value to values list, pushing to front
(push (get-value-from-url) *values*)
;; Preserve values list size, popping from end
(setf *values* (nreverse *values*))
(pop *values*)
(setf *values* (nreverse *values*))
Calculate average of values , excluding NIL values
;; that could exist because network issues.
(let ((clean (remove-if-not #'numberp *values*)))
(setf *values-average* (/ (reduce #'+ clean)
(if (zerop (length clean))
1
(length clean))))))))
;; Actual value must be positive number
(if (and (numberp (car *values*)) (plusp (car *values*)))
;; Apply desired format to value
(let ((value-string
(case *local-code*
(0 (format nil "~,2F" (car *values*)))
(1 (format nil "~/bitcoin::comma-point/" (car *values*)))
(2 (format nil "~/bitcoin::point-comma/" (car *values*)))
(3 (format nil "~/bitcoin::space-comma/" (car *values*)))
(otherwise (format nil "~,2F" (car *values*))))))
;; Return with color if desired
(if *modeline-use-colors*
(let* ((diff (- (car *values*) *values-average*))
(pdiff (/ diff (if (zerop (car *values*))
1
(car *values*)))))
(cond ((> pdiff *threshold*)
(format nil "^[^B^3*~A^]" value-string))
((< pdiff (- *threshold*))
(format nil "^[^1*~A^]" value-string))
(t (format nil "^[^7*~A^]" value-string))))
(format nil "^[^**~A^]" value-string)))
(format nil "-BTC-")))
(stumpwm:add-screen-mode-line-formatter #\b 'bitcoin-modeline)
;;; Debugging ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
CL - USER > ( declaim ( optimize ( speed 0 ) ( debug 3 ) ( safety 0 ) ) )
CL - USER > ( asdf : load - system : bitcoin )
CL - USER > ( in - package " BITCOIN " )
;;; Wile executing, swap to code buffers, and any re-compile-load-ed
changes will be visible . Recall ` C - c C - b ' stops loop in Sly REPL .
;; (do () (nil)
;; (let* ((price (get-value-from-url))
;; (clean (remove-if-not #'numberp *values*))
;; (average (/ (reduce #'+ clean)
;; (length clean)))
;; (diff (- price average)))
( format t " ~&~2 $ ~2 $ ~2@$ ~4@$% ~a "
;; price
;; average
;; diff
( * 100 ( / diff price ) )
;; (bitcoin-modeline t)))
;; (force-output)
( sleep 3 ) )
;; (do () (nil)
;; (format t "~&~a" (bitcoin-modeline t))
;; (force-output)
( sleep 1 ) )
Search optimized code to push / append / pop list . See time and conses .
;; (time
;; (do ((i 1 (1+ i))
( l ( make - list 10 : initial - element 0 ) ) )
;; ((> i 10))
;; (push i l)
;; (pop l)
;; (format t "~&~A" l)))
;; (time
;; (do ((i 1 (1+ i))
( l ( make - list 10 : initial - element 0 ) ) )
( ( > i 1000000 ) ( format t " ~&~A " l ) )
( setf l ( append l ( list i ) ) )
;; (pop l)))
;;; Best option and the car is the last pushed element
;; (time
;; (do ((i 1 (1+ i))
( l ( make - list 10 : initial - element 0 ) ) )
( ( > i 1000000 ) ( format t " ~&~A " l ) )
;; (push i l)
( setf l ( l ) )
;; (pop l)
( setf l ( l ) ) ) )
;;; Seek for optimal number formatting function
;;; From
;; (defun comma-point (stream arg &rest args)
;; (declare (ignore args))
;; (format stream
;; "~,,',,:D.~A"
;; (truncate arg)
;; (let ((float-string (format nil "~,2F" arg)))
;; (subseq float-string (1+ (position #\. float-string))))))
;; (defun point-comma (stream arg &rest args)
;; (declare (ignore args))
;; (format stream
;; "~,,'.,:D,~A"
;; (truncate arg)
;; (let ((float-string (format nil "~,2F" arg)))
;; (subseq float-string (1+ (position #\. float-string))))))
;; (defun space-comma (stream arg &rest args)
;; (declare (ignore args))
;; (format stream
" ~ , , ' , : D,~A "
;; (truncate arg)
;; (let ((float-string (format nil "~,2F" arg)))
;; (subseq float-string (1+ (position #\. float-string))))))
;; (defun custom (stream arg &rest args)
;; (declare (ignore args))
;; (multiple-value-bind (quotient remainder) (truncate arg)
;; (format stream
" ~,,' . ,:D,~D "
;; quotient
( truncate ( * 100 remainder ) ) ) ) )
| null | https://raw.githubusercontent.com/stumpwm/stumpwm-contrib/1751590093a838ef48721b4eda6980e0b69cd5b3/modeline/bitcoin/bitcoin.lisp | lisp | CODE:
There is no timestamp, so let's store up to size
values got from url and calculate its average. Comparing
actual value with this average, set a color format.
Exported
Get price
Just in case internet drops
Return NIL in case some condition is triggered
Store prices
Write on modeline
Add value to values list, pushing to front
Preserve values list size, popping from end
that could exist because network issues.
Actual value must be positive number
Apply desired format to value
Return with color if desired
Debugging ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Wile executing, swap to code buffers, and any re-compile-load-ed
(do () (nil)
(let* ((price (get-value-from-url))
(clean (remove-if-not #'numberp *values*))
(average (/ (reduce #'+ clean)
(length clean)))
(diff (- price average)))
price
average
diff
(bitcoin-modeline t)))
(force-output)
(do () (nil)
(format t "~&~a" (bitcoin-modeline t))
(force-output)
(time
(do ((i 1 (1+ i))
((> i 10))
(push i l)
(pop l)
(format t "~&~A" l)))
(time
(do ((i 1 (1+ i))
(pop l)))
Best option and the car is the last pushed element
(time
(do ((i 1 (1+ i))
(push i l)
(pop l)
Seek for optimal number formatting function
From
(defun comma-point (stream arg &rest args)
(declare (ignore args))
(format stream
"~,,',,:D.~A"
(truncate arg)
(let ((float-string (format nil "~,2F" arg)))
(subseq float-string (1+ (position #\. float-string))))))
(defun point-comma (stream arg &rest args)
(declare (ignore args))
(format stream
"~,,'.,:D,~A"
(truncate arg)
(let ((float-string (format nil "~,2F" arg)))
(subseq float-string (1+ (position #\. float-string))))))
(defun space-comma (stream arg &rest args)
(declare (ignore args))
(format stream
(truncate arg)
(let ((float-string (format nil "~,2F" arg)))
(subseq float-string (1+ (position #\. float-string))))))
(defun custom (stream arg &rest args)
(declare (ignore args))
(multiple-value-bind (quotient remainder) (truncate arg)
(format stream
quotient | bitcoin.lisp
(in-package :bitcoin)
Bitcoin formatter for the Stumpwm mode - line .
(defparameter *modeline-use-colors* t
"Flag use color or not in price value.")
(defparameter *threshold* 0.001
"Magnitude that must be exceeded for a increasing or decreasing
color.")
(defparameter *time-delay* 30
"Time in seconds between calls to `*url*' to get price. Must be
positive because `*values-size*'.")
(defparameter *local-code* 2
"Localization code, `0' gives 1234.56, `1' gives 1,234.56, `2' gives
1.234,56, and `3' gives 1 234,56.")
(defparameter *url* ""
"Location of price provider.")
(defvar *prev-time* 0
"Store previous time when got price.")
(defun get-value-from-url ()
"Get the actual USD-BTC value."
(handler-case
(gethash "rate_float"
(gethash "USD"
(gethash "bpi"
(yason:parse
(babel:octets-to-string
(dexador:get *url* :keep-alive t)
:encoding :utf-8)))))
(condition () nil)))
(defvar *values*
3 hours
:initial-element NIL)
"List of values got from `*url*'. Set size to a list of the last n
hours getting values: a new coin value is appended in `*values*' every
`*time-delay*', so it is divided the desired n time in seconds by the
time-delay in seconds.")
(defvar *values-average* 0.0
"Average of values in `*values*'.")
(defun comma-point (stream arg &rest args)
(declare (ignore args))
(format stream
"~,,',,:D.~A"
(truncate arg)
(let ((float-string (format nil "~,2F" arg)))
(subseq float-string (1+ (position #\. float-string))))))
(defun point-comma (stream arg &rest args)
(declare (ignore args))
(format stream
"~,,'.,:D,~A"
(truncate arg)
(let ((float-string (format nil "~,2F" arg)))
(subseq float-string (1+ (position #\. float-string))))))
(defun space-comma (stream arg &rest args)
(declare (ignore args))
(format stream
"~,,' ,:D,~A"
(truncate arg)
(let ((float-string (format nil "~,2F" arg)))
(subseq float-string (1+ (position #\. float-string))))))
(defun bitcoin-modeline (ml)
"Get the actual USD-BTC value, store value in list, preserve list size
popping first value, calculate average and set formatting depending on
value vs average. This function is evaluated on every modeline refresh."
(declare (ignore ml))
(let ((now (/ (get-internal-real-time) internal-time-units-per-second)))
(when (> (- now *prev-time*) *time-delay*)
(progn (setf *prev-time* now)
(push (get-value-from-url) *values*)
(setf *values* (nreverse *values*))
(pop *values*)
(setf *values* (nreverse *values*))
Calculate average of values , excluding NIL values
(let ((clean (remove-if-not #'numberp *values*)))
(setf *values-average* (/ (reduce #'+ clean)
(if (zerop (length clean))
1
(length clean))))))))
(if (and (numberp (car *values*)) (plusp (car *values*)))
(let ((value-string
(case *local-code*
(0 (format nil "~,2F" (car *values*)))
(1 (format nil "~/bitcoin::comma-point/" (car *values*)))
(2 (format nil "~/bitcoin::point-comma/" (car *values*)))
(3 (format nil "~/bitcoin::space-comma/" (car *values*)))
(otherwise (format nil "~,2F" (car *values*))))))
(if *modeline-use-colors*
(let* ((diff (- (car *values*) *values-average*))
(pdiff (/ diff (if (zerop (car *values*))
1
(car *values*)))))
(cond ((> pdiff *threshold*)
(format nil "^[^B^3*~A^]" value-string))
((< pdiff (- *threshold*))
(format nil "^[^1*~A^]" value-string))
(t (format nil "^[^7*~A^]" value-string))))
(format nil "^[^**~A^]" value-string)))
(format nil "-BTC-")))
(stumpwm:add-screen-mode-line-formatter #\b 'bitcoin-modeline)
CL - USER > ( declaim ( optimize ( speed 0 ) ( debug 3 ) ( safety 0 ) ) )
CL - USER > ( asdf : load - system : bitcoin )
CL - USER > ( in - package " BITCOIN " )
changes will be visible . Recall ` C - c C - b ' stops loop in Sly REPL .
( format t " ~&~2 $ ~2 $ ~2@$ ~4@$% ~a "
( * 100 ( / diff price ) )
( sleep 3 ) )
( sleep 1 ) )
Search optimized code to push / append / pop list . See time and conses .
( l ( make - list 10 : initial - element 0 ) ) )
( l ( make - list 10 : initial - element 0 ) ) )
( ( > i 1000000 ) ( format t " ~&~A " l ) )
( setf l ( append l ( list i ) ) )
( l ( make - list 10 : initial - element 0 ) ) )
( ( > i 1000000 ) ( format t " ~&~A " l ) )
( setf l ( l ) )
( setf l ( l ) ) ) )
" ~ , , ' , : D,~A "
" ~,,' . ,:D,~D "
( truncate ( * 100 remainder ) ) ) ) )
|
b974d600afdd21fcaa31f8436f6b3513c3505c7d434c1565bc1b3e4ff2f6383d | kaizhang/Taiji | Exporter.hs | -- Export results generated by Taiji for visualization by external programs.
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Taiji.Component.Exporter (builder) where
import Bio.Data.Experiment.Types
import Bio.Utils.Misc (readDouble)
import Codec.Compression.GZip (bestCompression, compressLevel,
compressWith, defaultCompressParams)
import Control.DeepSeq (($!!))
import Control.Lens ((.=), (^.))
import Control.Monad
import Control.Monad.IO.Class (liftIO)
import Data.Binary (encode)
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy as BL
import Data.CaseInsensitive as CI (CI, mk, original)
import Data.Char (toLower, toUpper)
import Data.Function (on)
import Data.List (groupBy, sort)
import qualified Data.Map.Strict as M
import qualified Data.Matrix.Unboxed as MU
import Data.Maybe
import qualified Data.Text as T
import qualified Data.Vector as V
import IGraph (getNodes, nodeLab, pre, suc)
import Scientific.Workflow
import Shelly (run_, shelly)
import System.IO.Temp (withTempDirectory)
import Taiji.Component.Rank (buildNet)
import Taiji.Types
builder :: Builder ()
builder = do
node "Export_results" [| \x -> do
outputdir <- getConfig' "outputDir"
let output = outputdir ++ "/TaijiResults.bin.gz"
liftIO $ getResults output x
return output
|] $ do
submitToRemote .= Just False
stateful .= True
note .= "Export results."
["Output_ranks", "Output_expression","Link_TF_gene"] ~> "Export_results"
getResults :: Experiment e
=> FilePath
-> ( FilePath -- ^ File storing the PageRank results
^ expression
, [e] -- ^ networks
)
-> IO ()
getResults output (pagerank, Just expr, es) = do
table <- readData pagerank expr
nets <- forM es $ \e -> do
gr <- buildNet e
let results = M.fromList $ flip mapMaybe (V.toList $ rowNames table) $
\x -> case getNodes gr (mk x) of
[] -> Nothing
(i:_) ->
let children = B.intercalate "\t" $
map (format . original . nodeLab gr) $ pre gr i
parents = B.intercalate "\t" $
map (format . original . nodeLab gr) $ suc gr i
in Just (x, parents `B.append` "+" `B.append` children)
return $!! (fromJust $ e^.groupName, results)
BL.writeFile output $
compressWith defaultCompressParams{compressLevel=bestCompression} $
encode $ TaijiResults table $ M.fromList nets
where
format x = let (a, b) = B.splitAt 1 x
in B.map toUpper a `B.append` B.map toLower b
-- | Read data, normalize and calculate p-values.
readData :: FilePath -- ^ PageRank
^ expression
-> IO RankTable
readData input1 input2 = do
rank <- readTSV <$> B.readFile input1
expr <- (fmap log' . readTSV) <$> B.readFile input2
let (labels, xs) = unzip $ map unzip $ groupBy ((==) `on` (fst.fst)) $ sort $
M.toList $ M.intersectionWith (,) rank expr
rowlab = V.fromList $ map (format . original) $ fst $ unzip $
map head labels
collab = V.fromList $ map (T.pack . B.unpack . original) $ snd $
unzip $ head $ labels
(rank', expr') = unzip $ map unzip xs
return $ RankTable rowlab collab (MU.fromLists rank') $ MU.fromLists expr'
where
log' x | x == 0 = log 0.01
| otherwise = log x
format x = let (a, b) = B.splitAt 1 x
in B.map toUpper a `B.append` B.map toLower b
readTSV :: B.ByteString -> M.Map (CI B.ByteString, CI B.ByteString) Double
readTSV input = M.fromList $ concatMap (f . B.split '\t') content
where
f (x:xs) = zipWith (\s v -> ((mk x, mk s), readDouble v)) samples xs
(header:content) = B.lines input
samples = tail $ B.split '\t' header
| null | https://raw.githubusercontent.com/kaizhang/Taiji/f6a050ba0ee6acb6077435566669279455cef350/Taiji/src/Taiji/Component/Exporter.hs | haskell | Export results generated by Taiji for visualization by external programs.
# LANGUAGE OverloadedStrings #
^ File storing the PageRank results
^ networks
| Read data, normalize and calculate p-values.
^ PageRank | # LANGUAGE TemplateHaskell #
module Taiji.Component.Exporter (builder) where
import Bio.Data.Experiment.Types
import Bio.Utils.Misc (readDouble)
import Codec.Compression.GZip (bestCompression, compressLevel,
compressWith, defaultCompressParams)
import Control.DeepSeq (($!!))
import Control.Lens ((.=), (^.))
import Control.Monad
import Control.Monad.IO.Class (liftIO)
import Data.Binary (encode)
import qualified Data.ByteString.Char8 as B
import qualified Data.ByteString.Lazy as BL
import Data.CaseInsensitive as CI (CI, mk, original)
import Data.Char (toLower, toUpper)
import Data.Function (on)
import Data.List (groupBy, sort)
import qualified Data.Map.Strict as M
import qualified Data.Matrix.Unboxed as MU
import Data.Maybe
import qualified Data.Text as T
import qualified Data.Vector as V
import IGraph (getNodes, nodeLab, pre, suc)
import Scientific.Workflow
import Shelly (run_, shelly)
import System.IO.Temp (withTempDirectory)
import Taiji.Component.Rank (buildNet)
import Taiji.Types
builder :: Builder ()
builder = do
node "Export_results" [| \x -> do
outputdir <- getConfig' "outputDir"
let output = outputdir ++ "/TaijiResults.bin.gz"
liftIO $ getResults output x
return output
|] $ do
submitToRemote .= Just False
stateful .= True
note .= "Export results."
["Output_ranks", "Output_expression","Link_TF_gene"] ~> "Export_results"
getResults :: Experiment e
=> FilePath
^ expression
)
-> IO ()
getResults output (pagerank, Just expr, es) = do
table <- readData pagerank expr
nets <- forM es $ \e -> do
gr <- buildNet e
let results = M.fromList $ flip mapMaybe (V.toList $ rowNames table) $
\x -> case getNodes gr (mk x) of
[] -> Nothing
(i:_) ->
let children = B.intercalate "\t" $
map (format . original . nodeLab gr) $ pre gr i
parents = B.intercalate "\t" $
map (format . original . nodeLab gr) $ suc gr i
in Just (x, parents `B.append` "+" `B.append` children)
return $!! (fromJust $ e^.groupName, results)
BL.writeFile output $
compressWith defaultCompressParams{compressLevel=bestCompression} $
encode $ TaijiResults table $ M.fromList nets
where
format x = let (a, b) = B.splitAt 1 x
in B.map toUpper a `B.append` B.map toLower b
^ expression
-> IO RankTable
readData input1 input2 = do
rank <- readTSV <$> B.readFile input1
expr <- (fmap log' . readTSV) <$> B.readFile input2
let (labels, xs) = unzip $ map unzip $ groupBy ((==) `on` (fst.fst)) $ sort $
M.toList $ M.intersectionWith (,) rank expr
rowlab = V.fromList $ map (format . original) $ fst $ unzip $
map head labels
collab = V.fromList $ map (T.pack . B.unpack . original) $ snd $
unzip $ head $ labels
(rank', expr') = unzip $ map unzip xs
return $ RankTable rowlab collab (MU.fromLists rank') $ MU.fromLists expr'
where
log' x | x == 0 = log 0.01
| otherwise = log x
format x = let (a, b) = B.splitAt 1 x
in B.map toUpper a `B.append` B.map toLower b
readTSV :: B.ByteString -> M.Map (CI B.ByteString, CI B.ByteString) Double
readTSV input = M.fromList $ concatMap (f . B.split '\t') content
where
f (x:xs) = zipWith (\s v -> ((mk x, mk s), readDouble v)) samples xs
(header:content) = B.lines input
samples = tail $ B.split '\t' header
|
b0989383bd3901e595f1689552abc70bc3432f9e047dc9d4f8048ae66e1a13b8 | sbcl/specializable | package.lisp | ;;;; package.lisp --- Package definition for the language-extension.prototype-specializer system.
;;;;
Copyright ( C ) 2014 Jan Moringen
;;;;
Author : < >
(cl:defpackage #:prototype-specializer
(:use
#:cl
#:alexandria
#:specializable)
;; Conditions
(:export
#:delegation-cycle-error
#:delegation-cycle-error-object
#:delegation-cycle-error-delegation
#:delegation-cycle-error-path)
Prototype object protocol
(:export
#:add-delegation
#:remove-delegation
#:map-delegations
#:map-delegations-and-paths
#:clone)
Prototype class and root object
(:export
#:prototype-object
#:/root/)
Prototype generic function
(:export
#:prototype-generic-function))
| null | https://raw.githubusercontent.com/sbcl/specializable/a08048ce874a2a8c58e4735d88de3bf3da0de052/src/prototype-specializer/package.lisp | lisp | package.lisp --- Package definition for the language-extension.prototype-specializer system.
Conditions | Copyright ( C ) 2014 Jan Moringen
Author : < >
(cl:defpackage #:prototype-specializer
(:use
#:cl
#:alexandria
#:specializable)
(:export
#:delegation-cycle-error
#:delegation-cycle-error-object
#:delegation-cycle-error-delegation
#:delegation-cycle-error-path)
Prototype object protocol
(:export
#:add-delegation
#:remove-delegation
#:map-delegations
#:map-delegations-and-paths
#:clone)
Prototype class and root object
(:export
#:prototype-object
#:/root/)
Prototype generic function
(:export
#:prototype-generic-function))
|
9b9de21778e92b48e373628b592ac8512a2475a8a20b6fb03ec3c67015c871e0 | cl-rabbit/cl-bunny | dummy.lisp | (in-package :cl-bunny.test)
(plan 1)
(subtest "Dummy, sanity check"
(ok :t "T is T"))
(finalize)
| null | https://raw.githubusercontent.com/cl-rabbit/cl-bunny/6da7fe161efc8d6bb0b8b09ac8efad03553d765c/t/dummy.lisp | lisp | (in-package :cl-bunny.test)
(plan 1)
(subtest "Dummy, sanity check"
(ok :t "T is T"))
(finalize)
| |
d97c7f1c85f3d30f4e0b41f5c5e4fdbc1a6b76d2fc40b1b6fb6d52247d588bdf | vaibhavsagar/experiments | XmlReader.hs | --
wrapper for Dotnet . System . Xml . XmlReader
--
module Dotnet.System.Xml.XmlReader where
import Dotnet
import qualified Dotnet.System.Object
import Dotnet.System.Xml.XmlNameTable
import Dotnet.System.Xml
data XmlReader_ a
type XmlReader a = Dotnet.System.Object.Object (XmlReader_ a)
attributeCount :: XmlReader a -> IO Int
attributeCount = invoke "get_AttributeCount" ()
baseURI :: XmlReader a -> IO String
baseURI = invoke "get_BaseURI" ()
canResolveEntity :: XmlReader a -> IO Bool
canResolveEntity = invoke "get_CanResolveEntity" ()
depth :: XmlReader a -> IO Int
depth = invoke "get_Depth" ()
eof :: XmlReader a -> IO Bool
eof = invoke "get_EOF" ()
hasAttributes :: XmlReader a -> IO Bool
hasAttributes = invoke "get_HasAttributes" ()
hasValue :: XmlReader a -> IO Bool
hasValue = invoke "get_HasValue" ()
isDefault :: XmlReader a -> IO Bool
isDefault = invoke "get_IsDefault" ()
isEmptyElement :: XmlReader a -> IO Bool
isEmptyElement = invoke "get_IsEmptyElement" ()
itemIndex :: Int -> XmlReader a -> IO String
itemIndex idx = invoke "get_Item" idx
itemName :: String -> XmlReader a -> IO String
itemName nm = invoke "get_Item" nm
itemNameURI :: String -> String -> XmlReader a -> IO String
itemNameURI nm uri = invoke "get_Item" (nm,uri)
localName :: XmlReader a -> IO String
localName = invoke "get_LocalName" ()
name :: XmlReader a -> IO String
name = invoke "get_Name" ()
namespaceURI :: XmlReader a -> IO String
namespaceURI = invoke "get_NamespaceURI" ()
nameTable :: XmlReader a -> IO (Dotnet.System.Xml.XmlNameTable.XmlNameTable b)
nameTable = invoke "get_NameTable" ()
nodeType :: XmlReader a -> IO Dotnet.System.Xml.XmlNodeType
nodeType this = do
v <- this # invoke "get_NodeType" ()
return (toEnum v)
prefix :: XmlReader a -> IO String
prefix = invoke "get_Prefix" ()
quoteChar :: XmlReader a -> IO Char
quoteChar = invoke "get_QuoteChar" ()
readState :: XmlReader a -> IO Dotnet.System.Xml.ReadState
readState this = do
v <- this # invoke "get_ReadState" ()
return (toEnum v)
value :: XmlReader a -> IO String
value = invoke "get_Value" ()
xmlLang :: XmlReader a -> IO String
xmlLang = invoke "get_XmlLang" ()
xmlSpace :: XmlReader a -> IO Dotnet.System.Xml.XmlSpace
xmlSpace this = do
v <- this # invoke "get_XmlSpace" ()
return (toEnum v)
close :: XmlReader a -> IO ()
close = invoke "Close" ()
getAttributeIndex :: Int -> XmlReader a -> IO String
getAttributeIndex idx = invoke "GetAttribute" idx
getAttributeName :: String -> XmlReader a -> IO String
getAttributeName nm = invoke "GetAttribute" nm
getAttributeNameURI :: String -> String -> XmlReader a -> IO String
getAttributeNameURI nm uri = invoke "getAttribute" (nm,uri)
isName :: String -> XmlReader a -> IO Bool
isName str = invoke "IsName" str
isNameToken :: String -> XmlReader a -> IO Bool
isNameToken str = invoke "IsNameToken" str
isStartElement :: XmlReader a -> IO Bool
isStartElement = invoke "IsStartElement" ()
isStartElementName :: String -> XmlReader a -> IO Bool
isStartElementName str = invoke "IsStartElement" str
isStartElementNameURI :: String -> String -> XmlReader a -> IO Bool
isStartElementNameURI str uri = invoke "IsStartElement" (str,uri)
lookupNamespace :: String -> XmlReader a -> IO String
lookupNamespace str = invoke "LookupNamespace" str
moveToAttributeIndex :: Int -> XmlReader a -> IO ()
moveToAttributeIndex idx = invoke "MoveToAttribute" idx
moveToAttributeName :: String -> XmlReader a -> IO Bool
moveToAttributeName str = invoke "MoveToAttribute" str
moveToAttributeNameURI :: String -> String -> XmlReader a -> IO Bool
moveToAttributeNameURI str uri = invoke "MoveToAttribute" (str,uri)
moveToContent :: XmlReader a -> IO Dotnet.System.Xml.XmlNodeType
moveToContent this = do
v <- this # invoke "MoveToContent" ()
return (toEnum v)
moveToElement :: XmlReader a -> IO Bool
moveToElement = invoke "MoveToElement" ()
moveToFirstAttribute :: XmlReader a -> IO Bool
moveToFirstAttribute = invoke "MoveToFirstAttribute" ()
moveToNextAttribute :: XmlReader a -> IO Bool
moveToNextAttribute = invoke "MoveToNextAttribute" ()
readNext :: XmlReader a -> IO Bool
readNext = invoke "Read" ()
readAttributeValue :: XmlReader a -> IO Bool
readAttributeValue = invoke "ReadAttributeValue" ()
readElementString :: XmlReader a -> IO String
readElementString = invoke "ReadElementString" ()
readElementStringName :: String -> XmlReader a -> IO String
readElementStringName str = invoke "ReadElementString" str
readElementStringNameURI :: String -> String -> XmlReader a -> IO String
readElementStringNameURI str uri = invoke "ReadElementString" (str,uri)
readEndElement :: XmlReader a -> IO ()
readEndElement = invoke "ReadEndElement" ()
readInnerXml :: XmlReader a -> IO String
readInnerXml = invoke "ReadInnerXml" ()
readOuterXml :: XmlReader a -> IO String
readOuterXml = invoke "ReadOuterXml" ()
readStartElement :: XmlReader a -> IO ()
readStartElement = invoke "ReadStartElement" ()
readStartElementName :: String -> XmlReader a -> IO ()
readStartElementName str = invoke "ReadStartElement" str
readStartElementNameURI :: String -> String -> XmlReader a -> IO ()
readStartElementNameURI str uri = invoke "ReadStartElement" (str,uri)
readString :: XmlReader a -> IO String
readString = invoke "ReadString" ()
resolveEntity :: XmlReader a -> IO ()
resolveEntity = invoke "ResolveEntity" ()
skip :: XmlReader a -> IO ()
skip = invoke "Skip" ()
| null | https://raw.githubusercontent.com/vaibhavsagar/experiments/378d7ba97eabfc7bbeaa4116380369ea6612bfeb/hugs/dotnet/lib/Dotnet/System/Xml/XmlReader.hs | haskell | wrapper for Dotnet . System . Xml . XmlReader
module Dotnet.System.Xml.XmlReader where
import Dotnet
import qualified Dotnet.System.Object
import Dotnet.System.Xml.XmlNameTable
import Dotnet.System.Xml
data XmlReader_ a
type XmlReader a = Dotnet.System.Object.Object (XmlReader_ a)
attributeCount :: XmlReader a -> IO Int
attributeCount = invoke "get_AttributeCount" ()
baseURI :: XmlReader a -> IO String
baseURI = invoke "get_BaseURI" ()
canResolveEntity :: XmlReader a -> IO Bool
canResolveEntity = invoke "get_CanResolveEntity" ()
depth :: XmlReader a -> IO Int
depth = invoke "get_Depth" ()
eof :: XmlReader a -> IO Bool
eof = invoke "get_EOF" ()
hasAttributes :: XmlReader a -> IO Bool
hasAttributes = invoke "get_HasAttributes" ()
hasValue :: XmlReader a -> IO Bool
hasValue = invoke "get_HasValue" ()
isDefault :: XmlReader a -> IO Bool
isDefault = invoke "get_IsDefault" ()
isEmptyElement :: XmlReader a -> IO Bool
isEmptyElement = invoke "get_IsEmptyElement" ()
itemIndex :: Int -> XmlReader a -> IO String
itemIndex idx = invoke "get_Item" idx
itemName :: String -> XmlReader a -> IO String
itemName nm = invoke "get_Item" nm
itemNameURI :: String -> String -> XmlReader a -> IO String
itemNameURI nm uri = invoke "get_Item" (nm,uri)
localName :: XmlReader a -> IO String
localName = invoke "get_LocalName" ()
name :: XmlReader a -> IO String
name = invoke "get_Name" ()
namespaceURI :: XmlReader a -> IO String
namespaceURI = invoke "get_NamespaceURI" ()
nameTable :: XmlReader a -> IO (Dotnet.System.Xml.XmlNameTable.XmlNameTable b)
nameTable = invoke "get_NameTable" ()
nodeType :: XmlReader a -> IO Dotnet.System.Xml.XmlNodeType
nodeType this = do
v <- this # invoke "get_NodeType" ()
return (toEnum v)
prefix :: XmlReader a -> IO String
prefix = invoke "get_Prefix" ()
quoteChar :: XmlReader a -> IO Char
quoteChar = invoke "get_QuoteChar" ()
readState :: XmlReader a -> IO Dotnet.System.Xml.ReadState
readState this = do
v <- this # invoke "get_ReadState" ()
return (toEnum v)
value :: XmlReader a -> IO String
value = invoke "get_Value" ()
xmlLang :: XmlReader a -> IO String
xmlLang = invoke "get_XmlLang" ()
xmlSpace :: XmlReader a -> IO Dotnet.System.Xml.XmlSpace
xmlSpace this = do
v <- this # invoke "get_XmlSpace" ()
return (toEnum v)
close :: XmlReader a -> IO ()
close = invoke "Close" ()
getAttributeIndex :: Int -> XmlReader a -> IO String
getAttributeIndex idx = invoke "GetAttribute" idx
getAttributeName :: String -> XmlReader a -> IO String
getAttributeName nm = invoke "GetAttribute" nm
getAttributeNameURI :: String -> String -> XmlReader a -> IO String
getAttributeNameURI nm uri = invoke "getAttribute" (nm,uri)
isName :: String -> XmlReader a -> IO Bool
isName str = invoke "IsName" str
isNameToken :: String -> XmlReader a -> IO Bool
isNameToken str = invoke "IsNameToken" str
isStartElement :: XmlReader a -> IO Bool
isStartElement = invoke "IsStartElement" ()
isStartElementName :: String -> XmlReader a -> IO Bool
isStartElementName str = invoke "IsStartElement" str
isStartElementNameURI :: String -> String -> XmlReader a -> IO Bool
isStartElementNameURI str uri = invoke "IsStartElement" (str,uri)
lookupNamespace :: String -> XmlReader a -> IO String
lookupNamespace str = invoke "LookupNamespace" str
moveToAttributeIndex :: Int -> XmlReader a -> IO ()
moveToAttributeIndex idx = invoke "MoveToAttribute" idx
moveToAttributeName :: String -> XmlReader a -> IO Bool
moveToAttributeName str = invoke "MoveToAttribute" str
moveToAttributeNameURI :: String -> String -> XmlReader a -> IO Bool
moveToAttributeNameURI str uri = invoke "MoveToAttribute" (str,uri)
moveToContent :: XmlReader a -> IO Dotnet.System.Xml.XmlNodeType
moveToContent this = do
v <- this # invoke "MoveToContent" ()
return (toEnum v)
moveToElement :: XmlReader a -> IO Bool
moveToElement = invoke "MoveToElement" ()
moveToFirstAttribute :: XmlReader a -> IO Bool
moveToFirstAttribute = invoke "MoveToFirstAttribute" ()
moveToNextAttribute :: XmlReader a -> IO Bool
moveToNextAttribute = invoke "MoveToNextAttribute" ()
readNext :: XmlReader a -> IO Bool
readNext = invoke "Read" ()
readAttributeValue :: XmlReader a -> IO Bool
readAttributeValue = invoke "ReadAttributeValue" ()
readElementString :: XmlReader a -> IO String
readElementString = invoke "ReadElementString" ()
readElementStringName :: String -> XmlReader a -> IO String
readElementStringName str = invoke "ReadElementString" str
readElementStringNameURI :: String -> String -> XmlReader a -> IO String
readElementStringNameURI str uri = invoke "ReadElementString" (str,uri)
readEndElement :: XmlReader a -> IO ()
readEndElement = invoke "ReadEndElement" ()
readInnerXml :: XmlReader a -> IO String
readInnerXml = invoke "ReadInnerXml" ()
readOuterXml :: XmlReader a -> IO String
readOuterXml = invoke "ReadOuterXml" ()
readStartElement :: XmlReader a -> IO ()
readStartElement = invoke "ReadStartElement" ()
readStartElementName :: String -> XmlReader a -> IO ()
readStartElementName str = invoke "ReadStartElement" str
readStartElementNameURI :: String -> String -> XmlReader a -> IO ()
readStartElementNameURI str uri = invoke "ReadStartElement" (str,uri)
readString :: XmlReader a -> IO String
readString = invoke "ReadString" ()
resolveEntity :: XmlReader a -> IO ()
resolveEntity = invoke "ResolveEntity" ()
skip :: XmlReader a -> IO ()
skip = invoke "Skip" ()
| |
0286741fafc552f08e47dd5e327253a6fb357795c5eee7508e02e133a2b305cd | rtrusso/scp | read9.scm | (define l (call-with-input-file "tests/read-ext-id.scm"
(lambda (f) (read f))))
(let ((index 0))
(for-each (lambda (x)
(set! index (+ index 1))
(display "index ")
(display index)
(display ": ")
(write x)
(newline)
(display " symbol? ")
(display (symbol? x))
(newline)
(newline))
l))
| null | https://raw.githubusercontent.com/rtrusso/scp/2051e76df14bd36aef81aba519ffafa62b260f5c/src/tests/read9.scm | scheme | (define l (call-with-input-file "tests/read-ext-id.scm"
(lambda (f) (read f))))
(let ((index 0))
(for-each (lambda (x)
(set! index (+ index 1))
(display "index ")
(display index)
(display ": ")
(write x)
(newline)
(display " symbol? ")
(display (symbol? x))
(newline)
(newline))
l))
| |
340931de985782cabb4e2d4d1b7777d898a13cc39037754778a2eab9479f8284 | psilord/option-9 | scene-man.lisp | (in-package :option-9)
(defmethod add-child ((parent frame) (child frame))
"Return the CHILD after it has been added to PARENT. No checks for
duplicate adds are performed."
(unless (children parent)
(setf (children parent) (make-hash-table :test #'eq)))
(setf (gethash child (children parent)) child)
(setf (parent child) parent)
child)
;; This is recursive from the root (at the parent) to the leaves, but
;; I don't ever expect the depth of this tree to be stack breaking.
;; When the child is removed, all children of it are ripped out of the
;; scene tree with it too. XXX Fix using the orphan-policy mechanism.
(defmethod remove-child ((parent frame) (child frame))
"Return CHILD if removed, or NIL otherwise."
(when (children parent)
(if (remhash child (children parent))
;; XXX If this child had children of its own, where do they go?
;; SHould they just get shoved into the universe and be done with them?
child
;; Walk all children subtrees recursively until I find it, then bail.
(loop for p being the hash-values in (children parent) do
(when (remove-child p child)
;; Child currently has no parent.
(setf (parent child) nil)
(return-from remove-child child))))))
(defgeneric insert-into-scene (scene-manager child-drawable parent-drawable)
(:documentation "Insert the CHILD-FRAME object as a child of the
specified PARENT-FRAME and then insert it into the specified view. If
PARENT-FRAME is :universe, then insert the object as a child of the
universe frame. Return the PARENT-FRAME and CHILD-FRAME as values, in
that order."))
(defgeneric remove-from-scene (scene-manager the-drawable)
(:documentation "Remove THE-FRAME (and by consequence all of its
children) from the scene-db and return THE-FRAME. If it is desired that
the children of THE-FRAME should participate in the scene, then it is
up to you to modify their bases to be in the right coordinate frame
along with their other physcal vectors and such, and reinsert them."))
(defmethod insert-into-scene ((sman scene-manager) (child drawable)
parent)
;; We poke in the child into the scene-tree
(add-child (if (eq parent :universe) (root sman) parent) child)
;; Add the item into each role view it specifies.
;; Currently, it is stored in a list. I may change this to be a hash table
;; whose key is the child and value is T.
(dolist (role (roles child))
(push child (gethash role (views sman)))))
;; Return a list of the entities that have the role specified.
(defmethod entities-with-role ((sman scene-manager) role)
(gethash role (views sman)))
;; Return a list that collects all the entities in the roles specified.
(defmethod all-entities-in-roles ((sman scene-manager) &rest roles)
(let ((entity-lists (mapcar #'(lambda (role)
(entities-with-role sman role))
roles)))
NIL entity lists are squeezed out by the appending keyword ...
(loop for group in entity-lists appending group)))
;; TODO: This is broken and doesn't understand the orphan policy! It
;; just the item and all children recursively.
(defmethod remove-from-scene ((sman scene-manager) item)
1 . remove the item out of all of the role views . If this starts
;; sucking in speed as the number of objects rise, then change it to
;; be removing stuff out of a hash table.
(dolist (role (roles item))
(symbol-macrolet ((the-view (gethash role (views sman))))
(setf the-view (remove-if #'(lambda (x) (eq x item)) the-view))))
2 . get the parent of the item from the var and then remove - child it .
(remove-child (parent item) item)
3 . TODO This needs to be verified that it is right .
(when (children item)
(loop for child being the hash-values in (children item) do
(remove-from-scene sman child))))
| null | https://raw.githubusercontent.com/psilord/option-9/44d96cbc5543ee2acbdcf45d300207ef175462bc/scene-man.lisp | lisp | This is recursive from the root (at the parent) to the leaves, but
I don't ever expect the depth of this tree to be stack breaking.
When the child is removed, all children of it are ripped out of the
scene tree with it too. XXX Fix using the orphan-policy mechanism.
XXX If this child had children of its own, where do they go?
SHould they just get shoved into the universe and be done with them?
Walk all children subtrees recursively until I find it, then bail.
Child currently has no parent.
We poke in the child into the scene-tree
Add the item into each role view it specifies.
Currently, it is stored in a list. I may change this to be a hash table
whose key is the child and value is T.
Return a list of the entities that have the role specified.
Return a list that collects all the entities in the roles specified.
TODO: This is broken and doesn't understand the orphan policy! It
just the item and all children recursively.
sucking in speed as the number of objects rise, then change it to
be removing stuff out of a hash table. | (in-package :option-9)
(defmethod add-child ((parent frame) (child frame))
"Return the CHILD after it has been added to PARENT. No checks for
duplicate adds are performed."
(unless (children parent)
(setf (children parent) (make-hash-table :test #'eq)))
(setf (gethash child (children parent)) child)
(setf (parent child) parent)
child)
(defmethod remove-child ((parent frame) (child frame))
"Return CHILD if removed, or NIL otherwise."
(when (children parent)
(if (remhash child (children parent))
child
(loop for p being the hash-values in (children parent) do
(when (remove-child p child)
(setf (parent child) nil)
(return-from remove-child child))))))
(defgeneric insert-into-scene (scene-manager child-drawable parent-drawable)
(:documentation "Insert the CHILD-FRAME object as a child of the
specified PARENT-FRAME and then insert it into the specified view. If
PARENT-FRAME is :universe, then insert the object as a child of the
universe frame. Return the PARENT-FRAME and CHILD-FRAME as values, in
that order."))
(defgeneric remove-from-scene (scene-manager the-drawable)
(:documentation "Remove THE-FRAME (and by consequence all of its
children) from the scene-db and return THE-FRAME. If it is desired that
the children of THE-FRAME should participate in the scene, then it is
up to you to modify their bases to be in the right coordinate frame
along with their other physcal vectors and such, and reinsert them."))
(defmethod insert-into-scene ((sman scene-manager) (child drawable)
parent)
(add-child (if (eq parent :universe) (root sman) parent) child)
(dolist (role (roles child))
(push child (gethash role (views sman)))))
(defmethod entities-with-role ((sman scene-manager) role)
(gethash role (views sman)))
(defmethod all-entities-in-roles ((sman scene-manager) &rest roles)
(let ((entity-lists (mapcar #'(lambda (role)
(entities-with-role sman role))
roles)))
NIL entity lists are squeezed out by the appending keyword ...
(loop for group in entity-lists appending group)))
(defmethod remove-from-scene ((sman scene-manager) item)
1 . remove the item out of all of the role views . If this starts
(dolist (role (roles item))
(symbol-macrolet ((the-view (gethash role (views sman))))
(setf the-view (remove-if #'(lambda (x) (eq x item)) the-view))))
2 . get the parent of the item from the var and then remove - child it .
(remove-child (parent item) item)
3 . TODO This needs to be verified that it is right .
(when (children item)
(loop for child being the hash-values in (children item) do
(remove-from-scene sman child))))
|
80c1e85fa9a394e5c8b1c16be73fa2a40b10f2b14a3614c1e7eb5fbd64d73836 | metaocaml/ber-metaocaml | recvfrom_unix.ml | (* TEST
include unix
modules = "recvfrom.ml"
* hasunix
** not-windows
*** bytecode
*** native
*)
open Recvfrom
let () =
let server_path = "ocaml-test-socket-unix" in
ensure_no_file server_path;
at_exit (fun () -> ensure_no_file server_path);
with_bound_socket server_path (fun server_addr server_socket ->
(* path socket, just reuse server addr *)
test_sender ~client_socket:server_socket ~server_socket ~server_addr ~client_addr:server_addr;
(* unnamed socket *)
with_socket (fun client_socket ->
(* unbound socket should be treated as empty path *)
test_sender ~client_socket ~server_socket ~server_addr ~client_addr:(ADDR_UNIX "")
)
)
| null | https://raw.githubusercontent.com/metaocaml/ber-metaocaml/4992d1f87fc08ccb958817926cf9d1d739caf3a2/testsuite/tests/lib-unix/unix-socket/recvfrom_unix.ml | ocaml | TEST
include unix
modules = "recvfrom.ml"
* hasunix
** not-windows
*** bytecode
*** native
path socket, just reuse server addr
unnamed socket
unbound socket should be treated as empty path | open Recvfrom
let () =
let server_path = "ocaml-test-socket-unix" in
ensure_no_file server_path;
at_exit (fun () -> ensure_no_file server_path);
with_bound_socket server_path (fun server_addr server_socket ->
test_sender ~client_socket:server_socket ~server_socket ~server_addr ~client_addr:server_addr;
with_socket (fun client_socket ->
test_sender ~client_socket ~server_socket ~server_addr ~client_addr:(ADDR_UNIX "")
)
)
|
5db1d36635f49a70666998fe44b373bf6e938d6acb67552dafabaca92b1d70ab | clojure/core.rrb-vector | test_cljs_only.cljs | (ns clojure.core.rrb-vector.test-cljs-only
(:require [clojure.test :as test :refer [deftest testing is are]]
[clojure.core.rrb-vector.test-utils :as u]
[clojure.core.rrb-vector :as fv]
[clojure.core.rrb-vector.debug :as dv]
[clojure.core.rrb-vector.debug-platform-dependent :as dpd]))
(dv/set-debug-opts! dv/full-debug-opts)
| null | https://raw.githubusercontent.com/clojure/core.rrb-vector/88c2f814b47c0bbc4092dad82be2ec783ed2961f/src/test/cljs/clojure/core/rrb_vector/test_cljs_only.cljs | clojure | (ns clojure.core.rrb-vector.test-cljs-only
(:require [clojure.test :as test :refer [deftest testing is are]]
[clojure.core.rrb-vector.test-utils :as u]
[clojure.core.rrb-vector :as fv]
[clojure.core.rrb-vector.debug :as dv]
[clojure.core.rrb-vector.debug-platform-dependent :as dpd]))
(dv/set-debug-opts! dv/full-debug-opts)
| |
a7d80a4167537c33503457264c4358210c6cce7fda4c1630125bd17784bff02d | lspector/Clojush | replace_space_with_newline.clj | ;; replace_space_with_newline.clj
,
;;
Problem Source : ( / )
;;
;; Given a string input, print the string, replacing spaces with newlines.
;; The input string will not have tabs or newlines, but may have multiple spaces
in a row . It will have maximum length of 20 characters . Also , the program
;; should return the integer count of the non-whitespace characters.
;;
;; input stack has the input string
(ns clojush.problems.software.replace-space-with-newline
(:use clojush.pushgp.pushgp
[clojush pushstate interpreter random util globals]
clojush.instructions.tag
clojure.math.numeric-tower)
(:require [clojure.string :as string]))
;; Define test cases
(defn replace-space-with-newline-input
"Makes a Replace Space With Newline input of length len."
[len]
(apply str
(repeatedly len
(fn []
(if (< (lrand) 0.2)
\space
(lrand-nth (map char (range 32 127))))))))
; Atom generators
(def replace-space-with-newline-atom-generators
(concat (list
\space
\newline
;;; end constants
Visible character ERC
(fn [] (replace-space-with-newline-input (lrand-int 21))) ;String ERC
end ERCs
(tag-instruction-erc [:exec :integer :boolean :string :char] 1000)
(tagged-instruction-erc 1000)
end tag
'in1
;;; end input instructions
)
(registered-for-stacks [:integer :boolean :string :char :exec :print])))
;; A list of data domains for the problem. Each domain is a vector containing
a " set " of inputs and two integers representing how many cases from the set
;; should be used as training and testing cases respectively. Each "set" of
;; inputs is either a list or a function that, when called, will create a
;; random element of the set.
(def replace-space-with-newline-data-domains
[[(list "", "A", "*", " ", "s", "B ", " ", " D", "ef", "!!", " F ", "T L", "4ps", "q ", " ", " e", "hi ",
" $ ", " 9",
(apply str (take 13 (cycle (list \i \space \!))))
(apply str (repeat 20 \8))
(apply str (repeat 20 \space))
(apply str (repeat 20 \s))
(apply str (take 20 (cycle (list \1 \space))))
(apply str (take 20 (cycle (list \space \v))))
(apply str (take 20 (cycle (list \H \a \space))))
(apply str (take 20 (cycle (list \x \space \y \!))))
(apply str (take 20 (cycle (list \G \5))))
(apply str (take 20 (cycle (list \> \_ \= \]))))
(apply str (take 20 (cycle (list \^ \_ \^ \space))))) 30 0] ;; "Special" inputs covering some base cases
[(fn [] (replace-space-with-newline-input (+ 2 (lrand-int 19)))) 70 1000]
])
;;Can make Replace Space With Newline test data like this:
;(test-and-train-data-from-domains replace-space-with-newline-data-domains)
; Helper function for error function
(defn replace-space-with-newline-test-cases
"Takes a sequence of inputs and gives IO test cases of the form
[input output]."
[inputs]
(map (fn [in]
(vector in
[(string/replace in \space \newline)
(count (filter #(not= \space %) in))]))
inputs))
(defn get-replace-space-with-newline-train-and-test
"Returns the train and test cases."
[data-domains]
(map #(sort-by (comp count first) %)
(map replace-space-with-newline-test-cases
(test-and-train-data-from-domains data-domains))))
; Define train and test cases
(def replace-space-with-newline-train-and-test-cases
(get-replace-space-with-newline-train-and-test replace-space-with-newline-data-domains))
(defn replace-space-with-newline-evaluate-program-for-behaviors
"Evaluates the program on the given list of cases.
Returns the behaviors, a list of the outputs of the program on the inputs."
[program cases]
(flatten
(doall
(for [[input output] cases]
(let [final-state (run-push program
(->> (make-push-state)
(push-item input :input)
(push-item "" :output)))
printed-result (stack-ref :output 0 final-state)
int-result (stack-ref :integer 0 final-state)]
(vector printed-result int-result))))))
(defn replace-space-with-newline-errors-from-behaviors
"Takes a list of behaviors across the list of cases and finds the error
for each of those behaviors, returning an error vector."
[behaviors cases]
(let [behavior-pairs (partition 2 behaviors)
output-pairs (map second cases)]
(flatten
(map (fn [[printed-result int-result] [correct-printed-output correct-int]]
(vector
(levenshtein-distance correct-printed-output printed-result)
(if (number? int-result)
(abs (- int-result correct-int)) ;distance from correct integer
1000) ;penalty for no return value
))
behavior-pairs
output-pairs))))
(defn replace-space-with-newline-error-function
"The error function for Replace Space With Newline. Takes an individual as input,
and returns that individual with :errors and :behaviors set."
([individual]
(replace-space-with-newline-error-function individual :train))
([individual data-cases] ;; data-cases should be :train or :test
(let [cases (case data-cases
:train (first replace-space-with-newline-train-and-test-cases)
:test (second replace-space-with-newline-train-and-test-cases)
data-cases)
behaviors (replace-space-with-newline-evaluate-program-for-behaviors (:program individual)
cases)
errors (replace-space-with-newline-errors-from-behaviors behaviors cases)]
(if (= data-cases :test)
(assoc individual :test-errors errors)
(assoc individual :behaviors behaviors :errors errors)))))
(defn replace-space-with-newline-initial-report
[argmap]
(println "Train and test cases:")
(doseq [[i case] (map vector (range) (first replace-space-with-newline-train-and-test-cases))]
(println (format "Train Case: %3d | Input/Output: %s" i (str case))))
(doseq [[i case] (map vector (range) (second replace-space-with-newline-train-and-test-cases))]
(println (format "Test Case: %3d | Input/Output: %s" i (str case))))
(println ";;******************************"))
(defn replace-space-with-newline-report
"Custom generational report."
[best population generation error-function report-simplifications]
(let [best-with-test (error-function best :test)
best-test-errors (:test-errors best-with-test)
best-total-test-error (apply +' best-test-errors)]
(println ";;******************************")
(printf ";; -*- Replace Space With Newline problem report - generation %s\n" generation)(flush)
(println "Test total error for best:" best-total-test-error)
(println (format "Test mean error for best: %.5f" (double (/ best-total-test-error (count best-test-errors)))))
(when (zero? (:total-error best))
(doseq [[i error] (map vector
(range)
best-test-errors)]
(println (format "Test Case %3d | Error: %s" i (str error)))))
(println ";;------------------------------")
(println "Outputs of best individual on training cases:")
(doseq [[[correct-output correct-int] [printed-result int-result]]
(map vector
(map second (first replace-space-with-newline-train-and-test-cases))
(partition 2 (:behaviors best)))]
(println (format "\n| Correct output: %s\n| Program output: %s" (pr-str correct-output) (pr-str printed-result)))
(println (format "| Correct integer: %2d | Program integer: %s" correct-int (str int-result))))
(println ";;******************************")
;; return best individual with tests errors added so that those are recorded
best-with-test))
;; To do validation, could have this function return an altered best individual
with total - error > 0 if it had error of zero on train but not on validation
set . Would need a third category of data cases , or a defined split of training cases .
Define the argmap
(def argmap
{:error-function replace-space-with-newline-error-function
:training-cases (first replace-space-with-newline-train-and-test-cases)
:sub-training-cases '()
:atom-generators replace-space-with-newline-atom-generators
:max-points 3200
:max-genome-size-in-initial-program 400
:evalpush-limit 1600
:population-size 1000
:max-generations 300
:parent-selection :lexicase
:genetic-operator-probabilities {:alternation 0.2
:uniform-mutation 0.2
:uniform-close-mutation 0.1
[:alternation :uniform-mutation] 0.5
}
:alternation-rate 0.01
:alignment-deviation 10
:uniform-mutation-rate 0.01
:problem-specific-report replace-space-with-newline-report
:problem-specific-initial-report replace-space-with-newline-initial-report
:report-simplifications 0
:final-report-simplifications 5000
:max-error 5000
})
| null | https://raw.githubusercontent.com/lspector/Clojush/685b991535607cf942ae1500557171a0739982c3/src/clojush/problems/software/replace_space_with_newline.clj | clojure | replace_space_with_newline.clj
Given a string input, print the string, replacing spaces with newlines.
The input string will not have tabs or newlines, but may have multiple spaces
should return the integer count of the non-whitespace characters.
input stack has the input string
Define test cases
Atom generators
end constants
String ERC
end input instructions
A list of data domains for the problem. Each domain is a vector containing
should be used as training and testing cases respectively. Each "set" of
inputs is either a list or a function that, when called, will create a
random element of the set.
"Special" inputs covering some base cases
Can make Replace Space With Newline test data like this:
(test-and-train-data-from-domains replace-space-with-newline-data-domains)
Helper function for error function
Define train and test cases
distance from correct integer
penalty for no return value
data-cases should be :train or :test
return best individual with tests errors added so that those are recorded
To do validation, could have this function return an altered best individual | ,
Problem Source : ( / )
in a row . It will have maximum length of 20 characters . Also , the program
(ns clojush.problems.software.replace-space-with-newline
(:use clojush.pushgp.pushgp
[clojush pushstate interpreter random util globals]
clojush.instructions.tag
clojure.math.numeric-tower)
(:require [clojure.string :as string]))
(defn replace-space-with-newline-input
"Makes a Replace Space With Newline input of length len."
[len]
(apply str
(repeatedly len
(fn []
(if (< (lrand) 0.2)
\space
(lrand-nth (map char (range 32 127))))))))
(def replace-space-with-newline-atom-generators
(concat (list
\space
\newline
Visible character ERC
end ERCs
(tag-instruction-erc [:exec :integer :boolean :string :char] 1000)
(tagged-instruction-erc 1000)
end tag
'in1
)
(registered-for-stacks [:integer :boolean :string :char :exec :print])))
a " set " of inputs and two integers representing how many cases from the set
(def replace-space-with-newline-data-domains
[[(list "", "A", "*", " ", "s", "B ", " ", " D", "ef", "!!", " F ", "T L", "4ps", "q ", " ", " e", "hi ",
" $ ", " 9",
(apply str (take 13 (cycle (list \i \space \!))))
(apply str (repeat 20 \8))
(apply str (repeat 20 \space))
(apply str (repeat 20 \s))
(apply str (take 20 (cycle (list \1 \space))))
(apply str (take 20 (cycle (list \space \v))))
(apply str (take 20 (cycle (list \H \a \space))))
(apply str (take 20 (cycle (list \x \space \y \!))))
(apply str (take 20 (cycle (list \G \5))))
(apply str (take 20 (cycle (list \> \_ \= \]))))
[(fn [] (replace-space-with-newline-input (+ 2 (lrand-int 19)))) 70 1000]
])
(defn replace-space-with-newline-test-cases
"Takes a sequence of inputs and gives IO test cases of the form
[input output]."
[inputs]
(map (fn [in]
(vector in
[(string/replace in \space \newline)
(count (filter #(not= \space %) in))]))
inputs))
(defn get-replace-space-with-newline-train-and-test
"Returns the train and test cases."
[data-domains]
(map #(sort-by (comp count first) %)
(map replace-space-with-newline-test-cases
(test-and-train-data-from-domains data-domains))))
(def replace-space-with-newline-train-and-test-cases
(get-replace-space-with-newline-train-and-test replace-space-with-newline-data-domains))
(defn replace-space-with-newline-evaluate-program-for-behaviors
"Evaluates the program on the given list of cases.
Returns the behaviors, a list of the outputs of the program on the inputs."
[program cases]
(flatten
(doall
(for [[input output] cases]
(let [final-state (run-push program
(->> (make-push-state)
(push-item input :input)
(push-item "" :output)))
printed-result (stack-ref :output 0 final-state)
int-result (stack-ref :integer 0 final-state)]
(vector printed-result int-result))))))
(defn replace-space-with-newline-errors-from-behaviors
"Takes a list of behaviors across the list of cases and finds the error
for each of those behaviors, returning an error vector."
[behaviors cases]
(let [behavior-pairs (partition 2 behaviors)
output-pairs (map second cases)]
(flatten
(map (fn [[printed-result int-result] [correct-printed-output correct-int]]
(vector
(levenshtein-distance correct-printed-output printed-result)
(if (number? int-result)
))
behavior-pairs
output-pairs))))
(defn replace-space-with-newline-error-function
"The error function for Replace Space With Newline. Takes an individual as input,
and returns that individual with :errors and :behaviors set."
([individual]
(replace-space-with-newline-error-function individual :train))
(let [cases (case data-cases
:train (first replace-space-with-newline-train-and-test-cases)
:test (second replace-space-with-newline-train-and-test-cases)
data-cases)
behaviors (replace-space-with-newline-evaluate-program-for-behaviors (:program individual)
cases)
errors (replace-space-with-newline-errors-from-behaviors behaviors cases)]
(if (= data-cases :test)
(assoc individual :test-errors errors)
(assoc individual :behaviors behaviors :errors errors)))))
(defn replace-space-with-newline-initial-report
[argmap]
(println "Train and test cases:")
(doseq [[i case] (map vector (range) (first replace-space-with-newline-train-and-test-cases))]
(println (format "Train Case: %3d | Input/Output: %s" i (str case))))
(doseq [[i case] (map vector (range) (second replace-space-with-newline-train-and-test-cases))]
(println (format "Test Case: %3d | Input/Output: %s" i (str case))))
(println ";;******************************"))
(defn replace-space-with-newline-report
"Custom generational report."
[best population generation error-function report-simplifications]
(let [best-with-test (error-function best :test)
best-test-errors (:test-errors best-with-test)
best-total-test-error (apply +' best-test-errors)]
(println ";;******************************")
(printf ";; -*- Replace Space With Newline problem report - generation %s\n" generation)(flush)
(println "Test total error for best:" best-total-test-error)
(println (format "Test mean error for best: %.5f" (double (/ best-total-test-error (count best-test-errors)))))
(when (zero? (:total-error best))
(doseq [[i error] (map vector
(range)
best-test-errors)]
(println (format "Test Case %3d | Error: %s" i (str error)))))
(println ";;------------------------------")
(println "Outputs of best individual on training cases:")
(doseq [[[correct-output correct-int] [printed-result int-result]]
(map vector
(map second (first replace-space-with-newline-train-and-test-cases))
(partition 2 (:behaviors best)))]
(println (format "\n| Correct output: %s\n| Program output: %s" (pr-str correct-output) (pr-str printed-result)))
(println (format "| Correct integer: %2d | Program integer: %s" correct-int (str int-result))))
(println ";;******************************")
best-with-test))
with total - error > 0 if it had error of zero on train but not on validation
set . Would need a third category of data cases , or a defined split of training cases .
Define the argmap
(def argmap
{:error-function replace-space-with-newline-error-function
:training-cases (first replace-space-with-newline-train-and-test-cases)
:sub-training-cases '()
:atom-generators replace-space-with-newline-atom-generators
:max-points 3200
:max-genome-size-in-initial-program 400
:evalpush-limit 1600
:population-size 1000
:max-generations 300
:parent-selection :lexicase
:genetic-operator-probabilities {:alternation 0.2
:uniform-mutation 0.2
:uniform-close-mutation 0.1
[:alternation :uniform-mutation] 0.5
}
:alternation-rate 0.01
:alignment-deviation 10
:uniform-mutation-rate 0.01
:problem-specific-report replace-space-with-newline-report
:problem-specific-initial-report replace-space-with-newline-initial-report
:report-simplifications 0
:final-report-simplifications 5000
:max-error 5000
})
|
27d2e9632b52289d289f9e55ff581ec4349df36cafbbb28688f8f60d2f203428 | argp/bap | batLogger.ml | -*- Mode : ; indent - tabs - mode : nil -*-
(******************************************************************************)
Copyright ( c ) 2009 , Metaweb Technologies , Inc.
* All rights reserved .
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions
* are met :
* * Redistributions of source code must retain the above copyright
* notice , this list of conditions and the following disclaimer .
* * Redistributions in binary form must reproduce the above
* copyright notice , this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution .
*
* THIS SOFTWARE IS PROVIDED BY METAWEB TECHNOLOGIES ` ` AS IS '' AND ANY
* EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL METAWEB TECHNOLOGIES BE
* LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
* CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR
* BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
* IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE
* OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY METAWEB TECHNOLOGIES ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL METAWEB TECHNOLOGIES BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
******************************************************************************)
open BatPrintf
type log = {
name : string;
mutable level : int;
}
type level = NONE | FATAL | ERROR | WARN | NOTICE | INFO | DEBUG
type event = string * (string * string) list
type formatter = log -> level -> event -> float -> unit
(******************************************************************************)
(** log utilities *)
let int_of_level = function
| NONE -> 0 | FATAL -> 1 | ERROR -> 2 | WARN -> 3
| NOTICE -> 4 | INFO -> 5 | DEBUG -> 6
let level_of_int = function
| 0 -> NONE | 1 -> FATAL | 2 -> ERROR | 3 -> WARN
| 4 -> NOTICE | 5 -> INFO | 6 -> DEBUG
| i -> failwith ("invalid level: " ^ string_of_int i)
let name_of_level = function
| NONE -> "NONE" | FATAL -> "FATAL" | ERROR -> "ERROR" | WARN -> "WARN"
| NOTICE -> "NOTICE" | INFO -> "INFO" | DEBUG -> "DEBUG"
let level_of_name = function
| "NONE" -> NONE | "FATAL" -> FATAL | "ERROR" -> ERROR | "WARN" -> WARN
| "NOTICE" -> NOTICE | "INFO" -> INFO | "DEBUG" -> DEBUG
| n -> failwith ("invalid level: " ^ n)
let format_timestamp out ts =
let tm = Unix.gmtime ts in
let us, _ = modf ts in
fprintf out "%04d-%02d-%02dT%02d:%02d:%02d.%06dZ"
(1900 + tm.Unix.tm_year)
(1 + tm.Unix.tm_mon)
(tm.Unix.tm_mday)
(tm.Unix.tm_hour)
(tm.Unix.tm_min)
(tm.Unix.tm_sec)
(int_of_float (1_000_000. *. us))
(******************************************************************************)
(** log modules *)
let logs = Hashtbl.create 16
let default_level = ref (int_of_level INFO)
let make_log name =
try Hashtbl.find logs name
with Not_found ->
let lm = { name = name; level = !default_level }
in Hashtbl.replace logs name lm;
lm
let log_enable lm lev = lm.level <- int_of_level lev
let log_enabled lm lev =
let lev_no = int_of_level lev in
lev_no <= lm.level
let log_name lm = lm.name
let log_level lm = level_of_int lm.level
(******************************************************************************)
(** log formatters *)
let depth = ref 0
let formatters : (string * formatter) list ref = ref []
let register_formatter name f = formatters := (name, f) :: !formatters
let unregister_formatter name =
formatters := List.remove_assoc name !formatters
let rec format_kvl oc = function
| [] -> ()
| (k, v)::rest ->
fprintf oc "\t%s:%s" k v;
format_kvl oc rest
let make_std_formatter oc lm lev (event_name, event_args) timestamp =
fprintf oc "D:%a\tE:%s.%s\tL:%s%a\n%!"
(*D:*) format_timestamp timestamp
(*E:*) lm.name event_name
(*L:*) (name_of_level lev)
format_kvl event_args
let stderr_formatter = make_std_formatter BatIO.stderr
let null_formatter lm lev event timestamp = ()
let format_indent oc depth =
for _i = 0 to depth do
fprintf oc "| "
done
let make_dbg_formatter oc lm lev (event_name, event_args) timestamp =
let indent = try int_of_string (List.assoc "I" event_args) with _ -> 0 in
let args = List.remove_assoc "I" event_args in
fprintf oc "### %a%s.%s %a [%s]\n%!" format_indent indent
(log_name lm) event_name
format_kvl args (name_of_level lev)
let dbg_formatter lm lev ep ts = make_dbg_formatter BatIO.stderr lm lev ep ts
(******************************************************************************)
(** log events *)
let log lm lev event_fun =
if log_enabled lm lev then
let time = Unix.gettimeofday () in
let event_name, event_args = event_fun () in
let event = event_name, ("I", string_of_int !depth) :: event_args in
List.iter (fun (name, fmt) -> fmt lm lev event time) !formatters
let with_log lm lev event_fun ?result body =
if log_enabled lm lev then begin
try
log lm lev event_fun;
incr depth;
let rv = body () in
decr depth;
log lm lev (fun () ->
let event_name, event_args = event_fun () in
let result_str = match result with
| Some f -> f rv
| None -> "-"
in
event_name, ("RESULT", result_str) ::event_args);
rv
with exn ->
decr depth;
log lm lev (fun () ->
let event_name, event_args = event_fun () in
event_name, ("EXN", Printexc.to_string exn) :: event_args);
raise exn
end else body ()
(******************************************************************************)
(** logger initialization *)
let init name_level_list formatter =
List.iter (fun (name, level) -> let lm = make_log name in log_enable lm level)
name_level_list;
register_formatter "default" formatter
let init_from_string name_level_string formatter =
let init_key_value ss =
try
let name_ss, level_ss = BatSubstring.splitl (fun c -> c <> ':') ss in
let name = BatSubstring.to_string name_ss in
let level = level_of_name (BatSubstring.to_string level_ss) in
let lm = make_log name in
log_enable lm level
with Not_found -> try
let level = level_of_name (BatSubstring.to_string ss) in
default_level := int_of_level level;
Hashtbl.iter (fun name lm -> log_enable lm level) logs
with Failure _ ->
failwith ("invalid log initialization: " ^ BatSubstring.to_string ss)
in
List.iter init_key_value (BatSubstring.split_on_comma (BatSubstring.of_string name_level_string) );
register_formatter "default" formatter
(******************************************************************************)
let test =
let lm = make_log " test " in
let direct ( ) =
log lm NOTICE ( fun ( ) - > " hello " , [ ] ) ;
log lm DEBUG ( fun ( ) - > " debug msg1 " , [ ] ) ;
log lm ERROR ( fun ( ) - > " error msg1 " , [ ] ) ;
log lm ERROR ( fun ( ) - > " ok " , [ " ARG1 " , string_of_int 234 ] ) ;
in
let rec run ( ) =
direct ( ) ;
Unix.sleep 3 ;
run ( )
in run ( )
let test =
let lm = make_log "test" in
let direct () =
log lm NOTICE (fun () -> "hello", []);
log lm DEBUG (fun () -> "debug msg1", []);
log lm ERROR (fun () -> "error msg1", []);
log lm ERROR (fun () -> "ok", ["ARG1", string_of_int 234]);
in
let rec run () =
direct ();
Unix.sleep 3;
run ()
in run ()
*)
(******************************************************************************)
| null | https://raw.githubusercontent.com/argp/bap/2f60a35e822200a1ec50eea3a947a322b45da363/batteries/src/batLogger.ml | ocaml | ****************************************************************************
****************************************************************************
* log utilities
****************************************************************************
* log modules
****************************************************************************
* log formatters
D:
E:
L:
****************************************************************************
* log events
****************************************************************************
* logger initialization
****************************************************************************
**************************************************************************** | -*- Mode : ; indent - tabs - mode : nil -*-
Copyright ( c ) 2009 , Metaweb Technologies , Inc.
* All rights reserved .
*
* Redistribution and use in source and binary forms , with or without
* modification , are permitted provided that the following conditions
* are met :
* * Redistributions of source code must retain the above copyright
* notice , this list of conditions and the following disclaimer .
* * Redistributions in binary form must reproduce the above
* copyright notice , this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution .
*
* THIS SOFTWARE IS PROVIDED BY METAWEB TECHNOLOGIES ` ` AS IS '' AND ANY
* EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT LIMITED TO , THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED . IN NO EVENT SHALL METAWEB TECHNOLOGIES BE
* LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
* CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED TO , PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES ; LOSS OF USE , DATA , OR PROFITS ; OR
* BUSINESS INTERRUPTION ) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY ,
* IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING NEGLIGENCE
* OR OTHERWISE ) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE , EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE .
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
* modification, are permitted provided that the following conditions
* are met:
* * Redistributions of source code must retain the above copyright
* notice, this list of conditions and the following disclaimer.
* * Redistributions in binary form must reproduce the above
* copyright notice, this list of conditions and the following
* disclaimer in the documentation and/or other materials provided
* with the distribution.
*
* THIS SOFTWARE IS PROVIDED BY METAWEB TECHNOLOGIES ``AS IS'' AND ANY
* EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
* IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
* PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL METAWEB TECHNOLOGIES BE
* LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
* CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
* SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
* BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
* WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
* OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN
* IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
******************************************************************************)
open BatPrintf
type log = {
name : string;
mutable level : int;
}
type level = NONE | FATAL | ERROR | WARN | NOTICE | INFO | DEBUG
type event = string * (string * string) list
type formatter = log -> level -> event -> float -> unit
let int_of_level = function
| NONE -> 0 | FATAL -> 1 | ERROR -> 2 | WARN -> 3
| NOTICE -> 4 | INFO -> 5 | DEBUG -> 6
let level_of_int = function
| 0 -> NONE | 1 -> FATAL | 2 -> ERROR | 3 -> WARN
| 4 -> NOTICE | 5 -> INFO | 6 -> DEBUG
| i -> failwith ("invalid level: " ^ string_of_int i)
let name_of_level = function
| NONE -> "NONE" | FATAL -> "FATAL" | ERROR -> "ERROR" | WARN -> "WARN"
| NOTICE -> "NOTICE" | INFO -> "INFO" | DEBUG -> "DEBUG"
let level_of_name = function
| "NONE" -> NONE | "FATAL" -> FATAL | "ERROR" -> ERROR | "WARN" -> WARN
| "NOTICE" -> NOTICE | "INFO" -> INFO | "DEBUG" -> DEBUG
| n -> failwith ("invalid level: " ^ n)
let format_timestamp out ts =
let tm = Unix.gmtime ts in
let us, _ = modf ts in
fprintf out "%04d-%02d-%02dT%02d:%02d:%02d.%06dZ"
(1900 + tm.Unix.tm_year)
(1 + tm.Unix.tm_mon)
(tm.Unix.tm_mday)
(tm.Unix.tm_hour)
(tm.Unix.tm_min)
(tm.Unix.tm_sec)
(int_of_float (1_000_000. *. us))
let logs = Hashtbl.create 16
let default_level = ref (int_of_level INFO)
let make_log name =
try Hashtbl.find logs name
with Not_found ->
let lm = { name = name; level = !default_level }
in Hashtbl.replace logs name lm;
lm
let log_enable lm lev = lm.level <- int_of_level lev
let log_enabled lm lev =
let lev_no = int_of_level lev in
lev_no <= lm.level
let log_name lm = lm.name
let log_level lm = level_of_int lm.level
let depth = ref 0
let formatters : (string * formatter) list ref = ref []
let register_formatter name f = formatters := (name, f) :: !formatters
let unregister_formatter name =
formatters := List.remove_assoc name !formatters
let rec format_kvl oc = function
| [] -> ()
| (k, v)::rest ->
fprintf oc "\t%s:%s" k v;
format_kvl oc rest
let make_std_formatter oc lm lev (event_name, event_args) timestamp =
fprintf oc "D:%a\tE:%s.%s\tL:%s%a\n%!"
format_kvl event_args
let stderr_formatter = make_std_formatter BatIO.stderr
let null_formatter lm lev event timestamp = ()
let format_indent oc depth =
for _i = 0 to depth do
fprintf oc "| "
done
let make_dbg_formatter oc lm lev (event_name, event_args) timestamp =
let indent = try int_of_string (List.assoc "I" event_args) with _ -> 0 in
let args = List.remove_assoc "I" event_args in
fprintf oc "### %a%s.%s %a [%s]\n%!" format_indent indent
(log_name lm) event_name
format_kvl args (name_of_level lev)
let dbg_formatter lm lev ep ts = make_dbg_formatter BatIO.stderr lm lev ep ts
let log lm lev event_fun =
if log_enabled lm lev then
let time = Unix.gettimeofday () in
let event_name, event_args = event_fun () in
let event = event_name, ("I", string_of_int !depth) :: event_args in
List.iter (fun (name, fmt) -> fmt lm lev event time) !formatters
let with_log lm lev event_fun ?result body =
if log_enabled lm lev then begin
try
log lm lev event_fun;
incr depth;
let rv = body () in
decr depth;
log lm lev (fun () ->
let event_name, event_args = event_fun () in
let result_str = match result with
| Some f -> f rv
| None -> "-"
in
event_name, ("RESULT", result_str) ::event_args);
rv
with exn ->
decr depth;
log lm lev (fun () ->
let event_name, event_args = event_fun () in
event_name, ("EXN", Printexc.to_string exn) :: event_args);
raise exn
end else body ()
let init name_level_list formatter =
List.iter (fun (name, level) -> let lm = make_log name in log_enable lm level)
name_level_list;
register_formatter "default" formatter
let init_from_string name_level_string formatter =
let init_key_value ss =
try
let name_ss, level_ss = BatSubstring.splitl (fun c -> c <> ':') ss in
let name = BatSubstring.to_string name_ss in
let level = level_of_name (BatSubstring.to_string level_ss) in
let lm = make_log name in
log_enable lm level
with Not_found -> try
let level = level_of_name (BatSubstring.to_string ss) in
default_level := int_of_level level;
Hashtbl.iter (fun name lm -> log_enable lm level) logs
with Failure _ ->
failwith ("invalid log initialization: " ^ BatSubstring.to_string ss)
in
List.iter init_key_value (BatSubstring.split_on_comma (BatSubstring.of_string name_level_string) );
register_formatter "default" formatter
let test =
let lm = make_log " test " in
let direct ( ) =
log lm NOTICE ( fun ( ) - > " hello " , [ ] ) ;
log lm DEBUG ( fun ( ) - > " debug msg1 " , [ ] ) ;
log lm ERROR ( fun ( ) - > " error msg1 " , [ ] ) ;
log lm ERROR ( fun ( ) - > " ok " , [ " ARG1 " , string_of_int 234 ] ) ;
in
let rec run ( ) =
direct ( ) ;
Unix.sleep 3 ;
run ( )
in run ( )
let test =
let lm = make_log "test" in
let direct () =
log lm NOTICE (fun () -> "hello", []);
log lm DEBUG (fun () -> "debug msg1", []);
log lm ERROR (fun () -> "error msg1", []);
log lm ERROR (fun () -> "ok", ["ARG1", string_of_int 234]);
in
let rec run () =
direct ();
Unix.sleep 3;
run ()
in run ()
*)
|
e20e63bd850ff9de52415c2bfc8290507eadf88e0e6d281d7c002a84a7fbe6bd | blindglobe/common-lisp-stat | linear-regression.lisp | ;;; -*- mode: lisp -*-
Time - stamp : < 2009 - 12 - 13 18:02:13 tony >
Creation : < 2009 - 04 - 19 09:41:09 >
;;; File: linear-regression.lisp
Author : < >
Copyright : ( c)2009 - - , . BSD , MIT , LLGPL , or
GPLv2 + , or + depending on how it arrives .
;;; Purpose: Example of basic linear regression data analysis in CLS.
;;; What is this talk of 'release'? Klingons do not make software
;;; 'releases'. Our software 'escapes', leaving a bloody trail of
;;; designers and quality assurance people in its wake.
(in-package :ls-user)
;; TODO:
;; - confirm estimates for multivariate case,
;; - pretty-print output
;; - fix up API -- what do we want this to look like?
(defparameter *m*
(regression-model (list->vector-like iron)
(list->vector-like absorbtion))
"holding variable.")
(defparameter *m2*
(lm (list->vector-like iron)
(list->vector-like absorbtion))
"holding variable.")
(defparameter *m-fit*
(fit-model *m*))
(princ *m2*)
(princ *m-fit*)
(estimates *m-fit*)
(covariance-matrix *m-fit*)
(defparameter *m3*
(regression-model (transpose
(listoflist->matrix-like
(list iron aluminum)
:orientation :row-major))
(list->vector-like absorbtion)))
(princ *m3*)
(defparameter *m3-fit*
(fit-model *m3*))
#|
;; Should the above look something like:
(defparameter *m3-fit*
(spec-and-fit-model '(absorbtion = iron aluminum)))
;; in which case we split the list before/after the "=" character.
|#
(estimates *m3-fit*)
(covariance-matrix *m3-fit*)
;; now to build a linear regression model from an external CSV datafile...
(defparameter *my-df*
(make-dataframe
(rsm.string::file->string-table
(concatenate 'string *cls-data-dir* "file.dsv"))
"Initial read-in of data."))
(defparameter *my-resp-var* (slice *my-df* ))
(defparameter *my-pred-vars* (slice *my-df* ))
| null | https://raw.githubusercontent.com/blindglobe/common-lisp-stat/0c657e10a4ee7e8d4ef3737f8c2d4e62abace2d8/examples/OLD/linear-regression.lisp | lisp | -*- mode: lisp -*-
File: linear-regression.lisp
Purpose: Example of basic linear regression data analysis in CLS.
What is this talk of 'release'? Klingons do not make software
'releases'. Our software 'escapes', leaving a bloody trail of
designers and quality assurance people in its wake.
TODO:
- confirm estimates for multivariate case,
- pretty-print output
- fix up API -- what do we want this to look like?
;; Should the above look something like:
(defparameter *m3-fit*
(spec-and-fit-model '(absorbtion = iron aluminum)))
;; in which case we split the list before/after the "=" character.
now to build a linear regression model from an external CSV datafile... |
Time - stamp : < 2009 - 12 - 13 18:02:13 tony >
Creation : < 2009 - 04 - 19 09:41:09 >
Author : < >
Copyright : ( c)2009 - - , . BSD , MIT , LLGPL , or
GPLv2 + , or + depending on how it arrives .
(in-package :ls-user)
(defparameter *m*
(regression-model (list->vector-like iron)
(list->vector-like absorbtion))
"holding variable.")
(defparameter *m2*
(lm (list->vector-like iron)
(list->vector-like absorbtion))
"holding variable.")
(defparameter *m-fit*
(fit-model *m*))
(princ *m2*)
(princ *m-fit*)
(estimates *m-fit*)
(covariance-matrix *m-fit*)
(defparameter *m3*
(regression-model (transpose
(listoflist->matrix-like
(list iron aluminum)
:orientation :row-major))
(list->vector-like absorbtion)))
(princ *m3*)
(defparameter *m3-fit*
(fit-model *m3*))
(estimates *m3-fit*)
(covariance-matrix *m3-fit*)
(defparameter *my-df*
(make-dataframe
(rsm.string::file->string-table
(concatenate 'string *cls-data-dir* "file.dsv"))
"Initial read-in of data."))
(defparameter *my-resp-var* (slice *my-df* ))
(defparameter *my-pred-vars* (slice *my-df* ))
|
d6d837e39c0af0af1ad73515a026e45f6f4032cfc72b1c1d20b60dc526b1419b | semilin/layoup | real-ev.lisp |
(MAKE-LAYOUT :NAME "real-ev" :MATRIX (APPLY #'KEY-MATRIX 'NIL) :SHIFT-MATRIX
NIL :KEYBOARD NIL) | null | https://raw.githubusercontent.com/semilin/layoup/27ec9ba9a9388cd944ac46206d10424e3ab45499/data/layouts/real-ev.lisp | lisp |
(MAKE-LAYOUT :NAME "real-ev" :MATRIX (APPLY #'KEY-MATRIX 'NIL) :SHIFT-MATRIX
NIL :KEYBOARD NIL) | |
9ea6b8a2b695287b6db1ecfe195fd7f17cb2c344591e196c33cebbd9978ec974 | tip-org/tools | Annotations.hs | {-# LANGUAGE DeriveDataTypeable #-}
module Tip.GHC.Annotations(
module Tip.GHC.Annotations,
BuiltinType(..), Builtin(..), Lit(..)) where
import Data.Data
import Tip.Types
-- An annotation that can be attached to a function to give it special
-- meaning in tip-ghc.
data TipAnnotation =
-- Rename the function when translating it to TIP.
Name String
-- Rename the projection functions for this constructor.
| Projections [String]
-- Unconditionally inline the function
-- (useful when the function must be applied to a monomorphic type).
| Inline
-- Turn into an uninterpreted function.
To avoid mishaps , you should also mark the function as NOINLINE .
| Uninterpreted
-- TIP built-in types, special functions and literals.
| PrimType BuiltinType | SomeSpecial Special
| MakeWiredIn WiredIn | WiredIn WiredIn | Special | Literal Lit
-- The type of properties.
| PropType
-- TIP attributes
| Attr String
| AttrValue String String
deriving (Eq, Ord, Show, Data)
-- Special functions which tip-ghc knows about.
data Special =
A TIP builtin . The second argument is the arity .
Primitive Builtin Int
-- An error function.
| Error
Cast between two types , which should either
-- a) have the same representation in TIP, or
-- b) be a cast from integer to real.
| Cast
-- The magic inlining function inline :: a -> a.
| InlineIt
-- A quantifier.
| QuantSpecial Quant
deriving (Eq, Ord, Show, Read, Data)
-- Functions which are invoked by tip-ghc and must be defined
-- in the prelude.
data WiredIn =
-- Convert a pair of integers to a rational.
MakeRational
-- Negate a number.
| Negate
deriving (Eq, Ord, Show, Read, Data)
| null | https://raw.githubusercontent.com/tip-org/tools/34350072587bd29157d18331eb895a1b2819555f/tip-types/src/Tip/GHC/Annotations.hs | haskell | # LANGUAGE DeriveDataTypeable #
An annotation that can be attached to a function to give it special
meaning in tip-ghc.
Rename the function when translating it to TIP.
Rename the projection functions for this constructor.
Unconditionally inline the function
(useful when the function must be applied to a monomorphic type).
Turn into an uninterpreted function.
TIP built-in types, special functions and literals.
The type of properties.
TIP attributes
Special functions which tip-ghc knows about.
An error function.
a) have the same representation in TIP, or
b) be a cast from integer to real.
The magic inlining function inline :: a -> a.
A quantifier.
Functions which are invoked by tip-ghc and must be defined
in the prelude.
Convert a pair of integers to a rational.
Negate a number. | module Tip.GHC.Annotations(
module Tip.GHC.Annotations,
BuiltinType(..), Builtin(..), Lit(..)) where
import Data.Data
import Tip.Types
data TipAnnotation =
Name String
| Projections [String]
| Inline
To avoid mishaps , you should also mark the function as NOINLINE .
| Uninterpreted
| PrimType BuiltinType | SomeSpecial Special
| MakeWiredIn WiredIn | WiredIn WiredIn | Special | Literal Lit
| PropType
| Attr String
| AttrValue String String
deriving (Eq, Ord, Show, Data)
data Special =
A TIP builtin . The second argument is the arity .
Primitive Builtin Int
| Error
Cast between two types , which should either
| Cast
| InlineIt
| QuantSpecial Quant
deriving (Eq, Ord, Show, Read, Data)
data WiredIn =
MakeRational
| Negate
deriving (Eq, Ord, Show, Read, Data)
|
4f571c50651607a65b63128eec9e3e29f756e802efced4249519aba5d6c5041d | arcusfelis/xapian-erlang-bindings | xapian_pool_sup.erl | %% It is a pool of pools
-module(xapian_pool_sup).
-behaviour(supervisor).
-export([start_link/0, start_pool/1]). %% API.
-export([init/1]). %% supervisor.
-define(SUPERVISOR, ?MODULE).
%% API.
-spec start_link() -> {ok, Pid::pid()}.
start_link() ->
supervisor:start_link({local, ?SUPERVISOR}, ?MODULE, []).
%% supervisor.
start_pool(Args) ->
supervisor:start_child(?SUPERVISOR, [Args]).
init([]) ->
ChildSpec = {pool,
{poolboy, start_link, []},
transient, infinity, worker, [poolboy]},
{ok, {{simple_one_for_one, 10, 10}, [ChildSpec]}}.
| null | https://raw.githubusercontent.com/arcusfelis/xapian-erlang-bindings/29871b3e64d658e74701c6ba68bf59e1a9b168f1/src/xapian_pool_sup.erl | erlang | It is a pool of pools
API.
supervisor.
API.
supervisor. | -module(xapian_pool_sup).
-behaviour(supervisor).
-define(SUPERVISOR, ?MODULE).
-spec start_link() -> {ok, Pid::pid()}.
start_link() ->
supervisor:start_link({local, ?SUPERVISOR}, ?MODULE, []).
start_pool(Args) ->
supervisor:start_child(?SUPERVISOR, [Args]).
init([]) ->
ChildSpec = {pool,
{poolboy, start_link, []},
transient, infinity, worker, [poolboy]},
{ok, {{simple_one_for_one, 10, 10}, [ChildSpec]}}.
|
cb59bfe8a2fbe6e0e84c663b3d13b20ca60af58a0f02fae6944eff2dd94a57c8 | racket/racket-pkg-website | users.rkt | #lang racket/base
;; User management - userdb, plus registration and emailing
(provide login-password-correct?
send-registration-or-reset-email!
registration-code-correct?
register-or-update-user!)
(require net/sendmail)
(require reloadable)
(require infrastructure-userdb)
(require "config.rkt")
(require "hash-utils.rkt")
(define-logger racket-pkg-website/users)
(define userdb (userdb-config (@ (config) user-directory)
#t ;; writeable!
))
(define *codes*
(make-persistent-state '*codes* (lambda () (make-registration-state))))
(define (login-password-correct? email given-password)
(log-racket-pkg-website/users-info "Checking password for ~v" email)
(user-password-correct? (lookup-user userdb email) given-password))
(define (send-registration-or-reset-email! email)
(if (user-exists? userdb email)
(send-password-reset-email! email)
(send-account-registration-email! email)))
(define (sender-address)
(or (@ (config) email-sender-address)
""))
(log-racket-pkg-website/users-info "Will use sender address ~v" (sender-address))
(define (send-password-reset-email! email)
(log-racket-pkg-website/users-info "Sending password reset email to ~v" email)
(send-mail-message
(sender-address)
"Account password reset for Racket Package Catalog"
(list email)
'()
'()
(list
"Someone tried to login with your email address for an account on the Racket Package Catalog, but failed."
"If this was you, please use this code to reset your password:"
""
(generate-registration-code! (*codes*) email)
""
"This code will expire, so if it is not available, you'll have to try again.")))
(define (send-account-registration-email! email)
(log-racket-pkg-website/users-info "Sending account registration email to ~v" email)
(send-mail-message
(sender-address)
"Account confirmation for Racket Package Catalog"
(list email)
'()
'()
(list
"Someone tried to register your email address for an account on the Racket Package Catalog."
"If you want to proceed, use this code:"
""
(generate-registration-code! (*codes*) email)
""
"This code will expire, so if it is not available, you'll have to try to register again.")))
(define (registration-code-correct? email given-code)
(log-racket-pkg-website/users-info "Checking registration code for ~v" email)
(check-registration-code (*codes*)
email
given-code
(lambda () #t)
(lambda () #f)))
(define (register-or-update-user! email password)
(log-racket-pkg-website/users-info "Updating user record ~v" email)
(save-user! userdb
(user-password-set (or (lookup-user userdb email)
(make-user email password))
password)))
| null | https://raw.githubusercontent.com/racket/racket-pkg-website/d25b68c125698d944446f8e0a945eff85caebccd/src/users.rkt | racket | User management - userdb, plus registration and emailing
writeable! | #lang racket/base
(provide login-password-correct?
send-registration-or-reset-email!
registration-code-correct?
register-or-update-user!)
(require net/sendmail)
(require reloadable)
(require infrastructure-userdb)
(require "config.rkt")
(require "hash-utils.rkt")
(define-logger racket-pkg-website/users)
(define userdb (userdb-config (@ (config) user-directory)
))
(define *codes*
(make-persistent-state '*codes* (lambda () (make-registration-state))))
(define (login-password-correct? email given-password)
(log-racket-pkg-website/users-info "Checking password for ~v" email)
(user-password-correct? (lookup-user userdb email) given-password))
(define (send-registration-or-reset-email! email)
(if (user-exists? userdb email)
(send-password-reset-email! email)
(send-account-registration-email! email)))
(define (sender-address)
(or (@ (config) email-sender-address)
""))
(log-racket-pkg-website/users-info "Will use sender address ~v" (sender-address))
(define (send-password-reset-email! email)
(log-racket-pkg-website/users-info "Sending password reset email to ~v" email)
(send-mail-message
(sender-address)
"Account password reset for Racket Package Catalog"
(list email)
'()
'()
(list
"Someone tried to login with your email address for an account on the Racket Package Catalog, but failed."
"If this was you, please use this code to reset your password:"
""
(generate-registration-code! (*codes*) email)
""
"This code will expire, so if it is not available, you'll have to try again.")))
(define (send-account-registration-email! email)
(log-racket-pkg-website/users-info "Sending account registration email to ~v" email)
(send-mail-message
(sender-address)
"Account confirmation for Racket Package Catalog"
(list email)
'()
'()
(list
"Someone tried to register your email address for an account on the Racket Package Catalog."
"If you want to proceed, use this code:"
""
(generate-registration-code! (*codes*) email)
""
"This code will expire, so if it is not available, you'll have to try to register again.")))
(define (registration-code-correct? email given-code)
(log-racket-pkg-website/users-info "Checking registration code for ~v" email)
(check-registration-code (*codes*)
email
given-code
(lambda () #t)
(lambda () #f)))
(define (register-or-update-user! email password)
(log-racket-pkg-website/users-info "Updating user record ~v" email)
(save-user! userdb
(user-password-set (or (lookup-user userdb email)
(make-user email password))
password)))
|
f10d4967d063006d5d4650f0da60a9a16a3a548d6519bf3d3b5f3942e7bc2163 | lambdalille/history | place.mli | type t
include Yocaml.Metadata.READABLE with type t := t
include Yocaml.Metadata.INJECTABLE with type t := t
val fetch
: (module Yocaml.Metadata.VALIDABLE)
-> string
-> (Yocaml.Deps.t * t) Yocaml.t
| null | https://raw.githubusercontent.com/lambdalille/history/6b5dd76b80d095d3a54e8860a7568b115ed747e8/bin/place.mli | ocaml | type t
include Yocaml.Metadata.READABLE with type t := t
include Yocaml.Metadata.INJECTABLE with type t := t
val fetch
: (module Yocaml.Metadata.VALIDABLE)
-> string
-> (Yocaml.Deps.t * t) Yocaml.t
| |
9a3d352f45b612d3df3ac90c8ca9972fae7b8ab6d92d66780563a09aafb3e46f | metaocaml/ber-metaocaml | parsecmmaux.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(* Auxiliary functions for parsing *)
val bind_ident: string -> Backend_var.With_provenance.t
val find_ident: string -> Backend_var.t
val unbind_ident: Backend_var.With_provenance.t -> unit
val find_label: string -> int
val debuginfo: ?loc:Location.t -> unit -> Debuginfo.t
type error =
Unbound of string
exception Error of error
val report_error: error -> unit
| null | https://raw.githubusercontent.com/metaocaml/ber-metaocaml/4992d1f87fc08ccb958817926cf9d1d739caf3a2/testsuite/tools/parsecmmaux.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
Auxiliary functions for parsing | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
val bind_ident: string -> Backend_var.With_provenance.t
val find_ident: string -> Backend_var.t
val unbind_ident: Backend_var.With_provenance.t -> unit
val find_label: string -> int
val debuginfo: ?loc:Location.t -> unit -> Debuginfo.t
type error =
Unbound of string
exception Error of error
val report_error: error -> unit
|
1cab7a96987a58577843cac10ec1a9c504aef9c393e407c83425a8bb43dd88a9 | j-mie6/ParsleyHaskell | Analysis.hs | |
Module : Parsley . Internal . Frontend . Analysis
Description : Exposes various analysis passes .
License : BSD-3 - Clause
Maintainer : : experimental
Exposes the analysis passes defined within the analysis submodules via ` analyse ` .
@since 1.5.0.0
Module : Parsley.Internal.Frontend.Analysis
Description : Exposes various analysis passes.
License : BSD-3-Clause
Maintainer : Jamie Willis
Stability : experimental
Exposes the analysis passes defined within the analysis submodules via `analyse`.
@since 1.5.0.0
-}
module Parsley.Internal.Frontend.Analysis (
analyse, dependencyAnalysis, inliner,
module Flags
) where
import Parsley.Internal.Common.Indexed (Fix)
import Parsley.Internal.Core.CombinatorAST (Combinator)
import Parsley.Internal.Frontend.Analysis.Cut (cutAnalysis)
import Parsley.Internal.Frontend.Analysis.Dependencies (dependencyAnalysis)
import Parsley.Internal.Frontend.Analysis.Inliner (inliner)
import Parsley.Internal.Frontend.Analysis.Flags as Flags (emptyFlags, AnalysisFlags)
|
Performs Cut - Analysis on the combinator tree ( See " Parsley . Internal . Frontend . Analysis . Cut " )
@since 1.5.0.0
Performs Cut-Analysis on the combinator tree (See "Parsley.Internal.Frontend.Analysis.Cut")
@since 1.5.0.0
-}
analyse :: AnalysisFlags -> Fix Combinator a -> Fix Combinator a
analyse _ = cutAnalysis
| null | https://raw.githubusercontent.com/j-mie6/ParsleyHaskell/045ab78ed7af0cbb52cf8b42b6aeef5dd7f91ab2/parsley-core/src/ghc/Parsley/Internal/Frontend/Analysis.hs | haskell | |
Module : Parsley . Internal . Frontend . Analysis
Description : Exposes various analysis passes .
License : BSD-3 - Clause
Maintainer : : experimental
Exposes the analysis passes defined within the analysis submodules via ` analyse ` .
@since 1.5.0.0
Module : Parsley.Internal.Frontend.Analysis
Description : Exposes various analysis passes.
License : BSD-3-Clause
Maintainer : Jamie Willis
Stability : experimental
Exposes the analysis passes defined within the analysis submodules via `analyse`.
@since 1.5.0.0
-}
module Parsley.Internal.Frontend.Analysis (
analyse, dependencyAnalysis, inliner,
module Flags
) where
import Parsley.Internal.Common.Indexed (Fix)
import Parsley.Internal.Core.CombinatorAST (Combinator)
import Parsley.Internal.Frontend.Analysis.Cut (cutAnalysis)
import Parsley.Internal.Frontend.Analysis.Dependencies (dependencyAnalysis)
import Parsley.Internal.Frontend.Analysis.Inliner (inliner)
import Parsley.Internal.Frontend.Analysis.Flags as Flags (emptyFlags, AnalysisFlags)
|
Performs Cut - Analysis on the combinator tree ( See " Parsley . Internal . Frontend . Analysis . Cut " )
@since 1.5.0.0
Performs Cut-Analysis on the combinator tree (See "Parsley.Internal.Frontend.Analysis.Cut")
@since 1.5.0.0
-}
analyse :: AnalysisFlags -> Fix Combinator a -> Fix Combinator a
analyse _ = cutAnalysis
| |
c8752f9f33ef71d31e968eae881cf84a53d8730ced0c6386fde0c27c4584f62f | ekmett/semigroupoids | Semigroupoid.hs | # LANGUAGE CPP #
{-# LANGUAGE GADTs #-}
# LANGUAGE PolyKinds #
# LANGUAGE Trustworthy #
-----------------------------------------------------------------------------
-- |
-- Module : Data.Semigroupoid
Copyright : ( C ) 2007 - 2015
-- License : BSD-style (see the file LICENSE)
--
Maintainer : < >
-- Stability : provisional
-- Portability : portable
--
-- A semigroupoid satisfies all of the requirements to be a Category except
-- for the existence of identity arrows.
----------------------------------------------------------------------------
module Data.Semigroupoid
( Semigroupoid(..)
, WrappedCategory(..)
, Semi(..)
) where
import Control.Applicative
import Control.Arrow
import Control.Category
import Data.Functor.Bind
import Data.Semigroup
import qualified Data.Type.Coercion as Co
import qualified Data.Type.Equality as Eq
import Prelude hiding (id, (.))
#ifdef MIN_VERSION_contravariant
import Data.Functor.Contravariant
#endif
#ifdef MIN_VERSION_comonad
import Data.Functor.Extend
import Control.Comonad
#endif
#ifdef MIN_VERSION_tagged
import Data.Tagged (Tagged (..))
#endif
-- | 'Control.Category.Category' sans 'Control.Category.id'
class Semigroupoid c where
o :: c j k -> c i j -> c i k
instance Semigroupoid (->) where
o = (.)
-- | <(mathematics)#Rectangular_bands>
instance Semigroupoid (,) where
o (_,k) (i,_) = (i,k)
instance Bind m => Semigroupoid (Kleisli m) where
Kleisli g `o` Kleisli f = Kleisli $ \a -> f a >>- g
#ifdef MIN_VERSION_comonad
instance Extend w => Semigroupoid (Cokleisli w) where
Cokleisli f `o` Cokleisli g = Cokleisli $ f . extended g
#endif
#ifdef MIN_VERSION_contravariant
instance Semigroupoid Op where
Op f `o` Op g = Op (g `o` f)
#endif
newtype WrappedCategory k a b = WrapCategory { unwrapCategory :: k a b }
instance Category k => Semigroupoid (WrappedCategory k) where
WrapCategory f `o` WrapCategory g = WrapCategory (f . g)
instance Category k => Category (WrappedCategory k) where
id = WrapCategory id
WrapCategory f . WrapCategory g = WrapCategory (f . g)
newtype Semi m a b = Semi { getSemi :: m }
instance Semigroup m => Semigroupoid (Semi m) where
Semi m `o` Semi n = Semi (m <> n)
instance Monoid m => Category (Semi m) where
id = Semi mempty
Semi m . Semi n = Semi (m `mappend` n)
instance Semigroupoid Const where
_ `o` Const a = Const a
#ifdef MIN_VERSION_tagged
instance Semigroupoid Tagged where
Tagged b `o` _ = Tagged b
#endif
instance Semigroupoid Co.Coercion where
o = flip Co.trans
instance Semigroupoid (Eq.:~:) where
o = flip Eq.trans
#if MIN_VERSION_base(4,10,0)
instance Semigroupoid (Eq.:~~:) where
o Eq.HRefl Eq.HRefl = Eq.HRefl
#endif
| null | https://raw.githubusercontent.com/ekmett/semigroupoids/f63f7a2d696fdd6ed72eea90798fca1f86d6d040/src/Data/Semigroupoid.hs | haskell | # LANGUAGE GADTs #
---------------------------------------------------------------------------
|
Module : Data.Semigroupoid
License : BSD-style (see the file LICENSE)
Stability : provisional
Portability : portable
A semigroupoid satisfies all of the requirements to be a Category except
for the existence of identity arrows.
--------------------------------------------------------------------------
| 'Control.Category.Category' sans 'Control.Category.id'
| <(mathematics)#Rectangular_bands> | # LANGUAGE CPP #
# LANGUAGE PolyKinds #
# LANGUAGE Trustworthy #
Copyright : ( C ) 2007 - 2015
Maintainer : < >
module Data.Semigroupoid
( Semigroupoid(..)
, WrappedCategory(..)
, Semi(..)
) where
import Control.Applicative
import Control.Arrow
import Control.Category
import Data.Functor.Bind
import Data.Semigroup
import qualified Data.Type.Coercion as Co
import qualified Data.Type.Equality as Eq
import Prelude hiding (id, (.))
#ifdef MIN_VERSION_contravariant
import Data.Functor.Contravariant
#endif
#ifdef MIN_VERSION_comonad
import Data.Functor.Extend
import Control.Comonad
#endif
#ifdef MIN_VERSION_tagged
import Data.Tagged (Tagged (..))
#endif
class Semigroupoid c where
o :: c j k -> c i j -> c i k
instance Semigroupoid (->) where
o = (.)
instance Semigroupoid (,) where
o (_,k) (i,_) = (i,k)
instance Bind m => Semigroupoid (Kleisli m) where
Kleisli g `o` Kleisli f = Kleisli $ \a -> f a >>- g
#ifdef MIN_VERSION_comonad
instance Extend w => Semigroupoid (Cokleisli w) where
Cokleisli f `o` Cokleisli g = Cokleisli $ f . extended g
#endif
#ifdef MIN_VERSION_contravariant
instance Semigroupoid Op where
Op f `o` Op g = Op (g `o` f)
#endif
newtype WrappedCategory k a b = WrapCategory { unwrapCategory :: k a b }
instance Category k => Semigroupoid (WrappedCategory k) where
WrapCategory f `o` WrapCategory g = WrapCategory (f . g)
instance Category k => Category (WrappedCategory k) where
id = WrapCategory id
WrapCategory f . WrapCategory g = WrapCategory (f . g)
newtype Semi m a b = Semi { getSemi :: m }
instance Semigroup m => Semigroupoid (Semi m) where
Semi m `o` Semi n = Semi (m <> n)
instance Monoid m => Category (Semi m) where
id = Semi mempty
Semi m . Semi n = Semi (m `mappend` n)
instance Semigroupoid Const where
_ `o` Const a = Const a
#ifdef MIN_VERSION_tagged
instance Semigroupoid Tagged where
Tagged b `o` _ = Tagged b
#endif
instance Semigroupoid Co.Coercion where
o = flip Co.trans
instance Semigroupoid (Eq.:~:) where
o = flip Eq.trans
#if MIN_VERSION_base(4,10,0)
instance Semigroupoid (Eq.:~~:) where
o Eq.HRefl Eq.HRefl = Eq.HRefl
#endif
|
10a063c13b7210908ec851327472c13c3b07f9f7fe6321b5751b6a75017a8497 | athos/pogonos | spec_test.cljc | (ns pogonos.spec-test
(:require clojure.test
pogonos.core
#?(:clj [pogonos.spec-test-macros :refer [import-spec-tests]]))
#?(:cljs (:require-macros [pogonos.spec-test-macros :refer [import-spec-tests]])))
(import-spec-tests)
| null | https://raw.githubusercontent.com/athos/pogonos/42222f73b85814482b1e5bb797ff91ee7ad61c61/test/pogonos/spec_test.cljc | clojure | (ns pogonos.spec-test
(:require clojure.test
pogonos.core
#?(:clj [pogonos.spec-test-macros :refer [import-spec-tests]]))
#?(:cljs (:require-macros [pogonos.spec-test-macros :refer [import-spec-tests]])))
(import-spec-tests)
| |
6668c46f64673f147f39741188f8cb5da4e26c8424798c72c688dfd5badb6c05 | goblint/analyzer | sLRphased.ml | open Prelude
open Analyses
open Constraints
open Messages
open SLR
* the two - phased terminating SLR3 box solver
module Make =
functor (S:EqConstrSys) ->
functor (HM:Hashtbl.S with type key = S.v) ->
struct
include Generic.SolverStats (S) (HM)
module VS = Set.Make (S.Var)
module P =
struct
type t = S.Var.t * S.Var.t [@@deriving eq, hash]
end
module HPM = Hashtbl.Make (P)
let narrow = narrow S.Dom.narrow
let solve st vs =
let key = HM.create 10 in
let module H = Heap.Make (struct
type t = S.Var.t
let compare x y = compare (HM.find key x) (HM.find key y)
end)
in
let extract_min q =
let x = H.find_min !q in
q := H.del_min !q; x
in
let min_key q =
let x = H.find_min !q in
HM.find key x
in
let wpoint = HM.create 10 in
let infl = HM.create 10 in
let set = HM.create 10 in
let rho0 = HM.create 10 in (* widening *)
let rho1 = HM.create 10 in (* narrowing *)
let rho' = HPM.create 10 in
let q = ref H.empty in
let count = ref 0 in
let count_side = ref (max_int - 1) in
let rec iterate b prio =
if H.size !q = 0 || min_key q > prio then ()
else
let x = extract_min q in
if b then solve1 (HM.find key x - 1) x;
do_var b x;
iterate b prio
and do_var b x =
let rho = if b then rho1 else rho0 in
let wpx = HM.mem wpoint x in
HM.remove wpoint x;
let old = HM.find rho x in
let eval y =
get_var_event y;
if b then solve1 (HM.find key x - 1) y else solve0 y;
if HM.find key x <= HM.find key y then begin
HM.replace wpoint y ()
end;
HM.replace infl y (VS.add x (HM.find infl y));
HM.find rho y
in
let effects = ref Set.empty in
let side y d =
assert (not (S.Dom.is_bot d));
trace "sol" "SIDE: Var: %a\nVal: %a\n" S.Var.pretty_trace y S.Dom.pretty d;
let first = not (Set.mem y !effects) in
effects := Set.add y !effects;
if first then (
let old = try HPM.find rho ' ( x , y ) with _ - > S.Dom.bot ( ) in
let d = S.Dom.join old d in
HPM.replace rho' (x,y) d;
HM.replace set y (VS.add x (try HM.find set y with Not_found -> VS.empty));
if not (HM.mem rho y) then (
if b then solve1 (HM.find key x - 1) ~side:true y else solve0 ~side:true y
) else (
trace " sol " " SIDE : : % a already exists with Prio : % i and : % a\n " S.Var.pretty_trace y ( HM.find key y ) S.Dom.pretty d ;
if HM.find key y < 0 then HM.replace key y (Ref.post_decr count_side)
);
q := H.add y !q
) else (
assert (HM.mem rho y);
let old = HPM.find rho' (x,y) in
let newd = S.Dom.join old d in
HPM.replace rho' (x,y) newd;
if not (S.Dom.equal old newd) then (
q := H.add y !q
)
);
HM.replace wpoint y ()
in
let tmp = eq x eval side in
let tmp = S.Dom.join tmp (sides x) in
(* if (b && not (S.Dom.leq old tmp)) then ( *)
trace " sol " " : % a\nOld : % a\nTmp : % a\n " S.Var.pretty_trace x S.Dom.pretty old S.Dom.pretty tmp ;
(* assert false *)
(* ); *)
let val_new =
if wpx then
if b then
let nar = narrow old tmp in
trace "sol" "NARROW: Var: %a\nOld: %a\nNew: %a\nWiden: %a\n" S.Var.pretty_trace x S.Dom.pretty old S.Dom.pretty tmp S.Dom.pretty nar;
nar
else
let wid = S.Dom.widen old (S.Dom.join old tmp) in
trace "sol" "WIDEN: Var: %a\nOld: %a\nNew: %a\nWiden: %a\n" S.Var.pretty_trace x S.Dom.pretty old S.Dom.pretty tmp S.Dom.pretty wid;
wid
else
tmp
in
if tracing then trace "sol" "Var: %a\n" S.Var.pretty_trace x ;
if tracing then trace "sol" "Contrib:%a\n" S.Dom.pretty val_new;
if S.Dom.equal old val_new then ()
else begin
update_var_event x old val_new;
if tracing then trace "sol" "New Value:%a\n\n" S.Dom.pretty val_new;
HM.replace rho x val_new;
let w = try HM.find infl x with Not_found -> VS.empty in
let w = if wpx then VS.add x w else w in
q := Enum.fold (fun x y -> H.add y x) !q (VS.enum w);
HM.replace infl x VS.empty
end
and solve0 ?(side=false) x =
if not (HM.mem rho0 x) then (
new_var_event x;
let d = S.Dom.bot () in
HM.replace rho0 x d;
HM.replace infl x VS.empty;
if side then (
print_endline @@ "Variable by side-effect " ^ S.Var.var_id x ^ " to " ^ string_of_int !count_side;
HM.replace key x !count_side; decr count_side
) else (
print_endline @@ "Variable " ^ S.Var.var_id x ^ " to " ^ string_of_int !count;
HM.replace key x !count; decr count
);
do_var false x;
if side then
q := H.add x !q
else
iterate false (HM.find key x)
)
and solve1 ?(side=false) prio x =
solve0 ~side:side x;
if not (HM.mem rho1 x) then (
new_var_event x;
let d = HM.find rho0 x in
HM.replace rho1 x d;
let w = VS.add x @@ try HM.find infl x with Not_found -> VS.empty in
HM.replace infl x VS.empty;
q := Enum.fold (fun x y -> H.add y x) !q (VS.enum w);
iterate true prio
)
and sides x =
let w = try HM.find set x with Not_found -> VS.empty in
let v = Enum.fold (fun d z -> try S.Dom.join d (HPM.find rho' (z,x)) with Not_found -> d) (S.Dom.bot ()) (VS.enum w)
in trace "sol" "SIDES: Var: %a\nVal: %a\n" S.Var.pretty_trace x S.Dom.pretty v; v
and eq x get set =
eval_rhs_event x;
match S.system x with
| None -> S.Dom.bot ()
| Some f -> f get set
in
let set_start (x,d) =
solve0 ~side:true x;
HM.replace rho0 x d;
HM.replace wpoint x ();
q := H.add x !q;
HM.replace set x (VS.add x VS.empty);
HPM.replace rho' (x,x) d
in
start_event ();
List.iter set_start st;
List.iter (solve0) vs;
iterate false max_int;
List.iter (solve1 max_int) vs;
TODO remove ?
stop_event ();
if GobConfig.get_bool "dbg.print_wpoints" then (
Printf.printf "\nWidening points:\n";
HM.iter (fun k () -> ignore @@ Pretty.printf "%a\n" S.Var.pretty_trace k) wpoint;
print_newline ();
);
HM.clear key ;
HM.clear wpoint;
HM.clear infl ;
HM.clear set ;
HPM.clear rho' ;
rho1
end
let _ =
two - phased slr3 t
| null | https://raw.githubusercontent.com/goblint/analyzer/b9f527f6d3066d548c5551716366fbed8acecf35/src/solvers/sLRphased.ml | ocaml | widening
narrowing
if (b && not (S.Dom.leq old tmp)) then (
assert false
); | open Prelude
open Analyses
open Constraints
open Messages
open SLR
* the two - phased terminating SLR3 box solver
module Make =
functor (S:EqConstrSys) ->
functor (HM:Hashtbl.S with type key = S.v) ->
struct
include Generic.SolverStats (S) (HM)
module VS = Set.Make (S.Var)
module P =
struct
type t = S.Var.t * S.Var.t [@@deriving eq, hash]
end
module HPM = Hashtbl.Make (P)
let narrow = narrow S.Dom.narrow
let solve st vs =
let key = HM.create 10 in
let module H = Heap.Make (struct
type t = S.Var.t
let compare x y = compare (HM.find key x) (HM.find key y)
end)
in
let extract_min q =
let x = H.find_min !q in
q := H.del_min !q; x
in
let min_key q =
let x = H.find_min !q in
HM.find key x
in
let wpoint = HM.create 10 in
let infl = HM.create 10 in
let set = HM.create 10 in
let rho' = HPM.create 10 in
let q = ref H.empty in
let count = ref 0 in
let count_side = ref (max_int - 1) in
let rec iterate b prio =
if H.size !q = 0 || min_key q > prio then ()
else
let x = extract_min q in
if b then solve1 (HM.find key x - 1) x;
do_var b x;
iterate b prio
and do_var b x =
let rho = if b then rho1 else rho0 in
let wpx = HM.mem wpoint x in
HM.remove wpoint x;
let old = HM.find rho x in
let eval y =
get_var_event y;
if b then solve1 (HM.find key x - 1) y else solve0 y;
if HM.find key x <= HM.find key y then begin
HM.replace wpoint y ()
end;
HM.replace infl y (VS.add x (HM.find infl y));
HM.find rho y
in
let effects = ref Set.empty in
let side y d =
assert (not (S.Dom.is_bot d));
trace "sol" "SIDE: Var: %a\nVal: %a\n" S.Var.pretty_trace y S.Dom.pretty d;
let first = not (Set.mem y !effects) in
effects := Set.add y !effects;
if first then (
let old = try HPM.find rho ' ( x , y ) with _ - > S.Dom.bot ( ) in
let d = S.Dom.join old d in
HPM.replace rho' (x,y) d;
HM.replace set y (VS.add x (try HM.find set y with Not_found -> VS.empty));
if not (HM.mem rho y) then (
if b then solve1 (HM.find key x - 1) ~side:true y else solve0 ~side:true y
) else (
trace " sol " " SIDE : : % a already exists with Prio : % i and : % a\n " S.Var.pretty_trace y ( HM.find key y ) S.Dom.pretty d ;
if HM.find key y < 0 then HM.replace key y (Ref.post_decr count_side)
);
q := H.add y !q
) else (
assert (HM.mem rho y);
let old = HPM.find rho' (x,y) in
let newd = S.Dom.join old d in
HPM.replace rho' (x,y) newd;
if not (S.Dom.equal old newd) then (
q := H.add y !q
)
);
HM.replace wpoint y ()
in
let tmp = eq x eval side in
let tmp = S.Dom.join tmp (sides x) in
trace " sol " " : % a\nOld : % a\nTmp : % a\n " S.Var.pretty_trace x S.Dom.pretty old S.Dom.pretty tmp ;
let val_new =
if wpx then
if b then
let nar = narrow old tmp in
trace "sol" "NARROW: Var: %a\nOld: %a\nNew: %a\nWiden: %a\n" S.Var.pretty_trace x S.Dom.pretty old S.Dom.pretty tmp S.Dom.pretty nar;
nar
else
let wid = S.Dom.widen old (S.Dom.join old tmp) in
trace "sol" "WIDEN: Var: %a\nOld: %a\nNew: %a\nWiden: %a\n" S.Var.pretty_trace x S.Dom.pretty old S.Dom.pretty tmp S.Dom.pretty wid;
wid
else
tmp
in
if tracing then trace "sol" "Var: %a\n" S.Var.pretty_trace x ;
if tracing then trace "sol" "Contrib:%a\n" S.Dom.pretty val_new;
if S.Dom.equal old val_new then ()
else begin
update_var_event x old val_new;
if tracing then trace "sol" "New Value:%a\n\n" S.Dom.pretty val_new;
HM.replace rho x val_new;
let w = try HM.find infl x with Not_found -> VS.empty in
let w = if wpx then VS.add x w else w in
q := Enum.fold (fun x y -> H.add y x) !q (VS.enum w);
HM.replace infl x VS.empty
end
and solve0 ?(side=false) x =
if not (HM.mem rho0 x) then (
new_var_event x;
let d = S.Dom.bot () in
HM.replace rho0 x d;
HM.replace infl x VS.empty;
if side then (
print_endline @@ "Variable by side-effect " ^ S.Var.var_id x ^ " to " ^ string_of_int !count_side;
HM.replace key x !count_side; decr count_side
) else (
print_endline @@ "Variable " ^ S.Var.var_id x ^ " to " ^ string_of_int !count;
HM.replace key x !count; decr count
);
do_var false x;
if side then
q := H.add x !q
else
iterate false (HM.find key x)
)
and solve1 ?(side=false) prio x =
solve0 ~side:side x;
if not (HM.mem rho1 x) then (
new_var_event x;
let d = HM.find rho0 x in
HM.replace rho1 x d;
let w = VS.add x @@ try HM.find infl x with Not_found -> VS.empty in
HM.replace infl x VS.empty;
q := Enum.fold (fun x y -> H.add y x) !q (VS.enum w);
iterate true prio
)
and sides x =
let w = try HM.find set x with Not_found -> VS.empty in
let v = Enum.fold (fun d z -> try S.Dom.join d (HPM.find rho' (z,x)) with Not_found -> d) (S.Dom.bot ()) (VS.enum w)
in trace "sol" "SIDES: Var: %a\nVal: %a\n" S.Var.pretty_trace x S.Dom.pretty v; v
and eq x get set =
eval_rhs_event x;
match S.system x with
| None -> S.Dom.bot ()
| Some f -> f get set
in
let set_start (x,d) =
solve0 ~side:true x;
HM.replace rho0 x d;
HM.replace wpoint x ();
q := H.add x !q;
HM.replace set x (VS.add x VS.empty);
HPM.replace rho' (x,x) d
in
start_event ();
List.iter set_start st;
List.iter (solve0) vs;
iterate false max_int;
List.iter (solve1 max_int) vs;
TODO remove ?
stop_event ();
if GobConfig.get_bool "dbg.print_wpoints" then (
Printf.printf "\nWidening points:\n";
HM.iter (fun k () -> ignore @@ Pretty.printf "%a\n" S.Var.pretty_trace k) wpoint;
print_newline ();
);
HM.clear key ;
HM.clear wpoint;
HM.clear infl ;
HM.clear set ;
HPM.clear rho' ;
rho1
end
let _ =
two - phased slr3 t
|
64310181389246f5e83ebe7cfca5cffade12fe40fab9d50e146b1881ef2441ed | ndmitchell/catch | Referee.hs |
module Referee where
main xs ys = case null xs || null ys of
True -> []
False -> [head xs, head ys]
| null | https://raw.githubusercontent.com/ndmitchell/catch/5d834416a27b4df3f7ce7830c4757d4505aaf96e/examples/Example/Referee.hs | haskell |
module Referee where
main xs ys = case null xs || null ys of
True -> []
False -> [head xs, head ys]
| |
7d7982193caf4432126f2720ad2231da23cf5fdb54c1a09559dd926355485d04 | iustin/corydalis | FoundationSpec.hs |
Copyright ( C ) 2013
This program is free software : you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
along with this program . If not , see < / > .
Copyright (C) 2013 Iustin Pop
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see </>.
-}
# LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
module FoundationSpec (spec) where
import qualified Data.Map as M
import Web.Cookie
import Handler.Cookies
import TestImport
import Types
handlerView :: [(Route App, ViewMode)]
handlerView = [ (BrowseImagesR 0, ViewImages PresentationGrid)
, (ListImagesR, ViewImages PresentationList)
, (BrowseFoldersR 0, ViewFolders PresentationGrid)
, (ListFoldersR, ViewFolders PresentationList)
]
checkViewCookie :: Maybe ViewMode -> YesodExample App ()
checkViewCookie expected = request $ do
cookies <- getRequestCookies
let actual = do
cookieB <- M.lookup (encodeUtf8 viewCookieName) cookies
parseViewMode . decodeUtf8 . setCookieValue $ cookieB
liftIO $ actual `shouldBe` expected
spec :: Spec
spec = parallel $ withApp $ do
describe "checks view pages set their cookie" $ do
forM_ handlerView $ \(route, symbol) ->
it ("validates route " ++ show route) $ do
login
checkRoute route
checkViewCookie $ Just symbol
it "checks that a normal handler doesn't reset the view mode" $ do
login
forM_ [CurateR, LensStatsR] $ \route -> do
checkRoute route
checkViewCookie Nothing
| null | https://raw.githubusercontent.com/iustin/corydalis/43f8bf004904847fad43c428a9e1b20e67da964d/test/FoundationSpec.hs | haskell | # LANGUAGE OverloadedStrings # |
Copyright ( C ) 2013
This program is free software : you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
along with this program . If not , see < / > .
Copyright (C) 2013 Iustin Pop
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see </>.
-}
# LANGUAGE NoImplicitPrelude #
module FoundationSpec (spec) where
import qualified Data.Map as M
import Web.Cookie
import Handler.Cookies
import TestImport
import Types
handlerView :: [(Route App, ViewMode)]
handlerView = [ (BrowseImagesR 0, ViewImages PresentationGrid)
, (ListImagesR, ViewImages PresentationList)
, (BrowseFoldersR 0, ViewFolders PresentationGrid)
, (ListFoldersR, ViewFolders PresentationList)
]
checkViewCookie :: Maybe ViewMode -> YesodExample App ()
checkViewCookie expected = request $ do
cookies <- getRequestCookies
let actual = do
cookieB <- M.lookup (encodeUtf8 viewCookieName) cookies
parseViewMode . decodeUtf8 . setCookieValue $ cookieB
liftIO $ actual `shouldBe` expected
spec :: Spec
spec = parallel $ withApp $ do
describe "checks view pages set their cookie" $ do
forM_ handlerView $ \(route, symbol) ->
it ("validates route " ++ show route) $ do
login
checkRoute route
checkViewCookie $ Just symbol
it "checks that a normal handler doesn't reset the view mode" $ do
login
forM_ [CurateR, LensStatsR] $ \route -> do
checkRoute route
checkViewCookie Nothing
|
342ef4e1c7513bef072888e866b7e65d81a7c2ba899717da81c64ad4a84c528b | jyh/metaprl | ma_Dconstant_object_directory.mli | extends Nuprl_Dconstant_object_directory
| null | https://raw.githubusercontent.com/jyh/metaprl/51ba0bbbf409ecb7f96f5abbeb91902fdec47a19/theories/mesa/ma_Dconstant_object_directory.mli | ocaml | extends Nuprl_Dconstant_object_directory
| |
07dc854c28d2139fdac5992cb27299ce67fde9dc7efc8cdb5ebb26a4985f1490 | danelahman/haskell-coop | FPStateTests.hs | # LANGUAGE DataKinds #
{-# LANGUAGE GADTs #-}
|
Module : FPStateTests
Description : Example use cases of the footprint - based runner for state from ` Control . Runner . FPState `
Copyright : ( c ) , 2019
License : MIT
Maintainer :
Stability : experimental
This module provides example use cases of the footprint - based
runners for state from ` Control . Runner . FPState ` .
Module : FPStateTests
Description : Example use cases of the footprint-based runner for state from `Control.Runner.FPState`
Copyright : (c) Danel Ahman, 2019
License : MIT
Maintainer :
Stability : experimental
This module provides example use cases of the footprint-based
runners for state from `Control.Runner.FPState`.
-}
module FPStateTests where
import Control.Runner
import Control.Runner.FPState
test1 :: User '[State (ShC Int (ShC String ShE))] Int
test1 =
do x <- get AZ;
return x
expected result 42
test3 :: User '[State (ShC Int (ShC String ShE))] (Int,String)
test3 =
do s <- get (AS AZ);
x <- get AZ;
put AZ (x + 7);
put (AS AZ) (s ++ "bar");
x' <- get AZ;
s' <- get (AS AZ);
return (x',s')
expected result ( 49,"foobar " )
| null | https://raw.githubusercontent.com/danelahman/haskell-coop/95ddb0f340313a25f2e006c75dab8e2c93add8f8/examples/without_signals/FPStateTests.hs | haskell | # LANGUAGE GADTs # | # LANGUAGE DataKinds #
|
Module : FPStateTests
Description : Example use cases of the footprint - based runner for state from ` Control . Runner . FPState `
Copyright : ( c ) , 2019
License : MIT
Maintainer :
Stability : experimental
This module provides example use cases of the footprint - based
runners for state from ` Control . Runner . FPState ` .
Module : FPStateTests
Description : Example use cases of the footprint-based runner for state from `Control.Runner.FPState`
Copyright : (c) Danel Ahman, 2019
License : MIT
Maintainer :
Stability : experimental
This module provides example use cases of the footprint-based
runners for state from `Control.Runner.FPState`.
-}
module FPStateTests where
import Control.Runner
import Control.Runner.FPState
test1 :: User '[State (ShC Int (ShC String ShE))] Int
test1 =
do x <- get AZ;
return x
expected result 42
test3 :: User '[State (ShC Int (ShC String ShE))] (Int,String)
test3 =
do s <- get (AS AZ);
x <- get AZ;
put AZ (x + 7);
put (AS AZ) (s ++ "bar");
x' <- get AZ;
s' <- get (AS AZ);
return (x',s')
expected result ( 49,"foobar " )
|
56e9f5b1c65af2ff86917da1696629a1a506116ad67bc89fb826fc121fdfeae7 | groupoid/alonzo | error.ml | open Expr
exception Parser of int * int * string
exception UnboundVariable of name
exception Type of typexp * typexp
exception Application of exp * typexp * exp * typexp
let print_error : exn -> unit = function
| UnboundVariable var -> Printf.printf "Unbound variable: %s\n" var
| Type (expected, actual) -> Printf.printf "Types don't match.\n Expected: %s\nActual: %s\n" (t_to_s expected) (t_to_s actual)
| Application (e1, te1, e2, te2) -> Printf.printf "Can't apply %s:%s\nto %s:%s.\n" (e_to_s e1) (t_to_s te1) (e_to_s e2) (t_to_s te2)
| ex -> Printf.printf "Uncaught exception: %s\n" (Printexc.to_string ex)
let handle_errors (f : 'a -> 'b) (arg : 'a) : unit =
try f arg with ex -> print_error ex; ()
| null | https://raw.githubusercontent.com/groupoid/alonzo/a74e7c4cfdc2bd94402ef9c1c1e8979d91f5ad28/src/error.ml | ocaml | open Expr
exception Parser of int * int * string
exception UnboundVariable of name
exception Type of typexp * typexp
exception Application of exp * typexp * exp * typexp
let print_error : exn -> unit = function
| UnboundVariable var -> Printf.printf "Unbound variable: %s\n" var
| Type (expected, actual) -> Printf.printf "Types don't match.\n Expected: %s\nActual: %s\n" (t_to_s expected) (t_to_s actual)
| Application (e1, te1, e2, te2) -> Printf.printf "Can't apply %s:%s\nto %s:%s.\n" (e_to_s e1) (t_to_s te1) (e_to_s e2) (t_to_s te2)
| ex -> Printf.printf "Uncaught exception: %s\n" (Printexc.to_string ex)
let handle_errors (f : 'a -> 'b) (arg : 'a) : unit =
try f arg with ex -> print_error ex; ()
| |
cfb5f03f3140449f62e0e03e3e4767fa60c74a47846029cf9613187da207c078 | racket/web-server | 12658.rkt | #lang racket/base
(require rackunit
"12658-mod.rkt")
(check-exn
(lambda (e)
(and (exn:fail:contract:arity? e)
(regexp-match? #rx"go: arity mismatch;\n the expected number of arguments does not match the given number\n expected: 0\n given: 1"
(exn-message e))))
(lambda () (go 42)))
| null | https://raw.githubusercontent.com/racket/web-server/39f49dade5edab758ed9c00f35e5093bc5a69da5/web-server-test/tests/web-server/pr/12658.rkt | racket | #lang racket/base
(require rackunit
"12658-mod.rkt")
(check-exn
(lambda (e)
(and (exn:fail:contract:arity? e)
(regexp-match? #rx"go: arity mismatch;\n the expected number of arguments does not match the given number\n expected: 0\n given: 1"
(exn-message e))))
(lambda () (go 42)))
| |
48c0fe326cf16612933d8fdee79e636a57d3c2db4292e6803d8bd8f5e34e008f | philnguyen/soft-contract | ex-03.rkt | #lang racket
(require soft-contract/fake-contract)
(define (f member v l)
(let ([x (member v l)])
#|HERE|# (cons? x)))
(provide/contract
[f ((any/c (listof any/c) . -> . (or/c false? (cons/c any/c (listof any/c))))
any/c
(listof any/c)
. -> . (not/c false?))])
| null | https://raw.githubusercontent.com/philnguyen/soft-contract/5e07dc2d622ee80b961f4e8aebd04ce950720239/soft-contract/test/programs/unsafe/octy/ex-03.rkt | racket | HERE | #lang racket
(require soft-contract/fake-contract)
(define (f member v l)
(let ([x (member v l)])
(provide/contract
[f ((any/c (listof any/c) . -> . (or/c false? (cons/c any/c (listof any/c))))
any/c
(listof any/c)
. -> . (not/c false?))])
|
0204fee18c527ebbdcc589c0b6031809fc2ae31aa7db607e99d42a82607904fd | bos/rwh | callingpure.hs | {-- snippet all --}
-- ch08/callingpure.hs
name2reply :: String -> String
name2reply name =
"Pleased to meet you, " ++ name ++ ".\n" ++
"Your name contains " ++ charcount ++ " characters."
where charcount = show (length name)
main :: IO ()
main = do
putStrLn "Greetings once again. What is your name?"
inpStr <- getLine
let outStr = name2reply inpStr
putStrLn outStr
{-- /snippet all --}
| null | https://raw.githubusercontent.com/bos/rwh/7fd1e467d54aef832f5476ebf5f4f6a898a895d1/examples/ch08/callingpure.hs | haskell | - snippet all -
ch08/callingpure.hs
- /snippet all - |
name2reply :: String -> String
name2reply name =
"Pleased to meet you, " ++ name ++ ".\n" ++
"Your name contains " ++ charcount ++ " characters."
where charcount = show (length name)
main :: IO ()
main = do
putStrLn "Greetings once again. What is your name?"
inpStr <- getLine
let outStr = name2reply inpStr
putStrLn outStr
|
60fe5f0adf4bf71254c937f5715ef7535a33a5e276b183ac9ee330942d749fa2 | scarvalhojr/haskellbook | section9.6.hs |
myWords :: [Char] -> [[Char]]
myWords [] = []
myWords (' ':xs) = myWords xs
myWords xs = takeWhile (/=' ') xs : (myWords (dropWhile (/=' ') xs))
firstSen = "Tyger, Tyger, burning bright\n"
secondSen = "In the forest of the night\n"
thirdSen = "What immortal hand of eye\n"
fourthSen = "Could frame thy fearful\
\ symmetry?"
sentences = firstSen ++ secondSen ++ thirdSen ++ fourthSen
myLines :: String -> [String]
myLines [] = []
myLines ('\n':xs) = myLines xs
myLines xs = takeWhile (/='\n') xs : (myLines (dropWhile (/='\n') xs))
splitByChar :: Char -> String -> [String]
splitByChar _ [] = []
splitByChar c (x:xs)
| x == c = splitByChar c xs
| otherwise = takeWhile (/=c) (x:xs) : (splitByChar c (dropWhile (/=c) (x:xs)))
| null | https://raw.githubusercontent.com/scarvalhojr/haskellbook/6016a5a78da3fc4a29f5ea68b239563895c448d5/chapter9/section9.6.hs | haskell |
myWords :: [Char] -> [[Char]]
myWords [] = []
myWords (' ':xs) = myWords xs
myWords xs = takeWhile (/=' ') xs : (myWords (dropWhile (/=' ') xs))
firstSen = "Tyger, Tyger, burning bright\n"
secondSen = "In the forest of the night\n"
thirdSen = "What immortal hand of eye\n"
fourthSen = "Could frame thy fearful\
\ symmetry?"
sentences = firstSen ++ secondSen ++ thirdSen ++ fourthSen
myLines :: String -> [String]
myLines [] = []
myLines ('\n':xs) = myLines xs
myLines xs = takeWhile (/='\n') xs : (myLines (dropWhile (/='\n') xs))
splitByChar :: Char -> String -> [String]
splitByChar _ [] = []
splitByChar c (x:xs)
| x == c = splitByChar c xs
| otherwise = takeWhile (/=c) (x:xs) : (splitByChar c (dropWhile (/=c) (x:xs)))
| |
307bae080416c2240af91497e47bd3448dfbcd083cd691c9d8670029eba0413a | austral/austral | CliParser.ml |
Part of the Austral project , under the Apache License v2.0 with LLVM Exceptions .
See LICENSE file for details .
SPDX - License - Identifier : Apache-2.0 WITH LLVM - exception
Part of the Austral project, under the Apache License v2.0 with LLVM Exceptions.
See LICENSE file for details.
SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
*)
open Identifier
open CliUtil
open Error
module Errors = struct
let invalid_entrypoint entry =
austral_raise CliError [
Text "Invalid entrypoint format ";
Code entry;
Break;
Text "The entrypoint must be supplied in the form ";
Code "Module:name"
]
let invalid_module_source source =
austral_raise CliError [
Text "Invalid module source format ";
Code source;
Break;
Text "Sources must be supplied as either";
Code "interface.aui,body.aum";
Text " or ";
Code "body.aum"
]
let missing_entrypoint () =
austral_raise CliError [
Code "--entrypoint";
Text " argument not provided."
]
let missing_module () =
austral_raise CliError [
Text "The ";
Code "compile";
Text " command must specify at least one module."
]
let missing_output () =
austral_raise CliError [
Code "--output";
Text " argument not provided."
]
let no_entrypoint_wrong_target () =
austral_raise CliError [
Code "--no-entrypoint";
Text " requires ";
Code "--target-type=c";
Text ", because otherwise the compiler will try to build the generated C code, and will fail because there is no entrypoint function."
]
let unknown_target target =
austral_raise CliError [
Text "Unknown target type ";
Code target
]
end
type entrypoint =
| Entrypoint of module_name * identifier
[@@deriving eq]
type mod_source =
| ModuleSource of { inter_path: string; body_path: string }
| ModuleBodySource of { body_path: string }
[@@deriving eq]
type target =
| TypeCheck
| Executable of { bin_path: string; entrypoint: entrypoint; }
| CStandalone of { output_path: string; entrypoint: entrypoint option; }
[@@deriving eq]
type cmd =
| HelpCommand
| VersionCommand
| CompileHelp
| WholeProgramCompile of {
modules: mod_source list;
target: target;
}
[@@deriving eq]
let check_leftovers (arglist: arglist): unit =
if (arglist_size arglist) > 0 then
err "There are leftover arguments."
else
()
let parse_mod_source (s: string): mod_source =
let ss = String.split_on_char ',' s in
match ss with
| [path] ->
ModuleBodySource { body_path = path }
| [inter_path; body_path] ->
ModuleSource { inter_path = inter_path; body_path = body_path }
| _ ->
Errors.invalid_module_source s
let parse_entrypoint (s: string): entrypoint =
let ss = String.split_on_char ':' s in
match ss with
| [mn; i] ->
Entrypoint (make_mod_name mn, make_ident i)
| _ ->
Errors.invalid_entrypoint s
let parse_executable_target (arglist: arglist): (arglist * target) =
(* Get the --entrypoint *)
match pop_value_flag arglist "entrypoint" with
| Some (arglist, entrypoint) ->
(match pop_value_flag arglist "output" with
| Some (arglist, bin_path) ->
(arglist, Executable { bin_path = bin_path; entrypoint = parse_entrypoint entrypoint })
| None ->
Errors.missing_output ())
| None ->
(match pop_bool_flag arglist "--no-entrypoint" with
| Some _ ->
Errors.no_entrypoint_wrong_target ()
| None ->
Errors.missing_entrypoint ())
let get_output (arglist: arglist): (arglist * string) =
match pop_value_flag arglist "output" with
| Some (arglist, output_path) ->
(arglist, output_path)
| None ->
Errors.missing_output ()
let parse_c_target (arglist: arglist): (arglist * target) =
(* Get the --entrypoint *)
match pop_value_flag arglist "entrypoint" with
| Some (arglist, entrypoint) ->
(* An entrypoint was passed in. *)
let (arglist, output_path) = get_output arglist in
(arglist, CStandalone { output_path = output_path; entrypoint = Some (parse_entrypoint entrypoint) })
| None ->
(* No --entrypoint. Did we get the --no-entrypoint flag? *)
(match pop_bool_flag arglist "no-entrypoint" with
| Some arglist ->
let (arglist, output_path) = get_output arglist in
(arglist, CStandalone { output_path = output_path; entrypoint = None })
| None ->
Errors.missing_entrypoint ())
let parse_target_type (arglist: arglist): (arglist * target) =
match pop_value_flag arglist "target-type" with
| Some (arglist, target_value) ->
(* An explicit target type was passed. *)
(match target_value with
| "exe" ->
(* Build an executable binary. *)
parse_executable_target arglist
| "c" ->
Build a standaloine C file .
parse_c_target arglist
| "tc" ->
.
(arglist, TypeCheck)
| _ ->
Errors.unknown_target target_value)
| None ->
(* The default target is to build an executable binary. This means we need
an entrypoint. *)
parse_executable_target arglist
let parse_compile_command' (arglist: arglist): (arglist * cmd) =
Parse module list
let (arglist, modules): (arglist * string list) = pop_positional arglist in
let modules: mod_source list = List.map parse_mod_source modules in
There must be at least one module .
if ((List.length modules) < 1) then
Errors.missing_module ()
else
(* Parse the target type. *)
let (arglist, target): (arglist * target) = parse_target_type arglist in
(arglist, WholeProgramCompile { modules = modules; target = target; })
let parse_compile_command (arglist: arglist): (arglist * cmd) =
match pop_bool_flag arglist "help" with
| Some arglist ->
(arglist, CompileHelp)
| None ->
parse_compile_command' arglist
let parse (arglist: arglist): cmd =
let args: arg list = arglist_to_list arglist in
match args with
| [BoolFlag "help"] ->
HelpCommand
| [BoolFlag "version"] ->
VersionCommand
| (PositionalArg "compile")::rest ->
(* Try parsing the `compile` command. *)
let (arglist, cmd) = parse_compile_command (arglist_from_list rest) in
let _ = check_leftovers arglist in
cmd
| _ ->
HelpCommand
| null | https://raw.githubusercontent.com/austral/austral/69b6f7de36cc9576483acd1ac4a31bf52074dbd1/lib/CliParser.ml | ocaml | Get the --entrypoint
Get the --entrypoint
An entrypoint was passed in.
No --entrypoint. Did we get the --no-entrypoint flag?
An explicit target type was passed.
Build an executable binary.
The default target is to build an executable binary. This means we need
an entrypoint.
Parse the target type.
Try parsing the `compile` command. |
Part of the Austral project , under the Apache License v2.0 with LLVM Exceptions .
See LICENSE file for details .
SPDX - License - Identifier : Apache-2.0 WITH LLVM - exception
Part of the Austral project, under the Apache License v2.0 with LLVM Exceptions.
See LICENSE file for details.
SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
*)
open Identifier
open CliUtil
open Error
module Errors = struct
let invalid_entrypoint entry =
austral_raise CliError [
Text "Invalid entrypoint format ";
Code entry;
Break;
Text "The entrypoint must be supplied in the form ";
Code "Module:name"
]
let invalid_module_source source =
austral_raise CliError [
Text "Invalid module source format ";
Code source;
Break;
Text "Sources must be supplied as either";
Code "interface.aui,body.aum";
Text " or ";
Code "body.aum"
]
let missing_entrypoint () =
austral_raise CliError [
Code "--entrypoint";
Text " argument not provided."
]
let missing_module () =
austral_raise CliError [
Text "The ";
Code "compile";
Text " command must specify at least one module."
]
let missing_output () =
austral_raise CliError [
Code "--output";
Text " argument not provided."
]
let no_entrypoint_wrong_target () =
austral_raise CliError [
Code "--no-entrypoint";
Text " requires ";
Code "--target-type=c";
Text ", because otherwise the compiler will try to build the generated C code, and will fail because there is no entrypoint function."
]
let unknown_target target =
austral_raise CliError [
Text "Unknown target type ";
Code target
]
end
type entrypoint =
| Entrypoint of module_name * identifier
[@@deriving eq]
type mod_source =
| ModuleSource of { inter_path: string; body_path: string }
| ModuleBodySource of { body_path: string }
[@@deriving eq]
type target =
| TypeCheck
| Executable of { bin_path: string; entrypoint: entrypoint; }
| CStandalone of { output_path: string; entrypoint: entrypoint option; }
[@@deriving eq]
type cmd =
| HelpCommand
| VersionCommand
| CompileHelp
| WholeProgramCompile of {
modules: mod_source list;
target: target;
}
[@@deriving eq]
let check_leftovers (arglist: arglist): unit =
if (arglist_size arglist) > 0 then
err "There are leftover arguments."
else
()
let parse_mod_source (s: string): mod_source =
let ss = String.split_on_char ',' s in
match ss with
| [path] ->
ModuleBodySource { body_path = path }
| [inter_path; body_path] ->
ModuleSource { inter_path = inter_path; body_path = body_path }
| _ ->
Errors.invalid_module_source s
let parse_entrypoint (s: string): entrypoint =
let ss = String.split_on_char ':' s in
match ss with
| [mn; i] ->
Entrypoint (make_mod_name mn, make_ident i)
| _ ->
Errors.invalid_entrypoint s
let parse_executable_target (arglist: arglist): (arglist * target) =
match pop_value_flag arglist "entrypoint" with
| Some (arglist, entrypoint) ->
(match pop_value_flag arglist "output" with
| Some (arglist, bin_path) ->
(arglist, Executable { bin_path = bin_path; entrypoint = parse_entrypoint entrypoint })
| None ->
Errors.missing_output ())
| None ->
(match pop_bool_flag arglist "--no-entrypoint" with
| Some _ ->
Errors.no_entrypoint_wrong_target ()
| None ->
Errors.missing_entrypoint ())
let get_output (arglist: arglist): (arglist * string) =
match pop_value_flag arglist "output" with
| Some (arglist, output_path) ->
(arglist, output_path)
| None ->
Errors.missing_output ()
let parse_c_target (arglist: arglist): (arglist * target) =
match pop_value_flag arglist "entrypoint" with
| Some (arglist, entrypoint) ->
let (arglist, output_path) = get_output arglist in
(arglist, CStandalone { output_path = output_path; entrypoint = Some (parse_entrypoint entrypoint) })
| None ->
(match pop_bool_flag arglist "no-entrypoint" with
| Some arglist ->
let (arglist, output_path) = get_output arglist in
(arglist, CStandalone { output_path = output_path; entrypoint = None })
| None ->
Errors.missing_entrypoint ())
let parse_target_type (arglist: arglist): (arglist * target) =
match pop_value_flag arglist "target-type" with
| Some (arglist, target_value) ->
(match target_value with
| "exe" ->
parse_executable_target arglist
| "c" ->
Build a standaloine C file .
parse_c_target arglist
| "tc" ->
.
(arglist, TypeCheck)
| _ ->
Errors.unknown_target target_value)
| None ->
parse_executable_target arglist
let parse_compile_command' (arglist: arglist): (arglist * cmd) =
Parse module list
let (arglist, modules): (arglist * string list) = pop_positional arglist in
let modules: mod_source list = List.map parse_mod_source modules in
There must be at least one module .
if ((List.length modules) < 1) then
Errors.missing_module ()
else
let (arglist, target): (arglist * target) = parse_target_type arglist in
(arglist, WholeProgramCompile { modules = modules; target = target; })
let parse_compile_command (arglist: arglist): (arglist * cmd) =
match pop_bool_flag arglist "help" with
| Some arglist ->
(arglist, CompileHelp)
| None ->
parse_compile_command' arglist
let parse (arglist: arglist): cmd =
let args: arg list = arglist_to_list arglist in
match args with
| [BoolFlag "help"] ->
HelpCommand
| [BoolFlag "version"] ->
VersionCommand
| (PositionalArg "compile")::rest ->
let (arglist, cmd) = parse_compile_command (arglist_from_list rest) in
let _ = check_leftovers arglist in
cmd
| _ ->
HelpCommand
|
84415c086a378f3ce4974a55605af52ce3a409bd3b64760c1059ac366abac544 | jaspervdj/blaze-html | SnapFramework.hs | | Example with BlazeHtml running behind the Snap Haskell web framework .
For more information on snap , you can refer to .
--
{-# LANGUAGE OverloadedStrings #-}
module SnapFramework where
import Snap.Http.Server
import Snap.Types
import Text.Blaze.Html5
import qualified Text.Blaze.Html5 as H
import Text.Blaze.Renderer.Utf8 (renderHtml)
-- | A welcome page.
--
welcomePage :: Html
welcomePage = docTypeHtml $ do
H.head $ do
title $ "Snap & BlazeHtml"
body $ do
h1 $ "Snap & BlazeHtml"
p $ "This is an example of BlazeHtml running behind the snap framework."
| Auxiliary function to render a BlazeHtml template to a @Snap ( ) @ type .
--
blazeTemplate :: Html -> Snap ()
blazeTemplate template = do
modifyResponse $ addHeader "Content-Type" "text/html; charset=UTF-8"
writeLBS $ renderHtml template
-- | Always return the welcome page.
--
site :: Snap ()
site = blazeTemplate welcomePage
-- | Snap main function.
--
main :: IO ()
main = httpServe defaultConfig site
| null | https://raw.githubusercontent.com/jaspervdj/blaze-html/1c76db3cf02712569fd22b021fb0a367df028eb4/doc/examples/SnapFramework.hs | haskell |
# LANGUAGE OverloadedStrings #
| A welcome page.
| Always return the welcome page.
| Snap main function.
| | Example with BlazeHtml running behind the Snap Haskell web framework .
For more information on snap , you can refer to .
module SnapFramework where
import Snap.Http.Server
import Snap.Types
import Text.Blaze.Html5
import qualified Text.Blaze.Html5 as H
import Text.Blaze.Renderer.Utf8 (renderHtml)
welcomePage :: Html
welcomePage = docTypeHtml $ do
H.head $ do
title $ "Snap & BlazeHtml"
body $ do
h1 $ "Snap & BlazeHtml"
p $ "This is an example of BlazeHtml running behind the snap framework."
| Auxiliary function to render a BlazeHtml template to a @Snap ( ) @ type .
blazeTemplate :: Html -> Snap ()
blazeTemplate template = do
modifyResponse $ addHeader "Content-Type" "text/html; charset=UTF-8"
writeLBS $ renderHtml template
site :: Snap ()
site = blazeTemplate welcomePage
main :: IO ()
main = httpServe defaultConfig site
|
178393a83ef5ca72c8f20af7eb6562be45123e9192221380c420775f10c90669 | iburzynski/EMURGO_71 | Transformers-2.hs | import Data.Char (isUpper, isLower)
import Control.Monad.Trans.Maybe
import Control.Monad.Reader
" Monad transformers are like onions . At first , they make you cry but then you learn to appreciate
-- them. Like onions, they're also made of layers. Each layer is the functionality of a new monad,
-- you lift monadic functions to get into the inner monads and you have transformerised functions
-- to unwrap each layer."
-- *** Transformers in Practice: User Login ***
-- Reference:
-- Let's say we're creating some login functionality, which needs to read in user input from the
-- terminal and validate the input before logging in.
-- We want to use the functionality of the Maybe monad, but we're already inside another monad (IO).
-- Ugly version without transformers:
main1 :: IO ()
main1 = do
maybeUserName <- readUserName
-- This "stairway to hell" results from the nested monadic contexts...
case maybeUserName of
Nothing -> print "Invalid user name!"
Just uName -> do
maybeEmail <- readEmail
case maybeEmail of
Nothing -> print "Invalid email!"
Just email -> do
maybePassword <- readPassword
case maybePassword of
Nothing -> print "Invalid Password"
Just password -> login uName email password
-- Does this look familiar? It's similar to our naive approach to sequencing computations that
-- return `Maybe` values before we learned how to leverage the "bind" operator.
-- Once we discovered (>>=) (and the even cleaner do-notation that sugars over it), we replaced the
-- stairway with a neat sequence of computations resembling imperative code:
sequenceMaybeComps :: String -> String -> String -> Maybe (String, String, String)
sequenceMaybeComps u e p = do
uName <- validateUserName u
email <- validateEmail e
pw <- validateEmail p
Just (uName, email, pw)
-- equivalent to:
-- validateUserName u >>=
( \u ' - > validateEmail e > > =
-- (\e' -> validatePassword p >>=
( ' - > Just ( u ' , e ' , p ' ) ) ) )
-- When the complexity of our application grows to require computations in multiple composed
-- contexts, we lose the elegant syntax above and are back to manually pattern matching `Maybe`
-- computations within the context of the "base" monad (IO). This is because monads aren't closed
under composition , so we do n't automatically get a new monad when we compose two together . That
-- means we can't use (>>=)/do-notation to chain computations within the "double-context".
--- *** Read functions ***
readUserName :: IO (Maybe String)
readUserName = do
putStrLn "Please enter your username:"
validateUserName <$> getLine
readEmail :: IO (Maybe String)
readEmail = do
putStrLn "Please enter your email:"
validateEmail <$> getLine
readPassword :: IO (Maybe String)
readPassword = do
putStrLn "Please enter your Password:"
validatePassword <$> getLine
-- *** Validators ***
validateUserName :: String -> Maybe String
validateUserName = validateStr (\s -> length s > 5)
validateEmail :: String -> Maybe String
validateEmail = validateStr (\s -> '@' `elem` s && '.' `elem` s)
validatePassword :: String -> Maybe String
validatePassword = validateStr (\s -> all ($ s) [(> 8) . length, any isUpper, any isLower])
validateStr :: (String -> Bool) -> String -> Maybe String
validateStr p s = if p s then Just s else Nothing
* * * Refactor functions from IO ( Maybe String ) to MaybeT IO String * * *
-- newtype MaybeT m a = MaybeT { runMaybeT :: m (Maybe a) }
readUserNameT :: MaybeT IO String
-- ^ type constructor (named after the inner context)
-- ^ "base" (outer) context
-- ^ contextualized data
readUserNameT = MaybeT $ do
putStrLn "Please enter your username:"
validateUserName <$> getLine
readEmailT :: MaybeT IO String
readEmailT = MaybeT $ do
putStrLn "Please enter your email:"
validateEmail <$> getLine
readPasswordT :: MaybeT IO String
readPasswordT = MaybeT $ do
putStrLn "Please enter your password:"
validatePassword <$> getLine
-- *** Refactor main action ***
main2 :: IO ()
1 . Enter the IO context
3 . Apply the MaybeT " getter " to get an ` IO Maybe ` value , which we
-- can then bind into a Maybe value (`maybeCreds`)
2 . Enter our new MaybeT context , where we can now bind the raw strings ( if valid ) via ` < - `
-- instead of `Maybe String` values:
usr <- readUserNameT
email <- readEmailT
pass <- readPasswordT
-- If any of these functions fail, the code will short-circuit and immediately return Nothing
pure (usr, email, pass) -- Promote the credentials back into MaybeT context
4 . We now do a single case - match on the Maybe credential value ...
-- ...and say goodbye stairway to hell!
Nothing -> print "Login failed!"
Just (u, e, p) -> login u e p -- If valid, destructure the Maybe credentials and login
* * * Add a third context with ReaderT * * *
-- Create a type synonym for some cached user data:
type Cookie = (Maybe String, Maybe String, Maybe String)
-- The Reader transformer adds a read-only environment (`r`) to the base monad (`m`):
-- newtype ReaderT r m a = ReaderT { runReaderT :: r -> m a }
readUserNameR :: MaybeT (ReaderT Cookie IO) String
-- ^ the base monad for MaybeT is now itself a transformer, ReaderT
-- ^ the type of the environment data
-- ^ the base monad for ReaderT
readUserNameR = MaybeT $ do
(mCachedUser, _, _) <- ask -- `ask` is a Reader utility function that retrieves the
-- environment data.
case mCachedUser of
Nothing -> do
-- lift takes a value in a base context and lifts it to a transformer context
-- lift :: (Monad m) => m a -> t m a
IO ( ) - > ReaderT Cookie IO ( )
validateUserName <$> lift getLine
ju -> pure ju -- if username is cached, promote it to `ReaderT IO` context
readEmailR :: MaybeT (ReaderT Cookie IO) String
readEmailR = MaybeT $ do
(_, mCachedEmail, _) <- ask
case mCachedEmail of
Nothing -> do
lift $ putStrLn "Please enter your email:"
validateEmail <$> lift getLine
je -> pure je
readPasswordR :: MaybeT (ReaderT Cookie IO) String
readPasswordR = MaybeT $ do
(_, _, mCachedPassword) <- ask
case mCachedPassword of
Nothing -> do
lift $ putStrLn "Please enter your password:"
validatePassword <$> lift getLine
jp -> pure jp
emptyCookie = (Nothing, Nothing, Nothing)
fullCookie = (Just "curriedFunctions", Just "", Just "LoneBurrito")
main3 :: IO ()
main3 = do
maybeCreds <- (runReaderT . runMaybeT $ do
user <- readUserNameR
email <- readEmailR
pass <- readPasswordR
pure (user, email, pass)) fullCookie
case maybeCreds of
Nothing -> print "Login failed!"
Just (u, e, p) -> login u e p
main3' :: IO ()
main3' = do
maybeCreds <- (runReaderT . runMaybeT $ do
user <- readUserNameR
email <- readEmailR
pass <- readPasswordR
pure (user, email, pass)) emptyCookie
case maybeCreds of
Nothing -> print "Login failed!"
Just (u, e, p) -> login u e p
-- Placeholder function for some login operation:
login :: String -> String -> String -> IO ()
login u e p = putStrLn . concat $ ["Logged in as "
, u
, " "
, "("
, e
, ")!"] | null | https://raw.githubusercontent.com/iburzynski/EMURGO_71/c72e8b3259cb33c78867657813d9fcfff12c3e82/Transformers/Transformers-2.hs | haskell | them. Like onions, they're also made of layers. Each layer is the functionality of a new monad,
you lift monadic functions to get into the inner monads and you have transformerised functions
to unwrap each layer."
*** Transformers in Practice: User Login ***
Reference:
Let's say we're creating some login functionality, which needs to read in user input from the
terminal and validate the input before logging in.
We want to use the functionality of the Maybe monad, but we're already inside another monad (IO).
Ugly version without transformers:
This "stairway to hell" results from the nested monadic contexts...
Does this look familiar? It's similar to our naive approach to sequencing computations that
return `Maybe` values before we learned how to leverage the "bind" operator.
Once we discovered (>>=) (and the even cleaner do-notation that sugars over it), we replaced the
stairway with a neat sequence of computations resembling imperative code:
equivalent to:
validateUserName u >>=
(\e' -> validatePassword p >>=
When the complexity of our application grows to require computations in multiple composed
contexts, we lose the elegant syntax above and are back to manually pattern matching `Maybe`
computations within the context of the "base" monad (IO). This is because monads aren't closed
means we can't use (>>=)/do-notation to chain computations within the "double-context".
- *** Read functions ***
*** Validators ***
newtype MaybeT m a = MaybeT { runMaybeT :: m (Maybe a) }
^ type constructor (named after the inner context)
^ "base" (outer) context
^ contextualized data
*** Refactor main action ***
can then bind into a Maybe value (`maybeCreds`)
instead of `Maybe String` values:
If any of these functions fail, the code will short-circuit and immediately return Nothing
Promote the credentials back into MaybeT context
...and say goodbye stairway to hell!
If valid, destructure the Maybe credentials and login
Create a type synonym for some cached user data:
The Reader transformer adds a read-only environment (`r`) to the base monad (`m`):
newtype ReaderT r m a = ReaderT { runReaderT :: r -> m a }
^ the base monad for MaybeT is now itself a transformer, ReaderT
^ the type of the environment data
^ the base monad for ReaderT
`ask` is a Reader utility function that retrieves the
environment data.
lift takes a value in a base context and lifts it to a transformer context
lift :: (Monad m) => m a -> t m a
if username is cached, promote it to `ReaderT IO` context
Placeholder function for some login operation: | import Data.Char (isUpper, isLower)
import Control.Monad.Trans.Maybe
import Control.Monad.Reader
" Monad transformers are like onions . At first , they make you cry but then you learn to appreciate
main1 :: IO ()
main1 = do
maybeUserName <- readUserName
case maybeUserName of
Nothing -> print "Invalid user name!"
Just uName -> do
maybeEmail <- readEmail
case maybeEmail of
Nothing -> print "Invalid email!"
Just email -> do
maybePassword <- readPassword
case maybePassword of
Nothing -> print "Invalid Password"
Just password -> login uName email password
sequenceMaybeComps :: String -> String -> String -> Maybe (String, String, String)
sequenceMaybeComps u e p = do
uName <- validateUserName u
email <- validateEmail e
pw <- validateEmail p
Just (uName, email, pw)
( \u ' - > validateEmail e > > =
( ' - > Just ( u ' , e ' , p ' ) ) ) )
under composition , so we do n't automatically get a new monad when we compose two together . That
readUserName :: IO (Maybe String)
readUserName = do
putStrLn "Please enter your username:"
validateUserName <$> getLine
readEmail :: IO (Maybe String)
readEmail = do
putStrLn "Please enter your email:"
validateEmail <$> getLine
readPassword :: IO (Maybe String)
readPassword = do
putStrLn "Please enter your Password:"
validatePassword <$> getLine
validateUserName :: String -> Maybe String
validateUserName = validateStr (\s -> length s > 5)
validateEmail :: String -> Maybe String
validateEmail = validateStr (\s -> '@' `elem` s && '.' `elem` s)
validatePassword :: String -> Maybe String
validatePassword = validateStr (\s -> all ($ s) [(> 8) . length, any isUpper, any isLower])
validateStr :: (String -> Bool) -> String -> Maybe String
validateStr p s = if p s then Just s else Nothing
* * * Refactor functions from IO ( Maybe String ) to MaybeT IO String * * *
readUserNameT :: MaybeT IO String
readUserNameT = MaybeT $ do
putStrLn "Please enter your username:"
validateUserName <$> getLine
readEmailT :: MaybeT IO String
readEmailT = MaybeT $ do
putStrLn "Please enter your email:"
validateEmail <$> getLine
readPasswordT :: MaybeT IO String
readPasswordT = MaybeT $ do
putStrLn "Please enter your password:"
validatePassword <$> getLine
main2 :: IO ()
1 . Enter the IO context
3 . Apply the MaybeT " getter " to get an ` IO Maybe ` value , which we
2 . Enter our new MaybeT context , where we can now bind the raw strings ( if valid ) via ` < - `
usr <- readUserNameT
email <- readEmailT
pass <- readPasswordT
4 . We now do a single case - match on the Maybe credential value ...
Nothing -> print "Login failed!"
* * * Add a third context with ReaderT * * *
type Cookie = (Maybe String, Maybe String, Maybe String)
readUserNameR :: MaybeT (ReaderT Cookie IO) String
readUserNameR = MaybeT $ do
case mCachedUser of
Nothing -> do
IO ( ) - > ReaderT Cookie IO ( )
validateUserName <$> lift getLine
readEmailR :: MaybeT (ReaderT Cookie IO) String
readEmailR = MaybeT $ do
(_, mCachedEmail, _) <- ask
case mCachedEmail of
Nothing -> do
lift $ putStrLn "Please enter your email:"
validateEmail <$> lift getLine
je -> pure je
readPasswordR :: MaybeT (ReaderT Cookie IO) String
readPasswordR = MaybeT $ do
(_, _, mCachedPassword) <- ask
case mCachedPassword of
Nothing -> do
lift $ putStrLn "Please enter your password:"
validatePassword <$> lift getLine
jp -> pure jp
emptyCookie = (Nothing, Nothing, Nothing)
fullCookie = (Just "curriedFunctions", Just "", Just "LoneBurrito")
main3 :: IO ()
main3 = do
maybeCreds <- (runReaderT . runMaybeT $ do
user <- readUserNameR
email <- readEmailR
pass <- readPasswordR
pure (user, email, pass)) fullCookie
case maybeCreds of
Nothing -> print "Login failed!"
Just (u, e, p) -> login u e p
main3' :: IO ()
main3' = do
maybeCreds <- (runReaderT . runMaybeT $ do
user <- readUserNameR
email <- readEmailR
pass <- readPasswordR
pure (user, email, pass)) emptyCookie
case maybeCreds of
Nothing -> print "Login failed!"
Just (u, e, p) -> login u e p
login :: String -> String -> String -> IO ()
login u e p = putStrLn . concat $ ["Logged in as "
, u
, " "
, "("
, e
, ")!"] |
d5dca81dc9e50e3f2ead6bf29228ea29b842c56e12e610549f0ddcd35911a76c | LeventErkok/sbv | Trans.hs | -----------------------------------------------------------------------------
-- |
-- Module : Data.SBV.Trans
Copyright : ( c )
-- License : BSD3
-- Maintainer:
-- Stability : experimental
--
More generalized alternative to @Data . SBV@ for advanced client use
-----------------------------------------------------------------------------
{-# OPTIONS_GHC -Wall -Werror #-}
module Data.SBV.Trans (
-- * Symbolic types
* * Booleans
SBool
-- *** Boolean values and functions
, sTrue, sFalse, sNot, (.&&), (.||), (.<+>), (.~&), (.~|), (.=>), (.<=>), fromBool, oneIf
-- *** Logical functions
, sAnd, sOr, sAny, sAll
-- ** Bit-vectors
-- *** Unsigned bit-vectors
, SWord8, SWord16, SWord32, SWord64, SWord, WordN
-- *** Signed bit-vectors
, SInt8, SInt16, SInt32, SInt64, SInt, IntN
* * * Converting between fixed - size and arbitrary
, BVIsNonZero, FromSized, ToSized, fromSized, toSized
-- ** Unbounded integers
, SInteger
-- ** Floating point numbers
, SFloat, SDouble, SFloatingPoint
-- ** Algebraic reals
, SReal, AlgReal, sRealToSInteger
* * Characters , Strings and Regular Expressions
, SChar, SString
-- ** Symbolic lists
, SList
-- * Arrays of symbolic values
, SymArray(newArray_, newArray, readArray, writeArray, mergeArrays), SArray
-- * Creating symbolic values
-- ** Single value
, sBool, sWord8, sWord16, sWord32, sWord64, sWord, sInt8, sInt16, sInt32, sInt64, sInt, sInteger, sReal, sFloat, sDouble, sChar, sString, sList
-- ** List of values
, sBools, sWord8s, sWord16s, sWord32s, sWord64s, sWords, sInt8s, sInt16s, sInt32s, sInt64s, sInts, sIntegers, sReals, sFloats, sDoubles, sChars, sStrings, sLists
-- * Symbolic Equality and Comparisons
, EqSymbolic(..), OrdSymbolic(..), Equality(..)
* Conditionals : values
, Mergeable(..), ite, iteLazy
-- * Symbolic integral numbers
, SIntegral
-- * Division and Modulus
, SDivisible(..)
-- * Bit-vector operations
-- ** Conversions
, sFromIntegral
-- ** Shifts and rotates
, sShiftLeft, sShiftRight, sRotateLeft, sBarrelRotateLeft, sRotateRight, sBarrelRotateRight, sSignedShiftArithRight
-- ** Finite bit-vector operations
, SFiniteBits(..)
-- ** Splitting, joining, and extending bit-vectors
, bvExtract, (#), zeroExtend, signExtend, bvDrop, bvTake
-- ** Exponentiation
, (.^)
-- * IEEE-floating point numbers
, IEEEFloating(..), RoundingMode(..), SRoundingMode, nan, infinity, sNaN, sInfinity
-- ** Rounding modes
, sRoundNearestTiesToEven, sRoundNearestTiesToAway, sRoundTowardPositive, sRoundTowardNegative, sRoundTowardZero, sRNE, sRNA, sRTP, sRTN, sRTZ
-- ** Conversion to/from floats
, IEEEFloatConvertible(..)
-- ** Bit-pattern conversions
, sFloatAsSWord32, sWord32AsSFloat
, sDoubleAsSWord64, sWord64AsSDouble
, sFloatingPointAsSWord, sWordAsSFloatingPoint
-- ** Extracting bit patterns from floats
, blastSFloat
, blastSDouble
, blastSFloatingPoint
-- * Enumerations
, mkSymbolicEnumeration
* Uninterpreted sorts , axioms , constants , and functions
, mkUninterpretedSort, Uninterpreted(..), addAxiom
-- * Properties, proofs, and satisfiability
, Predicate, Goal, MProvable(..), Provable, proveWithAll, proveWithAny , satWithAll
, proveConcurrentWithAny, proveConcurrentWithAll, satConcurrentWithAny, satConcurrentWithAll
, satWithAny, generateSMTBenchmark
, solve
-- * Constraints
-- ** General constraints
, constrain, softConstrain
-- ** Constraint Vacuity
-- ** Named constraints and attributes
, namedConstraint, constrainWithAttribute
* * Unsat cores
* * constraints
, pbAtMost, pbAtLeast, pbExactly, pbLe, pbGe, pbEq, pbMutexed, pbStronglyMutexed
-- * Checking safety
, sAssert, isSafe, SExecutable(..)
-- * Quick-checking
, sbvQuickCheck
-- * Optimization
-- ** Multiple optimization goals
, OptimizeStyle(..)
-- ** Objectives
, Objective(..), Metric(..)
-- ** Soft assumptions
, assertWithPenalty , Penalty(..)
-- ** Field extensions
-- | If an optimization results in an infinity/epsilon value, the returned `CV` value will be in the corresponding extension field.
, ExtCV(..), GeneralizedCV(..)
-- * Model extraction
-- ** Inspecting proof results
, ThmResult(..), SatResult(..), AllSatResult(..), SafeResult(..), OptimizeResult(..), SMTResult(..), SMTReasonUnknown(..)
-- ** Observing expressions
, observe
-- ** Programmable model extraction
, SatModel(..), Modelable(..), displayModels, extractModels
, getModelDictionaries, getModelValues, getModelUninterpretedValues
-- * SMT Interface
, SMTConfig(..), Timing(..), SMTLibVersion(..), Solver(..), SMTSolver(..)
-- ** Controlling verbosity
-- ** Solvers
, boolector, bitwuzla, cvc4, cvc5, dReal, yices, z3, mathSAT, abc
-- ** Configurations
, defaultSolverConfig, defaultSMTCfg, sbvCheckSolverInstallation, getAvailableSolvers
, setLogic, Logic(..), setOption, setInfo, setTimeOut
-- ** SBV exceptions
, SBVException(..)
-- * Abstract SBV type
, SBV, HasKind(..), Kind(..), SymVal(..)
, MonadSymbolic(..), Symbolic, SymbolicT, label, output, runSMT, runSMTWith
-- * Module exports
, module Data.Bits
, module Data.Word
, module Data.Int
, module Data.Ratio
) where
import Data.SBV.Core.AlgReals
import Data.SBV.Core.Data
import Data.SBV.Core.Kind
import Data.SBV.Core.Model
import Data.SBV.Core.Floating
import Data.SBV.Core.Sized
import Data.SBV.Core.Symbolic
import Data.SBV.Provers.Prover
import Data.SBV.Client
import Data.SBV.Client.BaseIO (FromSized, ToSized, fromSized, toSized)
import Data.SBV.Utils.TDiff (Timing(..))
import Data.Bits
import Data.Int
import Data.Ratio
import Data.Word
import Data.SBV.SMT.Utils (SBVException(..))
import Data.SBV.Control.Types (SMTReasonUnknown(..), Logic(..))
| null | https://raw.githubusercontent.com/LeventErkok/sbv/ec08aa4274bba469dea67f3c3315d4a7af5144c9/Data/SBV/Trans.hs | haskell | ---------------------------------------------------------------------------
|
Module : Data.SBV.Trans
License : BSD3
Maintainer:
Stability : experimental
---------------------------------------------------------------------------
# OPTIONS_GHC -Wall -Werror #
* Symbolic types
*** Boolean values and functions
*** Logical functions
** Bit-vectors
*** Unsigned bit-vectors
*** Signed bit-vectors
** Unbounded integers
** Floating point numbers
** Algebraic reals
** Symbolic lists
* Arrays of symbolic values
* Creating symbolic values
** Single value
** List of values
* Symbolic Equality and Comparisons
* Symbolic integral numbers
* Division and Modulus
* Bit-vector operations
** Conversions
** Shifts and rotates
** Finite bit-vector operations
** Splitting, joining, and extending bit-vectors
** Exponentiation
* IEEE-floating point numbers
** Rounding modes
** Conversion to/from floats
** Bit-pattern conversions
** Extracting bit patterns from floats
* Enumerations
* Properties, proofs, and satisfiability
* Constraints
** General constraints
** Constraint Vacuity
** Named constraints and attributes
* Checking safety
* Quick-checking
* Optimization
** Multiple optimization goals
** Objectives
** Soft assumptions
** Field extensions
| If an optimization results in an infinity/epsilon value, the returned `CV` value will be in the corresponding extension field.
* Model extraction
** Inspecting proof results
** Observing expressions
** Programmable model extraction
* SMT Interface
** Controlling verbosity
** Solvers
** Configurations
** SBV exceptions
* Abstract SBV type
* Module exports | Copyright : ( c )
More generalized alternative to @Data . SBV@ for advanced client use
module Data.SBV.Trans (
  -- ** Booleans
  SBool
  , sTrue, sFalse, sNot, (.&&), (.||), (.<+>), (.~&), (.~|), (.=>), (.<=>), fromBool, oneIf
  , sAnd, sOr, sAny, sAll
  , SWord8, SWord16, SWord32, SWord64, SWord, WordN
  , SInt8, SInt16, SInt32, SInt64, SInt, IntN
  -- *** Converting between fixed-size and arbitrary bit-vectors
  , BVIsNonZero, FromSized, ToSized, fromSized, toSized
  , SInteger
  , SFloat, SDouble, SFloatingPoint
  , SReal, AlgReal, sRealToSInteger
  -- ** Characters, Strings and Regular Expressions
  , SChar, SString
  , SList
  , SymArray(newArray_, newArray, readArray, writeArray, mergeArrays), SArray
  , sBool, sWord8, sWord16, sWord32, sWord64, sWord, sInt8, sInt16, sInt32, sInt64, sInt, sInteger, sReal, sFloat, sDouble, sChar, sString, sList
  , sBools, sWord8s, sWord16s, sWord32s, sWord64s, sWords, sInt8s, sInt16s, sInt32s, sInt64s, sInts, sIntegers, sReals, sFloats, sDoubles, sChars, sStrings, sLists
  , EqSymbolic(..), OrdSymbolic(..), Equality(..)
  -- * Conditionals: Mergeable values
  , Mergeable(..), ite, iteLazy
  , SIntegral
  , SDivisible(..)
  , sFromIntegral
  , sShiftLeft, sShiftRight, sRotateLeft, sBarrelRotateLeft, sRotateRight, sBarrelRotateRight, sSignedShiftArithRight
  , SFiniteBits(..)
  , bvExtract, (#), zeroExtend, signExtend, bvDrop, bvTake
  , (.^)
  , IEEEFloating(..), RoundingMode(..), SRoundingMode, nan, infinity, sNaN, sInfinity
  , sRoundNearestTiesToEven, sRoundNearestTiesToAway, sRoundTowardPositive, sRoundTowardNegative, sRoundTowardZero, sRNE, sRNA, sRTP, sRTN, sRTZ
  , IEEEFloatConvertible(..)
  , sFloatAsSWord32, sWord32AsSFloat
  , sDoubleAsSWord64, sWord64AsSDouble
  , sFloatingPointAsSWord, sWordAsSFloatingPoint
  , blastSFloat
  , blastSDouble
  , blastSFloatingPoint
  , mkSymbolicEnumeration
  -- * Uninterpreted sorts, axioms, constants, and functions
  , mkUninterpretedSort, Uninterpreted(..), addAxiom
  , Predicate, Goal, MProvable(..), Provable, proveWithAll, proveWithAny , satWithAll
  , proveConcurrentWithAny, proveConcurrentWithAll, satConcurrentWithAny, satConcurrentWithAll
  , satWithAny, generateSMTBenchmark
  , solve
  , constrain, softConstrain
  , namedConstraint, constrainWithAttribute
  -- ** Unsat cores
  -- ** Cardinality constraints
  , pbAtMost, pbAtLeast, pbExactly, pbLe, pbGe, pbEq, pbMutexed, pbStronglyMutexed
  , sAssert, isSafe, SExecutable(..)
  , sbvQuickCheck
  , OptimizeStyle(..)
  , Objective(..), Metric(..)
  , assertWithPenalty , Penalty(..)
  , ExtCV(..), GeneralizedCV(..)
  , ThmResult(..), SatResult(..), AllSatResult(..), SafeResult(..), OptimizeResult(..), SMTResult(..), SMTReasonUnknown(..)
  , observe
  , SatModel(..), Modelable(..), displayModels, extractModels
  , getModelDictionaries, getModelValues, getModelUninterpretedValues
  , SMTConfig(..), Timing(..), SMTLibVersion(..), Solver(..), SMTSolver(..)
  , boolector, bitwuzla, cvc4, cvc5, dReal, yices, z3, mathSAT, abc
  , defaultSolverConfig, defaultSMTCfg, sbvCheckSolverInstallation, getAvailableSolvers
  , setLogic, Logic(..), setOption, setInfo, setTimeOut
  , SBVException(..)
  , SBV, HasKind(..), Kind(..), SymVal(..)
  , MonadSymbolic(..), Symbolic, SymbolicT, label, output, runSMT, runSMTWith
  , module Data.Bits
  , module Data.Word
  , module Data.Int
  , module Data.Ratio
  ) where
import Data.SBV.Core.AlgReals
import Data.SBV.Core.Data
import Data.SBV.Core.Kind
import Data.SBV.Core.Model
import Data.SBV.Core.Floating
import Data.SBV.Core.Sized
import Data.SBV.Core.Symbolic
import Data.SBV.Provers.Prover
import Data.SBV.Client
import Data.SBV.Client.BaseIO (FromSized, ToSized, fromSized, toSized)
import Data.SBV.Utils.TDiff (Timing(..))
import Data.Bits
import Data.Int
import Data.Ratio
import Data.Word
import Data.SBV.SMT.Utils (SBVException(..))
import Data.SBV.Control.Types (SMTReasonUnknown(..), Logic(..))
|
be74ac901e413eceb36a7469489ca673b49b12f3e4677d5368308bcb829f78a1 | typedclojure/typedclojure | lex_env.clj | Copyright ( c ) , contributors .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
(ns ^:no-doc typed.cljc.checker.lex-env
(:require [clojure.core.typed.contract-utils :as con]
[clojure.core.typed.errors :as err]
[clojure.core.typed.util-vars :as vs]
[typed.clj.checker.path-type :as path-type]
[typed.cljc.checker.filter-rep :as fr]
[typed.cljc.checker.object-rep :as obj]
[typed.cljc.checker.path-rep :as pr]
[typed.cljc.checker.type-rep :as r]
[typed.cljc.checker.utils :as u]))
;; Predicate: a lexical environment is a map from local symbols to types.
(def lex-env? (con/hash-c? con/local-sym? r/Type?))
;; Predicate: a proposition set is a set of Filter propositions.
(def prop-set? (con/set-c? fr/Filter?))
;; Predicate: an alias environment maps local symbols to symbolic objects.
(def alias-env? (con/hash-c? con/local-sym? obj/RObject?))
;; PropEnv is the checker's lexical environment record:
;;   l       - map of local symbols to their types (lex-env?)
;;   props   - set of known propositions/filters (prop-set?)
;;   aliases - map of local symbols to the objects they alias (alias-env?)
(u/def-type PropEnv [l props aliases]
  "A lexical environment l, props is a set of known propositions"
  [(lex-env? l)
   (prop-set? props)
   (alias-env? aliases)])
(defn -PropEnv
  "Construct a PropEnv. With no arguments returns an empty environment;
  props is always coerced to a set, and aliases defaults to {}."
  ([] (-PropEnv {} #{} {}))
  ([l props] (-PropEnv l props {}))
  ([l props aliases] (PropEnv-maker l (set props) aliases)))
;; Return a fresh, empty PropEnv (no locals, props, or aliases).
(defn init-lexical-env []
  (-PropEnv))
;; The current lexical environment, read from the dynamic var
;; vs/*lexical-env* (may be nil when no environment is bound --
;; see the validator installed below).
(defn lexical-env []
  vs/*lexical-env*)
(defn PropEnv?-workaround
  "Like PropEnv?, but also accepts any instance whose class name equals
  typed.cljc.checker.lex_env.PropEnv by string comparison."
  [a]
  (or (PropEnv? a)
      ;; work around for recompilation issues with AOT
      (= "typed.cljc.checker.lex_env.PropEnv"
         (.getName (class a)))))
;; hack: override
;; Per the note above, this overrides PropEnv? so it also accepts
;; instances matched by class-name string, for the same AOT
;; recompilation issue handled in PropEnv?-workaround.
(defn PropEnv? [a]
  (or (instance? PropEnv a)
      (= "typed.cljc.checker.lex_env.PropEnv"
         (.getName (class a)))))
;; Ensure vs/*lexical-env* is only ever bound to nil or a PropEnv
;; (checked via PropEnv?-workaround to tolerate AOT recompilation).
(set-validator! #'vs/*lexical-env* (fn [a]
                                     (or (nil? a)
                                         (PropEnv?-workaround a))))
(defn lookup-alias
  "Return the object aliased by local symbol sym in env (defaults to the
  current lexical environment); falls back to an id-path object for sym
  when no alias is recorded."
  [sym & {:keys [env]}]
  {:pre [(con/local-sym? sym)
         ((some-fn nil? PropEnv?) env)]
   :post [(obj/RObject? %)]}
  (let [e (or env (lexical-env))]
    (or (get (:aliases e) sym)
        (obj/-id-path sym))))
(defn lookup-local
  "Return the type of local symbol sym in the current lexical environment,
  following any alias recorded for it, or nil if unknown."
  [sym]
  {:pre [(con/local-sym? sym)]
   :post [((some-fn nil? r/Type?) %)]}
  (let [;; see if sym is an alias for an object;
        ;; if not, (-id-path sym) is returned
        obj (lookup-alias sym)
        [alias-path alias-id] (cond
                                (obj/Path? obj) [(:path obj) (:id obj)]
                                (obj/EmptyObject? obj) [nil sym]
                                :else (err/int-error (str "what is this? " (pr-str obj))))
        _ (assert (pr/path-elems? alias-path))
        _ (assert (fr/name-ref? alias-id))
        lt (get-in (lexical-env) [:l alias-id])]
    ;(prn "lex-env" (lexical-env))
    (some-> lt
            (path-type/path-type alias-path))))
(defn merge-locals
  "Return env with the local-type entries of new merged into its :l map."
  [env new]
  {:pre [(PropEnv?-workaround env)]
   :post [(PropEnv?-workaround %)]}
  (update env :l into new))
;; Evaluate body with the given locals merged into the current
;; lexical environment, by rebinding vs/*lexical-env*.
(defmacro with-locals [locals & body]
  `(binding [vs/*lexical-env* (merge-locals (lexical-env) ~locals)]
     ~@body))
; take an environment and (depending on the new object given) either record
; and alias to an existing local or extend the type env directly.
(defn extend-env
  "Record local id with type t and object o in env: a Path object is
  stored as an alias, an EmptyObject extends the type map directly."
  [env id t o]
  {:pre [(PropEnv?-workaround env)
         (con/local-sym? id)
         (r/Type? t)
         (obj/RObject? o)]
   :post [(PropEnv?-workaround %)]}
  (cond
    ;; no aliasing to add
    (obj/EmptyObject? o)
    (-> env
        (assoc-in [:l id] t))

    (obj/Path? o)
    (-> env
        (assoc-in [:aliases id] o)
        ;; if we have an empty path, add a "normal" entry to our
        ;; type environment. Not sure why this is needed, added
        ;; it to TR because tests were failing.
        (cond-> (empty? (:path o)) (assoc-in [:l (:id o)] t)))

    :else (err/int-error (str "what is this? " (pr-str o)))))
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/2061ee744a021f209ef5ee0a690bb4babddb37c1/typed/clj.checker/src/typed/cljc/checker/lex_env.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
hack: override
(prn "lex-env" (lexical-env))
take an environment and (depending on the new object given) either record
and alias to an existing local or extend the type env directly.
no aliasing to add
if we have an empty path, add a "normal" entry to our
it to TR because tests were failing. | Copyright ( c ) , contributors .
(ns ^:no-doc typed.cljc.checker.lex-env
(:require [clojure.core.typed.contract-utils :as con]
[clojure.core.typed.errors :as err]
[clojure.core.typed.util-vars :as vs]
[typed.clj.checker.path-type :as path-type]
[typed.cljc.checker.filter-rep :as fr]
[typed.cljc.checker.object-rep :as obj]
[typed.cljc.checker.path-rep :as pr]
[typed.cljc.checker.type-rep :as r]
[typed.cljc.checker.utils :as u]))
(def lex-env? (con/hash-c? con/local-sym? r/Type?))
(def prop-set? (con/set-c? fr/Filter?))
(def alias-env? (con/hash-c? con/local-sym? obj/RObject?))
(u/def-type PropEnv [l props aliases]
"A lexical environment l, props is a set of known propositions"
[(lex-env? l)
(prop-set? props)
(alias-env? aliases)])
(defn -PropEnv
([] (-PropEnv {} #{} {}))
([l props]
(-PropEnv l props {}))
([l props aliases]
(PropEnv-maker
l
(set props)
aliases)))
(defn init-lexical-env []
(-PropEnv))
(defn lexical-env []
vs/*lexical-env*)
(defn PropEnv?-workaround
  "Like PropEnv?, but also accepts any instance whose class name equals
  typed.cljc.checker.lex_env.PropEnv by string comparison."
  [a]
  (or (PropEnv? a)
      ;; work around for recompilation issues with AOT
      (= "typed.cljc.checker.lex_env.PropEnv"
         (.getName (class a)))))
(defn PropEnv? [a]
(or (instance? PropEnv a)
(= "typed.cljc.checker.lex_env.PropEnv"
(.getName (class a)))))
(set-validator! #'vs/*lexical-env* (fn [a]
(or (nil? a)
(PropEnv?-workaround a))))
(defn lookup-alias [sym & {:keys [env]}]
{:pre [(con/local-sym? sym)
((some-fn nil? PropEnv?) env)]
:post [(obj/RObject? %)]}
(or (get-in (or env (lexical-env)) [:aliases sym])
(obj/-id-path sym)))
(defn lookup-local
  "Return the type of local symbol sym in the current lexical environment,
  following any alias recorded for it, or nil if unknown."
  [sym]
  {:pre [(con/local-sym? sym)]
   :post [((some-fn nil? r/Type?) %)]}
  (let [;; see if sym is an alias for an object;
        ;; if not, (-id-path sym) is returned
        obj (lookup-alias sym)
        [alias-path alias-id] (cond
                                (obj/Path? obj) [(:path obj) (:id obj)]
                                (obj/EmptyObject? obj) [nil sym]
                                :else (err/int-error (str "what is this? " (pr-str obj))))
        _ (assert (pr/path-elems? alias-path))
        _ (assert (fr/name-ref? alias-id))
        lt (get-in (lexical-env) [:l alias-id])]
    (some-> lt
            (path-type/path-type alias-path))))
(defn merge-locals [env new]
{:pre [(PropEnv?-workaround env)]
:post [(PropEnv?-workaround %)]}
(-> env
(update :l into new)))
(defmacro with-locals [locals & body]
`(binding [vs/*lexical-env* (merge-locals (lexical-env) ~locals)]
~@body))
(defn extend-env
  "Record local id with type t and object o in env: a Path object is
  stored as an alias, an EmptyObject extends the type map directly."
  [env id t o]
  {:pre [(PropEnv?-workaround env)
         (con/local-sym? id)
         (r/Type? t)
         (obj/RObject? o)]
   :post [(PropEnv?-workaround %)]}
  (cond
    ;; no aliasing to add
    (obj/EmptyObject? o)
    (-> env
        (assoc-in [:l id] t))

    (obj/Path? o)
    (-> env
        (assoc-in [:aliases id] o)
        ;; if we have an empty path, add a "normal" entry to our
        ;; type environment. Not sure why this is needed, added
        ;; it to TR because tests were failing.
        (cond-> (empty? (:path o)) (assoc-in [:l (:id o)] t)))

    :else (err/int-error (str "what is this? " (pr-str o)))))
|
3b3f7324af270791e55f4013d39e41d0e181789dd965da9aa778f76058faeacd | heyarne/airsonic-ui | fixtures.cljs | (ns airsonic-ui.components.library.fixtures)
;; this is straight from the response cache, copied from app db after
browsing through the most recently listened to tracks for the first couple
;; of pages
(def responses
{["getAlbumList2" {:type "recent", :size 100, :offset 0}]
{:album
[{:artistId "478"
:name "The OOZ"
:songCount 19
:created "2018-06-02T12:06:11.000Z"
:duration 3975
:artist "King Krule"
:year 2017
:id "857"
:coverArt "al-857"}
{:genre "hip/electronic/jaz/Alternative Hip Hop/ambient"
:artistId "644"
:name "The Unseen"
:songCount 24
:created "2018-07-30T09:20:22.000Z"
:duration 3795
:artist "Quasimoto"
:year 2000
:id "1174"
:coverArt "al-1174"}
{:artistId "15"
:name "The Starkiller"
:songCount 3
:created "2018-01-02T16:27:35.000Z"
:duration 1158
:artist "The Starkiller"
:year 2013
:id "29"
:coverArt "al-29"}
{:genre "Disco"
:artistId "437"
:name "Waffles 004"
:songCount 1
:created "2018-03-08T19:18:24.000Z"
:duration 349
:artist "Waffles"
:year 2016
:id "771"
:coverArt "al-771"}
{:genre "Electronic"
:artistId "49"
:name "Fated"
:songCount 15
:created "2018-03-12T08:36:57.000Z"
:duration 2017
:artist "Nosaj Thing"
:year 2015
:id "81"
:coverArt "al-81"}
{:genre "Electronic"
:artistId "41"
:name "Open Eye Signal (Remixes)"
:songCount 1
:created "2017-06-28T19:11:50.000Z"
:duration 208
:artist "Jon Hopkins"
:year 2013
:id "68"}
{:genre "Soundtrack"
:artistId "684"
:name "Ghost in the Shell"
:songCount 11
:created "2018-10-20T08:35:00.000Z"
:duration 2730
:artist "Kenji Kawai"
:year 1995
:id "1263"
:coverArt "al-1263"}
{:artistId "31"
:name "Drop Me A Line / Your Heart To Me"
:songCount 2
:created "2017-12-30T23:40:03.000Z"
:duration 551
:artist "Social Lovers"
:year 2017
:id "56"
:coverArt "al-56"}
{:id "84"
:name "Unknown Album"
:artist "Nosaj Thing"
:artistId "49"
:songCount 1
:duration 202
:created "2017-06-28T20:08:38.000Z"
:genre "Unknown Genre"}
{:genre "Electronic"
:artistId "49"
:name "Home"
:songCount 11
:created "2017-06-28T20:08:29.000Z"
:duration 2196
:artist "Nosaj Thing"
:year 2013
:id "82"
:coverArt "al-82"}
{:genre "Gothic"
:artistId "403"
:name "Three Imaginary Boys (Deluxe Edition)"
:songCount 34
:created "2017-11-06T20:37:32.000Z"
:duration 6128
:artist "The Cure"
:year 2005
:id "701"
:coverArt "al-701"}
{:genre "hip"
:artistId "236"
:name "Amygdala"
:songCount 13
:created "2018-08-14T20:23:42.000Z"
:duration 4665
:artist "DJ Koze"
:year 2013
:id "403"
:coverArt "al-403"}
{:genre "Downtempo"
:artistId "596"
:name "II"
:songCount 10
:created "2017-06-28T20:15:56.000Z"
:duration 2755
:artist "Raz Ohara and The Odd Orchestra"
:year 2009
:id "1040"}
{:genre "Soul"
:artistId "436"
:name "Overgrown"
:songCount 11
:created "2018-01-02T08:43:32.000Z"
:duration 2569
:artist "James Blake"
:year 2013
:id "770"
:coverArt "al-770"}
{:genre "Electronic"
:artistId "436"
:name "Life Round Here (feat. Chance the Rapper) - Single"
:songCount 1
:created "2017-06-28T19:23:25.000Z"
:duration 188
:artist "James Blake"
:year 2013
:id "768"
:coverArt "al-768"}
{:genre "Hip Hop"
:artistId "290"
:name "C'mon! EP"
:songCount 6
:created "2017-06-28T19:12:11.000Z"
:duration 1163
:artist "Fatoni"
:year 2015
:id "508"
:coverArt "al-508"}
{:genre "electronic"
:artistId "683"
:name "Das Ziel ist im Weg"
:songCount 10
:created "2018-10-17T11:01:24.000Z"
:duration 2130
:artist "Mine"
:year 2016
:id "1262"
:coverArt "al-1262"}
{:genre "Downtempo"
:artistId "479"
:name "Days to Come"
:songCount 18
:created "2017-06-28T18:47:44.000Z"
:duration 4627
:artist "Bonobo"
:year 2006
:id "861"
:coverArt "al-861"}
{:genre "Electronic"
:artistId "206"
:name "Andorra"
:songCount 9
:created "2017-11-25T20:47:26.000Z"
:duration 2581
:artist "Caribou"
:year 2007
:id "336"
:coverArt "al-336"}
{:genre "Electronic"
:artistId "206"
:name "Melody Day"
:songCount 3
:created "2017-11-25T20:49:51.000Z"
:duration 683
:artist "Caribou"
:year 2007
:id "335"
:coverArt "al-335"}
{:id "707"
:name "lassmalaura"
:artist "lassmalaura"
:artistId "406"
:songCount 2
:duration 8241
:created "2017-06-28T18:27:36.000Z"}
{:genre "Idm"
:artistId "597"
:name "Cerulean"
:songCount 12
:created "2017-06-28T18:44:43.000Z"
:duration 2594
:artist "Baths"
:year 2010
:id "1042"
:coverArt "al-1042"}
{:genre "Electronic"
:artistId "64"
:name "Plörre"
:songCount 11
:created "2017-06-28T19:17:41.000Z"
:duration 2495
:artist "Frittenbude"
:year 2010
:id "109"}
{:genre "Electronic"
:artistId "388"
:name "Rongorongo Remixed"
:songCount 11
:created "2017-06-28T19:57:48.000Z"
:duration 3590
:artist "Me Succeeds"
:year 2013
:id "654"
:coverArt "al-654"}
{:genre "Hip-Hop"
:artistId "270"
:name "Über Liebe VLS"
:songCount 1
:created "2017-06-28T18:42:12.000Z"
:duration 169
:artist "Audio88 und Yassin"
:year 2011
:id "469"}
{:genre "Hip-Hop"
:artistId "523"
:name "Über Liebe VLS"
:songCount 1
:created "2017-06-28T20:21:03.000Z"
:duration 275
:artist "Suff Daddy"
:year 2011
:id "940"}
{:id "25"
:name "Nhar, Lee Burton"
:artist "Nhar, Lee Burton"
:artistId "13"
:songCount 1
:duration 482
:created "2017-06-28T18:30:39.000Z"}
{:genre "Indie Dance / Nu Disco"
:artistId "360"
:name "Salto"
:songCount 1
:created "2018-01-02T18:55:06.000Z"
:duration 414
:artist "Martin Heimann"
:year 2016
:id "625"
:coverArt "al-625"}
{:id "273"
:name "[via XLR8R]"
:artist "Lianne La Havas"
:artistId "165"
:coverArt "al-273"
:songCount 1
:duration 307
:created "2017-06-28T19:35:28.000Z"}
{:artistId "249"
:name "Free Downloads"
:songCount 1
:created "2017-06-28T19:10:19.000Z"
:duration 286
:artist "Emancipator feat. Sigur Rós vs. Mobb Deep"
:year 2011
:id "419"
:coverArt "al-419"}
{:genre "Electronic"
:artistId "64"
:name "Und täglich grüßt das Murmeltier"
:songCount 3
:created "2017-12-31T09:03:39.000Z"
:duration 652
:artist "Frittenbude"
:year 2010
:id "107"}
{:genre "Electronic"
:artistId "206"
:name "Start Breaking My Heart"
:songCount 20
:created "2017-11-25T20:47:51.000Z"
:duration 6197
:artist "Caribou"
:year 2006
:id "338"
:coverArt "al-338"}
{:genre "Electronic"
:artistId "206"
:name "The Milk Of Human Kindness"
:songCount 11
:created "2017-11-25T20:41:58.000Z"
:duration 2412
:artist "Caribou"
:year 2005
:id "337"
:coverArt "al-337"}
{:genre "electronic"
:artistId "424"
:name "Permanent Vacation 3"
:songCount 47
:created "2017-06-28T20:29:36.000Z"
:duration 18682
:artist "Various Artists"
:year 2014
:id "747"
:coverArt "al-747"}
{:genre "Electronic"
:artistId "162"
:name "Music Has the Right to Children"
:songCount 18
:created "2017-06-28T18:46:28.000Z"
:duration 4226
:artist "Boards of Canada"
:year 2004
:id "270"
:coverArt "al-270"}
{:id "276"
:name "Nostalgia 77"
:artist "Nostalgia 77"
:artistId "168"
:songCount 1
:duration 277
:created "2017-06-28T18:31:28.000Z"}
{:genre "Electronic"
:artistId "597"
:name "Obsidian"
:songCount 10
:created "2017-06-28T18:43:58.000Z"
:duration 2596
:artist "Baths"
:year 2013
:id "1041"
:coverArt "al-1041"}
{:id "954"
:name "[via XLR8R.com]"
:artist "Burial"
:artistId "530"
:coverArt "al-954"
:songCount 1
:duration 297
:created "2017-06-28T18:49:04.000Z"}
{:genre "Uk Garage"
:artistId "530"
:name "Kindred EP"
:songCount 3
:created "2017-06-28T18:49:36.000Z"
:duration 1839
:artist "Burial"
:year 2012
:id "953"
:coverArt "al-953"}
{:genre "Unknown"
:artistId "430"
:name "Rampue"
:songCount 6
:created "2017-06-28T18:34:00.000Z"
:duration 16433
:artist "Rampue"
:year 2012
:id "753"
:coverArt "al-753"}
{:artistId "96"
:name "www.soundcloud.com/rampue"
:songCount 1
:created "2017-06-28T19:31:41.000Z"
:duration 424
:artist "Klima"
:year 2013
:id "166"
:coverArt "al-166"}
{:artistId "463"
:name "soundcloud.com/rampue"
:songCount 1
:created "2017-06-28T19:35:30.000Z"
:duration 523
:artist "Leonard Cohen"
:year 2014
:id "831"
:coverArt "al-831"}
{:artistId "463"
:name "The Future"
:songCount 9
:created "2018-01-16T11:14:41.000Z"
:duration 3579
:artist "Leonard Cohen"
:year 2012
:id "821"
:coverArt "al-821"}
{:id "1001"
:name "Lonski & Classen"
:artist "Lonski & Classen"
:artistId "566"
:coverArt "al-1001"
:songCount 1
:duration 248
:created "2017-06-28T20:07:24.000Z"}
{:genre "Podcast"
:artistId "199"
:name "Waterkant Souvenirs Podcast"
:songCount 1
:created "2017-06-28T20:00:25.000Z"
:duration 5341
:artist "Mira"
:year 2012
:id "325"}
{:id "324"
:name "Familiar Forest Festival 2012"
:artist "Mira"
:artistId "199"
:songCount 1
:duration 6695
:created "2017-06-28T20:00:35.000Z"
:year 2012}
{:genre "Ambient"
:artistId "188"
:name "We're New Here"
:songCount 13
:created "2017-06-28T19:18:06.000Z"
:duration 2135
:artist "Gil Scott-Heron and Jamie xx"
:year 2011
:id "310"
:coverArt "al-310"}
{:genre "Gothic"
:artistId "403"
:name "Galore : The Singles 87 - 97"
:songCount 18
:created "2017-11-06T20:51:35.000Z"
:duration 4369
:artist "The Cure"
:year 1997
:id "684"
:coverArt "al-684"}
{:genre "IDM"
:artistId "333"
:name "Remixes Compiled"
:songCount 12
:created "2017-06-28T20:22:43.000Z"
:duration 3233
:artist "Telefon Tel Aviv"
:year 2007
:id "723"
:coverArt "al-723"}
{:artistId "230"
:name "Ufordian Edits"
:songCount 1
:created "2018-02-19T22:55:59.000Z"
:duration 331
:artist "Peter Power"
:year 2015
:id "393"
:coverArt "al-393"}
{:genre "Other"
:artistId "528"
:name "Dream Runner EP"
:songCount 6
:created "2017-06-28T18:39:24.000Z"
:duration 899
:artist "Annu"
:year 2009
:id "948"}
{:genre "Techno"
:artistId "75"
:name "Unknown"
:songCount 8
:created "2017-06-28T20:17:47.000Z"
:duration 2841
:artist "Saschienne"
:year 2012
:id "124"
:coverArt "al-124"}
{:genre "Nintendocore"
:artistId "306"
:name "Nach der Kippe Pogo!?"
:songCount 11
:created "2017-06-28T18:40:09.000Z"
:duration 1508
:artist "Antitainment"
:year 2007
:id "532"
:coverArt "al-532"}
{:genre "Electronic"
:artistId "206"
:name "Swim"
:songCount 9
:created "2017-11-25T20:06:58.000Z"
:duration 2596
:artist "Caribou"
:year 2010
:id "339"
:coverArt "al-339"}
{:genre "trance"
:artistId "117"
:name "Nymphs III"
:songCount 2
:created "2017-06-28T20:04:17.000Z"
:duration 1080
:artist "Nicolas Jaar"
:year 2015
:id "201"}
{:genre "Gothic"
:artistId "403"
:name "Wish"
:songCount 12
:created "2018-01-02T14:29:04.000Z"
:duration 3976
:artist "The Cure"
:year 1992
:id "685"
:coverArt "al-685"}
{:genre "Gothic"
:artistId "403"
:name "Show (Live)"
:songCount 18
:created "2018-01-02T14:30:03.000Z"
:duration 5316
:artist "The Cure"
:id "698"
:coverArt "al-698"}
{:genre "Gothic"
:artistId "403"
:name "Mixed Up"
:songCount 11
:created "2018-01-02T14:29:43.000Z"
:duration 4260
:artist "The Cure"
:year 1990
:id "692"
:coverArt "al-692"}
{:id "1257"
:name "Saal"
:artist "Serengeti"
:artistId "678"
:songCount 13
:duration 2437
:created "2018-09-20T17:02:50.000Z"
:year 2013}
{:genre "Hip Hop"
:artistId "204"
:name "Leaders Of The Brew School"
:songCount 16
:created "2017-06-28T18:45:16.000Z"
:duration 2214
:artist "Betty Ford Boys"
:year 2013
:id "331"}
{:id "202"
:name "Sirens"
:artist "Nicolas Jaar"
:artistId "117"
:songCount 7
:duration 2841
:created "2017-06-28T20:04:34.000Z"
:year 2016}
{:genre "techno"
:artistId "682"
:name "Piñata"
:songCount 21
:created "2018-10-09T15:30:48.000Z"
:duration 3963
:artist "Freddie Gibbs & Madlib"
:year 2014
:id "1261"
:coverArt "al-1261"}
{:genre "electronic"
:artistId "681"
:name "We Must Become the Pitiless Censors of Ourselves"
:songCount 11
:created "2018-10-08T17:21:47.000Z"
:duration 1916
:artist "John Maus"
:year 2011
:id "1260"
:coverArt "al-1260"}
{:artistId "514"
:name "Time"
:songCount 1
:created "2017-07-24T13:19:05.000Z"
:duration 247
:artist "Lokke"
:year 2015
:id "923"
:coverArt "al-923"}
{:genre "jazz"
:artistId "680"
:name "These Things Take Time"
:songCount 13
:created "2018-10-08T17:21:09.000Z"
:duration 3013
:artist "Molly Nilsson"
:year 2008
:id "1259"
:coverArt "al-1259"}
{:artistId "463"
:name "Songs of Love and Hate"
:songCount 4
:created "2018-01-16T11:13:54.000Z"
:duration 1273
:artist "Leonard Cohen"
:year 1970
:id "829"
:coverArt "al-829"}
{:artistId "187"
:name "Vacation EP"
:songCount 7
:created "2017-06-28T20:19:17.000Z"
:duration 1902
:artist "Shlohmo"
:year 2012
:id "305"
:coverArt "al-305"}
{:genre "Electronic"
:artistId "187"
:name "Vacation (Remixes)"
:songCount 6
:created "2017-06-28T20:19:23.000Z"
:duration 3559
:artist "Shlohmo"
:year 2012
:id "303"}
{:genre "WeDidIt"
:artistId "302"
:name "Salvation Remixes"
:songCount 3
:created "2017-06-28T20:14:04.000Z"
:duration 739
:artist "Purple"
:year 2013
:id "525"
:coverArt "al-525"}
{:genre "Alternative Rock / Indie Rock"
:artistId "16"
:name "Sleeping With Ghosts"
:songCount 22
:created "2017-11-06T20:39:23.000Z"
:duration 5232
:artist "Placebo"
:year 2003
:id "38"
:coverArt "al-38"}
{:genre "Funk/Hip-Hop"
:artistId "198"
:name "Looking For the Perfect Beat"
:songCount 13
:created "2017-06-28T18:36:47.000Z"
:duration 4521
:artist "Afrika Bambaataa"
:year 2001
:id "323"
:coverArt "al-323"}
{:artistId "103"
:name "edits & cuts"
:songCount 14
:created "2017-06-28T19:37:20.000Z"
:duration 3550
:artist "M.Rux"
:year 2014
:id "182"
:coverArt "al-182"}
{:genre "Techno"
:artistId "117"
:name "Marks / Angles"
:songCount 3
:created "2017-06-28T20:03:46.000Z"
:duration 1000
:artist "Nicolas Jaar"
:year 2010
:id "196"}
{:genre "Electronic"
:artistId "73"
:name "Don't Break My Love EP"
:songCount 2
:created "2017-06-28T20:05:16.000Z"
:duration 673
:artist "Nicolas Jaar & Theatre Roosevelt"
:year 2011
:id "122"
:coverArt "al-122"}
{:genre "Electronic"
:artistId "233"
:name "Mother Earth's Plantasia"
:songCount 10
:created "2018-05-28T21:31:55.000Z"
:duration 1837
:artist "Mort Garson"
:year 1976
:id "397"
:coverArt "al-397"}
{:genre "Psychedelic Rock"
:artistId "424"
:name
"Nuggets: Original Artyfacts From the First Psychedelic Era, 1965-1968"
:songCount 27
:created "2018-02-21T12:01:38.000Z"
:duration 4614
:artist "Various Artists"
:year 1998
:id "743"
:coverArt "al-743"}
{:genre "Psychedelic Rock"
:artistId "37"
:name "Phluph"
:songCount 10
:created "2018-03-05T16:31:46.000Z"
:duration 2182
:artist "Phluph"
:year 2001
:id "64"
:coverArt "al-64"}
{:genre "Rock"
:artistId "305"
:name "The Best of Talking Heads (Remastered)"
:songCount 18
:created "2018-01-22T11:00:50.000Z"
:duration 4618
:artist "Talking Heads"
:year 2004
:id "529"
:coverArt "al-529"}
{:genre "Electronic"
:artistId "50"
:name "Divide And Exit"
:songCount 14
:created "2018-01-21T14:47:59.000Z"
:duration 2417
:artist "Sleaford Mods"
:year 2014
:id "86"
:coverArt "al-86"}
{:genre "electronic"
:artistId "349"
:name "Fade to Grey: The Best of Visage"
:songCount 12
:created "2018-08-29T13:01:26.000Z"
:duration 2757
:artist "Visage"
:year 1993
:id "1234"}
{:genre "electronic"
:artistId "334"
:name "Hounds of Love"
:songCount 18
:created "2018-08-29T13:00:32.000Z"
:duration 4419
:artist "Kate Bush"
:year 1997
:id "1215"
:coverArt "al-1215"}
{:genre "Psychedelic"
:artistId "424"
:name
"Forge Your Own Chains: Heavy Psychedelic Ballads and Dirges 1968-1974"
:songCount 15
:created "2018-01-27T12:23:47.000Z"
:duration 4241
:artist "Various Artists"
:id "742"
:coverArt "al-742"}
{:genre "Live Archive"
:artistId "141"
:name "2017-08-28 Rough Trade NYC, Brooklyn, NY"
:songCount 4
:created "2018-01-19T23:07:20.000Z"
:duration 2483
:artist "Sunburned Hand of the Man"
:year 2017
:id "242"
:coverArt "al-242"}
{:genre "electronic"
:artistId "236"
:name "Knock Knock"
:songCount 16
:created "2018-06-09T23:04:20.000Z"
:duration 4710
:artist "DJ Koze"
:year 2018
:id "401"
:coverArt "al-401"}
{:genre "Nintendocore"
:artistId "306"
:name "Gymnasiastik mit Antitainment"
:songCount 6
:created "2017-06-28T18:39:58.000Z"
:duration 795
:artist "Antitainment"
:year 2004
:id "533"}
{:genre "Electronic"
:artistId "584"
:name "Amok"
:songCount 9
:created "2017-06-28T18:41:30.000Z"
:duration 2681
:artist "Atoms for Peace"
:year 2013
:id "1023"
:coverArt "al-1023"}
{:artistId "26"
:name "TamponTango I"
:songCount 3
:created "2017-11-23T23:18:43.000Z"
:duration 851
:artist "Diederdas"
:year 2017
:id "51"
:coverArt "al-51"}
{:genre "electronic"
:artistId "679"
:name "Heaven and Earth"
:songCount 16
:created "2018-09-20T22:07:23.000Z"
:duration 8672
:artist "Kamasi Washington"
:year 2018
:id "1258"
:coverArt "al-1258"}
{:genre "rhy"
:artistId "661"
:name "Yawn Zen"
:songCount 12
:created "2018-08-21T21:36:43.000Z"
:duration 1883
:artist "Mndsgn"
:year 2014
:id "1200"
:coverArt "al-1200"}
{:genre "Rap"
:artistId "677"
:name "Elephant Eyelash"
:songCount 12
:created "2018-09-20T17:02:08.000Z"
:duration 2478
:artist "Why?"
:year 2005
:id "1256"}
{:genre "Electronic"
:artistId "41"
:name "Immunity"
:songCount 8
:created "2017-06-28T19:28:24.000Z"
:duration 3604
:artist "Jon Hopkins"
:year 2013
:id "104"
:coverArt "al-104"}
{:genre "IDM / Trip-Hop / Experimental"
:artistId "454"
:name "New Energy"
:songCount 14
:created "2017-11-25T19:44:56.000Z"
:duration 3381
:artist "Four Tet"
:year 2017
:id "800"
:coverArt "al-800"}
{:genre "Electronic"
:artistId "633"
:name "ƒIN (Special Edition)"
:songCount 20
:created "2017-06-28T19:26:41.000Z"
:duration 5822
:artist "John Talabot"
:year 2012
:id "1159"}
{:artistId "412"
:name "A Moot Point"
:songCount 2
:created "2017-06-28T18:32:24.000Z"
:duration 857
:artist "Pional"
:year 2010
:id "719"
:coverArt "al-719"}
{:id "740"
:name "KR Family EP, Pt. 1"
:artist "Peter Power"
:artistId "230"
:coverArt "al-740"
:songCount 3
:duration 1333
:created "2017-06-28T20:31:06.000Z"}
{:genre "House"
:artistId "482"
:name "Busy Days For Fools"
:songCount 11
:created "2017-06-28T19:35:05.000Z"
:duration 3238
:artist "Lee Burton"
:year 2012
:id "866"
:coverArt "al-866"}
{:id "851"
:name "Ry & Frank Wiedemann"
:artist "Ry & Frank Wiedemann"
:artistId "472"
:songCount 1
:duration 485
:created "2017-06-28T18:34:23.000Z"}
{:genre "Electronic"
:artistId "58"
:name "Deep Cuts"
:songCount 17
:created "2017-12-22T08:21:19.000Z"
:duration 3321
:artist "The Knife"
:year 2003
:id "96"
:coverArt "al-96"}
{:artistId "125"
:name "VIA Remixes"
:songCount 1
:created "2017-06-28T18:27:59.000Z"
:duration 362
:artist "Andi Otto"
:year 2017
:id "211"
:coverArt "al-211"}
{:artistId "626"
:name "Hummingbird / Milk & Honey"
:songCount 2
:created "2017-11-23T21:27:00.000Z"
:duration 303
:artist "Luca Nieri"
:year 2016
:id "1150"
:coverArt "al-1150"}]}
["getAlbumList2" {:type "recent", :size 100, :offset 20}]
{:album
[{:id "707"
:name "lassmalaura"
:artist "lassmalaura"
:artistId "406"
:songCount 2
:duration 8241
:created "2017-06-28T18:27:36.000Z"}
{:genre "Idm"
:artistId "597"
:name "Cerulean"
:songCount 12
:created "2017-06-28T18:44:43.000Z"
:duration 2594
:artist "Baths"
:year 2010
:id "1042"
:coverArt "al-1042"}
{:genre "Electronic"
:artistId "64"
:name "Plörre"
:songCount 11
:created "2017-06-28T19:17:41.000Z"
:duration 2495
:artist "Frittenbude"
:year 2010
:id "109"}
{:genre "Electronic"
:artistId "388"
:name "Rongorongo Remixed"
:songCount 11
:created "2017-06-28T19:57:48.000Z"
:duration 3590
:artist "Me Succeeds"
:year 2013
:id "654"
:coverArt "al-654"}
{:genre "Hip-Hop"
:artistId "270"
:name "Über Liebe VLS"
:songCount 1
:created "2017-06-28T18:42:12.000Z"
:duration 169
:artist "Audio88 und Yassin"
:year 2011
:id "469"}
{:genre "Hip-Hop"
:artistId "523"
:name "Über Liebe VLS"
:songCount 1
:created "2017-06-28T20:21:03.000Z"
:duration 275
:artist "Suff Daddy"
:year 2011
:id "940"}
{:id "25"
:name "Nhar, Lee Burton"
:artist "Nhar, Lee Burton"
:artistId "13"
:songCount 1
:duration 482
:created "2017-06-28T18:30:39.000Z"}
{:genre "Indie Dance / Nu Disco"
:artistId "360"
:name "Salto"
:songCount 1
:created "2018-01-02T18:55:06.000Z"
:duration 414
:artist "Martin Heimann"
:year 2016
:id "625"
:coverArt "al-625"}
{:id "273"
:name "[via XLR8R]"
:artist "Lianne La Havas"
:artistId "165"
:coverArt "al-273"
:songCount 1
:duration 307
:created "2017-06-28T19:35:28.000Z"}
{:artistId "249"
:name "Free Downloads"
:songCount 1
:created "2017-06-28T19:10:19.000Z"
:duration 286
:artist "Emancipator feat. Sigur Rós vs. Mobb Deep"
:year 2011
:id "419"
:coverArt "al-419"}
{:genre "Electronic"
:artistId "64"
:name "Und täglich grüßt das Murmeltier"
:songCount 3
:created "2017-12-31T09:03:39.000Z"
:duration 652
:artist "Frittenbude"
:year 2010
:id "107"}
{:genre "Electronic"
:artistId "206"
:name "Start Breaking My Heart"
:songCount 20
:created "2017-11-25T20:47:51.000Z"
:duration 6197
:artist "Caribou"
:year 2006
:id "338"
:coverArt "al-338"}
{:genre "Electronic"
:artistId "206"
:name "The Milk Of Human Kindness"
:songCount 11
:created "2017-11-25T20:41:58.000Z"
:duration 2412
:artist "Caribou"
:year 2005
:id "337"
:coverArt "al-337"}
{:genre "electronic"
:artistId "424"
:name "Permanent Vacation 3"
:songCount 47
:created "2017-06-28T20:29:36.000Z"
:duration 18682
:artist "Various Artists"
:year 2014
:id "747"
:coverArt "al-747"}
{:genre "Electronic"
:artistId "162"
:name "Music Has the Right to Children"
:songCount 18
:created "2017-06-28T18:46:28.000Z"
:duration 4226
:artist "Boards of Canada"
:year 2004
:id "270"
:coverArt "al-270"}
{:id "276"
:name "Nostalgia 77"
:artist "Nostalgia 77"
:artistId "168"
:songCount 1
:duration 277
:created "2017-06-28T18:31:28.000Z"}
{:genre "Electronic"
:artistId "597"
:name "Obsidian"
:songCount 10
:created "2017-06-28T18:43:58.000Z"
:duration 2596
:artist "Baths"
:year 2013
:id "1041"
:coverArt "al-1041"}
{:id "954"
:name "[via XLR8R.com]"
:artist "Burial"
:artistId "530"
:coverArt "al-954"
:songCount 1
:duration 297
:created "2017-06-28T18:49:04.000Z"}
{:genre "Uk Garage"
:artistId "530"
:name "Kindred EP"
:songCount 3
:created "2017-06-28T18:49:36.000Z"
:duration 1839
:artist "Burial"
:year 2012
:id "953"
:coverArt "al-953"}
{:genre "Unknown"
:artistId "430"
:name "Rampue"
:songCount 6
:created "2017-06-28T18:34:00.000Z"
:duration 16433
:artist "Rampue"
:year 2012
:id "753"
:coverArt "al-753"}
{:artistId "96"
:name "www.soundcloud.com/rampue"
:songCount 1
:created "2017-06-28T19:31:41.000Z"
:duration 424
:artist "Klima"
:year 2013
:id "166"
:coverArt "al-166"}
{:artistId "463"
:name "soundcloud.com/rampue"
:songCount 1
:created "2017-06-28T19:35:30.000Z"
:duration 523
:artist "Leonard Cohen"
:year 2014
:id "831"
:coverArt "al-831"}
{:artistId "463"
:name "The Future"
:songCount 9
:created "2018-01-16T11:14:41.000Z"
:duration 3579
:artist "Leonard Cohen"
:year 2012
:id "821"
:coverArt "al-821"}
{:id "1001"
:name "Lonski & Classen"
:artist "Lonski & Classen"
:artistId "566"
:coverArt "al-1001"
:songCount 1
:duration 248
:created "2017-06-28T20:07:24.000Z"}
{:genre "Podcast"
:artistId "199"
:name "Waterkant Souvenirs Podcast"
:songCount 1
:created "2017-06-28T20:00:25.000Z"
:duration 5341
:artist "Mira"
:year 2012
:id "325"}
{:id "324"
:name "Familiar Forest Festival 2012"
:artist "Mira"
:artistId "199"
:songCount 1
:duration 6695
:created "2017-06-28T20:00:35.000Z"
:year 2012}
{:genre "Ambient"
:artistId "188"
:name "We're New Here"
:songCount 13
:created "2017-06-28T19:18:06.000Z"
:duration 2135
:artist "Gil Scott-Heron and Jamie xx"
:year 2011
:id "310"
:coverArt "al-310"}
{:genre "Gothic"
:artistId "403"
:name "Galore : The Singles 87 - 97"
:songCount 18
:created "2017-11-06T20:51:35.000Z"
:duration 4369
:artist "The Cure"
:year 1997
:id "684"
:coverArt "al-684"}
{:genre "IDM"
:artistId "333"
:name "Remixes Compiled"
:songCount 12
:created "2017-06-28T20:22:43.000Z"
:duration 3233
:artist "Telefon Tel Aviv"
:year 2007
:id "723"
:coverArt "al-723"}
{:artistId "230"
:name "Ufordian Edits"
:songCount 1
:created "2018-02-19T22:55:59.000Z"
:duration 331
:artist "Peter Power"
:year 2015
:id "393"
:coverArt "al-393"}
{:genre "Other"
:artistId "528"
:name "Dream Runner EP"
:songCount 6
:created "2017-06-28T18:39:24.000Z"
:duration 899
:artist "Annu"
:year 2009
:id "948"}
{:genre "Techno"
:artistId "75"
:name "Unknown"
:songCount 8
:created "2017-06-28T20:17:47.000Z"
:duration 2841
:artist "Saschienne"
:year 2012
:id "124"
:coverArt "al-124"}
{:genre "Nintendocore"
:artistId "306"
:name "Nach der Kippe Pogo!?"
:songCount 11
:created "2017-06-28T18:40:09.000Z"
:duration 1508
:artist "Antitainment"
:year 2007
:id "532"
:coverArt "al-532"}
{:genre "Electronic"
:artistId "206"
:name "Swim"
:songCount 9
:created "2017-11-25T20:06:58.000Z"
:duration 2596
:artist "Caribou"
:year 2010
:id "339"
:coverArt "al-339"}
{:genre "trance"
:artistId "117"
:name "Nymphs III"
:songCount 2
:created "2017-06-28T20:04:17.000Z"
:duration 1080
:artist "Nicolas Jaar"
:year 2015
:id "201"}
{:genre "Gothic"
:artistId "403"
:name "Wish"
:songCount 12
:created "2018-01-02T14:29:04.000Z"
:duration 3976
:artist "The Cure"
:year 1992
:id "685"
:coverArt "al-685"}
{:genre "Gothic"
:artistId "403"
:name "Show (Live)"
:songCount 18
:created "2018-01-02T14:30:03.000Z"
:duration 5316
:artist "The Cure"
:id "698"
:coverArt "al-698"}
{:genre "Gothic"
:artistId "403"
:name "Mixed Up"
:songCount 11
:created "2018-01-02T14:29:43.000Z"
:duration 4260
:artist "The Cure"
:year 1990
:id "692"
:coverArt "al-692"}
{:id "1257"
:name "Saal"
:artist "Serengeti"
:artistId "678"
:songCount 13
:duration 2437
:created "2018-09-20T17:02:50.000Z"
:year 2013}
{:genre "Hip Hop"
:artistId "204"
:name "Leaders Of The Brew School"
:songCount 16
:created "2017-06-28T18:45:16.000Z"
:duration 2214
:artist "Betty Ford Boys"
:year 2013
:id "331"}
{:id "202"
:name "Sirens"
:artist "Nicolas Jaar"
:artistId "117"
:songCount 7
:duration 2841
:created "2017-06-28T20:04:34.000Z"
:year 2016}
{:genre "techno"
:artistId "682"
:name "Piñata"
:songCount 21
:created "2018-10-09T15:30:48.000Z"
:duration 3963
:artist "Freddie Gibbs & Madlib"
:year 2014
:id "1261"
:coverArt "al-1261"}
{:genre "electronic"
:artistId "681"
:name "We Must Become the Pitiless Censors of Ourselves"
:songCount 11
:created "2018-10-08T17:21:47.000Z"
:duration 1916
:artist "John Maus"
:year 2011
:id "1260"
:coverArt "al-1260"}
{:artistId "514"
:name "Time"
:songCount 1
:created "2017-07-24T13:19:05.000Z"
:duration 247
:artist "Lokke"
:year 2015
:id "923"
:coverArt "al-923"}
{:genre "jazz"
:artistId "680"
:name "These Things Take Time"
:songCount 13
:created "2018-10-08T17:21:09.000Z"
:duration 3013
:artist "Molly Nilsson"
:year 2008
:id "1259"
:coverArt "al-1259"}
{:artistId "463"
:name "Songs of Love and Hate"
:songCount 4
:created "2018-01-16T11:13:54.000Z"
:duration 1273
:artist "Leonard Cohen"
:year 1970
:id "829"
:coverArt "al-829"}
{:artistId "187"
:name "Vacation EP"
:songCount 7
:created "2017-06-28T20:19:17.000Z"
:duration 1902
:artist "Shlohmo"
:year 2012
:id "305"
:coverArt "al-305"}
{:genre "Electronic"
:artistId "187"
:name "Vacation (Remixes)"
:songCount 6
:created "2017-06-28T20:19:23.000Z"
:duration 3559
:artist "Shlohmo"
:year 2012
:id "303"}
{:genre "WeDidIt"
:artistId "302"
:name "Salvation Remixes"
:songCount 3
:created "2017-06-28T20:14:04.000Z"
:duration 739
:artist "Purple"
:year 2013
:id "525"
:coverArt "al-525"}
{:genre "Alternative Rock / Indie Rock"
:artistId "16"
:name "Sleeping With Ghosts"
:songCount 22
:created "2017-11-06T20:39:23.000Z"
:duration 5232
:artist "Placebo"
:year 2003
:id "38"
:coverArt "al-38"}
{:genre "Funk/Hip-Hop"
:artistId "198"
:name "Looking For the Perfect Beat"
:songCount 13
:created "2017-06-28T18:36:47.000Z"
:duration 4521
:artist "Afrika Bambaataa"
:year 2001
:id "323"
:coverArt "al-323"}
{:artistId "103"
:name "edits & cuts"
:songCount 14
:created "2017-06-28T19:37:20.000Z"
:duration 3550
:artist "M.Rux"
:year 2014
:id "182"
:coverArt "al-182"}
{:genre "Techno"
:artistId "117"
:name "Marks / Angles"
:songCount 3
:created "2017-06-28T20:03:46.000Z"
:duration 1000
:artist "Nicolas Jaar"
:year 2010
:id "196"}
{:genre "Electronic"
:artistId "73"
:name "Don't Break My Love EP"
:songCount 2
:created "2017-06-28T20:05:16.000Z"
:duration 673
:artist "Nicolas Jaar & Theatre Roosevelt"
:year 2011
:id "122"
:coverArt "al-122"}
{:genre "Electronic"
:artistId "233"
:name "Mother Earth's Plantasia"
:songCount 10
:created "2018-05-28T21:31:55.000Z"
:duration 1837
:artist "Mort Garson"
:year 1976
:id "397"
:coverArt "al-397"}
{:genre "Psychedelic Rock"
:artistId "424"
:name
"Nuggets: Original Artyfacts From the First Psychedelic Era, 1965-1968"
:songCount 27
:created "2018-02-21T12:01:38.000Z"
:duration 4614
:artist "Various Artists"
:year 1998
:id "743"
:coverArt "al-743"}
{:genre "Psychedelic Rock"
:artistId "37"
:name "Phluph"
:songCount 10
:created "2018-03-05T16:31:46.000Z"
:duration 2182
:artist "Phluph"
:year 2001
:id "64"
:coverArt "al-64"}
{:genre "Rock"
:artistId "305"
:name "The Best of Talking Heads (Remastered)"
:songCount 18
:created "2018-01-22T11:00:50.000Z"
:duration 4618
:artist "Talking Heads"
:year 2004
:id "529"
:coverArt "al-529"}
{:genre "Electronic"
:artistId "50"
:name "Divide And Exit"
:songCount 14
:created "2018-01-21T14:47:59.000Z"
:duration 2417
:artist "Sleaford Mods"
:year 2014
:id "86"
:coverArt "al-86"}
{:genre "electronic"
:artistId "349"
:name "Fade to Grey: The Best of Visage"
:songCount 12
:created "2018-08-29T13:01:26.000Z"
:duration 2757
:artist "Visage"
:year 1993
:id "1234"}
{:genre "electronic"
:artistId "334"
:name "Hounds of Love"
:songCount 18
:created "2018-08-29T13:00:32.000Z"
:duration 4419
:artist "Kate Bush"
:year 1997
:id "1215"
:coverArt "al-1215"}
{:genre "Psychedelic"
:artistId "424"
:name
"Forge Your Own Chains: Heavy Psychedelic Ballads and Dirges 1968-1974"
:songCount 15
:created "2018-01-27T12:23:47.000Z"
:duration 4241
:artist "Various Artists"
:id "742"
:coverArt "al-742"}
{:genre "Live Archive"
:artistId "141"
:name "2017-08-28 Rough Trade NYC, Brooklyn, NY"
:songCount 4
:created "2018-01-19T23:07:20.000Z"
:duration 2483
:artist "Sunburned Hand of the Man"
:year 2017
:id "242"
:coverArt "al-242"}
{:genre "electronic"
:artistId "236"
:name "Knock Knock"
:songCount 16
:created "2018-06-09T23:04:20.000Z"
:duration 4710
:artist "DJ Koze"
:year 2018
:id "401"
:coverArt "al-401"}
{:genre "Nintendocore"
:artistId "306"
:name "Gymnasiastik mit Antitainment"
:songCount 6
:created "2017-06-28T18:39:58.000Z"
:duration 795
:artist "Antitainment"
:year 2004
:id "533"}
{:genre "Electronic"
:artistId "584"
:name "Amok"
:songCount 9
:created "2017-06-28T18:41:30.000Z"
:duration 2681
:artist "Atoms for Peace"
:year 2013
:id "1023"
:coverArt "al-1023"}
{:artistId "26"
:name "TamponTango I"
:songCount 3
:created "2017-11-23T23:18:43.000Z"
:duration 851
:artist "Diederdas"
:year 2017
:id "51"
:coverArt "al-51"}
{:genre "electronic"
:artistId "679"
:name "Heaven and Earth"
:songCount 16
:created "2018-09-20T22:07:23.000Z"
:duration 8672
:artist "Kamasi Washington"
:year 2018
:id "1258"
:coverArt "al-1258"}
{:genre "rhy"
:artistId "661"
:name "Yawn Zen"
:songCount 12
:created "2018-08-21T21:36:43.000Z"
:duration 1883
:artist "Mndsgn"
:year 2014
:id "1200"
:coverArt "al-1200"}
{:genre "Rap"
:artistId "677"
:name "Elephant Eyelash"
:songCount 12
:created "2018-09-20T17:02:08.000Z"
:duration 2478
:artist "Why?"
:year 2005
:id "1256"}
{:genre "Electronic"
:artistId "41"
:name "Immunity"
:songCount 8
:created "2017-06-28T19:28:24.000Z"
:duration 3604
:artist "Jon Hopkins"
:year 2013
:id "104"
:coverArt "al-104"}
{:genre "IDM / Trip-Hop / Experimental"
:artistId "454"
:name "New Energy"
:songCount 14
:created "2017-11-25T19:44:56.000Z"
:duration 3381
:artist "Four Tet"
:year 2017
:id "800"
:coverArt "al-800"}
{:genre "Electronic"
:artistId "633"
:name "ƒIN (Special Edition)"
:songCount 20
:created "2017-06-28T19:26:41.000Z"
:duration 5822
:artist "John Talabot"
:year 2012
:id "1159"}
{:artistId "412"
:name "A Moot Point"
:songCount 2
:created "2017-06-28T18:32:24.000Z"
:duration 857
:artist "Pional"
:year 2010
:id "719"
:coverArt "al-719"}
{:id "740"
:name "KR Family EP, Pt. 1"
:artist "Peter Power"
:artistId "230"
:coverArt "al-740"
:songCount 3
:duration 1333
:created "2017-06-28T20:31:06.000Z"}
{:genre "House"
:artistId "482"
:name "Busy Days For Fools"
:songCount 11
:created "2017-06-28T19:35:05.000Z"
:duration 3238
:artist "Lee Burton"
:year 2012
:id "866"
:coverArt "al-866"}
{:id "851"
:name "Ry & Frank Wiedemann"
:artist "Ry & Frank Wiedemann"
:artistId "472"
:songCount 1
:duration 485
:created "2017-06-28T18:34:23.000Z"}
{:genre "Electronic"
:artistId "58"
:name "Deep Cuts"
:songCount 17
:created "2017-12-22T08:21:19.000Z"
:duration 3321
:artist "The Knife"
:year 2003
:id "96"
:coverArt "al-96"}
{:artistId "125"
:name "VIA Remixes"
:songCount 1
:created "2017-06-28T18:27:59.000Z"
:duration 362
:artist "Andi Otto"
:year 2017
:id "211"
:coverArt "al-211"}
{:artistId "626"
:name "Hummingbird / Milk & Honey"
:songCount 2
:created "2017-11-23T21:27:00.000Z"
:duration 303
:artist "Luca Nieri"
:year 2016
:id "1150"
:coverArt "al-1150"}
{:genre "Electronic"
:artistId "434"
:name "Mercy Street"
:songCount 2
:created "2017-12-22T08:18:55.000Z"
:duration 568
:artist "Fever Ray"
:year 2010
:id "762"
:coverArt "al-762"}
{:artistId "43"
:name "2012-2017"
:songCount 11
:created "2018-03-06T15:51:42.000Z"
:duration 3998
:artist "A.A.L."
:year 2018
:id "73"
:coverArt "al-73"}
{:genre "New Wave Music"
:artistId "337"
:name "Liaisons dangereuses"
:songCount 10
:created "2018-08-29T13:00:42.000Z"
:duration 2392
:artist "Liaisons Dangereuses"
:year 1985
:id "1216"}
{:genre "Electro"
:artistId "434"
:name "Fever Ray"
:songCount 12
:created "2017-12-22T08:19:04.000Z"
:duration 3380
:artist "Fever Ray"
:year 2009
:id "765"}
{:id "621"
:name "RSS Disco"
:artist "RSS Disco"
:artistId "358"
:songCount 2
:duration 841
:created "2018-04-25T10:11:14.000Z"}
{:genre "House"
:artistId "358"
:name "Very"
:songCount 3
:created "2017-06-28T20:17:12.000Z"
:duration 1339
:artist "RSS Disco"
:year 2012
:id "624"
:coverArt "al-624"}
{:genre "Disco"
:artistId "619"
:name "Sir John"
:songCount 1
:created "2018-03-12T20:21:14.000Z"
:duration 419
:artist "White Elephant"
:year 2011
:id "1134"
:coverArt "al-1134"}
{:genre "House"
:artistId "434"
:name "Sidetracked"
:songCount 1
:created "2017-06-28T18:20:10.000Z"
:duration 270
:artist "Fever Ray"
:year 2012
:id "920"
:coverArt "al-920"}
{:genre "Electronic"
:artistId "58"
:name "Hannah Med H Soundtrack"
:songCount 16
:created "2017-12-22T08:21:33.000Z"
:duration 2307
:artist "The Knife"
:year 2003
:id "97"
:coverArt "al-97"}
{:genre "Alternative Rock"
:artistId "478"
:name "6 Feet Beneath the Moon"
:songCount 14
:created "2017-09-08T17:37:16.000Z"
:duration 3136
:artist "King Krule"
:year 2013
:id "859"
:coverArt "al-859"}
{:artistId "103"
:name "Joga / Crazy Junker 7\""
:songCount 2
:created "2017-06-28T19:37:31.000Z"
:duration 442
:artist "M.Rux"
:year 2014
:id "177"
:coverArt "al-177"}
{:genre "House"
:artistId "267"
:name "Carat EP"
:songCount 5
:created "2017-06-28T20:08:58.000Z"
:duration 2080
:artist "Nu"
:year 2013
:id "467"}
{:artistId "419"
:name "On Claws (reissue)"
:songCount 1
:created "2017-07-24T13:48:20.000Z"
:duration 176
:artist "I am Oak"
:year 2013
:id "733"
:coverArt "al-733"}
{:genre "Indie Dance / Nu Disco"
:artistId "214"
:name "Thinking Allowed"
:songCount 1
:created "2018-01-02T18:54:41.000Z"
:duration 430
:artist "Tornado Wallace"
:year 2013
:id "354"
:coverArt "al-354"}
{:artistId "629"
:name "V.I.C.T.O.R"
:songCount 1
:created "2017-06-28T18:25:45.000Z"
:duration 279
:artist "Golden Bug"
:year 2016
:id "1153"
:coverArt "al-1153"}
{:genre "Avant-Garde"
:artistId "256"
:name "Ende Neu"
:songCount 9
:created "2017-06-28T19:09:43.000Z"
:duration 2693
:artist "Einstürzende Neubauten"
:year 1998
:id "426"
:coverArt "al-426"}
{:genre "House"
:artistId "245"
:name "Visibles"
:songCount 4
:created "2017-06-28T18:57:22.000Z"
:duration 1556
:artist "Constantijn Lange"
:year 2014
:id "413"
:coverArt "al-413"}
{:artistId "245"
:name "Orange Atlas"
:songCount 5
:created "2017-06-28T18:57:08.000Z"
:duration 2171
:artist "Constantijn Lange"
:year 2013
:id "412"
:coverArt "al-412"}
{:artistId "146"
:name "Mapping The Futures Gone By"
:songCount 7
:created "2017-06-28T18:57:28.000Z"
:duration 1536
:artist "CONTACT FIELD ORCHESTRA"
:year 2015
:id "247"
:coverArt "al-247"}
{:genre "electronic"
:artistId "253"
:name "It's Album Time"
:songCount 12
:created "2018-09-04T14:25:00.000Z"
:duration 3555
:artist "Todd Terje"
:year 2014
:id "1254"
:coverArt "al-1254"}]}
["getAlbumList2" {:type "recent", :size 100, :offset 40}]
{:album
[{:artistId "96"
:name "www.soundcloud.com/rampue"
:songCount 1
:created "2017-06-28T19:31:41.000Z"
:duration 424
:artist "Klima"
:year 2013
:id "166"
:coverArt "al-166"}
{:artistId "463"
:name "soundcloud.com/rampue"
:songCount 1
:created "2017-06-28T19:35:30.000Z"
:duration 523
:artist "Leonard Cohen"
:year 2014
:id "831"
:coverArt "al-831"}
{:artistId "463"
:name "The Future"
:songCount 9
:created "2018-01-16T11:14:41.000Z"
:duration 3579
:artist "Leonard Cohen"
:year 2012
:id "821"
:coverArt "al-821"}
{:id "1001"
:name "Lonski & Classen"
:artist "Lonski & Classen"
:artistId "566"
:coverArt "al-1001"
:songCount 1
:duration 248
:created "2017-06-28T20:07:24.000Z"}
{:genre "Podcast"
:artistId "199"
:name "Waterkant Souvenirs Podcast"
:songCount 1
:created "2017-06-28T20:00:25.000Z"
:duration 5341
:artist "Mira"
:year 2012
:id "325"}
{:id "324"
:name "Familiar Forest Festival 2012"
:artist "Mira"
:artistId "199"
:songCount 1
:duration 6695
:created "2017-06-28T20:00:35.000Z"
:year 2012}
{:genre "Ambient"
:artistId "188"
:name "We're New Here"
:songCount 13
:created "2017-06-28T19:18:06.000Z"
:duration 2135
:artist "Gil Scott-Heron and Jamie xx"
:year 2011
:id "310"
:coverArt "al-310"}
{:genre "Gothic"
:artistId "403"
:name "Galore : The Singles 87 - 97"
:songCount 18
:created "2017-11-06T20:51:35.000Z"
:duration 4369
:artist "The Cure"
:year 1997
:id "684"
:coverArt "al-684"}
{:genre "IDM"
:artistId "333"
:name "Remixes Compiled"
:songCount 12
:created "2017-06-28T20:22:43.000Z"
:duration 3233
:artist "Telefon Tel Aviv"
:year 2007
:id "723"
:coverArt "al-723"}
{:artistId "230"
:name "Ufordian Edits"
:songCount 1
:created "2018-02-19T22:55:59.000Z"
:duration 331
:artist "Peter Power"
:year 2015
:id "393"
:coverArt "al-393"}
{:genre "Other"
:artistId "528"
:name "Dream Runner EP"
:songCount 6
:created "2017-06-28T18:39:24.000Z"
:duration 899
:artist "Annu"
:year 2009
:id "948"}
{:genre "Techno"
:artistId "75"
:name "Unknown"
:songCount 8
:created "2017-06-28T20:17:47.000Z"
:duration 2841
:artist "Saschienne"
:year 2012
:id "124"
:coverArt "al-124"}
{:genre "Nintendocore"
:artistId "306"
:name "Nach der Kippe Pogo!?"
:songCount 11
:created "2017-06-28T18:40:09.000Z"
:duration 1508
:artist "Antitainment"
:year 2007
:id "532"
:coverArt "al-532"}
{:genre "Electronic"
:artistId "206"
:name "Swim"
:songCount 9
:created "2017-11-25T20:06:58.000Z"
:duration 2596
:artist "Caribou"
:year 2010
:id "339"
:coverArt "al-339"}
{:genre "trance"
:artistId "117"
:name "Nymphs III"
:songCount 2
:created "2017-06-28T20:04:17.000Z"
:duration 1080
:artist "Nicolas Jaar"
:year 2015
:id "201"}
{:genre "Gothic"
:artistId "403"
:name "Wish"
:songCount 12
:created "2018-01-02T14:29:04.000Z"
:duration 3976
:artist "The Cure"
:year 1992
:id "685"
:coverArt "al-685"}
{:genre "Gothic"
:artistId "403"
:name "Show (Live)"
:songCount 18
:created "2018-01-02T14:30:03.000Z"
:duration 5316
:artist "The Cure"
:id "698"
:coverArt "al-698"}
{:genre "Gothic"
:artistId "403"
:name "Mixed Up"
:songCount 11
:created "2018-01-02T14:29:43.000Z"
:duration 4260
:artist "The Cure"
:year 1990
:id "692"
:coverArt "al-692"}
{:id "1257"
:name "Saal"
:artist "Serengeti"
:artistId "678"
:songCount 13
:duration 2437
:created "2018-09-20T17:02:50.000Z"
:year 2013}
{:genre "Hip Hop"
:artistId "204"
:name "Leaders Of The Brew School"
:songCount 16
:created "2017-06-28T18:45:16.000Z"
:duration 2214
:artist "Betty Ford Boys"
:year 2013
:id "331"}
{:id "202"
:name "Sirens"
:artist "Nicolas Jaar"
:artistId "117"
:songCount 7
:duration 2841
:created "2017-06-28T20:04:34.000Z"
:year 2016}
{:genre "techno"
:artistId "682"
:name "Piñata"
:songCount 21
:created "2018-10-09T15:30:48.000Z"
:duration 3963
:artist "Freddie Gibbs & Madlib"
:year 2014
:id "1261"
:coverArt "al-1261"}
{:genre "electronic"
:artistId "681"
:name "We Must Become the Pitiless Censors of Ourselves"
:songCount 11
:created "2018-10-08T17:21:47.000Z"
:duration 1916
:artist "John Maus"
:year 2011
:id "1260"
:coverArt "al-1260"}
{:artistId "514"
:name "Time"
:songCount 1
:created "2017-07-24T13:19:05.000Z"
:duration 247
:artist "Lokke"
:year 2015
:id "923"
:coverArt "al-923"}
{:genre "jazz"
:artistId "680"
:name "These Things Take Time"
:songCount 13
:created "2018-10-08T17:21:09.000Z"
:duration 3013
:artist "Molly Nilsson"
:year 2008
:id "1259"
:coverArt "al-1259"}
{:artistId "463"
:name "Songs of Love and Hate"
:songCount 4
:created "2018-01-16T11:13:54.000Z"
:duration 1273
:artist "Leonard Cohen"
:year 1970
:id "829"
:coverArt "al-829"}
{:artistId "187"
:name "Vacation EP"
:songCount 7
:created "2017-06-28T20:19:17.000Z"
:duration 1902
:artist "Shlohmo"
:year 2012
:id "305"
:coverArt "al-305"}
{:genre "Electronic"
:artistId "187"
:name "Vacation (Remixes)"
:songCount 6
:created "2017-06-28T20:19:23.000Z"
:duration 3559
:artist "Shlohmo"
:year 2012
:id "303"}
{:genre "WeDidIt"
:artistId "302"
:name "Salvation Remixes"
:songCount 3
:created "2017-06-28T20:14:04.000Z"
:duration 739
:artist "Purple"
:year 2013
:id "525"
:coverArt "al-525"}
{:genre "Alternative Rock / Indie Rock"
:artistId "16"
:name "Sleeping With Ghosts"
:songCount 22
:created "2017-11-06T20:39:23.000Z"
:duration 5232
:artist "Placebo"
:year 2003
:id "38"
:coverArt "al-38"}
{:genre "Funk/Hip-Hop"
:artistId "198"
:name "Looking For the Perfect Beat"
:songCount 13
:created "2017-06-28T18:36:47.000Z"
:duration 4521
:artist "Afrika Bambaataa"
:year 2001
:id "323"
:coverArt "al-323"}
{:artistId "103"
:name "edits & cuts"
:songCount 14
:created "2017-06-28T19:37:20.000Z"
:duration 3550
:artist "M.Rux"
:year 2014
:id "182"
:coverArt "al-182"}
{:genre "Techno"
:artistId "117"
:name "Marks / Angles"
:songCount 3
:created "2017-06-28T20:03:46.000Z"
:duration 1000
:artist "Nicolas Jaar"
:year 2010
:id "196"}
{:genre "Electronic"
:artistId "73"
:name "Don't Break My Love EP"
:songCount 2
:created "2017-06-28T20:05:16.000Z"
:duration 673
:artist "Nicolas Jaar & Theatre Roosevelt"
:year 2011
:id "122"
:coverArt "al-122"}
{:genre "Electronic"
:artistId "233"
:name "Mother Earth's Plantasia"
:songCount 10
:created "2018-05-28T21:31:55.000Z"
:duration 1837
:artist "Mort Garson"
:year 1976
:id "397"
:coverArt "al-397"}
{:genre "Psychedelic Rock"
:artistId "424"
:name
"Nuggets: Original Artyfacts From the First Psychedelic Era, 1965-1968"
:songCount 27
:created "2018-02-21T12:01:38.000Z"
:duration 4614
:artist "Various Artists"
:year 1998
:id "743"
:coverArt "al-743"}
{:genre "Psychedelic Rock"
:artistId "37"
:name "Phluph"
:songCount 10
:created "2018-03-05T16:31:46.000Z"
:duration 2182
:artist "Phluph"
:year 2001
:id "64"
:coverArt "al-64"}
{:genre "Rock"
:artistId "305"
:name "The Best of Talking Heads (Remastered)"
:songCount 18
:created "2018-01-22T11:00:50.000Z"
:duration 4618
:artist "Talking Heads"
:year 2004
:id "529"
:coverArt "al-529"}
{:genre "Electronic"
:artistId "50"
:name "Divide And Exit"
:songCount 14
:created "2018-01-21T14:47:59.000Z"
:duration 2417
:artist "Sleaford Mods"
:year 2014
:id "86"
:coverArt "al-86"}
{:genre "electronic"
:artistId "349"
:name "Fade to Grey: The Best of Visage"
:songCount 12
:created "2018-08-29T13:01:26.000Z"
:duration 2757
:artist "Visage"
:year 1993
:id "1234"}
{:genre "electronic"
:artistId "334"
:name "Hounds of Love"
:songCount 18
:created "2018-08-29T13:00:32.000Z"
:duration 4419
:artist "Kate Bush"
:year 1997
:id "1215"
:coverArt "al-1215"}
{:genre "Psychedelic"
:artistId "424"
:name
"Forge Your Own Chains: Heavy Psychedelic Ballads and Dirges 1968-1974"
:songCount 15
:created "2018-01-27T12:23:47.000Z"
:duration 4241
:artist "Various Artists"
:id "742"
:coverArt "al-742"}
{:genre "Live Archive"
:artistId "141"
:name "2017-08-28 Rough Trade NYC, Brooklyn, NY"
:songCount 4
:created "2018-01-19T23:07:20.000Z"
:duration 2483
:artist "Sunburned Hand of the Man"
:year 2017
:id "242"
:coverArt "al-242"}
{:genre "electronic"
:artistId "236"
:name "Knock Knock"
:songCount 16
:created "2018-06-09T23:04:20.000Z"
:duration 4710
:artist "DJ Koze"
:year 2018
:id "401"
:coverArt "al-401"}
{:genre "Nintendocore"
:artistId "306"
:name "Gymnasiastik mit Antitainment"
:songCount 6
:created "2017-06-28T18:39:58.000Z"
:duration 795
:artist "Antitainment"
:year 2004
:id "533"}
{:genre "Electronic"
:artistId "584"
:name "Amok"
:songCount 9
:created "2017-06-28T18:41:30.000Z"
:duration 2681
:artist "Atoms for Peace"
:year 2013
:id "1023"
:coverArt "al-1023"}
{:artistId "26"
:name "TamponTango I"
:songCount 3
:created "2017-11-23T23:18:43.000Z"
:duration 851
:artist "Diederdas"
:year 2017
:id "51"
:coverArt "al-51"}
{:genre "electronic"
:artistId "679"
:name "Heaven and Earth"
:songCount 16
:created "2018-09-20T22:07:23.000Z"
:duration 8672
:artist "Kamasi Washington"
:year 2018
:id "1258"
:coverArt "al-1258"}
{:genre "rhy"
:artistId "661"
:name "Yawn Zen"
:songCount 12
:created "2018-08-21T21:36:43.000Z"
:duration 1883
:artist "Mndsgn"
:year 2014
:id "1200"
:coverArt "al-1200"}
{:genre "Rap"
:artistId "677"
:name "Elephant Eyelash"
:songCount 12
:created "2018-09-20T17:02:08.000Z"
:duration 2478
:artist "Why?"
:year 2005
:id "1256"}
{:genre "Electronic"
:artistId "41"
:name "Immunity"
:songCount 8
:created "2017-06-28T19:28:24.000Z"
:duration 3604
:artist "Jon Hopkins"
:year 2013
:id "104"
:coverArt "al-104"}
{:genre "IDM / Trip-Hop / Experimental"
:artistId "454"
:name "New Energy"
:songCount 14
:created "2017-11-25T19:44:56.000Z"
:duration 3381
:artist "Four Tet"
:year 2017
:id "800"
:coverArt "al-800"}
{:genre "Electronic"
:artistId "633"
:name "ƒIN (Special Edition)"
:songCount 20
:created "2017-06-28T19:26:41.000Z"
:duration 5822
:artist "John Talabot"
:year 2012
:id "1159"}
{:artistId "412"
:name "A Moot Point"
:songCount 2
:created "2017-06-28T18:32:24.000Z"
:duration 857
:artist "Pional"
:year 2010
:id "719"
:coverArt "al-719"}
{:id "740"
:name "KR Family EP, Pt. 1"
:artist "Peter Power"
:artistId "230"
:coverArt "al-740"
:songCount 3
:duration 1333
:created "2017-06-28T20:31:06.000Z"}
{:genre "House"
:artistId "482"
:name "Busy Days For Fools"
:songCount 11
:created "2017-06-28T19:35:05.000Z"
:duration 3238
:artist "Lee Burton"
:year 2012
:id "866"
:coverArt "al-866"}
{:id "851"
:name "Ry & Frank Wiedemann"
:artist "Ry & Frank Wiedemann"
:artistId "472"
:songCount 1
:duration 485
:created "2017-06-28T18:34:23.000Z"}
{:genre "Electronic"
:artistId "58"
:name "Deep Cuts"
:songCount 17
:created "2017-12-22T08:21:19.000Z"
:duration 3321
:artist "The Knife"
:year 2003
:id "96"
:coverArt "al-96"}
{:artistId "125"
:name "VIA Remixes"
:songCount 1
:created "2017-06-28T18:27:59.000Z"
:duration 362
:artist "Andi Otto"
:year 2017
:id "211"
:coverArt "al-211"}
{:artistId "626"
:name "Hummingbird / Milk & Honey"
:songCount 2
:created "2017-11-23T21:27:00.000Z"
:duration 303
:artist "Luca Nieri"
:year 2016
:id "1150"
:coverArt "al-1150"}
{:genre "Electronic"
:artistId "434"
:name "Mercy Street"
:songCount 2
:created "2017-12-22T08:18:55.000Z"
:duration 568
:artist "Fever Ray"
:year 2010
:id "762"
:coverArt "al-762"}
{:artistId "43"
:name "2012-2017"
:songCount 11
:created "2018-03-06T15:51:42.000Z"
:duration 3998
:artist "A.A.L."
:year 2018
:id "73"
:coverArt "al-73"}
{:genre "New Wave Music"
:artistId "337"
:name "Liaisons dangereuses"
:songCount 10
:created "2018-08-29T13:00:42.000Z"
:duration 2392
:artist "Liaisons Dangereuses"
:year 1985
:id "1216"}
{:genre "Electro"
:artistId "434"
:name "Fever Ray"
:songCount 12
:created "2017-12-22T08:19:04.000Z"
:duration 3380
:artist "Fever Ray"
:year 2009
:id "765"}
{:id "621"
:name "RSS Disco"
:artist "RSS Disco"
:artistId "358"
:songCount 2
:duration 841
:created "2018-04-25T10:11:14.000Z"}
{:genre "House"
:artistId "358"
:name "Very"
:songCount 3
:created "2017-06-28T20:17:12.000Z"
:duration 1339
:artist "RSS Disco"
:year 2012
:id "624"
:coverArt "al-624"}
{:genre "Disco"
:artistId "619"
:name "Sir John"
:songCount 1
:created "2018-03-12T20:21:14.000Z"
:duration 419
:artist "White Elephant"
:year 2011
:id "1134"
:coverArt "al-1134"}
{:genre "House"
:artistId "434"
:name "Sidetracked"
:songCount 1
:created "2017-06-28T18:20:10.000Z"
:duration 270
:artist "Fever Ray"
:year 2012
:id "920"
:coverArt "al-920"}
{:genre "Electronic"
:artistId "58"
:name "Hannah Med H Soundtrack"
:songCount 16
:created "2017-12-22T08:21:33.000Z"
:duration 2307
:artist "The Knife"
:year 2003
:id "97"
:coverArt "al-97"}
{:genre "Alternative Rock"
:artistId "478"
:name "6 Feet Beneath the Moon"
:songCount 14
:created "2017-09-08T17:37:16.000Z"
:duration 3136
:artist "King Krule"
:year 2013
:id "859"
:coverArt "al-859"}
{:artistId "103"
:name "Joga / Crazy Junker 7\""
:songCount 2
:created "2017-06-28T19:37:31.000Z"
:duration 442
:artist "M.Rux"
:year 2014
:id "177"
:coverArt "al-177"}
{:genre "House"
:artistId "267"
:name "Carat EP"
:songCount 5
:created "2017-06-28T20:08:58.000Z"
:duration 2080
:artist "Nu"
:year 2013
:id "467"}
{:artistId "419"
:name "On Claws (reissue)"
:songCount 1
:created "2017-07-24T13:48:20.000Z"
:duration 176
:artist "I am Oak"
:year 2013
:id "733"
:coverArt "al-733"}
{:genre "Indie Dance / Nu Disco"
:artistId "214"
:name "Thinking Allowed"
:songCount 1
:created "2018-01-02T18:54:41.000Z"
:duration 430
:artist "Tornado Wallace"
:year 2013
:id "354"
:coverArt "al-354"}
{:artistId "629"
:name "V.I.C.T.O.R"
:songCount 1
:created "2017-06-28T18:25:45.000Z"
:duration 279
:artist "Golden Bug"
:year 2016
:id "1153"
:coverArt "al-1153"}
{:genre "Avant-Garde"
:artistId "256"
:name "Ende Neu"
:songCount 9
:created "2017-06-28T19:09:43.000Z"
:duration 2693
:artist "Einstürzende Neubauten"
:year 1998
:id "426"
:coverArt "al-426"}
{:genre "House"
:artistId "245"
:name "Visibles"
:songCount 4
:created "2017-06-28T18:57:22.000Z"
:duration 1556
:artist "Constantijn Lange"
:year 2014
:id "413"
:coverArt "al-413"}
{:artistId "245"
:name "Orange Atlas"
:songCount 5
:created "2017-06-28T18:57:08.000Z"
:duration 2171
:artist "Constantijn Lange"
:year 2013
:id "412"
:coverArt "al-412"}
{:artistId "146"
:name "Mapping The Futures Gone By"
:songCount 7
:created "2017-06-28T18:57:28.000Z"
:duration 1536
:artist "CONTACT FIELD ORCHESTRA"
:year 2015
:id "247"
:coverArt "al-247"}
{:genre "electronic"
:artistId "253"
:name "It's Album Time"
:songCount 12
:created "2018-09-04T14:25:00.000Z"
:duration 3555
:artist "Todd Terje"
:year 2014
:id "1254"
:coverArt "al-1254"}
{:genre "electronic"
:artistId "676"
:name "The Big Cover-Up"
:songCount 8
:created "2018-09-04T14:44:38.000Z"
:duration 3130
:artist "Todd Terje & The Olsens"
:year 2016
:id "1255"
:coverArt "al-1255"}
{:genre "electronic"
:artistId "424"
:name "I-Robots: Italo Electro Disco Underground Classics"
:songCount 13
:created "2018-08-29T13:01:11.000Z"
:duration 4797
:artist "Various Artists"
:year 2004
:id "1217"}
{:genre "Electronic"
:artistId "497"
:name "Creature Dreams"
:songCount 7
:created "2017-06-28T20:27:36.000Z"
:duration 1709
:artist "TOKiMONSTA"
:year 2011
:id "897"}
{:genre "Other"
:artistId "466"
:name "Brighton Beach (Freddie Joachim Remix)"
:songCount 1
:created "2017-06-28T18:34:34.000Z"
:duration 187
:artist "Télépopmusik"
:year 2011
:id "838"}
{:genre "Hip-Hop"
:artistId "234"
:name "Viktor Vaughn - Vaudeville Villain"
:songCount 30
:created "2017-06-28T20:45:05.000Z"
:duration 6039
:artist "MF Doom"
:year 2012
:id "1079"
:coverArt "al-1079"}
{:genre "Hip-Hop"
:artistId "234"
:name "King Geedorah - Take Me To Your Leader"
:songCount 13
:created "2017-06-28T20:44:03.000Z"
:duration 2514
:artist "MF Doom"
:year 2003
:id "1078"
:coverArt "al-1078"}
{:genre "electronic"
:artistId "667"
:name "Solid State Survivor"
:songCount 8
:created "2018-08-29T13:02:20.000Z"
:duration 1921
:artist "Yellow Magic Orchestra"
:year 1979
:id "1231"}
{:genre "electronic"
:artistId "666"
:name "Technodon"
:songCount 12
:created "2018-08-29T13:02:40.000Z"
:duration 3806
:artist "Y̶M̶O̶"
:year 1993
:id "1224"}
{:genre "Alternative Hip Hop"
:artistId "669"
:name "Unicron"
:songCount 6
:created "2018-08-29T13:45:33.000Z"
:duration 887
:artist "MF DOOM & Trunks"
:year 2008
:id "1235"
:coverArt "al-1235"}
{:genre "Alternative Hip Hop"
:artistId "650"
:name "Special Herbs, Volume 5 & 6"
:songCount 13
:created "2018-08-29T13:45:33.000Z"
:duration 2760
:artist "Metal Fingers"
:year 2004
:id "1248"
:coverArt "al-1248"}
{:genre "Alternative Hip Hop"
:artistId "650"
:name "Special Herbs, Volume 3 & 4"
:songCount 16
:created "2018-08-29T13:45:43.000Z"
:duration 3054
:artist "Metal Fingers"
:year 2003
:id "1251"
:coverArt "al-1251"}
{:genre "electronic"
:artistId "650"
:name "Special Herbs, Volume 9 & 0"
:songCount 13
:created "2018-08-29T13:45:57.000Z"
:duration 2751
:artist "Metal Fingers"
:year 2005
:id "1249"
:coverArt "al-1249"}
{:genre "electronic"
:artistId "650"
:name "Special Herbs, Volume 7 & 8"
:songCount 13
:created "2018-08-29T13:46:07.000Z"
:duration 2680
:artist "Metal Fingers"
:year 2004
:id "1250"
:coverArt "al-1250"}
{:genre "raphiphop"
:artistId "674"
:name "Key to the Kuffs"
:songCount 15
:created "2018-08-29T13:46:12.000Z"
:duration 2520
:artist "JJ DOOM"
:year 2012
:id "1245"
:coverArt "al-1245"}
{:genre "Hip Hop Music"
:artistId "647"
:name "The Prof. Meets the Supervillain"
:songCount 5
:created "2018-08-29T13:46:19.000Z"
:duration 829
:artist "MF DOOM"
:year 2003
:id "1244"
:coverArt "al-1244"}
{:genre "Hip Hop Music"
:artistId "647"
:name "Vomit"
:songCount 6
:created "2018-08-29T13:46:24.000Z"
:duration 1254
:artist "MF DOOM"
:year 2006
:id "1241"
:coverArt "al-1241"}
{:genre "Hip-Hop"
:artistId "670"
:name "Victory Laps"
:songCount 6
:created "2018-08-29T13:46:34.000Z"
:duration 1026
:artist "DOOMSTARKS"
:year 2011
:id "1237"
:coverArt "al-1237"}
{:genre "rock"
:artistId "672"
:name "(VV:2) Venomous Villain"
:songCount 12
:created "2018-08-29T13:46:36.000Z"
:duration 1976
:artist "Viktor Vaughn"
:year 2004
:id "1242"
:coverArt "al-1242"}
{:genre "Hip Hop Music"
:artistId "671"
:name "Air"
:songCount 5
:created "2018-08-29T13:46:39.000Z"
:duration 803
:artist "Dabrye"
:year 2006
:id "1238"
:coverArt "al-1238"}
{:id "984"
:name "The Wicker Man"
:artist "The Wicker Man"
:artistId "553"
:coverArt "al-984"
:songCount 1
:duration 243
:created "2017-06-28T20:06:58.000Z"}]}}) | null | https://raw.githubusercontent.com/heyarne/airsonic-ui/7adb03d6e2ba0ff764796a57b7e87f62b242c9b7/test/cljs/airsonic_ui/components/library/fixtures.cljs | clojure | this is straight from the response cache, copied from app db after
of pages | (ns airsonic-ui.components.library.fixtures)
browsing through the most recently listened to tracks for the first couple
(def responses
{["getAlbumList2" {:type "recent", :size 100, :offset 0}]
{:album
[{:artistId "478"
:name "The OOZ"
:songCount 19
:created "2018-06-02T12:06:11.000Z"
:duration 3975
:artist "King Krule"
:year 2017
:id "857"
:coverArt "al-857"}
{:genre "hip/electronic/jaz/Alternative Hip Hop/ambient"
:artistId "644"
:name "The Unseen"
:songCount 24
:created "2018-07-30T09:20:22.000Z"
:duration 3795
:artist "Quasimoto"
:year 2000
:id "1174"
:coverArt "al-1174"}
{:artistId "15"
:name "The Starkiller"
:songCount 3
:created "2018-01-02T16:27:35.000Z"
:duration 1158
:artist "The Starkiller"
:year 2013
:id "29"
:coverArt "al-29"}
{:genre "Disco"
:artistId "437"
:name "Waffles 004"
:songCount 1
:created "2018-03-08T19:18:24.000Z"
:duration 349
:artist "Waffles"
:year 2016
:id "771"
:coverArt "al-771"}
{:genre "Electronic"
:artistId "49"
:name "Fated"
:songCount 15
:created "2018-03-12T08:36:57.000Z"
:duration 2017
:artist "Nosaj Thing"
:year 2015
:id "81"
:coverArt "al-81"}
{:genre "Electronic"
:artistId "41"
:name "Open Eye Signal (Remixes)"
:songCount 1
:created "2017-06-28T19:11:50.000Z"
:duration 208
:artist "Jon Hopkins"
:year 2013
:id "68"}
{:genre "Soundtrack"
:artistId "684"
:name "Ghost in the Shell"
:songCount 11
:created "2018-10-20T08:35:00.000Z"
:duration 2730
:artist "Kenji Kawai"
:year 1995
:id "1263"
:coverArt "al-1263"}
{:artistId "31"
:name "Drop Me A Line / Your Heart To Me"
:songCount 2
:created "2017-12-30T23:40:03.000Z"
:duration 551
:artist "Social Lovers"
:year 2017
:id "56"
:coverArt "al-56"}
{:id "84"
:name "Unknown Album"
:artist "Nosaj Thing"
:artistId "49"
:songCount 1
:duration 202
:created "2017-06-28T20:08:38.000Z"
:genre "Unknown Genre"}
{:genre "Electronic"
:artistId "49"
:name "Home"
:songCount 11
:created "2017-06-28T20:08:29.000Z"
:duration 2196
:artist "Nosaj Thing"
:year 2013
:id "82"
:coverArt "al-82"}
{:genre "Gothic"
:artistId "403"
:name "Three Imaginary Boys (Deluxe Edition)"
:songCount 34
:created "2017-11-06T20:37:32.000Z"
:duration 6128
:artist "The Cure"
:year 2005
:id "701"
:coverArt "al-701"}
{:genre "hip"
:artistId "236"
:name "Amygdala"
:songCount 13
:created "2018-08-14T20:23:42.000Z"
:duration 4665
:artist "DJ Koze"
:year 2013
:id "403"
:coverArt "al-403"}
{:genre "Downtempo"
:artistId "596"
:name "II"
:songCount 10
:created "2017-06-28T20:15:56.000Z"
:duration 2755
:artist "Raz Ohara and The Odd Orchestra"
:year 2009
:id "1040"}
{:genre "Soul"
:artistId "436"
:name "Overgrown"
:songCount 11
:created "2018-01-02T08:43:32.000Z"
:duration 2569
:artist "James Blake"
:year 2013
:id "770"
:coverArt "al-770"}
{:genre "Electronic"
:artistId "436"
:name "Life Round Here (feat. Chance the Rapper) - Single"
:songCount 1
:created "2017-06-28T19:23:25.000Z"
:duration 188
:artist "James Blake"
:year 2013
:id "768"
:coverArt "al-768"}
{:genre "Hip Hop"
:artistId "290"
:name "C'mon! EP"
:songCount 6
:created "2017-06-28T19:12:11.000Z"
:duration 1163
:artist "Fatoni"
:year 2015
:id "508"
:coverArt "al-508"}
{:genre "electronic"
:artistId "683"
:name "Das Ziel ist im Weg"
:songCount 10
:created "2018-10-17T11:01:24.000Z"
:duration 2130
:artist "Mine"
:year 2016
:id "1262"
:coverArt "al-1262"}
{:genre "Downtempo"
:artistId "479"
:name "Days to Come"
:songCount 18
:created "2017-06-28T18:47:44.000Z"
:duration 4627
:artist "Bonobo"
:year 2006
:id "861"
:coverArt "al-861"}
{:genre "Electronic"
:artistId "206"
:name "Andorra"
:songCount 9
:created "2017-11-25T20:47:26.000Z"
:duration 2581
:artist "Caribou"
:year 2007
:id "336"
:coverArt "al-336"}
{:genre "Electronic"
:artistId "206"
:name "Melody Day"
:songCount 3
:created "2017-11-25T20:49:51.000Z"
:duration 683
:artist "Caribou"
:year 2007
:id "335"
:coverArt "al-335"}
{:id "707"
:name "lassmalaura"
:artist "lassmalaura"
:artistId "406"
:songCount 2
:duration 8241
:created "2017-06-28T18:27:36.000Z"}
{:genre "Idm"
:artistId "597"
:name "Cerulean"
:songCount 12
:created "2017-06-28T18:44:43.000Z"
:duration 2594
:artist "Baths"
:year 2010
:id "1042"
:coverArt "al-1042"}
{:genre "Electronic"
:artistId "64"
:name "Plörre"
:songCount 11
:created "2017-06-28T19:17:41.000Z"
:duration 2495
:artist "Frittenbude"
:year 2010
:id "109"}
{:genre "Electronic"
:artistId "388"
:name "Rongorongo Remixed"
:songCount 11
:created "2017-06-28T19:57:48.000Z"
:duration 3590
:artist "Me Succeeds"
:year 2013
:id "654"
:coverArt "al-654"}
{:genre "Hip-Hop"
:artistId "270"
:name "Über Liebe VLS"
:songCount 1
:created "2017-06-28T18:42:12.000Z"
:duration 169
:artist "Audio88 und Yassin"
:year 2011
:id "469"}
{:genre "Hip-Hop"
:artistId "523"
:name "Über Liebe VLS"
:songCount 1
:created "2017-06-28T20:21:03.000Z"
:duration 275
:artist "Suff Daddy"
:year 2011
:id "940"}
{:id "25"
:name "Nhar, Lee Burton"
:artist "Nhar, Lee Burton"
:artistId "13"
:songCount 1
:duration 482
:created "2017-06-28T18:30:39.000Z"}
{:genre "Indie Dance / Nu Disco"
:artistId "360"
:name "Salto"
:songCount 1
:created "2018-01-02T18:55:06.000Z"
:duration 414
:artist "Martin Heimann"
:year 2016
:id "625"
:coverArt "al-625"}
{:id "273"
:name "[via XLR8R]"
:artist "Lianne La Havas"
:artistId "165"
:coverArt "al-273"
:songCount 1
:duration 307
:created "2017-06-28T19:35:28.000Z"}
{:artistId "249"
:name "Free Downloads"
:songCount 1
:created "2017-06-28T19:10:19.000Z"
:duration 286
:artist "Emancipator feat. Sigur Rós vs. Mobb Deep"
:year 2011
:id "419"
:coverArt "al-419"}
{:genre "Electronic"
:artistId "64"
:name "Und täglich grüßt das Murmeltier"
:songCount 3
:created "2017-12-31T09:03:39.000Z"
:duration 652
:artist "Frittenbude"
:year 2010
:id "107"}
{:genre "Electronic"
:artistId "206"
:name "Start Breaking My Heart"
:songCount 20
:created "2017-11-25T20:47:51.000Z"
:duration 6197
:artist "Caribou"
:year 2006
:id "338"
:coverArt "al-338"}
{:genre "Electronic"
:artistId "206"
:name "The Milk Of Human Kindness"
:songCount 11
:created "2017-11-25T20:41:58.000Z"
:duration 2412
:artist "Caribou"
:year 2005
:id "337"
:coverArt "al-337"}
{:genre "electronic"
:artistId "424"
:name "Permanent Vacation 3"
:songCount 47
:created "2017-06-28T20:29:36.000Z"
:duration 18682
:artist "Various Artists"
:year 2014
:id "747"
:coverArt "al-747"}
{:genre "Electronic"
:artistId "162"
:name "Music Has the Right to Children"
:songCount 18
:created "2017-06-28T18:46:28.000Z"
:duration 4226
:artist "Boards of Canada"
:year 2004
:id "270"
:coverArt "al-270"}
{:id "276"
:name "Nostalgia 77"
:artist "Nostalgia 77"
:artistId "168"
:songCount 1
:duration 277
:created "2017-06-28T18:31:28.000Z"}
{:genre "Electronic"
:artistId "597"
:name "Obsidian"
:songCount 10
:created "2017-06-28T18:43:58.000Z"
:duration 2596
:artist "Baths"
:year 2013
:id "1041"
:coverArt "al-1041"}
{:id "954"
:name "[via XLR8R.com]"
:artist "Burial"
:artistId "530"
:coverArt "al-954"
:songCount 1
:duration 297
:created "2017-06-28T18:49:04.000Z"}
{:genre "Uk Garage"
:artistId "530"
:name "Kindred EP"
:songCount 3
:created "2017-06-28T18:49:36.000Z"
:duration 1839
:artist "Burial"
:year 2012
:id "953"
:coverArt "al-953"}
{:genre "Unknown"
:artistId "430"
:name "Rampue"
:songCount 6
:created "2017-06-28T18:34:00.000Z"
:duration 16433
:artist "Rampue"
:year 2012
:id "753"
:coverArt "al-753"}
{:artistId "96"
:name "www.soundcloud.com/rampue"
:songCount 1
:created "2017-06-28T19:31:41.000Z"
:duration 424
:artist "Klima"
:year 2013
:id "166"
:coverArt "al-166"}
{:artistId "463"
:name "soundcloud.com/rampue"
:songCount 1
:created "2017-06-28T19:35:30.000Z"
:duration 523
:artist "Leonard Cohen"
:year 2014
:id "831"
:coverArt "al-831"}
{:artistId "463"
:name "The Future"
:songCount 9
:created "2018-01-16T11:14:41.000Z"
:duration 3579
:artist "Leonard Cohen"
:year 2012
:id "821"
:coverArt "al-821"}
{:id "1001"
:name "Lonski & Classen"
:artist "Lonski & Classen"
:artistId "566"
:coverArt "al-1001"
:songCount 1
:duration 248
:created "2017-06-28T20:07:24.000Z"}
{:genre "Podcast"
:artistId "199"
:name "Waterkant Souvenirs Podcast"
:songCount 1
:created "2017-06-28T20:00:25.000Z"
:duration 5341
:artist "Mira"
:year 2012
:id "325"}
{:id "324"
:name "Familiar Forest Festival 2012"
:artist "Mira"
:artistId "199"
:songCount 1
:duration 6695
:created "2017-06-28T20:00:35.000Z"
:year 2012}
{:genre "Ambient"
:artistId "188"
:name "We're New Here"
:songCount 13
:created "2017-06-28T19:18:06.000Z"
:duration 2135
:artist "Gil Scott-Heron and Jamie xx"
:year 2011
:id "310"
:coverArt "al-310"}
{:genre "Gothic"
:artistId "403"
:name "Galore : The Singles 87 - 97"
:songCount 18
:created "2017-11-06T20:51:35.000Z"
:duration 4369
:artist "The Cure"
:year 1997
:id "684"
:coverArt "al-684"}
{:genre "IDM"
:artistId "333"
:name "Remixes Compiled"
:songCount 12
:created "2017-06-28T20:22:43.000Z"
:duration 3233
:artist "Telefon Tel Aviv"
:year 2007
:id "723"
:coverArt "al-723"}
{:artistId "230"
:name "Ufordian Edits"
:songCount 1
:created "2018-02-19T22:55:59.000Z"
:duration 331
:artist "Peter Power"
:year 2015
:id "393"
:coverArt "al-393"}
{:genre "Other"
:artistId "528"
:name "Dream Runner EP"
:songCount 6
:created "2017-06-28T18:39:24.000Z"
:duration 899
:artist "Annu"
:year 2009
:id "948"}
{:genre "Techno"
:artistId "75"
:name "Unknown"
:songCount 8
:created "2017-06-28T20:17:47.000Z"
:duration 2841
:artist "Saschienne"
:year 2012
:id "124"
:coverArt "al-124"}
{:genre "Nintendocore"
:artistId "306"
:name "Nach der Kippe Pogo!?"
:songCount 11
:created "2017-06-28T18:40:09.000Z"
:duration 1508
:artist "Antitainment"
:year 2007
:id "532"
:coverArt "al-532"}
{:genre "Electronic"
:artistId "206"
:name "Swim"
:songCount 9
:created "2017-11-25T20:06:58.000Z"
:duration 2596
:artist "Caribou"
:year 2010
:id "339"
:coverArt "al-339"}
{:genre "trance"
:artistId "117"
:name "Nymphs III"
:songCount 2
:created "2017-06-28T20:04:17.000Z"
:duration 1080
:artist "Nicolas Jaar"
:year 2015
:id "201"}
{:genre "Gothic"
:artistId "403"
:name "Wish"
:songCount 12
:created "2018-01-02T14:29:04.000Z"
:duration 3976
:artist "The Cure"
:year 1992
:id "685"
:coverArt "al-685"}
{:genre "Gothic"
:artistId "403"
:name "Show (Live)"
:songCount 18
:created "2018-01-02T14:30:03.000Z"
:duration 5316
:artist "The Cure"
:id "698"
:coverArt "al-698"}
{:genre "Gothic"
:artistId "403"
:name "Mixed Up"
:songCount 11
:created "2018-01-02T14:29:43.000Z"
:duration 4260
:artist "The Cure"
:year 1990
:id "692"
:coverArt "al-692"}
{:id "1257"
:name "Saal"
:artist "Serengeti"
:artistId "678"
:songCount 13
:duration 2437
:created "2018-09-20T17:02:50.000Z"
:year 2013}
{:genre "Hip Hop"
:artistId "204"
:name "Leaders Of The Brew School"
:songCount 16
:created "2017-06-28T18:45:16.000Z"
:duration 2214
:artist "Betty Ford Boys"
:year 2013
:id "331"}
{:id "202"
:name "Sirens"
:artist "Nicolas Jaar"
:artistId "117"
:songCount 7
:duration 2841
:created "2017-06-28T20:04:34.000Z"
:year 2016}
{:genre "techno"
:artistId "682"
:name "Piñata"
:songCount 21
:created "2018-10-09T15:30:48.000Z"
:duration 3963
:artist "Freddie Gibbs & Madlib"
:year 2014
:id "1261"
:coverArt "al-1261"}
{:genre "electronic"
:artistId "681"
:name "We Must Become the Pitiless Censors of Ourselves"
:songCount 11
:created "2018-10-08T17:21:47.000Z"
:duration 1916
:artist "John Maus"
:year 2011
:id "1260"
:coverArt "al-1260"}
{:artistId "514"
:name "Time"
:songCount 1
:created "2017-07-24T13:19:05.000Z"
:duration 247
:artist "Lokke"
:year 2015
:id "923"
:coverArt "al-923"}
{:genre "jazz"
:artistId "680"
:name "These Things Take Time"
:songCount 13
:created "2018-10-08T17:21:09.000Z"
:duration 3013
:artist "Molly Nilsson"
:year 2008
:id "1259"
:coverArt "al-1259"}
{:artistId "463"
:name "Songs of Love and Hate"
:songCount 4
:created "2018-01-16T11:13:54.000Z"
:duration 1273
:artist "Leonard Cohen"
:year 1970
:id "829"
:coverArt "al-829"}
{:artistId "187"
:name "Vacation EP"
:songCount 7
:created "2017-06-28T20:19:17.000Z"
:duration 1902
:artist "Shlohmo"
:year 2012
:id "305"
:coverArt "al-305"}
{:genre "Electronic"
:artistId "187"
:name "Vacation (Remixes)"
:songCount 6
:created "2017-06-28T20:19:23.000Z"
:duration 3559
:artist "Shlohmo"
:year 2012
:id "303"}
{:genre "WeDidIt"
:artistId "302"
:name "Salvation Remixes"
:songCount 3
:created "2017-06-28T20:14:04.000Z"
:duration 739
:artist "Purple"
:year 2013
:id "525"
:coverArt "al-525"}
{:genre "Alternative Rock / Indie Rock"
:artistId "16"
:name "Sleeping With Ghosts"
:songCount 22
:created "2017-11-06T20:39:23.000Z"
:duration 5232
:artist "Placebo"
:year 2003
:id "38"
:coverArt "al-38"}
{:genre "Funk/Hip-Hop"
:artistId "198"
:name "Looking For the Perfect Beat"
:songCount 13
:created "2017-06-28T18:36:47.000Z"
:duration 4521
:artist "Afrika Bambaataa"
:year 2001
:id "323"
:coverArt "al-323"}
{:artistId "103"
:name "edits & cuts"
:songCount 14
:created "2017-06-28T19:37:20.000Z"
:duration 3550
:artist "M.Rux"
:year 2014
:id "182"
:coverArt "al-182"}
{:genre "Techno"
:artistId "117"
:name "Marks / Angles"
:songCount 3
:created "2017-06-28T20:03:46.000Z"
:duration 1000
:artist "Nicolas Jaar"
:year 2010
:id "196"}
{:genre "Electronic"
:artistId "73"
:name "Don't Break My Love EP"
:songCount 2
:created "2017-06-28T20:05:16.000Z"
:duration 673
:artist "Nicolas Jaar & Theatre Roosevelt"
:year 2011
:id "122"
:coverArt "al-122"}
{:genre "Electronic"
:artistId "233"
:name "Mother Earth's Plantasia"
:songCount 10
:created "2018-05-28T21:31:55.000Z"
:duration 1837
:artist "Mort Garson"
:year 1976
:id "397"
:coverArt "al-397"}
{:genre "Psychedelic Rock"
:artistId "424"
:name
"Nuggets: Original Artyfacts From the First Psychedelic Era, 1965-1968"
:songCount 27
:created "2018-02-21T12:01:38.000Z"
:duration 4614
:artist "Various Artists"
:year 1998
:id "743"
:coverArt "al-743"}
{:genre "Psychedelic Rock"
:artistId "37"
:name "Phluph"
:songCount 10
:created "2018-03-05T16:31:46.000Z"
:duration 2182
:artist "Phluph"
:year 2001
:id "64"
:coverArt "al-64"}
{:genre "Rock"
:artistId "305"
:name "The Best of Talking Heads (Remastered)"
:songCount 18
:created "2018-01-22T11:00:50.000Z"
:duration 4618
:artist "Talking Heads"
:year 2004
:id "529"
:coverArt "al-529"}
{:genre "Electronic"
:artistId "50"
:name "Divide And Exit"
:songCount 14
:created "2018-01-21T14:47:59.000Z"
:duration 2417
:artist "Sleaford Mods"
:year 2014
:id "86"
:coverArt "al-86"}
{:genre "electronic"
:artistId "349"
:name "Fade to Grey: The Best of Visage"
:songCount 12
:created "2018-08-29T13:01:26.000Z"
:duration 2757
:artist "Visage"
:year 1993
:id "1234"}
{:genre "electronic"
:artistId "334"
:name "Hounds of Love"
:songCount 18
:created "2018-08-29T13:00:32.000Z"
:duration 4419
:artist "Kate Bush"
:year 1997
:id "1215"
:coverArt "al-1215"}
{:genre "Psychedelic"
:artistId "424"
:name
"Forge Your Own Chains: Heavy Psychedelic Ballads and Dirges 1968-1974"
:songCount 15
:created "2018-01-27T12:23:47.000Z"
:duration 4241
:artist "Various Artists"
:id "742"
:coverArt "al-742"}
{:genre "Live Archive"
:artistId "141"
:name "2017-08-28 Rough Trade NYC, Brooklyn, NY"
:songCount 4
:created "2018-01-19T23:07:20.000Z"
:duration 2483
:artist "Sunburned Hand of the Man"
:year 2017
:id "242"
:coverArt "al-242"}
{:genre "electronic"
:artistId "236"
:name "Knock Knock"
:songCount 16
:created "2018-06-09T23:04:20.000Z"
:duration 4710
:artist "DJ Koze"
:year 2018
:id "401"
:coverArt "al-401"}
{:genre "Nintendocore"
:artistId "306"
:name "Gymnasiastik mit Antitainment"
:songCount 6
:created "2017-06-28T18:39:58.000Z"
:duration 795
:artist "Antitainment"
:year 2004
:id "533"}
{:genre "Electronic"
:artistId "584"
:name "Amok"
:songCount 9
:created "2017-06-28T18:41:30.000Z"
:duration 2681
:artist "Atoms for Peace"
:year 2013
:id "1023"
:coverArt "al-1023"}
{:artistId "26"
:name "TamponTango I"
:songCount 3
:created "2017-11-23T23:18:43.000Z"
:duration 851
:artist "Diederdas"
:year 2017
:id "51"
:coverArt "al-51"}
{:genre "electronic"
:artistId "679"
:name "Heaven and Earth"
:songCount 16
:created "2018-09-20T22:07:23.000Z"
:duration 8672
:artist "Kamasi Washington"
:year 2018
:id "1258"
:coverArt "al-1258"}
{:genre "rhy"
:artistId "661"
:name "Yawn Zen"
:songCount 12
:created "2018-08-21T21:36:43.000Z"
:duration 1883
:artist "Mndsgn"
:year 2014
:id "1200"
:coverArt "al-1200"}
{:genre "Rap"
:artistId "677"
:name "Elephant Eyelash"
:songCount 12
:created "2018-09-20T17:02:08.000Z"
:duration 2478
:artist "Why?"
:year 2005
:id "1256"}
{:genre "Electronic"
:artistId "41"
:name "Immunity"
:songCount 8
:created "2017-06-28T19:28:24.000Z"
:duration 3604
:artist "Jon Hopkins"
:year 2013
:id "104"
:coverArt "al-104"}
{:genre "IDM / Trip-Hop / Experimental"
:artistId "454"
:name "New Energy"
:songCount 14
:created "2017-11-25T19:44:56.000Z"
:duration 3381
:artist "Four Tet"
:year 2017
:id "800"
:coverArt "al-800"}
{:genre "Electronic"
:artistId "633"
:name "ƒIN (Special Edition)"
:songCount 20
:created "2017-06-28T19:26:41.000Z"
:duration 5822
:artist "John Talabot"
:year 2012
:id "1159"}
{:artistId "412"
:name "A Moot Point"
:songCount 2
:created "2017-06-28T18:32:24.000Z"
:duration 857
:artist "Pional"
:year 2010
:id "719"
:coverArt "al-719"}
{:id "740"
:name "KR Family EP, Pt. 1"
:artist "Peter Power"
:artistId "230"
:coverArt "al-740"
:songCount 3
:duration 1333
:created "2017-06-28T20:31:06.000Z"}
{:genre "House"
:artistId "482"
:name "Busy Days For Fools"
:songCount 11
:created "2017-06-28T19:35:05.000Z"
:duration 3238
:artist "Lee Burton"
:year 2012
:id "866"
:coverArt "al-866"}
{:id "851"
:name "Ry & Frank Wiedemann"
:artist "Ry & Frank Wiedemann"
:artistId "472"
:songCount 1
:duration 485
:created "2017-06-28T18:34:23.000Z"}
{:genre "Electronic"
:artistId "58"
:name "Deep Cuts"
:songCount 17
:created "2017-12-22T08:21:19.000Z"
:duration 3321
:artist "The Knife"
:year 2003
:id "96"
:coverArt "al-96"}
{:artistId "125"
:name "VIA Remixes"
:songCount 1
:created "2017-06-28T18:27:59.000Z"
:duration 362
:artist "Andi Otto"
:year 2017
:id "211"
:coverArt "al-211"}
{:artistId "626"
:name "Hummingbird / Milk & Honey"
:songCount 2
:created "2017-11-23T21:27:00.000Z"
:duration 303
:artist "Luca Nieri"
:year 2016
:id "1150"
:coverArt "al-1150"}]}
["getAlbumList2" {:type "recent", :size 100, :offset 20}]
{:album
[{:id "707"
:name "lassmalaura"
:artist "lassmalaura"
:artistId "406"
:songCount 2
:duration 8241
:created "2017-06-28T18:27:36.000Z"}
{:genre "Idm"
:artistId "597"
:name "Cerulean"
:songCount 12
:created "2017-06-28T18:44:43.000Z"
:duration 2594
:artist "Baths"
:year 2010
:id "1042"
:coverArt "al-1042"}
{:genre "Electronic"
:artistId "64"
:name "Plörre"
:songCount 11
:created "2017-06-28T19:17:41.000Z"
:duration 2495
:artist "Frittenbude"
:year 2010
:id "109"}
{:genre "Electronic"
:artistId "388"
:name "Rongorongo Remixed"
:songCount 11
:created "2017-06-28T19:57:48.000Z"
:duration 3590
:artist "Me Succeeds"
:year 2013
:id "654"
:coverArt "al-654"}
{:genre "Hip-Hop"
:artistId "270"
:name "Über Liebe VLS"
:songCount 1
:created "2017-06-28T18:42:12.000Z"
:duration 169
:artist "Audio88 und Yassin"
:year 2011
:id "469"}
{:genre "Hip-Hop"
:artistId "523"
:name "Über Liebe VLS"
:songCount 1
:created "2017-06-28T20:21:03.000Z"
:duration 275
:artist "Suff Daddy"
:year 2011
:id "940"}
{:id "25"
:name "Nhar, Lee Burton"
:artist "Nhar, Lee Burton"
:artistId "13"
:songCount 1
:duration 482
:created "2017-06-28T18:30:39.000Z"}
{:genre "Indie Dance / Nu Disco"
:artistId "360"
:name "Salto"
:songCount 1
:created "2018-01-02T18:55:06.000Z"
:duration 414
:artist "Martin Heimann"
:year 2016
:id "625"
:coverArt "al-625"}
{:id "273"
:name "[via XLR8R]"
:artist "Lianne La Havas"
:artistId "165"
:coverArt "al-273"
:songCount 1
:duration 307
:created "2017-06-28T19:35:28.000Z"}
{:artistId "249"
:name "Free Downloads"
:songCount 1
:created "2017-06-28T19:10:19.000Z"
:duration 286
:artist "Emancipator feat. Sigur Rós vs. Mobb Deep"
:year 2011
:id "419"
:coverArt "al-419"}
{:genre "Electronic"
:artistId "64"
:name "Und täglich grüßt das Murmeltier"
:songCount 3
:created "2017-12-31T09:03:39.000Z"
:duration 652
:artist "Frittenbude"
:year 2010
:id "107"}
{:genre "Electronic"
:artistId "206"
:name "Start Breaking My Heart"
:songCount 20
:created "2017-11-25T20:47:51.000Z"
:duration 6197
:artist "Caribou"
:year 2006
:id "338"
:coverArt "al-338"}
{:genre "Electronic"
:artistId "206"
:name "The Milk Of Human Kindness"
:songCount 11
:created "2017-11-25T20:41:58.000Z"
:duration 2412
:artist "Caribou"
:year 2005
:id "337"
:coverArt "al-337"}
{:genre "electronic"
:artistId "424"
:name "Permanent Vacation 3"
:songCount 47
:created "2017-06-28T20:29:36.000Z"
:duration 18682
:artist "Various Artists"
:year 2014
:id "747"
:coverArt "al-747"}
{:genre "Electronic"
:artistId "162"
:name "Music Has the Right to Children"
:songCount 18
:created "2017-06-28T18:46:28.000Z"
:duration 4226
:artist "Boards of Canada"
:year 2004
:id "270"
:coverArt "al-270"}
{:id "276"
:name "Nostalgia 77"
:artist "Nostalgia 77"
:artistId "168"
:songCount 1
:duration 277
:created "2017-06-28T18:31:28.000Z"}
{:genre "Electronic"
:artistId "597"
:name "Obsidian"
:songCount 10
:created "2017-06-28T18:43:58.000Z"
:duration 2596
:artist "Baths"
:year 2013
:id "1041"
:coverArt "al-1041"}
{:id "954"
:name "[via XLR8R.com]"
:artist "Burial"
:artistId "530"
:coverArt "al-954"
:songCount 1
:duration 297
:created "2017-06-28T18:49:04.000Z"}
{:genre "Uk Garage"
:artistId "530"
:name "Kindred EP"
:songCount 3
:created "2017-06-28T18:49:36.000Z"
:duration 1839
:artist "Burial"
:year 2012
:id "953"
:coverArt "al-953"}
{:genre "Unknown"
:artistId "430"
:name "Rampue"
:songCount 6
:created "2017-06-28T18:34:00.000Z"
:duration 16433
:artist "Rampue"
:year 2012
:id "753"
:coverArt "al-753"}
{:artistId "96"
:name "www.soundcloud.com/rampue"
:songCount 1
:created "2017-06-28T19:31:41.000Z"
:duration 424
:artist "Klima"
:year 2013
:id "166"
:coverArt "al-166"}
{:artistId "463"
:name "soundcloud.com/rampue"
:songCount 1
:created "2017-06-28T19:35:30.000Z"
:duration 523
:artist "Leonard Cohen"
:year 2014
:id "831"
:coverArt "al-831"}
{:artistId "463"
:name "The Future"
:songCount 9
:created "2018-01-16T11:14:41.000Z"
:duration 3579
:artist "Leonard Cohen"
:year 2012
:id "821"
:coverArt "al-821"}
{:id "1001"
:name "Lonski & Classen"
:artist "Lonski & Classen"
:artistId "566"
:coverArt "al-1001"
:songCount 1
:duration 248
:created "2017-06-28T20:07:24.000Z"}
{:genre "Podcast"
:artistId "199"
:name "Waterkant Souvenirs Podcast"
:songCount 1
:created "2017-06-28T20:00:25.000Z"
:duration 5341
:artist "Mira"
:year 2012
:id "325"}
{:id "324"
:name "Familiar Forest Festival 2012"
:artist "Mira"
:artistId "199"
:songCount 1
:duration 6695
:created "2017-06-28T20:00:35.000Z"
:year 2012}
{:genre "Ambient"
:artistId "188"
:name "We're New Here"
:songCount 13
:created "2017-06-28T19:18:06.000Z"
:duration 2135
:artist "Gil Scott-Heron and Jamie xx"
:year 2011
:id "310"
:coverArt "al-310"}
{:genre "Gothic"
:artistId "403"
:name "Galore : The Singles 87 - 97"
:songCount 18
:created "2017-11-06T20:51:35.000Z"
:duration 4369
:artist "The Cure"
:year 1997
:id "684"
:coverArt "al-684"}
{:genre "IDM"
:artistId "333"
:name "Remixes Compiled"
:songCount 12
:created "2017-06-28T20:22:43.000Z"
:duration 3233
:artist "Telefon Tel Aviv"
:year 2007
:id "723"
:coverArt "al-723"}
{:artistId "230"
:name "Ufordian Edits"
:songCount 1
:created "2018-02-19T22:55:59.000Z"
:duration 331
:artist "Peter Power"
:year 2015
:id "393"
:coverArt "al-393"}
{:genre "Other"
:artistId "528"
:name "Dream Runner EP"
:songCount 6
:created "2017-06-28T18:39:24.000Z"
:duration 899
:artist "Annu"
:year 2009
:id "948"}
{:genre "Techno"
:artistId "75"
:name "Unknown"
:songCount 8
:created "2017-06-28T20:17:47.000Z"
:duration 2841
:artist "Saschienne"
:year 2012
:id "124"
:coverArt "al-124"}
{:genre "Nintendocore"
:artistId "306"
:name "Nach der Kippe Pogo!?"
:songCount 11
:created "2017-06-28T18:40:09.000Z"
:duration 1508
:artist "Antitainment"
:year 2007
:id "532"
:coverArt "al-532"}
{:genre "Electronic"
:artistId "206"
:name "Swim"
:songCount 9
:created "2017-11-25T20:06:58.000Z"
:duration 2596
:artist "Caribou"
:year 2010
:id "339"
:coverArt "al-339"}
{:genre "trance"
:artistId "117"
:name "Nymphs III"
:songCount 2
:created "2017-06-28T20:04:17.000Z"
:duration 1080
:artist "Nicolas Jaar"
:year 2015
:id "201"}
{:genre "Gothic"
:artistId "403"
:name "Wish"
:songCount 12
:created "2018-01-02T14:29:04.000Z"
:duration 3976
:artist "The Cure"
:year 1992
:id "685"
:coverArt "al-685"}
{:genre "Gothic"
:artistId "403"
:name "Show (Live)"
:songCount 18
:created "2018-01-02T14:30:03.000Z"
:duration 5316
:artist "The Cure"
:id "698"
:coverArt "al-698"}
{:genre "Gothic"
:artistId "403"
:name "Mixed Up"
:songCount 11
:created "2018-01-02T14:29:43.000Z"
:duration 4260
:artist "The Cure"
:year 1990
:id "692"
:coverArt "al-692"}
{:id "1257"
:name "Saal"
:artist "Serengeti"
:artistId "678"
:songCount 13
:duration 2437
:created "2018-09-20T17:02:50.000Z"
:year 2013}
{:genre "Hip Hop"
:artistId "204"
:name "Leaders Of The Brew School"
:songCount 16
:created "2017-06-28T18:45:16.000Z"
:duration 2214
:artist "Betty Ford Boys"
:year 2013
:id "331"}
{:id "202"
:name "Sirens"
:artist "Nicolas Jaar"
:artistId "117"
:songCount 7
:duration 2841
:created "2017-06-28T20:04:34.000Z"
:year 2016}
{:genre "techno"
:artistId "682"
:name "Piñata"
:songCount 21
:created "2018-10-09T15:30:48.000Z"
:duration 3963
:artist "Freddie Gibbs & Madlib"
:year 2014
:id "1261"
:coverArt "al-1261"}
{:genre "electronic"
:artistId "681"
:name "We Must Become the Pitiless Censors of Ourselves"
:songCount 11
:created "2018-10-08T17:21:47.000Z"
:duration 1916
:artist "John Maus"
:year 2011
:id "1260"
:coverArt "al-1260"}
{:artistId "514"
:name "Time"
:songCount 1
:created "2017-07-24T13:19:05.000Z"
:duration 247
:artist "Lokke"
:year 2015
:id "923"
:coverArt "al-923"}
{:genre "jazz"
:artistId "680"
:name "These Things Take Time"
:songCount 13
:created "2018-10-08T17:21:09.000Z"
:duration 3013
:artist "Molly Nilsson"
:year 2008
:id "1259"
:coverArt "al-1259"}
{:artistId "463"
:name "Songs of Love and Hate"
:songCount 4
:created "2018-01-16T11:13:54.000Z"
:duration 1273
:artist "Leonard Cohen"
:year 1970
:id "829"
:coverArt "al-829"}
{:artistId "187"
:name "Vacation EP"
:songCount 7
:created "2017-06-28T20:19:17.000Z"
:duration 1902
:artist "Shlohmo"
:year 2012
:id "305"
:coverArt "al-305"}
{:genre "Electronic"
:artistId "187"
:name "Vacation (Remixes)"
:songCount 6
:created "2017-06-28T20:19:23.000Z"
:duration 3559
:artist "Shlohmo"
:year 2012
:id "303"}
{:genre "WeDidIt"
:artistId "302"
:name "Salvation Remixes"
:songCount 3
:created "2017-06-28T20:14:04.000Z"
:duration 739
:artist "Purple"
:year 2013
:id "525"
:coverArt "al-525"}
{:genre "Alternative Rock / Indie Rock"
:artistId "16"
:name "Sleeping With Ghosts"
:songCount 22
:created "2017-11-06T20:39:23.000Z"
:duration 5232
:artist "Placebo"
:year 2003
:id "38"
:coverArt "al-38"}
{:genre "Funk/Hip-Hop"
:artistId "198"
:name "Looking For the Perfect Beat"
:songCount 13
:created "2017-06-28T18:36:47.000Z"
:duration 4521
:artist "Afrika Bambaataa"
:year 2001
:id "323"
:coverArt "al-323"}
{:artistId "103"
:name "edits & cuts"
:songCount 14
:created "2017-06-28T19:37:20.000Z"
:duration 3550
:artist "M.Rux"
:year 2014
:id "182"
:coverArt "al-182"}
{:genre "Techno"
:artistId "117"
:name "Marks / Angles"
:songCount 3
:created "2017-06-28T20:03:46.000Z"
:duration 1000
:artist "Nicolas Jaar"
:year 2010
:id "196"}
{:genre "Electronic"
:artistId "73"
:name "Don't Break My Love EP"
:songCount 2
:created "2017-06-28T20:05:16.000Z"
:duration 673
:artist "Nicolas Jaar & Theatre Roosevelt"
:year 2011
:id "122"
:coverArt "al-122"}
{:genre "Electronic"
:artistId "233"
:name "Mother Earth's Plantasia"
:songCount 10
:created "2018-05-28T21:31:55.000Z"
:duration 1837
:artist "Mort Garson"
:year 1976
:id "397"
:coverArt "al-397"}
{:genre "Psychedelic Rock"
:artistId "424"
:name
"Nuggets: Original Artyfacts From the First Psychedelic Era, 1965-1968"
:songCount 27
:created "2018-02-21T12:01:38.000Z"
:duration 4614
:artist "Various Artists"
:year 1998
:id "743"
:coverArt "al-743"}
{:genre "Psychedelic Rock"
:artistId "37"
:name "Phluph"
:songCount 10
:created "2018-03-05T16:31:46.000Z"
:duration 2182
:artist "Phluph"
:year 2001
:id "64"
:coverArt "al-64"}
{:genre "Rock"
:artistId "305"
:name "The Best of Talking Heads (Remastered)"
:songCount 18
:created "2018-01-22T11:00:50.000Z"
:duration 4618
:artist "Talking Heads"
:year 2004
:id "529"
:coverArt "al-529"}
{:genre "Electronic"
:artistId "50"
:name "Divide And Exit"
:songCount 14
:created "2018-01-21T14:47:59.000Z"
:duration 2417
:artist "Sleaford Mods"
:year 2014
:id "86"
:coverArt "al-86"}
{:genre "electronic"
:artistId "349"
:name "Fade to Grey: The Best of Visage"
:songCount 12
:created "2018-08-29T13:01:26.000Z"
:duration 2757
:artist "Visage"
:year 1993
:id "1234"}
{:genre "electronic"
:artistId "334"
:name "Hounds of Love"
:songCount 18
:created "2018-08-29T13:00:32.000Z"
:duration 4419
:artist "Kate Bush"
:year 1997
:id "1215"
:coverArt "al-1215"}
{:genre "Psychedelic"
:artistId "424"
:name
"Forge Your Own Chains: Heavy Psychedelic Ballads and Dirges 1968-1974"
:songCount 15
:created "2018-01-27T12:23:47.000Z"
:duration 4241
:artist "Various Artists"
:id "742"
:coverArt "al-742"}
{:genre "Live Archive"
:artistId "141"
:name "2017-08-28 Rough Trade NYC, Brooklyn, NY"
:songCount 4
:created "2018-01-19T23:07:20.000Z"
:duration 2483
:artist "Sunburned Hand of the Man"
:year 2017
:id "242"
:coverArt "al-242"}
{:genre "electronic"
:artistId "236"
:name "Knock Knock"
:songCount 16
:created "2018-06-09T23:04:20.000Z"
:duration 4710
:artist "DJ Koze"
:year 2018
:id "401"
:coverArt "al-401"}
{:genre "Nintendocore"
:artistId "306"
:name "Gymnasiastik mit Antitainment"
:songCount 6
:created "2017-06-28T18:39:58.000Z"
:duration 795
:artist "Antitainment"
:year 2004
:id "533"}
{:genre "Electronic"
:artistId "584"
:name "Amok"
:songCount 9
:created "2017-06-28T18:41:30.000Z"
:duration 2681
:artist "Atoms for Peace"
:year 2013
:id "1023"
:coverArt "al-1023"}
{:artistId "26"
:name "TamponTango I"
:songCount 3
:created "2017-11-23T23:18:43.000Z"
:duration 851
:artist "Diederdas"
:year 2017
:id "51"
:coverArt "al-51"}
{:genre "electronic"
:artistId "679"
:name "Heaven and Earth"
:songCount 16
:created "2018-09-20T22:07:23.000Z"
:duration 8672
:artist "Kamasi Washington"
:year 2018
:id "1258"
:coverArt "al-1258"}
{:genre "rhy"
:artistId "661"
:name "Yawn Zen"
:songCount 12
:created "2018-08-21T21:36:43.000Z"
:duration 1883
:artist "Mndsgn"
:year 2014
:id "1200"
:coverArt "al-1200"}
{:genre "Rap"
:artistId "677"
:name "Elephant Eyelash"
:songCount 12
:created "2018-09-20T17:02:08.000Z"
:duration 2478
:artist "Why?"
:year 2005
:id "1256"}
{:genre "Electronic"
:artistId "41"
:name "Immunity"
:songCount 8
:created "2017-06-28T19:28:24.000Z"
:duration 3604
:artist "Jon Hopkins"
:year 2013
:id "104"
:coverArt "al-104"}
{:genre "IDM / Trip-Hop / Experimental"
:artistId "454"
:name "New Energy"
:songCount 14
:created "2017-11-25T19:44:56.000Z"
:duration 3381
:artist "Four Tet"
:year 2017
:id "800"
:coverArt "al-800"}
{:genre "Electronic"
:artistId "633"
:name "ƒIN (Special Edition)"
:songCount 20
:created "2017-06-28T19:26:41.000Z"
:duration 5822
:artist "John Talabot"
:year 2012
:id "1159"}
{:artistId "412"
:name "A Moot Point"
:songCount 2
:created "2017-06-28T18:32:24.000Z"
:duration 857
:artist "Pional"
:year 2010
:id "719"
:coverArt "al-719"}
{:id "740"
:name "KR Family EP, Pt. 1"
:artist "Peter Power"
:artistId "230"
:coverArt "al-740"
:songCount 3
:duration 1333
:created "2017-06-28T20:31:06.000Z"}
{:genre "House"
:artistId "482"
:name "Busy Days For Fools"
:songCount 11
:created "2017-06-28T19:35:05.000Z"
:duration 3238
:artist "Lee Burton"
:year 2012
:id "866"
:coverArt "al-866"}
{:id "851"
:name "Ry & Frank Wiedemann"
:artist "Ry & Frank Wiedemann"
:artistId "472"
:songCount 1
:duration 485
:created "2017-06-28T18:34:23.000Z"}
{:genre "Electronic"
:artistId "58"
:name "Deep Cuts"
:songCount 17
:created "2017-12-22T08:21:19.000Z"
:duration 3321
:artist "The Knife"
:year 2003
:id "96"
:coverArt "al-96"}
{:artistId "125"
:name "VIA Remixes"
:songCount 1
:created "2017-06-28T18:27:59.000Z"
:duration 362
:artist "Andi Otto"
:year 2017
:id "211"
:coverArt "al-211"}
{:artistId "626"
:name "Hummingbird / Milk & Honey"
:songCount 2
:created "2017-11-23T21:27:00.000Z"
:duration 303
:artist "Luca Nieri"
:year 2016
:id "1150"
:coverArt "al-1150"}
{:genre "Electronic"
:artistId "434"
:name "Mercy Street"
:songCount 2
:created "2017-12-22T08:18:55.000Z"
:duration 568
:artist "Fever Ray"
:year 2010
:id "762"
:coverArt "al-762"}
{:artistId "43"
:name "2012-2017"
:songCount 11
:created "2018-03-06T15:51:42.000Z"
:duration 3998
:artist "A.A.L."
:year 2018
:id "73"
:coverArt "al-73"}
{:genre "New Wave Music"
:artistId "337"
:name "Liaisons dangereuses"
:songCount 10
:created "2018-08-29T13:00:42.000Z"
:duration 2392
:artist "Liaisons Dangereuses"
:year 1985
:id "1216"}
{:genre "Electro"
:artistId "434"
:name "Fever Ray"
:songCount 12
:created "2017-12-22T08:19:04.000Z"
:duration 3380
:artist "Fever Ray"
:year 2009
:id "765"}
{:id "621"
:name "RSS Disco"
:artist "RSS Disco"
:artistId "358"
:songCount 2
:duration 841
:created "2018-04-25T10:11:14.000Z"}
{:genre "House"
:artistId "358"
:name "Very"
:songCount 3
:created "2017-06-28T20:17:12.000Z"
:duration 1339
:artist "RSS Disco"
:year 2012
:id "624"
:coverArt "al-624"}
{:genre "Disco"
:artistId "619"
:name "Sir John"
:songCount 1
:created "2018-03-12T20:21:14.000Z"
:duration 419
:artist "White Elephant"
:year 2011
:id "1134"
:coverArt "al-1134"}
{:genre "House"
:artistId "434"
:name "Sidetracked"
:songCount 1
:created "2017-06-28T18:20:10.000Z"
:duration 270
:artist "Fever Ray"
:year 2012
:id "920"
:coverArt "al-920"}
{:genre "Electronic"
:artistId "58"
:name "Hannah Med H Soundtrack"
:songCount 16
:created "2017-12-22T08:21:33.000Z"
:duration 2307
:artist "The Knife"
:year 2003
:id "97"
:coverArt "al-97"}
{:genre "Alternative Rock"
:artistId "478"
:name "6 Feet Beneath the Moon"
:songCount 14
:created "2017-09-08T17:37:16.000Z"
:duration 3136
:artist "King Krule"
:year 2013
:id "859"
:coverArt "al-859"}
{:artistId "103"
:name "Joga / Crazy Junker 7\""
:songCount 2
:created "2017-06-28T19:37:31.000Z"
:duration 442
:artist "M.Rux"
:year 2014
:id "177"
:coverArt "al-177"}
{:genre "House"
:artistId "267"
:name "Carat EP"
:songCount 5
:created "2017-06-28T20:08:58.000Z"
:duration 2080
:artist "Nu"
:year 2013
:id "467"}
{:artistId "419"
:name "On Claws (reissue)"
:songCount 1
:created "2017-07-24T13:48:20.000Z"
:duration 176
:artist "I am Oak"
:year 2013
:id "733"
:coverArt "al-733"}
{:genre "Indie Dance / Nu Disco"
:artistId "214"
:name "Thinking Allowed"
:songCount 1
:created "2018-01-02T18:54:41.000Z"
:duration 430
:artist "Tornado Wallace"
:year 2013
:id "354"
:coverArt "al-354"}
{:artistId "629"
:name "V.I.C.T.O.R"
:songCount 1
:created "2017-06-28T18:25:45.000Z"
:duration 279
:artist "Golden Bug"
:year 2016
:id "1153"
:coverArt "al-1153"}
{:genre "Avant-Garde"
:artistId "256"
:name "Ende Neu"
:songCount 9
:created "2017-06-28T19:09:43.000Z"
:duration 2693
:artist "Einstürzende Neubauten"
:year 1998
:id "426"
:coverArt "al-426"}
{:genre "House"
:artistId "245"
:name "Visibles"
:songCount 4
:created "2017-06-28T18:57:22.000Z"
:duration 1556
:artist "Constantijn Lange"
:year 2014
:id "413"
:coverArt "al-413"}
{:artistId "245"
:name "Orange Atlas"
:songCount 5
:created "2017-06-28T18:57:08.000Z"
:duration 2171
:artist "Constantijn Lange"
:year 2013
:id "412"
:coverArt "al-412"}
{:artistId "146"
:name "Mapping The Futures Gone By"
:songCount 7
:created "2017-06-28T18:57:28.000Z"
:duration 1536
:artist "CONTACT FIELD ORCHESTRA"
:year 2015
:id "247"
:coverArt "al-247"}
{:genre "electronic"
:artistId "253"
:name "It's Album Time"
:songCount 12
:created "2018-09-04T14:25:00.000Z"
:duration 3555
:artist "Todd Terje"
:year 2014
:id "1254"
:coverArt "al-1254"}]}
["getAlbumList2" {:type "recent", :size 100, :offset 40}]
{:album
[{:artistId "96"
:name "www.soundcloud.com/rampue"
:songCount 1
:created "2017-06-28T19:31:41.000Z"
:duration 424
:artist "Klima"
:year 2013
:id "166"
:coverArt "al-166"}
{:artistId "463"
:name "soundcloud.com/rampue"
:songCount 1
:created "2017-06-28T19:35:30.000Z"
:duration 523
:artist "Leonard Cohen"
:year 2014
:id "831"
:coverArt "al-831"}
{:artistId "463"
:name "The Future"
:songCount 9
:created "2018-01-16T11:14:41.000Z"
:duration 3579
:artist "Leonard Cohen"
:year 2012
:id "821"
:coverArt "al-821"}
{:id "1001"
:name "Lonski & Classen"
:artist "Lonski & Classen"
:artistId "566"
:coverArt "al-1001"
:songCount 1
:duration 248
:created "2017-06-28T20:07:24.000Z"}
{:genre "Podcast"
:artistId "199"
:name "Waterkant Souvenirs Podcast"
:songCount 1
:created "2017-06-28T20:00:25.000Z"
:duration 5341
:artist "Mira"
:year 2012
:id "325"}
{:id "324"
:name "Familiar Forest Festival 2012"
:artist "Mira"
:artistId "199"
:songCount 1
:duration 6695
:created "2017-06-28T20:00:35.000Z"
:year 2012}
{:genre "Ambient"
:artistId "188"
:name "We're New Here"
:songCount 13
:created "2017-06-28T19:18:06.000Z"
:duration 2135
:artist "Gil Scott-Heron and Jamie xx"
:year 2011
:id "310"
:coverArt "al-310"}
{:genre "Gothic"
:artistId "403"
:name "Galore : The Singles 87 - 97"
:songCount 18
:created "2017-11-06T20:51:35.000Z"
:duration 4369
:artist "The Cure"
:year 1997
:id "684"
:coverArt "al-684"}
{:genre "IDM"
:artistId "333"
:name "Remixes Compiled"
:songCount 12
:created "2017-06-28T20:22:43.000Z"
:duration 3233
:artist "Telefon Tel Aviv"
:year 2007
:id "723"
:coverArt "al-723"}
{:artistId "230"
:name "Ufordian Edits"
:songCount 1
:created "2018-02-19T22:55:59.000Z"
:duration 331
:artist "Peter Power"
:year 2015
:id "393"
:coverArt "al-393"}
{:genre "Other"
:artistId "528"
:name "Dream Runner EP"
:songCount 6
:created "2017-06-28T18:39:24.000Z"
:duration 899
:artist "Annu"
:year 2009
:id "948"}
{:genre "Techno"
:artistId "75"
:name "Unknown"
:songCount 8
:created "2017-06-28T20:17:47.000Z"
:duration 2841
:artist "Saschienne"
:year 2012
:id "124"
:coverArt "al-124"}
{:genre "Nintendocore"
:artistId "306"
:name "Nach der Kippe Pogo!?"
:songCount 11
:created "2017-06-28T18:40:09.000Z"
:duration 1508
:artist "Antitainment"
:year 2007
:id "532"
:coverArt "al-532"}
{:genre "Electronic"
:artistId "206"
:name "Swim"
:songCount 9
:created "2017-11-25T20:06:58.000Z"
:duration 2596
:artist "Caribou"
:year 2010
:id "339"
:coverArt "al-339"}
{:genre "trance"
:artistId "117"
:name "Nymphs III"
:songCount 2
:created "2017-06-28T20:04:17.000Z"
:duration 1080
:artist "Nicolas Jaar"
:year 2015
:id "201"}
{:genre "Gothic"
:artistId "403"
:name "Wish"
:songCount 12
:created "2018-01-02T14:29:04.000Z"
:duration 3976
:artist "The Cure"
:year 1992
:id "685"
:coverArt "al-685"}
{:genre "Gothic"
:artistId "403"
:name "Show (Live)"
:songCount 18
:created "2018-01-02T14:30:03.000Z"
:duration 5316
:artist "The Cure"
:id "698"
:coverArt "al-698"}
{:genre "Gothic"
:artistId "403"
:name "Mixed Up"
:songCount 11
:created "2018-01-02T14:29:43.000Z"
:duration 4260
:artist "The Cure"
:year 1990
:id "692"
:coverArt "al-692"}
{:id "1257"
:name "Saal"
:artist "Serengeti"
:artistId "678"
:songCount 13
:duration 2437
:created "2018-09-20T17:02:50.000Z"
:year 2013}
{:genre "Hip Hop"
:artistId "204"
:name "Leaders Of The Brew School"
:songCount 16
:created "2017-06-28T18:45:16.000Z"
:duration 2214
:artist "Betty Ford Boys"
:year 2013
:id "331"}
{:id "202"
:name "Sirens"
:artist "Nicolas Jaar"
:artistId "117"
:songCount 7
:duration 2841
:created "2017-06-28T20:04:34.000Z"
:year 2016}
{:genre "techno"
:artistId "682"
:name "Piñata"
:songCount 21
:created "2018-10-09T15:30:48.000Z"
:duration 3963
:artist "Freddie Gibbs & Madlib"
:year 2014
:id "1261"
:coverArt "al-1261"}
{:genre "electronic"
:artistId "681"
:name "We Must Become the Pitiless Censors of Ourselves"
:songCount 11
:created "2018-10-08T17:21:47.000Z"
:duration 1916
:artist "John Maus"
:year 2011
:id "1260"
:coverArt "al-1260"}
{:artistId "514"
:name "Time"
:songCount 1
:created "2017-07-24T13:19:05.000Z"
:duration 247
:artist "Lokke"
:year 2015
:id "923"
:coverArt "al-923"}
{:genre "jazz"
:artistId "680"
:name "These Things Take Time"
:songCount 13
:created "2018-10-08T17:21:09.000Z"
:duration 3013
:artist "Molly Nilsson"
:year 2008
:id "1259"
:coverArt "al-1259"}
{:artistId "463"
:name "Songs of Love and Hate"
:songCount 4
:created "2018-01-16T11:13:54.000Z"
:duration 1273
:artist "Leonard Cohen"
:year 1970
:id "829"
:coverArt "al-829"}
{:artistId "187"
:name "Vacation EP"
:songCount 7
:created "2017-06-28T20:19:17.000Z"
:duration 1902
:artist "Shlohmo"
:year 2012
:id "305"
:coverArt "al-305"}
{:genre "Electronic"
:artistId "187"
:name "Vacation (Remixes)"
:songCount 6
:created "2017-06-28T20:19:23.000Z"
:duration 3559
:artist "Shlohmo"
:year 2012
:id "303"}
{:genre "WeDidIt"
:artistId "302"
:name "Salvation Remixes"
:songCount 3
:created "2017-06-28T20:14:04.000Z"
:duration 739
:artist "Purple"
:year 2013
:id "525"
:coverArt "al-525"}
{:genre "Alternative Rock / Indie Rock"
:artistId "16"
:name "Sleeping With Ghosts"
:songCount 22
:created "2017-11-06T20:39:23.000Z"
:duration 5232
:artist "Placebo"
:year 2003
:id "38"
:coverArt "al-38"}
{:genre "Funk/Hip-Hop"
:artistId "198"
:name "Looking For the Perfect Beat"
:songCount 13
:created "2017-06-28T18:36:47.000Z"
:duration 4521
:artist "Afrika Bambaataa"
:year 2001
:id "323"
:coverArt "al-323"}
{:artistId "103"
:name "edits & cuts"
:songCount 14
:created "2017-06-28T19:37:20.000Z"
:duration 3550
:artist "M.Rux"
:year 2014
:id "182"
:coverArt "al-182"}
{:genre "Techno"
:artistId "117"
:name "Marks / Angles"
:songCount 3
:created "2017-06-28T20:03:46.000Z"
:duration 1000
:artist "Nicolas Jaar"
:year 2010
:id "196"}
{:genre "Electronic"
:artistId "73"
:name "Don't Break My Love EP"
:songCount 2
:created "2017-06-28T20:05:16.000Z"
:duration 673
:artist "Nicolas Jaar & Theatre Roosevelt"
:year 2011
:id "122"
:coverArt "al-122"}
{:genre "Electronic"
:artistId "233"
:name "Mother Earth's Plantasia"
:songCount 10
:created "2018-05-28T21:31:55.000Z"
:duration 1837
:artist "Mort Garson"
:year 1976
:id "397"
:coverArt "al-397"}
{:genre "Psychedelic Rock"
:artistId "424"
:name
"Nuggets: Original Artyfacts From the First Psychedelic Era, 1965-1968"
:songCount 27
:created "2018-02-21T12:01:38.000Z"
:duration 4614
:artist "Various Artists"
:year 1998
:id "743"
:coverArt "al-743"}
{:genre "Psychedelic Rock"
:artistId "37"
:name "Phluph"
:songCount 10
:created "2018-03-05T16:31:46.000Z"
:duration 2182
:artist "Phluph"
:year 2001
:id "64"
:coverArt "al-64"}
{:genre "Rock"
:artistId "305"
:name "The Best of Talking Heads (Remastered)"
:songCount 18
:created "2018-01-22T11:00:50.000Z"
:duration 4618
:artist "Talking Heads"
:year 2004
:id "529"
:coverArt "al-529"}
{:genre "Electronic"
:artistId "50"
:name "Divide And Exit"
:songCount 14
:created "2018-01-21T14:47:59.000Z"
:duration 2417
:artist "Sleaford Mods"
:year 2014
:id "86"
:coverArt "al-86"}
{:genre "electronic"
:artistId "349"
:name "Fade to Grey: The Best of Visage"
:songCount 12
:created "2018-08-29T13:01:26.000Z"
:duration 2757
:artist "Visage"
:year 1993
:id "1234"}
{:genre "electronic"
:artistId "334"
:name "Hounds of Love"
:songCount 18
:created "2018-08-29T13:00:32.000Z"
:duration 4419
:artist "Kate Bush"
:year 1997
:id "1215"
:coverArt "al-1215"}
{:genre "Psychedelic"
:artistId "424"
:name
"Forge Your Own Chains: Heavy Psychedelic Ballads and Dirges 1968-1974"
:songCount 15
:created "2018-01-27T12:23:47.000Z"
:duration 4241
:artist "Various Artists"
:id "742"
:coverArt "al-742"}
{:genre "Live Archive"
:artistId "141"
:name "2017-08-28 Rough Trade NYC, Brooklyn, NY"
:songCount 4
:created "2018-01-19T23:07:20.000Z"
:duration 2483
:artist "Sunburned Hand of the Man"
:year 2017
:id "242"
:coverArt "al-242"}
{:genre "electronic"
:artistId "236"
:name "Knock Knock"
:songCount 16
:created "2018-06-09T23:04:20.000Z"
:duration 4710
:artist "DJ Koze"
:year 2018
:id "401"
:coverArt "al-401"}
{:genre "Nintendocore"
:artistId "306"
:name "Gymnasiastik mit Antitainment"
:songCount 6
:created "2017-06-28T18:39:58.000Z"
:duration 795
:artist "Antitainment"
:year 2004
:id "533"}
{:genre "Electronic"
:artistId "584"
:name "Amok"
:songCount 9
:created "2017-06-28T18:41:30.000Z"
:duration 2681
:artist "Atoms for Peace"
:year 2013
:id "1023"
:coverArt "al-1023"}
{:artistId "26"
:name "TamponTango I"
:songCount 3
:created "2017-11-23T23:18:43.000Z"
:duration 851
:artist "Diederdas"
:year 2017
:id "51"
:coverArt "al-51"}
{:genre "electronic"
:artistId "679"
:name "Heaven and Earth"
:songCount 16
:created "2018-09-20T22:07:23.000Z"
:duration 8672
:artist "Kamasi Washington"
:year 2018
:id "1258"
:coverArt "al-1258"}
{:genre "rhy"
:artistId "661"
:name "Yawn Zen"
:songCount 12
:created "2018-08-21T21:36:43.000Z"
:duration 1883
:artist "Mndsgn"
:year 2014
:id "1200"
:coverArt "al-1200"}
{:genre "Rap"
:artistId "677"
:name "Elephant Eyelash"
:songCount 12
:created "2018-09-20T17:02:08.000Z"
:duration 2478
:artist "Why?"
:year 2005
:id "1256"}
{:genre "Electronic"
:artistId "41"
:name "Immunity"
:songCount 8
:created "2017-06-28T19:28:24.000Z"
:duration 3604
:artist "Jon Hopkins"
:year 2013
:id "104"
:coverArt "al-104"}
{:genre "IDM / Trip-Hop / Experimental"
:artistId "454"
:name "New Energy"
:songCount 14
:created "2017-11-25T19:44:56.000Z"
:duration 3381
:artist "Four Tet"
:year 2017
:id "800"
:coverArt "al-800"}
{:genre "Electronic"
:artistId "633"
:name "ƒIN (Special Edition)"
:songCount 20
:created "2017-06-28T19:26:41.000Z"
:duration 5822
:artist "John Talabot"
:year 2012
:id "1159"}
{:artistId "412"
:name "A Moot Point"
:songCount 2
:created "2017-06-28T18:32:24.000Z"
:duration 857
:artist "Pional"
:year 2010
:id "719"
:coverArt "al-719"}
{:id "740"
:name "KR Family EP, Pt. 1"
:artist "Peter Power"
:artistId "230"
:coverArt "al-740"
:songCount 3
:duration 1333
:created "2017-06-28T20:31:06.000Z"}
{:genre "House"
:artistId "482"
:name "Busy Days For Fools"
:songCount 11
:created "2017-06-28T19:35:05.000Z"
:duration 3238
:artist "Lee Burton"
:year 2012
:id "866"
:coverArt "al-866"}
{:id "851"
:name "Ry & Frank Wiedemann"
:artist "Ry & Frank Wiedemann"
:artistId "472"
:songCount 1
:duration 485
:created "2017-06-28T18:34:23.000Z"}
{:genre "Electronic"
:artistId "58"
:name "Deep Cuts"
:songCount 17
:created "2017-12-22T08:21:19.000Z"
:duration 3321
:artist "The Knife"
:year 2003
:id "96"
:coverArt "al-96"}
{:artistId "125"
:name "VIA Remixes"
:songCount 1
:created "2017-06-28T18:27:59.000Z"
:duration 362
:artist "Andi Otto"
:year 2017
:id "211"
:coverArt "al-211"}
{:artistId "626"
:name "Hummingbird / Milk & Honey"
:songCount 2
:created "2017-11-23T21:27:00.000Z"
:duration 303
:artist "Luca Nieri"
:year 2016
:id "1150"
:coverArt "al-1150"}
{:genre "Electronic"
:artistId "434"
:name "Mercy Street"
:songCount 2
:created "2017-12-22T08:18:55.000Z"
:duration 568
:artist "Fever Ray"
:year 2010
:id "762"
:coverArt "al-762"}
{:artistId "43"
:name "2012-2017"
:songCount 11
:created "2018-03-06T15:51:42.000Z"
:duration 3998
:artist "A.A.L."
:year 2018
:id "73"
:coverArt "al-73"}
{:genre "New Wave Music"
:artistId "337"
:name "Liaisons dangereuses"
:songCount 10
:created "2018-08-29T13:00:42.000Z"
:duration 2392
:artist "Liaisons Dangereuses"
:year 1985
:id "1216"}
{:genre "Electro"
:artistId "434"
:name "Fever Ray"
:songCount 12
:created "2017-12-22T08:19:04.000Z"
:duration 3380
:artist "Fever Ray"
:year 2009
:id "765"}
{:id "621"
:name "RSS Disco"
:artist "RSS Disco"
:artistId "358"
:songCount 2
:duration 841
:created "2018-04-25T10:11:14.000Z"}
{:genre "House"
:artistId "358"
:name "Very"
:songCount 3
:created "2017-06-28T20:17:12.000Z"
:duration 1339
:artist "RSS Disco"
:year 2012
:id "624"
:coverArt "al-624"}
{:genre "Disco"
:artistId "619"
:name "Sir John"
:songCount 1
:created "2018-03-12T20:21:14.000Z"
:duration 419
:artist "White Elephant"
:year 2011
:id "1134"
:coverArt "al-1134"}
{:genre "House"
:artistId "434"
:name "Sidetracked"
:songCount 1
:created "2017-06-28T18:20:10.000Z"
:duration 270
:artist "Fever Ray"
:year 2012
:id "920"
:coverArt "al-920"}
{:genre "Electronic"
:artistId "58"
:name "Hannah Med H Soundtrack"
:songCount 16
:created "2017-12-22T08:21:33.000Z"
:duration 2307
:artist "The Knife"
:year 2003
:id "97"
:coverArt "al-97"}
{:genre "Alternative Rock"
:artistId "478"
:name "6 Feet Beneath the Moon"
:songCount 14
:created "2017-09-08T17:37:16.000Z"
:duration 3136
:artist "King Krule"
:year 2013
:id "859"
:coverArt "al-859"}
{:artistId "103"
:name "Joga / Crazy Junker 7\""
:songCount 2
:created "2017-06-28T19:37:31.000Z"
:duration 442
:artist "M.Rux"
:year 2014
:id "177"
:coverArt "al-177"}
{:genre "House"
:artistId "267"
:name "Carat EP"
:songCount 5
:created "2017-06-28T20:08:58.000Z"
:duration 2080
:artist "Nu"
:year 2013
:id "467"}
{:artistId "419"
:name "On Claws (reissue)"
:songCount 1
:created "2017-07-24T13:48:20.000Z"
:duration 176
:artist "I am Oak"
:year 2013
:id "733"
:coverArt "al-733"}
{:genre "Indie Dance / Nu Disco"
:artistId "214"
:name "Thinking Allowed"
:songCount 1
:created "2018-01-02T18:54:41.000Z"
:duration 430
:artist "Tornado Wallace"
:year 2013
:id "354"
:coverArt "al-354"}
{:artistId "629"
:name "V.I.C.T.O.R"
:songCount 1
:created "2017-06-28T18:25:45.000Z"
:duration 279
:artist "Golden Bug"
:year 2016
:id "1153"
:coverArt "al-1153"}
{:genre "Avant-Garde"
:artistId "256"
:name "Ende Neu"
:songCount 9
:created "2017-06-28T19:09:43.000Z"
:duration 2693
:artist "Einstürzende Neubauten"
:year 1998
:id "426"
:coverArt "al-426"}
{:genre "House"
:artistId "245"
:name "Visibles"
:songCount 4
:created "2017-06-28T18:57:22.000Z"
:duration 1556
:artist "Constantijn Lange"
:year 2014
:id "413"
:coverArt "al-413"}
{:artistId "245"
:name "Orange Atlas"
:songCount 5
:created "2017-06-28T18:57:08.000Z"
:duration 2171
:artist "Constantijn Lange"
:year 2013
:id "412"
:coverArt "al-412"}
{:artistId "146"
:name "Mapping The Futures Gone By"
:songCount 7
:created "2017-06-28T18:57:28.000Z"
:duration 1536
:artist "CONTACT FIELD ORCHESTRA"
:year 2015
:id "247"
:coverArt "al-247"}
{:genre "electronic"
:artistId "253"
:name "It's Album Time"
:songCount 12
:created "2018-09-04T14:25:00.000Z"
:duration 3555
:artist "Todd Terje"
:year 2014
:id "1254"
:coverArt "al-1254"}
{:genre "electronic"
:artistId "676"
:name "The Big Cover-Up"
:songCount 8
:created "2018-09-04T14:44:38.000Z"
:duration 3130
:artist "Todd Terje & The Olsens"
:year 2016
:id "1255"
:coverArt "al-1255"}
{:genre "electronic"
:artistId "424"
:name "I-Robots: Italo Electro Disco Underground Classics"
:songCount 13
:created "2018-08-29T13:01:11.000Z"
:duration 4797
:artist "Various Artists"
:year 2004
:id "1217"}
{:genre "Electronic"
:artistId "497"
:name "Creature Dreams"
:songCount 7
:created "2017-06-28T20:27:36.000Z"
:duration 1709
:artist "TOKiMONSTA"
:year 2011
:id "897"}
{:genre "Other"
:artistId "466"
:name "Brighton Beach (Freddie Joachim Remix)"
:songCount 1
:created "2017-06-28T18:34:34.000Z"
:duration 187
:artist "Télépopmusik"
:year 2011
:id "838"}
{:genre "Hip-Hop"
:artistId "234"
:name "Viktor Vaughn - Vaudeville Villain"
:songCount 30
:created "2017-06-28T20:45:05.000Z"
:duration 6039
:artist "MF Doom"
:year 2012
:id "1079"
:coverArt "al-1079"}
{:genre "Hip-Hop"
:artistId "234"
:name "King Geedorah - Take Me To Your Leader"
:songCount 13
:created "2017-06-28T20:44:03.000Z"
:duration 2514
:artist "MF Doom"
:year 2003
:id "1078"
:coverArt "al-1078"}
{:genre "electronic"
:artistId "667"
:name "Solid State Survivor"
:songCount 8
:created "2018-08-29T13:02:20.000Z"
:duration 1921
:artist "Yellow Magic Orchestra"
:year 1979
:id "1231"}
{:genre "electronic"
:artistId "666"
:name "Technodon"
:songCount 12
:created "2018-08-29T13:02:40.000Z"
:duration 3806
:artist "Y̶M̶O̶"
:year 1993
:id "1224"}
{:genre "Alternative Hip Hop"
:artistId "669"
:name "Unicron"
:songCount 6
:created "2018-08-29T13:45:33.000Z"
:duration 887
:artist "MF DOOM & Trunks"
:year 2008
:id "1235"
:coverArt "al-1235"}
{:genre "Alternative Hip Hop"
:artistId "650"
:name "Special Herbs, Volume 5 & 6"
:songCount 13
:created "2018-08-29T13:45:33.000Z"
:duration 2760
:artist "Metal Fingers"
:year 2004
:id "1248"
:coverArt "al-1248"}
{:genre "Alternative Hip Hop"
:artistId "650"
:name "Special Herbs, Volume 3 & 4"
:songCount 16
:created "2018-08-29T13:45:43.000Z"
:duration 3054
:artist "Metal Fingers"
:year 2003
:id "1251"
:coverArt "al-1251"}
{:genre "electronic"
:artistId "650"
:name "Special Herbs, Volume 9 & 0"
:songCount 13
:created "2018-08-29T13:45:57.000Z"
:duration 2751
:artist "Metal Fingers"
:year 2005
:id "1249"
:coverArt "al-1249"}
{:genre "electronic"
:artistId "650"
:name "Special Herbs, Volume 7 & 8"
:songCount 13
:created "2018-08-29T13:46:07.000Z"
:duration 2680
:artist "Metal Fingers"
:year 2004
:id "1250"
:coverArt "al-1250"}
{:genre "raphiphop"
:artistId "674"
:name "Key to the Kuffs"
:songCount 15
:created "2018-08-29T13:46:12.000Z"
:duration 2520
:artist "JJ DOOM"
:year 2012
:id "1245"
:coverArt "al-1245"}
{:genre "Hip Hop Music"
:artistId "647"
:name "The Prof. Meets the Supervillain"
:songCount 5
:created "2018-08-29T13:46:19.000Z"
:duration 829
:artist "MF DOOM"
:year 2003
:id "1244"
:coverArt "al-1244"}
{:genre "Hip Hop Music"
:artistId "647"
:name "Vomit"
:songCount 6
:created "2018-08-29T13:46:24.000Z"
:duration 1254
:artist "MF DOOM"
:year 2006
:id "1241"
:coverArt "al-1241"}
{:genre "Hip-Hop"
:artistId "670"
:name "Victory Laps"
:songCount 6
:created "2018-08-29T13:46:34.000Z"
:duration 1026
:artist "DOOMSTARKS"
:year 2011
:id "1237"
:coverArt "al-1237"}
{:genre "rock"
:artistId "672"
:name "(VV:2) Venomous Villain"
:songCount 12
:created "2018-08-29T13:46:36.000Z"
:duration 1976
:artist "Viktor Vaughn"
:year 2004
:id "1242"
:coverArt "al-1242"}
{:genre "Hip Hop Music"
:artistId "671"
:name "Air"
:songCount 5
:created "2018-08-29T13:46:39.000Z"
:duration 803
:artist "Dabrye"
:year 2006
:id "1238"
:coverArt "al-1238"}
{:id "984"
:name "The Wicker Man"
:artist "The Wicker Man"
:artistId "553"
:coverArt "al-984"
:songCount 1
:duration 243
:created "2017-06-28T20:06:58.000Z"}]}}) |
3ecf812fda61c23c0d69993850c07fe858e429b2df3a2af61f7cd66779641c03 | drathier/elm-offline | Canonicalize.hs | # OPTIONS_GHC -Wall #
{-# LANGUAGE OverloadedStrings #-}
module Reporting.Error.Canonicalize
( Error(..)
, BadArityContext(..)
, InvalidPayload(..)
, PortProblem(..)
, DuplicatePatternContext(..)
, PossibleNames(..)
, VarKind(..)
, toReport
)
where
import qualified Data.Char as Char
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified AST.Canonical as Can
import qualified AST.Source as Src
import qualified AST.Module.Name as ModuleName
import qualified Data.Index as Index
import qualified Elm.Name as N
import qualified Reporting.Annotation as A
import qualified Reporting.Doc as D
import Reporting.Doc (Doc, (<+>), (<>))
import qualified Reporting.Region as R
import qualified Reporting.Render.Code as Code
import qualified Reporting.Render.Type as RT
import qualified Reporting.Report as Report
import qualified Reporting.Suggest as Suggest
CANONICALIZATION ERRORS
-- | Every way canonicalization (name resolution) can fail.
--
-- Each constructor carries the 'R.Region' of the offending source
-- code, plus whatever names and details 'toReport' needs to render
-- a human-friendly error message.
data Error
  = AnnotationTooShort R.Region N.Name Index.ZeroBased Int
  | AmbiguousVar R.Region (Maybe N.Name) N.Name [ModuleName.Canonical]
  | AmbiguousType R.Region (Maybe N.Name) N.Name [ModuleName.Canonical]
  | AmbiguousCtor R.Region (Maybe N.Name) N.Name [ModuleName.Canonical]
  | AmbiguousBinop R.Region N.Name [ModuleName.Canonical]
  | BadArity R.Region BadArityContext N.Name Int Int
  | Binop R.Region N.Name N.Name
    -- Duplicate definitions: the two regions are the clashing occurrences.
  | DuplicateDecl N.Name R.Region R.Region
  | DuplicateType N.Name R.Region R.Region
  | DuplicateCtor N.Name R.Region R.Region
  | DuplicateBinop N.Name R.Region R.Region
  | DuplicateField N.Name R.Region R.Region
  | DuplicateAliasArg N.Name N.Name R.Region R.Region
  | DuplicateUnionArg N.Name N.Name R.Region R.Region
  | DuplicatePattern DuplicatePatternContext N.Name R.Region R.Region
  | EffectNotFound R.Region N.Name
  | EffectFunctionNotFound R.Region N.Name
    -- Problems in `exposing` lists and `import` declarations.
  | ExportDuplicate N.Name R.Region R.Region
  | ExportNotFound R.Region VarKind N.Name [N.Name]
  | ExportOpenAlias R.Region N.Name
  | ImportCtorByName R.Region N.Name N.Name
  | ImportNotFound R.Region N.Name [ModuleName.Canonical]
  | ImportOpenAlias R.Region N.Name
  | ImportExposingNotFound R.Region ModuleName.Canonical N.Name [N.Name]
    -- Unresolved names; the carried 'PossibleNames' / name sets are
    -- presumably used for "did you mean" suggestions (as visibly done
    -- for 'ExportNotFound' in 'toReport').
  | NotFoundVar R.Region (Maybe N.Name) N.Name PossibleNames
  | NotFoundType R.Region (Maybe N.Name) N.Name PossibleNames
  | NotFoundCtor R.Region (Maybe N.Name) N.Name PossibleNames
  | NotFoundBinop R.Region N.Name (Set.Set N.Name)
  | PatternHasRecordCtor R.Region N.Name
  | PortPayloadInvalid R.Region N.Name Can.Type InvalidPayload
  | PortTypeInvalid R.Region N.Name PortProblem
  | RecursiveAlias R.Region N.Name [N.Name] Src.Type [N.Name]
  | RecursiveDecl [Can.Def]
  | RecursiveLet (A.Located N.Name) [N.Name]
  | Shadowing N.Name R.Region R.Region
  | TupleLargerThanThree R.Region
  | TypeVarsUnboundInUnion R.Region N.Name [N.Name] (N.Name, R.Region) [(N.Name, R.Region)]
  | TypeVarsMessedUpInAlias R.Region N.Name [N.Name] [(N.Name, R.Region)] [(N.Name, R.Region)]
-- | Whether a 'BadArity' error arose in a type or in a pattern;
-- 'toReport' uses this to say "type" vs. "constructor" in the message.
data BadArityContext
  = TypeArity
  | PatternArity
-- | Where a 'DuplicatePattern' error occurred, so the message can
-- name the construct that repeated the variable.
data DuplicatePatternContext
  = DPLambdaArgs
  | DPFuncArgs N.Name  -- ^ name of the function whose arguments clash
  | DPCaseBranch
  | DPLetBinding
  | DPDestruct
-- | Why a port's payload type is rejected (carried by
-- 'PortPayloadInvalid'). The rendering of these cases is not visible
-- in this file chunk, so the per-constructor semantics below are
-- inferred from the names — confirm against 'toReport'.
data InvalidPayload
  = ExtendedRecord
  | Function
  | TypeVariable N.Name     -- ^ the unresolved type variable
  | UnsupportedType N.Name  -- ^ the offending type's name
-- | What is wrong with the overall type of a port declaration
-- (carried by 'PortTypeInvalid'). The `Cmd*` cases concern outgoing
-- command ports, `SubBad` incoming subscription ports — presumably;
-- the rendering code is outside this chunk, so verify there.
data PortProblem
  = CmdNoArg
  | CmdExtraArgs Int  -- ^ how many extra arguments were given
  | CmdBadMsg
  | SubBad
  | NotCmdOrSub
-- | Names that were in scope when a lookup failed, carried by the
-- NotFound* errors: unqualified names plus qualified names grouped
-- by their module prefix.
data PossibleNames =
  PossibleNames
    { _locals :: Set.Set N.Name                  -- unqualified names in scope
    , _quals :: Map.Map N.Name (Set.Set N.Name)  -- module prefix -> names under it
    }
-- KIND
-- | Which kind of identifier an error refers to; 'toKindInfo' maps
-- each case to the article/noun/delimited-name pieces of a message.
data VarKind
  = BadOp
  | BadVar
  | BadPattern
  | BadType
-- | Render a 'VarKind' and a name as the three message fragments
-- (article, noun, delimited name). Operators are wrapped in
-- parentheses; values, patterns, and types are wrapped in backticks.
toKindInfo :: VarKind -> N.Name -> ( Doc, Doc, Doc )
toKindInfo kind name =
  let
    -- shared backtick-delimited rendering for non-operator kinds
    quoted = "`" <> D.fromName name <> "`"
  in
  case kind of
    BadOp      -> ( "an", "operator", "(" <> D.fromName name <> ")" )
    BadVar     -> ( "a", "value", quoted )
    BadPattern -> ( "a", "pattern", quoted )
    BadType    -> ( "a", "type", quoted )
-- TO REPORT
toReport :: Code.Source -> Error -> Report.Report
toReport source err =
case err of
AnnotationTooShort region name index leftovers ->
let
numTypeArgs = Index.toMachine index
numDefArgs = numTypeArgs + leftovers
in
Report.Report "BAD TYPE ANNOTATION" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"The type annotation for `" <> N.toString name <> "` says it can accept "
<> D.args numTypeArgs <> ", but the definition says it has "
<> D.args numDefArgs <> ":"
,
D.reflow $
"Is the type annotation missing something? Should some argument"
<> (if leftovers == 1 then "" else "s")
<> " be deleted? Maybe some parentheses are missing?"
)
AmbiguousVar region maybePrefix name possibleHomes ->
ambiguousName source region maybePrefix name possibleHomes "variable"
AmbiguousType region maybePrefix name possibleHomes ->
ambiguousName source region maybePrefix name possibleHomes "type"
AmbiguousCtor region maybePrefix name possibleHomes ->
ambiguousName source region maybePrefix name possibleHomes "constructor"
AmbiguousBinop region name possibleHomes ->
ambiguousName source region Nothing name possibleHomes "operator"
BadArity region badArityContext name expected actual ->
let
thing =
case badArityContext of
TypeArity -> "type"
PatternArity -> "constructor"
in
if actual < expected then
Report.Report "TOO FEW ARGS" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"The `" <> N.toString name <> "` " <> thing <> " needs "
<> D.args expected <> ", but I see " <> show actual <> " instead:"
,
D.reflow $
"What is missing? Are some parentheses misplaced?"
)
else
Report.Report "TOO MANY ARGS" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"The `" <> N.toString name <> "` " <> thing <> " needs "
<> D.args expected <> ", but I see " <> show actual <> " instead:"
,
if actual - expected == 1 then
"Which is the extra one? Maybe some parentheses are missing?"
else
"Which are the extra ones? Maybe some parentheses are missing?"
)
Binop region op1 op2 ->
Report.Report "INFIX PROBLEM" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"You cannot mix (" <> N.toString op1 <> ") and (" <> N.toString op2 <> ") without parentheses."
,
D.reflow
"I do not know how to group these expressions. Add parentheses for me!"
)
DuplicateDecl name r1 r2 ->
nameClash source r1 r2 $
"This file has multiple `" <> N.toString name <> "` declarations."
DuplicateType name r1 r2 ->
nameClash source r1 r2 $
"This file defines multiple `" <> N.toString name <> "` types."
DuplicateCtor name r1 r2 ->
nameClash source r1 r2 $
"This file defines multiple `" <> N.toString name <> "` type constructors."
DuplicateBinop name r1 r2 ->
nameClash source r1 r2 $
"This file defines multiple (" <> N.toString name <> ") operators."
DuplicateField name r1 r2 ->
nameClash source r1 r2 $
"This record has multiple `" <> N.toString name <> "` fields."
DuplicateAliasArg typeName name r1 r2 ->
nameClash source r1 r2 $
"The `" <> N.toString typeName <> "` type alias has multilpe `" <> N.toString name <> "` type variables."
DuplicateUnionArg typeName name r1 r2 ->
nameClash source r1 r2 $
"The `" <> N.toString typeName <> "` type has multilpe `" <> N.toString name <> "` type variables."
DuplicatePattern context name r1 r2 ->
nameClash source r1 r2 $
case context of
DPLambdaArgs ->
"This anonymous function has multiple `" <> N.toString name <> "` arguments."
DPFuncArgs funcName ->
"The `" <> N.toString funcName <> "` function has multiple `" <> N.toString name <> "` arguments."
DPCaseBranch ->
"This `case` pattern has multiple `" <> N.toString name <> "` variables."
DPLetBinding ->
"This `let` expression defines `" <> N.toString name <> "` more than once!"
DPDestruct ->
"This pattern contains multiple `" <> N.toString name <> "` variables."
EffectNotFound region name ->
Report.Report "EFFECT PROBLEM" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"You have declared that `" ++ N.toString name ++ "` is an effect type:"
,
D.reflow $
"But I cannot find a union type named `" ++ N.toString name ++ "` in this file!"
)
EffectFunctionNotFound region name ->
Report.Report "EFFECT PROBLEM" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"This kind of effect module must define a `" ++ N.toString name ++ "` function."
,
D.reflow $
"But I cannot find `" ++ N.toString name ++ "` in this file!"
)
ExportDuplicate name r1 r2 ->
let
messageThatEndsWithPunctuation =
"You are trying to expose `" <> N.toString name <> "` multiple times!"
in
Report.Report "REDUNDANT EXPORT" r2 [] $
Report.toCodePair source r1 r2
(
D.reflow messageThatEndsWithPunctuation
,
"Remove one of them and you should be all set!"
)
(
D.reflow (messageThatEndsWithPunctuation <> " Once here:")
,
"And again right here:"
,
"Remove one of them and you should be all set!"
)
ExportNotFound region kind rawName possibleNames ->
let
suggestions =
map N.toString $ take 4 $
Suggest.sort (N.toString rawName) N.toString possibleNames
in
Report.Report "UNKNOWN EXPORT" region suggestions $
let (a, thing, name) = toKindInfo kind rawName in
D.stack
[ D.fillSep
["You","are","trying","to","expose",a,thing,"named"
,name,"but","I","cannot","find","its","definition."
]
, case map D.fromString suggestions of
[] ->
D.reflow $
"I do not see any super similar names in this file. Is the definition missing?"
[alt] ->
D.fillSep ["Maybe","you","want",D.dullyellow alt,"instead?"]
alts ->
D.stack
[ "These names seem close though:"
, D.indent 4 $ D.vcat $ map D.dullyellow alts
]
]
ExportOpenAlias region name ->
Report.Report "BAD EXPORT" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"The (..) syntax is for exposing union type constructors. It cannot be used with a type alias like `"
++ N.toString name ++ "` though."
,
D.reflow $
"Remove the (..) and you should be fine!"
)
ImportCtorByName region ctor tipe ->
Report.Report "BAD IMPORT" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"You are trying to import the `" <> N.toString ctor
<> "` type constructor by name:"
,
D.fillSep
["Try","importing",D.green (D.fromName tipe <> "(..)"),"instead."
,"The","dots","mean","“expose","the",D.fromName tipe,"type","and"
,"all","its","constructors”","so","it","gives","you","access","to"
, D.fromName ctor <> "."
]
)
ImportNotFound region name _ ->
--
-- NOTE: this should always be detected by `builder`
-- So this error should never actually get printed out.
--
Report.Report "UNKNOWN IMPORT" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"I could not find a `" <> N.toString name <> "` module to import!"
,
mempty
)
ImportOpenAlias region name ->
Report.Report "BAD IMPORT" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"The `" <> N.toString name <> "` type alias cannot be followed by (..) like this:"
,
D.stack
[ "Remove the (..) and it should work."
, D.link "Hint"
"The distinction between `type` and `type alias` is important here. Read"
"types-vs-type-aliases"
"to learn more."
]
)
ImportExposingNotFound region (ModuleName.Canonical _ home) value possibleNames ->
let
suggestions =
map N.toString $ take 4 $
Suggest.sort (N.toString home) N.toString possibleNames
in
Report.Report "BAD IMPORT" region suggestions $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"The `" <> N.toString home
<> "` module does not expose `"
<> N.toString value <> "`:"
,
case map D.fromString suggestions of
[] ->
"I cannot find any super similar exposed names. Maybe it is private?"
[alt] ->
D.fillSep ["Maybe","you","want",D.dullyellow alt,"instead?"]
alts ->
D.stack
[ "These names seem close though:"
, D.indent 4 $ D.vcat $ map D.dullyellow alts
]
)
NotFoundVar region prefix name possibleNames ->
notFound source region prefix name "variable" possibleNames
NotFoundType region prefix name possibleNames ->
notFound source region prefix name "type" possibleNames
NotFoundCtor region prefix name possibleNames ->
notFound source region prefix name "constructor" possibleNames
NotFoundBinop region op locals ->
if op == "===" then
Report.Report "UNKNOWN OPERATOR" region ["=="] $
Report.toCodeSnippet source region Nothing
(
"Elm does not have a (===) operator like JavaScript."
,
"Switch to (==) instead."
)
else if op == "!=" || op == "!==" then
Report.Report "UNKNOWN OPERATOR" region ["/="] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"Elm uses a different name for the “not equal” operator:"
,
D.stack
[ D.reflow "Switch to (/=) instead."
, D.toSimpleNote $
"Our (/=) operator is supposed to look like a real “not equal” sign (≠). I hope that history will remember ("
++ N.toString op ++ ") as a weird and temporary choice."
]
)
else if op == "**" then
Report.Report "UNKNOWN OPERATOR" region ["^","*"] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"I do not recognize the (**) operator:"
,
D.reflow $
"Switch to (^) for exponentiation. Or switch to (*) for multiplication."
)
else if op == "%" then
Report.Report "UNKNOWN OPERATOR" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"Elm does not use (%) as the remainder operator:"
,
D.stack
[ D.reflow $
"If you want the behavior of (%) like in JavaScript, switch to:\
\ <-lang.org/packages/elm/core/latest/Basics#remainderBy>"
, D.reflow $
"If you want modular arithmetic like in math, switch to:\
\ <-lang.org/packages/elm/core/latest/Basics#modBy>"
, D.reflow $
"The difference is how things work when negative numbers are involved."
]
)
else
let
suggestions =
map N.toString $ take 2 $
Suggest.sort (N.toString op) N.toString (Set.toList locals)
format altOp =
D.green $ "(" <> altOp <> ")"
in
Report.Report "UNKNOWN OPERATOR" region suggestions $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"I do not recognize the (" ++ N.toString op ++ ") operator."
,
D.fillSep $
["Is","there","an","`import`","and","`exposing`","entry","for","it?"]
++
case map D.fromString suggestions of
[] ->
[]
alts ->
["Maybe","you","want"] ++ D.commaSep "or" format alts ++ ["instead?"]
)
PatternHasRecordCtor region name ->
Report.Report "BAD PATTERN" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"You can construct records by using `" <> N.toString name
<> "` as a function, but it is not available in pattern matching like this:"
,
D.reflow $
"I recommend matching matching the record as a variable and unpacking it later."
)
PortPayloadInvalid region portName _badType invalidPayload ->
let
formatDetails (aBadKindOfThing, elaboration) =
Report.Report "PORT ERROR" region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
"The `" <> N.toString portName <> "` port is trying to transmit " <> aBadKindOfThing <> ":"
,
D.stack
[ elaboration
, D.link "Hint"
"Ports are not a traditional FFI, so if you have tons of annoying ports, definitely read"
"ports"
"to learn how they are meant to work. They require a different mindset!"
]
)
in
formatDetails $
case invalidPayload of
ExtendedRecord ->
(
"an extended record"
,
D.reflow $
"But the exact shape of the record must be known at compile time. No type variables!"
)
Function ->
(
"a function"
,
D.reflow $
"But functions cannot be sent in and out ports. If we allowed functions in from JS\
\ they may perform some side-effects. If we let functions out, they could produce\
\ incorrect results because Elm optimizations assume there are no side-effects."
)
TypeVariable name ->
(
"an unspecified type"
,
D.reflow $
"But type variables like `" <> N.toString name <> "` cannot flow through ports.\
\ I need to know exactly what type of data I am getting, so I can guarantee that\
\ unexpected data cannot sneak in and crash the Elm program."
)
UnsupportedType name ->
(
"a `" <> N.toString name <> "` value"
,
D.stack
[ D.reflow $ "I cannot handle that. The types that CAN flow in and out of Elm include:"
, D.indent 4 $
D.reflow $
"Ints, Floats, Bools, Strings, Maybes, Lists, Arrays,\
\ tuples, records, and JSON values."
, D.reflow $
"Since JSON values can flow through, you can use JSON encoders and decoders\
\ to allow other types through as well. More advanced users often just do\
\ everything with encoders and decoders for more control and better errors."
]
)
PortTypeInvalid region name portProblem ->
let
formatDetails (before, after) =
Report.Report "BAD PORT" region [] $
Report.toCodeSnippet source region Nothing $
(
D.reflow before
,
D.stack
[ after
, D.link "Hint" "Read" "ports"
"for more advice. For example, do not end up with one port per JS function!"
]
)
in
formatDetails $
case portProblem of
CmdNoArg ->
(
"The `" <> N.toString name <> "` port cannot be just a command."
,
D.reflow $
"It can be (() -> Cmd msg) if you just need to trigger a JavaScript\
\ function, but there is often a better way to set things up."
)
CmdExtraArgs n ->
(
"The `" <> N.toString name <> "` port can only send ONE value out to JavaScript."
,
let
theseItemsInSomething
| n == 2 = "both of these items into a tuple or record"
| n == 3 = "these " ++ show n ++ " items into a tuple or record"
| True = "these " ++ show n ++ " items into a record"
in
D.reflow $
"You can put " ++ theseItemsInSomething ++ " to send them out though."
)
CmdBadMsg ->
(
"The `" <> N.toString name <> "` port cannot send any messages to the `update` function."
,
D.reflow $
"It must produce a (Cmd msg) type. Notice the lower case `msg` type\
\ variable. The command will trigger some JS code, but it will not send\
\ anything particular back to Elm."
)
SubBad ->
( "There is something off about this `" <> N.toString name <> "` port declaration."
,
D.stack
[ D.reflow $
"To receive messages from JavaScript, you need to define a port like this:"
, D.indent 4 $ D.dullyellow $ D.fromString $
"port " <> N.toString name <> " : (Int -> msg) -> Sub msg"
, D.reflow $
"Now every time JS sends an `Int` to this port, it is converted to a `msg`.\
\ And if you subscribe, those `msg` values will be piped into your `update`\
\ function. The only thing you can customize here is the `Int` type."
]
)
NotCmdOrSub ->
(
"I am confused about the `" <> N.toString name <> "` port declaration."
,
D.reflow $
"Ports need to produce a command (Cmd) or a subscription (Sub) but\
\ this is neither. I do not know how to handle this."
)
RecursiveAlias region name args tipe others ->
aliasRecursionReport source region name args tipe others
RecursiveDecl cyclicValueDefs ->
let
toName def =
case def of
Can.Def name _ _ -> name
Can.TypedDef name _ _ _ _ -> name
makeTheory question details =
D.fillSep $ map (D.dullyellow . D.fromString) (words question) ++ map D.fromString (words details)
in
case map toName cyclicValueDefs of
[] ->
error
"There is some compiler bug in reporting cyclic definitions.\n\
\Please get a </> and share the details at\n\
\<>"
A.At region name : otherNames ->
Report.Report "CYCLIC DEFINITION" region [] $
Report.toCodeSnippet source region Nothing $
case map A.toValue otherNames of
[] ->
(
D.reflow $
"The `" <> N.toString name <> "` value is defined directly in terms of itself, causing an infinite loop."
,
D.stack
[ makeTheory "Are you are trying to mutate a variable?" $
"Elm does not have mutation, so when I see " ++ N.toString name
++ " defined in terms of " ++ N.toString name
++ ", I treat it as a recursive definition. Try giving the new value a new name!"
, makeTheory "Maybe you DO want a recursive value?" $
"To define " ++ N.toString name ++ " we need to know what " ++ N.toString name
++ " is, so let’s expand it. Wait, but now we need to know what " ++ N.toString name
++ " is, so let’s expand it... This will keep going infinitely!"
, D.link "Hint"
"The root problem is often a typo in some variable name, but I recommend reading"
"bad-recursion"
"for more detailed advice, especially if you actually do need a recursive value."
]
)
names ->
(
D.reflow $
"The `" <> N.toString name <> "` definition is causing a very tricky infinite loop."
,
D.stack
[ D.reflow $
"The `" <> N.toString name
<> "` value depends on itself through the following chain of definitions:"
, D.cycle 4 (name:names)
, D.link "Hint"
"The root problem is often a typo in some variable name, but I recommend reading"
"bad-recursion"
"for more detailed advice, especially if you actually do want mutually recursive values."
]
)
RecursiveLet (A.At region name) names ->
Report.Report "CYCLIC VALUE" region [] $
Report.toCodeSnippet source region Nothing $
case names of
[] ->
let
makeTheory question details =
D.fillSep $ map (D.dullyellow . D.fromString) (words question) ++ map D.fromString (words details)
in
(
D.reflow $
"The `" <> N.toString name <> "` value is defined directly in terms of itself, causing an infinite loop."
,
D.stack
[ makeTheory "Are you are trying to mutate a variable?" $
"Elm does not have mutation, so when I see " ++ N.toString name
++ " defined in terms of " ++ N.toString name
++ ", I treat it as a recursive definition. Try giving the new value a new name!"
, makeTheory "Maybe you DO want a recursive value?" $
"To define " ++ N.toString name ++ " we need to know what " ++ N.toString name
++ " is, so let’s expand it. Wait, but now we need to know what " ++ N.toString name
++ " is, so let’s expand it... This will keep going infinitely!"
, D.link "Hint"
"The root problem is often a typo in some variable name, but I recommend reading"
"bad-recursion"
"for more detailed advice, especially if you actually do need a recursive value."
]
)
_ ->
(
D.reflow $
"I do not allow cyclic values in `let` expressions."
,
D.stack
[ D.reflow $
"The `" <> N.toString name
<> "` value depends on itself through the following chain of definitions:"
, D.cycle 4 (name:names)
, D.link "Hint"
"The root problem is often a typo in some variable name, but I recommend reading"
"bad-recursion"
"for more detailed advice, especially if you actually do want mutually recursive values."
]
)
Shadowing name r1 r2 ->
Report.Report "SHADOWING" r2 [] $
Report.toCodePair source r1 r2
( "These variables cannot have the same name:"
, advice
)
( D.reflow $ "The name `" <> N.toString name <> "` is first defined here:"
, "But then it is defined AGAIN over here:"
, advice
)
where
advice =
D.stack
[ D.reflow $
"Think of a more helpful name for one of them and you should be all set!"
, D.link "Note"
"Linters advise against shadowing, so Elm makes “best practices” the default. Read"
"shadowing"
"for more details on this choice."
]
TupleLargerThanThree region ->
Report.Report "BAD TUPLE" region [] $
Report.toCodeSnippet source region Nothing
(
"I only accept tuples with two or three items. This has too many:"
,
D.stack
[ D.reflow $
"I recommend switching to records. Each item will be named, and you can use\
\ the `point.x` syntax to access them."
, D.link "Note" "Read" "tuples"
"for more comprehensive advice on working with large chunks of data in Elm."
]
)
TypeVarsUnboundInUnion unionRegion typeName allVars unbound unbounds ->
unboundTypeVars source unionRegion ["type"] typeName allVars unbound unbounds
TypeVarsMessedUpInAlias aliasRegion typeName allVars unusedVars unboundVars ->
case (unusedVars, unboundVars) of
(unused:unuseds, []) ->
let
backQuote name =
"`" <> D.fromName name <> "`"
allUnusedNames =
map fst unusedVars
(title, subRegion, overview, stuff) =
case unuseds of
[] ->
("UNUSED TYPE VARIABLE"
, Just (snd unused)
, ["Type","alias",backQuote typeName,"does","not","use","the"
,backQuote (fst unused),"type","variable."
]
, [D.dullyellow (backQuote (fst unused))]
)
_:_ ->
( "UNUSED TYPE VARIABLES"
, Nothing
, ["Type","variables"]
++ D.commaSep "and" id (map D.fromName allUnusedNames)
++ ["are","unused","in","the",backQuote typeName,"definition."]
, D.commaSep "and" D.dullyellow (map D.fromName allUnusedNames)
)
in
Report.Report title aliasRegion [] $
Report.toCodeSnippet source aliasRegion subRegion
(
D.fillSep overview
,
D.stack
[ D.fillSep $
["I","recommend","removing"] ++ stuff ++ ["from","the","declaration,","like","this:"]
, D.indent 4 $ D.hsep $
["type","alias",D.green (D.fromName typeName)]
++ map D.fromName (filter (`notElem` allUnusedNames) allVars)
++ ["=", "..."]
, D.reflow $
"Why? Well, if I allowed `type alias Height a = Float` I would need to answer\
\ some weird questions. Is `Height Bool` the same as `Float`? Is `Height Bool`\
\ the same as `Height Int`? My solution is to not need to ask them!"
]
)
([], unbound:unbounds) ->
unboundTypeVars source aliasRegion ["type","alias"] typeName allVars unbound unbounds
(_, _) ->
let
unused = map fst unusedVars
unbound = map fst unboundVars
theseAreUsed =
case unbound of
[x] ->
["Type","variable",D.dullyellow ("`" <> D.fromName x <> "`"),"appears"
,"in","the","definition,","but","I","do","not","see","it","declared."
]
_ ->
["Type","variables"]
++ D.commaSep "and" D.dullyellow (map D.fromName unbound)
++ ["are","used","in","the","definition,","but","I","do","not","see","them","declared."]
butTheseAreUnused =
case unused of
[x] ->
["Likewise,","type","variable"
,D.dullyellow ("`" <> D.fromName x <> "`")
,"is","delared,","but","not","used."
]
_ ->
["Likewise,","type","variables"]
++ D.commaSep "and" D.dullyellow (map D.fromName unused)
++ ["are","delared,","but","not","used."]
in
Report.Report "TYPE VARIABLE PROBLEMS" aliasRegion [] $
Report.toCodeSnippet source aliasRegion Nothing
(
D.reflow $
"Type alias `" <> N.toString typeName <> "` has some type variable problems."
,
D.stack
[ D.fillSep $ theseAreUsed ++ butTheseAreUnused
, D.reflow $
"My guess is that a definition like this will work better:"
, D.indent 4 $ D.hsep $
["type", "alias", D.fromName typeName]
++ map D.fromName (filter (`notElem` unused) allVars)
++ map (D.green . D.fromName) unbound
++ ["=", "..."]
]
)
-- BAD TYPE VARIABLES
-- | Report a @type@ or @type alias@ declaration whose body mentions type
-- variables that are not listed after the type's name.
--
-- The @tipe@ argument carries the declaration keyword as words to splice
-- into the prose, e.g. @["type"]@ or @["type","alias"]@ (see the call
-- sites in 'toReport'). The first unbound variable is passed separately
-- so the singular case can point at its exact region.
unboundTypeVars :: Code.Source -> R.Region -> [D.Doc] -> N.Name -> [N.Name] -> (N.Name, R.Region) -> [(N.Name, R.Region)] -> Report.Report
unboundTypeVars source declRegion tipe typeName allVars (unboundVar, varRegion) unboundVars =
  Report.Report title declRegion [] $
    Report.toCodeSnippet source declRegion subRegion
      ( D.fillSep overview
      , D.stack
          [ D.reflow $
              "You probably need to change the declaration to something like this:"
          , D.indent 4 $ D.hsep $
              tipe
              ++ [D.fromName typeName]
              ++ map D.fromName allVars
              ++ map (D.green . D.fromName) (unboundVar : map fst unboundVars)
              ++ ["=", "..."]
          , D.reflow $
              "Why? Well, imagine one `" ++ N.toString typeName ++ "` where `" ++ N.toString unboundVar ++
              "` is an Int and another where it is a Bool. When we explicitly list the type\
              \ variables, the type checker can see that they are actually different types."
          ]
      )
  where
    quoted n =
      "`" <> D.fromName n <> "`"

    -- Singular wording (with a sub-region on the one offender) when there
    -- is exactly one unbound variable; plural wording otherwise.
    (title, subRegion, overview) =
      case map fst unboundVars of
        [] ->
          ( "UNBOUND TYPE VARIABLE"
          , Just varRegion
          , ["The", quoted typeName]
              ++ tipe
              ++ ["uses","an","unbound","type","variable",D.dullyellow (quoted unboundVar),"in","its","definition:"]
          )
        moreVars ->
          ( "UNBOUND TYPE VARIABLES"
          , Nothing
          , ["Type","variables"]
              ++ D.commaSep "and" D.dullyellow (D.fromName unboundVar : map D.fromName moreVars)
              ++ ["are","unbound","in","the", quoted typeName] ++ tipe ++ ["definition:"]
          )
-- NAME CLASH
-- | Build a "NAME CLASH" report for two definitions sharing one name.
-- The message should already end with punctuation; when both regions are
-- rendered, a location phrase is appended to it for the first one.
nameClash :: Code.Source -> R.Region -> R.Region -> String -> Report.Report
nameClash source r1 r2 message =
  Report.Report "NAME CLASH" r2 [] $
    Report.toCodePair source r1 r2
      ( D.reflow message
      , renameAdvice
      )
      ( D.reflow (message <> " One here:")
      , "And another one here:"
      , renameAdvice
      )
  where
    renameAdvice =
      "How can I know which one you want? Rename one of them!"
-- AMBIGUOUS NAME
-- | Report a name that several imported modules expose. The wording
-- depends on whether the usage was qualified: unqualified usages get a
-- "check your imports" list, qualified usages show how each candidate
-- module could have been imported under that prefix.
ambiguousName :: Code.Source -> R.Region -> Maybe N.Name -> N.Name -> [ModuleName.Canonical] -> String -> Report.Report
ambiguousName source region maybePrefix name possibleHomes thing =
  Report.Report "AMBIGUOUS NAME" region [] $
    Report.toCodeSnippet source region Nothing $
      case maybePrefix of
        Nothing ->
          ( D.reflow $ "This usage of `" ++ N.toString name ++ "` is ambiguous."
          , D.stack
              [ D.reflow $
                  "Check your imports. The following modules "
                  ++ (if twoHomes then "both" else "all")
                  ++ " expose a `" ++ N.toString name ++ "` " ++ thing ++ ":"
              , homeList (\(ModuleName.Canonical _ home) -> D.dullyellow (D.fromName home))
              , readMore
              ]
          )

        Just prefix ->
          ( D.reflow $ "This usage of `" ++ toQualString prefix name ++ "` is ambiguous."
          , D.stack
              [ D.reflow $
                  "It could refer to a " ++ thing ++ " from "
                  ++ (if twoHomes then "either" else "any") ++ " of these imports:"
              , homeList (importLine prefix)
              , readMore
              ]
          )
  where
    twoHomes =
      length possibleHomes == 2

    homeList toDoc =
      D.indent 4 $ D.vcat $ map toDoc possibleHomes

    -- Show the import as written when the prefix is the module's own
    -- name, otherwise as an `import ... as ...` line.
    importLine prefix (ModuleName.Canonical _ home) =
      if prefix == home then
        D.blue "import" <+> D.dullyellow (D.fromName home)
      else
        D.blue "import" <+> D.dullyellow (D.fromName home) <+> D.blue "as" <+> D.dullyellow (D.fromName prefix)

    readMore =
      D.reflowLink "Read" "imports" "to learn how to clarify which one you want."
-- NOT FOUND
-- | Report a name (variable, type, or constructor — described by @thing@)
-- that could not be resolved, suggesting up to four similar names drawn
-- from the locally defined and qualified candidates.
notFound :: Code.Source -> R.Region -> Maybe N.Name -> N.Name -> String -> PossibleNames -> Report.Report
notFound source region maybePrefix name thing (PossibleNames locals quals) =
  Report.Report "NAMING ERROR" region nearbyNames $
    Report.toCodeSnippet source region Nothing
      ( D.reflow $
          "I cannot find a `" ++ givenName ++ "` " ++ thing ++ ":"
      , case maybePrefix of
          Nothing ->
            toDetails
              "Is there an `import` or `exposing` missing up top?"
              "These names seem close though:"

          Just prefix ->
            -- Distinguish "that module was never imported" from
            -- "the module exists but does not expose this name".
            case Map.lookup prefix quals of
              Nothing ->
                toDetails
                  ("I cannot find a `" ++ N.toString prefix ++ "` module. Is there an `import` for it?")
                  ("I cannot find a `" ++ N.toString prefix ++ "` import. These names seem close though:")

              Just _ ->
                toDetails
                  ("The `" ++ N.toString prefix ++ "` module does not expose a `" ++ N.toString name ++ "` " ++ thing ++ ".")
                  ("The `" ++ N.toString prefix ++ "` module does not expose a `" ++ N.toString name ++ "` " ++ thing ++ ". These names seem close though:")
      )
  where
    -- The name as the user wrote it, qualified or not.
    givenName =
      maybe N.toString toQualString maybePrefix name

    -- All candidate spellings: unqualified locals plus every
    -- `Prefix.name` combination from the qualified imports.
    possibleNames =
      Map.foldrWithKey addQuals (map N.toString (Set.toList locals)) quals

    addQuals prefix localSet allNames =
      Set.foldr (\x xs -> toQualString prefix x : xs) allNames localSet

    nearbyNames =
      take 4 (Suggest.sort givenName id possibleNames)

    importsHint =
      D.link "Hint" "Read" "imports" "to see how `import` declarations work in Elm."

    toDetails noSuggestionDetails yesSuggestionDetails =
      case nearbyNames of
        [] ->
          D.stack
            [ D.reflow noSuggestionDetails
            , importsHint
            ]

        suggestions ->
          D.stack
            [ D.reflow yesSuggestionDetails
            , D.indent 4 $ D.vcat $ map (D.dullyellow . D.fromString) suggestions
            , importsHint
            ]
-- | Render a qualified name, e.g. @toQualString "Json" "map"@ gives
-- @"Json.map"@.
toQualString :: N.Name -> N.Name -> String
toQualString prefix name =
  concat [ N.toString prefix, ".", N.toString name ]
-- VAR ERROR

{-
varErrorToReport :: VarError -> Report.Report
varErrorToReport (VarError kind name problem suggestions) =
let
learnMore orMaybe =
D.reflow $
orMaybe <> " `import` works different than you expect? Learn all about it here: "
<> D.hintLink "imports"
namingError overview maybeStarter specializedSuggestions =
Report.reportDoc "NAMING ERROR" Nothing overview $
case D.maybeYouWant' maybeStarter specializedSuggestions of
Nothing ->
learnMore "Maybe"
Just doc ->
D.stack [ doc, learnMore "Or maybe" ]
specialNamingError specialHint =
Report.reportDoc "NAMING ERROR" Nothing (cannotFind kind name) (D.hsep specialHint)
in
case problem of
Ambiguous ->
namingError (ambiguous kind name) Nothing suggestions
UnknownQualifier qualifier localName ->
namingError
(cannotFind kind name)
(Just $ text $ "No module called `" <> qualifier <> "` has been imported.")
(map (\modul -> modul <> "." <> localName) suggestions)
QualifiedUnknown qualifier localName ->
namingError
(cannotFind kind name)
(Just $ text $ "`" <> qualifier <> "` does not expose `" <> localName <> "`.")
(map (\v -> qualifier <> "." <> v) suggestions)
ExposedUnknown ->
case name of
"!=" -> specialNamingError (notEqualsHint name)
"!==" -> specialNamingError (notEqualsHint name)
"===" -> specialNamingError equalsHint
"%" -> specialNamingError modHint
_ -> namingError (cannotFind kind name) Nothing suggestions
cannotFind :: VarKind -> Text -> [Doc]
cannotFind kind rawName =
let ( a, thing, name ) = toKindInfo kind rawName in
[ "Cannot", "find", a, thing, "named", D.dullyellow name <> ":" ]
ambiguous :: VarKind -> Text -> [Doc]
ambiguous kind rawName =
let ( _a, thing, name ) = toKindInfo kind rawName in
[ "This", "usage", "of", "the", D.dullyellow name, thing, "is", "ambiguous." ]
notEqualsHint :: Text -> [Doc]
notEqualsHint op =
[ "Looking", "for", "the", "“not", "equal”", "operator?", "The", "traditional"
, D.dullyellow $ text $ "(" <> op <> ")"
, "is", "replaced", "by", D.green "(/=)", "in", "Elm.", "It", "is", "meant"
, "to", "look", "like", "the", "“not", "equal”", "sign", "from", "math!", "(≠)"
]
equalsHint :: [Doc]
equalsHint =
[ "A", "special", D.dullyellow "(===)", "operator", "is", "not", "needed"
, "in", "Elm.", "We", "use", D.green "(==)", "for", "everything!"
]
modHint :: [Doc]
modHint =
[ "Rather", "than", "a", D.dullyellow "(%)", "operator,"
, "Elm", "has", "a", D.green "modBy", "function."
, "Learn", "more", "here:"
, "<-lang.org/packages/elm/core/latest/Basics#modBy>"
]
-}
-- ARG MISMATCH
-- | Report that @name@ was given the wrong number of arguments, with the
-- title derived from whether there were too few or too many.
-- NOTE(review): the leading underscore suggests this helper is currently
-- unused (it silences unused-binding warnings) — confirm before removing.
_argMismatchReport :: Code.Source -> R.Region -> String -> N.Name -> Int -> Int -> Report.Report
_argMismatchReport source region kind name expected actual =
  Report.Report (map Char.toUpper numArgs) region [] $
    Report.toCodeSnippet source region Nothing
      ( D.reflow $
          kind <> " " <> N.toString name <> " has " <> numArgs <> "."
      , D.reflow $
          "Expecting " <> show expected <> ", but got " <> show actual <> "."
      )
  where
    numArgs =
      "too " <> fewOrMany <> " arguments"

    fewOrMany =
      if actual < expected then "few" else "many"
-- BAD ALIAS RECURSION
-- | Report a recursive type alias. With no @others@ the alias refers to
-- itself directly and we suggest the `type` rewrite via 'aliasToUnionDoc';
-- otherwise it is part of a mutually recursive cycle of aliases.
aliasRecursionReport :: Code.Source -> R.Region -> N.Name -> [N.Name] -> Src.Type -> [N.Name] -> Report.Report
aliasRecursionReport source region name args tipe others =
  Report.Report "ALIAS PROBLEM" region [] $
    Report.toCodeSnippet source region Nothing $
      case others of
        [] ->
          ( "This type alias is recursive, forming an infinite type!"
          , D.stack
              [ D.reflow $
                  "When I expand a recursive type alias, it just keeps getting bigger and bigger.\
                  \ So dealiasing results in an infinitely large type! Try this instead:"
              , D.indent 4 $ aliasToUnionDoc name args tipe
              , D.link "Hint"
                  "This is kind of a subtle distinction. I suggested the naive fix, but I recommend reading"
                  "recursive-alias"
                  "for ideas on how to do better."
              ]
          )

        _ ->
          ( "This type alias is part of a mutually recursive set of type aliases."
          , D.stack
              [ "It is part of this cycle of type aliases:"
              , D.cycle 4 (name:others)
              , D.reflow $
                  "You need to convert at least one of these type aliases into a `type`."
              , D.link "Note" "Read" "recursive-alias"
                  "to learn why this `type` vs `type alias` distinction matters. It is subtle but important!"
              ]
          )
-- | Render the suggested @type@ replacement for a recursive alias: the
-- @type Name a b =@ header, a constructor reusing the alias name (shown
-- in green as the new part), and the original aliased type indented
-- beneath it.
aliasToUnionDoc :: N.Name -> [N.Name] -> Src.Type -> Doc
aliasToUnionDoc name args tipe =
  D.vcat
    [ D.dullyellow headerDoc
    , D.green $ D.indent 4 (D.fromName name)
    , D.dullyellow $ D.indent 8 (RT.srcToDoc RT.App tipe)
    ]
  where
    headerDoc =
      "type" <+> D.fromName name <+> foldr (<+>) "=" (map D.fromName args)
| null | https://raw.githubusercontent.com/drathier/elm-offline/f562198cac29f4cda15b69fde7e66edde89b34fa/compiler/src/Reporting/Error/Canonicalize.hs | haskell | # LANGUAGE OverloadedStrings #
KIND
TO REPORT
NOTE: this should always be detected by `builder`
So this error should never actually get printed out.
BAD TYPE VARIABLES
NOT FOUND
ARG MISMATCH
BAD ALIAS RECURSION | # OPTIONS_GHC -Wall #
module Reporting.Error.Canonicalize
( Error(..)
, BadArityContext(..)
, InvalidPayload(..)
, PortProblem(..)
, DuplicatePatternContext(..)
, PossibleNames(..)
, VarKind(..)
, toReport
)
where
import qualified Data.Char as Char
import qualified Data.Map as Map
import qualified Data.Set as Set
import qualified AST.Canonical as Can
import qualified AST.Source as Src
import qualified AST.Module.Name as ModuleName
import qualified Data.Index as Index
import qualified Elm.Name as N
import qualified Reporting.Annotation as A
import qualified Reporting.Doc as D
import Reporting.Doc (Doc, (<+>), (<>))
import qualified Reporting.Region as R
import qualified Reporting.Render.Code as Code
import qualified Reporting.Render.Type as RT
import qualified Reporting.Report as Report
import qualified Reporting.Suggest as Suggest
CANONICALIZATION ERRORS
-- | Every problem that canonicalization (name resolution and validation)
-- can detect. Constructors carry the 'R.Region' (or two regions, for
-- duplicate definitions) of the offending code so 'toReport' can show
-- the right snippet.
data Error
  = AnnotationTooShort R.Region N.Name Index.ZeroBased Int
    -- ambiguity: the listed modules all expose the (possibly qualified) name
  | AmbiguousVar R.Region (Maybe N.Name) N.Name [ModuleName.Canonical]
  | AmbiguousType R.Region (Maybe N.Name) N.Name [ModuleName.Canonical]
  | AmbiguousCtor R.Region (Maybe N.Name) N.Name [ModuleName.Canonical]
  | AmbiguousBinop R.Region N.Name [ModuleName.Canonical]
    -- arity mismatch: expected argument count, then actual
  | BadArity R.Region BadArityContext N.Name Int Int
  | Binop R.Region N.Name N.Name
    -- duplicates: the two regions are the two clashing occurrences
  | DuplicateDecl N.Name R.Region R.Region
  | DuplicateType N.Name R.Region R.Region
  | DuplicateCtor N.Name R.Region R.Region
  | DuplicateBinop N.Name R.Region R.Region
  | DuplicateField N.Name R.Region R.Region
  | DuplicateAliasArg N.Name N.Name R.Region R.Region
  | DuplicateUnionArg N.Name N.Name R.Region R.Region
  | DuplicatePattern DuplicatePatternContext N.Name R.Region R.Region
  | EffectNotFound R.Region N.Name
  | EffectFunctionNotFound R.Region N.Name
  | ExportDuplicate N.Name R.Region R.Region
  | ExportNotFound R.Region VarKind N.Name [N.Name]
  | ExportOpenAlias R.Region N.Name
  | ImportCtorByName R.Region N.Name N.Name
  | ImportNotFound R.Region N.Name [ModuleName.Canonical]
  | ImportOpenAlias R.Region N.Name
  | ImportExposingNotFound R.Region ModuleName.Canonical N.Name [N.Name]
    -- name not in scope: the PossibleNames feed "did you mean" suggestions
  | NotFoundVar R.Region (Maybe N.Name) N.Name PossibleNames
  | NotFoundType R.Region (Maybe N.Name) N.Name PossibleNames
  | NotFoundCtor R.Region (Maybe N.Name) N.Name PossibleNames
  | NotFoundBinop R.Region N.Name (Set.Set N.Name)
  | PatternHasRecordCtor R.Region N.Name
  | PortPayloadInvalid R.Region N.Name Can.Type InvalidPayload
  | PortTypeInvalid R.Region N.Name PortProblem
  | RecursiveAlias R.Region N.Name [N.Name] Src.Type [N.Name]
  | RecursiveDecl [Can.Def]
  | RecursiveLet (A.Located N.Name) [N.Name]
  | Shadowing N.Name R.Region R.Region
  | TupleLargerThanThree R.Region
    -- type variable problems in `type` / `type alias` declarations
  | TypeVarsUnboundInUnion R.Region N.Name [N.Name] (N.Name, R.Region) [(N.Name, R.Region)]
  | TypeVarsMessedUpInAlias R.Region N.Name [N.Name] [(N.Name, R.Region)] [(N.Name, R.Region)]
-- | Whether an arity mismatch occurred in a type application or in a
-- constructor pattern; selects the noun ("type" vs "constructor") used
-- in the BadArity report.
data BadArityContext
  = TypeArity
  | PatternArity
-- | Where a duplicated pattern variable appeared; selects the wording of
-- the DuplicatePattern report.
data DuplicatePatternContext
  = DPLambdaArgs
  | DPFuncArgs N.Name  -- carries the name of the enclosing function
  | DPCaseBranch
  | DPLetBinding
  | DPDestruct
-- | Why a particular value type cannot flow through a port.
data InvalidPayload
  = ExtendedRecord
  | Function
  | TypeVariable N.Name     -- the unbound type variable's name
  | UnsupportedType N.Name  -- the offending type's name
-- | What is wrong with the overall shape of a port declaration.
data PortProblem
  = CmdNoArg           -- outgoing port declared as a bare Cmd, no argument
  | CmdExtraArgs Int   -- outgoing port takes this many arguments; only one value may go out
  | CmdBadMsg          -- outgoing port tries to produce concrete messages
  | SubBad             -- incoming port is not of the ((a -> msg) -> Sub msg) shape
  | NotCmdOrSub        -- produces neither a Cmd nor a Sub
-- | Names in scope at the error site, used to build "did you mean"
-- suggestions in 'notFound': unqualified local names, plus the names
-- exposed under each module qualifier.
data PossibleNames =
  PossibleNames
    { _locals :: Set.Set N.Name                     -- names usable without a qualifier
    , _quals :: Map.Map N.Name (Set.Set N.Name)     -- qualifier -> names exposed under it
    }
-- | What kind of identifier a naming error refers to; see 'toKindInfo'
-- for how each kind is rendered in messages.
data VarKind
  = BadOp
  | BadVar
  | BadPattern
  | BadType
-- | Describe a 'VarKind' for error messages: the article ("a"/"an"),
-- the noun, and the name wrapped in the punctuation that kind uses
-- (parentheses for operators, backticks for everything else).
toKindInfo :: VarKind -> N.Name -> ( Doc, Doc, Doc )
toKindInfo BadOp      name = ( "an", "operator", "(" <> D.fromName name <> ")" )
toKindInfo BadVar     name = ( "a", "value", "`" <> D.fromName name <> "`" )
toKindInfo BadPattern name = ( "a", "pattern", "`" <> D.fromName name <> "`" )
toKindInfo BadType    name = ( "a", "type", "`" <> D.fromName name <> "`" )
-- | Turn a canonicalization 'Error' into a human-readable 'Report.Report',
-- using the original 'Code.Source' to show the offending snippet.
--
-- Fixes applied: several typos in user-facing message strings
-- ("multilpe" -> "multiple", "Are you are" -> "Are you",
-- "matching matching" -> "matching", "delared" -> "declared").
toReport :: Code.Source -> Error -> Report.Report
toReport source err =
  case err of
    AnnotationTooShort region name index leftovers ->
      let
        numTypeArgs = Index.toMachine index
        numDefArgs = numTypeArgs + leftovers
      in
      Report.Report "BAD TYPE ANNOTATION" region [] $
        Report.toCodeSnippet source region Nothing
          (
            D.reflow $
              "The type annotation for `" <> N.toString name <> "` says it can accept "
              <> D.args numTypeArgs <> ", but the definition says it has "
              <> D.args numDefArgs <> ":"
          ,
            D.reflow $
              "Is the type annotation missing something? Should some argument"
              <> (if leftovers == 1 then "" else "s")
              <> " be deleted? Maybe some parentheses are missing?"
          )

    AmbiguousVar region maybePrefix name possibleHomes ->
      ambiguousName source region maybePrefix name possibleHomes "variable"

    AmbiguousType region maybePrefix name possibleHomes ->
      ambiguousName source region maybePrefix name possibleHomes "type"

    AmbiguousCtor region maybePrefix name possibleHomes ->
      ambiguousName source region maybePrefix name possibleHomes "constructor"

    AmbiguousBinop region name possibleHomes ->
      ambiguousName source region Nothing name possibleHomes "operator"

    BadArity region badArityContext name expected actual ->
      let
        thing =
          case badArityContext of
            TypeArity -> "type"
            PatternArity -> "constructor"
      in
      if actual < expected then
        Report.Report "TOO FEW ARGS" region [] $
          Report.toCodeSnippet source region Nothing
            (
              D.reflow $
                "The `" <> N.toString name <> "` " <> thing <> " needs "
                <> D.args expected <> ", but I see " <> show actual <> " instead:"
            ,
              D.reflow $
                "What is missing? Are some parentheses misplaced?"
            )

      else
        Report.Report "TOO MANY ARGS" region [] $
          Report.toCodeSnippet source region Nothing
            (
              D.reflow $
                "The `" <> N.toString name <> "` " <> thing <> " needs "
                <> D.args expected <> ", but I see " <> show actual <> " instead:"
            ,
              if actual - expected == 1 then
                "Which is the extra one? Maybe some parentheses are missing?"
              else
                "Which are the extra ones? Maybe some parentheses are missing?"
            )

    Binop region op1 op2 ->
      Report.Report "INFIX PROBLEM" region [] $
        Report.toCodeSnippet source region Nothing
          (
            D.reflow $
              "You cannot mix (" <> N.toString op1 <> ") and (" <> N.toString op2 <> ") without parentheses."
          ,
            D.reflow
              "I do not know how to group these expressions. Add parentheses for me!"
          )

    -- all the Duplicate* errors share the nameClash two-region report
    DuplicateDecl name r1 r2 ->
      nameClash source r1 r2 $
        "This file has multiple `" <> N.toString name <> "` declarations."

    DuplicateType name r1 r2 ->
      nameClash source r1 r2 $
        "This file defines multiple `" <> N.toString name <> "` types."

    DuplicateCtor name r1 r2 ->
      nameClash source r1 r2 $
        "This file defines multiple `" <> N.toString name <> "` type constructors."

    DuplicateBinop name r1 r2 ->
      nameClash source r1 r2 $
        "This file defines multiple (" <> N.toString name <> ") operators."

    DuplicateField name r1 r2 ->
      nameClash source r1 r2 $
        "This record has multiple `" <> N.toString name <> "` fields."

    DuplicateAliasArg typeName name r1 r2 ->
      nameClash source r1 r2 $
        "The `" <> N.toString typeName <> "` type alias has multiple `" <> N.toString name <> "` type variables."

    DuplicateUnionArg typeName name r1 r2 ->
      nameClash source r1 r2 $
        "The `" <> N.toString typeName <> "` type has multiple `" <> N.toString name <> "` type variables."

    DuplicatePattern context name r1 r2 ->
      nameClash source r1 r2 $
        case context of
          DPLambdaArgs ->
            "This anonymous function has multiple `" <> N.toString name <> "` arguments."

          DPFuncArgs funcName ->
            "The `" <> N.toString funcName <> "` function has multiple `" <> N.toString name <> "` arguments."

          DPCaseBranch ->
            "This `case` pattern has multiple `" <> N.toString name <> "` variables."

          DPLetBinding ->
            "This `let` expression defines `" <> N.toString name <> "` more than once!"

          DPDestruct ->
            "This pattern contains multiple `" <> N.toString name <> "` variables."

    EffectNotFound region name ->
      Report.Report "EFFECT PROBLEM" region [] $
        Report.toCodeSnippet source region Nothing
          (
            D.reflow $
              "You have declared that `" ++ N.toString name ++ "` is an effect type:"
          ,
            D.reflow $
              "But I cannot find a union type named `" ++ N.toString name ++ "` in this file!"
          )

    EffectFunctionNotFound region name ->
      Report.Report "EFFECT PROBLEM" region [] $
        Report.toCodeSnippet source region Nothing
          (
            D.reflow $
              "This kind of effect module must define a `" ++ N.toString name ++ "` function."
          ,
            D.reflow $
              "But I cannot find `" ++ N.toString name ++ "` in this file!"
          )

    ExportDuplicate name r1 r2 ->
      let
        messageThatEndsWithPunctuation =
          "You are trying to expose `" <> N.toString name <> "` multiple times!"
      in
      Report.Report "REDUNDANT EXPORT" r2 [] $
        Report.toCodePair source r1 r2
          (
            D.reflow messageThatEndsWithPunctuation
          ,
            "Remove one of them and you should be all set!"
          )
          (
            D.reflow (messageThatEndsWithPunctuation <> " Once here:")
          ,
            "And again right here:"
          ,
            "Remove one of them and you should be all set!"
          )

    ExportNotFound region kind rawName possibleNames ->
      let
        suggestions =
          map N.toString $ take 4 $
            Suggest.sort (N.toString rawName) N.toString possibleNames
      in
      Report.Report "UNKNOWN EXPORT" region suggestions $
        let (a, thing, name) = toKindInfo kind rawName in
        D.stack
          [ D.fillSep
              ["You","are","trying","to","expose",a,thing,"named"
              ,name,"but","I","cannot","find","its","definition."
              ]
          , case map D.fromString suggestions of
              [] ->
                D.reflow $
                  "I do not see any super similar names in this file. Is the definition missing?"

              [alt] ->
                D.fillSep ["Maybe","you","want",D.dullyellow alt,"instead?"]

              alts ->
                D.stack
                  [ "These names seem close though:"
                  , D.indent 4 $ D.vcat $ map D.dullyellow alts
                  ]
          ]

    ExportOpenAlias region name ->
      Report.Report "BAD EXPORT" region [] $
        Report.toCodeSnippet source region Nothing
          (
            D.reflow $
              "The (..) syntax is for exposing union type constructors. It cannot be used with a type alias like `"
              ++ N.toString name ++ "` though."
          ,
            D.reflow $
              "Remove the (..) and you should be fine!"
          )

    ImportCtorByName region ctor tipe ->
      Report.Report "BAD IMPORT" region [] $
        Report.toCodeSnippet source region Nothing
          (
            D.reflow $
              "You are trying to import the `" <> N.toString ctor
              <> "` type constructor by name:"
          ,
            D.fillSep
              ["Try","importing",D.green (D.fromName tipe <> "(..)"),"instead."
              ,"The","dots","mean","“expose","the",D.fromName tipe,"type","and"
              ,"all","its","constructors”","so","it","gives","you","access","to"
              , D.fromName ctor <> "."
              ]
          )

    ImportNotFound region name _ ->
      Report.Report "UNKNOWN IMPORT" region [] $
        Report.toCodeSnippet source region Nothing
          (
            D.reflow $
              "I could not find a `" <> N.toString name <> "` module to import!"
          ,
            mempty
          )

    ImportOpenAlias region name ->
      Report.Report "BAD IMPORT" region [] $
        Report.toCodeSnippet source region Nothing
          (
            D.reflow $
              "The `" <> N.toString name <> "` type alias cannot be followed by (..) like this:"
          ,
            D.stack
              [ "Remove the (..) and it should work."
              , D.link "Hint"
                  "The distinction between `type` and `type alias` is important here. Read"
                  "types-vs-type-aliases"
                  "to learn more."
              ]
          )

    ImportExposingNotFound region (ModuleName.Canonical _ home) value possibleNames ->
      let
        suggestions =
          map N.toString $ take 4 $
            Suggest.sort (N.toString home) N.toString possibleNames
      in
      Report.Report "BAD IMPORT" region suggestions $
        Report.toCodeSnippet source region Nothing
          (
            D.reflow $
              "The `" <> N.toString home
              <> "` module does not expose `"
              <> N.toString value <> "`:"
          ,
            case map D.fromString suggestions of
              [] ->
                "I cannot find any super similar exposed names. Maybe it is private?"

              [alt] ->
                D.fillSep ["Maybe","you","want",D.dullyellow alt,"instead?"]

              alts ->
                D.stack
                  [ "These names seem close though:"
                  , D.indent 4 $ D.vcat $ map D.dullyellow alts
                  ]
          )

    NotFoundVar region prefix name possibleNames ->
      notFound source region prefix name "variable" possibleNames

    NotFoundType region prefix name possibleNames ->
      notFound source region prefix name "type" possibleNames

    NotFoundCtor region prefix name possibleNames ->
      notFound source region prefix name "constructor" possibleNames

    -- special-cased operators give targeted advice for common JS habits
    NotFoundBinop region op locals ->
      if op == "===" then
        Report.Report "UNKNOWN OPERATOR" region ["=="] $
          Report.toCodeSnippet source region Nothing
            (
              "Elm does not have a (===) operator like JavaScript."
            ,
              "Switch to (==) instead."
            )

      else if op == "!=" || op == "!==" then
        Report.Report "UNKNOWN OPERATOR" region ["/="] $
          Report.toCodeSnippet source region Nothing
            (
              D.reflow $
                "Elm uses a different name for the “not equal” operator:"
            ,
              D.stack
                [ D.reflow "Switch to (/=) instead."
                , D.toSimpleNote $
                    "Our (/=) operator is supposed to look like a real “not equal” sign (≠). I hope that history will remember ("
                    ++ N.toString op ++ ") as a weird and temporary choice."
                ]
            )

      else if op == "**" then
        Report.Report "UNKNOWN OPERATOR" region ["^","*"] $
          Report.toCodeSnippet source region Nothing
            (
              D.reflow $
                "I do not recognize the (**) operator:"
            ,
              D.reflow $
                "Switch to (^) for exponentiation. Or switch to (*) for multiplication."
            )

      else if op == "%" then
        Report.Report "UNKNOWN OPERATOR" region [] $
          Report.toCodeSnippet source region Nothing
            (
              D.reflow $
                "Elm does not use (%) as the remainder operator:"
            ,
              D.stack
                [ D.reflow $
                    "If you want the behavior of (%) like in JavaScript, switch to:\
                    \ <-lang.org/packages/elm/core/latest/Basics#remainderBy>"
                , D.reflow $
                    "If you want modular arithmetic like in math, switch to:\
                    \ <-lang.org/packages/elm/core/latest/Basics#modBy>"
                , D.reflow $
                    "The difference is how things work when negative numbers are involved."
                ]
            )

      else
        let
          suggestions =
            map N.toString $ take 2 $
              Suggest.sort (N.toString op) N.toString (Set.toList locals)

          format altOp =
            D.green $ "(" <> altOp <> ")"
        in
        Report.Report "UNKNOWN OPERATOR" region suggestions $
          Report.toCodeSnippet source region Nothing
            (
              D.reflow $
                "I do not recognize the (" ++ N.toString op ++ ") operator."
            ,
              D.fillSep $
                ["Is","there","an","`import`","and","`exposing`","entry","for","it?"]
                ++
                  case map D.fromString suggestions of
                    [] ->
                      []

                    alts ->
                      ["Maybe","you","want"] ++ D.commaSep "or" format alts ++ ["instead?"]
            )

    PatternHasRecordCtor region name ->
      Report.Report "BAD PATTERN" region [] $
        Report.toCodeSnippet source region Nothing
          (
            D.reflow $
              "You can construct records by using `" <> N.toString name
              <> "` as a function, but it is not available in pattern matching like this:"
          ,
            D.reflow $
              "I recommend matching the record as a variable and unpacking it later."
          )

    PortPayloadInvalid region portName _badType invalidPayload ->
      let
        formatDetails (aBadKindOfThing, elaboration) =
          Report.Report "PORT ERROR" region [] $
            Report.toCodeSnippet source region Nothing
              (
                D.reflow $
                  "The `" <> N.toString portName <> "` port is trying to transmit " <> aBadKindOfThing <> ":"
              ,
                D.stack
                  [ elaboration
                  , D.link "Hint"
                      "Ports are not a traditional FFI, so if you have tons of annoying ports, definitely read"
                      "ports"
                      "to learn how they are meant to work. They require a different mindset!"
                  ]
              )
      in
      formatDetails $
        case invalidPayload of
          ExtendedRecord ->
            (
              "an extended record"
            ,
              D.reflow $
                "But the exact shape of the record must be known at compile time. No type variables!"
            )

          Function ->
            (
              "a function"
            ,
              D.reflow $
                "But functions cannot be sent in and out ports. If we allowed functions in from JS\
                \ they may perform some side-effects. If we let functions out, they could produce\
                \ incorrect results because Elm optimizations assume there are no side-effects."
            )

          TypeVariable name ->
            (
              "an unspecified type"
            ,
              D.reflow $
                "But type variables like `" <> N.toString name <> "` cannot flow through ports.\
                \ I need to know exactly what type of data I am getting, so I can guarantee that\
                \ unexpected data cannot sneak in and crash the Elm program."
            )

          UnsupportedType name ->
            (
              "a `" <> N.toString name <> "` value"
            ,
              D.stack
                [ D.reflow $ "I cannot handle that. The types that CAN flow in and out of Elm include:"
                , D.indent 4 $
                    D.reflow $
                      "Ints, Floats, Bools, Strings, Maybes, Lists, Arrays,\
                      \ tuples, records, and JSON values."
                , D.reflow $
                    "Since JSON values can flow through, you can use JSON encoders and decoders\
                    \ to allow other types through as well. More advanced users often just do\
                    \ everything with encoders and decoders for more control and better errors."
                ]
            )

    PortTypeInvalid region name portProblem ->
      let
        formatDetails (before, after) =
          Report.Report "BAD PORT" region [] $
            Report.toCodeSnippet source region Nothing $
              (
                D.reflow before
              ,
                D.stack
                  [ after
                  , D.link "Hint" "Read" "ports"
                      "for more advice. For example, do not end up with one port per JS function!"
                  ]
              )
      in
      formatDetails $
        case portProblem of
          CmdNoArg ->
            (
              "The `" <> N.toString name <> "` port cannot be just a command."
            ,
              D.reflow $
                "It can be (() -> Cmd msg) if you just need to trigger a JavaScript\
                \ function, but there is often a better way to set things up."
            )

          CmdExtraArgs n ->
            (
              "The `" <> N.toString name <> "` port can only send ONE value out to JavaScript."
            ,
              let
                theseItemsInSomething
                  | n == 2 = "both of these items into a tuple or record"
                  | n == 3 = "these " ++ show n ++ " items into a tuple or record"
                  | True = "these " ++ show n ++ " items into a record"
              in
              D.reflow $
                "You can put " ++ theseItemsInSomething ++ " to send them out though."
            )

          CmdBadMsg ->
            (
              "The `" <> N.toString name <> "` port cannot send any messages to the `update` function."
            ,
              D.reflow $
                "It must produce a (Cmd msg) type. Notice the lower case `msg` type\
                \ variable. The command will trigger some JS code, but it will not send\
                \ anything particular back to Elm."
            )

          SubBad ->
            ( "There is something off about this `" <> N.toString name <> "` port declaration."
            ,
              D.stack
                [ D.reflow $
                    "To receive messages from JavaScript, you need to define a port like this:"
                , D.indent 4 $ D.dullyellow $ D.fromString $
                    "port " <> N.toString name <> " : (Int -> msg) -> Sub msg"
                , D.reflow $
                    "Now every time JS sends an `Int` to this port, it is converted to a `msg`.\
                    \ And if you subscribe, those `msg` values will be piped into your `update`\
                    \ function. The only thing you can customize here is the `Int` type."
                ]
            )

          NotCmdOrSub ->
            (
              "I am confused about the `" <> N.toString name <> "` port declaration."
            ,
              D.reflow $
                "Ports need to produce a command (Cmd) or a subscription (Sub) but\
                \ this is neither. I do not know how to handle this."
            )

    RecursiveAlias region name args tipe others ->
      aliasRecursionReport source region name args tipe others

    RecursiveDecl cyclicValueDefs ->
      let
        toName def =
          case def of
            Can.Def name _ _ -> name
            Can.TypedDef name _ _ _ _ -> name

        makeTheory question details =
          D.fillSep $ map (D.dullyellow . D.fromString) (words question) ++ map D.fromString (words details)
      in
      case map toName cyclicValueDefs of
        [] ->
          error
            "There is some compiler bug in reporting cyclic definitions.\n\
            \Please get a </> and share the details at\n\
            \<>"

        A.At region name : otherNames ->
          Report.Report "CYCLIC DEFINITION" region [] $
            Report.toCodeSnippet source region Nothing $
              case map A.toValue otherNames of
                [] ->
                  (
                    D.reflow $
                      "The `" <> N.toString name <> "` value is defined directly in terms of itself, causing an infinite loop."
                  ,
                    D.stack
                      [ makeTheory "Are you trying to mutate a variable?" $
                          "Elm does not have mutation, so when I see " ++ N.toString name
                          ++ " defined in terms of " ++ N.toString name
                          ++ ", I treat it as a recursive definition. Try giving the new value a new name!"
                      , makeTheory "Maybe you DO want a recursive value?" $
                          "To define " ++ N.toString name ++ " we need to know what " ++ N.toString name
                          ++ " is, so let’s expand it. Wait, but now we need to know what " ++ N.toString name
                          ++ " is, so let’s expand it... This will keep going infinitely!"
                      , D.link "Hint"
                          "The root problem is often a typo in some variable name, but I recommend reading"
                          "bad-recursion"
                          "for more detailed advice, especially if you actually do need a recursive value."
                      ]
                  )

                names ->
                  (
                    D.reflow $
                      "The `" <> N.toString name <> "` definition is causing a very tricky infinite loop."
                  ,
                    D.stack
                      [ D.reflow $
                          "The `" <> N.toString name
                          <> "` value depends on itself through the following chain of definitions:"
                      , D.cycle 4 (name:names)
                      , D.link "Hint"
                          "The root problem is often a typo in some variable name, but I recommend reading"
                          "bad-recursion"
                          "for more detailed advice, especially if you actually do want mutually recursive values."
                      ]
                  )

    RecursiveLet (A.At region name) names ->
      Report.Report "CYCLIC VALUE" region [] $
        Report.toCodeSnippet source region Nothing $
          case names of
            [] ->
              let
                makeTheory question details =
                  D.fillSep $ map (D.dullyellow . D.fromString) (words question) ++ map D.fromString (words details)
              in
              (
                D.reflow $
                  "The `" <> N.toString name <> "` value is defined directly in terms of itself, causing an infinite loop."
              ,
                D.stack
                  [ makeTheory "Are you trying to mutate a variable?" $
                      "Elm does not have mutation, so when I see " ++ N.toString name
                      ++ " defined in terms of " ++ N.toString name
                      ++ ", I treat it as a recursive definition. Try giving the new value a new name!"
                  , makeTheory "Maybe you DO want a recursive value?" $
                      "To define " ++ N.toString name ++ " we need to know what " ++ N.toString name
                      ++ " is, so let’s expand it. Wait, but now we need to know what " ++ N.toString name
                      ++ " is, so let’s expand it... This will keep going infinitely!"
                  , D.link "Hint"
                      "The root problem is often a typo in some variable name, but I recommend reading"
                      "bad-recursion"
                      "for more detailed advice, especially if you actually do need a recursive value."
                  ]
              )

            _ ->
              (
                D.reflow $
                  "I do not allow cyclic values in `let` expressions."
              ,
                D.stack
                  [ D.reflow $
                      "The `" <> N.toString name
                      <> "` value depends on itself through the following chain of definitions:"
                  , D.cycle 4 (name:names)
                  , D.link "Hint"
                      "The root problem is often a typo in some variable name, but I recommend reading"
                      "bad-recursion"
                      "for more detailed advice, especially if you actually do want mutually recursive values."
                  ]
              )

    Shadowing name r1 r2 ->
      Report.Report "SHADOWING" r2 [] $
        Report.toCodePair source r1 r2
          ( "These variables cannot have the same name:"
          , advice
          )
          ( D.reflow $ "The name `" <> N.toString name <> "` is first defined here:"
          , "But then it is defined AGAIN over here:"
          , advice
          )
      where
        advice =
          D.stack
            [ D.reflow $
                "Think of a more helpful name for one of them and you should be all set!"
            , D.link "Note"
                "Linters advise against shadowing, so Elm makes “best practices” the default. Read"
                "shadowing"
                "for more details on this choice."
            ]

    TupleLargerThanThree region ->
      Report.Report "BAD TUPLE" region [] $
        Report.toCodeSnippet source region Nothing
          (
            "I only accept tuples with two or three items. This has too many:"
          ,
            D.stack
              [ D.reflow $
                  "I recommend switching to records. Each item will be named, and you can use\
                  \ the `point.x` syntax to access them."
              , D.link "Note" "Read" "tuples"
                  "for more comprehensive advice on working with large chunks of data in Elm."
              ]
          )

    TypeVarsUnboundInUnion unionRegion typeName allVars unbound unbounds ->
      unboundTypeVars source unionRegion ["type"] typeName allVars unbound unbounds

    TypeVarsMessedUpInAlias aliasRegion typeName allVars unusedVars unboundVars ->
      case (unusedVars, unboundVars) of
        -- only unused variables: suggest removing them
        (unused:unuseds, []) ->
          let
            backQuote name =
              "`" <> D.fromName name <> "`"

            allUnusedNames =
              map fst unusedVars

            (title, subRegion, overview, stuff) =
              case unuseds of
                [] ->
                  ("UNUSED TYPE VARIABLE"
                  , Just (snd unused)
                  , ["Type","alias",backQuote typeName,"does","not","use","the"
                    ,backQuote (fst unused),"type","variable."
                    ]
                  , [D.dullyellow (backQuote (fst unused))]
                  )

                _:_ ->
                  ( "UNUSED TYPE VARIABLES"
                  , Nothing
                  , ["Type","variables"]
                    ++ D.commaSep "and" id (map D.fromName allUnusedNames)
                    ++ ["are","unused","in","the",backQuote typeName,"definition."]
                  , D.commaSep "and" D.dullyellow (map D.fromName allUnusedNames)
                  )
          in
          Report.Report title aliasRegion [] $
            Report.toCodeSnippet source aliasRegion subRegion
              (
                D.fillSep overview
              ,
                D.stack
                  [ D.fillSep $
                      ["I","recommend","removing"] ++ stuff ++ ["from","the","declaration,","like","this:"]
                  , D.indent 4 $ D.hsep $
                      ["type","alias",D.green (D.fromName typeName)]
                      ++ map D.fromName (filter (`notElem` allUnusedNames) allVars)
                      ++ ["=", "..."]
                  , D.reflow $
                      "Why? Well, if I allowed `type alias Height a = Float` I would need to answer\
                      \ some weird questions. Is `Height Bool` the same as `Float`? Is `Height Bool`\
                      \ the same as `Height Int`? My solution is to not need to ask them!"
                  ]
              )

        -- only unbound variables: same report as for union types
        ([], unbound:unbounds) ->
          unboundTypeVars source aliasRegion ["type","alias"] typeName allVars unbound unbounds

        -- both problems at once: describe them together with one suggestion
        (_, _) ->
          let
            unused = map fst unusedVars
            unbound = map fst unboundVars

            theseAreUsed =
              case unbound of
                [x] ->
                  ["Type","variable",D.dullyellow ("`" <> D.fromName x <> "`"),"appears"
                  ,"in","the","definition,","but","I","do","not","see","it","declared."
                  ]

                _ ->
                  ["Type","variables"]
                  ++ D.commaSep "and" D.dullyellow (map D.fromName unbound)
                  ++ ["are","used","in","the","definition,","but","I","do","not","see","them","declared."]

            butTheseAreUnused =
              case unused of
                [x] ->
                  ["Likewise,","type","variable"
                  ,D.dullyellow ("`" <> D.fromName x <> "`")
                  ,"is","declared,","but","not","used."
                  ]

                _ ->
                  ["Likewise,","type","variables"]
                  ++ D.commaSep "and" D.dullyellow (map D.fromName unused)
                  ++ ["are","declared,","but","not","used."]
          in
          Report.Report "TYPE VARIABLE PROBLEMS" aliasRegion [] $
            Report.toCodeSnippet source aliasRegion Nothing
              (
                D.reflow $
                  "Type alias `" <> N.toString typeName <> "` has some type variable problems."
              ,
                D.stack
                  [ D.fillSep $ theseAreUsed ++ butTheseAreUnused
                  , D.reflow $
                      "My guess is that a definition like this will work better:"
                  , D.indent 4 $ D.hsep $
                      ["type", "alias", D.fromName typeName]
                      ++ map D.fromName (filter (`notElem` unused) allVars)
                      ++ map (D.green . D.fromName) unbound
                      ++ ["=", "..."]
                  ]
              )
-- | Report type variables that appear in a `type` / `type alias` body but
-- are not declared on the left-hand side. @tipe@ is the declaration
-- keyword(s) as docs (e.g. ["type"] or ["type","alias"]); @(unboundVar,
-- varRegion)@ is the first offender and @unboundVars@ any further ones.
unboundTypeVars :: Code.Source -> R.Region -> [D.Doc] -> N.Name -> [N.Name] -> (N.Name, R.Region) -> [(N.Name, R.Region)] -> Report.Report
unboundTypeVars source declRegion tipe typeName allVars (unboundVar, varRegion) unboundVars =
  let
    backQuote name =
      "`" <> D.fromName name <> "`"

    -- singular report points at the one variable; plural report covers
    -- the whole declaration region
    (title, subRegion, overview) =
      case map fst unboundVars of
        [] ->
          ( "UNBOUND TYPE VARIABLE"
          , Just varRegion
          , ["The",backQuote typeName]
            ++ tipe
            ++ ["uses","an","unbound","type","variable",D.dullyellow (backQuote unboundVar),"in","its","definition:"]
          )

        vars ->
          ( "UNBOUND TYPE VARIABLES"
          , Nothing
          , ["Type","variables"]
            ++ D.commaSep "and" D.dullyellow (D.fromName unboundVar : map D.fromName vars)
            ++ ["are","unbound","in","the",backQuote typeName] ++ tipe ++ ["definition:"]
          )
  in
  Report.Report title declRegion [] $
    Report.toCodeSnippet source declRegion subRegion
      (
        D.fillSep overview
      ,
        D.stack
          [ D.reflow $
              "You probably need to change the declaration to something like this:"
          -- suggested fix: keep the declared vars, append the unbound ones
          , D.indent 4 $ D.hsep $
              tipe
              ++ [D.fromName typeName]
              ++ map D.fromName allVars
              ++ map (D.green . D.fromName) (unboundVar : map fst unboundVars)
              ++ ["=", "..."]
          , D.reflow $
              "Why? Well, imagine one `" ++ N.toString typeName ++ "` where `" ++ N.toString unboundVar ++
              "` is an Int and another where it is a Bool. When we explicitly list the type\
              \ variables, the type checker can see that they are actually different types."
          ]
      )
NAME CLASH
-- | Report that the same name is declared twice. The second occurrence
-- (@r2@) is the primary region of the report. The given message must be
-- a complete sentence ending in punctuation, since text is appended to it.
nameClash :: Code.Source -> R.Region -> R.Region -> String -> Report.Report
nameClash source r1 r2 messageThatEndsWithPunctuation =
  Report.Report "NAME CLASH" r2 [] $
    Report.toCodePair source r1 r2 pairMessage tripleMessage
  where
    rename :: Doc
    rename =
      "How can I know which one you want? Rename one of them!"

    -- two-part form passed to toCodePair
    pairMessage =
      ( D.reflow messageThatEndsWithPunctuation
      , rename
      )

    -- three-part form passed to toCodePair
    tripleMessage =
      ( D.reflow (messageThatEndsWithPunctuation <> " One here:")
      , "And another one here:"
      , rename
      )
AMBIGUOUS NAME
-- | Report a name that resolves to more than one imported module.
-- @maybePrefix@ is the qualifier the user wrote (if any), @possibleHomes@
-- the modules that could be meant, and @thing@ the noun used in the
-- message ("variable", "type", "constructor", "operator").
ambiguousName :: Code.Source -> R.Region -> Maybe N.Name -> N.Name -> [ModuleName.Canonical] -> String -> Report.Report
ambiguousName source region maybePrefix name possibleHomes thing =
  Report.Report "AMBIGUOUS NAME" region [] $
    Report.toCodeSnippet source region Nothing $
      case maybePrefix of
        -- unqualified use: list the modules that expose the name
        Nothing ->
          let
            homeToYellowDoc (ModuleName.Canonical _ home) =
              D.dullyellow (D.fromName home)

            bothOrAll =
              if length possibleHomes == 2 then "both" else "all"
          in
          (
            D.reflow $ "This usage of `" ++ N.toString name ++ "` is ambiguous."
          ,
            D.stack
              [ D.reflow $
                  "Check your imports. The following modules " ++ bothOrAll
                  ++ " expose a `" ++ N.toString name ++ "` " ++ thing ++ ":"
              , D.indent 4 $ D.vcat $ map homeToYellowDoc possibleHomes
              , D.reflowLink "Read" "imports" "to learn how to clarify which one you want."
              ]
          )

        -- qualified use: show the import lines the prefix could refer to,
        -- adding `as` when the prefix is not the module's own name
        Just prefix ->
          let
            homeToYellowDoc (ModuleName.Canonical _ home) =
              if prefix == home then
                D.blue "import" <+> D.dullyellow (D.fromName home)
              else
                D.blue "import" <+> D.dullyellow (D.fromName home) <+> D.blue "as" <+> D.dullyellow (D.fromName prefix)

            eitherOrAny =
              if length possibleHomes == 2 then "either" else "any"
          in
          (
            D.reflow $ "This usage of `" ++ toQualString prefix name ++ "` is ambiguous."
          ,
            D.stack
              [ D.reflow $
                  "It could refer to a " ++ thing ++ " from "
                  ++ eitherOrAny ++ " of these imports:"
              , D.indent 4 $ D.vcat $ map homeToYellowDoc possibleHomes
              , D.reflowLink "Read" "imports" "to learn how to clarify which one you want."
              ]
          )
-- | Report a (possibly qualified) name that resolves to nothing, with
-- "did you mean" suggestions drawn from 'PossibleNames'. @thing@ is the
-- noun used in the message ("variable", "type", "constructor").
notFound :: Code.Source -> R.Region -> Maybe N.Name -> N.Name -> String -> PossibleNames -> Report.Report
notFound source region maybePrefix name thing (PossibleNames locals quals) =
  let
    -- the name exactly as the user wrote it (qualified or not)
    givenName =
      maybe N.toString toQualString maybePrefix name

    -- all candidate names: locals plus every qualified form in scope
    possibleNames =
      let
        addQuals prefix localSet allNames =
          Set.foldr (\x xs -> toQualString prefix x : xs) allNames localSet
      in
      Map.foldrWithKey addQuals (map N.toString (Set.toList locals)) quals

    -- top suggestions, ranked by similarity to what was written
    nearbyNames =
      take 4 (Suggest.sort givenName id possibleNames)

    toDetails noSuggestionDetails yesSuggestionDetails =
      case nearbyNames of
        [] ->
          D.stack
            [ D.reflow noSuggestionDetails
            , D.link "Hint" "Read" "imports" "to see how `import` declarations work in Elm."
            ]

        suggestions ->
          D.stack
            [ D.reflow yesSuggestionDetails
            , D.indent 4 $ D.vcat $ map D.dullyellow $ map D.fromString suggestions
            , D.link "Hint" "Read" "imports" "to see how `import` declarations work in Elm."
            ]

  in
  Report.Report "NAMING ERROR" region nearbyNames $
    Report.toCodeSnippet source region Nothing
      (
        D.reflow $
          "I cannot find a `" ++ givenName ++ "` " ++ thing ++ ":"
      ,
        -- tailor the advice to whether a qualifier was written, and
        -- whether that qualifier matches a known import
        case maybePrefix of
          Nothing ->
            toDetails
              "Is there an `import` or `exposing` missing up top?"
              "These names seem close though:"

          Just prefix ->
            case Map.lookup prefix quals of
              Nothing ->
                toDetails
                  ("I cannot find a `" ++ N.toString prefix ++ "` module. Is there an `import` for it?")
                  ("I cannot find a `" ++ N.toString prefix ++ "` import. These names seem close though:")

              Just _ ->
                toDetails
                  ("The `" ++ N.toString prefix ++ "` module does not expose a `" ++ N.toString name ++ "` " ++ thing ++ ".")
                  ("The `" ++ N.toString prefix ++ "` module does not expose a `" ++ N.toString name ++ "` " ++ thing ++ ". These names seem close though:")
      )
-- | Render a qualified name such as @Foo.bar@ as a String.
toQualString :: N.Name -> N.Name -> String
toQualString prefix name =
  concat [ N.toString prefix, ".", N.toString name ]
- VAR ERROR
varErrorToReport : : VarError - > Report . Report
varErrorToReport ( VarError kind name problem suggestions ) =
let
learnMore orMaybe =
D.reflow $
orMaybe < > " ` import ` works different than you expect ? Learn all about it here : "
< > D.hintLink " imports "
namingError overview maybeStarter specializedSuggestions =
Report.reportDoc " NAMING ERROR " Nothing overview $
case D.maybeYouWant ' maybeStarter specializedSuggestions of
Nothing - >
learnMore " Maybe "
Just doc - >
D.stack [ doc , learnMore " Or maybe " ]
specialHint =
Report.reportDoc " NAMING ERROR " Nothing ( cannotFind kind name ) ( D.hsep specialHint )
in
case problem of
Ambiguous - >
namingError ( ambiguous kind name ) Nothing suggestions
UnknownQualifier qualifier localName - >
namingError
( cannotFind kind name )
( Just $ text $ " No module called ` " < > qualifier < > " ` has been imported . " )
( map ( \modul - > modul < > " . " < > localName ) suggestions )
QualifiedUnknown qualifier localName - >
namingError
( cannotFind kind name )
( Just $ text $ " ` " < > qualifier < > " ` does not expose ` " < > localName < > " ` . " )
( map ( \v - > qualifier < > " . " < > v ) suggestions )
ExposedUnknown - >
case name of
" ! = " - > ( name )
" ! = = " - > ( name )
" = = = " - > specialNamingError equalsHint
" % " - > specialNamingError modHint
_ - > namingError ( cannotFind kind name ) Nothing suggestions
cannotFind : : VarKind - > Text - > [ Doc ]
cannotFind kind rawName =
let ( a , thing , name ) = toKindInfo kind rawName in
[ " Can not " , " find " , a , thing , " named " , D.dullyellow name < > " : " ]
ambiguous : : VarKind - > Text - > [ Doc ]
ambiguous kind rawName =
let ( _ a , thing , name ) = toKindInfo kind rawName in
[ " This " , " usage " , " of " , " the " , D.dullyellow name , thing , " is " , " ambiguous . " ]
notEqualsHint : : Text - > [ Doc ]
notEqualsHint op =
[ " Looking " , " for " , " the " , " “ not " , " equal ” " , " operator ? " , " The " , " traditional "
, D.dullyellow $ text $ " ( " < > op < > " ) "
, " is " , " replaced " , " by " , D.green " ( /= ) " , " in " , " Elm . " , " It " , " is " , " meant "
, " to " , " look " , " like " , " the " , " “ not " , " equal ” " , " sign " , " from " , " math ! " , " ( ≠ ) "
]
equalsHint : : [ Doc ]
equalsHint =
[ " A " , " special " , D.dullyellow " (= = =) " , " operator " , " is " , " not " , " needed "
, " in " , " Elm . " , " We " , " use " , D.green " (= =) " , " for " , " everything ! "
]
modHint : : [ Doc ]
modHint =
[ " Rather " , " than " , " a " , ( % ) " , " operator , "
, " Elm " , " has " , " a " , D.green " modBy " , " function . "
, " Learn " , " more " , " here : "
, " < -lang.org/packages/elm/core/latest/Basics#modBy > "
]
varErrorToReport :: VarError -> Report.Report
varErrorToReport (VarError kind name problem suggestions) =
let
learnMore orMaybe =
D.reflow $
orMaybe <> " `import` works different than you expect? Learn all about it here: "
<> D.hintLink "imports"
namingError overview maybeStarter specializedSuggestions =
Report.reportDoc "NAMING ERROR" Nothing overview $
case D.maybeYouWant' maybeStarter specializedSuggestions of
Nothing ->
learnMore "Maybe"
Just doc ->
D.stack [ doc, learnMore "Or maybe" ]
specialNamingError specialHint =
Report.reportDoc "NAMING ERROR" Nothing (cannotFind kind name) (D.hsep specialHint)
in
case problem of
Ambiguous ->
namingError (ambiguous kind name) Nothing suggestions
UnknownQualifier qualifier localName ->
namingError
(cannotFind kind name)
(Just $ text $ "No module called `" <> qualifier <> "` has been imported.")
(map (\modul -> modul <> "." <> localName) suggestions)
QualifiedUnknown qualifier localName ->
namingError
(cannotFind kind name)
(Just $ text $ "`" <> qualifier <> "` does not expose `" <> localName <> "`.")
(map (\v -> qualifier <> "." <> v) suggestions)
ExposedUnknown ->
case name of
"!=" -> specialNamingError (notEqualsHint name)
"!==" -> specialNamingError (notEqualsHint name)
"===" -> specialNamingError equalsHint
"%" -> specialNamingError modHint
_ -> namingError (cannotFind kind name) Nothing suggestions
cannotFind :: VarKind -> Text -> [Doc]
cannotFind kind rawName =
let ( a, thing, name ) = toKindInfo kind rawName in
[ "Cannot", "find", a, thing, "named", D.dullyellow name <> ":" ]
ambiguous :: VarKind -> Text -> [Doc]
ambiguous kind rawName =
let ( _a, thing, name ) = toKindInfo kind rawName in
[ "This", "usage", "of", "the", D.dullyellow name, thing, "is", "ambiguous." ]
notEqualsHint :: Text -> [Doc]
notEqualsHint op =
[ "Looking", "for", "the", "“not", "equal”", "operator?", "The", "traditional"
, D.dullyellow $ text $ "(" <> op <> ")"
, "is", "replaced", "by", D.green "(/=)", "in", "Elm.", "It", "is", "meant"
, "to", "look", "like", "the", "“not", "equal”", "sign", "from", "math!", "(≠)"
]
equalsHint :: [Doc]
equalsHint =
[ "A", "special", D.dullyellow "(===)", "operator", "is", "not", "needed"
, "in", "Elm.", "We", "use", D.green "(==)", "for", "everything!"
]
modHint :: [Doc]
modHint =
[ "Rather", "than", "a", D.dullyellow "(%)", "operator,"
, "Elm", "has", "a", D.green "modBy", "function."
, "Learn", "more", "here:"
, "<-lang.org/packages/elm/core/latest/Basics#modBy>"
]
-}
_argMismatchReport :: Code.Source -> R.Region -> String -> N.Name -> Int -> Int -> Report.Report
_argMismatchReport source region kind name expected actual =
let
numArgs =
"too "
<> (if actual < expected then "few" else "many")
<> " arguments"
in
Report.Report (map Char.toUpper numArgs) region [] $
Report.toCodeSnippet source region Nothing
(
D.reflow $
kind <> " " <> N.toString name <> " has " <> numArgs <> "."
,
D.reflow $
"Expecting " <> show expected <> ", but got " <> show actual <> "."
)
aliasRecursionReport :: Code.Source -> R.Region -> N.Name -> [N.Name] -> Src.Type -> [N.Name] -> Report.Report
aliasRecursionReport source region name args tipe others =
case others of
[] ->
Report.Report "ALIAS PROBLEM" region [] $
Report.toCodeSnippet source region Nothing
(
"This type alias is recursive, forming an infinite type!"
,
D.stack
[ D.reflow $
"When I expand a recursive type alias, it just keeps getting bigger and bigger.\
\ So dealiasing results in an infinitely large type! Try this instead:"
, D.indent 4 $
aliasToUnionDoc name args tipe
, D.link "Hint"
"This is kind of a subtle distinction. I suggested the naive fix, but I recommend reading"
"recursive-alias"
"for ideas on how to do better."
]
)
_ ->
Report.Report "ALIAS PROBLEM" region [] $
Report.toCodeSnippet source region Nothing
(
"This type alias is part of a mutually recursive set of type aliases."
,
D.stack
[ "It is part of this cycle of type aliases:"
, D.cycle 4 (name:others)
, D.reflow $
"You need to convert at least one of these type aliases into a `type`."
, D.link "Note" "Read" "recursive-alias"
"to learn why this `type` vs `type alias` distinction matters. It is subtle but important!"
]
)
aliasToUnionDoc :: N.Name -> [N.Name] -> Src.Type -> Doc
aliasToUnionDoc name args tipe =
D.vcat
[ D.dullyellow $
"type" <+> D.fromName name <+> (foldr (<+>) "=" (map D.fromName args))
, D.green $
D.indent 4 (D.fromName name)
, D.dullyellow $
D.indent 8 (RT.srcToDoc RT.App tipe)
]
|
5d393b543ffb19a8364649fa46b497f7a1e3f8f3d3f86cfb8544168797737ee1 | FranklinChen/hugs98-plus-Sep2006 | XmlLex.hs | module XmlLex
( xmlLex -- :: String -> String -> [Token]
, xmlReLex -- :: Posn -> String -> [Token]
, posInNewCxt -- :: String -> Posn
, Posn(..)
, TokenT(..)
, Token
, Special(..)
, Section(..)
) where
-- This is a hand-written lexer for tokenising the text of an XML
-- document so that it is ready for parsing. It attaches position
-- information in (line,column) format to every token. The main
-- entry point is xmlLex. A secondary entry point, xmlReLex, is
-- provided for when the parser needs to stuff a string back onto
-- the front of the text and re-tokenise it (typically when expanding
-- macros).
--
-- As one would expect, the lexer is essentially a small finite
-- state machine.
import Prelude
import Char
import XmlChar
data Where = InTag | NotInTag
deriving (Eq)
type Token = (Posn, TokenT)
data Posn = Pn String Int Int (Maybe Posn) -- filename, line, column, incl.point
deriving (Eq)
instance Show Posn where
showsPrec p (Pn f l c i) = showString f .
showString " at line " . shows l .
showString " col " . shows c .
( case i of
Nothing -> id
Just p -> showString "\n used by " .
shows p )
data TokenT =
TokCommentOpen -- <!--
| TokCommentClose -- -->
| TokPIOpen -- <?
| TokPIClose -- ?>
| TokSectionOpen -- <![
| TokSectionClose -- ]]>
CDATA INCLUDE IGNORE etc
| TokSpecialOpen -- <!
DOCTYPE ELEMENT ATTLIST etc
| TokEndOpen -- </
| TokEndClose -- />
| TokAnyOpen -- <
| TokAnyClose -- >
| TokSqOpen -- [
| TokSqClose -- ]
| TokEqual -- =
| TokQuery -- ?
| TokStar -- *
| TokPlus -- +
| TokAmp -- &
| TokSemi -- ;
| TokHash -- #
| TokBraOpen -- (
| TokBraClose -- )
| TokPipe -- |
| TokPercent -- %
| TokComma -- ,
| TokQuote -- '' or ""
| TokName String -- begins with letter
| TokFreeText String -- any character data
| TokNull -- fake token
deriving (Eq)
data Special =
DOCTYPEx
| ELEMENTx
| ATTLISTx
| ENTITYx
| NOTATIONx
deriving (Eq,Show)
data Section =
CDATAx
| INCLUDEx
| IGNOREx
deriving (Eq,Show)
instance Show TokenT where
showsPrec p TokCommentOpen = showString "<!--"
showsPrec p TokCommentClose = showString "-->"
showsPrec p TokPIOpen = showString "<?"
showsPrec p TokPIClose = showString "?>"
showsPrec p TokSectionOpen = showString "<!["
showsPrec p TokSectionClose = showString "]]>"
showsPrec p (TokSection s) = showsPrec p s
showsPrec p TokSpecialOpen = showString "<!"
showsPrec p (TokSpecial s) = showsPrec p s
showsPrec p TokEndOpen = showString "</"
showsPrec p TokEndClose = showString "/>"
showsPrec p TokAnyOpen = showString "<"
showsPrec p TokAnyClose = showString ">"
showsPrec p TokSqOpen = showString "["
showsPrec p TokSqClose = showString "]"
showsPrec p TokEqual = showString "="
showsPrec p TokQuery = showString "?"
showsPrec p TokStar = showString "*"
showsPrec p TokPlus = showString "+"
showsPrec p TokAmp = showString "&"
showsPrec p TokSemi = showString ";"
showsPrec p TokHash = showString "#"
showsPrec p TokBraOpen = showString "("
showsPrec p TokBraClose = showString ")"
showsPrec p TokPipe = showString "|"
showsPrec p TokPercent = showString "%"
showsPrec p TokComma = showString ","
showsPrec p TokQuote = showString "' or \""
showsPrec p (TokName s) = showString s
showsPrec p (TokFreeText s) = showString s
showsPrec p TokNull = showString "(null)"
--trim, revtrim :: String -> String
trim = f . f where f = reverse .
--revtrim = f.reverse.f where f = dropWhile isSpace
revtrim = reverse . dropWhile (=='\n')
emit :: TokenT -> Posn -> Token
emit tok p = forcep p `seq` (p,tok)
forcep (Pn f n m i) = m `seq` n
lexerror :: String -> Posn -> a
lexerror s p = error ("Lexical error in "++show p++": "++s++"\n")
addcol :: Int -> Posn -> Posn
addcol n (Pn f r c i) = Pn f r (c+n) i
newline, tab :: Posn -> Posn
newline (Pn f r c i) = Pn f (r+1) 1 i
tab (Pn f r c i) = Pn f r (((c`div`8)+1)*8) i
white :: Char -> Posn -> Posn
white ' ' = addcol 1
white '\n' = newline
white '\r' = id
white '\t' = tab
white '\xa0' = addcol 1
skip :: Int -> Posn -> String -> (Posn->String->[Token]) -> [Token]
skip n p s k = k (addcol n p) (drop n s)
blank :: ([Where]->Posn->String->[Token]) -> [Where]-> Posn-> String-> [Token]
blank k (InTag:_) p [] = lexerror "unexpected EOF in tag" p
blank k _ p [] = []
blank k w p (' ': s) = blank k w (addcol 1 p) s
blank k w p ('\t':s) = blank k w (tab p) s
blank k w p ('\n':s) = blank k w (newline p) s
blank k w p ('\r':s) = blank k w p s
blank k w p ('\xa0': s) = blank k w (addcol 1 p) s
blank k w p s = k w p s
prefixes :: String -> String -> Bool
[] `prefixes` ys = True
(x:xs) `prefixes` (y:ys) = x==y && xs `prefixes` ys
error " unexpected EOF in prefix "
accumulateUntil (c:cs) tok acc pos p [] k =
lexerror ("unexpected EOF while looking for "++c:cs++" after "++show pos) p
accumulateUntil (c:cs) tok acc pos p (s:ss) k
| c==s && cs `prefixes` ss = emit (TokFreeText (reverse acc)) pos:
emit tok p: skip (length cs) p ss k
| isSpace s = accumulateUntil (c:cs) tok (s:acc) pos (white s p) ss k
| otherwise = accumulateUntil (c:cs) tok (s:acc) pos (addcol 1 p) ss k
----
posInNewCxt :: String -> Maybe Posn -> Posn
posInNewCxt name pos = Pn name 1 1 pos
xmlLex :: String -> String -> [Token]
xmlLex filename = xmlAny [] (posInNewCxt ("file "++filename) Nothing)
xmlReLex :: Posn -> String -> [Token]
xmlReLex p s
| "INCLUDE" `prefixes` s = emit (TokSection INCLUDEx) p: k 7
| "IGNORE" `prefixes` s = emit (TokSection IGNOREx) p: k 6
| otherwise = blank xmlAny [] p s
where
k n = skip n p s (blank xmlAny [])
: : Posn - > String - > [ Token ]
xmltop p [ ] = [ ]
xmltop p s
| " < ? " ` prefixes ` s = emit TokPIOpen p : next 2 ( xmlPI [ InTag ] )
| " < ! -- " ` prefixes ` s = emit TokCommentOpen p : next 4 ( xmlComment [ ] )
| " < ! " ` prefixes ` s = emit TokSpecialOpen p : next 2 ( xmlSpecial [ InTag ] )
-- | otherwise = lexerror "expected <?xml?> or <!DOCTYPE>" p
where next n k = skip n p s k
xmlPI w p s = xmlName p s (blank xmlPIEnd w)
xmlPIEnd w p s = accumulateUntil "?>" TokPIClose "" p p s
(blank xmlAny (tail w))
xmlComment w p s = accumulateUntil "-->" TokCommentClose "" p p s
(blank xmlAny w)
-- Note: the order of the clauses in xmlAny is very important.
Some matches must precede the NotInTag test , the rest must follow it .
xmlAny :: [Where] -> Posn -> String -> [Token]
xmlAny (InTag:_) p [] = lexerror "unexpected EOF inside tag" p
xmlAny _ p [] = []
xmlAny w p s@('<':ss)
| "?" `prefixes` ss = emit TokPIOpen p: skip 2 p s (xmlPI (InTag:w))
| "!--" `prefixes` ss = emit TokCommentOpen p: skip 4 p s (xmlComment w)
| "![" `prefixes` ss = emit TokSectionOpen p: skip 3 p s (xmlSection w)
| "!" `prefixes` ss = emit TokSpecialOpen p:
skip 2 p s (xmlSpecial (InTag:w))
| "/" `prefixes` ss = emit TokEndOpen p:
skip 2 p s (xmlTag (InTag:tail w))
| otherwise = emit TokAnyOpen p:
skip 1 p s (xmlTag (InTag:NotInTag:w))
xmlAny (_:_:w) p s@('/':ss)
| ">" `prefixes` ss = emit TokEndClose p: skip 2 p s (xmlAny w)
xmlAny w p ('&':ss) = emit TokAmp p: accumulateUntil ";" TokSemi "" p
(addcol 1 p) ss (xmlAny w)
xmlAny w@(NotInTag:_) p s = xmlContent "" w p p s
xmlAny w p ('>':ss) = emit TokAnyClose p: xmlAny (tail w) (addcol 1 p) ss
xmlAny w p ('[':ss) = emit TokSqOpen p: blank xmlAny (InTag:w) (addcol 1 p) ss
xmlAny w p (']':ss)
| "]>" `prefixes` ss =
emit TokSectionClose p: skip 3 p (']':ss) (xmlAny (tail w))
| otherwise = emit TokSqClose p: blank xmlAny (tail w) (addcol 1 p) ss
xmlAny w p ('(':ss) = emit TokBraOpen p: blank xmlAny (InTag:w) (addcol 1 p) ss
xmlAny w p (')':ss) = emit TokBraClose p: blank xmlAny (tail w) (addcol 1 p) ss
xmlAny w p ('=':ss) = emit TokEqual p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('*':ss) = emit TokStar p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('+':ss) = emit TokPlus p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('?':ss) = emit TokQuery p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('|':ss) = emit TokPipe p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('%':ss) = emit TokPercent p: blank xmlAny w (addcol 1 p) ss
xmlAny w p (';':ss) = emit TokSemi p: blank xmlAny w (addcol 1 p) ss
xmlAny w p (',':ss) = emit TokComma p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('#':ss) = emit TokHash p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('"':ss) = emit TokQuote p: accumulateUntil "\"" TokQuote "" p
(addcol 1 p) ss (xmlAny w)
xmlAny w p ('\'':ss) = emit TokQuote p: accumulateUntil "'" TokQuote "" p
(addcol 1 p) ss (xmlAny w)
xmlAny w p s
| isSpace (head s) = blank xmlAny w p s
| isNmstart (head s) = xmlName p s (blank xmlAny w)
| otherwise = lexerror "unrecognised token" p
xmlTag w p s = xmlName p s (blank xmlAny w)
xmlSection = blank xmlSection0
where
xmlSection0 w p s
| "CDATA[" `prefixes` s = emit (TokSection CDATAx) p: accum w p s 6
| "INCLUDE" `prefixes` s = emit (TokSection INCLUDEx) p: k w p s 7
| "IGNORE" `prefixes` s = emit (TokSection IGNOREx) p: k w p s 6
| "%" `prefixes` s = emit TokPercent p: k w p s 1
| otherwise = lexerror ("expected CDATA, IGNORE, or INCLUDE") p
accum w p s n =
let p0 = addcol n p in
accumulateUntil "]]>" TokSectionClose "" p0 p0 (drop n s) (blank xmlAny w)
k w p s n =
skip n p s (xmlAny w)
xmlSpecial w p s
| "DOCTYPE" `prefixes` s = emit (TokSpecial DOCTYPEx) p: k 7
| "ELEMENT" `prefixes` s = emit (TokSpecial ELEMENTx) p: k 7
| "ATTLIST" `prefixes` s = emit (TokSpecial ATTLISTx) p: k 7
| "ENTITY" `prefixes` s = emit (TokSpecial ENTITYx) p: k 6
| otherwise = lexerror "expected DOCTYPE, ELEMENT, ENTITY, or ATTLIST" p
where k n = skip n p s (blank xmlAny w)
xmlName p (s:ss) k
| isNmstart s = gatherName (s:[]) p (addcol 1 p) ss k
| isAlphaNum s || s== ' : ' || s== ' _ ' = gatherName ( s : [ ] ) p ( addcol 1 p ) ss k
| otherwise = lexerror ((show$ord s) ++" expected name") p
where
gatherName acc pos p [] k =
emit (TokName (reverse acc)) pos: k p []
lexerror ( " unexpected EOF in name at " + + show pos ) p
gatherName acc pos p (s:ss) k
-- | isAlphaNum s || s `elem` ".-_:"
| isNmchar s|| s `elem` ".-_:"
= gatherName (s:acc) pos (addcol 1 p) ss k
| otherwise = emit (TokName (reverse acc)) pos: k p (s:ss)
xmlContent acc w pos p [] = if all isSpace acc then []
else lexerror "unexpected EOF between tags" p
xmlContent acc w pos p (s:ss)
| elem s "<&" = if all isSpace acc then xmlAny w p (s:ss)
else emit (TokFreeText (revtrim acc)) pos: xmlAny w p (s:ss)
| isSpace s = xmlContent (s:acc) w pos (white s p) ss
| otherwise = xmlContent (s:acc) w pos (addcol 1 p) ss
ident : : ( String->TokenT ) - >
-- Posn -> String -> [String] ->
-- (Posn->String->[String]->[Token]) -> [Token]
ident tok p s ss k =
let ( name , s0 ) = span ( \c- c ` elem ` " ` -_#.'/\\ " ) s
in emit ( tok name ) p : skip ( length name ) p s ss k
| null | https://raw.githubusercontent.com/FranklinChen/hugs98-plus-Sep2006/54ab69bd6313adbbed1d790b46aca2a0305ea67e/packages/HaXml/bugs/panitz/XmlLex.hs | haskell | :: String -> String -> [Token]
:: Posn -> String -> [Token]
:: String -> Posn
This is a hand-written lexer for tokenising the text of an XML
document so that it is ready for parsing. It attaches position
information in (line,column) format to every token. The main
entry point is xmlLex. A secondary entry point, xmlReLex, is
provided for when the parser needs to stuff a string back onto
the front of the text and re-tokenise it (typically when expanding
macros).
As one would expect, the lexer is essentially a small finite
state machine.
filename, line, column, incl.point
<!--
-->
<?
?>
<![
]]>
<!
</
/>
<
>
[
]
=
?
*
+
&
;
#
(
)
|
%
,
'' or ""
begins with letter
any character data
fake token
trim, revtrim :: String -> String
revtrim = f.reverse.f where f = dropWhile isSpace
--
| otherwise = lexerror "expected <?xml?> or <!DOCTYPE>" p
Note: the order of the clauses in xmlAny is very important.
| isAlphaNum s || s `elem` ".-_:"
Posn -> String -> [String] ->
(Posn->String->[String]->[Token]) -> [Token] | module XmlLex
, Posn(..)
, TokenT(..)
, Token
, Special(..)
, Section(..)
) where
import Prelude
import Char
import XmlChar
data Where = InTag | NotInTag
deriving (Eq)
type Token = (Posn, TokenT)
deriving (Eq)
instance Show Posn where
showsPrec p (Pn f l c i) = showString f .
showString " at line " . shows l .
showString " col " . shows c .
( case i of
Nothing -> id
Just p -> showString "\n used by " .
shows p )
data TokenT =
CDATA INCLUDE IGNORE etc
DOCTYPE ELEMENT ATTLIST etc
deriving (Eq)
data Special =
DOCTYPEx
| ELEMENTx
| ATTLISTx
| ENTITYx
| NOTATIONx
deriving (Eq,Show)
data Section =
CDATAx
| INCLUDEx
| IGNOREx
deriving (Eq,Show)
instance Show TokenT where
showsPrec p TokCommentOpen = showString "<!--"
showsPrec p TokCommentClose = showString "-->"
showsPrec p TokPIOpen = showString "<?"
showsPrec p TokPIClose = showString "?>"
showsPrec p TokSectionOpen = showString "<!["
showsPrec p TokSectionClose = showString "]]>"
showsPrec p (TokSection s) = showsPrec p s
showsPrec p TokSpecialOpen = showString "<!"
showsPrec p (TokSpecial s) = showsPrec p s
showsPrec p TokEndOpen = showString "</"
showsPrec p TokEndClose = showString "/>"
showsPrec p TokAnyOpen = showString "<"
showsPrec p TokAnyClose = showString ">"
showsPrec p TokSqOpen = showString "["
showsPrec p TokSqClose = showString "]"
showsPrec p TokEqual = showString "="
showsPrec p TokQuery = showString "?"
showsPrec p TokStar = showString "*"
showsPrec p TokPlus = showString "+"
showsPrec p TokAmp = showString "&"
showsPrec p TokSemi = showString ";"
showsPrec p TokHash = showString "#"
showsPrec p TokBraOpen = showString "("
showsPrec p TokBraClose = showString ")"
showsPrec p TokPipe = showString "|"
showsPrec p TokPercent = showString "%"
showsPrec p TokComma = showString ","
showsPrec p TokQuote = showString "' or \""
showsPrec p (TokName s) = showString s
showsPrec p (TokFreeText s) = showString s
showsPrec p TokNull = showString "(null)"
trim = f . f where f = reverse .
revtrim = reverse . dropWhile (=='\n')
emit :: TokenT -> Posn -> Token
emit tok p = forcep p `seq` (p,tok)
forcep (Pn f n m i) = m `seq` n
lexerror :: String -> Posn -> a
lexerror s p = error ("Lexical error in "++show p++": "++s++"\n")
addcol :: Int -> Posn -> Posn
addcol n (Pn f r c i) = Pn f r (c+n) i
newline, tab :: Posn -> Posn
newline (Pn f r c i) = Pn f (r+1) 1 i
tab (Pn f r c i) = Pn f r (((c`div`8)+1)*8) i
white :: Char -> Posn -> Posn
white ' ' = addcol 1
white '\n' = newline
white '\r' = id
white '\t' = tab
white '\xa0' = addcol 1
skip :: Int -> Posn -> String -> (Posn->String->[Token]) -> [Token]
skip n p s k = k (addcol n p) (drop n s)
blank :: ([Where]->Posn->String->[Token]) -> [Where]-> Posn-> String-> [Token]
blank k (InTag:_) p [] = lexerror "unexpected EOF in tag" p
blank k _ p [] = []
blank k w p (' ': s) = blank k w (addcol 1 p) s
blank k w p ('\t':s) = blank k w (tab p) s
blank k w p ('\n':s) = blank k w (newline p) s
blank k w p ('\r':s) = blank k w p s
blank k w p ('\xa0': s) = blank k w (addcol 1 p) s
blank k w p s = k w p s
prefixes :: String -> String -> Bool
[] `prefixes` ys = True
(x:xs) `prefixes` (y:ys) = x==y && xs `prefixes` ys
error " unexpected EOF in prefix "
accumulateUntil (c:cs) tok acc pos p [] k =
lexerror ("unexpected EOF while looking for "++c:cs++" after "++show pos) p
accumulateUntil (c:cs) tok acc pos p (s:ss) k
| c==s && cs `prefixes` ss = emit (TokFreeText (reverse acc)) pos:
emit tok p: skip (length cs) p ss k
| isSpace s = accumulateUntil (c:cs) tok (s:acc) pos (white s p) ss k
| otherwise = accumulateUntil (c:cs) tok (s:acc) pos (addcol 1 p) ss k
posInNewCxt :: String -> Maybe Posn -> Posn
posInNewCxt name pos = Pn name 1 1 pos
xmlLex :: String -> String -> [Token]
xmlLex filename = xmlAny [] (posInNewCxt ("file "++filename) Nothing)
xmlReLex :: Posn -> String -> [Token]
xmlReLex p s
| "INCLUDE" `prefixes` s = emit (TokSection INCLUDEx) p: k 7
| "IGNORE" `prefixes` s = emit (TokSection IGNOREx) p: k 6
| otherwise = blank xmlAny [] p s
where
k n = skip n p s (blank xmlAny [])
: : Posn - > String - > [ Token ]
xmltop p [ ] = [ ]
xmltop p s
| " < ? " ` prefixes ` s = emit TokPIOpen p : next 2 ( xmlPI [ InTag ] )
| " < ! -- " ` prefixes ` s = emit TokCommentOpen p : next 4 ( xmlComment [ ] )
| " < ! " ` prefixes ` s = emit TokSpecialOpen p : next 2 ( xmlSpecial [ InTag ] )
where next n k = skip n p s k
xmlPI w p s = xmlName p s (blank xmlPIEnd w)
xmlPIEnd w p s = accumulateUntil "?>" TokPIClose "" p p s
(blank xmlAny (tail w))
xmlComment w p s = accumulateUntil "-->" TokCommentClose "" p p s
(blank xmlAny w)
Some matches must precede the NotInTag test , the rest must follow it .
xmlAny :: [Where] -> Posn -> String -> [Token]
xmlAny (InTag:_) p [] = lexerror "unexpected EOF inside tag" p
xmlAny _ p [] = []
xmlAny w p s@('<':ss)
| "?" `prefixes` ss = emit TokPIOpen p: skip 2 p s (xmlPI (InTag:w))
| "!--" `prefixes` ss = emit TokCommentOpen p: skip 4 p s (xmlComment w)
| "![" `prefixes` ss = emit TokSectionOpen p: skip 3 p s (xmlSection w)
| "!" `prefixes` ss = emit TokSpecialOpen p:
skip 2 p s (xmlSpecial (InTag:w))
| "/" `prefixes` ss = emit TokEndOpen p:
skip 2 p s (xmlTag (InTag:tail w))
| otherwise = emit TokAnyOpen p:
skip 1 p s (xmlTag (InTag:NotInTag:w))
xmlAny (_:_:w) p s@('/':ss)
| ">" `prefixes` ss = emit TokEndClose p: skip 2 p s (xmlAny w)
xmlAny w p ('&':ss) = emit TokAmp p: accumulateUntil ";" TokSemi "" p
(addcol 1 p) ss (xmlAny w)
xmlAny w@(NotInTag:_) p s = xmlContent "" w p p s
xmlAny w p ('>':ss) = emit TokAnyClose p: xmlAny (tail w) (addcol 1 p) ss
xmlAny w p ('[':ss) = emit TokSqOpen p: blank xmlAny (InTag:w) (addcol 1 p) ss
xmlAny w p (']':ss)
| "]>" `prefixes` ss =
emit TokSectionClose p: skip 3 p (']':ss) (xmlAny (tail w))
| otherwise = emit TokSqClose p: blank xmlAny (tail w) (addcol 1 p) ss
xmlAny w p ('(':ss) = emit TokBraOpen p: blank xmlAny (InTag:w) (addcol 1 p) ss
xmlAny w p (')':ss) = emit TokBraClose p: blank xmlAny (tail w) (addcol 1 p) ss
xmlAny w p ('=':ss) = emit TokEqual p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('*':ss) = emit TokStar p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('+':ss) = emit TokPlus p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('?':ss) = emit TokQuery p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('|':ss) = emit TokPipe p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('%':ss) = emit TokPercent p: blank xmlAny w (addcol 1 p) ss
xmlAny w p (';':ss) = emit TokSemi p: blank xmlAny w (addcol 1 p) ss
xmlAny w p (',':ss) = emit TokComma p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('#':ss) = emit TokHash p: blank xmlAny w (addcol 1 p) ss
xmlAny w p ('"':ss) = emit TokQuote p: accumulateUntil "\"" TokQuote "" p
(addcol 1 p) ss (xmlAny w)
xmlAny w p ('\'':ss) = emit TokQuote p: accumulateUntil "'" TokQuote "" p
(addcol 1 p) ss (xmlAny w)
xmlAny w p s
| isSpace (head s) = blank xmlAny w p s
| isNmstart (head s) = xmlName p s (blank xmlAny w)
| otherwise = lexerror "unrecognised token" p
xmlTag w p s = xmlName p s (blank xmlAny w)
xmlSection = blank xmlSection0
where
xmlSection0 w p s
| "CDATA[" `prefixes` s = emit (TokSection CDATAx) p: accum w p s 6
| "INCLUDE" `prefixes` s = emit (TokSection INCLUDEx) p: k w p s 7
| "IGNORE" `prefixes` s = emit (TokSection IGNOREx) p: k w p s 6
| "%" `prefixes` s = emit TokPercent p: k w p s 1
| otherwise = lexerror ("expected CDATA, IGNORE, or INCLUDE") p
accum w p s n =
let p0 = addcol n p in
accumulateUntil "]]>" TokSectionClose "" p0 p0 (drop n s) (blank xmlAny w)
k w p s n =
skip n p s (xmlAny w)
xmlSpecial w p s
| "DOCTYPE" `prefixes` s = emit (TokSpecial DOCTYPEx) p: k 7
| "ELEMENT" `prefixes` s = emit (TokSpecial ELEMENTx) p: k 7
| "ATTLIST" `prefixes` s = emit (TokSpecial ATTLISTx) p: k 7
| "ENTITY" `prefixes` s = emit (TokSpecial ENTITYx) p: k 6
| otherwise = lexerror "expected DOCTYPE, ELEMENT, ENTITY, or ATTLIST" p
where k n = skip n p s (blank xmlAny w)
xmlName p (s:ss) k
| isNmstart s = gatherName (s:[]) p (addcol 1 p) ss k
| isAlphaNum s || s== ' : ' || s== ' _ ' = gatherName ( s : [ ] ) p ( addcol 1 p ) ss k
| otherwise = lexerror ((show$ord s) ++" expected name") p
where
gatherName acc pos p [] k =
emit (TokName (reverse acc)) pos: k p []
lexerror ( " unexpected EOF in name at " + + show pos ) p
gatherName acc pos p (s:ss) k
| isNmchar s|| s `elem` ".-_:"
= gatherName (s:acc) pos (addcol 1 p) ss k
| otherwise = emit (TokName (reverse acc)) pos: k p (s:ss)
xmlContent acc w pos p [] = if all isSpace acc then []
else lexerror "unexpected EOF between tags" p
xmlContent acc w pos p (s:ss)
| elem s "<&" = if all isSpace acc then xmlAny w p (s:ss)
else emit (TokFreeText (revtrim acc)) pos: xmlAny w p (s:ss)
| isSpace s = xmlContent (s:acc) w pos (white s p) ss
| otherwise = xmlContent (s:acc) w pos (addcol 1 p) ss
ident : : ( String->TokenT ) - >
ident tok p s ss k =
let ( name , s0 ) = span ( \c- c ` elem ` " ` -_#.'/\\ " ) s
in emit ( tok name ) p : skip ( length name ) p s ss k
|
8ca773a5c80a47ac5541e837fd0132a0ccd46b8ae5e3f2981bf83e9a604744b4 | brendanhay/credentials | Format.hs | # LANGUAGE ExtendedDefaultRules #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TupleSections #
# LANGUAGE UndecidableInstances #
# OPTIONS_GHC -fno - warn - type - defaults #
-- |
Module : Credentials .
Copyright : ( c ) 2015 - 2016
License : Mozilla Public License , v. 2.0 .
Maintainer : < >
-- Stability : provisional
Portability : non - portable ( GHC extensions )
--
module Credentials.CLI.Format where
import Credentials
import Credentials.CLI.Types
import Data.Aeson (ToJSON (..), object, (.=))
import Data.Bifunctor
import Data.ByteString (ByteString)
import Data.List (foldl', intersperse)
import Data.List.NonEmpty (NonEmpty (..))
import Data.Monoid
import Network.AWS.Data
import Options.Applicative.Help hiding (list, string)
import qualified Data.Text as Text
data Status
= Deleted
| Truncated
instance ToLog Status where
build = build . toText
instance ToText Status where
toText = \case
Deleted -> "deleted"
Truncated -> "truncated"
data Emit = Emit { store' :: Store, result :: Result }
instance ToJSON Emit where
toJSON (Emit s r) = object [toText s .= r]
instance Pretty Emit where
pretty (Emit s r) = doc s <> char ':' .$. indent 2 (pretty r)
data Result
= SetupR Setup
| TeardownR
| InsertR Name Revision
| SelectR Name Revision ByteString
| DeleteR Name Revision
| TruncateR Name
| ListR [(Name, NonEmpty Revision)]
instance ToLog Result where
build = \case
SetupR s -> build s
TeardownR -> build Deleted
InsertR _ r -> build r
SelectR _ _ v -> build v
DeleteR {} -> build Deleted
TruncateR {} -> build Truncated
ListR rs -> foldMap f rs
where
f (n, v :| vs) =
build n % "," % mconcat (intersperse "," $ map build (v:vs)) % "\n"
instance ToJSON Result where
toJSON = \case
SetupR s -> object ["status" =~ s]
TeardownR -> object ["status" =~ Deleted]
InsertR n r -> object ["name" =~ n, "revision" =~ r]
SelectR n r v -> object ["name" =~ n, "revision" =~ r, "secret" =~ toBS v]
DeleteR n r -> object ["name" =~ n, "revision" =~ r, "status" =~ Deleted]
TruncateR n -> object ["name" =~ n, "status" =~ Truncated]
ListR rs -> object (map go rs)
where
k =~ v = k .= toText v
go (n, v :| vs) = toText n .= map toText (v:vs)
instance Pretty Result where
pretty = \case
SetupR s -> stat s
TeardownR -> stat Deleted
InsertR n r -> name n .$. rev r
SelectR n r v -> name n .$. rev r .$. val v
DeleteR n r -> name n .$. rev r .$. stat Deleted
TruncateR n -> name n .$. stat Truncated
ListR rs -> list rs
where
name n = "name:" <+> doc n
rev r = "revision:" <+> doc r
stat s = "status:" <+> doc s
val v = "secret:" <+> doc (toBS v)
list [] = mempty
list (r:rs) = foldl' (.$.) (f r) (map f rs)
where
f (n, v :| vs) = doc n <> ":" .$.
indent 2 (extractChunk (revs v vs))
revs v vs = table $ (v, "# latest") : map (,mempty) vs
table [] = mempty
table xs = pure $ vcat
[indent 2 (fillBreak n (item k) <+> v) | (k, v) <- ys]
where
n = maximum (map (Text.length . fst) ys) + 2
ys = map (first toText) xs
item x = "-" <+> doc x
doc :: ToText a => a -> Doc
doc = text . string
| null | https://raw.githubusercontent.com/brendanhay/credentials/4cba2c238b7d99712ee6745748864134096a106b/credentials-cli/src/Credentials/CLI/Format.hs | haskell | # LANGUAGE OverloadedStrings #
|
Stability : provisional
| # LANGUAGE ExtendedDefaultRules #
# LANGUAGE FlexibleContexts #
# LANGUAGE LambdaCase #
# LANGUAGE TupleSections #
# LANGUAGE UndecidableInstances #
# OPTIONS_GHC -fno - warn - type - defaults #
Module : Credentials .
Copyright : ( c ) 2015 - 2016
License : Mozilla Public License , v. 2.0 .
Maintainer : < >
Portability : non - portable ( GHC extensions )
module Credentials.CLI.Format where
import Credentials
import Credentials.CLI.Types
import Data.Aeson (ToJSON (..), object, (.=))
import Data.Bifunctor
import Data.ByteString (ByteString)
import Data.List (foldl', intersperse)
import Data.List.NonEmpty (NonEmpty (..))
import Data.Monoid
import Network.AWS.Data
import Options.Applicative.Help hiding (list, string)
import qualified Data.Text as Text
data Status
= Deleted
| Truncated
instance ToLog Status where
build = build . toText
instance ToText Status where
toText = \case
Deleted -> "deleted"
Truncated -> "truncated"
data Emit = Emit { store' :: Store, result :: Result }
instance ToJSON Emit where
toJSON (Emit s r) = object [toText s .= r]
instance Pretty Emit where
pretty (Emit s r) = doc s <> char ':' .$. indent 2 (pretty r)
data Result
= SetupR Setup
| TeardownR
| InsertR Name Revision
| SelectR Name Revision ByteString
| DeleteR Name Revision
| TruncateR Name
| ListR [(Name, NonEmpty Revision)]
instance ToLog Result where
build = \case
SetupR s -> build s
TeardownR -> build Deleted
InsertR _ r -> build r
SelectR _ _ v -> build v
DeleteR {} -> build Deleted
TruncateR {} -> build Truncated
ListR rs -> foldMap f rs
where
f (n, v :| vs) =
build n % "," % mconcat (intersperse "," $ map build (v:vs)) % "\n"
instance ToJSON Result where
toJSON = \case
SetupR s -> object ["status" =~ s]
TeardownR -> object ["status" =~ Deleted]
InsertR n r -> object ["name" =~ n, "revision" =~ r]
SelectR n r v -> object ["name" =~ n, "revision" =~ r, "secret" =~ toBS v]
DeleteR n r -> object ["name" =~ n, "revision" =~ r, "status" =~ Deleted]
TruncateR n -> object ["name" =~ n, "status" =~ Truncated]
ListR rs -> object (map go rs)
where
k =~ v = k .= toText v
go (n, v :| vs) = toText n .= map toText (v:vs)
instance Pretty Result where
pretty = \case
SetupR s -> stat s
TeardownR -> stat Deleted
InsertR n r -> name n .$. rev r
SelectR n r v -> name n .$. rev r .$. val v
DeleteR n r -> name n .$. rev r .$. stat Deleted
TruncateR n -> name n .$. stat Truncated
ListR rs -> list rs
where
name n = "name:" <+> doc n
rev r = "revision:" <+> doc r
stat s = "status:" <+> doc s
val v = "secret:" <+> doc (toBS v)
list [] = mempty
list (r:rs) = foldl' (.$.) (f r) (map f rs)
where
f (n, v :| vs) = doc n <> ":" .$.
indent 2 (extractChunk (revs v vs))
revs v vs = table $ (v, "# latest") : map (,mempty) vs
table [] = mempty
table xs = pure $ vcat
[indent 2 (fillBreak n (item k) <+> v) | (k, v) <- ys]
where
n = maximum (map (Text.length . fst) ys) + 2
ys = map (first toText) xs
item x = "-" <+> doc x
doc :: ToText a => a -> Doc
doc = text . string
|
4396e371cf852fb606f00c59b1225d35e8121f4eda7f172b3ef8e46b7d6f3219 | mirage/ocaml-matrix | empty.ml | open Json_encoding
(* An empty message body: (de)serialises as JSON [unit]. *)
module type JSON = sig
  type%accessor t = unit
  val encoding : t encoding
  val pp : t Fmt.t
end

(* An empty query string: no key/value arguments. *)
module type QUERY = sig
  type%accessor t = unit
  val args : t -> (string * string list) list
end
(* Trivial implementation: encode as JSON unit, print nothing. *)
module Json : JSON = struct
  type t = unit [@@deriving accessor]
  let encoding = unit
  let pp _ppf () = ()
end

(* Trivial implementation: an empty argument list. *)
module Query : QUERY = struct
  type t = unit [@@deriving accessor]
  let args () = []
end
| null | https://raw.githubusercontent.com/mirage/ocaml-matrix/2a58d3d41c43404741f2dfdaf1d2d0f3757b2b69/lib/matrix-common/empty.ml | ocaml | open Json_encoding
module type JSON = sig
type%accessor t = unit
val encoding : t encoding
val pp : t Fmt.t
end
module type QUERY = sig
type%accessor t = unit
val args : t -> (string * string list) list
end
module Json : JSON = struct
type t = unit [@@deriving accessor]
let encoding = unit
let pp _ppf () = ()
end
module Query : QUERY = struct
type t = unit [@@deriving accessor]
let args () = []
end
| |
c5401287c5b98d4c2a0ed08f41d636b0bf0cddcac31bf8c4daf8787bf603c8e6 | haskell-mafia/boris | Git.hs | # LANGUAGE NoImplicitPrelude #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
# OPTIONS_GHC -fno - warn - missing - signatures #
module Test.Boris.Service.Git where
import Boris.Core.Data
import Boris.Service.Git (InitialiseError (..), findRef)
import qualified Data.Text as T
import Disorder.Core (neg)
import P
import Test.Boris.Core.Arbitrary
import Test.QuickCheck
import Test.QuickCheck.Instances ()
-- With no candidate refs, findRef always reports NoMatchingRef.
prop_findRef_none b p x =
  findRef b p x [] === Left (NoMatchingRef b p)
-- A single candidate is chosen when no target ref is requested.
prop_findRef_exact b p r =
  findRef b p Nothing [r] === Right r
-- A single candidate matching the requested target is chosen.
prop_findRef_exact_with_target b p r =
  findRef b p (Just r) [r] === Right r
-- Requesting a target absent from the candidates is a mismatch.
prop_findRef_exact_with_target_mismatch b p r x = r /= x ==>
  findRef b p (Just x) [r] === Left (MismatchedRef b p x [r])
-- Multiple candidates with no target to disambiguate are ambiguous.
prop_findRef_multi_without_target b p r x = r /= x ==>
  findRef b p Nothing [r, x] === Left (AmbiguousRef b p [r, x])
-- A target ref disambiguates between multiple candidates.
prop_findRef_multi_with_target b p r x = r /= x ==>
  findRef b p (Just r) [r, x] === Right r
-- A target absent from multiple candidates is a mismatch.
prop_findRef_multi_with_target_mismatch b p r x y = r /= x && r /= y && x /= y ==>
  findRef b p (Just y) [r, x] === Left (MismatchedRef b p y [r, x])
-- Every generated pattern/build pair from BuildWithPattern must match.
prop_matchesBuild (BuildWithPattern b g) =
  matchesBuild g b
-- Concrete examples pinning down the glob semantics of build-name patterns.
prop_matchesBuild_examples =
  let
    match p b =
      either
        (flip counterexample False . T.unpack)
        (counterexample (T.unpack $ p <> " can't match " <> b) . flip matchesBuild (Build b))
        (parseBuildNamePattern p)
  in
    conjoin [
        match "a" "a"
      , match "a*" "a"
      , match "a*" "abc"
      , match "*c" "abc"
      , match "*c*" "abcde"
      -- Single character matches
      , match "a?" "ab"
      , neg $ match "a?" "abc"
      -- We've _only_ enabled * and ?
      , neg $ match "[abc]" "a"
      , match "[abc]" "[abc]"
      -- NOTE: This is really a glob, and slashes _do_ mean something
      , match "a*/c" "ab/c"
      , neg $ match "a*" "a/b"
      -- No recursive wildcards
      , neg $ match "a/**" "a/b/c/d"
      ]
-- Template Haskell splice point: gathers the prop_* above into 'tests'.
return []
tests = $forAllProperties $ quickCheckWithResult (stdArgs { maxSuccess = 1000 })
| null | https://raw.githubusercontent.com/haskell-mafia/boris/fb670071600e8b2d8dbb9191fcf6bf8488f83f5a/boris-service/test/Test/Boris/Service/Git.hs | haskell | # LANGUAGE OverloadedStrings #
Single character matches
We've _only_ enabled * and ?
NOTE: This is really a glob, and slashes _do_ mean something
No recursive wildcards | # LANGUAGE NoImplicitPrelude #
# LANGUAGE TemplateHaskell #
# OPTIONS_GHC -fno - warn - missing - signatures #
module Test.Boris.Service.Git where
import Boris.Core.Data
import Boris.Service.Git (InitialiseError (..), findRef)
import qualified Data.Text as T
import Disorder.Core (neg)
import P
import Test.Boris.Core.Arbitrary
import Test.QuickCheck
import Test.QuickCheck.Instances ()
prop_findRef_none b p x =
findRef b p x [] === Left (NoMatchingRef b p)
prop_findRef_exact b p r =
findRef b p Nothing [r] === Right r
prop_findRef_exact_with_target b p r =
findRef b p (Just r) [r] === Right r
prop_findRef_exact_with_target_mismatch b p r x = r /= x ==>
findRef b p (Just x) [r] === Left (MismatchedRef b p x [r])
prop_findRef_multi_without_target b p r x = r /= x ==>
findRef b p Nothing [r, x] === Left (AmbiguousRef b p [r, x])
prop_findRef_multi_with_target b p r x = r /= x ==>
findRef b p (Just r) [r, x] === Right r
prop_findRef_multi_with_target_mismatch b p r x y = r /= x && r /= y && x /= y ==>
findRef b p (Just y) [r, x] === Left (MismatchedRef b p y [r, x])
prop_matchesBuild (BuildWithPattern b g) =
matchesBuild g b
prop_matchesBuild_examples =
let
match p b =
either
(flip counterexample False . T.unpack)
(counterexample (T.unpack $ p <> " can't match " <> b) . flip matchesBuild (Build b))
(parseBuildNamePattern p)
in
conjoin [
match "a" "a"
, match "a*" "a"
, match "a*" "abc"
, match "*c" "abc"
, match "*c*" "abcde"
, match "a?" "ab"
, neg $ match "a?" "abc"
, neg $ match "[abc]" "a"
, match "[abc]" "[abc]"
, match "a*/c" "ab/c"
, neg $ match "a*" "a/b"
, neg $ match "a/**" "a/b/c/d"
]
return []
tests = $forAllProperties $ quickCheckWithResult (stdArgs { maxSuccess = 1000 })
|
2cb8aa227e736429229c405230fa98c4f93aebc10894935163a9853f14a2da3d | jrh13/hol-light | bernoulli.ml | (* ========================================================================= *)
numbers and polynomials ; sum of powers .
(* ========================================================================= *)
needs "Library/binomial.ml";;
needs "Library/analysis.ml";;
needs "Library/transc.ml";;
prioritize_real();;
(* ------------------------------------------------------------------------- *)
(* A couple of basic lemmas about new-style sums. *)
(* ------------------------------------------------------------------------- *)
let SUM_DIFFS = prove
(`!a m n. m <= n + 1 ==> sum(m..n) (\i. a(i + 1) - a(i)) = a(n + 1) - a(m)`,
GEN_TAC THEN GEN_TAC THEN INDUCT_TAC THEN
REWRITE_TAC[SUM_CLAUSES_NUMSEG] THENL
[REWRITE_TAC[ARITH_RULE `m <= 0 + 1 <=> m = 0 \/ m = 1`] THEN
STRIP_TAC THEN ASM_REWRITE_TAC[ARITH; ADD_CLAUSES; REAL_SUB_REFL];
SIMP_TAC[ARITH_RULE `m <= SUC n + 1 <=> m <= n + 1 \/ m = SUC n + 1`] THEN
STRIP_TAC THEN ASM_SIMP_TAC[ADD1] THENL [REAL_ARITH_TAC; ALL_TAC] THEN
REWRITE_TAC[REAL_SUB_REFL; ARITH_RULE `~((n + 1) + 1 <= n + 1)`] THEN
MATCH_MP_TAC SUM_TRIV_NUMSEG THEN ARITH_TAC]);;
let DIFF_SUM = prove
(`!f f' a b.
(!k. a <= k /\ k <= b ==> ((\x. f x k) diffl f'(k)) x)
==> ((\x. sum(a..b) (f x)) diffl (sum(a..b) f')) x`,
REPLICATE_TAC 3 GEN_TAC THEN INDUCT_TAC THEN
REWRITE_TAC[SUM_CLAUSES_NUMSEG] THEN COND_CASES_TAC THEN
ASM_SIMP_TAC[ARITH; DIFF_CONST; SUM_TRIV_NUMSEG;
ARITH_RULE `~(a <= SUC b) ==> b < a`] THEN
DISCH_TAC THEN MATCH_MP_TAC DIFF_ADD THEN
ASM_SIMP_TAC[LE_REFL; ARITH_RULE `k <= b ==> k <= SUC b`]);;
(* ------------------------------------------------------------------------- *)
numbers .
(* ------------------------------------------------------------------------- *)
(* B_0 = 1 and B_{n+1} given by the standard recurrence, equivalently       *)
(* sum_{j=0..n} C(n+2,j) * B_j = -(n+2) * B_{n+1}.                          *)
let bernoulli = define
 `(bernoulli 0 = &1) /\
  (!n. bernoulli(SUC n) =
          --sum(0..n) (\j. &(binom(n + 2,j)) * bernoulli j) / (&n + &2))`;;
(* ------------------------------------------------------------------------- *)
(* A slightly tidier-looking form of the recurrence. *)
(* ------------------------------------------------------------------------- *)
let BERNOULLI = prove
(`!n. sum(0..n) (\j. &(binom(n + 1,j)) * bernoulli j) =
if n = 0 then &1 else &0`,
INDUCT_TAC THEN
REWRITE_TAC[bernoulli; SUM_CLAUSES_NUMSEG; GSYM ADD1; ADD_CLAUSES; binom;
REAL_MUL_LID; LE_0; NOT_SUC] THEN
SIMP_TAC[BINOM_LT; ARITH_RULE `n < SUC n`; BINOM_REFL; REAL_ADD_LID] THEN
REWRITE_TAC[ADD_CLAUSES] THEN REWRITE_TAC[GSYM REAL_OF_NUM_ADD] THEN
REWRITE_TAC[ARITH_RULE `SUC(SUC n) = n + 2`] THEN
MATCH_MP_TAC(REAL_FIELD `x = &n + &2 ==> s + x * --s / (&n + &2) = &0`) THEN
REWRITE_TAC[ADD1; BINOM_TOP_STEP_REAL; ARITH_RULE `~(n = n + 1)`] THEN
REWRITE_TAC[BINOM_REFL] THEN REAL_ARITH_TAC);;
(* ------------------------------------------------------------------------- *)
polynomials .
(* ------------------------------------------------------------------------- *)
(* Bernoulli polynomial: B_n(x) = sum_{k=0..n} C(n,k) * B_k * x^(n-k).      *)
let bernpoly = new_definition
 `bernpoly n x = sum(0..n) (\k. &(binom(n,k)) * bernoulli k * x pow (n - k))`;;
(* ------------------------------------------------------------------------- *)
(* The key derivative recurrence. *)
(* ------------------------------------------------------------------------- *)
let DIFF_BERNPOLY = prove
(`!n x. ((bernpoly (SUC n)) diffl (&(SUC n) * bernpoly n x)) x`,
REPEAT GEN_TAC THEN
GEN_REWRITE_TAC (RATOR_CONV o LAND_CONV) [GSYM ETA_AX] THEN
REWRITE_TAC[bernpoly; SUM_CLAUSES_NUMSEG; LE_0] THEN
GEN_REWRITE_TAC LAND_CONV [GSYM REAL_ADD_RID] THEN
MATCH_MP_TAC DIFF_ADD THEN REWRITE_TAC[SUB_REFL; real_pow; DIFF_CONST] THEN
REWRITE_TAC[GSYM SUM_LMUL] THEN MATCH_MP_TAC DIFF_SUM THEN
REPEAT STRIP_TAC THEN REWRITE_TAC[ADD1; BINOM_TOP_STEP_REAL] THEN
DIFF_TAC THEN ASM_SIMP_TAC[ARITH_RULE `k <= n ==> ~(k = n + 1)`] THEN
REWRITE_TAC[REAL_MUL_LZERO; REAL_ADD_LID] THEN
ASM_SIMP_TAC[ARITH_RULE `k <= n ==> (n + 1) - k - 1 = n - k`] THEN
ASM_SIMP_TAC[GSYM REAL_OF_NUM_SUB; ARITH_RULE `k <= n ==> k <= n + 1`] THEN
UNDISCH_TAC `k <= n:num` THEN
REWRITE_TAC[GSYM REAL_OF_NUM_ADD; GSYM REAL_OF_NUM_LE] THEN
ABBREV_TAC `z = x pow (n - k)` THEN CONV_TAC REAL_FIELD);;
(* ------------------------------------------------------------------------- *)
(* Hence the key stepping recurrence. *)
(* ------------------------------------------------------------------------- *)
let INTEGRALS_EQ = prove
(`!f g. (!x. ((\x. f(x) - g(x)) diffl &0) x) /\ f(&0) = g(&0)
==> !x. f(x) = g(x)`,
REPEAT STRIP_TAC THEN
MP_TAC(SPECL [`\x:real. f(x) - g(x)`; `x:real`; `&0`] DIFF_ISCONST_ALL) THEN
ASM_REWRITE_TAC[] THEN REAL_ARITH_TAC);;
let RECURRENCE_BERNPOLY = prove
(`!n x. bernpoly n (x + &1) - bernpoly n x = &n * x pow (n - 1)`,
INDUCT_TAC THENL
[REWRITE_TAC[bernpoly; SUM_SING_NUMSEG; REAL_SUB_REFL; SUB_REFL;
real_pow; REAL_MUL_LZERO];
ALL_TAC] THEN
MATCH_MP_TAC INTEGRALS_EQ THEN CONJ_TAC THENL
[X_GEN_TAC `x:real` THEN FIRST_X_ASSUM(MP_TAC o SPEC `x:real`) THEN
ONCE_REWRITE_TAC[GSYM REAL_SUB_0] THEN
    DISCH_THEN(MP_TAC o AP_TERM `(*) (&(SUC n))`) THEN
REWRITE_TAC[REAL_MUL_RZERO] THEN DISCH_THEN(SUBST1_TAC o SYM) THEN
REWRITE_TAC[REAL_SUB_LDISTRIB] THEN
REPEAT(MATCH_MP_TAC DIFF_SUB THEN CONJ_TAC) THEN
SIMP_TAC[SUC_SUB1; DIFF_CMUL; DIFF_POW; DIFF_BERNPOLY; ETA_AX] THEN
GEN_REWRITE_TAC LAND_CONV [GSYM REAL_MUL_RID] THEN
MATCH_MP_TAC DIFF_CHAIN THEN REWRITE_TAC[DIFF_BERNPOLY] THEN
DIFF_TAC THEN REAL_ARITH_TAC;
ALL_TAC] THEN
REWRITE_TAC[bernpoly; GSYM SUM_SUB_NUMSEG] THEN
REWRITE_TAC[REAL_ADD_LID; REAL_POW_ONE; GSYM REAL_SUB_LDISTRIB] THEN
REWRITE_TAC[SUM_CLAUSES_NUMSEG; LE_0; SUB_REFL; real_pow] THEN
REWRITE_TAC[REAL_SUB_REFL; REAL_MUL_RZERO; REAL_ADD_RID] THEN
SIMP_TAC[ARITH_RULE `i <= n ==> SUC n - i = SUC(n - i)`] THEN
REWRITE_TAC[real_pow; REAL_MUL_LZERO; REAL_SUB_RZERO; REAL_MUL_RID] THEN
REWRITE_TAC[BERNOULLI; ADD1] THEN
COND_CASES_TAC THEN ASM_REWRITE_TAC[ARITH; real_pow; REAL_MUL_LID] THEN
CONV_TAC SYM_CONV THEN REWRITE_TAC[REAL_ENTIRE; REAL_POW_EQ_0] THEN
ASM_REWRITE_TAC[ADD_SUB]);;
(* ------------------------------------------------------------------------- *)
(* Hence we get the main result. *)
(* ------------------------------------------------------------------------- *)
let SUM_OF_POWERS = prove
(`!n. sum(0..n) (\k. &k pow m) =
(bernpoly(SUC m) (&n + &1) - bernpoly(SUC m) (&0)) / (&m + &1)`,
GEN_TAC THEN ASM_SIMP_TAC[REAL_EQ_RDIV_EQ; REAL_ARITH `&0 < &n + &1`] THEN
ONCE_REWRITE_TAC[GSYM REAL_MUL_SYM] THEN
REWRITE_TAC[GSYM SUM_LMUL] THEN MATCH_MP_TAC EQ_TRANS THEN EXISTS_TAC
`sum(0..n) (\i. bernpoly (SUC m) (&(i + 1)) - bernpoly (SUC m) (&i))` THEN
CONJ_TAC THENL
[REWRITE_TAC[RECURRENCE_BERNPOLY; GSYM REAL_OF_NUM_ADD] THEN
REWRITE_TAC[GSYM REAL_OF_NUM_SUC; SUC_SUB1];
SIMP_TAC[SUM_DIFFS; LE_0] THEN REWRITE_TAC[REAL_OF_NUM_ADD]]);;
(* ------------------------------------------------------------------------- *)
(* Now explicit computations of the various terms on specific instances. *)
(* ------------------------------------------------------------------------- *)
let SUM_CONV =
let pth = prove
(`sum(0..0) f = f 0 /\ sum(0..SUC n) f = sum(0..n) f + f(SUC n)`,
SIMP_TAC[SUM_CLAUSES_NUMSEG; LE_0]) in
let econv_0 = GEN_REWRITE_CONV I [CONJUNCT1 pth]
and econv_1 = GEN_REWRITE_CONV I [CONJUNCT2 pth] in
let rec sconv tm =
(econv_0 ORELSEC
(LAND_CONV(RAND_CONV num_CONV) THENC econv_1 THENC
COMB2_CONV (RAND_CONV sconv) (RAND_CONV NUM_SUC_CONV))) tm in
sconv;;
let BINOM_CONV =
let pth = prove
(`a * b * x = FACT c ==> x = (FACT c) DIV (a * b)`,
REPEAT STRIP_TAC THEN CONV_TAC SYM_CONV THEN
MATCH_MP_TAC DIV_UNIQ THEN EXISTS_TAC `0` THEN CONJ_TAC THENL
[POP_ASSUM MP_TAC THEN ARITH_TAC;
POP_ASSUM MP_TAC THEN ONCE_REWRITE_TAC[GSYM CONTRAPOS_THM] THEN
SIMP_TAC[LT_NZ; MULT_ASSOC; MULT_CLAUSES] THEN
MESON_TAC[LT_NZ; FACT_LT]]) in
let match_pth = MATCH_MP pth
and binom_tm = `binom` in
fun tm ->
let bop,lr = dest_comb tm in
if bop <> binom_tm then failwith "BINOM_CONV" else
let l,r = dest_pair lr in
let n = dest_numeral l and k = dest_numeral r in
if n </ k then
let th = SPECL [l;r] BINOM_LT in
MP th (EQT_ELIM(NUM_LT_CONV(lhand(concl th))))
else
let d = n -/ k in
let th1 = match_pth(SPECL [mk_numeral d; r] BINOM_FACT) in
CONV_RULE NUM_REDUCE_CONV th1;;
let BERNOULLIS =
let th_0,th_1 = CONJ_PAIR bernoulli
and b_tm = `bernoulli` in
let conv_1 = GEN_REWRITE_CONV I [th_1] in
let rec bconv n =
if n <= 0 then [th_0] else
let bths = bconv (n - 1)
and tm = mk_comb(b_tm,mk_small_numeral n) in
(RAND_CONV num_CONV THENC conv_1 THENC
LAND_CONV(RAND_CONV SUM_CONV) THENC
ONCE_DEPTH_CONV BETA_CONV THENC
DEPTH_CONV(NUM_RED_CONV ORELSEC BINOM_CONV) THENC
GEN_REWRITE_CONV ONCE_DEPTH_CONV bths THENC
REAL_RAT_REDUCE_CONV) tm :: bths in
bconv;;
let BERNOULLI_CONV =
let b_tm = `bernoulli` in
fun tm -> let op,n = dest_comb tm in
if op <> b_tm || not(is_numeral n) then failwith "BERNOULLI_CONV"
else hd(BERNOULLIS(dest_small_numeral n));;
let BERNPOLY_CONV =
let conv_1 =
REWR_CONV bernpoly THENC SUM_CONV THENC
TOP_DEPTH_CONV BETA_CONV THENC NUM_REDUCE_CONV
and conv_3 =
ONCE_DEPTH_CONV BINOM_CONV THENC REAL_POLY_CONV in
fun tm ->
let n = dest_small_numeral(lhand tm) in
let conv_2 = GEN_REWRITE_CONV ONCE_DEPTH_CONV (BERNOULLIS n) in
(conv_1 THENC conv_2 THENC conv_3) tm;;
(* Convert a term `sum(0..n) (\k. &k pow m)` (m a numeral) to closed form:  *)
(* rewrite with SUM_OF_POWERS, evaluate the Bernoulli polynomial of degree  *)
(* SUC m, then normalise the result with REAL_POLY_CONV.                    *)
let SOP_CONV =
  let pth = prove
   (`sum(0..n) (\k. &k pow m) =
     (\p. (p(&n + &1) - p(&0)) / (&m + &1))
     (\x. bernpoly (SUC m) x)`,
    REWRITE_TAC[SUM_OF_POWERS]) in
  (* NB: the previous revision also bound `conv_0 = REWR_CONV pth`, which   *)
  (* was never used; the dead binding has been removed.                     *)
  REWR_CONV pth THENC
  RAND_CONV(ABS_CONV(LAND_CONV NUM_SUC_CONV THENC BERNPOLY_CONV)) THENC
  TOP_DEPTH_CONV BETA_CONV THENC
  REAL_POLY_CONV;;
(* Natural-number variant: prove nsum(0..n) (\k. k EXP p) = m by            *)
(* transferring the corresponding real sum evaluated with SOP_CONV.         *)
let SOP_NUM_CONV =
  let pth = prove
   (`sum(0..n) (\k. &k pow p) = &m ==> nsum(0..n) (\k. k EXP p) = m`,
    REWRITE_TAC[REAL_OF_NUM_POW; GSYM REAL_OF_NUM_SUM_NUMSEG;
                REAL_OF_NUM_EQ]) in
  let rule_1 = PART_MATCH (lhs o rand) pth in
  fun tm ->
    let th1 = rule_1 tm in
    let th2 = SOP_CONV(lhs(lhand(concl th1))) in
    MATCH_MP th1 th2;;
(* ------------------------------------------------------------------------- *)
The example bragged about .
(* ------------------------------------------------------------------------- *)
time SOP_NUM_CONV `nsum(0..1000) (\k. k EXP 10)`;;
(* ------------------------------------------------------------------------- *)
(* The general formulas for moderate powers. *)
(* ------------------------------------------------------------------------- *)
time SOP_CONV `sum(0..n) (\k. &k pow 0)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 1)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 2)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 3)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 4)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 5)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 6)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 7)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 8)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 9)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 10)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 11)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 12)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 13)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 14)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 15)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 16)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 17)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 18)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 19)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 20)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 21)`;;
| null | https://raw.githubusercontent.com/jrh13/hol-light/d125b0ae73e546a63ed458a7891f4e14ae0409e2/100/bernoulli.ml | ocaml | =========================================================================
=========================================================================
-------------------------------------------------------------------------
A couple of basic lemmas about new-style sums.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
A slightly tidier-looking form of the recurrence.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
The key derivative recurrence.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Hence the key stepping recurrence.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Hence we get the main result.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
Now explicit computations of the various terms on specific instances.
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
-------------------------------------------------------------------------
The general formulas for moderate powers.
------------------------------------------------------------------------- | numbers and polynomials ; sum of powers .
needs "Library/binomial.ml";;
needs "Library/analysis.ml";;
needs "Library/transc.ml";;
prioritize_real();;
let SUM_DIFFS = prove
(`!a m n. m <= n + 1 ==> sum(m..n) (\i. a(i + 1) - a(i)) = a(n + 1) - a(m)`,
GEN_TAC THEN GEN_TAC THEN INDUCT_TAC THEN
REWRITE_TAC[SUM_CLAUSES_NUMSEG] THENL
[REWRITE_TAC[ARITH_RULE `m <= 0 + 1 <=> m = 0 \/ m = 1`] THEN
STRIP_TAC THEN ASM_REWRITE_TAC[ARITH; ADD_CLAUSES; REAL_SUB_REFL];
SIMP_TAC[ARITH_RULE `m <= SUC n + 1 <=> m <= n + 1 \/ m = SUC n + 1`] THEN
STRIP_TAC THEN ASM_SIMP_TAC[ADD1] THENL [REAL_ARITH_TAC; ALL_TAC] THEN
REWRITE_TAC[REAL_SUB_REFL; ARITH_RULE `~((n + 1) + 1 <= n + 1)`] THEN
MATCH_MP_TAC SUM_TRIV_NUMSEG THEN ARITH_TAC]);;
let DIFF_SUM = prove
(`!f f' a b.
(!k. a <= k /\ k <= b ==> ((\x. f x k) diffl f'(k)) x)
==> ((\x. sum(a..b) (f x)) diffl (sum(a..b) f')) x`,
REPLICATE_TAC 3 GEN_TAC THEN INDUCT_TAC THEN
REWRITE_TAC[SUM_CLAUSES_NUMSEG] THEN COND_CASES_TAC THEN
ASM_SIMP_TAC[ARITH; DIFF_CONST; SUM_TRIV_NUMSEG;
ARITH_RULE `~(a <= SUC b) ==> b < a`] THEN
DISCH_TAC THEN MATCH_MP_TAC DIFF_ADD THEN
ASM_SIMP_TAC[LE_REFL; ARITH_RULE `k <= b ==> k <= SUC b`]);;
numbers .
let bernoulli = define
`(bernoulli 0 = &1) /\
(!n. bernoulli(SUC n) =
--sum(0..n) (\j. &(binom(n + 2,j)) * bernoulli j) / (&n + &2))`;;
let BERNOULLI = prove
(`!n. sum(0..n) (\j. &(binom(n + 1,j)) * bernoulli j) =
if n = 0 then &1 else &0`,
INDUCT_TAC THEN
REWRITE_TAC[bernoulli; SUM_CLAUSES_NUMSEG; GSYM ADD1; ADD_CLAUSES; binom;
REAL_MUL_LID; LE_0; NOT_SUC] THEN
SIMP_TAC[BINOM_LT; ARITH_RULE `n < SUC n`; BINOM_REFL; REAL_ADD_LID] THEN
REWRITE_TAC[ADD_CLAUSES] THEN REWRITE_TAC[GSYM REAL_OF_NUM_ADD] THEN
REWRITE_TAC[ARITH_RULE `SUC(SUC n) = n + 2`] THEN
MATCH_MP_TAC(REAL_FIELD `x = &n + &2 ==> s + x * --s / (&n + &2) = &0`) THEN
REWRITE_TAC[ADD1; BINOM_TOP_STEP_REAL; ARITH_RULE `~(n = n + 1)`] THEN
REWRITE_TAC[BINOM_REFL] THEN REAL_ARITH_TAC);;
polynomials .
let bernpoly = new_definition
`bernpoly n x = sum(0..n) (\k. &(binom(n,k)) * bernoulli k * x pow (n - k))`;;
let DIFF_BERNPOLY = prove
(`!n x. ((bernpoly (SUC n)) diffl (&(SUC n) * bernpoly n x)) x`,
REPEAT GEN_TAC THEN
GEN_REWRITE_TAC (RATOR_CONV o LAND_CONV) [GSYM ETA_AX] THEN
REWRITE_TAC[bernpoly; SUM_CLAUSES_NUMSEG; LE_0] THEN
GEN_REWRITE_TAC LAND_CONV [GSYM REAL_ADD_RID] THEN
MATCH_MP_TAC DIFF_ADD THEN REWRITE_TAC[SUB_REFL; real_pow; DIFF_CONST] THEN
REWRITE_TAC[GSYM SUM_LMUL] THEN MATCH_MP_TAC DIFF_SUM THEN
REPEAT STRIP_TAC THEN REWRITE_TAC[ADD1; BINOM_TOP_STEP_REAL] THEN
DIFF_TAC THEN ASM_SIMP_TAC[ARITH_RULE `k <= n ==> ~(k = n + 1)`] THEN
REWRITE_TAC[REAL_MUL_LZERO; REAL_ADD_LID] THEN
ASM_SIMP_TAC[ARITH_RULE `k <= n ==> (n + 1) - k - 1 = n - k`] THEN
ASM_SIMP_TAC[GSYM REAL_OF_NUM_SUB; ARITH_RULE `k <= n ==> k <= n + 1`] THEN
UNDISCH_TAC `k <= n:num` THEN
REWRITE_TAC[GSYM REAL_OF_NUM_ADD; GSYM REAL_OF_NUM_LE] THEN
ABBREV_TAC `z = x pow (n - k)` THEN CONV_TAC REAL_FIELD);;
let INTEGRALS_EQ = prove
(`!f g. (!x. ((\x. f(x) - g(x)) diffl &0) x) /\ f(&0) = g(&0)
==> !x. f(x) = g(x)`,
REPEAT STRIP_TAC THEN
MP_TAC(SPECL [`\x:real. f(x) - g(x)`; `x:real`; `&0`] DIFF_ISCONST_ALL) THEN
ASM_REWRITE_TAC[] THEN REAL_ARITH_TAC);;
let RECURRENCE_BERNPOLY = prove
(`!n x. bernpoly n (x + &1) - bernpoly n x = &n * x pow (n - 1)`,
INDUCT_TAC THENL
[REWRITE_TAC[bernpoly; SUM_SING_NUMSEG; REAL_SUB_REFL; SUB_REFL;
real_pow; REAL_MUL_LZERO];
ALL_TAC] THEN
MATCH_MP_TAC INTEGRALS_EQ THEN CONJ_TAC THENL
[X_GEN_TAC `x:real` THEN FIRST_X_ASSUM(MP_TAC o SPEC `x:real`) THEN
ONCE_REWRITE_TAC[GSYM REAL_SUB_0] THEN
) ( & ( SUC n ) ) ` ) THEN
REWRITE_TAC[REAL_MUL_RZERO ] THEN DISCH_THEN(SUBST1_TAC o ) THEN
] THEN
REPEAT(MATCH_MP_TAC DIFF_SUB THEN ) THEN
SIMP_TAC[SUC_SUB1 ; DIFF_CMUL ; DIFF_POW ; DIFF_BERNPOLY ; ETA_AX ] THEN
LAND_CONV [ GSYM REAL_MUL_RID ] THEN
MATCH_MP_TAC DIFF_CHAIN THEN REWRITE_TAC[DIFF_BERNPOLY ] THEN
DIFF_TAC THEN REAL_ARITH_TAC ;
ALL_TAC ] THEN
REWRITE_TAC[bernpoly ; GSYM SUM_SUB_NUMSEG ] THEN
REWRITE_TAC[REAL_ADD_LID ; REAL_POW_ONE ; GSYM REAL_SUB_LDISTRIB ] THEN
REWRITE_TAC[SUM_CLAUSES_NUMSEG ; LE_0 ; SUB_REFL ; real_pow ] THEN
REWRITE_TAC[REAL_SUB_REFL ; REAL_MUL_RZERO ; REAL_ADD_RID ] THEN
SIMP_TAC[ARITH_RULE ` i < = n = = > SUC n - i = SUC(n - i ) ` ] THEN
REWRITE_TAC[real_pow ; REAL_MUL_LZERO ; REAL_SUB_RZERO ; REAL_MUL_RID ] THEN
REWRITE_TAC[BERNOULLI ; ADD1 ] THEN
COND_CASES_TAC THEN ASM_REWRITE_TAC[ARITH ; real_pow ; REAL_MUL_LID ] THEN
CONV_TAC SYM_CONV THEN REWRITE_TAC[REAL_ENTIRE ; REAL_POW_EQ_0 ] THEN
ASM_REWRITE_TAC[ADD_SUB ] ) ; ;
( * -------------------------------------------------------------------------
REWRITE_TAC[REAL_MUL_RZERO] THEN DISCH_THEN(SUBST1_TAC o SYM) THEN
REWRITE_TAC[REAL_SUB_LDISTRIB] THEN
REPEAT(MATCH_MP_TAC DIFF_SUB THEN CONJ_TAC) THEN
SIMP_TAC[SUC_SUB1; DIFF_CMUL; DIFF_POW; DIFF_BERNPOLY; ETA_AX] THEN
GEN_REWRITE_TAC LAND_CONV [GSYM REAL_MUL_RID] THEN
MATCH_MP_TAC DIFF_CHAIN THEN REWRITE_TAC[DIFF_BERNPOLY] THEN
DIFF_TAC THEN REAL_ARITH_TAC;
ALL_TAC] THEN
REWRITE_TAC[bernpoly; GSYM SUM_SUB_NUMSEG] THEN
REWRITE_TAC[REAL_ADD_LID; REAL_POW_ONE; GSYM REAL_SUB_LDISTRIB] THEN
REWRITE_TAC[SUM_CLAUSES_NUMSEG; LE_0; SUB_REFL; real_pow] THEN
REWRITE_TAC[REAL_SUB_REFL; REAL_MUL_RZERO; REAL_ADD_RID] THEN
SIMP_TAC[ARITH_RULE `i <= n ==> SUC n - i = SUC(n - i)`] THEN
REWRITE_TAC[real_pow; REAL_MUL_LZERO; REAL_SUB_RZERO; REAL_MUL_RID] THEN
REWRITE_TAC[BERNOULLI; ADD1] THEN
COND_CASES_TAC THEN ASM_REWRITE_TAC[ARITH; real_pow; REAL_MUL_LID] THEN
CONV_TAC SYM_CONV THEN REWRITE_TAC[REAL_ENTIRE; REAL_POW_EQ_0] THEN
ASM_REWRITE_TAC[ADD_SUB]);;
let SUM_OF_POWERS = prove
(`!n. sum(0..n) (\k. &k pow m) =
(bernpoly(SUC m) (&n + &1) - bernpoly(SUC m) (&0)) / (&m + &1)`,
GEN_TAC THEN ASM_SIMP_TAC[REAL_EQ_RDIV_EQ; REAL_ARITH `&0 < &n + &1`] THEN
ONCE_REWRITE_TAC[GSYM REAL_MUL_SYM] THEN
REWRITE_TAC[GSYM SUM_LMUL] THEN MATCH_MP_TAC EQ_TRANS THEN EXISTS_TAC
`sum(0..n) (\i. bernpoly (SUC m) (&(i + 1)) - bernpoly (SUC m) (&i))` THEN
CONJ_TAC THENL
[REWRITE_TAC[RECURRENCE_BERNPOLY; GSYM REAL_OF_NUM_ADD] THEN
REWRITE_TAC[GSYM REAL_OF_NUM_SUC; SUC_SUB1];
SIMP_TAC[SUM_DIFFS; LE_0] THEN REWRITE_TAC[REAL_OF_NUM_ADD]]);;
let SUM_CONV =
let pth = prove
(`sum(0..0) f = f 0 /\ sum(0..SUC n) f = sum(0..n) f + f(SUC n)`,
SIMP_TAC[SUM_CLAUSES_NUMSEG; LE_0]) in
let econv_0 = GEN_REWRITE_CONV I [CONJUNCT1 pth]
and econv_1 = GEN_REWRITE_CONV I [CONJUNCT2 pth] in
let rec sconv tm =
(econv_0 ORELSEC
(LAND_CONV(RAND_CONV num_CONV) THENC econv_1 THENC
COMB2_CONV (RAND_CONV sconv) (RAND_CONV NUM_SUC_CONV))) tm in
sconv;;
let BINOM_CONV =
let pth = prove
(`a * b * x = FACT c ==> x = (FACT c) DIV (a * b)`,
REPEAT STRIP_TAC THEN CONV_TAC SYM_CONV THEN
MATCH_MP_TAC DIV_UNIQ THEN EXISTS_TAC `0` THEN CONJ_TAC THENL
[POP_ASSUM MP_TAC THEN ARITH_TAC;
POP_ASSUM MP_TAC THEN ONCE_REWRITE_TAC[GSYM CONTRAPOS_THM] THEN
SIMP_TAC[LT_NZ; MULT_ASSOC; MULT_CLAUSES] THEN
MESON_TAC[LT_NZ; FACT_LT]]) in
let match_pth = MATCH_MP pth
and binom_tm = `binom` in
fun tm ->
let bop,lr = dest_comb tm in
if bop <> binom_tm then failwith "BINOM_CONV" else
let l,r = dest_pair lr in
let n = dest_numeral l and k = dest_numeral r in
if n </ k then
let th = SPECL [l;r] BINOM_LT in
MP th (EQT_ELIM(NUM_LT_CONV(lhand(concl th))))
else
let d = n -/ k in
let th1 = match_pth(SPECL [mk_numeral d; r] BINOM_FACT) in
CONV_RULE NUM_REDUCE_CONV th1;;
let BERNOULLIS =
let th_0,th_1 = CONJ_PAIR bernoulli
and b_tm = `bernoulli` in
let conv_1 = GEN_REWRITE_CONV I [th_1] in
let rec bconv n =
if n <= 0 then [th_0] else
let bths = bconv (n - 1)
and tm = mk_comb(b_tm,mk_small_numeral n) in
(RAND_CONV num_CONV THENC conv_1 THENC
LAND_CONV(RAND_CONV SUM_CONV) THENC
ONCE_DEPTH_CONV BETA_CONV THENC
DEPTH_CONV(NUM_RED_CONV ORELSEC BINOM_CONV) THENC
GEN_REWRITE_CONV ONCE_DEPTH_CONV bths THENC
REAL_RAT_REDUCE_CONV) tm :: bths in
bconv;;
let BERNOULLI_CONV =
let b_tm = `bernoulli` in
fun tm -> let op,n = dest_comb tm in
if op <> b_tm || not(is_numeral n) then failwith "BERNOULLI_CONV"
else hd(BERNOULLIS(dest_small_numeral n));;
let BERNPOLY_CONV =
let conv_1 =
REWR_CONV bernpoly THENC SUM_CONV THENC
TOP_DEPTH_CONV BETA_CONV THENC NUM_REDUCE_CONV
and conv_3 =
ONCE_DEPTH_CONV BINOM_CONV THENC REAL_POLY_CONV in
fun tm ->
let n = dest_small_numeral(lhand tm) in
let conv_2 = GEN_REWRITE_CONV ONCE_DEPTH_CONV (BERNOULLIS n) in
(conv_1 THENC conv_2 THENC conv_3) tm;;
let SOP_CONV =
let pth = prove
(`sum(0..n) (\k. &k pow m) =
(\p. (p(&n + &1) - p(&0)) / (&m + &1))
(\x. bernpoly (SUC m) x)`,
REWRITE_TAC[SUM_OF_POWERS]) in
let conv_0 = REWR_CONV pth in
REWR_CONV pth THENC
RAND_CONV(ABS_CONV(LAND_CONV NUM_SUC_CONV THENC BERNPOLY_CONV)) THENC
TOP_DEPTH_CONV BETA_CONV THENC
REAL_POLY_CONV;;
let SOP_NUM_CONV =
let pth = prove
(`sum(0..n) (\k. &k pow p) = &m ==> nsum(0..n) (\k. k EXP p) = m`,
REWRITE_TAC[REAL_OF_NUM_POW; GSYM REAL_OF_NUM_SUM_NUMSEG;
REAL_OF_NUM_EQ]) in
let rule_1 = PART_MATCH (lhs o rand) pth in
fun tm ->
let th1 = rule_1 tm in
let th2 = SOP_CONV(lhs(lhand(concl th1))) in
MATCH_MP th1 th2;;
The example bragged about .
time SOP_NUM_CONV `nsum(0..1000) (\k. k EXP 10)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 0)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 1)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 2)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 3)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 4)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 5)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 6)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 7)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 8)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 9)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 10)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 11)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 12)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 13)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 14)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 15)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 16)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 17)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 18)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 19)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 20)`;;
time SOP_CONV `sum(0..n) (\k. &k pow 21)`;;
|
0d0cb4c6776343ebafac91e2bace6ba0ac93c7198b2c287c25d5956d3e384a6d | redis-sd/erlang-ryng | ryng_event.erl | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
%% vim: ts=4 sw=4 ft=erlang noet
%%%-------------------------------------------------------------------
@author < >
@copyright 2014 , Pagoda Box , Inc.
%%% @doc
%%%
%%% @end
Created : 10 Sep 2013 by < >
%%%-------------------------------------------------------------------
-module(ryng_event).
-include("ryng.hrl").
-define(MANAGER, ryng_manager).
%% API
-export([manager/0, add_handler/2]).
-export([node_add/2, node_del/2, node_set/2, ring_add/1, ring_del/1, ring_refresh/1]).
%%%===================================================================
%%% API functions
%%%===================================================================
%% @doc Name of the registered event manager all ryng events go through.
manager() ->
	?MANAGER.

%% @doc Install Handler on the manager, passing Pid as its init argument.
add_handler(Handler, Pid) ->
	gen_event:add_handler(manager(), Handler, Pid).

%% @doc Notify handlers that Node was added to ring RingName.
node_add(RingName, Node) ->
	notify({node, add, RingName, Node}).

%% @doc Notify handlers that NodeObject was removed from ring RingName.
node_del(RingName, NodeObject) ->
	notify({node, del, RingName, NodeObject}).

%% @doc Notify handlers that Node was updated in ring RingName.
%% NOTE(review): emits a {ring, set, ...} tuple although this function is
%% node_set and its siblings emit {node, ...} -- confirm against the event
%% handlers whether {node, set, ...} was intended.
node_set(RingName, Node) ->
	notify({ring, set, RingName, Node}).

%% @doc Notify handlers that Ring was created.
ring_add(Ring) ->
	notify({ring, add, Ring}).

%% @doc Notify handlers that ring RingName was deleted.
ring_del(RingName) ->
	notify({ring, del, RingName}).

%% @doc Notify handlers that Ring was rebuilt.
ring_refresh(Ring) ->
	notify({ring, refresh, Ring}).
%%%-------------------------------------------------------------------
%%% Internal functions
%%%-------------------------------------------------------------------
%% @private
%% @doc Asynchronously broadcast Message to every installed handler
%% (gen_event:notify/2 does not wait for handlers to run).
notify(Message) ->
	gen_event:notify(manager(), Message).
| null | https://raw.githubusercontent.com/redis-sd/erlang-ryng/6b9f3db2f6380de09b173721108083aade4d53ee/src/ryng_event.erl | erlang | vim: ts=4 sw=4 ft=erlang noet
-------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
API
===================================================================
API functions
===================================================================
-------------------------------------------------------------------
------------------------------------------------------------------- | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
@author < >
@copyright 2014 , Pagoda Box , Inc.
Created : 10 Sep 2013 by < >
-module(ryng_event).
-include("ryng.hrl").
-define(MANAGER, ryng_manager).
-export([manager/0, add_handler/2]).
-export([node_add/2, node_del/2, node_set/2, ring_add/1, ring_del/1, ring_refresh/1]).
manager() ->
?MANAGER.
add_handler(Handler, Pid) ->
gen_event:add_handler(manager(), Handler, Pid).
node_add(RingName, Node) ->
notify({node, add, RingName, Node}).
node_del(RingName, NodeObject) ->
notify({node, del, RingName, NodeObject}).
node_set(RingName, Node) ->
notify({ring, set, RingName, Node}).
ring_add(Ring) ->
notify({ring, add, Ring}).
ring_del(RingName) ->
notify({ring, del, RingName}).
ring_refresh(Ring) ->
notify({ring, refresh, Ring}).
Internal functions
@private
%% Asynchronously deliver Message to every handler installed on the manager.
notify(Message) ->
	gen_event:notify(manager(), Message).
|
517fcd3bc188ec516a6e832a96e823102b56648d63c800529e5acadc82b883ed | hanshuebner/bknr-web | web-utils.lisp | (in-package :bknr.web)
;; Enable CL-INTERPOL's #?"..." reader syntax, used by several
;; functions below for string interpolation.
(enable-interpol-syntax)

;; One uploaded file from a multipart request: the form field NAME, the
;; temporary PATHNAME the data was saved under, the client-supplied
;; ORIGINAL-FILENAME and the declared CONTENT-TYPE.
(defstruct upload name pathname original-filename content-type)

;; Protocol: URLs and HTML links for application objects.
(defgeneric object-url (obj))
(defgeneric edit-object-url (obj))
(defgeneric html-link (obj))
(defgeneric html-edit-link (obj))

;; Upper bound (in octets) for accepted file uploads.
;; NOTE(review): not enforced anywhere in this file - confirm callers use it.
(defparameter *upload-file-size-limit* 5000000)

;; Send a "404 Not Found" response with a minimal HTML body.
(defun error-404 ()
  (with-http-response (:response +http-not-found+)
    (with-http-body ()
      (html "The page you requested could not be found."))))

;; Return a fresh URI keeping only the path and query of URI (scheme,
;; host and fragment are dropped), suitable for a relative redirect.
(defun redirect-uri (uri)
  (make-instance 'uri :path (uri-path uri)
                 :query (uri-query uri)))
(defun request-uploaded-files ()
  "Return a list of UPLOAD structures describing the file uploads in the request.
The multipart parameters are parsed only once per request; the result is
cached in the request's aux data under 'UPLOADED-FILES."
  ;; Hunchentoot reports each uploaded file as (NAME . (PATH ORIG-NAME TYPE)),
  ;; so the file parameters are exactly those whose value is a list.
  (unless (aux-request-value 'uploaded-files)
    (setf (aux-request-value 'uploaded-files)
          (let ((uploads (remove-if-not #'listp (post-parameters*) :key #'cdr)) retval)
            (dolist (upload-info uploads)
              (destructuring-bind (name pathname original-pathname content-type) upload-info
                ;; Strip any client-side directory part (both \ and /)
                ;; from the original file name.
                (push (make-upload :name name :pathname pathname :original-filename (cl-ppcre:regex-replace "^.*[\\\\/]" original-pathname "")
                                   :content-type content-type) retval)))
            (nreverse retval))))
  (aux-request-value 'uploaded-files))

;; Find the upload posted under form field PARAMETER-NAME, or NIL.
(defun request-uploaded-file (parameter-name)
  (find parameter-name (request-uploaded-files) :test #'equal :key #'upload-name))

;; Bind IMAGE to the cl-gd image decoded from UPLOAD's temporary file;
;; the image type is derived from the original file name's extension.
(defmacro with-image-from-upload ((image upload &rest args) &body body)
  `(with-image-from-file (,image (upload-pathname ,upload)
                                 (make-keyword-from-string (pathname-type (upload-original-filename ,upload)))
                                 ,@args)
     ,@body))

;; Variant that binds cl-gd:*default-image* instead of a user variable.
(defmacro with-image-from-upload* ((upload &rest args) &body body)
  `(with-image-from-upload (cl-gd:*default-image* ,upload ,@args)
     ,@body))

;; Import an uploaded file into the image store, defaulting the store
;; name and type from the original file name and declared content type.
(defmethod bknr.images:import-image ((upload upload) &rest args &key &allow-other-keys)
  (apply #'bknr.images:import-image (upload-pathname upload)
         :name (pathname-name (upload-original-filename upload))
         :type (image-type-symbol (upload-content-type upload)) args))
(defun all-request-params ()
  "Return all non-empty request parameters - This includes all parameters encoded in the URL as
well as those in the request body, either as urlencoded strings or as multipart body. If a multipart
body is present in the request, any uploaded files are saved in a temporary file and noted in the
request's plist. Uploaded files will be automatically deleted by the with-http-response
macro after the request body has been executed."
  ;; Parse once per request; cache under 'BKNR-PARSED-PARAMETERS.
  (unless (aux-request-value 'bknr-parsed-parameters)
    (setf (aux-request-value 'bknr-parsed-parameters)
          (remove-if (lambda (value)
                       "Remove empty strings (reported as NIL) and uploaded files"
                       (or (equal value "")
                           (listp value)))
                     (query-params)
                     :key #'cdr)))
  (aux-request-value 'bknr-parsed-parameters))

;; Alist of request parameters; the GET/POST keywords select which
;; sources (URL query string, request body) are included.
(defun query-params (&key (get t) (post t))
  (append (when get (get-parameters*))
          (when post (post-parameters*))))

;; Return the value of parameter PARAM-NAME (matched case-insensitively),
;; or NIL when it is absent or empty.  With TYPE, the raw value is run
;; through hunchentoot's (internal) parameter conversion instead.
(defun query-param (param-name &key (get t) (post t) type)
  (let ((value (cdr (assoc param-name (query-params :get get :post post) :test #'string-equal))))
    (if type
        (hunchentoot::convert-parameter value type)
        (unless (equal value "")
          value))))

;; All values posted under PARAM-NAME, in request order.
(defun query-param-list (param-name &key (get t) (post t))
  (assoc-values param-name (query-params :get get :post post)
                :test #'string-equal))

;; Per-request variables, stored in the *REQ-VAR-HASH* table.
(defun request-variable (var)
  (gethash var *req-var-hash*))
(defun (setf request-variable) (new-value var)
  (setf (gethash var *req-var-hash*) new-value))

;; All request variables flattened into a plist (key value key value ...).
(defun request-variables ()
  (loop for key being the hash-keys of *req-var-hash*
        collect key
        collect (request-variable key)))

;; Emit an error page with the given HTTP RESPONSE code, then signal
;; MESSAGE as a Lisp error - callers never return normally from here.
(defun http-error (response message)
  (with-bknr-page (:title #?"error: $(message)" :response response)
    (:princ-safe message))
  (error message))
(defun keywords-from-query-param-list (param &key (remove-empty t))
  "Convert each string in PARAM into a keyword, trimming surrounding
whitespace first.  Unless REMOVE-EMPTY is NIL, keywords produced from
blank strings (i.e. :||) are dropped from the result."
  (flet ((as-keyword (string)
           (make-keyword-from-string
            (string-trim '(#\Space #\Tab #\Newline) string))))
    (let ((keywords (mapcar #'as-keyword param)))
      (if remove-empty
          (remove :|| keywords)
          keywords))))
(defun html-quote (string)
  "Escape the HTML metacharacters &, < and > in STRING by replacing
them with their character entities, so the result can be embedded into
HTML without being interpreted as markup."
  (regex-replace-all "([&<>])" string
                     #'(lambda (target-string start end match-start &rest args)
                         (declare (ignore start end args))
                         ;; Map each matched character to its entity.
                         ;; (Mapping a character to itself would make
                         ;; this whole function a no-op.)
                         (ecase (elt target-string match-start)
                           (#\& "&amp;")
                           (#\< "&lt;")
                           (#\> "&gt;")))))
;; Split the decoded request path on "/" and return everything after
;; the first two components (the empty root and the handler prefix) as
;; multiple values.
(defun parse-url ()
  (values-list (cddr (mapcar #'url-decode (split "/" (script-name*))))))

;; Last path component of the request URL, or NIL for a bare "/".
(defun last-url-component ()
  (register-groups-bind (last)
      ("/([^\\/]+)$" (script-name*))
    last))

;; Read the five request parameters NAME-minute .. NAME-year and combine
;; them into a universal time.  A missing minute or hour defaults to 0;
;; if day, month or year is missing, NIL is returned instead.
(defun parse-date-field (name)
  (let ((timespec (mapcar #'(lambda (var) (parse-integer
                                           (query-param (concatenate 'string name "-" var))
                                           :junk-allowed t))
                          '("minute" "hour" "day" "month" "year"))))
    (unless (car timespec)
      (rplaca timespec 0))
    (unless (cadr timespec)
      (rplaca (cdr timespec) 0))
    (if (every #'identity timespec)
        (apply #'encode-universal-time 0 timespec)
        nil)))

(defun bknr-url-path (handler)
  "Returns the Path of the request under the handler prefix"
  (let ((len (length (page-handler-prefix handler))))
    (subseq (script-name*) len)))

;; Rebuild a URL of the form /<prefix>/<object-id>/<command> from the
;; current request path, substituting PREFIX and/or COMMAND if given.
;; NOTE(review): OLD-COMMAND is the &rest tail (a list); confirm the
;; interpolated result is as intended for multi-segment commands.
(defun self-url (&key command prefix)
  (destructuring-bind
      (empty old-prefix object-id &rest old-command)
      (split "/" (script-name*))
    (declare (ignore empty))
    #?"/$((or prefix old-prefix))/$(object-id)/$((or command old-command))"))

;; Fallback HTML rendering for store objects without a specialized link.
(defmethod html-link ((object store-object))
  (html (:princ (format nil "[persistent object with id #~a]" (store-object-id object)))))
(defun text-to-html (string)
  "Perform simple text to HTML conversion. http urls are replaced by links, internal links to
images become image tags."
  ;; Expand internal bknr:<path> references: paths under /image become
  ;; <img> tags, everything else becomes a plain link.
  (setf string (regex-replace-all
                #?r"bknr:([0-9A-Za-z$-_.+!*'()]+)" string
                #'(lambda (target-string start end match-start match-end reg-starts reg-ends)
                    (declare (ignore start end match-start match-end))
                    (let ((url (subseq target-string (aref reg-starts 0) (aref reg-ends 0))))
                      (regex-replace-all "URL" (if (all-matches "^/image" url)
                                                   "<img src=\"URL\" />"
                                                   "<a href=\"URL\">URL</a>")
                                         url)))))
  ;; Expand plain http:// URLs: image-file extensions are inlined as
  ;; <img> tags, anything else becomes a link opening in a new window.
  (setf string (regex-replace-all
                #?r"(http://[0-9A-Za-z$-_.+!*'()]+)" string
                #'(lambda (target-string start end match-start match-end &rest args)
                    (declare (ignore start end args))
                    (let ((url (subseq target-string match-start match-end)))
                      (regex-replace-all "URL" (if (all-matches "(?i)\\.(gif|jpe?g|png)$" url)
                                                   "<img src=\"URL\" />"
                                                   "<a href=\"URL\" target=\"_blank\">URL</a>")
                                         url)))))
  ;; Finally, turn CR/LF characters into <br> tags.
  (setf string (regex-replace-all "[\\r\\n]" string "<br>"))
  string)

;; Replace [topic] markers in STRING with links into the wiki.
(defun make-wiki-hrefs (string)
  (regex-replace-all #?r"\[(.+?)\]" string
                     #'(lambda (target-string start end match-start match-end
                                reg-starts reg-ends)
                         (declare (ignore start end match-start match-end))
                         (let ((keyword (subseq target-string
                                                (svref reg-starts 0)
                                                (svref reg-ends 0))))
                           (format nil "<a class=\"wikilink\" href=\"/wiki/~a\">~a</a>"
                                   keyword
                                   keyword)))))

;; Like HANDLER-CASE, except that when *BKNR-DEBUG* is true the handlers
;; are bypassed so conditions reach the debugger during development.
(defmacro bknr-handler-case (body &rest handler-forms)
  `(if *bknr-debug*
       ,body
       (handler-case
           ,body
         ,@handler-forms)))
(defun emit-element-attributes (attributes)
  "Print the plist ATTRIBUTES to *STANDARD-OUTPUT* as HTML attribute
syntax: for each pair, a leading space, the downcased key name, and
=\"value\" (the value printed princ-style)."
  (loop for (key value) on attributes by #'cddr
        do (format t " ~a=\"~a\""
                   (string-downcase (symbol-name key))
                   value)))
(defun emit-html (&rest forms)
  "Print FORMS to *STANDARD-OUTPUT* as HTML.  Each form is either a
keyword (an empty tag), a string (printed as-is), or a list whose head
names the tag - optionally with attributes, as in ((:foo :bar 1) ...)."
  (let ((element (car forms)))
    (etypecase element
      ;; :foo
      (keyword (handle-tag element nil nil))
      ;; (:foo ...) or ((:foo ...) ...)
      (cons (if (consp (car element))
                (handle-tag (caar element) (cdar element) (cdr element)) ; ((:foo ...) ...)
                (handle-tag (car element) nil (cdr element)))) ; (:foo ...)
      ;; "foo"
      (string (princ element))))
  ;; Emit the remaining sibling forms, if any.
  (when (cdr forms)
    (apply #'emit-html (cdr forms))))
(defun handle-tag (tag-symbol attributes body)
  "Emit one HTML element named after TAG-SYMBOL to *STANDARD-OUTPUT*.
ATTRIBUTES is a plist of attribute keywords and values; BODY is a list
of child forms handed back to EMIT-HTML.  An empty BODY produces a
self-closing tag.  (A stray top-level symbol EMIT - an unbound variable
reference left over before the LET - has been removed.)"
  (let ((tag-name (string-downcase (symbol-name tag-symbol))))
    ;; emit < and tag name
    (princ "<")
    (princ tag-name)
    ;; emit possible attributes
    (when attributes
      (emit-element-attributes attributes))
    (if body
        ;; emit tag body, then the closing tag
        (progn
          (princ ">")
          (apply #'emit-html body)
          (princ "</")
          (princ tag-name)
          (princ ">"))
        ;; empty body, close tag immediately
        (princ " />"))))
;; URL-encode STRING, but encode spaces as %20 rather than the "+"
;; that URL-ENCODE produces.
(defun encode-urlencoded (string)
  (regex-replace-all #?r"\+" (url-encode string) "%20"))

;; Send an application/json response whose body is a single JSON object
;; built by evaluating BODY inside yason's object printer.
(defmacro with-json-response (() &body body)
  `(with-http-response (:content-type "application/json")
     (yason:with-output-to-string* ()
       (yason:with-object ()
         ,@body))))
,@body)))) | null | https://raw.githubusercontent.com/hanshuebner/bknr-web/5c30b61818a2f02f6f2e5dc69fd77396ec3afc51/src/web/web-utils.lisp | lisp | :foo
(:foo ...) or ((:foo ...) ...)
((:foo ...) ...)
(:foo ...)
"foo"
emit < and tag name
emit possible attributes
emit tag body
empty body, close tag immediately | (in-package :bknr.web)
(enable-interpol-syntax)
(defstruct upload name pathname original-filename content-type)
(defgeneric object-url (obj))
(defgeneric edit-object-url (obj))
(defgeneric html-link (obj))
(defgeneric html-edit-link (obj))
(defparameter *upload-file-size-limit* 5000000)
(defun error-404 ()
(with-http-response (:response +http-not-found+)
(with-http-body ()
(html "The page you requested could not be found."))))
(defun redirect-uri (uri)
(make-instance 'uri :path (uri-path uri)
:query (uri-query uri)))
(defun request-uploaded-files ()
"Return a list of UPLOAD structures describing the file uploads in the request."
(unless (aux-request-value 'uploaded-files)
(setf (aux-request-value 'uploaded-files)
(let ((uploads (remove-if-not #'listp (post-parameters*) :key #'cdr)) retval)
(dolist (upload-info uploads)
(destructuring-bind (name pathname original-pathname content-type) upload-info
(push (make-upload :name name :pathname pathname :original-filename (cl-ppcre:regex-replace "^.*[\\\\/]" original-pathname "")
:content-type content-type) retval)))
(nreverse retval))))
(aux-request-value 'uploaded-files))
(defun request-uploaded-file (parameter-name)
(find parameter-name (request-uploaded-files) :test #'equal :key #'upload-name))
(defmacro with-image-from-upload ((image upload &rest args) &body body)
`(with-image-from-file (,image (upload-pathname ,upload)
(make-keyword-from-string (pathname-type (upload-original-filename ,upload)))
,@args)
,@body))
(defmacro with-image-from-upload* ((upload &rest args) &body body)
`(with-image-from-upload (cl-gd:*default-image* ,upload ,@args)
,@body))
(defmethod bknr.images:import-image ((upload upload) &rest args &key &allow-other-keys)
(apply #'bknr.images:import-image (upload-pathname upload)
:name (pathname-name (upload-original-filename upload))
:type (image-type-symbol (upload-content-type upload)) args))
(defun all-request-params ()
"Return all non-empty request parameters - This includes all parameters encoded in the URL as
well as those in the request body, either as urlencoded strings or as multipart body. If a multipart
body is present in the request, any uploaded files are saved in a temporary file and noted in the
request's plist. Uploaded files will be automatically deleted by the with-http-response
macro after the request body has been executed."
(unless (aux-request-value 'bknr-parsed-parameters)
(setf (aux-request-value 'bknr-parsed-parameters)
(remove-if (lambda (value)
"Remove empty strings (reported as NIL) and uploaded files"
(or (equal value "")
(listp value)))
(query-params)
:key #'cdr)))
(aux-request-value 'bknr-parsed-parameters))
(defun query-params (&key (get t) (post t))
(append (when get (get-parameters*))
(when post (post-parameters*))))
(defun query-param (param-name &key (get t) (post t) type)
(let ((value (cdr (assoc param-name (query-params :get get :post post) :test #'string-equal))))
(if type
(hunchentoot::convert-parameter value type)
(unless (equal value "")
value))))
(defun query-param-list (param-name &key (get t) (post t))
(assoc-values param-name (query-params :get get :post post)
:test #'string-equal))
(defun request-variable (var)
(gethash var *req-var-hash*))
(defun (setf request-variable) (new-value var)
(setf (gethash var *req-var-hash*) new-value))
(defun request-variables ()
(loop for key being the hash-keys of *req-var-hash*
collect key
collect (request-variable key)))
(defun http-error (response message)
(with-bknr-page (:title #?"error: $(message)" :response response)
(:princ-safe message))
(error message))
(defun keywords-from-query-param-list (param &key (remove-empty t))
(let ((keywords (mapcar #'(lambda (s)
(make-keyword-from-string (string-trim '(#\Space #\Tab #\Newline) s)))
param)))
(if remove-empty
(remove-if #'(lambda (x) (eq x :||)) keywords)
keywords)))
(defun html-quote (string)
(regex-replace-all "([&<>])" string #'(lambda (target-string start end match-start &rest args)
(declare (ignore start end args))
(ecase (elt target-string match-start)
(#\& "&")
(#\< "<")
(#\> ">")))))
(defun parse-url ()
(values-list (cddr (mapcar #'url-decode (split "/" (script-name*))))))
(defun last-url-component ()
(register-groups-bind (last)
("/([^\\/]+)$" (script-name*))
last))
(defun parse-date-field (name)
(let ((timespec (mapcar #'(lambda (var) (parse-integer
(query-param (concatenate 'string name "-" var))
:junk-allowed t))
'("minute" "hour" "day" "month" "year"))))
(unless (car timespec)
(rplaca timespec 0))
(unless (cadr timespec)
(rplaca (cdr timespec) 0))
(if (every #'identity timespec)
(apply #'encode-universal-time 0 timespec)
nil)))
(defun bknr-url-path (handler)
"Returns the Path of the request under the handler prefix"
(let ((len (length (page-handler-prefix handler))))
(subseq (script-name*) len)))
(defun self-url (&key command prefix)
(destructuring-bind
(empty old-prefix object-id &rest old-command)
(split "/" (script-name*))
(declare (ignore empty))
#?"/$((or prefix old-prefix))/$(object-id)/$((or command old-command))"))
(defmethod html-link ((object store-object))
(html (:princ (format nil "[persistent object with id #~a]" (store-object-id object)))))
(defun text-to-html (string)
"Perform simple text to HTML conversion. http urls are replaced by links, internal links to
images become image tags."
(setf string (regex-replace-all
#?r"bknr:([0-9A-Za-z$-_.+!*'()]+)" string
#'(lambda (target-string start end match-start match-end reg-starts reg-ends)
(declare (ignore start end match-start match-end))
(let ((url (subseq target-string (aref reg-starts 0) (aref reg-ends 0))))
(regex-replace-all "URL" (if (all-matches "^/image" url)
"<img src=\"URL\" />"
"<a href=\"URL\">URL</a>")
url)))))
(setf string (regex-replace-all
#?r"(http://[0-9A-Za-z$-_.+!*'()]+)" string
#'(lambda (target-string start end match-start match-end &rest args)
(declare (ignore start end args))
(let ((url (subseq target-string match-start match-end)))
(regex-replace-all "URL" (if (all-matches "(?i)\\.(gif|jpe?g|png)$" url)
"<img src=\"URL\" />"
"<a href=\"URL\" target=\"_blank\">URL</a>")
url)))))
(setf string (regex-replace-all "[\\r\\n]" string "<br>"))
string)
(defun make-wiki-hrefs (string)
(regex-replace-all #?r"\[(.+?)\]" string
#'(lambda (target-string start end match-start match-end
reg-starts reg-ends)
(declare (ignore start end match-start match-end))
(let ((keyword (subseq target-string
(svref reg-starts 0)
(svref reg-ends 0))))
(format nil "<a class=\"wikilink\" href=\"/wiki/~a\">~a</a>"
keyword
keyword)))))
(defmacro bknr-handler-case (body &rest handler-forms)
`(if *bknr-debug*
,body
(handler-case
,body
,@handler-forms)))
(defun emit-element-attributes (attributes)
(loop for (key value) on attributes by #'cddr
do (progn
(princ " ")
(princ (string-downcase (symbol-name key)))
(princ "=\"")
(princ value)
(princ "\""))))
(defun emit-html (&rest forms)
(let ((element (car forms)))
(etypecase element
(keyword (handle-tag element nil nil))
(cons (if (consp (car element))
(string (princ element))))
(when (cdr forms)
(apply #'emit-html (cdr forms))))
(defun handle-tag (tag-symbol attributes body)
emit
(let ((tag-name (string-downcase (symbol-name tag-symbol))))
(princ "<")
(princ tag-name)
(when attributes
(emit-element-attributes attributes))
(if body
(progn
(princ ">")
(apply #'emit-html body)
(princ "</")
(princ tag-name)
(princ ">"))
(princ " />"))))
(defun encode-urlencoded (string)
(regex-replace-all #?r"\+" (url-encode string) "%20"))
(defmacro with-json-response (() &body body)
`(with-http-response (:content-type "application/json")
(yason:with-output-to-string* ()
(yason:with-object ()
,@body)))) |
102fa15ac6110a1e0e6110357d3b4ee089a85ed8de88497c833c30baae419d2a | launchdarkly/haskell-server-sdk | Eventing.hs | module LaunchDarkly.Server.Network.Eventing (eventThread) where
import Control.Concurrent (killThread, myThreadId)
import Control.Concurrent.MVar (modifyMVar_, readMVar, swapMVar, takeMVar)
import Control.Monad (forever, unless, void, when)
import Control.Monad.Catch (MonadMask, MonadThrow)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Logger (MonadLogger, logDebug, logError, logWarn)
import Data.Aeson (encode)
import qualified Data.ByteString.Lazy as L
import Data.Function ((&))
import Data.Generics.Product (getField)
import Data.IORef (atomicModifyIORef', newIORef, readIORef)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8)
import Data.Tuple (swap)
import qualified Data.UUID as UUID
import Network.HTTP.Client (Manager, Request (..), RequestBody (..), httpLbs, responseStatus)
import Network.HTTP.Types.Status (Status (statusCode), status400)
import System.Random (newStdGen, random)
import System.Timeout (timeout)
import LaunchDarkly.Server.Client.Internal (Client, Status (ShuttingDown))
import LaunchDarkly.Server.Config.ClientContext
import LaunchDarkly.Server.Config.HttpConfiguration (prepareRequest)
import LaunchDarkly.Server.Events (EventState, processSummary)
import LaunchDarkly.Server.Network.Common (addToAL, checkAuthorization, getServerTime, isHttpUnrecoverable, tryAuthorized, tryHTTP)
-- | POST one prepared request and interpret the response.  A True first
-- component indicates a retry does not need to be attempted (either the
-- payload was accepted, or it failed with an unrecoverable HTTP status
-- and must be dropped); the second component is the server time parsed
-- from the response headers (0 when the request itself failed).
processSend :: (MonadIO m, MonadLogger m, MonadMask m, MonadThrow m) => Manager -> Request -> m (Bool, Integer)
processSend manager req =
    (liftIO $ tryHTTP $ httpLbs req manager) >>= \case
        (Left err) -> $(logError) (T.pack $ show err) >> pure (False, 0)
        (Right response) -> do
            checkAuthorization response
            -- NOTE(review): the "@@@ " prefix below reads like leftover
            -- debug logging; confirm whether warn level is intended.
            let code = responseStatus response
                serverTime = getServerTime response
             in $(logWarn) (T.append "@@@ server time from LD was determined to be: " $ T.pack $ show serverTime)
                    >> if code < status400
                        then pure (True, serverTime)
                        else
                            if isHttpUnrecoverable $ statusCode $ code
                                then $(logWarn) (T.append "got non recoverable event post response dropping payload: " $ T.pack $ show code) >> pure (True, serverTime)
                                else pure (False, serverTime)

-- | Mark a request as a JSON POST carrying event schema version 4.
setEventHeaders :: Request -> Request
setEventHeaders request =
    request
        { requestHeaders =
            (requestHeaders request)
                & \l ->
                    addToAL l "Content-Type" "application/json"
                        & \l -> addToAL l "X-LaunchDarkly-Event-Schema" "4"
        , method = "POST"
        }

-- | Monotonically raise the stored last-known server time.
updateLastKnownServerTime :: EventState -> Integer -> IO ()
updateLastKnownServerTime state serverTime = modifyMVar_ (getField @"lastKnownServerTime" state) (\lastKnown -> pure $ max serverTime lastKnown)
-- | Long-running delivery loop for analytics events.  Each iteration
-- flushes any pending summary, swaps out the queued events, POSTs them
-- as one batch (retrying once after ~1s on a recoverable failure), then
-- sleeps until the flush interval elapses or a manual flush wakes it.
-- The thread kills itself once the client status becomes ShuttingDown.
eventThread :: (MonadIO m, MonadLogger m, MonadMask m) => Manager -> Client -> ClientContext -> m ()
eventThread manager client clientContext = do
    let state = getField @"events" client; config = getField @"config" client; httpConfig = httpConfiguration clientContext
    rngRef <- liftIO $ newStdGen >>= newIORef
    req <- (liftIO $ prepareRequest httpConfig $ (T.unpack $ getField @"eventsURI" config) ++ "/bulk") >>= pure . setEventHeaders
    void $ tryAuthorized client $ forever $ do
        liftIO $ processSummary config state
        events' <- liftIO $ swapMVar (getField @"events" state) []
        when (not $ null events') $ do
            -- Fresh payload id per batch so the server can deduplicate a
            -- retried POST of the same batch.
            payloadId <- liftIO $ atomicModifyIORef' rngRef (swap . random)
            let
                encoded = encode events'
                thisReq =
                    req
                        { requestBody = RequestBodyLBS encoded
                        , requestHeaders =
                            (requestHeaders req)
                                & \l -> addToAL l "X-LaunchDarkly-Payload-ID" (UUID.toASCIIBytes payloadId)
                        }
            (success, serverTime) <- processSend manager thisReq
            $(logDebug) $ T.append "sending events: " $ decodeUtf8 $ L.toStrict encoded
            _ <- case success of
                True -> liftIO $ updateLastKnownServerTime state serverTime
                False -> do
                    $(logWarn) "retrying event delivery after one second"
                    -- readMVar under timeout: waits up to 1s, but wakes
                    -- early if a manual flush is signalled.
                    liftIO $ void $ timeout (1 * 1000000) $ readMVar $ getField @"flush" state
                    (success', serverTime') <- processSend manager thisReq
                    unless success' $ do
                        $(logWarn) "failed sending events on retry, dropping event batch"
                    liftIO $ updateLastKnownServerTime state serverTime'
            $(logDebug) "finished send of event batch"
        status <- liftIO $ readIORef $ getField @"status" client
        liftIO $ when (status == ShuttingDown) (myThreadId >>= killThread)
        -- Sleep for the flush interval, or wake early on a manual flush.
        liftIO $ void $ timeout ((*) 1000000 $ fromIntegral $ getField @"flushIntervalSeconds" config) $ takeMVar $ getField @"flush" state
| null | https://raw.githubusercontent.com/launchdarkly/haskell-server-sdk/b8642084591733e620dfc5c1598409be7cc40a63/src/LaunchDarkly/Server/Network/Eventing.hs | haskell | A true result indicates a retry does not need to be attempted | module LaunchDarkly.Server.Network.Eventing (eventThread) where
import Control.Concurrent (killThread, myThreadId)
import Control.Concurrent.MVar (modifyMVar_, readMVar, swapMVar, takeMVar)
import Control.Monad (forever, unless, void, when)
import Control.Monad.Catch (MonadMask, MonadThrow)
import Control.Monad.IO.Class (MonadIO, liftIO)
import Control.Monad.Logger (MonadLogger, logDebug, logError, logWarn)
import Data.Aeson (encode)
import qualified Data.ByteString.Lazy as L
import Data.Function ((&))
import Data.Generics.Product (getField)
import Data.IORef (atomicModifyIORef', newIORef, readIORef)
import qualified Data.Text as T
import Data.Text.Encoding (decodeUtf8)
import Data.Tuple (swap)
import qualified Data.UUID as UUID
import Network.HTTP.Client (Manager, Request (..), RequestBody (..), httpLbs, responseStatus)
import Network.HTTP.Types.Status (Status (statusCode), status400)
import System.Random (newStdGen, random)
import System.Timeout (timeout)
import LaunchDarkly.Server.Client.Internal (Client, Status (ShuttingDown))
import LaunchDarkly.Server.Config.ClientContext
import LaunchDarkly.Server.Config.HttpConfiguration (prepareRequest)
import LaunchDarkly.Server.Events (EventState, processSummary)
import LaunchDarkly.Server.Network.Common (addToAL, checkAuthorization, getServerTime, isHttpUnrecoverable, tryAuthorized, tryHTTP)
processSend :: (MonadIO m, MonadLogger m, MonadMask m, MonadThrow m) => Manager -> Request -> m (Bool, Integer)
processSend manager req =
(liftIO $ tryHTTP $ httpLbs req manager) >>= \case
(Left err) -> $(logError) (T.pack $ show err) >> pure (False, 0)
(Right response) -> do
checkAuthorization response
let code = responseStatus response
serverTime = getServerTime response
in $(logWarn) (T.append "@@@ server time from LD was determined to be: " $ T.pack $ show serverTime)
>> if code < status400
then pure (True, serverTime)
else
if isHttpUnrecoverable $ statusCode $ code
then $(logWarn) (T.append "got non recoverable event post response dropping payload: " $ T.pack $ show code) >> pure (True, serverTime)
else pure (False, serverTime)
setEventHeaders :: Request -> Request
setEventHeaders request =
request
{ requestHeaders =
(requestHeaders request)
& \l ->
addToAL l "Content-Type" "application/json"
& \l -> addToAL l "X-LaunchDarkly-Event-Schema" "4"
, method = "POST"
}
updateLastKnownServerTime :: EventState -> Integer -> IO ()
updateLastKnownServerTime state serverTime = modifyMVar_ (getField @"lastKnownServerTime" state) (\lastKnown -> pure $ max serverTime lastKnown)
eventThread :: (MonadIO m, MonadLogger m, MonadMask m) => Manager -> Client -> ClientContext -> m ()
eventThread manager client clientContext = do
let state = getField @"events" client; config = getField @"config" client; httpConfig = httpConfiguration clientContext
rngRef <- liftIO $ newStdGen >>= newIORef
req <- (liftIO $ prepareRequest httpConfig $ (T.unpack $ getField @"eventsURI" config) ++ "/bulk") >>= pure . setEventHeaders
void $ tryAuthorized client $ forever $ do
liftIO $ processSummary config state
events' <- liftIO $ swapMVar (getField @"events" state) []
when (not $ null events') $ do
payloadId <- liftIO $ atomicModifyIORef' rngRef (swap . random)
let
encoded = encode events'
thisReq =
req
{ requestBody = RequestBodyLBS encoded
, requestHeaders =
(requestHeaders req)
& \l -> addToAL l "X-LaunchDarkly-Payload-ID" (UUID.toASCIIBytes payloadId)
}
(success, serverTime) <- processSend manager thisReq
$(logDebug) $ T.append "sending events: " $ decodeUtf8 $ L.toStrict encoded
_ <- case success of
True -> liftIO $ updateLastKnownServerTime state serverTime
False -> do
$(logWarn) "retrying event delivery after one second"
liftIO $ void $ timeout (1 * 1000000) $ readMVar $ getField @"flush" state
(success', serverTime') <- processSend manager thisReq
unless success' $ do
$(logWarn) "failed sending events on retry, dropping event batch"
liftIO $ updateLastKnownServerTime state serverTime'
$(logDebug) "finished send of event batch"
status <- liftIO $ readIORef $ getField @"status" client
liftIO $ when (status == ShuttingDown) (myThreadId >>= killThread)
liftIO $ void $ timeout ((*) 1000000 $ fromIntegral $ getField @"flushIntervalSeconds" config) $ takeMVar $ getField @"flush" state
|
195dd2775d3382faa1b65df2cbedd62424a91b1589e2fe9320276287c0e3b113 | brandonchinn178/advent-of-code | Day8.hs | {- stack script
--resolver lts-12.12
-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE RecordWildCards #-}
-- | One node of the license tree: any number of child nodes followed
-- by a list of integer metadata entries.
data Node = Node
    { children :: [Node]
    , metadata :: [Int]
    } deriving (Show)
-- | Read the puzzle input from Day8.txt and print both answers.
main :: IO ()
main = do
    input <- parse . head . lines <$> readFile "Day8.txt"
    -- Worked example from the puzzle statement, kept for debugging
    -- (only referenced by the commented-out prints below).
    let example = fromData [2,3,0,3,10,11,12,1,1,0,1,99,2,1,1,2]
    -- print example
    -- print $ part1 example
    print $ part1 input
    -- print $ part2 example
    print $ part2 input
-- | Parse the puzzle input: one line of whitespace-separated integers.
parse :: String -> Node
parse = fromData . map read . words

-- | Decode the flat number list into a tree.  Each node is encoded as
-- [child count, metadata count, children..., metadata...].  Errors out
-- if numbers remain after the root node has been consumed.
fromData :: [Int] -> Node
fromData input = case fromData' input of
    (node, []) -> node
    (_, rest) -> error $ "Not finished processing: " ++ show rest
  where
    -- Consume one node from the stream, returning it and the leftover
    -- input.  (Partial: fewer than two remaining numbers is a
    -- pattern-match failure.)
    fromData' = \case
        0:numMeta:rest ->
            let (metadata, rest') = splitAt numMeta rest
            in (Node{children = [], ..}, rest')
        numChildren:numMeta:rest ->
            -- Apply parseChildren numChildren times; children accumulate
            -- in reverse order and are flipped afterwards.
            let (children', rest') = (!! numChildren) $ iterate parseChildren ([], rest)
                children = reverse children'
                (metadata, rest'') = splitAt numMeta rest'
            in (Node{..}, rest'')
    -- Consume one more child node and push it onto the accumulator.
    parseChildren (acc, input) =
        let (child, rest) = fromData' input
        in (child:acc, rest)
-- | Part 1: the sum of every metadata entry in the whole tree.
part1 :: Node -> Int
part1 node = sum (metadata node) + sum (map part1 (children node))
-- | Part 2 "value" of a node: for a leaf, the sum of its metadata;
-- otherwise each metadata entry is a 1-based child index and the value
-- is the sum of the referenced children's values.  An entry of 0 (or
-- one past the child count) refers to no child and contributes 0.
-- Previously an entry of 0 slipped past the upper-bound check and
-- evaluated 'children !! (-1)', crashing at runtime.
part2 :: Node -> Int
part2 node@Node{..} =
    if null children
        then part1 node
        else sum $ map getChild metadata
  where
    getChild i
        | i < 1 || i > length children = 0
        | otherwise = part2 $ children !! (i - 1)
| null | https://raw.githubusercontent.com/brandonchinn178/advent-of-code/a4b0616f8764620e28759276c333fe37d6685d57/2018/Day8.hs | haskell | stack script
--resolver lts-12.12
print example
print $ part1 example
print $ part2 example | # LANGUAGE LambdaCase #
# LANGUAGE RecordWildCards #
data Node = Node
{ children :: [Node]
, metadata :: [Int]
} deriving (Show)
main :: IO ()
main = do
input <- parse . head . lines <$> readFile "Day8.txt"
let example = fromData [2,3,0,3,10,11,12,1,1,0,1,99,2,1,1,2]
print $ part1 input
print $ part2 input
parse :: String -> Node
parse = fromData . map read . words
fromData :: [Int] -> Node
fromData input = case fromData' input of
(node, []) -> node
(_, rest) -> error $ "Not finished processing: " ++ show rest
where
fromData' = \case
0:numMeta:rest ->
let (metadata, rest') = splitAt numMeta rest
in (Node{children = [], ..}, rest')
numChildren:numMeta:rest ->
let (children', rest') = (!! numChildren) $ iterate parseChildren ([], rest)
children = reverse children'
(metadata, rest'') = splitAt numMeta rest'
in (Node{..}, rest'')
parseChildren (acc, input) =
let (child, rest) = fromData' input
in (child:acc, rest)
part1 :: Node -> Int
part1 Node{..} = sum $ metadata ++ map part1 children
part2 :: Node -> Int
part2 node@Node{..} =
if null children
then part1 node
else sum $ map getChild metadata
where
getChild i = if i > length children then 0 else part2 $ children !! (i - 1)
|
0656aef83780b7001536d5388513961293410ddc6637c94a9d897dc1e2e3ac7d | fyquah/hardcaml_zprize | config_presets.mli | open Field_ops_lib
(** Ready-made configurations of the field-operation cores,
    specialized for the BLS12-377 base field. *)
module For_bls12_377 : sig
  (** Configuration for the Karatsuba-Ofman multiplier. *)
  val karatsuba_ofman_mult_config : Karatsuba_ofman_mult.Config.t

  (** Multiplication step packaged as a reusable [fn]. *)
  val multiply : Ec_fpn_ops_config.fn

  (** Reduction steps packaged as reusable [fn]s. *)
  val montgomery_reduce : Ec_fpn_ops_config.fn
  val barrett_reduce : Ec_fpn_ops_config.fn
  val barrett_reduce_coarse : Ec_fpn_ops_config.fn

  (** Configurations for the two reduction pipelines. *)
  val montgomery_reduction_config : Montgomery_reduction.Config.t
  val barrett_reduction_config : Barrett_reduction.Config.t

  (** Field-operation bundles wired with either reduction scheme. *)
  val ec_fpn_ops_with_barrett_reduction : Ec_fpn_ops_config.t
  val ec_fpn_ops_with_montgomery_reduction : Ec_fpn_ops_config.t
end
| null | https://raw.githubusercontent.com/fyquah/hardcaml_zprize/7eb1bd214908fa801781db33287eaf12691715f8/libs/elliptic_curve/lib/config_presets.mli | ocaml | open Field_ops_lib
module For_bls12_377 : sig
val karatsuba_ofman_mult_config : Karatsuba_ofman_mult.Config.t
val multiply : Ec_fpn_ops_config.fn
val montgomery_reduce : Ec_fpn_ops_config.fn
val barrett_reduce : Ec_fpn_ops_config.fn
val barrett_reduce_coarse : Ec_fpn_ops_config.fn
val montgomery_reduction_config : Montgomery_reduction.Config.t
val barrett_reduction_config : Barrett_reduction.Config.t
val ec_fpn_ops_with_barrett_reduction : Ec_fpn_ops_config.t
val ec_fpn_ops_with_montgomery_reduction : Ec_fpn_ops_config.t
end
| |
012e971cae730082d387b1f63daf3cd61481c16d70753810f9e3da44d6eb7bf5 | mankiw/xiaoxiao_s | dota_sup.erl | -module(dota_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
ws_handler:init_ets(),
Procs = [
{
tick_server,
{tick_server, start_link, []},
permanent,
2000,
worker,
[tick_server]
}
],
{ok, {{one_for_one, 1, 5}, Procs}}.
| null | https://raw.githubusercontent.com/mankiw/xiaoxiao_s/7cc206240f236c6c71398ed60f5445a7d264c7f1/src/dota_sup.erl | erlang | -module(dota_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
start_link() ->
supervisor:start_link({local, ?MODULE}, ?MODULE, []).
init([]) ->
ws_handler:init_ets(),
Procs = [
{
tick_server,
{tick_server, start_link, []},
permanent,
2000,
worker,
[tick_server]
}
],
{ok, {{one_for_one, 1, 5}, Procs}}.
| |
c1420e19fa1d2b300e14074014871f6b87350c0cda17731c269da1150ca0dc64 | UCSD-PL/refscript | Transformations.hs | {-# LANGUAGE ConstraintKinds #-}
{-# LANGUAGE DeriveFunctor #-}
{-# LANGUAGE FlexibleContexts #-}
{-# LANGUAGE FlexibleInstances #-}
{-# LANGUAGE NoMonomorphismRestriction #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE TypeSynonymInstances #-}
{-# LANGUAGE ViewPatterns #-}
module Language.Rsc.Transformations (
Transformable (..), NameTransformable (..), AnnotTransformable (..)
, transFmap, ntransFmap, ntransPure
, emapReft, mapReftM
, replaceDotRef
, replaceAbsolute
, fixFunBinders
) where
import Control.Arrow ((***))
import Control.Monad.State.Strict
import Data.Default
import Data.Functor.Identity
import Data.Generics
import qualified Data.HashSet as HS
import qualified Data.IntMap.Strict as I
import Data.List (find)
import Data.Text (pack, splitOn)
import qualified Data.Traversable as T
import qualified Language.Fixpoint.Types as F
import Language.Fixpoint.Types.Errors
import Language.Fixpoint.Types.Names (suffixSymbol, symbolString)
import qualified Language.Fixpoint.Types.Visitor as FV
import Language.Rsc.Annotations
import Language.Rsc.AST
import Language.Rsc.Core.Env
import Language.Rsc.Errors
import Language.Rsc.Locations
import Language.Rsc.Misc
import Language.Rsc.Module
import Language.Rsc.Names
import Language.Rsc.Pretty
import Language.Rsc.Program
import Language.Rsc.Symbols
import Language.Rsc.Traversals
import Language.Rsc.Typecheck.Types
import Language.Rsc.Types
import Language.Rsc.Visitor
--------------------------------------------------------------------------------
-- | Transformable
--------------------------------------------------------------------------------
-- | Structures parameterised by a name-space @q@ and refinement @r@ that
--   admit a rewrite of every 'RTypeQ' they contain.  The rewriting function
--   receives the type variables (@[TVar]@) and value binders (@[BindQ q r]@)
--   in scope at the position of the type being rewritten.
class Transformable t where
  trans :: F.Reftable r => ([TVar] -> [BindQ q r] -> RTypeQ q r -> RTypeQ q r)
                        -> [TVar] -> [BindQ q r] -> t q r -> t q r
instance Transformable RTypeQ where
  trans = transRType
instance Transformable BindQ where
  -- Rewrite only the bound type; the binder symbol is untouched.
  trans f αs xs b = b { b_type = trans f αs xs $ b_type b }
instance Transformable FactQ where
  trans = transFact
instance Transformable TypeDeclQ where
  -- The declaration's own type parameters come into scope for its members.
  trans f αs xs (TD s@(TS _ b _) p es) = TD (trans f αs xs s) p (trans f αs' xs es)
    where
      αs' = map btvToTV (b_args b) ++ αs
instance Transformable TypeSigQ where
  -- The signature's type parameters scope over its heritage (extends/implements).
  trans f αs xs (TS k b h) = TS k (trans f αs xs b) (transIFDBase f αs' xs h)
    where
      αs' = map btvToTV (b_args b) ++ αs
instance Transformable TypeMembersQ where
  -- Members: props, static props, call/ctor signatures, string/numeric indexers.
  trans f αs xs (TM m sm c k s n) =
    TM (fmap g m) (fmap g sm) (fmap g c) (fmap g k) (fmap (g *** g) s) (fmap (g *** g) n)
    where
      g = trans f αs xs
instance Transformable BTGenQ where
  trans f αs xs (BGen n ts) = BGen n $ trans f αs xs <$> ts
instance Transformable TGenQ where
  trans f αs xs (Gen n ts) = Gen n $ trans f αs xs <$> ts
instance Transformable BTVarQ where
  -- Rewrite the (optional) bound of a bounded type variable.
  trans f αs xs (BTV x l c) = BTV x l $ trans f αs xs <$> c
instance Transformable TypeMemberQ where
  trans f αs xs (FI n o a t') = FI n o (trans f αs xs a) (trans f αs xs t')
  trans f αs xs (MI n o mts) = MI n o (mapSnd (trans f αs xs) <$> mts)
instance Transformable ModuleDefQ where
  -- Rewrite both the variable and the type environments of a module.
  trans f αs xs (ModuleDef v t e p)
    = ModuleDef (envMap (trans f αs xs) v) (envMap (trans f αs xs) t) e p
instance Transformable SymInfoQ where
  trans f αs xs (SI x l a t) = SI x l a $ trans f αs xs t
instance Transformable FAnnQ where
  -- Rewrite every fact attached to an annotation.
  trans f αs xs (FA i s ys) = FA i s $ trans f αs xs <$> ys
-- | Lift a type transformation over any functorial container of annotations
--   (e.g. a statement annotated with 'FAnnQ'), starting with no binders in
--   scope.
transFmap :: (F.Reftable r, Functor thing)
          => ([TVar] -> [BindQ q r] -> RTypeQ q r -> RTypeQ q r)
          -> [TVar] -> thing (FAnnQ q r) -> thing (FAnnQ q r)
transFmap f αs = fmap oneAnn
  where
    -- Transform a single annotation with an empty binder environment.
    oneAnn = trans f αs []
-- | Rewrite both components of a heritage pair (extends, implements).
transIFDBase f αs xs (es, is) = (go <$> es, go <$> is)
  where
    go = trans f αs xs
-- | Apply a type transformation to every type stored inside a 'FactQ'.
--   Facts carrying no types fall through to the final catch-all unchanged.
transFact :: F.Reftable r => ([TVar] -> [BindQ q r] -> RTypeQ q r -> RTypeQ q r)
          -> [TVar] -> [BindQ q r] -> FactQ q r -> FactQ q r
transFact f = go
  where
    go αs xs (TypInst x y ts)    = TypInst x y $ trans f αs xs <$> ts
    go αs xs (EltOverload x m t) = EltOverload x (trans f αs xs m) (trans f αs xs t)
    go αs xs (VarAnn x l a t)    = VarAnn x l a $ trans f αs xs <$> t
    go αs xs (MemberAnn t)       = MemberAnn $ trans f αs xs t
    go αs xs (CtorAnn t)         = CtorAnn $ trans f αs xs t
    go αs xs (UserCast t)        = UserCast $ trans f αs xs t
    go αs xs (SigAnn x l t)      = SigAnn x l $ trans f αs xs t
    go αs xs (ClassAnn l ts)     = ClassAnn l $ trans f αs xs ts
    -- A class invariant stores a bare refinement; wrap it in a trivial type,
    -- transform, and project the refinement back out with 'rTypeR'.
    go αs xs (ClassInvAnn r)     = ClassInvAnn $ rTypeR -- PV: a little indirect
                                               $ trans f αs xs
                                               $ tVoid `strengthen` r
    go αs xs (InterfaceAnn td)   = InterfaceAnn $ trans f αs xs td
    go _  _  t                   = t
-- | transRType: bottom-up traversal of an 'RTypeQ'.  Sub-terms are rewritten
--   first, then @f@ is applied to the reassembled node (except 'TExp', which
--   is returned untouched).
--
-- Binds (αs and bs) accumulate on the left.
--
transRType :: F.Reftable r
           => ([TVar] -> [BindQ q r] -> RTypeQ q r -> RTypeQ q r)
           -> [TVar] -> [BindQ q r] -> RTypeQ q r -> RTypeQ q r
transRType f = go
  where
    go αs xs (TPrim c r)   = f αs xs $ TPrim c r
    go αs xs (TVar v r)    = f αs xs $ TVar v r
    go αs xs (TOr ts r)    = f αs xs $ TOr ts' r     where ts' = go αs xs <$> ts
    go αs xs (TAnd ts)     = f αs xs $ TAnd ts'      where ts' = mapSnd (go αs xs) <$> ts
    go αs xs (TRef n r)    = f αs xs $ TRef n' r     where n'  = trans f αs xs n
    go αs xs (TObj m ms r) = f αs xs $ TObj m' ms' r where m'  = trans f αs xs m
                                                           ms' = trans f αs xs ms
    go αs xs (TClass n)    = f αs xs $ TClass n'     where n'  = trans f αs xs n
    go αs xs (TMod m)      = f αs xs $ TMod m
    -- Quantifier: the bound variable is in scope in the body (but not in its
    -- own bound).
    go αs xs (TAll a t)    = f αs xs $ TAll a' t'    where a'  = trans f αs xs a
                                                           t'  = go αs' xs t
                                                           αs' = αs ++ [btvToTV a]
    -- Function: the formals are in scope in each binder and the result type.
    go αs xs (TFun bs t r) = f αs xs $ TFun bs' t' r where bs' = trans f αs xs' <$> bs
                                                           t'  = go αs xs' t
                                                           xs' = bs ++ xs
    go _  _  (TExp e)      = TExp e
--------------------------------------------------------------------------------
-- | Transform names
--------------------------------------------------------------------------------
-- | Pure wrapper around 'ntrans': run a name/path rewriting with no effects
--   by lifting the pure functions into the 'Identity' monad.
ntransPure :: (NameTransformable t, F.Reftable r) => (QN p -> QN q) -> (QP p -> QP q) -> t p r -> t q r
ntransPure f g x = runIdentity (ntrans (Identity . f) (Identity . g) x)
-- | Structures whose qualified names ('QN') and paths ('QP') can be
--   rewritten, possibly effectfully, from name-space @p@ to name-space @q@.
class NameTransformable t where
  ntrans :: (Monad m, Applicative m, F.Reftable r)
         => (QN p -> m (QN q)) -> (QP p -> m (QP q)) -> t p r -> m (t q r)
instance NameTransformable RTypeQ where
  ntrans = ntransRType
instance NameTransformable BindQ where
  ntrans f g (B s o t) = B s o <$> ntrans f g t
instance NameTransformable FactQ where
  ntrans = ntransFact
instance NameTransformable TypeDeclQ where
  ntrans f g (TD s p m) = TD <$> ntrans f g s <*> pure p <*> ntrans f g m
instance NameTransformable TypeSigQ where
  -- Rewrite the signature binder and both heritage lists.
  ntrans f g (TS k b (e,i))
    = TS k <$> ntrans f g b <*> liftM2 (,) (mapM (ntrans f g) e) (mapM (ntrans f g) i)
instance NameTransformable TypeMembersQ where
  -- Members: props, static props, call/ctor signatures, string/numeric indexers.
  ntrans f g (TM m sm c k s n) = TM <$> T.mapM h m
                                    <*> T.mapM h sm
                                    <*> T.mapM h c
                                    <*> T.mapM h k
                                    <*> T.mapM (\(m_, t_) -> (,) <$> h m_ <*> h t_) s
                                    <*> T.mapM (\(m_, t_) -> (,) <$> h m_ <*> h t_) n
    where
      h = ntrans f g
instance NameTransformable BTGenQ where
  ntrans f g (BGen n ts) = BGen <$> f n <*> mapM (ntrans f g) ts
instance NameTransformable TGenQ where
  ntrans f g (Gen n ts) = Gen <$> f n <*> mapM (ntrans f g) ts
instance NameTransformable BTVarQ where
  -- Rewrite the (optional) bound of a bounded type variable.
  ntrans f g (BTV x l c) = BTV x l <$> T.mapM (ntrans f g) c
instance NameTransformable TypeMemberQ where
  ntrans f g (FI x o m t) = FI x o <$> ntrans f g m <*> ntrans f g t
  ntrans f g (MI x o mts) = MI x o <$> mapM (mapPairM (ntrans f g) (ntrans f g)) mts
-- | Lift a name/path rewriting over any traversable container of
--   annotations (e.g. an annotated statement).
ntransFmap :: (F.Reftable r, Applicative m, Monad m, T.Traversable t)
           => (QN p -> m (QN q)) -> (QP p -> m (QP q)) -> t (FAnnQ p r) -> m (t (FAnnQ q r))
ntransFmap f g = T.mapM (ntrans f g)
-- | Rewrite every name/path stored inside a 'FactQ'.  Facts that carry no
--   names are simply rebuilt (first group of cases).
ntransFact f g = go
  where
    go (PhiVar v)          = pure $ PhiVar v
    go (PhiLoopTC v)       = pure $ PhiLoopTC v
    go (PhiLoop xs)        = pure $ PhiLoop xs
    go (Overload x m i)    = pure $ Overload x m i
    go (EnumAnn e)         = pure $ EnumAnn e
    go (BypassUnique)      = pure $ BypassUnique
    go (DeadCast x es)     = pure $ DeadCast x es
    go (TypeCast x t)      = pure $ TypeCast x t -- TODO: transform this?
    go (ClassInvAnn r)     = pure $ ClassInvAnn r
    go (ModuleAnn l m)     = ModuleAnn l <$> g m
    go (TypInst x y ts)    = TypInst x y <$> mapM (ntrans f g) ts
    go (EltOverload x m t) = EltOverload x <$> ntrans f g m <*> ntrans f g t
    go (VarAnn x l a t)    = VarAnn x l a <$> T.mapM (ntrans f g) t
    go (MemberAnn t)       = MemberAnn <$> ntrans f g t
    go (CtorAnn t)         = CtorAnn <$> ntrans f g t
    go (UserCast t)        = UserCast <$> ntrans f g t
    go (SigAnn x l t)      = SigAnn x l <$> ntrans f g t
    go (ClassAnn l t)      = ClassAnn l <$> ntrans f g t
    go (InterfaceAnn t)    = InterfaceAnn <$> ntrans f g t
-- | Effectful traversal of an 'RTypeQ' rewriting each qualified name with
--   @f@ and each qualified path with @g@.
ntransRType :: (Monad m, Applicative m, F.Reftable r)
            => (QN p -> m (QN q)) -> (QP p -> m (QP q)) -> RTypeQ p r -> m (RTypeQ q r)
ntransRType f g t = go t
  where
    go (TPrim p r)   = pure $ TPrim p r
    go (TVar v r)    = pure $ TVar v r
    go (TExp e)      = pure $ TExp e
    go (TOr ts r)    = TOr <$> ts'
                           <*> pure r  where ts' = mapM go ts
    go (TAnd ts)     = TAnd <$> ts'    where ts' = mapM (mapSndM go) ts
    go (TRef n r)    = TRef <$> n'
                           <*> pure r  where n'  = ntrans f g n
    go (TObj m ms r) = TObj <$> m'
                            <*> ms'
                            <*> pure r where m'  = ntrans f g m
                                             ms' = ntrans f g ms
    go (TClass n)    = TClass <$> n'   where n'  = ntrans f g n
    go (TMod p)      = TMod <$> p'     where p'  = g p
    go (TAll a t)    = TAll <$> a'
                            <*> t'     where a'  = ntrans f g a
                                             t'  = go t
    go (TFun bs t r) = TFun <$> bs'
                            <*> t'
                            <*> pure r where bs' = mapM (ntrans f g) bs
                                             t'  = go t
instance NameTransformable FAnnQ where
  -- Rewrite names/paths in every fact attached to the annotation.
  ntrans f g (FA i s ys) = FA i s <$> T.mapM (ntrans f g) ys
--------------------------------------------------------------------------------
-- | Transformers over @RType@
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- | Map a refinement transformer @f@ over a type.  @f@ receives the value
--   binders (symbols) in scope at each refinement's position; @γ@ is the
--   initial symbol environment.
emapReft :: PPR r => ([F.Symbol] -> r -> r') -> [F.Symbol] -> RTypeQ q r -> RTypeQ q r'
--------------------------------------------------------------------------------
emapReft f γ (TVar α r)     = TVar α (f γ r)
emapReft f γ (TPrim c r)    = TPrim c (f γ r)
emapReft f γ (TRef n r)     = TRef (emapReftGen f γ n) (f γ r)
emapReft f γ (TAll α t)     = TAll (emapReftBTV f γ α) (emapReft f γ t)
-- Function binders scope over the argument/result refinements, but NOT over
-- the function type's own refinement (hence @f γ r@, not @f γ' r@).
emapReft f γ (TFun xts t r) = TFun (emapReftBind f γ' <$> xts)
                                   (emapReft f γ' t) (f γ r)
                              where γ' = (b_sym <$> xts) ++ γ
emapReft f γ (TObj m xts r) = TObj (emapReft f γ m) (emapReftTM f γ xts) (f γ r)
emapReft f γ (TClass n)     = TClass (emapReftBGen f γ n)
emapReft _ _ (TMod m)       = TMod m
emapReft f γ (TOr ts r)     = TOr (emapReft f γ <$> ts) (f γ r)
emapReft f γ (TAnd ts)      = TAnd (mapSnd (emapReft f γ) <$> ts)
emapReft _ _ _              = error "Not supported in emapReft"
-- Helpers for 'emapReft' on the auxiliary syntactic categories.
emapReftBTV f γ (BTV s l c)  = BTV s l $ emapReft f γ <$> c
emapReftGen f γ (Gen n ts)   = Gen n $ emapReft f γ <$> ts
emapReftBGen f γ (BGen n ts) = BGen n $ emapReftBTV f γ <$> ts
emapReftBind f γ (B x o t)   = B x o $ emapReft f γ t
-- Type members: props, static props, call/ctor signatures, indexer pairs.
emapReftTM f γ (TM m sm c k s n)
  = TM (fmap (emapReftElt f γ) m)
       (fmap (emapReftElt f γ) sm)
       (emapReft f γ <$> c)
       (emapReft f γ <$> k)
       ((emapReft f γ *** emapReft f γ) <$> s)
       ((emapReft f γ *** emapReft f γ) <$> n)
emapReftElt f γ (FI x m a t) = FI x m (emapReft f γ a) (emapReft f γ t)
emapReftElt f γ (MI x m mts) = MI x m (mapPair (emapReft f γ) <$> mts)
--------------------------------------------------------------------------------
-- | Monadic map over every refinement of a type (no symbol environment is
--   threaded, unlike 'emapReft').  Errors out on type forms that should not
--   appear at this stage ('TExp').
mapReftM :: (F.Reftable r, PP r, Applicative m, Monad m)
         => (r -> m r') -> RTypeQ q r -> m (RTypeQ q r')
--------------------------------------------------------------------------------
mapReftM f (TVar α r)     = TVar α <$> f r
mapReftM f (TPrim c r)    = TPrim c <$> f r
mapReftM f (TRef n r)     = TRef <$> mapReftGenM f n <*> f r
mapReftM f (TFun xts t r) = TFun <$> mapM (mapReftBindM f) xts <*> mapReftM f t <*> f r
mapReftM f (TAll α t)     = TAll <$> mapReftBTV f α <*> mapReftM f t
mapReftM f (TAnd ts)      = TAnd <$> mapM (mapSndM (mapReftM f)) ts
mapReftM f (TOr ts r)     = TOr <$> mapM (mapReftM f) ts <*> f r
mapReftM f (TObj m xts r) = TObj <$> mapReftM f m
                                 <*> mapTypeMembers f xts <*> f r
mapReftM f (TClass n)     = TClass <$> mapReftBGenM f n
mapReftM _ (TMod a)       = TMod <$> pure a
mapReftM _ t              = error $ "Not supported in mapReftM: " ++ ppshow t
-- Helpers for 'mapReftM' on the auxiliary syntactic categories.
mapReftBTV f (BTV s l c)   = BTV s l <$> T.mapM (mapReftM f) c
mapReftGenM f (Gen n ts)   = Gen n <$> mapM (mapReftM f) ts
mapReftBGenM f (BGen n ts) = BGen n <$> mapM (mapReftBTV f) ts
mapReftBindM f (B x o t)   = B x o <$> mapReftM f t
-- Type members: props, static props, call/ctor signatures, indexer pairs.
mapTypeMembers f (TM m sm c k s n)
  = TM <$> T.mapM (mapReftElt f) m
       <*> T.mapM (mapReftElt f) sm
       <*> T.mapM (mapReftM f) c
       <*> T.mapM (mapReftM f) k
       <*> T.mapM (\(m_,t_) -> (,) <$> mapReftM f m_ <*> mapReftM f t_) s
       <*> T.mapM (\(m_,t_) -> (,) <$> mapReftM f m_ <*> mapReftM f t_) n
mapReftElt f (FI x m a t)  = FI x m <$> mapReftM f a <*> mapReftM f t
mapReftElt f (MI x m mts)  = MI x m <$> mapM (mapPairM (mapReftM f) (mapReftM f)) mts
--------------------------------------------------------------------------------
-- | Replace all relatively qualified names/paths with absolute ones.
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- | Resolve every relative name/path in the program against the names and
--   paths actually declared in it.  Returns 'Left' with all resolution
--   errors if any name/path is unbound.
replaceAbsolute
  :: (PPR r, Data r, Typeable r) => BareRelRsc r -> Either [Error] (BareRsc r)
--------------------------------------------------------------------------------
replaceAbsolute pgm@(Rsc { code = Src ss }) =
    case sOut of
      [] -> Right (pgm { code = Src ss' })
      _  -> Left sOut
  where
    -- Rewrite each annotation; unresolved names accumulate in the State.
    (ss', sOut)  = runState (mapM (T.mapM (\l -> ntrans (safeAbsName l) (safeAbsPath l) l)) ss) []
    (ns, ps)     = accumNamesAndPaths ss
    safeAbsName l a@(absAct (absoluteName ns) l -> n)
      | Just a' <- n = return a'
      | Nothing <- n
      , isAlias a    = return $ toAbsoluteName a  -- type aliases need no resolution
      | otherwise    = modify (errorUnboundName (srcPos l) a:) >> pure (mkAbsName [] a)
    safeAbsPath l a@(absAct (absolutePath ps) l -> n)
      | Just a' <- n = return a'
      | otherwise    = modify (errorUnboundPath (srcPos l) a:) >> pure (mkAbsPath [])
    -- Top-level names that are registered type aliases of the program.
    isAlias (QN (QP RK_ _ []) s) = envMem s $ tAlias pgm
    isAlias (QN _ _)             = False
    -- Resolve relative to the absolute path enclosing the annotation's id.
    absAct f l a = I.lookup (fId l) mm >>= (`f` a)
    -- Map from annotation id to the absolute module path enclosing it.
    mm = snd $ visitStmts vs (QP AK_ def []) ss
    vs = defaultVisitor { ctxStmt = cStmt }
                        { accStmt = acc }
                        { accExpr = acc }
                        { accCElt = acc }
                        { accVDec = acc }
    -- Entering a module statement extends the current absolute path.
    cStmt (QP AK_ l p) (ModuleStmt _ x _)
             = QP AK_ l $ p ++ [F.symbol x]
    cStmt q _ = q
    acc c s   = I.singleton (fId a) c where a = getAnnotation s
--------------------------------------------------------------------------------
-- | Replace `a.b.c...z` with `offset(offset(...(offset(a),"b"),"c"),...,"z")`
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
-- | Rewrite dotted variable references inside refinement expressions:
--   @a.b.c@ becomes @offset(offset(a, "b"), "c")@, in code annotations,
--   type/predicate aliases and invariants alike.
replaceDotRef :: RefScript -> RefScript
--------------------------------------------------------------------------------
replaceDotRef p@(Rsc { code = Src fs, tAlias = ta, pAlias = pa, invts = is })
  = p { code   = Src $ tf <##> fs
      , tAlias = trans tt [] [] <###> ta
      , pAlias = tt [] [] <##> pa
      , invts  = trans tt [] [] <##> is
      }
  where
    -- Rewrite every fact attached to an annotation.
    tf (FA l a facts) = FA l a (map (trans tt [] []) facts)
    -- Rewrite all fixpoint expressions embedded in a type's refinements.
    tt _ _ = fmap $ FV.trans vs () ()
    vs = FV.defaultVisitor { FV.txExpr = tx }
    -- A variable whose symbol contains a dot is expanded to offset chains.
    tx _ (F.EVar s) | (x:y:zs) <- pack "." `splitOn` pack (symbolString s)
                    = foldl offset (F.eVar x) (y:zs)
    tx _ e          = e
    offset k v      = F.mkEApp offsetLocSym [F.expr k, F.expr v]
--
-- XXX: Treat this at lookup
--
-- --------------------------------------------------------------------------------
-- -- | Replace `TRef x _ _` where `x` is a name for an enumeration with `number`
-- --------------------------------------------------------------------------------
--
-- --------------------------------------------------------------------------------
: : PPR r = > QEnv ( ModuleDef r ) - > BareRsc r - > ( QEnv ( ModuleDef r ) , r )
-- --------------------------------------------------------------------------------
fixEnums m p@(Rsc { code = Src ss } ) = ( m',p ' )
-- where
-- p' = p { code = Src $ (trans f [] [] <$>) <$> ss }
m ' = fixEnumsInModule m ` qenvMap ` m
-- f _ _ = fixEnumInType m
--
fixEnumInType : : F.Reftable r = > QEnv ( ModuleDef r ) - > RType r - > RType r
fixEnumInType ms ( ( Gen ( QN p x ) [ ] ) r )
-- | Just m <- qenvFindTy p ms
, Just e < - envFindTy x $ m_enums m
-- = if isBvEnum e then tBV32 `strengthen` r
else tNum ` strengthen ` r
-- fixEnumInType _ t = t
--
fixEnumsInModule : : F.Reftable r = > QEnv ( ModuleDef r ) - > ModuleDef r - > ModuleDef r
fixEnumsInModule m = trans ( const $ const $ fixEnumInType m ) [ ] [ ]
--
--------------------------------------------------------------------------------
-- | Add a '#' at the end of every function binder (to avoid capture)
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
fixFunBinders :: RefScript -> RefScript
--------------------------------------------------------------------------------
fixFunBinders p@(Rsc { code = Src ss }) = p'
  where
    p' = p { code = Src $ (trans fixFunBindersInType [] [] <$>) <$> ss }
-- | At each function type, rename every binder @x@ to @x#@ (via
--   'suffixSymbol' with an empty suffix) and substitute accordingly in the
--   refinements, so function binders cannot capture program variables.
fixFunBindersInType _ bs = go
  where
    ks  = [ y | B y _ _ <- bs ]           -- binders in scope at this type
    ss  = (`suffixSymbol` F.symbol "")    -- x  |->  x#
    ks' = map (F.eVar . ss) ks
    sub :: F.Subable a => a -> a
    sub = F.subst (F.mkSubst (zip ks ks'))
    go (TFun bs t r) = TFun [B (ss s) o ts | B s o ts <- bs] t (sub r)
    go t             = toplevel sub t
-- When constructing the substitution hashmap, if the list contains duplicate
-- mappings, the later mappings take precedence.
--------------------------------------------------------------------------------
-- | Spec Transformer
--------------------------------------------------------------------------------
-- | Annotation transformers over the AST: each node's annotation is mapped
--   from @a@ to @b@ with @f ctx a@, while a context is threaded top-down,
--   updated with @g ctx b@ before descending into the node's children.
class AnnotTransformable t where
  strans :: (ctx -> a -> b) -> (ctx -> b -> ctx) -> ctx -> t a -> t b
instance AnnotTransformable Statement where
  strans = stransStatement
instance AnnotTransformable Expression where
  strans = stransExpression
instance AnnotTransformable Id where
  strans = stransId
instance AnnotTransformable ForInInit where
  strans = stransForInInit
instance AnnotTransformable ForInit where
  strans = stransForInit
instance AnnotTransformable CatchClause where
  strans = stransCatchClause
instance AnnotTransformable VarDecl where
  strans = stransVarDecl
instance AnnotTransformable ClassElt where
  strans = stransClassElt
instance AnnotTransformable EnumElt where
  strans = stransEnumElt
instance AnnotTransformable Prop where
  strans = stransProp
instance AnnotTransformable LValue where
  strans = stransLvalue
-- | 'strans' for statements: transform this node's annotation, then recurse
--   into children with the updated context.
stransStatement f g ctx st = go st
  where
    a    = getAnnotation st   -- this node's annotation
    b    = f ctx a            -- its transformed annotation
    ctx' = g ctx b            -- context pushed to the children
    ss   = strans f g ctx'    -- recursive call on any child
    go (BlockStmt _ sts)         = BlockStmt b (ss <$> sts)
    go (EmptyStmt _)             = EmptyStmt b
    go (ExprStmt _ e)            = ExprStmt b (ss e)
    go (IfStmt _ e s1 s2)        = IfStmt b (ss e) (ss s1) (ss s2)
    go (IfSingleStmt _ e s)      = IfSingleStmt b (ss e) (ss s)
    go (WhileStmt _ e s)         = WhileStmt b (ss e) (ss s)
    go (DoWhileStmt _ s e)       = DoWhileStmt b (ss s) (ss e)
    go (BreakStmt _ i)           = BreakStmt b (ss <$> i)
    go (ContinueStmt _ i)        = ContinueStmt b (ss <$> i)
    go (LabelledStmt _ i s)      = LabelledStmt b (ss i) (ss s)
    go (ForInStmt _ fi e s)      = ForInStmt b (ss fi) (ss e) (ss s)
    go (ForStmt _ fi me1 me2 s)  = ForStmt b (ss fi) (ss <$> me1) (ss <$> me2) (ss s)
    go (TryStmt _ s mcc ms)      = TryStmt b (ss s) (ss <$> mcc) (ss <$> ms)
    go (ThrowStmt _ e)           = ThrowStmt b (ss e)
    go (ReturnStmt _ me)         = ReturnStmt b (ss <$> me)
    go (WithStmt _ e s)          = WithStmt b (ss e) (ss s)
    go (VarDeclStmt _ vs)        = VarDeclStmt b (ss <$> vs)
    go (FunctionStmt _ i is mss) = FunctionStmt b (ss i) (ss <$> is) ((ss <$>) <$> mss)
    go (ClassStmt _ i cs)        = ClassStmt b (ss i) (ss <$> cs)
    go (ModuleStmt _ i sts)      = ModuleStmt b (ss i) (ss <$> sts)
    go (InterfaceStmt _ i)       = InterfaceStmt b (ss i)
    go (EnumStmt _ i es)         = EnumStmt b (ss i) (ss <$> es)
    go s                         = error $ "[unimplemented] stransStatement for " ++ ppshow s
-- | 'strans' for expressions: transform this node's annotation, then recurse
--   into children with the updated context.
stransExpression f g ctx exp = go exp
  where
    a    = getAnnotation exp  -- this node's annotation
    b    = f ctx a            -- its transformed annotation
    ctx' = g ctx b            -- context pushed to the children
    ss   = strans f g ctx'    -- recursive call on any child
    go (StringLit _ s)          = StringLit b s
    go (RegexpLit _ s b1 b2)    = RegexpLit b s b1 b2
    go (NumLit _ d)             = NumLit b d
    go (IntLit _ i)             = IntLit b i
    go (BoolLit _ bl)           = BoolLit b bl
    go (NullLit _)              = NullLit b
    go (ArrayLit _ es)          = ArrayLit b (ss <$> es)
    go (ObjectLit _ pes)        = ObjectLit b ((\(p,e) -> (ss p, ss e)) <$> pes)
    go (HexLit _ s)             = HexLit b s
    go (ThisRef _)              = ThisRef b
    go (VarRef _ i)             = VarRef b (ss i)
    go (DotRef _ e i)           = DotRef b (ss e) (ss i)
    go (BracketRef _ e1 e2)     = BracketRef b (ss e1) (ss e2)
    go (NewExpr _ e es)         = NewExpr b (ss e) (ss <$> es)
    go (PrefixExpr _ op e)      = PrefixExpr b op (ss e)
    go (UnaryAssignExpr _ op l) = UnaryAssignExpr b op (ss l)
    go (InfixExpr _ op e1 e2)   = InfixExpr b op (ss e1) (ss e2)
    go (CondExpr _ e1 e2 e3)    = CondExpr b (ss e1) (ss e2) (ss e3)
    go (AssignExpr _ op l e)    = AssignExpr b op (ss l) (ss e)
    go (ListExpr _ es)          = ListExpr b (ss <$> es)
    go (CallExpr _ e es)        = CallExpr b (ss e) (ss <$> es)
    go (SuperRef _)             = SuperRef b
    go (FuncExpr _ mi is sts)   = FuncExpr b (ss <$> mi) (ss <$> is) (ss <$> sts)
    go (Cast _ e)               = Cast b (ss e)
    go (Cast_ _ e)              = Cast_ b (ss e)
-- 'Id' is a leaf: only its annotation is rewritten, the context is unused.
stransId f _ ctx (Id a s) = Id (f ctx a) s
stransForInInit f g ctx (ForInVar i)  = ForInVar (strans f g ctx i)
stransForInInit f g ctx (ForInLVal i) = ForInLVal (strans f g ctx i)
stransForInit _ _ _ NoInit            = NoInit
stransForInit f g ctx (VarInit vs)    = VarInit (strans f g ctx <$> vs)
stransForInit f g ctx (ExprInit e)    = ExprInit (strans f g ctx e)
-- Transform the clause's annotation; children see the updated context.
stransCatchClause f g ctx (CatchClause a i s) = CatchClause b (ss i) (ss s)
  where
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'
-- Transform the declaration's annotation; children see the updated context.
stransVarDecl f g ctx (VarDecl a i me) = VarDecl b (ss i) (ss <$> me)
  where
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'
-- | 'strans' for class members: ctor, member variables and member methods.
stransClassElt f g ctx ce = go ce
  where
    a    = getAnnotation ce
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'
    go (Constructor _ is sts)         = Constructor b (ss <$> is) (ss <$> sts)
    go (MemberVarDecl _ st i me)      = MemberVarDecl b st (ss i) (ss <$> me)
    go (MemberMethDecl _ st i is sts) = MemberMethDecl b st (ss i) (ss <$> is) (ss <$> sts)
-- 'strans' for enum members: annotation first, children get updated context.
stransEnumElt f g ctx (EnumElt a i e) = EnumElt b (ss i) (ss e)
  where
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'
-- 'strans' for object-literal property keys.
stransProp f g ctx p = go p
  where
    a    = getAnnotation p
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'
    go (PropId _ i)     = PropId b (ss i)
    go (PropString _ s) = PropString b s
    go (PropNum _ i)    = PropNum b i
-- 'strans' for assignment targets (lvalues).
stransLvalue f g ctx lv = go lv
  where
    a    = getAnnotation lv
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'
    go (LVar _ s)         = LVar b s
    go (LDot _ e s)       = LDot b (ss e) s
    go (LBracket _ e1 e2) = LBracket b (ss e1) (ss e2)
--------------------------------------------------------------------------
-- | Name transformation
--------------------------------------------------------------------------
-- | `absoluteName env p r` returns `Just a` where `a` is the absolute path of
-- the relative name `r` when referenced in the context of the absolute path
-- `p`; `Nothing` otherwise.
--
If p = and r = C.D.E then the paths that will be checked in this
-- order are:
--
-- A.B.C.C.D.E
A.B.C.D.E
--
---------------------------------------------------------------------------------
-- | @absoluteName ns p r@ resolves the relative name @r@ against the
--   absolute path @p@: each prefix of @p@ (longest first, so the innermost
--   enclosing module wins) is prepended to @r@'s path, and the first
--   candidate found in the declared-name set @ns@ is returned.
absoluteName :: HS.HashSet AbsName -> AbsPath -> RelName -> Maybe AbsName
---------------------------------------------------------------------------------
absoluteName ns (QP AK_ _ p) (QN (QP RK_ _ ss) s) =
    find (`HS.member` ns) $ (`mkAbsName` s) . (++ ss) <$> prefixes p
  where
    -- All prefixes of @p@, longest first.  The hand-rolled @suffixes@ this
    -- replaces is exactly 'Data.List.tails'.
    prefixes = map reverse . tails . reverse
---------------------------------------------------------------------------------
-- | @absolutePath ps p r@ resolves the relative path @r@ against the
--   absolute path @p@, analogously to 'absoluteName': try each prefix of
--   @p@, longest first, and keep the first candidate declared in @ps@.
absolutePath :: HS.HashSet AbsPath -> AbsPath -> RelPath -> Maybe AbsPath
---------------------------------------------------------------------------------
absolutePath ps (QP AK_ _ p) (QP RK_ _ ss) =
    find (`HS.member` ps) $ mkAbsPath . (++ ss) <$> prefixes p
  where
    -- All prefixes of @p@, longest first ('tails' of the reversed list).
    prefixes = map reverse . tails . reverse
-- | Rebrand a relative name as absolute, keeping its path components as-is
--   (used for names, e.g. type aliases, that need no module resolution).
toAbsoluteName (QN (QP RK_ l ss) s) = QN absPath s
  where
    absPath = QP AK_ l ss
| null | https://raw.githubusercontent.com/UCSD-PL/refscript/884306fef72248ac41ecdbb928bbd7b06ca71bd4/src/Language/Rsc/Transformations.hs | haskell | # LANGUAGE ConstraintKinds #
# LANGUAGE TypeSynonymInstances #
# LANGUAGE ViewPatterns #
------------------------------------------------------------------------------
| Transformable
------------------------------------------------------------------------------
PV: a little indirect
Binds (αs and bs) accumulate on the left.
------------------------------------------------------------------------------
| Transform names
------------------------------------------------------------------------------
TODO: transform this?
------------------------------------------------------------------------------
| Transformers over @RType@
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Replace all relatively qualified names/paths with absolute ones.
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
| Replace `a.b.c...z` with `offset(offset(...(offset(a),"b"),"c"),...,"z")`
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
XXX: Treat this at lookup
--------------------------------------------------------------------------------
-- | Replace `TRef x _ _` where `x` is a name for an enumeration with `number`
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
--------------------------------------------------------------------------------
where
p' = p { code = Src $ (trans f [] [] <$>) <$> ss }
f _ _ = fixEnumInType m
| Just m <- qenvFindTy p ms
= if isBvEnum e then tBV32 `strengthen` r
fixEnumInType _ t = t
------------------------------------------------------------------------------
| Add a '#' at the end of every function binder (to avoid capture)
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
When costructing the substitution haskmap, if the list contains duplicate
mappings, the later mappings take precedence.
------------------------------------------------------------------------------
| Spec Transformer
------------------------------------------------------------------------------
------------------------------------------------------------------------
| Name transformation
------------------------------------------------------------------------
| `absoluteName env p r` returns `Just a` where `a` is the absolute path of
the relative name `r` when referenced in the context of the absolute path
`p`; `Nothing` otherwise.
order are:
A.B.C.C.D.E
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
-------------------------------------------------------------------------------
------------------------------------------------------------------------------- | # LANGUAGE DeriveFunctor #
# LANGUAGE FlexibleContexts #
# LANGUAGE FlexibleInstances #
# LANGUAGE NoMonomorphismRestriction #
# LANGUAGE TupleSections #
module Language.Rsc.Transformations (
Transformable (..), NameTransformable (..), AnnotTransformable (..)
, transFmap, ntransFmap, ntransPure
, emapReft, mapReftM
, replaceDotRef
, replaceAbsolute
, fixFunBinders
) where
import Control.Arrow ((***))
import Control.Monad.State.Strict
import Data.Default
import Data.Functor.Identity
import Data.Generics
import qualified Data.HashSet as HS
import qualified Data.IntMap.Strict as I
import Data.List (find)
import Data.Text (pack, splitOn)
import qualified Data.Traversable as T
import qualified Language.Fixpoint.Types as F
import Language.Fixpoint.Types.Errors
import Language.Fixpoint.Types.Names (suffixSymbol, symbolString)
import qualified Language.Fixpoint.Types.Visitor as FV
import Language.Rsc.Annotations
import Language.Rsc.AST
import Language.Rsc.Core.Env
import Language.Rsc.Errors
import Language.Rsc.Locations
import Language.Rsc.Misc
import Language.Rsc.Module
import Language.Rsc.Names
import Language.Rsc.Pretty
import Language.Rsc.Program
import Language.Rsc.Symbols
import Language.Rsc.Traversals
import Language.Rsc.Typecheck.Types
import Language.Rsc.Types
import Language.Rsc.Visitor
class Transformable t where
trans :: F.Reftable r => ([TVar] -> [BindQ q r] -> RTypeQ q r -> RTypeQ q r)
-> [TVar] -> [BindQ q r] -> t q r -> t q r
instance Transformable RTypeQ where
trans = transRType
instance Transformable BindQ where
trans f αs xs b = b { b_type = trans f αs xs $ b_type b }
instance Transformable FactQ where
trans = transFact
instance Transformable TypeDeclQ where
trans f αs xs (TD s@(TS _ b _) p es) = TD (trans f αs xs s) p (trans f αs' xs es)
where
αs' = map btvToTV (b_args b) ++ αs
instance Transformable TypeSigQ where
trans f αs xs (TS k b h) = TS k (trans f αs xs b) (transIFDBase f αs' xs h)
where
αs' = map btvToTV (b_args b) ++ αs
instance Transformable TypeMembersQ where
trans f αs xs (TM m sm c k s n) =
TM (fmap g m) (fmap g sm) (fmap g c) (fmap g k) (fmap (g *** g) s) (fmap (g *** g) n)
where
g = trans f αs xs
instance Transformable BTGenQ where
trans f αs xs (BGen n ts) = BGen n $ trans f αs xs <$> ts
instance Transformable TGenQ where
trans f αs xs (Gen n ts) = Gen n $ trans f αs xs <$> ts
instance Transformable BTVarQ where
trans f αs xs (BTV x l c) = BTV x l $ trans f αs xs <$> c
instance Transformable TypeMemberQ where
trans f αs xs (FI n o a t') = FI n o (trans f αs xs a) (trans f αs xs t')
trans f αs xs (MI n o mts) = MI n o (mapSnd (trans f αs xs) <$> mts)
instance Transformable ModuleDefQ where
trans f αs xs (ModuleDef v t e p)
= ModuleDef (envMap (trans f αs xs) v) (envMap (trans f αs xs) t) e p
instance Transformable SymInfoQ where
trans f αs xs (SI x l a t) = SI x l a $ trans f αs xs t
instance Transformable FAnnQ where
trans f αs xs (FA i s ys) = FA i s $ trans f αs xs <$> ys
transFmap :: (F.Reftable r, Functor thing)
=> ([TVar] -> [BindQ q r] -> RTypeQ q r -> RTypeQ q r)
-> [TVar] -> thing (FAnnQ q r) -> thing (FAnnQ q r)
transFmap f αs = fmap (trans f αs [])
transIFDBase f αs xs (es,is) = (trans f αs xs <$> es, trans f αs xs <$> is)
transFact :: F.Reftable r => ([TVar] -> [BindQ q r] -> RTypeQ q r -> RTypeQ q r)
-> [TVar] -> [BindQ q r] -> FactQ q r -> FactQ q r
transFact f = go
where
go αs xs (TypInst x y ts) = TypInst x y $ trans f αs xs <$> ts
go αs xs (EltOverload x m t) = EltOverload x (trans f αs xs m) (trans f αs xs t)
go αs xs (VarAnn x l a t) = VarAnn x l a $ trans f αs xs <$> t
go αs xs (MemberAnn t) = MemberAnn $ trans f αs xs t
go αs xs (CtorAnn t) = CtorAnn $ trans f αs xs t
go αs xs (UserCast t) = UserCast $ trans f αs xs t
go αs xs (SigAnn x l t) = SigAnn x l $ trans f αs xs t
go αs xs (ClassAnn l ts) = ClassAnn l $ trans f αs xs ts
$ trans f αs xs
$ tVoid `strengthen` r
go αs xs (InterfaceAnn td) = InterfaceAnn $ trans f αs xs td
go _ _ t = t
| transRType :
transRType :: F.Reftable r
=> ([TVar] -> [BindQ q r] -> RTypeQ q r -> RTypeQ q r)
-> [TVar] -> [BindQ q r] -> RTypeQ q r -> RTypeQ q r
transRType f = go
where
go αs xs (TPrim c r) = f αs xs $ TPrim c r
go αs xs (TVar v r) = f αs xs $ TVar v r
go αs xs (TOr ts r) = f αs xs $ TOr ts' r where ts' = go αs xs <$> ts
go αs xs (TAnd ts) = f αs xs $ TAnd ts' where ts' = mapSnd (go αs xs) <$> ts
go αs xs (TRef n r) = f αs xs $ TRef n' r where n' = trans f αs xs n
go αs xs (TObj m ms r) = f αs xs $ TObj m' ms' r where m' = trans f αs xs m
ms' = trans f αs xs ms
go αs xs (TClass n) = f αs xs $ TClass n' where n' = trans f αs xs n
go αs xs (TMod m) = f αs xs $ TMod m
go αs xs (TAll a t) = f αs xs $ TAll a' t' where a' = trans f αs xs a
t' = go αs' xs t
αs' = αs ++ [btvToTV a]
go αs xs (TFun bs t r) = f αs xs $ TFun bs' t' r where bs' = trans f αs xs' <$> bs
t' = go αs xs' t
xs' = bs ++ xs
go _ _ (TExp e) = TExp e
ntransPure :: (NameTransformable t, F.Reftable r) => (QN p -> QN q) -> (QP p -> QP q) -> t p r -> t q r
ntransPure f g a = runIdentity (ntrans f' g' a)
where g' = return . g
f' = return . f
-- | Types whose embedded qualified names (QN) and paths (QP) can be
--   rewritten monadically from namespace @p@ to namespace @q@.
class NameTransformable t where
  ntrans :: (Monad m, Applicative m, F.Reftable r)
         => (QN p -> m (QN q)) -> (QP p -> m (QP q)) -> t p r -> m (t q r)

instance NameTransformable RTypeQ where
  ntrans = ntransRType

instance NameTransformable BindQ where
  ntrans f g (B s o t) = B s o <$> ntrans f g t

instance NameTransformable FactQ where
  ntrans = ntransFact

instance NameTransformable TypeDeclQ where
  ntrans f g (TD s p m) = TD <$> ntrans f g s <*> pure p <*> ntrans f g m

instance NameTransformable TypeSigQ where
  ntrans f g (TS k b (e,i))
    = TS k <$> ntrans f g b <*> liftM2 (,) (mapM (ntrans f g) e) (mapM (ntrans f g) i)

-- Rewrite names in every member group of a type-members record.
instance NameTransformable TypeMembersQ where
  ntrans f g (TM m sm c k s n) = TM <$> T.mapM h m
                                    <*> T.mapM h sm
                                    <*> T.mapM h c
                                    <*> T.mapM h k
                                    <*> T.mapM (\(m_, t_) -> (,) <$> h m_ <*> h t_) s
                                    <*> T.mapM (\(m_, t_) -> (,) <$> h m_ <*> h t_) n
    where
      h = ntrans f g

instance NameTransformable BTGenQ where
  ntrans f g (BGen n ts) = BGen <$> f n <*> mapM (ntrans f g) ts

instance NameTransformable TGenQ where
  ntrans f g (Gen n ts) = Gen <$> f n <*> mapM (ntrans f g) ts

instance NameTransformable BTVarQ where
  ntrans f g (BTV x l c) = BTV x l <$> T.mapM (ntrans f g) c

instance NameTransformable TypeMemberQ where
  ntrans f g (FI x o m t) = FI x o <$> ntrans f g m <*> ntrans f g t
  ntrans f g (MI x o mts) = MI x o <$> mapM (mapPairM (ntrans f g) (ntrans f g)) mts

-- Traverse a container of annotations, applying 'ntrans' to each.
ntransFmap :: (F.Reftable r, Applicative m, Monad m, T.Traversable t)
           => (QN p -> m (QN q)) -> (QP p -> m (QP q)) -> t (FAnnQ p r) -> m (t (FAnnQ q r))
ntransFmap f g x = T.mapM (ntrans f g) x
-- Monadic name-rewriting over Facts: cases with no embedded names are
-- returned unchanged via 'pure'; the rest rewrite their payload with
-- 'ntrans' (or the path mapper @g@ for module annotations).
ntransFact f g = go
  where
    go (PhiVar v)          = pure $ PhiVar v
    go (PhiLoopTC v)       = pure $ PhiLoopTC v
    go (PhiLoop xs)        = pure $ PhiLoop xs
    go (Overload x m i)    = pure $ Overload x m i
    go (EnumAnn e)         = pure $ EnumAnn e
    go (BypassUnique)      = pure $ BypassUnique
    go (DeadCast x es)     = pure $ DeadCast x es
    go (ClassInvAnn r)     = pure $ ClassInvAnn r
    go (ModuleAnn l m)     = ModuleAnn l <$> g m
    go (TypInst x y ts)    = TypInst x y <$> mapM (ntrans f g) ts
    go (EltOverload x m t) = EltOverload x <$> ntrans f g m <*> ntrans f g t
    go (VarAnn x l a t)    = VarAnn x l a <$> T.mapM (ntrans f g) t
    go (MemberAnn t)       = MemberAnn <$> ntrans f g t
    go (CtorAnn t)         = CtorAnn <$> ntrans f g t
    go (UserCast t)        = UserCast <$> ntrans f g t
    go (SigAnn x l t)      = SigAnn x l <$> ntrans f g t
    go (ClassAnn l t)      = ClassAnn l <$> ntrans f g t
    go (InterfaceAnn t)    = InterfaceAnn <$> ntrans f g t
-- | Monadic analogue of 'transRType': rebuild an RType while rewriting
--   every embedded qualified name/path in the underlying monad. Leaves
--   (TPrim, TVar, TExp) carry no names and are returned unchanged.
ntransRType :: (Monad m, Applicative m, F.Reftable r)
            => (QN p -> m (QN q)) -> (QP p -> m (QP q)) -> RTypeQ p r -> m (RTypeQ q r)
ntransRType f g t = go t
  where
    go (TPrim p r)   = pure $ TPrim p r
    go (TVar v r)    = pure $ TVar v r
    go (TExp e)      = pure $ TExp e
    go (TOr ts r)    = TOr  <$> ts' <*> pure r where ts' = mapM go ts
    go (TAnd ts)     = TAnd <$> ts'            where ts' = mapM (mapSndM go) ts
    go (TRef n r)    = TRef <$> n' <*> pure r  where n'  = ntrans f g n
    go (TObj m ms r) = TObj <$> m' <*> ms' <*> pure r where m'  = ntrans f g m
                                                            ms' = ntrans f g ms
    go (TClass n)    = TClass <$> n' where n' = ntrans f g n
    go (TMod p)      = TMod <$> p'   where p' = g p
    go (TAll a t)    = TAll <$> a' <*> t' where a' = ntrans f g a
                                                t' = go t
    go (TFun bs t r) = TFun <$> bs' <*> t' <*> pure r where bs' = mapM (ntrans f g) bs
                                                            t'  = go t
-- Rewrite the names inside every fact attached to an annotation.
instance NameTransformable FAnnQ where
  ntrans f g (FA i s ys) = FA i s <$> mapM (ntrans f g) ys
-- | @emapReft f γ t@ maps @f@ over every refinement in @t@, where @γ@ is
--   the list of symbols in scope at that refinement (extended with the
--   argument binders underneath a function type).
emapReft :: PPR r => ([F.Symbol] -> r -> r') -> [F.Symbol] -> RTypeQ q r -> RTypeQ q r'
emapReft f γ (TVar α r)     = TVar α (f γ r)
emapReft f γ (TPrim c r)    = TPrim c (f γ r)
emapReft f γ (TRef n r)     = TRef (emapReftGen f γ n) (f γ r)
emapReft f γ (TAll α t)     = TAll (emapReftBTV f γ α) (emapReft f γ t)
emapReft f γ (TFun xts t r) = TFun (emapReftBind f γ' <$> xts)
                                   (emapReft f γ' t) (f γ r)
  where γ' = (b_sym <$> xts) ++ γ
emapReft f γ (TObj m xts r) = TObj (emapReft f γ m) (emapReftTM f γ xts) (f γ r)
emapReft f γ (TClass n)     = TClass (emapReftBGen f γ n)
emapReft _ _ (TMod m)       = TMod m
emapReft f γ (TOr ts r)     = TOr (emapReft f γ <$> ts) (f γ r)
emapReft f γ (TAnd ts)      = TAnd (mapSnd (emapReft f γ) <$> ts)
emapReft _ _ _              = error "Not supported in emapReft"

-- Helpers: push 'emapReft' through the auxiliary structures.
emapReftBTV f γ (BTV s l c)  = BTV s l $ emapReft f γ <$> c
emapReftGen f γ (Gen n ts)   = Gen n $ emapReft f γ <$> ts
emapReftBGen f γ (BGen n ts) = BGen n $ emapReftBTV f γ <$> ts
emapReftBind f γ (B x o t)   = B x o $ emapReft f γ t
emapReftTM f γ (TM m sm c k s n)
  = TM (fmap (emapReftElt f γ) m)
       (fmap (emapReftElt f γ) sm)
       (emapReft f γ <$> c)
       (emapReft f γ <$> k)
       ((emapReft f γ *** emapReft f γ) <$> s)
       ((emapReft f γ *** emapReft f γ) <$> n)
emapReftElt f γ (FI x m a t) = FI x m (emapReft f γ a) (emapReft f γ t)
emapReftElt f γ (MI x m mts) = MI x m (mapPair (emapReft f γ) <$> mts)
-- | Monadic traversal of every refinement in an RType. Unlike
--   'emapReft', no symbol environment is threaded.
mapReftM :: (F.Reftable r, PP r, Applicative m, Monad m)
         => (r -> m r') -> RTypeQ q r -> m (RTypeQ q r')
mapReftM f (TVar α r)     = TVar α <$> f r
mapReftM f (TPrim c r)    = TPrim c <$> f r
mapReftM f (TRef n r)     = TRef <$> mapReftGenM f n <*> f r
mapReftM f (TFun xts t r) = TFun <$> mapM (mapReftBindM f) xts <*> mapReftM f t <*> f r
mapReftM f (TAll α t)     = TAll <$> mapReftBTV f α <*> mapReftM f t
mapReftM f (TAnd ts)      = TAnd <$> mapM (mapSndM (mapReftM f)) ts
mapReftM f (TOr ts r)     = TOr <$> mapM (mapReftM f) ts <*> f r
mapReftM f (TObj m xts r) = TObj <$> mapReftM f m
                                 <*> mapTypeMembers f xts <*> f r
mapReftM f (TClass n)     = TClass <$> mapReftBGenM f n
mapReftM _ (TMod a)       = TMod <$> pure a
mapReftM _ t              = error $ "Not supported in mapReftM: " ++ ppshow t

-- Helpers mirroring the 'emapReft*' family, in monadic form.
mapReftBTV f (BTV s l c)   = BTV s l <$> T.mapM (mapReftM f) c
mapReftGenM f (Gen n ts)   = Gen n <$> mapM (mapReftM f) ts
mapReftBGenM f (BGen n ts) = BGen n <$> mapM (mapReftBTV f) ts
mapReftBindM f (B x o t)   = B x o <$> mapReftM f t
mapTypeMembers f (TM m sm c k s n)
  = TM <$> T.mapM (mapReftElt f) m
       <*> T.mapM (mapReftElt f) sm
       <*> T.mapM (mapReftM f) c
       <*> T.mapM (mapReftM f) k
       <*> T.mapM (\(m_,t_) -> (,) <$> mapReftM f m_ <*> mapReftM f t_) s
       <*> T.mapM (\(m_,t_) -> (,) <$> mapReftM f m_ <*> mapReftM f t_) n
mapReftElt f (FI x m a t) = FI x m <$> mapReftM f a <*> mapReftM f t
mapReftElt f (MI x m mts) = MI x m <$> mapM (mapPairM (mapReftM f) (mapReftM f)) mts
-- | Convert every relative qualified name/path in a program to an
--   absolute one. Resolution errors are accumulated in a State list;
--   if any were produced the whole conversion fails with them.
replaceAbsolute
  :: (PPR r, Data r, Typeable r) => BareRelRsc r -> Either [Error] (BareRsc r)
replaceAbsolute pgm@(Rsc { code = Src ss }) =
    case sOut of
      [] -> Right (pgm { code = Src ss' })
      _  -> Left sOut
  where
    (ss', sOut) = runState (mapM (T.mapM (\l -> ntrans (safeAbsName l) (safeAbsPath l) l)) ss) []
    (ns, ps)    = accumNamesAndPaths ss
    -- Resolve a name; type aliases fall back to their absolute form,
    -- anything else records an error and yields a dummy absolute name.
    safeAbsName l a@(absAct (absoluteName ns) l -> n)
      | Just a' <- n = return a'
      | Nothing <- n
      , isAlias a    = return $ toAbsoluteName a
      | otherwise    = modify (errorUnboundName (srcPos l) a:) >> pure (mkAbsName [] a)
    safeAbsPath l a@(absAct (absolutePath ps) l -> n)
      | Just a' <- n = return a'
      | otherwise    = modify (errorUnboundPath (srcPos l) a:) >> pure (mkAbsPath [])
    isAlias (QN (QP RK_ _ []) s) = envMem s $ tAlias pgm
    isAlias (QN _ _)             = False
    -- Resolve relative to the enclosing-module path recorded for this
    -- annotation's id (if any).
    absAct f l a = I.lookup (fId l) mm >>= (`f` a)
    mm = snd $ visitStmts vs (QP AK_ def []) ss
    -- Visitor that records the current absolute module path per node.
    vs = defaultVisitor { ctxStmt = cStmt }
                        { accStmt = acc }
                        { accExpr = acc }
                        { accCElt = acc }
                        { accVDec = acc }
    cStmt (QP AK_ l p) (ModuleStmt _ x _)
      = QP AK_ l $ p ++ [F.symbol x]
    cStmt q _ = q
    acc c s = I.singleton (fId a) c where a = getAnnotation s
-- | Rewrite dotted variable symbols (e.g. @a.b.c@) occurring inside
--   refinement expressions into nested applications of the offset
--   operator, across code annotations, type/path aliases and invariants.
replaceDotRef :: RefScript -> RefScript
replaceDotRef p@(Rsc { code = Src fs, tAlias = ta, pAlias = pa, invts = is })
  = p { code   = Src $ tf <##> fs
      , tAlias = trans tt [] [] <###> ta
      , pAlias = tt [] [] <##> pa
      , invts  = trans tt [] [] <##> is
      }
  where
    tf (FA l a facts) = FA l a (map (trans tt [] []) facts)
    tt _ _ = fmap $ FV.trans vs () ()
    vs = FV.defaultVisitor { FV.txExpr = tx }
    -- Split "a.b.c" on dots and refold as offset applications.
    tx _ (F.EVar s) | (x:y:zs) <- pack "." `splitOn` pack (symbolString s)
                    = foldl offset (F.eVar x) (y:zs)
    tx _ e = e
    offset k v = F.mkEApp offsetLocSym [F.expr k, F.expr v]
-- fixEnums :: PPR r => QEnv (ModuleDef r) -> BareRsc r -> (QEnv (ModuleDef r), BareRsc r)
-- fixEnums m p@(Rsc { code = Src ss }) = (m', p')
--   where
--     m' = fixEnumsInModule m `qenvMap` m
--
-- fixEnumInType :: F.Reftable r => QEnv (ModuleDef r) -> RType r -> RType r
-- fixEnumInType ms (TRef (Gen (QN p x) []) r)
--   | Just e <- envFindTy x $ m_enums m
--   ... else tNum `strengthen` r
--
-- fixEnumsInModule :: F.Reftable r => QEnv (ModuleDef r) -> ModuleDef r -> ModuleDef r
-- fixEnumsInModule m = trans (const $ const $ fixEnumInType m) [] []
-- | Rename function binders in refinements with a suffixed form so that
--   argument symbols do not clash, substituting the renamed symbols
--   through the result refinement.
fixFunBinders :: RefScript -> RefScript
fixFunBinders p@(Rsc { code = Src ss }) = p'
  where
    p' = p { code = Src $ (trans fixFunBindersInType [] [] <$>) <$> ss }

-- Per-type worker: at a TFun node, suffix each binder symbol and apply
-- the corresponding substitution to the return refinement; elsewhere
-- substitute only in the top-level refinement.
fixFunBindersInType _ bs = go
  where
    ks  = [ y | B y _ _ <- bs ]
    ss  = (`suffixSymbol` F.symbol "")
    ks' = map (F.eVar . ss) ks
    sub :: F.Subable a => a -> a
    sub = F.subst (F.mkSubst (zip ks ks'))
    go (TFun bs t r) = TFun [B (ss s) o ts | B s o ts <- bs] t (sub r)
    go t             = toplevel sub t
-- | Syntax nodes whose annotations can be rewritten top-down: @f ctx a@
--   produces the new annotation and @g ctx b@ the context that is passed
--   down to the node's children.
class AnnotTransformable t where
  strans :: (ctx -> a -> b) -> (ctx -> b -> ctx) -> ctx -> t a -> t b

instance AnnotTransformable Statement where
  strans = stransStatement

instance AnnotTransformable Expression where
  strans = stransExpression

instance AnnotTransformable Id where
  strans = stransId

instance AnnotTransformable ForInInit where
  strans = stransForInInit

instance AnnotTransformable ForInit where
  strans = stransForInit

instance AnnotTransformable CatchClause where
  strans = stransCatchClause

instance AnnotTransformable VarDecl where
  strans = stransVarDecl

instance AnnotTransformable ClassElt where
  strans = stransClassElt

instance AnnotTransformable EnumElt where
  strans = stransEnumElt

instance AnnotTransformable Prop where
  strans = stransProp

instance AnnotTransformable LValue where
  strans = stransLvalue
-- Transform a statement's annotation with @f@, then recurse into the
-- children using the context updated by @g@ (same scheme for all the
-- strans* workers below).
stransStatement f g ctx st = go st
  where
    a    = getAnnotation st
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'  -- recurse with the child context
    go (BlockStmt _ sts)         = BlockStmt b (ss <$> sts)
    go (EmptyStmt _)             = EmptyStmt b
    go (ExprStmt _ e)            = ExprStmt b (ss e)
    go (IfStmt _ e s1 s2)        = IfStmt b (ss e) (ss s1) (ss s2)
    go (IfSingleStmt _ e s)      = IfSingleStmt b (ss e) (ss s)
    go (WhileStmt _ e s)         = WhileStmt b (ss e) (ss s)
    go (DoWhileStmt _ s e)       = DoWhileStmt b (ss s) (ss e)
    go (BreakStmt _ i)           = BreakStmt b (ss <$> i)
    go (ContinueStmt _ i)        = ContinueStmt b (ss <$> i)
    go (LabelledStmt _ i s)      = LabelledStmt b (ss i) (ss s)
    go (ForInStmt _ fi e s)      = ForInStmt b (ss fi) (ss e) (ss s)
    go (ForStmt _ fi me1 me2 s)  = ForStmt b (ss fi) (ss <$> me1) (ss <$> me2) (ss s)
    go (TryStmt _ s mcc ms)      = TryStmt b (ss s) (ss <$> mcc) (ss <$> ms)
    go (ThrowStmt _ e)           = ThrowStmt b (ss e)
    go (ReturnStmt _ me)         = ReturnStmt b (ss <$> me)
    go (WithStmt _ e s)          = WithStmt b (ss e) (ss s)
    go (VarDeclStmt _ vs)        = VarDeclStmt b (ss <$> vs)
    go (FunctionStmt _ i is mss) = FunctionStmt b (ss i) (ss <$> is) ((ss <$>) <$> mss)
    go (ClassStmt _ i cs)        = ClassStmt b (ss i) (ss <$> cs)
    go (ModuleStmt _ i sts)      = ModuleStmt b (ss i) (ss <$> sts)
    go (InterfaceStmt _ i)       = InterfaceStmt b (ss i)
    go (EnumStmt _ i es)         = EnumStmt b (ss i) (ss <$> es)
    go s = error $ "[unimplemented] stransStatement for " ++ ppshow s
-- Annotation transform for expressions; mirrors 'stransStatement'.
stransExpression f g ctx exp = go exp
  where
    a    = getAnnotation exp
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'
    go (StringLit _ s)          = StringLit b s
    go (RegexpLit _ s b1 b2)    = RegexpLit b s b1 b2
    go (NumLit _ d)             = NumLit b d
    go (IntLit _ i)             = IntLit b i
    go (BoolLit _ bl)           = BoolLit b bl
    go (NullLit _)              = NullLit b
    go (ArrayLit _ es)          = ArrayLit b (ss <$> es)
    go (ObjectLit _ pes)        = ObjectLit b ((\(p,e) -> (ss p, ss e)) <$> pes)
    go (HexLit _ s)             = HexLit b s
    go (ThisRef _)              = ThisRef b
    go (VarRef _ i)             = VarRef b (ss i)
    go (DotRef _ e i)           = DotRef b (ss e) (ss i)
    go (BracketRef _ e1 e2)     = BracketRef b (ss e1) (ss e2)
    go (NewExpr _ e es)         = NewExpr b (ss e) (ss <$> es)
    go (PrefixExpr _ op e)      = PrefixExpr b op (ss e)
    go (UnaryAssignExpr _ op l) = UnaryAssignExpr b op (ss l)
    go (InfixExpr _ op e1 e2)   = InfixExpr b op (ss e1) (ss e2)
    go (CondExpr _ e1 e2 e3)    = CondExpr b (ss e1) (ss e2) (ss e3)
    go (AssignExpr _ op l e)    = AssignExpr b op (ss l) (ss e)
    go (ListExpr _ es)          = ListExpr b (ss <$> es)
    go (CallExpr _ e es)        = CallExpr b (ss e) (ss <$> es)
    go (SuperRef _)             = SuperRef b
    go (FuncExpr _ mi is sts)   = FuncExpr b (ss <$> mi) (ss <$> is) (ss <$> sts)
    go (Cast _ e)               = Cast b (ss e)
    go (Cast_ _ e)              = Cast_ b (ss e)
-- Identifiers have no children, so the context updater is unused.
stransId f _ ctx (Id a s) = Id (f ctx a) s

stransForInInit f g ctx (ForInVar i)  = ForInVar (strans f g ctx i)
stransForInInit f g ctx (ForInLVal i) = ForInLVal (strans f g ctx i)

stransForInit _ _ _ NoInit = NoInit
stransForInit f g ctx (VarInit vs) = VarInit (strans f g ctx <$> vs)
stransForInit f g ctx (ExprInit e) = ExprInit (strans f g ctx e)
-- Transform the clause's annotation, then its binder and body.
stransCatchClause f g ctx (CatchClause a i s) = CatchClause b (ss i) (ss s)
  where
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'

stransVarDecl f g ctx (VarDecl a i me) = VarDecl b (ss i) (ss <$> me)
  where
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'
-- Annotation transform for class members; mirrors 'stransStatement'.
stransClassElt f g ctx ce = go ce
  where
    a    = getAnnotation ce
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'
    go (Constructor _ is sts)         = Constructor b (ss <$> is) (ss <$> sts)
    go (MemberVarDecl _ st i me)      = MemberVarDecl b st (ss i) (ss <$> me)
    go (MemberMethDecl _ st i is sts) = MemberMethDecl b st (ss i) (ss <$> is) (ss <$> sts)

stransEnumElt f g ctx (EnumElt a i e) = EnumElt b (ss i) (ss e)
  where
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'
-- Annotation transform for object-literal property names.
stransProp f g ctx p = go p
  where
    a    = getAnnotation p
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'
    go (PropId _ i)     = PropId b (ss i)
    go (PropString _ s) = PropString b s
    go (PropNum _ i)    = PropNum b i

-- Annotation transform for assignment targets.
stransLvalue f g ctx lv = go lv
  where
    a    = getAnnotation lv
    b    = f ctx a
    ctx' = g ctx b
    ss   = strans f g ctx'
    go (LVar _ s)         = LVar b s
    go (LDot _ e s)       = LDot b (ss e) s
    go (LBracket _ e1 e2) = LBracket b (ss e1) (ss e2)
-- | If p = A.B and r = C.D.E then the paths that will be checked in this
--   order are: A.B.C.D.E, A.C.D.E, C.D.E
-- All successively shorter prefixes of a path's component list, longest
-- first: pathPrefixes [a,b] == [[a,b],[a],[]]. Shared by the two
-- resolvers below (previously duplicated verbatim in each).
pathPrefixes :: [a] -> [[a]]
pathPrefixes = map reverse . suffixes . reverse
  where
    suffixes []     = [[]]
    suffixes (x:xs) = (x:xs) : suffixes xs

-- | Resolve a relative name against an absolute path: try the relative
--   components appended to each prefix of the path (longest first) and
--   return the first candidate present in the known-name set.
absoluteName :: HS.HashSet AbsName -> AbsPath -> RelName -> Maybe AbsName
absoluteName ns (QP AK_ _ p) (QN (QP RK_ _ ss) s) =
    find (`HS.member` ns) $ (`mkAbsName` s) . (++ ss) <$> pathPrefixes p

-- | As 'absoluteName', but for module paths.
absolutePath :: HS.HashSet AbsPath -> AbsPath -> RelPath -> Maybe AbsPath
absolutePath ps (QP AK_ _ p) (QP RK_ _ ss) =
    find (`HS.member` ps) $ mkAbsPath . (++ ss) <$> pathPrefixes p

-- Re-tag a relative name as absolute without resolving it.
toAbsoluteName (QN (QP RK_ l ss) s) = QN (QP AK_ l ss) s
|
4bd7fdf0941fed4f08484003afb339878e1482f76a752cf1eb09b5bd733cdd0f | timbertson/nix-wrangle | Cmd.hs | # LANGUAGE CPP #
{-# LANGUAGE DerivingStrategies #-}
{-# LANGUAGE GeneralizedNewtypeDeriving #-}
{-# LANGUAGE LambdaCase #-}
{-# LANGUAGE NamedFieldPuns #-}
{-# LANGUAGE ScopedTypeVariables #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE QuasiQuotes #-}
{-# LANGUAGE TupleSections #-}
{-# LANGUAGE ViewPatterns #-}
module Wrangle.Cmd where
import Prelude hiding (error)
import Control.Applicative
import Control.Monad
import Control.Monad.Except (throwError)
import Control.Monad.Catch (throwM)
import Control.Monad.State
import Data.Char (toUpper)
import Data.Maybe (fromMaybe, isJust)
import Data.List (partition, intercalate, intersperse)
import Data.List.NonEmpty (NonEmpty(..))
import System.Exit (exitFailure)
import Wrangle.Source (PackageName(..), StringMap, asString)
import Wrangle.Util
import Data.Aeson.Key (Key)
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.HashMap.Strict as HMap
import qualified Data.Aeson.KeyMap as AMap
import qualified Data.Aeson.Key as Key
import qualified Data.String.QQ as QQ
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.ByteString as B
import qualified Wrangle.Fetch as Fetch
import qualified Wrangle.Source as Source
import qualified System.Directory as Dir
#ifdef ENABLE_SPLICE
import qualified Wrangle.Splice as Splice
#endif
import qualified Options.Applicative as Opts
import qualified Options.Applicative.Help.Pretty as Doc
import qualified System.FilePath.Posix as PosixPath
-- | Entry point: parse the command line (showing help when no command
--   is given) and run the selected subcommand's IO action.
main :: IO ()
main = join $ Opts.customExecParser prefs opts where
  opts = Opts.info (parseCommand <**> Opts.helper) $ mconcat desc
  prefs = Opts.prefs Opts.showHelpOnEmpty
  desc =
    [ Opts.fullDesc
    , Opts.header "Nix-wrangle - source & dependency manager for Nix projects"
    ]
-- | Top-level subcommand dispatch. The @installcheck@ command lives in a
--   second, 'Opts.internal' (hidden) subparser: it is only used as a
--   post-install check by the build.
parseCommand :: Opts.Parser (IO ())
parseCommand = Opts.subparser (
    Opts.command "init" parseCmdInit <>
    Opts.command "add" parseCmdAdd <>
    Opts.command "rm" parseCmdRm <>
    Opts.command "update" parseCmdUpdate <>
#ifdef ENABLE_SPLICE
    Opts.command "splice" parseCmdSplice <>
#endif
    Opts.command "show" parseCmdShow <>
    Opts.command "ls" parseCmdLs <>
    Opts.command "default-nix" parseCmdDefaultNix
  ) <|> Opts.subparser (
    (Opts.command "installcheck"
      (subcommand "postinstall check" (pure cmdInstallCheck) []))
    <> Opts.internal
  )

-- Build a ParserInfo for a subcommand from its description, action and
-- extra info modifiers (e.g. an examples footer).
subcommand desc action infoMod =
  Opts.info
    (Opts.helper <*> action) $
    mconcat ([
      Opts.fullDesc,
      Opts.progDesc desc
    ] ++ infoMod)
-- Join docs with hard line breaks (one doc per output line).
docLines :: [Doc.Doc] -> Doc.Doc
docLines lines = foldr (<>) Doc.empty (intersperse Doc.hardline lines)

-- As 'docLines' but with soft breaks, letting the renderer reflow.
softDocLines lines = foldr (<>) Doc.empty (intersperse Doc.softline lines)

-- Help footer of the form "Examples:" followed by an indented list.
examplesDoc ex = Opts.footerDoc $ Just $ docLines ["Examples:", Doc.indent 2 $ docLines ex]
-- | Options shared by every subcommand: which wrangle JSON source files
--   to operate on ('Nothing' means auto-detect).
newtype CommonOpts = CommonOpts {
  sources :: Maybe (NonEmpty Source.SourceFile)
} deriving newtype Show

parseCommon :: Opts.Parser CommonOpts
parseCommon =
  build <$> parseSources <*> parseLocal <*> parsePublic
  where
    build src a b = CommonOpts { sources = NonEmpty.nonEmpty (src <> a <> b) }
    parseSources = many $ Source.NamedSource <$> Opts.strOption
      ( Opts.long "source" <>
        Opts.short 's' <>
        Opts.metavar "SOURCE.json" <>
        Opts.help "Specify wrangle.json file to operate on"
      )
    -- --local / --public are flags that append the corresponding
    -- well-known source file to the list.
    parseLocal = Opts.flag [] [Source.LocalSource]
      ( Opts.long "local" <>
        Opts.help "use nix/wrangle-local.json"
      )
    parsePublic = Opts.flag [] [Source.DefaultSource]
      ( Opts.long "public" <>
        Opts.help "use nix/wrangle.json"
      )
-- Positional NAME argument, as a PackageName.
parseName :: Opts.Parser Source.PackageName
parseName = Source.PackageName <$> Opts.argument Opts.str (Opts.metavar "NAME")

-- Zero or more NAME arguments; 'Nothing' when none were given.
parseNames :: Opts.Parser (Maybe (NonEmpty Source.PackageName))
parseNames = NonEmpty.nonEmpty <$> many parseName
(|>) a fn = fn a
-- Read an attribute, leaving the map unchanged.
lookupAttr :: Key -> StringMap -> (Maybe String, StringMap)
lookupAttr key map = (AMap.lookup key map, map)

-- Read an attribute and remove it from the map.
consumeAttr :: Key -> StringMap -> (Maybe String, StringMap)
consumeAttr key map = (AMap.lookup key map, AMap.delete key map)

-- Error text for a missing attribute, phrased as its CLI flag.
attrRequired :: Key -> String
attrRequired key = "--"<> (Key.toString key) <> " required"

consumeRequiredAttr :: Key -> StringMap -> (Either String String, StringMap)
consumeRequiredAttr key map = require $ consumeAttr key map where
  -- this error message is a little presumptuous...
  require (value, map) = (toRight (attrRequired key) value, map)

-- | Stateful computations over the remaining (unconsumed) attributes,
--   failing with a String error message.
type StringMapState a = StateT StringMap (Either String) a

consumeOptionalAttrT :: Key -> StringMapState (Maybe String)
consumeOptionalAttrT key = state $ consumeAttr key

lookupOptionalAttrT :: Key -> StringMapState (Maybe String)
lookupOptionalAttrT key = state $ lookupAttr key

-- Consume a mandatory attribute, failing when absent.
consumeAttrT :: Key -> StringMapState String
consumeAttrT key = StateT consume where
  consume :: StringMap -> Either String (String, StringMap)
  consume = reshape . consumeRequiredAttr key
  reshape (result, map) = (\result -> (result, map)) <$> result
-- Ref used when a git-like source does not specify one.
defaultGitRef = "master"

-- | Raw key/value attributes collected from the command line.
data ParsedAttrs = ParsedAttrs StringMap

instance Show ParsedAttrs where
  show (ParsedAttrs attrs) = show attrs

isEmptyAttrs :: ParsedAttrs -> Bool
isEmptyAttrs (ParsedAttrs attrs) = AMap.empty == attrs
-- | Normalise parsed attributes: for `add`, inject a default `nix`
--   attribute (except for the implicit `self` package), then drop the
--   `nix` attribute when it is explicitly "false".
extractAttrs :: PackageAttrsMode -> Maybe Source.PackageName -> ParsedAttrs -> StringMap
extractAttrs mode nameOpt (ParsedAttrs attrs) = canonicalizeNix withDefaultNix where
  withDefaultNix = case mode of
    PackageAttrsForAdd -> addDefaultNix nameOpt attrs
    _ -> attrs

  -- drop nix attribute it if it's explicitly `"false"`
  canonicalizeNix attrs = case AMap.lookup key attrs of
    Just "false" -> AMap.delete key attrs
    _ -> attrs
    where key = "nix"

  -- add default nix attribute, unless it's the `self` package
  addDefaultNix nameOpt attrs = case (nameOpt, AMap.lookup key attrs) of
    (Just (Source.PackageName "self"), Nothing) -> attrs
    (_, Just _) -> attrs
    (_, Nothing) -> AMap.insert key defaultDepNixPath attrs
    where key = "nix"
-- | Turn parsed CLI attributes (plus optional positional SOURCE and
--   NAME) into a concrete 'Source.PackageSpec'. Attribute consumption is
--   tracked in 'StringMapState'; whatever remains unconsumed becomes the
--   spec's free-form package attributes.
processAdd :: Maybe PackageName -> Maybe String -> ParsedAttrs -> Either AppError (Maybe PackageName, Source.PackageSpec)
processAdd nameOpt source attrs = mapLeft AppError $ build nameOpt source attrs
  where
    build :: Maybe PackageName -> Maybe String -> ParsedAttrs -> Either String (Maybe PackageName, Source.PackageSpec)
    build nameOpt source parsedAttrs = evalStateT
      (build' nameOpt source)
      (extractAttrs PackageAttrsForAdd nameOpt parsedAttrs)

    -- Dispatch on --type, defaulting to github.
    build' :: Maybe PackageName -> Maybe String -> StringMapState (Maybe PackageName, Source.PackageSpec)
    build' nameOpt sourceOpt = typ >>= \case
      Source.FetchGithub -> buildGithub sourceOpt nameOpt
      (Source.FetchUrl urlType) -> withName nameOpt $ buildUrl urlType sourceOpt
      Source.FetchPath -> withName nameOpt $ buildLocalPath sourceOpt
      Source.FetchGitLocal -> withName nameOpt $ buildGitLocal sourceOpt
      Source.FetchGit -> withName nameOpt $ buildGit sourceOpt
      where
        typ :: StringMapState Source.FetchType
        typ = (consumeAttrT "type" <|> pure "github") >>= lift . Source.parseFetchType

        withName :: Maybe PackageName -> StringMapState a -> StringMapState (Maybe PackageName, a)
        withName name = fmap (\snd -> (name, snd))

        -- All attributes still unconsumed at this point become the
        -- spec's packageAttrs; the state is then emptied.
        packageSpec :: Source.SourceSpec -> StringMapState Source.PackageSpec
        packageSpec sourceSpec = state $ \attrs -> (Source.PackageSpec {
          Source.sourceSpec,
          Source.packageAttrs = attrs,
          Source.fetchAttrs = AMap.empty
        }, AMap.empty)

        buildPathOpt :: StringMapState (Maybe Source.LocalPath)
        buildPathOpt = fmap pathOfString <$> consumeOptionalAttrT "path" where

        -- --path takes priority over the positional source.
        buildPath :: Maybe String -> StringMapState Source.LocalPath
        buildPath source =
          buildPathOpt >>= \path -> lift $
            toRight "--path or source required" (path <|> (pathOfString <$> source))

        pathOfString :: String -> Source.LocalPath
        pathOfString path = if PosixPath.isAbsolute path
          then Source.FullPath path
          else Source.RelativePath path

        buildLocalPath :: Maybe String -> StringMapState Source.PackageSpec
        buildLocalPath source = do
          path <- buildPath source
          packageSpec (Source.Path path)

        -- Options shared by all git-like fetchers (fetchSubmodules,
        -- defaulting to False).
        buildGitCommon :: StringMapState Source.GitCommon
        buildGitCommon = do
          fetchSubmodulesStr <- lookupOptionalAttrT Source.fetchSubmodulesKeyJSON
          fetchSubmodules <- lift $ case fetchSubmodulesStr of
            Just "true" -> Right True
            Just "false" -> Right False
            Nothing -> Right False
            Just other -> Left ("fetchSubmodules: expected Bool, got: " ++ (other))
          return $ Source.GitCommon { Source.fetchSubmodules }

        buildGit :: Maybe String -> StringMapState Source.PackageSpec
        buildGit source = do
          urlArg <- consumeOptionalAttrT "url"
          gitRef <- consumeOptionalAttrT "ref"
          gitUrl <- lift $ toRight
            ("--url or source required")
            (urlArg <|> source)
          gitCommon <- buildGitCommon
          packageSpec $ Source.Git $ Source.GitSpec {
            Source.gitUrl, Source.gitCommon,
            Source.gitRef = Source.Template (gitRef `orElse` defaultGitRef)
          }

        buildGitLocal :: Maybe String -> StringMapState Source.PackageSpec
        buildGitLocal source = do
          glPath <- buildPath source
          ref <- consumeOptionalAttrT "ref"
          glCommon <- buildGitCommon
          packageSpec $ Source.GitLocal $ Source.GitLocalSpec {
            Source.glPath, Source.glCommon,
            Source.glRef = Source.Template <$> ref
          }

        buildUrl :: Source.UrlFetchType -> Maybe String -> StringMapState Source.PackageSpec
        buildUrl urlType source = do
          urlAttr <- consumeOptionalAttrT "url"
          -- NOTE(review): "souce" typo in this user-facing message.
          url <- lift $ toRight "--url or souce required" (urlAttr <|> source)
          packageSpec $ Source.Url Source.UrlSpec {
            Source.urlType = urlType,
            Source.url = Source.Template url
          }

        -- "owner/repo" -> (name defaulting to repo, owner, repo)
        parseGithubSource :: Maybe PackageName -> String -> Either String (PackageName, String, String)
        parseGithubSource name source = case span (/= '/') source of
          (owner, '/':repo) -> Right (fromMaybe (PackageName repo) name, owner, repo)
          _ -> throwError ("`" <> source <> "` doesn't look like a github repo")

        buildGithub :: Maybe String -> Maybe PackageName -> StringMapState (Maybe PackageName, Source.PackageSpec)
        buildGithub source name = do
          (name, ghOwner, ghRepo) <- identity
          ref <- consumeOptionalAttrT "ref"
          ghCommon <- buildGitCommon
          withName (Just name) $ packageSpec $ Source.Github Source.GithubSpec {
            Source.ghOwner,
            Source.ghRepo,
            Source.ghCommon,
            Source.ghRef = Source.Template . fromMaybe "master" $ ref
          }
          where
            explicitSource (owner, repo) = (fromMaybe (PackageName repo) name, owner, repo)

            identity :: StringMapState (PackageName, String, String)
            identity = do
              owner <- consumeOptionalAttrT "owner"
              repo <- consumeOptionalAttrT "repo"
              lift $ buildIdentity owner repo

            -- The github identity can come from --owner/--repo, from the
            -- positional SOURCE, or from the NAME itself ("owner/repo").
            buildIdentity :: Maybe String -> Maybe String -> Either String (PackageName, String, String)
            buildIdentity owner repo = case (fromAttrs, fromSource, fromNameAsSource) of
              (Just fromAttrs, Nothing, _) -> Right fromAttrs
              (Nothing, Just fromSource, _) -> fromSource
              (Nothing, Nothing, Just fromName) -> fromName
              (Nothing, Nothing, Nothing) -> throwError "name, source or --owner/--repo required"
              (Just _, Just _, _) -> throwError "use source or --owner/--repo, not both"
              where
                ownerAndRepo :: Maybe (String, String) = (,) <$> owner <*> repo
                fromAttrs :: Maybe (PackageName, String, String) = explicitSource <$> ownerAndRepo
                fromSource = parseGithubSource name <$> source
                fromNameAsSource = parseGithubSource Nothing <$> unPackageName <$> name
-- | CLI parser for `add`: optional positional NAME and SOURCE plus the
--   attribute options; a name must have been resolved by 'processAdd'.
parseAdd :: Opts.Parser (Either AppError (PackageName, Source.PackageSpec))
parseAdd = build
  <$> Opts.optional parseName
  <*> Opts.optional parseSource
  <*> parsePackageAttrs PackageAttrsForAdd
  where
    parseSource = Opts.argument Opts.str (Opts.metavar "SOURCE")
    build :: Maybe PackageName -> Maybe String -> ParsedAttrs -> Either AppError (PackageName, Source.PackageSpec)
    build nameOpt source attrs = do
      (name, package) <- processAdd nameOpt source attrs
      name <- toRight (AppError "--name required") name
      return (name, package)
-- Which command the attributes are parsed for; `add`/`update` expose
-- more shortcut flags than `splice`.
data PackageAttrsMode = PackageAttrsForAdd | PackageAttrsForUpdate | PackageAttrsForSlice

-- | Collect attributes from --attr KEY=VAL, the shortcut flags, and
--   --type, into a 'ParsedAttrs' map.
parsePackageAttrs :: PackageAttrsMode -> Opts.Parser ParsedAttrs
parsePackageAttrs mode = ParsedAttrs . AMap.fromList <$> many parseAttribute where
  parseAttribute :: Opts.Parser (Key, String)
  parseAttribute =
    Opts.option (Opts.maybeReader parseKeyVal)
      ( Opts.long "attr" <>
        Opts.short 'a' <>
        Opts.metavar "KEY=VAL" <>
        Opts.help "Set the package spec attribute <KEY> to <VAL>"
      ) <|> shortcutAttributes <|>
    (("type",) <$> Opts.strOption
      ( Opts.long "type" <>
        Opts.short 't' <>
        Opts.metavar "TYPE" <>
        Opts.help ("The source type. "<> Source.validTypesDoc)
      ))

  -- Parse "key=val" into ("key", "val")
  parseKeyVal :: String -> Maybe (Key, String)
  parseKeyVal str = case span (/= '=') str of
    (key, '=':val) -> Just (Key.fromString key, val)
    _ -> Nothing

  -- Shortcuts for known attributes
  shortcutAttributes :: Opts.Parser (Key, String)
  shortcutAttributes = foldr (<|>) empty $ mkShortcutAttribute <$> shortcuts
    where
      shortcuts = case mode of
        PackageAttrsForAdd -> allShortcuts
        PackageAttrsForUpdate -> allShortcuts
        PackageAttrsForSlice -> sourceShortcuts
      allShortcuts = ("nix", "all") : sourceShortcuts
      sourceShortcuts = [
        ("ref", "github / git / git-local"),
        ("fetchSubmodules", "github / git / git-local"),
        ("owner", "github"),
        ("repo", "github"),
        ("url", "url / file / git"),
        ("path", "git-local"),
        ("version", "all")]

  -- Turn ("ref", "github / git") into a --ref REF option whose help
  -- text explains which source types use it.
  mkShortcutAttribute :: (String, String) -> Opts.Parser (Key, String)
  mkShortcutAttribute (attr, types) =
    (Key.fromString attr,) <$> Opts.strOption
      ( Opts.long attr <>
        Opts.metavar (toUpper <$> attr) <>
        Opts.help
          (
            "Equivalent to --attr " <> attr <> "=" <> (toUpper <$> attr) <>
            ", used for source type " <> types
          )
      )
-------------------------------------------------------------------------------
-- Show
-------------------------------------------------------------------------------
parseCmdShow :: Opts.ParserInfo (IO ())
parseCmdShow = subcommand "Show source details" (cmdShow <$> parseCommon <*> parseNames) []

-- | Pretty-print the packages of each configured source file, optionally
--   restricted to the given names.
cmdShow :: CommonOpts -> Maybe (NonEmpty PackageName) -> IO ()
cmdShow opts names =
  do
    sourceFiles <- requireConfiguredSources $ sources opts
    sequence_ $ map showPkgs (NonEmpty.toList sourceFiles) where
      showPkgs :: Source.SourceFile -> IO ()
      showPkgs sourceFile = do
        putStrLn $ " - "<>Source.pathOfSource sourceFile<>":"
        packages <- Source.loadSourceFile sourceFile
        putStrLn $ Source.encodePrettyString (filterPackages names packages)
      -- With no names given, show everything; otherwise keep matches.
      filterPackages Nothing p = Source.unPackages p
      filterPackages (Just names) p = HMap.filterWithKey pred (Source.unPackages p) where
        pred name _ = elem name names
parseCmdLs :: Opts.ParserInfo (IO ())
parseCmdLs = subcommand "list sources" (cmdLs <$> parseCommon) []

-- | List the names of all packages across the configured (and merged)
--   source files, one per line.
cmdLs :: CommonOpts -> IO ()
cmdLs opts =
  do
    sourceFiles <- requireConfiguredSources $ sources opts
    sources <- Source.loadSources sourceFiles
    putStrLn $
      intercalate "\n" $
      map (\s -> " - "<> asString s) $
      HMap.keys $ Source.unPackages $
      Source.merge $ sources
-- | Resolve the configured source files, failing with an AppError when
--   none exist.
requireConfiguredSources :: Maybe (NonEmpty Source.SourceFile) -> IO (NonEmpty Source.SourceFile)
requireConfiguredSources sources =
  Source.configuredSources sources >>=
    (liftMaybe (AppError "No wrangle JSON files found"))
-------------------------------------------------------------------------------
-- Init
-------------------------------------------------------------------------------
-- | Options for `init`. NOTE(review): this record appears unused —
--   'parseCmdInit' passes the channel as a bare @Maybe String@; confirm
--   against the rest of the module before removing.
data InitOpts = InitOpts {
  nixpkgsChannel :: Maybe String
}

parseCmdInit :: Opts.ParserInfo (IO ())
parseCmdInit = subcommand "Initialize nix-wrangle" (
  cmdInit <$> parseInit) []
  where
    parseInit = Opts.optional (Opts.strOption
      ( Opts.long "pkgs" <>
        Opts.short 'p' <>
        Opts.metavar "CHANNEL" <>
        Opts.help ("Pin nixpkgs to CHANNEL")
      ))
-- | Initialize a project: add the nix-wrangle source itself, a `self`
--   source when inside a git repository, optionally a pinned nixpkgs
--   channel, and (re)write the default.nix.
cmdInit :: Maybe String -> IO ()
cmdInit nixpkgs = do
  isGit <- Dir.doesPathExist ".git"
  debugLn $ "isGit ? " <> (show isGit)
  addMultiple OverwriteSource NoAutoInit (Right (wrangleSpec : (selfSpecs isGit ++ nixpkgsSpecs))) commonOpts
  updateDefaultNix defaultNixOptsDefault
  where
    commonOpts = CommonOpts { sources = Nothing }
    -- nix-wrangle itself, pinned to the v1 branch on github.
    wrangleSpec = (PackageName "nix-wrangle", Source.PackageSpec {
      Source.sourceSpec = Source.Github Source.GithubSpec {
        Source.ghOwner = "timbertson",
        Source.ghRepo = "nix-wrangle",
        Source.ghCommon = Source.defaultGitCommon,
        Source.ghRef = Source.Template "v1"
      },
      Source.fetchAttrs = AMap.empty,
      Source.packageAttrs = AMap.fromList [("nix", "nix")]
    })
    -- Optional pinned nixpkgs channel (github:NixOS/nixpkgs-channels).
    nixpkgsSpecs = case nixpkgs of
      Nothing -> []
      Just channel -> [(PackageName "pkgs", Source.PackageSpec {
        Source.sourceSpec = Source.Github Source.GithubSpec {
          Source.ghOwner = "NixOS",
          Source.ghRepo = "nixpkgs-channels",
          Source.ghCommon = Source.defaultGitCommon,
          Source.ghRef = Source.Template channel
        },
        Source.fetchAttrs = AMap.empty,
        Source.packageAttrs = AMap.fromList [("nix", defaultDepNixPath)]
      })]
    -- `self` points at the working tree via git-local, only when the
    -- current directory is a git repository.
    selfSpecs isGit =
      if isGit then [
        (PackageName "self", Source.PackageSpec {
          Source.sourceSpec = Source.GitLocal Source.GitLocalSpec {
            Source.glPath = Source.RelativePath ".",
            Source.glRef = Nothing,
            Source.glCommon = Source.defaultGitCommon
          },
          Source.fetchAttrs = AMap.empty,
          Source.packageAttrs = AMap.empty
        })
      ] else []
-------------------------------------------------------------------------------
-- Add
-------------------------------------------------------------------------------
-- How to treat a source that already exists (fail / replace / skip).
data AddMode = AddSource | OverwriteSource | AddIfMissing

-- Whether a missing source file should be created by running `init`.
data AutoInit = AutoInit | NoAutoInit
parseCmdAdd :: Opts.ParserInfo (IO ())
parseCmdAdd = subcommand "Add a source" (cmdAdd <$> parseAddMode <*> parseAdd <*> parseCommon)
  -- NOTE(review): the second and third examples are identical —
  -- possibly an accidental duplicate.
  [ examplesDoc [
    "nix-wrangle add timbertson/opam2nix-packages",
    "nix-wrangle add pkgs nixos/nixpkgs-channels --ref nixos-unstable",
    "nix-wrangle add pkgs nixos/nixpkgs-channels --ref nixos-unstable",
    "nix-wrangle add pkgs --owner nixos --repo nixpkgs-channels --ref nixos-unstable",
    "nix-wrangle add --type git-local self .."
  ]]
  where
    -- --replace switches from fail-if-present to overwrite.
    parseAddMode = Opts.flag AddSource OverwriteSource
      (Opts.long "replace" <> Opts.help "Replace existing source")
-- | Add (or replace) a batch of sources: load or initialize the source
--   file, prefetch each new package, and write the result back.
addMultiple :: AddMode -> AutoInit -> Either AppError [(PackageName, Source.PackageSpec)] -> CommonOpts -> IO ()
addMultiple addMode autoInit addOpts opts =
  do
    addSpecs <- liftEither $ addOpts
    configuredSources <- Source.configuredSources $ sources opts
    let sourceFile = NonEmpty.head <$> configuredSources
    debugLn $ "sourceFile: " <> show sourceFile
    source <- loadOrInit autoInit sourceFile
    debugLn $ "source: " <> show source
    -- NOTE: this binding shadows the Maybe-wrapped `sourceFile` above.
    let (sourceFile, inputSource) = source
    let baseSource = fromMaybe (Source.emptyPackages) inputSource
    modifiedSource <- foldM addSingle baseSource addSpecs
    Dir.createDirectoryIfMissing True $ PosixPath.takeDirectory (Source.pathOfSource sourceFile)
    Source.writeSourceFile sourceFile modifiedSource
  where
    -- Prefetch and insert one package, honouring the add mode.
    addSingle :: Source.Packages -> (PackageName, Source.PackageSpec) -> IO Source.Packages
    addSingle base (name, inputSpec) = do
      shouldAdd' <- shouldAdd addMode name base
      if shouldAdd' then do
        putStrLn $ "Adding " <> show name <> " // " <> show inputSpec
        spec <- Fetch.prefetch name inputSpec
        return $ Source.add base name spec
      else
        return base

    loadOrInit :: AutoInit -> Maybe Source.SourceFile -> IO (Source.SourceFile, Maybe Source.Packages)
    -- TODO: arrows?
    loadOrInit AutoInit Nothing = do
      let source = Source.DefaultSource
      infoLn $ Source.pathOfSource source <> " does not exist, initializing..."
      cmdInit Nothing
      loadOrInit NoAutoInit (Just source)
    loadOrInit NoAutoInit Nothing = return (Source.DefaultSource, Nothing)
    loadOrInit _ (Just f) = do
      exists <- Source.doesSourceExist f
      loaded <- sequence $ if exists
        then Just $ Source.loadSourceFile f
        else Nothing
      return (f, loaded)

    -- Decide whether an existing entry may be (re)written.
    shouldAdd :: AddMode -> PackageName -> Source.Packages -> IO Bool
    shouldAdd mode name@(PackageName nameStr) existing =
      if Source.member existing name then
        case mode of
          AddSource -> throwM $ AppError $ nameStr <> " already present, use --replace to replace it"
          OverwriteSource -> infoLn ("Replacing existing " <> nameStr) >> return True
          AddIfMissing -> infoLn ("Not replacing existing " <> nameStr) >> return False
      else return True
-- | Entry point for `nix-wrangle add`: delegate to `addMultiple` with a
-- singleton list, auto-initializing the source file if it doesn't exist.
cmdAdd :: AddMode -> Either AppError (PackageName, Source.PackageSpec) -> CommonOpts -> IO ()
cmdAdd addMode addOpt opts = addMultiple addMode AutoInit (fmap (: []) addOpt) opts
-------------------------------------------------------------------------------
-- Rm
-------------------------------------------------------------------------------
-- | CLI parser for `nix-wrangle rm NAME...`.
parseCmdRm :: Opts.ParserInfo (IO ())
parseCmdRm = subcommand "Remove one or more sources" (cmdRm <$> parseNames <*> parseCommon) []
-- | Remove one or more named sources from each configured source file.
-- Fails if no names were given on the command line.
cmdRm :: Maybe (NonEmpty PackageName) -> CommonOpts -> IO ()
cmdRm maybeNames opts = do
  packageNames <- liftMaybe (AppError "at least one name required") maybeNames
  alterPackagesNamed (Just packageNames) opts removeSingle
  where
    -- Drop one package from the loaded set, logging as we go.
    removeSingle :: Source.Packages -> PackageName -> IO Source.Packages
    removeSingle packages name = do
      infoLn $ " - removing " <> show name <> "..."
      return $ Source.remove packages name
-------------------------------------------------------------------------------
-- Update
-------------------------------------------------------------------------------
-- | CLI parser for `nix-wrangle update [NAME...] [--attr KEY=VAL ...]`.
parseCmdUpdate :: Opts.ParserInfo (IO ())
parseCmdUpdate = subcommand "Update one or more sources"
  (cmdUpdate <$> parseNames <*> parsePackageAttrs PackageAttrsForUpdate <*> parseCommon)
  [ examplesDoc [
    "nix-wrangle update pkgs --ref nixpkgs-unstable",
    "nix-wrangle update gup --nix nix/"
  ]]
-- | Update one or more sources, optionally overriding attributes.
-- Each selected package has the attribute overrides applied, is
-- re-prefetched, and is stored back into its source file.
cmdUpdate :: Maybe (NonEmpty PackageName) -> ParsedAttrs -> CommonOpts -> IO ()
cmdUpdate packageNamesOpt parsedAttrs opts =
  -- Update must either specify no attributes (update everything to latest version)
  -- or specify one or more explicit package names
  if isJust packageNamesOpt || isEmptyAttrs parsedAttrs
    then alterPackagesNamed packageNamesOpt opts updateSingle
    else throwM $ AppError (
      "You must explicitly list dependency names when modifying attributes (" <> show parsedAttrs <> ")"
    )
  where
    -- Apply attribute overrides to a single package, prefetch it, and store it.
    updateSingle :: Source.Packages -> PackageName -> IO Source.Packages
    updateSingle packages name = do
      infoLn $ " - updating " <> (show name) <> "..."
      original <- liftEither $ Source.lookup name packages
      debugLn $ "original: " <> show original
      let updateAttrs = extractAttrs PackageAttrsForUpdate (Just name) parsedAttrs
      debugLn $ "updateAttrs: " <> show updateAttrs
      newSpec <- liftEither $ Source.updatePackageSpec original updateAttrs
      fetched <- Fetch.prefetch name newSpec
      when (fetched == original) $ infoLn " ... (unchanged)"
      return $ Source.add packages name fetched
-- shared by update/rm
-- TODO: pass actual source, since it is always Just
-- Load every configured source file, fail fast if any requested name is
-- absent from the merged package set, then hand each
-- (file, packages, names-present-in-that-file) triple to `process`.
-- With no explicit names, all packages in each file are selected.
processPackagesNamed :: Maybe (NonEmpty PackageName) -> CommonOpts
  -> (Source.SourceFile -> Source.Packages -> [PackageName] -> IO ())-> IO ()
processPackagesNamed packageNamesOpt opts process = do
  sourceFiles <- requireConfiguredSources $ sources opts
  -- NOTE: this local `sources` shadows the CommonOpts field accessor above
  sources <- sequence $ loadSource <$> sourceFiles
  checkMissingKeys (snd <$> sources)
  sequence_ $ traverseSources <$> sources
  where
    -- Validate names against the union of all source files, not each file
    -- individually, so a name only needs to exist somewhere.
    checkMissingKeys :: NonEmpty Source.Packages -> IO ()
    checkMissingKeys sources = case missingKeys of
      [] -> return ()
      _ -> fail $ "No such packages: " <> show missingKeys
      where
        (_, missingKeys) = partitionPackageNames $ Source.merge sources
    -- Split requested names into (present in `sources`, missing).
    partitionPackageNames :: Source.Packages -> ([PackageName], [PackageName])
    partitionPackageNames sources = case packageNamesOpt of
      Nothing -> (Source.keys sources, [])
      (Just names) -> partition (Source.member sources) (NonEmpty.toList names)
    -- Run `process` on one file with only the names it actually contains.
    traverseSources :: (Source.SourceFile, Source.Packages) -> IO ()
    traverseSources (sourceFile, sources) = do
      let (packageNames, _) = partitionPackageNames sources
      debugLn $ "Package names: " <> (show packageNames)
      process sourceFile sources packageNames
-- shared by update/rm
-- | Apply `updateSingle` to every selected package in every configured
-- source file, writing each modified file back to disk.
alterPackagesNamed :: Maybe (NonEmpty PackageName) -> CommonOpts -> (Source.Packages -> PackageName -> IO Source.Packages) -> IO ()
alterPackagesNamed packageNamesOpt opts updateSingle =
  processPackagesNamed packageNamesOpt opts applyAndSave
  where
    applyAndSave sourceFile packages names = do
      infoLn $ "Updating "<> Source.pathOfSource sourceFile <> " ..."
      updated <- foldM updateSingle packages names
      Source.writeSourceFile sourceFile updated
-- | Load a source file, pairing it with its parsed package set.
loadSource :: Source.SourceFile -> IO (Source.SourceFile, Source.Packages)
loadSource file = do
  packages <- Source.loadSourceFile file
  return (file, packages)
#ifdef ENABLE_SPLICE
-------------------------------------------------------------------------------
-- Splice
-------------------------------------------------------------------------------
-- Where the spliced output goes: an explicit destination file (--output),
-- or overwriting the input file in place (--replace).
data SpliceOutput = SpliceOutput FilePath | SpliceReplace
-- Parsed options for the `splice` subcommand.
data SpliceOpts = SpliceOpts {
  spliceName :: Maybe PackageName, -- ^ source to splice (defaults to "public")
  spliceAttrs :: StringMap, -- ^ explicit source attributes (bypasses wrangle.json)
  spliceInput :: FilePath, -- ^ .nix file to read
  spliceOutput :: SpliceOutput, -- ^ where to write the result
  spliceUpdate :: Bool -- ^ fetch latest source before splicing (default True)
}
-- | CLI parser for `nix-wrangle splice`.
parseCmdSplice :: Opts.ParserInfo (IO ())
parseCmdSplice = subcommand "Splice current `self` source into a .nix document"
  (cmdSplice <$> parseSplice <*> parseCommon) [
    Opts.footerDoc $ Just $ docLines [
      softDocLines [
        "This command generates a copy of the input .nix file, with",
        "the `src` attribute replaced with the current fetcher for",
        "the source named `public`."],
      "",
      softDocLines [
        "This allows you to build a standalone",
        ".nix file for publishing (e.g. to nixpkgs itself)" ],
      "",
      softDocLines [
        "If your source does not come from an existing wrangle.json,",
        "you can pass it in explicitly as attributes, like with",
        "`nix-wrangle add` (i.e. --type, --repo, --owner, --url, etc)"]
  ]]
  where
    -- PackageAttrsForSlice restricts the --attr shortcuts to source-level
    -- attributes (the original `ParsePackageAttrsSource` is not a
    -- constructor of PackageAttrsMode and could not compile)
    parseSplice = build <$> parseInput <*> parseOutput <*> parseName <*> parsePackageAttrs PackageAttrsForSlice <*> parseUpdate where
      build spliceInput spliceOutput spliceName spliceAttrs spliceUpdate =
        SpliceOpts { spliceInput, spliceOutput, spliceName, spliceAttrs, spliceUpdate }
    parseInput = Opts.argument Opts.str (Opts.metavar "SOURCE")
    parseName = Opts.optional (PackageName <$> Opts.strOption
      ( Opts.long "name" <>
        Opts.short 'n' <>
        Opts.metavar "NAME" <>
        Opts.help ("Source name to use (default: public)")
      ))
    parseOutput = explicitOutput <|> replaceOutput
    replaceOutput = Opts.flag' SpliceReplace
      ( Opts.long "replace" <>
        Opts.short 'r' <>
        Opts.help "Overwrite input file"
      )
    explicitOutput = SpliceOutput <$> (Opts.strOption
      ( Opts.long "output" <>
        Opts.short 'o' <>
        Opts.metavar "DEST" <>
        Opts.help ("Destination file")
      ))
    parseUpdate = Opts.flag True False
      ( Opts.long "no-update" <>
        Opts.help "Don't fetch the latest version of `public` before splicing"
      )
-- | Replace the single source location in `spliceInput` with the fetcher
-- for the configured (or explicitly supplied) source, writing the result
-- to `spliceOutput`. Fails unless exactly one source location is found.
--
-- NOTE(review): the previous body used `HMap.null`/`HMap.empty` on
-- StringMap values and passed a raw StringMap where ParsedAttrs is
-- required — left over from the KeyMap migration; fixed below with
-- `AMap.null` and explicit `ParsedAttrs` wrapping.
cmdSplice :: SpliceOpts -> CommonOpts -> IO ()
cmdSplice (SpliceOpts { spliceName, spliceAttrs, spliceInput, spliceOutput, spliceUpdate}) opts = do
  fileContents <- Splice.load spliceInput
  parsed <- Splice.getExn (Splice.parse fileContents)
  let existingSrcSpans = Splice.extractSourceLocs parsed
  srcSpan <- case existingSrcSpans of
    [single] -> return single
    other -> fail $ "No single source found in " ++ (show other)
  self <- getPublic
  debugLn $ "got source: " <> show self
  replacedText <- liftEither $ Splice.replaceSourceLoc fileContents self srcSpan
  Source.writeFileText outputPath replacedText
  where
    outputPath = case spliceOutput of
      SpliceOutput p -> p
      SpliceReplace -> spliceInput
    -- Resolve the package spec to splice in: either from wrangle.json
    -- (optionally updating it first), or built anonymously from --attr flags.
    getPublic :: IO Source.PackageSpec
    getPublic =
      if AMap.null spliceAttrs then do
        sourceFiles <- requireConfiguredSources $ sources opts
        let name = spliceName `orElse` PackageName "public"
        -- update before loading, so we splice the freshly-fetched spec
        -- rather than the stale one that was on disk beforehand
        when spliceUpdate $
          cmdUpdate (Just $ name :| []) (ParsedAttrs AMap.empty) opts
        packages <- Source.merge <$> Source.loadSources sourceFiles
        liftEither $ Source.lookup name packages
      else do
        -- For splicing, we support a subset of `add` arguments. We don't
        -- accept a name or source, only explicit spliceAttrs
        infoLn $ "Splicing anonymous source from attributes: " <> show spliceAttrs
        self <- liftEither $ snd <$> processAdd Nothing Nothing (ParsedAttrs spliceAttrs)
        Fetch.prefetch (PackageName "self") self
#endif
-- ^ ENABLE_SPLICE
-------------------------------------------------------------------------------
-- default-nix
-------------------------------------------------------------------------------
-- | CLI parser for `nix-wrangle default-nix` (takes no arguments).
parseCmdDefaultNix :: Opts.ParserInfo (IO ())
parseCmdDefaultNix = subcommand "Generate default.nix"
  (pure cmdDefaultNix) [
    Opts.footerDoc $ Just $
      "Typically this only needs to be done once, though it" <>
      " may be necessary if you have a very old default.nix"
  ]
-- | Entry point for `nix-wrangle default-nix`: regenerate default.nix
-- unconditionally (overwriting whatever is there).
cmdDefaultNix :: IO ()
cmdDefaultNix = updateDefaultNix (DefaultNixOpts { force = True })

-- | Options for generating default.nix.
data DefaultNixOpts = DefaultNixOpts {
  force :: Bool -- ^ overwrite even if the existing file lacks our marker
}

-- Default behaviour: only overwrite files we generated ourselves.
defaultNixOptsDefault :: DefaultNixOpts
defaultNixOptsDefault = DefaultNixOpts { force = False }
-- | Write `default.nix` from the built-in template. Unless `force` is set,
-- an existing file is only replaced when it carries our generation marker,
-- so a hand-written default.nix is never clobbered.
updateDefaultNix :: DefaultNixOpts -> IO ()
updateDefaultNix (DefaultNixOpts { force }) = do
  continue <- if force then return True else shouldWriteFile
  if continue then Source.writeFileText path contents
  else infoLn $ "Note: not replacing existing "<>path<>", run `nix-wrangle default-nix` to explicitly override"
  where
    path = "default.nix"
    -- Marker used to recognise files we generated (safe to overwrite).
    markerText :: T.Text = "# Note: This file is generated by nix-wrangle"
    contents :: T.Text
    contents = T.unlines [
      markerText,
      "# It can be regenerated with `nix-wrangle default-nix`",
      defaultNixContents ]
    -- Overwrite an existing default.nix only if it contains our marker.
    shouldWriteFile :: IO Bool
    shouldWriteFile = do
      exists <- Dir.doesFileExist path
      if exists then
        T.isInfixOf markerText . TE.decodeUtf8 <$> B.readFile path
      else
        return True
-- Relative path used as the default `nix` attribute for added dependencies.
defaultDepNixPath = "default.nix"
-- Template for the generated default.nix bootstrap: loads nix/wrangle.json,
-- fetches a pinned nixpkgs (or falls back to <nixpkgs>) and nix-wrangle
-- itself, then hands control to the wrangle API's `inject`.
defaultNixContents = T.strip [QQ.s|
let
	systemNixpkgs = import <nixpkgs> {};
	fallback = val: dfl: if val == null then dfl else val;
	makeFetchers = pkgs: {
		github = pkgs.fetchFromGitHub;
		url = builtins.fetchTarball;
	};
	fetch = pkgs: source:
		(builtins.getAttr source.type (makeFetchers pkgs)) source.fetch;
	sourcesJson = (builtins.fromJSON (builtins.readFile ./nix/wrangle.json)).sources;
	wrangleJson = sourcesJson.nix-wrangle or (abort "No nix-wrangle entry in nix/wrangle.json");
in
{ pkgs ? null, nix-wrangle ? null, ... }@provided:
let
	_pkgs = fallback pkgs (
		if builtins.hasAttr "pkgs" sourcesJson
		then import (fetch systemNixpkgs sourcesJson.pkgs) {} else systemNixpkgs
	);
	_wrangle = fallback nix-wrangle (_pkgs.callPackage "${fetch _pkgs wrangleJson}/${wrangleJson.nix}" {});
in
(_wrangle.api { pkgs = _pkgs; }).inject { inherit provided; path = ./.; }
|]
-- | Post-install sanity check: verify the bundled nix API file exists on
-- disk. Exits with a nonzero status if it's missing, prints "ok" otherwise.
cmdInstallCheck :: IO ()
cmdInstallCheck = do
  apiContext <- Fetch.globalApiContext
  let apiPath = Fetch.apiNix apiContext
  infoLn $ "checking for nix API at "<>apiPath
  apiExists <- Dir.doesFileExist apiPath
  unless apiExists exitFailure
  infoLn "ok"
this error message is a little presumptuous...
drop nix attribute it if it's explicitly `"false"`
add default nix attribute, unless it's the `self` package
Shortcuts for known attributes
-----------------------------------------------------------------------------
Show
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Add
-----------------------------------------------------------------------------
TODO: arrows?
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
Update
-----------------------------------------------------------------------------
Update must either specify no attributes (update everything to latest version)
shared by update/rm
TODO: pass actual source, since it is always Just
shared by update/rm
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
putStrLn $ show $ expr
For splicing, we support a subset of `add` arguments. We don't
accept a name or source, only explicit spliceAttrs
^ ENABLE_SPLICE
-----------------------------------------------------------------------------
default-nix
----------------------------------------------------------------------------- | # LANGUAGE CPP #
# LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE NamedFieldPuns #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE QuasiQuotes #
# LANGUAGE TupleSections #
# LANGUAGE ViewPatterns #
module Wrangle.Cmd where
import Prelude hiding (error)
import Control.Applicative
import Control.Monad
import Control.Monad.Except (throwError)
import Control.Monad.Catch (throwM)
import Control.Monad.State
import Data.Char (toUpper)
import Data.Maybe (fromMaybe, isJust)
import Data.List (partition, intercalate, intersperse)
import Data.List.NonEmpty (NonEmpty(..))
import System.Exit (exitFailure)
import Wrangle.Source (PackageName(..), StringMap, asString)
import Wrangle.Util
import Data.Aeson.Key (Key)
import qualified Data.List.NonEmpty as NonEmpty
import qualified Data.HashMap.Strict as HMap
import qualified Data.Aeson.KeyMap as AMap
import qualified Data.Aeson.Key as Key
import qualified Data.String.QQ as QQ
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import qualified Data.ByteString as B
import qualified Wrangle.Fetch as Fetch
import qualified Wrangle.Source as Source
import qualified System.Directory as Dir
#ifdef ENABLE_SPLICE
import qualified Wrangle.Splice as Splice
#endif
import qualified Options.Applicative as Opts
import qualified Options.Applicative.Help.Pretty as Doc
import qualified System.FilePath.Posix as PosixPath
main :: IO ()
main = join $ Opts.customExecParser prefs opts where
opts = Opts.info (parseCommand <**> Opts.helper) $ mconcat desc
prefs = Opts.prefs Opts.showHelpOnEmpty
desc =
[ Opts.fullDesc
, Opts.header "Nix-wrangle - source & dependency manager for Nix projects"
]
parseCommand :: Opts.Parser (IO ())
parseCommand = Opts.subparser (
Opts.command "init" parseCmdInit <>
Opts.command "add" parseCmdAdd <>
Opts.command "rm" parseCmdRm <>
Opts.command "update" parseCmdUpdate <>
#ifdef ENABLE_SPLICE
Opts.command "splice" parseCmdSplice <>
#endif
Opts.command "show" parseCmdShow <>
Opts.command "ls" parseCmdLs <>
Opts.command "default-nix" parseCmdDefaultNix
) <|> Opts.subparser (
(Opts.command "installcheck"
(subcommand "postinstall check" (pure cmdInstallCheck) []))
<> Opts.internal
)
subcommand desc action infoMod =
Opts.info
(Opts.helper <*> action) $
mconcat ([
Opts.fullDesc,
Opts.progDesc desc
] ++ infoMod)
docLines :: [Doc.Doc] -> Doc.Doc
docLines lines = foldr (<>) Doc.empty (intersperse Doc.hardline lines)
softDocLines lines = foldr (<>) Doc.empty (intersperse Doc.softline lines)
examplesDoc ex = Opts.footerDoc $ Just $ docLines ["Examples:", Doc.indent 2 $ docLines ex]
newtype CommonOpts = CommonOpts {
sources :: Maybe (NonEmpty Source.SourceFile)
} deriving newtype Show
parseCommon :: Opts.Parser CommonOpts
parseCommon =
build <$> parseSources <*> parseLocal <*> parsePublic
where
build src a b = CommonOpts { sources = NonEmpty.nonEmpty (src <> a <> b) }
parseSources = many $ Source.NamedSource <$> Opts.strOption
( Opts.long "source" <>
Opts.short 's' <>
Opts.metavar "SOURCE.json" <>
Opts.help "Specify wrangle.json file to operate on"
)
parseLocal = Opts.flag [] [Source.LocalSource]
( Opts.long "local" <>
Opts.help "use nix/wrangle-local.json"
)
parsePublic = Opts.flag [] [Source.DefaultSource]
( Opts.long "public" <>
Opts.help "use nix/wrangle.json"
)
parseName :: Opts.Parser Source.PackageName
parseName = Source.PackageName <$> Opts.argument Opts.str (Opts.metavar "NAME")
parseNames :: Opts.Parser (Maybe (NonEmpty Source.PackageName))
parseNames = NonEmpty.nonEmpty <$> many parseName
(|>) a fn = fn a
lookupAttr :: Key -> StringMap -> (Maybe String, StringMap)
lookupAttr key map = (AMap.lookup key map, map)
consumeAttr :: Key -> StringMap -> (Maybe String, StringMap)
consumeAttr key map = (AMap.lookup key map, AMap.delete key map)
attrRequired :: Key -> String
attrRequired key = "--"<> (Key.toString key) <> " required"
consumeRequiredAttr :: Key -> StringMap -> (Either String String, StringMap)
consumeRequiredAttr key map = require $ consumeAttr key map where
require (value, map) = (toRight (attrRequired key) value, map)
type StringMapState a = StateT StringMap (Either String) a
consumeOptionalAttrT :: Key -> StringMapState (Maybe String)
consumeOptionalAttrT key = state $ consumeAttr key
lookupOptionalAttrT :: Key -> StringMapState (Maybe String)
lookupOptionalAttrT key = state $ lookupAttr key
consumeAttrT :: Key -> StringMapState String
consumeAttrT key = StateT consume where
consume :: StringMap -> Either String (String, StringMap)
consume = reshape . consumeRequiredAttr key
reshape (result, map) = (\result -> (result, map)) <$> result
defaultGitRef = "master"
data ParsedAttrs = ParsedAttrs StringMap
instance Show ParsedAttrs where
show (ParsedAttrs attrs) = show attrs
isEmptyAttrs :: ParsedAttrs -> Bool
isEmptyAttrs (ParsedAttrs attrs) = AMap.empty == attrs
extractAttrs :: PackageAttrsMode -> Maybe Source.PackageName -> ParsedAttrs -> StringMap
extractAttrs mode nameOpt (ParsedAttrs attrs) = canonicalizeNix withDefaultNix where
withDefaultNix = case mode of
PackageAttrsForAdd -> addDefaultNix nameOpt attrs
_ -> attrs
canonicalizeNix attrs = case AMap.lookup key attrs of
Just "false" -> AMap.delete key attrs
_ -> attrs
where key = "nix"
addDefaultNix nameOpt attrs = case (nameOpt, AMap.lookup key attrs) of
(Just (Source.PackageName "self"), Nothing) -> attrs
(_, Just _) -> attrs
(_, Nothing) -> AMap.insert key defaultDepNixPath attrs
where key = "nix"
processAdd :: Maybe PackageName -> Maybe String -> ParsedAttrs -> Either AppError (Maybe PackageName, Source.PackageSpec)
processAdd nameOpt source attrs = mapLeft AppError $ build nameOpt source attrs
where
build :: Maybe PackageName -> Maybe String -> ParsedAttrs -> Either String (Maybe PackageName, Source.PackageSpec)
build nameOpt source parsedAttrs = evalStateT
(build' nameOpt source)
(extractAttrs PackageAttrsForAdd nameOpt parsedAttrs)
build' :: Maybe PackageName -> Maybe String -> StringMapState (Maybe PackageName, Source.PackageSpec)
build' nameOpt sourceOpt = typ >>= \case
Source.FetchGithub -> buildGithub sourceOpt nameOpt
(Source.FetchUrl urlType) -> withName nameOpt $ buildUrl urlType sourceOpt
Source.FetchPath -> withName nameOpt $ buildLocalPath sourceOpt
Source.FetchGitLocal -> withName nameOpt $ buildGitLocal sourceOpt
Source.FetchGit -> withName nameOpt $ buildGit sourceOpt
where
typ :: StringMapState Source.FetchType
typ = (consumeAttrT "type" <|> pure "github") >>= lift . Source.parseFetchType
withName :: Maybe PackageName -> StringMapState a -> StringMapState (Maybe PackageName, a)
withName name = fmap (\snd -> (name, snd))
packageSpec :: Source.SourceSpec -> StringMapState Source.PackageSpec
packageSpec sourceSpec = state $ \attrs -> (Source.PackageSpec {
Source.sourceSpec,
Source.packageAttrs = attrs,
Source.fetchAttrs = AMap.empty
}, AMap.empty)
buildPathOpt :: StringMapState (Maybe Source.LocalPath)
buildPathOpt = fmap pathOfString <$> consumeOptionalAttrT "path" where
buildPath :: Maybe String -> StringMapState Source.LocalPath
buildPath source =
buildPathOpt >>= \path -> lift $
toRight "--path or source required" (path <|> (pathOfString <$> source))
pathOfString :: String -> Source.LocalPath
pathOfString path = if PosixPath.isAbsolute path
then Source.FullPath path
else Source.RelativePath path
buildLocalPath :: Maybe String -> StringMapState Source.PackageSpec
buildLocalPath source = do
path <- buildPath source
packageSpec (Source.Path path)
buildGitCommon :: StringMapState Source.GitCommon
buildGitCommon = do
fetchSubmodulesStr <- lookupOptionalAttrT Source.fetchSubmodulesKeyJSON
fetchSubmodules <- lift $ case fetchSubmodulesStr of
Just "true" -> Right True
Just "false" -> Right False
Nothing -> Right False
Just other -> Left ("fetchSubmodules: expected Bool, got: " ++ (other))
return $ Source.GitCommon { Source.fetchSubmodules }
buildGit :: Maybe String -> StringMapState Source.PackageSpec
buildGit source = do
urlArg <- consumeOptionalAttrT "url"
gitRef <- consumeOptionalAttrT "ref"
gitUrl <- lift $ toRight
("--url or source required")
(urlArg <|> source)
gitCommon <- buildGitCommon
packageSpec $ Source.Git $ Source.GitSpec {
Source.gitUrl, Source.gitCommon,
Source.gitRef = Source.Template (gitRef `orElse` defaultGitRef)
}
buildGitLocal :: Maybe String -> StringMapState Source.PackageSpec
buildGitLocal source = do
glPath <- buildPath source
ref <- consumeOptionalAttrT "ref"
glCommon <- buildGitCommon
packageSpec $ Source.GitLocal $ Source.GitLocalSpec {
Source.glPath, Source.glCommon,
Source.glRef = Source.Template <$> ref
}
buildUrl :: Source.UrlFetchType -> Maybe String -> StringMapState Source.PackageSpec
buildUrl urlType source = do
urlAttr <- consumeOptionalAttrT "url"
url <- lift $ toRight "--url or souce required" (urlAttr <|> source)
packageSpec $ Source.Url Source.UrlSpec {
Source.urlType = urlType,
Source.url = Source.Template url
}
parseGithubSource :: Maybe PackageName -> String -> Either String (PackageName, String, String)
parseGithubSource name source = case span (/= '/') source of
(owner, '/':repo) -> Right (fromMaybe (PackageName repo) name, owner, repo)
_ -> throwError ("`" <> source <> "` doesn't look like a github repo")
buildGithub :: Maybe String -> Maybe PackageName -> StringMapState (Maybe PackageName, Source.PackageSpec)
buildGithub source name = do
(name, ghOwner, ghRepo) <- identity
ref <- consumeOptionalAttrT "ref"
ghCommon <- buildGitCommon
withName (Just name) $ packageSpec $ Source.Github Source.GithubSpec {
Source.ghOwner,
Source.ghRepo,
Source.ghCommon,
Source.ghRef = Source.Template . fromMaybe "master" $ ref
}
where
explicitSource (owner, repo) = (fromMaybe (PackageName repo) name, owner, repo)
identity :: StringMapState (PackageName, String, String)
identity = do
owner <- consumeOptionalAttrT "owner"
repo <- consumeOptionalAttrT "repo"
lift $ buildIdentity owner repo
buildIdentity :: Maybe String -> Maybe String -> Either String (PackageName, String, String)
buildIdentity owner repo = case (fromAttrs, fromSource, fromNameAsSource) of
(Just fromAttrs, Nothing, _) -> Right fromAttrs
(Nothing, Just fromSource, _) -> fromSource
(Nothing, Nothing, Just fromName) -> fromName
(Nothing, Nothing, Nothing) -> throwError "name, source or --owner/--repo required"
(Just _, Just _, _) -> throwError "use source or --owner/--repo, not both"
where
ownerAndRepo :: Maybe (String, String) = (,) <$> owner <*> repo
fromAttrs :: Maybe (PackageName, String, String) = explicitSource <$> ownerAndRepo
fromSource = parseGithubSource name <$> source
fromNameAsSource = parseGithubSource Nothing <$> unPackageName <$> name
parseAdd :: Opts.Parser (Either AppError (PackageName, Source.PackageSpec))
parseAdd = build
<$> Opts.optional parseName
<*> Opts.optional parseSource
<*> parsePackageAttrs PackageAttrsForAdd
where
parseSource = Opts.argument Opts.str (Opts.metavar "SOURCE")
build :: Maybe PackageName -> Maybe String -> ParsedAttrs -> Either AppError (PackageName, Source.PackageSpec)
build nameOpt source attrs = do
(name, package) <- processAdd nameOpt source attrs
name <- toRight (AppError "--name required") name
return (name, package)
data PackageAttrsMode = PackageAttrsForAdd | PackageAttrsForUpdate | PackageAttrsForSlice
parsePackageAttrs :: PackageAttrsMode -> Opts.Parser ParsedAttrs
parsePackageAttrs mode = ParsedAttrs . AMap.fromList <$> many parseAttribute where
parseAttribute :: Opts.Parser (Key, String)
parseAttribute =
Opts.option (Opts.maybeReader parseKeyVal)
( Opts.long "attr" <>
Opts.short 'a' <>
Opts.metavar "KEY=VAL" <>
Opts.help "Set the package spec attribute <KEY> to <VAL>"
) <|> shortcutAttributes <|>
(("type",) <$> Opts.strOption
( Opts.long "type" <>
Opts.short 't' <>
Opts.metavar "TYPE" <>
Opts.help ("The source type. "<> Source.validTypesDoc)
))
Parse " key = val " into ( " key " , " val " )
parseKeyVal :: String -> Maybe (Key, String)
parseKeyVal str = case span (/= '=') str of
(key, '=':val) -> Just (Key.fromString key, val)
_ -> Nothing
shortcutAttributes :: Opts.Parser (Key, String)
shortcutAttributes = foldr (<|>) empty $ mkShortcutAttribute <$> shortcuts
where
shortcuts = case mode of
PackageAttrsForAdd -> allShortcuts
PackageAttrsForUpdate -> allShortcuts
PackageAttrsForSlice -> sourceShortcuts
allShortcuts = ("nix", "all") : sourceShortcuts
sourceShortcuts = [
("ref", "github / git / git-local"),
("fetchSubmodules", "github / git / git-local"),
("owner", "github"),
("repo", "github"),
("url", "url / file / git"),
("path", "git-local"),
("version", "all")]
mkShortcutAttribute :: (String, String) -> Opts.Parser (Key, String)
mkShortcutAttribute (attr, types) =
(Key.fromString attr,) <$> Opts.strOption
( Opts.long attr <>
Opts.metavar (toUpper <$> attr) <>
Opts.help
(
"Equivalent to --attr " <> attr <> "=" <> (toUpper <$> attr) <>
", used for source type " <> types
)
)
parseCmdShow :: Opts.ParserInfo (IO ())
parseCmdShow = subcommand "Show source details" (cmdShow <$> parseCommon <*> parseNames) []
cmdShow :: CommonOpts -> Maybe (NonEmpty PackageName) -> IO ()
cmdShow opts names =
do
sourceFiles <- requireConfiguredSources $ sources opts
sequence_ $ map showPkgs (NonEmpty.toList sourceFiles) where
showPkgs :: Source.SourceFile -> IO ()
showPkgs sourceFile = do
putStrLn $ " - "<>Source.pathOfSource sourceFile<>":"
packages <- Source.loadSourceFile sourceFile
putStrLn $ Source.encodePrettyString (filterPackages names packages)
filterPackages Nothing p = Source.unPackages p
filterPackages (Just names) p = HMap.filterWithKey pred (Source.unPackages p) where
pred name _ = elem name names
parseCmdLs :: Opts.ParserInfo (IO ())
parseCmdLs = subcommand "list sources" (cmdLs <$> parseCommon) []
cmdLs :: CommonOpts -> IO ()
cmdLs opts =
do
sourceFiles <- requireConfiguredSources $ sources opts
sources <- Source.loadSources sourceFiles
putStrLn $
intercalate "\n" $
map (\s -> " - "<> asString s) $
HMap.keys $ Source.unPackages $
Source.merge $ sources
requireConfiguredSources :: Maybe (NonEmpty Source.SourceFile) -> IO (NonEmpty Source.SourceFile)
requireConfiguredSources sources =
Source.configuredSources sources >>=
(liftMaybe (AppError "No wrangle JSON files found"))
Init
data InitOpts = InitOpts {
nixpkgsChannel :: Maybe String
}
parseCmdInit :: Opts.ParserInfo (IO ())
parseCmdInit = subcommand "Initialize nix-wrangle" (
cmdInit <$> parseInit) []
where
parseInit = Opts.optional (Opts.strOption
( Opts.long "pkgs" <>
Opts.short 'p' <>
Opts.metavar "CHANNEL" <>
Opts.help ("Pin nixpkgs to CHANNEL")
))
cmdInit :: Maybe String -> IO ()
cmdInit nixpkgs = do
isGit <- Dir.doesPathExist ".git"
debugLn $ "isGit ? " <> (show isGit)
addMultiple OverwriteSource NoAutoInit (Right (wrangleSpec : (selfSpecs isGit ++ nixpkgsSpecs))) commonOpts
updateDefaultNix defaultNixOptsDefault
where
commonOpts = CommonOpts { sources = Nothing }
wrangleSpec = (PackageName "nix-wrangle", Source.PackageSpec {
Source.sourceSpec = Source.Github Source.GithubSpec {
Source.ghOwner = "timbertson",
Source.ghRepo = "nix-wrangle",
Source.ghCommon = Source.defaultGitCommon,
Source.ghRef = Source.Template "v1"
},
Source.fetchAttrs = AMap.empty,
Source.packageAttrs = AMap.fromList [("nix", "nix")]
})
nixpkgsSpecs = case nixpkgs of
Nothing -> []
Just channel -> [(PackageName "pkgs", Source.PackageSpec {
Source.sourceSpec = Source.Github Source.GithubSpec {
Source.ghOwner = "NixOS",
Source.ghRepo = "nixpkgs-channels",
Source.ghCommon = Source.defaultGitCommon,
Source.ghRef = Source.Template channel
},
Source.fetchAttrs = AMap.empty,
Source.packageAttrs = AMap.fromList [("nix", defaultDepNixPath)]
})]
selfSpecs isGit =
if isGit then [
(PackageName "self", Source.PackageSpec {
Source.sourceSpec = Source.GitLocal Source.GitLocalSpec {
Source.glPath = Source.RelativePath ".",
Source.glRef = Nothing,
Source.glCommon = Source.defaultGitCommon
},
Source.fetchAttrs = AMap.empty,
Source.packageAttrs = AMap.empty
})
] else []
data AddMode = AddSource | OverwriteSource | AddIfMissing
data AutoInit = AutoInit | NoAutoInit
parseCmdAdd :: Opts.ParserInfo (IO ())
parseCmdAdd = subcommand "Add a source" (cmdAdd <$> parseAddMode <*> parseAdd <*> parseCommon)
[ examplesDoc [
"nix-wrangle add timbertson/opam2nix-packages",
"nix-wrangle add pkgs nixos/nixpkgs-channels --ref nixos-unstable",
"nix-wrangle add pkgs nixos/nixpkgs-channels --ref nixos-unstable",
"nix-wrangle add pkgs --owner nixos --repo nixpkgs-channels --ref nixos-unstable",
"nix-wrangle add --type git-local self .."
]]
where
parseAddMode = Opts.flag AddSource OverwriteSource
(Opts.long "replace" <> Opts.help "Replace existing source")
addMultiple :: AddMode -> AutoInit -> Either AppError [(PackageName, Source.PackageSpec)] -> CommonOpts -> IO ()
addMultiple addMode autoInit addOpts opts =
do
addSpecs <- liftEither $ addOpts
configuredSources <- Source.configuredSources $ sources opts
let sourceFile = NonEmpty.head <$> configuredSources
debugLn $ "sourceFile: " <> show sourceFile
source <- loadOrInit autoInit sourceFile
debugLn $ "source: " <> show source
let (sourceFile, inputSource) = source
let baseSource = fromMaybe (Source.emptyPackages) inputSource
modifiedSource <- foldM addSingle baseSource addSpecs
Dir.createDirectoryIfMissing True $ PosixPath.takeDirectory (Source.pathOfSource sourceFile)
Source.writeSourceFile sourceFile modifiedSource
where
addSingle :: Source.Packages -> (PackageName, Source.PackageSpec) -> IO Source.Packages
addSingle base (name, inputSpec) = do
shouldAdd' <- shouldAdd addMode name base
if shouldAdd' then do
putStrLn $ "Adding " <> show name <> " // " <> show inputSpec
spec <- Fetch.prefetch name inputSpec
return $ Source.add base name spec
else
return base
loadOrInit :: AutoInit -> Maybe Source.SourceFile -> IO (Source.SourceFile, Maybe Source.Packages)
loadOrInit AutoInit Nothing = do
let source = Source.DefaultSource
infoLn $ Source.pathOfSource source <> " does not exist, initializing..."
cmdInit Nothing
loadOrInit NoAutoInit (Just source)
loadOrInit NoAutoInit Nothing = return (Source.DefaultSource, Nothing)
loadOrInit _ (Just f) = do
exists <- Source.doesSourceExist f
loaded <- sequence $ if exists
then Just $ Source.loadSourceFile f
else Nothing
return (f, loaded)
shouldAdd :: AddMode -> PackageName -> Source.Packages -> IO Bool
shouldAdd mode name@(PackageName nameStr) existing =
if Source.member existing name then
case mode of
AddSource -> throwM $ AppError $ nameStr <> " already present, use --replace to replace it"
OverwriteSource -> infoLn ("Replacing existing " <> nameStr) >> return True
AddIfMissing -> infoLn ("Not replacing existing " <> nameStr) >> return False
else return True
cmdAdd :: AddMode -> Either AppError (PackageName, Source.PackageSpec) -> CommonOpts -> IO ()
cmdAdd addMode addOpt opts = addMultiple addMode AutoInit ((\x -> [x]) <$> addOpt) opts
Rm
parseCmdRm :: Opts.ParserInfo (IO ())
parseCmdRm = subcommand "Remove one or more sources" (cmdRm <$> parseNames <*> parseCommon) []
cmdRm :: Maybe (NonEmpty PackageName) -> CommonOpts -> IO ()
cmdRm maybeNames opts = do
packageNames <- liftMaybe (AppError "at least one name required") maybeNames
alterPackagesNamed (Just packageNames) opts updateSingle where
updateSingle :: Source.Packages -> PackageName -> IO Source.Packages
updateSingle packages name = do
infoLn $ " - removing " <> (show name) <> "..."
return $ Source.remove packages name
parseCmdUpdate :: Opts.ParserInfo (IO ())
parseCmdUpdate = subcommand "Update one or more sources"
(cmdUpdate <$> parseNames <*> parsePackageAttrs PackageAttrsForUpdate <*> parseCommon)
[ examplesDoc [
"nix-wrangle update pkgs --ref nixpkgs-unstable",
"nix-wrangle update gup --nix nix/"
]]
cmdUpdate :: Maybe (NonEmpty PackageName) -> ParsedAttrs -> CommonOpts -> IO ()
cmdUpdate packageNamesOpt parsedAttrs opts =
or specify one or more explicit package names
if isJust packageNamesOpt || isEmptyAttrs parsedAttrs
then alterPackagesNamed packageNamesOpt opts updateSingle
else throwM $ AppError (
"You must explicitly list dependency names when modifying attributes (" <> show parsedAttrs <> ")"
) where
updateSingle :: Source.Packages -> PackageName -> IO Source.Packages
updateSingle packages name = do
infoLn $ " - updating " <> (show name) <> "..."
original <- liftEither $ Source.lookup name packages
debugLn $ "original: " <> show original
let updateAttrs = extractAttrs PackageAttrsForUpdate (Just name) parsedAttrs
debugLn $ "updateAttrs: " <> show updateAttrs
newSpec <- liftEither $ Source.updatePackageSpec original updateAttrs
fetched <- Fetch.prefetch name newSpec
if fetched == original
then infoLn " ... (unchanged)"
else return ()
return $ Source.add packages name fetched
processPackagesNamed :: Maybe (NonEmpty PackageName) -> CommonOpts
-> (Source.SourceFile -> Source.Packages -> [PackageName] -> IO ())-> IO ()
processPackagesNamed packageNamesOpt opts process = do
sourceFiles <- requireConfiguredSources $ sources opts
sources <- sequence $ loadSource <$> sourceFiles
checkMissingKeys (snd <$> sources)
sequence_ $ traverseSources <$> sources
where
checkMissingKeys :: NonEmpty Source.Packages -> IO ()
checkMissingKeys sources = case missingKeys of
[] -> return ()
_ -> fail $ "No such packages: " <> show missingKeys
where
(_, missingKeys) = partitionPackageNames $ Source.merge sources
partitionPackageNames :: Source.Packages -> ([PackageName], [PackageName])
partitionPackageNames sources = case packageNamesOpt of
Nothing -> (Source.keys sources, [])
(Just names) -> partition (Source.member sources) (NonEmpty.toList names)
traverseSources :: (Source.SourceFile, Source.Packages) -> IO ()
traverseSources (sourceFile, sources) = do
let (packageNames, _) = partitionPackageNames sources
debugLn $ "Package names: " <> (show packageNames)
process sourceFile sources packageNames
alterPackagesNamed :: Maybe (NonEmpty PackageName) -> CommonOpts -> (Source.Packages -> PackageName -> IO Source.Packages)-> IO ()
alterPackagesNamed packageNamesOpt opts updateSingle =
processPackagesNamed packageNamesOpt opts $ \sourceFile sources packageNames -> do
infoLn $ "Updating "<> Source.pathOfSource sourceFile <> " ..."
updated <- foldM updateSingle sources packageNames
Source.writeSourceFile sourceFile updated
loadSource :: Source.SourceFile -> IO (Source.SourceFile, Source.Packages)
loadSource f = (,) f <$> Source.loadSourceFile f
#ifdef ENABLE_SPLICE
Splice
data SpliceOutput = SpliceOutput FilePath | SpliceReplace
data SpliceOpts = SpliceOpts {
spliceName :: Maybe PackageName,
spliceAttrs :: StringMap,
spliceInput :: FilePath,
spliceOutput :: SpliceOutput,
spliceUpdate :: Bool
}
parseCmdSplice :: Opts.ParserInfo (IO ())
parseCmdSplice = subcommand "Splice current `self` source into a .nix document"
(cmdSplice <$> parseSplice <*> parseCommon) [
Opts.footerDoc $ Just $ docLines [
softDocLines [
"This command generates a copy of the input .nix file, with",
"the `src` attribute replaced with the current fetcher for",
"the source named `public`."],
"",
softDocLines [
"This allows you to build a standalone",
".nix file for publishing (e.g. to nixpkgs itself)" ],
"",
softDocLines [
"If your source does not come from an existing wrangle.json,",
"you can pass it in explicitly as attributes, like with",
"`nix-wrangle add` (i.e. --type, --repo, --owner, --url, etc)"]
]]
where
parseSplice = build <$> parseInput <*> parseOutput <*> parseName <*> parsePackageAttrs ParsePackageAttrsSource <*> parseUpdate where
build spliceInput spliceOutput spliceName spliceAttrs spliceUpdate =
SpliceOpts { spliceInput, spliceOutput, spliceName, spliceAttrs, spliceUpdate }
parseInput = Opts.argument Opts.str (Opts.metavar "SOURCE")
parseName = Opts.optional (PackageName <$> Opts.strOption
( Opts.long "name" <>
Opts.short 'n' <>
Opts.metavar "NAME" <>
Opts.help ("Source name to use (default: public)")
))
parseOutput = explicitOutput <|> replaceOutput
replaceOutput = Opts.flag' SpliceReplace
( Opts.long "replace" <>
Opts.short 'r' <>
Opts.help "Overwrite input file"
)
explicitOutput = SpliceOutput <$> (Opts.strOption
( Opts.long "output" <>
Opts.short 'o' <>
Opts.metavar "DEST" <>
Opts.help ("Destination file")
))
parseUpdate = Opts.flag True False
( Opts.long "no-update" <>
Opts.help "Don't fetch the latest version of `public` before splicing"
)
cmdSplice :: SpliceOpts -> CommonOpts -> IO ()
cmdSplice (SpliceOpts { spliceName, spliceAttrs, spliceInput, spliceOutput, spliceUpdate}) opts = do
fileContents <- Splice.load spliceInput
let expr = Splice.parse fileContents
expr <- Splice.getExn expr
let existingSrcSpans = Splice.extractSourceLocs expr
srcSpan <- case existingSrcSpans of
[single] -> return single
other -> fail $ "No single source found in " ++ (show other)
self <- getPublic
debugLn $ "got source: " <> show self
replacedText <- liftEither $ Splice.replaceSourceLoc fileContents self srcSpan
Source.writeFileText outputPath replacedText
where
outputPath = case spliceOutput of
SpliceOutput p -> p
SpliceReplace -> spliceInput
getPublic :: IO Source.PackageSpec
getPublic =
if HMap.null spliceAttrs then do
sourceFiles <- requireConfiguredSources $ sources opts
sources <- Source.merge <$> Source.loadSources sourceFiles
let name = (spliceName `orElse` PackageName "public")
if spliceUpdate then
cmdUpdate (Just $ name :| []) HMap.empty opts
else
return ()
liftEither $ Source.lookup name sources
else do
infoLn $ "Splicing anonymous source from attributes: " <> show spliceAttrs
self <- liftEither $ snd <$> processAdd Nothing Nothing spliceAttrs
Fetch.prefetch (PackageName "self") self
#endif
parseCmdDefaultNix :: Opts.ParserInfo (IO ())
parseCmdDefaultNix = subcommand "Generate default.nix"
(pure cmdDefaultNix) [
Opts.footerDoc $ Just $
"Typically this only needs to be done once, though it" <>
" may be necessary if you have a very old default.nix"
]
cmdDefaultNix :: IO ()
cmdDefaultNix = updateDefaultNix (DefaultNixOpts { force = True })
data DefaultNixOpts = DefaultNixOpts {
force :: Bool
}
defaultNixOptsDefault = DefaultNixOpts { force = False }
updateDefaultNix :: DefaultNixOpts -> IO ()
updateDefaultNix (DefaultNixOpts { force }) = do
continue <- if force then return True else shouldWriteFile
if continue then Source.writeFileText path contents
else infoLn $ "Note: not replacing existing "<>path<>", run `nix-wrangle default-nix` to explicitly override"
where
path = "default.nix"
markerText :: T.Text = "# Note: This file is generated by nix-wrangle"
contents :: T.Text
contents = T.unlines [
markerText,
"# It can be regenerated with `nix-wrangle default-nix`",
defaultNixContents ]
shouldWriteFile :: IO Bool
shouldWriteFile = do
exists <- Dir.doesFileExist path
if exists then
(T.isInfixOf markerText) <$> TE.decodeUtf8 <$> B.readFile path
else
return True
defaultDepNixPath = "default.nix"
defaultNixContents = T.strip [QQ.s|
let
systemNixpkgs = import <nixpkgs> {};
fallback = val: dfl: if val == null then dfl else val;
makeFetchers = pkgs: {
github = pkgs.fetchFromGitHub;
url = builtins.fetchTarball;
};
fetch = pkgs: source:
(builtins.getAttr source.type (makeFetchers pkgs)) source.fetch;
sourcesJson = (builtins.fromJSON (builtins.readFile ./nix/wrangle.json)).sources;
wrangleJson = sourcesJson.nix-wrangle or (abort "No nix-wrangle entry in nix/wrangle.json");
in
{ pkgs ? null, nix-wrangle ? null, ... }@provided:
let
_pkgs = fallback pkgs (
if builtins.hasAttr "pkgs" sourcesJson
then import (fetch systemNixpkgs sourcesJson.pkgs) {} else systemNixpkgs
);
_wrangle = fallback nix-wrangle (_pkgs.callPackage "${fetch _pkgs wrangleJson}/${wrangleJson.nix}" {});
in
(_wrangle.api { pkgs = _pkgs; }).inject { inherit provided; path = ./.; }
|]
cmdInstallCheck :: IO ()
cmdInstallCheck = do
apiContext <- Fetch.globalApiContext
let apiPath = Fetch.apiNix apiContext
infoLn $ "checking for nix API at "<>apiPath
apiExists <- Dir.doesFileExist apiPath
if not apiExists
then exitFailure
else return ()
infoLn "ok"
|
21b107ba9ed5da9fc83d4e33f4e4a1e56850ff03c7a6fb5e570e1075f75010fd | Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library | PaymentMethodDetailsAchCreditTransfer.hs | {-# LANGUAGE MultiWayIf #-}
CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
{-# LANGUAGE OverloadedStrings #-}
| Contains the types generated from the schema PaymentMethodDetailsAchCreditTransfer
module StripeAPI.Types.PaymentMethodDetailsAchCreditTransfer where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
-- | Defines the object schema located at @components.schemas.payment_method_details_ach_credit_transfer@ in the specification.
data PaymentMethodDetailsAchCreditTransfer = PaymentMethodDetailsAchCreditTransfer
{ -- | account_number: Account number to transfer funds to.
--
-- Constraints:
--
* Maximum length of 5000
paymentMethodDetailsAchCreditTransferAccountNumber :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text)),
-- | bank_name: Name of the bank associated with the routing number.
--
-- Constraints:
--
* Maximum length of 5000
paymentMethodDetailsAchCreditTransferBankName :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text)),
-- | routing_number: Routing transit number for the bank account to transfer funds to.
--
-- Constraints:
--
* Maximum length of 5000
paymentMethodDetailsAchCreditTransferRoutingNumber :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text)),
-- | swift_code: SWIFT code of the bank associated with the routing number.
--
-- Constraints:
--
* Maximum length of 5000
paymentMethodDetailsAchCreditTransferSwiftCode :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text))
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON PaymentMethodDetailsAchCreditTransfer where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("account_number" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferAccountNumber obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("bank_name" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferBankName obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("routing_number" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferRoutingNumber obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("swift_code" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferSwiftCode obj) : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("account_number" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferAccountNumber obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("bank_name" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferBankName obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("routing_number" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferRoutingNumber obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("swift_code" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferSwiftCode obj) : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON PaymentMethodDetailsAchCreditTransfer where
parseJSON = Data.Aeson.Types.FromJSON.withObject "PaymentMethodDetailsAchCreditTransfer" (\obj -> (((GHC.Base.pure PaymentMethodDetailsAchCreditTransfer GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "account_number")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "bank_name")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "routing_number")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "swift_code"))
-- | Create a new 'PaymentMethodDetailsAchCreditTransfer' with all required fields.
mkPaymentMethodDetailsAchCreditTransfer :: PaymentMethodDetailsAchCreditTransfer
mkPaymentMethodDetailsAchCreditTransfer =
PaymentMethodDetailsAchCreditTransfer
{ paymentMethodDetailsAchCreditTransferAccountNumber = GHC.Maybe.Nothing,
paymentMethodDetailsAchCreditTransferBankName = GHC.Maybe.Nothing,
paymentMethodDetailsAchCreditTransferRoutingNumber = GHC.Maybe.Nothing,
paymentMethodDetailsAchCreditTransferSwiftCode = GHC.Maybe.Nothing
}
| null | https://raw.githubusercontent.com/Haskell-OpenAPI-Code-Generator/Stripe-Haskell-Library/ba4401f083ff054f8da68c741f762407919de42f/src/StripeAPI/Types/PaymentMethodDetailsAchCreditTransfer.hs | haskell | # LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
| Defines the object schema located at @components.schemas.payment_method_details_ach_credit_transfer@ in the specification.
| account_number: Account number to transfer funds to.
Constraints:
| bank_name: Name of the bank associated with the routing number.
Constraints:
| routing_number: Routing transit number for the bank account to transfer funds to.
Constraints:
| swift_code: SWIFT code of the bank associated with the routing number.
Constraints:
| Create a new 'PaymentMethodDetailsAchCreditTransfer' with all required fields. | CHANGE WITH CAUTION : This is a generated code file generated by -OpenAPI-Code-Generator/Haskell-OpenAPI-Client-Code-Generator .
| Contains the types generated from the schema PaymentMethodDetailsAchCreditTransfer
module StripeAPI.Types.PaymentMethodDetailsAchCreditTransfer where
import qualified Control.Monad.Fail
import qualified Data.Aeson
import qualified Data.Aeson as Data.Aeson.Encoding.Internal
import qualified Data.Aeson as Data.Aeson.Types
import qualified Data.Aeson as Data.Aeson.Types.FromJSON
import qualified Data.Aeson as Data.Aeson.Types.Internal
import qualified Data.Aeson as Data.Aeson.Types.ToJSON
import qualified Data.ByteString.Char8
import qualified Data.ByteString.Char8 as Data.ByteString.Internal
import qualified Data.Foldable
import qualified Data.Functor
import qualified Data.Maybe
import qualified Data.Scientific
import qualified Data.Text
import qualified Data.Text.Internal
import qualified Data.Time.Calendar as Data.Time.Calendar.Days
import qualified Data.Time.LocalTime as Data.Time.LocalTime.Internal.ZonedTime
import qualified GHC.Base
import qualified GHC.Classes
import qualified GHC.Int
import qualified GHC.Show
import qualified GHC.Types
import qualified StripeAPI.Common
import StripeAPI.TypeAlias
import qualified Prelude as GHC.Integer.Type
import qualified Prelude as GHC.Maybe
data PaymentMethodDetailsAchCreditTransfer = PaymentMethodDetailsAchCreditTransfer
* Maximum length of 5000
paymentMethodDetailsAchCreditTransferAccountNumber :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text)),
* Maximum length of 5000
paymentMethodDetailsAchCreditTransferBankName :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text)),
* Maximum length of 5000
paymentMethodDetailsAchCreditTransferRoutingNumber :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text)),
* Maximum length of 5000
paymentMethodDetailsAchCreditTransferSwiftCode :: (GHC.Maybe.Maybe (StripeAPI.Common.Nullable Data.Text.Internal.Text))
}
deriving
( GHC.Show.Show,
GHC.Classes.Eq
)
instance Data.Aeson.Types.ToJSON.ToJSON PaymentMethodDetailsAchCreditTransfer where
toJSON obj = Data.Aeson.Types.Internal.object (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("account_number" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferAccountNumber obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("bank_name" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferBankName obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("routing_number" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferRoutingNumber obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("swift_code" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferSwiftCode obj) : GHC.Base.mempty))
toEncoding obj = Data.Aeson.Encoding.Internal.pairs (GHC.Base.mconcat (Data.Foldable.concat (Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("account_number" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferAccountNumber obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("bank_name" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferBankName obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("routing_number" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferRoutingNumber obj) : Data.Maybe.maybe GHC.Base.mempty (GHC.Base.pure GHC.Base.. ("swift_code" Data.Aeson.Types.ToJSON..=)) (paymentMethodDetailsAchCreditTransferSwiftCode obj) : GHC.Base.mempty)))
instance Data.Aeson.Types.FromJSON.FromJSON PaymentMethodDetailsAchCreditTransfer where
parseJSON = Data.Aeson.Types.FromJSON.withObject "PaymentMethodDetailsAchCreditTransfer" (\obj -> (((GHC.Base.pure PaymentMethodDetailsAchCreditTransfer GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "account_number")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "bank_name")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "routing_number")) GHC.Base.<*> (obj Data.Aeson.Types.FromJSON..:! "swift_code"))
mkPaymentMethodDetailsAchCreditTransfer :: PaymentMethodDetailsAchCreditTransfer
mkPaymentMethodDetailsAchCreditTransfer =
PaymentMethodDetailsAchCreditTransfer
{ paymentMethodDetailsAchCreditTransferAccountNumber = GHC.Maybe.Nothing,
paymentMethodDetailsAchCreditTransferBankName = GHC.Maybe.Nothing,
paymentMethodDetailsAchCreditTransferRoutingNumber = GHC.Maybe.Nothing,
paymentMethodDetailsAchCreditTransferSwiftCode = GHC.Maybe.Nothing
}
|
4f696501198f0b7124dcb118ef2f523de8207c6c2fd722f678023a60799fa20d | melange-re/melange | pparse_driver.mli | val parse_implementation : string -> Parsetree.structure
val parse_interface : string -> Parsetree.signature
| null | https://raw.githubusercontent.com/melange-re/melange/246e6df78fe3b6cc124cb48e5a37fdffd99379ed/jscomp/core/pparse_driver.mli | ocaml | val parse_implementation : string -> Parsetree.structure
val parse_interface : string -> Parsetree.signature
| |
0ab462239b78d83a0f0526bcfde0ba4406dd229877bee3f0b1f7b139d829c046 | conreality/conreality | irc.mli | (* This is free and unencumbered software released into the public domain. *)
module Message : sig
include module type of struct include Irc_message end
end
module Client : sig
include module type of struct include Irc_client_lwt end
end
module Connection : sig
type t = Client.connection_t
end
module Callback : sig
type t = Connection.t -> Message.parse_result -> unit Lwt.t
end
| null | https://raw.githubusercontent.com/conreality/conreality/e03328ef1f0056b58e4ffe181a279a1dc776e094/src/consensus/messaging/irc.mli | ocaml | This is free and unencumbered software released into the public domain. |
module Message : sig
include module type of struct include Irc_message end
end
module Client : sig
include module type of struct include Irc_client_lwt end
end
module Connection : sig
type t = Client.connection_t
end
module Callback : sig
type t = Connection.t -> Message.parse_result -> unit Lwt.t
end
|
088008c1e1b16bb58e9566b8fbebfc2e99e3ca7474636febb7ce1e995d5d2a26 | g-andrade/quack | quic_instream.erl | -module(quic_instream).
-behaviour(gen_server).
-include("quic_data_kv.hrl").
-include("quic_frame.hrl").
%% ------------------------------------------------------------------
%% API Function Exports
%% ------------------------------------------------------------------
-export([start_link/3]). -ignore_xref({start_link, 3}).
-export([dispatch_frame/2]).
%% ------------------------------------------------------------------
gen_server Function Exports
%% ------------------------------------------------------------------
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
%% ------------------------------------------------------------------
Macro Definitions
%% ------------------------------------------------------------------
-define(CB_MODULE, ?MODULE).
%% ------------------------------------------------------------------
%% Record Definitions
%% ------------------------------------------------------------------
-record(state, {
stream_id :: stream_id(),
data_packing :: quic_stream_handler:data_packing(),
handler_module :: module(),
handler_pid :: pid(),
instream_window :: quic_instream_window:window()
}).
-type state() :: #state{}.
-export_type([state/0]).
-type dispatch_option() :: quic_outflow:packet_option().
-export_type([dispatch_option/0]).
%% ------------------------------------------------------------------
%% API Function Definitions
%% ------------------------------------------------------------------
start_link(StreamId, HandlerModule, HandlerPid) ->
gen_server:start_link(?CB_MODULE,
[StreamId, HandlerModule, HandlerPid],
[]).
-spec dispatch_frame(Pid :: pid(), Frame :: stream_frame() | stream_fin_frame()) -> ok.
dispatch_frame(Pid, Frame) ->
gen_server:cast(Pid, {inbound_frame, Frame}).
%% ------------------------------------------------------------------
gen_server Function Definitions
%% ------------------------------------------------------------------
init([StreamId, HandlerModule, HandlerPid]) ->
gen_server:cast(self(), {initialize, StreamId, HandlerModule, HandlerPid}),
{ok, uninitialized}.
handle_call(Request, From, State) when State =/= uninitialized ->
lager:debug("unhandled call ~p from ~p on state ~p",
[Request, From, State]),
{noreply, State}.
handle_cast({initialize, StreamId, HandlerModule, HandlerPid}, uninitialized) ->
{ok, DataPacking} = HandlerModule:start_instream(HandlerPid, StreamId, self()),
InitialState =
#state{
stream_id = StreamId,
data_packing = DataPacking,
handler_module = HandlerModule,
handler_pid = HandlerPid,
instream_window = new_instream_window(DataPacking)
},
{noreply, InitialState};
handle_cast({inbound_frame, #stream_frame{} = Frame}, State) ->
#stream_frame{ offset = Offset,
data_payload = Data } = Frame,
StateB = insert_into_instream_window(Offset, Data, State),
{StateC, ConsumedValue} = consume_instream_window_value(StateB),
(is_consumed_value_empty(ConsumedValue, StateC#state.data_packing)
orelse handle_consumed_value(ConsumedValue, StateC)),
{noreply, StateC};
handle_cast(Msg, State) when State =/= uninitialized ->
lager:debug("unhandled cast ~p on state ~p", [Msg, State]),
{noreply, State}.
handle_info(Info, State) when State =/= uninitialized ->
lager:debug("unhandled info ~p on state ~p", [Info, State]),
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
%% ------------------------------------------------------------------
%% Internal Function Definitions
%% ------------------------------------------------------------------
new_instream_window(raw) ->
quic_instream_window_unordered_data:new();
new_instream_window(data_kv) ->
DataInstream = quic_instream_window_unordered_data:new(),
quic_instream_window_data_kv:new(DataInstream);
new_instream_window(http) ->
DataInstream = quic_instream_window_unordered_data:new(),
quic_instream_window_http:new(DataInstream).
insert_into_instream_window(Offset, Data, State) ->
Instream = State#state.instream_window,
case quic_instream_window:insert(Instream, Offset, Data) of
{ok, NewInstream} ->
State#state{ instream_window = NewInstream };
{error, stale_data} ->
lager:debug("got outdated data for stream ~p, offset ~p, with length ~p",
[State#state.stream_id, Offset, iolist_size(Data)]),
State
end.
consume_instream_window_value(State) ->
Instream = State#state.instream_window,
{NewInstream, ConsumedValue} = quic_instream_window:consume(Instream),
NewState = State#state{ instream_window = NewInstream },
{NewState, ConsumedValue}.
-spec is_consumed_value_empty(iodata() | [data_kv()] | [h2_frame:frame()], quic_stream_handler:data_packing())
-> boolean().
is_consumed_value_empty(Data, raw) ->
iolist_size(Data) < 1;
is_consumed_value_empty(DataKvs, data_kv) ->
DataKvs =:= [];
is_consumed_value_empty(HttpFrames, http) ->
HttpFrames =:= [].
handle_consumed_value(Consumed, State) ->
#state{ stream_id = StreamId,
handler_module = HandlerModule,
handler_pid = HandlerPid } = State,
ok = HandlerModule:handle_inbound(HandlerPid, StreamId, Consumed).
| null | https://raw.githubusercontent.com/g-andrade/quack/b6decb23674dd97f6918d8ac637ede2ef7bcc507/src/quic_instream.erl | erlang | ------------------------------------------------------------------
API Function Exports
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
Record Definitions
------------------------------------------------------------------
------------------------------------------------------------------
API Function Definitions
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
------------------------------------------------------------------
Internal Function Definitions
------------------------------------------------------------------ | -module(quic_instream).
-behaviour(gen_server).
-include("quic_data_kv.hrl").
-include("quic_frame.hrl").
-export([start_link/3]). -ignore_xref({start_link, 3}).
-export([dispatch_frame/2]).
gen_server Function Exports
-export([init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3]).
Macro Definitions
-define(CB_MODULE, ?MODULE).
-record(state, {
stream_id :: stream_id(),
data_packing :: quic_stream_handler:data_packing(),
handler_module :: module(),
handler_pid :: pid(),
instream_window :: quic_instream_window:window()
}).
-type state() :: #state{}.
-export_type([state/0]).
-type dispatch_option() :: quic_outflow:packet_option().
-export_type([dispatch_option/0]).
start_link(StreamId, HandlerModule, HandlerPid) ->
gen_server:start_link(?CB_MODULE,
[StreamId, HandlerModule, HandlerPid],
[]).
-spec dispatch_frame(Pid :: pid(), Frame :: stream_frame() | stream_fin_frame()) -> ok.
dispatch_frame(Pid, Frame) ->
gen_server:cast(Pid, {inbound_frame, Frame}).
gen_server Function Definitions
init([StreamId, HandlerModule, HandlerPid]) ->
gen_server:cast(self(), {initialize, StreamId, HandlerModule, HandlerPid}),
{ok, uninitialized}.
handle_call(Request, From, State) when State =/= uninitialized ->
lager:debug("unhandled call ~p from ~p on state ~p",
[Request, From, State]),
{noreply, State}.
handle_cast({initialize, StreamId, HandlerModule, HandlerPid}, uninitialized) ->
{ok, DataPacking} = HandlerModule:start_instream(HandlerPid, StreamId, self()),
InitialState =
#state{
stream_id = StreamId,
data_packing = DataPacking,
handler_module = HandlerModule,
handler_pid = HandlerPid,
instream_window = new_instream_window(DataPacking)
},
{noreply, InitialState};
handle_cast({inbound_frame, #stream_frame{} = Frame}, State) ->
#stream_frame{ offset = Offset,
data_payload = Data } = Frame,
StateB = insert_into_instream_window(Offset, Data, State),
{StateC, ConsumedValue} = consume_instream_window_value(StateB),
(is_consumed_value_empty(ConsumedValue, StateC#state.data_packing)
orelse handle_consumed_value(ConsumedValue, StateC)),
{noreply, StateC};
handle_cast(Msg, State) when State =/= uninitialized ->
lager:debug("unhandled cast ~p on state ~p", [Msg, State]),
{noreply, State}.
handle_info(Info, State) when State =/= uninitialized ->
lager:debug("unhandled info ~p on state ~p", [Info, State]),
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
new_instream_window(raw) ->
quic_instream_window_unordered_data:new();
new_instream_window(data_kv) ->
DataInstream = quic_instream_window_unordered_data:new(),
quic_instream_window_data_kv:new(DataInstream);
new_instream_window(http) ->
DataInstream = quic_instream_window_unordered_data:new(),
quic_instream_window_http:new(DataInstream).
insert_into_instream_window(Offset, Data, State) ->
Instream = State#state.instream_window,
case quic_instream_window:insert(Instream, Offset, Data) of
{ok, NewInstream} ->
State#state{ instream_window = NewInstream };
{error, stale_data} ->
lager:debug("got outdated data for stream ~p, offset ~p, with length ~p",
[State#state.stream_id, Offset, iolist_size(Data)]),
State
end.
consume_instream_window_value(State) ->
Instream = State#state.instream_window,
{NewInstream, ConsumedValue} = quic_instream_window:consume(Instream),
NewState = State#state{ instream_window = NewInstream },
{NewState, ConsumedValue}.
-spec is_consumed_value_empty(iodata() | [data_kv()] | [h2_frame:frame()], quic_stream_handler:data_packing())
-> boolean().
is_consumed_value_empty(Data, raw) ->
iolist_size(Data) < 1;
is_consumed_value_empty(DataKvs, data_kv) ->
DataKvs =:= [];
is_consumed_value_empty(HttpFrames, http) ->
HttpFrames =:= [].
%% Forwards a non-empty consumed value to the registered stream handler.
%% The handler must return ok; anything else raises badmatch here on
%% purpose, surfacing the misbehaving handler.
handle_consumed_value(Consumed, State) ->
    #state{ stream_id = StreamId,
            handler_module = HandlerModule,
            handler_pid = HandlerPid } = State,
    ok = HandlerModule:handle_inbound(HandlerPid, StreamId, Consumed).
|
982d148a909fd52d5020e0603aaa59d2aaceff383c9b67c9619f552c52c924db | aws-beam/aws-erlang | aws_savingsplans.erl | %% WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
%% See https://github.com/aws-beam/aws-codegen for more details.

%% @doc Savings Plans are a pricing model that offer significant savings on
%% AWS usage (for example, on Amazon EC2 instances).
%%
%% You commit to a consistent amount of usage, in USD per hour, for a term of
%% 1 or 3 years, and receive a lower price for that usage. For more
%% information, see the AWS Savings Plans User Guide.
-module(aws_savingsplans).
-export([create_savings_plan/2,
create_savings_plan/3,
delete_queued_savings_plan/2,
delete_queued_savings_plan/3,
describe_savings_plan_rates/2,
describe_savings_plan_rates/3,
describe_savings_plans/2,
describe_savings_plans/3,
describe_savings_plans_offering_rates/2,
describe_savings_plans_offering_rates/3,
describe_savings_plans_offerings/2,
describe_savings_plans_offerings/3,
list_tags_for_resource/2,
list_tags_for_resource/3,
tag_resource/2,
tag_resource/3,
untag_resource/2,
untag_resource/3]).
-include_lib("hackney/include/hackney_lib.hrl").
%%====================================================================
%% API
%%====================================================================
%% @doc Creates a Savings Plan.
create_savings_plan(Client, Input) ->
    create_savings_plan(Client, Input, []).

create_savings_plan(Client, Input, Options0) ->
    %% Generated JSON operation: POST to a fixed path, no query string,
    %% no custom headers, default success status handling.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/CreateSavingsPlan"], [], [], Input, Options, undefined).
%% @doc Deletes the queued purchase for the specified Savings Plan.
delete_queued_savings_plan(Client, Input) ->
    delete_queued_savings_plan(Client, Input, []).

delete_queued_savings_plan(Client, Input, Options0) ->
    %% Generated JSON operation: POST to a fixed path, no query string,
    %% no custom headers, default success status handling.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/DeleteQueuedSavingsPlan"], [], [], Input, Options, undefined).
%% @doc Describes the specified Savings Plans rates.
describe_savings_plan_rates(Client, Input) ->
    describe_savings_plan_rates(Client, Input, []).

describe_savings_plan_rates(Client, Input, Options0) ->
    %% Generated JSON operation: POST to a fixed path, no query string,
    %% no custom headers, default success status handling.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/DescribeSavingsPlanRates"], [], [], Input, Options, undefined).
%% @doc Describes the specified Savings Plans.
describe_savings_plans(Client, Input) ->
    describe_savings_plans(Client, Input, []).

describe_savings_plans(Client, Input, Options0) ->
    %% Generated JSON operation: POST to a fixed path, no query string,
    %% no custom headers, default success status handling.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/DescribeSavingsPlans"], [], [], Input, Options, undefined).
%% @doc Describes the specified Savings Plans offering rates.
describe_savings_plans_offering_rates(Client, Input) ->
    describe_savings_plans_offering_rates(Client, Input, []).

describe_savings_plans_offering_rates(Client, Input, Options0) ->
    %% Generated JSON operation: POST to a fixed path, no query string,
    %% no custom headers, default success status handling.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/DescribeSavingsPlansOfferingRates"], [], [], Input, Options, undefined).
%% @doc Describes the specified Savings Plans offerings.
describe_savings_plans_offerings(Client, Input) ->
    describe_savings_plans_offerings(Client, Input, []).

describe_savings_plans_offerings(Client, Input, Options0) ->
    %% Generated JSON operation: POST to a fixed path, no query string,
    %% no custom headers, default success status handling.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/DescribeSavingsPlansOfferings"], [], [], Input, Options, undefined).
%% @doc Lists the tags for the specified resource.
list_tags_for_resource(Client, Input) ->
    list_tags_for_resource(Client, Input, []).

list_tags_for_resource(Client, Input, Options0) ->
    %% Generated JSON operation: POST to a fixed path, no query string,
    %% no custom headers, default success status handling.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/ListTagsForResource"], [], [], Input, Options, undefined).
%% @doc Adds the specified tags to the specified resource.
tag_resource(Client, Input) ->
    tag_resource(Client, Input, []).

tag_resource(Client, Input, Options0) ->
    %% Generated JSON operation: POST to a fixed path, no query string,
    %% no custom headers, default success status handling.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/TagResource"], [], [], Input, Options, undefined).
%% @doc Removes the specified tags from the specified resource.
untag_resource(Client, Input) ->
    untag_resource(Client, Input, []).

untag_resource(Client, Input, Options0) ->
    %% Generated JSON operation: POST to a fixed path, no query string,
    %% no custom headers, default success status handling.
    Options = [{send_body_as_binary, false},
               {receive_body_as_binary, false},
               {append_sha256_content_hash, false}
               | Options0],
    request(Client, post, ["/UntagResource"], [], [], Input, Options, undefined).
%%====================================================================
Internal functions
%%====================================================================
-spec request(aws_client:aws_client(), atom(), iolist(), list(),
              list(), map() | undefined, list(), pos_integer() | undefined) ->
    {ok, {integer(), list()}} |
    {ok, Result, {integer(), list(), hackney:client()}} |
    {error, Error, {integer(), list(), hackney:client()}} |
    {error, term()} when
    Result :: map(),
    Error :: map().
%% Wraps do_request/8 in a closure so aws_request:request/2 can apply
%% its retry policy (the closure is re-run on retriable failures).
request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    RequestFun = fun() -> do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) end,
    aws_request:request(RequestFun, Options).
%% Performs one HTTP round trip against the Savings Plans endpoint:
%% builds the URL, prepares the payload, optionally appends the SHA-256
%% content checksum header, SigV4-signs the request via aws_request and
%% hands the hackney response to handle_response/3.
%%
%% NOTE(review): the region is pinned to us-east-1 here — this is how
%% the generator emits this service; confirm before reusing elsewhere.
do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
    Client1 = Client#{service => <<"savingsplans">>,
                      region => <<"us-east-1">>},
    Host = build_host(<<"savingsplans">>, Client1),
    URL0 = build_url(Host, Path, Client1),
    URL = aws_request:add_query(URL0, Query),
    AdditionalHeaders1 = [ {<<"Host">>, Host}
                         , {<<"Content-Type">>, <<"application/x-amz-json-1.1">>}
                         ],
    %% The body is either taken verbatim from Input's <<"Body">> key or
    %% JSON-encoded, depending on the per-operation option.
    Payload =
        case proplists:get_value(send_body_as_binary, Options) of
            true ->
                maps:get(<<"Body">>, Input, <<"">>);
            false ->
                encode_payload(Input)
        end,
    AdditionalHeaders = case proplists:get_value(append_sha256_content_hash, Options, false) of
                            true ->
                                add_checksum_hash_header(AdditionalHeaders1, Payload);
                            false ->
                                AdditionalHeaders1
                        end,
    Headers1 = aws_request:add_headers(AdditionalHeaders, Headers0),
    MethodBin = aws_request:method_to_binary(Method),
    SignedHeaders = aws_request:sign_request(Client1, MethodBin, URL, Headers1, Payload),
    Response = hackney:request(Method, URL, SignedHeaders, Payload, Options),
    DecodeBody = not proplists:get_value(receive_body_as_binary, Options),
    handle_response(Response, SuccessStatusCode, DecodeBody).
%% Prepends the X-Amz-CheckSum-SHA256 header: the base64-encoded
%% SHA-256 digest of the request body.
add_checksum_hash_header(Headers, Body) ->
    Digest = base64:encode(crypto:hash(sha256, Body)),
    [{<<"X-Amz-CheckSum-SHA256">>, Digest} | Headers].
%% Maps a hackney response onto this module's result shapes. Clause
%% order matters: the success-status clauses must precede the generic
%% clauses with the same tuple shape.
handle_response({ok, StatusCode, ResponseHeaders}, SuccessStatusCode, _DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    %% Body-less success response.
    {ok, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders}, _, _DecodeBody) ->
    %% Body-less response with a non-success status.
    {error, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders, Client}, SuccessStatusCode, DecodeBody)
  when StatusCode =:= 200;
       StatusCode =:= 202;
       StatusCode =:= 204;
       StatusCode =:= 206;
       StatusCode =:= SuccessStatusCode ->
    case hackney:body(Client) of
        {ok, <<>>} when StatusCode =:= 200;
                        StatusCode =:= SuccessStatusCode ->
            %% An empty body on a success status decodes to an empty map.
            {ok, #{}, {StatusCode, ResponseHeaders, Client}};
        {ok, Body} ->
            %% Decode JSON unless the caller asked for the raw binary;
            %% a decode failure is re-raised with the offending body.
            Result = case DecodeBody of
                         true ->
                             try
                                 jsx:decode(Body)
                             catch
                                 Error:Reason:Stack ->
                                     erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
                             end;
                         false -> #{<<"Body">> => Body}
                     end,
            {ok, Result, {StatusCode, ResponseHeaders, Client}}
    end;
handle_response({ok, StatusCode, _ResponseHeaders, _Client}, _, _DecodeBody)
  when StatusCode =:= 503 ->
    %% Retriable error if retries are enabled.
    {error, service_unavailable};
handle_response({ok, StatusCode, ResponseHeaders, Client}, _, _DecodeBody) ->
    %% Any other status: decode the service's error document.
    {ok, Body} = hackney:body(Client),
    try
        DecodedError = jsx:decode(Body),
        {error, DecodedError, {StatusCode, ResponseHeaders, Client}}
    catch
        Error:Reason:Stack ->
            erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
    end;
handle_response({error, Reason}, _, _DecodeBody) ->
    %% Transport-level failure from hackney.
    {error, Reason}.
%% Resolves the request host. A client in region <<"local">> targets
%% its explicit endpoint (or localhost); otherwise the service prefix
%% is dot-joined onto the client's endpoint.
%% NOTE(review): a non-local client map without an endpoint key has no
%% matching clause — presumably aws_client always sets endpoint; verify.
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := Endpoint}) ->
    Endpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
    <<"localhost">>;
build_host(EndpointPrefix, #{endpoint := Endpoint}) ->
    aws_util:binary_join([EndpointPrefix, Endpoint], <<".">>).
%% Assembles the absolute request URL as proto://host:port/path, using
%% the scheme and port configured on the client.
build_url(Host, Path0, Client) ->
    Proto = aws_client:proto(Client),
    Path = erlang:iolist_to_binary(Path0),
    Port = aws_client:port(Client),
    aws_util:binary_join([Proto, <<"://">>, Host, <<":">>, Port, Path], <<"">>).
-spec encode_payload(undefined | map()) -> binary().
%% JSON-encodes a request payload; an absent (undefined) input becomes
%% an empty body.
encode_payload(Input) ->
    case Input of
        undefined -> <<>>;
        _ -> jsx:encode(Input)
    end.
| null | https://raw.githubusercontent.com/aws-beam/aws-erlang/699287cee7dfc9dc8c08ced5f090dcc192c9cba8/src/aws_savingsplans.erl | erlang | WARNING: DO NOT EDIT, AUTO-GENERATED CODE!
@doc Savings Plans are a pricing model that offer significant savings on
information, see the AWS Savings Plans User Guide.
====================================================================
API
====================================================================
@doc Creates a Savings Plan.
@doc Deletes the queued purchase for the specified Savings Plan.
@doc Describes the specified Savings Plans.
@doc Describes the specified Savings Plans offerings.
@doc Lists the tags for the specified resource.
@doc Adds the specified tags to the specified resource.
@doc Removes the specified tags from the specified resource.
====================================================================
==================================================================== | See -beam/aws-codegen for more details .
AWS usage ( for example , on Amazon EC2 instances ) .
You commit to a consistent amount of usage , in USD per hour , for a term of
1 or 3 years , and receive a lower price for that usage . For more
-module(aws_savingsplans).
-export([create_savings_plan/2,
create_savings_plan/3,
delete_queued_savings_plan/2,
delete_queued_savings_plan/3,
describe_savings_plan_rates/2,
describe_savings_plan_rates/3,
describe_savings_plans/2,
describe_savings_plans/3,
describe_savings_plans_offering_rates/2,
describe_savings_plans_offering_rates/3,
describe_savings_plans_offerings/2,
describe_savings_plans_offerings/3,
list_tags_for_resource/2,
list_tags_for_resource/3,
tag_resource/2,
tag_resource/3,
untag_resource/2,
untag_resource/3]).
-include_lib("hackney/include/hackney_lib.hrl").
create_savings_plan(Client, Input) ->
create_savings_plan(Client, Input, []).
create_savings_plan(Client, Input0, Options0) ->
Method = post,
Path = ["/CreateSavingsPlan"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
delete_queued_savings_plan(Client, Input) ->
delete_queued_savings_plan(Client, Input, []).
delete_queued_savings_plan(Client, Input0, Options0) ->
Method = post,
Path = ["/DeleteQueuedSavingsPlan"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Describes the specified Savings Plans rates .
describe_savings_plan_rates(Client, Input) ->
describe_savings_plan_rates(Client, Input, []).
describe_savings_plan_rates(Client, Input0, Options0) ->
Method = post,
Path = ["/DescribeSavingsPlanRates"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
describe_savings_plans(Client, Input) ->
describe_savings_plans(Client, Input, []).
describe_savings_plans(Client, Input0, Options0) ->
Method = post,
Path = ["/DescribeSavingsPlans"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
@doc Describes the specified Savings Plans offering rates .
describe_savings_plans_offering_rates(Client, Input) ->
describe_savings_plans_offering_rates(Client, Input, []).
describe_savings_plans_offering_rates(Client, Input0, Options0) ->
Method = post,
Path = ["/DescribeSavingsPlansOfferingRates"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
describe_savings_plans_offerings(Client, Input) ->
describe_savings_plans_offerings(Client, Input, []).
describe_savings_plans_offerings(Client, Input0, Options0) ->
Method = post,
Path = ["/DescribeSavingsPlansOfferings"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
list_tags_for_resource(Client, Input) ->
list_tags_for_resource(Client, Input, []).
list_tags_for_resource(Client, Input0, Options0) ->
Method = post,
Path = ["/ListTagsForResource"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
tag_resource(Client, Input) ->
tag_resource(Client, Input, []).
tag_resource(Client, Input0, Options0) ->
Method = post,
Path = ["/TagResource"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
untag_resource(Client, Input) ->
untag_resource(Client, Input, []).
untag_resource(Client, Input0, Options0) ->
Method = post,
Path = ["/UntagResource"],
SuccessStatusCode = undefined,
Options = [{send_body_as_binary, false},
{receive_body_as_binary, false},
{append_sha256_content_hash, false}
| Options0],
Headers = [],
Input1 = Input0,
CustomHeaders = [],
Input2 = Input1,
Query_ = [],
Input = Input2,
request(Client, Method, Path, Query_, CustomHeaders ++ Headers, Input, Options, SuccessStatusCode).
Internal functions
-spec request(aws_client:aws_client(), atom(), iolist(), list(),
list(), map() | undefined, list(), pos_integer() | undefined) ->
{ok, {integer(), list()}} |
{ok, Result, {integer(), list(), hackney:client()}} |
{error, Error, {integer(), list(), hackney:client()}} |
{error, term()} when
Result :: map(),
Error :: map().
request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
RequestFun = fun() -> do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) end,
aws_request:request(RequestFun, Options).
do_request(Client, Method, Path, Query, Headers0, Input, Options, SuccessStatusCode) ->
Client1 = Client#{service => <<"savingsplans">>,
region => <<"us-east-1">>},
Host = build_host(<<"savingsplans">>, Client1),
URL0 = build_url(Host, Path, Client1),
URL = aws_request:add_query(URL0, Query),
AdditionalHeaders1 = [ {<<"Host">>, Host}
, {<<"Content-Type">>, <<"application/x-amz-json-1.1">>}
],
Payload =
case proplists:get_value(send_body_as_binary, Options) of
true ->
maps:get(<<"Body">>, Input, <<"">>);
false ->
encode_payload(Input)
end,
AdditionalHeaders = case proplists:get_value(append_sha256_content_hash, Options, false) of
true ->
add_checksum_hash_header(AdditionalHeaders1, Payload);
false ->
AdditionalHeaders1
end,
Headers1 = aws_request:add_headers(AdditionalHeaders, Headers0),
MethodBin = aws_request:method_to_binary(Method),
SignedHeaders = aws_request:sign_request(Client1, MethodBin, URL, Headers1, Payload),
Response = hackney:request(Method, URL, SignedHeaders, Payload, Options),
DecodeBody = not proplists:get_value(receive_body_as_binary, Options),
handle_response(Response, SuccessStatusCode, DecodeBody).
add_checksum_hash_header(Headers, Body) ->
[ {<<"X-Amz-CheckSum-SHA256">>, base64:encode(crypto:hash(sha256, Body))}
| Headers
].
handle_response({ok, StatusCode, ResponseHeaders}, SuccessStatusCode, _DecodeBody)
when StatusCode =:= 200;
StatusCode =:= 202;
StatusCode =:= 204;
StatusCode =:= 206;
StatusCode =:= SuccessStatusCode ->
{ok, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders}, _, _DecodeBody) ->
{error, {StatusCode, ResponseHeaders}};
handle_response({ok, StatusCode, ResponseHeaders, Client}, SuccessStatusCode, DecodeBody)
when StatusCode =:= 200;
StatusCode =:= 202;
StatusCode =:= 204;
StatusCode =:= 206;
StatusCode =:= SuccessStatusCode ->
case hackney:body(Client) of
{ok, <<>>} when StatusCode =:= 200;
StatusCode =:= SuccessStatusCode ->
{ok, #{}, {StatusCode, ResponseHeaders, Client}};
{ok, Body} ->
Result = case DecodeBody of
true ->
try
jsx:decode(Body)
catch
Error:Reason:Stack ->
erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
end;
false -> #{<<"Body">> => Body}
end,
{ok, Result, {StatusCode, ResponseHeaders, Client}}
end;
handle_response({ok, StatusCode, _ResponseHeaders, _Client}, _, _DecodeBody)
when StatusCode =:= 503 ->
Retriable error if retries are enabled
{error, service_unavailable};
handle_response({ok, StatusCode, ResponseHeaders, Client}, _, _DecodeBody) ->
{ok, Body} = hackney:body(Client),
try
DecodedError = jsx:decode(Body),
{error, DecodedError, {StatusCode, ResponseHeaders, Client}}
catch
Error:Reason:Stack ->
erlang:raise(error, {body_decode_failed, Error, Reason, StatusCode, Body}, Stack)
end;
handle_response({error, Reason}, _, _DecodeBody) ->
{error, Reason}.
build_host(_EndpointPrefix, #{region := <<"local">>, endpoint := Endpoint}) ->
Endpoint;
build_host(_EndpointPrefix, #{region := <<"local">>}) ->
<<"localhost">>;
build_host(EndpointPrefix, #{endpoint := Endpoint}) ->
aws_util:binary_join([EndpointPrefix, Endpoint], <<".">>).
build_url(Host, Path0, Client) ->
Proto = aws_client:proto(Client),
Path = erlang:iolist_to_binary(Path0),
Port = aws_client:port(Client),
aws_util:binary_join([Proto, <<"://">>, Host, <<":">>, Port, Path], <<"">>).
-spec encode_payload(undefined | map()) -> binary().
encode_payload(undefined) ->
<<>>;
encode_payload(Input) ->
jsx:encode(Input).
|
9757350d183fd574db9119a299f044404c0b978f05f5c9f80b431f68e7e0dbd5 | takikawa/drracket-vim-tool | commands.rkt | #lang racket/base
Parse and output representation of vim commands
(require racket/class
racket/function
racket/gui/base
racket/match)
(provide parse-command
(struct-out motion-command)
(struct-out mark-command)
(struct-out repeat-command)
(struct-out repeat-motion)
(struct-out replace-command)
(struct-out goto-command)
movement-command?)
;; A Command is one of
;; - Symbol
;; - (repeat-command Repeat Command)
;; - (motion-command Operator Motion)
;; - (mark-command Kind Mark)
;; - (replace-command Char)
;; - (goto-command (U 'line Integer))
;; repeat-command: a command prefixed by a numeric repeat count
(struct repeat-command (repeat command))
;; motion-command: an operator (e.g. 'delete) paired with a motion
(struct motion-command (operator motion))
;; mark-command: a mark operation kind together with the mark character
(struct mark-command (kind mark))
;; replace-command: replace the character under the cursor with `char`
(struct replace-command (char))
;; goto-command: jump to a line number (or 'last-line)
(struct goto-command (line))
;; A Repeat is an integer
;; An Operator (for a motion command) is one of
;; (or/c 'change
;; 'delete
;; 'yank
;; 'swap-case
;; 'lowercase
;; 'uppercase
;; 'format
;; 'rot13
;; 'shift-right
;; 'shift-left
;; 'filter)
;;
;; TODO: Operator leaves out
;; folding, and function calls from vim
;;
;; A Motion is one of
;; - 'a-word
;; - 'inner-word
;; - 'word-forward
;; - 'word-backward
;; - 'sentence
;; - 'a-paragraph
;; - 'match
;; - 'double-quote
;; - (repeat-motion Repeat Motion)
;;   where Motion is a symbol
(struct repeat-motion (repeat motion))
;; These codes are ignored in the sense that they are skipped over
;; and they don't interrupt a command sequence.
;; (Modifier keys, scroll-wheel events, and press/release markers.)
(define ignored-codes
  (list 'shift
        'rshift
        'control
        'rcontrol
        'scroll
        'wheel-up
        'wheel-down
        'wheel-left
        'wheel-right
        'release
        'press))
;; Key-Code -> Boolean
;; True when the key code is one of the ASCII digit characters #\0-#\9.
(define (digit? char)
  (and (char? char) (char<=? #\0 char #\9)))
;; Key-Event% (-> Key-Event%) -> Command
;; Parse a single command. `key` is the event that started the command;
;; `*next-key` blocks until the next key event arrives. Returns #f for
;; unrecognized keys.
(define (parse-command key *next-key)
  ;; helper that ignores ignored characters (modifier/wheel events)
  (define (next-key)
    (let loop ()
      (define key (*next-key))
      (define code (send key get-key-code))
      (if (memq code ignored-codes)
          (loop)
          key)))
  (define code (send key get-key-code))
  (match code
    ;; repeats
    [(? digit? digit)
     ;; '0' cannot start a repeat sequence in vim
     #:when (not (eq? #\0 digit))
     (parse-repeat code next-key)]
    ;; multi-char/motion commands
    [#\d (parse-delete next-key)]
    [#\y (parse-yank next-key)]
    [#\m (parse-mark next-key 'save-mark)]
    [#\' (parse-mark next-key 'apostrophe)]
    [#\` (parse-mark next-key 'backtick)]
    [#\g (parse-global next-key)]
    ;; plain "r" replaces a character; C-r (below) is redo
    [#\r #:when (not (send key get-control-down))
     (parse-replace next-key)]
    [#\= (parse-filter next-key)]
    [#\c (parse-change next-key)]
    [#\w #:when (send key get-control-down)
     (parse-window next-key)]
    [#\> (parse-shift-right next-key)]
    [#\< (parse-shift-left next-key)]
    ;; insertion / change
    [#\a 'insert-end]
    [#\A 'insert-end-line]
    [#\i 'insert]
    [#\I 'insert-line]
    [#\O 'insert-previous-line]
    [#\o 'insert-next-line]
    [#\s 'insert-at-delete]
    [#\S 'change-line]
    [#\C 'change-rest]
    ;; modes
    [#\v 'visual]
    [#\V 'visual-line]
    [#\: 'ex]
    ;; movement
    [(or #\h 'left) 'left]
    [(or #\j 'down) 'down]
    [(or #\k 'up) 'up]
    [(or #\l 'right) 'right]
    ;; C-f / C-b page; plain w / b move by words (clause order matters)
    [#\f #:when (send key get-control-down)
     'next-page]
    [#\b #:when (send key get-control-down)
     'previous-page]
    [#\w 'next-word]
    [#\b 'previous-word]
    ['prior 'previous-page]
    ['next 'next-page]
    [#\space 'continue]
    [#\0 'start-of-line]
    [#\$ 'end-of-line]
    [#\^ 'start-of-line-content]
    [#\% 'match]
    [#\G 'end-of-file]
    ;; editing
    [#\J 'join-line]
    [#\x 'delete-at-cursor]
    [#\X 'delete-before-cursor]
    [#\~ 'toggle-case]
    ;; copy & paste & editing
    [#\D 'delete-rest]
    [#\p 'paste]
    [#\P 'paste-before]
    [#\u 'undo]
    [#\r #:when (send key get-control-down)
     'redo]
    ;; search
    [#\/ 'search]
    [#\n 'next-search]
    [#\N 'prev-search]
    [#\* 'search-cursor]
    ;; other
    [#\. 'single-repeat]
    [_ #f]))
;; Accumulate a decimal repeat count (started by a nonzero digit), then:
;;   <n>G  -> goto line n ('last-line is a defensive fallback for 0)
;;   <n>gg -> goto line n
;;   otherwise the count wraps whatever command follows.
(define (parse-repeat digit next-key)
  (define (char-numeric->number x)
    (- (char->integer x) (char->integer #\0)))
  (let loop ([num (char-numeric->number digit)])
    (define event (next-key))
    (match (send event get-key-code)
      [#\G
       (goto-command (if (zero? num) 'last-line num))]
      [#\g
       (match (send (next-key) get-key-code)
         [#\g (goto-command num)]
         [_ #f])]
      ;; another digit extends the count
      [(? digit? digit)
       (loop (+ (char-numeric->number digit) (* 10 num)))]
      [_
       (repeat-command num (parse-command event next-key))])))
;; "r<char>": replace the character under the cursor. Non-character
;; key codes (arrows etc.) abort with #f.
(define (parse-replace next-key)
  (define code (send (next-key) get-key-code))
  (if (char? code)
      (replace-command code)
      #f))
;; "dd" deletes the current line; any other key is tried as a motion
;; for the delete operator, yielding #f when it isn't one.
(define (parse-delete next-key)
  (define key (next-key))
  (match (send key get-key-code)
    [#\d 'delete-line]
    [_
     (define motion (parse-motion key next-key))
     (and motion (motion-command 'delete motion))]))
;; "yy" yanks the current line; any other key is tried as a motion for
;; the yank operator, yielding #f when it isn't one.
(define (parse-yank next-key)
  (define key (next-key))
  (match (send key get-key-code)
    [#\y 'yank-line]
    [_
     (define motion (parse-motion key next-key))
     (and motion (motion-command 'yank motion))]))
;; "g"-prefixed commands: gg (file start), gt / gT (tab switching).
(define (parse-global next-key)
  (case (send (next-key) get-key-code)
    [(#\g) 'start-of-file]
    [(#\t) 'next-tab]
    [(#\T) 'prev-tab]
    [else #f]))
;; Marks: m<c> saves mark c; '<c> jumps to its line; `<c> jumps to its
;; exact position. Only a-z are valid mark names; anything else -> #f.
(define (parse-mark next-key kind)
  (define code (send (next-key) get-key-code))
  (and (mark-char? code)
       (match kind
         ['apostrophe (mark-command 'goto-mark-line code)]
         ['backtick (mark-command 'goto-mark-char code)]
         ['save-mark (mark-command 'save-mark code)])))
;; A valid mark name is a lowercase ASCII letter.
(define (mark-char? key)
  (and (char? key) (char<=? #\a key #\z)))
;; "==" filters (re-indents) the current line; nothing else follows "=".
(define (parse-filter next-key)
  (if (eqv? (send (next-key) get-key-code) #\=)
      'filter-line
      #f))
;; "cc" changes the whole line; otherwise try the key as a motion for
;; the change operator. NOTE(review): the original carried a FIXME that
;; change-with-motion was unimplemented even though a motion fallback
;; exists — verify downstream support before relying on it.
(define (parse-change next-key)
  (define key (next-key))
  (match (send key get-key-code)
    [#\c 'change-line]
    [_
     (define motion (parse-motion key next-key))
     (and motion (motion-command 'change motion))]))
;; window commands with ctrl-w
;; Only the window-cycling command (C-w w) is recognized; the ctrl
;; state of the second key is deliberately not checked.
(define (parse-window next-key)
  (define code (send (next-key) get-key-code))
  (if (eqv? code #\w)
      'window-next
      #f))
;; ">>" shifts the current line right; motions are not yet supported.
(define (parse-shift-right next-key)
  (if (eqv? (send (next-key) get-key-code) #\>)
      'shift-right
      #f))
;; "<<" shifts the current line left; motions are not yet supported.
(define (parse-shift-left next-key)
  (if (eqv? (send (next-key) get-key-code) #\<)
      'shift-left
      #f))
;; Parse the motion that follows an operator (d/y/c). `first-key` is
;; the event the caller already read; `next-key` fetches a second key
;; for two-key text objects (aw, ap, ab, ib). Returns a motion symbol
;; or #f when the key is not a recognized motion.
;; NOTE(review): the data definition above lists 'inner-word, but only
;; inner-block is parsed here — confirm whether "iw" is intended.
(define (parse-motion first-key next-key)
  (define code (send first-key get-key-code))
  (match code
    [#\a (match (send (next-key) get-key-code)
           [#\w 'a-word]
           [#\p 'a-paragraph]
           [(or #\b #\( #\)) 'a-block]
           [_ #f])]
    [#\i (match (send (next-key) get-key-code)
           [(or #\b #\( #\)) 'inner-block]
           [_ #f])]
    [#\h 'left]
    [#\j 'down]
    [#\k 'up]
    [#\w 'word-forward]
    [#\b 'word-backward]
    [#\% 'match]
    [#\" 'double-quote]
    [(or #\space #\l) 'right]
    [_ #f]))
;; check if the given command is a movement command: either a goto or
;; one of the pure motion symbols (returns a truthy value, not #t).
(define (movement-command? command)
  (define motion-symbols
    '(left down up right
      next-page previous-page
      next-word previous-word
      continue
      start-of-line end-of-line
      start-of-line-content
      match
      start-of-file end-of-file))
  (or (goto-command? command)
      (memq command motion-symbols)))
| null | https://raw.githubusercontent.com/takikawa/drracket-vim-tool/c347e8f8dcb0d89efd44755587b108e1f420912a/private/commands.rkt | racket | - Symbol
A Repeat is an integer
(or/c 'change
'delete
'yank
'swap-case
'lowercase
'uppercase
'format
'rot13
'shift-right
'shift-left
'filter)
TODO: Operator leaves out
folding, and function calls from vim
- 'a-word
- 'inner-word
- 'word-forward
- 'word-backward
- 'sentence
- 'a-paragraph
- 'match
- 'double-quote
These codes are ignored in the sense that they are skipped over
and they don't interrupt a command sequence.
Key-Code -> Boolean
Key-Event% (-> Key-Event%) -> Command
helper that ignores ignored characters
repeats
'0' cannot start a repeat sequence in vim
multi-char/motion commands
insertion / change
modes
movement
editing
copy & paste & editing
search
other
FIXME: implement change with motions
window commands with ctrl-w
ctrl can be down or up for most of these
FIXME: support motions
FIXME: support motions
check if the given command is a movement command | #lang racket/base
Parse and output representation of vim commands
(require racket/class
racket/function
racket/gui/base
racket/match)
(provide parse-command
(struct-out motion-command)
(struct-out mark-command)
(struct-out repeat-command)
(struct-out repeat-motion)
(struct-out replace-command)
(struct-out goto-command)
movement-command?)
A Command is one of
- ( repeat - command Repeat Command )
- ( motion - command Operator Motion )
- ( mark - command )
- ( replace - command )
- ( goto - command ( U ' line Integer ) )
(struct repeat-command (repeat command))
(struct motion-command (operator motion))
(struct mark-command (kind mark))
(struct replace-command (char))
(struct goto-command (line))
An Operator ( for a motion command ) is one of
A Motion is one of
- ( repeat - motion Repeat Motion )
where Motion is a symbol
(struct repeat-motion (repeat motion))
(define ignored-codes
(list 'shift
'rshift
'control
'rcontrol
'scroll
'wheel-up
'wheel-down
'wheel-left
'wheel-right
'release
'press))
(define (digit? char)
(and (char? char)
(memq char '(#\0 #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8 #\9))))
Parse a single command
(define (parse-command key *next-key)
(define (next-key)
(let loop ()
(define key (*next-key))
(define code (send key get-key-code))
(if (memq code ignored-codes)
(loop)
key)))
(define code (send key get-key-code))
(match code
[(? digit? digit)
#:when (not (eq? #\0 digit))
(parse-repeat code next-key)]
[#\d (parse-delete next-key)]
[#\y (parse-yank next-key)]
[#\m (parse-mark next-key 'save-mark)]
[#\' (parse-mark next-key 'apostrophe)]
[#\` (parse-mark next-key 'backtick)]
[#\g (parse-global next-key)]
[#\r #:when (not (send key get-control-down))
(parse-replace next-key)]
[#\= (parse-filter next-key)]
[#\c (parse-change next-key)]
[#\w #:when (send key get-control-down)
(parse-window next-key)]
[#\> (parse-shift-right next-key)]
[#\< (parse-shift-left next-key)]
[#\a 'insert-end]
[#\A 'insert-end-line]
[#\i 'insert]
[#\I 'insert-line]
[#\O 'insert-previous-line]
[#\o 'insert-next-line]
[#\s 'insert-at-delete]
[#\S 'change-line]
[#\C 'change-rest]
[#\v 'visual]
[#\V 'visual-line]
[#\: 'ex]
[(or #\h 'left) 'left]
[(or #\j 'down) 'down]
[(or #\k 'up) 'up]
[(or #\l 'right) 'right]
[#\f #:when (send key get-control-down)
'next-page]
[#\b #:when (send key get-control-down)
'previous-page]
[#\w 'next-word]
[#\b 'previous-word]
['prior 'previous-page]
['next 'next-page]
[#\space 'continue]
[#\0 'start-of-line]
[#\$ 'end-of-line]
[#\^ 'start-of-line-content]
[#\% 'match]
[#\G 'end-of-file]
[#\J 'join-line]
[#\x 'delete-at-cursor]
[#\X 'delete-before-cursor]
[#\~ 'toggle-case]
[#\D 'delete-rest]
[#\p 'paste]
[#\P 'paste-before]
[#\u 'undo]
[#\r #:when (send key get-control-down)
'redo]
[#\/ 'search]
[#\n 'next-search]
[#\N 'prev-search]
[#\* 'search-cursor]
[#\. 'single-repeat]
[_ #f]))
(define (parse-repeat digit next-key)
(define (char-numeric->number x)
(- (char->integer x) (char->integer #\0)))
(let loop ([num (char-numeric->number digit)])
(define event (next-key))
(match (send event get-key-code)
[#\G
(goto-command (if (zero? num) 'last-line num))]
[#\g
(match (send (next-key) get-key-code)
[#\g (goto-command num)]
[_ #f])]
[(? digit? digit)
(loop (+ (char-numeric->number digit) (* 10 num)))]
[_
(repeat-command num (parse-command event next-key))])))
(define (parse-replace next-key)
(define char (send (next-key) get-key-code))
(and (char? char)
(replace-command char)))
(define (parse-delete next-key)
(define key (next-key))
(define code (send key get-key-code))
(match code
[#\d 'delete-line]
[c
(define motion (parse-motion key next-key))
(and motion (motion-command 'delete motion))]))
(define (parse-yank next-key)
(define key (next-key))
(define code (send key get-key-code))
(match code
[#\y 'yank-line]
[c
(define motion (parse-motion key next-key))
(and motion (motion-command 'yank motion))]))
(define (parse-global next-key)
(define key (next-key))
(define code (send key get-key-code))
(match code
[#\g 'start-of-file]
[#\t 'next-tab]
[#\T 'prev-tab]
[_ #f]))
(define (parse-mark next-key kind)
(define key (next-key))
(define code (send key get-key-code))
(and (mark-char? code)
(match kind
['apostrophe (mark-command 'goto-mark-line code)]
['backtick (mark-command 'goto-mark-char code)]
['save-mark (mark-command 'save-mark code)])))
(define (mark-char? key)
(and (char? key)
(char>=? key #\a)
(char<=? key #\z)))
(define (parse-filter next-key)
(define key (next-key))
(define code (send key get-key-code))
(match code
[#\= 'filter-line]
[_ #f]))
(define (parse-change next-key)
(define key (next-key))
(define code (send key get-key-code))
(match code
[#\c 'change-line]
[_
(define motion (parse-motion key next-key))
(and motion (motion-command 'change motion))]))
(define (parse-window next-key)
(define key (next-key))
(define code (send key get-key-code))
(match code
[#\w 'window-next]
[_ #f]))
(define (parse-shift-right next-key)
(define key (next-key))
(define code (send key get-key-code))
(match code
[#\> 'shift-right]
[_ #f]))
(define (parse-shift-left next-key)
(define key (next-key))
(define code (send key get-key-code))
(match code
[#\< 'shift-left]
[_ #f]))
(define (parse-motion first-key next-key)
(define code (send first-key get-key-code))
(match code
[#\a (match (send (next-key) get-key-code)
[#\w 'a-word]
[#\p 'a-paragraph]
[(or #\b #\( #\)) 'a-block]
[_ #f])]
[#\i (match (send (next-key) get-key-code)
[(or #\b #\( #\)) 'inner-block]
[_ #f])]
[#\h 'left]
[#\j 'down]
[#\k 'up]
[#\w 'word-forward]
[#\b 'word-backward]
[#\% 'match]
[#\" 'double-quote]
[(or #\space #\l) 'right]
[_ #f]))
(define (movement-command? command)
(or (goto-command? command)
(memq command
'(left down up right
next-page previous-page
next-word previous-word
continue
start-of-line end-of-line
start-of-line-content
match
start-of-file end-of-file))))
|
958156e8a39aa6970ec2d0b80bddc74ad4b18919e377075bccc0e62c583b37bb | abdulapopoola/SICPBook | eceval-machine.scm | (define eceval-operations
(list
; install inbuilt scheme operators
(list 'read read)
; install operators from scheme-operators.scm
(list 'self-evaluating? self-evaluating?)
(list 'variable? variable?)
(list 'quoted? quoted?)
(list 'text-of-quotation text-of-quotation)
(list 'assignment? assignment?)
(list 'assignment-variable assignment-variable)
(list 'assignment-value assignment-value)
(list 'definition? definition?)
(list 'definition-variable definition-variable)
(list 'definition-value definition-value)
(list 'lambda? lambda?)
(list 'lambda-parameters lambda-parameters)
(list 'lambda-body lambda-body)
(list 'if? if?)
(list 'if-predicate if-predicate)
(list 'if-alternative if-alternative)
(list 'if-consequent if-consequent)
(list 'begin? begin?)
(list 'begin-actions begin-actions)
(list 'first-exp first-exp)
(list 'last-exp? last-exp?)
(list 'rest-exps rest-exps)
(list 'application? application?)
(list 'operator operator)
(list 'operands operands)
(list 'no-operands? no-operands?)
(list 'first-operand first-operand)
(list 'rest-operands rest-operands)
; install operators from machine-operations.scm
(list 'true? true?)
(list 'make-procedure make-procedure)
(list 'compound-procedure? compound-procedure?)
(list 'procedure-parameters procedure-parameters)
(list 'procedure-body procedure-body)
(list 'procedure-environment procedure-environment)
(list 'extend-environment extend-environment)
(list 'lookup-variable-value lookup-variable-value)
(list 'set-variable-value! set-variable-value!)
(list 'define-variable! define-variable!)
(list 'primitive-procedure? primitive-procedure?)
(list 'apply-primitive-procedure apply-primitive-procedure)
(list 'prompt-for-input prompt-for-input)
(list 'announce-output announce-output)
(list 'user-print user-print)
(list 'empty-arglist empty-arglist)
(list 'adjoin-arg adjoin-arg)
(list 'last-operand? last-operand?)
(list 'no-more-exps? no-more-exps?)
(list 'get-global-environment get-global-environment)))
(define eceval
(make-machine
'(exp env val proc argl continue unev)
eceval-operations
'(
read-eval-print-loop
(perform (op initialize-stack))
(perform (op prompt-for-input)
(const ";;; EC-Eval input:"))
(assign exp (op read))
(assign env (op get-global-environment))
(assign continue (label print-result))
(goto (label eval-dispatch))
print-result
;; Add stack tracing
(perform (op print-stack-statistics))
(perform (op announce-output)
(const ";;; EC-Eval value:"))
(perform (op user-print) (reg val))
(goto (label read-eval-print-loop))
unknown-expression-type
(assign
val
(const unknown-expression-type-error))
(goto (label signal-error))
unknown-procedure-type
; clean up stack (from apply-dispatch):
(restore continue)
(assign
val
(const unknown-procedure-type-error))
(goto (label signal-error))
signal-error
(perform (op user-print) (reg val))
(goto (label read-eval-print-loop))
eval-dispatch
(test (op self-evaluating?) (reg exp))
(branch (label ev-self-eval))
(test (op variable?) (reg exp))
(branch (label ev-variable))
(test (op quoted?) (reg exp))
(branch (label ev-quoted))
(test (op assignment?) (reg exp))
(branch (label ev-assignment))
(test (op definition?) (reg exp))
(branch (label ev-definition))
(test (op if?) (reg exp))
(branch (label ev-if))
(test (op lambda?) (reg exp))
(branch (label ev-lambda))
(test (op begin?) (reg exp))
(branch (label ev-begin))
(test (op application?) (reg exp))
(branch (label ev-application))
(goto (label unknown-expression-type))
ev-self-eval
(assign val (reg exp))
(goto (reg continue))
ev-variable
(assign val
(op lookup-variable-value)
(reg exp)
(reg env))
(goto (reg continue))
ev-quoted
(assign val
(op text-of-quotation)
(reg exp))
(goto (reg continue))
ev-lambda
(assign unev
(op lambda-parameters)
(reg exp))
(assign exp
(op lambda-body)
(reg exp))
(assign val
(op make-procedure)
(reg unev)
(reg exp)
(reg env))
(goto (reg continue))
ev-application
(save continue)
(save env)
(assign unev (op operands) (reg exp))
(save unev)
(assign exp (op operator) (reg exp))
(assign
continue (label ev-appl-did-operator))
(goto (label eval-dispatch))
ev-appl-did-operator
(restore unev) ; the operands
(restore env)
(assign argl (op empty-arglist))
(assign proc (reg val)) ; the operator
(test (op no-operands?) (reg unev))
(branch (label apply-dispatch))
(save proc)
ev-appl-operand-loop
(save argl)
(assign exp
(op first-operand)
(reg unev))
(test (op last-operand?) (reg unev))
(branch (label ev-appl-last-arg))
(save env)
(save unev)
(assign continue
(label ev-appl-accumulate-arg))
(goto (label eval-dispatch))
ev-appl-accumulate-arg
(restore unev)
(restore env)
(restore argl)
(assign argl
(op adjoin-arg)
(reg val)
(reg argl))
(assign unev
(op rest-operands)
(reg unev))
(goto (label ev-appl-operand-loop))
ev-appl-last-arg
(assign continue
(label ev-appl-accum-last-arg))
(goto (label eval-dispatch))
ev-appl-accum-last-arg
(restore argl)
(assign argl
(op adjoin-arg)
(reg val)
(reg argl))
(restore proc)
(goto (label apply-dispatch))
apply-dispatch
(test (op primitive-procedure?) (reg proc))
(branch (label primitive-apply))
(test (op compound-procedure?) (reg proc))
(branch (label compound-apply))
(goto (label unknown-procedure-type))
primitive-apply
(assign val (op apply-primitive-procedure)
(reg proc)
(reg argl))
(restore continue)
(goto (reg continue))
compound-apply
(assign unev
(op procedure-parameters)
(reg proc))
(assign env
(op procedure-environment)
(reg proc))
(assign env
(op extend-environment)
(reg unev)
(reg argl)
(reg env))
(assign unev
(op procedure-body)
(reg proc))
(goto (label ev-sequence))
ev-begin
(assign unev
(op begin-actions)
(reg exp))
(save continue)
(goto (label ev-sequence))
ev-sequence
(assign exp (op first-exp) (reg unev))
(test (op last-exp?) (reg unev))
(branch (label ev-sequence-last-exp))
(save unev)
(save env)
(assign continue
(label ev-sequence-continue))
(goto (label eval-dispatch))
ev-sequence-continue
(restore env)
(restore unev)
(assign unev (op rest-exps) (reg unev))
(goto (label ev-sequence))
ev-sequence-last-exp
(restore continue)
(goto (label eval-dispatch))
ev-if
(save exp) ; save expression for later
(save env)
(save continue)
(assign continue (label ev-if-decide))
(assign exp (op if-predicate) (reg exp))
; evaluate the predicate:
(goto (label eval-dispatch))
ev-if-decide
(restore continue)
(restore env)
(restore exp)
(test (op true?) (reg val))
(branch (label ev-if-consequent))
ev-if-alternative
(assign exp (op if-alternative) (reg exp))
(goto (label eval-dispatch))
ev-if-consequent
(assign exp (op if-consequent) (reg exp))
(goto (label eval-dispatch))
ev-assignment
(assign unev
(op assignment-variable)
(reg exp))
(save unev) ; save variable for later
(assign exp
(op assignment-value)
(reg exp))
(save env)
(save continue)
(assign continue
(label ev-assignment-1))
; evaluate the assignment value:
(goto (label eval-dispatch))
ev-assignment-1
(restore continue)
(restore env)
(restore unev)
(perform (op set-variable-value!)
(reg unev)
(reg val)
(reg env))
(assign val
(const ok))
(goto (reg continue))
ev-definition
(assign unev
(op definition-variable)
(reg exp))
(save unev) ; save variable for later
(assign exp
(op definition-value)
(reg exp))
(save env)
(save continue)
(assign continue (label ev-definition-1))
; evaluate the definition value:
(goto (label eval-dispatch))
ev-definition-1
(restore continue)
(restore env)
(restore unev)
(perform (op define-variable!)
(reg unev)
(reg val)
(reg env))
(assign val (const ok))
(goto (reg continue)))))
'(EC-EVAL LOADED) | null | https://raw.githubusercontent.com/abdulapopoola/SICPBook/c8a0228ebf66d9c1ddc5ef1fcc1d05d8684f090a/Chapter%205/5.4/eceval-machine.scm | scheme | install inbuilt scheme operators
install operators from scheme-operators.scm
install operators from machine-operations.scm
Add stack tracing
clean up stack (from apply-dispatch):
the operands
the operator
save expression for later
evaluate the predicate:
save variable for later
evaluate the assignment value:
save variable for later
evaluate the definition value: | (define eceval-operations
(list
(list 'read read)
(list 'self-evaluating? self-evaluating?)
(list 'variable? variable?)
(list 'quoted? quoted?)
(list 'text-of-quotation text-of-quotation)
(list 'assignment? assignment?)
(list 'assignment-variable assignment-variable)
(list 'assignment-value assignment-value)
(list 'definition? definition?)
(list 'definition-variable definition-variable)
(list 'definition-value definition-value)
(list 'lambda? lambda?)
(list 'lambda-parameters lambda-parameters)
(list 'lambda-body lambda-body)
(list 'if? if?)
(list 'if-predicate if-predicate)
(list 'if-alternative if-alternative)
(list 'if-consequent if-consequent)
(list 'begin? begin?)
(list 'begin-actions begin-actions)
(list 'first-exp first-exp)
(list 'last-exp? last-exp?)
(list 'rest-exps rest-exps)
(list 'application? application?)
(list 'operator operator)
(list 'operands operands)
(list 'no-operands? no-operands?)
(list 'first-operand first-operand)
(list 'rest-operands rest-operands)
(list 'true? true?)
(list 'make-procedure make-procedure)
(list 'compound-procedure? compound-procedure?)
(list 'procedure-parameters procedure-parameters)
(list 'procedure-body procedure-body)
(list 'procedure-environment procedure-environment)
(list 'extend-environment extend-environment)
(list 'lookup-variable-value lookup-variable-value)
(list 'set-variable-value! set-variable-value!)
(list 'define-variable! define-variable!)
(list 'primitive-procedure? primitive-procedure?)
(list 'apply-primitive-procedure apply-primitive-procedure)
(list 'prompt-for-input prompt-for-input)
(list 'announce-output announce-output)
(list 'user-print user-print)
(list 'empty-arglist empty-arglist)
(list 'adjoin-arg adjoin-arg)
(list 'last-operand? last-operand?)
(list 'no-more-exps? no-more-exps?)
(list 'get-global-environment get-global-environment)))
(define eceval
(make-machine
'(exp env val proc argl continue unev)
eceval-operations
'(
read-eval-print-loop
(perform (op initialize-stack))
(perform (op prompt-for-input)
(const ";;; EC-Eval input:"))
(assign exp (op read))
(assign env (op get-global-environment))
(assign continue (label print-result))
(goto (label eval-dispatch))
print-result
(perform (op print-stack-statistics))
(perform (op announce-output)
(const ";;; EC-Eval value:"))
(perform (op user-print) (reg val))
(goto (label read-eval-print-loop))
unknown-expression-type
(assign
val
(const unknown-expression-type-error))
(goto (label signal-error))
unknown-procedure-type
(restore continue)
(assign
val
(const unknown-procedure-type-error))
(goto (label signal-error))
signal-error
(perform (op user-print) (reg val))
(goto (label read-eval-print-loop))
eval-dispatch
(test (op self-evaluating?) (reg exp))
(branch (label ev-self-eval))
(test (op variable?) (reg exp))
(branch (label ev-variable))
(test (op quoted?) (reg exp))
(branch (label ev-quoted))
(test (op assignment?) (reg exp))
(branch (label ev-assignment))
(test (op definition?) (reg exp))
(branch (label ev-definition))
(test (op if?) (reg exp))
(branch (label ev-if))
(test (op lambda?) (reg exp))
(branch (label ev-lambda))
(test (op begin?) (reg exp))
(branch (label ev-begin))
(test (op application?) (reg exp))
(branch (label ev-application))
(goto (label unknown-expression-type))
ev-self-eval
(assign val (reg exp))
(goto (reg continue))
ev-variable
(assign val
(op lookup-variable-value)
(reg exp)
(reg env))
(goto (reg continue))
ev-quoted
(assign val
(op text-of-quotation)
(reg exp))
(goto (reg continue))
ev-lambda
(assign unev
(op lambda-parameters)
(reg exp))
(assign exp
(op lambda-body)
(reg exp))
(assign val
(op make-procedure)
(reg unev)
(reg exp)
(reg env))
(goto (reg continue))
ev-application
(save continue)
(save env)
(assign unev (op operands) (reg exp))
(save unev)
(assign exp (op operator) (reg exp))
(assign
continue (label ev-appl-did-operator))
(goto (label eval-dispatch))
ev-appl-did-operator
(restore env)
(assign argl (op empty-arglist))
(test (op no-operands?) (reg unev))
(branch (label apply-dispatch))
(save proc)
ev-appl-operand-loop
(save argl)
(assign exp
(op first-operand)
(reg unev))
(test (op last-operand?) (reg unev))
(branch (label ev-appl-last-arg))
(save env)
(save unev)
(assign continue
(label ev-appl-accumulate-arg))
(goto (label eval-dispatch))
ev-appl-accumulate-arg
(restore unev)
(restore env)
(restore argl)
(assign argl
(op adjoin-arg)
(reg val)
(reg argl))
(assign unev
(op rest-operands)
(reg unev))
(goto (label ev-appl-operand-loop))
ev-appl-last-arg
(assign continue
(label ev-appl-accum-last-arg))
(goto (label eval-dispatch))
ev-appl-accum-last-arg
(restore argl)
(assign argl
(op adjoin-arg)
(reg val)
(reg argl))
(restore proc)
(goto (label apply-dispatch))
apply-dispatch
(test (op primitive-procedure?) (reg proc))
(branch (label primitive-apply))
(test (op compound-procedure?) (reg proc))
(branch (label compound-apply))
(goto (label unknown-procedure-type))
primitive-apply
(assign val (op apply-primitive-procedure)
(reg proc)
(reg argl))
(restore continue)
(goto (reg continue))
compound-apply
(assign unev
(op procedure-parameters)
(reg proc))
(assign env
(op procedure-environment)
(reg proc))
(assign env
(op extend-environment)
(reg unev)
(reg argl)
(reg env))
(assign unev
(op procedure-body)
(reg proc))
(goto (label ev-sequence))
ev-begin
(assign unev
(op begin-actions)
(reg exp))
(save continue)
(goto (label ev-sequence))
ev-sequence
(assign exp (op first-exp) (reg unev))
(test (op last-exp?) (reg unev))
(branch (label ev-sequence-last-exp))
(save unev)
(save env)
(assign continue
(label ev-sequence-continue))
(goto (label eval-dispatch))
ev-sequence-continue
(restore env)
(restore unev)
(assign unev (op rest-exps) (reg unev))
(goto (label ev-sequence))
ev-sequence-last-exp
(restore continue)
(goto (label eval-dispatch))
ev-if
(save env)
(save continue)
(assign continue (label ev-if-decide))
(assign exp (op if-predicate) (reg exp))
(goto (label eval-dispatch))
ev-if-decide
(restore continue)
(restore env)
(restore exp)
(test (op true?) (reg val))
(branch (label ev-if-consequent))
ev-if-alternative
(assign exp (op if-alternative) (reg exp))
(goto (label eval-dispatch))
ev-if-consequent
(assign exp (op if-consequent) (reg exp))
(goto (label eval-dispatch))
ev-assignment
(assign unev
(op assignment-variable)
(reg exp))
(assign exp
(op assignment-value)
(reg exp))
(save env)
(save continue)
(assign continue
(label ev-assignment-1))
(goto (label eval-dispatch))
ev-assignment-1
(restore continue)
(restore env)
(restore unev)
(perform (op set-variable-value!)
(reg unev)
(reg val)
(reg env))
(assign val
(const ok))
(goto (reg continue))
ev-definition
(assign unev
(op definition-variable)
(reg exp))
(assign exp
(op definition-value)
(reg exp))
(save env)
(save continue)
(assign continue (label ev-definition-1))
(goto (label eval-dispatch))
ev-definition-1
(restore continue)
(restore env)
(restore unev)
(perform (op define-variable!)
(reg unev)
(reg val)
(reg env))
(assign val (const ok))
(goto (reg continue)))))
'(EC-EVAL LOADED) |
f070c740b0729c0e9ceab422cd8e299be404752c199c496bd33c48d7c1aa941a | ghcjs/ghcjs-dom | SVGPathSegLinetoAbs.hs | # LANGUAGE PatternSynonyms #
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE JavaScriptFFI #
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
module GHCJS.DOM.JSFFI.Generated.SVGPathSegLinetoAbs
(js_setX, setX, js_getX, getX, js_setY, setY, js_getY, getY,
SVGPathSegLinetoAbs(..), gTypeSVGPathSegLinetoAbs)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import qualified Prelude (error)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull, jsUndefined)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import Data.Maybe (fromJust)
import Data.Traversable (mapM)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"x\"] = $2;" js_setX ::
SVGPathSegLinetoAbs -> Float -> IO ()
| < -US/docs/Web/API/SVGPathSegLinetoAbs.x Mozilla documentation >
setX :: (MonadIO m) => SVGPathSegLinetoAbs -> Float -> m ()
setX self val = liftIO (js_setX self val)
foreign import javascript unsafe "$1[\"x\"]" js_getX ::
SVGPathSegLinetoAbs -> IO Float
| < -US/docs/Web/API/SVGPathSegLinetoAbs.x Mozilla documentation >
getX :: (MonadIO m) => SVGPathSegLinetoAbs -> m Float
getX self = liftIO (js_getX self)
foreign import javascript unsafe "$1[\"y\"] = $2;" js_setY ::
SVGPathSegLinetoAbs -> Float -> IO ()
-- | <-US/docs/Web/API/SVGPathSegLinetoAbs.y Mozilla SVGPathSegLinetoAbs.y documentation>
setY :: (MonadIO m) => SVGPathSegLinetoAbs -> Float -> m ()
setY self val = liftIO (js_setY self val)
foreign import javascript unsafe "$1[\"y\"]" js_getY ::
SVGPathSegLinetoAbs -> IO Float
-- | <-US/docs/Web/API/SVGPathSegLinetoAbs.y Mozilla SVGPathSegLinetoAbs.y documentation>
getY :: (MonadIO m) => SVGPathSegLinetoAbs -> m Float
getY self = liftIO (js_getY self) | null | https://raw.githubusercontent.com/ghcjs/ghcjs-dom/749963557d878d866be2d0184079836f367dd0ea/ghcjs-dom-jsffi/src/GHCJS/DOM/JSFFI/Generated/SVGPathSegLinetoAbs.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #
| <-US/docs/Web/API/SVGPathSegLinetoAbs.y Mozilla SVGPathSegLinetoAbs.y documentation>
| <-US/docs/Web/API/SVGPathSegLinetoAbs.y Mozilla SVGPathSegLinetoAbs.y documentation> | # LANGUAGE PatternSynonyms #
# LANGUAGE ForeignFunctionInterface #
# LANGUAGE JavaScriptFFI #
module GHCJS.DOM.JSFFI.Generated.SVGPathSegLinetoAbs
(js_setX, setX, js_getX, getX, js_setY, setY, js_getY, getY,
SVGPathSegLinetoAbs(..), gTypeSVGPathSegLinetoAbs)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, fmap, Show, Read, Eq, Ord)
import qualified Prelude (error)
import Data.Typeable (Typeable)
import GHCJS.Types (JSVal(..), JSString)
import GHCJS.Foreign (jsNull, jsUndefined)
import GHCJS.Foreign.Callback (syncCallback, asyncCallback, syncCallback1, asyncCallback1, syncCallback2, asyncCallback2, OnBlocked(..))
import GHCJS.Marshal (ToJSVal(..), FromJSVal(..))
import GHCJS.Marshal.Pure (PToJSVal(..), PFromJSVal(..))
import Control.Monad (void)
import Control.Monad.IO.Class (MonadIO(..))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import Data.Maybe (fromJust)
import Data.Traversable (mapM)
import GHCJS.DOM.Types
import Control.Applicative ((<$>))
import GHCJS.DOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import GHCJS.DOM.JSFFI.Generated.Enums
foreign import javascript unsafe "$1[\"x\"] = $2;" js_setX ::
SVGPathSegLinetoAbs -> Float -> IO ()
| < -US/docs/Web/API/SVGPathSegLinetoAbs.x Mozilla documentation >
setX :: (MonadIO m) => SVGPathSegLinetoAbs -> Float -> m ()
setX self val = liftIO (js_setX self val)
foreign import javascript unsafe "$1[\"x\"]" js_getX ::
SVGPathSegLinetoAbs -> IO Float
| < -US/docs/Web/API/SVGPathSegLinetoAbs.x Mozilla documentation >
getX :: (MonadIO m) => SVGPathSegLinetoAbs -> m Float
getX self = liftIO (js_getX self)
foreign import javascript unsafe "$1[\"y\"] = $2;" js_setY ::
SVGPathSegLinetoAbs -> Float -> IO ()
setY :: (MonadIO m) => SVGPathSegLinetoAbs -> Float -> m ()
setY self val = liftIO (js_setY self val)
foreign import javascript unsafe "$1[\"y\"]" js_getY ::
SVGPathSegLinetoAbs -> IO Float
getY :: (MonadIO m) => SVGPathSegLinetoAbs -> m Float
getY self = liftIO (js_getY self) |
90e69613fa9801bbcb79ad28c43dedbb117829ab0ebf0a276174671e5cc85b50 | roman01la/cljs-rum-realworld-example-app | mixins.cljs | (ns conduit.mixins
(:require [rum.core :as rum]
[citrus.core :as citrus]))
(defn dispatch-on-mount [events-fn]
{:did-mount
(fn [{[r] :rum/args :as state}]
(doseq [[ctrl event-vector] (apply events-fn (:rum/args state))]
(apply citrus/dispatch! (into [r ctrl] event-vector)))
state)
:did-remount
(fn [old {[r] :rum/args :as state}]
(when (not= (:rum/args old) (:rum/args state))
(doseq [[ctrl event-vector] (apply events-fn (:rum/args state))]
(apply citrus/dispatch! (into [r ctrl] event-vector))))
state)})
(defn- check-errors [validators value]
(->> validators
(filter (fn [[validator]] (-> value validator not)))
(map second)))
(defn- remove-hidden-fields [fields]
(reduce-kv
(fn [m k v]
(if-not (contains? v :hidden)
(assoc m k v)
m))
{}
fields))
(defn form [{:keys [fields validators on-submit]}]
(let [data-init (->> fields keys (reduce
#(assoc %1 %2 (get-in fields [%2 :initial-value] ""))
{}))
errors-init (->> fields keys (reduce #(assoc %1 %2 nil) {}))
data (atom data-init)
errors (atom errors-init)
fields-init (->> fields
(reduce-kv
(fn [m k v]
(assoc m k (-> v
(#(if (contains? % :container)
(assoc % :container ((:container %) data errors k)) %))
(#(if (contains? % :events)
(assoc % :events
(into {} (for [[evt-name evt-fn] (:events %)]
{evt-name (evt-fn data errors k)}))) %)))))
{}))
fields (atom fields-init)
foreign-data (atom {})]
{:will-mount
(fn [{[r _ _ current-values] :rum/args
comp :rum/react-component
:as state}]
(when current-values
(do
(reset! data (into {} (for [[k v] @data] {k (or (get current-values k)
v)})))
(reset! foreign-data current-values)))
(add-watch data ::form-data (fn [_ _ old-state next-state]
(when-not (= old-state next-state)
(rum/request-render comp))))
(add-watch errors ::form-errors (fn [_ _ old-state next-state]
(when-not (= old-state next-state)
(rum/request-render comp))))
(add-watch fields ::form-fields (fn [_ _ old-state next-state]
(when-not (= old-state next-state)
(rum/request-render comp))))
state)
:will-update
(fn [{[_ _ _ current-values] :rum/args
:as state}]
(when (and current-values (not= current-values @foreign-data))
(do
(reset! data (into {}
(for [[k v] @data] {k (or (get current-values k) v)})))
(reset! foreign-data current-values)))
state)
:will-unmount
(fn [state]
(remove-watch data ::form-data)
(remove-watch errors ::form-errors)
(reset! data data-init)
(reset! errors errors-init)
(reset! fields fields-init)
(assoc state ::form {}))
:wrap-render
(fn [render-fn]
(fn [{[r] :rum/args :as state}]
(let [has-errors? (->> @errors vals (apply concat) (every? nil?) not)
pristine? (->> @fields remove-hidden-fields vals (map :touched?) (every? nil?))
state
(assoc state ::form {:fields (remove-hidden-fields @fields)
:validators validators
:validate #(swap! errors assoc %1 (check-errors (get validators %1) %2))
:on-change #(swap! data assoc %1 %2)
:on-submit #(on-submit r @data @errors validators %)
:on-focus #(swap! fields assoc-in [% :touched?] true)
:data @data
:errors @errors
:has-errors? has-errors?
:pristine? pristine?})]
(render-fn state))))}))
| null | https://raw.githubusercontent.com/roman01la/cljs-rum-realworld-example-app/962695bd391806a7066340461c3e02f2215fdc48/src/conduit/mixins.cljs | clojure | (ns conduit.mixins
(:require [rum.core :as rum]
[citrus.core :as citrus]))
(defn dispatch-on-mount [events-fn]
{:did-mount
(fn [{[r] :rum/args :as state}]
(doseq [[ctrl event-vector] (apply events-fn (:rum/args state))]
(apply citrus/dispatch! (into [r ctrl] event-vector)))
state)
:did-remount
(fn [old {[r] :rum/args :as state}]
(when (not= (:rum/args old) (:rum/args state))
(doseq [[ctrl event-vector] (apply events-fn (:rum/args state))]
(apply citrus/dispatch! (into [r ctrl] event-vector))))
state)})
(defn- check-errors [validators value]
(->> validators
(filter (fn [[validator]] (-> value validator not)))
(map second)))
(defn- remove-hidden-fields [fields]
(reduce-kv
(fn [m k v]
(if-not (contains? v :hidden)
(assoc m k v)
m))
{}
fields))
(defn form [{:keys [fields validators on-submit]}]
(let [data-init (->> fields keys (reduce
#(assoc %1 %2 (get-in fields [%2 :initial-value] ""))
{}))
errors-init (->> fields keys (reduce #(assoc %1 %2 nil) {}))
data (atom data-init)
errors (atom errors-init)
fields-init (->> fields
(reduce-kv
(fn [m k v]
(assoc m k (-> v
(#(if (contains? % :container)
(assoc % :container ((:container %) data errors k)) %))
(#(if (contains? % :events)
(assoc % :events
(into {} (for [[evt-name evt-fn] (:events %)]
{evt-name (evt-fn data errors k)}))) %)))))
{}))
fields (atom fields-init)
foreign-data (atom {})]
{:will-mount
(fn [{[r _ _ current-values] :rum/args
comp :rum/react-component
:as state}]
(when current-values
(do
(reset! data (into {} (for [[k v] @data] {k (or (get current-values k)
v)})))
(reset! foreign-data current-values)))
(add-watch data ::form-data (fn [_ _ old-state next-state]
(when-not (= old-state next-state)
(rum/request-render comp))))
(add-watch errors ::form-errors (fn [_ _ old-state next-state]
(when-not (= old-state next-state)
(rum/request-render comp))))
(add-watch fields ::form-fields (fn [_ _ old-state next-state]
(when-not (= old-state next-state)
(rum/request-render comp))))
state)
:will-update
(fn [{[_ _ _ current-values] :rum/args
:as state}]
(when (and current-values (not= current-values @foreign-data))
(do
(reset! data (into {}
(for [[k v] @data] {k (or (get current-values k) v)})))
(reset! foreign-data current-values)))
state)
:will-unmount
(fn [state]
(remove-watch data ::form-data)
(remove-watch errors ::form-errors)
(reset! data data-init)
(reset! errors errors-init)
(reset! fields fields-init)
(assoc state ::form {}))
:wrap-render
(fn [render-fn]
(fn [{[r] :rum/args :as state}]
(let [has-errors? (->> @errors vals (apply concat) (every? nil?) not)
pristine? (->> @fields remove-hidden-fields vals (map :touched?) (every? nil?))
state
(assoc state ::form {:fields (remove-hidden-fields @fields)
:validators validators
:validate #(swap! errors assoc %1 (check-errors (get validators %1) %2))
:on-change #(swap! data assoc %1 %2)
:on-submit #(on-submit r @data @errors validators %)
:on-focus #(swap! fields assoc-in [% :touched?] true)
:data @data
:errors @errors
:has-errors? has-errors?
:pristine? pristine?})]
(render-fn state))))}))
| |
c01e20f5c2f206f7cc67dadb5b761da939128646dd3649fb9126fbefa5480f25 | weavejester/lein-generate | namespace.clj | (ns leiningen.generate.namespace
(:refer-clojure :exclude [namespace])
(:require [leiningen.generate.templates :as tmpl]
[leiningen.new.templates :refer [name-to-path]]
[leiningen.core.main :as main]))
(defn namespace
"Generate a new namespace and test file."
[project name]
(let [data {:name name, :sanitized (name-to-path name)}]
(doto (tmpl/renderer "namespace")
(tmpl/create-file "source.clj" "src/{{sanitized}}.clj" data)
(tmpl/create-file "test.clj" "test/{{sanitized}}_test.clj" data))))
| null | https://raw.githubusercontent.com/weavejester/lein-generate/481a116e5fd652c3d8bcf6b725da97c0129368c8/generators/src/leiningen/generate/namespace.clj | clojure | (ns leiningen.generate.namespace
(:refer-clojure :exclude [namespace])
(:require [leiningen.generate.templates :as tmpl]
[leiningen.new.templates :refer [name-to-path]]
[leiningen.core.main :as main]))
(defn namespace
"Generate a new namespace and test file."
[project name]
(let [data {:name name, :sanitized (name-to-path name)}]
(doto (tmpl/renderer "namespace")
(tmpl/create-file "source.clj" "src/{{sanitized}}.clj" data)
(tmpl/create-file "test.clj" "test/{{sanitized}}_test.clj" data))))
| |
b3b7161731cc8b92d02a3d2ba38c8ff2f29b5ac55903e29d34435a9368ea020a | mario-goulart/salmonella | srfi-13.scm | (import (chicken string))
(define (string-prefix? prefix str)
(let ((index (substring-index prefix str)))
(and index (zero? index))))
(define (string-pad-right s n char)
(let ((len (string-length s)))
(if (<= n len)
(substring s (- len n) len)
(let ((pad (make-string (- n len) char)))
(string-append s pad)))))
| null | https://raw.githubusercontent.com/mario-goulart/salmonella/3b207e3b33490e5a5f81ccaa3c2c7f7ffcbe8125/libs/srfi-13.scm | scheme | (import (chicken string))
(define (string-prefix? prefix str)
(let ((index (substring-index prefix str)))
(and index (zero? index))))
(define (string-pad-right s n char)
(let ((len (string-length s)))
(if (<= n len)
(substring s (- len n) len)
(let ((pad (make-string (- n len) char)))
(string-append s pad)))))
| |
1244159c9a6eea2083290a4d932fff13f889b634ab5f11eff127b0e36d375529 | sellout/Kilns | network.lisp | (in-package :kilns)
;; Came up with the default port number by mapping the letters i–s to 0–10, then
;; spelling “kilns” with those numbers:
;; (k -> 2), (i -> 0), (l -> 3), (n -> 5), (s -> 10)
(defvar *base-port* 20360)
Protocol format – use kilns reader
;;; {keep-alive}
{ handshake ( par { path { from { top } } }
{ version " 0.0.1 " } ) }
(defun handle-request (client)
(unwind-protect
(multiple-value-bind (who remote-port) (sockets:remote-name client)
(handler-case
(let* ((*readtable* *kilns-readtable*)
(*package* (find-package :kilns-user))
(request (eval (read client))))
(case (name request)
(kilns-user::keep-alive
(write (eval (read-from-string "{keep-alive}"))
:stream client))
(kilns-user::handshake
(let* ((subst (match-local (eval (read-from-string "{handshake (par {path ?path} {version ?version})}"))
request))
(version (find-variable-value (intern "?version")
subst nil)))
(if (equal version "0.0.1")
(write (eval (read-from-string "{handshake (par {path {path {to {local {kell}}}}} {version \"0.0.1\"})}"))
:stream client)
(write (eval (read-from-string "{handshake {error \"incompatible protocol\"}}"))
:stream client)))))
(finish-output client))
(sockets:socket-connection-reset-error ()
(format t "Client reset connection!~%"))
(iolib.streams:hangup () (format t "Client closed conection!~%"))))
(close client)))
(defun dispatch-request (socket)
(let ((client (sockets:accept-connection socket :wait t)))
(when client
(make-thread (lambda () (handle-request client))
:name "request-handler"))))
(defun start-kilns-listener (&optional port)
(make-thread (lambda ()
(handler-case
(sockets:with-open-socket
(socket :connect :passive
:address-family :internet
:type :stream
:external-format '(:utf-8 :eol-style :lf)
:ipv6 nil
:local-host sockets:+ipv4-unspecified+
:local-port (or port *base-port*)
:reuse-address t)
(sockets:listen-on socket :backlog 5)
(loop while socket
do (dispatch-request socket)))
(sockets:socket-address-in-use-error ()
;; FIXME: we should try another port
(format t "Bind: Address already in use, forgot :reuse-addr t?"))))
:name "kilns network listener"))
;;; This file implements the behavior of distributed kells. They need to be
;;; modeled on each instance, and behavior that crosses boundaries needs to be
;;; initiated by the host that most causes the reaction – depending on the level
;;; of the reaction, it might have to update the state for multiple hosts.
(defclass network-kell (kell)
((multicast-address
:documentation "Each network kell has a multicast group address. Every host
in that kell needs to both be a member of that group _and_ a
sender to that group, all the way up to the top kell. The
problem is that there are 256 multicast addresses to be used
within each subnet. These need to be divided intelligently
among the layers to maximize efficiency (if each smaller
subnet allocates a different set of multicast addresses,
then that encroaches on the set available for enclosing
subnets). Also, multicasting outside of subnets may be
problematic as well, since the space is chopped up and
largely allocated. IPv6 might offer some help.")
(uses-ssl)
(uses-encryption)))
(defmethod print-object ((obj network-kell) stream)
"Adds a little indicator to the kell to mark it as a network kell."
(format stream "[<@>~a ~a~:[ ~a~;~]]"
(name obj) (state obj)
(eql (continuation obj) null) (continuation obj)))
;;; A multicast message is sent whenever 'match-on' is triggered on a network
;;; kell. Unlike local matches, once the message is sent, the host continues to
;;; process other events while it waits for a response. Network events that have
;;; started processing have a higher priority than local events, as handling
;;; them quickly can result in less work done on the network (since every host
;;; in the multicast group starts working on a match when it receives a
;;; request).
;;;
;;; ======= THIS PARAGRAPH MAY BE A BAD IDEA =======
;;; The hosts that received the multicast (including the sender) respond with a
;;; unicast request containing any messages they have that match individual
;;; messages in the pattern. The recipient sends out another multicast when it
has at least one message for each in the pattern , which is to stop other
;;; hosts from building any more responses.
;;;
;;; The originator then sends out unicast addresses to each host that it is
;;; using messages from, to tell them that they were used in the match. The
;;; recipient then responds saying "great, they're still available (or not)",
;;; and the originator sends out a final multicast(?) confirming that the match
;;; has been completed, or that it has been rolled back, in which case it goes
;;; in the queue to be tried again.
whenever a process is added to a network kell , we share the process across
all hosts inside that it gets added to each of their event queues . So ,
;;; the network kells should basically be in sync.
;;;
When a process in a network kell is matched , its removal notice is sent to
;;; all hosts. Each has to reply that it has been removed before the reaction
;;; actually occurs.
;;;
;;; If there's a timeout, the host that times out is passivated, and a notice is
;;; sent to all hosts to passivate that host.
(defvar *top-kell*)
(defvar *local-kell*)
(defvar *host-definitions* ())
(defvar *local-host*)
(defvar *local-port*)
(defvar *real-kell*)
(defclass host-kell (kell)
((hostname :initarg :hostname :reader hostname)
(port :initarg :port :reader port)
(socket :initform nil))
(:documentation "This is a kell that represents the processes running on a
particular host. It is opaque from the current machine."))
(defmethod print-object ((obj host-kell) stream)
(format stream "[~a <~a:~d>~:[ ~a~;~]]"
(name obj) (hostname obj) (port obj)
(eql (continuation obj) null) (continuation obj)))
(defgeneric socket (kell)
(:method ((kell host-kell))
(or (slot-value kell 'socket)
(setf (slot-value kell 'socket)
(sockets:make-socket :address-family :internet
:type :stream
:connect :active
:remote-host (hostname kell)
:remote-port (port kell)
:keepalive t)))))
(defmethod add-process ((process kell) (kell network-kell) &optional watchp)
"When adding a kell to a network kell, it can be either the kell representing
the current host, a kell representing a different host, or another level of
network kell. This determines which one it should be and adds an appropriate
subkell."
(declare (ignore watchp))
(let ((new-kell (cond ((string-equal (name process) *local-kell*)
(ccl::def-standard-initial-binding *real-kell* process)
(setf *real-kell* process))
((assoc (list (name process)) *host-definitions*
:test #'equal)
(destructuring-bind ((name) host port)
(assoc (list (name process))
*host-definitions*
:test #'equal)
(make-instance 'host-kell
:name name :hostname host :port port)))
(t (change-class process 'network-kell)
process))))
(setf (state kell) (compose new-kell (state kell)))
(activate-process new-kell kell)))
(defmethod activate-process (process (kell host-kell))
(declare (ignore process))
(error "No processes can exist inside a host kell."))
FIXME : need to define a LOCAL - KELL , so that a network kell is distinct
( defmethod activate - process ( ( process network - kell ) ( ) )
( error " A network kell can not exist inside a local kell . " ) )
( defmethod activate - process ( ( process host - kell ) ( ) )
( error " A host can not exist inside a local kell . " ) )
(defmethod add-process (process (kell host-kell) &optional watchp)
(declare (ignore process watchp))
(error "No processes can exist inside a host kell."))
( defmethod add - process ( ( process network - kell ) ( ) )
( error " A network kell can not exist inside a local kell . " ) )
( defmethod add - process ( ( process host - kell ) ( ) )
( error " A host can not exist inside a local kell . " ) )
(defun defhost (kell-path hostname port)
"A defhost also implies that all containing kells are network kells."
(if (string-equal kell-path *local-kell*)
(listen-for-processes port)
FIXME : surrounding - kell - path should be the path from the outermost
to the the file is loaded in
(let ((surrounding-kell-path))
(push (list (append surrounding-kell-path
(if (consp kell-path) kell-path (list kell-path)))
hostname
port)
*host-definitions*)))
null)
;;; Adding-processes to network kells
(defmethod add-process ((process process) (kell network-kell) &optional watchp)
(declare (ignore watchp))
(mapc (lambda (sk)
FIXME : should probably allow here
(when (and (not (typep process 'kell))
(not (typep process 'trigger))
(typep sk 'host-kell))
(send-process process sk)))
(subkells kell))
(call-next-method))
(defun broadcast-event (item)
;;; FIXME: implement
(declare (ignore item))
(values))
(defun receive-broadcast-event (item)
FIXME : I think we need to do something to convert the kellpath to a
(push-event item))
(defmethod match-on ((process message) (kell network-kell))
"Find all triggers that could match – up, down, or local."
(select-matching-pattern (find-triggers-matching-message (name process) kell)
process))
(defmethod match-on ((process kell) (kell network-kell))
"Find all triggers that could match."
(select-matching-pattern (gethash (name process) (kell-patterns kell))
process))
(defmethod match-on ((process trigger) (kell network-kell))
"Just match on the new trigger."
(select-matching-pattern (list process) process))
IOLib stuff
(defun listen-for-processes (port)
(make-thread (lambda ()
(let ((socket (sockets:make-socket :address-family :internet
:type :stream
:connect :passive)))
(sockets:bind-address socket sockets:+ipv4-unspecified+
:port port :reuse-addr t)
(sockets:listen-on socket)
(loop for client = (sockets:accept-connection socket :wait t)
do (loop do
(handler-case (let ((process (eval (read client))))
(add-process process
(the kell (parent *real-kell*))))
(end-of-file () (return))
(kiln-error (c) (handle-error c))))
(close client))
(close socket)
(finish-output)))
:name "network-kell"))
(defun send-process (process dest-kell)
(sockets:send-to (socket dest-kell)
(map '(SIMPLE-ARRAY (UNSIGNED-BYTE 8) (*))
#'char-code
(format nil "~s" process))))
| null | https://raw.githubusercontent.com/sellout/Kilns/467ba599f457812daea41a7c56f74a1ec1cdc9b2/src/network.lisp | lisp | Came up with the default port number by mapping the letters i–s to 0–10, then
spelling “kilns” with those numbers:
(k -> 2), (i -> 0), (l -> 3), (n -> 5), (s -> 10)
{keep-alive}
FIXME: we should try another port
This file implements the behavior of distributed kells. They need to be
modeled on each instance, and behavior that crosses boundaries needs to be
initiated by the host that most causes the reaction – depending on the level
of the reaction, it might have to update the state for multiple hosts.
A multicast message is sent whenever 'match-on' is triggered on a network
kell. Unlike local matches, once the message is sent, the host continues to
process other events while it waits for a response. Network events that have
started processing have a higher priority than local events, as handling
them quickly can result in less work done on the network (since every host
in the multicast group starts working on a match when it receives a
request).
======= THIS PARAGRAPH MAY BE A BAD IDEA =======
The hosts that received the multicast (including the sender) respond with a
unicast request containing any messages they have that match individual
messages in the pattern. The recipient sends out another multicast when it
hosts from building any more responses.
The originator then sends out unicast addresses to each host that it is
using messages from, to tell them that they were used in the match. The
recipient then responds saying "great, they're still available (or not)",
and the originator sends out a final multicast(?) confirming that the match
has been completed, or that it has been rolled back, in which case it goes
in the queue to be tried again.
the network kells should basically be in sync.
all hosts. Each has to reply that it has been removed before the reaction
actually occurs.
If there's a timeout, the host that times out is passivated, and a notice is
sent to all hosts to passivate that host.
Adding-processes to network kells
FIXME: implement | (in-package :kilns)
(defvar *base-port* 20360)
Protocol format – use kilns reader
{ handshake ( par { path { from { top } } }
{ version " 0.0.1 " } ) }
(defun handle-request (client)
(unwind-protect
(multiple-value-bind (who remote-port) (sockets:remote-name client)
(handler-case
(let* ((*readtable* *kilns-readtable*)
(*package* (find-package :kilns-user))
(request (eval (read client))))
(case (name request)
(kilns-user::keep-alive
(write (eval (read-from-string "{keep-alive}"))
:stream client))
(kilns-user::handshake
(let* ((subst (match-local (eval (read-from-string "{handshake (par {path ?path} {version ?version})}"))
request))
(version (find-variable-value (intern "?version")
subst nil)))
(if (equal version "0.0.1")
(write (eval (read-from-string "{handshake (par {path {path {to {local {kell}}}}} {version \"0.0.1\"})}"))
:stream client)
(write (eval (read-from-string "{handshake {error \"incompatible protocol\"}}"))
:stream client)))))
(finish-output client))
(sockets:socket-connection-reset-error ()
(format t "Client reset connection!~%"))
(iolib.streams:hangup () (format t "Client closed conection!~%"))))
(close client)))
(defun dispatch-request (socket)
(let ((client (sockets:accept-connection socket :wait t)))
(when client
(make-thread (lambda () (handle-request client))
:name "request-handler"))))
(defun start-kilns-listener (&optional port)
(make-thread (lambda ()
(handler-case
(sockets:with-open-socket
(socket :connect :passive
:address-family :internet
:type :stream
:external-format '(:utf-8 :eol-style :lf)
:ipv6 nil
:local-host sockets:+ipv4-unspecified+
:local-port (or port *base-port*)
:reuse-address t)
(sockets:listen-on socket :backlog 5)
(loop while socket
do (dispatch-request socket)))
(sockets:socket-address-in-use-error ()
(format t "Bind: Address already in use, forgot :reuse-addr t?"))))
:name "kilns network listener"))
(defclass network-kell (kell)
((multicast-address
:documentation "Each network kell has a multicast group address. Every host
in that kell needs to both be a member of that group _and_ a
sender to that group, all the way up to the top kell. The
problem is that there are 256 multicast addresses to be used
within each subnet. These need to be divided intelligently
among the layers to maximize efficiency (if each smaller
subnet allocates a different set of multicast addresses,
then that encroaches on the set available for enclosing
subnets). Also, multicasting outside of subnets may be
problematic as well, since the space is chopped up and
largely allocated. IPv6 might offer some help.")
(uses-ssl)
(uses-encryption)))
(defmethod print-object ((obj network-kell) stream)
"Adds a little indicator to the kell to mark it as a network kell."
(format stream "[<@>~a ~a~:[ ~a~;~]]"
(name obj) (state obj)
(eql (continuation obj) null) (continuation obj)))
has at least one message for each in the pattern , which is to stop other
whenever a process is added to a network kell , we share the process across
all hosts inside that it gets added to each of their event queues . So ,
When a process in a network kell is matched , its removal notice is sent to
(defvar *top-kell*)
(defvar *local-kell*)
(defvar *host-definitions* ())
(defvar *local-host*)
(defvar *local-port*)
(defvar *real-kell*)
(defclass host-kell (kell)
((hostname :initarg :hostname :reader hostname)
(port :initarg :port :reader port)
(socket :initform nil))
(:documentation "This is a kell that represents the processes running on a
particular host. It is opaque from the current machine."))
(defmethod print-object ((obj host-kell) stream)
(format stream "[~a <~a:~d>~:[ ~a~;~]]"
(name obj) (hostname obj) (port obj)
(eql (continuation obj) null) (continuation obj)))
(defgeneric socket (kell)
(:method ((kell host-kell))
(or (slot-value kell 'socket)
(setf (slot-value kell 'socket)
(sockets:make-socket :address-family :internet
:type :stream
:connect :active
:remote-host (hostname kell)
:remote-port (port kell)
:keepalive t)))))
(defmethod add-process ((process kell) (kell network-kell) &optional watchp)
"When adding a kell to a network kell, it can be either the kell representing
the current host, a kell representing a different host, or another level of
network kell. This determines which one it should be and adds an appropriate
subkell."
(declare (ignore watchp))
(let ((new-kell (cond ((string-equal (name process) *local-kell*)
(ccl::def-standard-initial-binding *real-kell* process)
(setf *real-kell* process))
((assoc (list (name process)) *host-definitions*
:test #'equal)
(destructuring-bind ((name) host port)
(assoc (list (name process))
*host-definitions*
:test #'equal)
(make-instance 'host-kell
:name name :hostname host :port port)))
(t (change-class process 'network-kell)
process))))
(setf (state kell) (compose new-kell (state kell)))
(activate-process new-kell kell)))
(defmethod activate-process (process (kell host-kell))
(declare (ignore process))
(error "No processes can exist inside a host kell."))
FIXME : need to define a LOCAL - KELL , so that a network kell is distinct
( defmethod activate - process ( ( process network - kell ) ( ) )
( error " A network kell can not exist inside a local kell . " ) )
( defmethod activate - process ( ( process host - kell ) ( ) )
( error " A host can not exist inside a local kell . " ) )
(defmethod add-process (process (kell host-kell) &optional watchp)
(declare (ignore process watchp))
(error "No processes can exist inside a host kell."))
( defmethod add - process ( ( process network - kell ) ( ) )
( error " A network kell can not exist inside a local kell . " ) )
( defmethod add - process ( ( process host - kell ) ( ) )
( error " A host can not exist inside a local kell . " ) )
(defun defhost (kell-path hostname port)
"A defhost also implies that all containing kells are network kells."
(if (string-equal kell-path *local-kell*)
(listen-for-processes port)
FIXME : surrounding - kell - path should be the path from the outermost
to the the file is loaded in
(let ((surrounding-kell-path))
(push (list (append surrounding-kell-path
(if (consp kell-path) kell-path (list kell-path)))
hostname
port)
*host-definitions*)))
null)
(defmethod add-process ((process process) (kell network-kell) &optional watchp)
(declare (ignore watchp))
(mapc (lambda (sk)
FIXME : should probably allow here
(when (and (not (typep process 'kell))
(not (typep process 'trigger))
(typep sk 'host-kell))
(send-process process sk)))
(subkells kell))
(call-next-method))
(defun broadcast-event (item)
(declare (ignore item))
(values))
(defun receive-broadcast-event (item)
FIXME : I think we need to do something to convert the kellpath to a
(push-event item))
(defmethod match-on ((process message) (kell network-kell))
"Find all triggers that could match – up, down, or local."
(select-matching-pattern (find-triggers-matching-message (name process) kell)
process))
(defmethod match-on ((process kell) (kell network-kell))
"Find all triggers that could match."
(select-matching-pattern (gethash (name process) (kell-patterns kell))
process))
(defmethod match-on ((process trigger) (kell network-kell))
"Just match on the new trigger."
(select-matching-pattern (list process) process))
IOLib stuff
(defun listen-for-processes (port)
(make-thread (lambda ()
(let ((socket (sockets:make-socket :address-family :internet
:type :stream
:connect :passive)))
(sockets:bind-address socket sockets:+ipv4-unspecified+
:port port :reuse-addr t)
(sockets:listen-on socket)
(loop for client = (sockets:accept-connection socket :wait t)
do (loop do
(handler-case (let ((process (eval (read client))))
(add-process process
(the kell (parent *real-kell*))))
(end-of-file () (return))
(kiln-error (c) (handle-error c))))
(close client))
(close socket)
(finish-output)))
:name "network-kell"))
(defun send-process (process dest-kell)
(sockets:send-to (socket dest-kell)
(map '(SIMPLE-ARRAY (UNSIGNED-BYTE 8) (*))
#'char-code
(format nil "~s" process))))
|
10c05b558acc5656a5db600708183bae8d0058c2a7f575b160d874b99d78c11b | datastax/cass-config-builder | data_test.clj | ;; Copyright DataStax, Inc.
;; Please see the included license file for details.
(ns lcm.utils.data-test
(:refer-clojure :exclude [uuid?])
(:require [lcm.utils.data :refer :all]
[slingshot.test :refer :all]
[clojure.test :refer :all]))
(deftest test-truncate-string
(is (= "hell..." (truncate-string "hello" 4)))
(is (= "hello" (truncate-string "hello" 9)))
(is (= "h..." (truncate-string "hello" 1)))
(is (nil? (truncate-string nil 10))))
(deftest test-uuid?
(is (uuid? "352f86d8-9019-46e9-8a4a-37d6cce8de65"))
(is (not (uuid? "I'm not a uuid")))
(is (not (uuid? {:me "neither"})))
(is (not (uuid? nil))))
(deftest test-insert-into-vector
(is (= [:a :b :c] (insert-into-vector [:a :c] 1 :b)))
(is (= [:a :b :c] (insert-into-vector [:b :c] 0 :a)))
(is (= [:a :b :c] (insert-into-vector [:a :b] 2 :c)))
(is (thrown+? java.lang.IndexOutOfBoundsException
(insert-into-vector [:a :b] 42 :c))))
(deftest test-remove-from-vector
(is (= [:a :b] (remove-from-vector [:a :b :c] 2)))
(is (= [:a :c] (remove-from-vector [:a :b :c] 1)))
(is (= [:b :c] (remove-from-vector [:a :b :c] 0)))
(is (thrown+? java.lang.IndexOutOfBoundsException
(remove-from-vector [:a :b :c] 42))))
(deftest test-add-to-vector
(is (= [:a :b :c]
(add-to-vector [:a :b] :c)))
(is (= [:a :b :c]
(add-to-vector [:a :b] :c :at :end)))
(is (= [:a :b :c]
(add-to-vector [:b :c] :a :at :start)))
(is (= [:a :b :c]
(add-to-vector [:a :c] :b :after :a)))
(is (= [:a :b :c]
(add-to-vector [:a :b] :c :after :b))))
(deftest test-find-index
(is (= 0 (find-index #{:a} [:a :b :c])))
(is (= 1 (find-index #(= "foo" (:name %))
[{:name "bar"} {:name "foo"}]))))
(deftest test-deep-merge
(is (= {:a 1
:one {:b 2
:two {:c 3
:three {:d 4
:four 4}}}}
(deep-merge {:one {:two {:three {:four 4}}}}
{:a 1
:one {:b 2
:two {:c 3
:three {:d 4}}}})))
(testing "not a map"
(is (= 5 (deep-merge {:a 1} {:b 2} 5)))))
(deftest test-two-level-merge
(is (= {:a 1
:one {:b 2
replace map at : two
(two-level-merge
{:one {:two {:three {:four 4}}}}
{:a 1
:one {:b 2
:two {:c 3}}}))))
(deftest test-map-by
(let [maps [{:id 1 :name "foo"} {:id 2 :name "bar"}]]
(is (= {1 {:id 1 :name "foo"} 2 {:id 2 :name "bar"}}
(map-by :id maps)))))
(deftest test-asymmetric-interleave
(is (= [] (asymmetric-interleave)))
(is (= [] (asymmetric-interleave [])))
(is (= [1 2] (asymmetric-interleave [1 2])))
(is (= [1 2] (asymmetric-interleave [1 2] [])))
(is (= [1 3 2] (asymmetric-interleave [1 2] [3])))
(is (= [3 1 2] (asymmetric-interleave [3] [1 2]))))
(deftest get-by-val-test
(let [searchme [{:k1 1} {:k2 2} {"string-key" 3}]]
(is (= {:k1 1}
(get-by-val searchme :k1 1))
"Successfully get a map by value")
(is (= {"string-key" 3}
(get-by-val searchme "string-key" 3))
"Able to get get keys that aren't keywords")
(is (nil? (get-by-val searchme :k3 3))
"Return nil if key is missing")
(is (nil? (get-by-val searchme :k1 10))
"Return nil if value doesn't match"))
(is (= {:k1 1, :k2 2}
(get-by-val [{:k1 1, :k2 2}
{:k1 1, :k2 20}]
:k1 1))
"If there are multiple matches, return the first"))
(deftest test-format-map
(is (= "key1=\"val1\" key3=\"val3\" key4=\"\\\"val4\\\"\""
(format-map {:key1 "val1"
:key2 nil
:key3 "val3"
:key4 "\"val4\""}))))
(deftest test-format-seq
(is (= "f1, f2" (format-seq [:f1 :f2]))))
(deftest test-map-values
(is (= {:a 2 :b 3 :c 4}
(map-values inc {:a 1 :b 2 :c 3})))
(is (= {:a 2 :b 3 :c 4 :d 5}
(map-values inc (hash-map :a 1 :b 2 :c 3 :d 4)))))
(deftest test-as-boolean
(is (true? (as-boolean 1)))
(is (false? (as-boolean 0)))
(is (true? (as-boolean true)))
(is (false? (as-boolean false)))
(is (true? (as-boolean "True")))
(is (true? (as-boolean "true")))
(is (true? (as-boolean "1")))
(is (false? (as-boolean "False")))
(is (false? (as-boolean "false")))
(is (false? (as-boolean "0")))
(is (thrown+? IllegalArgumentException
(as-boolean 5)))
(is (thrown+? IllegalArgumentException
(as-boolean "foobie"))))
(deftest test-as-int
(is (= 5 (as-int (int 5))))
(is (= 5 (as-int 5)))
(is (= 1 (as-int true)))
(is (= 0 (as-int false)))
(is (= 5 (as-int "5"))))
(deftest test-sliding-map
(is (= [3 5 7 9] (sliding-map 2 + [1 2 3 4 5])))
(is (= [15] (sliding-map 8 + [1 2 3 4 5])))
(is (= [1 2 3 4 5] (sliding-map 1 + [1 2 3 4 5])))
(is (= [1 2 3 4 5] (sliding-map 0 + [1 2 3 4 5]))))
(deftest test-map-paths
(is (= [[:a :b :c]] (map-paths {:a {:b {:c 1}}})))
(is (= [[:a :b :c] [:a :b :d]] (map-paths {:a {:b {:c 1 :d "foo"}}})))
(is (= [[:a :b :c] [:a :d]] (map-paths {:a {:b {:c 1} :d 4}})))
(is (= [[:a]] (map-paths {:a [1 2 3]})))
(is (= [] (map-paths #(even? %2) {:a {:b {:c 1}}})))
(is (= [[:a :b :c]] (map-paths #(odd? %2) {:a {:b {:c 1}}}))))
(deftest test-all-paths
(is (= [[:a 0] [:a 1] [:a 2]] (all-paths {:a [1 2 3]}))))
(deftest test-patch-vector
(is (= [1 4 5] (patch-vector [nil 2] [nil 4] [1 nil 5])))
(is (= [1 2 3] (patch-vector nil nil [1 2 3])))
(is (= [1 2 3] (patch-vector [] [] [1 2 3])))
(is (= [1 2 3] (patch-vector [1 2 3 4] nil [1 2 3 nil]))))
(deftest test-prune
(is (= {} (prune {:a {}})))
(is (= {} (prune {:a []})))
(is (= [] (prune [{} []])))
(is (= {:a [:b]} (prune {:a [{} :b []]})))
(is (= {:a {:b {:c [1]}}}
(prune {:a {:b {:c [[] 1 {}]}
:d {}
:e []}})))
(is (= {} (prune {:a [{}]}))))
(deftest test-find-index-of-item-by-pred
(let [idx-value-fn (comp keyword str)
data (map idx-value-fn (range 50))
test-fn (fn [data]
(testing "returns the index of the first item for which pred returns thruthy"
(is (= 5 (find-index-of-item-by-pred #{:5 :50 :71}
data)))
(doseq [i (range 50)]
(is (= i (find-index-of-item-by-pred #(= % (idx-value-fn i))
data)))))
(testing "returns nil if pred does not return truthy for any item in the collection"
(is (= nil (find-index-of-item-by-pred #{:does-not-exist}
data)))))]
(testing "Given a LazySeq"
(test-fn data))
(testing "Given a vector"
(test-fn (vec data)))
(testing "Given a list"
(test-fn (apply list data)))))
(deftest test-extract-item-from-vector-by-pred
(let [idx-value-fn (comp keyword str)
data (into [] (map idx-value-fn (range 10)))]
(testing "Returns the first item matching pred plus vector without item"
(is (= [:3 [:0 :1 :2 :4 :5 :6 :7 :8 :9]]
(extract-item-from-vector-by-pred #{:4 :3 :5 :10} data)))
(doseq [i data]
(let [[match remaining] (extract-item-from-vector-by-pred #{i} data)]
(is (= i match))
(is (vector? remaining))
(is (= (remove #{i} data)
remaining)))))
(testing "Returns nil for match and given vector if pred does not match any element"
(is (= [nil data]
(extract-item-from-vector-by-pred #{:does-not-exist} data))))))
(deftest test-select-values
(is (= [1 2] (select-values {:a 1 :b 2} [:a :b]))))
(deftest test-sanitize-filename
(is (= "foo_bar_bazz" (sanitize-filename "foo%bar-bazz"))))
(deftest test-sec->ms
(is (= 1000 (sec->ms 1))))
(comment
(run-tests))
| null | https://raw.githubusercontent.com/datastax/cass-config-builder/88623cca0c09b510a6e363c4c7579ffa7d773344/test/lcm/utils/data_test.clj | clojure | Copyright DataStax, Inc.
Please see the included license file for details. |
(ns lcm.utils.data-test
(:refer-clojure :exclude [uuid?])
(:require [lcm.utils.data :refer :all]
[slingshot.test :refer :all]
[clojure.test :refer :all]))
(deftest test-truncate-string
(is (= "hell..." (truncate-string "hello" 4)))
(is (= "hello" (truncate-string "hello" 9)))
(is (= "h..." (truncate-string "hello" 1)))
(is (nil? (truncate-string nil 10))))
(deftest test-uuid?
(is (uuid? "352f86d8-9019-46e9-8a4a-37d6cce8de65"))
(is (not (uuid? "I'm not a uuid")))
(is (not (uuid? {:me "neither"})))
(is (not (uuid? nil))))
(deftest test-insert-into-vector
(is (= [:a :b :c] (insert-into-vector [:a :c] 1 :b)))
(is (= [:a :b :c] (insert-into-vector [:b :c] 0 :a)))
(is (= [:a :b :c] (insert-into-vector [:a :b] 2 :c)))
(is (thrown+? java.lang.IndexOutOfBoundsException
(insert-into-vector [:a :b] 42 :c))))
(deftest test-remove-from-vector
(is (= [:a :b] (remove-from-vector [:a :b :c] 2)))
(is (= [:a :c] (remove-from-vector [:a :b :c] 1)))
(is (= [:b :c] (remove-from-vector [:a :b :c] 0)))
(is (thrown+? java.lang.IndexOutOfBoundsException
(remove-from-vector [:a :b :c] 42))))
(deftest test-add-to-vector
(is (= [:a :b :c]
(add-to-vector [:a :b] :c)))
(is (= [:a :b :c]
(add-to-vector [:a :b] :c :at :end)))
(is (= [:a :b :c]
(add-to-vector [:b :c] :a :at :start)))
(is (= [:a :b :c]
(add-to-vector [:a :c] :b :after :a)))
(is (= [:a :b :c]
(add-to-vector [:a :b] :c :after :b))))
(deftest test-find-index
(is (= 0 (find-index #{:a} [:a :b :c])))
(is (= 1 (find-index #(= "foo" (:name %))
[{:name "bar"} {:name "foo"}]))))
(deftest test-deep-merge
(is (= {:a 1
:one {:b 2
:two {:c 3
:three {:d 4
:four 4}}}}
(deep-merge {:one {:two {:three {:four 4}}}}
{:a 1
:one {:b 2
:two {:c 3
:three {:d 4}}}})))
(testing "not a map"
(is (= 5 (deep-merge {:a 1} {:b 2} 5)))))
(deftest test-two-level-merge
(is (= {:a 1
:one {:b 2
replace map at : two
(two-level-merge
{:one {:two {:three {:four 4}}}}
{:a 1
:one {:b 2
:two {:c 3}}}))))
(deftest test-map-by
(let [maps [{:id 1 :name "foo"} {:id 2 :name "bar"}]]
(is (= {1 {:id 1 :name "foo"} 2 {:id 2 :name "bar"}}
(map-by :id maps)))))
(deftest test-asymmetric-interleave
(is (= [] (asymmetric-interleave)))
(is (= [] (asymmetric-interleave [])))
(is (= [1 2] (asymmetric-interleave [1 2])))
(is (= [1 2] (asymmetric-interleave [1 2] [])))
(is (= [1 3 2] (asymmetric-interleave [1 2] [3])))
(is (= [3 1 2] (asymmetric-interleave [3] [1 2]))))
(deftest get-by-val-test
(let [searchme [{:k1 1} {:k2 2} {"string-key" 3}]]
(is (= {:k1 1}
(get-by-val searchme :k1 1))
"Successfully get a map by value")
(is (= {"string-key" 3}
(get-by-val searchme "string-key" 3))
"Able to get get keys that aren't keywords")
(is (nil? (get-by-val searchme :k3 3))
"Return nil if key is missing")
(is (nil? (get-by-val searchme :k1 10))
"Return nil if value doesn't match"))
(is (= {:k1 1, :k2 2}
(get-by-val [{:k1 1, :k2 2}
{:k1 1, :k2 20}]
:k1 1))
"If there are multiple matches, return the first"))
(deftest test-format-map
(is (= "key1=\"val1\" key3=\"val3\" key4=\"\\\"val4\\\"\""
(format-map {:key1 "val1"
:key2 nil
:key3 "val3"
:key4 "\"val4\""}))))
(deftest test-format-seq
(is (= "f1, f2" (format-seq [:f1 :f2]))))
(deftest test-map-values
(is (= {:a 2 :b 3 :c 4}
(map-values inc {:a 1 :b 2 :c 3})))
(is (= {:a 2 :b 3 :c 4 :d 5}
(map-values inc (hash-map :a 1 :b 2 :c 3 :d 4)))))
(deftest test-as-boolean
(is (true? (as-boolean 1)))
(is (false? (as-boolean 0)))
(is (true? (as-boolean true)))
(is (false? (as-boolean false)))
(is (true? (as-boolean "True")))
(is (true? (as-boolean "true")))
(is (true? (as-boolean "1")))
(is (false? (as-boolean "False")))
(is (false? (as-boolean "false")))
(is (false? (as-boolean "0")))
(is (thrown+? IllegalArgumentException
(as-boolean 5)))
(is (thrown+? IllegalArgumentException
(as-boolean "foobie"))))
(deftest test-as-int
(is (= 5 (as-int (int 5))))
(is (= 5 (as-int 5)))
(is (= 1 (as-int true)))
(is (= 0 (as-int false)))
(is (= 5 (as-int "5"))))
(deftest test-sliding-map
(is (= [3 5 7 9] (sliding-map 2 + [1 2 3 4 5])))
(is (= [15] (sliding-map 8 + [1 2 3 4 5])))
(is (= [1 2 3 4 5] (sliding-map 1 + [1 2 3 4 5])))
(is (= [1 2 3 4 5] (sliding-map 0 + [1 2 3 4 5]))))
(deftest test-map-paths
(is (= [[:a :b :c]] (map-paths {:a {:b {:c 1}}})))
(is (= [[:a :b :c] [:a :b :d]] (map-paths {:a {:b {:c 1 :d "foo"}}})))
(is (= [[:a :b :c] [:a :d]] (map-paths {:a {:b {:c 1} :d 4}})))
(is (= [[:a]] (map-paths {:a [1 2 3]})))
(is (= [] (map-paths #(even? %2) {:a {:b {:c 1}}})))
(is (= [[:a :b :c]] (map-paths #(odd? %2) {:a {:b {:c 1}}}))))
(deftest test-all-paths
(is (= [[:a 0] [:a 1] [:a 2]] (all-paths {:a [1 2 3]}))))
(deftest test-patch-vector
(is (= [1 4 5] (patch-vector [nil 2] [nil 4] [1 nil 5])))
(is (= [1 2 3] (patch-vector nil nil [1 2 3])))
(is (= [1 2 3] (patch-vector [] [] [1 2 3])))
(is (= [1 2 3] (patch-vector [1 2 3 4] nil [1 2 3 nil]))))
(deftest test-prune
(is (= {} (prune {:a {}})))
(is (= {} (prune {:a []})))
(is (= [] (prune [{} []])))
(is (= {:a [:b]} (prune {:a [{} :b []]})))
(is (= {:a {:b {:c [1]}}}
(prune {:a {:b {:c [[] 1 {}]}
:d {}
:e []}})))
(is (= {} (prune {:a [{}]}))))
(deftest test-find-index-of-item-by-pred
(let [idx-value-fn (comp keyword str)
data (map idx-value-fn (range 50))
test-fn (fn [data]
(testing "returns the index of the first item for which pred returns thruthy"
(is (= 5 (find-index-of-item-by-pred #{:5 :50 :71}
data)))
(doseq [i (range 50)]
(is (= i (find-index-of-item-by-pred #(= % (idx-value-fn i))
data)))))
(testing "returns nil if pred does not return truthy for any item in the collection"
(is (= nil (find-index-of-item-by-pred #{:does-not-exist}
data)))))]
(testing "Given a LazySeq"
(test-fn data))
(testing "Given a vector"
(test-fn (vec data)))
(testing "Given a list"
(test-fn (apply list data)))))
(deftest test-extract-item-from-vector-by-pred
(let [idx-value-fn (comp keyword str)
data (into [] (map idx-value-fn (range 10)))]
(testing "Returns the first item matching pred plus vector without item"
(is (= [:3 [:0 :1 :2 :4 :5 :6 :7 :8 :9]]
(extract-item-from-vector-by-pred #{:4 :3 :5 :10} data)))
(doseq [i data]
(let [[match remaining] (extract-item-from-vector-by-pred #{i} data)]
(is (= i match))
(is (vector? remaining))
(is (= (remove #{i} data)
remaining)))))
(testing "Returns nil for match and given vector if pred does not match any element"
(is (= [nil data]
(extract-item-from-vector-by-pred #{:does-not-exist} data))))))
(deftest test-select-values
(is (= [1 2] (select-values {:a 1 :b 2} [:a :b]))))
(deftest test-sanitize-filename
(is (= "foo_bar_bazz" (sanitize-filename "foo%bar-bazz"))))
(deftest test-sec->ms
(is (= 1000 (sec->ms 1))))
(comment
(run-tests))
|
be4d45412bf4c1c34f43dc521f3bf9f63160c8c36fe8d7f5a6a7d8f3fa86e4db | haskell/stm | TArray.hs | # LANGUAGE CPP , DeriveDataTypeable , FlexibleInstances , MultiParamTypeClasses #
#if __GLASGOW_HASKELL__ >= 701
# LANGUAGE Trustworthy #
#endif
-----------------------------------------------------------------------------
-- |
-- Module : Control.Concurrent.STM.TArray
Copyright : ( c ) The University of Glasgow 2005
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer :
-- Stability : experimental
Portability : non - portable ( requires STM )
--
TArrays : transactional arrays , for use in the STM monad
--
-----------------------------------------------------------------------------
module Control.Concurrent.STM.TArray (
TArray
) where
import Data.Array (Array, bounds)
import Data.Array.Base (listArray, arrEleBottom, unsafeAt, MArray(..),
IArray(numElements))
import Data.Ix (rangeSize)
import Data.Typeable (Typeable)
import Control.Concurrent.STM.TVar (TVar, newTVar, readTVar, writeTVar)
#ifdef __GLASGOW_HASKELL__
import GHC.Conc (STM)
#else
import Control.Sequential.STM (STM)
#endif
|TArray is a transactional array , supporting the usual ' MArray '
-- interface for mutable arrays.
--
It is currently implemented as @Array ix ( TVar e)@ ,
-- but it may be replaced by a more efficient implementation in the future
-- (the interface will remain the same, however).
--
newtype TArray i e = TArray (Array i (TVar e)) deriving (Eq, Typeable)
-- One TVar per element: reads and writes touch only the addressed cell,
-- so transactions over different indices do not conflict with each other.
instance MArray TArray e STM where
    getBounds (TArray a) = return (bounds a)
    newArray b e = do
        a <- rep (rangeSize b) (newTVar e)
        return $ TArray (listArray b a)
    -- newArray_ fills every cell with 'arrEleBottom' (undefined element),
    -- per the MArray contract for uninitialised arrays.
    newArray_ b = do
        a <- rep (rangeSize b) (newTVar arrEleBottom)
        return $ TArray (listArray b a)
    unsafeRead (TArray a) i = readTVar $ unsafeAt a i
    unsafeWrite (TArray a) i e = writeTVar (unsafeAt a i) e
    getNumElements (TArray a) = return (numElements a)
-- | Run the given action @n@ times and collect the results, threading an
-- explicit accumulator so no stack is consumed (unlike 'replicateM').
-- The accumulator yields the results in reverse order, which is harmless
-- here: every caller replicates one identical element.
rep :: Monad m => Int -> m a -> m [a]
rep n m = loop n []
  where
    loop 0 acc = return acc
    loop k acc = m >>= \x -> loop (k - 1) (x : acc)
| null | https://raw.githubusercontent.com/haskell/stm/319e380ab2ddfb99ab499b6783e22f2b4faaee38/Control/Concurrent/STM/TArray.hs | haskell | ---------------------------------------------------------------------------
|
Module : Control.Concurrent.STM.TArray
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer :
Stability : experimental
---------------------------------------------------------------------------
interface for mutable arrays.
but it may be replaced by a more efficient implementation in the future
(the interface will remain the same, however).
| Like 'replicateM' but uses an accumulator to prevent stack overflows.
Unlike 'replicateM' the returned list is in reversed order.
This doesn't matter though since this function is only used to create
arrays with identical elements. | # LANGUAGE CPP , DeriveDataTypeable , FlexibleInstances , MultiParamTypeClasses #
#if __GLASGOW_HASKELL__ >= 701
# LANGUAGE Trustworthy #
#endif
Copyright : ( c ) The University of Glasgow 2005
Portability : non - portable ( requires STM )
TArrays : transactional arrays , for use in the STM monad
module Control.Concurrent.STM.TArray (
TArray
) where
import Data.Array (Array, bounds)
import Data.Array.Base (listArray, arrEleBottom, unsafeAt, MArray(..),
IArray(numElements))
import Data.Ix (rangeSize)
import Data.Typeable (Typeable)
import Control.Concurrent.STM.TVar (TVar, newTVar, readTVar, writeTVar)
#ifdef __GLASGOW_HASKELL__
import GHC.Conc (STM)
#else
import Control.Sequential.STM (STM)
#endif
|TArray is a transactional array , supporting the usual ' MArray '
It is currently implemented as @Array ix ( TVar e)@ ,
newtype TArray i e = TArray (Array i (TVar e)) deriving (Eq, Typeable)
instance MArray TArray e STM where
getBounds (TArray a) = return (bounds a)
newArray b e = do
a <- rep (rangeSize b) (newTVar e)
return $ TArray (listArray b a)
newArray_ b = do
a <- rep (rangeSize b) (newTVar arrEleBottom)
return $ TArray (listArray b a)
unsafeRead (TArray a) i = readTVar $ unsafeAt a i
unsafeWrite (TArray a) i e = writeTVar (unsafeAt a i) e
getNumElements (TArray a) = return (numElements a)
rep :: Monad m => Int -> m a -> m [a]
rep n m = go n []
where
go 0 xs = return xs
go i xs = do
x <- m
go (i-1) (x:xs)
|
a29cbc30ed952ee77149a5724a8ee5c594b522cfbc9e67be356b801753308c8d | peterschwarz/clj-gpio | build.clj | (require 'cljs.build.api)
;; One-shot ClojureScript build: compiles the main and dev source trees
;; into a single Node.js-targeted dev bundle under target/out.
(cljs.build.api/build
 (cljs.build.api/inputs "src/main/clojure" "src/dev/clojure")
 {:main 'gpio.dev
  :output-to "target/out/dev.js"
  :output-dir "target/out"
  :target :nodejs })
| null | https://raw.githubusercontent.com/peterschwarz/clj-gpio/88fedec2605f6ff6713bfca382a9451538e99fe2/scripts/build.clj | clojure | (require 'cljs.build.api)
(cljs.build.api/build
(cljs.build.api/inputs "src/main/clojure" "src/dev/clojure")
{:main 'gpio.dev
:output-to "target/out/dev.js"
:output-dir "target/out"
:target :nodejs })
| |
6c35d2f7d7eaa682a21439e6bed0cae2eebe1b392b132fdf6f8cb2d08b4a84c7 | pouyakary/Nota | FunctionCall.hs |
module Language.BackEnd.Evaluator.Nodes.FunctionCall ( evalFunctionCall ) where
-- ─── IMPORTS ────────────────────────────────────────────────────────────────────
import Data.Map ( Map )
import qualified Data.Map as Map
import Data.Time
import Language.BackEnd.Evaluator.Types
import Language.FrontEnd.AST
import Model
import System.IO.Unsafe
-- ─── EVAL FUNCTION CALL ─────────────────────────────────────────────────────────
-- | Dispatches a parsed function call to the matching built-in by name;
-- unknown names yield a Left error. Single-argument numeric functions go
-- through 'runSingleArgumentedFunction'; the variadic reductions
-- (Max/Min/Sum) go through 'runFunctionOnArray'.
evalFunctionCall :: StemEvalSignature
evalFunctionCall ( evalFunc ) ( ASTFunctionCall ( ASTIdentifier name ) args ) model =
    case name of
        "Out" ->
            execOutFunc evalFunc args model
        "Sqrt" ->
            -- NOTE(review): display label "Square Bracket" looks like a typo
            -- for "Square Root"; left unchanged because it is user-visible.
            runSingleArgFunc "Square Bracket" sqrt
        "Root" ->
            computeNthRoot evalFunc args model
        "Log" ->
            computeLogarithm evalFunc args model
        "Abs" ->
            runSingleArgFunc "Absolute" abs
        "Floor" ->
            -- fromIntegral replaces the old read/show round-trip (same value,
            -- no string detour).
            runSingleArgFunc "Floor" (\x -> fromIntegral (floor x :: Integer))
        "Ceil" ->
            runSingleArgFunc "Ceiling" (\x -> fromIntegral (ceiling x :: Integer))
        "Sin" ->
            runSingleArgFunc "Sine" sin
        "Cos" ->
            runSingleArgFunc "Cosine" cos
        "Tan" ->
            runSingleArgFunc "Tangent" tan
        "Cot" ->
            runSingleArgFunc "Cotangent" (\x -> ((cos x) / (sin x)))
        "Sec" ->
            runSingleArgFunc "Secant" (\x -> (1 / (cos x)))
        "Csc" ->
            runSingleArgFunc "Cosecant" (\x -> (1 / (sin x)))
        "Asin" ->
            runSingleArgFunc "Area Sine" asin
        "Acos" ->
            runSingleArgFunc "Area Cosine" acos
        "Atan" ->
            runSingleArgFunc "Area Tangent" atan
        "Sinh" ->
            runSingleArgFunc "Hyperbolic Sine" sinh
        "Cosh" ->
            runSingleArgFunc "Hyperbolic Cosine" cosh
        "Sech" ->
            -- BUG FIX: sech x = 1 / cosh x (previously used sinh, which had
            -- been swapped with Csch below).
            runSingleArgFunc "Hyperbolic Secant" (\x -> (1 / (cosh x)))
        "Csch" ->
            -- BUG FIX: csch x = 1 / sinh x (previously used cosh).
            runSingleArgFunc "Hyperbolic Cosecant" (\x -> (1 / (sinh x)))
        "Tanh" ->
            runSingleArgFunc "Hyperbolic Tangent" tanh
        "Asinh" ->
            runSingleArgFunc "Hyperbolic Area Sine" asinh
        "Acosh" ->
            runSingleArgFunc "Hyperbolic Area Cosine" acosh
        "Atanh" ->
            runSingleArgFunc "Hyperbolic Area Tangent" atanh
        "Max" ->
            runArrayArgFunc "Maximum" maximum
        "Min" ->
            runArrayArgFunc "Minimum" minimum
        "Sum" ->
            runArrayArgFunc "Sum" sum
        "Exp" ->
            runSingleArgFunc "Exponent" exp
        "Sgn" ->
            runSingleArgFunc "Sign" sgnFunc
        _ ->
            Left $ "Function \"" ++ name ++ "\" does not exist."
    where
        -- Shared partial applications so each arm only supplies the display
        -- name and the numeric function.
        runSingleArgFunc =
            runSingleArgumentedFunction model evalFunc args
        runArrayArgFunc =
            runFunctionOnArray evalFunc args model
-- ─── SIGN ───────────────────────────────────────────────────────────────────────
-- | Sign of a number: 0 for zero, 1 for positive, -1 otherwise.
-- (NaN fails both tests and yields -1, matching the original conditional.)
sgnFunc :: Double -> Double
sgnFunc x
    | x == 0    = 0
    | x > 0     = 1
    | otherwise = -1
─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─
-- Logarithm: with two arguments the first is the base; with one argument
-- the natural logarithm is taken. Evaluation errors short-circuit through
-- the Either monad exactly as the original nested cases did.
computeLogarithm evalFunc arguments model =
    case length arguments of
        2 -> do
            base <- evalFunc ( arguments !! 0 ) model
            x    <- evalFunc ( arguments !! 1 ) model
            return $ logBase base x
        1 -> do
            x <- evalFunc ( arguments !! 0 ) model
            return $ log x
        _ ->
            Left $ functionGetsThisMuchArguments "Logarithm" "one or two"
-- ─── ROOT ───────────────────────────────────────────────────────────────────────
-- Nth root: Root(n, x) = x ** (1 / n). Requires exactly two arguments;
-- evaluation errors short-circuit through the Either monad.
computeNthRoot evalFunc arguments model =
    case length arguments of
        2 -> do
            n <- evalFunc ( arguments !! 0 ) model
            x <- evalFunc ( arguments !! 1 ) model
            return $ x ** ( 1 / n )
        _ ->
            Left $ functionGetsThisMuchArguments "Nth Root" "two"
-- ─── OUT PUT FUNCTION ───────────────────────────────────────────────────────────
-- Out(n): fetches the n-th previously computed result (1-based) from the
-- session history stored in the model. The argument is evaluated and
-- truncated with 'floor', so Out(2.9) reads entry 2.
execOutFunc (evalFunc) arguments model =
    case length arguments of
        1 ->
            case evalFunc ( arguments !! 0 ) model of
                Left error ->
                    Left error
                Right historyNumber ->
                    -- 1-based bounds check against the stored history
                    if index > 0 && index <= ( length historyOfResults )
                        then Right $ historyOfResults !! ( index - 1 )
                        else Left $ "Output no. " ++ ( show index ) ++ " does not exists."
                    where
                        index =
                            floor historyNumber
        _ ->
            Left $ functionGetsThisMuchArguments "Out" "one"
    where
        historyOfResults =
            computedHistory model
-- ─── RUN FUNCTION ON ARRAY ──────────────────────────────────────────────────────
-- Folds a list-reducing function (Max/Min/Sum) over the evaluated
-- arguments, pairwise from the right: each step reduces the head against
-- the recursively reduced tail via computeFunc [head, rest]. A single
-- argument is returned unreduced; the first evaluation error aborts the
-- whole fold.
runFunctionOnArray (evalFunc) arguments model name (computeFunc) =
    case length arguments of
        0 ->
            Left $ functionGetsThisMuchArguments name "at least one"
        1 ->
            case evalFunc (arguments !! 0) model of
                Left error ->
                    Left error
                Right result ->
                    Right result
        _ ->
            case evalFunc (arguments !! 0) model of
                Left error ->
                    Left error
                Right result ->
                    -- combine head with the reduction of the remaining args
                    case runFunctionOnArray evalFunc (tail arguments) model name computeFunc of
                        Left restError ->
                            Left restError
                        Right restResult ->
                            Right $ computeFunc [ result, restResult ]
-- ─── RUN SINGLE ARGUMENT FUNCTION ───────────────────────────────────────────────
-- Applies a pure one-argument function to the evaluated single argument,
-- passing evaluation errors through untouched (fmap over Either).
runSingleArgumentedFunction model evalFunc arguments name computeFunc =
    case length arguments of
        1 -> fmap computeFunc ( evalFunc ( arguments !! 0 ) model )
        _ -> Left $ functionGetsThisMuchArguments name "one"
-- ─── FUNCTION GETS THIS MUCH ARGUMNETS ──────────────────────────────────────────
-- Renders the arity-error message used when a built-in is called with the
-- wrong number of arguments.
functionGetsThisMuchArguments name size =
    concat [ "Function '", name, "' accepts only ", size, " argument." ]
-- ──────────────────────────────────────────────────────────────────────────────── | null | https://raw.githubusercontent.com/pouyakary/Nota/d5e29eca7ea34d72835a9708977fa33c030393d1/source/Language/BackEnd/Evaluator/Nodes/FunctionCall.hs | haskell | ─── IMPORTS ────────────────────────────────────────────────────────────────────
─── EVAL FUNCTION CALL ─────────────────────────────────────────────────────────
─── SIGN ───────────────────────────────────────────────────────────────────────
─── ROOT ───────────────────────────────────────────────────────────────────────
─── OUT PUT FUNCTION ───────────────────────────────────────────────────────────
─── RUN FUNCTION ON ARRAY ──────────────────────────────────────────────────────
─── RUN SINGLE ARGUMENT FUNCTION ───────────────────────────────────────────────
─── FUNCTION GETS THIS MUCH ARGUMNETS ──────────────────────────────────────────
──────────────────────────────────────────────────────────────────────────────── |
module Language.BackEnd.Evaluator.Nodes.FunctionCall ( evalFunctionCall ) where
import Data.Map ( Map )
import qualified Data.Map as Map
import Data.Time
import Language.BackEnd.Evaluator.Types
import Language.FrontEnd.AST
import Model
import System.IO.Unsafe
evalFunctionCall :: StemEvalSignature
evalFunctionCall ( evalFunc ) ( ASTFunctionCall ( ASTIdentifier name ) args ) model =
case name of
"Out" ->
execOutFunc evalFunc args model
"Sqrt" ->
runSingleArgFunc "Square Bracket" sqrt
"Root" ->
computeNthRoot evalFunc args model
"Log" ->
computeLogarithm evalFunc args model
"Abs" ->
runSingleArgFunc "Absolute" abs
"Floor" ->
runSingleArgFunc "Floor" (\x -> (read (show (floor x)) :: Double))
"Ceil" ->
runSingleArgFunc "Ceiling" (\x -> (read (show (ceiling x)) :: Double))
"Sin" ->
runSingleArgFunc "Sine" sin
"Cos" ->
runSingleArgFunc "Cosine" cos
"Tan" ->
runSingleArgFunc "Tangent" tan
"Cot" ->
runSingleArgFunc "Cotangent" (\x -> ((cos x) / (sin x)))
"Sec" ->
runSingleArgFunc "Secant" (\x -> (1 / (cos x)))
"Csc" ->
runSingleArgFunc "Cosecant" (\x -> (1 / (sin x)))
"Asin" ->
runSingleArgFunc "Area Sine" asin
"Acos" ->
runSingleArgFunc "Area Cosine" acos
"Atan" ->
runSingleArgFunc "Area Tangent" atan
"Sinh" ->
runSingleArgFunc "Hyperbolic Sine" sinh
"Cosh" ->
runSingleArgFunc "Hyperbolic Cosine" cosh
"Sech" ->
runSingleArgFunc "Hyperbolic Secant" (\x -> (1 / (sinh x)))
"Csch" ->
runSingleArgFunc "Hyperbolic Cosecant" (\x -> (1 / (cosh x)))
"Tanh" ->
runSingleArgFunc "Hyperbolic Tangent" tanh
"Asinh" ->
runSingleArgFunc "Hyperbolic Area Sine" asinh
"Acosh" ->
runSingleArgFunc "Hyperbolic Area Cosine" acosh
"Atanh" ->
runSingleArgFunc "Hyperbolic Area Tangent" atanh
"Max" ->
runArrayArgFunc "Maximum" maximum
"Min" ->
runArrayArgFunc "Minimum" minimum
"Sum" ->
runArrayArgFunc "Sum" sum
"Exp" ->
runSingleArgFunc "Exponent" exp
"Sgn" ->
runSingleArgFunc "Sign" sgnFunc
_ ->
Left $ "Function \"" ++ name ++ "\" does not exist."
where
runSingleArgFunc =
runSingleArgumentedFunction model evalFunc args
runArrayArgFunc =
runFunctionOnArray evalFunc args model
sgnFunc :: Double -> Double
sgnFunc x =
if x == 0 then 0 else ( if x > 0 then 1 else -1 )
─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─ ─
computeLogarithm (evalFunc) arguments model =
case length arguments of
2 ->
case evalFunc (arguments !! 0) model of
Left xError ->
Left xError
Right baseResult ->
case evalFunc (arguments !! 1) model of
Left baseError ->
Left baseError
Right xResult ->
Right $ logBase baseResult xResult
1 ->
case evalFunc (arguments !! 0) model of
Left error ->
Left error
Right result ->
Right $ log result
_ ->
Left $ functionGetsThisMuchArguments "Logarithm" "one or two"
computeNthRoot (evalFunc) arguments model =
case length arguments of
2 ->
case evalFunc (arguments !! 0) model of
Left xError ->
Left xError
Right baseResult ->
case evalFunc (arguments !! 1) model of
Left baseError ->
Left baseError
Right xResult ->
Right $ xResult ** (1 / baseResult)
_ ->
Left $ functionGetsThisMuchArguments "Nth Root" "two"
execOutFunc (evalFunc) arguments model =
case length arguments of
1 ->
case evalFunc ( arguments !! 0 ) model of
Left error ->
Left error
Right historyNumber ->
if index > 0 && index <= ( length historyOfResults )
then Right $ historyOfResults !! ( index - 1 )
else Left $ "Output no. " ++ ( show index ) ++ " does not exists."
where
index =
floor historyNumber
_ ->
Left $ functionGetsThisMuchArguments "Out" "one"
where
historyOfResults =
computedHistory model
runFunctionOnArray (evalFunc) arguments model name (computeFunc) =
case length arguments of
0 ->
Left $ functionGetsThisMuchArguments name "at least one"
1 ->
case evalFunc (arguments !! 0) model of
Left error ->
Left error
Right result ->
Right result
_ ->
case evalFunc (arguments !! 0) model of
Left error ->
Left error
Right result ->
case runFunctionOnArray evalFunc (tail arguments) model name computeFunc of
Left restError ->
Left restError
Right restResult ->
Right $ computeFunc [ result, restResult ]
runSingleArgumentedFunction model (evalFunc) arguments name (computeFunc) =
case length arguments of
1 ->
case evalFunc (arguments !! 0) model of
Left error ->
Left error
Right result ->
Right $ computeFunc result
_ ->
Left $ functionGetsThisMuchArguments name "one"
functionGetsThisMuchArguments name size =
"Function '" ++ name ++ "' accepts only " ++ size ++ " argument."
|
172d3e0a960524fb366203c0c31c23af8c4446e98bf4a72e8ef589a4a3dfdd77 | AlexKnauth/unnwraith | snowman.rkt | #lang unnwraith racket
// Example based on "Programmable Publishing: Digital Humanities for Everyone!"
// -humanities/
//
// tutorial on "Building a snowman with Racket"
// -humanities/Snowman.html
//
require pict
define left-arm:
colorize:
text "Y" #'(bold) 30:
pi * .5
"brown"
define right-arm:
colorize:
text "Y" #'(bold) 30:
pi * 1.5
"brown"
define head:
cc-superimpose:
disk 50 ~color "white"
text ":^D" #'(bold) 20:
pi * 1.5
define body:
hc-append: left-arm
cc-superimpose:
disk 65 ~color "white"
text "* * *" #'(bold) 15:
pi * .5
right-arm
define butt:
disk 80 ~color "white"
define top-hat:
vc-append:
filled-rectangle 30 30:
~color "black"
filled-rectangle 50 10:
~color "black"
define snowman:
vc-append top-hat head body butt
define snow-scene:
vc-append:
cbl-superimpose:
filled-rectangle 400 300:
~color "aqua"
snowman
filled-rectangle 400 100:
~color "white"
| null | https://raw.githubusercontent.com/AlexKnauth/unnwraith/779051a56324a71b5ccf90d583855a47228b84cb/unnwraith/test/snowman.rkt | racket | #lang unnwraith racket
// Example based on "Programmable Publishing: Digital Humanities for Everyone!"
// -humanities/
//
// tutorial on "Building a snowman with Racket"
// -humanities/Snowman.html
//
require pict
define left-arm:
colorize:
text "Y" #'(bold) 30:
pi * .5
"brown"
define right-arm:
colorize:
text "Y" #'(bold) 30:
pi * 1.5
"brown"
define head:
cc-superimpose:
disk 50 ~color "white"
text ":^D" #'(bold) 20:
pi * 1.5
define body:
hc-append: left-arm
cc-superimpose:
disk 65 ~color "white"
text "* * *" #'(bold) 15:
pi * .5
right-arm
define butt:
disk 80 ~color "white"
define top-hat:
vc-append:
filled-rectangle 30 30:
~color "black"
filled-rectangle 50 10:
~color "black"
define snowman:
vc-append top-hat head body butt
define snow-scene:
vc-append:
cbl-superimpose:
filled-rectangle 400 300:
~color "aqua"
snowman
filled-rectangle 400 100:
~color "white"
| |
1f0c197b2609e37606c73d2bd7835e7ebdfe0e80f84ce10caf54212bea43a995 | skanev/playground | 49.scm | SICP exercise 3.49
;
; Give a scenario where the deadlock-avoidance mechanism described above does
; not work. (Hint: In the exchange problem, each process knows in advance
; which accounts it will need to get access to. Consider a situation where a
; process must get access to some shared resource before it can know which
; additional shared resources it will require.)
; The question pretty much answers itself. Let's say that we need to acquire
two locks . We need to acquire the first lock in order to determine what
second lock we need to acquire later . Let 's say we need to acquire a in
order to determine that we need to acquire b second . If the reverse case is
; possible (we acquire b and then we determine that we need to acquire a
second ) , there is a possibility of a deadlock .
;
We can solve this problem by having a third lock we acquire before acquiring
the first one .
| null | https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/sicp/03/49.scm | scheme |
Give a scenario where the deadlock-avoidance mechanism described above does
not work. (Hint: In the exchange problem, each process knows in advance
which accounts it will neet to get access to. Consider a situation where a
process must get access to some shared resource before it can know which
additional shared resources it will require.)
The question pretty much answers itself. Let's say that we need to acquire
possible (we acquire b and then we determine that we need to acquire a
| SICP exercise 3.49
two locks . We need to acquire the first lock in order to determine what
second lock we need to acquire later . Let 's say we need to acquire a in
order to determine that we need to acquire b second . If the reverse case is
second ) , there is a possibility of a deadlock .
We can solve this problem by having a third lock we acquire before acquiring
the first one .
|
36e2559218c008ca5e63c60631d8aac61b97fa117e392dbf8e3846cbaf607f10 | glebec/haskell-programming-allen-moronuki | Benching.hs | module Benching where
import Criterion.Main
-- -- Report-adapted version (slow)
infixl 9 ! ?
( ! ? ) : : ( t , t ) = > [ a ] - > t - > Maybe a
-- _ !? n | n < 0 = Nothing
-- [] !? _ = Nothing
-- (x:_) !? 0 = Just x
-- (_:xs) !? n = xs !? (n - 1)
-- optimized version
infixl 9 !?
{-# INLINABLE (!?) #-}
-- | Safe list indexing. A right fold builds a chain of continuations that
-- count the index down: each element either answers (index 0) or hands the
-- decremented index to the rest of the list. Returns Nothing for negative
-- or out-of-range indices; only walks as far as the requested index.
(!?) :: [a] -> Int -> Maybe a -- fast
-- (!?) :: (Num t, Eq t) => [a] -> t -> Maybe a -- slow
xs !? n
  | n < 0 = Nothing
  | otherwise =
      foldr
        (\x r k ->
          case k of
            0 -> Just x
            _ -> r (k - 1))
        (const Nothing) xs n
-- | Shared fixture: a ~10k element list for the indexing benchmarks.
myList :: [Int]
myList = [1..9999]
-- | Criterion entry point: compares partial '!!' with total '!?' at the
-- last valid index of 'myList'.
benching :: IO ()
benching = defaultMain
  [ bench "index list 9999"
    $ whnf (myList !!) 9998
  , bench "index list maybe index 9999"
    $ whnf (myList !?) 9998
  ]
| null | https://raw.githubusercontent.com/glebec/haskell-programming-allen-moronuki/99bd232f523e426d18a5e096f1cf771228c55f52/28-basic-libraries/exercises/src/Benching.hs | haskell | -- Report-adapted version (slow)
_ !? n | n < 0 = Nothing
[] !? _ = Nothing
(x:_) !? 0 = Just x
(_:xs) !? n = xs !? (n - 1)
optimized version
fast
slow | module Benching where
import Criterion.Main
infixl 9 ! ?
( ! ? ) : : ( t , t ) = > [ a ] - > t - > Maybe a
infixl 9 !?
# INLINABLE ( ! ? ) #
xs !? n
| n < 0 = Nothing
| otherwise =
foldr
(\x r k ->
case k of
0 -> Just x
_ -> r (k - 1))
(const Nothing) xs n
myList :: [Int]
myList = [1..9999]
benching :: IO ()
benching = defaultMain
[ bench "index list 9999"
$ whnf (myList !!) 9998
, bench "index list maybe index 9999"
$ whnf (myList !?) 9998
]
|
2d8ceab98f21078242a748954654105ddfe49fea23ec5ab5d2d5e2897399d572 | migae/datastore | ekey.clj | (in-ns 'migae.datastore)
;; FIXME: rename: co-ctors-key?
( load " emap " )
(declare keychain? keychain= dogtag)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; kind: the datastore kind of a thing, as a keyword. Native Keys/Entities
;; report their own kind; keywords and keychain vectors take the kind from
;; the namespace of the (last) keylink.
(defmulti kind class)
(defmethod kind Key
  [^Key k]
  (keyword (.getKind k)))
(defmethod kind Entity
  [^Entity e]
  (keyword (.getKind e)))
;; (defmethod kind migae.datastore.IPersistentEntityMap
;;   [^migae.datastore.PersistentEntityMap e]
;;   (log/trace "IPersistentEntityMap.kind")
;;   (keyword (.getKind (.content e))))
(defmethod kind migae.datastore.PersistentEntityMap
  [^migae.datastore.PersistentEntityMap e]
  ;; (log/trace "PersistentEntityMap.kind")
  (keyword (.getKind (.content e))))
(defmethod kind migae.datastore.PersistentEntityHashMap
  [^migae.datastore.PersistentEntityHashMap e]
  (keyword (namespace (last (.k e)))))
(defmethod kind clojure.lang.Keyword
  [^clojure.lang.Keyword kw]
  ;; nil for an unnamespaced keyword (a bare :Kind has no namespace)
  (when-let [k (namespace kw)]
    (keyword k)))
;; (clojure.core/name kw)))
(defmethod kind clojure.lang.PersistentVector
  [^clojure.lang.PersistentVector k]
  ;; FIXME: validate keychain contains only keylinks
  (if (keychain? k)
    (keyword (namespace (last k)))))
;; identifier: the name-or-id part of a thing's key -- the numeric id when
;; the key has no name, otherwise the name as a string. The hash-map method
;; read-strings the dogtag's name instead.
(defmulti identifier class)
(defmethod identifier Key
  [^Key k]
  ;; (log/trace "Key identifier" k)
  (let [nm (.getName k)
        id (.getId k)]
    (if (nil? nm) id (str nm))))
(defmethod identifier migae.datastore.PersistentEntityMap
  [^migae.datastore.PersistentEntityMap em]
  ;; (log/trace "PersistentEntityMap.identifier")
  (let [k (.getKey (.content em))
        nm (.getName k)
        id (.getId k)]
    (if (nil? nm) id (str nm))))
(defmethod identifier migae.datastore.PersistentEntityHashMap
  [^migae.datastore.PersistentEntityHashMap em]
  ;; (log/trace "PersistentEntityHashMap.identifier")
  (let [fob (dogtag (.k em))
        nm (read-string (name fob))]
    nm))
(defmethod identifier clojure.lang.PersistentVector
  [^clojure.lang.PersistentVector keychain]
  ;; FIXME: validate vector contains only keylinks
  (let [k (last keychain)]
    (if-let [nm (.getName k)]
      nm
      (.getId k))))
;; True iff k is exactly a native datastore Key (exact type match, not
;; instance?-based).
(defn ekey? [^com.google.appengine.api.datastore.Key k]
  (= (type k) com.google.appengine.api.datastore.Key))
;; A keylink is one hop of a keychain: a namespaced keyword (:Kind/name)
;; or a native datastore Key.
;; NOTE(review): (keyword k) coerces rather than tests -- compare keykind?
;; below, which uses (keyword? k). For non-keyword, non-named inputs this
;; branch can misbehave; presumably (keyword? k) was intended -- confirm.
(defn keylink?
  [k]
  ;; (log/trace "keylink?" k (and (keyword k)
  ;; (not (nil? (namespace k)))))
  (or (and (keyword k)
           (not (nil? (namespace k))))
      (= (type k) com.google.appengine.api.datastore.Key)))
;; (defmulti to-keychain class)
;; (defmethod to-keychain Key
;; [k]
;; (keychain k))
;; (defmethod to-keychain migae.datastore.PersistentEntityMap
;; [em]
;; (keychain (.getKey (.content em))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; (defmulti keychain class)
;; (defmethod keychain Key
;; [k]
;; (let [keychain (keychain k)]
;; keychain))
( defmethod keychain migae.datastore . PersistentEntityMap
;; [em]
;; (let [keychain (keychain (.getKey (.content em)))]
;; keychain))
;; (defmethod keychain clojure.lang.PersistentVector
;; [keychain]
;; keychain)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; dogtag: the last keylink of a thing's keychain, i.e. its own
;; :Kind/name link (the parent links are dropped).
(defmulti dogtag class)
(defmethod dogtag Key
  [k]
  (let [keychain (keychain k)]
    (last keychain)))
(defmethod dogtag migae.datastore.PersistentEntityMap
  [em]
  (let [keychain (keychain (.getKey (.content em)))]
    (last keychain)))
(defmethod dogtag clojure.lang.PersistentVector
  [keychain]
  ;; FIXME: validate vector contains only keylinks
  (if (every? keylink? keychain)
    (last keychain)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; entity-key: coerce the argument to a native datastore Key. Keys pass
;; through, entity maps yield their entity's key, keywords and keychain
;; vectors go through keychain-to-key.
(defmulti entity-key class)
(defmethod entity-key Key
  [^Key k]
  k)
(defmethod entity-key migae.datastore.PersistentEntityMap
  [^migae.datastore.PersistentEntityMap e]
  (.getKey (.content e)))
(defmethod entity-key com.google.appengine.api.datastore.Entity
  [^Entity e]
  (.getKey e))
(defmethod entity-key clojure.lang.Keyword
  [^clojure.lang.Keyword k]
  ;; a lone keyword is treated as a one-link keychain
  (keychain-to-key [k]))
(defmethod entity-key clojure.lang.PersistentVector
  [kchain]
  ;; FIXME: validate vector contains only keylinks
  (keychain-to-key kchain))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; Key-based equality for emaps and plain maps.
;; NOTE(review): the else branch returns the value of log/trace (nil)
;; instead of throwing, so non-map input silently yields nil; and the
;; keychain= it delegates to is currently a stub that always returns nil
;; (see keychain= below). Verify intended semantics.
(defn key=
  [em1 em2]
  (if (emap? em1)
    (if (emap? em2)
      (.equals (.content em1) (.content em2))
      (keychain= em1 em2))
    (if (map? em1)
      (keychain= em1 em2)
      (log/trace "EXCEPTION: key= applies only to maps and emaps"))))
;; True for an unnamespaced keyword (a bare :Kind), as opposed to a
;; :Kind/name keylink.
(defn keykind?
  [k]
  ;; (log/trace "keykind?" k (and (keyword k)
  ;; (not (nil? (namespace k)))))
  (and (keyword? k) (nil? (namespace k))))
;; NB: this dups what's in the keychain namespace
;; to make it available as ds/keychain?
;; NOTE(review): as written this calls ITSELF, not the keychain
;; namespace's keychain?, so any call loops forever. It presumably should
;; delegate to the fully-qualified function -- TODO confirm intended target.
(defn keychain? [k]
  (keychain? k))
( defn keychain
;; [arg]
( log / debug " KEYCHAIN : " arg )
;; (keychain arg))
;; NOTE(review): unfinished -- the let binds kch1/kch2 but its body is
;; empty, so this always returns nil regardless of input. The emap?
;; branches are also stubbed ("recur with .getParent" TODO).
(defn keychain=
  [k1 k2]
  (let [kch1 (if (emap? k1)
               ;; recur with .getParent
               (if (map? k1)
                 (:migae/key (meta k1))))
        kch2 (if (emap? k2)
               ;; recur with .getParent
               (if (map? k2)
                 (:migae/key (meta k2))))]
    ))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; (let [kind (edn/read-string fst)
;; name (edn/read-string )]
( KeyFactory / createKey ( str kind ) ident ) ) ) )
( [ ^String k ^Long ident ]
;; (let [kind (edn/read-string k)
;; name (edn/read-string ident)]
( KeyFactory / createKey ( str kind ) ident ) ) ) )
;; (if (= (type name) java.lang.Long)
( KeyFactory / createKey ns n ) ) )
( KeyFactory / createKey ns n ) ) ) ) )
( log / trace " ns " ns " n : " n " , first n : " ( first n ) )
| null | https://raw.githubusercontent.com/migae/datastore/61b2fc243cfd95956d531a57c86ea58eb19af7b7/obsolete/ekey.clj | clojure | FIXME: rename: co-ctors-key?
(defmethod kind migae.datastore.IPersistentEntityMap
[^migae.datastore.PersistentEntityMap e]
(keyword (.getKind (.content e))))
(log/trace "PersistentEntityMap.kind")
(clojure.core/name kw)))
(log/trace "Key identifier" k)
(log/trace "PersistentEntityMap.identifier")
(log/trace "PersistentEntityHashMap.identifier")
(not (nil? (namespace k)))))
(defmulti to-keychain class)
(defmethod to-keychain Key
[k]
(keychain k))
(defmethod to-keychain migae.datastore.PersistentEntityMap
[em]
(keychain (.getKey (.content em))))
(defmulti keychain class)
(defmethod keychain Key
[k]
(let [keychain (keychain k)]
keychain))
[em]
(let [keychain (keychain (.getKey (.content em)))]
keychain))
(defmethod keychain clojure.lang.PersistentVector
[keychain]
keychain)
(log/trace "keykind?" k (and (keyword k)
(not (nil? (namespace k)))))
to make it available as ds/keychain?
[arg]
(keychain arg))
recur with .getParent
recur with .getParent
(let [kind (edn/read-string fst)
name (edn/read-string )]
(let [kind (edn/read-string k)
name (edn/read-string ident)]
(if (= (type name) java.lang.Long) | (in-ns 'migae.datastore)
( load " emap " )
(declare keychain? keychain= dogtag)
(defmulti kind class)
(defmethod kind Key
[^Key k]
(keyword (.getKind k)))
(defmethod kind Entity
[^Entity e]
(keyword (.getKind e)))
( log / trace " IPersistentEntityMap.kind " )
(defmethod kind migae.datastore.PersistentEntityMap
[^migae.datastore.PersistentEntityMap e]
(keyword (.getKind (.content e))))
(defmethod kind migae.datastore.PersistentEntityHashMap
[^migae.datastore.PersistentEntityHashMap e]
(keyword (namespace (last (.k e)))))
(defmethod kind clojure.lang.Keyword
[^clojure.lang.Keyword kw]
(when-let [k (namespace kw)]
(keyword k)))
(defmethod kind clojure.lang.PersistentVector
[^clojure.lang.PersistentVector k]
FIXME : validate keychain contains only keylinks
(if (keychain? k)
(keyword (namespace (last k)))))
(defmulti identifier class)
(defmethod identifier Key
[^Key k]
(let [nm (.getName k)
id (.getId k)]
(if (nil? nm) id (str nm))))
(defmethod identifier migae.datastore.PersistentEntityMap
[^migae.datastore.PersistentEntityMap em]
(let [k (.getKey (.content em))
nm (.getName k)
id (.getId k)]
(if (nil? nm) id (str nm))))
(defmethod identifier migae.datastore.PersistentEntityHashMap
[^migae.datastore.PersistentEntityHashMap em]
(let [fob (dogtag (.k em))
nm (read-string (name fob))]
nm))
(defmethod identifier clojure.lang.PersistentVector
[^clojure.lang.PersistentVector keychain]
FIXME : validate vector contains only keylinks
(let [k (last keychain)]
(if-let [nm (.getName k)]
nm
(.getId k))))
(defn ekey? [^com.google.appengine.api.datastore.Key k]
(= (type k) com.google.appengine.api.datastore.Key))
(defn keylink?
[k]
( log / trace " keylink ? " k ( and ( keyword k )
(or (and (keyword k)
(not (nil? (namespace k))))
(= (type k) com.google.appengine.api.datastore.Key)))
( defmethod keychain migae.datastore . PersistentEntityMap
(defmulti dogtag class)
(defmethod dogtag Key
[k]
(let [keychain (keychain k)]
(last keychain)))
(defmethod dogtag migae.datastore.PersistentEntityMap
[em]
(let [keychain (keychain (.getKey (.content em)))]
(last keychain)))
(defmethod dogtag clojure.lang.PersistentVector
[keychain]
FIXME : validate vector contains only keylinks
(if (every? keylink? keychain)
(last keychain)))
(defmulti entity-key class)
(defmethod entity-key Key
[^Key k]
k)
(defmethod entity-key migae.datastore.PersistentEntityMap
[^migae.datastore.PersistentEntityMap e]
(.getKey (.content e)))
(defmethod entity-key com.google.appengine.api.datastore.Entity
[^Entity e]
(.getKey e))
(defmethod entity-key clojure.lang.Keyword
[^clojure.lang.Keyword k]
(keychain-to-key [k]))
(defmethod entity-key clojure.lang.PersistentVector
[kchain]
FIXME : validate vector contains only keylinks
(keychain-to-key kchain))
(defn key=
[em1 em2]
(if (emap? em1)
(if (emap? em2)
(.equals (.content em1) (.content em2))
(keychain= em1 em2))
(if (map? em1)
(keychain= em1 em2)
(log/trace "EXCEPTION: key= applies only to maps and emaps"))))
(defn keykind?
[k]
(and (keyword? k) (nil? (namespace k))))
NB : this dups what 's in the keychain namespace
(defn keychain? [k]
(keychain? k))
( defn keychain
( log / debug " KEYCHAIN : " arg )
(defn keychain=
[k1 k2]
(let [kch1 (if (emap? k1)
(if (map? k1)
(:migae/key (meta k1))))
kch2 (if (emap? k2)
(if (map? k2)
(:migae/key (meta k2))))]
))
( KeyFactory / createKey ( str kind ) ident ) ) ) )
( [ ^String k ^Long ident ]
( KeyFactory / createKey ( str kind ) ident ) ) ) )
( KeyFactory / createKey ns n ) ) )
( KeyFactory / createKey ns n ) ) ) ) )
( log / trace " ns " ns " n : " n " , first n : " ( first n ) )
|
c6fffc5c185a7fc496bc82d3c4a7fdb47c0eaa25c4743bf813f3010f3674de7c | anton-k/processing-for-haskell | Output.hs | module Graphics.Proc.Lib.Output(
module Graphics.Proc.Lib.Output.TextArea
) where
import Graphics.Proc.Lib.Output.TextArea
| null | https://raw.githubusercontent.com/anton-k/processing-for-haskell/7f99414f3c266135b6a2848978fcb572fd7b67b1/src/Graphics/Proc/Lib/Output.hs | haskell | module Graphics.Proc.Lib.Output(
module Graphics.Proc.Lib.Output.TextArea
) where
import Graphics.Proc.Lib.Output.TextArea
| |
3c048357fa05481d48f945c6ca5d50caefa0db6b2834bc6f92dfde7dbff95cc1 | KavrakiLab/tmkit | itmp-linear-test.lisp | (in-package :tmsmt)
(defparameter *base*
(robray::format-pathname "~A/../pddl/itmp/"
(asdf:system-source-directory :tmsmt)))
(defparameter *operators*
(load-operators (robray::format-pathname "~A/itmp-blocks-linear.pddl" *base*)))
(defparameter *facts*
(load-facts (robray::format-pathname "~A/blocks-linear-0.pddl" *base*)))
;(defparameter *ground* (ground-domain *operators* *facts*))
(with-smt (smt)
(let* ((cx (smt-plan-context
:operators *operators*
:facts *facts*
:action-encoding :enum
:smt smt))
(result (smt-plan-next cx :max-steps 3)))
(print result)))
| null | https://raw.githubusercontent.com/KavrakiLab/tmkit/4fb6fc0fa02c5b242dfcbe0013d3c1178e9d9224/demo/old/test/itmp-linear-test.lisp | lisp | (defparameter *ground* (ground-domain *operators* *facts*)) | (in-package :tmsmt)
(defparameter *base*
(robray::format-pathname "~A/../pddl/itmp/"
(asdf:system-source-directory :tmsmt)))
(defparameter *operators*
(load-operators (robray::format-pathname "~A/itmp-blocks-linear.pddl" *base*)))
(defparameter *facts*
(load-facts (robray::format-pathname "~A/blocks-linear-0.pddl" *base*)))
(with-smt (smt)
(let* ((cx (smt-plan-context
:operators *operators*
:facts *facts*
:action-encoding :enum
:smt smt))
(result (smt-plan-next cx :max-steps 3)))
(print result)))
|
5b4f288ee129b691b60180f0dbfbfcb5196410566a3d6630dbb19d9efa764b9f | Flexiana/notion-to-md | http_client.clj | (ns notion-to-md.http-client
(:require
[clj-http.client :as client]
[clojure.data.json :as json]))
(defn body->clj [response]
(->> response
:body
json/read-json))
(defn fetch-image [url]
(:body (client/get url {:as :stream
:async? false})))
(defn- fetch-children-paginated [secret id & opts]
(body->clj
(client/get
(str "/" id "/children"
(when-let [start_cursor (first opts)]
(str "/?start_cursor=" start_cursor)))
{:headers {"Authorization" (str "Bearer " secret)
"Notion-Version" "2021-08-16"}})))
(defn fetch-children
"Returns a coll of blocks"
[secret]
(fn [id]
(loop [response (fetch-children-paginated secret id)
result []]
(if-not (:has_more response)
(conj result response)
(recur (fetch-children-paginated secret id (:next_cursor response))
(conj result response))))))
| null | https://raw.githubusercontent.com/Flexiana/notion-to-md/7dea73f01f093e295c824f9111490998a657e0fd/src/notion_to_md/http_client.clj | clojure | (ns notion-to-md.http-client
(:require
[clj-http.client :as client]
[clojure.data.json :as json]))
(defn body->clj [response]
(->> response
:body
json/read-json))
(defn fetch-image [url]
(:body (client/get url {:as :stream
:async? false})))
(defn- fetch-children-paginated [secret id & opts]
(body->clj
(client/get
(str "/" id "/children"
(when-let [start_cursor (first opts)]
(str "/?start_cursor=" start_cursor)))
{:headers {"Authorization" (str "Bearer " secret)
"Notion-Version" "2021-08-16"}})))
(defn fetch-children
"Returns a coll of blocks"
[secret]
(fn [id]
(loop [response (fetch-children-paginated secret id)
result []]
(if-not (:has_more response)
(conj result response)
(recur (fetch-children-paginated secret id (:next_cursor response))
(conj result response))))))
| |
fbafbdbba72512648ffcd88f3f06d6070adcc7abe87a137c70c7366c6f22af61 | peterholko/pax_server | character.erl | Author :
Created : Dec 26 , 2008
%% Description: TODO: Add description to character
-module(character).
-export([]).
| null | https://raw.githubusercontent.com/peterholko/pax_server/62b2ec1fae195ff915d19af06e56a7c4567fd4b8/src/character.erl | erlang | Description: TODO: Add description to character | Author :
Created : Dec 26 , 2008
-module(character).
-export([]).
|
03ebcbe33702b2244326c9bd9ec4d532e53bb8135ad908b56bafbae543aa4bee | mpickering/apply-refact | Duplicate5.hs | main = do a; a; a; b; a; a | null | https://raw.githubusercontent.com/mpickering/apply-refact/a4343ea0f4f9d8c2e16d6b16b9068f321ba4f272/tests/examples/Duplicate5.hs | haskell | main = do a; a; a; b; a; a | |
913ad3bf79ea09cd921687249209c5393ffdbe4794fae2ee62cb7b85f3568890 | ocaml-multicore/parafuzz | uchar_esc.ml | (* TEST
* toplevel
*)
(* Correct escapes and their encoding *)
let () =
assert ("\xF0\x9F\x90\xAB" = "\u{1F42B}");
assert ("\xF0\x9F\x90\xAB" = "\u{01F42B}");
assert ("\x00" = "\u{0}");
assert ("\x00" = "\u{00}");
assert ("\x00" = "\u{000}");
assert ("\x00" = "\u{0000}");
assert ("\x00" = "\u{00000}");
assert ("\x00" = "\u{000000}");
assert ("\xC3\xA9" = "\u{E9}");
assert ("\xC3\xA9" = "\u{0E9}");
assert ("\xC3\xA9" = "\u{00E9}");
assert ("\xC3\xA9" = "\u{000E9}");
assert ("\xC3\xA9" = "\u{0000E9}");
assert ("\xC3\xA9" = "\u{0000E9}");
assert ("\xF4\x8F\xBF\xBF" = "\u{10FFFF}");
()
;;
(* Errors *)
let invalid_sv = "\u{0D800}" ;;
let invalid_sv = "\u{D800}" ;;
let invalid_sv = "\u{D900}" ;;
let invalid_sv = "\u{DFFF}" ;;
let invalid_sv = "\u{110000} ;;
let too_many_digits = "\u{01234567}" ;;
let no_hex_digits = "\u{}" ;;
let illegal_hex_digit = "\u{u}" ;;
| null | https://raw.githubusercontent.com/ocaml-multicore/parafuzz/6a92906f1ba03287ffcb433063bded831a644fd5/testsuite/tests/lexing/uchar_esc.ml | ocaml | TEST
* toplevel
Correct escapes and their encoding
Errors |
let () =
assert ("\xF0\x9F\x90\xAB" = "\u{1F42B}");
assert ("\xF0\x9F\x90\xAB" = "\u{01F42B}");
assert ("\x00" = "\u{0}");
assert ("\x00" = "\u{00}");
assert ("\x00" = "\u{000}");
assert ("\x00" = "\u{0000}");
assert ("\x00" = "\u{00000}");
assert ("\x00" = "\u{000000}");
assert ("\xC3\xA9" = "\u{E9}");
assert ("\xC3\xA9" = "\u{0E9}");
assert ("\xC3\xA9" = "\u{00E9}");
assert ("\xC3\xA9" = "\u{000E9}");
assert ("\xC3\xA9" = "\u{0000E9}");
assert ("\xC3\xA9" = "\u{0000E9}");
assert ("\xF4\x8F\xBF\xBF" = "\u{10FFFF}");
()
;;
let invalid_sv = "\u{0D800}" ;;
let invalid_sv = "\u{D800}" ;;
let invalid_sv = "\u{D900}" ;;
let invalid_sv = "\u{DFFF}" ;;
let invalid_sv = "\u{110000} ;;
let too_many_digits = "\u{01234567}" ;;
let no_hex_digits = "\u{}" ;;
let illegal_hex_digit = "\u{u}" ;;
|
1205c4859313c6cca0ad45ca0b133578f65a7f47437847593b89a82c0202400d | RefactoringTools/HaRe | GA3.hs | module GA3 where
zipperM :: [a] -> IO [(a, Integer)]
zipperM lst = do
lst2 <- getOtherList
return $ zip lst lst2
getOtherList = return [1..]
| null | https://raw.githubusercontent.com/RefactoringTools/HaRe/ef5dee64c38fb104e6e5676095946279fbce381c/test/testdata/GenApplicative/GA3.hs | haskell | module GA3 where
zipperM :: [a] -> IO [(a, Integer)]
zipperM lst = do
lst2 <- getOtherList
return $ zip lst lst2
getOtherList = return [1..]
| |
4f17fb3b6e0bb41d5a75e802a564b001789e182dd23960299e30149cd8ebef8a | ghc/ghc | Compile.hs | {-# LANGUAGE GADTs #-}
module Rules.Compile (compilePackage) where
import Hadrian.BuildPath
import Hadrian.Oracles.TextFile
import Base
import Context as C
import Expression
import Oracles.Flag (platformSupportsSharedLibs)
import Rules.Generate
import Settings
import Target
import Utilities
import qualified Text.Parsec as Parsec
* Rules for building objects and Haskell interface files
compilePackage :: [(Resource, Int)] -> Rules ()
compilePackage rs = do
root <- buildRootRules
-- We match all file paths that look like:
-- <root>/...stuffs.../build/...stuffs.../<something>.<suffix>
--
-- where:
-- - the '...stuffs...' bits can be one or more path components,
-- - the '<suffix>' part is a way prefix (e.g thr_p_, or nothing if
-- vanilla) followed by an object file extension, without the dot
-- (o, o-boot, hi, hi-boot),
--
-- and parse the information we need (stage, package path, ...) from
-- the path and figure out the suitable way to produce that object file.
alternatives $ do
-- Language is identified by subdirectory under /build.
These are non - haskell files so only have a .o or .<way>_o suffix .
[ root -/- "**/build/c/**/*." ++ wayPat ++ "o"
| wayPat <- wayPats] |%> compileNonHsObject rs C
[ root -/- "**/build/cmm/**/*." ++ wayPat ++ "o"
| wayPat <- wayPats] |%> compileNonHsObject rs Cmm
[ root -/- "**/build/cpp/**/*." ++ wayPat ++ "o"
| wayPat <- wayPats] |%> compileNonHsObject rs Cxx
[ root -/- "**/build/s/**/*." ++ wayPat ++ "o"
| wayPat <- wayPats] |%> compileNonHsObject rs Asm
[ root -/- "**/build/S/**/*." ++ wayPat ++ "o"
| wayPat <- wayPats] |%> compileNonHsObject rs Asm
[ root -/- "**/build/js/**/*." ++ wayPat ++ "o"
| wayPat <- wayPats] |%> compileNonHsObject rs JS
All else is .
-- These come last as they overlap with the above rules' file patterns.
When building dynamically we depend on the static rule if shared libs
-- are supported, because it will add the -dynamic-too flag when
-- compiling to build the dynamic files alongside the static files
( root -/- "**/build/**/*.dyn_o" :& root -/- "**/build/**/*.dyn_hi" :& Nil )
&%> \ ( dyn_o :& _dyn_hi :& _ ) -> do
p <- platformSupportsSharedLibs
if p
then do
We ` need ` " .o/.hi " because GHC is called with ` -dynamic - too `
-- and builds ".dyn_o/.dyn_hi" too.
changed <- needHasChanged [dyn_o -<.> "o", dyn_o -<.> "hi"]
If for some reason a previous Hadrian execution has been
-- interrupted after the rule for .o/.hi generation has completed
but before the current rule for .dyn_o/.dyn_hi has completed ,
-- or if some of the dynamic artifacts have been removed by the
-- user, "needing" the non dynamic artifacts is not enough as
-- Shake won't execute the associated action. Hence we detect
-- this case and we explicitly build the dynamic artifacts here:
case changed of
[] -> compileHsObjectAndHi rs dyn_o
_ -> pure ()
else compileHsObjectAndHi rs dyn_o
forM_ ((,) <$> hsExts <*> wayPats) $ \ ((oExt, hiExt), wayPat) ->
( (root -/- "**/build/**/*." ++ wayPat ++ oExt)
:& (root -/- "**/build/**/*." ++ wayPat ++ hiExt)
:& Nil ) &%>
\ ( o :& _hi :& _ ) ->
compileHsObjectAndHi rs o
where
hsExts = [ ("o", "hi")
, ("o-boot", "hi-boot")
]
wayPats = [ "", "*_" ]
-- * Object file paths types and parsers
We are using a non uniform representation that separates
object files produced from code and from other
languages , because the two " groups " have to be parsed
differently enough that this would complicated the parser
significantly .
Indeed , non - Haskell files can only produce .o ( or .thr_o , ... )
files while Haskell modules can produce those as well as
interface files , both in -boot or non - boot variants .
Moreover , non - Haskell object files live under :
< root>/stage < N>/<path / to / pkg>/build/{c , cmm , s}/
while object / interface files live under :
< root>/stage < N>/<path / to / pkg>/build/
So the kind of object is partially determined by
whether we 're in c/ , cmm/ or s/ but also by the
object file 's extension , in the case of a file .
This could have been addressed with some knot - tying but
Parsec 's monad does n't give us a MonadFix instance .
We therefore stick to treating those two type of object
files non uniformly .
object files produced from Haskell code and from other
languages, because the two "groups" have to be parsed
differently enough that this would complicated the parser
significantly.
Indeed, non-Haskell files can only produce .o (or .thr_o, ...)
files while Haskell modules can produce those as well as
interface files, both in -boot or non-boot variants.
Moreover, non-Haskell object files live under:
<root>/stage<N>/<path/to/pkg>/build/{c,cmm,s}/
while Haskell object/interface files live under:
<root>/stage<N>/<path/to/pkg>/build/
So the kind of object is partially determined by
whether we're in c/, cmm/ or s/ but also by the
object file's extension, in the case of a Haskell file.
This could have been addressed with some knot-tying but
Parsec's monad doesn't give us a MonadFix instance.
We therefore stick to treating those two type of object
files non uniformly.
-}
-- | Non Haskell source languages that we compile to get object files.
data SourceLang = Asm | C | Cmm | Cxx | JS deriving (Eq, Show)
parseSourceLang :: Parsec.Parsec String () SourceLang
parseSourceLang = Parsec.choice
[ Parsec.string "js" *> pure JS
, Parsec.char 'c' *> Parsec.choice
[ Parsec.string "mm" *> pure Cmm
, Parsec.string "pp" *> pure Cxx
, pure C
]
, Parsec.char 's' *> pure Asm
]
type Basename = String
parseBasename :: Parsec.Parsec String () Basename
parseBasename = Parsec.manyTill Parsec.anyChar (Parsec.try $ Parsec.char '.')
-- | > <c|cmm|s>/<file>.<way prefix>_o
data NonHsObject = NonHsObject SourceLang Basename Way
deriving (Eq, Show)
parseNonHsObject :: Parsec.Parsec String () NonHsObject
parseNonHsObject = do
lang <- parseSourceLang
_ <- Parsec.char '/'
file <- parseBasename
way <- parseWayPrefix vanilla
_ <- Parsec.char 'o'
return (NonHsObject lang file way)
-- | > <o|hi|o-boot|hi-boot>
data SuffixType = O | Hi | OBoot | HiBoot deriving (Eq, Show)
parseSuffixType :: Parsec.Parsec String () SuffixType
parseSuffixType = Parsec.choice
[ Parsec.char 'o' *> Parsec.choice
[ Parsec.string "-boot" *> pure OBoot
, pure O
]
, Parsec.string "hi" *> Parsec.choice
[ Parsec.string "-boot" *> pure HiBoot
, pure Hi
]
]
-- | > <way prefix>_<o|hi|o-boot|hi-boot>
data Extension = Extension Way SuffixType deriving (Eq, Show)
parseExtension :: Parsec.Parsec String () Extension
parseExtension = Extension <$> parseWayPrefix vanilla <*> parseSuffixType
| > < file>.<way prefix>_<o|hi|o - boot|hi - boot >
data HsObject = HsObject Basename Extension deriving (Eq, Show)
parseHsObject :: Parsec.Parsec String () HsObject
parseHsObject = do
file <- parseBasename
ext <- parseExtension
return (HsObject file ext)
data Object = Hs HsObject | NonHs NonHsObject deriving (Eq, Show)
parseObject :: Parsec.Parsec String () Object
parseObject = Parsec.choice
[ NonHs <$> parseNonHsObject
, Hs <$> parseHsObject ]
* Toplevel parsers
parseBuildObject :: FilePath -> Parsec.Parsec String () (BuildPath Object)
parseBuildObject root = parseBuildPath root parseObject
-- * Getting contexts from objects
objectContext :: BuildPath Object -> Context
objectContext (BuildPath _ stage pkgPath obj) =
Context stage (unsafeFindPackageByPath pkgPath) way Inplace
where
way = case obj of
NonHs (NonHsObject _lang _file w) -> w
Hs (HsObject _file (Extension w _suf)) -> w
-- * Building an object
compileHsObjectAndHi
:: [(Resource, Int)] -> FilePath -> Action ()
compileHsObjectAndHi rs objpath = do
root <- buildRoot
b@(BuildPath _root stage _path _o)
<- parsePath (parseBuildObject root) "<object file path parser>" objpath
let ctx = objectContext b
way = C.way ctx
ctxPath <- contextPath ctx
(src, deps) <- lookupDependencies (ctxPath -/- ".dependencies") objpath
need (src:deps)
The .dependencies file lists indicating inputs . ghc will
-- generally read more *.hi and *.hi-boot files (direct inputs).
-- Allow such reads (see -Hadrian#haskell-object-files-and-hi-inputs)
-- Note that this may allow too many *.hi and *.hi-boot files, but
-- calculating the exact set of direct inputs is not feasible.
trackAllow [ "**/*." ++ hisuf way
, "**/*." ++ hibootsuf way
]
buildWithResources rs $ target ctx (Ghc CompileHs stage) [src] [objpath]
compileNonHsObject :: [(Resource, Int)] -> SourceLang -> FilePath -> Action ()
compileNonHsObject rs lang path = do
root <- buildRoot
b@(BuildPath _root stage _path _o)
<- parsePath (parseBuildObject root) "<object file path parser>" path
let
ctx = objectContext b
builder = case lang of
C -> Ghc CompileCWithGhc
Cxx-> Ghc CompileCppWithGhc
_ -> Ghc CompileHs
src <- case lang of
Asm -> obj2src "S" (const False) ctx path
C -> obj2src "c" (const False) ctx path
Cmm -> obj2src "cmm" isGeneratedCmmFile ctx path
Cxx -> obj2src "cpp" (const False) ctx path
JS -> obj2src "js" (const False) ctx path
need [src]
needDependencies lang ctx src (path <.> "d")
buildWithResources rs $ target ctx (builder stage) [src] [path]
-- * Helpers
| Discover dependencies of a given source file by iteratively calling @gcc@
in the @-MM -MG@ mode and building generated dependencies if they are missing
-- until reaching a fixed point.
needDependencies :: SourceLang -> Context -> FilePath -> FilePath -> Action ()
needDependencies lang context@Context {..} src depFile = do
gens <- interpretInContext context generatedDependencies
need gens
discover
where
discover = do
build $ target context (Cc (FindCDependencies depType) stage) [src] [depFile]
deps <- parseFile depFile
-- Generated dependencies, if not yet built, will not be found and hence
-- will be referred to simply by their file names.
let notFound = filter (\file -> file == takeFileName file) deps
-- We find the full paths to generated dependencies, so we can request
-- to build them by calling 'need'.
todo <- catMaybes <$> mapM (fullPathIfGenerated context) notFound
if null todo
then need deps -- The list of dependencies is final, need all
else do
need todo -- Build newly discovered generated dependencies
discover -- Continue the discovery process
We need to pass different flags to cc depending on whether the
file to compile is a .c or a .cpp file
depType = if lang == Cxx then CxxDep else CDep
parseFile :: FilePath -> Action [String]
parseFile file = do
input <- liftIO $ readFile file
case parseMakefile input of
[(_file, deps)] -> return deps
_ -> return []
-- | Find a given 'FilePath' in the list of generated files in the given
-- 'Context' and return its full path.
fullPathIfGenerated :: Context -> FilePath -> Action (Maybe FilePath)
fullPathIfGenerated context file = interpretInContext context $ do
generated <- generatedDependencies
return $ find ((== file) . takeFileName) generated
obj2src :: String -> (FilePath -> Bool) -> Context -> FilePath -> Action FilePath
obj2src extension isGenerated context@Context {..} obj
| isGenerated src = return src
| otherwise = (pkgPath package ++) <$> suffix
where
src = obj -<.> extension
suffix = do
path <- buildPath context
return $ fromMaybe ("Cannot determine source for " ++ obj)
$ stripPrefix (path -/- extension) src
| null | https://raw.githubusercontent.com/ghc/ghc/97ac8230b0a645aae27b7ee42aa55b0c84735684/hadrian/src/Rules/Compile.hs | haskell | # LANGUAGE GADTs #
We match all file paths that look like:
<root>/...stuffs.../build/...stuffs.../<something>.<suffix>
where:
- the '...stuffs...' bits can be one or more path components,
- the '<suffix>' part is a way prefix (e.g thr_p_, or nothing if
vanilla) followed by an object file extension, without the dot
(o, o-boot, hi, hi-boot),
and parse the information we need (stage, package path, ...) from
the path and figure out the suitable way to produce that object file.
Language is identified by subdirectory under /build.
These come last as they overlap with the above rules' file patterns.
are supported, because it will add the -dynamic-too flag when
compiling to build the dynamic files alongside the static files
and builds ".dyn_o/.dyn_hi" too.
interrupted after the rule for .o/.hi generation has completed
or if some of the dynamic artifacts have been removed by the
user, "needing" the non dynamic artifacts is not enough as
Shake won't execute the associated action. Hence we detect
this case and we explicitly build the dynamic artifacts here:
* Object file paths types and parsers
| Non Haskell source languages that we compile to get object files.
| > <c|cmm|s>/<file>.<way prefix>_o
| > <o|hi|o-boot|hi-boot>
| > <way prefix>_<o|hi|o-boot|hi-boot>
* Getting contexts from objects
* Building an object
generally read more *.hi and *.hi-boot files (direct inputs).
Allow such reads (see -Hadrian#haskell-object-files-and-hi-inputs)
Note that this may allow too many *.hi and *.hi-boot files, but
calculating the exact set of direct inputs is not feasible.
* Helpers
until reaching a fixed point.
Generated dependencies, if not yet built, will not be found and hence
will be referred to simply by their file names.
We find the full paths to generated dependencies, so we can request
to build them by calling 'need'.
The list of dependencies is final, need all
Build newly discovered generated dependencies
Continue the discovery process
| Find a given 'FilePath' in the list of generated files in the given
'Context' and return its full path. |
module Rules.Compile (compilePackage) where
import Hadrian.BuildPath
import Hadrian.Oracles.TextFile
import Base
import Context as C
import Expression
import Oracles.Flag (platformSupportsSharedLibs)
import Rules.Generate
import Settings
import Target
import Utilities
import qualified Text.Parsec as Parsec
* Rules for building objects and Haskell interface files
compilePackage :: [(Resource, Int)] -> Rules ()
compilePackage rs = do
root <- buildRootRules
alternatives $ do
These are non - haskell files so only have a .o or .<way>_o suffix .
[ root -/- "**/build/c/**/*." ++ wayPat ++ "o"
| wayPat <- wayPats] |%> compileNonHsObject rs C
[ root -/- "**/build/cmm/**/*." ++ wayPat ++ "o"
| wayPat <- wayPats] |%> compileNonHsObject rs Cmm
[ root -/- "**/build/cpp/**/*." ++ wayPat ++ "o"
| wayPat <- wayPats] |%> compileNonHsObject rs Cxx
[ root -/- "**/build/s/**/*." ++ wayPat ++ "o"
| wayPat <- wayPats] |%> compileNonHsObject rs Asm
[ root -/- "**/build/S/**/*." ++ wayPat ++ "o"
| wayPat <- wayPats] |%> compileNonHsObject rs Asm
[ root -/- "**/build/js/**/*." ++ wayPat ++ "o"
| wayPat <- wayPats] |%> compileNonHsObject rs JS
All else is .
When building dynamically we depend on the static rule if shared libs
( root -/- "**/build/**/*.dyn_o" :& root -/- "**/build/**/*.dyn_hi" :& Nil )
&%> \ ( dyn_o :& _dyn_hi :& _ ) -> do
p <- platformSupportsSharedLibs
if p
then do
We ` need ` " .o/.hi " because GHC is called with ` -dynamic - too `
changed <- needHasChanged [dyn_o -<.> "o", dyn_o -<.> "hi"]
If for some reason a previous Hadrian execution has been
but before the current rule for .dyn_o/.dyn_hi has completed ,
case changed of
[] -> compileHsObjectAndHi rs dyn_o
_ -> pure ()
else compileHsObjectAndHi rs dyn_o
forM_ ((,) <$> hsExts <*> wayPats) $ \ ((oExt, hiExt), wayPat) ->
( (root -/- "**/build/**/*." ++ wayPat ++ oExt)
:& (root -/- "**/build/**/*." ++ wayPat ++ hiExt)
:& Nil ) &%>
\ ( o :& _hi :& _ ) ->
compileHsObjectAndHi rs o
where
hsExts = [ ("o", "hi")
, ("o-boot", "hi-boot")
]
wayPats = [ "", "*_" ]
We are using a non uniform representation that separates
object files produced from code and from other
languages , because the two " groups " have to be parsed
differently enough that this would complicated the parser
significantly .
Indeed , non - Haskell files can only produce .o ( or .thr_o , ... )
files while Haskell modules can produce those as well as
interface files , both in -boot or non - boot variants .
Moreover , non - Haskell object files live under :
< root>/stage < N>/<path / to / pkg>/build/{c , cmm , s}/
while object / interface files live under :
< root>/stage < N>/<path / to / pkg>/build/
So the kind of object is partially determined by
whether we 're in c/ , cmm/ or s/ but also by the
object file 's extension , in the case of a file .
This could have been addressed with some knot - tying but
Parsec 's monad does n't give us a MonadFix instance .
We therefore stick to treating those two type of object
files non uniformly .
object files produced from Haskell code and from other
languages, because the two "groups" have to be parsed
differently enough that this would complicated the parser
significantly.
Indeed, non-Haskell files can only produce .o (or .thr_o, ...)
files while Haskell modules can produce those as well as
interface files, both in -boot or non-boot variants.
Moreover, non-Haskell object files live under:
<root>/stage<N>/<path/to/pkg>/build/{c,cmm,s}/
while Haskell object/interface files live under:
<root>/stage<N>/<path/to/pkg>/build/
So the kind of object is partially determined by
whether we're in c/, cmm/ or s/ but also by the
object file's extension, in the case of a Haskell file.
This could have been addressed with some knot-tying but
Parsec's monad doesn't give us a MonadFix instance.
We therefore stick to treating those two type of object
files non uniformly.
-}
data SourceLang = Asm | C | Cmm | Cxx | JS deriving (Eq, Show)
parseSourceLang :: Parsec.Parsec String () SourceLang
parseSourceLang = Parsec.choice
[ Parsec.string "js" *> pure JS
, Parsec.char 'c' *> Parsec.choice
[ Parsec.string "mm" *> pure Cmm
, Parsec.string "pp" *> pure Cxx
, pure C
]
, Parsec.char 's' *> pure Asm
]
type Basename = String
parseBasename :: Parsec.Parsec String () Basename
parseBasename = Parsec.manyTill Parsec.anyChar (Parsec.try $ Parsec.char '.')
data NonHsObject = NonHsObject SourceLang Basename Way
deriving (Eq, Show)
parseNonHsObject :: Parsec.Parsec String () NonHsObject
parseNonHsObject = do
lang <- parseSourceLang
_ <- Parsec.char '/'
file <- parseBasename
way <- parseWayPrefix vanilla
_ <- Parsec.char 'o'
return (NonHsObject lang file way)
data SuffixType = O | Hi | OBoot | HiBoot deriving (Eq, Show)
parseSuffixType :: Parsec.Parsec String () SuffixType
parseSuffixType = Parsec.choice
[ Parsec.char 'o' *> Parsec.choice
[ Parsec.string "-boot" *> pure OBoot
, pure O
]
, Parsec.string "hi" *> Parsec.choice
[ Parsec.string "-boot" *> pure HiBoot
, pure Hi
]
]
data Extension = Extension Way SuffixType deriving (Eq, Show)
parseExtension :: Parsec.Parsec String () Extension
parseExtension = Extension <$> parseWayPrefix vanilla <*> parseSuffixType
| > < file>.<way prefix>_<o|hi|o - boot|hi - boot >
data HsObject = HsObject Basename Extension deriving (Eq, Show)
parseHsObject :: Parsec.Parsec String () HsObject
parseHsObject = do
file <- parseBasename
ext <- parseExtension
return (HsObject file ext)
data Object = Hs HsObject | NonHs NonHsObject deriving (Eq, Show)
parseObject :: Parsec.Parsec String () Object
parseObject = Parsec.choice
[ NonHs <$> parseNonHsObject
, Hs <$> parseHsObject ]
* Toplevel parsers
parseBuildObject :: FilePath -> Parsec.Parsec String () (BuildPath Object)
parseBuildObject root = parseBuildPath root parseObject
objectContext :: BuildPath Object -> Context
objectContext (BuildPath _ stage pkgPath obj) =
Context stage (unsafeFindPackageByPath pkgPath) way Inplace
where
way = case obj of
NonHs (NonHsObject _lang _file w) -> w
Hs (HsObject _file (Extension w _suf)) -> w
compileHsObjectAndHi
:: [(Resource, Int)] -> FilePath -> Action ()
compileHsObjectAndHi rs objpath = do
root <- buildRoot
b@(BuildPath _root stage _path _o)
<- parsePath (parseBuildObject root) "<object file path parser>" objpath
let ctx = objectContext b
way = C.way ctx
ctxPath <- contextPath ctx
(src, deps) <- lookupDependencies (ctxPath -/- ".dependencies") objpath
need (src:deps)
The .dependencies file lists indicating inputs . ghc will
trackAllow [ "**/*." ++ hisuf way
, "**/*." ++ hibootsuf way
]
buildWithResources rs $ target ctx (Ghc CompileHs stage) [src] [objpath]
compileNonHsObject :: [(Resource, Int)] -> SourceLang -> FilePath -> Action ()
compileNonHsObject rs lang path = do
root <- buildRoot
b@(BuildPath _root stage _path _o)
<- parsePath (parseBuildObject root) "<object file path parser>" path
let
ctx = objectContext b
builder = case lang of
C -> Ghc CompileCWithGhc
Cxx-> Ghc CompileCppWithGhc
_ -> Ghc CompileHs
src <- case lang of
Asm -> obj2src "S" (const False) ctx path
C -> obj2src "c" (const False) ctx path
Cmm -> obj2src "cmm" isGeneratedCmmFile ctx path
Cxx -> obj2src "cpp" (const False) ctx path
JS -> obj2src "js" (const False) ctx path
need [src]
needDependencies lang ctx src (path <.> "d")
buildWithResources rs $ target ctx (builder stage) [src] [path]
| Discover dependencies of a given source file by iteratively calling @gcc@
in the @-MM -MG@ mode and building generated dependencies if they are missing
needDependencies :: SourceLang -> Context -> FilePath -> FilePath -> Action ()
needDependencies lang context@Context {..} src depFile = do
gens <- interpretInContext context generatedDependencies
need gens
discover
where
discover = do
build $ target context (Cc (FindCDependencies depType) stage) [src] [depFile]
deps <- parseFile depFile
let notFound = filter (\file -> file == takeFileName file) deps
todo <- catMaybes <$> mapM (fullPathIfGenerated context) notFound
if null todo
else do
We need to pass different flags to cc depending on whether the
file to compile is a .c or a .cpp file
depType = if lang == Cxx then CxxDep else CDep
parseFile :: FilePath -> Action [String]
parseFile file = do
input <- liftIO $ readFile file
case parseMakefile input of
[(_file, deps)] -> return deps
_ -> return []
fullPathIfGenerated :: Context -> FilePath -> Action (Maybe FilePath)
fullPathIfGenerated context file = interpretInContext context $ do
generated <- generatedDependencies
return $ find ((== file) . takeFileName) generated
obj2src :: String -> (FilePath -> Bool) -> Context -> FilePath -> Action FilePath
obj2src extension isGenerated context@Context {..} obj
| isGenerated src = return src
| otherwise = (pkgPath package ++) <$> suffix
where
src = obj -<.> extension
suffix = do
path <- buildPath context
return $ fromMaybe ("Cannot determine source for " ++ obj)
$ stripPrefix (path -/- extension) src
|
ff6866f956cf90b38be82186dde74df25f2adad244af68903c97d6e412c5a3bf | alexwl/haskell-code-explorer | PackageInfo.hs | # LANGUAGE CPP #
# LANGUAGE TupleSections #
# LANGUAGE FlexibleInstances #
# LANGUAGE RecordWildCards #
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE Rank2Types #-}
# LANGUAGE ScopedTypeVariables #
# LANGUAGE DuplicateRecordFields #
# OPTIONS_GHC -fno - warn - orphans #
module HaskellCodeExplorer.PackageInfo
( createPackageInfo
, ghcVersion
) where
import Control.DeepSeq(deepseq)
import Control.Exception
( IOException
, SomeAsyncException
, SomeException
, fromException
, throw
, try
)
import Control.Monad (foldM, unless, when)
import Control.Monad.Extra (anyM, findM)
import Control.Monad.Logger
( LoggingT(..)
, MonadLogger(..)
, MonadLoggerIO(..)
, logDebugN
, logErrorN
, logInfoN
)
import qualified Data.ByteString as BS
import qualified Data.HashMap.Strict as HM
import Data.IORef (readIORef)
import qualified Data.IntMap.Strict as IM
import qualified Data.List as L
import Data.Maybe (fromMaybe, isJust, maybeToList)
import qualified Data.Set as S
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.Version (Version(..), showVersion, makeVersion)
import Digraph (flattenSCCs)
import Distribution.Helper
( ChComponentName(..)
, ChEntrypoint(..)
, ChModuleName(..)
, components
, entrypoints
, ghcOptions
, mkQueryEnv
, packageId
, runQuery
, sourceDirs
, compilerVersion
)
import DynFlags
( DynFlags(..)
, GeneralFlag(..)
, GhcMode(..)
, WarnReason(..)
, gopt_set
, parseDynamicFlagsCmdLine
)
import Exception (ExceptionMonad(..), ghandle)
import GHC
( GhcLink(..)
, HscTarget(..)
, LoadHowMuch(..)
, ModLocation(..)
, ModSummary(..)
, Severity
, SrcSpan
, getModuleGraph
, getSession
, getSessionDynFlags
, guessTarget
, load
, noLoc
, parseModule
, runGhcT
, setSessionDynFlags
, setTargets
, topSortModuleGraph
, typecheckModule
, moduleNameString
, moduleName
)
import GHC.Paths (libdir)
import GhcMonad (GhcT(..), liftIO)
import HaskellCodeExplorer.GhcUtils (isHsBoot,toText)
import HaskellCodeExplorer.ModuleInfo (ModuleDependencies, createModuleInfo)
import qualified HaskellCodeExplorer.Types as HCE
import HscTypes (hsc_EPS, hsc_HPT)
import Outputable (PprStyle, SDoc, neverQualify, showSDocForUser)
import Packages (initPackages)
import Prelude hiding (id)
import System.Directory
( doesFileExist
, findExecutable
, setCurrentDirectory
, getCurrentDirectory
, makeAbsolute
, getDirectoryContents
)
import qualified System.Directory.Tree as DT
import System.Exit (exitFailure)
import System.FilePath
( (</>)
, addTrailingPathSeparator
, joinPath
, normalise
, replaceExtension
, splitPath
, takeExtension
, takeFileName
, takeBaseName
, takeDirectory
, splitDirectories
)
import System.FilePath.Find (find,always,(==?),fileName)
import System.Process (readProcess)
createPackageInfo ::
^ Path to a Cabal package
-> Maybe FilePath -- ^ Relative path to a dist directory
-> HCE.SourceCodePreprocessing -- ^ Before or after preprocessor
^ Options for GHC
-> [String] -- ^ Directories to ignore
-> LoggingT IO (HCE.PackageInfo HCE.ModuleInfo)
createPackageInfo packageDirectoryPath mbDistDirRelativePath sourceCodePreprocessing additionalGhcOptions ignoreDirectories = do
packageDirectoryAbsPath <- liftIO $ makeAbsolute packageDirectoryPath
currentDirectory <- liftIO getCurrentDirectory
liftIO $ setCurrentDirectory packageDirectoryAbsPath
distDir <-
case mbDistDirRelativePath of
Just path -> return $ packageDirectoryAbsPath </> path
Nothing -> do
eitherDistDir <- findDistDirectory packageDirectoryAbsPath
case eitherDistDir of
Right distDir -> return distDir
Left errorMessage ->
logErrorN (T.pack errorMessage) >> liftIO exitFailure
cabalFiles <-
liftIO $
length .
filter
(\path -> takeFileName path /= ".cabal" && takeExtension path == ".cabal") <$>
getDirectoryContents packageDirectoryAbsPath
_ <-
if cabalFiles == 0
then do
logErrorN $
T.concat ["No .cabal file found in ", T.pack packageDirectoryAbsPath]
liftIO exitFailure
else when (cabalFiles >= 2) $ do
logErrorN $
T.concat
[ "Found more than one .cabal file in "
, T.pack packageDirectoryAbsPath
]
liftIO exitFailure
let cabalHelperQueryEnv = mkQueryEnv packageDirectoryAbsPath distDir
((packageName, packageVersion), (_packageCompilerName, packageCompilerVersion), compInfo) <-
liftIO $
runQuery
cabalHelperQueryEnv
((,,) <$> packageId <*> compilerVersion <*>
(zip3 <$> components ((,) <$> ghcOptions) <*>
components ((,) <$> entrypoints) <*>
components ((,) <$> sourceDirs)))
let currentPackageId = HCE.PackageId (T.pack packageName) packageVersion
unless
(take 3 (versionBranch packageCompilerVersion) ==
take 3 (versionBranch ghcVersion)) $ do
logErrorN $
T.concat
[ "GHC version mismatch. haskell-code-indexer: "
, T.pack $ showVersion ghcVersion
, ", package: "
, T.pack $ showVersion packageCompilerVersion
]
liftIO exitFailure
logInfoN $ T.append "Indexing " $ HCE.packageIdToText currentPackageId
let buildComponents =
L.map
(\((options, compName), (entrypoint, _), (srcDirs, _)) ->
( chComponentNameToComponentId compName
, options
, chEntrypointsToModules entrypoint
, srcDirs
, chComponentNameToComponentType compName)) .
L.sortBy
(\((_, compName1), _, _) ((_, compName2), _, _) ->
compare compName1 compName2) $
compInfo
libSrcDirs =
concatMap (\(_, _, _, srcDirs, _) -> srcDirs) .
filter (\(_, _, _, _, compType) -> HCE.isLibrary compType) $
buildComponents
(indexedModules, (_fileMapResult, _defSiteMapResult, modNameMapResult)) <-
foldM
(\(modules, (fileMap, defSiteMap, modNameMap)) (compId, options, (mbMain, moduleNames), srcDirs, _) -> do
mbMainPath <-
case mbMain of
Just mainPath ->
liftIO $
findM doesFileExist $
mainPath :
map (\srcDir -> normalise $ srcDir </> mainPath) srcDirs
Nothing -> return Nothing
(modules', (fileMap', defSiteMap', modNameMap')) <-
indexBuildComponent
sourceCodePreprocessing
currentPackageId
compId
(fileMap, defSiteMap, modNameMap)
srcDirs
libSrcDirs
(options ++ additionalGhcOptions)
(maybe moduleNames (: moduleNames) mbMainPath)
return (modules ++ modules', (fileMap', defSiteMap', modNameMap')))
([], (HM.empty, HM.empty, HM.empty))
buildComponents
let modId = HCE.id :: HCE.ModuleInfo -> HCE.HaskellModulePath
moduleMap =
HM.fromList . map (\modInfo -> (modId modInfo, modInfo)) $
indexedModules
references = L.foldl' addReferencesFromModule HM.empty indexedModules
moduleId = HCE.id :: HCE.ModuleInfo -> HCE.HaskellModulePath
topLevelIdentifiersTrie =
L.foldl' addTopLevelIdentifiersFromModule HCE.emptyTrie .
L.filter (not . isHsBoot . moduleId) $
indexedModules
directoryTree <-
liftIO $
buildDirectoryTree
packageDirectoryAbsPath
ignoreDirectories
(\path -> HM.member (HCE.HaskellModulePath . T.pack $ path) moduleMap)
liftIO $ setCurrentDirectory currentDirectory
return
HCE.PackageInfo
{ id = currentPackageId
, moduleMap = moduleMap
, moduleNameMap = modNameMapResult
, directoryTree = directoryTree
, externalIdOccMap = references
, externalIdInfoMap = topLevelIdentifiersTrie
}
where
chEntrypointsToModules :: ChEntrypoint -> (Maybe String, [String])
chEntrypointsToModules (ChLibEntrypoint modules otherModules signatures) =
( Nothing
, L.map chModuleToString modules ++
L.map chModuleToString otherModules ++ L.map chModuleToString signatures)
chEntrypointsToModules (ChExeEntrypoint mainModule _otherModules) =
(Just mainModule, [])
chEntrypointsToModules ChSetupEntrypoint = (Nothing, [])
chModuleToString :: ChModuleName -> String
chModuleToString (ChModuleName n) = n
chComponentNameToComponentType :: ChComponentName -> HCE.ComponentType
chComponentNameToComponentType ChSetupHsName = HCE.Setup
chComponentNameToComponentType ChLibName = HCE.Lib
chComponentNameToComponentType (ChSubLibName name) =
HCE.SubLib $ T.pack name
chComponentNameToComponentType (ChFLibName name) = HCE.FLib $ T.pack name
chComponentNameToComponentType (ChExeName name) = HCE.Exe $ T.pack name
chComponentNameToComponentType (ChTestName name) = HCE.Test $ T.pack name
chComponentNameToComponentType (ChBenchName name) = HCE.Bench $ T.pack name
chComponentNameToComponentId :: ChComponentName -> HCE.ComponentId
chComponentNameToComponentId ChLibName = HCE.ComponentId "lib"
chComponentNameToComponentId (ChSubLibName name) =
HCE.ComponentId . T.append "sublib-" . T.pack $ name
chComponentNameToComponentId (ChFLibName name) =
HCE.ComponentId . T.append "flib-" . T.pack $ name
chComponentNameToComponentId (ChExeName name) =
HCE.ComponentId . T.append "exe-" . T.pack $ name
chComponentNameToComponentId (ChTestName name) =
HCE.ComponentId . T.append "test-" . T.pack $ name
chComponentNameToComponentId (ChBenchName name) =
HCE.ComponentId . T.append "bench-" . T.pack $ name
chComponentNameToComponentId ChSetupHsName = HCE.ComponentId "setup"
#if MIN_VERSION_GLASGOW_HASKELL(8,6,5,0)
ghcVersion :: Version
ghcVersion = makeVersion [8, 6, 5, 0]
#elif MIN_VERSION_GLASGOW_HASKELL(8,6,4,0)
ghcVersion :: Version
ghcVersion = makeVersion [8, 6, 4, 0]
#elif MIN_VERSION_GLASGOW_HASKELL(8,6,3,0)
ghcVersion :: Version
ghcVersion = makeVersion [8, 6, 3, 0]
#elif MIN_VERSION_GLASGOW_HASKELL(8,4,4,0)
ghcVersion :: Version
ghcVersion = makeVersion [8, 4, 4, 0]
#elif MIN_VERSION_GLASGOW_HASKELL(8,4,3,0)
ghcVersion :: Version
ghcVersion = makeVersion [8, 4, 3, 0]
#elif MIN_VERSION_GLASGOW_HASKELL(8,2,2,0)
ghcVersion :: Version
ghcVersion = makeVersion [8, 2, 2, 0]
#else
ghcVersion :: Version
ghcVersion = makeVersion [8, 0, 2, 0]
#endif
buildDirectoryTree :: FilePath -> [FilePath] -> (FilePath -> Bool) -> IO HCE.DirTree
buildDirectoryTree path ignoreDirectories isHaskellModule = do
(_dir DT.:/ tree) <- DT.readDirectoryWith (const . return $ ()) path
-- Tuple up the complete file path with the file contents, by building up the path,
trie - style , from the root . The filepath will be relative to " anchored " directory .
let treeWithPaths = DT.zipPaths ("" DT.:/ DT.filterDir (not . ignore) tree)
return $ toDirTree (removeTopDir . fst <$> treeWithPaths)
where
ignore :: DT.DirTree a -> Bool
ignore (DT.Dir dirName _)
| "." `L.isPrefixOf` dirName = True
| dirName == "dist" = True
| dirName == "dist-newstyle" = True
| dirName == "tmp" = True
| otherwise = dirName `elem` ignoreDirectories
ignore (DT.Failed _ _) = True
ignore _ = False
removeTopDir :: FilePath -> FilePath
removeTopDir p =
case splitPath p of
_x:xs -> joinPath xs
[] -> ""
toDirTree :: DT.DirTree FilePath -> HCE.DirTree
toDirTree (DT.Dir name contents) =
HCE.Dir name (map toDirTree . filter (not . DT.failed) $ contents)
toDirTree (DT.File name filePath) =
HCE.File name filePath (isHaskellModule filePath)
toDirTree (DT.Failed name err) =
HCE.File (name ++ " : " ++ show err) "" False
addTopLevelIdentifiersFromModule ::
HCE.Trie Char HCE.ExternalIdentifierInfo
-> HCE.ModuleInfo
-> HCE.Trie Char HCE.ExternalIdentifierInfo
addTopLevelIdentifiersFromModule trieIdInfo HCE.ModuleInfo {..} =
L.foldl'
(\trie idInfo@(HCE.ExternalIdentifierInfo HCE.IdentifierInfo {..}) ->
HCE.insertToTrie S.insert (T.unpack demangledOccName) idInfo trie)
trieIdInfo
externalIds
addReferencesFromModule ::
HM.HashMap HCE.ExternalId (S.Set HCE.IdentifierSrcSpan)
-> HCE.ModuleInfo
-> HM.HashMap HCE.ExternalId (S.Set HCE.IdentifierSrcSpan)
addReferencesFromModule references {..} =
eachIdentifierOccurrence
references
modInfo
(\occMap lineNumber startCol endCol occ ->
let mbIdExternalId =
HCE.externalId =<<
maybe
Nothing
(`HM.lookup` idInfoMap)
(HCE.internalId (occ :: HCE.IdentifierOccurrence))
idSrcSpan =
HCE.IdentifierSrcSpan
{ modulePath = id
, line = lineNumber
, startColumn = startCol
, endColumn = endCol
}
in case mbIdExternalId of
Just externalId ->
HM.insertWith S.union externalId (S.singleton idSrcSpan) occMap
Nothing -> occMap)
findDistDirectory :: FilePath -> LoggingT IO (Either String FilePath)
findDistDirectory packagePath = do
let parents =
reverse . map joinPath . filter (not . null) . L.inits . splitPath $
packagePath
e.g. , [ " /dir / subdir / subsubdir","/dir / subdir/","/dir/","/ " ]
hasStackYaml <-
liftIO $ anyM (\path -> doesFileExist (path </> "stack.yaml")) parents
mbStackExecutable <- liftIO $ findExecutable "stack"
case (hasStackYaml, mbStackExecutable) of
(True, Just stack) -> do
let removeEndOfLine str
| null str = str
| otherwise = init str
logInfoN
"Found stack.yaml. Executing \"stack path --dist-dir\" to get dist directory."
eitherDistDir :: (Either IOException String) <-
liftIO .
try . fmap removeEndOfLine . readProcess stack ["path", "--dist-dir"] $
""
case eitherDistDir of
Right distDir -> do
logInfoN $ T.append "Stack dist directory : " $ T.pack distDir
hasSetupConfig <- liftIO $ doesFileExist $ distDir </> "setup-config"
if hasSetupConfig
then return $ Right distDir
else return $
Left
"Cannot find setup-config file in a dist directory. Has the package been built?"
Left exception ->
return $
Left $
"Error while executing \"stack path --dist-dir\" : " ++ show exception
_ -> do
logInfoN "Trying to find dist directory"
setupConfigPaths <-
liftIO $
map (takeDirectory . normalise) <$>
find always (fileName ==? "setup-config") "."
case setupConfigPaths of
[] ->
return $
Left "Cannot find dist directory. Has the package been built?"
[path] -> do
logInfoN $ T.append "Found dist directory : " $ T.pack path
return $ Right path
_ ->
return $
Left $
"Found multiple possible dist directories : \n" ++
show setupConfigPaths ++ " \nPlease specify --dist option"
eachIdentifierOccurrence ::
forall a.
a
-> HCE.ModuleInfo
-> (a -> IM.Key -> Int -> Int -> HCE.IdentifierOccurrence -> a)
-> a
eachIdentifierOccurrence accumulator HCE.ModuleInfo {..} f =
IM.foldlWithKey'
(\acc lineNumber occurences ->
L.foldl'
(\a ((startCol, endCol), occ) -> f a lineNumber startCol endCol occ)
acc
occurences)
accumulator
idOccMap
instance ExceptionMonad (LoggingT IO) where
gcatch act h =
LoggingT $ \logFn ->
runLoggingT act logFn `gcatch` \e -> runLoggingT (h e) logFn
gmask f =
LoggingT $ \logFn ->
gmask $ \io_restore ->
let g_restore (LoggingT m) = LoggingT $ \lf -> io_restore (m lf)
in runLoggingT (f g_restore) logFn
instance MonadLoggerIO (GhcT (LoggingT IO)) where
askLoggerIO = GhcT $ const askLoggerIO
instance MonadLogger (GhcT (LoggingT IO)) where
monadLoggerLog loc source level =
GhcT . const . monadLoggerLog loc source level
gtrySync :: (ExceptionMonad m) => m a -> m (Either SomeException a)
gtrySync action = ghandleSync (return . Left) (fmap Right action)
ghandleSync :: (ExceptionMonad m) => (SomeException -> m a) -> m a -> m a
ghandleSync onError =
ghandle
(\ex ->
case fromException ex of
Just (asyncEx :: SomeAsyncException) -> throw asyncEx
_ -> onError ex)
indexBuildComponent ::
HCE.SourceCodePreprocessing -- ^ Before or after preprocessor
-> HCE.PackageId -- ^ Current package id
-> HCE.ComponentId -- ^ Current component id
-> ModuleDependencies -- ^ Already indexed modules
^
^ of libraries
^ Command - line options for GHC
-> [String] -- ^ Modules to compile
-> LoggingT IO ([HCE.ModuleInfo],ModuleDependencies)
indexBuildComponent sourceCodePreprocessing currentPackageId componentId deps@(fileMap, defSiteMap, modNameMap) srcDirs libSrcDirs options modules = do
let onError ex = do
logErrorN $
T.concat
[ "Error while indexing component "
, HCE.getComponentId componentId
, " : "
, T.pack . show $ ex
]
return ([], deps)
ghandleSync onError $
runGhcT (Just libdir) $ do
logDebugN (T.append "Component id : " $ HCE.getComponentId componentId)
logDebugN (T.append "Modules : " $ T.pack $ show modules)
logDebugN
(T.append "GHC command line options : " $
T.pack $ L.unwords (options ++ modules))
flags <- getSessionDynFlags
(flags', _, _) <-
parseDynamicFlagsCmdLine
flags
(L.map noLoc . L.filter ("-Werror" /=) $ options) -- -Werror flag makes warnings fatal
(flags'', _) <- liftIO $ initPackages flags'
logFn <- askLoggerIO
let logAction ::
DynFlags
-> WarnReason
-> Severity
-> SrcSpan
-> Outputable.PprStyle
-> SDoc
-> IO ()
logAction fs _reason _severity srcSpan _stype msg =
runLoggingT
(logDebugN
(T.append "GHC message : " $
T.pack $
showSDocForUser fs neverQualify msg ++
" , SrcSpan : " ++ show srcSpan))
logFn
mbTmpDir =
case hiDir flags'' of
Just buildDir ->
Just $ buildDir </> (takeBaseName buildDir ++ "-tmp")
Nothing -> Nothing
_ <-
setSessionDynFlags $
L.foldl'
gopt_set
(flags''
{ hscTarget = HscAsm
, ghcLink = LinkInMemory
, ghcMode = CompManager
, log_action = logAction
, importPaths = importPaths flags'' ++ maybeToList mbTmpDir
})
[Opt_Haddock]
targets <- mapM (`guessTarget` Nothing) modules
setTargets targets
_ <- load LoadAllTargets
modGraph <- getModuleGraph
let topSortMods = flattenSCCs (topSortModuleGraph False modGraph Nothing)
buildDir =
addTrailingPathSeparator . normalise . fromMaybe "" . hiDir $
flags''
pathsModuleName =
"Paths_" ++
map
(\c ->
if c == '-'
then '_'
else c)
(T.unpack (HCE.name (currentPackageId :: HCE.PackageId)))
(modSumWithPath, modulesNotFound) <-
(\(mods, notFound) ->
( L.reverse .
L.foldl'
(\acc (mbPath, modSum) ->
case mbPath of
Just path
| not $ HM.member path defSiteMap -> (path, modSum) : acc
_ -> acc)
[] $
mods
, map snd notFound)) .
L.partition (\(mbPath, _) -> isJust mbPath) <$>
mapM
(\modSum ->
liftIO $
(, modSum) <$>
findHaskellModulePath buildDir (srcDirs ++ libSrcDirs) modSum)
(filter
(\modSum ->
pathsModuleName /=
(moduleNameString . moduleName $ ms_mod modSum))
topSortMods)
unless (null modulesNotFound) $
logErrorN $
T.append
"Cannot find module path : "
(toText flags'' $ map ms_mod modulesNotFound)
foldM
(\(indexedModules, (fileMap', defSiteMap', modNameMap')) (modulePath, modSum) -> do
result <-
indexModule
sourceCodePreprocessing
componentId
currentPackageId
flags''
(fileMap', defSiteMap', modNameMap')
(modulePath, modSum)
case result of
Right (modInfo, (fileMap'', defSiteMap'', modNameMap'')) ->
return
( modInfo : indexedModules
, (fileMap'', defSiteMap'', modNameMap''))
Left exception -> do
logErrorN $
T.concat
[ "Error while indexing "
, T.pack . show $ modulePath
, " : "
, T.pack . show $ exception
]
return (indexedModules, (fileMap', defSiteMap', modNameMap')))
([], (fileMap, defSiteMap, modNameMap))
modSumWithPath
findHaskellModulePath ::
FilePath -> [FilePath] -> ModSummary -> IO (Maybe HCE.HaskellModulePath)
findHaskellModulePath buildDir srcDirs modSum =
case normalise <$> (ml_hs_file . ms_location $ modSum) of
Just modulePath ->
let toHaskellModulePath = return . Just . HCE.HaskellModulePath . T.pack
removeTmpDir path =
case splitDirectories path of
parent:rest ->
if "-tmp" `L.isSuffixOf` parent
then joinPath rest
else path
_ -> path
in case removeTmpDir <$> L.stripPrefix buildDir modulePath of
-- File is in the build directory
Just path
| takeExtension path == ".hs-boot" -> do
let possiblePaths = path : map (</> path) srcDirs
mbFoundPath <- findM doesFileExist possiblePaths
case mbFoundPath of
Just p -> toHaskellModulePath p
_ -> return Nothing
| takeExtension path == ".hs" -> do
let paths =
map
(replaceExtension path)
HCE.haskellPreprocessorExtensions
possiblePaths =
paths ++
concatMap (\srcDir -> map (srcDir </>) paths) srcDirs
mbFoundPath <- findM doesFileExist possiblePaths
case mbFoundPath of
Just p -> toHaskellModulePath p
_ -> return Nothing
| otherwise -> return Nothing
Nothing -> toHaskellModulePath modulePath
Nothing -> return Nothing
indexModule ::
HCE.SourceCodePreprocessing
-> HCE.ComponentId
-> HCE.PackageId
-> DynFlags
-> ModuleDependencies
-> (HCE.HaskellModulePath, ModSummary)
-> GhcT (LoggingT IO) (Either SomeException ( HCE.ModuleInfo
, ModuleDependencies))
indexModule sourceCodePreprocessing componentId currentPackageId flags deps (modulePath, modSum) =
gtrySync $ do
logDebugN (T.append "Indexing " $ HCE.getHaskellModulePath modulePath)
parsedModule <- parseModule modSum
typecheckedModule <- typecheckModule parsedModule
hscEnv <- getSession
externalPackageState <- liftIO . readIORef . hsc_EPS $ hscEnv
originalSourceCode <-
liftIO $
T.replace "\t" " " . TE.decodeUtf8 <$>
BS.readFile (T.unpack . HCE.getHaskellModulePath $ modulePath)
let (modInfo, (fileMap', exportMap', moduleNameMap'), typeErrors) =
createModuleInfo
deps
( flags
, typecheckedModule
, hsc_HPT hscEnv
, externalPackageState
, modSum)
modulePath
currentPackageId
componentId
(originalSourceCode, sourceCodePreprocessing)
unless (null typeErrors) $
logInfoN $ T.append "Type errors : " $ T.pack $ show typeErrors
deepseq modInfo $ return (modInfo, (fileMap', exportMap', moduleNameMap'))
| null | https://raw.githubusercontent.com/alexwl/haskell-code-explorer/2f1c2a4c87ebd55b8a335bc4670eec875af8b4c4/src/HaskellCodeExplorer/PackageInfo.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE Rank2Types #
^ Relative path to a dist directory
^ Before or after preprocessor
^ Directories to ignore
Tuple up the complete file path with the file contents, by building up the path,
dist-dir\" to get dist directory."
dist-dir\" : " ++ show exception
^ Before or after preprocessor
^ Current package id
^ Current component id
^ Already indexed modules
^ Modules to compile
-Werror flag makes warnings fatal
File is in the build directory | # LANGUAGE CPP #
# LANGUAGE TupleSections #
# LANGUAGE FlexibleInstances #
# LANGUAGE RecordWildCards #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE DuplicateRecordFields #
# OPTIONS_GHC -fno - warn - orphans #
module HaskellCodeExplorer.PackageInfo
( createPackageInfo
, ghcVersion
) where
import Control.DeepSeq(deepseq)
import Control.Exception
( IOException
, SomeAsyncException
, SomeException
, fromException
, throw
, try
)
import Control.Monad (foldM, unless, when)
import Control.Monad.Extra (anyM, findM)
import Control.Monad.Logger
( LoggingT(..)
, MonadLogger(..)
, MonadLoggerIO(..)
, logDebugN
, logErrorN
, logInfoN
)
import qualified Data.ByteString as BS
import qualified Data.HashMap.Strict as HM
import Data.IORef (readIORef)
import qualified Data.IntMap.Strict as IM
import qualified Data.List as L
import Data.Maybe (fromMaybe, isJust, maybeToList)
import qualified Data.Set as S
import qualified Data.Text as T
import qualified Data.Text.Encoding as TE
import Data.Version (Version(..), showVersion, makeVersion)
import Digraph (flattenSCCs)
import Distribution.Helper
( ChComponentName(..)
, ChEntrypoint(..)
, ChModuleName(..)
, components
, entrypoints
, ghcOptions
, mkQueryEnv
, packageId
, runQuery
, sourceDirs
, compilerVersion
)
import DynFlags
( DynFlags(..)
, GeneralFlag(..)
, GhcMode(..)
, WarnReason(..)
, gopt_set
, parseDynamicFlagsCmdLine
)
import Exception (ExceptionMonad(..), ghandle)
import GHC
( GhcLink(..)
, HscTarget(..)
, LoadHowMuch(..)
, ModLocation(..)
, ModSummary(..)
, Severity
, SrcSpan
, getModuleGraph
, getSession
, getSessionDynFlags
, guessTarget
, load
, noLoc
, parseModule
, runGhcT
, setSessionDynFlags
, setTargets
, topSortModuleGraph
, typecheckModule
, moduleNameString
, moduleName
)
import GHC.Paths (libdir)
import GhcMonad (GhcT(..), liftIO)
import HaskellCodeExplorer.GhcUtils (isHsBoot,toText)
import HaskellCodeExplorer.ModuleInfo (ModuleDependencies, createModuleInfo)
import qualified HaskellCodeExplorer.Types as HCE
import HscTypes (hsc_EPS, hsc_HPT)
import Outputable (PprStyle, SDoc, neverQualify, showSDocForUser)
import Packages (initPackages)
import Prelude hiding (id)
import System.Directory
( doesFileExist
, findExecutable
, setCurrentDirectory
, getCurrentDirectory
, makeAbsolute
, getDirectoryContents
)
import qualified System.Directory.Tree as DT
import System.Exit (exitFailure)
import System.FilePath
( (</>)
, addTrailingPathSeparator
, joinPath
, normalise
, replaceExtension
, splitPath
, takeExtension
, takeFileName
, takeBaseName
, takeDirectory
, splitDirectories
)
import System.FilePath.Find (find,always,(==?),fileName)
import System.Process (readProcess)
createPackageInfo ::
^ Path to a Cabal package
^ Options for GHC
-> LoggingT IO (HCE.PackageInfo HCE.ModuleInfo)
createPackageInfo packageDirectoryPath mbDistDirRelativePath sourceCodePreprocessing additionalGhcOptions ignoreDirectories = do
packageDirectoryAbsPath <- liftIO $ makeAbsolute packageDirectoryPath
currentDirectory <- liftIO getCurrentDirectory
liftIO $ setCurrentDirectory packageDirectoryAbsPath
distDir <-
case mbDistDirRelativePath of
Just path -> return $ packageDirectoryAbsPath </> path
Nothing -> do
eitherDistDir <- findDistDirectory packageDirectoryAbsPath
case eitherDistDir of
Right distDir -> return distDir
Left errorMessage ->
logErrorN (T.pack errorMessage) >> liftIO exitFailure
cabalFiles <-
liftIO $
length .
filter
(\path -> takeFileName path /= ".cabal" && takeExtension path == ".cabal") <$>
getDirectoryContents packageDirectoryAbsPath
_ <-
if cabalFiles == 0
then do
logErrorN $
T.concat ["No .cabal file found in ", T.pack packageDirectoryAbsPath]
liftIO exitFailure
else when (cabalFiles >= 2) $ do
logErrorN $
T.concat
[ "Found more than one .cabal file in "
, T.pack packageDirectoryAbsPath
]
liftIO exitFailure
let cabalHelperQueryEnv = mkQueryEnv packageDirectoryAbsPath distDir
((packageName, packageVersion), (_packageCompilerName, packageCompilerVersion), compInfo) <-
liftIO $
runQuery
cabalHelperQueryEnv
((,,) <$> packageId <*> compilerVersion <*>
(zip3 <$> components ((,) <$> ghcOptions) <*>
components ((,) <$> entrypoints) <*>
components ((,) <$> sourceDirs)))
let currentPackageId = HCE.PackageId (T.pack packageName) packageVersion
unless
(take 3 (versionBranch packageCompilerVersion) ==
take 3 (versionBranch ghcVersion)) $ do
logErrorN $
T.concat
[ "GHC version mismatch. haskell-code-indexer: "
, T.pack $ showVersion ghcVersion
, ", package: "
, T.pack $ showVersion packageCompilerVersion
]
liftIO exitFailure
logInfoN $ T.append "Indexing " $ HCE.packageIdToText currentPackageId
let buildComponents =
L.map
(\((options, compName), (entrypoint, _), (srcDirs, _)) ->
( chComponentNameToComponentId compName
, options
, chEntrypointsToModules entrypoint
, srcDirs
, chComponentNameToComponentType compName)) .
L.sortBy
(\((_, compName1), _, _) ((_, compName2), _, _) ->
compare compName1 compName2) $
compInfo
libSrcDirs =
concatMap (\(_, _, _, srcDirs, _) -> srcDirs) .
filter (\(_, _, _, _, compType) -> HCE.isLibrary compType) $
buildComponents
(indexedModules, (_fileMapResult, _defSiteMapResult, modNameMapResult)) <-
foldM
(\(modules, (fileMap, defSiteMap, modNameMap)) (compId, options, (mbMain, moduleNames), srcDirs, _) -> do
mbMainPath <-
case mbMain of
Just mainPath ->
liftIO $
findM doesFileExist $
mainPath :
map (\srcDir -> normalise $ srcDir </> mainPath) srcDirs
Nothing -> return Nothing
(modules', (fileMap', defSiteMap', modNameMap')) <-
indexBuildComponent
sourceCodePreprocessing
currentPackageId
compId
(fileMap, defSiteMap, modNameMap)
srcDirs
libSrcDirs
(options ++ additionalGhcOptions)
(maybe moduleNames (: moduleNames) mbMainPath)
return (modules ++ modules', (fileMap', defSiteMap', modNameMap')))
([], (HM.empty, HM.empty, HM.empty))
buildComponents
let modId = HCE.id :: HCE.ModuleInfo -> HCE.HaskellModulePath
moduleMap =
HM.fromList . map (\modInfo -> (modId modInfo, modInfo)) $
indexedModules
references = L.foldl' addReferencesFromModule HM.empty indexedModules
moduleId = HCE.id :: HCE.ModuleInfo -> HCE.HaskellModulePath
topLevelIdentifiersTrie =
L.foldl' addTopLevelIdentifiersFromModule HCE.emptyTrie .
L.filter (not . isHsBoot . moduleId) $
indexedModules
directoryTree <-
liftIO $
buildDirectoryTree
packageDirectoryAbsPath
ignoreDirectories
(\path -> HM.member (HCE.HaskellModulePath . T.pack $ path) moduleMap)
liftIO $ setCurrentDirectory currentDirectory
return
HCE.PackageInfo
{ id = currentPackageId
, moduleMap = moduleMap
, moduleNameMap = modNameMapResult
, directoryTree = directoryTree
, externalIdOccMap = references
, externalIdInfoMap = topLevelIdentifiersTrie
}
where
chEntrypointsToModules :: ChEntrypoint -> (Maybe String, [String])
chEntrypointsToModules (ChLibEntrypoint modules otherModules signatures) =
( Nothing
, L.map chModuleToString modules ++
L.map chModuleToString otherModules ++ L.map chModuleToString signatures)
chEntrypointsToModules (ChExeEntrypoint mainModule _otherModules) =
(Just mainModule, [])
chEntrypointsToModules ChSetupEntrypoint = (Nothing, [])
chModuleToString :: ChModuleName -> String
chModuleToString (ChModuleName n) = n
chComponentNameToComponentType :: ChComponentName -> HCE.ComponentType
chComponentNameToComponentType ChSetupHsName = HCE.Setup
chComponentNameToComponentType ChLibName = HCE.Lib
chComponentNameToComponentType (ChSubLibName name) =
HCE.SubLib $ T.pack name
chComponentNameToComponentType (ChFLibName name) = HCE.FLib $ T.pack name
chComponentNameToComponentType (ChExeName name) = HCE.Exe $ T.pack name
chComponentNameToComponentType (ChTestName name) = HCE.Test $ T.pack name
chComponentNameToComponentType (ChBenchName name) = HCE.Bench $ T.pack name
chComponentNameToComponentId :: ChComponentName -> HCE.ComponentId
chComponentNameToComponentId ChLibName = HCE.ComponentId "lib"
chComponentNameToComponentId (ChSubLibName name) =
HCE.ComponentId . T.append "sublib-" . T.pack $ name
chComponentNameToComponentId (ChFLibName name) =
HCE.ComponentId . T.append "flib-" . T.pack $ name
chComponentNameToComponentId (ChExeName name) =
HCE.ComponentId . T.append "exe-" . T.pack $ name
chComponentNameToComponentId (ChTestName name) =
HCE.ComponentId . T.append "test-" . T.pack $ name
chComponentNameToComponentId (ChBenchName name) =
HCE.ComponentId . T.append "bench-" . T.pack $ name
chComponentNameToComponentId ChSetupHsName = HCE.ComponentId "setup"
#if MIN_VERSION_GLASGOW_HASKELL(8,6,5,0)
ghcVersion :: Version
ghcVersion = makeVersion [8, 6, 5, 0]
#elif MIN_VERSION_GLASGOW_HASKELL(8,6,4,0)
ghcVersion :: Version
ghcVersion = makeVersion [8, 6, 4, 0]
#elif MIN_VERSION_GLASGOW_HASKELL(8,6,3,0)
ghcVersion :: Version
ghcVersion = makeVersion [8, 6, 3, 0]
#elif MIN_VERSION_GLASGOW_HASKELL(8,4,4,0)
ghcVersion :: Version
ghcVersion = makeVersion [8, 4, 4, 0]
#elif MIN_VERSION_GLASGOW_HASKELL(8,4,3,0)
ghcVersion :: Version
ghcVersion = makeVersion [8, 4, 3, 0]
#elif MIN_VERSION_GLASGOW_HASKELL(8,2,2,0)
ghcVersion :: Version
ghcVersion = makeVersion [8, 2, 2, 0]
#else
ghcVersion :: Version
ghcVersion = makeVersion [8, 0, 2, 0]
#endif
buildDirectoryTree :: FilePath -> [FilePath] -> (FilePath -> Bool) -> IO HCE.DirTree
buildDirectoryTree path ignoreDirectories isHaskellModule = do
(_dir DT.:/ tree) <- DT.readDirectoryWith (const . return $ ()) path
trie - style , from the root . The filepath will be relative to " anchored " directory .
let treeWithPaths = DT.zipPaths ("" DT.:/ DT.filterDir (not . ignore) tree)
return $ toDirTree (removeTopDir . fst <$> treeWithPaths)
where
ignore :: DT.DirTree a -> Bool
ignore (DT.Dir dirName _)
| "." `L.isPrefixOf` dirName = True
| dirName == "dist" = True
| dirName == "dist-newstyle" = True
| dirName == "tmp" = True
| otherwise = dirName `elem` ignoreDirectories
ignore (DT.Failed _ _) = True
ignore _ = False
removeTopDir :: FilePath -> FilePath
removeTopDir p =
case splitPath p of
_x:xs -> joinPath xs
[] -> ""
toDirTree :: DT.DirTree FilePath -> HCE.DirTree
toDirTree (DT.Dir name contents) =
HCE.Dir name (map toDirTree . filter (not . DT.failed) $ contents)
toDirTree (DT.File name filePath) =
HCE.File name filePath (isHaskellModule filePath)
toDirTree (DT.Failed name err) =
HCE.File (name ++ " : " ++ show err) "" False
addTopLevelIdentifiersFromModule ::
HCE.Trie Char HCE.ExternalIdentifierInfo
-> HCE.ModuleInfo
-> HCE.Trie Char HCE.ExternalIdentifierInfo
addTopLevelIdentifiersFromModule trieIdInfo HCE.ModuleInfo {..} =
L.foldl'
(\trie idInfo@(HCE.ExternalIdentifierInfo HCE.IdentifierInfo {..}) ->
HCE.insertToTrie S.insert (T.unpack demangledOccName) idInfo trie)
trieIdInfo
externalIds
addReferencesFromModule ::
HM.HashMap HCE.ExternalId (S.Set HCE.IdentifierSrcSpan)
-> HCE.ModuleInfo
-> HM.HashMap HCE.ExternalId (S.Set HCE.IdentifierSrcSpan)
addReferencesFromModule references {..} =
eachIdentifierOccurrence
references
modInfo
(\occMap lineNumber startCol endCol occ ->
let mbIdExternalId =
HCE.externalId =<<
maybe
Nothing
(`HM.lookup` idInfoMap)
(HCE.internalId (occ :: HCE.IdentifierOccurrence))
idSrcSpan =
HCE.IdentifierSrcSpan
{ modulePath = id
, line = lineNumber
, startColumn = startCol
, endColumn = endCol
}
in case mbIdExternalId of
Just externalId ->
HM.insertWith S.union externalId (S.singleton idSrcSpan) occMap
Nothing -> occMap)
findDistDirectory :: FilePath -> LoggingT IO (Either String FilePath)
findDistDirectory packagePath = do
let parents =
reverse . map joinPath . filter (not . null) . L.inits . splitPath $
packagePath
e.g. , [ " /dir / subdir / subsubdir","/dir / subdir/","/dir/","/ " ]
hasStackYaml <-
liftIO $ anyM (\path -> doesFileExist (path </> "stack.yaml")) parents
mbStackExecutable <- liftIO $ findExecutable "stack"
case (hasStackYaml, mbStackExecutable) of
(True, Just stack) -> do
let removeEndOfLine str
| null str = str
| otherwise = init str
logInfoN
eitherDistDir :: (Either IOException String) <-
liftIO .
try . fmap removeEndOfLine . readProcess stack ["path", "--dist-dir"] $
""
case eitherDistDir of
Right distDir -> do
logInfoN $ T.append "Stack dist directory : " $ T.pack distDir
hasSetupConfig <- liftIO $ doesFileExist $ distDir </> "setup-config"
if hasSetupConfig
then return $ Right distDir
else return $
Left
"Cannot find setup-config file in a dist directory. Has the package been built?"
Left exception ->
return $
Left $
_ -> do
logInfoN "Trying to find dist directory"
setupConfigPaths <-
liftIO $
map (takeDirectory . normalise) <$>
find always (fileName ==? "setup-config") "."
case setupConfigPaths of
[] ->
return $
Left "Cannot find dist directory. Has the package been built?"
[path] -> do
logInfoN $ T.append "Found dist directory : " $ T.pack path
return $ Right path
_ ->
return $
Left $
"Found multiple possible dist directories : \n" ++
show setupConfigPaths ++ " \nPlease specify --dist option"
eachIdentifierOccurrence ::
forall a.
a
-> HCE.ModuleInfo
-> (a -> IM.Key -> Int -> Int -> HCE.IdentifierOccurrence -> a)
-> a
eachIdentifierOccurrence accumulator HCE.ModuleInfo {..} f =
IM.foldlWithKey'
(\acc lineNumber occurences ->
L.foldl'
(\a ((startCol, endCol), occ) -> f a lineNumber startCol endCol occ)
acc
occurences)
accumulator
idOccMap
-- Lift GHC's 'ExceptionMonad' operations through 'LoggingT' by unwrapping
-- to the underlying 'IO' action and re-wrapping the result.
instance ExceptionMonad (LoggingT IO) where
  gcatch act h =
    LoggingT $ \logFn ->
      runLoggingT act logFn `gcatch` \e -> runLoggingT (h e) logFn
  gmask f =
    LoggingT $ \logFn ->
      gmask $ \io_restore ->
        -- Re-wrap IO's restore function so it can be applied to LoggingT
        -- actions inside 'f'.
        let g_restore (LoggingT m) = LoggingT $ \lf -> io_restore (m lf)
        in runLoggingT (f g_restore) logFn
-- Expose the logger of the underlying 'LoggingT IO' monad; the GHC session
-- argument is ignored ('const').
instance MonadLoggerIO (GhcT (LoggingT IO)) where
  askLoggerIO = GhcT $ const askLoggerIO
-- Delegate logging to the underlying 'LoggingT IO' monad; the GHC session
-- argument is ignored ('const').
instance MonadLogger (GhcT (LoggingT IO)) where
  monadLoggerLog loc source level =
    GhcT . const . monadLoggerLog loc source level
-- | Like a "try" for synchronous exceptions only: synchronous exceptions
-- are returned in 'Left', while asynchronous exceptions are rethrown
-- (see 'ghandleSync').
gtrySync :: (ExceptionMonad m) => m a -> m (Either SomeException a)
gtrySync = ghandleSync (return . Left) . fmap Right
-- | Handle only synchronous exceptions with @onError@; asynchronous
-- exceptions ('SomeAsyncException') are rethrown untouched so that
-- cancellation and timeouts keep working.
ghandleSync :: (ExceptionMonad m) => (SomeException -> m a) -> m a -> m a
ghandleSync onError = ghandle handler
  where
    handler ex =
      case fromException ex of
        Just (asyncEx :: SomeAsyncException) -> throw asyncEx
        _ -> onError ex
-- | Index one build component of a package: configure a GHC session with
-- the component's command-line options, load its modules, and index each
-- module that has not been indexed yet, threading the dependency maps
-- through. Synchronous exceptions are logged and an empty result is
-- returned instead.
--
-- NOTE(review): the type signature of this function was mangled in this
-- copy of the file (only fragments of its Haddock comments survived); the
-- signature below is reconstructed from the definition — confirm upstream.
indexBuildComponent ::
     HCE.SourceCodePreprocessing -- ^ Before or after preprocessor
  -> HCE.PackageId -- ^ Current package id
  -> HCE.ComponentId -- ^ Current component id
  -> ModuleDependencies -- ^ Already indexed modules
  -> [FilePath] -- ^ Src dirs
  -> [FilePath] -- ^ Src dirs of libraries
  -> [String] -- ^ Command-line options for GHC
  -> [String] -- ^ Modules to compile
  -> LoggingT IO ([HCE.ModuleInfo], ModuleDependencies)
indexBuildComponent sourceCodePreprocessing currentPackageId componentId deps@(fileMap, defSiteMap, modNameMap) srcDirs libSrcDirs options modules = do
  let onError ex = do
        logErrorN $
          T.concat
            [ "Error while indexing component "
            , HCE.getComponentId componentId
            , " : "
            , T.pack . show $ ex
            ]
        return ([], deps)
  ghandleSync onError $
    runGhcT (Just libdir) $ do
      logDebugN (T.append "Component id : " $ HCE.getComponentId componentId)
      logDebugN (T.append "Modules : " $ T.pack $ show modules)
      logDebugN
        (T.append "GHC command line options : " $
         T.pack $ L.unwords (options ++ modules))
      flags <- getSessionDynFlags
      (flags', _, _) <-
        parseDynamicFlagsCmdLine
          flags
          -- NOTE(review): the second argument of 'parseDynamicFlagsCmdLine'
          -- was lost in this copy of the file; reconstructed — confirm
          -- upstream.
          (L.map noLoc options)
      (flags'', _) <- liftIO $ initPackages flags'
      logFn <- askLoggerIO
      let logAction ::
               DynFlags
            -> WarnReason
            -> Severity
            -> SrcSpan
            -> Outputable.PprStyle
            -> SDoc
            -> IO ()
          -- Forward GHC's diagnostics to our logger at debug level.
          logAction fs _reason _severity srcSpan _stype msg =
            runLoggingT
              (logDebugN
                 (T.append "GHC message : " $
                  T.pack $
                  showSDocForUser fs neverQualify msg ++
                  " , SrcSpan : " ++ show srcSpan))
              logFn
          -- Preprocessed sources live in "<build-dir>/<build-dir>-tmp";
          -- add it to the import paths below.
          mbTmpDir =
            case hiDir flags'' of
              Just buildDir ->
                Just $ buildDir </> (takeBaseName buildDir ++ "-tmp")
              Nothing -> Nothing
      _ <-
        setSessionDynFlags $
        L.foldl'
          gopt_set
          (flags''
             { hscTarget = HscAsm
             , ghcLink = LinkInMemory
             , ghcMode = CompManager
             , log_action = logAction
             , importPaths = importPaths flags'' ++ maybeToList mbTmpDir
             })
          [Opt_Haddock]
      targets <- mapM (`guessTarget` Nothing) modules
      setTargets targets
      _ <- load LoadAllTargets
      modGraph <- getModuleGraph
      let topSortMods = flattenSCCs (topSortModuleGraph False modGraph Nothing)
          buildDir =
            addTrailingPathSeparator . normalise . fromMaybe "" . hiDir $
            flags''
          -- Cabal generates a Paths_<package> module with no source file on
          -- disk; it is filtered out below.
          pathsModuleName =
            "Paths_" ++
            map
              (\c ->
                 if c == '-'
                   then '_'
                   else c)
              (T.unpack (HCE.name (currentPackageId :: HCE.PackageId)))
      -- Pair each module summary with its source path, keeping only modules
      -- that have a path and have not been indexed yet; collect the rest as
      -- "not found".
      (modSumWithPath, modulesNotFound) <-
        (\(mods, notFound) ->
           ( L.reverse .
             L.foldl'
               (\acc (mbPath, modSum) ->
                  case mbPath of
                    Just path
                      | not $ HM.member path defSiteMap -> (path, modSum) : acc
                    _ -> acc)
               [] $
             mods
           , map snd notFound)) .
        L.partition (\(mbPath, _) -> isJust mbPath) <$>
        mapM
          (\modSum ->
             liftIO $
             (, modSum) <$>
             findHaskellModulePath buildDir (srcDirs ++ libSrcDirs) modSum)
          (filter
             (\modSum ->
                pathsModuleName /=
                (moduleNameString . moduleName $ ms_mod modSum))
             topSortMods)
      unless (null modulesNotFound) $
        logErrorN $
        T.append
          "Cannot find module path : "
          (toText flags'' $ map ms_mod modulesNotFound)
      -- Index modules one by one; a failure for one module is logged and
      -- skipped rather than aborting the whole component.
      foldM
        (\(indexedModules, (fileMap', defSiteMap', modNameMap')) (modulePath, modSum) -> do
           result <-
             indexModule
               sourceCodePreprocessing
               componentId
               currentPackageId
               flags''
               (fileMap', defSiteMap', modNameMap')
               (modulePath, modSum)
           case result of
             Right (modInfo, (fileMap'', defSiteMap'', modNameMap'')) ->
               return
                 ( modInfo : indexedModules
                 , (fileMap'', defSiteMap'', modNameMap''))
             Left exception -> do
               logErrorN $
                 T.concat
                   [ "Error while indexing "
                   , T.pack . show $ modulePath
                   , " : "
                   , T.pack . show $ exception
                   ]
               return (indexedModules, (fileMap', defSiteMap', modNameMap')))
        ([], (fileMap, defSiteMap, modNameMap))
        modSumWithPath
-- | Map a 'ModSummary' to the path of its source file, undoing the effects
-- of preprocessing: paths pointing into the build directory (or its
-- "-tmp" subdirectory) are mapped back to an existing source file in one
-- of the given source directories. Returns 'Nothing' when no existing
-- source file can be found.
findHaskellModulePath ::
     FilePath -> [FilePath] -> ModSummary -> IO (Maybe HCE.HaskellModulePath)
findHaskellModulePath buildDir srcDirs modSum =
  case normalise <$> (ml_hs_file . ms_location $ modSum) of
    Just modulePath ->
      let toHaskellModulePath = return . Just . HCE.HaskellModulePath . T.pack
          -- Drop a leading "<something>-tmp" directory component.
          removeTmpDir path =
            case splitDirectories path of
              parent:rest ->
                if "-tmp" `L.isSuffixOf` parent
                  then joinPath rest
                  else path
              _ -> path
      in case removeTmpDir <$> L.stripPrefix buildDir modulePath of
           -- The path points into the build directory.
           Just path
             | takeExtension path == ".hs-boot" -> do
               let possiblePaths = path : map (</> path) srcDirs
               mbFoundPath <- findM doesFileExist possiblePaths
               case mbFoundPath of
                 Just p -> toHaskellModulePath p
                 _ -> return Nothing
             | takeExtension path == ".hs" -> do
               -- A ".hs" file inside the build directory is preprocessor
               -- output: look for the original source with one of the known
               -- preprocessor extensions, in place and in each src dir.
               let paths =
                     map
                       (replaceExtension path)
                       HCE.haskellPreprocessorExtensions
                   possiblePaths =
                     paths ++
                     concatMap (\srcDir -> map (srcDir </>) paths) srcDirs
               mbFoundPath <- findM doesFileExist possiblePaths
               case mbFoundPath of
                 Just p -> toHaskellModulePath p
                 _ -> return Nothing
             | otherwise -> return Nothing
           -- Not inside the build directory: use the path as is.
           Nothing -> toHaskellModulePath modulePath
    Nothing -> return Nothing
-- | Parse, typecheck, and index a single module, returning the collected
-- 'HCE.ModuleInfo' together with the updated dependency maps.
-- Synchronous exceptions are captured in 'Left' (see 'gtrySync').
indexModule ::
     HCE.SourceCodePreprocessing
  -> HCE.ComponentId
  -> HCE.PackageId
  -> DynFlags
  -> ModuleDependencies
  -> (HCE.HaskellModulePath, ModSummary)
  -> GhcT (LoggingT IO) (Either SomeException ( HCE.ModuleInfo
                                              , ModuleDependencies))
indexModule sourceCodePreprocessing componentId currentPackageId flags deps (modulePath, modSum) =
  gtrySync $ do
    logDebugN (T.append "Indexing " $ HCE.getHaskellModulePath modulePath)
    parsedModule <- parseModule modSum
    typecheckedModule <- typecheckModule parsedModule
    hscEnv <- getSession
    externalPackageState <- liftIO . readIORef . hsc_EPS $ hscEnv
    -- Tabs are replaced so that column positions are reliable.
    originalSourceCode <-
      liftIO $
      T.replace "\t" " " . TE.decodeUtf8 <$>
      BS.readFile (T.unpack . HCE.getHaskellModulePath $ modulePath)
    let (modInfo, (fileMap', exportMap', moduleNameMap'), typeErrors) =
          createModuleInfo
            deps
            ( flags
            , typecheckedModule
            , hsc_HPT hscEnv
            , externalPackageState
            , modSum)
            modulePath
            currentPackageId
            componentId
            (originalSourceCode, sourceCodePreprocessing)
    unless (null typeErrors) $
      logInfoN $ T.append "Type errors : " $ T.pack $ show typeErrors
    -- Force the result so that lazy indexing errors surface here, inside
    -- gtrySync, rather than at some later use site.
    deepseq modInfo $ return (modInfo, (fileMap', exportMap', moduleNameMap'))
|
3ca54a5e7ea1d3769ab38cbac3381f8e6c678a30753bad10ec7b9c742720a685 | froggey/Mezzano | theme.lisp | ;;;; Theme variables
(in-package :mezzano.gui.theme)
;; Default text and window colours.
(defvar *foreground* (gui:make-colour-from-octets #xDC #xDC #xCC))
(defvar *background* (gui:make-colour-from-octets #x3E #x3E #x3E #xD8))
;; Window frame colours; the -TOP variants are used for the top edge of
;; the frame.
(defvar *active-frame* (gui:make-colour-from-octets #x80 #x80 #x80))
(defvar *active-frame-top* (gui:make-colour-from-octets #xFF #xFF #xFF))
(defvar *inactive-frame* (gui:make-colour-from-octets #x40 #x40 #x40))
(defvar *inactive-frame-top* (gui:make-colour-from-octets #x80 #x80 #x80))
(defvar *frame-title* (gui:make-colour-from-octets #x3F #x3F #x3F))
;; Per-file-type colours used by the Filer.
(defvar *filer-lisp-source-code* (gui:make-colour-from-octets #x94 #xBF #xF3))
(defvar *filer-compiled-lisp-code* (gui:make-colour-from-octets #xF0 #xAF #x8F))
(defvar *filer-text* (gui:make-colour-from-octets #xCC #x93 #x93))
(defvar *filer-font* (gui:make-colour-from-octets #x7F #x9F #x7F))
(defvar *filer-media* (gui:make-colour-from-octets #xDC #x8C #xC3))
;; Memory monitor: one colour per page state.
(defvar *memory-monitor-not-present* (gui:make-colour 0 0 0))
(defvar *memory-monitor-free* (gui:make-colour-from-octets 53 148 254))
(defvar *memory-monitor-wired* (gui:make-colour-from-octets 248 8 23))
(defvar *memory-monitor-wired-backing* (gui:make-colour-from-octets 143 80 10))
(defvar *memory-monitor-active* (gui:make-colour-from-octets 147 253 21))
(defvar *memory-monitor-active-writeback* (gui:make-colour-from-octets 81 145 7))
(defvar *memory-monitor-inactive-writeback* (gui:make-colour-from-octets 82 9 146))
(defvar *memory-monitor-page-table* (gui:make-colour-from-octets 251 131 216))
(defvar *memory-monitor-other* (gui:make-colour-from-octets 121 121 121))
(defvar *memory-monitor-mixed* (gui:make-colour 1 1 1))
;; Memory monitor: usage graph colours.
(defvar *memory-monitor-graph-background* *background*)
(defvar *memory-monitor-graph-tracker* (gui:make-colour-from-octets 255 0 0))
(defvar *memory-monitor-general-area-usage* (gui:make-colour 0 0 1))
(defvar *memory-monitor-general-area-alloc* (gui:make-colour 0.5 0.5 1))
(defvar *memory-monitor-general-area-commit* (gui:make-colour 0.5 0.2 1))
(defvar *memory-monitor-cons-area-usage* (gui:make-colour 0 1 0))
(defvar *memory-monitor-cons-area-alloc* (gui:make-colour 0.5 1 0.5))
(defvar *memory-monitor-cons-area-commit* (gui:make-colour 0.5 1 0.2))
(defvar *memory-monitor-pinned-area-usage* (gui:make-colour 1 0 0))
(defvar *memory-monitor-wired-area-usage* (gui:make-colour 1 0 1))
(defvar *memory-monitor-function-area-usage* (gui:make-colour 0.75 0.5 0))
(defvar *memory-monitor-wired-function-area-usage* (gui:make-colour 0.75 0.5 1))
;; Desktop and terminal colours.
(defvar *desktop-text* (gui:make-colour 1 1 1))
(defvar *xterm-background* (gui:make-colour 0 0 0 0.85))
(defun set-desktop-background-image (image)
  "Ask the desktop to use the image at pathname IMAGE as its background."
  (let ((request (make-instance 'mezzano.gui.desktop::set-background-image
                                :image-pathname image)))
    (mezzano.supervisor:fifo-push request mezzano.internals::*desktop*)))
(defun set-desktop-background-colour (colour)
  "Ask the desktop to use COLOUR as its background colour."
  (let ((request (make-instance 'mezzano.gui.desktop::set-background-colour
                                :colour colour)))
    (mezzano.supervisor:fifo-push request mezzano.internals::*desktop*)))
(defun set-desktop-text-colour (colour)
  "Ask the desktop to draw its text in COLOUR."
  (let ((request (make-instance 'mezzano.gui.desktop::set-text-colour
                                :colour colour)))
    (mezzano.supervisor:fifo-push request mezzano.internals::*desktop*)))
| null | https://raw.githubusercontent.com/froggey/Mezzano/f0eeb2a3f032098b394e31e3dfd32800f8a51122/gui/theme.lisp | lisp | Theme variables |
(in-package :mezzano.gui.theme)
(defvar *foreground* (gui:make-colour-from-octets #xDC #xDC #xCC))
(defvar *background* (gui:make-colour-from-octets #x3E #x3E #x3E #xD8))
(defvar *active-frame* (gui:make-colour-from-octets #x80 #x80 #x80))
(defvar *active-frame-top* (gui:make-colour-from-octets #xFF #xFF #xFF))
(defvar *inactive-frame* (gui:make-colour-from-octets #x40 #x40 #x40))
(defvar *inactive-frame-top* (gui:make-colour-from-octets #x80 #x80 #x80))
(defvar *frame-title* (gui:make-colour-from-octets #x3F #x3F #x3F))
(defvar *filer-lisp-source-code* (gui:make-colour-from-octets #x94 #xBF #xF3))
(defvar *filer-compiled-lisp-code* (gui:make-colour-from-octets #xF0 #xAF #x8F))
(defvar *filer-text* (gui:make-colour-from-octets #xCC #x93 #x93))
(defvar *filer-font* (gui:make-colour-from-octets #x7F #x9F #x7F))
(defvar *filer-media* (gui:make-colour-from-octets #xDC #x8C #xC3))
(defvar *memory-monitor-not-present* (gui:make-colour 0 0 0))
(defvar *memory-monitor-free* (gui:make-colour-from-octets 53 148 254))
(defvar *memory-monitor-wired* (gui:make-colour-from-octets 248 8 23))
(defvar *memory-monitor-wired-backing* (gui:make-colour-from-octets 143 80 10))
(defvar *memory-monitor-active* (gui:make-colour-from-octets 147 253 21))
(defvar *memory-monitor-active-writeback* (gui:make-colour-from-octets 81 145 7))
(defvar *memory-monitor-inactive-writeback* (gui:make-colour-from-octets 82 9 146))
(defvar *memory-monitor-page-table* (gui:make-colour-from-octets 251 131 216))
(defvar *memory-monitor-other* (gui:make-colour-from-octets 121 121 121))
(defvar *memory-monitor-mixed* (gui:make-colour 1 1 1))
(defvar *memory-monitor-graph-background* *background*)
(defvar *memory-monitor-graph-tracker* (gui:make-colour-from-octets 255 0 0))
(defvar *memory-monitor-general-area-usage* (gui:make-colour 0 0 1))
(defvar *memory-monitor-general-area-alloc* (gui:make-colour 0.5 0.5 1))
(defvar *memory-monitor-general-area-commit* (gui:make-colour 0.5 0.2 1))
(defvar *memory-monitor-cons-area-usage* (gui:make-colour 0 1 0))
(defvar *memory-monitor-cons-area-alloc* (gui:make-colour 0.5 1 0.5))
(defvar *memory-monitor-cons-area-commit* (gui:make-colour 0.5 1 0.2))
(defvar *memory-monitor-pinned-area-usage* (gui:make-colour 1 0 0))
(defvar *memory-monitor-wired-area-usage* (gui:make-colour 1 0 1))
(defvar *memory-monitor-function-area-usage* (gui:make-colour 0.75 0.5 0))
(defvar *memory-monitor-wired-function-area-usage* (gui:make-colour 0.75 0.5 1))
(defvar *desktop-text* (gui:make-colour 1 1 1))
(defvar *xterm-background* (gui:make-colour 0 0 0 0.85))
(defun set-desktop-background-image (image)
(mezzano.supervisor:fifo-push
(make-instance 'mezzano.gui.desktop::set-background-image :image-pathname image)
mezzano.internals::*desktop*))
(defun set-desktop-background-colour (colour)
(mezzano.supervisor:fifo-push
(make-instance 'mezzano.gui.desktop::set-background-colour :colour colour)
mezzano.internals::*desktop*))
(defun set-desktop-text-colour (colour)
(mezzano.supervisor:fifo-push
(make-instance 'mezzano.gui.desktop::set-text-colour :colour colour)
mezzano.internals::*desktop*))
|
a6fc409110dd4ed600a27eae7496d2982040bd55a68ce5bac33c03374b618d15 | mgree/smoosh | test_evaluation.ml | open Test_prelude
open Smoosh
open Os_symbolic
open Path
open Printf
(***********************************************************************)
(* EXIT CODE TESTS ****************************************************)
(***********************************************************************)
(* Parse [cmd], evaluate it symbolically starting from state [os0], and
   return its exit code; -1 signals that evaluation ran out of fuel. *)
let run_cmd_for_exit_code (cmd : string) (os0 : symbolic os_state) : int =
  let parsed = Shim.parse_string cmd in
  let os1 = Semantics.symbolic_full_evaluation os0 parsed in
  match out_of_fuel os1 with
  | true -> -1
  | false -> os1.sh.exit_code
(* Run one exit-code test case (command, initial state, expected code). *)
let check_exit_code ((cmd, _, _) as case) =
  checker (run_cmd_for_exit_code cmd) (=) case
(* Test cases of the form (shell command, initial symbolic OS state,
   expected exit code); run by [run_tests] via [check_exit_code]. *)
let exit_code_tests : (string * symbolic os_state * int) list =
  (* basic logic *)
  [ ("true", os_empty, 0)
  ; ("false", os_empty, 1)
  ; ("true && true", os_empty, 0)
  ; ("true && false", os_empty, 1)
  ; ("false && true", os_empty, 1)
  ; ("false || true", os_empty, 0)
  ; ("false ; true", os_empty, 0)
  ; ("true ; false", os_empty, 1)
  ; ("! true", os_empty, 1)
  ; ("! false", os_empty, 0)
  ; ("! { true ; false ; }", os_empty, 0)
  ; ("! { false ; true ; }", os_empty, 1)
  ; ("x=`false`", os_empty, 1)
  ; ("x=$(exit 5)", os_empty, 5)
  ; ("exit $(echo 3; exit 5)", os_empty, 3)
  (* expansion *)
  ; ("x=5 ; echo ${x?erp}", os_empty, 0)
  ; ("echo ${x?erp}", os_empty, 1)
  ; ("for y in ${x?oh no}; do exit 5; done", os_empty, 1)
  ; ("x=5 ; for y in ${x?oh no}; do exit $y; done", os_empty, 5)
  ; ("case ${x?alas} in *) true;; esac", os_empty, 1)
  ; ("x=7 ; case ${x?alas} in *) exit $x;; esac", os_empty, 7)
  ; ("x=$(echo 5) ; exit $x", os_empty, 5)
  ; ("x=$(echo hello) ; case $x in *ell*) true;; *) false;; esac", os_empty, 0)
  (* exit *)
  ; ("exit", os_empty, 0)
  ; ("exit 2", os_empty, 2)
  ; ("false; exit", os_empty, 1)
  ; ("false; exit 2", os_empty, 2)
  ; ("exit 2; false", os_empty, 2)
  ; ("exit 2; exit 40", os_empty, 2)
  (* break *)
  ; ("while true; do break; done", os_empty, 0)
  (* for loop with no args should exit 0 *)
  ; ("for x in; do exit 1; done", os_empty, 0)
  ; ("for x in \"\"; do exit 1; done", os_empty, 1)
  (* case cascades *)
  ; ("case abc in ab) true;; abc) false;; esac", os_empty, 1)
  ; ("case abc in ab|ab*) true;; abc) false;; esac", os_empty, 0)
  ; ("case abc in *) true;; abc) false;; esac", os_empty, 0)
  ; ("x=hello ; case $x in *el*) true;; *) false;; esac", os_empty, 0)
  ; ("case \"no one is home\" in esac", os_empty, 0)
  ; ("case Linux in Lin*) true;; *) false;; esac", os_empty, 0)
  (* pipes *)
  ; ("false | true", os_empty, 0)
  ; ("true | false", os_empty, 1)
  ; ("true | exit 5", os_empty, 5)
  (* unset *)
  ; ("x=5 ; exit $x", os_empty, 5)
  ; ("x=5 ; unset x; exit $x", os_empty, 0)
  ; ("x=5 ; unset x; exit ${x-42}", os_empty, 42)
  ; ("f() { exit 3 ; } ; f", os_empty, 3)
  ; ("f() { exit 3 ; } ; unset f ; f", os_empty, 3)
  ; ("f() { exit 3 ; } ; unset -f f ; f", os_empty, 127)
  (* readonly *)
  ; ("x=5 ; readonly x", os_empty, 0)
  ; ("x=5 ; readonly x ; readonly x=10", os_empty, 1)
  ; ("x=- ; readonly $x=derp", os_empty, 1)
  (* export *)
  ; ("x=- ; export $x=derp", os_empty, 1)
  (* eval *)
  ; ("eval exit 0", os_empty, 0)
  ; ("eval exit 1", os_empty, 1)
  ; ("! ( eval exit 1 )", os_empty, 0)
  ; ("! eval exit 1", os_empty, 1)
  ; ("! eval exit 47", os_empty, 47)
  (* function calls *)
  ; ("g() { exit 5 ; } ; h() { exit 6 ; } ; i() { $1 ; exit 7 ; } ; i g", os_empty, 5)
  ; ("g() { exit 5 ; } ; h() { exit 6 ; } ; i() { $1 ; exit 7 ; } ; i h", os_empty, 6)
  ; ("g() { exit 5 ; } ; h() { exit 6 ; } ; i() { $1 ; exit 7 ; } ; i :", os_empty, 7)
  (* $# *)
  ; ("f() { exit $# ; } ; f", os_empty, 0)
  ; ("f() { exit $# ; } ; f a", os_empty, 1)
  ; ("f() { exit $# ; } ; f a b", os_empty, 2)
  ; ("f() { exit $# ; } ; f a b c", os_empty, 3)
  ; ("f() { $@ ; } ; f exit 12", os_empty, 12)
  ; ("f() { $* ; } ; f exit 12", os_empty, 12)
  (* set *)
  ; ("set -- a b c; exit $#", os_empty, 3)
  ; ("set -- ; exit $#", os_empty, 0)
  ; ("set -n ; exit 5", os_empty, 0)
  ; ("set -u ; echo $x", os_empty, 1)
  ; ("set -- a b c; set -u; exit $#", os_empty, 3)
  ; ("set -- a b c; set -u a b; exit $#", os_empty, 2)
  (* test *)
  ; ("test hi = hi", os_empty, 0)
  ; ("test hi = bye", os_empty, 1)
  ; ("test hi != hi", os_empty, 1)
  ; ("test hi != bye", os_empty, 0)
  ; ("test", os_empty, 1)
  ; ("test hello", os_empty, 0)
  ; ("test \"\"", os_empty, 1)
  ; ("test -n hello", os_empty, 0)
  ; ("test -n \"\"", os_empty, 1)
  ; ("test -z \"\"", os_empty, 0)
  ; ("test -z hello", os_empty, 1)
  ; ("test 5 -eq 5", os_empty, 0)
  ; ("test 5 -ne 5", os_empty, 1)
  ; ("test 1 -lt 5", os_empty, 0)
  ; ("test 5 -lt 5", os_empty, 1)
  ; ("test 1 -le 1", os_empty, 0)
  ; ("test 1 -gt 5", os_empty, 1)
  ; ("test 5 -gt 5", os_empty, 1)
  ; ("test 1 -ge 1", os_empty, 0)
  ; ("test hi -ge 1", os_empty, 2)
  ; ("test \\( 5 -ne 5 \\) -o -n hello", os_empty, 0)
  ; ("test \\( 5 -eq 5 \\) -a -z \"\"", os_empty, 0)
  ; ("test 5 -eq 5 -a -z \"\"", os_empty, 0)
  ; ("test hi \\< hello", os_empty, 1)
  ; ("test lol \\< hello", os_empty, 1)
  ; ("test a \\< b", os_empty, 0)
  (* regression: support negative numbers *)
  ; ("test -5 -eq $((0-5))", os_empty, 0)
  ; ("test -5 -eq $((5*5))", os_empty, 1)
  ; ("test $((0-5)) -eq -5", os_empty, 0)
  ; ("test $((5*5)) -eq -5", os_empty, 1)
  ]
(***********************************************************************)
(* STDOUT TESTS ********************************************************)
(***********************************************************************)
(* Parse [cmd], evaluate it symbolically starting from state [os0], and
   return what it wrote to STDOUT; a sentinel string is returned when
   evaluation ran out of fuel. *)
let run_cmd_for_stdout (cmd : string) (os0 : symbolic os_state) : string =
  let parsed = Shim.parse_string cmd in
  let os1 = Semantics.symbolic_full_evaluation os0 parsed in
  match out_of_fuel os1 with
  | true -> "!!! OUT OF FUEL"
  | false -> get_stdout os1
(* Run one STDOUT test case (command, initial state, expected output). *)
let check_stdout ((cmd, _, _) as case) =
  checker (run_cmd_for_stdout cmd) (=) case
(* Test cases of the form (shell command, initial symbolic OS state,
   expected STDOUT); run by [run_tests] via [check_stdout]. *)
let stdout_tests : (string * symbolic os_state * string) list =
  (* basic logic *)
  [ ("true", os_empty, "")
  ; ("false", os_empty, "")
  ; ("echo hi ; echo there", os_empty, "hi\nthere\n")
  ; ("echo -n hi ; echo there", os_empty, "hithere\n")
  ; ("echo -n \"hi \" ; echo there", os_empty, "hi there\n")
  ; ("x=${y:=1} ; echo $((x+=`echo 2`))", os_empty, "3\n")
  (* regression:
     see test_prelude.ml for fs details of why these are the outputs *)
  ; ("echo *", os_complicated_fs, "a b c\n")
  ; ("echo \\*", os_complicated_fs, "*\n")
  ; ("x=\\* ; echo $x", os_complicated_fs, "a b c\n")
  ; ("x=\\* ; set -f ; echo $x ; set +f ; echo $x", os_complicated_fs, "*\na b c\n")
  ; ("x=\\* ; cd b ; echo $x", os_complicated_fs, "user\n")
  ; ("case hi\\\"there\\\" in *\\\"there\\\") echo matched;; *) echo did not;; esac", os_complicated_fs, "matched\n")
  ; ("case hi\\\"there\\\" in *\"there\") echo matched;; *) echo did not;; esac", os_complicated_fs, "did not\n")
  ; ("x='' ; case $x in \"\") echo e ;; *) echo nope ;; esac", os_empty, "e\n")
  ; ("case hi\\\"there\\\" in *\\\") echo m;; *) echo n;; esac", os_empty, "m\n")
  ; ("x=hello\\*there ; echo ${x#*\\*}", os_complicated_fs, "there\n")
  ; ("case Linux in Lin*) echo matched;; *) echo nope;; esac", os_empty, "matched\n")
  ; ("case Linux in *) echo matched;; esac", os_empty, "matched\n")
  (* regression: don't do pathname expansion on patterns *)
  ; ("case Linux in *) echo matched;; esac", os_complicated_fs, "matched\n")
  ; ("case Linux in *) echo matched;; esac", os_complicated_fs_in_a, "matched\n")
  ; ("echo []", os_complicated_fs, "[]\n")
  ; ("echo \"[]\"", os_complicated_fs, "[]\n")
  ; ("echo '[]'", os_complicated_fs, "[]\n")
  ; ("echo \\[]", os_complicated_fs, "[]\n")
  (* regression: support [a-zA-Z][a-zA-Z0-9_] as varnames *)
  ; ("var_1=5 ; echo $((var_1 + 1))", os_empty, "6\n")
  ; ("_var1=5 ; echo $((_var1 * 2))", os_empty, "10\n")
  ; ("_=5 ; echo $((_ - 3))", os_empty, "2\n")
  ; ("_234=5 ; echo $((_234 % 4))", os_empty, "1\n")
  (* regression: correct handling of patterns *)
  ; ("x=foo_47.bar ; echo ${x%%[!0-9]*}", os_empty, "\n")
  ; ("x=foo_47.bar ; echo ${x%%[!0-9]*}", os_complicated_fs, "\n")
  ; ("x=foo_47.bar ; echo ${x##[!0-9]*}", os_empty, "\n")
  ; ("x=foo_47.bar ; echo ${x##[!0-9]*}", os_complicated_fs, "\n")
  (* regression: correct positional param restore on function return *)
  ; ("g() { set -- q ; } ; f() { echo $# [$*] ; g ; echo $# [$*] ; } ; f a b c",
     os_empty,
     "3 [a b c]\n3 [a b c]\n")
  (* regression: shift shouldn't affect $0 *)
  ; ("echo $0 ; set -- a b c ; echo $0 ; shift ; echo $0 ; shift 2 ; echo $0",
     os_empty,
     "smoosh\nsmoosh\nsmoosh\nsmoosh\n")
  (* regression: eval and set *)
  ; ("eval set -- 1 2 3 ; echo $#", os_empty, "3\n")
  ; ("eval set -- 1 2 3 ; echo $*", os_empty, "1 2 3\n")
  (* regression: set -- *)
  ; ("set -- 1 2 3; echo $#; set --; echo $#", os_empty, "3\n0\n")
  (* subshells *)
  ; ("x=$(echo *) ; echo $x", os_complicated_fs, "a b c\n")
  ; ("x=$(echo hello there); echo $x", os_empty, "hello there\n")
  ; ("x=$(echo 5); echo $((x * x))", os_empty, "25\n")
  (* shift *)
  ; ("set -- a b c ; shift ; echo $#", os_empty, "2\n")
  ; ("set -- a b c ; shift 1 ; echo $#", os_empty, "2\n")
  ; ("set -- a b c ; shift 2 ; echo $# [$*]", os_empty, "1 [c]\n")
  ; ("set -- a b c ; shift 3 ; echo $# [$*]", os_empty, "0 []\n")
  ; ("set -- a b c ; shift 0 ; echo $# [$*]", os_empty, "3 [a b c]\n")
  ; ("set -- a b c ; shift 4 ; echo failed", os_empty, "")
  ; ("set -- a b c ; shift 4 ; echo failed", os_empty, "")
  (* redirects and pipes *)
  ; ("( echo ${x?oops} ) 2>&1", os_empty, "x: oops\n")
  ; ("echo hi | echo no", os_empty, "no\n")
  ; ("echo ${y?oh no}", os_empty, "")
  ; ("exec 2>&1; echo ${y?oh no}", os_empty, "y: oh no\n")
  ; ("echo ${y?oh no}", os_empty, "")
  ; ("exec 1>&2; echo ${y?oh no}", os_empty, "")
  ; ("while true; do echo 5; done | echo done", os_empty, "done\n")
  ; ("while true; do echo 5; done | { read x; echo $((x + 42)) ; }", os_empty, "47\n")
  (* $* vs $@
     e.g.s from https://stackoverflow.com/questions/12316565
     (bash command line args: $* vs $@) *)
  ; ("set -- 'arg 1' 'arg 2' 'arg 3' ; for x in $*; do echo \"$x\"; done",
     os_empty,
     "arg\n1\narg\n2\narg\n3\n")
  ; ("set -- 'arg 1' 'arg 2' 'arg 3' ; for x in $@; do echo \"$x\"; done",
     os_empty,
     "arg\n1\narg\n2\narg\n3\n")
  ; ("set -- 'arg 1' 'arg 2' 'arg 3' ; for x in \"$*\"; do echo \"$x\"; done",
     os_empty,
     "arg 1 arg 2 arg 3\n")
  ; ("set -- 'arg 1' 'arg 2' 'arg 3' ; for x in \"$@\"; do echo \"$x\"; done",
     os_empty,
     "arg 1\narg 2\narg 3\n")
  ; ("set -- 'arg 1' 'arg 2' 'arg 3' ; for x in \"$@\"; do echo $x; done",
     os_empty,
     "arg 1\narg 2\narg 3\n")
  (* command types *)
  ; ("type type", os_empty, "type is a shell builtin\n")
  ; ("type :", os_empty, ": is a special shell builtin\n")
  ; ("f() : ; type f", os_empty, "f is a shell function\n")
  ; ("type nonesuch", os_empty, "nonesuch: not found\n")
  ; ("pwd() echo florp ; pwd", os_empty, "florp\n")
  ; ("pwd() echo florp ; command pwd", os_empty, "/\n")
  ; ("pwd() echo florp ; command -p pwd", os_empty, "/\n")
  (* umask *)
  ; ("umask", os_empty, "0022\n")
  ; ("umask -S", os_empty, "u=rwx,g=rx,o=rx\n")
  ; ("umask 0044 ; umask", os_empty, "0044\n")
  ; ("umask 0044 ; umask -S", os_empty, "u=rwx,g=wx,o=wx\n")
  ; ("umask a= ; umask", os_empty, "0777\n")
  ; ("umask a= ; umask u+rwx ; umask", os_empty, "0077\n")
  ; ("umask a= ; umask u+rwx,go+rx ; umask", os_empty, "0022\n")
  ; ("umask u=o; umask", os_empty, "0222\n")
  ; ("umask u=g; umask", os_empty, "0222\n")
  ; ("umask g+u; umask", os_empty, "0002\n")
  (* read *)
  ; ("echo 1 | read x; echo ${x-subshell}", os_empty, "subshell\n")
  ; ("echo 1 | { read x ; echo $x ; }", os_empty, "1\n")
  ; ("echo 1 2 | { read x y ; echo $x ; echo $y ; }", os_empty, "1\n2\n")
  (* printf *)
  ; ("printf", os_empty, "")
  ; ("printf \\\\n", os_empty, "\n")
  ; ("printf hi\\\\n%%\\\\tthere", os_empty, "hi\n%\tthere")
  ; ("printf oct%spus o", os_empty, "octopus")
  ; ("printf %s $((10 * 10))", os_empty, "100")
  ; ("printf %b 'hello\\n'", os_empty, "hello\n")
  ; ("printf %b \"hello\\n\"", os_empty, "hello\n")
  ; ("printf %b \"hello\\\\n\"", os_empty, "hello\n")
  ; ("printf %c oops", os_empty, "o")
  ; ("printf %.1s hello", os_empty, "h")
  ; ("printf %.0s hello", os_empty, "")
  ; ("printf %.s hello", os_empty, "")
  ; ("printf %d 0xf", os_empty, "15")
  ; ("printf %i 0xf", os_empty, "15")
  ; ("printf %x 16", os_empty, "10")
  ; ("printf %X 15", os_empty, "F")
  ; ("printf %#X 15", os_empty, "0XF")
  ; ("printf %#x 15", os_empty, "0xf")
  ; ("printf %04x 15", os_empty, "000f")
  ; ("printf %#04x 15", os_empty, "0x0f")
  ; ("printf %#05x 15", os_empty, "0x00f")
  ; ("printf %#5x 15", os_empty, " 0xf")
  ; ("printf %x -5", os_empty, "fffffffffffffffb")
  ; ("printf %u -5", os_empty, "18446744073709551611")
  (* regression: printf should rerun to print all arguments *)
  ; ("printf '%d %d' 1 2 3 4 5 6 7 8 9", os_empty, "1 23 45 67 89 0")
  (* kill *)
  ; ("echo hi & wait", os_empty, "hi\n")
  ; ("echo hi & kill $! ; wait", os_empty, "")
  ; ("(trap 'echo bye' SIGTERM ; echo hi) & wait", os_empty, "hi\n")
  (* this test doesn't work, because demand-driven scheduling means the trap
     is never installed before the signal arrives
  ; ("(trap 'echo bye' SIGTERM ; echo hi) & kill %1 ; wait", os_empty, "bye\n")
  *)
  ; ("(trap 'echo bye' EXIT) & wait", os_empty, "bye\n")
  ; ("trap 'echo bye' EXIT", os_empty, "bye\n")
  ; ("(trap 'echo bye' EXIT; echo hi) ; wait", os_empty, "hi\nbye\n")
  ; ("trap 'echo sig' SIGTERM; kill $$", os_empty, "sig\n")
  ; ("(trap 'echo hi; exit' TERM; while true; do echo loop; done) & :; :; kill $!", os_empty, "loop\nhi\n")
  (* getopts *)
  ; ("getopts ab opt -a -b -- c d ; " ^
     "echo $opt $OPTIND $?; " ^
     "getopts ab opt -a -b -- c d ; " ^
     "echo $opt $OPTIND $?; " ^
     "getopts ab opt -a -b -- c d ; " ^
     "echo $opt $OPTIND $?",
     os_empty,
     "a 2 0\nb 3 0\n? 4 1\n")
  ; ("getopts abc opt -caaa c d e ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
     "getopts abc opt -caaa c d e ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
     "getopts abc opt -caaa c d e ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
     "getopts abc opt -caaa c d e ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
     "getopts abc opt -caaa c d e ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
     os_empty,
     "opt=c OPTIND=2 OPTARG= ?=0\n" ^
     "opt=a OPTIND=2 OPTARG= ?=0\n" ^
     "opt=a OPTIND=2 OPTARG= ?=0\n" ^
     "opt=a OPTIND=2 OPTARG= ?=0\n" ^
     "opt=? OPTIND=2 OPTARG= ?=1\n")
  ; ("getopts a:b opt -a -b ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
     os_empty,
     "opt=a OPTIND=3 OPTARG=-b ?=0\n")
  ; ("getopts a:b opt -a -b ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
     "getopts a:b opt -a -b ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
     os_empty,
     "opt=a OPTIND=3 OPTARG=-b ?=0\n" ^
     "opt=? OPTIND=3 OPTARG= ?=1\n")
  ; ("getopts :a opt -b -a -c ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
     os_empty,
     "opt=? OPTIND=2 OPTARG=b ?=0\n")
  ; ("getopts :a opt -b -a -c ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
     "getopts :a opt -b -a -c ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
     os_empty,
     "opt=? OPTIND=2 OPTARG=b ?=0\n" ^
     "opt=a OPTIND=3 OPTARG= ?=0\n")
  ; ("getopts :a opt -b -a -c ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
     "getopts :a opt -b -a -c ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
     "getopts :a opt -b -a -c ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
     os_empty,
     "opt=? OPTIND=2 OPTARG=b ?=0\n" ^
     "opt=a OPTIND=3 OPTARG= ?=0\n" ^
     "opt=? OPTIND=4 OPTARG=c ?=0\n")
  ; ("getopts :a opt -b -a -c ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
     "getopts :a opt -b -a -c ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
     "getopts :a opt -b -a -c ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
     "getopts :a opt -b -a -c ; " ^
     "echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
     os_empty,
     "opt=? OPTIND=2 OPTARG=b ?=0\n" ^
     "opt=a OPTIND=3 OPTARG= ?=0\n" ^
     "opt=? OPTIND=4 OPTARG=c ?=0\n" ^
     "opt=? OPTIND=4 OPTARG= ?=1\n")
  (* set -e *)
  ; ("set -e; false; echo hi", os_empty, "")
  ; ("set -e; true; echo hi", os_empty, "hi\n")
  ; ("set -e; ! false; echo hi", os_empty, "hi\n")
  ; ("set -e; ! true; echo hi", os_empty, "hi\n")
  ; ("set -e; (false; echo one) | echo two; echo three", os_empty, "two\nthree\n")
  ; ("set -e; (false; echo one) ; echo two", os_empty, "")
  (* exit *)
  ; ("echo hi; exit; echo farewell", os_empty, "hi\n")
  ]
(***********************************************************************)
(* DRIVER **************************************************************)
(***********************************************************************)
(* Initialize the Dash parser, run every evaluation test suite (exit codes,
   then STDOUT), print a summary, and return true iff all tests passed. *)
let run_tests () =
  (* Counters shared with [test_part]; presumably incremented as each test
     runs — confirm against test_prelude. *)
  let failed = ref 0 in
  let test_count = ref 0 in
  print_endline "\n=== Initializing Dash parser...";
  Dash.initialize ();
  print_endline "=== Running evaluation tests...";
  test_part "Exit code" check_exit_code string_of_int exit_code_tests test_count failed;
  test_part "Output on STDOUT" check_stdout (fun s -> s) stdout_tests test_count failed;
  printf "=== ...ran %d evaluation tests with %d failures.\n\n" !test_count !failed;
  !failed = 0
| null | https://raw.githubusercontent.com/mgree/smoosh/84b1ff86f59573a2e4fd7e23edfa0cf9fdb45db9/src/test_evaluation.ml | ocaml | *********************************************************************
*********************************************************************
basic logic
expansion
exit
break
for loop with no args should exit 0
case cascades
pipes
unset
readonly
export
eval
function calls
$#
set
test
regression: support negative numbers
*********************************************************************
STDOUT TESTS *******************************************************
*********************************************************************
basic logic
regression: don't do pathname expansion on patterns
regression: support [a-zA-Z][a-zA-Z0-9_] as varnames
regression: correct handling of patterns
regression: correct positional param restore on function return
regression: shift shouldn't affect $0
regression: eval and set
regression: set --
subshells
shift
redirects and pipes
$* vs $@
e.g.s from -bash-command-line-args-vs/12316565
command types
umask
read
printf
regression: printf should rerun to print all arguments
kill
this test doesn't work, because demand-driven scheduling means the trap
is never installed before teh signal arrives
getopts
set -e
exit
*********************************************************************
DRIVER *************************************************************
********************************************************************* | open Test_prelude
open Smoosh
open Os_symbolic
open Path
open Printf
EXIT CODE TESTS * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
let run_cmd_for_exit_code (cmd : string) (os0 : symbolic os_state) : int =
let c = Shim.parse_string cmd in
let os1 = Semantics.symbolic_full_evaluation os0 c in
if out_of_fuel os1
then -1
else os1.sh.exit_code
let check_exit_code (cmd, state, expected) =
checker (run_cmd_for_exit_code cmd) (=) (cmd, state, expected)
let exit_code_tests : (string * symbolic os_state * int) list =
[ ("true", os_empty, 0)
; ("false", os_empty, 1)
; ("true && true", os_empty, 0)
; ("true && false", os_empty, 1)
; ("false && true", os_empty, 1)
; ("false || true", os_empty, 0)
; ("false ; true", os_empty, 0)
; ("true ; false", os_empty, 1)
; ("! true", os_empty, 1)
; ("! false", os_empty, 0)
; ("! { true ; false ; }", os_empty, 0)
; ("! { false ; true ; }", os_empty, 1)
; ("x=`false`", os_empty, 1)
; ("x=$(exit 5)", os_empty, 5)
; ("exit $(echo 3; exit 5)", os_empty, 3)
; ("x=5 ; echo ${x?erp}", os_empty, 0)
; ("echo ${x?erp}", os_empty, 1)
; ("for y in ${x?oh no}; do exit 5; done", os_empty, 1)
; ("x=5 ; for y in ${x?oh no}; do exit $y; done", os_empty, 5)
; ("case ${x?alas} in *) true;; esac", os_empty, 1)
; ("x=7 ; case ${x?alas} in *) exit $x;; esac", os_empty, 7)
; ("x=$(echo 5) ; exit $x", os_empty, 5)
; ("x=$(echo hello) ; case $x in *ell*) true;; *) false;; esac", os_empty, 0)
; ("exit", os_empty, 0)
; ("exit 2", os_empty, 2)
; ("false; exit", os_empty, 1)
; ("false; exit 2", os_empty, 2)
; ("exit 2; false", os_empty, 2)
; ("exit 2; exit 40", os_empty, 2)
; ("while true; do break; done", os_empty, 0)
; ("for x in; do exit 1; done", os_empty, 0)
; ("for x in \"\"; do exit 1; done", os_empty, 1)
; ("case abc in ab) true;; abc) false;; esac", os_empty, 1)
; ("case abc in ab|ab*) true;; abc) false;; esac", os_empty, 0)
; ("case abc in *) true;; abc) false;; esac", os_empty, 0)
; ("x=hello ; case $x in *el*) true;; *) false;; esac", os_empty, 0)
; ("case \"no one is home\" in esac", os_empty, 0)
; ("case Linux in Lin*) true;; *) false;; esac", os_empty, 0)
; ("false | true", os_empty, 0)
; ("true | false", os_empty, 1)
; ("true | exit 5", os_empty, 5)
; ("x=5 ; exit $x", os_empty, 5)
; ("x=5 ; unset x; exit $x", os_empty, 0)
; ("x=5 ; unset x; exit ${x-42}", os_empty, 42)
; ("f() { exit 3 ; } ; f", os_empty, 3)
; ("f() { exit 3 ; } ; unset f ; f", os_empty, 3)
; ("f() { exit 3 ; } ; unset -f f ; f", os_empty, 127)
; ("x=5 ; readonly x", os_empty, 0)
; ("x=5 ; readonly x ; readonly x=10", os_empty, 1)
; ("x=- ; readonly $x=derp", os_empty, 1)
; ("x=- ; export $x=derp", os_empty, 1)
; ("eval exit 0", os_empty, 0)
; ("eval exit 1", os_empty, 1)
; ("! ( eval exit 1 )", os_empty, 0)
; ("! eval exit 1", os_empty, 1)
; ("! eval exit 47", os_empty, 47)
; ("g() { exit 5 ; } ; h() { exit 6 ; } ; i() { $1 ; exit 7 ; } ; i g", os_empty, 5)
; ("g() { exit 5 ; } ; h() { exit 6 ; } ; i() { $1 ; exit 7 ; } ; i h", os_empty, 6)
; ("g() { exit 5 ; } ; h() { exit 6 ; } ; i() { $1 ; exit 7 ; } ; i :", os_empty, 7)
; ("f() { exit $# ; } ; f", os_empty, 0)
; ("f() { exit $# ; } ; f a", os_empty, 1)
; ("f() { exit $# ; } ; f a b", os_empty, 2)
; ("f() { exit $# ; } ; f a b c", os_empty, 3)
; ("f() { $@ ; } ; f exit 12", os_empty, 12)
; ("f() { $* ; } ; f exit 12", os_empty, 12)
; ("set -- a b c; exit $#", os_empty, 3)
; ("set -- ; exit $#", os_empty, 0)
; ("set -n ; exit 5", os_empty, 0)
; ("set -u ; echo $x", os_empty, 1)
; ("set -- a b c; set -u; exit $#", os_empty, 3)
; ("set -- a b c; set -u a b; exit $#", os_empty, 2)
; ("test hi = hi", os_empty, 0)
; ("test hi = bye", os_empty, 1)
; ("test hi != hi", os_empty, 1)
; ("test hi != bye", os_empty, 0)
; ("test", os_empty, 1)
; ("test hello", os_empty, 0)
; ("test \"\"", os_empty, 1)
; ("test -n hello", os_empty, 0)
; ("test -n \"\"", os_empty, 1)
; ("test -z \"\"", os_empty, 0)
; ("test -z hello", os_empty, 1)
; ("test 5 -eq 5", os_empty, 0)
; ("test 5 -ne 5", os_empty, 1)
; ("test 1 -lt 5", os_empty, 0)
; ("test 5 -lt 5", os_empty, 1)
; ("test 1 -le 1", os_empty, 0)
; ("test 1 -gt 5", os_empty, 1)
; ("test 5 -gt 5", os_empty, 1)
; ("test 1 -ge 1", os_empty, 0)
; ("test hi -ge 1", os_empty, 2)
; ("test \\( 5 -ne 5 \\) -o -n hello", os_empty, 0)
; ("test \\( 5 -eq 5 \\) -a -z \"\"", os_empty, 0)
; ("test 5 -eq 5 -a -z \"\"", os_empty, 0)
; ("test hi \\< hello", os_empty, 1)
; ("test lol \\< hello", os_empty, 1)
; ("test a \\< b", os_empty, 0)
; ("test -5 -eq $((0-5))", os_empty, 0)
; ("test -5 -eq $((5*5))", os_empty, 1)
; ("test $((0-5)) -eq -5", os_empty, 0)
; ("test $((5*5)) -eq -5", os_empty, 1)
]
let run_cmd_for_stdout (cmd : string) (os0 : symbolic os_state) : string =
let c = Shim.parse_string cmd in
let os1 = Semantics.symbolic_full_evaluation os0 c in
if out_of_fuel os1
then "!!! OUT OF FUEL"
else get_stdout os1
let check_stdout (cmd, state, expected) =
checker (run_cmd_for_stdout cmd) (=) (cmd, state, expected)
let stdout_tests : (string * symbolic os_state * string) list =
[ ("true", os_empty, "")
; ("false", os_empty, "")
; ("echo hi ; echo there", os_empty, "hi\nthere\n")
; ("echo -n hi ; echo there", os_empty, "hithere\n")
; ("echo -n \"hi \" ; echo there", os_empty, "hi there\n")
; ("x=${y:=1} ; echo $((x+=`echo 2`))", os_empty, "3\n")
regression :
see test_prelude.ml for fs details of why these are the outputs
see test_prelude.ml for fs details of why these are the outputs
*)
; ("echo *", os_complicated_fs, "a b c\n")
; ("echo \\*", os_complicated_fs, "*\n")
; ("x=\\* ; echo $x", os_complicated_fs, "a b c\n")
; ("x=\\* ; set -f ; echo $x ; set +f ; echo $x", os_complicated_fs, "*\na b c\n")
; ("x=\\* ; cd b ; echo $x", os_complicated_fs, "user\n")
; ("case hi\\\"there\\\" in *\\\"there\\\") echo matched;; *) echo did not;; esac", os_complicated_fs, "matched\n")
; ("case hi\\\"there\\\" in *\"there\") echo matched;; *) echo did not;; esac", os_complicated_fs, "did not\n")
; ("x='' ; case $x in \"\") echo e ;; *) echo nope ;; esac", os_empty, "e\n")
; ("case hi\\\"there\\\" in *\\\") echo m;; *) echo n;; esac", os_empty, "m\n")
; ("x=hello\\*there ; echo ${x#*\\*}", os_complicated_fs, "there\n")
; ("case Linux in Lin*) echo matched;; *) echo nope;; esac", os_empty, "matched\n")
; ("case Linux in *) echo matched;; esac", os_empty, "matched\n")
; ("case Linux in *) echo matched;; esac", os_complicated_fs, "matched\n")
; ("case Linux in *) echo matched;; esac", os_complicated_fs_in_a, "matched\n")
; ("echo []", os_complicated_fs, "[]\n")
; ("echo \"[]\"", os_complicated_fs, "[]\n")
; ("echo '[]'", os_complicated_fs, "[]\n")
; ("echo \\[]", os_complicated_fs, "[]\n")
; ("var_1=5 ; echo $((var_1 + 1))", os_empty, "6\n")
; ("_var1=5 ; echo $((_var1 * 2))", os_empty, "10\n")
; ("_=5 ; echo $((_ - 3))", os_empty, "2\n")
; ("_234=5 ; echo $((_234 % 4))", os_empty, "1\n")
; ("x=foo_47.bar ; echo ${x%%[!0-9]*}", os_empty, "\n")
; ("x=foo_47.bar ; echo ${x%%[!0-9]*}", os_complicated_fs, "\n")
; ("x=foo_47.bar ; echo ${x##[!0-9]*}", os_empty, "\n")
; ("x=foo_47.bar ; echo ${x##[!0-9]*}", os_complicated_fs, "\n")
; ("g() { set -- q ; } ; f() { echo $# [$*] ; g ; echo $# [$*] ; } ; f a b c",
os_empty,
"3 [a b c]\n3 [a b c]\n")
; ("echo $0 ; set -- a b c ; echo $0 ; shift ; echo $0 ; shift 2 ; echo $0",
os_empty,
"smoosh\nsmoosh\nsmoosh\nsmoosh\n")
; ("eval set -- 1 2 3 ; echo $#", os_empty, "3\n")
; ("eval set -- 1 2 3 ; echo $*", os_empty, "1 2 3\n")
; ("set -- 1 2 3; echo $#; set --; echo $#", os_empty, "3\n0\n")
; ("x=$(echo *) ; echo $x", os_complicated_fs, "a b c\n")
; ("x=$(echo hello there); echo $x", os_empty, "hello there\n")
; ("x=$(echo 5); echo $((x * x))", os_empty, "25\n")
; ("set -- a b c ; shift ; echo $#", os_empty, "2\n")
; ("set -- a b c ; shift 1 ; echo $#", os_empty, "2\n")
; ("set -- a b c ; shift 2 ; echo $# [$*]", os_empty, "1 [c]\n")
; ("set -- a b c ; shift 3 ; echo $# [$*]", os_empty, "0 []\n")
; ("set -- a b c ; shift 0 ; echo $# [$*]", os_empty, "3 [a b c]\n")
; ("set -- a b c ; shift 4 ; echo failed", os_empty, "")
; ("set -- a b c ; shift 4 ; echo failed", os_empty, "")
; ("( echo ${x?oops} ) 2>&1", os_empty, "x: oops\n")
; ("echo hi | echo no", os_empty, "no\n")
; ("echo ${y?oh no}", os_empty, "")
; ("exec 2>&1; echo ${y?oh no}", os_empty, "y: oh no\n")
; ("echo ${y?oh no}", os_empty, "")
; ("exec 1>&2; echo ${y?oh no}", os_empty, "")
; ("while true; do echo 5; done | echo done", os_empty, "done\n")
; ("while true; do echo 5; done | { read x; echo $((x + 42)) ; }", os_empty, "47\n")
; ("set -- 'arg 1' 'arg 2' 'arg 3' ; for x in $*; do echo \"$x\"; done",
os_empty,
"arg\n1\narg\n2\narg\n3\n")
; ("set -- 'arg 1' 'arg 2' 'arg 3' ; for x in $@; do echo \"$x\"; done",
os_empty,
"arg\n1\narg\n2\narg\n3\n")
; ("set -- 'arg 1' 'arg 2' 'arg 3' ; for x in \"$*\"; do echo \"$x\"; done",
os_empty,
"arg 1 arg 2 arg 3\n")
; ("set -- 'arg 1' 'arg 2' 'arg 3' ; for x in \"$@\"; do echo \"$x\"; done",
os_empty,
"arg 1\narg 2\narg 3\n")
; ("set -- 'arg 1' 'arg 2' 'arg 3' ; for x in \"$@\"; do echo $x; done",
os_empty,
"arg 1\narg 2\narg 3\n")
; ("type type", os_empty, "type is a shell builtin\n")
; ("type :", os_empty, ": is a special shell builtin\n")
; ("f() : ; type f", os_empty, "f is a shell function\n")
; ("type nonesuch", os_empty, "nonesuch: not found\n")
; ("pwd() echo florp ; pwd", os_empty, "florp\n")
; ("pwd() echo florp ; command pwd", os_empty, "/\n")
; ("pwd() echo florp ; command -p pwd", os_empty, "/\n")
; ("umask", os_empty, "0022\n")
; ("umask -S", os_empty, "u=rwx,g=rx,o=rx\n")
; ("umask 0044 ; umask", os_empty, "0044\n")
; ("umask 0044 ; umask -S", os_empty, "u=rwx,g=wx,o=wx\n")
; ("umask a= ; umask", os_empty, "0777\n")
; ("umask a= ; umask u+rwx ; umask", os_empty, "0077\n")
; ("umask a= ; umask u+rwx,go+rx ; umask", os_empty, "0022\n")
; ("umask u=o; umask", os_empty, "0222\n")
; ("umask u=g; umask", os_empty, "0222\n")
; ("umask g+u; umask", os_empty, "0002\n")
; ("echo 1 | read x; echo ${x-subshell}", os_empty, "subshell\n")
; ("echo 1 | { read x ; echo $x ; }", os_empty, "1\n")
; ("echo 1 2 | { read x y ; echo $x ; echo $y ; }", os_empty, "1\n2\n")
; ("printf", os_empty, "")
; ("printf \\\\n", os_empty, "\n")
; ("printf hi\\\\n%%\\\\tthere", os_empty, "hi\n%\tthere")
; ("printf oct%spus o", os_empty, "octopus")
; ("printf %s $((10 * 10))", os_empty, "100")
; ("printf %b 'hello\\n'", os_empty, "hello\n")
; ("printf %b \"hello\\n\"", os_empty, "hello\n")
; ("printf %b \"hello\\\\n\"", os_empty, "hello\n")
; ("printf %c oops", os_empty, "o")
; ("printf %.1s hello", os_empty, "h")
; ("printf %.0s hello", os_empty, "")
; ("printf %.s hello", os_empty, "")
; ("printf %d 0xf", os_empty, "15")
; ("printf %i 0xf", os_empty, "15")
; ("printf %x 16", os_empty, "10")
; ("printf %X 15", os_empty, "F")
; ("printf %#X 15", os_empty, "0XF")
; ("printf %#x 15", os_empty, "0xf")
; ("printf %04x 15", os_empty, "000f")
; ("printf %#04x 15", os_empty, "0x0f")
; ("printf %#05x 15", os_empty, "0x00f")
; ("printf %#5x 15", os_empty, " 0xf")
; ("printf %x -5", os_empty, "fffffffffffffffb")
; ("printf %u -5", os_empty, "18446744073709551611")
; ("printf '%d %d' 1 2 3 4 5 6 7 8 9", os_empty, "1 23 45 67 89 0")
; ("echo hi & wait", os_empty, "hi\n")
; ("echo hi & kill $! ; wait", os_empty, "")
; ("(trap 'echo bye' SIGTERM ; echo hi) & wait", os_empty, "hi\n")
; ( " ( trap ' echo bye ' SIGTERM ; echo hi ) & kill % 1 ; wait " , os_empty , " bye\n " )
; ("(trap 'echo bye' EXIT) & wait", os_empty, "bye\n")
; ("trap 'echo bye' EXIT", os_empty, "bye\n")
; ("(trap 'echo bye' EXIT; echo hi) ; wait", os_empty, "hi\nbye\n")
; ("trap 'echo sig' SIGTERM; kill $$", os_empty, "sig\n")
; ("(trap 'echo hi; exit' TERM; while true; do echo loop; done) & :; :; kill $!", os_empty, "loop\nhi\n")
; ("getopts ab opt -a -b -- c d ; " ^
"echo $opt $OPTIND $?; " ^
"getopts ab opt -a -b -- c d ; " ^
"echo $opt $OPTIND $?; " ^
"getopts ab opt -a -b -- c d ; " ^
"echo $opt $OPTIND $?",
os_empty,
"a 2 0\nb 3 0\n? 4 1\n")
; ("getopts abc opt -caaa c d e ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
"getopts abc opt -caaa c d e ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
"getopts abc opt -caaa c d e ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
"getopts abc opt -caaa c d e ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
"getopts abc opt -caaa c d e ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
os_empty,
"opt=c OPTIND=2 OPTARG= ?=0\n" ^
"opt=a OPTIND=2 OPTARG= ?=0\n" ^
"opt=a OPTIND=2 OPTARG= ?=0\n" ^
"opt=a OPTIND=2 OPTARG= ?=0\n" ^
"opt=? OPTIND=2 OPTARG= ?=1\n")
; ("getopts a:b opt -a -b ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
os_empty,
"opt=a OPTIND=3 OPTARG=-b ?=0\n")
; ("getopts a:b opt -a -b ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
"getopts a:b opt -a -b ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
os_empty,
"opt=a OPTIND=3 OPTARG=-b ?=0\n" ^
"opt=? OPTIND=3 OPTARG= ?=1\n")
; ("getopts :a opt -b -a -c ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
os_empty,
"opt=? OPTIND=2 OPTARG=b ?=0\n")
; ("getopts :a opt -b -a -c ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
"getopts :a opt -b -a -c ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
os_empty,
"opt=? OPTIND=2 OPTARG=b ?=0\n" ^
"opt=a OPTIND=3 OPTARG= ?=0\n")
; ("getopts :a opt -b -a -c ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
"getopts :a opt -b -a -c ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
"getopts :a opt -b -a -c ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
os_empty,
"opt=? OPTIND=2 OPTARG=b ?=0\n" ^
"opt=a OPTIND=3 OPTARG= ?=0\n" ^
"opt=? OPTIND=4 OPTARG=c ?=0\n")
; ("getopts :a opt -b -a -c ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
"getopts :a opt -b -a -c ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
"getopts :a opt -b -a -c ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; " ^
"getopts :a opt -b -a -c ; " ^
"echo opt=$opt OPTIND=$OPTIND OPTARG=$OPTARG ?=$? ; ",
os_empty,
"opt=? OPTIND=2 OPTARG=b ?=0\n" ^
"opt=a OPTIND=3 OPTARG= ?=0\n" ^
"opt=? OPTIND=4 OPTARG=c ?=0\n" ^
"opt=? OPTIND=4 OPTARG= ?=1\n")
; ("set -e; false; echo hi", os_empty, "")
; ("set -e; true; echo hi", os_empty, "hi\n")
; ("set -e; ! false; echo hi", os_empty, "hi\n")
; ("set -e; ! true; echo hi", os_empty, "hi\n")
; ("set -e; (false; echo one) | echo two; echo three", os_empty, "two\nthree\n")
; ("set -e; (false; echo one) ; echo two", os_empty, "")
; ("echo hi; exit; echo farewell", os_empty, "hi\n")
]
let run_tests () =
let failed = ref 0 in
let test_count = ref 0 in
print_endline "\n=== Initializing Dash parser...";
Dash.initialize ();
print_endline "=== Running evaluation tests...";
test_part "Exit code" check_exit_code string_of_int exit_code_tests test_count failed;
test_part "Output on STDOUT" check_stdout (fun s -> s) stdout_tests test_count failed;
printf "=== ...ran %d evaluation tests with %d failures.\n\n" !test_count !failed;
!failed = 0
|
fedf404927ddc3ced8a557e6cfc1bcfb3addbd287137285a4d7b0862c4efdbab | calyau/maxima | fsub.lisp | ;;; Compiled by f2cl version:
( " f2cl1.l , v 1.221 2010/05/26 19:25:52 "
" f2cl2.l , v 1.37 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl3.l , v 1.6 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl4.l , v 1.7 2008/02/22 22:19:34 rtoy Exp $ "
" f2cl5.l , v 1.204 2010/02/23 05:21:30 "
" f2cl6.l , v 1.48 2008/08/24 00:56:27 rtoy Exp $ "
" macros.l , v 1.114 2010/05/17 01:42:14 " )
Using Lisp CMU Common Lisp CVS Head 2010 - 05 - 25 18:21:07 ( 20A Unicode )
;;;
;;; Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
;;; (:coerce-assigns :as-needed) (:array-type ':array)
;;; (:array-slicing t) (:declare-common nil)
;;; (:float-format double-float))
(in-package :colnew)
(defun fsub (x z f)
(declare (type (array double-float (*)) f)
(type (array double-float (*)) z)
(type (double-float) x))
(f2cl-lib:with-multi-array-data
((z double-float z-%data% z-%offset%)
(f double-float f-%data% f-%offset%))
(prog ()
(declare)
(setf (f2cl-lib:fref f-%data% (1) ((1 1)) f-%offset%)
(/
(+ 1.0f0
(* -6.0f0
(expt x 2)
(f2cl-lib:fref z-%data% (4) ((1 4)) z-%offset%))
(* -6.0f0 x (f2cl-lib:fref z-%data% (3) ((1 4)) z-%offset%)))
(expt x 3)))
(go end_label)
end_label
(return (values nil nil nil)))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::fsub fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((double-float) (array double-float (4))
(array double-float (1)))
:return-values '(nil nil nil)
:calls 'nil)))
| null | https://raw.githubusercontent.com/calyau/maxima/9352a3f5c22b9b5d0b367fddeb0185c53d7f4d02/share/colnew/ex1/fsub.lisp | lisp | Compiled by f2cl version:
Options: ((:prune-labels nil) (:auto-save t) (:relaxed-array-decls t)
(:coerce-assigns :as-needed) (:array-type ':array)
(:array-slicing t) (:declare-common nil)
(:float-format double-float)) | ( " f2cl1.l , v 1.221 2010/05/26 19:25:52 "
" f2cl2.l , v 1.37 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl3.l , v 1.6 2008/02/22 22:19:33 rtoy Exp $ "
" f2cl4.l , v 1.7 2008/02/22 22:19:34 rtoy Exp $ "
" f2cl5.l , v 1.204 2010/02/23 05:21:30 "
" f2cl6.l , v 1.48 2008/08/24 00:56:27 rtoy Exp $ "
" macros.l , v 1.114 2010/05/17 01:42:14 " )
Using Lisp CMU Common Lisp CVS Head 2010 - 05 - 25 18:21:07 ( 20A Unicode )
(in-package :colnew)
(defun fsub (x z f)
(declare (type (array double-float (*)) f)
(type (array double-float (*)) z)
(type (double-float) x))
(f2cl-lib:with-multi-array-data
((z double-float z-%data% z-%offset%)
(f double-float f-%data% f-%offset%))
(prog ()
(declare)
(setf (f2cl-lib:fref f-%data% (1) ((1 1)) f-%offset%)
(/
(+ 1.0f0
(* -6.0f0
(expt x 2)
(f2cl-lib:fref z-%data% (4) ((1 4)) z-%offset%))
(* -6.0f0 x (f2cl-lib:fref z-%data% (3) ((1 4)) z-%offset%)))
(expt x 3)))
(go end_label)
end_label
(return (values nil nil nil)))))
(in-package #-gcl #:cl-user #+gcl "CL-USER")
#+#.(cl:if (cl:find-package '#:f2cl) '(and) '(or))
(eval-when (:load-toplevel :compile-toplevel :execute)
(setf (gethash 'fortran-to-lisp::fsub fortran-to-lisp::*f2cl-function-info*)
(fortran-to-lisp::make-f2cl-finfo
:arg-types '((double-float) (array double-float (4))
(array double-float (1)))
:return-values '(nil nil nil)
:calls 'nil)))
|
75ad0dbb274381ab1a2a3e86ef3f68155e962054286dfdf37817e902164c4f21 | jgm/gitit2 | Cache.hs | module Network.Gitit2.Cache
where
import Blaze.ByteString.Builder (toLazyByteString)
import Control.Applicative ((<$>))
import Control.Monad (filterM, unless, when)
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.ByteString.UTF8 as BSU
import Data.Time (diffUTCTime, getCurrentTime)
import Network.Gitit2.Foundation (GH, cache_dir, feed_minutes, use_cache)
import Network.Gitit2.Helper (getConfig)
import Network.HTTP (urlDecode, urlEncode)
import System.Directory (createDirectoryIfMissing, doesDirectoryExist, doesFileExist,
getDirectoryContents, getModificationTime, removeDirectoryRecursive)
import System.FilePath ((</>), takeDirectory)
import Yesod (Content(ContentBuilder), liftIO, sendFile, toTypedContent, ToTypedContent, TypedContent(TypedContent))
tryCache :: FilePath -> GH master ()
tryCache path = do
conf <- getConfig
when (use_cache conf) $
do
let fullpath = cache_dir conf </> path
exists <- liftIO $ doesDirectoryExist fullpath
when exists $
do
files <- liftIO $ getDirectoryContents fullpath >>=
filterM (doesFileExist . (fullpath </>))
case files of
(x:_) -> do
let ct = BSU.fromString $ urlDecode x
sendFile ct $ fullpath </> x
_ -> return ()
caching :: ToTypedContent a
=> FilePath -> GH master a -> GH master a
caching path handler = do
conf <- getConfig
if use_cache conf
then do
result <- handler
cacheContent path $ toTypedContent result
return result
else handler
cacheContent :: FilePath -> TypedContent -> GH master ()
cacheContent path (TypedContent ct content) = do
conf <- getConfig
when (use_cache conf) $
case content of
ContentBuilder builder _ -> liftIO $ do
let fullpath = cache_dir conf </> path </> urlEncode (BSU.toString ct)
createDirectoryIfMissing True $ takeDirectory fullpath
B.writeFile fullpath $ toLazyByteString builder
_ -> liftIO $
TODO replace w logging
putStrLn $ "Can't cache " ++ path
expireCache :: FilePath -> GH master ()
expireCache path = do
conf <- getConfig
expireFeed (feed_minutes conf) (path </> "_feed")
expireFeed (feed_minutes conf) "_feed"
expireCategories
cachedir <- cache_dir <$> getConfig
let fullpath = cachedir </> path
liftIO $ do
exists <- doesDirectoryExist fullpath
when exists $ removeDirectoryRecursive fullpath
expireCategories :: GH master ()
expireCategories = do
cachedir <- cache_dir <$> getConfig
let fullpath = cachedir </> "_categories"
liftIO $ do
exists <- doesDirectoryExist fullpath
when exists $ removeDirectoryRecursive fullpath
-- | Expire the cached feed unless it is younger than 'minutes' old.
expireFeed :: Integer -> FilePath -> GH master ()
expireFeed minutes path = do
cachedir <- cache_dir <$> getConfig
let fullpath = cachedir </> path
liftIO $ do
exists <- doesDirectoryExist fullpath
when exists $ do
seconds <- getModificationTime fullpath
seconds' <- getCurrentTime
unless (diffUTCTime seconds' seconds < realToFrac (minutes * 60))
$ removeDirectoryRecursive fullpath
| null | https://raw.githubusercontent.com/jgm/gitit2/db61eae0e17c356830979747e2899ac5eaad03a5/Network/Gitit2/Cache.hs | haskell | | Expire the cached feed unless it is younger than 'minutes' old. | module Network.Gitit2.Cache
where
import Blaze.ByteString.Builder (toLazyByteString)
import Control.Applicative ((<$>))
import Control.Monad (filterM, unless, when)
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.ByteString.UTF8 as BSU
import Data.Time (diffUTCTime, getCurrentTime)
import Network.Gitit2.Foundation (GH, cache_dir, feed_minutes, use_cache)
import Network.Gitit2.Helper (getConfig)
import Network.HTTP (urlDecode, urlEncode)
import System.Directory (createDirectoryIfMissing, doesDirectoryExist, doesFileExist,
getDirectoryContents, getModificationTime, removeDirectoryRecursive)
import System.FilePath ((</>), takeDirectory)
import Yesod (Content(ContentBuilder), liftIO, sendFile, toTypedContent, ToTypedContent, TypedContent(TypedContent))
tryCache :: FilePath -> GH master ()
tryCache path = do
conf <- getConfig
when (use_cache conf) $
do
let fullpath = cache_dir conf </> path
exists <- liftIO $ doesDirectoryExist fullpath
when exists $
do
files <- liftIO $ getDirectoryContents fullpath >>=
filterM (doesFileExist . (fullpath </>))
case files of
(x:_) -> do
let ct = BSU.fromString $ urlDecode x
sendFile ct $ fullpath </> x
_ -> return ()
caching :: ToTypedContent a
=> FilePath -> GH master a -> GH master a
caching path handler = do
conf <- getConfig
if use_cache conf
then do
result <- handler
cacheContent path $ toTypedContent result
return result
else handler
cacheContent :: FilePath -> TypedContent -> GH master ()
cacheContent path (TypedContent ct content) = do
conf <- getConfig
when (use_cache conf) $
case content of
ContentBuilder builder _ -> liftIO $ do
let fullpath = cache_dir conf </> path </> urlEncode (BSU.toString ct)
createDirectoryIfMissing True $ takeDirectory fullpath
B.writeFile fullpath $ toLazyByteString builder
_ -> liftIO $
TODO replace w logging
putStrLn $ "Can't cache " ++ path
expireCache :: FilePath -> GH master ()
expireCache path = do
conf <- getConfig
expireFeed (feed_minutes conf) (path </> "_feed")
expireFeed (feed_minutes conf) "_feed"
expireCategories
cachedir <- cache_dir <$> getConfig
let fullpath = cachedir </> path
liftIO $ do
exists <- doesDirectoryExist fullpath
when exists $ removeDirectoryRecursive fullpath
expireCategories :: GH master ()
expireCategories = do
cachedir <- cache_dir <$> getConfig
let fullpath = cachedir </> "_categories"
liftIO $ do
exists <- doesDirectoryExist fullpath
when exists $ removeDirectoryRecursive fullpath
expireFeed :: Integer -> FilePath -> GH master ()
expireFeed minutes path = do
cachedir <- cache_dir <$> getConfig
let fullpath = cachedir </> path
liftIO $ do
exists <- doesDirectoryExist fullpath
when exists $ do
seconds <- getModificationTime fullpath
seconds' <- getCurrentTime
unless (diffUTCTime seconds' seconds < realToFrac (minutes * 60))
$ removeDirectoryRecursive fullpath
|
ad766f6fc91cb9db193db7e4c4b957785a8be12c6e6c536f91c849964f968c10 | unclebob/more-speech | article_panel.clj | (ns more-speech.ui.swing.article-panel
(:require [more-speech.user-configuration :as uconfig]
[more-speech.nostr.events :as events]
[more-speech.nostr.util :as util]
[more-speech.ui.formatters :as formatters]
[more-speech.ui.formatter-util :as f-util]
[more-speech.ui.swing.article-panel-util :as article-panel-util]
[more-speech.ui.swing.edit-window :as edit-window]
[more-speech.mem :refer :all]
[more-speech.ui.swing.util :as swing-util :refer [copy-to-clipboard]]
[more-speech.db.gateway :as gateway]
[more-speech.config :refer [get-db]]
[more-speech.nostr.event-composers :as composers]
[more-speech.config :as config])
(:use [seesaw core border]))
(defn bold-label [s]
(label :text s :font (uconfig/get-bold-font)))
(defn copy-click [e]
(when (.isPopupTrigger e)
(let [x (.x (.getPoint e))
y (.y (.getPoint e))
node (.getComponent e)
hex-id (util/hexify (config node :user-data))
p (popup :items [(action :name "Copy"
:handler (partial copy-to-clipboard hex-id))])]
(.show p (to-widget e) x y))))
(defn id-click [e]
(if (.isPopupTrigger e)
(copy-click e)
(swing-util/select-event (config e :user-data))))
(defn reaction-click [polarity]
(let [frame (get-mem :frame)
up-arrow (select frame [:#up-arrow])
dn-arrow (select frame [:#dn-arrow])
event-id (get-mem :selected-event)
event (gateway/get-event (get-db) event-id)]
(when (not= (text up-arrow) " ")
(composers/compose-and-send-reaction-event event polarity))
(text! up-arrow " ")
(text! dn-arrow " ")))
(defn up-click [_e]
(reaction-click "+"))
(defn dn-click [_e]
(reaction-click "-"))
(defn make-article-info-panel []
(let [author-name-label (label :id :author-name-label)
label-font (uconfig/get-small-font)
author-id-label (text :id :author-id-label :editable? false :font label-font)
created-time-label (label :id :created-time-label)
reactions-popup (popup :enabled? false)
reactions-label (label :id :reactions-count :user-data reactions-popup)
reply-to-label (label :id :reply-to-label)
id-label (text :id :id-label :editable? false :font label-font)
citing-label (text :id :citing-label :editable? false :font label-font)
subject-label (label :id :subject-label :font label-font)
root-label (text :id :root-label :editable? false :font label-font)
relays-popup (popup :enabled? false)
relays-label (label :id :relays-label :user-data relays-popup)
up-arrow (label :text " " :id :up-arrow :font (uconfig/get-bold-font))
dn-arrow (label :text " " :id :dn-arrow :font (uconfig/get-bold-font))]
(listen relays-label
:mouse-entered (fn [e]
(-> relays-popup
(move! :to (.getLocationOnScreen e))
show!))
:mouse-exited (fn [_e] (hide! relays-popup)))
(listen reactions-label
:mouse-entered (fn [e]
(-> reactions-popup
(move! :to (.getLocationOnScreen e))
show!))
:mouse-exited (fn [_e] (hide! reactions-popup)))
(listen citing-label :mouse-pressed id-click)
(listen root-label :mouse-pressed id-click)
(listen id-label :mouse-pressed copy-click)
(listen author-id-label :mouse-pressed copy-click)
(listen up-arrow :mouse-pressed up-click)
(listen dn-arrow :mouse-pressed dn-click)
(let [grid
(grid-panel
:columns 3
:preferred-size [-1 :by 70] ;icky.
:items [
(flow-panel :align :left :items [up-arrow
(bold-label "Author:") author-name-label
(bold-label "Reactions:") reactions-label])
(flow-panel :align :left :items [(bold-label "Subject:") subject-label])
(flow-panel :align :left :items [(bold-label "pubkey:") author-id-label])
(flow-panel :align :left :items [(bold-label "Created at:") created-time-label])
(flow-panel :align :left :items [(bold-label "Reply to:") reply-to-label])
(flow-panel :align :left :items [(bold-label "Relays:") relays-label])
(flow-panel :align :left :items [dn-arrow (bold-label "id:") id-label])
(flow-panel :align :left :items [(bold-label "Citing:") citing-label])
(flow-panel :align :left :items [(bold-label "Root:") root-label])])]
grid)))
(def editor-pane-stylesheet
"<style>
font - style : normal ; font - size : 14 ; font - weight : lighter ; }
a {color: #6495ED; text-decoration: none;}</style>")
(defn make-article-area []
(editor-pane
:content-type "text/html"
:editable? false
:id :article-area
:text editor-pane-stylesheet))
(defn go-back [_e]
(article-panel-util/go-back-by 1))
(defn go-forward [_e]
(article-panel-util/go-back-by -1))
(defn make-control-panel []
(let [reply-button (button :text "Reply")
create-button (button :text "Create")
back-button (button :text "Back")
forward-button (button :text "Forward")]
(listen reply-button :action
(fn [_]
(edit-window/make-edit-window :reply)))
(listen create-button :action
(fn [_] (edit-window/make-edit-window :send)))
(listen back-button :action go-back)
(listen forward-button :action go-forward)
(border-panel :west back-button
:east forward-button
:center (flow-panel :items [reply-button create-button]))))
(defn has-my-reaction? [event]
(let [me (get-mem :pubkey)
reactions (:reactions event)]
(some #(= me (first %)) reactions)))
(defn reaction-items [reactions]
(loop [reactions reactions
items [""]]
(if (empty? reactions)
items
(let [[id content] (first reactions)
name (formatters/format-user-id id 50)]
(recur (rest reactions) (conj items (str content " " name)))))))
(defn load-article-info [selected-id]
(let [main-frame (get-mem :frame)
event (gateway/get-event (get-db) selected-id)
[root-id _ referent] (events/get-references event)
reply-to (select main-frame [:#reply-to-label])
citing (select main-frame [:#citing-label])
root-label (select main-frame [:#root-label])
relays-label (select main-frame [:#relays-label])
relays-popup (config relays-label :user-data)
article-area (select main-frame [:#article-area])
subject-label (select main-frame [:#subject-label])
up-arrow (select main-frame [:#up-arrow])
dn-arrow (select main-frame [:#dn-arrow])
reacted? (has-my-reaction? event)
reactions (count (:reactions event))
reactions-label (select main-frame [:#reactions-count])
reactions-popup (config reactions-label :user-data)
relay-names (map #(re-find config/relay-pattern %) (:relays event))
author-id (select main-frame [:#author-id-label])
event-id (select main-frame [:#id-label])]
(text! reactions-label (str reactions))
(if reacted?
(do
(text! up-arrow " ")
(text! dn-arrow " "))
(do
(text! up-arrow "⬆")
(text! dn-arrow "⬇")))
(swing-util/clear-popup relays-popup)
(swing-util/clear-popup reactions-popup)
(config! relays-popup :items relay-names)
(config! reactions-popup :items (reaction-items (:reactions event)))
(text! article-area (formatters/reformat-article
(formatters/replace-references event)))
(text! (select main-frame [:#author-name-label])
(formatters/format-user-id (:pubkey event) 50))
(text! (select main-frame [:#created-time-label])
(f-util/format-time (:created-at event)))
(config! author-id :user-data (:pubkey event)
:text (f-util/abbreviate (util/num32->hex-string (:pubkey event)) 20))
(config! event-id
:user-data (:id event)
:text (f-util/abbreviate (util/num32->hex-string (:id event)) 20))
(if (some? referent)
(let [replied-event (gateway/get-event (get-db) referent)]
(text! reply-to (formatters/format-user-id (:pubkey replied-event) 50))
(config! citing
:user-data referent
:text (f-util/abbreviate (util/num32->hex-string referent) 20)))
(do (text! reply-to "")
(text! citing "")))
(if (some? root-id)
(config! root-label
:user-data root-id
:text (f-util/abbreviate (util/num32->hex-string root-id) 20))
(text! root-label ""))
(text! subject-label (formatters/get-subject (:tags event)))
(text! relays-label (format "%d %s"
(count relay-names)
(f-util/abbreviate (first relay-names) 40))))) | null | https://raw.githubusercontent.com/unclebob/more-speech/a8db802e43a36d359eb46deef41f7edbcda7724c/src/more_speech/ui/swing/article_panel.clj | clojure | icky.
font - size : 14 ; font - weight : lighter ; }
text-decoration: none;}</style>") | (ns more-speech.ui.swing.article-panel
(:require [more-speech.user-configuration :as uconfig]
[more-speech.nostr.events :as events]
[more-speech.nostr.util :as util]
[more-speech.ui.formatters :as formatters]
[more-speech.ui.formatter-util :as f-util]
[more-speech.ui.swing.article-panel-util :as article-panel-util]
[more-speech.ui.swing.edit-window :as edit-window]
[more-speech.mem :refer :all]
[more-speech.ui.swing.util :as swing-util :refer [copy-to-clipboard]]
[more-speech.db.gateway :as gateway]
[more-speech.config :refer [get-db]]
[more-speech.nostr.event-composers :as composers]
[more-speech.config :as config])
(:use [seesaw core border]))
(defn bold-label [s]
(label :text s :font (uconfig/get-bold-font)))
(defn copy-click [e]
(when (.isPopupTrigger e)
(let [x (.x (.getPoint e))
y (.y (.getPoint e))
node (.getComponent e)
hex-id (util/hexify (config node :user-data))
p (popup :items [(action :name "Copy"
:handler (partial copy-to-clipboard hex-id))])]
(.show p (to-widget e) x y))))
(defn id-click [e]
(if (.isPopupTrigger e)
(copy-click e)
(swing-util/select-event (config e :user-data))))
(defn reaction-click [polarity]
(let [frame (get-mem :frame)
up-arrow (select frame [:#up-arrow])
dn-arrow (select frame [:#dn-arrow])
event-id (get-mem :selected-event)
event (gateway/get-event (get-db) event-id)]
(when (not= (text up-arrow) " ")
(composers/compose-and-send-reaction-event event polarity))
(text! up-arrow " ")
(text! dn-arrow " ")))
(defn up-click [_e]
(reaction-click "+"))
(defn dn-click [_e]
(reaction-click "-"))
(defn make-article-info-panel []
(let [author-name-label (label :id :author-name-label)
label-font (uconfig/get-small-font)
author-id-label (text :id :author-id-label :editable? false :font label-font)
created-time-label (label :id :created-time-label)
reactions-popup (popup :enabled? false)
reactions-label (label :id :reactions-count :user-data reactions-popup)
reply-to-label (label :id :reply-to-label)
id-label (text :id :id-label :editable? false :font label-font)
citing-label (text :id :citing-label :editable? false :font label-font)
subject-label (label :id :subject-label :font label-font)
root-label (text :id :root-label :editable? false :font label-font)
relays-popup (popup :enabled? false)
relays-label (label :id :relays-label :user-data relays-popup)
up-arrow (label :text " " :id :up-arrow :font (uconfig/get-bold-font))
dn-arrow (label :text " " :id :dn-arrow :font (uconfig/get-bold-font))]
(listen relays-label
:mouse-entered (fn [e]
(-> relays-popup
(move! :to (.getLocationOnScreen e))
show!))
:mouse-exited (fn [_e] (hide! relays-popup)))
(listen reactions-label
:mouse-entered (fn [e]
(-> reactions-popup
(move! :to (.getLocationOnScreen e))
show!))
:mouse-exited (fn [_e] (hide! reactions-popup)))
(listen citing-label :mouse-pressed id-click)
(listen root-label :mouse-pressed id-click)
(listen id-label :mouse-pressed copy-click)
(listen author-id-label :mouse-pressed copy-click)
(listen up-arrow :mouse-pressed up-click)
(listen dn-arrow :mouse-pressed dn-click)
(let [grid
(grid-panel
:columns 3
:items [
(flow-panel :align :left :items [up-arrow
(bold-label "Author:") author-name-label
(bold-label "Reactions:") reactions-label])
(flow-panel :align :left :items [(bold-label "Subject:") subject-label])
(flow-panel :align :left :items [(bold-label "pubkey:") author-id-label])
(flow-panel :align :left :items [(bold-label "Created at:") created-time-label])
(flow-panel :align :left :items [(bold-label "Reply to:") reply-to-label])
(flow-panel :align :left :items [(bold-label "Relays:") relays-label])
(flow-panel :align :left :items [dn-arrow (bold-label "id:") id-label])
(flow-panel :align :left :items [(bold-label "Citing:") citing-label])
(flow-panel :align :left :items [(bold-label "Root:") root-label])])]
grid)))
(def editor-pane-stylesheet
"<style>
(defn make-article-area []
(editor-pane
:content-type "text/html"
:editable? false
:id :article-area
:text editor-pane-stylesheet))
(defn go-back [_e]
(article-panel-util/go-back-by 1))
(defn go-forward [_e]
(article-panel-util/go-back-by -1))
(defn make-control-panel []
(let [reply-button (button :text "Reply")
create-button (button :text "Create")
back-button (button :text "Back")
forward-button (button :text "Forward")]
(listen reply-button :action
(fn [_]
(edit-window/make-edit-window :reply)))
(listen create-button :action
(fn [_] (edit-window/make-edit-window :send)))
(listen back-button :action go-back)
(listen forward-button :action go-forward)
(border-panel :west back-button
:east forward-button
:center (flow-panel :items [reply-button create-button]))))
(defn has-my-reaction? [event]
(let [me (get-mem :pubkey)
reactions (:reactions event)]
(some #(= me (first %)) reactions)))
(defn reaction-items [reactions]
(loop [reactions reactions
items [""]]
(if (empty? reactions)
items
(let [[id content] (first reactions)
name (formatters/format-user-id id 50)]
(recur (rest reactions) (conj items (str content " " name)))))))
(defn load-article-info [selected-id]
(let [main-frame (get-mem :frame)
event (gateway/get-event (get-db) selected-id)
[root-id _ referent] (events/get-references event)
reply-to (select main-frame [:#reply-to-label])
citing (select main-frame [:#citing-label])
root-label (select main-frame [:#root-label])
relays-label (select main-frame [:#relays-label])
relays-popup (config relays-label :user-data)
article-area (select main-frame [:#article-area])
subject-label (select main-frame [:#subject-label])
up-arrow (select main-frame [:#up-arrow])
dn-arrow (select main-frame [:#dn-arrow])
reacted? (has-my-reaction? event)
reactions (count (:reactions event))
reactions-label (select main-frame [:#reactions-count])
reactions-popup (config reactions-label :user-data)
relay-names (map #(re-find config/relay-pattern %) (:relays event))
author-id (select main-frame [:#author-id-label])
event-id (select main-frame [:#id-label])]
(text! reactions-label (str reactions))
(if reacted?
(do
(text! up-arrow " ")
(text! dn-arrow " "))
(do
(text! up-arrow "⬆")
(text! dn-arrow "⬇")))
(swing-util/clear-popup relays-popup)
(swing-util/clear-popup reactions-popup)
(config! relays-popup :items relay-names)
(config! reactions-popup :items (reaction-items (:reactions event)))
(text! article-area (formatters/reformat-article
(formatters/replace-references event)))
(text! (select main-frame [:#author-name-label])
(formatters/format-user-id (:pubkey event) 50))
(text! (select main-frame [:#created-time-label])
(f-util/format-time (:created-at event)))
(config! author-id :user-data (:pubkey event)
:text (f-util/abbreviate (util/num32->hex-string (:pubkey event)) 20))
(config! event-id
:user-data (:id event)
:text (f-util/abbreviate (util/num32->hex-string (:id event)) 20))
(if (some? referent)
(let [replied-event (gateway/get-event (get-db) referent)]
(text! reply-to (formatters/format-user-id (:pubkey replied-event) 50))
(config! citing
:user-data referent
:text (f-util/abbreviate (util/num32->hex-string referent) 20)))
(do (text! reply-to "")
(text! citing "")))
(if (some? root-id)
(config! root-label
:user-data root-id
:text (f-util/abbreviate (util/num32->hex-string root-id) 20))
(text! root-label ""))
(text! subject-label (formatters/get-subject (:tags event)))
(text! relays-label (format "%d %s"
(count relay-names)
(f-util/abbreviate (first relay-names) 40))))) |
6c9e7c6a3dff2edc2d362846e78f461a34962e741c53ce5b46d0a12368f0a629 | jacius/lispbuilder | image-example.lisp | Demonstration / Test of using SDL ( Simple Media Layer ) library
using CFFI for foreign function interfacing ...
( C)2006 Justin Heyes - Jones , .
;;;; see COPYING for license
(in-package #:sdl-image-examples)
(defun image-example ()
(sdl:load-library)
(sdl:with-init ()
(sdl:window 540 350 :title-caption "Loading images of various formats." :icon-caption "IMAGE-EXAMPLE")
(setf (sdl:frame-rate) 30)
(sdl:initialise-default-font)
allows the jpg , png and
;; libraries to be preloaded to speed subsequent
;; loading of these images.
(sdl-image:init-image :jpg :png :tif)
(let ((images (list
(sdl:draw-string-solid-* "BMP" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.bmp" *bmp-path*)
:color-key-at #(0 0)))
(sdl:draw-string-solid-* "GIF" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.gif" *bmp-path*)
:color-key-at #(0 0)))
(sdl:draw-string-solid-* "LBM" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.lbm" *bmp-path*)
:color-key-at #(0 0)))
(sdl:draw-string-solid-* "PCX" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.pcx" *bmp-path*)
:color-key-at #(0 0)))
(sdl:draw-string-solid-* "PBM" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.pbm" *bmp-path*)))
(sdl:draw-string-solid-* "PPM" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.ppm" *bmp-path*)
:color-key-at #(0 0)))
(sdl:draw-string-solid-* "PGM" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.pgm" *bmp-path*)
:color-key-at #(0 0)))
(sdl:draw-string-solid-* "TGA" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.tga" *bmp-path*)
TGA must be specified
:force t ; And loaded by force
:color-key-at #(0 0)))
allow testing
for png / jpg / tif support prior to loading the image .
(when (sdl-image:image-init-p :png)
(sdl:draw-string-solid-* "PNG" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.png" *bmp-path*)
:color-key-at #(0 0))))
(when (sdl-image:image-init-p :tif)
(sdl:draw-string-solid-* "TIF" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.tif" *bmp-path*)
:color-key-at #(0 0))))
(when (sdl-image:image-init-p :jpg)
(sdl:draw-string-solid-* "JPG" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.jpg" *bmp-path*)
:color-key-at #(0 0))))
)))
(loop for image in (remove nil images)
for i from 0
for (y x) = (multiple-value-list (floor i 4))
for position = (sdl:point :x (+ 10 (* x 128))
:y (+ 10 (* y 111)))
do (sdl:draw-surface-at image position)))
(sdl:update-display)
(sdl:with-events ()
(:quit-event ()
requires a corresponding
;; sdl-image:quit *if* sdl-image:init is used.
(sdl-image:quit-image)
t)
(:key-down-event (:key key)
(if (sdl:key= key :SDL-KEY-ESCAPE)
(sdl:push-quit-event)))
(:video-expose-event () (sdl:update-display)))))
| null | https://raw.githubusercontent.com/jacius/lispbuilder/e693651b95f6818e3cab70f0074af9f9511584c3/lispbuilder-sdl-image/examples/image-example.lisp | lisp | see COPYING for license
libraries to be preloaded to speed subsequent
loading of these images.
And loaded by force
sdl-image:quit *if* sdl-image:init is used. | Demonstration / Test of using SDL ( Simple Media Layer ) library
using CFFI for foreign function interfacing ...
( C)2006 Justin Heyes - Jones , .
(in-package #:sdl-image-examples)
(defun image-example ()
(sdl:load-library)
(sdl:with-init ()
(sdl:window 540 350 :title-caption "Loading images of various formats." :icon-caption "IMAGE-EXAMPLE")
(setf (sdl:frame-rate) 30)
(sdl:initialise-default-font)
allows the jpg , png and
(sdl-image:init-image :jpg :png :tif)
(let ((images (list
(sdl:draw-string-solid-* "BMP" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.bmp" *bmp-path*)
:color-key-at #(0 0)))
(sdl:draw-string-solid-* "GIF" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.gif" *bmp-path*)
:color-key-at #(0 0)))
(sdl:draw-string-solid-* "LBM" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.lbm" *bmp-path*)
:color-key-at #(0 0)))
(sdl:draw-string-solid-* "PCX" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.pcx" *bmp-path*)
:color-key-at #(0 0)))
(sdl:draw-string-solid-* "PBM" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.pbm" *bmp-path*)))
(sdl:draw-string-solid-* "PPM" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.ppm" *bmp-path*)
:color-key-at #(0 0)))
(sdl:draw-string-solid-* "PGM" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.pgm" *bmp-path*)
:color-key-at #(0 0)))
(sdl:draw-string-solid-* "TGA" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.tga" *bmp-path*)
TGA must be specified
:color-key-at #(0 0)))
allow testing
for png / jpg / tif support prior to loading the image .
(when (sdl-image:image-init-p :png)
(sdl:draw-string-solid-* "PNG" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.png" *bmp-path*)
:color-key-at #(0 0))))
(when (sdl-image:image-init-p :tif)
(sdl:draw-string-solid-* "TIF" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.tif" *bmp-path*)
:color-key-at #(0 0))))
(when (sdl-image:image-init-p :jpg)
(sdl:draw-string-solid-* "JPG" 0 0 :color sdl:*yellow*
:surface (sdl-image:load-image (merge-pathnames "lisp.jpg" *bmp-path*)
:color-key-at #(0 0))))
)))
(loop for image in (remove nil images)
for i from 0
for (y x) = (multiple-value-list (floor i 4))
for position = (sdl:point :x (+ 10 (* x 128))
:y (+ 10 (* y 111)))
do (sdl:draw-surface-at image position)))
(sdl:update-display)
(sdl:with-events ()
(:quit-event ()
requires a corresponding
(sdl-image:quit-image)
t)
(:key-down-event (:key key)
(if (sdl:key= key :SDL-KEY-ESCAPE)
(sdl:push-quit-event)))
(:video-expose-event () (sdl:update-display)))))
|
60b86db69639f6f4e034320e5466704ffcdb077df003cd7315a24066a85795ce | piotr-yuxuan/slava | encode_test.clj | (ns piotr-yuxuan.slava.encode-test
(:require [piotr-yuxuan.slava.config :as config]
[piotr-yuxuan.slava.encode :as encode]
[clojure.test :refer [deftest testing are is]])
(:import (piotr_yuxuan.slava.slava_record SlavaGenericRecord)
(org.apache.avro SchemaBuilder SchemaBuilder$NamespacedBuilder SchemaBuilder$RecordBuilder SchemaBuilder$FieldAssembler SchemaBuilder$UnionAccumulator)
(org.apache.avro.generic GenericData$Record GenericRecordBuilder)))
(deftest encoder-name-test
(are [x y] (= x y)
:encoder/avro-record (encode/encoder-name (-> (SchemaBuilder/builder) (.record "Record") .fields .endRecord))
:encoder/avro-array (encode/encoder-name (-> (SchemaBuilder/builder) .array .items .stringType))
:encoder/avro-map (encode/encoder-name (-> (SchemaBuilder/builder) .map .values .stringType))
:encoder/avro-long (encode/encoder-name (.longType (SchemaBuilder/builder)))))
(deftest avro-record-test
(let [nested-record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "NestedRecord")
^SchemaBuilder$RecordBuilder (.namespace "piotr-yuxuan.slava.test")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .intType .noDefault
(.name "mapField") (.type (-> (SchemaBuilder/builder) .map .values .intType)) .noDefault
^GenericData$Record .endRecord)
record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "Record")
^SchemaBuilder$RecordBuilder (.namespace "piotr-yuxuan.slava.test")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .intType .noDefault
(.name "nestedRecord") (.type nested-record-schema) .noDefault
^GenericData$Record .endRecord)
record-encoder (encode/avro-record config/default record-schema)
generic-record (doto (GenericRecordBuilder. record-schema)
(.set "field" (int 1))
(.set "nestedRecord" (.build (doto (GenericRecordBuilder. nested-record-schema)
(.set "field" (int 1))
(.set "mapField" {"field" (int 1)})))))]
(testing "record is properly encoded, as are field values"
(is (= (record-encoder {"field" (int 1)
"nestedRecord" {"field" (int 1)
"mapField" {"field" (int 1)}}})
(.build generic-record)))
(is (= generic-record (record-encoder (SlavaGenericRecord. generic-record {:piotr-yuxuan.slava/generic-record generic-record})))))))
(deftest avro-array-test
(let [map-schema (-> (SchemaBuilder/builder) .map .values .longType)
array-schema (-> (SchemaBuilder/builder) .array (.items map-schema))
array-encoder (encode/avro-array config/default array-schema)]
(testing "no need to mount an encoder"
(is (not array-encoder))))
(let [record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "RecordSchema")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .longType .noDefault
^GenericData$Record .endRecord)
map-schema (-> (SchemaBuilder/builder) .map (.values record-schema))
array-schema (-> (SchemaBuilder/builder) .array (.items map-schema))
array-encoder (encode/avro-array config/default array-schema)]
(testing "encoder needs to be mounted"
(is array-encoder))
(testing "array is properly encoded, as are items values"
(let [record (.build (doto (GenericRecordBuilder. record-schema)
(.set "field" 1)))]
(is (= (array-encoder [{"field" {"field" 1}} {"field" {"field" 1}}])
[{"field" record} {"field" record}]))))))
(deftest avro-map-test
(let [record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "RecordSchema")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .longType .noDefault
^GenericData$Record .endRecord)
map-schema (-> (SchemaBuilder/builder) .map (.values (-> (SchemaBuilder/builder) .map (.values record-schema))))
map-encoder (encode/avro-map config/default map-schema)
record (.build (doto (GenericRecordBuilder. record-schema)
(.set "field" 1)))]
(testing "map is properly encoded, as are its values"
(is (= (map-encoder {"field" {"field" {"field" 1}}})
{"field" {"field" record}})))))
(deftest avro-union-test
(let [;; All concrete, non-container types
union-schema (-> (SchemaBuilder/builder)
.unionOf
^SchemaBuilder$UnionAccumulator .nullType
.and (.type (-> (SchemaBuilder/builder) (.enumeration "enum") (.symbols (into-array String ["A" "B" "C"]))))
.and .stringType
.and (.type (-> (SchemaBuilder/builder) (.fixed "fixed") (.size 16)))
.and .bytesType
.and .intType
.and .longType
.and .floatType
.and .doubleType
.and .booleanType
.endUnion)]
(testing "No union encoder created, as per the default config values"
(is (not (encode/avro-union config/default union-schema)))))
(let [record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "RecordSchema")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .longType .noDefault
^GenericData$Record .endRecord)
map-schema (-> (SchemaBuilder/builder) .map (.values record-schema))
array-schema (-> (SchemaBuilder/builder) .array (.items (-> (SchemaBuilder/builder)
.unionOf
^SchemaBuilder$UnionAccumulator .nullType
.and (.type map-schema)
.and (.type record-schema)
.endUnion)))
union-schema (-> (SchemaBuilder/builder)
.unionOf
^SchemaBuilder$UnionAccumulator .nullType
.and (.type array-schema)
.and (.type map-schema) ; map is before record
.and (.type record-schema)
.endUnion)
union-encoder (encode/avro-union config/default union-schema)
record (.build (doto (GenericRecordBuilder. record-schema)
(.set "field" 1)))]
(testing "encoder is created for concrete container type"
(is union-encoder))
(is (nil? (union-encoder nil)))
(is (= [] (union-encoder [])))
(is (= [nil] (union-encoder [nil])))
(is (= record (union-encoder {"field" 1 :piotr-yuxuan.slava/type :avro-record})))
(is (= [nil record {"field" record}]
(union-encoder [nil
(with-meta {"field" 1} {:piotr-yuxuan.slava/type :avro-record})
{"field" {"field" 1}}])
(union-encoder [nil
{"field" 1
:piotr-yuxuan.slava/type :avro-record}
{"field" {"field" 1}}])))
(is (= [{"field" record}] (union-encoder [{"field" {"field" 1}}])))
(is (= [record] (union-encoder [{"field" 1 :piotr-yuxuan.slava/type :avro-record}])))
(is (= {"field" record} (union-encoder {"field" {"field" 1}}))))
(testing "resolve tie when a union contains map, and then record"
(let [record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "RecordSchema")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .longType .noDefault
^GenericData$Record .endRecord)
map-schema (-> (SchemaBuilder/builder) .map .values .longType)
union-schema (-> (SchemaBuilder/builder)
.unionOf
^SchemaBuilder$UnionAccumulator (.type map-schema) .and (.type record-schema)
.endUnion)
union-encoder (encode/avro-union config/default union-schema)
record (.build (doto (GenericRecordBuilder. record-schema)
(.set "field" 1)))]
(is (= record (union-encoder {"field" 1 :piotr-yuxuan.slava/type :avro-record})))
(is (= record (union-encoder (with-meta {"field" 1} {:piotr-yuxuan.slava/type :avro-record}))))
(is (= record (union-encoder {"field" 1})))))
(testing "resolve tie when a union contains record, and then map"
(let [record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "RecordSchema")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .longType .noDefault
^GenericData$Record .endRecord)
map-schema (-> (SchemaBuilder/builder) .map .values .longType)
union-schema (-> (SchemaBuilder/builder)
.unionOf
^SchemaBuilder$UnionAccumulator (.type record-schema) .and (.type map-schema)
.endUnion)
union-encoder (encode/avro-union config/default union-schema)
record (.build (doto (GenericRecordBuilder. record-schema)
(.set "field" 1)))]
(is (= record (union-encoder {"field" 1})))
(is (= record (union-encoder (with-meta {"field" 1} {:piotr-yuxuan.slava/type :avro-map})))))))
| null | https://raw.githubusercontent.com/piotr-yuxuan/slava/7b018b9ed948f7d5f7470d4a71d77ec1e133f3cd/test/piotr_yuxuan/slava/encode_test.clj | clojure | All concrete, non-container types
map is before record | (ns piotr-yuxuan.slava.encode-test
(:require [piotr-yuxuan.slava.config :as config]
[piotr-yuxuan.slava.encode :as encode]
[clojure.test :refer [deftest testing are is]])
(:import (piotr_yuxuan.slava.slava_record SlavaGenericRecord)
(org.apache.avro SchemaBuilder SchemaBuilder$NamespacedBuilder SchemaBuilder$RecordBuilder SchemaBuilder$FieldAssembler SchemaBuilder$UnionAccumulator)
(org.apache.avro.generic GenericData$Record GenericRecordBuilder)))
(deftest encoder-name-test
(are [x y] (= x y)
:encoder/avro-record (encode/encoder-name (-> (SchemaBuilder/builder) (.record "Record") .fields .endRecord))
:encoder/avro-array (encode/encoder-name (-> (SchemaBuilder/builder) .array .items .stringType))
:encoder/avro-map (encode/encoder-name (-> (SchemaBuilder/builder) .map .values .stringType))
:encoder/avro-long (encode/encoder-name (.longType (SchemaBuilder/builder)))))
(deftest avro-record-test
(let [nested-record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "NestedRecord")
^SchemaBuilder$RecordBuilder (.namespace "piotr-yuxuan.slava.test")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .intType .noDefault
(.name "mapField") (.type (-> (SchemaBuilder/builder) .map .values .intType)) .noDefault
^GenericData$Record .endRecord)
record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "Record")
^SchemaBuilder$RecordBuilder (.namespace "piotr-yuxuan.slava.test")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .intType .noDefault
(.name "nestedRecord") (.type nested-record-schema) .noDefault
^GenericData$Record .endRecord)
record-encoder (encode/avro-record config/default record-schema)
generic-record (doto (GenericRecordBuilder. record-schema)
(.set "field" (int 1))
(.set "nestedRecord" (.build (doto (GenericRecordBuilder. nested-record-schema)
(.set "field" (int 1))
(.set "mapField" {"field" (int 1)})))))]
(testing "record is properly encoded, as are field values"
(is (= (record-encoder {"field" (int 1)
"nestedRecord" {"field" (int 1)
"mapField" {"field" (int 1)}}})
(.build generic-record)))
(is (= generic-record (record-encoder (SlavaGenericRecord. generic-record {:piotr-yuxuan.slava/generic-record generic-record})))))))
(deftest avro-array-test
(let [map-schema (-> (SchemaBuilder/builder) .map .values .longType)
array-schema (-> (SchemaBuilder/builder) .array (.items map-schema))
array-encoder (encode/avro-array config/default array-schema)]
(testing "no need to mount an encoder"
(is (not array-encoder))))
(let [record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "RecordSchema")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .longType .noDefault
^GenericData$Record .endRecord)
map-schema (-> (SchemaBuilder/builder) .map (.values record-schema))
array-schema (-> (SchemaBuilder/builder) .array (.items map-schema))
array-encoder (encode/avro-array config/default array-schema)]
(testing "encoder needs to be mounted"
(is array-encoder))
(testing "array is properly encoded, as are items values"
(let [record (.build (doto (GenericRecordBuilder. record-schema)
(.set "field" 1)))]
(is (= (array-encoder [{"field" {"field" 1}} {"field" {"field" 1}}])
[{"field" record} {"field" record}]))))))
(deftest avro-map-test
(let [record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "RecordSchema")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .longType .noDefault
^GenericData$Record .endRecord)
map-schema (-> (SchemaBuilder/builder) .map (.values (-> (SchemaBuilder/builder) .map (.values record-schema))))
map-encoder (encode/avro-map config/default map-schema)
record (.build (doto (GenericRecordBuilder. record-schema)
(.set "field" 1)))]
(testing "map is properly encoded, as are its values"
(is (= (map-encoder {"field" {"field" {"field" 1}}})
{"field" {"field" record}})))))
(deftest avro-union-test
union-schema (-> (SchemaBuilder/builder)
.unionOf
^SchemaBuilder$UnionAccumulator .nullType
.and (.type (-> (SchemaBuilder/builder) (.enumeration "enum") (.symbols (into-array String ["A" "B" "C"]))))
.and .stringType
.and (.type (-> (SchemaBuilder/builder) (.fixed "fixed") (.size 16)))
.and .bytesType
.and .intType
.and .longType
.and .floatType
.and .doubleType
.and .booleanType
.endUnion)]
(testing "No union encoder created, as per the default config values"
(is (not (encode/avro-union config/default union-schema)))))
(let [record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "RecordSchema")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .longType .noDefault
^GenericData$Record .endRecord)
map-schema (-> (SchemaBuilder/builder) .map (.values record-schema))
array-schema (-> (SchemaBuilder/builder) .array (.items (-> (SchemaBuilder/builder)
.unionOf
^SchemaBuilder$UnionAccumulator .nullType
.and (.type map-schema)
.and (.type record-schema)
.endUnion)))
union-schema (-> (SchemaBuilder/builder)
.unionOf
^SchemaBuilder$UnionAccumulator .nullType
.and (.type array-schema)
.and (.type record-schema)
.endUnion)
union-encoder (encode/avro-union config/default union-schema)
record (.build (doto (GenericRecordBuilder. record-schema)
(.set "field" 1)))]
(testing "encoder is created for concrete container type"
(is union-encoder))
(is (nil? (union-encoder nil)))
(is (= [] (union-encoder [])))
(is (= [nil] (union-encoder [nil])))
(is (= record (union-encoder {"field" 1 :piotr-yuxuan.slava/type :avro-record})))
(is (= [nil record {"field" record}]
(union-encoder [nil
(with-meta {"field" 1} {:piotr-yuxuan.slava/type :avro-record})
{"field" {"field" 1}}])
(union-encoder [nil
{"field" 1
:piotr-yuxuan.slava/type :avro-record}
{"field" {"field" 1}}])))
(is (= [{"field" record}] (union-encoder [{"field" {"field" 1}}])))
(is (= [record] (union-encoder [{"field" 1 :piotr-yuxuan.slava/type :avro-record}])))
(is (= {"field" record} (union-encoder {"field" {"field" 1}}))))
(testing "resolve tie when a union contains map, and then record"
(let [record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "RecordSchema")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .longType .noDefault
^GenericData$Record .endRecord)
map-schema (-> (SchemaBuilder/builder) .map .values .longType)
union-schema (-> (SchemaBuilder/builder)
.unionOf
^SchemaBuilder$UnionAccumulator (.type map-schema) .and (.type record-schema)
.endUnion)
union-encoder (encode/avro-union config/default union-schema)
record (.build (doto (GenericRecordBuilder. record-schema)
(.set "field" 1)))]
(is (= record (union-encoder {"field" 1 :piotr-yuxuan.slava/type :avro-record})))
(is (= record (union-encoder (with-meta {"field" 1} {:piotr-yuxuan.slava/type :avro-record}))))
(is (= record (union-encoder {"field" 1})))))
(testing "resolve tie when a union contains record, and then map"
(let [record-schema (-> (SchemaBuilder/builder)
^SchemaBuilder$NamespacedBuilder (.record "RecordSchema")
^SchemaBuilder$FieldAssembler .fields
(.name "field") .type .longType .noDefault
^GenericData$Record .endRecord)
map-schema (-> (SchemaBuilder/builder) .map .values .longType)
union-schema (-> (SchemaBuilder/builder)
.unionOf
^SchemaBuilder$UnionAccumulator (.type record-schema) .and (.type map-schema)
.endUnion)
union-encoder (encode/avro-union config/default union-schema)
record (.build (doto (GenericRecordBuilder. record-schema)
(.set "field" 1)))]
(is (= record (union-encoder {"field" 1})))
(is (= record (union-encoder (with-meta {"field" 1} {:piotr-yuxuan.slava/type :avro-map})))))))
|
e11d5651329ebe15a2b00d4769bfc8f03f870b1d627d08a2772f010564664ab1 | LPCIC/matita | nCicDisambiguate.mli |
||M|| This file is part of HELM , an Hypertextual , Electronic
||A|| Library of Mathematics , developed at the Computer Science
||T|| Department , University of Bologna , Italy .
||I||
||T|| HELM is free software ; you can redistribute it and/or
||A|| modify it under the terms of the GNU General Public License
\ / version 2 or ( at your option ) any later version .
\ / This software is distributed as is , NO WARRANTY .
V _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
||M|| This file is part of HELM, an Hypertextual, Electronic
||A|| Library of Mathematics, developed at the Computer Science
||T|| Department, University of Bologna, Italy.
||I||
||T|| HELM is free software; you can redistribute it and/or
||A|| modify it under the terms of the GNU General Public License
\ / version 2 or (at your option) any later version.
\ / This software is distributed as is, NO WARRANTY.
V_______________________________________________________________ *)
$ I d : nCic.ml 9058 2008 - 10 - 13 17:42:30Z tassi $
val debug : bool ref
val disambiguate_term :
#NCicCoercion.status ->
context:NCic.context ->
metasenv:NCic.metasenv ->
subst:NCic.substitution ->
expty:NCic.term NCicRefiner.expected_type ->
mk_implicit: (bool -> 'alias) ->
description_of_alias:('alias -> string) ->
fix_instance:(DisambiguateTypes.domain_item -> 'alias list -> 'alias list) ->
mk_choice:('alias -> NCic.term DisambiguateTypes.codomain_item) ->
aliases:'alias DisambiguateTypes.Environment.t ->
universe:'alias list DisambiguateTypes.Environment.t option ->
lookup_in_library:(
DisambiguateTypes.interactive_user_uri_choice_type ->
DisambiguateTypes.input_or_locate_uri_type ->
DisambiguateTypes.Environment.key ->
'alias list) ->
NotationPt.term Disambiguate.disambiguator_input ->
((DisambiguateTypes.domain_item * 'alias) list *
NCic.metasenv *
NCic.substitution *
NCic.term) list *
bool
val disambiguate_obj :
#NCicCoercion.status ->
mk_implicit:(bool -> 'alias) ->
description_of_alias:('alias -> string) ->
fix_instance:(DisambiguateTypes.domain_item -> 'alias list -> 'alias list) ->
mk_choice:('alias -> NCic.term DisambiguateTypes.codomain_item) ->
aliases:'alias DisambiguateTypes.Environment.t ->
universe:'alias list DisambiguateTypes.Environment.t option ->
lookup_in_library:(
DisambiguateTypes.interactive_user_uri_choice_type ->
DisambiguateTypes.input_or_locate_uri_type ->
DisambiguateTypes.Environment.key ->
'alias list) ->
uri:NUri.uri ->
string * int * NotationPt.term NotationPt.obj ->
((DisambiguateTypes.Environment.key * 'alias) list * NCic.metasenv *
NCic.substitution * NCic.obj)
list * bool
val disambiguate_path: #NCic.status -> NotationPt.term -> NCic.term
| null | https://raw.githubusercontent.com/LPCIC/matita/794ed25e6e608b2136ce7fa2963bca4115c7e175/matita/components/ng_disambiguation/nCicDisambiguate.mli | ocaml |
||M|| This file is part of HELM , an Hypertextual , Electronic
||A|| Library of Mathematics , developed at the Computer Science
||T|| Department , University of Bologna , Italy .
||I||
||T|| HELM is free software ; you can redistribute it and/or
||A|| modify it under the terms of the GNU General Public License
\ / version 2 or ( at your option ) any later version .
\ / This software is distributed as is , NO WARRANTY .
V _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _ _
||M|| This file is part of HELM, an Hypertextual, Electronic
||A|| Library of Mathematics, developed at the Computer Science
||T|| Department, University of Bologna, Italy.
||I||
||T|| HELM is free software; you can redistribute it and/or
||A|| modify it under the terms of the GNU General Public License
\ / version 2 or (at your option) any later version.
\ / This software is distributed as is, NO WARRANTY.
V_______________________________________________________________ *)
$ I d : nCic.ml 9058 2008 - 10 - 13 17:42:30Z tassi $
val debug : bool ref
val disambiguate_term :
#NCicCoercion.status ->
context:NCic.context ->
metasenv:NCic.metasenv ->
subst:NCic.substitution ->
expty:NCic.term NCicRefiner.expected_type ->
mk_implicit: (bool -> 'alias) ->
description_of_alias:('alias -> string) ->
fix_instance:(DisambiguateTypes.domain_item -> 'alias list -> 'alias list) ->
mk_choice:('alias -> NCic.term DisambiguateTypes.codomain_item) ->
aliases:'alias DisambiguateTypes.Environment.t ->
universe:'alias list DisambiguateTypes.Environment.t option ->
lookup_in_library:(
DisambiguateTypes.interactive_user_uri_choice_type ->
DisambiguateTypes.input_or_locate_uri_type ->
DisambiguateTypes.Environment.key ->
'alias list) ->
NotationPt.term Disambiguate.disambiguator_input ->
((DisambiguateTypes.domain_item * 'alias) list *
NCic.metasenv *
NCic.substitution *
NCic.term) list *
bool
val disambiguate_obj :
#NCicCoercion.status ->
mk_implicit:(bool -> 'alias) ->
description_of_alias:('alias -> string) ->
fix_instance:(DisambiguateTypes.domain_item -> 'alias list -> 'alias list) ->
mk_choice:('alias -> NCic.term DisambiguateTypes.codomain_item) ->
aliases:'alias DisambiguateTypes.Environment.t ->
universe:'alias list DisambiguateTypes.Environment.t option ->
lookup_in_library:(
DisambiguateTypes.interactive_user_uri_choice_type ->
DisambiguateTypes.input_or_locate_uri_type ->
DisambiguateTypes.Environment.key ->
'alias list) ->
uri:NUri.uri ->
string * int * NotationPt.term NotationPt.obj ->
((DisambiguateTypes.Environment.key * 'alias) list * NCic.metasenv *
NCic.substitution * NCic.obj)
list * bool
val disambiguate_path: #NCic.status -> NotationPt.term -> NCic.term
| |
c041d9b172f76f35c9a31d388b8a30fec9431f5802b197cb61152a9108fa92eb | alvatar/spheres | util.scm |
;; utility
(define (fold kons knil lst)
(let loop ((acc knil)
(lst lst))
(if (null? lst)
acc
(loop (kons (car lst) acc)
(cdr lst)))))
(define (parser-error . args)
(for-each
(lambda (x)
(if (not (string? x))
(write x (current-error-port))
(display x (current-error-port))))
args)
(newline (current-error-port))
#f)
(define-macro (assert test . result)
`(let ((res ,test))
(if (not ,(if (pair? result)
`(equal? res ,(car result))
'res))
(begin
(parser-error " *** ASSERTION FAILURE *** ")
(parser-error ',test)
(parser-error res " != "
,(if (pair? result)
(car result)
#t))
(exit 1)))))
| null | https://raw.githubusercontent.com/alvatar/spheres/568836f234a469ef70c69f4a2d9b56d41c3fc5bd/doc/parser/silex-ffi-example/util.scm | scheme | utility |
(define (fold kons knil lst)
(let loop ((acc knil)
(lst lst))
(if (null? lst)
acc
(loop (kons (car lst) acc)
(cdr lst)))))
(define (parser-error . args)
(for-each
(lambda (x)
(if (not (string? x))
(write x (current-error-port))
(display x (current-error-port))))
args)
(newline (current-error-port))
#f)
(define-macro (assert test . result)
`(let ((res ,test))
(if (not ,(if (pair? result)
`(equal? res ,(car result))
'res))
(begin
(parser-error " *** ASSERTION FAILURE *** ")
(parser-error ',test)
(parser-error res " != "
,(if (pair? result)
(car result)
#t))
(exit 1)))))
|
85307a08da42abb4adf32a658b9ba730b6fe23036ba088b135310da671def088 | AshleyYakeley/Truth | GetDynamicSupertype.hs | module Pinafore.Language.Type.GetDynamicSupertype
( getGreatestDynamicSupertype
, getOptGreatestDynamicSupertype
, getOptGreatestDynamicSupertypeSW
) where
import Data.Shim
import Language.Expression.Dolan
import Pinafore.Language.Shim
import Pinafore.Language.Type.DynamicSupertype
import Pinafore.Language.Type.Ground
import Pinafore.Language.Type.Subtype ()
import Pinafore.Language.Type.Type
import Shapes
pfmap ::
(HasVariance f, VarianceOf f ~ 'Covariance)
=> PolarMap (QPolyShim Type) 'Negative a b
-> PolarMap (QPolyShim Type) 'Negative (f a) (f b)
pfmap (MkPolarMap mp) = MkPolarMap $ cfmap mp
zip2 :: PolarMap (QPolyShim Type) 'Negative (Maybe (MeetType a b)) (MeetType (Maybe a) (Maybe b))
zip2 = MkPolarMap $ functionToShim "zip2" $ \(BothMeetType a b) -> liftA2 BothMeetType a b
getOptSingleGreatestDynamicSupertype :: QSingularType 'Negative t -> Maybe (QShimWit 'Negative (Maybe t))
getOptSingleGreatestDynamicSupertype (GroundedDolanSingularType (MkDolanGroundedType gt args)) = do
dt <- getPolyGreatestDynamicSupertype (qgtGreatestDynamicSupertype gt) args
return $ shimWitToDolan dt
getOptSingleGreatestDynamicSupertype (RecursiveDolanSingularType var t) =
case unrollRecursiveType var t of
MkShimWit t' iconv -> let
t'' = getGreatestDynamicSupertype t'
in return $ mapShimWit (pfmap $ polarPolyIsoForwards iconv) t''
getOptSingleGreatestDynamicSupertype _ = Nothing
getSingleGreatestDynamicSupertype :: QSingularType 'Negative t -> QShimWit 'Negative (Maybe t)
getSingleGreatestDynamicSupertype t =
case getOptSingleGreatestDynamicSupertype t of
Just t' -> t'
Nothing -> mapPolarShimWit (MkPolarMap $ functionToShim "Just" Just) $ typeToDolan t
getGreatestDynamicSupertype :: QType 'Negative t -> QShimWit 'Negative (Maybe t)
getGreatestDynamicSupertype NilDolanType =
mapPolarShimWit (MkPolarMap $ functionToShim "Just" Just) $ mkShimWit NilDolanType
getGreatestDynamicSupertype (ConsDolanType t1 NilDolanType) =
mapShimWit (pfmap iPolarL1) $ getSingleGreatestDynamicSupertype t1
getGreatestDynamicSupertype (ConsDolanType t1 tr) =
mapShimWit zip2 $ joinMeetShimWit (getSingleGreatestDynamicSupertype t1) (getGreatestDynamicSupertype tr)
getOptGreatestDynamicSupertype :: QType 'Negative t -> Maybe (QShimWit 'Negative (Maybe t))
getOptGreatestDynamicSupertype (ConsDolanType t1 NilDolanType) = do
t1' <- getOptSingleGreatestDynamicSupertype t1
return $ shimWitToDolan $ mapShimWit (pfmap iPolarL1) t1'
getOptGreatestDynamicSupertype t = Just $ getGreatestDynamicSupertype t
getOptGreatestDynamicSupertypeSW :: QShimWit 'Negative t -> Maybe (QShimWit 'Negative (Maybe t))
getOptGreatestDynamicSupertypeSW (MkShimWit t conv) = do
t' <- getOptGreatestDynamicSupertype t
return $ mapShimWit (pfmap conv) t'
| null | https://raw.githubusercontent.com/AshleyYakeley/Truth/ecacfea61f96a9f925606ff06020f4fbcdd37159/Pinafore/pinafore-language/lib/Pinafore/Language/Type/GetDynamicSupertype.hs | haskell | module Pinafore.Language.Type.GetDynamicSupertype
( getGreatestDynamicSupertype
, getOptGreatestDynamicSupertype
, getOptGreatestDynamicSupertypeSW
) where
import Data.Shim
import Language.Expression.Dolan
import Pinafore.Language.Shim
import Pinafore.Language.Type.DynamicSupertype
import Pinafore.Language.Type.Ground
import Pinafore.Language.Type.Subtype ()
import Pinafore.Language.Type.Type
import Shapes
pfmap ::
(HasVariance f, VarianceOf f ~ 'Covariance)
=> PolarMap (QPolyShim Type) 'Negative a b
-> PolarMap (QPolyShim Type) 'Negative (f a) (f b)
pfmap (MkPolarMap mp) = MkPolarMap $ cfmap mp
zip2 :: PolarMap (QPolyShim Type) 'Negative (Maybe (MeetType a b)) (MeetType (Maybe a) (Maybe b))
zip2 = MkPolarMap $ functionToShim "zip2" $ \(BothMeetType a b) -> liftA2 BothMeetType a b
getOptSingleGreatestDynamicSupertype :: QSingularType 'Negative t -> Maybe (QShimWit 'Negative (Maybe t))
getOptSingleGreatestDynamicSupertype (GroundedDolanSingularType (MkDolanGroundedType gt args)) = do
dt <- getPolyGreatestDynamicSupertype (qgtGreatestDynamicSupertype gt) args
return $ shimWitToDolan dt
getOptSingleGreatestDynamicSupertype (RecursiveDolanSingularType var t) =
case unrollRecursiveType var t of
MkShimWit t' iconv -> let
t'' = getGreatestDynamicSupertype t'
in return $ mapShimWit (pfmap $ polarPolyIsoForwards iconv) t''
getOptSingleGreatestDynamicSupertype _ = Nothing
getSingleGreatestDynamicSupertype :: QSingularType 'Negative t -> QShimWit 'Negative (Maybe t)
getSingleGreatestDynamicSupertype t =
case getOptSingleGreatestDynamicSupertype t of
Just t' -> t'
Nothing -> mapPolarShimWit (MkPolarMap $ functionToShim "Just" Just) $ typeToDolan t
getGreatestDynamicSupertype :: QType 'Negative t -> QShimWit 'Negative (Maybe t)
getGreatestDynamicSupertype NilDolanType =
mapPolarShimWit (MkPolarMap $ functionToShim "Just" Just) $ mkShimWit NilDolanType
getGreatestDynamicSupertype (ConsDolanType t1 NilDolanType) =
mapShimWit (pfmap iPolarL1) $ getSingleGreatestDynamicSupertype t1
getGreatestDynamicSupertype (ConsDolanType t1 tr) =
mapShimWit zip2 $ joinMeetShimWit (getSingleGreatestDynamicSupertype t1) (getGreatestDynamicSupertype tr)
getOptGreatestDynamicSupertype :: QType 'Negative t -> Maybe (QShimWit 'Negative (Maybe t))
getOptGreatestDynamicSupertype (ConsDolanType t1 NilDolanType) = do
t1' <- getOptSingleGreatestDynamicSupertype t1
return $ shimWitToDolan $ mapShimWit (pfmap iPolarL1) t1'
getOptGreatestDynamicSupertype t = Just $ getGreatestDynamicSupertype t
getOptGreatestDynamicSupertypeSW :: QShimWit 'Negative t -> Maybe (QShimWit 'Negative (Maybe t))
getOptGreatestDynamicSupertypeSW (MkShimWit t conv) = do
t' <- getOptGreatestDynamicSupertype t
return $ mapShimWit (pfmap conv) t'
| |
8acf339295428618cad6410548482691b369b4774a05494bc5355d6bc280b2dd | bendyworks/api-server | Server.hs | {-# LANGUAGE RankNTypes #-}
module Api.Types.Server where
import Control.Monad.Reader (ReaderT)
import Data.Text.Lazy (Text, pack)
import Hasql (Session)
import Hasql.Postgres (Postgres)
import Network.Mail.Mime (Mail)
import Web.Scotty.Trans (ActionT, ScottyError(..), ScottyT)
data ApiException
= MalformedParam Text
| MissingParam Text
| NoQueryResults
| MissingAuthToken
| UnauthorizedUser
| NoResourceForUser
| ServerError Text
deriving (Eq, Read, Show)
instance ScottyError ApiException where
stringError = ServerError . pack
showError = pack . show
data ServerState = ServerState
{ mailer :: Mail -> IO ()
}
type ApiServerM s = ScottyT ApiException (ReaderT ServerState (Session Postgres s IO))
type ApiActionM s = ActionT ApiException (ReaderT ServerState (Session Postgres s IO))
| null | https://raw.githubusercontent.com/bendyworks/api-server/9dd6d7c2599bd1c5a7e898a417a7aeb319415dd2/lib/Api/Types/Server.hs | haskell | # LANGUAGE RankNTypes # |
module Api.Types.Server where
import Control.Monad.Reader (ReaderT)
import Data.Text.Lazy (Text, pack)
import Hasql (Session)
import Hasql.Postgres (Postgres)
import Network.Mail.Mime (Mail)
import Web.Scotty.Trans (ActionT, ScottyError(..), ScottyT)
data ApiException
= MalformedParam Text
| MissingParam Text
| NoQueryResults
| MissingAuthToken
| UnauthorizedUser
| NoResourceForUser
| ServerError Text
deriving (Eq, Read, Show)
instance ScottyError ApiException where
stringError = ServerError . pack
showError = pack . show
data ServerState = ServerState
{ mailer :: Mail -> IO ()
}
type ApiServerM s = ScottyT ApiException (ReaderT ServerState (Session Postgres s IO))
type ApiActionM s = ActionT ApiException (ReaderT ServerState (Session Postgres s IO))
|
58a01267cbb0919adeefa83e2f2bb502cc809ab3e1666d636048ab287fb621f6 | jacobobryant/hallway | api.clj | (ns findka.hallway.api
(:require [biff.util :as bu]
[clj-http.client :as http]
[clojure.java.shell :as sh]
[clojure.string :as str]))
; todo rate limit
; todo cache results in crux
(defmacro catchall-verbose [& forms]
`(try ~@forms (catch Exception e# (.printStackTrace e#))))
(defn search-hn [subject-url]
(->> (http/get ""
{:query-params {:query (str "\"" subject-url "\"")
:restrictSearchableAttributes "url"
:attributesToRetrieve "created_at,num_comments,points,title,author,url"
:attributesToHighlight ""
:queryType "prefixNone"}
:as :json})
catchall-verbose
:body
:hits
(keep (fn [{:keys [objectID
url
title
num_comments
created_at
points
author] :as result}]
(when (= (str/lower-case subject-url) (str/lower-case url))
(catchall-verbose
{:source :hn
:url (str "=" objectID)
:title title
:n-comments num_comments
:created (bu/parse-date created_at)
:points points
:author author}))))))
(defn search-reddit [subject-url]
(->> (http/get ""
{:query-params {:url subject-url}
:headers {"User-Agent" ""}
:as :json})
catchall-verbose
:body
:data
:children
(keep (fn [{{:keys [name
permalink
num_comments
created_utc
title
author
score
subreddit_name_prefixed]} :data}]
(catchall-verbose
{:source :reddit
:url (str "" permalink)
:title (str subreddit_name_prefixed ": " title)
:n-comments num_comments
:created (java.util.Date. (* (long created_utc) 1000))
:points score
:author author})))))
(defn search-twitter [subject-url]
(->> (sh/sh ".local/bin/twint" "-s" subject-url "--format"
">>> {username} {id} {date}T{time} {replies} {retweets} {likes} {tweet}"
"-pt" "--limit" "20" :dir (System/getProperty "user.home"))
:out
str/split-lines
(keep (fn [line]
(catchall-verbose
(when (str/starts-with? line ">>> ")
(let [[_ username id datetime replies retweets likes text]
(str/split line #" " 8)
datetime (bu/parse-date datetime "yyyy-MM-dd'T'HH:mm:ss")
replies (Long/parseLong replies)
retweets (Long/parseLong retweets)
likes (Long/parseLong likes)]
{:source :twitter
:url (str "/" username "/status/" id)
:title (str "@" username ": " text)
:author username
:n-comments replies
:created datetime
:points (+ replies retweets likes)})))))))
(defn search-all [subject-url]
(apply concat (pmap #(%1 subject-url)
[search-hn search-reddit search-twitter])))
| null | https://raw.githubusercontent.com/jacobobryant/hallway/9326f972a25ca0f77c2901da2b253d149dc4a46d/src/findka/hallway/api.clj | clojure | todo rate limit
todo cache results in crux | (ns findka.hallway.api
(:require [biff.util :as bu]
[clj-http.client :as http]
[clojure.java.shell :as sh]
[clojure.string :as str]))
(defmacro catchall-verbose [& forms]
`(try ~@forms (catch Exception e# (.printStackTrace e#))))
(defn search-hn [subject-url]
(->> (http/get ""
{:query-params {:query (str "\"" subject-url "\"")
:restrictSearchableAttributes "url"
:attributesToRetrieve "created_at,num_comments,points,title,author,url"
:attributesToHighlight ""
:queryType "prefixNone"}
:as :json})
catchall-verbose
:body
:hits
(keep (fn [{:keys [objectID
url
title
num_comments
created_at
points
author] :as result}]
(when (= (str/lower-case subject-url) (str/lower-case url))
(catchall-verbose
{:source :hn
:url (str "=" objectID)
:title title
:n-comments num_comments
:created (bu/parse-date created_at)
:points points
:author author}))))))
(defn search-reddit [subject-url]
(->> (http/get ""
{:query-params {:url subject-url}
:headers {"User-Agent" ""}
:as :json})
catchall-verbose
:body
:data
:children
(keep (fn [{{:keys [name
permalink
num_comments
created_utc
title
author
score
subreddit_name_prefixed]} :data}]
(catchall-verbose
{:source :reddit
:url (str "" permalink)
:title (str subreddit_name_prefixed ": " title)
:n-comments num_comments
:created (java.util.Date. (* (long created_utc) 1000))
:points score
:author author})))))
(defn search-twitter [subject-url]
(->> (sh/sh ".local/bin/twint" "-s" subject-url "--format"
">>> {username} {id} {date}T{time} {replies} {retweets} {likes} {tweet}"
"-pt" "--limit" "20" :dir (System/getProperty "user.home"))
:out
str/split-lines
(keep (fn [line]
(catchall-verbose
(when (str/starts-with? line ">>> ")
(let [[_ username id datetime replies retweets likes text]
(str/split line #" " 8)
datetime (bu/parse-date datetime "yyyy-MM-dd'T'HH:mm:ss")
replies (Long/parseLong replies)
retweets (Long/parseLong retweets)
likes (Long/parseLong likes)]
{:source :twitter
:url (str "/" username "/status/" id)
:title (str "@" username ": " text)
:author username
:n-comments replies
:created datetime
:points (+ replies retweets likes)})))))))
(defn search-all [subject-url]
(apply concat (pmap #(%1 subject-url)
[search-hn search-reddit search-twitter])))
|
6a41d9335cb7bf1145d68cb1601a63dd0ccbba165f33d37cce89a0115b87c53d | janestreet/memtrace_viewer_with_deps | table_char.mli | * [ Table_char ] holds a box - drawing character
open! Core_kernel
open! Import
type t =
{ ascii : char
; utf8 : string
}
[@@deriving sexp_of]
val connect : ?top:unit -> ?bottom:unit -> ?left:unit -> ?right:unit -> unit -> t
val to_buffer : t -> Buffer.t -> [ `Ascii | `Unicode ] -> unit
| null | https://raw.githubusercontent.com/janestreet/memtrace_viewer_with_deps/5a9e1f927f5f8333e2d71c8d3ca03a45587422c4/vendor/textutils/ascii_table/kernel/table_char.mli | ocaml | * [ Table_char ] holds a box - drawing character
open! Core_kernel
open! Import
type t =
{ ascii : char
; utf8 : string
}
[@@deriving sexp_of]
val connect : ?top:unit -> ?bottom:unit -> ?left:unit -> ?right:unit -> unit -> t
val to_buffer : t -> Buffer.t -> [ `Ascii | `Unicode ] -> unit
| |
4be4b7a8022ec44db4d2f1d2adfefb8a5488383a1c885df063057448701089ad | GaloisInc/pads-haskell | Errors.hs | {-# LANGUAGE NamedFieldPuns, DeriveDataTypeable #-}
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* ( c ) < > *
* < > *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
** *********************************************************************
* *
* (c) Kathleen Fisher <> *
* John Launchbury <> *
* *
************************************************************************
-}
module Language.Pads.Errors where
import Text.PrettyPrint.Mainland as PP
import qualified Language.Pads.Source as S
import Data.Data
data ErrMsg =
FoundWhenExpecting String String
| MissingLiteral String
| ExtraBeforeLiteral String
| LineError String
| Insufficient Int Int
| RegexMatchFail String
| TransformToDstFail String String String
| TransformToSrcFail String String String
| UnderlyingTypedefFail
| PredicateFailure
| ExtraStuffBeforeTy String String
| FileError String String
deriving (Typeable, Data, Eq, Ord, Show)
{- XXX-KSF: fix pretty printing to use pretty printing combinators rather than string ++ -}
instance Pretty ErrMsg where
ppr (FoundWhenExpecting str1 str2) = text ("Encountered " ++ str1 ++ " when expecting " ++ str2 ++ ".")
ppr (MissingLiteral s) = text ("Missing Literal: " ++ s ++ ".")
ppr (ExtraBeforeLiteral s) = text ("Extra bytes before literal: " ++ s ++ ".")
ppr (ExtraStuffBeforeTy junk ty) = text ("Extra bytes: " ++ junk ++ " before " ++ ty ++ ".")
ppr (Insufficient found expected) = text("Found " ++ (show found) ++ " bytes when looking for " ++ (show expected) ++ "bytes.")
ppr (RegexMatchFail s) = text ("Failed to match regular expression: " ++ s ++ ".")
ppr (TransformToDstFail s1 s2 s3) = text ("Parsing transform " ++ s1 ++ " failed on input: " ++ s2 ++ s3)
ppr (TransformToSrcFail s1 s2 s3) = text ("Printing transform "++ s1 ++ " failed on input: " ++ s2 ++ s3)
ppr (LineError s) = text s
ppr UnderlyingTypedefFail = text "Pads predicate is true, but underlying type had an error."
ppr PredicateFailure = text "Pads predicate is false."
ppr (FileError err file) = text ("Problem with file: " ++ file ++ "("++ err ++ ").")
data ErrInfo = ErrInfo { msg :: ErrMsg,
position :: Maybe S.Pos }
deriving (Typeable, Data, Eq, Ord, Show)
instance Pretty ErrInfo where
ppr (ErrInfo {msg,position}) = PP.ppr msg <+>
case position of
Nothing -> empty
Just pos -> (text "at:") <+> PP.ppr pos
mergeErrInfo (ErrInfo{msg=msg1, position=position1}) (ErrInfo{msg=msg2, position=position2}) =
(ErrInfo{msg=msg1, position=position1})
maybeMergeErrInfo m1 m2 = case (m1,m2) of
(Nothing,Nothing) -> Nothing
(Just p, Nothing) -> Just p
(Nothing, Just p) -> Just p
(Just p1, Just p2) -> Just (mergeErrInfo p1 p2)
| null | https://raw.githubusercontent.com/GaloisInc/pads-haskell/ffa01030cd1c1edf5f50df53f3783ff28bf43c7e/Language/Pads/Errors.hs | haskell | # LANGUAGE NamedFieldPuns, DeriveDataTypeable #
XXX-KSF: fix pretty printing to use pretty printing combinators rather than string ++ |
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* ( c ) < > *
* < > *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
** *********************************************************************
* *
* (c) Kathleen Fisher <> *
* John Launchbury <> *
* *
************************************************************************
-}
module Language.Pads.Errors where
import Text.PrettyPrint.Mainland as PP
import qualified Language.Pads.Source as S
import Data.Data
data ErrMsg =
FoundWhenExpecting String String
| MissingLiteral String
| ExtraBeforeLiteral String
| LineError String
| Insufficient Int Int
| RegexMatchFail String
| TransformToDstFail String String String
| TransformToSrcFail String String String
| UnderlyingTypedefFail
| PredicateFailure
| ExtraStuffBeforeTy String String
| FileError String String
deriving (Typeable, Data, Eq, Ord, Show)
instance Pretty ErrMsg where
ppr (FoundWhenExpecting str1 str2) = text ("Encountered " ++ str1 ++ " when expecting " ++ str2 ++ ".")
ppr (MissingLiteral s) = text ("Missing Literal: " ++ s ++ ".")
ppr (ExtraBeforeLiteral s) = text ("Extra bytes before literal: " ++ s ++ ".")
ppr (ExtraStuffBeforeTy junk ty) = text ("Extra bytes: " ++ junk ++ " before " ++ ty ++ ".")
ppr (Insufficient found expected) = text("Found " ++ (show found) ++ " bytes when looking for " ++ (show expected) ++ "bytes.")
ppr (RegexMatchFail s) = text ("Failed to match regular expression: " ++ s ++ ".")
ppr (TransformToDstFail s1 s2 s3) = text ("Parsing transform " ++ s1 ++ " failed on input: " ++ s2 ++ s3)
ppr (TransformToSrcFail s1 s2 s3) = text ("Printing transform "++ s1 ++ " failed on input: " ++ s2 ++ s3)
ppr (LineError s) = text s
ppr UnderlyingTypedefFail = text "Pads predicate is true, but underlying type had an error."
ppr PredicateFailure = text "Pads predicate is false."
ppr (FileError err file) = text ("Problem with file: " ++ file ++ "("++ err ++ ").")
data ErrInfo = ErrInfo { msg :: ErrMsg,
position :: Maybe S.Pos }
deriving (Typeable, Data, Eq, Ord, Show)
instance Pretty ErrInfo where
ppr (ErrInfo {msg,position}) = PP.ppr msg <+>
case position of
Nothing -> empty
Just pos -> (text "at:") <+> PP.ppr pos
mergeErrInfo (ErrInfo{msg=msg1, position=position1}) (ErrInfo{msg=msg2, position=position2}) =
(ErrInfo{msg=msg1, position=position1})
maybeMergeErrInfo m1 m2 = case (m1,m2) of
(Nothing,Nothing) -> Nothing
(Just p, Nothing) -> Just p
(Nothing, Just p) -> Just p
(Just p1, Just p2) -> Just (mergeErrInfo p1 p2)
|
07ed9d549e3f4d40bad0ac5f967bb6a792ee0f12034bbb39223932f590736485 | biokoda/actordb_core | actordb_sqlproc.erl | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(actordb_sqlproc).
-behaviour(gen_server).
-define(LAGERDBG,true).
-export([start/1, stop/1, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-export([print_info/1]).
-export([read/4,write/4,call/4,call/5,diepls/2,try_actornum/3]).
-export([call_slave/4,call_slave/5,start_copylock/2]). %call_master/4,call_master/5
-export([write_call/3, write_call1/4, read_call/3, read_call1/5]).
-include_lib("actordb_sqlproc.hrl").
% Read actor number without creating actor.
try_actornum(Name,Type,CbMod) ->
case call({Name,Type},[actornum],{state_rw,actornum},CbMod) of
{error,nocreate} ->
{"",undefined};
{ok,Path,NumNow} ->
{Path,NumNow}
end.
read(Name,Flags,{[{copy,CopyFrom}],_},Start) ->
read(Name,Flags,[{copy,CopyFrom}],Start);
read(Name,Flags,[{copy,CopyFrom}],Start) ->
case distreg:whereis(Name) of
undefined ->
R = #read{sql = <<"select * from __adb limit 1;">>, flags = Flags},
case call(Name,Flags,R,Start) of
{ok,_} ->
{ok,[{columns,{<<"status">>}},{rows,[{<<"ok">>}]}]};
_E ->
?AERR("Unable to copy actor ~p to ~p",[CopyFrom,Name]),
{ok,[{columns,{<<"status">>}},{rows,[{<<"failed">>}]}]}
end;
Pid ->
diepls(Pid,overwrite),
Ref = erlang:monitor(process,Pid),
receive
{'DOWN',Ref,_,_Pid,_} ->
read(Name,Flags,[{copy,CopyFrom}],Start)
after 2000 ->
{ok,[{columns,{<<"status">>}},{row,{<<"failed_running">>}}]}
end
end;
read(Name,Flags,[delete],Start) ->
call(Name,Flags,#write{sql = delete, flags = Flags},Start);
read(Name,Flags,{Sql,[]},Start) ->
read(Name,Flags,Sql,Start);
read(Name,Flags,Sql,Start) ->
call(Name,Flags,#read{sql = Sql, flags = Flags},Start).
write(Name,Flags,{Sql,[]},Start) ->
write(Name,Flags,Sql,Start);
write(Name,Flags,{{_,_,_} = TransactionId,Sql},Start) ->
write(Name,Flags,{undefined,TransactionId,Sql},Start);
write(Name,Flags,{MFA,TransactionId,Sql},Start) ->
case TransactionId of
{_,_,_} ->
case Sql of
commit ->
call(Name,Flags,{commit,true,TransactionId},Start);
abort ->
call(Name,Flags,{commit,false,TransactionId},Start);
[delete] ->
W = #write{mfa = MFA,sql = delete, transaction = TransactionId, flags = Flags},
call(Name,Flags,W,Start);
{Sql0, PreparedStatements} ->
W = #write{mfa = MFA,sql = Sql0, records = PreparedStatements,
transaction = TransactionId, flags = Flags},
call(Name,Flags,W,Start);
_ ->
W = #write{mfa = MFA,sql = Sql,
transaction = TransactionId, flags = Flags},
call(Name,Flags,W,Start)
end;
_ when Sql == undefined ->
call(Name,Flags,#write{mfa = MFA, flags = Flags},Start);
_ when tuple_size(Sql) == 2 ->
{Sql0,Rec} = Sql,
W = #write{mfa = MFA, sql = Sql0, records = Rec, flags = Flags},
call(Name,[wait_election|Flags],W,Start);
_ ->
W = #write{mfa = MFA, sql = Sql, flags = Flags},
call(Name,[wait_election|Flags],W,Start)
end;
write(Name,Flags,[delete],Start) ->
call(Name,Flags,#write{sql = delete, flags = Flags},Start);
write(Name,Flags,{Sql,Records},Start) ->
W = #write{sql = Sql, records = Records, flags = Flags},
call(Name,[wait_election|Flags],W,Start);
write(Name,Flags,Sql,Start) ->
W = #write{sql = Sql, flags = Flags},
call(Name,[wait_election|Flags],W,Start).
call(Name,Flags,Msg,Start) ->
call(Name,Flags,Msg,Start,false).
call(Name,Flags,Msg,Start,IsRedirect) ->
case distreg:whereis(Name) of
undefined ->
case startactor(Name,Start,[{startreason,Msg}|Flags]) of %
{ok,Pid} when is_pid(Pid) ->
call(Name,Flags,Msg,Start,IsRedirect,Pid);
{error,nocreate} ->
{error,nocreate};
Res ->
Res
end;
Pid ->
? INF("Call have pid ~p for name ~p , alive ~p",[Pid , Name , : is_process_alive(Pid ) ] ) ,
call(Name,Flags,Msg,Start,IsRedirect,Pid)
end.
call(Name,Flags,Msg,Start,IsRedirect,Pid) ->
% If call returns redirect, this is slave node not master node.
% test_mon_calls(Name,Msg),
% ?ADBG("Call ~p",[Msg]),
case catch gen_server:call(Pid,Msg,infinity) of
{redirect,Node} when is_binary(Node) ->
% test_mon_stop(),
?ADBG("Redirect call to=~p, for=~p, ~p",[Node,Name,Msg]),
case lists:member(Node,bkdcore:cluster_nodes()) of
true ->
case IsRedirect of
true ->
double_redirect;
_ ->
case actordb:rpc(Node,element(1,Name),{?MODULE,call,[Name,Flags,Msg,Start,true]}) of
double_redirect ->
diepls(Pid,nomaster),
call(Name,Flags,Msg,Start);
Res ->
Res
end
end;
false ->
case IsRedirect of
onlylocal ->
{redirect,Node};
_ ->
case actordb:rpc(Node,element(1,Name),{?MODULE,call,[Name,Flags,Msg,Start,false]}) of
{error,Connerr} when Connerr == econnrefused; Connerr == timeout; Connerr == invalidnode ->
Pid ! doelection,
call(Name,Flags,Msg,Start,false,Pid);
Res ->
?ADBG("Redirect rpc res=~p",[Res]),
Res
end
end
end;
{'EXIT',{noproc,_}} = _X ->
?ADBG("noproc call again ~p",[_X]),
call(Name,Flags,Msg,Start);
{'EXIT',{normal,_}} ->
?ADBG("died normal"),
call(Name,Flags,Msg,Start);
{'EXIT',{nocreate,_}} ->
% test_mon_stop(),
{error,nocreate};
{'EXIT',{error,_} = E} ->
E;
{'EXIT',{timeout,_}} ->
{error,timeout};
Res ->
% test_mon_stop(),
Res
end.
startactor(Name,Start,Flags) ->
case Start of
{Mod,Func,Args} ->
apply(Mod,Func,[Name|Args]);
undefined ->
{ok,undefined};
_ ->
apply(Start,start,[Name,Flags])
end.
test_mon_calls(Who , Msg ) - >
% Ref = make_ref(),
% put(ref,Ref),
put(refpid , spawn(fun ( ) - > test_mon_proc(Who , Msg , Ref ) end ) ) .
% test_mon_proc(Who,Msg,Ref) ->
% receive
% Ref ->
% ok
after 1000 - >
? waiting on ~p , for ~p",[Who , Msg ] ) ,
% test_mon_proc(Who,Msg,Ref)
% end.
% test_mon_stop() ->
butil : safesend(get(refpid ) , get(ref ) ) .
call_slave(Cb,Actor,Type,Msg) ->
call_slave(Cb,Actor,Type,Msg,[]).
call_slave(Cb,Actor,Type,Msg,Flags) ->
actordb_util:wait_for_startup(Type,Actor,0),
case apply(Cb,cb_slave_pid,[Actor,Type,[{startreason,Msg}|Flags]]) of
{ok,Pid} ->
ok;
Pid when is_pid(Pid) ->
ok
end,
case catch gen_server:call(Pid,Msg,infinity) of
{'EXIT',{noproc,_}} when Msg /= stop ->
call_slave(Cb,Actor,Type,Msg);
{'EXIT',{normal,_}} when Msg /= stop ->
call_slave(Cb,Actor,Type,Msg);
Res ->
Res
end.
diepls(Pid,Reason) ->
gen_server:cast(Pid,{diepls,Reason}).
start_copylock(Fullname,O) ->
start_copylock(Fullname,O,0).
start_copylock(Fullname,Opt,N) when N < 2 ->
case distreg:whereis(Fullname) of
undefined ->
start(Opt);
_ ->
timer:sleep(1000),
start_copylock(Fullname,Opt,N+1)
end;
start_copylock(Fullname,_,_) ->
Pid = distreg:whereis(Fullname),
print_info(Pid),
{error,{slave_proc_running,Pid,Fullname}}.
Opts :
[ { actor , Name},{type , Type},{mod , CallbackModule},{state , CallbackState } ,
{ inactivity_timeout , SecondsOrInfinity},{slave , true / false},{copyfrom , NodeName},{copyreset,{Mod , Func , } } ]
start(Opts) ->
? ADBG("Starting ~p slave=~p",[butil : ds_vals([actor , type],Opts),butil : ds_val(slave , ) ] ) ,
Mod = butil:ds_val(mod,Opts),
Name = butil:ds_val(name,Opts),
Ref = make_ref(),
case gen_server:start(?MODULE, [{start_from,{self(),Ref}}|Opts], [apply(Mod,cb_spawnopts,[Name])]) of
{ok,Pid} ->
{ok,Pid};
{error,normal} ->
Init failed gracefully . It should have sent an explanation .
receive
{Ref,nocreate} ->
{error,nocreate};
{Ref,{registered,Pid}} ->
{ok,Pid};
{Ref,{actornum,Path,Num}} ->
{ok,Path,Num};
{Ref,{ok,[{columns,_},_]} = Res} ->
Res;
{Ref,nostart} ->
{error,nostart}
after 0 ->
{error,cantstart}
end;
Err ->
?AERR("start sqlproc error ~p",[Err]),
Err
end.
stop(Pid) when is_pid(Pid) ->
Pid ! stop;
stop(Name) ->
case distreg:whereis(Name) of
undefined ->
ok;
Pid ->
stop(Pid)
end.
print_info(undefined) ->
ok;
print_info({A,T}) ->
print_info(distreg:whereis({A,T}));
print_info(Pid) ->
gen_server:cast(Pid,print_info).
% Call processing.
% Calls are processed here and in actordb_sqlprocutil:doqueue.
Only in handle_call are we allowed to add calls to callqueue .
handle_call(Msg,_,P) when is_binary(P#dp.movedtonode) ->
?DBG("REDIRECT BECAUSE MOVED TO NODE ~p ~p",[P#dp.movedtonode,Msg]),
case apply(P#dp.cbmod,cb_redirected_call,[P#dp.cbstate,P#dp.movedtonode,Msg,moved]) of
{reply,What,NS,Red} ->
{reply,What,P#dp{cbstate = NS, movedtonode = Red}};
ok ->
{reply,{redirect,P#dp.movedtonode},P}
end;
handle_call({dbcopy,Msg},CallFrom,P) -> %when element(1,Msg) /= reached_end ->
Me = actordb_conf:node_name(),
case ok of
_ when element(1,Msg) == send_db andalso P#dp.verified == false ->
{noreply,P#dp{callqueue = queue:in_r({CallFrom,{dbcopy,Msg}},P#dp.callqueue),
activity = actordb_local:actor_activity(P#dp.activity)}};
_ when element(1,Msg) == send_db andalso Me /= P#dp.masternode ->
?DBG("redirect not master node"),
actordb_sqlprocutil:redirect_master(P);
_ ->
actordb_sqlprocutil:dbcopy_call(Msg,CallFrom,
P#dp{activity = actordb_local:actor_activity(P#dp.activity)})
end;
handle_call({state_rw,_} = Msg,From, #dp{wasync = #ai{wait = WRef}} = P) when is_reference(WRef) ->
?DBG("Queuing state call, waitingfor=~p, msg=~p",[WRef,Msg]),
{noreply,P#dp{statequeue = queue:in_r({From,Msg},P#dp.statequeue)}};
handle_call({state_rw,What},From,P) ->
state_rw_call(What,From,P#dp{activity = actordb_local:actor_activity(P#dp.activity)});
handle_call({commit,Doit,Id},From, P) ->
commit_call(Doit,Id,From,P#dp{activity = actordb_local:actor_activity(P#dp.activity)});
handle_call(Msg,From,P) ->
case Msg of
_ when P#dp.mors == slave ->
% Now = actordb_local:elapsed_time(),
case P#dp.masternode of
undefined when P#dp.election_timer == undefined, is_integer(P#dp.without_master_since) ->
% P#dp.without_master_since < Now-3000 ->
% We have given up. But since we are getting a call from outside, try again.
% Execute election.
{noreply,actordb_sqlprocutil:start_verify(
P#dp{callqueue = queue:in_r({From,Msg},P#dp.callqueue),
flags = P#dp.flags band (bnot ?FLAG_WAIT_ELECTION)},false)};
undefined ->
?DBG("Queing msg no master yet ~p",[Msg]),
{noreply,P#dp{callqueue = queue:in_r({From,Msg},P#dp.callqueue),
election_timer = actordb_sqlprocutil:election_timer(P#dp.election_timer),
flags = P#dp.flags band (bnot ?FLAG_WAIT_ELECTION)}};
_ ->
case apply(P#dp.cbmod,cb_redirected_call,[P#dp.cbstate,P#dp.masternode,Msg,slave]) of
{reply,What,NS,_} ->
{reply,What,P#dp{cbstate = NS}};
ok ->
?DBG("Redirecting to master"),
actordb_sqlprocutil:redirect_master(P)
end
end;
_ when P#dp.verified == false ->
case P#dp.flags band ?FLAG_WAIT_ELECTION > 0 of
true when element(1,P#dp.election_timer) == election ->
?DBG("clear wait_election"),
P#dp.election_timer ! exit ,
case P#dp.election_timer of
{election,undefined,_} ->
% if timer already ran out but we are waiting due to wait_election flag
self() ! {election_timeout, element(3, P#dp.election_timer)};
_ ->
% Do nothing election is slower and it does not have to wait
ok
end,
handle_call(Msg,From,P#dp{flags = P#dp.flags band (bnot ?FLAG_WAIT_ELECTION)});
_ ->
case apply(P#dp.cbmod,cb_unverified_call,[P#dp.cbstate,Msg]) of
queue ->
{noreply,P#dp{callqueue = queue:in_r({From,Msg},P#dp.callqueue)}};
{moved,Moved} ->
{noreply,P#dp{movedtonode = Moved}};
{moved,Moved,NS} ->
{noreply,P#dp{movedtonode = Moved, cbstate = NS}};
{reply,What} ->
{reply,What,P};
{reinit_master,Mors} ->
{ok,NP} = init(P#dp{mors = Mors},cb_reinit),
{noreply,NP};
{isolate,_OutSql,_State} ->
write_call(Msg,From,P);
{reinit,Sql,NS} ->
{ok,NP} = init(P#dp{cbstate = NS,
callqueue = queue:in_r({From,#write{sql = Sql}},P#dp.callqueue)},cb_reinit),
{noreply,NP}
end
end;
#write{transaction = TransactionId} = Msg1 when P#dp.transactionid /= undefined,
element(2,P#dp.transactionid) >= element(2,TransactionId) ->
case ok of
_ when P#dp.transactionid == TransactionId ->
% Same transaction can write to an actor more than once
% Transactions do not use async calls to driver,
% so if we are in the middle of one, we can execute
% this write immediately.
write_call1(Msg1,From,P#dp.schemavers,P);
_ ->
{Tid,Updaterid,_} = P#dp.transactionid,
?DBG("Overruling transaction ~p.'__mupdate__', for ~p.'__mupdate__'",
[Updaterid,element(2,TransactionId)]),
% New transaction has higher priority.
% Interrupt it unless already committed.
actordb_sqlprocutil:transaction_overruled(Tid,Updaterid),
{noreply,timeout(P#dp{callqueue = queue:in_r({From,Msg},P#dp.callqueue)})}
end;
#read{} when P#dp.movedtonode == undefined ->
% read call just buffers call
read_call(Msg,From,P);
#write{transaction = undefined} when P#dp.movedtonode == undefined ->
% write_call just buffers call, we can always run it.
% Actual write is executed at the end of doqueue.
write_call(Msg,From,P);
_ when P#dp.movedtonode == deleted andalso (element(1,Msg) == read orelse element(1,Msg) == write) ->
% #write and #read have flags in same pos
Flags = element(#write.flags,Msg),
case lists:member(create,Flags) of
true ->
{stop,normal,P};
false ->
{reply, {error,nocreate},P}
end;
_ ->
% ?DBG("Queing msg ~p, callres ~p, locked ~p, transactionid ~p",
			% 	[Msg,P#dp.callres,P#dp.locked,P#dp.transactionid]),
% Continue in doqueue
{noreply,timeout(P#dp{callqueue = queue:in_r({From,Msg},P#dp.callqueue),
activity = actordb_local:actor_activity(P#dp.activity)})}
end.
% Schedule a {timeout,N} message to self unless one is already pending.
% The ?TIMEOUT_PENDING flag debounces the send so at most one timeout
% message is in flight at a time. Returns the (possibly updated) state.
timeout(P) ->
	timeout(P,0).
timeout(#dp{flags = Flags} = P, Resend) ->
	case Flags band ?TIMEOUT_PENDING of
		0 ->
			self() ! {timeout,Resend+1},
			P#dp{flags = Flags bor ?TIMEOUT_PENDING};
		_ ->
			% A timeout message is already queued; do not send another.
			P
	end.
% Commit (Doit == true) or abort (Doit == false) a multi-actor transaction on
% this actor. Id is the {Tid,Updaterid,Node} this decision belongs to; a
% decision for any other transaction id is acknowledged without doing anything.
% Fix: several comment lines had lost their leading "%" (source garbling),
% which made this function unparseable; the markers are restored.
commit_call(Doit,Id,From,P) ->
	?DBG("Commit doit=~p, id=~p, from=~p, trans=~p",[Doit,Id,From,P#dp.transactionid]),
	case P#dp.transactionid == Id of
		true ->
			% Stop monitoring the transaction-checker process.
			case P#dp.transactioncheckref of
				undefined ->
					ok;
				_ ->
					erlang:demonitor(P#dp.transactioncheckref)
			end,
			?DBG("Commit write ~p",[P#dp.transactioninfo]),
			{Sql,EvNum,_NewVers} = P#dp.transactioninfo,
			case Sql of
				<<"delete">> when Doit == true ->
					Moved = deleted;
				_ ->
					Moved = P#dp.movedtonode
			end,
			case Doit of
				true when P#dp.followers == [] ->
					% Single-node cluster: apply locally, no replication round.
					case Moved of
						deleted ->
							Me = self(),
							actordb_sqlprocutil:delete_actor(P),
							spawn(fun() -> ?DBG("Stopping in commit"), stop(Me) end);
						_ ->
							VarHeader = actordb_sqlprocutil:create_var_header(P),
							ok = actordb_sqlite:okornot(actordb_sqlite:exec(
								P#dp.db,<<"#s01;">>,P#dp.evterm,EvNum,VarHeader)),
							actordb_sqlite:replication_done(P#dp.db)
					end,
					{reply,ok,actordb_sqlprocutil:doqueue(P#dp{transactionid = undefined,
						transactioncheckref = undefined,
						transactioninfo = undefined, movedtonode = Moved,
						evnum = EvNum, evterm = P#dp.current_term})};
				true ->
					% We can safely release savepoint.
					% This will send the remaining WAL pages to followers that have commit flag set.
					% Followers will then rpc back appendentries_response.
					% We can also set #dp.evnum now.
					VarHeader = actordb_sqlprocutil:create_var_header(P),
					actordb_sqlite:okornot(actordb_sqlite:exec(P#dp.db,<<"#s01;">>,
						P#dp.evterm,EvNum,VarHeader)),
					actordb_sqlite:replication_done(P#dp.db),
					{noreply,ae_timer(P#dp{callfrom = From,
						callres = ok,evnum = EvNum,movedtonode = Moved,
						transactionid = undefined, transactioninfo = undefined,
						transactioncheckref = undefined})};
				false when P#dp.followers == [] ->
					actordb_sqlite:rollback(P#dp.db),
					{reply,ok,actordb_sqlprocutil:doqueue(P#dp{transactionid = undefined,
						transactioninfo = undefined,transactioncheckref = undefined})};
				false ->
					% Transaction failed.
					% Delete it from __transactions.
					% EvNum will actually be the same as transactionsql that we have not finished.
					% Thus this EvNum section of WAL contains pages from failed transaction and
					% cleanup of transaction from __transactions.
					{Tid,Updaterid,_} = P#dp.transactionid,
					% Do not match on ok. It might fail if this is the actual node on which transaction
					% failed. It makes no difference if we are redundantly calling rollback again.
					actordb_sqlite:rollback(P#dp.db),
					NewSql = <<"DELETE FROM __transactions WHERE tid=",(butil:tobin(Tid))/binary," AND updater=",
								(butil:tobin(Updaterid))/binary,";">>,
					write_call(#write{sql = NewSql},From,P#dp{callfrom = undefined,
						transactionid = undefined,transactioninfo = undefined,
						transactioncheckref = undefined})
			end;
		_ ->
			% Decision for some other (stale) transaction id: just acknowledge.
			{reply,ok,P}
	end.
% No-op state call: replies ok without touching state.
state_rw_call(donothing,_From,P) ->
	{reply,ok,P};
% Catch-up finished: clear the inrecovery flag.
state_rw_call(recovered,_From,P) ->
	?DBG("No longer in recovery!"),
	{reply,ok,P#dp{inrecovery = false}};
% Executed on follower.
% AE is split into multiple calls (because wal is sent page by page as it is written).
% Start sets parameters. There may not be any wal append calls after if empty write.
% AEType = [head,empty,recover]
% Fix: several comment lines had lost their leading "%" (source garbling),
% leaving unparseable text inside this clause; the markers are restored.
state_rw_call({appendentries_start,Term,LeaderNode,PrevEvnum,PrevTerm,AEType,CallCount} = What,From,P) ->
	?DBG("AE start ~p {PrevEvnum,PrevTerm}=~p leader=~p",[AEType, {PrevEvnum,PrevTerm},LeaderNode]),
	RecoveryAge = actordb_local:elapsed_time() - P#dp.recovery_age,
	case ok of
		_ when P#dp.inrecovery andalso RecoveryAge > 2000 andalso AEType == head ->
			% Recovery has been running suspiciously long; give up on it and retry.
			% ?DBG("Ignoring head because inrecovery"),
			% Reply may have gotten lost or leader could have changed.
			% case RecoveryAge > 2000 of
			% 	true ->
			?ERR("Recovery mode timeout",[]),
			state_rw_call(What,From,P#dp{inrecovery = false});
			% 	false ->
			% 		actordb_sqlprocutil:ae_respond(P,LeaderNode,wrongstate,PrevEvnum,AEType,CallCount),
			% 		{reply,false,P}
			% end;
		_ when is_pid(P#dp.copyproc) ->
			% A full db copy is in progress; AE pages would conflict with it.
			?DBG("Ignoring AE because copy in progress"),
			% actordb_sqlprocutil:ae_respond(P,LeaderNode,wrongstate,PrevEvnum,AEType,CallCount),
			{reply,false,P};
		_ when Term < P#dp.current_term ->
			?WARN("AE start, input term too old ~p {InTerm,MyTerm}=~p",
				[AEType,{Term,P#dp.current_term}]),
			reply(From,false),
			actordb_sqlprocutil:ae_respond(P,LeaderNode,false,PrevEvnum,AEType,CallCount),
			% Some node thinks its master and sent us appendentries start.
			% Because we are master with higher term, we turn it down.
			% But we also start a new write so that nodes get synchronized.
			case P#dp.mors of
				master ->
					% {noreply,actordb_sqlprocutil:start_verify(P,false)};
					?DBG("Executing empty write"),
					write_call(#write{sql = []},undefined,P);
				_ ->
					{noreply,P}
			end;
		_ when P#dp.mors == slave, P#dp.masternode /= LeaderNode ->
			?DBG("AE start, slave now knows leader ~p ~p",[AEType,LeaderNode]),
			% If we have any pending replies, this must result in a rewind for this node
			% and a new write to master.
			reply(P#dp.callfrom,{redirect,LeaderNode}),
			RR = P#dp.rasync,
			case RR#ai.callfrom of
				[_|_] ->
					[reply(F,{redirect,P#dp.masternode}) || F <- RR#ai.callfrom];
				_ ->
					ok
			end,
			actordb_local:actor_mors(slave,LeaderNode),
			NP = P#dp{masternode = LeaderNode,without_master_since = undefined,
				masternode_since = actordb_local:elapsed_time(),
				masternodedist = bkdcore:dist_name(LeaderNode),
				netchanges = actordb_local:net_changes(),
				election_timer = undefined,
				rasync = RR#ai{callfrom = undefined, wait = undefined},
				callfrom = undefined, callres = undefined,verified = true},
			state_rw_call(What,From,actordb_sqlprocutil:doqueue(actordb_sqlprocutil:reopen_db(NP)));
		% This node is candidate or leader but someone with newer term is sending us log
		_ when P#dp.mors == master ->
			?WARN("AE start, stepping down as leader ~p ~p",
				[AEType,{Term,P#dp.current_term}]),
			reply(P#dp.callfrom,{redirect,LeaderNode}),
			RR = P#dp.rasync,
			case RR#ai.callfrom of
				[_|_] ->
					[reply(F,{redirect,P#dp.masternode}) || F <- RR#ai.callfrom];
				_ ->
					ok
			end,
			actordb_local:actor_mors(slave,LeaderNode),
			NP = P#dp{mors = slave, verified = true, election_timer = undefined,
				voted_for = undefined,callfrom = undefined, callres = undefined,
				rasync = RR#ai{callfrom = undefined, wait = undefined},
				masternode = LeaderNode,without_master_since = undefined,
				netchanges = actordb_local:net_changes(),
				masternode_since = actordb_local:elapsed_time(),
				masternodedist = bkdcore:dist_name(LeaderNode),
				current_term = Term},
			state_rw_call(What,From,
				actordb_sqlprocutil:save_term(actordb_sqlprocutil:doqueue(actordb_sqlprocutil:reopen_db(NP))));
		_ when P#dp.evnum /= PrevEvnum; P#dp.evterm /= PrevTerm ->
			% Log consistency check failed: our last event does not match leader's.
			?WARN("AE start failed, evnum evterm do not match, type=~p, {MyEvnum,MyTerm}=~p, {InNum,InTerm}=~p",
				[AEType,{P#dp.evnum,P#dp.evterm},{PrevEvnum,PrevTerm}]),
			case ok of
				% Node is conflicted, delete last entry
				_ when PrevEvnum > 0, AEType == recover, P#dp.evnum > 0 ->
					NP = actordb_sqlprocutil:rewind_wal(P);
				% If false this node is behind. If empty this is just check call.
				% Wait for leader to send an earlier event.
				_ ->
					NP = P
			end,
			reply(From,false),
			actordb_sqlprocutil:ae_respond(NP,LeaderNode,false,PrevEvnum,AEType,CallCount),
			{noreply,NP};
		_ when Term > P#dp.current_term ->
			?WARN("AE start, my term out of date type=~p {InTerm,MyTerm}=~p",
				[AEType,{Term,P#dp.current_term}]),
			NP = P#dp{current_term = Term,voted_for = undefined,
				masternode = LeaderNode, without_master_since = undefined,verified = true,
				netchanges = actordb_local:net_changes(),
				masternode_since = actordb_local:elapsed_time(),
				masternodedist = bkdcore:dist_name(LeaderNode)},
			state_rw_call(What,From,actordb_sqlprocutil:doqueue(actordb_sqlprocutil:save_term(NP)));
		_ when AEType == empty ->
			?DBG("AE start, ok for empty"),
			reply(From,ok),
			actordb_sqlprocutil:ae_respond(P,LeaderNode,true,PrevEvnum,AEType,CallCount),
			{noreply,P#dp{verified = true, masternode_since = actordb_local:elapsed_time()}};
		% Ok, now it will start receiving wal pages
		_ ->
			case AEType == recover of
				true ->
					Age = actordb_local:elapsed_time(),
					?INF("AE start ok for recovery from ~p, evnum=~p, evterm=~p",
						[LeaderNode,P#dp.evnum,P#dp.evterm]);
				false ->
					Age = P#dp.recovery_age,
					?DBG("AE start ok from ~p",[LeaderNode])
			end,
			{reply,ok,P#dp{verified = true, masternode_since = actordb_local:elapsed_time(), inrecovery = AEType == recover, recovery_age = Age}}
	end;
% Executed on follower.
% Appends pages, a single write is split into multiple calls.
% Header tells you if this is last call. If we reached header, this means we must have received
% all preceding calls as well.
% Header/Body may be a single page or lists of pages (handled by append_wal/6).
state_rw_call({appendentries_wal,Header,Body,AEType,CallCount},From,P) ->
	append_wal(P,From,CallCount,Header,Body,AEType);
% Executed on leader.
% A follower acknowledges (or rejects) a replication call: update its
% match/next index, finish pending replies, or start log recovery / full copy.
% Fix: several lines had lost their leading "%" or parts of their text
% (source garbling); comment markers restored, one live line reconstructed.
state_rw_call({appendentries_response,Node,CurrentTerm,Success,
		EvNum,EvTerm,MatchEvnum,AEType,{SentIndex,SentTerm}} = What,From,P) ->
	Follower = lists:keyfind(Node,#flw.node,P#dp.followers),
	case Follower of
		false ->
			?DBG("Adding node to follower list ~p",[Node]),
			state_rw_call(What,From,actordb_sqlprocutil:store_follower(P,#flw{node = Node}));
		_ when (not (AEType == head andalso Success)) andalso
				(SentIndex /= Follower#flw.match_index orelse
				SentTerm /= Follower#flw.match_term orelse P#dp.verified == false) ->
			% We can get responses from AE calls which are out of date. This is why the other node always sends
			% back {SentIndex,SentTerm} which are the parameters for follower that we knew of when we sent data.
			% If these two parameters match our current state, then response is valid.
			?DBG("ignoring AE resp, from=~p,success=~p,type=~p,prevevnum=~p,evnum=~p,matchev=~p, sent=~p",
				[Node,Success,AEType,Follower#flw.match_index,EvNum,MatchEvnum,{SentIndex,SentTerm}]),
			{reply,ok,P};
		_ ->
			?DBG("AE resp,from=~p,success=~p,type=~p,prevnum=~p,prevterm=~p evnum=~p,evterm=~p,matchev=~p",
				[Node,Success,AEType,Follower#flw.match_index,Follower#flw.match_term,EvNum,EvTerm,MatchEvnum]),
			Now = actordb_local:elapsed_time(),
			NFlw = Follower#flw{match_index = EvNum, match_term = EvTerm,next_index = EvNum+1,
						wait_for_response_since = undefined, last_seen = Now},
			case Success of
				% An earlier response.
				_ when P#dp.mors == slave ->
					?WARN("Received AE response after stepping down"),
					{reply,ok,P};
				true ->
					reply(From,ok),
					% NOTE(review): the continuation of this call was reconstructed from a
					% garbled source; third continue_maybe argument presumed AEType == head
					% — confirm against upstream.
					NP = actordb_sqlprocutil:reply_maybe(actordb_sqlprocutil:continue_maybe(
						actordb_sqlprocutil:store_follower(P,NFlw),NFlw,AEType == head)), % orelse AEType == empty
					?DBG("AE response for node ~p, followers=~p",
						[Node,[{F#flw.node,F#flw.match_index,F#flw.match_term,F#flw.next_index} || F <- NP#dp.followers]]),
					{noreply,NP};
				% What we thought was follower is ahead of us and we need to step down
				false when P#dp.current_term < CurrentTerm ->
					?DBG("My term is out of date {His,Mine}=~p",[{CurrentTerm,P#dp.current_term}]),
					{reply, ok, P#dp{masternode = undefined, without_master_since = Now,
						masternodedist = undefined, verified = false,
						current_term = CurrentTerm,election_timer = actordb_sqlprocutil:election_timer(Now,undefined)}};
					% {reply,ok,actordb_sqlprocutil:reopen_db(actordb_sqlprocutil:save_term(
					% 	P#dp{mors = slave,current_term = CurrentTerm,
					% 	election_timer = actordb_sqlprocutil:election_timer(Now,P#dp.election_timer),
					% 	masternode = undefined, without_master_since = Now,
					% 	masternodedist = undefined,
					% 	voted_for = undefined, followers = []}))};
				false when NFlw#flw.match_index == P#dp.evnum ->
					% Follower is up to date. He replied false. Maybe our term was too old.
					{reply,ok,actordb_sqlprocutil:reply_maybe(actordb_sqlprocutil:store_follower(P,NFlw))};
				false ->
					% Check if we are copying entire db to that node already, do nothing.
					case [C || C <- P#dp.dbcopy_to, C#cpto.node == Node, C#cpto.actorname == P#dp.actorname] of
						[_|_] ->
							?DBG("Ignoring appendendentries false response because copying to"),
							{reply,ok,P};
						[] ->
							case actordb_sqlprocutil:try_wal_recover(P,NFlw) of
								{false,NP,NF} ->
									?DBG("Can not recover from log, sending entire db"),
									% We can not recover from wal. Send entire db.
									Ref = make_ref(),
									case bkdcore:rpc(NF#flw.node,{?MODULE,call_slave,
											[P#dp.cbmod,P#dp.actorname,P#dp.actortype,
											{dbcopy,{start_receive,actordb_conf:node_name(),Ref}}]}) of
										ok ->
											DC = {send_db,{NF#flw.node,Ref,false,P#dp.actorname}},
											actordb_sqlprocutil:dbcopy_call(DC,From,NP);
										_Err ->
											?ERR("Unable to send db ~p",[_Err]),
											{reply,false,P}
									end;
								{true,NP,NF} ->
									% we can recover from wal
									?DBG("Recovering from wal, for node=~p, match_index=~p, match_term=~p, myevnum=~p",
										[NF#flw.node,NF#flw.match_index,NF#flw.match_term,P#dp.evnum]),
									reply(From,ok),
									{noreply,actordb_sqlprocutil:continue_maybe(NP,NF,false)}
							end
					end
			end
	end;
% A candidate asks for our vote for term NewTerm; {LastEvnum,LastTerm}
% describe its log so we can judge whether it is at least as fresh as ours.
state_rw_call({request_vote,Candidate,NewTerm,LastEvnum,LastTerm} = What,From,P) ->
	?DBG("Request vote for=~p, mors=~p, {histerm,myterm}=~p, {HisLogTerm,MyLogTerm}=~p {HisEvnum,MyEvnum}=~p",
		[Candidate,P#dp.mors,{NewTerm,P#dp.current_term},{LastTerm,P#dp.evterm},{LastEvnum,P#dp.evnum}]),
	% Candidate's log is "up to date" if its last event term is newer than ours,
	% or terms are equal and its event number is >= ours (equal counts as fresh).
	Uptodate =
	case ok of
		_ when P#dp.evterm < LastTerm ->
			true;
		_ when P#dp.evterm > LastTerm ->
			false;
		_ when P#dp.evnum < LastEvnum ->
			true;
		_ when P#dp.evnum > LastEvnum ->
			false;
		_ ->
			true
	end,
	Follower = lists:keyfind(Candidate,#flw.node,P#dp.followers),
	DistFollower = bkdcore:dist_name(Candidate),
	Now = actordb_local:elapsed_time(),
	case Follower of
		% We are master and the candidate is a known cluster node we are not
		% tracking yet: register it as a follower, then process the vote.
		false when P#dp.mors == master, DistFollower /= undefined ->
			?DBG("Adding node to follower list ~p",[Candidate]),
			state_rw_call(What,From,actordb_sqlprocutil:store_follower(P,#flw{node = Candidate}));
		_ ->
			case ok of
				% Candidates term is lower than current_term, ignore.
				_ when NewTerm < P#dp.current_term ->
					DoElection = true, %(P#dp.mors == master andalso P#dp.verified == true),
					reply(From,{outofdate,actordb_conf:node_name(),P#dp.current_term,{P#dp.evnum,P#dp.evterm}}),
					NP = P;
				% We've already seen this term, only vote yes if we have not voted
				% or have voted for this candidate already.
				_ when NewTerm == P#dp.current_term ->
					case (P#dp.voted_for == undefined orelse P#dp.voted_for == Candidate) of
						true when Uptodate ->
							DoElection = false,
							reply(From,{true,actordb_conf:node_name(),NewTerm,{P#dp.evnum,P#dp.evterm}}),
							NP = actordb_sqlprocutil:save_term(P#dp{voted_for = Candidate,
								current_term = NewTerm,
								election_timer = actordb_sqlprocutil:election_timer(Now,P#dp.election_timer)});
						true ->
							% Would vote, but candidate's log is behind ours.
							DoElection = true,% (P#dp.mors == master andalso P#dp.verified == true),
							reply(From,{outofdate,actordb_conf:node_name(),NewTerm,{P#dp.evnum,P#dp.evterm}}),
							NP = actordb_sqlprocutil:save_term(P#dp{voted_for = undefined, current_term = NewTerm});
						false ->
							% Already voted for someone else this term.
							DoElection =(P#dp.mors == master andalso P#dp.verified == true),
							AV = {alreadyvoted,actordb_conf:node_name(),P#dp.current_term,{P#dp.evnum,P#dp.evterm}},
							reply(From,AV),
							NP = P
					end;
				% Candidate is the master we already follow: renew the vote.
				_ when Uptodate, P#dp.masternode == Candidate ->
					DoElection = false,
					?DBG("Voting yes for same master as before"),
					reply(From,{true,actordb_conf:node_name(),NewTerm,{P#dp.evnum,P#dp.evterm}}),
					NP = actordb_sqlprocutil:save_term(P#dp{voted_for = Candidate, current_term = NewTerm,
						election_timer = actordb_sqlprocutil:election_timer(Now,P#dp.election_timer)});
				% New candidates term is higher than ours, is he as up to date?
				_ when Uptodate ->
					DoElection = false,
					?DBG("Stepping down after voting on another master"),
					reply(From,{true,actordb_conf:node_name(),NewTerm,{P#dp.evnum,P#dp.evterm}}),
					NP = actordb_sqlprocutil:save_term(P#dp{mors = slave, verified = false,
						masternode = undefined,masternodedist = undefined,
						without_master_since = Now,
						last_vote_event = Now,
						voted_for = Candidate, current_term = NewTerm,
						election_timer = actordb_sqlprocutil:election_timer(Now,undefined)});
				% Higher term, but not as up to date. We can not vote for him.
				% We do have to remember new term index though.
				_ ->
					DoElection = true,% (P#dp.mors == master andalso P#dp.verified == true),
					reply(From,{outofdate,actordb_conf:node_name(),NewTerm,{P#dp.evnum,P#dp.evterm}}),
					NP = actordb_sqlprocutil:save_term(P#dp{voted_for = undefined, current_term = NewTerm,
						election_timer = actordb_sqlprocutil:election_timer(Now,P#dp.election_timer)})
			end,
			% DoElection is only reported in the debug log below.
			?DBG("Doing election after request_vote? ~p, mors=~p, verified=~p, election=~p",
				[DoElection,P#dp.mors,P#dp.verified,P#dp.election_timer]),
			{noreply,actordb_sqlprocutil:doqueue(NP#dp{election_timer =
				actordb_sqlprocutil:election_timer(Now,P#dp.election_timer)})}
	end;
% Actor deleted: rewind the wal to event 0, acknowledge and stop the process.
state_rw_call({delete,deleted},From,P) ->
	ok = actordb_sqlite:wal_rewind(P,0),
	reply(From,ok),
	{stop,normal,P};
% Actor moved elsewhere: record the moved-to node, acknowledge and stop.
state_rw_call({delete,{moved,Moved}},From,P) ->
	actordb_sqlprocutil:moved_replace(P,Moved),
	reply(From,ok),
	{stop,normal,P};
% Force a checkpoint of the underlying db.
state_rw_call(checkpoint,_From,P) ->
	actordb_sqlprocutil:checkpoint(P),
	{reply,ok,P}.
% Store one or more replicated wal pages received from the leader.
% When given lists, pages are applied in order; iteration stops as soon as a
% page yields {noreply,_} (final page of a write) or the list is exhausted.
append_wal(P,From,CallCount,[FirstH|RestH],[FirstB|RestB],AEType) ->
	case append_wal(P,From,CallCount,FirstH,FirstB,AEType) of
		{reply,ok,NP} when RestH /= [] ->
			append_wal(NP,From,CallCount,RestH,RestB,AEType);
		{reply,ok,NP} ->
			{reply,ok,NP};
		{noreply,NP} ->
			{noreply,NP}
	end;
% Single page: hand it to the storage layer, then interpret the result.
append_wal(P,From,CallCount,Header,Body,AEType) ->
	StoreResult = actordb_sqlprocutil:append_wal(P,Header,Body),
	append_wal1(P,From,CallCount,Header,AEType,StoreResult).
% The storage layer may return {ok,NewCbState}: absorb the callback state
% and re-dispatch with a plain ok result.
append_wal1(P,From,CallCount,Header,AEType,{ok,NS}) ->
	append_wal1(P#dp{cbstate = NS},From,CallCount,Header,AEType,ok);
% Interpret the 24-byte page header after the page was stored.
% Layout: <<Evterm:64, Evnum:64, Pgno:32, Commit:32>>; Commit == 0 means more
% pages of this write will follow, non-zero marks the final page of the write.
append_wal1(P,From,CallCount,Header,AEType,AWR) ->
	case AWR of
		ok ->
			case Header of
				% dbsize == 0, not last page
				<<_:20/binary,0:32>> ->
					?DBG("AE append ~p",[AEType]),
					% Stay locked until the final page of this write arrives.
					{reply,ok,P#dp{locked = [ae]}};
				% last page
				<<Evterm:64/unsigned-big,Evnum:64/unsigned-big,Pgno:32,Commit:32>> ->
					?DBG("AE WAL done evnum=~p,evterm=~p,aetype=~p,qempty=~p,master=~p,pgno=~p,commit=~p",
						[Evnum,Evterm,AEType,queue:is_empty(P#dp.callqueue),P#dp.masternode,Pgno,Commit]),
					% Prevent any timeouts on next ae since recovery process is progressing.
					case P#dp.inrecovery of
						true ->
							RecoveryAge = actordb_local:elapsed_time();
						false ->
							RecoveryAge = P#dp.recovery_age
					end,
					NP = P#dp{evnum = Evnum, evterm = Evterm,locked = [], recovery_age = RecoveryAge},
					reply(From,done),
					% Note: responds with the pre-update evnum (P#dp.evnum).
					actordb_sqlprocutil:ae_respond(NP,NP#dp.masternode,true,P#dp.evnum,AEType,CallCount),
					{noreply,NP}
			end;
		_X ->
			% Storing the page failed: report failure back to the leader.
			?ERR("Append failed ~p",[_X]),
			reply(From,false),
			actordb_sqlprocutil:ae_respond(P,P#dp.masternode,false,P#dp.evnum,AEType,CallCount),
			{noreply,P}
	end.
% Buffer a read request (leader side). Reads are not executed here; they are
% collected into the read-async accumulator (#ai) and executed later as a
% batch (see read_call1). Improvement: the six identical #ai buffer-update
% expressions are deduplicated into the private helper buffer_read/5.
read_call(#read{sql = [exists]},_From,#dp{mors = master} = P) ->
	% The [exists] probe is answered affirmatively without touching the db.
	{reply,{ok,[{columns,{<<"exists">>}},{rows,[{<<"true">>}]}]},P};
read_call(#read{sql = {[exists],_}},_From,#dp{mors = master} = P) ->
	{reply,{ok,[{columns,{<<"exists">>}},{rows,[{<<"true">>}]}]},P};
read_call(Msg,From,#dp{flags = F} = P) when (F band ?TIMEOUT_PENDING) == 0 ->
	% Make sure a timeout tick is scheduled before buffering.
	read_call(Msg,From,timeout(P));
read_call(Msg,From,#dp{mors = master, rasync = AR} = P) ->
	?DBG("read_call ~p",[Msg]),
	% A single safe-flagged read makes the whole buffered batch safe.
	Safe = AR#ai.safe_read or lists:member(safe,Msg#read.flags),
	case Msg#read.sql of
		{Mod,Func,Args} ->
			% Callback decides what (if anything) to read.
			case apply(Mod,Func,[P#dp.cbstate|Args]) of
				{reply,What,Sql,NS} ->
					% Caller receives What once Sql executes ({tuple,...} marker).
					{noreply,P#dp{cbstate = NS,
						rasync = buffer_read(AR,Sql,{tuple,What,From},[],Safe)}};
				{reply,What,NS} ->
					{reply,What,P#dp{cbstate = NS}};
				{reply,What} ->
					{reply,What,P};
				{Sql,Recs} when is_list(Recs) ->
					{noreply,P#dp{rasync = buffer_read(AR,Sql,From,Recs,Safe)}};
				{Sql,State} ->
					{noreply,P#dp{cbstate = State, rasync = buffer_read(AR,Sql,From,[],Safe)}};
				Sql ->
					{noreply,P#dp{rasync = buffer_read(AR,Sql,From,[],Safe)}}
			end;
		{Sql,{Mod,Func,Args}} ->
			% {Mod,Func,Args} is applied to the result before replying ({mod,...} marker).
			{noreply,P#dp{rasync = buffer_read(AR,Sql,{mod,{Mod,Func,Args},From},[],Safe)}};
		{Sql,Recs} ->
			{noreply,P#dp{rasync = buffer_read(AR,Sql,From,Recs,Safe)}};
		Sql ->
			{noreply,P#dp{rasync = buffer_read(AR,Sql,From,[],Safe)}}
	end;
read_call(_Msg,_From,P) ->
	% Not master: send the caller to the current leader.
	?DBG("redirect read ~p",[P#dp.masternode]),
	actordb_sqlprocutil:redirect_master(P).

% Push one read onto the read-async accumulator: sql, reply target (plain From,
% {tuple,What,From} or {mod,MFA,From}) and bind records, keeping the three
% buffer lists aligned index-for-index.
buffer_read(AR,Sql,CF,Recs,Safe) ->
	AR#ai{buffer = [Sql|AR#ai.buffer], buffer_cf = [CF|AR#ai.buffer_cf],
		buffer_recs = [Recs|AR#ai.buffer_recs], safe_read = Safe}.
% Execute buffered read
% Execute the buffered reads collected by read_call.
% Args: SafeRead flag, list of sql statements, list of bind records, list of
% reply targets ([] when nothing is buffered), current state.
% Fix: a commented-out alternative clause head had lost its "%" (source
% garbling), breaking the case expression; the marker is restored.
read_call1(_,_,_,[],P) ->
	P#dp{activity = actordb_local:actor_activity(P#dp.activity)};
read_call1(_,_,_,From,#dp{mors = slave} = P) ->
	% Lost leadership since buffering: redirect every waiting caller.
	[reply(F,{redirect,P#dp.masternode}) || F <- From],
	P#dp{rasync = #ai{}};
% if no followers safe read is meaningless.
read_call1(true,Sql,Recs,From,#dp{followers = []} = P) ->
	read_call1(false, Sql, Recs, From, P);
read_call1(SafeRead,Sql,Recs,From,P) ->
	ComplSql = list_to_tuple(Sql),
	Records = list_to_tuple(Recs),
	?DBG("READ SQL=~p, Recs=~p, from=~p",[ComplSql, Records,From]),
	%
	% Direct read mode (not async)
	%
	Res = actordb_sqlite:exec(P#dp.db,ComplSql,Records,read),
	case Res of
		{ok,ResTuples} when SafeRead == false ->
			?DBG("Read resp=~p",[Res]),
			actordb_sqlprocutil:read_reply(
				P#dp{rasync = #ai{}, activity = actordb_local:actor_activity(P#dp.activity)}, From, 1, ResTuples);
		{ok,ResTuples} ->
			% We have result, but now we must verify if we are still leader.
			A = P#dp.rasync,
			W = P#dp.wasync,
			NRB = A#ai{wait = ResTuples, callfrom = From, buffer = [], buffer_cf = [], buffer_recs = []},
			% If we have some writes to do, execute them and once writes are replicated, send read response
			% If not, use send_empty_ae.
			% Either way we must end up in reply_maybe.
			case W#ai.buffer of
				[] ->
					?DBG("Sending empty ae to verify read"),
					% no writes pending
					NewFollowers1 = [actordb_sqlprocutil:send_empty_ae(P,NF) || NF <- P#dp.followers],
					ae_timer(P#dp{callres = ok,followers = NewFollowers1, rasync = NRB});
				_ ->
					?DBG("Read response will be sent after write"),
					% exec_writes gets executed after this.
					P#dp{rasync = NRB}
			end;
		% {sql_error,ErrMsg,_} = Err ->
		{sql_error,{ErrPos,_,_ErrAtom,ErrStr},_} ->
			% One statement failed: reply the error to its caller, drop that
			% statement, and retry the batch without it.
			?ERR("Read call error: ~p",[Res]),
			{Before,[Problem|After]} = lists:split(ErrPos,From),
			reply(Problem, {sql_error,ErrStr}),
			{BeforeSql,[_ProblemSql|AfterSql]} = lists:split(ErrPos,Sql),
			{BeforeRecs,[_ProblemRecs|AfterRecs]} = lists:split(ErrPos,Recs),
			read_call1(SafeRead,BeforeSql++AfterSql, BeforeRecs++AfterRecs, Before++After,P#dp{rasync = #ai{}})
	end.
%
% Async mode, less safe because it can return pages that have not been replicated.
% We can make the storage engine level correct (lmdb), but what we can't fix at the moment
% is sqlite page cache. Any write will store pages in cache. Which means reads will use those
% unsafe cache pages instead of what is stored in lmdb.
% This unfortunately means we can't process reads while writes are running. Reads are executed
% before writes.
% We could use seperate read/write connections. This also means there is a read and write
% sqlite page cache. After every write, read connection page cache must be cleared. How
% detrimental to performance that would be is something that needs to be tested.
%
% Recompile driver with threadsafe=1 if using async reads.
%
% Res = actordb_sqlite:exec_async(P#dp.db,ComplSql,Records,read),
% A = P#dp.rasync,
% NRB = A#ai{wait = Res, info = Sql, callfrom = From, buffer = [], buffer_cf = [], buffer_recs = []},
% P#dp{rasync = NRB}.
% Buffer a write request (leader side). Like reads, writes are collected into
% the write-async accumulator (#ai) and executed later as a batch.
% Fix: one comment line had lost its leading "%" (source garbling), breaking
% the case expression; the marker is restored.
write_call(Msg,From,#dp{flags = F} = P) when F band ?TIMEOUT_PENDING == 0 ->
	% Make sure a timeout tick is scheduled before buffering.
	write_call(Msg,From,timeout(P));
write_call(#write{mfa = MFA, sql = Sql} = Msg,From,P) ->
	A = P#dp.wasync,
	% Once any buffered write asks for fsync, the whole batch is fsynced.
	ForceSync = A#ai.buffer_fsync or lists:member(fsync,Msg#write.flags),
	?DBG("writecall evnum_prewrite=~p,term=~p writeinfo=~p, from=~p",[P#dp.evnum,P#dp.current_term,{MFA,Sql},From]),
	case Sql of
		delete ->
			% Actor delete: recorded as a "$deleted$" marker row in __adb.
			A1 = A#ai{buffer = [<<"INSERT OR REPLACE INTO __adb (id,val) VALUES (?1,?2);">>|A#ai.buffer],
				buffer_cf = [From|A#ai.buffer_cf],
				buffer_recs = [[[[?MOVEDTOI,<<"$deleted$">>]]]|A#ai.buffer_recs],
				buffer_moved = deleted, buffer_fsync = ForceSync},
			{noreply,P#dp{wasync = A1}};
		{moved,MovedTo} ->
			A1 = A#ai{buffer = [<<"#s02;">>|A#ai.buffer], buffer_cf = [From|A#ai.buffer_cf],
				buffer_recs = [[[[?MOVEDTOI,MovedTo]]]|A#ai.buffer_recs],
				buffer_moved = {moved,MovedTo}, buffer_fsync = ForceSync},
			{noreply,P#dp{wasync = A1}};
		% If new schema version write, add sql to first place of list of writes.
		_ when Msg#write.newvers /= undefined, MFA == undefined ->
			% Buffer is later reversed, so appending at the end puts this first.
			A1 = A#ai{buffer = A#ai.buffer++[Sql], buffer_recs = A#ai.buffer_recs++[Msg#write.records],
				buffer_cf = A#ai.buffer_cf++[From], buffer_nv = Msg#write.newvers,
				buffer_fsync = ForceSync},
			{noreply,P#dp{wasync = A1}};
		_ when MFA == undefined ->
			A1 = A#ai{buffer = [Sql|A#ai.buffer], buffer_cf = [From|A#ai.buffer_cf],
				buffer_recs = [Msg#write.records|A#ai.buffer_recs], buffer_fsync = ForceSync},
			{noreply,P#dp{wasync = A1}};
		_ ->
			% Callback generates the sql to write.
			{Mod,Func,Args} = MFA,
			case apply(Mod,Func,[P#dp.cbstate|Args]) of
				{reply,What,OutSql,NS} ->
					% Reply immediately; write still executes (undefined callfrom).
					reply(From,What),
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [[]|A#ai.buffer_recs],
						buffer_cf = [undefined|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1, cbstate = NS}};
				{reply,What,NS} ->
					{reply,What,P#dp{cbstate = NS}};
				{reply,What} ->
					{reply,What,P};
				{exec,OutSql,Recs} ->
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [Recs|A#ai.buffer_recs],
						buffer_cf = [From|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1}};
				% For when a node wants to take its marbles and go play by itself.
				{isolate,OutSql,State} ->
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [[]|A#ai.buffer_recs],
						buffer_cf = [From|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1, cbstate = State, verified = true, mors = master, followers = []}};
				{OutSql,Recs} when is_list(Recs) ->
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [Recs|A#ai.buffer_recs],
						buffer_cf = [From|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1}};
				{OutSql,State} ->
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [[]|A#ai.buffer_recs],
						buffer_cf = [From|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1, cbstate = State}};
				{OutSql,Recs,State} ->
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [Recs|A#ai.buffer_recs],
						buffer_cf = [From|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1, cbstate = State}};
				OutSql ->
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [[]|A#ai.buffer_recs],
						buffer_cf = [From|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1}}
			end
	end.
% print_sqls(Pos,Sql,Recs) when tuple_size(Sql) >= Pos ->
% 	?DBG("SQL=~p, Recs=~p",[element(Pos,Sql),element(Pos,Recs)]),
% 	print_sqls(Pos+1,Sql,Recs);
% print_sqls(_,_,_) ->
% 	ok.
% For queue-backed actors the driver result arrives synchronously: feed the
% waiting ref straight into handle_info and keep only the new state.
% For all other backends the state passes through untouched.
write_call2(P) ->
	case P of
		#dp{db = queue, wasync = #ai{wait = Ref}} ->
			element(2, handle_info({Ref,ok}, P));
		_ ->
			P
	end.
% Not leader anymore: redirect every buffered write caller (and any read
% callers still waiting on a result) to the current master, then reset the
% async accumulators.
write_call1(_W,From,_CF,#dp{mors = slave} = P) ->
	Reads = P#dp.rasync,
	?ADBG("Redirecting write ~p from=~p, w=~p",[P#dp.masternode,From,_W]),
	[reply(F,{redirect,P#dp.masternode}) || F <- From],
	case Reads#ai.callfrom of
		[_|_] = Waiting ->
			[reply(F,{redirect,P#dp.masternode}) || F <- Waiting];
		_ ->
			ok
	end,
	P#dp{wasync = #ai{}, rasync = Reads#ai{callfrom = undefined, wait = undefined}};
% Not a multiactor transaction write
% Execute the batched writes: wrap them between the #s00 prologue and the
% #s02;#s01 epilogue statements together with the __adb evnum/evterm
% bookkeeping, and hand them to the driver asynchronously.
% Fix: a commented-out debug call had lost its "%" (source garbling); restored.
write_call1(#write{sql = Sql,transaction = undefined} = W,From,NewVers,P) ->
	EvNum = P#dp.evnum+1,
	VarHeader = actordb_sqlprocutil:create_var_header(P),
	case P#dp.db of
		queue ->
			Res = make_ref(),
			% We must reverse because when writes were being batched, we remembered offset of every data item
			CF = [batch|lists:reverse(From)],
			{ok,NS} = actordb_queue:cb_write_exec(P#dp.cbstate, lists:reverse(Sql), P#dp.current_term, EvNum, VarHeader);
		_ ->
			NS = P#dp.cbstate,
			CF = [batch,undefined|lists:reverse([undefined|From])],
			ComplSql = list_to_tuple([<<"#s00;">>|lists:reverse([<<"#s02;#s01;">>|Sql])]),
			ADBW = [[[?EVNUMI,butil:tobin(EvNum)],[?EVTERMI,butil:tobin(P#dp.current_term)]]],
			Records = list_to_tuple([[]|lists:reverse([ADBW|W#write.records])]),
			?DBG("schema = ~p, SQL=~p, Recs=~p, cf=~p",[P#dp.schemavers,ComplSql, Records, CF]),
			% print_sqls(1,ComplSql,Records),
			Res = actordb_sqlite:exec_async(P#dp.db,ComplSql,Records,P#dp.current_term,EvNum,VarHeader)
	end,
	A = P#dp.wasync,
	% Move the buffered batch into the "in flight" fields and clear the buffer
	% so new write_call invocations can start accumulating the next batch.
	NWB = A#ai{wait = Res, info = W, newvers = NewVers,
		callfrom = CF, evnum = EvNum, evterm = P#dp.current_term,
		moved = A#ai.buffer_moved, fsync = A#ai.buffer_fsync,
		buffer_moved = undefined, buffer_nv = undefined, buffer_fsync = false,
		buffer = [], buffer_cf = [], buffer_recs = []},
	write_call2(P#dp{wasync = NWB, last_write_at = actordb_local:elapsed_time(),
		activity = actordb_local:actor_activity(P#dp.activity), cbstate = NS});
% Multiactor-transaction write: nothing is committed here. With followers the
% pending sql is stored (base64) alongside the transaction row until
% commit_call decides the outcome; on a single-node cluster it is executed
% directly under the open transaction.
% Fix: four comment lines had lost their leading "%" (source garbling),
% breaking both case expressions; the markers are restored.
write_call1(#write{sql = Sql1, transaction = {Tid,Updaterid,Node} = TransactionId} = W,From,NewVers,P) ->
	{_CheckPid,CheckRef} = actordb_sqlprocutil:start_transaction_checker(Tid,Updaterid,Node),
	?DBG("Starting transaction write id ~p, curtr ~p, sql ~p",[TransactionId,P#dp.transactionid,Sql1]),
	ForceSync = lists:member(fsync,W#write.flags),
	case P#dp.followers of
		[] ->
			% If single node cluster, no need to store sql first.
			case P#dp.transactionid of
				TransactionId ->
					% Transaction can write to single actor more than once (especially for KV stores)
					% if we are already in this transaction, just update sql.
					{_OldSql,EvNum,_} = P#dp.transactioninfo,
					case Sql1 of
						delete ->
							ComplSql = <<"delete">>,
							Res = {ok,{changes,0,1}};
						_ ->
							ComplSql = Sql1,
							Res = actordb_sqlite:exec(P#dp.db,ComplSql,write)
					end;
				undefined ->
					EvNum = P#dp.evnum+1,
					case Sql1 of
						delete ->
							Res = {ok,{changes,0,1}},
							ComplSql = <<"delete">>;
						_ ->
							ComplSql =
								[<<"#s00;">>,
								actordb_sqlprocutil:semicolon(Sql1),
								<<"#s02;">>
								],
							AWR = [[?EVNUMI,butil:tobin(EvNum)],[?EVTERMI,butil:tobin(P#dp.current_term)]],
							Records = W#write.records++[AWR],
							VarHeader = actordb_sqlprocutil:create_var_header(P),
							Res = actordb_sqlite:exec(P#dp.db,ComplSql,Records,P#dp.current_term,EvNum,VarHeader)
					end
			end,
			case actordb_sqlite:okornot(Res) of
				ok ->
					?DBG("Transaction ok"),
					{noreply, actordb_sqlprocutil:reply_maybe(P#dp{transactionid = TransactionId,
						evterm = P#dp.current_term,
						last_write_at = actordb_local:elapsed_time(),
						transactioncheckref = CheckRef,force_sync = ForceSync,
						transactioninfo = {ComplSql,EvNum,NewVers},
						activity = actordb_local:actor_activity(P#dp.activity),
						callfrom = From, callres = Res})};
				_Err ->
					% Write failed: roll back, stop watching the coordinator.
					actordb_sqlite:rollback(P#dp.db),
					erlang:demonitor(CheckRef),
					?DBG("Transaction not ok ~p",[_Err]),
					{reply,Res,P#dp{transactionid = undefined,
						last_write_at = actordb_local:elapsed_time(),
						activity = actordb_local:actor_activity(P#dp.activity),
						evterm = P#dp.current_term}}
			end;
		_ ->
			EvNum = P#dp.evnum+1,
			case P#dp.transactionid of
				TransactionId when Sql1 /= delete ->
					% Rollback prev version of sql.
					actordb_sqlite:rollback(P#dp.db),
					{OldSql,_EvNum,_} = P#dp.transactioninfo,
					% Combine prev sql with new one.
					Sql = iolist_to_binary([OldSql,Sql1]);
				TransactionId ->
					Sql = <<"delete">>;
				_ ->
					case Sql1 of
						delete ->
							Sql = <<"delete">>;
						_ ->
							Sql = iolist_to_binary(Sql1)
					end
			end,
			ComplSql = <<"#s00;#s02;#s03;#s01;">>,
			% Replicated write stores the pending transaction row
			% (tid, updater, node, newvers, base64 sql) plus evnum bookkeeping.
			TransRecs = [[[butil:tobin(Tid),butil:tobin(Updaterid),Node,butil:tobin(NewVers),base64:encode(Sql)]]],
			Records = [[[?EVNUMI,butil:tobin(EvNum)],[?EVTERMI,butil:tobin(P#dp.current_term)]]|TransRecs],
			VarHeader = actordb_sqlprocutil:create_var_header(P),
			ok = actordb_sqlite:okornot(actordb_sqlite:exec(
				P#dp.db,ComplSql,Records,P#dp.current_term,EvNum,VarHeader)),
			{noreply,ae_timer(P#dp{callfrom = From,callres = undefined, evterm = P#dp.current_term,evnum = EvNum,
				last_write_at = actordb_local:elapsed_time(),
				transactioninfo = {Sql,EvNum+1,NewVers},
				transactioncheckref = CheckRef,force_sync = ForceSync,
				transactionid = TransactionId})}
	end.
% Refresh timers/activity after issuing an append-entries round:
% restart the election timer, reset the consensus retry counter in callat
% and mark every follower as awaiting a response from now.
ae_timer(P) ->
	Time = actordb_local:elapsed_time(),
	Waiting = [Flw#flw{wait_for_response_since = Time} || Flw <- P#dp.followers],
	P#dp{election_timer = actordb_sqlprocutil:election_timer(Time,P#dp.election_timer),
		callat = {Time,0},
		activity = actordb_local:actor_activity(P#dp.activity),
		followers = Waiting}.
% gen_server cast handler.
% {diepls,_Reason}: polite shutdown request — only stop when the actor is
% completely idle (no queued/buffered work, no copies, no locks, no open
% transaction) and the callback module does not forbid dying.
handle_cast({diepls,_Reason},P) ->
?DBG("Received diepls ~p",[_Reason]),
W = P#dp.wasync,
R = P#dp.rasync,
% Idle check across call queue, async read/write buffers, copy jobs,
% locks and transaction state.
Inactive = queue:is_empty(P#dp.callqueue) andalso W#ai.buffer == [] andalso R#ai.buffer == [] andalso
P#dp.dbcopy_to == [] andalso P#dp.locked == [] andalso P#dp.copyfrom == undefined andalso
W#ai.wait == undefined andalso R#ai.wait == undefined andalso P#dp.transactioninfo == undefined,
CanDie = apply(P#dp.cbmod,cb_candie,[P#dp.mors,P#dp.actorname,P#dp.actortype,P#dp.cbstate]),
?DBG("verified ~p, empty ~p, candie ~p, state=~p",[P#dp.verified,Inactive,CanDie,?R2P(P)]),
case ok of
_ when P#dp.verified, Inactive, CanDie /= never ->
{stop,normal,P};
_ ->
{noreply,P}
end;
% Diagnostic dump of current locks, pending write and full state.
handle_cast(print_info,P) ->
?AINF("locks=~p wwait=~p",[P#dp.locked,(P#dp.wasync)#ai.wait]),
?AINF("~p~n",[?R2P(P)]),
{noreply,P};
% Verified leader: forward unknown casts to the callback module.
handle_cast(Msg,#dp{mors = master, verified = true} = P) ->
case apply(P#dp.cbmod,cb_cast,[Msg,P#dp.cbstate]) of
{noreply,S} ->
{noreply,P#dp{cbstate = S}};
noreply ->
{noreply,P}
end;
% Anything else: log and ignore.
handle_cast(_Msg,P) ->
?INF("sqlproc ~p unhandled cast ~p~n",[P#dp.cbmod,_Msg]),
{noreply,P}.
% gen_server info handler: election timers, async sqlite driver results,
% copy/lock bookkeeping and callback passthrough.
% NOTE(review): several comment lines in this function had lost their
% leading "%" (extraction damage); they are restored below.
handle_info({election_timeout,Ref},P) ->
	election_timeout(Ref,P);
handle_info({timeout,Resend},P) ->
	% Batch timeout: if more messages are queued, retry a few times so the
	% write buffer can fill before being flushed.
	case erlang:process_info(self(),message_queue_len) of
		{message_queue_len,N} when N > 1, Resend < 10 ->
			{noreply,timeout(P#dp{flags = P#dp.flags band (bnot ?TIMEOUT_PENDING)}, Resend)};
		_ ->
			{noreply,actordb_sqlprocutil:doqueue(P#dp{flags = P#dp.flags band (bnot ?TIMEOUT_PENDING)})}
	end;
% Async read result. Unlike writes we can reply directly. We don't use async reads atm.
handle_info({Ref,Res}, #dp{rasync = #ai{wait = Ref} = BD} = P) when is_reference(Ref) ->
	NewBD = BD#ai{callfrom = undefined, info = undefined, wait = undefined},
	case Res of
		{ok,ResTuples} ->
			?DBG("Read resp=~p",[Res]),
			{noreply,actordb_sqlprocutil:read_reply(P#dp{rasync = NewBD}, BD#ai.callfrom, 1, ResTuples)}
		% Err ->
		% TODO: if async reads ever get used...
		% 	?ERR("Read call error: ~p",[Err]),
		% 	{noreply, P#dp{rasync = NewBD}}
	end;
% async write result
handle_info({Ref,Res1}, #dp{wasync = #ai{wait = Ref} = BD} = P) when is_reference(Ref) ->
	?DBG("Write result ~p",[Res1]),
	% ?DBG("Buffer=~p",[BD#ai.buffer]),
	% ?DBG("CQ=~p",[P#dp.callqueue]),
	Res = actordb_sqlite:exec_res(Res1),
	From = BD#ai.callfrom,
	EvNum = BD#ai.evnum,
	EvTerm = BD#ai.evterm,
	% ?DBG("Res=~p, Callfrom=~p",[Res,From]),
	case BD#ai.newvers of
		undefined ->
			NewVers = P#dp.schemavers;
		NewVers ->
			ok
	end,
	Moved = BD#ai.moved,
	W = BD#ai.info,
	ForceSync = BD#ai.fsync,
	NewAsync = BD#ai{callfrom = undefined, evnum = undefined, evterm = undefined,
		newvers = undefined, info = undefined, wait = undefined, fsync = false},
	case actordb_sqlite:okornot(Res) of
		ok when P#dp.followers == [] ->
			% Single node: reply immediately.
			{noreply,actordb_sqlprocutil:statequeue(actordb_sqlprocutil:reply_maybe(
				P#dp{callfrom = From, callres = Res,evnum = EvNum,
				netchanges = actordb_local:net_changes(), force_sync = ForceSync,
				schemavers = NewVers,evterm = EvTerm,movedtonode = Moved,
				wasync = NewAsync}))};
		ok ->
			% reply on appendentries response or later if nodes are behind.
			case P#dp.callres of
				undefined ->
					Callres = Res;
				Callres ->
					ok
			end,
			{noreply, actordb_sqlprocutil:statequeue(ae_timer(P#dp{callfrom = From, callres = Callres,
				netchanges = actordb_local:net_changes(),force_sync = ForceSync,
				evterm = EvTerm, evnum = EvNum,schemavers = NewVers,movedtonode = Moved,
				wasync = NewAsync}))};
		{sql_error,{ErrPos,_,_ErrAtom,ErrStr},_} ->
			% A batched write failed at statement index ErrPos: reply with the
			% error to the failing caller and requeue the remaining statements.
			% actordb_sqlite:rollback(P#dp.db),
			[batch,undefined|CF1] = From,
			% Remove cf for last part (#s02, #s01)
			CF = lists:reverse(tl(lists:reverse(CF1))),
			?DBG("Error pos ~p, cf=~p",[ErrPos-1,CF]),
			{Before,[Problem|After]} = lists:split(ErrPos-1,CF),
			reply(Problem, {sql_error,ErrStr}),
			{BeforeSql,[_ProblemSql|AfterSql]} = lists:split(ErrPos-1,lists:reverse(W#write.sql)),
			{BeforeRecs,[_ProblemRecs|AfterRecs]} = lists:split(ErrPos-1,lists:reverse(W#write.records)),
			case BD#ai.newvers of
				undefined ->
					RemainCF = lists:reverse(Before++After),
					RemainSql = lists:reverse(BeforeSql++AfterSql),
					RemainRecs = lists:reverse(BeforeRecs++AfterRecs);
				_ ->
					RemainCF = lists:reverse(tl(Before++After)),
					RemainSql = lists:reverse(tl(BeforeSql++AfterSql)),
					RemainRecs = lists:reverse(tl(BeforeRecs++AfterRecs))
			end,
			% ?DBG("Remain cf=~p",[RemainCF]),
			% ?DBG("Remain sql=~p",[RemainSql]),
			% ?DBG("Remain recs=~p",[RemainRecs]),
			NewAsync1 = NewAsync#ai{buffer = RemainSql++NewAsync#ai.buffer,
				buffer_cf = RemainCF++NewAsync#ai.buffer_cf,
				buffer_recs = RemainRecs++NewAsync#ai.buffer_recs},
			?DBG("New write ~p",[NewAsync1]),
			handle_info(doqueue,actordb_sqlprocutil:statequeue(P#dp{wasync = NewAsync1}))
	end;
% Async election vote result.
handle_info({{Ref,MonRef,_Nd}, Msg}, P) when element(3,P#dp.election_timer) == Ref ->
	erlang:demonitor(MonRef,[flush]),
	?DBG("received vote result from ~p, res=~p",[_Nd,element(1,Msg)]),
	election_vote(Msg,P);
handle_info(doqueue, P) ->
	{noreply,actordb_sqlprocutil:doqueue(P)};
handle_info(statequeue,P) ->
	{noreply,actordb_sqlprocutil:doqueue(actordb_sqlprocutil:statequeue(P))};
handle_info({hibernate,A},P) ->
	?DBG("hibernating"),
	{noreply,P#dp{activity = A},hibernate};
handle_info(copy_timer,P) ->
	% Keep the timer alive while any db copy is in progress.
	case P#dp.dbcopy_to of
		[_|_] ->
			erlang:send_after(1000,self(),copy_timer);
		_ ->
			ok
	end,
	{noreply,P#dp{activity = actordb_local:actor_activity(P#dp.activity)}};
handle_info({'DOWN',Monitor,_,PID,Reason},P) ->
	down_info(PID,Monitor,Reason,P);
handle_info(doelection,P) ->
	self() ! doelection1,
	{noreply,P};
% First check if latencies changed.
handle_info({doelection,_LatencyBefore,_TimerFrom} = Msg,P) ->
	election_timer(Msg,P);
handle_info(doelection1,P) ->
	election_timer(doelection1,P);
handle_info(doelection2,P) ->
	election_timer(doelection2,P);
handle_info({forget,Nd},P) ->
	?INF("Forgetting node ~p",[Nd]),
	{noreply,P#dp{followers = lists:keydelete(Nd,#flw.node,P#dp.followers)}};
handle_info(retry_copy,P) ->
	?DBG("Retry copy mors=~p, ver=~p, cl=~p",[P#dp.mors,P#dp.verified,P#dp.copylater]),
	case P#dp.mors == master andalso P#dp.verified == true of
		true ->
			{noreply,actordb_sqlprocutil:retry_copy(P)};
		_ ->
			{noreply, P}
	end;
handle_info({batch,L},P) ->
	% Run a list of buffered calls through handle_call, threading state.
	?DBG("Batch=~p",[L]),
	{noreply, lists:foldl(fun({{Pid,Ref},W},NP) -> {noreply, NP1} = handle_call(W, {Pid,Ref}, NP), NP1 end, P, L)};
handle_info(check_locks,P) ->
	case P#dp.locked of
		[] ->
			{noreply,P};
		_ ->
			erlang:send_after(1000,self(),check_locks),
			{noreply, actordb_sqlprocutil:check_locks(P,P#dp.locked,[])}
	end;
handle_info(stop,P) ->
	?DBG("Received stop msg"),
	handle_info({stop,normal},P);
handle_info({stop,Reason},P) ->
	?DBG("Actor stop with reason ~p",[Reason]),
	{stop, normal, P};
handle_info(print_info,P) ->
	handle_cast(print_info,P);
handle_info(commit_transaction,P) ->
	down_info(0,12345,done,P#dp{transactioncheckref = 12345});
handle_info(start_copy,P) ->
	?DBG("Start copy ~p",[P#dp.copyfrom]),
	case P#dp.copyfrom of
		{move,NewShard,Node} ->
			OldActor = P#dp.actorname,
			Msg = {move,NewShard,actordb_conf:node_name(),P#dp.copyreset,P#dp.cbstate};
		{split,MFA,Node,OldActor,NewActor} ->
			% Change node to this node, so that other actor knows where to send db.
			Msg = {split,MFA,actordb_conf:node_name(),OldActor,NewActor,P#dp.copyreset,P#dp.cbstate};
		{Node,OldActor} ->
			Msg = {copy,{actordb_conf:node_name(),OldActor,P#dp.actorname}}
	end,
	Home = self(),
	spawn(fun() ->
		Rpc = {?MODULE,call,[{OldActor,P#dp.actortype},[],Msg,P#dp.cbmod,onlylocal]},
		case actordb:rpc(Node,OldActor,Rpc) of
			ok ->
				?DBG("Ok response for startcopy msg"),
				ok;
			{ok,_} ->
				?DBG("Ok response for startcopy msg"),
				ok;
			{redirect,_} ->
				?DBG("Received redirect, presume job is done"),
				Home ! start_copy_done;
			Err ->
				?ERR("Unable to start copy from ~p, ~p",[P#dp.copyfrom,Err]),
				Home ! {stop,Err}
		end
	end),
	{noreply,P};
handle_info(start_copy_done,P) ->
	{ok,NP} = init(P,copy_done),
	{noreply,NP};
handle_info(Msg,#dp{verified = true} = P) ->
	% Forward unknown messages to the callback module once verified.
	case apply(P#dp.cbmod,cb_info,[Msg,P#dp.cbstate]) of
		{noreply,S} ->
			{noreply,P#dp{cbstate = S}};
		noreply ->
			{noreply,P}
	end;
handle_info(_Msg,P) ->
	?DBG("sqlproc ~p unhandled info ~p~n",[P#dp.cbmod,_Msg]),
	{noreply,P}.
% Election/consensus timer state machine, three stages:
%  {doelection,..} -> latency sanity check, then doelection1
%  doelection1     -> check for a write stuck without consensus
%  doelection2     -> decide: wait, run election, or give up
% NOTE(review): comment lines that had lost their "%" are restored below.
election_timer({doelection,LatencyBefore,_TimerFrom},P) ->
	LatencyNow = actordb_latency:latency(),
	Now = actordb_local:elapsed_time(),
	Interval = actordb_sqlprocutil:election_timer_interval(),
	% Delay if latency significantly increased since start of timer.
	% But only if more than 100ms latency. Which should mean significant load or bad network which
	% from here means same thing.
	case (LatencyNow > (LatencyBefore*1.5) andalso LatencyNow > 100) orelse (Now - P#dp.last_vote_event < Interval) of
		true ->
			{noreply,P#dp{election_timer = actordb_sqlprocutil:election_timer(undefined),
				last_vote_event = 0,
				activity = actordb_local:actor_activity(P#dp.activity)}};
		false ->
			% Clear out msg queue first.
			self() ! doelection1,
			{noreply,P#dp{activity = actordb_local:actor_activity(P#dp.activity)}}
	end;
% Are any write results pending?
election_timer(doelection1,P) ->
	case P#dp.callfrom of
		undefined ->
			election_timer(doelection2,P);
		_ ->
			LatencyNow = actordb_latency:latency(),
			Now = actordb_local:elapsed_time(),
			{CallTime,Noops} = P#dp.callat,
			% More than a second after write is finished (and sent to followers)
			case Now - CallTime > 1000+LatencyNow of
				true when Noops == 0 ->
					?ERR("Write is taking long to reach consensus ~p",[P#dp.callfrom]),
					NewFollowers1 = [actordb_sqlprocutil:send_empty_ae(P,NF) || NF <- P#dp.followers],
					{noreply,P#dp{callat = {CallTime,1}, election_timer = actordb_sqlprocutil:election_timer(undefined),
						followers = NewFollowers1}};
				true when Noops == 1 ->
					?ERR("Still have not reached consensus"),
					{noreply,P#dp{callat = {CallTime,2}, election_timer = actordb_sqlprocutil:election_timer(undefined)}};
				true when Noops == 2 ->
					?ERR("Write abandon with consensus_timeout ~p",[P#dp.callfrom]),
					reply(P#dp.callfrom,{error,consensus_timeout}),
					RR = P#dp.rasync,
					case RR#ai.callfrom of
						[_|_] ->
							[reply(F,{redirect,P#dp.masternode}) || F <- RR#ai.callfrom];
						_ ->
							ok
					end,
					% Step down as leader.
					election_timer(doelection2,P#dp{callfrom = undefined, callres = undefined,
						masternode = undefined,masternodedist = undefined,
						rasync = RR#ai{callfrom = undefined, wait = undefined},
						verified = false, mors = slave, without_master_since = CallTime});
				false ->
					{noreply,P#dp{election_timer = actordb_sqlprocutil:election_timer(undefined),
						activity = actordb_local:actor_activity(P#dp.activity)}}
			end
	end;
% Check if there is anything we need to do, like run another election, issue an empty write or wait some more.
election_timer(doelection2,P) ->
	A = P#dp.wasync,
	Empty = queue:is_empty(P#dp.callqueue) andalso A#ai.buffer_cf == [],
	?DBG("Election timeout, master=~p, verified=~p, followers=~p",
		[P#dp.masternode,P#dp.verified,P#dp.followers]),
	Now = actordb_local:elapsed_time(),
	Me = actordb_conf:node_name(),
	LatencyNow = actordb_latency:latency(),
	case ok of
		_ when P#dp.verified, P#dp.mors == master, P#dp.dbcopy_to /= [] ->
			% Copying db, wait some more
			{noreply,P#dp{election_timer = actordb_sqlprocutil:election_timer(Now,undefined)}};
		_ when P#dp.verified, P#dp.mors == master ->
			actordb_sqlprocutil:follower_check_handle(P);
		_ when element(1,P#dp.election_timer) == election ->
			% We are candidate, wait for election to complete.
			{noreply,P};
		% Unless leader is known and available, start an election.
		_ when P#dp.masternode /= undefined, P#dp.masternode /= Me ->
			% We are follower and masternode is set. This means leader sent us at least one AE.
			% Is connection active?
			case bkdcore_rpc:is_connected(P#dp.masternode) andalso Now - P#dp.masternode_since < 2000 of
				true ->
					?DBG("Election timeout, do nothing, leader=~p",[P#dp.masternode]),
					{noreply,P#dp{without_master_since = undefined}};
				false ->
					% We had leader, but he is gone
					?DBG("Leader is gone, leader=~p, election=~p, empty=~p, me=~p",
						[P#dp.masternode,P#dp.election_timer,Empty,actordb_conf:node_name()]),
					NP = P#dp{election_timer = undefined,without_master_since = Now,
						masternode = undefined, masternodedist = undefined},
					{noreply,actordb_sqlprocutil:start_verify(NP,false)}
			end;
		_ when P#dp.without_master_since == undefined ->
			?DBG("Leader timeout, leader=~p, election=~p, empty=~p, me=~p",
				[P#dp.masternode,P#dp.election_timer,Empty,actordb_conf:node_name()]),
			% Start counter how long we are looking for leader for.
			NP = P#dp{election_timer = undefined,without_master_since = Now},
			{noreply,actordb_sqlprocutil:start_verify(NP,false)};
		_ when P#dp.election_timer == undefined ->
			% If election undefined this should be a hint from outside.
			{noreply,actordb_sqlprocutil:start_verify(P,false)};
		_ when Now - P#dp.without_master_since >= 3000+LatencyNow, Empty == false ->
			?ERR("Unable to establish leader, responding with error"),
			% It took too long. Respond with error.
			actordb_sqlprocutil:empty_queue(P#dp.wasync,P#dp.rasync, P#dp.callqueue,{error,consensus_impossible_atm}),
			A1 = A#ai{buffer = [], buffer_recs = [], buffer_cf = [],
				buffer_nv = undefined, buffer_moved = undefined},
			R1 = (P#dp.rasync)#ai{callfrom = undefined, wait = undefined},
			% Give up for now. Do not run elections untill we get a hint from outside.
			% Hint will come from catchup or a client wanting to execute read/write.
			actordb_catchup:report(P#dp.actorname,P#dp.actortype),
			{noreply,P#dp{callqueue = queue:new(),election_timer = undefined,
				wasync = A1,rasync = R1}};
		_ when Now - P#dp.without_master_since >= 3000+LatencyNow ->
			actordb_catchup:report(P#dp.actorname,P#dp.actortype),
			% Give up and wait for hint.
			{noreply,P#dp{election_timer = undefined}};
		_ ->
			?DBG("Election timeout"),
			{noreply,actordb_sqlprocutil:start_verify(P#dp{election_timer = undefined},false)}
	end.
% Process one vote reply from follower Node: record the vote and the
% follower's latest event num/term, then become leader once a majority of
% the cluster (followers + self) has voted true.
election_vote({What,Node,_HisLatestTerm,{Num,Term}}, P) ->
ClusterSize = length(P#dp.followers) + 1,
F = lists:keyfind(Node,#flw.node,P#dp.followers),
NF = F#flw{match_index = Num, next_index = Num+1, match_term = Term, election_result = What},
NFL = lists:keystore(Node,#flw.node, P#dp.followers,NF),
case count_votes(P,NFL, true,1,0) of
% If elected with majority continue processing.
{AllSynced, NVotes, Missing} when NVotes*2 > ClusterSize ->
?DBG("Election successfull, nvotes=~p missing=~p",[NVotes, Missing]),
NP = P#dp{followers = NFL, election_timer = undefined,
last_vote_event = actordb_local:elapsed_time()},
elected_leader(cleanup_results(NP), AllSynced);
% If not wait for election timeout to decide what to do.
_ ->
{noreply, P#dp{followers = NFL, last_vote_event = actordb_local:elapsed_time()}}
end;
% Malformed/error reply: log and keep waiting.
election_vote(Err,P) ->
?ERR("election_vote response error=~p",[Err]),
{noreply, P}.
% Tally election results across followers.
% Returns {AllSynced, NVotes, Missing}: whether every yes-voter is at our
% exact evnum/evterm, how many yes votes (including our own, seeded by the
% caller), and how many followers have not answered yet.
count_votes(P, [F|Rest], AllSynced, NVotes, Missing) ->
	case F#flw.election_result of
		true ->
			% A yes vote keeps AllSynced only if the follower matches our log head.
			Synced = AllSynced andalso P#dp.evnum == F#flw.match_index
				andalso P#dp.evterm == F#flw.match_term,
			count_votes(P, Rest, Synced, NVotes+1, Missing);
		undefined ->
			count_votes(P, Rest, AllSynced, NVotes, Missing+1);
		_ ->
			count_votes(P, Rest, false, NVotes, Missing)
	end;
count_votes(_, [], AllSynced, NVotes, Missing) ->
	{AllSynced, NVotes, Missing}.
% Reset per-follower election bookkeeping after an election round.
cleanup_results(P) ->
	Cleared = [F#flw{election_result = undefined, election_rpc_ref = undefined} || F <- P#dp.followers],
	P#dp{followers = Cleared}.
% Election timer expired. Ref must match the live timer reference in
% election_timer, otherwise the message is stale (second clause ignores it).
election_timeout(Ref,#dp{election_timer = {election,_TimerRef,Ref}} = P) ->
ClusterSize = length(P#dp.followers) + 1,
case count_votes(P, P#dp.followers, true, 1, 0) of
{AllSynced, NVotes, Missing} when NVotes*2 > ClusterSize, P#dp.flags band ?FLAG_WAIT_ELECTION == 0 ->
?DBG("election_timeout with majority, missing=~p",[Missing]),
elected_leader(cleanup_results(P),AllSynced andalso Missing == 0);
% Majority reached but wait_election flag is set: poll again in 100ms.
{_AllSynced, NVotes, _Missing} when NVotes*2 > ClusterSize ->
erlang:send_after(100,self(),{election_timeout, Ref}),
?DBG("wait_election"),
{noreply, P#dp{election_timer = {election, undefined, Ref}}};
_ ->
?DBG("election_timeout continue as follower, without_master_since=~p",
[P#dp.without_master_since]),
% If election timer finishes, election failed.
case ok of
_ when P#dp.without_master_since == undefined ->
Now = actordb_local:elapsed_time(),
{noreply,actordb_sqlprocutil:reopen_db(cleanup_results(P#dp{
election_timer = actordb_sqlprocutil:election_timer(Now,undefined),
masternode = undefined, masternodedist = undefined, mors = slave,
without_master_since = Now}))};
_ ->
Now = actordb_local:elapsed_time(),
{noreply,cleanup_results(P#dp{election_timer = actordb_sqlprocutil:election_timer(Now,undefined),
masternode = undefined, masternodedist = undefined, mors = slave})}
end
end;
% Stale timer reference: ignore.
election_timeout(_,P) ->
?DBG("Election timeout when no longer relevant, election_timer=~p",[P#dp.election_timer]),
{noreply, P}.
% Called once this node has won an election.
% First clause: actor does not actually exist (no schema or deleted) and
% the caller did not request creation -> stop slaves and self, flush all
% queued work with {error,nocreate}.
% NOTE(review): comment lines that had lost their "%" are restored below.
elected_leader(P, _AllSynced) when (P#dp.flags band ?FLAG_CREATE) == 0 andalso
		(P#dp.schemavers == undefined orelse P#dp.movedtonode == deleted) ->
	Nodes = actordb_sqlprocutil:follower_nodes(P#dp.followers),
	spawn(fun() -> bkdcore_rpc:multicall(Nodes,{actordb_sqlproc,call_slave,
		[P#dp.cbmod,P#dp.actorname,P#dp.actortype,stop]}) end),
	Me = self(),
	spawn(fun() -> timer:sleep(10), stop(Me) end),
	RR = (P#dp.rasync)#ai{callfrom = undefined, wait = undefined},
	actordb_sqlprocutil:empty_queue(P#dp.wasync,P#dp.rasync, P#dp.callqueue,{error,nocreate}),
	A1 = (P#dp.wasync)#ai{buffer = [], buffer_recs = [], buffer_cf = [],
		buffer_nv = undefined, buffer_moved = undefined},
	{noreply,P#dp{movedtonode = deleted, verified = true, callqueue = queue:new(),
		netchanges = actordb_local:net_changes(),
		wasync = A1, rasync = RR}};
elected_leader(P1, AllSynced) ->
	actordb_local:actor_mors(master,actordb_conf:node_name()),
	ReplType = apply(P1#dp.cbmod,cb_replicate_type,[P1#dp.cbstate]),
	P = actordb_sqlprocutil:reopen_db(P1#dp{mors = master, election_timer = undefined,
		masternode = actordb_conf:node_name(),
		masternode_since = actordb_local:elapsed_time(),
		without_master_since = undefined,
		masternodedist = bkdcore:dist_name(actordb_conf:node_name()),
		flags = P1#dp.flags band (bnot ?FLAG_WAIT_ELECTION),
		cbstate = actordb_sqlite:replicate_opts(P1,term_to_binary({P1#dp.cbmod,P1#dp.actorname,P1#dp.actortype}),ReplType),
		locked = lists:delete(ae,P1#dp.locked)}),
	case P#dp.movedtonode of
		deleted ->
			actordb_sqlprocutil:actually_delete(P1),
			Moved = undefined,
			SchemaVers = undefined;
		_ ->
			Moved = P#dp.movedtonode,
			SchemaVers = P#dp.schemavers
	end,
	?DBG("Elected leader term=~p, nodes_synced=~p, moved=~p",[P1#dp.current_term,AllSynced,P#dp.movedtonode]),
	case P#dp.schemavers of
		undefined ->
			Transaction = [],
			Rows = [];
		_ ->
			case actordb_sqlite:exec(P#dp.db,
					<<"SELECT * FROM __adb;",
					"SELECT * FROM __transactions;">>,read) of
				{ok,[[{columns,_},{rows,Transaction}],[{columns,_},{rows,Rows}]]} ->
					ok;
				Err ->
					?ERR("Unable read from db for, error=~p after election.",[Err]),
					Transaction = Rows = [],
					exit(error)
			end
	end,
	case butil:ds_val(?COPYFROMI,Rows) of
		CopyFrom1 when byte_size(CopyFrom1) > 0 ->
			CbInit = true,%P#dp.cbinit,
			{CopyFrom,CopyReset,CbState} = binary_to_term(base64:decode(CopyFrom1));
		_ ->
			CbInit = false,%P#dp.cbinit,
			CopyFrom = CopyReset = undefined,
			CbState = P#dp.cbstate
	end,
	% After election is won a write needs to be executed. What we will write depends on the situation:
	% - If this actor has been moving, do a write to clean up after it (or restart it)
	% - If transaction active continue with write.
	% - If empty db or schema not up to date create/update it.
	% - It can also happen that both transaction active and actor move is active. Sqls will be combined.
	% - Otherwise just empty sql, which still means an increment for evnum and evterm in __adb.
	NP1 = P#dp{verified = true,copyreset = CopyReset,movedtonode = Moved,
		cbstate = CbState, schemavers = SchemaVers, cbinit = CbInit,
		netchanges = actordb_local:net_changes()},
	{NP,Sql,AdbRecords,Callfrom} =
		actordb_sqlprocutil:post_election_sql(NP1,Transaction,CopyFrom,[],P#dp.callfrom),
	% If nothing to store and all nodes synced, send an empty AE.
	case is_number(P#dp.schemavers) andalso is_atom(Sql) == false andalso iolist_size(Sql) == 0 of
		true when AllSynced, P#dp.followers == [] ->
			?DBG("Nodes synced, no followers"),
			W = NP#dp.wasync,
			{noreply,actordb_sqlprocutil:doqueue(actordb_sqlprocutil:do_cb(
				NP#dp{followers = [],
				wasync = W#ai{nreplies = W#ai.nreplies+1}}))};
		true when AllSynced ->
			?DBG("Nodes synced, running empty AE."),
			NewFollowers1 = [actordb_sqlprocutil:send_empty_ae(P,NF) || NF <- P#dp.followers],
			W = NP#dp.wasync,
			{noreply,actordb_sqlprocutil:doqueue(ae_timer(NP#dp{callres = ok,followers = NewFollowers1,
				wasync = W#ai{nreplies = W#ai.nreplies+1}}))};
		_ ->
			?DBG("Running post election write on nodes ~p, evterm=~p, curterm=~p, vers ~p, trans=~p",
				[P#dp.followers,P#dp.evterm,P#dp.current_term,NP#dp.schemavers, NP#dp.transactionid]),
			W = #write{sql = Sql, transaction = NP#dp.transactionid,records = AdbRecords},
			Now = actordb_local:elapsed_time(),
			% Since we won election nodes are accessible.
			Followers = [F#flw{last_seen = Now} || F <- P#dp.followers],
			% write_call1(#write{sql = [], transaction = undefined} = W, From, NewVers, P) ->
			case NP#dp.transactionid of
				undefined ->
					write_call(W,Callfrom, NP#dp{followers = Followers});
				_ ->
					write_call1(W,Callfrom, NP#dp.schemavers, NP#dp{followers = Followers})
			end
	end.
% Monitor 'DOWN' handling, dispatched on which monitored process died.
% Clause 1: the transaction checker process.
down_info(_PID,Ref,Reason,#dp{transactioncheckref = Ref} = P) ->
	?DBG("Transactioncheck died ~p myid ~p, pid=~p",[Reason,P#dp.transactionid,_PID]),
	case P#dp.transactionid of
		{Tid,Updaterid,Node} ->
			case Reason of
				noproc ->
					% Checker never ran; start a fresh one.
					{_CheckPid,CheckRef} = actordb_sqlprocutil:start_transaction_checker(Tid,Updaterid,Node),
					{noreply,P#dp{transactioncheckref = CheckRef}};
				abandoned ->
					% Transaction abandoned: roll it back via commit=false.
					case handle_call({commit,false,P#dp.transactionid},
							undefined,P#dp{transactioncheckref = undefined}) of
						{stop,normal,NP} ->
							{stop,normal,NP};
						{reply,_,NP} ->
							{noreply,NP};
						{noreply,_} = R ->
							R
					end;
				done ->
					% Transaction completed: commit it.
					case handle_call({commit,true,P#dp.transactionid},
							undefined,P#dp{transactioncheckref = undefined}) of
						{stop,normal,NP} ->
							{stop,normal,NP};
						{reply,_,NP} ->
							{noreply,NP};
						{noreply,_} = R ->
							R
					end
			end;
		_ ->
			{noreply,P#dp{transactioncheckref = undefined}}
	end;
% Clause 2: the db copy receiver process.
down_info(PID,_Ref,Reason,#dp{copyproc = PID} = P) ->
	?DBG("copyproc died ~p my_status=~p copyfrom=~p",[Reason,P#dp.mors,P#dp.copyfrom]),
	case Reason of
		unlock ->
			case catch actordb_sqlprocutil:callback_unlock(P) of
				ok when is_binary(P#dp.copyfrom) ->
					{ok,NP} = init(P#dp{mors = slave},copyproc_done),
					{noreply,NP};
				ok ->
					{ok,NP} = init(P#dp{mors = master},copyproc_done),
					{noreply,NP};
				Err ->
					?DBG("Unable to unlock"),
					{stop,Err,P}
			end;
		ok when P#dp.mors == slave ->
			?DBG("Stopping because slave"),
			{stop,normal,P};
		nomajority ->
			{stop,{error,nomajority},P};
		% Error copying.
		% - There is a chance copy succeeded. If this node was able to send unlock msg
		%   but connection was interrupted before replying.
		%   If this is the case next read/write call will start
		%   actor on this node again and everything will be fine.
		% - If copy failed before unlock, then it actually did fail. In that case move will restart
		%   eventually.
		_ ->
			?ERR("Coproc died with error ~p~n",[Reason]),
			% actordb_sqlprocutil:empty_queue(P#dp.callqueue,{error,copyfailed}),
			{stop,{error,copyfailed},P}
	end;
% Clause 3: possibly one of the processes we are copying the db TO.
down_info(PID,_Ref,Reason,P) ->
	case lists:keyfind(PID,#cpto.pid,P#dp.dbcopy_to) of
		false ->
			?DBG("downmsg, verify maybe? ~p ~p ~p",[P#dp.election_timer,PID,Reason]),
			case apply(P#dp.cbmod,cb_info,[{'DOWN',_Ref,process,PID,Reason},P#dp.cbstate]) of
				{noreply,S} ->
					{noreply,P#dp{cbstate = S}};
				noreply ->
					{noreply,P}
			end;
		C ->
			?DBG("Down copyto proc ~p ~p ~p ~p ~p",
				[P#dp.actorname,Reason,C#cpto.ref,P#dp.locked,P#dp.dbcopy_to]),
			case Reason of
				ok ->
					ok;
				_ ->
					?ERR("Copyto process invalid exit ~p",[Reason])
			end,
			WithoutCopy = lists:keydelete(PID,#lck.pid,P#dp.locked),
			NewCopyto = lists:keydelete(PID,#cpto.pid,P#dp.dbcopy_to),
			false = lists:keyfind(C#cpto.ref,2,WithoutCopy),
			% wait_copy not in list, add it (2nd stage of lock)
			WithoutCopy1 = [#lck{ref = C#cpto.ref, ismove = C#cpto.ismove,
				node = C#cpto.node,time = actordb_local:elapsed_time(),
				actorname = C#cpto.actorname}|WithoutCopy],
			erlang:send_after(1000,self(),check_locks),
			{noreply,actordb_sqlprocutil:doqueue(P#dp{dbcopy_to = NewCopyto,locked = WithoutCopy1})}
	end.
% gen_server terminate callback: log, close the sqlite handle and drop the
% distreg registration.
terminate(Reason, P) ->
	case is_record(P, dp) of
		false ->
			% State may not be a #dp{} if init never completed.
			?ADBG("Terminating ~p, ~p",[Reason,P]);
		true ->
			?DBG("Terminating ~p",[Reason])
	end,
	actordb_sqlite:stop(P),
	distreg:unreg(self()),
	ok.
% Hot code upgrade callback: state needs no conversion.
code_change(_OldVsn, State, _Extra) ->
	{ok, State}.
% Reinitialize an already-running actor process (e.g. after a copy
% finished): close db, clear lock/election flags, cancel any live timer
% and go through init/1 again with the preserved state.
init(#dp{} = P,_Why) ->
	% ?ADBG("Reinit because ~p, ~p, ~p",[_Why,?R2P(P),get()]),
	?DBG("Reinit because ~p",[_Why]),
	actordb_sqlite:stop(P),
	Flags = P#dp.flags band (bnot ?FLAG_WAIT_ELECTION) band (bnot ?FLAG_STARTLOCK),
	case ok of
		_ when is_reference(element(2,P#dp.election_timer)) ->
			erlang:cancel_timer(element(2,P#dp.election_timer));
		% _ when is_pid(P#dp.election) ->
		% 	exit(P#dp.election,reinit);
		_ ->
			ok
	end,
	init([{actor,P#dp.actorname},{type,P#dp.actortype},{mod,P#dp.cbmod},{flags,Flags},
		{state,P#dp.cbstate},{slave,P#dp.mors == slave},{wasync,P#dp.wasync},{rasync,P#dp.rasync},
		{queue,P#dp.callqueue},{startreason,{reinit,_Why}}]).
% Never call other processes from init. It may cause deadlocks. Whoever
% started actor is blocking waiting for init to finish.
% Never call other processes from init. It may cause deadlocks. Whoever
% started actor is blocking waiting for init to finish.
% NOTE(review): commented-out lines that had lost their "%" are restored.
init([_|_] = Opts) ->
	% put(opt, Opts),
	?ADBG("Start opts ~p",[Opts]),
	rand:seed(exs64),
	Now = actordb_local:elapsed_time(),
	P1 = #dp{mors = master, callqueue = queue:new(),statequeue = queue:new(), without_master_since = Now},
	case actordb_sqlprocutil:parse_opts(P1,Opts) of
		{registered,Pid} ->
			% Another process already registered for this actor.
			explain({registered,Pid},Opts),
			{stop,normal};
		% P when (P#dp.flags band ?FLAG_ACTORNUM) > 0 ->
		% 	explain({actornum,P#dp.fullpath,actordb_sqlprocutil:read_num(P)},Opts),
		% 	{stop,normal};
		P when (P#dp.flags band ?FLAG_EXISTS) > 0 ->
			% Existence check only, do not actually start the actor.
			case P#dp.movedtonode of
				deleted ->
					explain({ok,[{columns,{<<"exists">>}},{rows,[{<<"false">>}]}]},Opts);
				_ ->
					% {ok,_Db,SchemaTables,_PageSize} = actordb_sqlite:init(P#dp.dbpath,wal),
					% explain({ok,[{columns,{<<"exists">>}},{rows,[{butil:tobin(SchemaTables /= [])}]}]},Opts),
					% {stop,normal}
					LocalShard = actordb_shardmngr:find_local_shard(P#dp.actorname,P#dp.actortype),
					Val =
						case LocalShard of
							{redirect,Shard,Node} ->
								actordb:rpc(Node,Shard,{actordb_shard,is_reg,[Shard,P#dp.actorname,P#dp.actortype]});
							undefined ->
								{Shard,_,Node} = actordb_shardmngr:find_global_shard(P#dp.actorname),
								actordb:rpc(Node,Shard,{actordb_shard,is_reg,[Shard,P#dp.actorname,P#dp.actortype]});
							Shard ->
								actordb_shard:is_reg(Shard,P#dp.actorname,P#dp.actortype)
						end,
					explain({ok,[{columns,{<<"exists">>}},{rows,[{butil:tobin(Val)}]}]},Opts),
					{stop,normal}
			end;
		P when (P#dp.flags band ?FLAG_STARTLOCK) > 0 ->
			% Locked start: this actor is a copy destination or waiting on a lock.
			case lists:keyfind(lockinfo,1,Opts) of
				{lockinfo,dbcopy,{Ref,CbState,CpFrom,CpReset}} ->
					?DBG("Starting actor slave lock for copy on ref ~p",[Ref]),
					{ok,Db,_,_PageSize} = actordb_sqlite:init(P#dp.dbpath,wal),
					{ok,Pid} = actordb_sqlprocutil:start_copyrec(
						P#dp{db = Db, mors = slave, cbstate = CbState,
							dbcopyref = Ref, copyfrom = CpFrom, copyreset = CpReset}),
					{ok,P#dp{copyproc = Pid, verified = false,mors = slave, copyfrom = P#dp.copyfrom}};
				{lockinfo,wait} ->
					?DBG("Starting actor lock wait ~p",[P]),
					{ok,P}
			end;
		P when P#dp.copyfrom == undefined ->
			?DBG("Actor start, copy=~p, flags=~p, mors=~p",[P#dp.copyfrom,P#dp.flags,P#dp.mors]),
			% Could be normal start after moving to another node though.
			MovedToNode = apply(P#dp.cbmod,cb_checkmoved,[P#dp.actorname,P#dp.actortype]),
			RightCluster = lists:member(MovedToNode,bkdcore:all_cluster_nodes()),
			case actordb_sqlite:actor_info(P) of
				% {_,VotedFor,VotedCurrentTerm,VoteEvnum,VoteEvTerm} ->
				{{_FCT,LastCheck},{VoteEvTerm,VoteEvnum},_InProg,_MxPage,_AllPages,VotedCurrentTerm,<<>>} ->
					VotedFor = undefined;
				{{_FCT,LastCheck},{VoteEvTerm,VoteEvnum},_InProg,_MxPage,_AllPages,VotedCurrentTerm,VotedFor} ->
					ok;
				_ ->
					VotedFor = undefined,
					LastCheck = VoteEvnum = VotedCurrentTerm = VoteEvTerm = 0
			end,
			case ok of
				_ when P#dp.mors == slave ->
					{ok,actordb_sqlprocutil:init_opendb(P#dp{current_term = VotedCurrentTerm,
						voted_for = VotedFor, evnum = VoteEvnum,evterm = VoteEvTerm,
						election_timer = actordb_sqlprocutil:election_timer(Now,undefined),
						last_checkpoint = LastCheck})};
				_ when MovedToNode == undefined; RightCluster ->
					NP = P#dp{current_term = VotedCurrentTerm,voted_for = VotedFor, evnum = VoteEvnum,
						evterm = VoteEvTerm, last_checkpoint = LastCheck},
					{ok,actordb_sqlprocutil:start_verify(actordb_sqlprocutil:init_opendb(NP),true)};
				_ ->
					?DBG("Actor moved ~p ~p ~p",[P#dp.actorname,P#dp.actortype,MovedToNode]),
					{ok, P#dp{verified = true, movedtonode = MovedToNode}}
			end;
		{stop,Explain} ->
			explain(Explain,Opts),
			{stop,normal};
		P ->
			% copyfrom is set: start as a copy destination.
			self() ! start_copy,
			{ok,P#dp{mors = master}}
	end;
init(#dp{} = P) ->
	init(P,noreason).
% If the start options carry a {start_from,{Pid,Ref}} tag, deliver the
% explanation term to that process; otherwise do nothing.
explain(What, Opts) ->
	case lists:keyfind(start_from, 1, Opts) of
		{start_from, {Pid, Ref}} ->
			Pid ! {Ref, What};
		_ ->
			ok
	end.
% Delegate replying to the shared sqlproc utility.
reply(To, Msg) ->
	actordb_sqlprocutil:reply(To, Msg).
% reply(undefined,_Msg) ->
% ok;
% reply([_|_] = From, Msg) ->
% 	[gen_server:reply(F, Msg) || F <- From];
% reply(From, Msg) ->
% 	gen_server:reply(From,Msg).
| null | https://raw.githubusercontent.com/biokoda/actordb_core/8dcd08a0897055af89c3ce20d99ed5e64d0c33eb/src/actordb_sqlproc.erl | erlang | call_master/4,call_master/5
Read actor number without creating actor.
If call returns redirect, this is slave node not master node.
test_mon_calls(Name,Msg),
?ADBG("Call ~p",[Msg]),
test_mon_stop(),
test_mon_stop(),
test_mon_stop(),
Ref = make_ref(),
put(ref,Ref),
test_mon_proc(Who,Msg,Ref) ->
receive
Ref ->
ok
test_mon_proc(Who,Msg,Ref)
end.
test_mon_stop() ->
Call processing.
Calls are processed here and in actordb_sqlprocutil:doqueue.
when element(1,Msg) /= reached_end ->
Now = actordb_local:elapsed_time(),
P#dp.without_master_since < Now-3000 ->
We have given up. But since we are getting a call from outside, try again.
Execute election.
if timer already ran out but we are waiting due to wait_election flag
Do nothing election is slower and it does not have to wait
Same transaction can write to an actor more than once
Transactions do not use async calls to driver,
so if we are in the middle of one, we can execute
this write immediately.
New transaction has higher priority.
Interrupt it unless already committed.
read call just buffers call
write_call just buffers call, we can always run it.
Actual write is executed at the end of doqueue.
#write and #read have flags in same pos
?DBG("Queing msg ~p, callres ~p, locked ~p, transactionid ~p",
Continue in doqueue
We can safely release savepoint.
We can also set #dp.evnum now.
Transaction failed.
Delete it from __transactions.
cleanup of transaction from __transactions.
Do not match on ok. It might fail if this is the actual node on which transaction
failed. It makes no difference if we are redundantly calling rollback again.
Executed on follower.
?DBG("Ignoring head because inrecovery"),
Reply may have gotten lost or leader could have changed.
true ->
false ->
actordb_sqlprocutil : ae_respond(P , LeaderNode , wrongstate , PrevEvnum , AEType , CallCount ) ,
{reply,false,P}
end;
Some node thinks its master and sent us appendentries start.
Because we are master with higher term, we turn it down.
But we also start a new write so that nodes get synchronized.
If we have any pending replies, this must result in a rewind for this node
and a new write to master.
This node is candidate or leader but someone with newer term is sending us log
Node is conflicted, delete last entry
If false this node is behind. If empty this is just check call.
Wait for leader to send an earlier event.
Executed on follower.
Appends pages, a single write is split into multiple calls.
Header tells you if this is last call. If we reached header, this means we must have received
all preceding calls as well.
Executed on leader.
An earlier response.
What we thought was follower is ahead of us and we need to step down
{reply,ok,actordb_sqlprocutil:reopen_db(actordb_sqlprocutil:save_term(
P#dp{mors = slave,current_term = CurrentTerm,
masternode = undefined, without_master_since = Now,
masternodedist = undefined,
voted_for = undefined, followers = []}))};
Follower is up to date. He replied false. Maybe our term was too old.
Check if we are copying entire db to that node already, do nothing.
Candidates term is lower than current_term, ignore.
(P#dp.mors == master andalso P#dp.verified == true),
We've already seen this term, only vote yes if we have not voted
or have voted for this candidate already.
(P#dp.mors == master andalso P#dp.verified == true),
New candidates term is higher than ours, is he as up to date?
Higher term, but not as up to date. We can not vote for him.
We do have to remember new term index though.
(P#dp.mors == master andalso P#dp.verified == true),
dbsize == 0, not last page
last page
Prevent any timeouts on next ae since recovery process is progressing.
if no followers safe read is meaningless.
Direct read mode (not async)
We have result, but now we must verify if we are still leader.
If we have some writes to do, execute them and once writes are replicated, send read response
If not, use send_empty_ae.
Either way we must end up in reply_maybe.
no writes pending
exec_writes gets executed after this.
Async mode, less safe because it can return pages that have not been replicated.
We can make the storage engine level correct (lmdb), but what we can't fix at the moment
is sqlite page cache. Any write will store pages in cache. Which means reads will use those
unsafe cache pages instead of what is stored in lmdb.
This unfortunately means we can't process reads while writes are running. Reads are executed
before writes.
We could use seperate read/write connections. This also means there is a read and write
sqlite page cache. After every write, read connection page cache must be cleared. How
detrimental to performance that would be is something that needs to be tested.
A = P#dp.rasync,
For when a node wants to take its marbles and go play by itself.
print_sqls(_,_,_) ->
ok.
Not a multiactor transaction write
We must reverse because when writes were being batched, we remembered offset of every data item
if we are already in this transaction, just update sql.
Async read result. Unlike writes we can reply directly. We don't use async reads atm.
Err ->
?ERR("Read call error: ~p",[Err]),
async write result
?DBG("Buffer=~p",[BD#ai.buffer]),
?DBG("CQ=~p",[P#dp.callqueue]),
reply on appendentries response or later if nodes are behind.
?DBG("Remain cf=~p",[RemainCF]),
?DBG("Remain recs=~p",[RemainRecs]),
Async election vote result.
Change node to this node, so that other actor knows where to send db.
Delay if latency significantly increased since start of timer.
But only if more than 100ms latency. Which should mean significant load or bad network which
from here means same thing.
Are any write results pending?
Step down as leader.
Check if there is anything we need to do, like run another election, issue an empty write or wait some more.
Copying db, wait some more
We are candidate, wait for election to complete.
Unless leader is known and available, start an election.
Is connection active?
We had leader, but he is gone
Start counter how long we are looking for leader for.
If election undefined this should be a hint from outside.
It took too long. Respond with error.
Give up for now. Do not run elections untill we get a hint from outside.
Hint will come from catchup or a client wanting to execute read/write.
Give up and wait for hint.
If elected with majority continue processing.
If not wait for election timeout to decide what to do.
If election timer finishes, election failed.
P#dp.cbinit,
P#dp.cbinit,
After election is won a write needs to be executed. What we will write depends on the situation:
- If this actor has been moving, do a write to clean up after it (or restart it)
- If transaction active continue with write.
- If empty db or schema not up to date create/update it.
- Otherwise just empty sql, which still means an increment for evnum and evterm in __adb.
If nothing to store and all nodes synced, send an empty AE.
Since we won election nodes are accessible.
Error copying.
- There is a chance copy succeeded. If this node was able to send unlock msg
but connection was interrupted before replying.
If this is the case next read/write call will start
actor on this node again and everything will be fine.
- If copy failed before unlock, then it actually did fail. In that case move will restart
eventually.
actordb_sqlprocutil:empty_queue(P#dp.callqueue,{error,copyfailed}),
_ when is_pid(P#dp.election) ->
exit(P#dp.election,reinit);
Never call other processes from init. It may cause deadlocks. Whoever
started actor is blocking waiting for init to finish.
explain({actornum,P#dp.fullpath,actordb_sqlprocutil:read_num(P)},Opts),
{stop,normal};
{ok,_Db,SchemaTables,_PageSize} = actordb_sqlite:init(P#dp.dbpath,wal),
{stop,normal}
Could be normal start after moving to another node though.
reply(undefined,_Msg) ->
ok;
gen_server:reply(From,Msg). | This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
-module(actordb_sqlproc).
-behaviour(gen_server).
-define(LAGERDBG,true).
-export([start/1, stop/1, init/1, handle_call/3, handle_cast/2, handle_info/2, terminate/2, code_change/3]).
-export([print_info/1]).
-export([read/4,write/4,call/4,call/5,diepls/2,try_actornum/3]).
-export([write_call/3, write_call1/4, read_call/3, read_call1/5]).
-include_lib("actordb_sqlproc.hrl").
%% @doc Ask an actor for its db path and actor number without forcing
%% creation. Returns {Path,ActorNum}, or {"",undefined} when the actor
%% does not exist (call answered {error,nocreate}).
try_actornum(Name,Type,CbMod) ->
	Res = call({Name,Type},[actornum],{state_rw,actornum},CbMod),
	case Res of
		{error,nocreate} ->
			%% Actor was never created; empty path, no number.
			{"",undefined};
		{ok,DbPath,ActorNum} ->
			{DbPath,ActorNum}
	end.
%% @doc Public read API. Special-cased inputs:
%% - [{copy,CopyFrom}]: (re)initialize this actor as a copy of another actor
%%   and return a status rowset (<<"ok">>/<<"failed">>/failed_running).
%%   NOTE(review): CopyFrom is only used for logging here; the copy source is
%%   presumably carried inside Start's options - TODO confirm with callers.
%% - [delete]: forwarded as a delete write.
%% - {Sql,[]} / Sql: a regular read forwarded as a #read{} call.
read(Name,Flags,{[{copy,CopyFrom}],_},Start) ->
	read(Name,Flags,[{copy,CopyFrom}],Start);
read(Name,Flags,[{copy,CopyFrom}],Start) ->
	case distreg:whereis(Name) of
		undefined ->
			%% Not running: a dummy read starts the actor, which performs the copy.
			R = #read{sql = <<"select * from __adb limit 1;">>, flags = Flags},
			case call(Name,Flags,R,Start) of
				{ok,_} ->
					{ok,[{columns,{<<"status">>}},{rows,[{<<"ok">>}]}]};
				_E ->
					?AERR("Unable to copy actor ~p to ~p",[CopyFrom,Name]),
					{ok,[{columns,{<<"status">>}},{rows,[{<<"failed">>}]}]}
			end;
		Pid ->
			%% An instance is already running; ask it to die and wait for its
			%% 'DOWN' before retrying the copy.
			diepls(Pid,overwrite),
			Ref = erlang:monitor(process,Pid),
			receive
				{'DOWN',Ref,_,_Pid,_} ->
					read(Name,Flags,[{copy,CopyFrom}],Start)
			after 2000 ->
				%% Fix: the monitor used to leak on timeout; a late 'DOWN'
				%% message would then pile up in the caller's mailbox.
				erlang:demonitor(Ref,[flush]),
				%% NOTE(review): this branch returns {row,{...}} while the
				%% branches above use {rows,[{...}]}; kept as-is since callers
				%% may rely on the shape, but it looks inconsistent.
				{ok,[{columns,{<<"status">>}},{row,{<<"failed_running">>}}]}
			end
	end;
read(Name,Flags,[delete],Start) ->
	call(Name,Flags,#write{sql = delete, flags = Flags},Start);
read(Name,Flags,{Sql,[]},Start) ->
	read(Name,Flags,Sql,Start);
read(Name,Flags,Sql,Start) ->
	call(Name,Flags,#read{sql = Sql, flags = Flags},Start).
%% @doc Public write API. Normalizes the accepted input shapes - plain Sql,
%% {Sql,Records}, [delete], and multi-actor transaction writes
%% {MFA,TransactionId,Sql} (including commit/abort) - into a #write{} record
%% or a {commit,Bool,TransactionId} tuple and forwards it via call/4.
%% Clause order matters: the {_, _} tuple clauses must be tried before the
%% generic Sql clause.
write(Name,Flags,{Sql,[]},Start) ->
	write(Name,Flags,Sql,Start);
write(Name,Flags,{{_,_,_} = TransactionId,Sql},Start) ->
	%% Transaction id without an MFA callback.
	write(Name,Flags,{undefined,TransactionId,Sql},Start);
write(Name,Flags,{MFA,TransactionId,Sql},Start) ->
	case TransactionId of
		{_,_,_} ->
			%% Part of a multi-actor transaction.
			case Sql of
				commit ->
					call(Name,Flags,{commit,true,TransactionId},Start);
				abort ->
					call(Name,Flags,{commit,false,TransactionId},Start);
				[delete] ->
					W = #write{mfa = MFA,sql = delete, transaction = TransactionId, flags = Flags},
					call(Name,Flags,W,Start);
				{Sql0, PreparedStatements} ->
					W = #write{mfa = MFA,sql = Sql0, records = PreparedStatements,
						transaction = TransactionId, flags = Flags},
					call(Name,Flags,W,Start);
				_ ->
					W = #write{mfa = MFA,sql = Sql,
						transaction = TransactionId, flags = Flags},
					call(Name,Flags,W,Start)
			end;
		_ when Sql == undefined ->
			call(Name,Flags,#write{mfa = MFA, flags = Flags},Start);
		_ when tuple_size(Sql) == 2 ->
			{Sql0,Rec} = Sql,
			W = #write{mfa = MFA, sql = Sql0, records = Rec, flags = Flags},
			%% Non-transaction writes may wait for an ongoing election.
			call(Name,[wait_election|Flags],W,Start);
		_ ->
			W = #write{mfa = MFA, sql = Sql, flags = Flags},
			call(Name,[wait_election|Flags],W,Start)
	end;
write(Name,Flags,[delete],Start) ->
	call(Name,Flags,#write{sql = delete, flags = Flags},Start);
write(Name,Flags,{Sql,Records},Start) ->
	W = #write{sql = Sql, records = Records, flags = Flags},
	call(Name,[wait_election|Flags],W,Start);
write(Name,Flags,Sql,Start) ->
	W = #write{sql = Sql, flags = Flags},
	call(Name,[wait_election|Flags],W,Start).
%% @doc Same as call/5 with redirect tracking disabled (IsRedirect = false).
call(Name,Flags,Msg,Start) ->
	call(Name,Flags,Msg,Start,false).
%% @doc Resolve the actor process for Name, starting it if needed, then
%% delegate to call/6. IsRedirect tracks whether this call is already a
%% redirect hop, so redirect loops can be reported as double_redirect.
call(Name,Flags,Msg,Start,IsRedirect) ->
	case distreg:whereis(Name) of
		undefined ->
			%% NOTE(review): the inner `case startactor(Name,Start,Flags) of`
			%% line was missing in the reviewed copy (the clause patterns and
			%% the matching `end` remained); restored here to match the
			%% possible return values of startactor/3.
			case startactor(Name,Start,Flags) of
				{ok,Pid} when is_pid(Pid) ->
					call(Name,Flags,Msg,Start,IsRedirect,Pid);
				{error,nocreate} ->
					{error,nocreate};
				Res ->
					Res
			end;
		Pid ->
			% ?INF("Call have pid ~p for name ~p, alive ~p",[Pid,Name,erlang:is_process_alive(Pid)]),
			call(Name,Flags,Msg,Start,IsRedirect,Pid)
	end.
%% @doc Perform the actual gen_server call on a resolved actor pid and
%% handle every failure mode: redirects to another node (both inside and
%% outside this cluster), redirect loops, processes that died or restarted
%% mid-call, and rpc/connection errors.
call(Name,Flags,Msg,Start,IsRedirect,Pid) ->
	case catch gen_server:call(Pid,Msg,infinity) of
		{redirect,Node} when is_binary(Node) ->
			%% Actor says another node is master.
			?ADBG("Redirect call to=~p, for=~p, ~p",[Node,Name,Msg]),
			case lists:member(Node,bkdcore:cluster_nodes()) of
				true ->
					case IsRedirect of
						true ->
							%% Already on a redirect hop - report the loop.
							double_redirect;
						_ ->
							case actordb:rpc(Node,element(1,Name),{?MODULE,call,[Name,Flags,Msg,Start,true]}) of
								double_redirect ->
									%% Remote also redirected: local actor's idea
									%% of the master is stale; kill and retry.
									diepls(Pid,nomaster),
									call(Name,Flags,Msg,Start);
								Res ->
									Res
							end
					end;
				false ->
					%% Master is outside our cluster.
					case IsRedirect of
						onlylocal ->
							{redirect,Node};
						_ ->
							case actordb:rpc(Node,element(1,Name),{?MODULE,call,[Name,Flags,Msg,Start,false]}) of
								{error,Connerr} when Connerr == econnrefused; Connerr == timeout; Connerr == invalidnode ->
									%% Remote unreachable: nudge local actor to
									%% run an election, then retry on it.
									Pid ! doelection,
									call(Name,Flags,Msg,Start,false,Pid);
								Res ->
									?ADBG("Redirect rpc res=~p",[Res]),
									Res
							end
					end
			end;
		{'EXIT',{noproc,_}} = _X ->
			%% Process died between whereis and call; re-resolve and retry.
			?ADBG("noproc call again ~p",[_X]),
			call(Name,Flags,Msg,Start);
		{'EXIT',{normal,_}} ->
			?ADBG("died normal"),
			call(Name,Flags,Msg,Start);
		{'EXIT',{nocreate,_}} ->
			{error,nocreate};
		{'EXIT',{error,_} = E} ->
			E;
		{'EXIT',{timeout,_}} ->
			{error,timeout};
		Res ->
			Res
	end.
%% @doc Resolve the Start specification into a started actor (or a value):
%% undefined means "do not start" ({ok,undefined}); {Mod,Func,Args} calls
%% Mod:Func(Name,Args...); any other term is treated as a callback module
%% and its start(Name,Flags) is invoked.
startactor(Name,Start,Flags) ->
	case Start of
		undefined ->
			{ok,undefined};
		{Mod,Func,Args} ->
			apply(Mod,Func,[Name|Args]);
		CallbackMod ->
			apply(CallbackMod,start,[Name,Flags])
	end.
test_mon_calls(Who , Msg ) - >
put(refpid , spawn(fun ( ) - > test_mon_proc(Who , Msg , Ref ) end ) ) .
after 1000 - >
? waiting on ~p , for ~p",[Who , Msg ] ) ,
butil : safesend(get(refpid ) , get(ref ) ) .
%% @doc Same as call_slave/5 with no extra start flags.
call_slave(Cb,Actor,Type,Msg) ->
	call_slave(Cb,Actor,Type,Msg,[]).
%% @doc Call the slave-side process of an actor, starting it through the
%% callback module (Cb:cb_slave_pid/3) if necessary. Retries while the
%% process is dying/restarting (noproc or normal exit), except for stop.
call_slave(Cb,Actor,Type,Msg,Flags) ->
	actordb_util:wait_for_startup(Type,Actor,0),
	%% cb_slave_pid may return {ok,Pid} or a bare pid; Pid stays bound
	%% after the case because both branches bind it.
	case apply(Cb,cb_slave_pid,[Actor,Type,[{startreason,Msg}|Flags]]) of
		{ok,Pid} ->
			ok;
		Pid when is_pid(Pid) ->
			ok
	end,
	case catch gen_server:call(Pid,Msg,infinity) of
		{'EXIT',{noproc,_}} when Msg /= stop ->
			%% Fix: retries used to go through call_slave/4, silently
			%% dropping the caller-supplied Flags.
			call_slave(Cb,Actor,Type,Msg,Flags);
		{'EXIT',{normal,_}} when Msg /= stop ->
			call_slave(Cb,Actor,Type,Msg,Flags);
		Res ->
			Res
	end.
%% @doc Asynchronously ask an actor process to shut itself down; Reason is
%% carried inside the {diepls,Reason} cast consumed by the actor's
%% handle_cast (not visible in this chunk).
diepls(Pid,Reason) ->
	gen_server:cast(Pid,{diepls,Reason}).
%% @doc Entry point for start_copylock/3 with the retry counter at 0.
start_copylock(Fullname,O) ->
	start_copylock(Fullname,O,0).
%% @doc Start an actor under a copy-lock name, retrying (up to two attempts)
%% while a process is still registered under Fullname. Each retry gives the
%% old process ~1s to go away; when it never does, the still-running pid is
%% reported as {error,{slave_proc_running,Pid,Fullname}}.
start_copylock(Fullname,Opt,N) when N < 2 ->
	Registered = distreg:whereis(Fullname),
	if
		Registered == undefined ->
			start(Opt);
		true ->
			%% Old registration still alive - wait and try again.
			timer:sleep(1000),
			start_copylock(Fullname,Opt,N+1)
	end;
start_copylock(Fullname,_,_) ->
	Pid = distreg:whereis(Fullname),
	print_info(Pid),
	{error,{slave_proc_running,Pid,Fullname}}.
Opts :
[ { actor , Name},{type , Type},{mod , CallbackModule},{state , CallbackState } ,
{ inactivity_timeout , SecondsOrInfinity},{slave , true / false},{copyfrom , NodeName},{copyreset,{Mod , Func , } } ]
%% @doc Start an actor gen_server. Passes {start_from,{self(),Ref}} into the
%% init options so init/1 can send back a tagged explanation (see explain/2
%% and the receive below) when it stops gracefully instead of registering.
%% Returns {ok,Pid} | {ok,Path,Num} | a result rowset | {error,_}.
start(Opts) ->
	% NOTE(review): the next line appears to be a debug line that lost its
	% leading '%' in this copy of the file (token-spaced, trailing comma
	% inside ds_val) - it is not valid Erlang as written.
	? ADBG("Starting ~p slave=~p",[butil : ds_vals([actor , type],Opts),butil : ds_val(slave , ) ] ) ,
	Mod = butil:ds_val(mod,Opts),
	Name = butil:ds_val(name,Opts),
	Ref = make_ref(),
	case gen_server:start(?MODULE, [{start_from,{self(),Ref}}|Opts], [apply(Mod,cb_spawnopts,[Name])]) of
		{ok,Pid} ->
			{ok,Pid};
		{error,normal} ->
			% NOTE(review): the next line is a comment that lost its '%'
			% ("Init failed gracefully ..."); init has sent an explanation
			% message before stopping, so it is already in our mailbox.
			Init failed gracefully . It should have sent an explanation .
			receive
				{Ref,nocreate} ->
					{error,nocreate};
				{Ref,{registered,Pid}} ->
					{ok,Pid};
				{Ref,{actornum,Path,Num}} ->
					{ok,Path,Num};
				{Ref,{ok,[{columns,_},_]} = Res} ->
					Res;
				{Ref,nostart} ->
					{error,nostart}
			after 0 ->
				{error,cantstart}
			end;
		Err ->
			?AERR("start sqlproc error ~p",[Err]),
			Err
	end.
%% @doc Ask an actor to stop. Accepts either a pid or a registered actor
%% name; a name is resolved through distreg first and silently ignored when
%% nothing is registered under it.
stop(Proc) when is_pid(Proc) ->
	Proc ! stop;
stop(Name) ->
	Registered = distreg:whereis(Name),
	case Registered of
		undefined ->
			ok;
		Proc ->
			stop(Proc)
	end.
%% @doc Ask an actor to dump its state (async cast). Accepts undefined
%% (no-op), an {Actor,Type} name resolved through distreg, or a pid.
print_info(undefined) ->
	ok;
print_info({Actor,Type}) ->
	Registered = distreg:whereis({Actor,Type}),
	print_info(Registered);
print_info(Proc) ->
	gen_server:cast(Proc,print_info).
Only in handle_call are we allowed to add calls to callqueue .
%% @doc gen_server call entry point. Clause order:
%%  1. actor moved to another node -> redirect (possibly via callback);
%%  2. dbcopy traffic (NOTE: clause head missing in this copy, see below);
%%  3/4. raft state messages, queued while an async write is in flight;
%%  5. commit/abort of a multi-actor transaction;
%%  6. everything else: slave redirect, unverified queuing, transaction
%%     priority handling, reads/writes, deleted-actor handling, or queue.
handle_call(Msg,_,P) when is_binary(P#dp.movedtonode) ->
	?DBG("REDIRECT BECAUSE MOVED TO NODE ~p ~p",[P#dp.movedtonode,Msg]),
	case apply(P#dp.cbmod,cb_redirected_call,[P#dp.cbstate,P#dp.movedtonode,Msg,moved]) of
		{reply,What,NS,Red} ->
			{reply,What,P#dp{cbstate = NS, movedtonode = Red}};
		ok ->
			{reply,{redirect,P#dp.movedtonode},P}
	end;
	% NOTE(review): a clause head is missing here in this copy of the file -
	% the body below references CallFrom and a dbcopy Msg, so the original
	% head was presumably `handle_call({dbcopy,Msg},CallFrom,P) ->`.
	% As written, this code is not valid Erlang.
	Me = actordb_conf:node_name(),
	case ok of
		_ when element(1,Msg) == send_db andalso P#dp.verified == false ->
			{noreply,P#dp{callqueue = queue:in_r({CallFrom,{dbcopy,Msg}},P#dp.callqueue),
				activity = actordb_local:actor_activity(P#dp.activity)}};
		_ when element(1,Msg) == send_db andalso Me /= P#dp.masternode ->
			?DBG("redirect not master node"),
			actordb_sqlprocutil:redirect_master(P);
		_ ->
			actordb_sqlprocutil:dbcopy_call(Msg,CallFrom,
				P#dp{activity = actordb_local:actor_activity(P#dp.activity)})
	end;
%% While an async write is in flight (wasync wait ref set), state messages
%% must not interleave with it - buffer them in statequeue.
handle_call({state_rw,_} = Msg,From, #dp{wasync = #ai{wait = WRef}} = P) when is_reference(WRef) ->
	?DBG("Queuing state call, waitingfor=~p, msg=~p",[WRef,Msg]),
	{noreply,P#dp{statequeue = queue:in_r({From,Msg},P#dp.statequeue)}};
handle_call({state_rw,What},From,P) ->
	state_rw_call(What,From,P#dp{activity = actordb_local:actor_activity(P#dp.activity)});
handle_call({commit,Doit,Id},From, P) ->
	commit_call(Doit,Id,From,P#dp{activity = actordb_local:actor_activity(P#dp.activity)});
handle_call(Msg,From,P) ->
	case Msg of
		_ when P#dp.mors == slave ->
			%% Slaves never serve client calls themselves.
			case P#dp.masternode of
				undefined when P#dp.election_timer == undefined, is_integer(P#dp.without_master_since) ->
					%% No leader and no election running - start one, queue the call.
					{noreply,actordb_sqlprocutil:start_verify(
						P#dp{callqueue = queue:in_r({From,Msg},P#dp.callqueue),
							flags = P#dp.flags band (bnot ?FLAG_WAIT_ELECTION)},false)};
				undefined ->
					?DBG("Queing msg no master yet ~p",[Msg]),
					{noreply,P#dp{callqueue = queue:in_r({From,Msg},P#dp.callqueue),
						election_timer = actordb_sqlprocutil:election_timer(P#dp.election_timer),
						flags = P#dp.flags band (bnot ?FLAG_WAIT_ELECTION)}};
				_ ->
					%% Known leader - let the callback veto, else redirect.
					case apply(P#dp.cbmod,cb_redirected_call,[P#dp.cbstate,P#dp.masternode,Msg,slave]) of
						{reply,What,NS,_} ->
							{reply,What,P#dp{cbstate = NS}};
						ok ->
							?DBG("Redirecting to master"),
							actordb_sqlprocutil:redirect_master(P)
					end
			end;
		_ when P#dp.verified == false ->
			case P#dp.flags band ?FLAG_WAIT_ELECTION > 0 of
				true when element(1,P#dp.election_timer) == election ->
					?DBG("clear wait_election"),
					% NOTE(review): the next line is token-spaced and appears
					% to be a de-commented line from an older version.
					P#dp.election_timer ! exit ,
					case P#dp.election_timer of
						{election,undefined,_} ->
							self() ! {election_timeout, element(3, P#dp.election_timer)};
						_ ->
							ok
					end,
					handle_call(Msg,From,P#dp{flags = P#dp.flags band (bnot ?FLAG_WAIT_ELECTION)});
				_ ->
					%% Defer to the callback while unverified.
					case apply(P#dp.cbmod,cb_unverified_call,[P#dp.cbstate,Msg]) of
						queue ->
							{noreply,P#dp{callqueue = queue:in_r({From,Msg},P#dp.callqueue)}};
						{moved,Moved} ->
							{noreply,P#dp{movedtonode = Moved}};
						{moved,Moved,NS} ->
							{noreply,P#dp{movedtonode = Moved, cbstate = NS}};
						{reply,What} ->
							{reply,What,P};
						{reinit_master,Mors} ->
							{ok,NP} = init(P#dp{mors = Mors},cb_reinit),
							{noreply,NP};
						{isolate,_OutSql,_State} ->
							write_call(Msg,From,P);
						{reinit,Sql,NS} ->
							{ok,NP} = init(P#dp{cbstate = NS,
								callqueue = queue:in_r({From,#write{sql = Sql}},P#dp.callqueue)},cb_reinit),
							{noreply,NP}
					end
			end;
		#write{transaction = TransactionId} = Msg1 when P#dp.transactionid /= undefined,
				element(2,P#dp.transactionid) >= element(2,TransactionId) ->
			%% A transaction is active. Same id -> continue it; an older id
			%% gets overruled and the call is queued.
			case ok of
				_ when P#dp.transactionid == TransactionId ->
					write_call1(Msg1,From,P#dp.schemavers,P);
				_ ->
					{Tid,Updaterid,_} = P#dp.transactionid,
					?DBG("Overruling transaction ~p.'__mupdate__', for ~p.'__mupdate__'",
						[Updaterid,element(2,TransactionId)]),
					actordb_sqlprocutil:transaction_overruled(Tid,Updaterid),
					{noreply,timeout(P#dp{callqueue = queue:in_r({From,Msg},P#dp.callqueue)})}
			end;
		#read{} when P#dp.movedtonode == undefined ->
			read_call(Msg,From,P);
		#write{transaction = undefined} when P#dp.movedtonode == undefined ->
			write_call(Msg,From,P);
		_ when P#dp.movedtonode == deleted andalso (element(1,Msg) == read orelse element(1,Msg) == write) ->
			%% #read and #write keep flags in the same record position.
			Flags = element(#write.flags,Msg),
			case lists:member(create,Flags) of
				true ->
					{stop,normal,P};
				false ->
					{reply, {error,nocreate},P}
			end;
		_ ->
			% NOTE(review): the next line is the dangling argument list of a
			% ?DBG call whose first (string) line is missing in this copy -
			% not valid Erlang as written.
			[ Msg , P#dp.callres , P#dp.locked , P#dp.transactionid ] ) ,
			{noreply,timeout(P#dp{callqueue = queue:in_r({From,Msg},P#dp.callqueue),
				activity = actordb_local:actor_activity(P#dp.activity)})}
	end.
%% @doc Arm the {timeout,N} self-message with a resend counter of 0.
timeout(P) ->
	timeout(P,0).
%% @doc Schedule a {timeout,Resend+1} self-message unless one is already
%% pending (tracked via the ?TIMEOUT_PENDING bit in #dp.flags). Returns the
%% possibly-updated state record.
timeout(P,Resend) ->
	Flags = P#dp.flags,
	case Flags band ?TIMEOUT_PENDING of
		0 ->
			%% Nothing pending yet: schedule and mark as pending.
			self() ! {timeout,Resend+1},
			P#dp{flags = Flags bor ?TIMEOUT_PENDING};
		_ ->
			P
	end.
%% @doc Commit (Doit == true) or roll back (false) the currently active
%% multi-actor transaction Id. A commit with no followers applies locally
%% and replies immediately; with followers it replicates first and replies
%% from the appendentries_response path. A rollback either just rolls back
%% (no followers) or also deletes the row from __transactions via a new write.
%% Any Id other than the active transaction is acknowledged with ok.
commit_call(Doit,Id,From,P) ->
	?DBG("Commit doit=~p, id=~p, from=~p, trans=~p",[Doit,Id,From,P#dp.transactionid]),
	case P#dp.transactionid == Id of
		true ->
			%% Stop watching the transaction coordinator.
			case P#dp.transactioncheckref of
				undefined ->
					ok;
				_ ->
					erlang:demonitor(P#dp.transactioncheckref)
			end,
			?DBG("Commit write ~p",[P#dp.transactioninfo]),
			{Sql,EvNum,_NewVers} = P#dp.transactioninfo,
			%% A committed "delete" transaction marks the actor deleted.
			case Sql of
				<<"delete">> when Doit == true ->
					Moved = deleted;
				_ ->
					Moved = P#dp.movedtonode
			end,
			case Doit of
				true when P#dp.followers == [] ->
					%% Single node - apply locally, reply right away.
					case Moved of
						deleted ->
							Me = self(),
							actordb_sqlprocutil:delete_actor(P),
							spawn(fun() -> ?DBG("Stopping in commit"), stop(Me) end);
						_ ->
							VarHeader = actordb_sqlprocutil:create_var_header(P),
							ok = actordb_sqlite:okornot(actordb_sqlite:exec(
								P#dp.db,<<"#s01;">>,P#dp.evterm,EvNum,VarHeader)),
							actordb_sqlite:replication_done(P#dp.db)
					end,
					{reply,ok,actordb_sqlprocutil:doqueue(P#dp{transactionid = undefined,
						transactioncheckref = undefined,
						transactioninfo = undefined, movedtonode = Moved,
						evnum = EvNum, evterm = P#dp.current_term})};
				true ->
					% NOTE(review): the next two lines are comments that lost
					% their '%' in this copy - not valid Erlang as written.
					This will send the remaining WAL pages to followers that have commit flag set .
					Followers will then rpc back appendentries_response .
					VarHeader = actordb_sqlprocutil:create_var_header(P),
					actordb_sqlite:okornot(actordb_sqlite:exec(P#dp.db,<<"#s01;">>,
						P#dp.evterm,EvNum,VarHeader)),
					actordb_sqlite:replication_done(P#dp.db),
					%% Reply deferred until followers acknowledge.
					{noreply,ae_timer(P#dp{callfrom = From,
						callres = ok,evnum = EvNum,movedtonode = Moved,
						transactionid = undefined, transactioninfo = undefined,
						transactioncheckref = undefined})};
				false when P#dp.followers == [] ->
					actordb_sqlite:rollback(P#dp.db),
					{reply,ok,actordb_sqlprocutil:doqueue(P#dp{transactionid = undefined,
						transactioninfo = undefined,transactioncheckref = undefined})};
				false ->
					% NOTE(review): the next two lines are comments that lost
					% their '%' in this copy - not valid Erlang as written.
					EvNum will actually be the same as transactionsql that we have not finished .
					Thus this EvNum section of WAL contains pages from failed transaction and
					{Tid,Updaterid,_} = P#dp.transactionid,
					actordb_sqlite:rollback(P#dp.db),
					%% Clean the failed transaction out of __transactions.
					NewSql = <<"DELETE FROM __transactions WHERE tid=",(butil:tobin(Tid))/binary," AND updater=",
						(butil:tobin(Updaterid))/binary,";">>,
					write_call(#write{sql = NewSql},From,P#dp{callfrom = undefined,
						transactionid = undefined,transactioninfo = undefined,
						transactioncheckref = undefined})
			end;
		_ ->
			{reply,ok,P}
	end.
%% @doc Raft state machine messages: appendentries start/wal/response,
%% request_vote, delete notifications and checkpoint requests.
%% NOTE(review): this copy of the file has several lines whose leading '%'
%% was lost (token-spaced lines flagged below) and at least one case whose
%% true/false branch lines are missing - preserved byte-for-byte here.
state_rw_call(donothing,_From,P) ->
	{reply,ok,P};
state_rw_call(recovered,_From,P) ->
	?DBG("No longer in recovery!"),
	{reply,ok,P#dp{inrecovery = false}};
state_rw_call({appendentries_start,Term,LeaderNode,PrevEvnum,PrevTerm,AEType,CallCount} = What,From,P) ->
	% NOTE(review): next three lines are de-commented comments
	% (AE is split into multiple calls; AEType in [head,empty,recover]).
	AE is split into multiple calls ( because wal is sent page by page as it is written )
	Start sets parameters . There may not be any wal append calls after if empty write .
	AEType = [ head , empty , recover ]
	?DBG("AE start ~p {PrevEvnum,PrevTerm}=~p leader=~p",[AEType, {PrevEvnum,PrevTerm},LeaderNode]),
	RecoveryAge = actordb_local:elapsed_time() - P#dp.recovery_age,
	case ok of
		_ when P#dp.inrecovery andalso RecoveryAge > 2000 andalso AEType == head ->
			% NOTE(review): the inner case below is missing its true/false
			% branch lines in this copy - not valid Erlang as written.
			case RecoveryAge > 2000 of
				?ERR("Recovery mode timeout",[]),
				state_rw_call(What,From,P#dp{inrecovery = false});
		_ when is_pid(P#dp.copyproc) ->
			?DBG("Ignoring AE because copy in progress"),
			% NOTE(review): token-spaced de-commented line follows.
			actordb_sqlprocutil : ae_respond(P , LeaderNode , wrongstate , PrevEvnum , AEType , CallCount ) ,
			{reply,false,P};
		_ when Term < P#dp.current_term ->
			%% Sender's term is stale - refuse and tell it so.
			?WARN("AE start, input term too old ~p {InTerm,MyTerm}=~p",
				[AEType,{Term,P#dp.current_term}]),
			reply(From,false),
			actordb_sqlprocutil:ae_respond(P,LeaderNode,false,PrevEvnum,AEType,CallCount),
			case P#dp.mors of
				master ->
					% NOTE(review): token-spaced de-commented line follows.
					{ noreply , actordb_sqlprocutil : start_verify(P , false ) } ;
					?DBG("Executing empty write"),
					write_call(#write{sql = []},undefined,P);
				_ ->
					{noreply,P}
			end;
		_ when P#dp.mors == slave, P#dp.masternode /= LeaderNode ->
			%% Learn who the leader is; flush pending repliers with redirects.
			?DBG("AE start, slave now knows leader ~p ~p",[AEType,LeaderNode]),
			reply(P#dp.callfrom,{redirect,LeaderNode}),
			RR = P#dp.rasync,
			case RR#ai.callfrom of
				[_|_] ->
					[reply(F,{redirect,P#dp.masternode}) || F <- RR#ai.callfrom];
				_ ->
					ok
			end,
			actordb_local:actor_mors(slave,LeaderNode),
			NP = P#dp{masternode = LeaderNode,without_master_since = undefined,
				masternode_since = actordb_local:elapsed_time(),
				masternodedist = bkdcore:dist_name(LeaderNode),
				netchanges = actordb_local:net_changes(),
				election_timer = undefined,
				rasync = RR#ai{callfrom = undefined, wait = undefined},
				callfrom = undefined, callres = undefined,verified = true},
			state_rw_call(What,From,actordb_sqlprocutil:doqueue(actordb_sqlprocutil:reopen_db(NP)));
		_ when P#dp.mors == master ->
			%% Another leader with an acceptable term - step down.
			?WARN("AE start, stepping down as leader ~p ~p",
				[AEType,{Term,P#dp.current_term}]),
			reply(P#dp.callfrom,{redirect,LeaderNode}),
			RR = P#dp.rasync,
			case RR#ai.callfrom of
				[_|_] ->
					[reply(F,{redirect,P#dp.masternode}) || F <- RR#ai.callfrom];
				_ ->
					ok
			end,
			actordb_local:actor_mors(slave,LeaderNode),
			NP = P#dp{mors = slave, verified = true, election_timer = undefined,
				voted_for = undefined,callfrom = undefined, callres = undefined,
				rasync = RR#ai{callfrom = undefined, wait = undefined},
				masternode = LeaderNode,without_master_since = undefined,
				netchanges = actordb_local:net_changes(),
				masternode_since = actordb_local:elapsed_time(),
				masternodedist = bkdcore:dist_name(LeaderNode),
				current_term = Term},
			state_rw_call(What,From,
				actordb_sqlprocutil:save_term(actordb_sqlprocutil:doqueue(actordb_sqlprocutil:reopen_db(NP))));
		_ when P#dp.evnum /= PrevEvnum; P#dp.evterm /= PrevTerm ->
			%% Log mismatch - possibly rewind, then ask leader for earlier data.
			?WARN("AE start failed, evnum evterm do not match, type=~p, {MyEvnum,MyTerm}=~p, {InNum,InTerm}=~p",
				[AEType,{P#dp.evnum,P#dp.evterm},{PrevEvnum,PrevTerm}]),
			case ok of
				_ when PrevEvnum > 0, AEType == recover, P#dp.evnum > 0 ->
					NP = actordb_sqlprocutil:rewind_wal(P);
				_ ->
					NP = P
			end,
			reply(From,false),
			actordb_sqlprocutil:ae_respond(NP,LeaderNode,false,PrevEvnum,AEType,CallCount),
			{noreply,NP};
		_ when Term > P#dp.current_term ->
			%% Adopt the newer term and the sender as leader.
			?WARN("AE start, my term out of date type=~p {InTerm,MyTerm}=~p",
				[AEType,{Term,P#dp.current_term}]),
			NP = P#dp{current_term = Term,voted_for = undefined,
				masternode = LeaderNode, without_master_since = undefined,verified = true,
				netchanges = actordb_local:net_changes(),
				masternode_since = actordb_local:elapsed_time(),
				masternodedist = bkdcore:dist_name(LeaderNode)},
			state_rw_call(What,From,actordb_sqlprocutil:doqueue(actordb_sqlprocutil:save_term(NP)));
		_ when AEType == empty ->
			?DBG("AE start, ok for empty"),
			reply(From,ok),
			actordb_sqlprocutil:ae_respond(P,LeaderNode,true,PrevEvnum,AEType,CallCount),
			{noreply,P#dp{verified = true, masternode_since = actordb_local:elapsed_time()}};
		% NOTE(review): de-commented line ("Ok, now it will start...").
		Ok , now it will start receiving wal pages
		_ ->
			case AEType == recover of
				true ->
					Age = actordb_local:elapsed_time(),
					?INF("AE start ok for recovery from ~p, evnum=~p, evterm=~p",
						[LeaderNode,P#dp.evnum,P#dp.evterm]);
				false ->
					Age = P#dp.recovery_age,
					?DBG("AE start ok from ~p",[LeaderNode])
			end,
			{reply,ok,P#dp{verified = true, masternode_since = actordb_local:elapsed_time(), inrecovery = AEType == recover, recovery_age = Age}}
	end;
state_rw_call({appendentries_wal,Header,Body,AEType,CallCount},From,P) ->
	append_wal(P,From,CallCount,Header,Body,AEType);
state_rw_call({appendentries_response,Node,CurrentTerm,Success,
		EvNum,EvTerm,MatchEvnum,AEType,{SentIndex,SentTerm}} = What,From,P) ->
	%% Leader side: a follower acknowledged (or rejected) replicated pages.
	Follower = lists:keyfind(Node,#flw.node,P#dp.followers),
	case Follower of
		false ->
			?DBG("Adding node to follower list ~p",[Node]),
			state_rw_call(What,From,actordb_sqlprocutil:store_follower(P,#flw{node = Node}));
		% NOTE(review): the next guard lost its leading `_ when` in this
		% copy (first line is token-spaced) - not valid Erlang as written.
		( not ( AEType = = head andalso Success ) ) andalso
		(SentIndex /= Follower#flw.match_index orelse
			SentTerm /= Follower#flw.match_term orelse P#dp.verified == false) ->
			% NOTE(review): next three lines are de-commented comments
			% explaining the stale-response check.
			We can get responses from AE calls which are out of date . This is why the other node always sends
			back { SentIndex , SentTerm } which are the parameters for follower that we knew of when we sent data .
			If these two parameters match our current state , then response is valid .
			?DBG("ignoring AE resp, from=~p,success=~p,type=~p,prevevnum=~p,evnum=~p,matchev=~p, sent=~p",
				[Node,Success,AEType,Follower#flw.match_index,EvNum,MatchEvnum,{SentIndex,SentTerm}]),
			{reply,ok,P};
		_ ->
			?DBG("AE resp,from=~p,success=~p,type=~p,prevnum=~p,prevterm=~p evnum=~p,evterm=~p,matchev=~p",
				[Node,Success,AEType,Follower#flw.match_index,Follower#flw.match_term,EvNum,EvTerm,MatchEvnum]),
			Now = actordb_local:elapsed_time(),
			NFlw = Follower#flw{match_index = EvNum, match_term = EvTerm,next_index = EvNum+1,
				wait_for_response_since = undefined, last_seen = Now},
			case Success of
				_ when P#dp.mors == slave ->
					?WARN("Received AE response after stepping down"),
					{reply,ok,P};
				true ->
					reply(From,ok),
					% NOTE(review): the continue_maybe call below is missing
					% lines in this copy; the dangling `orelse AEType = =
					% empty` fragment is what remains of its third argument.
					NP = actordb_sqlprocutil:reply_maybe(actordb_sqlprocutil:continue_maybe(
					orelse AEType = = empty
					?DBG("AE response for node ~p, followers=~p",
						[Node,[{F#flw.node,F#flw.match_index,F#flw.match_term,F#flw.next_index} || F <- NP#dp.followers]]),
					{noreply,NP};
				false when P#dp.current_term < CurrentTerm ->
					%% Follower is ahead of us - step down as leader.
					?DBG("My term is out of date {His,Mine}=~p",[{CurrentTerm,P#dp.current_term}]),
					{reply, ok, P#dp{masternode = undefined, without_master_since = Now,
						masternodedist = undefined, verified = false,
						current_term = CurrentTerm,election_timer = actordb_sqlprocutil:election_timer(Now,undefined)}};
					% NOTE(review): token-spaced de-commented line follows.
					election_timer = actordb_sqlprocutil : election_timer(Now , P#dp.election_timer ) ,
				false when NFlw#flw.match_index == P#dp.evnum ->
					%% Follower is actually up to date (term check only).
					{reply,ok,actordb_sqlprocutil:reply_maybe(actordb_sqlprocutil:store_follower(P,NFlw))};
				false ->
					%% Follower is behind: either already receiving a full db
					%% copy, or try WAL recovery, or fall back to a full send.
					case [C || C <- P#dp.dbcopy_to, C#cpto.node == Node, C#cpto.actorname == P#dp.actorname] of
						[_|_] ->
							?DBG("Ignoring appendendentries false response because copying to"),
							{reply,ok,P};
						[] ->
							case actordb_sqlprocutil:try_wal_recover(P,NFlw) of
								{false,NP,NF} ->
									?DBG("Can not recover from log, sending entire db"),
									% NOTE(review): de-commented line follows.
									We can not recover from wal . Send entire db .
									Ref = make_ref(),
									case bkdcore:rpc(NF#flw.node,{?MODULE,call_slave,
											[P#dp.cbmod,P#dp.actorname,P#dp.actortype,
											{dbcopy,{start_receive,actordb_conf:node_name(),Ref}}]}) of
										ok ->
											DC = {send_db,{NF#flw.node,Ref,false,P#dp.actorname}},
											actordb_sqlprocutil:dbcopy_call(DC,From,NP);
										_Err ->
											?ERR("Unable to send db ~p",[_Err]),
											{reply,false,P}
									end;
								{true,NP,NF} ->
									% NOTE(review): de-commented line follows.
									we can recover from wal
									?DBG("Recovering from wal, for node=~p, match_index=~p, match_term=~p, myevnum=~p",
										[NF#flw.node,NF#flw.match_index,NF#flw.match_term,P#dp.evnum]),
									reply(From,ok),
									{noreply,actordb_sqlprocutil:continue_maybe(NP,NF,false)}
							end
					end
			end
	end;
state_rw_call({request_vote,Candidate,NewTerm,LastEvnum,LastTerm} = What,From,P) ->
	?DBG("Request vote for=~p, mors=~p, {histerm,myterm}=~p, {HisLogTerm,MyLogTerm}=~p {HisEvnum,MyEvnum}=~p",
		[Candidate,P#dp.mors,{NewTerm,P#dp.current_term},{LastTerm,P#dp.evterm},{LastEvnum,P#dp.evnum}]),
	%% Is the candidate's log at least as up to date as ours (term first,
	%% then event number)?
	Uptodate =
		case ok of
			_ when P#dp.evterm < LastTerm ->
				true;
			_ when P#dp.evterm > LastTerm ->
				false;
			_ when P#dp.evnum < LastEvnum ->
				true;
			_ when P#dp.evnum > LastEvnum ->
				false;
			_ ->
				true
		end,
	Follower = lists:keyfind(Candidate,#flw.node,P#dp.followers),
	DistFollower = bkdcore:dist_name(Candidate),
	Now = actordb_local:elapsed_time(),
	case Follower of
		false when P#dp.mors == master, DistFollower /= undefined ->
			?DBG("Adding node to follower list ~p",[Candidate]),
			state_rw_call(What,From,actordb_sqlprocutil:store_follower(P,#flw{node = Candidate}));
		_ ->
			case ok of
				_ when NewTerm < P#dp.current_term ->
					%% Candidate's term is older than ours - refuse.
					reply(From,{outofdate,actordb_conf:node_name(),P#dp.current_term,{P#dp.evnum,P#dp.evterm}}),
					NP = P;
				_ when NewTerm == P#dp.current_term ->
					%% Same term: only vote yes if we have not voted, or
					%% already voted for this candidate.
					case (P#dp.voted_for == undefined orelse P#dp.voted_for == Candidate) of
						true when Uptodate ->
							DoElection = false,
							reply(From,{true,actordb_conf:node_name(),NewTerm,{P#dp.evnum,P#dp.evterm}}),
							NP = actordb_sqlprocutil:save_term(P#dp{voted_for = Candidate,
								current_term = NewTerm,
								election_timer = actordb_sqlprocutil:election_timer(Now,P#dp.election_timer)});
						true ->
							reply(From,{outofdate,actordb_conf:node_name(),NewTerm,{P#dp.evnum,P#dp.evterm}}),
							NP = actordb_sqlprocutil:save_term(P#dp{voted_for = undefined, current_term = NewTerm});
						false ->
							DoElection =(P#dp.mors == master andalso P#dp.verified == true),
							AV = {alreadyvoted,actordb_conf:node_name(),P#dp.current_term,{P#dp.evnum,P#dp.evterm}},
							reply(From,AV),
							NP = P
					end;
				_ when Uptodate, P#dp.masternode == Candidate ->
					DoElection = false,
					?DBG("Voting yes for same master as before"),
					reply(From,{true,actordb_conf:node_name(),NewTerm,{P#dp.evnum,P#dp.evterm}}),
					NP = actordb_sqlprocutil:save_term(P#dp{voted_for = Candidate, current_term = NewTerm,
						election_timer = actordb_sqlprocutil:election_timer(Now,P#dp.election_timer)});
				_ when Uptodate ->
					%% Newer term, log up to date: vote yes and step down.
					DoElection = false,
					?DBG("Stepping down after voting on another master"),
					reply(From,{true,actordb_conf:node_name(),NewTerm,{P#dp.evnum,P#dp.evterm}}),
					NP = actordb_sqlprocutil:save_term(P#dp{mors = slave, verified = false,
						masternode = undefined,masternodedist = undefined,
						without_master_since = Now,
						last_vote_event = Now,
						voted_for = Candidate, current_term = NewTerm,
						election_timer = actordb_sqlprocutil:election_timer(Now,undefined)});
				_ ->
					%% Higher term but log not up to date: refuse the vote,
					%% still remember the newer term.
					reply(From,{outofdate,actordb_conf:node_name(),NewTerm,{P#dp.evnum,P#dp.evterm}}),
					NP = actordb_sqlprocutil:save_term(P#dp{voted_for = undefined, current_term = NewTerm,
						election_timer = actordb_sqlprocutil:election_timer(Now,P#dp.election_timer)})
			end,
			?DBG("Doing election after request_vote? ~p, mors=~p, verified=~p, election=~p",
				[DoElection,P#dp.mors,P#dp.verified,P#dp.election_timer]),
			{noreply,actordb_sqlprocutil:doqueue(NP#dp{election_timer =
				actordb_sqlprocutil:election_timer(Now,P#dp.election_timer)})}
	end;
state_rw_call({delete,deleted},From,P) ->
	ok = actordb_sqlite:wal_rewind(P,0),
	reply(From,ok),
	{stop,normal,P};
state_rw_call({delete,{moved,Moved}},From,P) ->
	actordb_sqlprocutil:moved_replace(P,Moved),
	reply(From,ok),
	{stop,normal,P};
state_rw_call(checkpoint,_From,P) ->
	actordb_sqlprocutil:checkpoint(P),
	{reply,ok,P}.
%% Append one or more WAL pages received through AE replication.
%% The list clause walks the paired header/body lists in lockstep, recursing
%% into the single-page clause for each pair. Iteration stops early when a
%% single append returns {noreply,_} (final page of an event was written).
append_wal(P, From, CallCount, [Hdr|RestHdrs], [Bin|RestBins], AEType) ->
	case append_wal(P, From, CallCount, Hdr, Bin, AEType) of
		{reply,ok,NP} when RestHdrs /= [] ->
			append_wal(NP, From, CallCount, RestHdrs, RestBins, AEType);
		{reply,ok,NP} ->
			{reply,ok,NP};
		{noreply,NP} ->
			{noreply,NP}
	end;
append_wal(P, From, CallCount, Hdr, Bin, AEType) ->
	WriteRes = actordb_sqlprocutil:append_wal(P, Hdr, Bin),
	append_wal1(P, From, CallCount, Hdr, AEType, WriteRes).
%% Handle the result (AWR) of writing one AE WAL page.
%% First clause: the driver also produced a new callback state NS; store it
%% and re-run with plain ok.
append_wal1(P,From,CallCount,Header,AEType,{ok,NS}) ->
	append_wal1(P#dp{cbstate = NS},From,CallCount,Header,AEType,ok);
append_wal1(P,From,CallCount,Header,AEType,AWR) ->
	case AWR of
		ok ->
			case Header of
				% Commit counter in header is 0: more pages of this event
				% follow, keep the actor locked for AE until the last page.
				<<_:20/binary,0:32>> ->
					?DBG("AE append ~p",[AEType]),
					{reply,ok,P#dp{locked = [ae]}};
				% Non-zero commit: this was the final page of the event.
				% Header carries the event's term/number; adopt them, unlock
				% and acknowledge to the leader.
				<<Evterm:64/unsigned-big,Evnum:64/unsigned-big,Pgno:32,Commit:32>> ->
					?DBG("AE WAL done evnum=~p,evterm=~p,aetype=~p,qempty=~p,master=~p,pgno=~p,commit=~p",
						[Evnum,Evterm,AEType,queue:is_empty(P#dp.callqueue),P#dp.masternode,Pgno,Commit]),
					% Stamp recovery age when we are being caught up.
					case P#dp.inrecovery of
						true ->
							RecoveryAge = actordb_local:elapsed_time();
						false ->
							RecoveryAge = P#dp.recovery_age
					end,
					NP = P#dp{evnum = Evnum, evterm = Evterm,locked = [], recovery_age = RecoveryAge},
					reply(From,done),
					actordb_sqlprocutil:ae_respond(NP,NP#dp.masternode,true,P#dp.evnum,AEType,CallCount),
					{noreply,NP}
			end;
		% Write failed: tell the caller and send a negative AE response.
		_X ->
			?ERR("Append failed ~p",[_X]),
			reply(From,false),
			actordb_sqlprocutil:ae_respond(P,P#dp.masternode,false,P#dp.evnum,AEType,CallCount),
			{noreply,P}
	end.
%% read_call/3: handle a client read on this actor.
%% Reads only run on the master; anything else gets redirected. Accepted
%% reads are not executed immediately but buffered into #dp.rasync and later
%% flushed as one batch by read_call1/5.
% [exists] is a special read that just confirms the actor exists here.
read_call(#read{sql = [exists]},_From,#dp{mors = master} = P) ->
	{reply,{ok,[{columns,{<<"exists">>}},{rows,[{<<"true">>}]}]},P};
read_call(#read{sql = {[exists],_}},_From,#dp{mors = master} = P) ->
	{reply,{ok,[{columns,{<<"exists">>}},{rows,[{<<"true">>}]}]},P};
% Make sure a timeout message is pending before doing any work.
read_call(Msg,From,#dp{flags = F} = P) when (F band ?TIMEOUT_PENDING) == 0 ->
	read_call(Msg,From,timeout(P));
read_call(Msg,From,#dp{mors = master, rasync = AR} = P) ->
	?DBG("read_call ~p",[Msg]),
	% "safe" reads are only answered after followers confirm leadership;
	% once any buffered read asks for it, the whole batch is safe.
	Safe = AR#ai.safe_read or lists:member(safe,Msg#read.flags),
	case Msg#read.sql of
		% {Mod,Func,Args}: callback computes the SQL, possibly with an
		% immediate reply value and/or a new callback state.
		{Mod,Func,Args} ->
			case apply(Mod,Func,[P#dp.cbstate|Args]) of
				% Reply What after Sql executes ({tuple,...} consumer tag).
				{reply,What,Sql,NS} ->
					AR1 = AR#ai{buffer = [Sql|AR#ai.buffer], buffer_cf = [{tuple,What,From}|AR#ai.buffer_cf],
						buffer_recs = [[]|AR#ai.buffer_recs], safe_read = Safe},
					{noreply,P#dp{cbstate = NS, rasync = AR1}};
				% Immediate replies, no SQL to run.
				{reply,What,NS} ->
					{reply,What,P#dp{cbstate = NS}};
				{reply,What} ->
					{reply,What,P};
				{Sql,Recs} when is_list(Recs) ->
					AR1 = AR#ai{buffer = [Sql|AR#ai.buffer], buffer_cf = [From|AR#ai.buffer_cf],
						buffer_recs = [Recs|AR#ai.buffer_recs], safe_read = Safe},
					{noreply,P#dp{rasync = AR1}};
				{Sql,State} ->
					AR1 = AR#ai{buffer = [Sql|AR#ai.buffer], buffer_cf = [From|AR#ai.buffer_cf],
						buffer_recs = [[]|AR#ai.buffer_recs], safe_read = Safe},
					{noreply,P#dp{cbstate = State, rasync = AR1}};
				Sql ->
					AR1 = AR#ai{buffer = [Sql|AR#ai.buffer], buffer_cf = [From|AR#ai.buffer_cf],
						buffer_recs = [[]|AR#ai.buffer_recs], safe_read = Safe},
					{noreply,P#dp{rasync = AR1}}
			end;
		% {Sql,MFA}: run Sql, pass its result through MFA before replying.
		{Sql,{Mod,Func,Args}} ->
			AR1 = AR#ai{buffer = [Sql|AR#ai.buffer], buffer_cf = [{mod,{Mod,Func,Args},From}|AR#ai.buffer_cf],
				buffer_recs = [[]|AR#ai.buffer_recs], safe_read = Safe},
			{noreply,P#dp{rasync = AR1}};
		% Plain SQL with bound records.
		{Sql,Recs} ->
			AR1 = AR#ai{buffer = [Sql|AR#ai.buffer], buffer_cf = [From|AR#ai.buffer_cf],
				buffer_recs = [Recs|AR#ai.buffer_recs], safe_read = Safe},
			{noreply,P#dp{rasync = AR1}};
		% Plain SQL.
		Sql ->
			AR1 = AR#ai{buffer = [Sql|AR#ai.buffer], buffer_cf = [From|AR#ai.buffer_cf],
				buffer_recs = [[]|AR#ai.buffer_recs], safe_read = Safe},
			{noreply,P#dp{rasync = AR1}}
	end;
% Not master: tell the caller who is.
read_call(_Msg,_From,P) ->
	?DBG("redirect read ~p",[P#dp.masternode]),
	actordb_sqlprocutil:redirect_master(P).
% Execute buffered read
%% read_call1/5: execute the batch of reads buffered by read_call/3.
%% SafeRead=true means results may only be released after followers confirm
%% we are still leader (empty AE round); with no followers that check is
%% meaningless, so it degrades to a plain read. On an SQL error only the
%% offending statement's caller gets the error; the rest of the batch is
%% retried without it.
% Nothing buffered: just refresh activity.
read_call1(_,_,_,[],P) ->
	P#dp{activity = actordb_local:actor_activity(P#dp.activity)};
% Lost mastership while reads were buffered: redirect every caller.
read_call1(_,_,_,From,#dp{mors = slave} = P) ->
	[reply(F,{redirect,P#dp.masternode}) || F <- From],
	P#dp{rasync = #ai{}};
% Safe read without followers behaves like a normal read.
read_call1(true,Sql,Recs,From,#dp{followers = []} = P) ->
	read_call1(false, Sql, Recs, From, P);
read_call1(SafeRead,Sql,Recs,From,P) ->
	ComplSql = list_to_tuple(Sql),
	Records = list_to_tuple(Recs),
	?DBG("READ SQL=~p, Recs=~p, from=~p",[ComplSql, Records,From]),
	Res = actordb_sqlite:exec(P#dp.db,ComplSql,Records,read),
	case Res of
		{ok,ResTuples} when SafeRead == false ->
			?DBG("Read resp=~p",[Res]),
			actordb_sqlprocutil:read_reply(
				P#dp{rasync = #ai{}, activity = actordb_local:actor_activity(P#dp.activity)}, From, 1, ResTuples);
		{ok,ResTuples} ->
			% Safe read: park the results in rasync.wait and send an empty
			% AE round to confirm leadership. If a write is already buffered
			% its replication doubles as the confirmation.
			A = P#dp.rasync,
			W = P#dp.wasync,
			NRB = A#ai{wait = ResTuples, callfrom = From, buffer = [], buffer_cf = [], buffer_recs = []},
			case W#ai.buffer of
				[] ->
					?DBG("Sending empty ae to verify read"),
					NewFollowers1 = [actordb_sqlprocutil:send_empty_ae(P,NF) || NF <- P#dp.followers],
					ae_timer(P#dp{callres = ok,followers = NewFollowers1, rasync = NRB});
				_ ->
					?DBG("Read response will be sent after write"),
					P#dp{rasync = NRB}
			end;
		%{sql_error,ErrMsg,_} = Err ->
		{sql_error,{ErrPos,_,_ErrAtom,ErrStr},_} ->
			?ERR("Read call error: ~p",[Res]),
			% NOTE(review): the write error path splits at ErrPos-1 (it has
			% a #s00 prefix statement); here ErrPos is used directly -
			% presumably because reads have no prefix. Confirm against the
			% driver's error-position base.
			{Before,[Problem|After]} = lists:split(ErrPos,From),
			reply(Problem, {sql_error,ErrStr}),
			{BeforeSql,[_ProblemSql|AfterSql]} = lists:split(ErrPos,Sql),
			{BeforeRecs,[_ProblemRecs|AfterRecs]} = lists:split(ErrPos,Recs),
			read_call1(SafeRead,BeforeSql++AfterSql, BeforeRecs++AfterRecs, Before++After,P#dp{rasync = #ai{}})
	end.
% Recompile driver with threadsafe=1 if using async reads.
% Res = actordb_sqlite:exec_async(P#dp.db,ComplSql,Records,read),
% NRB = A#ai{wait = Res, info = Sql, callfrom = From, buffer = [], buffer_cf = [], buffer_recs = []},
% P#dp{rasync = NRB}.
%% write_call/3: buffer a client write into #dp.wasync. Writes are not
%% executed here; write_call1/4 later flushes the buffer as a single batch.
% Make sure a timeout message is pending before doing any work.
write_call(Msg,From,#dp{flags = F} = P) when F band ?TIMEOUT_PENDING == 0 ->
	write_call(Msg,From,timeout(P));
write_call(#write{mfa = MFA, sql = Sql} = Msg,From,P) ->
	A = P#dp.wasync,
	% fsync sticks for the whole buffered batch once any write requests it.
	ForceSync = A#ai.buffer_fsync or lists:member(fsync,Msg#write.flags),
	?DBG("writecall evnum_prewrite=~p,term=~p writeinfo=~p, from=~p",[P#dp.evnum,P#dp.current_term,{MFA,Sql},From]),
	case Sql of
		% Actor deletion: record moved-to-$deleted$ in __adb.
		delete ->
			A1 = A#ai{buffer = [<<"INSERT OR REPLACE INTO __adb (id,val) VALUES (?1,?2);">>|A#ai.buffer],
				buffer_cf = [From|A#ai.buffer_cf],
				buffer_recs = [[[[?MOVEDTOI,<<"$deleted$">>]]]|A#ai.buffer_recs],
				buffer_moved = deleted, buffer_fsync = ForceSync},
			{noreply,P#dp{wasync = A1}};
		% Actor moved to another node: record destination in __adb.
		{moved,MovedTo} ->
			A1 = A#ai{buffer = [<<"#s02;">>|A#ai.buffer], buffer_cf = [From|A#ai.buffer_cf],
				buffer_recs = [[[[?MOVEDTOI,MovedTo]]]|A#ai.buffer_recs],
				buffer_moved = {moved,MovedTo}, buffer_fsync = ForceSync},
			{noreply,P#dp{wasync = A1}};
		% If new schema version write, add sql to first place of list of writes.
		% (buffer is newest-first and reversed at flush time)
		_ when Msg#write.newvers /= undefined, MFA == undefined ->
			A1 = A#ai{buffer = A#ai.buffer++[Sql], buffer_recs = A#ai.buffer_recs++[Msg#write.records],
				buffer_cf = A#ai.buffer_cf++[From], buffer_nv = Msg#write.newvers,
				buffer_fsync = ForceSync},
			{noreply,P#dp{wasync = A1}};
		% Plain SQL write.
		_ when MFA == undefined ->
			A1 = A#ai{buffer = [Sql|A#ai.buffer], buffer_cf = [From|A#ai.buffer_cf],
				buffer_recs = [Msg#write.records|A#ai.buffer_recs], buffer_fsync = ForceSync},
			{noreply,P#dp{wasync = A1}};
		% Callback-produced write: MFA returns the SQL, possibly with an
		% immediate reply value, new callback state or isolation request.
		_ ->
			{Mod,Func,Args} = MFA,
			case apply(Mod,Func,[P#dp.cbstate|Args]) of
				{reply,What,OutSql,NS} ->
					% Reply immediately; the write itself keeps no caller.
					reply(From,What),
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [[]|A#ai.buffer_recs],
						buffer_cf = [undefined|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1, cbstate = NS}};
				{reply,What,NS} ->
					{reply,What,P#dp{cbstate = NS}};
				{reply,What} ->
					{reply,What,P};
				{exec,OutSql,Recs} ->
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [Recs|A#ai.buffer_recs],
						buffer_cf = [From|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1}};
				% Detach from cluster replication for this write.
				{isolate,OutSql,State} ->
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [[]|A#ai.buffer_recs],
						buffer_cf = [From|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1, cbstate = State, verified = true, mors = master, followers = []}};
				{OutSql,Recs} when is_list(Recs) ->
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [Recs|A#ai.buffer_recs],
						buffer_cf = [From|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1}};
				{OutSql,State} ->
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [[]|A#ai.buffer_recs],
						buffer_cf = [From|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1, cbstate = State}};
				{OutSql,Recs,State} ->
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [Recs|A#ai.buffer_recs],
						buffer_cf = [From|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1, cbstate = State}};
				OutSql ->
					A1 = A#ai{buffer = [OutSql|A#ai.buffer], buffer_recs = [[]|A#ai.buffer_recs],
						buffer_cf = [From|A#ai.buffer_cf], buffer_fsync = ForceSync},
					{noreply,P#dp{wasync = A1}}
			end
	end.
% print_sqls(Pos,Sql,Recs) when tuple_size(Sql) >= Pos ->
% 	?DBG("SQL=~p, Recs=~p",[element(Pos,Sql),element(Pos,Recs)]),
% 	print_sqls(Pos+1,Sql,Recs);
%% write_call2/1: for the in-memory queue backend the write result is known
%% immediately, so feed it straight back through handle_info and return the
%% updated state (element 2 of {noreply,NP}). Other backends get their
%% result asynchronously and pass through unchanged.
write_call2(#dp{db = queue, wasync = #ai{wait = Ref}} = P) ->
	element(2,handle_info({Ref,ok},P));
write_call2(P) ->
	P.
%% write_call1/4: flush the buffered write batch (or run a transaction
%% write). On a slave everything is redirected to the master. Regular writes
%% go through the async sqlite exec; transaction writes are either executed
%% directly (single-node cluster) or stored in __transactions first and only
%% applied on commit.
write_call1(_W,From,_CF,#dp{mors = slave} = P) ->
	RR = P#dp.rasync,
	?ADBG("Redirecting write ~p from=~p, w=~p",[P#dp.masternode,From,_W]),
	[reply(F,{redirect,P#dp.masternode}) || F <- From],
	% Any parked safe-read callers must be redirected as well.
	case RR#ai.callfrom of
		[_|_] ->
			[reply(F,{redirect,P#dp.masternode}) || F <- RR#ai.callfrom];
		_ ->
			ok
	end,
	P#dp{wasync = #ai{}, rasync = RR#ai{callfrom = undefined, wait = undefined}};
write_call1(#write{sql = Sql,transaction = undefined} = W,From,NewVers,P) ->
	EvNum = P#dp.evnum+1,
	VarHeader = actordb_sqlprocutil:create_var_header(P),
	case P#dp.db of
		queue ->
			Res = make_ref(),
			CF = [batch|lists:reverse(From)],
			{ok,NS} = actordb_queue:cb_write_exec(P#dp.cbstate, lists:reverse(Sql), P#dp.current_term, EvNum, VarHeader);
		_ ->
			NS = P#dp.cbstate,
			% Buffered sql/recs/cf lists are newest-first; wrap the reversed
			% batch with #s00 (begin) and #s02;#s01 (__adb update + commit).
			CF = [batch,undefined|lists:reverse([undefined|From])],
			ComplSql = list_to_tuple([<<"#s00;">>|lists:reverse([<<"#s02;#s01;">>|Sql])]),
			ADBW = [[[?EVNUMI,butil:tobin(EvNum)],[?EVTERMI,butil:tobin(P#dp.current_term)]]],
			Records = list_to_tuple([[]|lists:reverse([ADBW|W#write.records])]),
			?DBG("schema = ~p, SQL=~p, Recs=~p, cf=~p",[P#dp.schemavers,ComplSql, Records, CF]),
			%print_sqls(1,ComplSql,Records),
			Res = actordb_sqlite:exec_async(P#dp.db,ComplSql,Records,P#dp.current_term,EvNum,VarHeader)
	end,
	A = P#dp.wasync,
	% Move buffer_* into the in-flight fields; the driver result arrives as
	% a {Res,_} message handled in handle_info.
	NWB = A#ai{wait = Res, info = W, newvers = NewVers,
		callfrom = CF, evnum = EvNum, evterm = P#dp.current_term,
		moved = A#ai.buffer_moved, fsync = A#ai.buffer_fsync,
		buffer_moved = undefined, buffer_nv = undefined, buffer_fsync = false,
		buffer = [], buffer_cf = [], buffer_recs = []},
	write_call2(P#dp{wasync = NWB, last_write_at = actordb_local:elapsed_time(),
		activity = actordb_local:actor_activity(P#dp.activity), cbstate = NS});
write_call1(#write{sql = Sql1, transaction = {Tid,Updaterid,Node} = TransactionId} = W,From,NewVers,P) ->
	{_CheckPid,CheckRef} = actordb_sqlprocutil:start_transaction_checker(Tid,Updaterid,Node),
	?DBG("Starting transaction write id ~p, curtr ~p, sql ~p",[TransactionId,P#dp.transactionid,Sql1]),
	ForceSync = lists:member(fsync,W#write.flags),
	case P#dp.followers of
		[] ->
			% If single node cluster, no need to store sql first.
			case P#dp.transactionid of
				TransactionId ->
					% Transaction can write to single actor more than once (especially for KV stores)
					{_OldSql,EvNum,_} = P#dp.transactioninfo,
					case Sql1 of
						delete ->
							ComplSql = <<"delete">>,
							Res = {ok,{changes,0,1}};
						_ ->
							ComplSql = Sql1,
							Res = actordb_sqlite:exec(P#dp.db,ComplSql,write)
					end;
				undefined ->
					EvNum = P#dp.evnum+1,
					case Sql1 of
						delete ->
							Res = {ok,{changes,0,1}},
							ComplSql = <<"delete">>;
						_ ->
							ComplSql =
								[<<"#s00;">>,
								 actordb_sqlprocutil:semicolon(Sql1),
								 <<"#s02;">>
								],
							AWR = [[?EVNUMI,butil:tobin(EvNum)],[?EVTERMI,butil:tobin(P#dp.current_term)]],
							Records = W#write.records++[AWR],
							VarHeader = actordb_sqlprocutil:create_var_header(P),
							Res = actordb_sqlite:exec(P#dp.db,ComplSql,Records,P#dp.current_term,EvNum,VarHeader)
					end
			end,
			case actordb_sqlite:okornot(Res) of
				ok ->
					?DBG("Transaction ok"),
					{noreply, actordb_sqlprocutil:reply_maybe(P#dp{transactionid = TransactionId,
						evterm = P#dp.current_term,
						last_write_at = actordb_local:elapsed_time(),
						transactioncheckref = CheckRef,force_sync = ForceSync,
						transactioninfo = {ComplSql,EvNum,NewVers},
						activity = actordb_local:actor_activity(P#dp.activity),
						callfrom = From, callres = Res})};
				_Err ->
					actordb_sqlite:rollback(P#dp.db),
					erlang:demonitor(CheckRef),
					?DBG("Transaction not ok ~p",[_Err]),
					{reply,Res,P#dp{transactionid = undefined,
						last_write_at = actordb_local:elapsed_time(),
						activity = actordb_local:actor_activity(P#dp.activity),
						evterm = P#dp.current_term}}
			end;
		_ ->
			% Multi-node: persist the sql into __transactions (#s03) first;
			% it is only applied for real when the transaction commits.
			EvNum = P#dp.evnum+1,
			case P#dp.transactionid of
				TransactionId when Sql1 /= delete ->
					% Rollback prev version of sql.
					actordb_sqlite:rollback(P#dp.db),
					{OldSql,_EvNum,_} = P#dp.transactioninfo,
					% Combine prev sql with new one.
					Sql = iolist_to_binary([OldSql,Sql1]);
				TransactionId ->
					Sql = <<"delete">>;
				_ ->
					case Sql1 of
						delete ->
							Sql = <<"delete">>;
						_ ->
							Sql = iolist_to_binary(Sql1)
					end
			end,
			ComplSql = <<"#s00;#s02;#s03;#s01;">>,
			TransRecs = [[[butil:tobin(Tid),butil:tobin(Updaterid),Node,butil:tobin(NewVers),base64:encode(Sql)]]],
			Records = [[[?EVNUMI,butil:tobin(EvNum)],[?EVTERMI,butil:tobin(P#dp.current_term)]]|TransRecs],
			VarHeader = actordb_sqlprocutil:create_var_header(P),
			ok = actordb_sqlite:okornot(actordb_sqlite:exec(
				P#dp.db,ComplSql,Records,P#dp.current_term,EvNum,VarHeader)),
			{noreply,ae_timer(P#dp{callfrom = From,callres = undefined, evterm = P#dp.current_term,evnum = EvNum,
				last_write_at = actordb_local:elapsed_time(),
				transactioninfo = {Sql,EvNum+1,NewVers},
				transactioncheckref = CheckRef,force_sync = ForceSync,
				transactionid = TransactionId})}
	end.
%% Refresh timers after initiating an AE round: (re)arm the election timer,
%% reset the consensus-wait counter (callat), bump activity and stamp every
%% follower with the moment we started waiting for its response.
ae_timer(P) ->
	T = actordb_local:elapsed_time(),
	Stamp = fun(F) -> F#flw{wait_for_response_since = T} end,
	P#dp{election_timer = actordb_sqlprocutil:election_timer(T,P#dp.election_timer),
		callat = {T,0},
		activity = actordb_local:actor_activity(P#dp.activity),
		followers = lists:map(Stamp, P#dp.followers)}.
%% handle_cast/2 (gen_server callback).
% diepls: polite shutdown request; stop only if fully idle and the callback
% module allows it.
handle_cast({diepls,_Reason},P) ->
	?DBG("Received diepls ~p",[_Reason]),
	W = P#dp.wasync,
	R = P#dp.rasync,
	% Idle = no queued calls, no buffered or in-flight reads/writes, no copy
	% activity, no locks and no open transaction.
	Inactive = queue:is_empty(P#dp.callqueue) andalso W#ai.buffer == [] andalso R#ai.buffer == [] andalso
		P#dp.dbcopy_to == [] andalso P#dp.locked == [] andalso P#dp.copyfrom == undefined andalso
		W#ai.wait == undefined andalso R#ai.wait == undefined andalso P#dp.transactioninfo == undefined,
	CanDie = apply(P#dp.cbmod,cb_candie,[P#dp.mors,P#dp.actorname,P#dp.actortype,P#dp.cbstate]),
	?DBG("verified ~p, empty ~p, candie ~p, state=~p",[P#dp.verified,Inactive,CanDie,?R2P(P)]),
	case ok of
		_ when P#dp.verified, Inactive, CanDie /= never ->
			{stop,normal,P};
		_ ->
			{noreply,P}
	end;
handle_cast(print_info,P) ->
	?AINF("locks=~p wwait=~p",[P#dp.locked,(P#dp.wasync)#ai.wait]),
	?AINF("~p~n",[?R2P(P)]),
	{noreply,P};
% Forward other casts to the callback module when we are an active master.
handle_cast(Msg,#dp{mors = master, verified = true} = P) ->
	case apply(P#dp.cbmod,cb_cast,[Msg,P#dp.cbstate]) of
		{noreply,S} ->
			{noreply,P#dp{cbstate = S}};
		noreply ->
			{noreply,P}
	end;
handle_cast(_Msg,P) ->
	?INF("sqlproc ~p unhandled cast ~p~n",[P#dp.cbmod,_Msg]),
	{noreply,P}.
%% handle_info/2 (gen_server callback).
% Election collection window expired.
handle_info({election_timeout,Ref},P) ->
	election_timeout(Ref,P);
% Periodic actor timeout. If the mailbox is backed up, postpone queue
% processing (at most 10 times) so pending messages are handled first.
handle_info({timeout,Resend},P) ->
	case erlang:process_info(self(),message_queue_len) of
		{message_queue_len,N} when N > 1, Resend < 10 ->
			{noreply,timeout(P#dp{flags = P#dp.flags band (bnot ?TIMEOUT_PENDING)}, Resend)};
		_ ->
			{noreply,actordb_sqlprocutil:doqueue(P#dp{flags = P#dp.flags band (bnot ?TIMEOUT_PENDING)})}
	end;
% Async read result arrived (ref matches the pending read batch).
handle_info({Ref,Res}, #dp{rasync = #ai{wait = Ref} = BD} = P) when is_reference(Ref) ->
	NewBD = BD#ai{callfrom = undefined, info = undefined, wait = undefined},
	case Res of
		{ok,ResTuples} ->
			?DBG("Read resp=~p",[Res]),
			{noreply,actordb_sqlprocutil:read_reply(P#dp{rasync = NewBD}, BD#ai.callfrom, 1, ResTuples)}
		% TODO: if async reads ever get used...
		%{noreply,P#dp{rasync = NewBD}}
	end;
% Async write result arrived (ref matches the pending write batch).
handle_info({Ref,Res1}, #dp{wasync = #ai{wait = Ref} = BD} = P) when is_reference(Ref) ->
	?DBG("Write result ~p",[Res1]),
	Res = actordb_sqlite:exec_res(Res1),
	From = BD#ai.callfrom,
	EvNum = BD#ai.evnum,
	EvTerm = BD#ai.evterm,
	%?DBG("Res=~p, Callfrom=~p",[Res,From]),
	case BD#ai.newvers of
		undefined ->
			NewVers = P#dp.schemavers;
		NewVers ->
			ok
	end,
	Moved = BD#ai.moved,
	W = BD#ai.info,
	ForceSync = BD#ai.fsync,
	NewAsync = BD#ai{callfrom = undefined, evnum = undefined, evterm = undefined,
		newvers = undefined, info = undefined, wait = undefined, fsync = false},
	case actordb_sqlite:okornot(Res) of
		% Single node: commit is final immediately.
		ok when P#dp.followers == [] ->
			{noreply,actordb_sqlprocutil:statequeue(actordb_sqlprocutil:reply_maybe(
				P#dp{callfrom = From, callres = Res,evnum = EvNum,
					netchanges = actordb_local:net_changes(), force_sync = ForceSync,
					schemavers = NewVers,evterm = EvTerm,movedtonode = Moved,
					wasync = NewAsync}))};
		% Replicated write: keep any earlier stored result and wait for AE
		% responses (ae_timer arms the consensus watchdog).
		ok ->
			case P#dp.callres of
				undefined ->
					Callres = Res;
				Callres ->
					ok
			end,
			{noreply, actordb_sqlprocutil:statequeue(ae_timer(P#dp{callfrom = From, callres = Callres,
				netchanges = actordb_local:net_changes(),force_sync = ForceSync,
				evterm = EvTerm, evnum = EvNum,schemavers = NewVers,movedtonode = Moved,
				wasync = NewAsync}))};
		% One statement in the batch failed: reply the error to its caller,
		% drop it and re-buffer the remaining statements for a retry.
		{sql_error,{ErrPos,_,_ErrAtom,ErrStr},_} ->
			%actordb_sqlite:rollback(P#dp.db),
			[batch,undefined|CF1] = From,
			% Remove cf for last part (#s02,#s01)
			CF = lists:reverse(tl(lists:reverse(CF1))),
			?DBG("Error pos ~p, cf=~p",[ErrPos-1,CF]),
			{Before,[Problem|After]} = lists:split(ErrPos-1,CF),
			reply(Problem, {sql_error,ErrStr}),
			{BeforeSql,[_ProblemSql|AfterSql]} = lists:split(ErrPos-1,lists:reverse(W#write.sql)),
			{BeforeRecs,[_ProblemRecs|AfterRecs]} = lists:split(ErrPos-1,lists:reverse(W#write.records)),
			case BD#ai.newvers of
				undefined ->
					RemainCF = lists:reverse(Before++After),
					RemainSql = lists:reverse(BeforeSql++AfterSql),
					RemainRecs = lists:reverse(BeforeRecs++AfterRecs);
				_ ->
					% A schema-change write sits at the head; drop it too.
					RemainCF = lists:reverse(tl(Before++After)),
					RemainSql = lists:reverse(tl(BeforeSql++AfterSql)),
					RemainRecs = lists:reverse(tl(BeforeRecs++AfterRecs))
			end,
			%?DBG("Remain=~p",[RemainCF]),
			NewAsync1 = NewAsync#ai{buffer = RemainSql++NewAsync#ai.buffer,
				buffer_cf = RemainCF++NewAsync#ai.buffer_cf,
				buffer_recs = RemainRecs++NewAsync#ai.buffer_recs},
			?DBG("New write ~p",[NewAsync1]),
			handle_info(doqueue,actordb_sqlprocutil:statequeue(P#dp{wasync = NewAsync1}))
	end;
% Vote RPC response for the currently running election.
handle_info({{Ref,MonRef,_Nd}, Msg}, P) when element(3,P#dp.election_timer) == Ref ->
	erlang:demonitor(MonRef,[flush]),
	?DBG("received vote result from ~p, res=~p",[_Nd,element(1,Msg)]),
	election_vote(Msg,P);
handle_info(doqueue, P) ->
	{noreply,actordb_sqlprocutil:doqueue(P)};
handle_info(statequeue,P) ->
	{noreply,actordb_sqlprocutil:doqueue(actordb_sqlprocutil:statequeue(P))};
handle_info({hibernate,A},P) ->
	?DBG("hibernating"),
	{noreply,P#dp{activity = A},hibernate};
% Keep the actor marked active while a db copy to another node is running.
handle_info(copy_timer,P) ->
	case P#dp.dbcopy_to of
		[_|_] ->
			erlang:send_after(1000,self(),copy_timer);
		_ ->
			ok
	end,
	{noreply,P#dp{activity = actordb_local:actor_activity(P#dp.activity)}};
handle_info({'DOWN',Monitor,_,PID,Reason},P) ->
	down_info(PID,Monitor,Reason,P);
% Let the mailbox drain before running the actual election check.
handle_info(doelection,P) ->
	self() ! doelection1,
	{noreply,P};
% First check if latencies changed.
handle_info({doelection,_LatencyBefore,_TimerFrom} = Msg,P) ->
	election_timer(Msg,P);
handle_info(doelection1,P) ->
	election_timer(doelection1,P);
handle_info(doelection2,P) ->
	election_timer(doelection2,P);
% A cluster node was removed; drop it from our follower list.
handle_info({forget,Nd},P) ->
	?INF("Forgetting node ~p",[Nd]),
	{noreply,P#dp{followers = lists:keydelete(Nd,#flw.node,P#dp.followers)}};
% Retry a postponed copy, but only while we are a verified master.
handle_info(retry_copy,P) ->
	?DBG("Retry copy mors=~p, ver=~p, cl=~p",[P#dp.mors,P#dp.verified,P#dp.copylater]),
	case P#dp.mors == master andalso P#dp.verified == true of
		true ->
			{noreply,actordb_sqlprocutil:retry_copy(P)};
		_ ->
			{noreply, P}
	end;
% A batch of calls bundled into one message; replay through handle_call.
handle_info({batch,L},P) ->
	?DBG("Batch=~p",[L]),
	{noreply, lists:foldl(fun({{Pid,Ref},W},NP) -> {noreply, NP1} = handle_call(W, {Pid,Ref}, NP), NP1 end, P, L)};
% Re-check copy locks every second while any are held.
handle_info(check_locks,P) ->
	case P#dp.locked of
		[] ->
			{noreply,P};
		_ ->
			erlang:send_after(1000,self(),check_locks),
			{noreply, actordb_sqlprocutil:check_locks(P,P#dp.locked,[])}
	end;
handle_info(stop,P) ->
	?DBG("Received stop msg"),
	handle_info({stop,normal},P);
handle_info({stop,Reason},P) ->
	?DBG("Actor stop with reason ~p",[Reason]),
	{stop, normal, P};
handle_info(print_info,P) ->
	handle_cast(print_info,P);
% Fake a 'done' DOWN message from the transaction checker to force a commit.
handle_info(commit_transaction,P) ->
	down_info(0,12345,done,P#dp{transactioncheckref = 12345});
% Begin copying/moving this actor from another node; copyfrom says how.
handle_info(start_copy,P) ->
	?DBG("Start copy ~p",[P#dp.copyfrom]),
	case P#dp.copyfrom of
		{move,NewShard,Node} ->
			OldActor = P#dp.actorname,
			Msg = {move,NewShard,actordb_conf:node_name(),P#dp.copyreset,P#dp.cbstate};
		{split,MFA,Node,OldActor,NewActor} ->
			Msg = {split,MFA,actordb_conf:node_name(),OldActor,NewActor,P#dp.copyreset,P#dp.cbstate};
		{Node,OldActor} ->
			Msg = {copy,{actordb_conf:node_name(),OldActor,P#dp.actorname}}
	end,
	Home = self(),
	% Ask the source actor (on Node) to start sending; the outcome comes
	% back to this process as a message.
	spawn(fun() ->
		Rpc = {?MODULE,call,[{OldActor,P#dp.actortype},[],Msg,P#dp.cbmod,onlylocal]},
		case actordb:rpc(Node,OldActor,Rpc) of
			ok ->
				?DBG("Ok response for startcopy msg"),
				ok;
			{ok,_} ->
				?DBG("Ok response for startcopy msg"),
				ok;
			{redirect,_} ->
				?DBG("Received redirect, presume job is done"),
				Home ! start_copy_done;
			Err ->
				?ERR("Unable to start copy from ~p, ~p",[P#dp.copyfrom,Err]),
				Home ! {stop,Err}
		end
	end),
	{noreply,P};
handle_info(start_copy_done,P) ->
	{ok,NP} = init(P,copy_done),
	{noreply,NP};
% Forward everything else to the callback module once verified.
handle_info(Msg,#dp{verified = true} = P) ->
	case apply(P#dp.cbmod,cb_info,[Msg,P#dp.cbstate]) of
		{noreply,S} ->
			{noreply,P#dp{cbstate = S}};
		noreply ->
			{noreply,P}
	end;
handle_info(_Msg,P) ->
	?DBG("sqlproc ~p unhandled info ~p~n",[P#dp.cbmod,_Msg]),
	{noreply,P}.
%% election_timer/2: drive the election/consensus timers.
% {doelection,...}: scheduled election check. If network latency spiked or a
% vote happened very recently, back off and re-arm instead of electing.
election_timer({doelection,LatencyBefore,_TimerFrom},P) ->
	LatencyNow = actordb_latency:latency(),
	Now = actordb_local:elapsed_time(),
	Interval = actordb_sqlprocutil:election_timer_interval(),
	case (LatencyNow > (LatencyBefore*1.5) andalso LatencyNow > 100) orelse (Now - P#dp.last_vote_event < Interval) of
		true ->
			{noreply,P#dp{election_timer = actordb_sqlprocutil:election_timer(undefined),
				last_vote_event = 0,
				activity = actordb_local:actor_activity(P#dp.activity)}};
		false ->
			% Clear out msg queue first.
			self() ! doelection1,
			{noreply,P#dp{activity = actordb_local:actor_activity(P#dp.activity)}}
	end;
% doelection1: a write may still be awaiting consensus. Nudge followers with
% empty AE up to two times (Noops counter); after that abandon the write
% with consensus_timeout, step down to slave and re-run the election check.
election_timer(doelection1,P) ->
	case P#dp.callfrom of
		undefined ->
			election_timer(doelection2,P);
		_ ->
			LatencyNow = actordb_latency:latency(),
			Now = actordb_local:elapsed_time(),
			{CallTime,Noops} = P#dp.callat,
			% More than a second after write is finished (and sent to followers)
			case Now - CallTime > 1000+LatencyNow of
				true when Noops == 0 ->
					?ERR("Write is taking long to reach consensus ~p",[P#dp.callfrom]),
					NewFollowers1 = [actordb_sqlprocutil:send_empty_ae(P,NF) || NF <- P#dp.followers],
					{noreply,P#dp{callat = {CallTime,1}, election_timer = actordb_sqlprocutil:election_timer(undefined),
						followers = NewFollowers1}};
				true when Noops == 1 ->
					?ERR("Still have not reached consensus"),
					{noreply,P#dp{callat = {CallTime,2}, election_timer = actordb_sqlprocutil:election_timer(undefined)}};
				true when Noops == 2 ->
					?ERR("Write abandon with consensus_timeout ~p",[P#dp.callfrom]),
					reply(P#dp.callfrom,{error,consensus_timeout}),
					RR = P#dp.rasync,
					% Parked safe-read callers get redirected.
					case RR#ai.callfrom of
						[_|_] ->
							[reply(F,{redirect,P#dp.masternode}) || F <- RR#ai.callfrom];
						_ ->
							ok
					end,
					election_timer(doelection2,P#dp{callfrom = undefined, callres = undefined,
						masternode = undefined,masternodedist = undefined,
						rasync = RR#ai{callfrom = undefined, wait = undefined},
						verified = false, mors = slave, without_master_since = CallTime});
				false ->
					{noreply,P#dp{election_timer = actordb_sqlprocutil:election_timer(undefined),
						activity = actordb_local:actor_activity(P#dp.activity)}}
			end
	end;
% doelection2: decide whether to start an election, based on current role,
% leader connectivity and how long we have been without a master.
election_timer(doelection2,P) ->
	A = P#dp.wasync,
	Empty = queue:is_empty(P#dp.callqueue) andalso A#ai.buffer_cf == [],
	?DBG("Election timeout, master=~p, verified=~p, followers=~p",
		[P#dp.masternode,P#dp.verified,P#dp.followers]),
	Now = actordb_local:elapsed_time(),
	Me = actordb_conf:node_name(),
	LatencyNow = actordb_latency:latency(),
	case ok of
		% Active master with a db copy in progress: just re-arm the timer.
		_ when P#dp.verified, P#dp.mors == master, P#dp.dbcopy_to /= [] ->
			{noreply,P#dp{election_timer = actordb_sqlprocutil:election_timer(Now,undefined)}};
		_ when P#dp.verified, P#dp.mors == master ->
			actordb_sqlprocutil:follower_check_handle(P);
		% An election is already running.
		_ when element(1,P#dp.election_timer) == election ->
			{noreply,P};
		_ when P#dp.masternode /= undefined, P#dp.masternode /= Me ->
			% We are follower and masternode is set. This means leader sent us at least one AE.
			case bkdcore_rpc:is_connected(P#dp.masternode) andalso Now - P#dp.masternode_since < 2000 of
				true ->
					?DBG("Election timeout, do nothing, leader=~p",[P#dp.masternode]),
					{noreply,P#dp{without_master_since = undefined}};
				false ->
					?DBG("Leader is gone, leader=~p, election=~p, empty=~p, me=~p",
						[P#dp.masternode,P#dp.election_timer,Empty,actordb_conf:node_name()]),
					NP = P#dp{election_timer = undefined,without_master_since = Now,
						masternode = undefined, masternodedist = undefined},
					{noreply,actordb_sqlprocutil:start_verify(NP,false)}
			end;
		_ when P#dp.without_master_since == undefined ->
			?DBG("Leader timeout, leader=~p, election=~p, empty=~p, me=~p",
				[P#dp.masternode,P#dp.election_timer,Empty,actordb_conf:node_name()]),
			NP = P#dp{election_timer = undefined,without_master_since = Now},
			{noreply,actordb_sqlprocutil:start_verify(NP,false)};
		_ when P#dp.election_timer == undefined ->
			{noreply,actordb_sqlprocutil:start_verify(P,false)};
		% Leaderless for too long with pending work: fail all queued calls.
		_ when Now - P#dp.without_master_since >= 3000+LatencyNow, Empty == false ->
			?ERR("Unable to establish leader, responding with error"),
			actordb_sqlprocutil:empty_queue(P#dp.wasync,P#dp.rasync, P#dp.callqueue,{error,consensus_impossible_atm}),
			A1 = A#ai{buffer = [], buffer_recs = [], buffer_cf = [],
				buffer_nv = undefined, buffer_moved = undefined},
			R1 = (P#dp.rasync)#ai{callfrom = undefined, wait = undefined},
			actordb_catchup:report(P#dp.actorname,P#dp.actortype),
			{noreply,P#dp{callqueue = queue:new(),election_timer = undefined,
				wasync = A1,rasync = R1}};
		_ when Now - P#dp.without_master_since >= 3000+LatencyNow ->
			actordb_catchup:report(P#dp.actorname,P#dp.actortype),
			{noreply,P#dp{election_timer = undefined}};
		_ ->
			?DBG("Election timeout"),
			{noreply,actordb_sqlprocutil:start_verify(P#dp{election_timer = undefined},false)}
	end.
%% Process a request_vote response from follower Node: record its vote and
%% log position, then re-check whether we now hold a majority.
election_vote({Verdict,Node,_HisLatestTerm,{MatchNum,MatchTerm}}, P) ->
	Quorum = length(P#dp.followers) + 1,
	Flw = lists:keyfind(Node,#flw.node,P#dp.followers),
	Flw1 = Flw#flw{match_index = MatchNum, next_index = MatchNum+1,
		match_term = MatchTerm, election_result = Verdict},
	Followers1 = lists:keystore(Node,#flw.node,P#dp.followers,Flw1),
	% Self counts as one yes vote (initial accumulator of 1).
	{AllSynced,Yes,Missing} = count_votes(P,Followers1,true,1,0),
	case Yes*2 > Quorum of
		true ->
			?DBG("Election successfull, nvotes=~p missing=~p",[Yes,Missing]),
			NP = P#dp{followers = Followers1, election_timer = undefined,
				last_vote_event = actordb_local:elapsed_time()},
			elected_leader(cleanup_results(NP), AllSynced);
		false ->
			{noreply, P#dp{followers = Followers1, last_vote_event = actordb_local:elapsed_time()}}
	end;
% Anything else is a malformed/error response; ignore it.
election_vote(Err,P) ->
	?ERR("election_vote response error=~p",[Err]),
	{noreply, P}.
%% Tally election results over the follower list.
%% Returns {AllSynced, Yes, Missing}: Yes counts positive votes (the caller
%% seeds the accumulator with 1 for self), Missing counts followers that
%% have not answered yet. AllSynced stays true only while every yes-voter
%% already matches our evnum/evterm exactly.
count_votes(_P, [], Synced, Yes, Missing) ->
	{Synced, Yes, Missing};
count_votes(P, [Flw|Rest], Synced, Yes, Missing) ->
	InSync = Synced andalso P#dp.evnum == Flw#flw.match_index
		andalso P#dp.evterm == Flw#flw.match_term,
	case Flw#flw.election_result of
		true ->
			count_votes(P, Rest, InSync, Yes+1, Missing);
		undefined ->
			count_votes(P, Rest, Synced, Yes, Missing+1);
		_ ->
			count_votes(P, Rest, false, Yes, Missing)
	end.
%% Wipe per-follower election bookkeeping once an election round is decided.
cleanup_results(P) ->
	Cleared = lists:map(
		fun(F) -> F#flw{election_result = undefined, election_rpc_ref = undefined} end,
		P#dp.followers),
	P#dp{followers = Cleared}.
%% election_timeout/2: the election collection window expired; tally votes
%% received so far. Ref must match the live election timer, otherwise the
%% message is stale and ignored (second clause).
election_timeout(Ref,#dp{election_timer = {election,_TimerRef,Ref}} = P) ->
	ClusterSize = length(P#dp.followers) + 1,
	case count_votes(P, P#dp.followers, true, 1, 0) of
		{AllSynced, NVotes, Missing} when NVotes*2 > ClusterSize, P#dp.flags band ?FLAG_WAIT_ELECTION == 0 ->
			?DBG("election_timeout with majority, missing=~p",[Missing]),
			elected_leader(cleanup_results(P),AllSynced andalso Missing == 0);
		% Majority reached but WAIT_ELECTION flag set: poll again shortly.
		{_AllSynced, NVotes, _Missing} when NVotes*2 > ClusterSize ->
			erlang:send_after(100,self(),{election_timeout, Ref}),
			?DBG("wait_election"),
			{noreply, P#dp{election_timer = {election, undefined, Ref}}};
		% Lost the election; continue as follower and re-arm the timer.
		_ ->
			?DBG("election_timeout continue as follower, without_master_since=~p",
				[P#dp.without_master_since]),
			case ok of
				_ when P#dp.without_master_since == undefined ->
					Now = actordb_local:elapsed_time(),
					{noreply,actordb_sqlprocutil:reopen_db(cleanup_results(P#dp{
						election_timer = actordb_sqlprocutil:election_timer(Now,undefined),
						masternode = undefined, masternodedist = undefined, mors = slave,
						without_master_since = Now}))};
				_ ->
					Now = actordb_local:elapsed_time(),
					{noreply,cleanup_results(P#dp{election_timer = actordb_sqlprocutil:election_timer(Now,undefined),
						masternode = undefined, masternodedist = undefined, mors = slave})}
			end
	end;
election_timeout(_,P) ->
	?DBG("Election timeout when no longer relevant, election_timer=~p",[P#dp.election_timer]),
	{noreply, P}.
%% elected_leader/2: this node won the election.
% First clause: actor has no schema (never created) or was deleted, and the
% caller did not set FLAG_CREATE -> tell slaves to stop, fail queued calls
% with nocreate, and shut ourselves down shortly after.
elected_leader(P, _AllSynced) when (P#dp.flags band ?FLAG_CREATE) == 0 andalso
		(P#dp.schemavers == undefined orelse P#dp.movedtonode == deleted) ->
	Nodes = actordb_sqlprocutil:follower_nodes(P#dp.followers),
	spawn(fun() -> bkdcore_rpc:multicall(Nodes,{actordb_sqlproc,call_slave,
		[P#dp.cbmod,P#dp.actorname,P#dp.actortype,stop]}) end),
	Me = self(),
	% Delay the stop slightly so the current message finishes processing.
	spawn(fun() -> timer:sleep(10), stop(Me) end),
	RR = (P#dp.rasync)#ai{callfrom = undefined, wait = undefined},
	actordb_sqlprocutil:empty_queue(P#dp.wasync,P#dp.rasync, P#dp.callqueue,{error,nocreate}),
	A1 = (P#dp.wasync)#ai{buffer = [], buffer_recs = [], buffer_cf = [],
		buffer_nv = undefined, buffer_moved = undefined},
	{noreply,P#dp{movedtonode = deleted, verified = true, callqueue = queue:new(),
		netchanges = actordb_local:net_changes(),
		wasync = A1, rasync = RR}};
elected_leader(P1, AllSynced) ->
actordb_local:actor_mors(master,actordb_conf:node_name()),
ReplType = apply(P1#dp.cbmod,cb_replicate_type,[P1#dp.cbstate]),
P = actordb_sqlprocutil:reopen_db(P1#dp{mors = master, election_timer = undefined,
masternode = actordb_conf:node_name(),
masternode_since = actordb_local:elapsed_time(),
without_master_since = undefined,
masternodedist = bkdcore:dist_name(actordb_conf:node_name()),
flags = P1#dp.flags band (bnot ?FLAG_WAIT_ELECTION),
cbstate = actordb_sqlite:replicate_opts(P1,term_to_binary({P1#dp.cbmod,P1#dp.actorname,P1#dp.actortype}),ReplType),
locked = lists:delete(ae,P1#dp.locked)}),
case P#dp.movedtonode of
deleted ->
actordb_sqlprocutil:actually_delete(P1),
Moved = undefined,
SchemaVers = undefined;
_ ->
Moved = P#dp.movedtonode,
SchemaVers = P#dp.schemavers
end,
?DBG("Elected leader term=~p, nodes_synced=~p, moved=~p",[P1#dp.current_term,AllSynced,P#dp.movedtonode]),
case P#dp.schemavers of
undefined ->
Transaction = [],
Rows = [];
_ ->
case actordb_sqlite:exec(P#dp.db,
<<"SELECT * FROM __adb;",
"SELECT * FROM __transactions;">>,read) of
{ok,[[{columns,_},{rows,Transaction}],[{columns,_},{rows,Rows}]]} ->
ok;
Err ->
?ERR("Unable read from db for, error=~p after election.",[Err]),
Transaction = Rows = [],
exit(error)
end
end,
case butil:ds_val(?COPYFROMI,Rows) of
CopyFrom1 when byte_size(CopyFrom1) > 0 ->
{CopyFrom,CopyReset,CbState} = binary_to_term(base64:decode(CopyFrom1));
_ ->
CopyFrom = CopyReset = undefined,
CbState = P#dp.cbstate
end,
- It can also happen that both transaction active and actor move is active . Sqls will be combined .
NP1 = P#dp{verified = true,copyreset = CopyReset,movedtonode = Moved,
cbstate = CbState, schemavers = SchemaVers, cbinit = CbInit,
netchanges = actordb_local:net_changes()},
{NP,Sql,AdbRecords,Callfrom} =
actordb_sqlprocutil:post_election_sql(NP1,Transaction,CopyFrom,[],P#dp.callfrom),
case is_number(P#dp.schemavers) andalso is_atom(Sql) == false andalso iolist_size(Sql) == 0 of
true when AllSynced, P#dp.followers == [] ->
?DBG("Nodes synced, no followers"),
W = NP#dp.wasync,
{noreply,actordb_sqlprocutil:doqueue(actordb_sqlprocutil:do_cb(
NP#dp{followers = [],
wasync = W#ai{nreplies = W#ai.nreplies+1}}))};
true when AllSynced ->
?DBG("Nodes synced, running empty AE."),
NewFollowers1 = [actordb_sqlprocutil:send_empty_ae(P,NF) || NF <- P#dp.followers],
W = NP#dp.wasync,
{noreply,actordb_sqlprocutil:doqueue(ae_timer(NP#dp{callres = ok,followers = NewFollowers1,
wasync = W#ai{nreplies = W#ai.nreplies+1}}))};
_ ->
?DBG("Running post election write on nodes ~p, evterm=~p, curterm=~p, vers ~p, trans=~p",
[P#dp.followers,P#dp.evterm,P#dp.current_term,NP#dp.schemavers, NP#dp.transactionid]),
W = #write{sql = Sql, transaction = NP#dp.transactionid,records = AdbRecords},
Now = actordb_local:elapsed_time(),
Followers = [F#flw{last_seen = Now} || F <- P#dp.followers],
%% write_call1(#write{sql = , transaction = undefined} = W, From, NewVers, P) ->
case NP#dp.transactionid of
undefined ->
write_call(W,Callfrom, NP#dp{followers = Followers});
_ ->
write_call1(W,Callfrom, NP#dp.schemavers, NP#dp{followers = Followers})
end
end.
%% Handle 'DOWN' monitor messages for the three kinds of processes this actor
%% monitors: the transaction checker, the copy receiver, and db-copy-to
%% senders. Anything else is forwarded to the callback module's cb_info.
%%
%% Clause 1: the transaction checker process died.
down_info(_PID,Ref,Reason,#dp{transactioncheckref = Ref} = P) ->
	?DBG("Transactioncheck died ~p myid ~p, pid=~p",[Reason,P#dp.transactionid,_PID]),
	case P#dp.transactionid of
		{Tid,Updaterid,Node} ->
			case Reason of
				%% Checker was not running yet: restart it and keep waiting.
				noproc ->
					{_CheckPid,CheckRef} = actordb_sqlprocutil:start_transaction_checker(Tid,Updaterid,Node),
					{noreply,P#dp{transactioncheckref = CheckRef}};
				%% Transaction abandoned: roll back (commit with false).
				abandoned ->
					case handle_call({commit,false,P#dp.transactionid},
							undefined,P#dp{transactioncheckref = undefined}) of
						{stop,normal,NP} ->
							{stop,normal,NP};
						{reply,_,NP} ->
							{noreply,NP};
						{noreply,_} = R ->
							R
					end;
				%% Transaction completed: commit it.
				done ->
					case handle_call({commit,true,P#dp.transactionid},
							undefined,P#dp{transactioncheckref = undefined}) of
						{stop,normal,NP} ->
							{stop,normal,NP};
						{reply,_,NP} ->
							{noreply,NP};
						{noreply,_} = R ->
							R
					end
			end;
		_ ->
			%% No transaction in progress anymore; just clear the monitor ref.
			{noreply,P#dp{transactioncheckref = undefined}}
	end;
%% Clause 2: the copy receiver process (copyproc) died.
down_info(PID,_Ref,Reason,#dp{copyproc = PID} = P) ->
	?DBG("copyproc died ~p my_status=~p copyfrom=~p",[Reason,P#dp.mors,P#dp.copyfrom]),
	case Reason of
		%% Copy finished and asks us to unlock, then reinit as slave or master.
		unlock ->
			case catch actordb_sqlprocutil:callback_unlock(P) of
				ok when is_binary(P#dp.copyfrom) ->
					{ok,NP} = init(P#dp{mors = slave},copyproc_done),
					{noreply,NP};
				ok ->
					{ok,NP} = init(P#dp{mors = master},copyproc_done),
					{noreply,NP};
				Err ->
					?DBG("Unable to unlock"),
					{stop,Err,P}
			end;
		ok when P#dp.mors == slave ->
			?DBG("Stopping because slave"),
			{stop,normal,P};
		nomajority ->
			{stop,{error,nomajority},P};
		_ ->
			?ERR("Coproc died with error ~p~n",[Reason]),
			{stop,{error,copyfailed},P}
	end;
%% Clause 3: either a db-copy-to sender died, or the 'DOWN' belongs to the
%% callback module.
down_info(PID,_Ref,Reason,P) ->
	case lists:keyfind(PID,#cpto.pid,P#dp.dbcopy_to) of
		false ->
			?DBG("downmsg, verify maybe? ~p ~p ~p",[P#dp.election_timer,PID,Reason]),
			%% Not one of ours: let the callback module decide what it means.
			case apply(P#dp.cbmod,cb_info,[{'DOWN',_Ref,process,PID,Reason},P#dp.cbstate]) of
				{noreply,S} ->
					{noreply,P#dp{cbstate = S}};
				noreply ->
					{noreply,P}
			end;
		C ->
			?DBG("Down copyto proc ~p ~p ~p ~p ~p",
				[P#dp.actorname,Reason,C#cpto.ref,P#dp.locked,P#dp.dbcopy_to]),
			case Reason of
				ok ->
					ok;
				_ ->
					?ERR("Copyto process invalid exit ~p",[Reason])
			end,
			WithoutCopy = lists:keydelete(PID,#lck.pid,P#dp.locked),
			NewCopyto = lists:keydelete(PID,#cpto.pid,P#dp.dbcopy_to),
			%% Assert the ref is no longer present in the remaining locks.
			false = lists:keyfind(C#cpto.ref,2,WithoutCopy),
			%% wait_copy not in list add it (2nd stage of lock)
			WithoutCopy1 = [#lck{ref = C#cpto.ref, ismove = C#cpto.ismove,
				node = C#cpto.node,time = actordb_local:elapsed_time(),
				actorname = C#cpto.actorname}|WithoutCopy],
			erlang:send_after(1000,self(),check_locks),
			{noreply,actordb_sqlprocutil:doqueue(P#dp{dbcopy_to = NewCopyto,locked = WithoutCopy1})}
	end.
%% gen_server terminate callback: log the reason, close the sqlite handle
%% and unregister this process from distreg.
terminate(Reason, P) ->
	case P of
		#dp{} ->
			?DBG("Terminating ~p",[Reason]);
		_ ->
			?ADBG("Terminating ~p, ~p",[Reason,P])
	end,
	actordb_sqlite:stop(P),
	distreg:unreg(self()),
	ok.
%% gen_server code_change callback: state is carried over unchanged.
code_change(_OldVsn, State, _Extra) ->
	{ok, State}.
%% Reinitialize a running actor process in place (after a copy, election
%% reset, etc.): close the db handle, clear the wait-election/startlock
%% flags, cancel a pending election timer and run init/1 again with the
%% current state carried over.
init(#dp{} = P,_Why) ->
	%% ?DBG("Reinit because ~p, ~p, ~p",[_Why,?R2P(P),get()]),
	?DBG("Reinit because ~p",[_Why]),
	actordb_sqlite:stop(P),
	Flags = P#dp.flags band (bnot ?FLAG_WAIT_ELECTION) band (bnot ?FLAG_STARTLOCK),
	%% Guard-based dispatch: a failing guard simply falls through to ok.
	case ok of
		_ when is_reference(element(2,P#dp.election_timer)) ->
			erlang:cancel_timer(element(2,P#dp.election_timer));
		_ ->
			ok
	end,
	init([{actor,P#dp.actorname},{type,P#dp.actortype},{mod,P#dp.cbmod},{flags,Flags},
		{state,P#dp.cbstate},{slave,P#dp.mors == slave},{wasync,P#dp.wasync},{rasync,P#dp.rasync},
		{queue,P#dp.callqueue},{startreason,{reinit,_Why}}]).
%% gen_server init from an option list: parse the options and either answer
%% an existence query, start locked for an incoming copy, start normally as
%% master/slave, or stop.
init([_|_] = Opts) ->
	%% put(opt,Opts),
	?ADBG("Start opts ~p",[Opts]),
	rand:seed(exs64),
	Now = actordb_local:elapsed_time(),
	P1 = #dp{mors = master, callqueue = queue:new(),statequeue = queue:new(), without_master_since = Now},
	case actordb_sqlprocutil:parse_opts(P1,Opts) of
		{registered,Pid} ->
			%% Another process is already registered for this actor.
			explain({registered,Pid},Opts),
			{stop,normal};
		%% P when (P#dp.flags band ?FLAG_...) > 0 ->
		P when (P#dp.flags band ?FLAG_EXISTS) > 0 ->
			%% Existence check only: report and stop without starting the actor.
			case P#dp.movedtonode of
				deleted ->
					explain({ok,[{columns,{<<"exists">>}},{rows,[{<<"false">>}]}]},Opts),
					%% Bug fix: init must return a valid gen_server result;
					%% previously this branch returned explain/2's value.
					{stop,normal};
				_ ->
					%% explain({ok,[{columns,{<<"exists">>}},{rows,[{butil:tobin(SchemaTables /= [])}]}]},Opts),
					LocalShard = actordb_shardmngr:find_local_shard(P#dp.actorname,P#dp.actortype),
					Val =
					case LocalShard of
						{redirect,Shard,Node} ->
							actordb:rpc(Node,Shard,{actordb_shard,is_reg,[Shard,P#dp.actorname,P#dp.actortype]});
						undefined ->
							{Shard,_,Node} = actordb_shardmngr:find_global_shard(P#dp.actorname),
							actordb:rpc(Node,Shard,{actordb_shard,is_reg,[Shard,P#dp.actorname,P#dp.actortype]});
						Shard ->
							actordb_shard:is_reg(Shard,P#dp.actorname,P#dp.actortype)
					end,
					explain({ok,[{columns,{<<"exists">>}},{rows,[{butil:tobin(Val)}]}]},Opts),
					{stop,normal}
			end;
		P when (P#dp.flags band ?FLAG_STARTLOCK) > 0 ->
			%% Start locked: this actor is the destination of a db copy.
			case lists:keyfind(lockinfo,1,Opts) of
				{lockinfo,dbcopy,{Ref,CbState,CpFrom,CpReset}} ->
					?DBG("Starting actor slave lock for copy on ref ~p",[Ref]),
					{ok,Db,_,_PageSize} = actordb_sqlite:init(P#dp.dbpath,wal),
					{ok,Pid} = actordb_sqlprocutil:start_copyrec(
						P#dp{db = Db, mors = slave, cbstate = CbState,
							dbcopyref = Ref, copyfrom = CpFrom, copyreset = CpReset}),
					{ok,P#dp{copyproc = Pid, verified = false,mors = slave, copyfrom = P#dp.copyfrom}};
				{lockinfo,wait} ->
					?DBG("Starting actor lock wait ~p",[P]),
					{ok,P}
			end;
		P when P#dp.copyfrom == undefined ->
			%% Normal start: read persisted raft/vote state and open the db.
			?DBG("Actor start, copy=~p, flags=~p, mors=~p",[P#dp.copyfrom,P#dp.flags,P#dp.mors]),
			MovedToNode = apply(P#dp.cbmod,cb_checkmoved,[P#dp.actorname,P#dp.actortype]),
			RightCluster = lists:member(MovedToNode,bkdcore:all_cluster_nodes()),
			case actordb_sqlite:actor_info(P) of
				%% {_,VotedFor,VotedCurrentTerm,VoteEvnum,VoteEvTerm} ->
				{{_FCT,LastCheck},{VoteEvTerm,VoteEvnum},_InProg,_MxPage,_AllPages,VotedCurrentTerm,<<>>} ->
					VotedFor = undefined;
				{{_FCT,LastCheck},{VoteEvTerm,VoteEvnum},_InProg,_MxPage,_AllPages,VotedCurrentTerm,VotedFor} ->
					ok;
				_ ->
					VotedFor = undefined,
					LastCheck = VoteEvnum = VotedCurrentTerm = VoteEvTerm = 0
			end,
			case ok of
				_ when P#dp.mors == slave ->
					{ok,actordb_sqlprocutil:init_opendb(P#dp{current_term = VotedCurrentTerm,
						voted_for = VotedFor, evnum = VoteEvnum,evterm = VoteEvTerm,
						election_timer = actordb_sqlprocutil:election_timer(Now,undefined),
						last_checkpoint = LastCheck})};
				_ when MovedToNode == undefined; RightCluster ->
					NP = P#dp{current_term = VotedCurrentTerm,voted_for = VotedFor, evnum = VoteEvnum,
						evterm = VoteEvTerm, last_checkpoint = LastCheck},
					{ok,actordb_sqlprocutil:start_verify(actordb_sqlprocutil:init_opendb(NP),true)};
				_ ->
					?DBG("Actor moved ~p ~p ~p",[P#dp.actorname,P#dp.actortype,MovedToNode]),
					{ok, P#dp{verified = true, movedtonode = MovedToNode}}
			end;
		{stop,Explain} ->
			explain(Explain,Opts),
			{stop,normal};
		P ->
			%% Actor is the source of a copy: trigger it asynchronously.
			self() ! start_copy,
			{ok,P#dp{mors = master}}
	end;
%% Reinitialize without a specific reason.
init(#dp{} = State) ->
	init(State, noreason).
%% If the caller supplied {start_from,{Pid,Ref}} in Opts, send it the
%% startup result What; otherwise do nothing.
explain(What, Opts) ->
	case lists:keyfind(start_from, 1, Opts) of
		{start_from, {Pid, Ref}} ->
			Pid ! {Ref, What};
		_ ->
			ok
	end.
%% Reply to a gen_server caller through the shared helper.
reply(To, Msg) ->
	actordb_sqlprocutil:reply(To, Msg).
%% reply([_|_] = From, Msg) ->
%% 	[gen_server:reply(F, Msg) || F <- From];
%% reply(From, Msg) ->
|
7627e75deb1f42a16a9aeebeaaaba19f415802591a55b8e80def7a0532ea1862 | mmottl/lacaml | CZ.ml | File : CZ.ml
Copyright ( C ) 2001-
email :
WWW :
email :
WWW : /
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 USA
Copyright (C) 2001-
Markus Mottl
email:
WWW:
Christophe Troestler
email:
WWW: /
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*)
(** Modules with functions specialized for simple (C) or double (Z)
precision complex numbers. *)
include Complexxx
include Complex_io
include Impl2_CPREC
include Impl4_CPREC
(* Vector type and operations specialized to this precision's complex
   numbers, gathered under [Vec]. *)
module Vec = struct
  type t = vec
  include Types.Vec (* Export [unop],... so they can be explicit in C and Z. *)
  include Vec2_CPREC
  include Vec4_CPREC
end
(* Matrix type and operations specialized to this precision's complex
   numbers, gathered under [Mat]. *)
module Mat = struct
  type t = mat
  include Types.Mat
  include Mat2_CPREC
  include Mat4_CPREC
end
| null | https://raw.githubusercontent.com/mmottl/lacaml/2e01c0747e740e54ab9a23ea59b29ea0d929b50f/src/CZ.ml | ocaml | * Modules with functions specialized for simple (C) or double (Z)
precision complex numbers.
Export [unop],... so they can be explicit in C and Z. | File : CZ.ml
Copyright ( C ) 2001-
email :
WWW :
email :
WWW : /
This library is free software ; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation ; either
version 2.1 of the License , or ( at your option ) any later version .
This library is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
Lesser General Public License for more details .
You should have received a copy of the GNU Lesser General Public
License along with this library ; if not , write to the Free Software
Foundation , Inc. , 51 Franklin Street , Fifth Floor , Boston , MA 02110 - 1301 USA
Copyright (C) 2001-
Markus Mottl
email:
WWW:
Christophe Troestler
email:
WWW: /
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
*)
include Complexxx
include Complex_io
include Impl2_CPREC
include Impl4_CPREC
module Vec = struct
type t = vec
include Vec2_CPREC
include Vec4_CPREC
end
module Mat = struct
type t = mat
include Types.Mat
include Mat2_CPREC
include Mat4_CPREC
end
|
e5f771f1fff0dbee1afe39d2f3c6b0046d1b6b30a5bc5ac273d993869bc665c8 | marijnh/parse-js | package.lisp | (cl:defpackage #:parse-js
  ;; Package definition for the parse-js JavaScript parser.
  (:use #:cl)
  ;; Exported API: token accessors, lexer/parser entry points, parse-error
  ;; condition readers, and configuration special variables.
  (:export #:token-type #:token-value #:token-line #:token-char #:token-pos
           #:token-newline-before #:token-comments-before
           #:lex-js #:parse-js #:parse-js-string #:read-js-number
           #:js-parse-error #:js-parse-error-line #:js-parse-error-char
           #:*check-for-reserved-words* #:*ecma-version*))
| null | https://raw.githubusercontent.com/marijnh/parse-js/fbadc6029bec7039602abfc06c73bb52970998f6/src/package.lisp | lisp | (cl:defpackage #:parse-js
(:use #:cl)
(:export #:token-type #:token-value #:token-line #:token-char #:token-pos
#:token-newline-before #:token-comments-before
#:lex-js #:parse-js #:parse-js-string #:read-js-number
#:js-parse-error #:js-parse-error-line #:js-parse-error-char
#:*check-for-reserved-words* #:*ecma-version*))
| |
1f7d80dff9a67ce9d9a3320c16a5e546dc948ca563847952769f2e9d54b1fd76 | geneweb/geneweb | image.ml | open Config
open Gwdb
(* The configured image prefix, HTML-escaped for safe output. *)
let prefix conf = conf.image_prefix |> Util.escape_html
(** [default_portrait_filename_of_key fn sn occ] is the default filename of
    the corresponding person's portrait, WITHOUT its file extension.
    e.g.: default_portrait_filename_of_key "Jean Claude" "DUPOND" 3 is
    "jean_claude.3.dupond" *)
let default_portrait_filename_of_key first_name surname occ =
  (* Lowercase (via [Name.lower]) and turn spaces into underscores. *)
  let normalize part = Mutil.tr ' ' '_' (Name.lower part) in
  Format.sprintf "%s.%d.%s" (normalize first_name) occ (normalize surname)
(* Default portrait filename (without extension) of person [p] in [base],
   derived from its first name, surname and occurrence number. *)
let default_portrait_filename base p =
  default_portrait_filename_of_key (p_first_name base p) (p_surname base p)
    (get_occ p)
(** [full_portrait_path conf base p] is [Some path] if [p] has a portrait.
    [path] is the full path of the file, with its file extension. *)
let full_portrait_path conf base p =
  (* TODO: why is extension not in filename..? *)
  (* Probe the base's images directory for the default filename with each
     supported extension, in priority order jpg > png > gif. *)
  let s = default_portrait_filename base p in
  let f = Filename.concat (Util.base_path [ "images" ] conf.bname) s in
  if Sys.file_exists (f ^ ".jpg") then Some (`Path (f ^ ".jpg"))
  else if Sys.file_exists (f ^ ".png") then Some (`Path (f ^ ".png"))
  else if Sys.file_exists (f ^ ".gif") then Some (`Path (f ^ ".gif"))
  else None
(* Full path of source image [src]: prefer the base's own "src/images"
   directory, falling back to the shared secure directory. *)
let source_filename bname src =
  let base_candidate =
    List.fold_right Filename.concat
      [ Util.base_path [ "src" ] bname; "images" ]
      src
  in
  let shared_candidate =
    List.fold_right Filename.concat [ Secure.base_dir (); "src"; "images" ] src
  in
  if Sys.file_exists base_candidate then base_candidate else shared_candidate
(* Resolve [src] against the secure images directory; when absent there,
   fall back to the distribution assets. *)
let path_of_filename src =
  let local =
    List.fold_right Filename.concat [ Secure.base_dir (); "images" ] src
  in
  if Sys.file_exists local then `Path local
  else `Path (Util.search_in_assets (Filename.concat "images" src))
(* Pixel size of a PNG read from [ic]: the file must start with the magic
   bytes "\137PNG"; width and height are 32-bit big-endian integers at
   offsets 16 and 20 (inside the IHDR chunk). *)
let png_size ic =
  match really_input_string ic 4 with
  | "\137PNG" ->
      seek_in ic 16;
      let width = input_binary_int ic in
      let height = input_binary_int ic in
      Ok (width, height)
  | _ -> Error ()
(* Pixel size of a GIF read from [ic]: the header starts with "GIF8";
   logical screen width and height are 16-bit little-endian integers at
   offset 6. *)
let gif_size ic =
  match really_input_string ic 4 with
  | "GIF8" ->
      seek_in ic 6;
      let read_u16_le () =
        let lo = input_byte ic in
        let hi = input_byte ic in
        (hi * 256) + lo
      in
      let width = read_u16_le () in
      let height = read_u16_le () in
      Ok (width, height)
  | _ -> Error ()
(* Pixel size of a JPEG read from [ic]. The first two bytes must be the SOI
   marker (0xFF 0xD8) and bytes 6-9 must spell "JFIF" or "Exif". The marker
   segments are then walked until a start-of-frame header is found. *)
let jpeg_size ic =
  let magic = really_input_string ic 10 in
  if
    Char.code magic.[0] = 0xff
    && Char.code magic.[1] = 0xd8
    &&
    let m = String.sub magic 6 4 in
    m = "JFIF" || m = "Exif"
  then
    let exif_type = String.sub magic 6 4 = "Exif" in
    (* [found] only matters for Exif files: the first SOF marker met is
       skipped and the second one is measured (presumably to step over the
       embedded thumbnail — TODO confirm). *)
    let rec loop found =
      (* Skip data bytes until the 0xFF that introduces the next marker... *)
      while Char.code (input_char ic) <> 0xFF do
        ()
      done;
      (* ...then skip any 0xFF fill bytes to reach the marker code. *)
      let ch =
        let rec loop ch =
          if Char.code ch = 0xFF then loop (input_char ic) else ch
        in
        loop (input_char ic)
      in
      if Char.code ch = 0xC0 || Char.code ch = 0xC3 then
        (* SOF0/SOF3 frame header: after 3 skipped bytes (2-byte segment
           length + 1-byte precision) come height then width, both 16-bit
           big-endian. *)
        if exif_type && not found then loop true
        else (
          for _i = 1 to 3 do
            ignore @@ input_char ic
          done;
          let a = input_char ic in
          let b = input_char ic in
          let c = input_char ic in
          let d = input_char ic in
          let wid = (Char.code c lsl 8) lor Char.code d in
          let hei = (Char.code a lsl 8) lor Char.code b in
          Ok (wid, hei))
      else
        (* Any other segment: read its 16-bit length and skip the payload
           (the length field counts its own two bytes). *)
        let a = input_char ic in
        let b = input_char ic in
        let len = (Char.code a lsl 8) lor Char.code b in
        let len = if len >= 32768 then 0 else len in
        for _i = 1 to len - 2 do
          ignore @@ input_char ic
        done;
        (* 0xDA is start-of-scan: compressed data follows, so give up. *)
        if Char.code ch <> 0xDA then loop found else Error ()
    in
    loop false
  else Error ()
(* TODO: size and mime type should be in db *)
(* Pixel size of the image file at [fname], dispatching on its extension;
   any I/O or parse failure yields [Error ()]. *)
let size_from_path fname =
  let (`Path path) = fname in
  if path = "" then Error ()
  else
    try
      let ic = Secure.open_in_bin path in
      let size =
        try
          (* TODO: should match on mime type here *)
          match String.lowercase_ascii @@ Filename.extension path with
          | ".jpeg" | ".jpg" -> jpeg_size ic
          | ".png" -> png_size ic
          | ".gif" -> gif_size ic
          | _ -> Error ()
        with End_of_file -> Error ()
      in
      close_in ic;
      size
    with Sys_error _ -> Error ()
(* After [p]'s key changed to [(nfn, nsn, noc)], move its default portrait
   file so the new key still resolves to it; failures are only logged. *)
let rename_portrait conf base p (nfn, nsn, noc) =
  match full_portrait_path conf base p with
  | None -> ()
  | Some (`Path old_path) -> (
      let basename = default_portrait_filename_of_key nfn nsn noc in
      let stem =
        Filename.concat (Util.base_path [ "images" ] conf.bname) basename
      in
      let new_path = stem ^ Filename.extension old_path in
      try Sys.rename old_path new_path
      with Sys_error e ->
        !GWPARAM.syslog `LOG_ERR
          (Format.sprintf
             "Error renaming portrait: old_path=%s new_path=%s : %s" old_path
             new_path e))
(* Raw string carried by either source representation. *)
let src_to_string src = match src with `Url s -> s | `Path s -> s
(* Shrink (w, h) proportionally (integer arithmetic) so it fits inside a
   [max_w] x [max_h] box; dimensions already small enough are unchanged. *)
let scale_to_fit ~max_w ~max_h ~w ~h =
  (* First cap the height, rescaling the width accordingly... *)
  let w, h = if h > max_h then (w * max_h / h, max_h) else (w, h) in
  (* ...then cap the width, rescaling the height accordingly. *)
  if w > max_w then (max_w, h * max_w / w) else (w, h)
(** [has_access_to_portrait conf base p] is true iff we can see [p]'s portrait. *)
let has_access_to_portrait conf base p =
  let img = get_image p in
  (not conf.no_image)
  && Util.authorized_age conf base p
  (* a portrait reference stored in the base, or a default file on disk *)
  && ((not (is_empty_string img)) || full_portrait_path conf base p <> None)
  (* plain visitors cannot see portraits referenced under "/private/" *)
  && (conf.wizard || conf.friend
     || not (Mutil.contains (sou base img) "/private/"))
(* TODO: privacy settings should be in db not in url *)
(* [p]'s portrait file path, or [None] when the viewer lacks access. *)
let get_portrait_path conf base p =
  if not (has_access_to_portrait conf base p) then None
  else full_portrait_path conf base p
(* parse a string to an `Url or a `Path *)
(* Absolute http(s) references become [`Url]; implicit filenames are
   resolved against the configured images directory (the "images_path"
   base-env entry when set, the base's images directory otherwise);
   anything else is kept as an explicit [`Path]. *)
let urlorpath_of_string conf s =
  let http = "http://" in
  let https = "https://" in
  (* TODO OCaml 4.13: use String.starts_with *)
  if
    String.length s > String.length http
    && String.sub s 0 (String.length http) = http
    || String.length s > String.length https
       && String.sub s 0 (String.length https) = https
  then `Url s
  else if Filename.is_implicit s then
    match List.assoc_opt "images_path" conf.base_env with
    | Some p when p <> "" -> `Path (Filename.concat p s)
    | Some _ | None ->
        let fname =
          Filename.concat (Util.base_path [ "images" ] conf.bname) s
        in
        `Path fname
  else `Path s
(* Classify a stored image reference: empty, carrying a trailing "(WxH)"
   size suffix, or a plain URL/path. *)
let src_of_string conf s =
  if s = "" then `Empty
  else if s.[String.length s - 1] = ')' then `Src_with_size_info s
  else urlorpath_of_string conf s
(* Split a reference of the form "<src>(<w>x<h>)" into the parsed source
   and its advertised pixel size. Logs and returns [Error _] when the
   suffix is malformed (missing '(' or 'x', or non-numeric dimensions). *)
let parse_src_with_size_info conf s =
  let (`Src_with_size_info s) = s in
  (* Index of the final ')': the caller classified [s] as ending in ')'. *)
  let l = String.length s - 1 in
  try
    let pos1 = String.index s '(' in
    let pos2 = String.index_from s pos1 'x' in
    let w = String.sub s (pos1 + 1) (pos2 - pos1 - 1) |> int_of_string in
    let h = String.sub s (pos2 + 1) (l - pos2 - 1) |> int_of_string in
    let s = String.sub s 0 pos1 in
    Ok (urlorpath_of_string conf s, (w, h))
  with Not_found | Failure _ ->
    !GWPARAM.syslog `LOG_ERR
      (Format.sprintf "Error parsing portrait source with size info %s" s);
    Error "Failed to parse url with size info"
(* Portrait of [p] together with its pixel size when it can be determined:
   from the "(WxH)" suffix of the stored reference, or by reading the file
   header on disk. URLs carry no size. *)
let get_portrait_with_size conf base p =
  if has_access_to_portrait conf base p then
    match src_of_string conf (sou base (get_image p)) with
    | `Src_with_size_info _s as s_info -> (
        match parse_src_with_size_info conf s_info with
        | Error _e -> None
        | Ok (s, size) -> Some (s, Some size))
    | `Url _s as url -> Some (url, None)
    | `Path p as path ->
        if Sys.file_exists p then
          Some (path, size_from_path path |> Result.to_option)
        else None
    | `Empty -> (
        (* No explicit reference: fall back to the default portrait file. *)
        match full_portrait_path conf base p with
        | None -> None
        | Some path -> Some (path, size_from_path path |> Result.to_option))
  else None
(* Portrait of [p], if any and if the viewer may see it: either the
   reference stored in the base, or the default portrait file on disk. *)
let get_portrait conf base p =
  if not (has_access_to_portrait conf base p) then None
  else
    match src_of_string conf (sou base (get_image p)) with
    | `Src_with_size_info _ as info -> (
        match parse_src_with_size_info conf info with
        | Ok (src, _size) -> Some src
        | Error _ -> None)
    | `Url _ as url -> Some url
    | `Path file as path -> if Sys.file_exists file then Some path else None
    | `Empty -> full_portrait_path conf base p
| null | https://raw.githubusercontent.com/geneweb/geneweb/747f43da396a706bd1da60d34c04493a190edf0f/lib/image.ml | ocaml | * [full_portrait_path conf base p] is [Some path] if [p] has a portrait.
[path] is a the full path of the file with file extension.
TODO: size and mime type should be in db
TODO: should match on mime type here
* [has_access_to_portrait conf base p] is true iif we can see [p]'s portrait.
TODO: privacy settings should be in db not in url
parse a string to an `Url or a `Path | open Config
open Gwdb
let prefix conf = Util.escape_html conf.image_prefix
* [ default_portrait_filename_of_key fn sn occ ] is the default filename of the corresponding person 's portrait . WITHOUT its file extenssion .
e.g : default_portrait_filename_of_key " " " DUPOND " 3 is " jean_claude.3.dupond "
e.g: default_portrait_filename_of_key "Jean Claude" "DUPOND" 3 is "jean_claude.3.dupond"
*)
let default_portrait_filename_of_key first_name surname occ =
let space_to_unders = Mutil.tr ' ' '_' in
let f = space_to_unders (Name.lower first_name) in
let s = space_to_unders (Name.lower surname) in
Format.sprintf "%s.%d.%s" f occ s
let default_portrait_filename base p =
default_portrait_filename_of_key (p_first_name base p) (p_surname base p)
(get_occ p)
let full_portrait_path conf base p =
TODO why is extension not in filename .. ?
let s = default_portrait_filename base p in
let f = Filename.concat (Util.base_path [ "images" ] conf.bname) s in
if Sys.file_exists (f ^ ".jpg") then Some (`Path (f ^ ".jpg"))
else if Sys.file_exists (f ^ ".png") then Some (`Path (f ^ ".png"))
else if Sys.file_exists (f ^ ".gif") then Some (`Path (f ^ ".gif"))
else None
let source_filename bname src =
let fname1 =
List.fold_right Filename.concat
[ Util.base_path [ "src" ] bname; "images" ]
src
in
let fname2 =
List.fold_right Filename.concat [ Secure.base_dir (); "src"; "images" ] src
in
if Sys.file_exists fname1 then fname1 else fname2
let path_of_filename src =
let fname1 =
List.fold_right Filename.concat [ Secure.base_dir (); "images" ] src
in
if Sys.file_exists fname1 then `Path fname1
else `Path (Util.search_in_assets (Filename.concat "images" src))
let png_size ic =
let magic = really_input_string ic 4 in
if magic = "\137PNG" then (
seek_in ic 16;
let wid = input_binary_int ic in
let hei = input_binary_int ic in
Ok (wid, hei))
else Error ()
let gif_size ic =
let magic = really_input_string ic 4 in
if magic = "GIF8" then (
seek_in ic 6;
let wid =
let x = input_byte ic in
(input_byte ic * 256) + x
in
let hei =
let x = input_byte ic in
(input_byte ic * 256) + x
in
Ok (wid, hei))
else Error ()
let jpeg_size ic =
let magic = really_input_string ic 10 in
if
Char.code magic.[0] = 0xff
&& Char.code magic.[1] = 0xd8
&&
let m = String.sub magic 6 4 in
m = "JFIF" || m = "Exif"
then
let exif_type = String.sub magic 6 4 = "Exif" in
let rec loop found =
while Char.code (input_char ic) <> 0xFF do
()
done;
let ch =
let rec loop ch =
if Char.code ch = 0xFF then loop (input_char ic) else ch
in
loop (input_char ic)
in
if Char.code ch = 0xC0 || Char.code ch = 0xC3 then
if exif_type && not found then loop true
else (
for _i = 1 to 3 do
ignore @@ input_char ic
done;
let a = input_char ic in
let b = input_char ic in
let c = input_char ic in
let d = input_char ic in
let wid = (Char.code c lsl 8) lor Char.code d in
let hei = (Char.code a lsl 8) lor Char.code b in
Ok (wid, hei))
else
let a = input_char ic in
let b = input_char ic in
let len = (Char.code a lsl 8) lor Char.code b in
let len = if len >= 32768 then 0 else len in
for _i = 1 to len - 2 do
ignore @@ input_char ic
done;
if Char.code ch <> 0xDA then loop found else Error ()
in
loop false
else Error ()
let size_from_path fname =
let (`Path fname) = fname in
let res =
if fname = "" then Error ()
else
try
let ic = Secure.open_in_bin fname in
let r =
try
match String.lowercase_ascii @@ Filename.extension fname with
| ".jpeg" | ".jpg" -> jpeg_size ic
| ".png" -> png_size ic
| ".gif" -> gif_size ic
| _s -> Error ()
with End_of_file -> Error ()
in
close_in ic;
r
with Sys_error _e -> Error ()
in
res
let rename_portrait conf base p (nfn, nsn, noc) =
match full_portrait_path conf base p with
| Some (`Path old_f) -> (
let s = default_portrait_filename_of_key nfn nsn noc in
let f = Filename.concat (Util.base_path [ "images" ] conf.bname) s in
let new_f = f ^ Filename.extension old_f in
try Sys.rename old_f new_f
with Sys_error e ->
!GWPARAM.syslog `LOG_ERR
(Format.sprintf
"Error renaming portrait: old_path=%s new_path=%s : %s" old_f new_f
e))
| None -> ()
let src_to_string = function `Url s | `Path s -> s
let scale_to_fit ~max_w ~max_h ~w ~h =
let w, h =
if h > max_h then
let w = w * max_h / h in
let h = max_h in
(w, h)
else (w, h)
in
let w, h =
if w > max_w then
let h = h * max_w / w in
let w = max_w in
(w, h)
else (w, h)
in
(w, h)
let has_access_to_portrait conf base p =
let img = get_image p in
(not conf.no_image)
&& Util.authorized_age conf base p
&& ((not (is_empty_string img)) || full_portrait_path conf base p <> None)
&& (conf.wizard || conf.friend
|| not (Mutil.contains (sou base img) "/private/"))
let get_portrait_path conf base p =
if has_access_to_portrait conf base p then full_portrait_path conf base p
else None
let urlorpath_of_string conf s =
let http = "http://" in
let https = "https://" in
TODO OCaml 4.13 : use String.starts_with
if
String.length s > String.length http
&& String.sub s 0 (String.length http) = http
|| String.length s > String.length https
&& String.sub s 0 (String.length https) = https
then `Url s
else if Filename.is_implicit s then
match List.assoc_opt "images_path" conf.base_env with
| Some p when p <> "" -> `Path (Filename.concat p s)
| Some _ | None ->
let fname =
Filename.concat (Util.base_path [ "images" ] conf.bname) s
in
`Path fname
else `Path s
let src_of_string conf s =
if s = "" then `Empty
else
let l = String.length s - 1 in
if s.[l] = ')' then `Src_with_size_info s else urlorpath_of_string conf s
let parse_src_with_size_info conf s =
let (`Src_with_size_info s) = s in
let l = String.length s - 1 in
try
let pos1 = String.index s '(' in
let pos2 = String.index_from s pos1 'x' in
let w = String.sub s (pos1 + 1) (pos2 - pos1 - 1) |> int_of_string in
let h = String.sub s (pos2 + 1) (l - pos2 - 1) |> int_of_string in
let s = String.sub s 0 pos1 in
Ok (urlorpath_of_string conf s, (w, h))
with Not_found | Failure _ ->
!GWPARAM.syslog `LOG_ERR
(Format.sprintf "Error parsing portrait source with size info %s" s);
Error "Failed to parse url with size info"
let get_portrait_with_size conf base p =
if has_access_to_portrait conf base p then
match src_of_string conf (sou base (get_image p)) with
| `Src_with_size_info _s as s_info -> (
match parse_src_with_size_info conf s_info with
| Error _e -> None
| Ok (s, size) -> Some (s, Some size))
| `Url _s as url -> Some (url, None)
| `Path p as path ->
if Sys.file_exists p then
Some (path, size_from_path path |> Result.to_option)
else None
| `Empty -> (
match full_portrait_path conf base p with
| None -> None
| Some path -> Some (path, size_from_path path |> Result.to_option))
else None
let get_portrait conf base p =
if has_access_to_portrait conf base p then
match src_of_string conf (sou base (get_image p)) with
| `Src_with_size_info _s as s_info -> (
match parse_src_with_size_info conf s_info with
| Error _e -> None
| Ok (s, _size) -> Some s)
| `Url _s as url -> Some url
| `Path p as path -> if Sys.file_exists p then Some path else None
| `Empty -> (
match full_portrait_path conf base p with
| None -> None
| Some path -> Some path)
else None
|
b3c8be680baff50388f5185c0655c9ba335db0b61d1b19c3e9b4067dbc5e6c0a | lojic/LearningRacket | rna-transcription-test.rkt | #lang racket
(require "rna-transcription.rkt")

;; Test suite for the Exercism "RNA transcription" exercise: [to-rna] must
;; map each DNA nucleotide to its RNA complement (G->C, C->G, T->A, A->U).
(module+ test
  (require rackunit rackunit/text-ui)
  (define suite
    (test-suite
     "Tests for the rna-transcription exercise"
     (test-case "transcribes guanine to cytosine"
       (check-equal? (to-rna "G") "C"))
     (test-case "transcribes cytosine to guanine"
       (check-equal? (to-rna "C") "G"))
     (test-case "transcribes thymidine to adenine"
       (check-equal? (to-rna "T") "A"))
     (test-case "transcribes adenine to uracil"
       (check-equal? (to-rna "A") "U"))
     (test-case "it transcribes all dna nucleotides to rna equivalents"
       (check-equal? (to-rna "ACGTGGTCTTAA") "UGCACCAGAAUU"))
     ))
  ;; Run with textual reporting when the test submodule is executed.
  (run-tests suite))
| null | https://raw.githubusercontent.com/lojic/LearningRacket/eb0e75b0e16d3e0a91b8fa6612e2678a9e12e8c7/exercism.io/rna-transcription/rna-transcription-test.rkt | racket | #lang racket
(require "rna-transcription.rkt")
(module+ test
(require rackunit rackunit/text-ui)
(define suite
(test-suite
"Tests for the rna-transcription exercise"
(test-case "transcribes guanine to cytosine"
(check-equal? (to-rna "G") "C"))
(test-case "transcribes cytosine to guanine"
(check-equal? (to-rna "C") "G"))
(test-case "transcribes thymidine to adenine"
(check-equal? (to-rna "T") "A"))
(test-case "transcribes adenine to uracil"
(check-equal? (to-rna "A") "U"))
(test-case "it transcribes all dna nucleotides to rna equivalents"
(check-equal? (to-rna "ACGTGGTCTTAA") "UGCACCAGAAUU"))
))
(run-tests suite))
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.