_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
ff20d7f9ec331844ef845f7ea8d99887c4c4e214c8be94baab7b7c4146e44480 | TeMPOraL/tracer | main.lisp | (in-package #:tracer)
Trace operations :
1 . Reset
2 . Trace
2.5 snapshot tracing ?
3 . Stop tracing
4 . Save report
#-sbcl (error "This system currently works only on SBCL.")
(defvar *tracing-p* nil "Is currently tracing activity happening?")
Trace info entry type , for function call
;;; - Timestamp
;;; - Function name
;;; - Function args maybe? (trace-with-args), on enter
;;; - Function return value, on exit
;;; - Beginning or ending
;;; - Thread ID
;;; This prints a representation of the return values delivered.
First , this checks to see that cookie is at the top of
;;; *TRACED-ENTRIES*; if it is not, then we need to adjust this list
;;; to determine the correct indentation for output. We then check to
;;; see whether the function is still traced and that the condition
;;; succeeded before printing anything.
(defmacro with-tracing ((&rest specs) &body body)
`(unwind-protect
(progn
(reset-tracing)
(start-tracing ,@specs)
(progn
,@body))
(stop-tracing)))
FIXME : this still has an SBCL dependency -- , 2019 - 10 - 18
(defun function-name->name-and-category (function-name)
(etypecase function-name
(symbol
(values (symbol-name function-name) (package-name (symbol-package function-name))))
(cons
(ecase (first function-name)
(setf
(values (format nil "~S" function-name) (package-name (symbol-package (second function-name)))))
((method sb-pcl::combined-method)
(values (remove #\Newline (format nil "~S" function-name))
(if (consp (second function-name))
(package-name (symbol-package (second (second function-name))))
(package-name (symbol-package (second function-name))))))))))
(defgeneric post-process-arg (arg)
(:method ((arg t))
"Passthrough method."
(or (ignore-errors
(prin1-to-string arg))
"!!Error printing argument!!"))
(:documentation "A hook useful for changing the printed representation of input and return values."))
(defmethod post-process-arg ((arg sequence))
(if (every (lambda (el) (typep el 'number)) arg)
(format nil "[~{~F~^, ~}]" (coerce arg 'list))
(call-next-method)))
FIXME : Something breaks if not collecting args , and : skip - args is NIL . Probably the getf in printing . -- , 2019 - 11 - 05
(defun trace-event->json (trace-event &key (skip-args nil))
(flet ((sanitize-and-format-args-list (argslist)
(if skip-args "\"skipped\""
(substitute #\Space #\Newline (format nil "[~{~S~^, ~}]" (mapcar #'post-process-arg argslist))))))
(ecase (trace-event-phase trace-event)
(:enter
(multiple-value-bind (name category)
(function-name->name-and-category (trace-event-name trace-event))
(format nil
"{ \"name\" : ~S, \"cat\" : ~S, \"ph\" : \"B\", \"pid\" : 1, \"tid\" : ~D, \"ts\" : ~D, \"args\" : { \"in\" : ~A }}"
name
category
(sb-impl::get-lisp-obj-address (trace-event-thread trace-event))
(trace-event-timestamp trace-event)
(sanitize-and-format-args-list (trace-event-args trace-event)))))
(:exit
(multiple-value-bind (name category)
(function-name->name-and-category (trace-event-name trace-event))
(format nil
"{ \"name\" : ~S, \"cat\" : ~S, \"ph\" : \"E\", \"pid\" : 1, \"tid\" : ~D, \"ts\" : ~D, \"args\" : { \"out\" : ~A }}"
name
category
(sb-impl::get-lisp-obj-address (trace-event-thread trace-event))
(trace-event-timestamp trace-event)
(sanitize-and-format-args-list (trace-event-args trace-event)))))
(:complete
(multiple-value-bind (name category)
(function-name->name-and-category (trace-event-name trace-event))
(format nil
"{ \"name\" : ~S, \"cat\" : ~S, \"ph\" : \"X\", \"pid\" : 1, \"tid\" : ~D, \"ts\" : ~D, \"dur\" : ~D, \"args\" : { \"in\" : ~A, \"out\" : ~A }}"
name
category
(sb-impl::get-lisp-obj-address (trace-event-thread trace-event))
(trace-event-timestamp trace-event)
(trace-event-duration trace-event)
(sanitize-and-format-args-list (getf (trace-event-args trace-event) :in))
(sanitize-and-format-args-list (getf (trace-event-args trace-event) :out))))))))
(defun thread->json (thread)
(format nil
"{ \"name\" : \"thread_name\", \"ph\" : \"M\", \"pid\" : 1, \"tid\" : ~D, \"args\" : { \"name\" : ~S }}"
(sb-impl::get-lisp-obj-address thread)
(bt:thread-name thread)))
(defun extract-threads (events)
(loop
with uniques-ht = (make-hash-table :test #'eq)
for event in events
do
(setf (gethash (trace-event-thread event) uniques-ht) t)
finally
(return (alexandria:hash-table-keys uniques-ht))))
FIXME : save with streams instead ? -- , 2019 - 10 - 14
(defun save-report (output-file-name &key (skip-args t))
(with-open-file (stream output-file-name :direction :output :if-exists :supersede)
TODO : preamble -- , 2019 - 10 - 14
(format stream "{~%\"traceEvents\" : [~%")
(loop
for (entry . restp) on *trace-events*
do
(write-string (trace-event->json entry :skip-args skip-args) stream)
(when restp
(write-string "," stream)
(terpri stream)))
(loop
for (thread . restp) on (extract-threads *trace-events*)
initially
(write-string "," stream)
(terpri stream)
do
(write-string (thread->json thread) stream)
(when restp
(write-string "," stream)
(terpri stream)))
(format stream "~&],
\"displayTimeUnit\" : \"ms\",
\"application\" : \"FIXME\",
\"version\" : \"FIXME\",
\"traceTime\" : ~S
}"
" TODO local-time independent time"
;;(local-time:format-timestring nil (local-time:now))
))
(values))
;;; Helper function for blacklisting symbols when tracing whole packages.
(defun package-symbols-except (name &rest exceptions)
(let (symbols
(package (sb-impl::find-undeleted-package-or-lose name)))
(do-all-symbols (symbol (find-package name))
(when (eql package (symbol-package symbol))
(when (and (fboundp symbol)
(not (macro-function symbol))
(not (special-operator-p symbol)))
(push symbol symbols))
(let ((setf-name `(setf ,symbol)))
(when (fboundp setf-name)
(push setf-name symbols)))))
(set-difference symbols exceptions :key (lambda (x)
(if (consp x)
(string (second x))
(string x))) :test #'string-equal)))
| null | https://raw.githubusercontent.com/TeMPOraL/tracer/d7e236752aa291776c49c0bbe435a64dc1fbff78/src/main.lisp | lisp | - Timestamp
- Function name
- Function args maybe? (trace-with-args), on enter
- Function return value, on exit
- Beginning or ending
- Thread ID
This prints a representation of the return values delivered.
*TRACED-ENTRIES*; if it is not, then we need to adjust this list
to determine the correct indentation for output. We then check to
see whether the function is still traced and that the condition
succeeded before printing anything.
(local-time:format-timestring nil (local-time:now))
Helper function for blacklisting symbols when tracing whole packages. | (in-package #:tracer)
Trace operations :
1 . Reset
2 . Trace
2.5 snapshot tracing ?
3 . Stop tracing
4 . Save report
#-sbcl (error "This system currently works only on SBCL.")
(defvar *tracing-p* nil "Is currently tracing activity happening?")
Trace info entry type , for function call
First , this checks to see that cookie is at the top of
(defmacro with-tracing ((&rest specs) &body body)
`(unwind-protect
(progn
(reset-tracing)
(start-tracing ,@specs)
(progn
,@body))
(stop-tracing)))
FIXME : this still has an SBCL dependency -- , 2019 - 10 - 18
(defun function-name->name-and-category (function-name)
(etypecase function-name
(symbol
(values (symbol-name function-name) (package-name (symbol-package function-name))))
(cons
(ecase (first function-name)
(setf
(values (format nil "~S" function-name) (package-name (symbol-package (second function-name)))))
((method sb-pcl::combined-method)
(values (remove #\Newline (format nil "~S" function-name))
(if (consp (second function-name))
(package-name (symbol-package (second (second function-name))))
(package-name (symbol-package (second function-name))))))))))
(defgeneric post-process-arg (arg)
(:method ((arg t))
"Passthrough method."
(or (ignore-errors
(prin1-to-string arg))
"!!Error printing argument!!"))
(:documentation "A hook useful for changing the printed representation of input and return values."))
(defmethod post-process-arg ((arg sequence))
(if (every (lambda (el) (typep el 'number)) arg)
(format nil "[~{~F~^, ~}]" (coerce arg 'list))
(call-next-method)))
FIXME : Something breaks if not collecting args , and : skip - args is NIL . Probably the getf in printing . -- , 2019 - 11 - 05
(defun trace-event->json (trace-event &key (skip-args nil))
(flet ((sanitize-and-format-args-list (argslist)
(if skip-args "\"skipped\""
(substitute #\Space #\Newline (format nil "[~{~S~^, ~}]" (mapcar #'post-process-arg argslist))))))
(ecase (trace-event-phase trace-event)
(:enter
(multiple-value-bind (name category)
(function-name->name-and-category (trace-event-name trace-event))
(format nil
"{ \"name\" : ~S, \"cat\" : ~S, \"ph\" : \"B\", \"pid\" : 1, \"tid\" : ~D, \"ts\" : ~D, \"args\" : { \"in\" : ~A }}"
name
category
(sb-impl::get-lisp-obj-address (trace-event-thread trace-event))
(trace-event-timestamp trace-event)
(sanitize-and-format-args-list (trace-event-args trace-event)))))
(:exit
(multiple-value-bind (name category)
(function-name->name-and-category (trace-event-name trace-event))
(format nil
"{ \"name\" : ~S, \"cat\" : ~S, \"ph\" : \"E\", \"pid\" : 1, \"tid\" : ~D, \"ts\" : ~D, \"args\" : { \"out\" : ~A }}"
name
category
(sb-impl::get-lisp-obj-address (trace-event-thread trace-event))
(trace-event-timestamp trace-event)
(sanitize-and-format-args-list (trace-event-args trace-event)))))
(:complete
(multiple-value-bind (name category)
(function-name->name-and-category (trace-event-name trace-event))
(format nil
"{ \"name\" : ~S, \"cat\" : ~S, \"ph\" : \"X\", \"pid\" : 1, \"tid\" : ~D, \"ts\" : ~D, \"dur\" : ~D, \"args\" : { \"in\" : ~A, \"out\" : ~A }}"
name
category
(sb-impl::get-lisp-obj-address (trace-event-thread trace-event))
(trace-event-timestamp trace-event)
(trace-event-duration trace-event)
(sanitize-and-format-args-list (getf (trace-event-args trace-event) :in))
(sanitize-and-format-args-list (getf (trace-event-args trace-event) :out))))))))
(defun thread->json (thread)
(format nil
"{ \"name\" : \"thread_name\", \"ph\" : \"M\", \"pid\" : 1, \"tid\" : ~D, \"args\" : { \"name\" : ~S }}"
(sb-impl::get-lisp-obj-address thread)
(bt:thread-name thread)))
(defun extract-threads (events)
(loop
with uniques-ht = (make-hash-table :test #'eq)
for event in events
do
(setf (gethash (trace-event-thread event) uniques-ht) t)
finally
(return (alexandria:hash-table-keys uniques-ht))))
FIXME : save with streams instead ? -- , 2019 - 10 - 14
(defun save-report (output-file-name &key (skip-args t))
(with-open-file (stream output-file-name :direction :output :if-exists :supersede)
TODO : preamble -- , 2019 - 10 - 14
(format stream "{~%\"traceEvents\" : [~%")
(loop
for (entry . restp) on *trace-events*
do
(write-string (trace-event->json entry :skip-args skip-args) stream)
(when restp
(write-string "," stream)
(terpri stream)))
(loop
for (thread . restp) on (extract-threads *trace-events*)
initially
(write-string "," stream)
(terpri stream)
do
(write-string (thread->json thread) stream)
(when restp
(write-string "," stream)
(terpri stream)))
(format stream "~&],
\"displayTimeUnit\" : \"ms\",
\"application\" : \"FIXME\",
\"version\" : \"FIXME\",
\"traceTime\" : ~S
}"
" TODO local-time independent time"
))
(values))
(defun package-symbols-except (name &rest exceptions)
(let (symbols
(package (sb-impl::find-undeleted-package-or-lose name)))
(do-all-symbols (symbol (find-package name))
(when (eql package (symbol-package symbol))
(when (and (fboundp symbol)
(not (macro-function symbol))
(not (special-operator-p symbol)))
(push symbol symbols))
(let ((setf-name `(setf ,symbol)))
(when (fboundp setf-name)
(push setf-name symbols)))))
(set-difference symbols exceptions :key (lambda (x)
(if (consp x)
(string (second x))
(string x))) :test #'string-equal)))
|
a7ad9776865f7e7ae039cd08edd6d5ecf790ed42aba13484be4039adac505292 | CrossRef/event-data-query | facet_test.clj | (ns event-data-query.facet-test
(:require [clojure.test :refer :all]
[event-data-query.facet :as facet]
[event-data-query.parameters :as parameters]
[slingshot.test]
[slingshot.test :as stest]))
; A regression test to make sure that parameters/parse suits the facet syntax.
(deftest parse-input
(testing "parameters/parse can parse facets input"
(is (= (parameters/parse "source:*,relation:66,obj.url.domain:99" identity)
{"source" "*"
"relation" "66"
"obj.url.domain" "99"}))))
(deftest error-handling
(testing "Maximum size can't be exceeded"
(is (thrown+? [:type :validation-failure, :subtype :facet-unavailable]
(facet/validate {"denomenation" "1"})))
(is (thrown+? [:type :validation-failure :subtype :facet-size]
(facet/validate {"source" "999999"})))
(is (thrown+? [:type :validation-failure :subtype :facet-size-malformed]
(facet/validate {"source" "a million"})))))
(deftest generate-aggregation-query
(testing "An aggregation query can be created from a query string input."
(is (= (facet/build-facet-query (parameters/parse "source:5,relation-type:*" identity))
{"source" {:terms {:field "source" :size 5}}
"relation-type" {:terms {:field "relation-type" :size 100}}})))
(testing "No facet gives empty query"
(is (= nil (facet/build-facet-query (parameters/parse "" identity))))))
| null | https://raw.githubusercontent.com/CrossRef/event-data-query/ede3c91afccd89dbb7053e81dd425bc3c3ad966a/test/event_data_query/facet_test.clj | clojure | A regression test to make sure that parameters/parse suits the facet syntax. | (ns event-data-query.facet-test
(:require [clojure.test :refer :all]
[event-data-query.facet :as facet]
[event-data-query.parameters :as parameters]
[slingshot.test]
[slingshot.test :as stest]))
(deftest parse-input
(testing "parameters/parse can parse facets input"
(is (= (parameters/parse "source:*,relation:66,obj.url.domain:99" identity)
{"source" "*"
"relation" "66"
"obj.url.domain" "99"}))))
(deftest error-handling
(testing "Maximum size can't be exceeded"
(is (thrown+? [:type :validation-failure, :subtype :facet-unavailable]
(facet/validate {"denomenation" "1"})))
(is (thrown+? [:type :validation-failure :subtype :facet-size]
(facet/validate {"source" "999999"})))
(is (thrown+? [:type :validation-failure :subtype :facet-size-malformed]
(facet/validate {"source" "a million"})))))
(deftest generate-aggregation-query
(testing "An aggregation query can be created from a query string input."
(is (= (facet/build-facet-query (parameters/parse "source:5,relation-type:*" identity))
{"source" {:terms {:field "source" :size 5}}
"relation-type" {:terms {:field "relation-type" :size 100}}})))
(testing "No facet gives empty query"
(is (= nil (facet/build-facet-query (parameters/parse "" identity))))))
|
cfad335ab4317c468dc996bde1d6ad94beb17e27095d5189c6a7514a9a665441 | pasberth/granjure | trans.clj | (ns granjure.control.monad.trans
(:use granjure.primitive
granjure.control))
(defprotocol MonadTrans
(lift-monad [this m]))
(defrecord Lift [m])
(extend-protocol TypeClass
Lift
(specialize [t cxt] (lift-monad cxt (:m t))))
(def lift (cfn [m] (Lift. m))) | null | https://raw.githubusercontent.com/pasberth/granjure/b2c85b794ef7e98c91178f0b6a59f748d3af7390/src/granjure/control/monad/trans.clj | clojure | (ns granjure.control.monad.trans
(:use granjure.primitive
granjure.control))
(defprotocol MonadTrans
(lift-monad [this m]))
(defrecord Lift [m])
(extend-protocol TypeClass
Lift
(specialize [t cxt] (lift-monad cxt (:m t))))
(def lift (cfn [m] (Lift. m))) | |
675de3b44b5754d34f91944d3a224937912fb53e1d9479ea7910b7761d030c3d | deadcode/Learning-CL--David-Touretzky | 7.9.lisp | (defun find-nested (x)
(find-if #'(lambda (y) (and (listp y) (not (equal y nil)))) x))
(let ((foo '(find-nested '(a () b (c d) e (f g h) i))))
(format t "~s = ~s~%" foo (eval foo)))
| null | https://raw.githubusercontent.com/deadcode/Learning-CL--David-Touretzky/b4557c33f58e382f765369971e6a4747c27ca692/Chapter%207/7.9.lisp | lisp | (defun find-nested (x)
(find-if #'(lambda (y) (and (listp y) (not (equal y nil)))) x))
(let ((foo '(find-nested '(a () b (c d) e (f g h) i))))
(format t "~s = ~s~%" foo (eval foo)))
| |
4f2d753e64a91c43705c2b45d2c9fad6485ac48bd07e4a8a31b1d78e968e4189 | tweag/ormolu | Fixity.hs | # LANGUAGE CPP #
# LANGUAGE LambdaCase #
{-# LANGUAGE MultiWayIf #-}
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
# LANGUAGE TemplateHaskell #
-- | Definitions for fixity analysis.
module Ormolu.Fixity
( OpName,
pattern OpName,
unOpName,
occOpName,
FixityDirection (..),
FixityInfo (..),
FixityMap,
LazyFixityMap,
lookupFixity,
HackageInfo (..),
defaultStrategyThreshold,
defaultFixityInfo,
buildFixityMap,
buildFixityMap',
bootPackages,
packageToOps,
packageToPopularity,
)
where
import qualified Data.Binary as Binary
import qualified Data.Binary.Get as Binary
import qualified Data.ByteString.Lazy as BL
import Data.Foldable (foldl')
import Data.List.NonEmpty (NonEmpty ((:|)))
import qualified Data.List.NonEmpty as NE
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe (fromMaybe)
import Data.MemoTrie (memo)
import Data.Semigroup (sconcat)
import Data.Set (Set)
import qualified Data.Set as Set
import Distribution.Types.PackageName (PackageName, mkPackageName, unPackageName)
import Ormolu.Fixity.Internal
#if BUNDLE_FIXITIES
import Data.FileEmbed (embedFile)
#else
import qualified Data.ByteString as B
import System.IO.Unsafe (unsafePerformIO)
#endif
packageToOps :: Map PackageName FixityMap
packageToPopularity :: Map PackageName Int
#if BUNDLE_FIXITIES
HackageInfo packageToOps packageToPopularity =
Binary.runGet Binary.get $
BL.fromStrict $(embedFile "extract-hackage-info/hackage-info.bin")
#else
The GHC WASM backend does not yet support , so we instead
-- pass in the encoded fixity DB via pre-initialization with Wizer.
HackageInfo packageToOps packageToPopularity =
unsafePerformIO $
Binary.runGet Binary.get . BL.fromStrict <$> B.readFile "hackage-info.bin"
# NOINLINE packageToOps #
# NOINLINE packageToPopularity #
#endif
| List of packages shipped with GHC , for which the download count from
-- Hackage does not reflect their high popularity.
-- See #issuecomment-986609572.
-- "base" is not is this list, because it is already whitelisted
-- by buildFixityMap'.
bootPackages :: Set PackageName
bootPackages =
Set.fromList
[ "array",
"binary",
"bytestring",
"containers",
"deepseq",
"directory",
"exceptions",
"filepath",
"ghc-binary",
"mtl",
"parsec",
"process",
"stm",
"template-haskell",
"terminfo",
"text",
"time",
"transformers",
"unix",
"Win32"
]
-- | The default value for the popularity ratio threshold, after which a
-- very popular definition from packageToOps will completely rule out
-- conflicting definitions instead of being merged with them.
defaultStrategyThreshold :: Float
defaultStrategyThreshold = 0.9
-- | Build a fixity map using the given popularity threshold and a list of
-- cabal dependencies. Dependencies from the list have higher priority than
-- other packages.
buildFixityMap ::
-- | Popularity ratio threshold, after which a very popular package will
-- completely rule out conflicting definitions coming from other packages
-- instead of being merged with them
Float ->
-- | Explicitly known dependencies
Set PackageName ->
-- | Resulting map
LazyFixityMap
buildFixityMap = buildFixityMap' packageToOps packageToPopularity bootPackages
-- | Build a fixity map using the given popularity threshold and a list of
-- cabal dependencies. Dependencies from the list have higher priority than
-- other packages. This specific version of the function allows the user to
-- specify the package databases used to build the final fixity map.
buildFixityMap' ::
-- | Map from package to fixity map for operators defined in this package
Map PackageName FixityMap ->
-- | Map from package to popularity
Map PackageName Int ->
-- | Higher priority packages
Set PackageName ->
-- | Popularity ratio threshold, after which a very popular package will
-- completely rule out conflicting definitions coming from other packages
-- instead of being merged with them
Float ->
-- | Explicitly known dependencies
Set PackageName ->
-- | Resulting map
LazyFixityMap
buildFixityMap'
operatorMap
popularityMap
higherPriorityPackages
strategyThreshold = memoSet $ \dependencies ->
let baseFixityMap =
Map.insert ":" colonFixityInfo $
fromMaybe Map.empty $
Map.lookup "base" operatorMap
cabalFixityMap =
mergeAll (buildPackageFixityMap <$> Set.toList dependencies)
higherPriorityFixityMap =
mergeAll (buildPackageFixityMap <$> Set.toList higherPriorityPackages)
remainingFixityMap =
mergeFixityMaps
popularityMap
strategyThreshold
(buildPackageFixityMap <$> Set.toList remainingPackages)
remainingPackages =
Map.keysSet operatorMap
`Set.difference` Set.union dependencies higherPriorityPackages
buildPackageFixityMap packageName =
( packageName,
fromMaybe Map.empty $
Map.lookup packageName operatorMap
)
we need a threshold > 1.0 so that no dependency can reach the
-- threshold
mergeAll = mergeFixityMaps Map.empty 10.0
in LazyFixityMap
[ baseFixityMap,
cabalFixityMap,
higherPriorityFixityMap,
remainingFixityMap
]
memoSet :: (Set PackageName -> v) -> Set PackageName -> v
memoSet f = memo (f . Set.fromAscList . fmap mkPackageName) . fmap unPackageName . Set.toAscList
-- | Merge a list of individual fixity maps, coming from different packages.
-- Package popularities and the given threshold are used to choose between
-- the "keep best only" (>= threshold) and "merge all" (< threshold)
-- strategies when conflicting definitions are encountered for an operator.
mergeFixityMaps ::
| Map from package name to 30 - days download count
Map PackageName Int ->
-- | Popularity ratio threshold
Float ->
-- | List of (package name, package fixity map) to merge
[(PackageName, FixityMap)] ->
-- | Resulting fixity map
FixityMap
mergeFixityMaps popularityMap threshold packageMaps =
Map.map
(useThreshold threshold . NE.fromList . Map.toList)
scoredMap
where
scoredMap = Map.map getScores opFixityMap
when we encounter a duplicate key ( op1 ) in the unionsWith operation ,
-- we have
op1 -map- > { definitions1 -map- > originPackages }
-- op1 -map-> {definitions2 -map-> originPackages}
-- so we merge the keys (which have the type:
Map FixityInfo ( NonEmpty PackageName ) )
using ' Map.unionWith ( < > ) ' , to " concatenate " the list of
-- definitions for this operator, and to also "concatenate" origin
-- packages if a same definition is found in both maps
opFixityMap =
Map.unionsWith
(Map.unionWith (<>))
(opFixityMapFrom <$> packageMaps)
useThreshold ::
-- Threshold
Float ->
-- List of conflicting (definition, score) for a given operator
NonEmpty (FixityInfo, Int) ->
-- Resulting fixity, using the specified threshold to choose between
-- strategy "keep best only" and "merge all"
FixityInfo
useThreshold t fixScores =
if toFloat maxScore / toFloat sumScores >= t
then sconcat . fmap fst $ maxs -- merge potential ex-aequo winners
else sconcat . fmap fst $ fixScores
where
toFloat x = fromIntegral x :: Float
maxs = maxWith snd fixScores
maxScore = snd $ NE.head maxs
sumScores = foldl' (+) 0 (snd <$> fixScores)
getScores ::
-- Map for a given operator associating each of its conflicting
-- definitions with the packages that define it
Map FixityInfo (NonEmpty PackageName) ->
-- Map for a given operator associating each of its conflicting
-- definitions with their score (= sum of the popularity of the
-- packages that define it)
Map FixityInfo Int
getScores =
Map.map
(sum . fmap (fromMaybe 0 . flip Map.lookup popularityMap))
opFixityMapFrom ::
-- (packageName, package fixity map)
(PackageName, FixityMap) ->
-- Map associating each operator of the package with a
-- {map for a given operator associating each of its definitions with
-- the list of packages that define it}
-- (this list can only be == [packageName] in the context of this
-- function)
Map OpName (Map FixityInfo (NonEmpty PackageName))
opFixityMapFrom (packageName, opsMap) =
Map.map
(flip Map.singleton (packageName :| []))
opsMap
maxWith :: (Ord b) => (a -> b) -> NonEmpty a -> NonEmpty a
maxWith f xs = snd $ foldl' comp (f h, h :| []) t
where
h :| t = xs
comp (fMax, maxs) x =
let fX = f x
in if
| fMax < fX -> (fX, x :| [])
| fMax == fX -> (fMax, NE.cons x maxs)
| otherwise -> (fMax, maxs)
| null | https://raw.githubusercontent.com/tweag/ormolu/f6dcd728b3abfdb9b4b352111be4d4448cf4e17e/src/Ormolu/Fixity.hs | haskell | # LANGUAGE MultiWayIf #
# LANGUAGE OverloadedStrings #
| Definitions for fixity analysis.
pass in the encoded fixity DB via pre-initialization with Wizer.
Hackage does not reflect their high popularity.
See #issuecomment-986609572.
"base" is not is this list, because it is already whitelisted
by buildFixityMap'.
| The default value for the popularity ratio threshold, after which a
very popular definition from packageToOps will completely rule out
conflicting definitions instead of being merged with them.
| Build a fixity map using the given popularity threshold and a list of
cabal dependencies. Dependencies from the list have higher priority than
other packages.
| Popularity ratio threshold, after which a very popular package will
completely rule out conflicting definitions coming from other packages
instead of being merged with them
| Explicitly known dependencies
| Resulting map
| Build a fixity map using the given popularity threshold and a list of
cabal dependencies. Dependencies from the list have higher priority than
other packages. This specific version of the function allows the user to
specify the package databases used to build the final fixity map.
| Map from package to fixity map for operators defined in this package
| Map from package to popularity
| Higher priority packages
| Popularity ratio threshold, after which a very popular package will
completely rule out conflicting definitions coming from other packages
instead of being merged with them
| Explicitly known dependencies
| Resulting map
threshold
| Merge a list of individual fixity maps, coming from different packages.
Package popularities and the given threshold are used to choose between
the "keep best only" (>= threshold) and "merge all" (< threshold)
strategies when conflicting definitions are encountered for an operator.
| Popularity ratio threshold
| List of (package name, package fixity map) to merge
| Resulting fixity map
we have
op1 -map-> {definitions2 -map-> originPackages}
so we merge the keys (which have the type:
definitions for this operator, and to also "concatenate" origin
packages if a same definition is found in both maps
Threshold
List of conflicting (definition, score) for a given operator
Resulting fixity, using the specified threshold to choose between
strategy "keep best only" and "merge all"
merge potential ex-aequo winners
Map for a given operator associating each of its conflicting
definitions with the packages that define it
Map for a given operator associating each of its conflicting
definitions with their score (= sum of the popularity of the
packages that define it)
(packageName, package fixity map)
Map associating each operator of the package with a
{map for a given operator associating each of its definitions with
the list of packages that define it}
(this list can only be == [packageName] in the context of this
function) | # LANGUAGE CPP #
# LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE TemplateHaskell #
module Ormolu.Fixity
( OpName,
pattern OpName,
unOpName,
occOpName,
FixityDirection (..),
FixityInfo (..),
FixityMap,
LazyFixityMap,
lookupFixity,
HackageInfo (..),
defaultStrategyThreshold,
defaultFixityInfo,
buildFixityMap,
buildFixityMap',
bootPackages,
packageToOps,
packageToPopularity,
)
where
import qualified Data.Binary as Binary
import qualified Data.Binary.Get as Binary
import qualified Data.ByteString.Lazy as BL
import Data.Foldable (foldl')
import Data.List.NonEmpty (NonEmpty ((:|)))
import qualified Data.List.NonEmpty as NE
import Data.Map.Strict (Map)
import qualified Data.Map.Strict as Map
import Data.Maybe (fromMaybe)
import Data.MemoTrie (memo)
import Data.Semigroup (sconcat)
import Data.Set (Set)
import qualified Data.Set as Set
import Distribution.Types.PackageName (PackageName, mkPackageName, unPackageName)
import Ormolu.Fixity.Internal
#if BUNDLE_FIXITIES
import Data.FileEmbed (embedFile)
#else
import qualified Data.ByteString as B
import System.IO.Unsafe (unsafePerformIO)
#endif
packageToOps :: Map PackageName FixityMap
packageToPopularity :: Map PackageName Int
#if BUNDLE_FIXITIES
HackageInfo packageToOps packageToPopularity =
Binary.runGet Binary.get $
BL.fromStrict $(embedFile "extract-hackage-info/hackage-info.bin")
#else
The GHC WASM backend does not yet support , so we instead
HackageInfo packageToOps packageToPopularity =
unsafePerformIO $
Binary.runGet Binary.get . BL.fromStrict <$> B.readFile "hackage-info.bin"
# NOINLINE packageToOps #
# NOINLINE packageToPopularity #
#endif
| List of packages shipped with GHC , for which the download count from
bootPackages :: Set PackageName
bootPackages =
Set.fromList
[ "array",
"binary",
"bytestring",
"containers",
"deepseq",
"directory",
"exceptions",
"filepath",
"ghc-binary",
"mtl",
"parsec",
"process",
"stm",
"template-haskell",
"terminfo",
"text",
"time",
"transformers",
"unix",
"Win32"
]
defaultStrategyThreshold :: Float
defaultStrategyThreshold = 0.9
buildFixityMap ::
Float ->
Set PackageName ->
LazyFixityMap
buildFixityMap = buildFixityMap' packageToOps packageToPopularity bootPackages
buildFixityMap' ::
Map PackageName FixityMap ->
Map PackageName Int ->
Set PackageName ->
Float ->
Set PackageName ->
LazyFixityMap
buildFixityMap'
operatorMap
popularityMap
higherPriorityPackages
strategyThreshold = memoSet $ \dependencies ->
let baseFixityMap =
Map.insert ":" colonFixityInfo $
fromMaybe Map.empty $
Map.lookup "base" operatorMap
cabalFixityMap =
mergeAll (buildPackageFixityMap <$> Set.toList dependencies)
higherPriorityFixityMap =
mergeAll (buildPackageFixityMap <$> Set.toList higherPriorityPackages)
remainingFixityMap =
mergeFixityMaps
popularityMap
strategyThreshold
(buildPackageFixityMap <$> Set.toList remainingPackages)
remainingPackages =
Map.keysSet operatorMap
`Set.difference` Set.union dependencies higherPriorityPackages
buildPackageFixityMap packageName =
( packageName,
fromMaybe Map.empty $
Map.lookup packageName operatorMap
)
we need a threshold > 1.0 so that no dependency can reach the
mergeAll = mergeFixityMaps Map.empty 10.0
in LazyFixityMap
[ baseFixityMap,
cabalFixityMap,
higherPriorityFixityMap,
remainingFixityMap
]
memoSet :: (Set PackageName -> v) -> Set PackageName -> v
memoSet f = memo (f . Set.fromAscList . fmap mkPackageName) . fmap unPackageName . Set.toAscList
mergeFixityMaps ::
| Map from package name to 30 - days download count
Map PackageName Int ->
Float ->
[(PackageName, FixityMap)] ->
FixityMap
mergeFixityMaps popularityMap threshold packageMaps =
Map.map
(useThreshold threshold . NE.fromList . Map.toList)
scoredMap
where
scoredMap = Map.map getScores opFixityMap
when we encounter a duplicate key ( op1 ) in the unionsWith operation ,
op1 -map- > { definitions1 -map- > originPackages }
Map FixityInfo ( NonEmpty PackageName ) )
using ' Map.unionWith ( < > ) ' , to " concatenate " the list of
opFixityMap =
Map.unionsWith
(Map.unionWith (<>))
(opFixityMapFrom <$> packageMaps)
useThreshold ::
Float ->
NonEmpty (FixityInfo, Int) ->
FixityInfo
useThreshold t fixScores =
if toFloat maxScore / toFloat sumScores >= t
else sconcat . fmap fst $ fixScores
where
toFloat x = fromIntegral x :: Float
maxs = maxWith snd fixScores
maxScore = snd $ NE.head maxs
sumScores = foldl' (+) 0 (snd <$> fixScores)
getScores ::
Map FixityInfo (NonEmpty PackageName) ->
Map FixityInfo Int
getScores =
Map.map
(sum . fmap (fromMaybe 0 . flip Map.lookup popularityMap))
opFixityMapFrom ::
(PackageName, FixityMap) ->
Map OpName (Map FixityInfo (NonEmpty PackageName))
opFixityMapFrom (packageName, opsMap) =
Map.map
(flip Map.singleton (packageName :| []))
opsMap
-- | Collect every element of a non-empty list whose key under @f@ is
-- maximal.  Ties accumulate with the most recently seen element first;
-- the result is therefore non-empty by construction.
maxWith :: (Ord b) => (a -> b) -> NonEmpty a -> NonEmpty a
maxWith f (first :| rest) = snd (foldl' step (f first, first :| []) rest)
  where
    step acc@(bestKey, best) candidate
      | key > bestKey = (key, candidate :| [])
      | key == bestKey = (bestKey, NE.cons candidate best)
      | otherwise = acc
      where
        key = f candidate
|
e5898f47362ed900bfd066807b46bbc796fda9f9d5f36819240433dce47447c9 | SNePS/SNePS2 | orders.lisp | -*- Mode : Lisp ; Syntax : Common - Lisp ; Package : SNEPSLOG ; Base : 10 -*-
Copyright ( C ) 1984 - -2013
Research Foundation of State University of New York
Version : $ I d : , v 1.6 2013/08/28 19:07:26 shapiro Exp $
;; This file is part of SNePS.
$ BEGIN LICENSE$
The contents of this file are subject to the University at
Buffalo Public License Version 1.0 ( the " License " ) ; you may
;;; not use this file except in compliance with the License. You
;;; may obtain a copy of the License at
;;; . edu/sneps/Downloads/ubpl.pdf.
;;;
Software distributed under the License is distributed on an
" AS IS " basis , WITHOUT WARRANTY OF ANY KIND , either express
;;; or implied. See the License for the specific language gov
;;; erning rights and limitations under the License.
;;;
The Original Code is SNePS 2.8 .
;;;
The Initial Developer of the Original Code is Research Foun
dation of State University of New York , on behalf of Univer
sity at Buffalo .
;;;
Portions created by the Initial Developer are Copyright ( C )
2011 Research Foundation of State University of New York , on
behalf of University at Buffalo . All Rights Reserved .
$ END LICENSE$
;;; Predefined epistemic ordering functions
by
2013/05/04
(in-package :snepslog)
;;; *fluents*
;;; list of symbols corresponding to names of functions that are fluents
(defparameter *fluents* nil)
;;; An order in which all propositions are equally entrenched
(defun null-order (lhs rhs)
  "Degenerate epistemic ordering: every proposition is exactly as
entrenched as every other, so LHS <= RHS always holds and the answer
is always T."
  (declare (ignorable lhs rhs))
  t)
;;; Description: An ordering function that causes fluent propositions to be
;;; less epistemically entrenched than non-fluent propositions
;;; is a fluent or the rhs argument is not.
(defun fluent (lhs rhs)
  "Entrenchment test under which fluents are the easiest beliefs to give
up: LHS <= RHS holds unless RHS names a fluent while LHS does not."
  ;; A COND clause with no body returns its (generalized-boolean) test
  ;; value itself, exactly like the OR it replaces; IS-FLUENT is still
  ;; consulted for LHS first, RHS only when needed.
  (cond ((is-fluent lhs))
        (t (not (is-fluent rhs)))))
;;; Description: Returns t iff the function symbol for n is a fluent
;;; Arguments: n - a node
(defun is-fluent (n)
  "Return non-NIL iff the predicate (function symbol) of node N appears
in *FLUENTS*.  A non-NIL result is the tail of *FLUENTS* found by
MEMBER, i.e. a generalized boolean."
  (let ((pred (relation-predicate n)))
    ;; A compound predicate arrives as a cons; compare by the name of its
    ;; first node.  Otherwise PRED itself is the key to look up.
    ;; NOTE(review): MEMBER uses its default EQL test -- assumes the
    ;; entries of *FLUENTS* are symbols, not strings; confirm.
    (member (if (consp pred)
                (get-node-name (first pred))
                pred)
            *fluents*)))
(defun source (p1 p2)
  "Returns t iff p1 <= p2 in the epistemic entrenchment ordering.
Uses assertions:
HasSource(p,s) to mean that proposition p's source is s;
IsBetterSource(s1,s2) to mean that s1 is a more credible source than s2.
If p1 and p2 are the same, then they're epistemically tied.
If neither p1 nor p2 has a source, then they're epistemically tied.
If only one of p1 or p2 has a source,
then the one without the source is more epistemically entrenched than the other.
If they both have sources,
then p1 <= p2 (not (p1 > p2))
iff it is not the case
that for every source of p2
there is a source of p1 that is more credible than p2's source."
  (or (eq p1 p2)
      ;; Collect the asserted sources of each proposition.  ASKWH yields
      ;; one substitution per answer; extract the binding of ?x from each.
      (let ((p1sources (mapcar #'(lambda (sub) (match:value.sbst 'x sub))
                               (tell "askwh HasSource(~A, ?x)" p1)))
            (p2sources (mapcar #'(lambda (sub) (match:value.sbst 'x sub))
                               (tell "askwh HasSource(~A, ?x)" p2))))
        (if (and p1sources p2sources)
            ;; Both have sources: p1 <= p2 fails only when EVERY source of
            ;; p2 is beaten by some distinct source of p1 (p1 strictly more
            ;; entrenched); otherwise it holds.
            (not (every #'(lambda (s2)
                            (some #'(lambda (s1)
                                      (and (not (eq s1 s2))
                                           (tell "ask IsBetterSource(~A, ~A)" s1 s2)))
                                  p1sources))
                        p2sources))
            ;; At most one has a source: p1 <= p2 holds exactly when p2 is
            ;; sourceless (sourceless propositions are at least as
            ;; entrenched as anything, per the docstring).
            (not p2sources)))))
;;; Description: An ordering function relying on explicit statements of
;;; relative entrenchment of propositions, using the
;;; IsLessEntrenched predicate for checks
;;; [IsLessEntrenched(x.y)] = [x] is strictly less entrenched than [y]
(defun explicit (lhs rhs)
  "Return T iff LHS <= RHS under explicitly asserted entrenchment: the
ordering holds unless the knowledge base answers yes to
IsLessEntrenched(RHS, LHS), i.e. RHS is asserted strictly below LHS.
Unrelated propositions therefore default to being tied."
  (not (tell "ask IsLessEntrenched(~A, ~A)" rhs lhs)))
| null | https://raw.githubusercontent.com/SNePS/SNePS2/d3862108609b1879f2c546112072ad4caefc050d/snepslog/orders.lisp | lisp | Syntax : Common - Lisp ; Package : SNEPSLOG ; Base : 10 -*-
This file is part of SNePS.
you may
not use this file except in compliance with the License. You
may obtain a copy of the License at
. edu/sneps/Downloads/ubpl.pdf.
or implied. See the License for the specific language gov
erning rights and limitations under the License.
Predefined epistemic ordering functions
*fluents*
list of symbols corresponding to names of functions that are fluents
An order in which all propositions are equally entrenched
Description: An ordering function that causes fluent propositions to be
less epistemically entrenched than non-fluent propositions
is a fluent or the rhs argument is not.
Description: Returns t iff the function symbol for n is a fluent
Arguments: n - a node
Description: An ordering function relying on explicit statements of
relative entrenchment of propositions, using the
IsLessEntrenched predicate for checks
[IsLessEntrenched(x.y)] = [x] is strictly less entrenched than [y] |
Copyright ( C ) 1984 - -2013
Research Foundation of State University of New York
Version : $ I d : , v 1.6 2013/08/28 19:07:26 shapiro Exp $
$ BEGIN LICENSE$
The contents of this file are subject to the University at
Software distributed under the License is distributed on an
" AS IS " basis , WITHOUT WARRANTY OF ANY KIND , either express
The Original Code is SNePS 2.8 .
The Initial Developer of the Original Code is Research Foun
dation of State University of New York , on behalf of Univer
sity at Buffalo .
Portions created by the Initial Developer are Copyright ( C )
2011 Research Foundation of State University of New York , on
behalf of University at Buffalo . All Rights Reserved .
$ END LICENSE$
by
2013/05/04
(in-package :snepslog)
(defparameter *fluents* nil)
(defun null-order (lhs rhs)
(declare (ignore lhs rhs))
t)
(defun fluent (lhs rhs)
(or (is-fluent lhs)
(not (is-fluent rhs))))
(defun is-fluent (n)
(let ((pred (relation-predicate n)))
(member (if (consp pred)
(get-node-name (first pred))
pred)
*fluents*)))
(defun source (p1 p2)
"Returns t iff p1 <= p2 in the epistemic entrenchment ordering.
Uses assertions:
IsBetterSource(s1,s2) to mean that s1 is a more credible source than s2.
If p1 and p2 are the same, then they're epistemically tied.
If neither p1 nor p2 has a source, then they're epistemically tied.
If only one of p1 or p2 has a source,
then the one without the source is more epistemically entrenched than the other.
If they both have sources,
then p1 <= p2 (not (p1 > p2))
iff it is not the case
that for every source of p2
there is a source of p1 that is more credible than p2's source."
(or (eq p1 p2)
(let ((p1sources (mapcar #'(lambda (sub) (match:value.sbst 'x sub))
(tell "askwh HasSource(~A, ?x)" p1)))
(p2sources (mapcar #'(lambda (sub) (match:value.sbst 'x sub))
(tell "askwh HasSource(~A, ?x)" p2))))
(if (and p1sources p2sources)
(not (every #'(lambda (s2)
(some #'(lambda (s1)
(and (not (eq s1 s2))
(tell "ask IsBetterSource(~A, ~A)" s1 s2)))
p1sources))
p2sources))
(not p2sources)))))
(defun explicit (lhs rhs)
(not (tell "ask IsLessEntrenched(~A, ~A)" rhs lhs)))
|
cf627416208afe40e9d689c2686f88e716d0e6c1cdfb229d72342e07f7de7ec9 | pixlsus/registry.gimp.org_static | elsamuko-photochrom-batch.scm | ; The GIMP -- an image manipulation program
Copyright ( C ) 1995 and
;
; This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 3 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
;
You should have received a copy of the GNU General Public License
; along with this program; if not, write to the Free Software
Foundation , Inc. , 675 Mass Ave , Cambridge , , USA .
; -3.0.html
;
Copyright ( C ) 2011 elsamuko < >
;
; This is the batch version of the photochrom script, run it with
gimp -i -b ' ( elsamuko - photochrom - batch " picture.jpg " ( list 255 128 0 ) ( list 255 68 112 ) 60 60 0 100 FALSE FALSE ) ' -b ' ( gimp - quit 0 ) '
or for more than one picture
gimp -i -b ' ( elsamuko - photochrom - batch " * .jpg " ( list 255 128 0 ) ( list 255 68 112 ) 60 60 0 100 FALSE FALSE ) ' -b ' ( gimp - quit 0 ) '
;; Batch photochrom effect.  For every file matching PATTERN: load the
;; image, build a stack of tint/contrast layers emulating early-1900s
;; photochrom prints, flatten, and save back over the original file.
;;
;; pattern        - file glob, e.g. "*.jpg"
;; color1 color2  - RGB triples for the grain-merge/screen tint and the
;;                  multiply tint
;; contrast       - opacity (0-100) of the two desaturated overlay layers
;; bw-merge       - opacity (0-100) of the grain-merge B/W layer
;; num1 num2      - tint-gradient start/end, as a percentage of height
;; dodge          - TRUE for a dodged black-and-white result
;; retro          - TRUE to swap the colour tints for retro yellow/rose
;;
;; Fix: the section comments "set BW screen layer" and "set BW merge
;; layer" had lost their leading ';' and would have been evaluated as
;; unbound symbols; the markers are restored below.
(define (elsamuko-photochrom-batch pattern
                                   color1 color2
                                   contrast bw-merge
                                   num1 num2
                                   dodge retro)
  (let* ((filelist (cadr (file-glob pattern 1))))
    (while (not (null? filelist))
           (let* ((filename (car filelist))
                  (img (car (gimp-file-load RUN-NONINTERACTIVE filename filename)))
                  (adraw (car (gimp-image-get-active-drawable img)))
                  (owidth (car (gimp-image-width img)))
                  (oheight (car (gimp-image-height img)))
                  ;; gradient endpoints in pixels, from height percentages
                  (offset1 (* oheight (/ num1 100)))
                  (offset2 (* oheight (/ num2 100)))
                  (dodge-layer (car (gimp-layer-copy adraw FALSE)))
                  (contrast-layer1 (car (gimp-layer-copy adraw FALSE)))
                  (contrast-layer2 (car (gimp-layer-copy adraw FALSE)))
                  (bw-screen-layer (car (gimp-layer-copy adraw FALSE)))
                  (bw-merge-layer (car (gimp-layer-copy adraw FALSE)))
                  (lum-layer (car (gimp-layer-copy adraw FALSE)))
                  (extra-layer 0)
                  (merge-layer (car (gimp-layer-new img
                                                    owidth
                                                    oheight
                                                    RGBA-IMAGE
                                                    "Grain Merge"
                                                    50
                                                    GRAIN-MERGE-MODE)))
                  (merge-mask (car (gimp-layer-create-mask merge-layer ADD-WHITE-MASK)))
                  (screen-layer (car (gimp-layer-new img
                                                     owidth
                                                     oheight
                                                     RGBA-IMAGE
                                                     "Screen"
                                                     10
                                                     SCREEN-MODE)))
                  (screen-mask (car (gimp-layer-create-mask screen-layer ADD-WHITE-MASK)))
                  (multiply-layer (car (gimp-layer-new img
                                                       owidth
                                                       oheight
                                                       RGBA-IMAGE
                                                       "Multiply"
                                                       10
                                                       MULTIPLY-MODE)))
                  (multiply-mask (car (gimp-layer-create-mask multiply-layer ADD-WHITE-MASK)))
                  (retro-layer (car (gimp-layer-new img
                                                    owidth
                                                    oheight
                                                    RGBA-IMAGE
                                                    "Retro 1"
                                                    60
                                                    MULTIPLY-MODE)))
                  (floatingsel 0)
                  (retro-mask (car (gimp-layer-create-mask retro-layer ADD-WHITE-MASK)))
                  (retro-layer2 (car (gimp-layer-new img
                                                     owidth
                                                     oheight
                                                     RGBA-IMAGE
                                                     "Retro 2"
                                                     20
                                                     SCREEN-MODE)))
                  (gradient-layer (car (gimp-layer-new img
                                                       owidth
                                                       oheight
                                                       RGBA-IMAGE
                                                       "Gradient Overlay"
                                                       100
                                                       OVERLAY-MODE)))
                  )
             ; init
             (gimp-message "begin")
             (gimp-context-push)
             (gimp-image-undo-group-start img)
             ;; convert greyscale sources to RGB so the colour layers apply
             (if (= (car (gimp-drawable-is-gray adraw)) TRUE)
                 (gimp-image-convert-rgb img))
             ;set extra color layer
             (gimp-message "set extra color layer")
             (gimp-image-add-layer img lum-layer 0)
             (gimp-drawable-set-name lum-layer "Luminosity")
             (gimp-desaturate-full lum-layer DESATURATE-LIGHTNESS)
             (gimp-layer-set-mode lum-layer GRAIN-EXTRACT-MODE)
             (gimp-edit-copy-visible img)
             (set! extra-layer (car (gimp-layer-new-from-visible img img "Extra Color")))
             (gimp-image-add-layer img extra-layer 0)
             (gimp-layer-set-mode extra-layer GRAIN-MERGE-MODE)
             (gimp-layer-set-opacity extra-layer 50)
             (gimp-drawable-set-visible lum-layer FALSE)
             ;set BW screen layer
             (gimp-message "set BW screen layer")
             (gimp-image-add-layer img bw-screen-layer -1)
             (gimp-drawable-set-name bw-screen-layer "BW Screen")
             (gimp-layer-set-mode bw-screen-layer SCREEN-MODE)
             (gimp-layer-set-opacity bw-screen-layer 50)
             (gimp-desaturate-full bw-screen-layer DESATURATE-LUMINOSITY)
             ;set BW merge layer
             (gimp-message "set BW merge layer")
             (gimp-image-add-layer img bw-merge-layer -1)
             (gimp-drawable-set-name bw-merge-layer "BW Merge")
             (gimp-layer-set-mode bw-merge-layer GRAIN-MERGE-MODE)
             (gimp-layer-set-opacity bw-merge-layer bw-merge)
             (gimp-desaturate-full bw-merge-layer DESATURATE-LUMINOSITY)
             (gimp-curves-spline bw-merge-layer HISTOGRAM-VALUE 6 #(0 144 88 42 255 255))
             ;set contrast layers
             (gimp-message "set contrast layers")
             (gimp-image-add-layer img contrast-layer1 -1)
             (gimp-drawable-set-name contrast-layer1 "Contrast1")
             (gimp-layer-set-mode contrast-layer1 OVERLAY-MODE)
             (gimp-layer-set-opacity contrast-layer1 contrast)
             (gimp-desaturate-full contrast-layer1 DESATURATE-LUMINOSITY)
             (gimp-image-add-layer img contrast-layer2 -1)
             (gimp-drawable-set-name contrast-layer2 "Contrast2")
             (gimp-layer-set-mode contrast-layer2 OVERLAY-MODE)
             (gimp-layer-set-opacity contrast-layer2 contrast)
             (gimp-desaturate-full contrast-layer2 DESATURATE-LUMINOSITY)
             ;set dodge layer
             (gimp-message "set dodge layer")
             (gimp-image-add-layer img dodge-layer -1)
             (gimp-drawable-set-name dodge-layer "Dodge")
             (gimp-layer-set-mode dodge-layer DODGE-MODE)
             (gimp-layer-set-opacity dodge-layer 50)
             ;set merge layer
             (gimp-message "set merge layer")
             (gimp-image-add-layer img merge-layer -1)
             (gimp-selection-all img)
             (gimp-context-set-foreground color1)
             (gimp-edit-bucket-fill merge-layer FG-BUCKET-FILL NORMAL-MODE 100 0 FALSE 0 0)
             (gimp-message "set merge layer3")
             (gimp-layer-add-mask merge-layer merge-mask)
             (gimp-context-set-foreground '(255 255 255))
             (gimp-message "set merge layer4")
             (gimp-context-set-background '(0 0 0))
             ;; white-to-black gradient on the mask fades the tint between
             ;; offset1 and offset2
             (gimp-edit-blend merge-mask FG-BG-RGB-MODE
                              NORMAL-MODE GRADIENT-LINEAR
                              100 0 REPEAT-NONE
                              TRUE FALSE 1 0
                              TRUE 0 offset1 0 offset2)
             ;set screen layer
             (gimp-message "set screen layer")
             (gimp-image-add-layer img screen-layer -1)
             (gimp-selection-all img)
             (gimp-context-set-foreground color1)
             (gimp-edit-bucket-fill screen-layer FG-BUCKET-FILL NORMAL-MODE 100 0 FALSE 0 0)
             (gimp-layer-add-mask screen-layer screen-mask)
             (gimp-context-set-foreground '(255 255 255))
             (gimp-context-set-background '(0 0 0))
             (gimp-edit-blend screen-mask FG-BG-RGB-MODE
                              NORMAL-MODE GRADIENT-LINEAR
                              100 0 REPEAT-NONE
                              TRUE FALSE 1 0
                              TRUE 0 offset1 0 offset2)
             ;set multiply layer
             (gimp-message "set multiply layer")
             (gimp-image-add-layer img multiply-layer -1)
             (gimp-selection-all img)
             (gimp-context-set-foreground color2)
             (gimp-edit-bucket-fill multiply-layer FG-BUCKET-FILL NORMAL-MODE 100 0 FALSE 0 0)
             (gimp-layer-add-mask multiply-layer multiply-mask)
             (gimp-context-set-foreground '(255 255 255))
             (gimp-context-set-background '(0 0 0))
             (gimp-edit-blend multiply-mask FG-BG-RGB-MODE
                              NORMAL-MODE GRADIENT-LINEAR
                              100 0 REPEAT-NONE
                              TRUE FALSE 1 0
                              TRUE 0 offset1 0 offset2)
             ;optional retro colors
             (if (= retro TRUE)
                 (begin
                   (gimp-message "optional retro colors")
                   ;yellow with mask
                   (gimp-image-add-layer img retro-layer -1)
                   (gimp-selection-all img)
                   (gimp-context-set-foreground '(251 242 163))
                   (gimp-edit-bucket-fill retro-layer FG-BUCKET-FILL NORMAL-MODE 100 0 FALSE 0 0)
                   (gimp-layer-add-mask retro-layer retro-mask)
                   ;; reuse the desaturated contrast layer as the tint mask
                   (gimp-edit-copy contrast-layer1)
                   (set! floatingsel (car (gimp-edit-paste retro-mask TRUE)))
                   (gimp-floating-sel-anchor floatingsel)
                   ;rose
                   (gimp-image-add-layer img retro-layer2 -1)
                   (gimp-selection-all img)
                   (gimp-context-set-foreground '(232 101 179))
                   (gimp-edit-bucket-fill retro-layer2 FG-BUCKET-FILL NORMAL-MODE 100 0 FALSE 0 0)
                   ;gradient overlay
                   (gimp-image-add-layer img gradient-layer -1)
                   (gimp-context-set-foreground '(255 255 255))
                   (gimp-context-set-background '(0 0 0))
                   (gimp-edit-blend gradient-layer FG-BG-RGB-MODE
                                    NORMAL-MODE GRADIENT-LINEAR
                                    100 0 REPEAT-NONE
                                    FALSE FALSE 1 0
                                    TRUE 0 offset1 0 offset2)
                   ;deactivate orange layers
                   (gimp-drawable-set-visible merge-layer FALSE)
                   (gimp-drawable-set-visible screen-layer FALSE)
                   (gimp-drawable-set-visible multiply-layer FALSE)))
             ;dodge b/w
             (gimp-message "dodge b/w")
             (if (= dodge TRUE)
                 (begin
                   (gimp-desaturate-full dodge-layer DESATURATE-LUMINOSITY)
                   (gimp-drawable-set-visible extra-layer FALSE)))
             ; tidy up
             ;(gimp-image-undo-group-end img)
             ;(gimp-displays-flush)
             ;(gimp-context-pop)
             (gimp-message "tidy up")
             (gimp-image-merge-visible-layers img EXPAND-AS-NECESSARY)
             (set! adraw (car (gimp-image-get-active-drawable img)))
             (gimp-file-save RUN-NONINTERACTIVE img adraw filename filename)
             ;; drop the in-memory image; the result lives on disk now
             (gimp-image-delete img))
           (set! filelist (cdr filelist)))))
| null | https://raw.githubusercontent.com/pixlsus/registry.gimp.org_static/ffcde7400f402728373ff6579947c6ffe87d1a5e/registry.gimp.org/files/elsamuko-photochrom-batch.scm | scheme | The GIMP -- an image manipulation program
This program is free software; you can redistribute it and/or modify
either version 3 of the License , or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program; if not, write to the Free Software
-3.0.html
This is the batch version of the photochrom script, run it with
init
set extra color layer
set contrast layers
set dodge layer
set merge layer
set screen layer
set multiply layer
optional retro colors
yellow with mask
rose
gradient overlay
deactivate orange layers
dodge b/w
tidy up
(gimp-image-undo-group-end img)
(gimp-displays-flush)
(gimp-context-pop) | Copyright ( C ) 1995 and
it under the terms of the GNU General Public License as published by
You should have received a copy of the GNU General Public License
Foundation , Inc. , 675 Mass Ave , Cambridge , , USA .
Copyright ( C ) 2011 elsamuko < >
gimp -i -b ' ( elsamuko - photochrom - batch " picture.jpg " ( list 255 128 0 ) ( list 255 68 112 ) 60 60 0 100 FALSE FALSE ) ' -b ' ( gimp - quit 0 ) '
or for more than one picture
gimp -i -b ' ( elsamuko - photochrom - batch " * .jpg " ( list 255 128 0 ) ( list 255 68 112 ) 60 60 0 100 FALSE FALSE ) ' -b ' ( gimp - quit 0 ) '
(define (elsamuko-photochrom-batch pattern
color1 color2
contrast bw-merge
num1 num2
dodge retro)
(let* ((filelist (cadr (file-glob pattern 1))))
(while (not (null? filelist))
(let* ((filename (car filelist))
(img (car (gimp-file-load RUN-NONINTERACTIVE filename filename)))
(adraw (car (gimp-image-get-active-drawable img)))
(owidth (car (gimp-image-width img)))
(oheight (car (gimp-image-height img)))
(offset1 (* oheight (/ num1 100)))
(offset2 (* oheight (/ num2 100)))
(dodge-layer (car (gimp-layer-copy adraw FALSE)))
(contrast-layer1 (car (gimp-layer-copy adraw FALSE)))
(contrast-layer2 (car (gimp-layer-copy adraw FALSE)))
(bw-screen-layer (car (gimp-layer-copy adraw FALSE)))
(bw-merge-layer (car (gimp-layer-copy adraw FALSE)))
(lum-layer (car (gimp-layer-copy adraw FALSE)))
(extra-layer 0)
(merge-layer (car (gimp-layer-new img
owidth
oheight
RGBA-IMAGE
"Grain Merge"
50
GRAIN-MERGE-MODE)))
(merge-mask (car (gimp-layer-create-mask merge-layer ADD-WHITE-MASK)))
(screen-layer (car (gimp-layer-new img
owidth
oheight
RGBA-IMAGE
"Screen"
10
SCREEN-MODE)))
(screen-mask (car (gimp-layer-create-mask screen-layer ADD-WHITE-MASK)))
(multiply-layer (car (gimp-layer-new img
owidth
oheight
RGBA-IMAGE
"Multiply"
10
MULTIPLY-MODE)))
(multiply-mask (car (gimp-layer-create-mask multiply-layer ADD-WHITE-MASK)))
(retro-layer (car (gimp-layer-new img
owidth
oheight
RGBA-IMAGE
"Retro 1"
60
MULTIPLY-MODE)))
(floatingsel 0)
(retro-mask (car (gimp-layer-create-mask retro-layer ADD-WHITE-MASK)))
(retro-layer2 (car (gimp-layer-new img
owidth
oheight
RGBA-IMAGE
"Retro 2"
20
SCREEN-MODE)))
(gradient-layer (car (gimp-layer-new img
owidth
oheight
RGBA-IMAGE
"Gradient Overlay"
100
OVERLAY-MODE)))
)
(gimp-message "begin")
(gimp-context-push)
(gimp-image-undo-group-start img)
(if (= (car (gimp-drawable-is-gray adraw )) TRUE)
(gimp-image-convert-rgb img)
)
(gimp-message "set extra color layer")
(gimp-image-add-layer img lum-layer 0)
(gimp-drawable-set-name lum-layer "Luminosity")
(gimp-desaturate-full lum-layer DESATURATE-LIGHTNESS)
(gimp-layer-set-mode lum-layer GRAIN-EXTRACT-MODE)
(gimp-edit-copy-visible img)
(set! extra-layer (car (gimp-layer-new-from-visible img img "Extra Color")))
(gimp-image-add-layer img extra-layer 0)
(gimp-layer-set-mode extra-layer GRAIN-MERGE-MODE)
(gimp-layer-set-opacity extra-layer 50)
(gimp-drawable-set-visible lum-layer FALSE)
set BW screen layer
(gimp-message "set BW screen layer")
(gimp-image-add-layer img bw-screen-layer -1)
(gimp-drawable-set-name bw-screen-layer "BW Screen")
(gimp-layer-set-mode bw-screen-layer SCREEN-MODE)
(gimp-layer-set-opacity bw-screen-layer 50)
(gimp-desaturate-full bw-screen-layer DESATURATE-LUMINOSITY)
set BW merge layer
(gimp-message "set BW merge layer")
(gimp-image-add-layer img bw-merge-layer -1)
(gimp-drawable-set-name bw-merge-layer "BW Merge")
(gimp-layer-set-mode bw-merge-layer GRAIN-MERGE-MODE)
(gimp-layer-set-opacity bw-merge-layer bw-merge)
(gimp-desaturate-full bw-merge-layer DESATURATE-LUMINOSITY)
(gimp-curves-spline bw-merge-layer HISTOGRAM-VALUE 6 #(0 144 88 42 255 255))
(gimp-message "set contrast layers")
(gimp-image-add-layer img contrast-layer1 -1)
(gimp-drawable-set-name contrast-layer1 "Contrast1")
(gimp-layer-set-mode contrast-layer1 OVERLAY-MODE)
(gimp-layer-set-opacity contrast-layer1 contrast)
(gimp-desaturate-full contrast-layer1 DESATURATE-LUMINOSITY)
(gimp-image-add-layer img contrast-layer2 -1)
(gimp-drawable-set-name contrast-layer2 "Contrast2")
(gimp-layer-set-mode contrast-layer2 OVERLAY-MODE)
(gimp-layer-set-opacity contrast-layer2 contrast)
(gimp-desaturate-full contrast-layer2 DESATURATE-LUMINOSITY)
(gimp-message "set dodge layer")
(gimp-image-add-layer img dodge-layer -1)
(gimp-drawable-set-name dodge-layer "Dodge")
(gimp-layer-set-mode dodge-layer DODGE-MODE)
(gimp-layer-set-opacity dodge-layer 50)
(gimp-message "set merge layer")
(gimp-image-add-layer img merge-layer -1)
(gimp-selection-all img)
(gimp-context-set-foreground color1)
(gimp-edit-bucket-fill merge-layer FG-BUCKET-FILL NORMAL-MODE 100 0 FALSE 0 0)
(gimp-message "set merge layer3")
(gimp-layer-add-mask merge-layer merge-mask)
(gimp-context-set-foreground '(255 255 255))
(gimp-message "set merge layer4")
(gimp-context-set-background '(0 0 0))
(gimp-edit-blend merge-mask FG-BG-RGB-MODE
NORMAL-MODE GRADIENT-LINEAR
100 0 REPEAT-NONE
TRUE FALSE 1 0
TRUE 0 offset1 0 offset2)
(gimp-message "set screen layer")
(gimp-image-add-layer img screen-layer -1)
(gimp-selection-all img)
(gimp-context-set-foreground color1)
(gimp-edit-bucket-fill screen-layer FG-BUCKET-FILL NORMAL-MODE 100 0 FALSE 0 0)
(gimp-layer-add-mask screen-layer screen-mask)
(gimp-context-set-foreground '(255 255 255))
(gimp-context-set-background '(0 0 0))
(gimp-edit-blend screen-mask FG-BG-RGB-MODE
NORMAL-MODE GRADIENT-LINEAR
100 0 REPEAT-NONE
TRUE FALSE 1 0
TRUE 0 offset1 0 offset2)
(gimp-message "set multiply layer")
(gimp-image-add-layer img multiply-layer -1)
(gimp-selection-all img)
(gimp-context-set-foreground color2)
(gimp-edit-bucket-fill multiply-layer FG-BUCKET-FILL NORMAL-MODE 100 0 FALSE 0 0)
(gimp-layer-add-mask multiply-layer multiply-mask)
(gimp-context-set-foreground '(255 255 255))
(gimp-context-set-background '(0 0 0))
(gimp-edit-blend multiply-mask FG-BG-RGB-MODE
NORMAL-MODE GRADIENT-LINEAR
100 0 REPEAT-NONE
TRUE FALSE 1 0
TRUE 0 offset1 0 offset2)
(if(= retro TRUE)(begin
(gimp-message "optional retro colors")
(gimp-image-add-layer img retro-layer -1)
(gimp-selection-all img)
(gimp-context-set-foreground '(251 242 163))
(gimp-edit-bucket-fill retro-layer FG-BUCKET-FILL NORMAL-MODE 100 0 FALSE 0 0)
(gimp-layer-add-mask retro-layer retro-mask)
(gimp-edit-copy contrast-layer1)
(set! floatingsel (car (gimp-edit-paste retro-mask TRUE)))
(gimp-floating-sel-anchor floatingsel)
(gimp-image-add-layer img retro-layer2 -1)
(gimp-selection-all img)
(gimp-context-set-foreground '(232 101 179))
(gimp-edit-bucket-fill retro-layer2 FG-BUCKET-FILL NORMAL-MODE 100 0 FALSE 0 0)
(gimp-image-add-layer img gradient-layer -1)
(gimp-context-set-foreground '(255 255 255))
(gimp-context-set-background '(0 0 0))
(gimp-edit-blend gradient-layer FG-BG-RGB-MODE
NORMAL-MODE GRADIENT-LINEAR
100 0 REPEAT-NONE
FALSE FALSE 1 0
TRUE 0 offset1 0 offset2)
(gimp-drawable-set-visible merge-layer FALSE)
(gimp-drawable-set-visible screen-layer FALSE)
(gimp-drawable-set-visible multiply-layer FALSE)
)
)
(gimp-message "dodge b/w")
(if(= dodge TRUE)(begin
(gimp-desaturate-full dodge-layer DESATURATE-LUMINOSITY)
(gimp-drawable-set-visible extra-layer FALSE)
)
)
(gimp-message "tidy up")
(gimp-image-merge-visible-layers img EXPAND-AS-NECESSARY)
(set! adraw (car (gimp-image-get-active-drawable img)))
(gimp-file-save RUN-NONINTERACTIVE img adraw filename filename)
(gimp-image-delete img)
)
(set! filelist (cdr filelist))
)
)
)
|
27befc685a1e9bc0c6d835acc87ab29321877ffb6126197705f4fec4209ebf42 | google/ghc-source-gen | Internal.hs | Copyright 2019 Google LLC
--
-- Use of this source code is governed by a BSD-style
-- license that can be found in the LICENSE file or at
-- -source/licenses/bsd
# LANGUAGE CPP #
module GHC.SourceGen.Expr.Internal where
import GHC.Hs.Expr
#if MIN_VERSION_ghc(9,0,0)
import GHC.Types.SrcLoc (unLoc)
#else
import SrcLoc (unLoc)
#endif
import GHC.SourceGen.Lit.Internal
import GHC.SourceGen.Syntax.Internal
-- | Wrap an expression in parentheses when it would otherwise parse
-- incorrectly in a given position:
--
-- * 'parenthesizeExprForApp' -- the expression is about to take part in
--   a function application.
-- * 'parenthesizeExprForOp' -- the expression is about to appear as an
--   operand of an infix operator (a looser context, so fewer forms need
--   parentheses; see 'needsExprForOp' vs. 'needsExprForApp').
--
-- Expressions that already parse atomically are returned unchanged.
parenthesizeExprForApp, parenthesizeExprForOp
    :: LHsExpr' -> LHsExpr'
parenthesizeExprForApp e
    | needsExprForApp (unLoc e) = parExpr e
    | otherwise = e
parenthesizeExprForOp e
    | needsExprForOp (unLoc e) = parExpr e
    | otherwise = e
-- | Wrap a located expression in a source-level @HsPar@ (parentheses)
-- node.  'withEpAnnNotUsed' and 'mkLocated' are the project's shims
-- that paper over GHC-version differences in annotation and location
-- arguments (see Syntax.Internal).
parExpr :: LHsExpr' -> LHsExpr'
parExpr = mkLocated . withEpAnnNotUsed HsPar
#if MIN_VERSION_ghc(8,6,0)
#define WILD_EXT _
#else
#define WILD_EXT
#endif
-- | Decide whether an expression must be parenthesized in a given
-- syntactic position.
--
-- 'needsExprForOp': true for forms that would swallow too much when
-- placed next to an infix operator (lambdas, @case@, @if@, @let@, @do@,
-- operator applications, negation, type signatures, and literals that
-- may print with a sign).
--
-- 'needsExprForApp': additionally true for application-like and static
-- forms, since an argument of an application binds tighter than an
-- operand of an operator.
--
-- @WILD_EXT@ expands to the extension-field wildcard on GHC >= 8.6 and
-- to nothing on older GHCs (see the CPP block above).
needsExprForApp, needsExprForOp :: HsExpr' -> Bool
needsExprForOp e = case e of
    -- TODO: more care for literals; only needed for negative numbers?
    HsLit WILD_EXT l -> litNeedsParen l
    HsOverLit WILD_EXT l -> overLitNeedsParen l
    HsLam{} -> True
    HsLamCase{} -> True
    OpApp{} -> True
    NegApp{} -> True
    HsCase{} -> True
    HsIf{} -> True
    HsMultiIf{} -> True
    HsLet{} -> True
    HsDo{} -> True
    ExprWithTySig{} -> True
    _ -> False
needsExprForApp e = case e of
    HsApp{} -> True
    HsAppType{} -> True
    HsStatic{} -> True
    _ -> needsExprForOp e
| null | https://raw.githubusercontent.com/google/ghc-source-gen/f6b9130d8f384be0ef7a02b371870d532090ccb6/src/GHC/SourceGen/Expr/Internal.hs | haskell |
Use of this source code is governed by a BSD-style
license that can be found in the LICENSE file or at
-source/licenses/bsd
TODO: more care for literals; only needed for negative numbers? | Copyright 2019 Google LLC
# LANGUAGE CPP #
module GHC.SourceGen.Expr.Internal where
import GHC.Hs.Expr
#if MIN_VERSION_ghc(9,0,0)
import GHC.Types.SrcLoc (unLoc)
#else
import SrcLoc (unLoc)
#endif
import GHC.SourceGen.Lit.Internal
import GHC.SourceGen.Syntax.Internal
parenthesizeExprForApp, parenthesizeExprForOp
:: LHsExpr' -> LHsExpr'
parenthesizeExprForApp e
| needsExprForApp (unLoc e) = parExpr e
| otherwise = e
parenthesizeExprForOp e
| needsExprForOp (unLoc e) = parExpr e
| otherwise = e
parExpr :: LHsExpr' -> LHsExpr'
parExpr = mkLocated . withEpAnnNotUsed HsPar
#if MIN_VERSION_ghc(8,6,0)
#define WILD_EXT _
#else
#define WILD_EXT
#endif
needsExprForApp, needsExprForOp :: HsExpr' -> Bool
needsExprForOp e = case e of
HsLit WILD_EXT l -> litNeedsParen l
HsOverLit WILD_EXT l -> overLitNeedsParen l
HsLam{} -> True
HsLamCase{} -> True
OpApp{} -> True
NegApp{} -> True
HsCase{} -> True
HsIf{} -> True
HsMultiIf{} -> True
HsLet{} -> True
HsDo{} -> True
ExprWithTySig{} -> True
_ -> False
needsExprForApp e = case e of
HsApp{} -> True
HsAppType{} -> True
HsStatic{} -> True
_ -> needsExprForOp e
|
5f83953804cdde076de29ddd6029a9b34d10183b093df91ce9f049c8b3a16c43 | gusenov/stepik-functional-programming-hs | Demo.hs |
Implement the function sumOdd, which sums those elements of a list
of integers that have odd values:

GHCi> sumOdd [2,5,30,37]
42
-}
module Demo where
-- | Fold step: include @x@ in the running sum @acc@ only when @x@ is odd.
foo x acc
  | odd x = x + acc
  | otherwise = acc

-- | Sum of the odd elements of a list, e.g. @sumOdd [2,5,30,37] == 42@.
sumOdd :: [Integer] -> Integer
sumOdd xs = foldr foo 0 xs
-- (2 + (5 + (30 + (37 + 0)))) | null | https://raw.githubusercontent.com/gusenov/stepik-functional-programming-hs/904164012b9b842a15d5ba3af05cfe589295fa5c/SumOdd/Demo.hs | haskell | (2 + (5 + (30 + (37 + 0)))) |
Ðåàëèçóéòå ôóíêöèþ sumOdd , öåëûõ ÷èñåë , íå÷åòíûå çíà÷åíèÿ :
GHCi > sumOdd [ 2,5,30,37 ]
42
Ðåàëèçóéòå ôóíêöèþ sumOdd, êîòîðàÿ ñóììèðóåò ýëåìåíòû ñïèñêà öåëûõ ÷èñåë, èìåþùèå íå÷åòíûå çíà÷åíèÿ:
GHCi> sumOdd [2,5,30,37]
42
-}
module Demo where
foo x y = if (odd x) then x + y else y
sumOdd :: [Integer] -> Integer
sumOdd = foldr foo 0
|
6606d78b317a993bfd1702d994d54847e2d3a10b656e758c29e9644d5ebfc34d | AccelerateHS/accelerate | Vec.hs | # LANGUAGE ScopedTypeVariables #
# LANGUAGE MagicHash #
{-# LANGUAGE ConstraintKinds #-}
# LANGUAGE TypeFamilies #
{-# OPTIONS_HADDOCK hide #-}
# OPTIONS_GHC -fno - warn - orphans #
-- |
Module : Data . Array . Accelerate . Sugar .
Copyright : [ 2008 .. 2020 ] The Accelerate Team
-- License : BSD3
--
Maintainer : < >
-- Stability : experimental
Portability : non - portable ( GHC extensions )
--
module Data.Array.Accelerate.Sugar.Vec
where
import Data.Array.Accelerate.Sugar.Elt
import Data.Array.Accelerate.Representation.Tag
import Data.Array.Accelerate.Representation.Type
import Data.Array.Accelerate.Type
import Data.Primitive.Types
import Data.Primitive.Vec
import GHC.TypeLits
import GHC.Prim
-- | Constraint synonym for element types that may be packed into a SIMD
-- 'Vec': they must be Accelerate surface elements ('Elt'), primitive
-- ('Prim'), single scalars ('IsSingle'), and be their own representation
-- type (@EltR a ~ a@).
type VecElt a = (Elt a, Prim a, IsSingle a, EltR a ~ a)
-- | A 'Vec' of statically known width is itself a surface element.  Its
-- representation is the 'Vec' unchanged ('toElt'/'fromElt' are 'id');
-- 'eltR' and 'tagsR' describe it as a single vector scalar whose width
-- is recovered from the type-level @n@ via 'natVal''.
instance (KnownNat n, VecElt a) => Elt (Vec n a) where
  type EltR (Vec n a) = Vec n a
  eltR = TupRsingle (VectorScalarType (VectorType (fromIntegral (natVal' (proxy# :: Proxy# n))) singleType))
  tagsR = [TagRsingle (VectorScalarType (VectorType (fromIntegral (natVal' (proxy# :: Proxy# n))) singleType))]
  toElt = id
  fromElt = id
| null | https://raw.githubusercontent.com/AccelerateHS/accelerate/63e53be22aef32cd0b3b6f108e637716a92b72dc/src/Data/Array/Accelerate/Sugar/Vec.hs | haskell | # LANGUAGE ConstraintKinds #
# OPTIONS_HADDOCK hide #
|
License : BSD3
Stability : experimental
| # LANGUAGE ScopedTypeVariables #
# LANGUAGE MagicHash #
# LANGUAGE TypeFamilies #
# OPTIONS_GHC -fno - warn - orphans #
Module : Data . Array . Accelerate . Sugar .
Copyright : [ 2008 .. 2020 ] The Accelerate Team
Maintainer : < >
Portability : non - portable ( GHC extensions )
module Data.Array.Accelerate.Sugar.Vec
where
import Data.Array.Accelerate.Sugar.Elt
import Data.Array.Accelerate.Representation.Tag
import Data.Array.Accelerate.Representation.Type
import Data.Array.Accelerate.Type
import Data.Primitive.Types
import Data.Primitive.Vec
import GHC.TypeLits
import GHC.Prim
type VecElt a = (Elt a, Prim a, IsSingle a, EltR a ~ a)
instance (KnownNat n, VecElt a) => Elt (Vec n a) where
type EltR (Vec n a) = Vec n a
eltR = TupRsingle (VectorScalarType (VectorType (fromIntegral (natVal' (proxy# :: Proxy# n))) singleType))
tagsR = [TagRsingle (VectorScalarType (VectorType (fromIntegral (natVal' (proxy# :: Proxy# n))) singleType))]
toElt = id
fromElt = id
|
cc3e4446898db954f3d8ed62e0413b99af308cc470ab86234d9bbb27a4fa0b1a | fredfeng/CS162 | repl.ml | open Ast
open Eval
open Format
(* Read-eval-print loop.  Prompts with "> " via linenoise; when no line
   is delivered (None, e.g. at end of input) the loop ends.  Each line
   is added to history, parsed, echoed back, evaluated, and its value
   printed.  Parse errors, evaluator [Stuck] errors, and interpreter
   stack overflow are reported without killing the loop, which then
   recurses for the next line. *)
let rec repl () =
  match LNoise.linenoise "> " with
  | None -> ()
  | Some l -> begin
      LNoise.history_add l |> ignore;
      try
        let e = parse l in
        (* echo the parsed form first, then its evaluated value *)
        printf "<== %s\n%!" (string_of_expr e);
        let v = eval e in
        printf "==> %s\n%!" (string_of_expr v)
      with
      | Parsing.Parse_error -> printf "parse error\n%!"
      | Stuck msg -> printf "error: %s\n%!" msg
      | Stack_overflow ->
        printf "error: interpreter stack overflow; too many recursive function calls\n%!"
    end;
    repl ()
;;
(* Entry point: with a file argument, evaluate the file's whole contents
   as one expression and print the result; with no argument, start the
   interactive REPL. *)
if Array.length Sys.argv >= 2
then begin
  (* execute expression in file *)
  let file_name = Array.get Sys.argv 1 in
  let ch = open_in file_name in
  (* slurp the entire file; the channel is closed before evaluation *)
  let contents = really_input_string ch (in_channel_length ch) in
  close_in ch;
  let e = parse contents in
  printf "%s\n%!" (string_of_expr (eval e))
end
else begin
  (* repl mode *)
  printf "Welcome to lambda+! Built on: %s\n%!" Build_metadata.date;
  LNoise.history_set ~max_length:100 |> ignore;
  repl ()
end
repl mode | open Ast
open Eval
open Format
let rec repl () =
match LNoise.linenoise "> " with
| None -> ()
| Some l -> begin
LNoise.history_add l |> ignore;
try
let e = parse l in
printf "<== %s\n%!" (string_of_expr e);
let v = eval e in
printf "==> %s\n%!" (string_of_expr v)
with
| Parsing.Parse_error -> printf "parse error\n%!"
| Stuck msg -> printf "error: %s\n%!" msg
| Stack_overflow ->
printf "error: interpreter stack overflow; too many recursive function calls\n%!"
end;
repl ()
;;
(* Entry point: with a command-line argument, evaluate that file's
   contents once and print the result; otherwise start the REPL. *)
if Array.length Sys.argv >= 2
then begin
  (* execute expression in file *)
  let file_name = Array.get Sys.argv 1 in
  let ch = open_in file_name in
  let contents = really_input_string ch (in_channel_length ch) in
  close_in ch;
  let e = parse contents in
  printf "%s\n%!" (string_of_expr (eval e))
end
else begin
  (* repl mode *)
  printf "Welcome to lambda+! Built on: %s\n%!" Build_metadata.date;
  LNoise.history_set ~max_length:100 |> ignore;
  repl ()
end |
5478a0635788fdcecad738346bf4c1caf1b031054c9b0ce6e9e99cfe329f0bc0 | cbaggers/rtg-math | docs.lisp | (in-package :rtg-math.base-matrices)
;;----------------------------------------------------------------
;; Attach the user-facing docstring for the m! matrix constructor via the
;; `docs` library. This form records documentation only; the function
;; itself is defined elsewhere.
(docs:define-docs
  (defun m!
    "
Creates a new `mat2`, `mat3` or `mat4` based on the number of
components provided.
"))
| null | https://raw.githubusercontent.com/cbaggers/rtg-math/29fc5b3d0028a4a11a82355ecc8cca62662c69e0/matrices/base/docs.lisp | lisp | ---------------------------------------------------------------- | (in-package :rtg-math.base-matrices)
(docs:define-docs
(defun m!
"
Creates a new `mat2`, `mat3` or `mat4` based on the number of
components provided.
"))
|
f4c70dbd19e328ba0e17d380fcc0821fcfabc9974909dfdceb7f8770a6c17447 | sol/tinc | RunSpec.hs | module RunSpec (spec) where
import Helper
import System.Exit
import Run
-- | Hspec suite for 'callPlugin': the spawned process's exit status must
-- be re-raised as an 'ExitCode' exception ('ExitSuccess' for status 0,
-- 'ExitFailure' otherwise).
spec :: Spec
spec = do
  describe "callPlugin" $ do
    it "propagates success" $ do
      callPlugin "true" [] `shouldThrow` (== ExitSuccess)
    it "propagates error" $ do
      callPlugin "false" [] `shouldThrow` (== ExitFailure 1)
| null | https://raw.githubusercontent.com/sol/tinc/427df659df20d548bbe4c7bd0ea76cc13de40238/test/RunSpec.hs | haskell | module RunSpec (spec) where
import Helper
import System.Exit
import Run
-- | Hspec suite for 'callPlugin': the spawned process's exit status must
-- be re-raised as an 'ExitCode' exception ('ExitSuccess' for status 0,
-- 'ExitFailure' otherwise).
spec :: Spec
spec = do
  describe "callPlugin" $ do
    it "propagates success" $ do
      callPlugin "true" [] `shouldThrow` (== ExitSuccess)
    it "propagates error" $ do
      callPlugin "false" [] `shouldThrow` (== ExitFailure 1)
| |
c852435b4b176a4bdf6a60ee17ef275550d20c98f680f13cd9226626330e9809 | sogaiu/alc.x-as-tests | runner.clj | (ns alc.x-as-tests.impl.runner
(:require
[alc.x-as-tests.impl.paths :as paths]
[alc.x-as-tests.impl.rewrite :as rewrite]
[clojure.java.io :as cji]
[clojure.java.shell :as cjs]
[clojure.string :as cs]))
(defn enum-src-files-in-dir
  "Walk `dir` recursively and return the absolute paths of every regular
  file whose extension is a member of the set `exts`."
  [dir exts]
  (->> (file-seq (java.io.File. dir))
       (filter (fn [f] (.isFile f)))
       (map paths/as-abspath)
       (filter (fn [path] (paths/has-filext? path exts)))))
(comment
(set
(enum-src-files-in-dir
(paths/as-abspath (System/getProperty "user.dir")
"src")
#{".clj"}))
#_ (set
(map (fn [path]
(paths/as-abspath (System/getProperty "user.dir")
"src" "alc" "x_as_tests"
path))
["impl/ast.clj"
"impl/ex.clj"
"impl/paths.clj"
"impl/rewrite.clj"
"impl/runner.clj"
"impl/utils.clj"
"impl/validate.clj"
"main.clj"]))
,)
(defn all-src-files
  "Expand `paths` (a mix of files and directories) into a vector of
  source-file paths; directories are searched recursively for files whose
  extension is in `exts`. Throws when an entry is neither file nor
  directory."
  [paths exts]
  (letfn [(expand [path]
            (let [f (cji/file path)]
              (cond
                (.isFile f)      [path]
                (.isDirectory f) (enum-src-files-in-dir path exts)
                :else (throw (Exception. (str "not file or dir: " path))))))]
    (vec (mapcat expand paths))))
(comment
(set
(all-src-files
[(paths/as-abspath (System/getProperty "user.dir")
"src" "alc" "x_as_tests" "main.clj")
(paths/as-abspath (System/getProperty "user.dir")
"src" "alc" "x_as_tests" "impl")]
#{".clj"}))
#_ (->> (cji/file (System/getProperty "user.dir")
"src")
file-seq
(filter #(.isFile %))
(map #(.getAbsolutePath %))
set)
,)
(defn gen-test-path
  "Map `full-path` (which lives under `root-path`) to the corresponding
  location under `test-root`, preserving the relative directory layout."
  [full-path root-path test-root]
  (let [->nio    (fn [p] (java.nio.file.Paths/get p (into-array String [])))
        relative (.relativize (->nio root-path) (->nio full-path))]
    (paths/as-abspath test-root (str relative))))
(comment
(let [proj-root (System/getProperty "user.dir")]
(gen-test-path (paths/as-abspath proj-root
"src" "alc" "x_as_tests"
"main.clj")
(paths/as-abspath proj-root
"src")
(System/getProperty "java.io.tmpdir")))
#_ (paths/as-abspath (System/getProperty "java.io.tmpdir")
"alc" "x_as_tests"
"main.clj")
,)
;; XXX: generates test file paths and populates the corr files
(defn gen-tests!
  "For each source file in `paths`, rewrite its comment blocks into tests
  and write the result under `test-root`, mirroring each file's location
  relative to `relative-to`. Returns the generated test-file paths."
  [paths relative-to test-root]
  (->> paths
       (map (fn [path]
              (let [test-path (gen-test-path path relative-to test-root)
                    _ (cji/make-parents test-path) ; ensure target dir exists
                    src (slurp path)]
                (spit test-path
                      (rewrite/rewrite-with-tests src))
                test-path)))
       doall)) ; realize eagerly so every file is written before returning
(comment
;; XXX: debris ends up in /tmp -- clean up?
(let [proj-root (System/getProperty "user.dir")]
(gen-tests!
[(paths/as-abspath proj-root
"src" "alc" "x_as_tests"
"main.clj")]
(paths/as-abspath proj-root
"src")
(paths/as-abspath (System/getProperty "java.io.tmpdir")
"alc.x-as-tests" "src")))
#_ [(paths/as-abspath (System/getProperty "java.io.tmpdir")
"alc.x-as-tests"
"src" "alc" "x_as_tests"
"main.clj")]
,)
(comment
(defmacro with-out-both
[& body]
`(let [s# (new java.io.StringWriter)]
(binding [*out* s#]
(let [v# ~@body]
(vector (str s#)
v#)))))
(defmacro with-test-out-both
[& body]
`(let [s# (new java.io.StringWriter)]
(binding [clojure.test/*test-out* s#]
(let [v# ~@body]
(vector (str s#)
v#)))))
,)
;; XXX: break this up?
(defn gen-run-schedule
  "Build the source text of a driver script that loads every file in
  `paths` with clojure.test output captured, then prints per-file and
  aggregate test/pass/fail/error summaries. Returns the script as one
  string; the test-file paths are spliced in as a literal vector."
  [paths]
  (cs/join "\n"
           ["(require 'clojure.test)"
            ""
            "(defmacro with-test-out-both"
            " [& body]"
            " `(let [s# (new java.io.StringWriter)]"
            " (binding [clojure.test/*test-out* s#]"
            " (let [v# ~@body]"
            " (vector (str s#)"
            " v#)))))"
            ""
            "(def summary (atom {}))"
            ""
            "(def line"
            " \"-----------------------------------------------\")"
            ""
            "(binding [clojure.test/*test-out* *out*]"
            ;; splice the literal vector of test-file paths into the doseq
            (str " (doseq [path " (with-out-str (prn (vec paths))) "]")
            " (swap! summary conj"
            " [path"
            " (with-test-out-both"
            " (load-file path))])))"
            ""
            "(println)"
            ""
            "(doseq [[path [output report]] @summary]"
            " (println \"path:\" path)"
            " (when (not= \"nil\\n\" output)"
            " (println line)"
            " (println \"output:\")"
            " (println output))"
            " (println line)"
            " (println \"sub:\" report)"
            " (println line)"
            " (println))"
            ""
            "(println line)"
            "(println \"total:\""
            " (let [[test pass fail error]"
            " (reduce (fn [[t-test t-pass t-fail t-error]"
            " {:keys [:test :pass :fail :error]}]"
            " [(+ t-test test)"
            " (+ t-pass pass)"
            " (+ t-fail fail)"
            " (+ t-error error)])"
            " [0 0 0 0]"
            " (map (fn [[_output report]]"
            " report)"
            " (vals @summary)))]"
            " {:test test"
            " :pass pass"
            " :fail fail"
            " :error error}))"
            ""]))
(comment
(print
(gen-run-schedule [(paths/as-abspath (System/getProperty "user.dir")
"fin.clj")
(paths/as-abspath (System/getProperty "user.dir")
"fun.clj")])
,)
,)
(defn do-tests!
  "Generate tests from `paths` (defaulting to ./src), write them under a
  fresh temp directory, and run them via an external Clojure executable.
  Options:
    :exe-name  clojure binary name (or $ALC_XAT_CLJ_NAME)
    :exts      source-file extensions to include
    :paths     files/dirs to scan
    :temp-root where generated tests and the runner script are written
    :verbose   echo the shell command
  Prints the child process's output; exits the JVM with status 1 when no
  clojure executable can be found on PATH."
  [{:keys [:exe-name
           :exts
           :paths
           :temp-root
           :verbose]
    :or {exe-name (or (System/getenv "ALC_XAT_CLJ_NAME")
                      "clojure") ;; don't include file extension
         exts #{".clj" ".cljc"}
         paths [(paths/as-abspath (System/getProperty "user.dir")
                                  "src")]
         temp-root (paths/as-abspath (System/getProperty "java.io.tmpdir")
                                     (str "alc.x-as-tests-"
                                          (System/currentTimeMillis)))}}]
  (if-let [clojure-bin (paths/which exe-name)]
    (let [paths (all-src-files paths exts)
          proj-root (System/getProperty "user.dir")
          test-paths (gen-tests! paths proj-root temp-root)
          runner-path (paths/as-abspath temp-root
                                        "alc.xat.run-tests.clj")
          _ (spit runner-path (gen-run-schedule test-paths))
          cmd [clojure-bin "-i" runner-path]
          {:keys [:err :exit :out]}
          (cjs/with-sh-dir proj-root
            (apply cjs/sh cmd))]
      (when verbose
        (println cmd))
      ;; non-zero exit from the child: surface the command and stderr
      (when (not= 0 exit)
        (println cmd)
        (println " exit:" exit)
        (println " err:" err))
      (println out))
    (do
      (println "Failed to find clojure executable:" exe-name)
      (flush)
      (System/exit 1))))
(comment
(do-tests! {:verbose true})
(let [impl-dir (paths/as-abspath (System/getProperty "user.dir")
"src" "alc" "x_as_tests" "impl")]
(do-tests! {:verbose true
:paths [(paths/as-abspath impl-dir "ast.clj")
(paths/as-abspath impl-dir "rewrite.clj")]}))
,)
| null | https://raw.githubusercontent.com/sogaiu/alc.x-as-tests/86cfc58393f4d81e1a74d58e85f89717afb3d9b0/src/alc/x_as_tests/impl/runner.clj | clojure |
XXX: generates test file paths and populates the corr files
XXX: debris ends up in /tmp -- clean up?
XXX: break this up?
don't include file extension | (ns alc.x-as-tests.impl.runner
(:require
[alc.x-as-tests.impl.paths :as paths]
[alc.x-as-tests.impl.rewrite :as rewrite]
[clojure.java.io :as cji]
[clojure.java.shell :as cjs]
[clojure.string :as cs]))
(defn enum-src-files-in-dir
[dir exts]
(some->> (file-seq (java.io.File. dir))
(keep (fn [file]
(when (.isFile file)
(let [path (paths/as-abspath file)]
(when (paths/has-filext? path exts)
path)))))))
(comment
(set
(enum-src-files-in-dir
(paths/as-abspath (System/getProperty "user.dir")
"src")
#{".clj"}))
#_ (set
(map (fn [path]
(paths/as-abspath (System/getProperty "user.dir")
"src" "alc" "x_as_tests"
path))
["impl/ast.clj"
"impl/ex.clj"
"impl/paths.clj"
"impl/rewrite.clj"
"impl/runner.clj"
"impl/utils.clj"
"impl/validate.clj"
"main.clj"]))
,)
(defn all-src-files
[paths exts]
(reduce (fn [acc path]
(let [f (cji/file path)]
(cond
(.isFile f)
(conj acc path)
(.isDirectory f)
(into acc (vec (enum-src-files-in-dir path exts)))
:else
(throw (Exception. (str "not file or dir: " path))))))
[]
paths))
(comment
(set
(all-src-files
[(paths/as-abspath (System/getProperty "user.dir")
"src" "alc" "x_as_tests" "main.clj")
(paths/as-abspath (System/getProperty "user.dir")
"src" "alc" "x_as_tests" "impl")]
#{".clj"}))
#_ (->> (cji/file (System/getProperty "user.dir")
"src")
file-seq
(filter #(.isFile %))
(map #(.getAbsolutePath %))
set)
,)
(defn gen-test-path
[full-path root-path test-root]
(let [nio-full-path
(java.nio.file.Paths/get full-path
(into-array String []))
nio-root-path
(java.nio.file.Paths/get root-path
(into-array String []))]
(paths/as-abspath test-root
(-> (.relativize nio-root-path
nio-full-path)
.toString))))
(comment
(let [proj-root (System/getProperty "user.dir")]
(gen-test-path (paths/as-abspath proj-root
"src" "alc" "x_as_tests"
"main.clj")
(paths/as-abspath proj-root
"src")
(System/getProperty "java.io.tmpdir")))
#_ (paths/as-abspath (System/getProperty "java.io.tmpdir")
"alc" "x_as_tests"
"main.clj")
,)
(defn gen-tests!
[paths relative-to test-root]
(->> paths
(map (fn [path]
(let [test-path (gen-test-path path relative-to test-root)
_ (cji/make-parents test-path)
src (slurp path)]
(spit test-path
(rewrite/rewrite-with-tests src))
test-path)))
doall))
(comment
(let [proj-root (System/getProperty "user.dir")]
(gen-tests!
[(paths/as-abspath proj-root
"src" "alc" "x_as_tests"
"main.clj")]
(paths/as-abspath proj-root
"src")
(paths/as-abspath (System/getProperty "java.io.tmpdir")
"alc.x-as-tests" "src")))
#_ [(paths/as-abspath (System/getProperty "java.io.tmpdir")
"alc.x-as-tests"
"src" "alc" "x_as_tests"
"main.clj")]
,)
(comment
(defmacro with-out-both
[& body]
`(let [s# (new java.io.StringWriter)]
(binding [*out* s#]
(let [v# ~@body]
(vector (str s#)
v#)))))
(defmacro with-test-out-both
[& body]
`(let [s# (new java.io.StringWriter)]
(binding [clojure.test/*test-out* s#]
(let [v# ~@body]
(vector (str s#)
v#)))))
,)
(defn gen-run-schedule
[paths]
(cs/join "\n"
["(require 'clojure.test)"
""
"(defmacro with-test-out-both"
" [& body]"
" `(let [s# (new java.io.StringWriter)]"
" (binding [clojure.test/*test-out* s#]"
" (let [v# ~@body]"
" (vector (str s#)"
" v#)))))"
""
"(def summary (atom {}))"
""
"(def line"
" \"-----------------------------------------------\")"
""
"(binding [clojure.test/*test-out* *out*]"
(str " (doseq [path " (with-out-str (prn (vec paths))) "]")
" (swap! summary conj"
" [path"
" (with-test-out-both"
" (load-file path))])))"
""
"(println)"
""
"(doseq [[path [output report]] @summary]"
" (println \"path:\" path)"
" (when (not= \"nil\\n\" output)"
" (println line)"
" (println \"output:\")"
" (println output))"
" (println line)"
" (println \"sub:\" report)"
" (println line)"
" (println))"
""
"(println line)"
"(println \"total:\""
" (let [[test pass fail error]"
" (reduce (fn [[t-test t-pass t-fail t-error]"
" {:keys [:test :pass :fail :error]}]"
" [(+ t-test test)"
" (+ t-pass pass)"
" (+ t-fail fail)"
" (+ t-error error)])"
" [0 0 0 0]"
" (map (fn [[_output report]]"
" report)"
" (vals @summary)))]"
" {:test test"
" :pass pass"
" :fail fail"
" :error error}))"
""]))
(comment
(print
(gen-run-schedule [(paths/as-abspath (System/getProperty "user.dir")
"fin.clj")
(paths/as-abspath (System/getProperty "user.dir")
"fun.clj")])
,)
,)
(defn do-tests!
[{:keys [:exe-name
:exts
:paths
:temp-root
:verbose]
:or {exe-name (or (System/getenv "ALC_XAT_CLJ_NAME")
exts #{".clj" ".cljc"}
paths [(paths/as-abspath (System/getProperty "user.dir")
"src")]
temp-root (paths/as-abspath (System/getProperty "java.io.tmpdir")
(str "alc.x-as-tests-"
(System/currentTimeMillis)))}}]
(if-let [clojure-bin (paths/which exe-name)]
(let [paths (all-src-files paths exts)
proj-root (System/getProperty "user.dir")
test-paths (gen-tests! paths proj-root temp-root)
runner-path (paths/as-abspath temp-root
"alc.xat.run-tests.clj")
_ (spit runner-path (gen-run-schedule test-paths))
cmd [clojure-bin "-i" runner-path]
{:keys [:err :exit :out]}
(cjs/with-sh-dir proj-root
(apply cjs/sh cmd))]
(when verbose
(println cmd))
(when (not= 0 exit)
(println cmd)
(println " exit:" exit)
(println " err:" err))
(println out))
(do
(println "Failed to find clojure executable:" exe-name)
(flush)
(System/exit 1))))
(comment
(do-tests! {:verbose true})
(let [impl-dir (paths/as-abspath (System/getProperty "user.dir")
"src" "alc" "x_as_tests" "impl")]
(do-tests! {:verbose true
:paths [(paths/as-abspath impl-dir "ast.clj")
(paths/as-abspath impl-dir "rewrite.clj")]}))
,)
|
1642d0c401d933d97085b8772ec383098b45b52a7ed898d2832585f3a47067df | sighingnow/mxnet-haskell | Base.hs | -----------------------------------------------------------
-- |
-- module: MXNet.Core.Base
copyright : ( c ) 2016
license : MIT
-- maintainer:
--
Interfaces in core module of MXNet .
--
module MXNet.Core.Base
( -- * Necessary raw functions
mxGetLastError
, mxListAllOpNames
* NDArray
, NDArray
, waitAll
, makeEmptyNDArray
, makeNDArray
, ndshape
, ndsize
, context
, at
, items
, slice
, waitToRead
, onehotEncode
, zeros
, ones
, full
, array
-- * Symbol
, Symbol
, variable
, getName
, getAttr
, setAttr
, infershape
, grad
, bind
, bind'
, listInputs
, listOutputs
, listAuxiliaries
-- * Executor
, Executor
, makeExecutor
, forward
, backward
, getOutputs
* DType
, module MXNet.Core.Base.DType
-- * Heterogeneous Dictionary.
, module MXNet.Core.Base.HMap
)where
import MXNet.Core.Base.DType
import MXNet.Core.Base.Executor
import MXNet.Core.Base.HMap
import MXNet.Core.Base.NDArray
import MXNet.Core.Base.Symbol
import MXNet.Core.Base.Internal
| null | https://raw.githubusercontent.com/sighingnow/mxnet-haskell/b8e8c6834519df512533094122ee223fd9fb91d8/mxnet/src/MXNet/Core/Base.hs | haskell | ---------------------------------------------------------
|
module: MXNet.Core.Base
maintainer:
* Necessary raw functions
* Symbol
* Executor
* Heterogeneous Dictionary. | copyright : ( c ) 2016
license : MIT
Interfaces in core module of MXNet .
module MXNet.Core.Base
mxGetLastError
, mxListAllOpNames
* NDArray
, NDArray
, waitAll
, makeEmptyNDArray
, makeNDArray
, ndshape
, ndsize
, context
, at
, items
, slice
, waitToRead
, onehotEncode
, zeros
, ones
, full
, array
, Symbol
, variable
, getName
, getAttr
, setAttr
, infershape
, grad
, bind
, bind'
, listInputs
, listOutputs
, listAuxiliaries
, Executor
, makeExecutor
, forward
, backward
, getOutputs
* DType
, module MXNet.Core.Base.DType
, module MXNet.Core.Base.HMap
)where
import MXNet.Core.Base.DType
import MXNet.Core.Base.Executor
import MXNet.Core.Base.HMap
import MXNet.Core.Base.NDArray
import MXNet.Core.Base.Symbol
import MXNet.Core.Base.Internal
|
0bee491cdd70fb42d43b6d9e5035e52c9f13668de4f3a4ec1ee5320d90fd316f | whamtet/Excel-REPL | client.clj | (ns clr-http.lite.client
"Batteries-included HTTP client."
(:require [clojure.string :as str]
[clojure.clr.io :as io]
[clr-http.lite.core :as core]
[clr-http.lite.util :as util])
(:import
System.Text.Encoding
System.Text.UTF8Encoding
)
(:refer-clojure :exclude (get)))
;; Lookup table from CLR encoding name (e.g. "utf-8") to its
;; System.Text.Encoding instance, covering every encoding the runtime
;; registers (ClojureCLR interop).
(def str->encoding
  (into {}
        (for [encoding (Encoding/GetEncodings)]
          [(.Name encoding) (.GetEncoding encoding)])))
(defn update
  "Replace the value at key `k` in map `m` with (apply f (get m k) args).
  Local forerunner of clojure.core/update; intentionally shadows it."
  [m k f & args]
  (let [new-val (apply f (clojure.core/get m k) args)]
    (assoc m k new-val)))
;; Parse a URL string into the Ring-style request-map fields this client
;; uses, via the CLR System.Uri class.
(defn parse-url [url]
  (let [uri (Uri. url)]
    {:scheme (-> uri .Scheme keyword)
     :server-name (.Host uri)
     :server-port (.Port uri)
     :uri (.LocalPath uri)
     :user-info (.UserInfo uri)
     ;; .Query includes the leading "?", so drop it; nil when absent
     :query-string (let [q (.Query uri)]
                     (if-not (empty? q) (.Substring q 1)))}))
(def unexceptional-status?
  "HTTP status codes that do not trigger an exception: 200-207 success
  codes plus the 300-303 and 307 redirect codes."
  (into #{} (concat (range 200 208) (range 300 304) [307])))

(defn wrap-exceptions
  "Middleware: throw when the response status is exceptional, unless the
  request sets :throw-exceptions to false. The thrown Exception's message
  is the printed response map."
  [client]
  (fn [req]
    (let [resp   (client req)
          throw? (clojure.core/get req :throw-exceptions true)]
      (if (and throw? (not (unexceptional-status? (:status resp))))
        (throw (Exception. (pr-str resp)))
        resp))))
(declare wrap-redirects)

(defn follow-redirect
  "Re-issue `req` against the Location header of `resp`, keeping redirect
  handling active for the new request."
  [client req resp]
  (let [location (get-in resp [:headers "location"])]
    ((wrap-redirects client) (assoc req :url location))))

(defn wrap-redirects
  "Middleware: follow 301/302/307 responses for GET and HEAD requests,
  and 303 for HEAD (re-issued as GET). Set :follow-redirects to false on
  the request to disable."
  [client]
  (fn [{:keys [request-method follow-redirects] :as req}]
    (let [resp   (client req)
          status (:status resp)]
      (if (= false follow-redirects)
        resp
        (case status
          (301 302 307)
          (if (#{:get :head} request-method)
            (follow-redirect client req resp)
            resp)

          303
          (if (= :head request-method)
            (follow-redirect client (assoc req :request-method :get) resp)
            resp)

          resp)))))
;; Middleware: advertise gzip/deflate support (unless the caller already
;; set Accept-Encoding) and transparently decompress the response body
;; according to the Content-Encoding response header.
(defn wrap-decompression [client]
  (fn [req]
    (if (get-in req [:headers "Accept-Encoding"])
      (client req) ; caller is managing encodings itself
      (let [req-c (update req :headers assoc "Accept-Encoding" "gzip, deflate")
            resp-c (client req-c)]
        ;; header lookup tolerates either capitalization
        (case (or (get-in resp-c [:headers "Content-Encoding"])
                  (get-in resp-c [:headers "content-encoding"]))
          "gzip" (update resp-c :body util/gunzip)
          "deflate" (update resp-c :body util/inflate)
          resp-c)))))
;; Middleware: coerce the byte-array response :body according to the
;; request's :as option (:stream, :byte-array, :auto, a charset name as a
;; string, or default UTF-8 string).
(defn wrap-output-coercion [client]
  (fn [{:keys [as] :as req}]
    (let [{:keys [body] :as resp} (client req)]
      (if body
        (cond
          (keyword? as)
          (condp = as
            ;; Don't do anything for streams
            :stream resp
            ;; Don't do anything when it's a byte-array
            :byte-array resp
            ;; Automatically determine response type
            :auto
            (assoc resp
              :body
              (let [typestring (get-in resp [:headers "content-type"])]
                (cond
                  (.startsWith (str typestring) "text/")
                  (if-let [charset (second (re-find #"charset=(.*)"
                                                    (str typestring)))]
                    ;; NOTE(review): (str->encoding charset UTF8Encoding)
                    ;; returns an Encoding *instance* when the charset is
                    ;; known (the map holds instances) but the UTF8Encoding
                    ;; *type* only as the lookup default;
                    ;; Activator/CreateInstance expects a Type, so the
                    ;; known-charset branch looks suspect — verify on CLR.
                    (.GetString (Activator/CreateInstance (str->encoding charset UTF8Encoding)) body)
                    (util/utf8-string body))
                  :else
                  (util/utf8-string body))))
            ;; No :as matches found
            (update-in resp [:body] util/utf8-string))
          ;; Try the charset given if a string is specified
          (string? as)
          (update-in resp [:body] #(.GetString (Activator/CreateInstance (str->encoding as UTF8Encoding)) %))
          ;; Return a regular UTF-8 string body
          :else
          (update-in resp [:body] util/utf8-string))
        resp))))
;; Middleware: when :body is a string, encode it to bytes using
;; :body-encoding (default UTF-8) and record :character-encoding on the
;; request.
(defn wrap-input-coercion [client]
  (fn [{:keys [body body-encoding length] :as req}]
    ;; NOTE(review): `length` is destructured but never used here.
    (let [
          encoding (str->encoding body-encoding UTF8Encoding)
          ]
      ;; NOTE(review): as in wrap-output-coercion, Activator/CreateInstance
      ;; is applied to a map lookup that yields an Encoding *instance* for
      ;; known names and only the UTF8Encoding *type* as default — verify
      ;; against the CLR Activator API.
      (if body
        (cond
          (string? body)
          (client (assoc req
                    :body (.GetBytes (Activator/CreateInstance encoding) body)
                    :character-encoding (or body-encoding "UTF-8")))
          :else
          (client req))
        (client req)))))
(defn content-type-value
  "Expand a keyword content type (e.g. :json) to its MIME string
  (\"application/json\"); strings pass through unchanged."
  [type]
  (if-not (keyword? type)
    type
    (str "application/" (name type))))
(defn wrap-content-type
  "Middleware: normalize a keyword :content-type to its MIME string form
  before handing the request on."
  [client]
  (fn [{:keys [content-type] :as req}]
    (if-not content-type
      (client req)
      (client (update-in req [:content-type] content-type-value)))))
(defn wrap-accept
  "Middleware: translate an :accept option (keyword or string) into the
  Accept header."
  [client]
  (fn [{:keys [accept] :as req}]
    (if-not accept
      (client req)
      (let [req' (-> req
                     (dissoc :accept)
                     (assoc-in [:headers "Accept"]
                               (content-type-value accept)))]
        (client req')))))
(defn accept-encoding-value
  "Render a sequence of encoding keywords as a comma-separated
  Accept-Encoding header value."
  [accept-encoding]
  (->> accept-encoding (map name) (str/join ", ")))
(defn wrap-accept-encoding
  "Middleware: turn an :accept-encoding seq of keywords into the
  Accept-Encoding request header."
  [client]
  (fn [{:keys [accept-encoding] :as req}]
    (if-not accept-encoding
      (client req)
      (client (-> req
                  (dissoc :accept-encoding)
                  (assoc-in [:headers "Accept-Encoding"]
                            (accept-encoding-value accept-encoding)))))))
;; URL-encode `params` into a query string. A sequential value repeats
;; its key once per element, e.g. {:a [1 2]} => "a=1&a=2".
(defn generate-query-string [params]
  (str/join "&"
            (mapcat (fn [[k v]]
                      (if (sequential? v)
                        (map #(str (util/url-encode (name %1))
                                   "="
                                   (util/url-encode (str %2)))
                             (repeat k) v)
                        [(str (util/url-encode (name k))
                              "="
                              (util/url-encode (str v)))]))
                    params)))
(defn wrap-query-params
  "Middleware: encode a :query-params map into the :query-string of the
  request."
  [client]
  (fn [{:keys [query-params] :as req}]
    (if-not query-params
      (client req)
      (client (-> req
                  (dissoc :query-params)
                  (assoc :query-string
                         (generate-query-string query-params)))))))
;; Build an HTTP Basic Authorization header value ("Basic <base64>") from
;; either a "user:pass" string or a [user pass] pair.
(defn basic-auth-value [basic-auth]
  (let [basic-auth (if (string? basic-auth)
                     basic-auth
                     (str (first basic-auth) ":" (second basic-auth)))]
    (str "Basic " (util/base64-encode (util/utf8-bytes basic-auth)))))
(defn wrap-basic-auth
  "Middleware: turn a :basic-auth option (string or [user pass]) into the
  Authorization request header."
  [client]
  (fn [req]
    (let [basic-auth (:basic-auth req)]
      (if-not basic-auth
        (client req)
        (client (-> req
                    (dissoc :basic-auth)
                    (assoc-in [:headers "Authorization"]
                              (basic-auth-value basic-auth))))))))
(defn parse-user-info
  "Split a \"user:password\" string into [user password]; nil stays nil."
  [user-info]
  (some-> user-info (str/split #":")))
(defn wrap-user-info
  "Middleware: convert a :user-info \"user:pass\" string into a
  :basic-auth [user password] pair."
  [client]
  (fn [req]
    (let [[user password :as creds] (parse-user-info (:user-info req))]
      (client (if creds
                (assoc req :basic-auth [user password])
                req)))))
(defn wrap-method
  "Middleware: rename the :method option to the Ring-style
  :request-method key."
  [client]
  (fn [req]
    (let [m (:method req)]
      (client (if m
                (-> req (dissoc :method) (assoc :request-method m))
                req)))))
;; Middleware: for POST requests carrying :form-params, url-encode them
;; into the body and set the content type to
;; application/x-www-form-urlencoded. Other methods pass through.
(defn wrap-form-params [client]
  (fn [{:keys [form-params request-method] :as req}]
    (if (and form-params (= :post request-method))
      (client (-> req
                  (dissoc :form-params)
                  (assoc :content-type
                         (content-type-value
                          :x-www-form-urlencoded)
                         :body (generate-query-string form-params))))
      (client req))))
(defn wrap-url
  "Middleware: explode a :url option into the individual Ring-style
  request fields (:scheme, :server-name, :server-port, ...)."
  [client]
  (fn [req]
    (let [url (:url req)]
      (if url
        (client (merge (dissoc req :url) (parse-url url)))
        (client req)))))
#_(defn wrap-unknown-host [client]
(fn [{:keys [ignore-unknown-host?] :as req}]
(try
(client req)
(catch UnknownHostException e
(if ignore-unknown-host?
nil
(throw e))))))
(defn wrap-request
  "Returns a batteries-included HTTP request function corresponding to
  the given core client. See client/client. Middleware listed first is
  applied last, so e.g. wrap-query-params sees the request after the
  inner layers have run."
  [request]
  (-> request
      wrap-query-params
      wrap-user-info
      wrap-url
      wrap-redirects
      wrap-decompression
      wrap-input-coercion
      wrap-output-coercion
      wrap-exceptions
      wrap-basic-auth
      wrap-accept
      wrap-accept-encoding
      wrap-content-type
      wrap-form-params
      wrap-method
      ;; wrap-unknown-host is currently disabled (see commented defn above)
      ;wrap-unknown-host
      ))
(def #^{:doc
        "Executes the HTTP request corresponding to the given map and returns
   the response map corresponding to the resulting HTTP response.
   In addition to the standard Ring request keys, the following keys are also
   recognized:
   * :url
   * :method
   * :query-params
   * :basic-auth
   * :content-type
   * :accept
   * :accept-encoding
   * :as
   The following additional behaviors are also automatically enabled:
   * Exceptions are thrown for status codes other than 200-207, 300-303, or 307
   * Gzip and deflate responses are accepted and decompressed
   * Input and output bodies are coerced as required and indicated by the :as
   option."}
  request
  (wrap-request #'core/request))
(defn get
  "Like #'request with :method :get and the given :url."
  [url & [opts]]
  (request (assoc opts :method :get :url url)))

(defn head
  "Like #'request with :method :head and the given :url."
  [url & [opts]]
  (request (assoc opts :method :head :url url)))

(defn post
  "Like #'request with :method :post and the given :url."
  [url & [opts]]
  (request (assoc opts :method :post :url url)))

(defn put
  "Like #'request with :method :put and the given :url."
  [url & [opts]]
  (request (assoc opts :method :put :url url)))

(defn delete
  "Like #'request with :method :delete and the given :url."
  [url & [opts]]
  (request (assoc opts :method :delete :url url)))
(defmacro with-connection-pool
  "Compatibility no-op: clj-http manages a connection pool here, but this
  lite client has none, so the body simply runs unchanged."
  [_opts & body]
  `(do ~@body))
| null | https://raw.githubusercontent.com/whamtet/Excel-REPL/cae2ef8e423e2f2723c60e4758785973f06c6ba4/Excel-REPL/nrepl/clr_http/lite/client.clj | clojure | Don't do anything for streams
Don't do anything when it's a byte-array
Automatically determine response type
No :as matches found
Try the charset given if a string is specified
wrap-unknown-host | (ns clr-http.lite.client
"Batteries-included HTTP client."
(:require [clojure.string :as str]
[clojure.clr.io :as io]
[clr-http.lite.core :as core]
[clr-http.lite.util :as util])
(:import
System.Text.Encoding
System.Text.UTF8Encoding
)
(:refer-clojure :exclude (get)))
(def str->encoding
(into {}
(for [encoding (Encoding/GetEncodings)]
[(.Name encoding) (.GetEncoding encoding)])))
(defn update [m k f & args]
(assoc m k (apply f (m k) args)))
(defn parse-url [url]
(let [uri (Uri. url)]
{:scheme (-> uri .Scheme keyword)
:server-name (.Host uri)
:server-port (.Port uri)
:uri (.LocalPath uri)
:user-info (.UserInfo uri)
:query-string (let [q (.Query uri)]
(if-not (empty? q) (.Substring q 1)))}))
(def unexceptional-status?
#{200 201 202 203 204 205 206 207 300 301 302 303 307})
(defn wrap-exceptions [client]
(fn [req]
(let [{:keys [status] :as resp} (client req)]
(if (or (not (clojure.core/get req :throw-exceptions true))
(unexceptional-status? status))
resp
(throw (Exception. (pr-str resp)))
#_(throw+ resp "clj-http: status %s" (:status %))))))
(declare wrap-redirects)
(defn follow-redirect [client req resp]
(let [url (get-in resp [:headers "location"])]
((wrap-redirects client) (assoc req :url url))))
(defn wrap-redirects [client]
(fn [{:keys [request-method follow-redirects] :as req}]
(let [{:keys [status] :as resp} (client req)]
(cond
(= false follow-redirects)
resp
(and (#{301 302 307} status) (#{:get :head} request-method))
(follow-redirect client req resp)
(and (= 303 status) (= :head request-method))
(follow-redirect client (assoc req :request-method :get) resp)
:else
resp))))
(defn wrap-decompression [client]
(fn [req]
(if (get-in req [:headers "Accept-Encoding"])
(client req)
(let [req-c (update req :headers assoc "Accept-Encoding" "gzip, deflate")
resp-c (client req-c)]
(case (or (get-in resp-c [:headers "Content-Encoding"])
(get-in resp-c [:headers "content-encoding"]))
"gzip" (update resp-c :body util/gunzip)
"deflate" (update resp-c :body util/inflate)
resp-c)))))
(defn wrap-output-coercion [client]
(fn [{:keys [as] :as req}]
(let [{:keys [body] :as resp} (client req)]
(if body
(cond
(keyword? as)
(condp = as
:stream resp
:byte-array resp
:auto
(assoc resp
:body
(let [typestring (get-in resp [:headers "content-type"])]
(cond
(.startsWith (str typestring) "text/")
(if-let [charset (second (re-find #"charset=(.*)"
(str typestring)))]
(.GetString (Activator/CreateInstance (str->encoding charset UTF8Encoding)) body)
(util/utf8-string body))
:else
(util/utf8-string body))))
(update-in resp [:body] util/utf8-string))
(string? as)
(update-in resp [:body] #(.GetString (Activator/CreateInstance (str->encoding as UTF8Encoding)) %))
Return a regular UTF-8 string body
:else
(update-in resp [:body] util/utf8-string))
resp))))
(defn wrap-input-coercion [client]
(fn [{:keys [body body-encoding length] :as req}]
(let [
encoding (str->encoding body-encoding UTF8Encoding)
]
(if body
(cond
(string? body)
(client (assoc req
:body (.GetBytes (Activator/CreateInstance encoding) body)
:character-encoding (or body-encoding "UTF-8")))
:else
(client req))
(client req)))))
(defn content-type-value [type]
(if (keyword? type)
(str "application/" (name type))
type))
(defn wrap-content-type [client]
(fn [{:keys [content-type] :as req}]
(if content-type
(client (update-in req [:content-type] content-type-value))
(client req))))
(defn wrap-accept [client]
(fn [{:keys [accept] :as req}]
(if accept
(client (-> req
(dissoc :accept)
(assoc-in [:headers "Accept"]
(content-type-value accept))))
(client req))))
(defn accept-encoding-value [accept-encoding]
(str/join ", " (map name accept-encoding)))
(defn wrap-accept-encoding [client]
(fn [{:keys [accept-encoding] :as req}]
(if accept-encoding
(client (-> req (dissoc :accept-encoding)
(assoc-in [:headers "Accept-Encoding"]
(accept-encoding-value accept-encoding))))
(client req))))
(defn generate-query-string [params]
(str/join "&"
(mapcat (fn [[k v]]
(if (sequential? v)
(map #(str (util/url-encode (name %1))
"="
(util/url-encode (str %2)))
(repeat k) v)
[(str (util/url-encode (name k))
"="
(util/url-encode (str v)))]))
params)))
(defn wrap-query-params [client]
(fn [{:keys [query-params] :as req}]
(if query-params
(client (-> req (dissoc :query-params)
(assoc :query-string
(generate-query-string query-params))))
(client req))))
(defn basic-auth-value [basic-auth]
(let [basic-auth (if (string? basic-auth)
basic-auth
(str (first basic-auth) ":" (second basic-auth)))]
(str "Basic " (util/base64-encode (util/utf8-bytes basic-auth)))))
(defn wrap-basic-auth [client]
(fn [req]
(if-let [basic-auth (:basic-auth req)]
(client (-> req
(dissoc :basic-auth)
(assoc-in [:headers "Authorization"]
(basic-auth-value basic-auth))))
(client req))))
(defn parse-user-info [user-info]
(when user-info
(str/split user-info #":")))
(defn wrap-user-info [client]
(fn [req]
(if-let [[user password] (parse-user-info (:user-info req))]
(client (assoc req :basic-auth [user password]))
(client req))))
(defn wrap-method [client]
(fn [req]
(if-let [m (:method req)]
(client (-> req
(dissoc :method)
(assoc :request-method m)))
(client req))))
(defn wrap-form-params [client]
(fn [{:keys [form-params request-method] :as req}]
(if (and form-params (= :post request-method))
(client (-> req
(dissoc :form-params)
(assoc :content-type
(content-type-value
:x-www-form-urlencoded)
:body (generate-query-string form-params))))
(client req))))
(defn wrap-url [client]
(fn [req]
(if-let [url (:url req)]
(client (-> req (dissoc :url) (merge (parse-url url))))
(client req))))
#_(defn wrap-unknown-host [client]
(fn [{:keys [ignore-unknown-host?] :as req}]
(try
(client req)
(catch UnknownHostException e
(if ignore-unknown-host?
nil
(throw e))))))
(defn wrap-request
"Returns a battaries-included HTTP request function coresponding to the given
core client. See client/client."
[request]
(-> request
wrap-query-params
wrap-user-info
wrap-url
wrap-redirects
wrap-decompression
wrap-input-coercion
wrap-output-coercion
wrap-exceptions
wrap-basic-auth
wrap-accept
wrap-accept-encoding
wrap-content-type
wrap-form-params
wrap-method
))
(def #^{:doc
"Executes the HTTP request corresponding to the given map and returns
the response map for corresponding to the resulting HTTP response.
In addition to the standard Ring request keys, the following keys are also
recognized:
* :url
* :method
* :query-params
* :basic-auth
* :content-type
* :accept
* :accept-encoding
* :as
The following additional behaviors over also automatically enabled:
* Exceptions are thrown for status codes other than 200-207, 300-303, or 307
* Gzip and deflate responses are accepted and decompressed
* Input and output bodies are coerced as required and indicated by the :as
option."}
request
(wrap-request #'core/request))
(defn get
"Like #'request, but sets the :method and :url as appropriate."
[url & [req]]
(request (merge req {:method :get :url url})))
(defn head
"Like #'request, but sets the :method and :url as appropriate."
[url & [req]]
(request (merge req {:method :head :url url})))
(defn post
"Like #'request, but sets the :method and :url as appropriate."
[url & [req]]
(request (merge req {:method :post :url url})))
(defn put
"Like #'request, but sets the :method and :url as appropriate."
[url & [req]]
(request (merge req {:method :put :url url})))
(defn delete
"Like #'request, but sets the :method and :url as appropriate."
[url & [req]]
(request (merge req {:method :delete :url url})))
(defmacro with-connection-pool
"This macro is a no-op, but left in to support backward-compatibility
with clj-http."
[opts & body]
`(do
~@body))
|
bfd03e3276f7bb1acbe40bb76a4792116f64ccf3a43df4888c4ae6c4de823b03 | mveritym/markov-bot | make_bot.clj | (ns markov-bot.make-bot
(:require [markov-bot.twitter :as twitter]
[markov-bot.generator :refer :all]
[cemerick.url :refer [url-encode]]))
(defn get-tweets-for-users [users]
(->> users
(map twitter/get-all-tweets-for-user)
(apply concat)))
(defn gen-chain-from-tweets [tweets]
(->> (map clojure.string/lower-case tweets)
(map text-to-word-chain)
(apply merge-with clojure.set/union)))
(defn gen-rand-start-phrase [tweets]
(->> tweets
shuffle
first
(#(clojure.string/split % #" "))
(take 2)
(clojure.string/join " ")
clojure.string/lower-case))
(defn reject-tweet [tweet orig-tweets]
(let [is-same (contains? (set orig-tweets) tweet)]
(if (= is-same true) (println "Removing" tweet))
is-same))
(defn make-bot [users]
(let [tweets (->> users get-tweets-for-users)
chain (->> tweets gen-chain-from-tweets)
make-text #(generate-text (gen-rand-start-phrase tweets) chain)]
(fn [num]
(loop [num-to-gen num
result []]
(let [new-tweets (repeatedly num make-text)
should-reject #(reject-tweet % tweets)
filtered (remove should-reject new-tweets)]
(if (< (count filtered) num-to-gen)
(recur (- num-to-gen (count filtered)) (concat result filtered))
(concat result filtered)))))))
| null | https://raw.githubusercontent.com/mveritym/markov-bot/138ac43d2c5b016ae1b8beeeee2dc854cd748e53/markov-bot-lambda/src/markov_bot/make_bot.clj | clojure | (ns markov-bot.make-bot
(:require [markov-bot.twitter :as twitter]
[markov-bot.generator :refer :all]
[cemerick.url :refer [url-encode]]))
(defn get-tweets-for-users [users]
(->> users
(map twitter/get-all-tweets-for-user)
(apply concat)))
(defn gen-chain-from-tweets [tweets]
(->> (map clojure.string/lower-case tweets)
(map text-to-word-chain)
(apply merge-with clojure.set/union)))
(defn gen-rand-start-phrase [tweets]
(->> tweets
shuffle
first
(#(clojure.string/split % #" "))
(take 2)
(clojure.string/join " ")
clojure.string/lower-case))
(defn reject-tweet [tweet orig-tweets]
(let [is-same (contains? (set orig-tweets) tweet)]
(if (= is-same true) (println "Removing" tweet))
is-same))
(defn make-bot [users]
(let [tweets (->> users get-tweets-for-users)
chain (->> tweets gen-chain-from-tweets)
make-text #(generate-text (gen-rand-start-phrase tweets) chain)]
(fn [num]
(loop [num-to-gen num
result []]
(let [new-tweets (repeatedly num make-text)
should-reject #(reject-tweet % tweets)
filtered (remove should-reject new-tweets)]
(if (< (count filtered) num-to-gen)
(recur (- num-to-gen (count filtered)) (concat result filtered))
(concat result filtered)))))))
| |
9715f70d2e961902fc9684cb578bdceaf8d2c62a23c38fb26b58a042e83ec9a2 | xoken/xoken-node | StoreSpec.hs | module Xoken.StoreSpec
(
) where
| null | https://raw.githubusercontent.com/xoken/xoken-node/99124fbe1b1cb9c2fc442c788c7c2bac06f5e900/node/test/Xoken/StoreSpec.hs | haskell | module Xoken.StoreSpec
(
) where
| |
69660e784c94640f8c98c64e6970eb99b6830d257ad147ebedd8cf8ebc070e7b | puppetlabs/jdbc-util | pool.clj | (ns puppetlabs.jdbc-util.pool
(:require [clojure.set :as set]
[clojure.tools.logging :as log]
[puppetlabs.i18n.core :refer [tru trs trun]]
[puppetlabs.jdbc-util.migration :as migration])
(:import com.codahale.metrics.health.HealthCheckRegistry
[com.zaxxer.hikari HikariConfig HikariDataSource]
java.io.Closeable
[java.sql SQLTransientConnectionException SQLTransientException SQLException]
javax.sql.DataSource
(java.util.concurrent ExecutionException)))
(defn add-connectivity-check-timeout-ms
[^HikariConfig config timeout]
(.addHealthCheckProperty config "connectivityCheckTimeoutMs" (str timeout))
config)
(defn- set-option
[^HikariConfig config option value]
(case option
:username (.setUsername config value)
:password (.setPassword config value)
:data-source-class-name (.setDataSourceClassName config value)
:jdbc-url (.setJdbcUrl config value)
:driver-class-name (.setDriverClassName config value)
:auto-commit (.setAutoCommit config value)
:connection-timeout (.setConnectionTimeout config value)
:connection-check-timeout (add-connectivity-check-timeout-ms config value)
:idle-timeout (.setIdleTimeout config value)
:max-lifetime (.setMaxLifetime config value)
:connection-test-query (.setConnectionTestQuery config value)
:minimum-idle (.setMinimumIdle config value)
:maximum-pool-size (.setMaximumPoolSize config value)
:metric-registry (.setMetricRegistry config value)
:health-check-registry (.setHealthCheckRegistry config value)
:pool-name (.setPoolName config value)
(throw (IllegalArgumentException. (tru "{0} is not a supported HikariCP option" (str option))))))
(defn options->hikari-config
[options]
(let [config (HikariConfig.)]
(doseq [[option value] options]
(set-option config option value))
(.validate config)
config))
(defn spec->hikari-options
[db-spec]
(-> db-spec
(set/rename-keys {:user :username
:classname :driver-class-name})
(assoc :jdbc-url (str "jdbc:"
(:subprotocol db-spec) ":"
(:subname db-spec)))
(dissoc :subprotocol :subname
:migration-password :migration-user)))
(defn select-user-configurable-hikari-options
"Given a map, return the subset of entries in the map whose keys are hikari
options that we want our users to be able to configure. This is intended to
allow users to set these fields in the database configuration section of a
service's TrapperKeeper config."
[m]
(select-keys [:connection-timeout
:connection-check-timeout
:idle-timeout
:max-lifetime
:minimum-idle
:maximum-pool-size]
m))
(defprotocol PoolStatus
(status [this] "Get a map representing the status of a connection pool.")
(init-error [this] "Return any exception raised by the init function (nil if none)."))
(defprotocol PoolLifetime
(block-until-ready [this] [this timeout-ms]
"Block execution until initialization is done, or the timeout expires (if specified).
If the timeout is specified, returns true if the execution completed within the timeperiod,
false if it didn't.")
(cancel-init [this] "Attempt to cancel the async initialization. Returns true if it was cancelled, false otherwise")
(init-complete? [this] "Returns true if the init is complete, false otherwise")
(close-after-ready [this] [this timeout-ms]
"Wait for the init routine to complete and then close the datasource. If the timeout is specified and expires
before the init is complete, cancel the init and close the datasource."))
(def replica-migrations-polling-interval-ms 1500)
(defn wait-for-migrations
"Loops until there are no uncompleted migrations from the migration-dir in the
db. We use this on pglogical replicas which will have their database
migrations replicated."
[db migration-dir]
(loop []
(when (not-empty (migration/uncompleted-migrations db migration-dir))
(Thread/sleep replica-migrations-polling-interval-ms)
(recur))))
(defn wrap-with-delayed-init
"Wraps a connection pool that loops trying to get a connection, and then runs
database migrations, then calls init-fn (with the connection as argument)
before returning any connections to the application. Accepts a timeout in ms
that's used when dereferencing the future and by the status check. The
datasource should have initialization-fail-fast set before being created or
this is pointless.
migration-opts is a map of:
:migration-dir, the path to the migratus migration directory on the classpath
:migration-dirs, the optional list of paths to any migratus migration directories
:migration-db, the connection map for the db used for migrations
:replication-mode, one of :source, :replica, or :none (the default)
If migration-opts is nil or not passed, the migration step is skipped."
([^HikariDataSource datasource init-fn timeout]
(wrap-with-delayed-init datasource nil init-fn timeout))
([^HikariDataSource datasource migration-opts init-fn timeout]
(let [init-error (atom nil)
init-exited-safely (promise)
pool-future
(future
(log/debug (trs "{0} - Starting database initialization" (.getPoolName datasource)))
(loop []
(if-let [result
(try
;; Try to get a connection to make sure the db is ready
(.close (.getConnection datasource))
(let [{:keys [migration-db migration-dir migration-dirs]} migration-opts]
;; ensure both the db and a migration directory is specified
(if (and migration-db (or migration-dir migration-dirs))
(do
(log/debug (trs "{0} - Starting database migration" (.getPoolName datasource)))
If we 're a replica then pglogical will be
;; replicating our migrations for us, so we poll until
;; the migrations have been replicated
(let [migration-dirs (if migration-dirs migration-dirs [migration-dir])]
(if (= (:replication-mode migration-opts) :replica)
(doseq [single-dir migration-dirs]
(wait-for-migrations migration-db single-dir))
(doseq [single-dir migration-dirs]
(log/info (trs "migrating from {0}" single-dir))
(migration/migrate migration-db single-dir)))))
(log/info (trs "{0} No migration path specified, skipping database migration." (.getPoolName datasource)))))
(log/debug (trs "{0} - Starting post-migration init-fn" (.getPoolName datasource)))
(init-fn {:datasource datasource})
(log/debug (trs "{0} - Finished database migration" (.getPoolName datasource)))
datasource
(catch SQLTransientException e
(log/warnf e (trs "{0} - Error while attempting to connect to database, retrying.")
(.getPoolName datasource))
nil)
(catch Exception e
(reset! init-error e)
(log/errorf e (trs "{0} - An error was encountered during database migration."
(.getPoolName datasource)))
;; return the datasource so we know we are done.
datasource))]
(do
(deliver init-exited-safely true)
result)
(recur))))]
(reify
DataSource
(getConnection [this]
(if (deref init-error)
(throw (RuntimeException. (tru "Unrecoverable error occurred during database initialization.")))
(.getConnection (or (deref pool-future timeout nil)
(throw (SQLTransientConnectionException. (tru "Timeout waiting for the database pool to become ready.")))))))
(getConnection [this username password]
(if (deref init-error)
(throw (RuntimeException. (tru "Unrecoverable error occurred during database initialization.")))
(.getConnection (or (deref pool-future timeout nil)
(throw (SQLTransientConnectionException. (tru "Timeout waiting for the database pool to become ready."))))
username
password)))
Closeable
(close [this]
(.close datasource))
PoolStatus
(status [this]
(if (realized? pool-future)
(let [connectivity-check (str (.getPoolName datasource)
".pool.ConnectivityCheck")
health-result (.runHealthCheck
(.getHealthCheckRegistry datasource)
connectivity-check)
healthy? (and (.isHealthy health-result)
(nil? @init-error))
messages (remove nil? [(some->> @init-error
(.getMessage)
(tru "Initialization resulted in an error: {0}"))
(.getMessage health-result)])]
(cond-> {:state (if healthy?
:ready
:error)}
(not healthy?) (merge {:messages messages})))
{:state :starting}))
(init-error [this]
@init-error)
PoolLifetime
(block-until-ready [this]
(log/info (trs "{0} - Blocking execution until db init has finished" (.getPoolName datasource)))
(try
(deref pool-future)
(catch ExecutionException e
(log/warn e (trs "{0} - Exception generated during init" (.getPoolName datasource))))))
(block-until-ready [this timeout-ms]
(log/info (trs "{0} - Blocking execution until db init has finished with {1} millisecond timeout "
(.getPoolName datasource) timeout-ms))
(try
(not (nil? (deref pool-future timeout-ms nil)))
(catch ExecutionException e
(log/warn e (trs "{0} - Exception generated during init" (.getPoolName datasource)))
true)))
(cancel-init [this]
(future-cancel pool-future))
(init-complete? [this]
(future-done? pool-future))
(close-after-ready [this]
(block-until-ready this)
(.close datasource))
(close-after-ready [this timeout-ms]
(when-not (block-until-ready this timeout-ms)
(log/warn (trs "{0} - Cancelling db-init due to timeout" (.getPoolName datasource)))
(cancel-init this)
; since we have cancelled the init, we need to specifically wait until the migrations have exited
; safely before closing the connection
(deref init-exited-safely timeout-ms :timeout)
(log/info (trs "{0} - Done waiting for init safe exit" (.getPoolName datasource))))
(.close datasource))))))
(defn connection-pool-with-delayed-init
"Create a connection pool that loops trying to get a connection, and then runs
init-fn (with the connection as argument) before returning any connections to
the application. Accepts a timeout in ms that's used when deferencing the
future. This overrides the value of initialization-fail-timeout to never timeout. "
([^HikariConfig config init-fn timeout]
(connection-pool-with-delayed-init config nil init-fn timeout))
([^HikariConfig config migration-options init-fn timeout]
(.setInitializationFailTimeout config -1)
(when-not (.getHealthCheckRegistry config)
(.setHealthCheckRegistry config (HealthCheckRegistry.)))
(wrap-with-delayed-init (HikariDataSource. config) migration-options init-fn timeout)))
| null | https://raw.githubusercontent.com/puppetlabs/jdbc-util/b9175b6e4899bc1d41817301abc325d83a058392/src/puppetlabs/jdbc_util/pool.clj | clojure | Try to get a connection to make sure the db is ready
ensure both the db and a migration directory is specified
replicating our migrations for us, so we poll until
the migrations have been replicated
return the datasource so we know we are done.
since we have cancelled the init, we need to specifically wait until the migrations have exited
safely before closing the connection | (ns puppetlabs.jdbc-util.pool
(:require [clojure.set :as set]
[clojure.tools.logging :as log]
[puppetlabs.i18n.core :refer [tru trs trun]]
[puppetlabs.jdbc-util.migration :as migration])
(:import com.codahale.metrics.health.HealthCheckRegistry
[com.zaxxer.hikari HikariConfig HikariDataSource]
java.io.Closeable
[java.sql SQLTransientConnectionException SQLTransientException SQLException]
javax.sql.DataSource
(java.util.concurrent ExecutionException)))
(defn add-connectivity-check-timeout-ms
[^HikariConfig config timeout]
(.addHealthCheckProperty config "connectivityCheckTimeoutMs" (str timeout))
config)
(defn- set-option
[^HikariConfig config option value]
(case option
:username (.setUsername config value)
:password (.setPassword config value)
:data-source-class-name (.setDataSourceClassName config value)
:jdbc-url (.setJdbcUrl config value)
:driver-class-name (.setDriverClassName config value)
:auto-commit (.setAutoCommit config value)
:connection-timeout (.setConnectionTimeout config value)
:connection-check-timeout (add-connectivity-check-timeout-ms config value)
:idle-timeout (.setIdleTimeout config value)
:max-lifetime (.setMaxLifetime config value)
:connection-test-query (.setConnectionTestQuery config value)
:minimum-idle (.setMinimumIdle config value)
:maximum-pool-size (.setMaximumPoolSize config value)
:metric-registry (.setMetricRegistry config value)
:health-check-registry (.setHealthCheckRegistry config value)
:pool-name (.setPoolName config value)
(throw (IllegalArgumentException. (tru "{0} is not a supported HikariCP option" (str option))))))
(defn options->hikari-config
[options]
(let [config (HikariConfig.)]
(doseq [[option value] options]
(set-option config option value))
(.validate config)
config))
(defn spec->hikari-options
[db-spec]
(-> db-spec
(set/rename-keys {:user :username
:classname :driver-class-name})
(assoc :jdbc-url (str "jdbc:"
(:subprotocol db-spec) ":"
(:subname db-spec)))
(dissoc :subprotocol :subname
:migration-password :migration-user)))
(defn select-user-configurable-hikari-options
"Given a map, return the subset of entries in the map whose keys are hikari
options that we want our users to be able to configure. This is intended to
allow users to set these fields in the database configuration section of a
service's TrapperKeeper config."
[m]
(select-keys [:connection-timeout
:connection-check-timeout
:idle-timeout
:max-lifetime
:minimum-idle
:maximum-pool-size]
m))
(defprotocol PoolStatus
(status [this] "Get a map representing the status of a connection pool.")
(init-error [this] "Return any exception raised by the init function (nil if none)."))
(defprotocol PoolLifetime
(block-until-ready [this] [this timeout-ms]
"Block execution until initialization is done, or the timeout expires (if specified).
If the timeout is specified, returns true if the execution completed within the timeperiod,
false if it didn't.")
(cancel-init [this] "Attempt to cancel the async initialization. Returns true if it was cancelled, false otherwise")
(init-complete? [this] "Returns true if the init is complete, false otherwise")
(close-after-ready [this] [this timeout-ms]
"Wait for the init routine to complete and then close the datasource. If the timeout is specified and expires
before the init is complete, cancel the init and close the datasource."))
(def replica-migrations-polling-interval-ms 1500)
(defn wait-for-migrations
"Loops until there are no uncompleted migrations from the migration-dir in the
db. We use this on pglogical replicas which will have their database
migrations replicated."
[db migration-dir]
(loop []
(when (not-empty (migration/uncompleted-migrations db migration-dir))
(Thread/sleep replica-migrations-polling-interval-ms)
(recur))))
(defn wrap-with-delayed-init
"Wraps a connection pool that loops trying to get a connection, and then runs
database migrations, then calls init-fn (with the connection as argument)
before returning any connections to the application. Accepts a timeout in ms
that's used when dereferencing the future and by the status check. The
datasource should have initialization-fail-fast set before being created or
this is pointless.
migration-opts is a map of:
:migration-dir, the path to the migratus migration directory on the classpath
:migration-dirs, the optional list of paths to any migratus migration directories
:migration-db, the connection map for the db used for migrations
:replication-mode, one of :source, :replica, or :none (the default)
If migration-opts is nil or not passed, the migration step is skipped."
([^HikariDataSource datasource init-fn timeout]
(wrap-with-delayed-init datasource nil init-fn timeout))
([^HikariDataSource datasource migration-opts init-fn timeout]
(let [init-error (atom nil)
init-exited-safely (promise)
pool-future
(future
(log/debug (trs "{0} - Starting database initialization" (.getPoolName datasource)))
(loop []
(if-let [result
(try
(.close (.getConnection datasource))
(let [{:keys [migration-db migration-dir migration-dirs]} migration-opts]
(if (and migration-db (or migration-dir migration-dirs))
(do
(log/debug (trs "{0} - Starting database migration" (.getPoolName datasource)))
If we 're a replica then pglogical will be
(let [migration-dirs (if migration-dirs migration-dirs [migration-dir])]
(if (= (:replication-mode migration-opts) :replica)
(doseq [single-dir migration-dirs]
(wait-for-migrations migration-db single-dir))
(doseq [single-dir migration-dirs]
(log/info (trs "migrating from {0}" single-dir))
(migration/migrate migration-db single-dir)))))
(log/info (trs "{0} No migration path specified, skipping database migration." (.getPoolName datasource)))))
(log/debug (trs "{0} - Starting post-migration init-fn" (.getPoolName datasource)))
(init-fn {:datasource datasource})
(log/debug (trs "{0} - Finished database migration" (.getPoolName datasource)))
datasource
(catch SQLTransientException e
(log/warnf e (trs "{0} - Error while attempting to connect to database, retrying.")
(.getPoolName datasource))
nil)
(catch Exception e
(reset! init-error e)
(log/errorf e (trs "{0} - An error was encountered during database migration."
(.getPoolName datasource)))
datasource))]
(do
(deliver init-exited-safely true)
result)
(recur))))]
(reify
DataSource
(getConnection [this]
(if (deref init-error)
(throw (RuntimeException. (tru "Unrecoverable error occurred during database initialization.")))
(.getConnection (or (deref pool-future timeout nil)
(throw (SQLTransientConnectionException. (tru "Timeout waiting for the database pool to become ready.")))))))
(getConnection [this username password]
(if (deref init-error)
(throw (RuntimeException. (tru "Unrecoverable error occurred during database initialization.")))
(.getConnection (or (deref pool-future timeout nil)
(throw (SQLTransientConnectionException. (tru "Timeout waiting for the database pool to become ready."))))
username
password)))
Closeable
(close [this]
(.close datasource))
PoolStatus
(status [this]
(if (realized? pool-future)
(let [connectivity-check (str (.getPoolName datasource)
".pool.ConnectivityCheck")
health-result (.runHealthCheck
(.getHealthCheckRegistry datasource)
connectivity-check)
healthy? (and (.isHealthy health-result)
(nil? @init-error))
messages (remove nil? [(some->> @init-error
(.getMessage)
(tru "Initialization resulted in an error: {0}"))
(.getMessage health-result)])]
(cond-> {:state (if healthy?
:ready
:error)}
(not healthy?) (merge {:messages messages})))
{:state :starting}))
(init-error [this]
@init-error)
PoolLifetime
(block-until-ready [this]
(log/info (trs "{0} - Blocking execution until db init has finished" (.getPoolName datasource)))
(try
(deref pool-future)
(catch ExecutionException e
(log/warn e (trs "{0} - Exception generated during init" (.getPoolName datasource))))))
(block-until-ready [this timeout-ms]
(log/info (trs "{0} - Blocking execution until db init has finished with {1} millisecond timeout "
(.getPoolName datasource) timeout-ms))
(try
(not (nil? (deref pool-future timeout-ms nil)))
(catch ExecutionException e
(log/warn e (trs "{0} - Exception generated during init" (.getPoolName datasource)))
true)))
(cancel-init [this]
(future-cancel pool-future))
(init-complete? [this]
(future-done? pool-future))
(close-after-ready [this]
(block-until-ready this)
(.close datasource))
(close-after-ready [this timeout-ms]
(when-not (block-until-ready this timeout-ms)
(log/warn (trs "{0} - Cancelling db-init due to timeout" (.getPoolName datasource)))
(cancel-init this)
(deref init-exited-safely timeout-ms :timeout)
(log/info (trs "{0} - Done waiting for init safe exit" (.getPoolName datasource))))
(.close datasource))))))
(defn connection-pool-with-delayed-init
"Create a connection pool that loops trying to get a connection, and then runs
init-fn (with the connection as argument) before returning any connections to
the application. Accepts a timeout in ms that's used when deferencing the
future. This overrides the value of initialization-fail-timeout to never timeout. "
([^HikariConfig config init-fn timeout]
(connection-pool-with-delayed-init config nil init-fn timeout))
([^HikariConfig config migration-options init-fn timeout]
(.setInitializationFailTimeout config -1)
(when-not (.getHealthCheckRegistry config)
(.setHealthCheckRegistry config (HealthCheckRegistry.)))
(wrap-with-delayed-init (HikariDataSource. config) migration-options init-fn timeout)))
|
82eb6d62584fc1a52e81576e8e1a86cf3bd2b398de60513514785bdc62ce7991 | feldi/clj-swipl7 | swipl7_test.clj | (ns clj.swipl7-test
(:require [clojure.test :refer :all]
[clj.swipl7.protocols :refer :all]
[clj.swipl7.core :refer :all]))
(deftest jpl-tests
(testing "version"
(is (= (get-jpl-version-as-string) "7.6.1-stable")))
(testing "prolog lists"
(let [pl-list (to-pl [(to-pl "a") (to-pl "b") (to-pl "c")])
clj-list (pl-list-to-clj-list pl-list)
clj-vec (pl-list-to-vec pl-list)]
(is (= '("a" "b" "c") clj-list))
(is (= ["a" "b" "c"] clj-vec))))
(testing "prolog exception"
(is (thrown? org.jpl7.PrologException (new-q "p(]."))))
)
| null | https://raw.githubusercontent.com/feldi/clj-swipl7/a40c22161a3a6c30edad64675b90f7552f20b984/test/clj/swipl7_test.clj | clojure | (ns clj.swipl7-test
(:require [clojure.test :refer :all]
[clj.swipl7.protocols :refer :all]
[clj.swipl7.core :refer :all]))
(deftest jpl-tests
(testing "version"
(is (= (get-jpl-version-as-string) "7.6.1-stable")))
(testing "prolog lists"
(let [pl-list (to-pl [(to-pl "a") (to-pl "b") (to-pl "c")])
clj-list (pl-list-to-clj-list pl-list)
clj-vec (pl-list-to-vec pl-list)]
(is (= '("a" "b" "c") clj-list))
(is (= ["a" "b" "c"] clj-vec))))
(testing "prolog exception"
(is (thrown? org.jpl7.PrologException (new-q "p(]."))))
)
| |
b7caa7eab423b86389bddb9601e16137757f38e7cbd25bc34eaa60369d0a6d39 | chenyukang/eopl | 03.scm | (load-relative "../libs/init.scm")
(load-relative "./base/test.scm")
(load-relative "./base/equal-type.scm")
(load-relative "./base/data-structures.scm")
(load-relative "./base/cases.scm")
(load-relative "./base/simplemodule-lang.scm")
;; see the new stuff
;;;;;;;;;;;;;;;; grammatical specification ;;;;;;;;;;;;;;;;
(define the-lexical-spec
'((whitespace (whitespace) skip)
(comment ("%" (arbno (not #\newline))) skip)
(identifier
(letter (arbno (or letter digit "_" "-" "?")))
symbol)
;; new stuff
(module-var
(letter (arbno (or letter digit "_" "-" "?")) "."
(arbno (or letter digit "_" "-" "?")))
symbol)
(number (digit (arbno digit)) number)
(number ("-" digit (arbno digit)) number)
))
(define the-grammar
'(
(program
((arbno module-definition)
expression)
a-program)
(module-definition
("module" identifier
"interface" interface
"body" module-body)
a-module-definition)
(interface
("[" (arbno declaration) "]")
simple-iface)
(declaration
(identifier ":" type)
val-decl)
(module-body
("[" (arbno definition) "]")
defns-module-body)
(definition
(identifier "=" expression)
val-defn)
;; new expression:
;; new stuff
(expression
(module-var)
qualified-var-exp)
;; new types
(type
(identifier)
named-type)
(type
(module-var)
qualified-type)
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; no changes in grammar below here
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(expression (number) const-exp)
(expression
(identifier)
var-exp)
(expression
("-" "(" expression "," expression ")")
diff-exp)
(expression
("zero?" "(" expression ")")
zero?-exp)
(expression
("if" expression "then" expression "else" expression)
if-exp)
(expression
("let" identifier "=" expression "in" expression)
let-exp)
(expression
("proc" "(" identifier ":" type ")" expression)
proc-exp)
(expression
("(" expression expression ")")
call-exp)
(expression
("letrec"
type identifier "(" identifier ":" type ")"
"=" expression "in" expression)
letrec-exp)
(type
("int")
int-type)
(type
("bool")
bool-type)
(type
("(" type "->" type ")")
proc-type)
))
;;;;;;;;;;;;;;;; sllgen boilerplate ;;;;;;;;;;;;;;;;
(sllgen:make-define-datatypes the-lexical-spec the-grammar)
(define show-the-datatypes
(lambda () (sllgen:list-define-datatypes the-lexical-spec the-grammar)))
(define scan&parse
(sllgen:make-string-parser the-lexical-spec the-grammar))
(define just-scan
(sllgen:make-string-scanner the-lexical-spec the-grammar))
value - of : Exp * Env - > ExpVal
(define value-of
(lambda (exp env)
(cases expression exp
(const-exp (num) (num-val num))
(var-exp (var) (apply-env env var))
;; new stuff, a hack
(qualified-var-exp (module-var)
(let ((names
(string-split (symbol->string module-var) ".")))
(let ((module (string->symbol (car names)))
(var (string->symbol (cadr names))))
(lookup-qualified-var-in-env module var env))))
(diff-exp (exp1 exp2)
(let ((val1
(expval->num
(value-of exp1 env)))
(val2
(expval->num
(value-of exp2 env))))
(num-val
(- val1 val2))))
(zero?-exp (exp1)
(let ((val1 (expval->num (value-of exp1 env))))
(if (zero? val1)
(bool-val #t)
(bool-val #f))))
(if-exp (exp0 exp1 exp2)
(if (expval->bool (value-of exp0 env))
(value-of exp1 env)
(value-of exp2 env)))
(let-exp (var exp1 body)
(let ((val (value-of exp1 env)))
(let ((new-env (extend-env var val env)))
(value-of body new-env))))
(proc-exp (bvar ty body)
(proc-val
(procedure bvar body env)))
(call-exp (rator rand)
(let ((proc (expval->proc (value-of rator env)))
(arg (value-of rand env)))
(apply-procedure proc arg)))
(letrec-exp (ty1 proc-name bvar ty2 proc-body letrec-body)
(value-of letrec-body
(extend-env-recursively proc-name bvar proc-body env)))
)))
(run "
module m
interface
[u : int
v : int]
body
[u = 44
v = 33]
-(m.u , m.v)")
| null | https://raw.githubusercontent.com/chenyukang/eopl/0406ff23b993bfe020294fa70d2597b1ce4f9b78/ch8/03.scm | scheme | see the new stuff
grammatical specification ;;;;;;;;;;;;;;;;
new stuff
new expression:
new stuff
new types
no changes in grammar below here
sllgen boilerplate ;;;;;;;;;;;;;;;;
new stuff, a hack | (load-relative "../libs/init.scm")
(load-relative "./base/test.scm")
(load-relative "./base/equal-type.scm")
(load-relative "./base/data-structures.scm")
(load-relative "./base/cases.scm")
(load-relative "./base/simplemodule-lang.scm")
(define the-lexical-spec
'((whitespace (whitespace) skip)
(comment ("%" (arbno (not #\newline))) skip)
(identifier
(letter (arbno (or letter digit "_" "-" "?")))
symbol)
(module-var
(letter (arbno (or letter digit "_" "-" "?")) "."
(arbno (or letter digit "_" "-" "?")))
symbol)
(number (digit (arbno digit)) number)
(number ("-" digit (arbno digit)) number)
))
;;;;;;;;;;;;;;;; grammatical specification ;;;;;;;;;;;;;;;;
;; Grammar for the SIMPLE-MODULES language: a sequence of module
;; definitions followed by a body expression.
(define the-grammar
  '(
    (program
     ((arbno module-definition)
      expression)
     a-program)
    ;; new stuff: a module declares an interface and provides a body
    (module-definition
     ("module" identifier
      "interface" interface
      "body" module-body)
     a-module-definition)
    (interface
     ("[" (arbno declaration) "]")
     simple-iface)
    (declaration
     (identifier ":" type)
     val-decl)
    (module-body
     ("[" (arbno definition) "]")
     defns-module-body)
    (definition
      (identifier "=" expression)
      val-defn)
    ;; new expression: qualified reference such as m.v
    (expression
     (module-var)
     qualified-var-exp)
    ;; new types
    (type
     (identifier)
     named-type)
    (type
     (module-var)
     qualified-type)
    ;; no changes in grammar below here
    (expression (number) const-exp)
    (expression
     (identifier)
     var-exp)
    (expression
     ("-" "(" expression "," expression ")")
     diff-exp)
    (expression
     ("zero?" "(" expression ")")
     zero?-exp)
    (expression
     ("if" expression "then" expression "else" expression)
     if-exp)
    (expression
     ("let" identifier "=" expression "in" expression)
     let-exp)
    (expression
     ("proc" "(" identifier ":" type ")" expression)
     proc-exp)
    (expression
     ("(" expression expression ")")
     call-exp)
    (expression
     ("letrec"
      type identifier "(" identifier ":" type ")"
      "=" expression "in" expression)
     letrec-exp)
    (type
     ("int")
     int-type)
    (type
     ("bool")
     bool-type)
    (type
     ("(" type "->" type ")")
     proc-type)
    ))
;;;;;;;;;;;;;;;; sllgen boilerplate ;;;;;;;;;;;;;;;;
;; Generate the AST datatype definitions from the grammar above.
(sllgen:make-define-datatypes the-lexical-spec the-grammar)
;; Debug helper: show the generated datatype definitions.
(define show-the-datatypes
  (lambda () (sllgen:list-define-datatypes the-lexical-spec the-grammar)))
;; scan&parse : String -> Program
(define scan&parse
  (sllgen:make-string-parser the-lexical-spec the-grammar))
;; just-scan : String -> Listof(Token)
(define just-scan
  (sllgen:make-string-scanner the-lexical-spec the-grammar))
;; value-of : Exp * Env -> ExpVal
;; Evaluator: dispatches on the abstract syntax of exp, yielding an ExpVal.
(define value-of
  (lambda (exp env)
    (cases expression exp
      (const-exp (num) (num-val num))
      (var-exp (var) (apply-env env var))
      ;; m.v : split the scanned symbol "m.v" at the dot into the module
      ;; name and the variable name, then look the variable up in that
      ;; module's bindings recorded in the environment
      (qualified-var-exp (module-var)
                         (let ((names
                                (string-split (symbol->string module-var) ".")))
                           (let ((module (string->symbol (car names)))
                                 (var (string->symbol (cadr names))))
                             (lookup-qualified-var-in-env module var env))))
      (diff-exp (exp1 exp2)
                (let ((val1
                       (expval->num
                        (value-of exp1 env)))
                      (val2
                       (expval->num
                        (value-of exp2 env))))
                  (num-val
                   (- val1 val2))))
      (zero?-exp (exp1)
                 (let ((val1 (expval->num (value-of exp1 env))))
                   (if (zero? val1)
                       (bool-val #t)
                       (bool-val #f))))
      (if-exp (exp0 exp1 exp2)
              (if (expval->bool (value-of exp0 env))
                  (value-of exp1 env)
                  (value-of exp2 env)))
      (let-exp (var exp1 body)
               (let ((val (value-of exp1 env)))
                 (let ((new-env (extend-env var val env)))
                   (value-of body new-env))))
      ;; the declared parameter type ty is checked elsewhere and ignored
      ;; at run time
      (proc-exp (bvar ty body)
                (proc-val
                 (procedure bvar body env)))
      (call-exp (rator rand)
                (let ((proc (expval->proc (value-of rator env)))
                      (arg (value-of rand env)))
                  (apply-procedure proc arg)))
      (letrec-exp (ty1 proc-name bvar ty2 proc-body letrec-body)
                  (value-of letrec-body
                            (extend-env-recursively proc-name bvar proc-body env)))
      )))
)))
;; Smoke test: module m exports u = 44 and v = 33; the program body
;; computes -(m.u, m.v), so the expected result is 11.
(run "
module m
interface
[u : int
v : int]
body
[u = 44
v = 33]
-(m.u , m.v)")
|
5e28f21b82a7c862147d97cab817076868f86c1ac3fffd4ebf33fc90a169a912 | nijohando/event | emitter.cljc | (ns jp.nijohando.event.emitter
(:require [clojure.core.async :as ca]))
(defn pipe
[emitter-id emitter-ch bus-ch]
(ca/go-loop []
(when-some [v (ca/<! emitter-ch)]
(when-let [event (when (map? v)
(assoc-in v [:header :emitter-id] emitter-id))]
(when-not (ca/>! bus-ch event)
(ca/close! emitter-ch)))
(recur))))
| null | https://raw.githubusercontent.com/nijohando/event/34e359c554d2baaa01e4fec524942f513a26d7f5/src/jp/nijohando/event/emitter.cljc | clojure | (ns jp.nijohando.event.emitter
(:require [clojure.core.async :as ca]))
(defn pipe
[emitter-id emitter-ch bus-ch]
(ca/go-loop []
(when-some [v (ca/<! emitter-ch)]
(when-let [event (when (map? v)
(assoc-in v [:header :emitter-id] emitter-id))]
(when-not (ca/>! bus-ch event)
(ca/close! emitter-ch)))
(recur))))
| |
a084b96c85fed97d71866ce0032fc7bf2d9dbfce08773e765f0bd81f1d33b33d | EMSL-NMR-EPR/Haskell-MFAPipe-Executable | IsotopicLabeling.hs | -----------------------------------------------------------------------------
-- |
-- Module : Science.Chemistry.IsotopicLabeling
Copyright : 2016 - 17 Pacific Northwest National Laboratory
-- License : ECL-2.0 (see the LICENSE file in the distribution)
--
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- This module exports modules for isotopic labeling.
-----------------------------------------------------------------------------
module Science.Chemistry.IsotopicLabeling
( -- * Types
-- ** Generic
module Science.Chemistry.IsotopicLabeling.FractionType
, module Science.Chemistry.IsotopicLabeling.FractionTypeDict
, module Science.Chemistry.IsotopicLabeling.FractionMatrix
, module Science.Chemistry.IsotopicLabeling.FractionVector
-- ** Isotopomer fractions
, module Science.Chemistry.IsotopicLabeling.Isotopomer
, module Science.Chemistry.IsotopicLabeling.IsotopomerFractionMatrix
, module Science.Chemistry.IsotopicLabeling.IsotopomerFractionVector
-- ** Mass fractions
, module Science.Chemistry.IsotopicLabeling.MassFraction
, module Science.Chemistry.IsotopicLabeling.MassFractionMatrix
, module Science.Chemistry.IsotopicLabeling.MassFractionVector
-- * Classes
, module Science.Chemistry.IsotopicLabeling.DSL.Display.Class
, module Science.Chemistry.IsotopicLabeling.DSL.FromDict.Class
-- * Steady state
, module Science.Chemistry.IsotopicLabeling.SteadyState
) where
import Science.Chemistry.IsotopicLabeling.DSL.Display.Class
import Science.Chemistry.IsotopicLabeling.DSL.FromDict.Class
import Science.Chemistry.IsotopicLabeling.FractionMatrix
import Science.Chemistry.IsotopicLabeling.FractionType
import Science.Chemistry.IsotopicLabeling.FractionTypeDict
import Science.Chemistry.IsotopicLabeling.FractionVector
import Science.Chemistry.IsotopicLabeling.Isotopomer
import Science.Chemistry.IsotopicLabeling.IsotopomerFractionMatrix
import Science.Chemistry.IsotopicLabeling.IsotopomerFractionVector
import Science.Chemistry.IsotopicLabeling.MassFraction
import Science.Chemistry.IsotopicLabeling.MassFractionMatrix
import Science.Chemistry.IsotopicLabeling.MassFractionVector
import Science.Chemistry.IsotopicLabeling.SteadyState
| null | https://raw.githubusercontent.com/EMSL-NMR-EPR/Haskell-MFAPipe-Executable/8a7fd13202d3b6b7380af52d86e851e995a9b53e/MFAPipe/src/Science/Chemistry/IsotopicLabeling.hs | haskell | ---------------------------------------------------------------------------
|
Module : Science.Chemistry.IsotopicLabeling
License : ECL-2.0 (see the LICENSE file in the distribution)
Maintainer :
Stability : experimental
Portability : portable
This module exports modules for isotopic labeling.
---------------------------------------------------------------------------
* Types
** Generic
** Isotopomer fractions
** Mass fractions
* Classes
* Steady state | Copyright : 2016 - 17 Pacific Northwest National Laboratory
module Science.Chemistry.IsotopicLabeling
module Science.Chemistry.IsotopicLabeling.FractionType
, module Science.Chemistry.IsotopicLabeling.FractionTypeDict
, module Science.Chemistry.IsotopicLabeling.FractionMatrix
, module Science.Chemistry.IsotopicLabeling.FractionVector
, module Science.Chemistry.IsotopicLabeling.Isotopomer
, module Science.Chemistry.IsotopicLabeling.IsotopomerFractionMatrix
, module Science.Chemistry.IsotopicLabeling.IsotopomerFractionVector
, module Science.Chemistry.IsotopicLabeling.MassFraction
, module Science.Chemistry.IsotopicLabeling.MassFractionMatrix
, module Science.Chemistry.IsotopicLabeling.MassFractionVector
, module Science.Chemistry.IsotopicLabeling.DSL.Display.Class
, module Science.Chemistry.IsotopicLabeling.DSL.FromDict.Class
, module Science.Chemistry.IsotopicLabeling.SteadyState
) where
import Science.Chemistry.IsotopicLabeling.DSL.Display.Class
import Science.Chemistry.IsotopicLabeling.DSL.FromDict.Class
import Science.Chemistry.IsotopicLabeling.FractionMatrix
import Science.Chemistry.IsotopicLabeling.FractionType
import Science.Chemistry.IsotopicLabeling.FractionTypeDict
import Science.Chemistry.IsotopicLabeling.FractionVector
import Science.Chemistry.IsotopicLabeling.Isotopomer
import Science.Chemistry.IsotopicLabeling.IsotopomerFractionMatrix
import Science.Chemistry.IsotopicLabeling.IsotopomerFractionVector
import Science.Chemistry.IsotopicLabeling.MassFraction
import Science.Chemistry.IsotopicLabeling.MassFractionMatrix
import Science.Chemistry.IsotopicLabeling.MassFractionVector
import Science.Chemistry.IsotopicLabeling.SteadyState
|
03dc708941adc6cae8a3456286ea43fde69dfaf2bd3cfba8ce292f396992a798 | ocaml-flambda/ocaml-jst | simplif.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
(** Lambda simplification.

    {b Warning:} this module is unstable and part of
    {{!Compiler_libs}compiler-libs}.
*)

(* Elimination of useless Llet(Alias) bindings.
   Transformation of let-bound references into variables.
   Simplification over staticraise/staticcatch constructs.
   Generation of tail-call annotations if -annot is set. *)

open Lambda

(** Apply the simplifications listed above to a lambda term. *)
val simplify_lambda: lambda -> lambda

(** NOTE(review): behaviour inferred from the name and signature — confirm
    against simplif.ml.  Appears to split a function into an inner function
    plus a wrapper that computes defaulted parameters, returning the
    resulting [(identifier, lambda)] bindings. *)
val split_default_wrapper
  : id:Ident.t
  -> kind:function_kind
  -> params:(Ident.t * Lambda.layout) list
  -> return:Lambda.layout
  -> body:lambda
  -> attr:function_attribute
  -> loc:Lambda.scoped_location
  -> mode:Lambda.alloc_mode
  -> region:bool
  -> (Ident.t * lambda) list
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Lambda simplification.
{b Warning:} this module is unstable and part of
{{!Compiler_libs}compiler-libs}.
Elimination of useless Llet(Alias) bindings.
Transformation of let-bound references into variables.
Simplification over staticraise/staticcatch constructs.
Generation of tail-call annotations if -annot is set. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
open Lambda
val simplify_lambda: lambda -> lambda
val split_default_wrapper
: id:Ident.t
-> kind:function_kind
-> params:(Ident.t * Lambda.layout) list
-> return:Lambda.layout
-> body:lambda
-> attr:function_attribute
-> loc:Lambda.scoped_location
-> mode:Lambda.alloc_mode
-> region:bool
-> (Ident.t * lambda) list
|
078f1b7629d02aac9ff27e33404d7d5582c8c5dd99fb2593ad1502812cb43e7e | scicloj/tablecloth.time | validatable_test.clj | (ns tablecloth.time.utils.validatable-test
(:require [clojure.test :refer [deftest is]]
[tablecloth.api :as tablecloth]
[tablecloth.time.utils.validatable :as validatable]))
(deftest validatable-test
(let [ds (tablecloth/dataset {:x [1 2 3]
:y [4 5 6]})
ds-with-validatable (validatable/add-validatable ds
[:x]
:id1
9999)]
(is (-> ds ;; without validatable
(validatable/valid? :id1)
not))
(is (-> ds-with-validatable
(tablecloth/select-rows [0 2])
(validatable/valid? :id1)
not))
(is (-> ds-with-validatable
(tablecloth/add-or-replace-column :x 9)
(validatable/valid? :id1)
not))
(is (-> ds-with-validatable
(tablecloth/add-or-replace-column :z 9)
(validatable/valid? :id1)
not))))
| null | https://raw.githubusercontent.com/scicloj/tablecloth.time/9eba5d0aa22fed087c2c0b072c40f58e91c0505d/test/tablecloth/time/utils/validatable_test.clj | clojure | without validatable | (ns tablecloth.time.utils.validatable-test
(:require [clojure.test :refer [deftest is]]
[tablecloth.api :as tablecloth]
[tablecloth.time.utils.validatable :as validatable]))
(deftest validatable-test
(let [ds (tablecloth/dataset {:x [1 2 3]
:y [4 5 6]})
ds-with-validatable (validatable/add-validatable ds
[:x]
:id1
9999)]
(validatable/valid? :id1)
not))
(is (-> ds-with-validatable
(tablecloth/select-rows [0 2])
(validatable/valid? :id1)
not))
(is (-> ds-with-validatable
(tablecloth/add-or-replace-column :x 9)
(validatable/valid? :id1)
not))
(is (-> ds-with-validatable
(tablecloth/add-or-replace-column :z 9)
(validatable/valid? :id1)
not))))
|
8f883d49674099a37116904408e1a633616e982c08748a1af53c1e468255ac89 | lazy-cat-io/metaverse | reporter.cljs | (ns metaverse.common.reporter
(:require
[metaverse.common.env :as env]
[metaverse.common.logger :as log :include-macros true]
[metaverse.common.utils.string :as str]
[tenet.response :as r]))
TODO : [ 2022 - 05 - 02 , ] validate opts
(defn init!
([^js sentry]
(init! sentry {}))
([^js sentry {:keys [dsn] :as opts}]
(let [dsn (or dsn env/sentry-dsn)]
(if (= "N/A" dsn)
(do
(log/error :msg "Sentry reporter is not initialized" :opts opts)
(r/as-incorrect env/sentry-dsn))
(let [version (:version env/build-info)
build (-> env/build-info
(:metadata)
(assoc :version version))
opts' (-> {:dsn env/sentry-dsn,
:environment env/mode,
:tracesSampleRate 1.0,
:debug env/develop?
:release (str/format "%s@%s:%s" env/company-name env/product-name version)
:initialScope {:build build}}
(merge opts))
reporter (.init sentry (clj->js opts'))]
(log/info :msg "Sentry reporter successfully initialized" :opts opts')
(r/as-success reporter))))))
| null | https://raw.githubusercontent.com/lazy-cat-io/metaverse/4c566278d253ce6b85e4644b2fdec93dab58355c/src/main/clojure/metaverse/common/reporter.cljs | clojure | (ns metaverse.common.reporter
(:require
[metaverse.common.env :as env]
[metaverse.common.logger :as log :include-macros true]
[metaverse.common.utils.string :as str]
[tenet.response :as r]))
TODO : [ 2022 - 05 - 02 , ] validate opts
(defn init!
([^js sentry]
(init! sentry {}))
([^js sentry {:keys [dsn] :as opts}]
(let [dsn (or dsn env/sentry-dsn)]
(if (= "N/A" dsn)
(do
(log/error :msg "Sentry reporter is not initialized" :opts opts)
(r/as-incorrect env/sentry-dsn))
(let [version (:version env/build-info)
build (-> env/build-info
(:metadata)
(assoc :version version))
opts' (-> {:dsn env/sentry-dsn,
:environment env/mode,
:tracesSampleRate 1.0,
:debug env/develop?
:release (str/format "%s@%s:%s" env/company-name env/product-name version)
:initialScope {:build build}}
(merge opts))
reporter (.init sentry (clj->js opts'))]
(log/info :msg "Sentry reporter successfully initialized" :opts opts')
(r/as-success reporter))))))
| |
0209d7be44b0f9232ec7cc26cb66e1d0ab87916466bd01c1d827957043e27a6d | runeksvendsen/restful-payment-channel-server | Types.hs | module BlockchainAPI.Impl.Bitcoind.Types
(
module APISpec.Types
) where
import APISpec.Types
| null | https://raw.githubusercontent.com/runeksvendsen/restful-payment-channel-server/0fe65eadccad5ef2b840714623ec407e509ad00b/src/BlockchainAPI/Impl/Bitcoind/Types.hs | haskell | module BlockchainAPI.Impl.Bitcoind.Types
(
module APISpec.Types
) where
import APISpec.Types
| |
a559c77c36f8f166c30a1b749e3a880cf7e98e2864419aefd2d29b4160b67f33 | ocaml-flambda/flambda-backend | simplify_switch_expr.ml | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, OCamlPro
and ,
(* *)
(* Copyright 2013--2020 OCamlPro SAS *)
Copyright 2014 - -2020 Jane Street Group LLC
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
open! Simplify_import
module TE = Flambda2_types.Typing_env
module Alias_set = TE.Alias_set

[@@@ocaml.warning "-37"]

(* State threaded through [rebuild_arm] while deciding whether every arm of
   a [Switch] targets the same continuation with compatible arguments, in
   which case the whole switch can be replaced by one [Apply_cont]. *)
type mergeable_arms =
  | No_arms (* no arm examined yet *)
  | Mergeable of
      { cont : Continuation.t;
        (* for each argument position, the aliases common to all arms so
           far; the switch is mergeable only if each set stays non-empty *)
        args : Alias_set.t list
      }
  | Not_mergeable (* arms disagree on the target, or a trap action exists *)
(* Return the set of aliases of [arg] (at normal name mode) in [env],
   collapsing the set to just [arg] itself when [arg] is a variable. *)
let find_all_aliases env arg =
  let find_all_aliases () =
    TE.aliases_of_simple env ~min_name_mode:NM.normal arg
  in
  Simple.pattern_match'
    ~var:(fun _var ~coercion:_ ->
      (* We use find_all_aliases to find a common simple between different
         simples.  This simple is already guaranteed to be the canonical
         alias.
         * If there is a common alias between variables, the canonical
           alias must also be a common alias.
         * For constants and symbols there can be a common alias that is
           not canonical: a variable can have different constant values in
           different branches, so that variable is not the canonical alias
           (the canonical one would be the constant or the symbol), but the
           only common alias could be a variable in that case.
         Hence there is no loss of generality in returning the canonical
         alias as the single alias if it is a variable.
         Note that the main reason for this is to allow changing the
         arguments of continuations to variables that were not in scope
         during the downward traversal, in particular for the alias
         rewriting provided by data_flow. *)
      TE.Alias_set.singleton arg)
    ~symbol:(fun _sym ~coercion:_ -> find_all_aliases ())
    ~const:(fun _cst -> find_all_aliases ())
    arg
(* Fold one simplified switch arm into the rebuild accumulator.  Applies
   pending rewrites to the arm's [Apply_cont], drops arms whose destination
   is provably unreachable, and updates the three classification
   accumulators: [mergeable_arms] (all arms going to one continuation),
   [identity_arms] (arm [i] returns [i]), and [not_arms] (boolean NOT
   shape).  New zero-arity wrapper continuations required by a rewrite are
   collected in [new_let_conts]. *)
let rebuild_arm uacc arm (action, use_id, arity, env_at_use)
    ( new_let_conts,
      arms,
      (mergeable_arms : mergeable_arms),
      identity_arms,
      not_arms ) =
  let action =
    Simplify_common.clear_demoted_trap_action_and_patch_unused_exn_bucket uacc
      action
  in
  match
    EB.rewrite_switch_arm uacc action ~use_id
      (Flambda_arity.With_subkinds.of_arity arity)
  with
  | Apply_cont action -> (
    let action =
      let cont = Apply_cont.continuation action in
      let cont_info_from_uenv = UE.find_continuation (UA.uenv uacc) cont in
      (* First try to absorb any [Apply_cont] expression that forms the
         entirety of the arm's action (via an intermediate zero-arity
         continuation without trap action) into the [Switch] expression
         itself. *)
      match cont_info_from_uenv with
      | Invalid _ -> None
      | Linearly_used_and_inlinable _ | Non_inlinable_zero_arity _
      | Non_inlinable_non_zero_arity _
      | Toplevel_or_function_return_or_exn_continuation _ -> (
        if not (Apply_cont.is_goto action)
        then Some action
        else
          let check_handler ~handler ~action =
            match RE.to_apply_cont handler with
            | Some action -> Some action
            | None -> Some action
          in
          match cont_info_from_uenv with
          | Linearly_used_and_inlinable
              { handler;
                free_names_of_handler = _;
                params;
                cost_metrics_of_handler = _
              } ->
            assert (Bound_parameters.is_empty params);
            check_handler ~handler ~action
          | Non_inlinable_zero_arity { handler = Known handler } ->
            check_handler ~handler ~action
          | Non_inlinable_zero_arity { handler = Unknown } -> Some action
          | Invalid _ -> None
          | Non_inlinable_non_zero_arity _
          | Toplevel_or_function_return_or_exn_continuation _ ->
            Misc.fatal_errorf
              "Inconsistency for %a between [Apply_cont.is_goto] and \
               continuation environment in [UA]:@ %a"
              Continuation.print cont UA.print uacc)
    in
    match action with
    | None ->
      (* The destination is unreachable; delete the [Switch] arm. *)
      new_let_conts, arms, mergeable_arms, identity_arms, not_arms
    | Some action -> (
      (* CR mshinwell: Fix alias handling so that identity switches like
         those in id_switch.ml can be simplified by only using
         [mergeable_arms].  Then remove [identity_arms]. *)
      let maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms =
        let arms = Targetint_31_63.Map.add arm action arms in
        (* Check to see if this arm may be merged with others. *)
        if Option.is_some (Apply_cont.trap_action action)
        then new_let_conts, arms, Not_mergeable, identity_arms, not_arms
        else
          match mergeable_arms with
          | Not_mergeable ->
            new_let_conts, arms, Not_mergeable, identity_arms, not_arms
          | No_arms ->
            (* first arm: record its target and the alias sets of its
               arguments as the merge candidates *)
            let cont = Apply_cont.continuation action in
            let args =
              List.map
                (fun arg -> find_all_aliases env_at_use arg)
                (Apply_cont.args action)
            in
            ( new_let_conts,
              arms,
              Mergeable { cont; args },
              identity_arms,
              not_arms )
          | Mergeable { cont; args } ->
            if not (Continuation.equal cont (Apply_cont.continuation action))
            then new_let_conts, arms, Not_mergeable, identity_arms, not_arms
            else
              (* same target: intersect the per-position alias sets *)
              let args =
                List.map2
                  (fun arg_set arg ->
                    Alias_set.inter (find_all_aliases env_at_use arg) arg_set)
                  args (Apply_cont.args action)
              in
              ( new_let_conts,
                arms,
                Mergeable { cont; args },
                identity_arms,
                not_arms )
      in
      (* Check to see if the arm is of a form that might mean the whole
         [Switch] is an identity or a boolean NOT. *)
      match Apply_cont.to_one_arg_without_trap_action action with
      | None -> maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms
      | Some arg ->
        let[@inline always] const arg =
          match Reg_width_const.descr arg with
          | Tagged_immediate arg ->
            if Targetint_31_63.equal arm arg
            then
              (* arm [i] passes [i]: identity candidate *)
              let identity_arms =
                Targetint_31_63.Map.add arm action identity_arms
              in
              maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms
            else if Targetint_31_63.equal arm Targetint_31_63.bool_true
                    && Targetint_31_63.equal arg Targetint_31_63.bool_false
                    || Targetint_31_63.equal arm Targetint_31_63.bool_false
                       && Targetint_31_63.equal arg Targetint_31_63.bool_true
            then
              (* arm [true] passes [false] or vice versa: NOT candidate *)
              let not_arms = Targetint_31_63.Map.add arm action not_arms in
              maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms
            else maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms
          | Naked_immediate _ | Naked_float _ | Naked_int32 _ | Naked_int64 _
          | Naked_nativeint _ ->
            maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms
        in
        Simple.pattern_match arg ~const ~name:(fun _ ~coercion:_ ->
            maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms)))
  | New_wrapper new_let_cont ->
    (* The rewrite needs a wrapper continuation; the arm now jumps to it. *)
    let new_let_conts = new_let_cont :: new_let_conts in
    let action = Apply_cont.goto new_let_cont.cont in
    let arms = Targetint_31_63.Map.add arm action arms in
    new_let_conts, arms, Not_mergeable, identity_arms, not_arms
(* Restrict [alias_set] to constants plus names present in
   [required_names], then pick the best remaining alias (if any). *)
let filter_and_choose_alias required_names alias_set =
  let is_available alias =
    Simple.pattern_match alias
      ~name:(fun name ~coercion:_ -> Name.Set.mem name required_names)
      ~const:(fun _ -> true)
  in
  Alias_set.find_best (Alias_set.filter alias_set ~f:is_available)
(* If [prim] has a CSE binding in [dacc], return the best alias of that
   binding that is either a constant or a name in [required_names];
   otherwise return [None]. *)
let find_cse_simple dacc required_names prim =
  match P.Eligible_for_cse.create prim with
  | None -> None (* Constant *)
  | Some with_fixed_value -> (
    match DE.find_cse (DA.denv dacc) with_fixed_value with
    | None -> None
    | Some simple ->
      filter_and_choose_alias required_names
        (find_all_aliases (DA.typing_env dacc) simple))
(* Rebuild a [Switch] from its simplified arms.  In order of preference:
   replace it by a single [Apply_cont] when all arms merged; by a jump
   passing the (re-tagged) scrutinee when the switch is the identity; by a
   [Boolean_not] of the tagged scrutinee when it is a boolean NOT; or emit
   a normal switch.  Zero arms yield [Invalid].  Any wrapper continuations
   produced by arm rewrites are bound around the result. *)
let rebuild_switch ~arms ~condition_dbg ~scrutinee ~scrutinee_ty
    ~dacc_before_switch uacc ~after_rebuild =
  let new_let_conts, arms, mergeable_arms, identity_arms, not_arms =
    Targetint_31_63.Map.fold (rebuild_arm uacc) arms
      ( [],
        Targetint_31_63.Map.empty,
        No_arms,
        Targetint_31_63.Map.empty,
        Targetint_31_63.Map.empty )
  in
  (* [Some (cont, args)] iff every arm jumps to [cont] and each argument
     position has at least one alias available after rebuilding. *)
  let switch_merged =
    match mergeable_arms with
    | No_arms | Not_mergeable -> None
    | Mergeable { cont; args } ->
      let num_args = List.length args in
      let required_names = UA.required_names uacc in
      let args =
        List.filter_map (filter_and_choose_alias required_names) args
      in
      if List.compare_length_with args num_args = 0
      then Some (cont, args)
      else None
  in
  (* [Some cont] iff every arm [i] jumps to the same [cont] passing [i]. *)
  let switch_is_identity =
    let arm_discrs = Targetint_31_63.Map.keys arms in
    let identity_arms_discrs = Targetint_31_63.Map.keys identity_arms in
    if not (Targetint_31_63.Set.equal arm_discrs identity_arms_discrs)
    then None
    else
      Targetint_31_63.Map.data identity_arms
      |> List.map Apply_cont.continuation
      |> Continuation.Set.of_list |> Continuation.Set.get_singleton
  in
  (* [Some cont] iff arms are exactly {false, true} and each passes the
     negation of its discriminant to the same [cont]. *)
  let switch_is_boolean_not =
    let arm_discrs = Targetint_31_63.Map.keys arms in
    let not_arms_discrs = Targetint_31_63.Map.keys not_arms in
    if (not (Targetint_31_63.Set.equal arm_discrs Targetint_31_63.all_bools))
       || not (Targetint_31_63.Set.equal arm_discrs not_arms_discrs)
    then None
    else
      Targetint_31_63.Map.data not_arms
      |> List.map Apply_cont.continuation
      |> Continuation.Set.of_list |> Continuation.Set.get_singleton
  in
  let body, uacc =
    if Targetint_31_63.Map.cardinal arms < 1
    then
      let uacc = UA.notify_removed ~operation:Removed_operations.branch uacc in
      RE.create_invalid Zero_switch_arms, uacc
    else
      let dbg = Debuginfo.none in
      let[@inline] normal_case uacc =
        (* Even though some branches were removed by simplification, they
           are not counted as removed operations: they would not have been
           taken during execution anyway. *)
        let expr, uacc =
          EB.create_switch uacc ~condition_dbg ~scrutinee ~arms
        in
        if Flambda_features.check_invariants ()
           && Simple.is_const scrutinee
           && Targetint_31_63.Map.cardinal arms > 1
        then
          Misc.fatal_errorf
            "[Switch] with constant scrutinee (type: %a) should have been \
             simplified away:@ %a"
            T.print scrutinee_ty
            (RE.print (UA.are_rebuilding_terms uacc))
            expr;
        expr, uacc
      in
      match switch_merged with
      | Some (dest, args) ->
        let uacc =
          UA.notify_removed ~operation:Removed_operations.branch uacc
        in
        let apply_cont = Apply_cont.create dest ~args ~dbg in
        let expr = RE.create_apply_cont apply_cont in
        let uacc = UA.add_free_names uacc (Apply_cont.free_names apply_cont) in
        expr, uacc
      | None -> (
        match switch_is_identity with
        | Some dest -> (
          let uacc =
            UA.notify_removed ~operation:Removed_operations.branch uacc
          in
          (* The scrutinee is untagged; reuse a CSE'd tagged version of it
             if one is available, otherwise fall back to a real switch. *)
          let tagging_prim : P.t = Unary (Tag_immediate, scrutinee) in
          match
            find_cse_simple dacc_before_switch (UA.required_names uacc)
              tagging_prim
          with
          | None -> normal_case uacc
          | Some tagged_scrutinee ->
            let apply_cont =
              Apply_cont.create dest ~args:[tagged_scrutinee] ~dbg
            in
            let expr = RE.create_apply_cont apply_cont in
            let uacc =
              UA.add_free_names uacc (Apply_cont.free_names apply_cont)
            in
            expr, uacc)
        | None -> (
          match switch_is_boolean_not with
          | Some dest -> (
            let uacc =
              UA.notify_removed ~operation:Removed_operations.branch uacc
            in
            let not_scrutinee = Variable.create "not_scrutinee" in
            let not_scrutinee' = Simple.var not_scrutinee in
            let tagging_prim : P.t = Unary (Tag_immediate, scrutinee) in
            match
              find_cse_simple dacc_before_switch (UA.required_names uacc)
                tagging_prim
            with
            | None -> normal_case uacc
            | Some tagged_scrutinee ->
              (* let not_scrutinee = Boolean_not tagged_scrutinee in
                 apply_cont dest (not_scrutinee) *)
              let do_tagging =
                Named.create_prim
                  (P.Unary (Boolean_not, tagged_scrutinee))
                  Debuginfo.none
              in
              let bound =
                VB.create not_scrutinee NM.normal |> Bound_pattern.singleton
              in
              let apply_cont =
                Apply_cont.create dest ~args:[not_scrutinee'] ~dbg
              in
              let body = RE.create_apply_cont apply_cont in
              let free_names_of_body = Apply_cont.free_names apply_cont in
              let expr =
                RE.create_let
                  (UA.are_rebuilding_terms uacc)
                  bound do_tagging ~body ~free_names_of_body
              in
              let uacc =
                UA.add_free_names uacc
                  (NO.union
                     (Named.free_names do_tagging)
                     (NO.diff free_names_of_body
                        ~without:(NO.singleton_variable not_scrutinee NM.normal)))
              in
              expr, uacc)
          | None -> normal_case uacc))
  in
  let uacc, expr = EB.bind_let_conts uacc ~body new_let_conts in
  after_rebuild expr uacc
(* Simplify one switch arm on the way down.  If the scrutinee's type is
   incompatible with discriminant [arm], the arm is dropped (Bottom case).
   Otherwise the arm's [Apply_cont] arguments are simplified in the typing
   environment refined by the match, the continuation use is recorded, and
   the arm is added to [arms] together with its rewrite id, argument arity
   and environment at use. *)
let simplify_arm ~typing_env_at_use ~scrutinee_ty arm action (arms, dacc) =
  let shape = T.this_naked_immediate arm in
  match T.meet typing_env_at_use scrutinee_ty shape with
  | Bottom -> arms, dacc
  | Ok (_meet_ty, env_extension) ->
    let env_at_use = TE.add_env_extension typing_env_at_use env_extension in
    let denv_at_use = DE.with_typing_env (DA.denv dacc) env_at_use in
    let args = AC.args action in
    let use_kind =
      Simplify_common.apply_cont_use_kind ~context:Switch_branch action
    in
    let { S.simples = args; simple_tys = arg_types } =
      S.simplify_simples dacc args
    in
    let dacc, rewrite_id =
      DA.record_continuation_use dacc (AC.continuation action) use_kind
        ~env_at_use:denv_at_use ~arg_types
    in
    let arity = List.map T.kind arg_types |> Flambda_arity.create in
    let action = Apply_cont.update_args action ~args in
    let dacc =
      DA.map_flow_acc dacc
        ~f:
          (Flow.Acc.add_apply_cont_args ~rewrite_id
             (Apply_cont.continuation action)
             args)
    in
    let arms =
      Targetint_31_63.Map.add arm (action, rewrite_id, arity, env_at_use) arms
    in
    arms, dacc
(* Downward pass for a [Switch]: simplify the scrutinee, simplify each arm
   with [simplify_arm] (dropping impossible ones), record the scrutinee as
   used when more than one arm survives, and continue with [rebuild_switch]
   scheduled for the upward pass. *)
let simplify_switch0 dacc switch ~down_to_up =
  let scrutinee = Switch.scrutinee switch in
  let scrutinee_ty =
    S.simplify_simple dacc scrutinee ~min_name_mode:NM.normal
  in
  let scrutinee = T.get_alias_exn scrutinee_ty in
  let dacc_before_switch = dacc in
  let typing_env_at_use = DA.typing_env dacc in
  let arms, dacc =
    Targetint_31_63.Map.fold
      (simplify_arm ~typing_env_at_use ~scrutinee_ty)
      (Switch.arms switch)
      (Targetint_31_63.Map.empty, dacc)
  in
  let dacc =
    (* With at most one arm, no branch on the scrutinee will remain. *)
    if Targetint_31_63.Map.cardinal arms <= 1
    then dacc
    else
      DA.map_flow_acc dacc
        ~f:(Flow.Acc.add_used_in_current_handler (Simple.free_names scrutinee))
  in
  let condition_dbg =
    DE.add_inlined_debuginfo (DA.denv dacc) (Switch.condition_dbg switch)
  in
  down_to_up dacc
    ~rebuild:
      (rebuild_switch ~arms ~condition_dbg ~scrutinee ~scrutinee_ty
         ~dacc_before_switch)
(* Entry point for simplifying a [Switch].  Wraps the switch in a [Let]
   binding "tagged_scrutinee" to [Tag_immediate scrutinee] and simplifies
   that let, so the tagged form of the scrutinee is available via CSE when
   [rebuild_switch] later tries the identity/boolean-NOT rewrites.  The
   provided [~simplify_expr] ignores the let's body and simplifies the
   original switch via [simplify_switch0]. *)
let simplify_switch ~simplify_let ~simplify_function_body dacc switch
    ~down_to_up =
  let tagged_scrutinee = Variable.create "tagged_scrutinee" in
  let tagging_prim =
    Named.create_prim
      (Unary (Tag_immediate, Switch.scrutinee switch))
      Debuginfo.none
  in
  let let_expr =
    (* [body] won't be looked at (see below). *)
    Let.create
      (Bound_pattern.singleton (Bound_var.create tagged_scrutinee NM.normal))
      tagging_prim
      ~body:(Expr.create_switch switch)
      ~free_names_of_body:Unknown
  in
  let dacc =
    DA.map_flow_acc dacc
      ~f:
        (Flow.Acc.add_used_in_current_handler
           (NO.singleton_variable tagged_scrutinee NM.normal))
  in
  simplify_let
    ~simplify_expr:(fun dacc _body ~down_to_up ->
      simplify_switch0 dacc switch ~down_to_up)
    ~simplify_function_body dacc let_expr ~down_to_up
| null | https://raw.githubusercontent.com/ocaml-flambda/flambda-backend/30c53039a2256b46a77527493419067271bec0a8/middle_end/flambda2/simplify/simplify_switch_expr.ml | ocaml | ************************************************************************
OCaml
Copyright 2013--2020 OCamlPro SAS
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
We use find alias to find a common simple to different
simples.
This simple is already guaranteed to be the cannonical alias.
* If there is a common alias between variables, the
cannonical alias must also be a common alias.
* For constants and symbols there can be a common alias that
is not cannonical: A variable can have different constant
values in different branches: this variable is not the
cannonical alias, the cannonical would be the constant or
the symbol. But the only common alias could be a variable
in that case.
hence there is no loss of generality in returning the
cannonical alias as the single alias if it is a variable.
Note that the main reason for this is to allow changing the
arguments of continuations to variables that where not in
scope during the downward traversal. In particular for the
alias rewriting provided by data_flow
The destination is unreachable; delete the [Switch] arm.
Check to see if this arm may be merged with others.
Check to see if the arm is of a form that might mean the whole [Switch]
is a boolean NOT.
Constant
In that case, even though some branches were removed by simplify we
should not count them in the number of removed operations: these
branches wouldn't have been taken during execution anyway.
[body] won't be looked at (see below). | , OCamlPro
and ,
Copyright 2014 - -2020 Jane Street Group LLC
the GNU Lesser General Public License version 2.1 , with the
open! Simplify_import
module TE = Flambda2_types.Typing_env
module Alias_set = TE.Alias_set
[@@@ocaml.warning "-37"]
type mergeable_arms =
| No_arms
| Mergeable of
{ cont : Continuation.t;
args : Alias_set.t list
}
| Not_mergeable
let find_all_aliases env arg =
let find_all_aliases () =
TE.aliases_of_simple env ~min_name_mode:NM.normal arg
in
Simple.pattern_match'
~var:(fun _var ~coercion:_ ->
TE.Alias_set.singleton arg)
~symbol:(fun _sym ~coercion:_ -> find_all_aliases ())
~const:(fun _cst -> find_all_aliases ())
arg
(* Rebuild a single arm of a [Switch]; used as the fold function over the
   arms map in [rebuild_switch].  [arm] is the discriminant and the
   [(action, use_id, arity, env_at_use)] tuple comes from [simplify_arm].
   The accumulator carries:
   - [new_let_conts]: wrapper continuations created during rewriting;
   - [arms]: the rebuilt arms map;
   - [mergeable_arms]: whether all arms so far target one continuation;
   - [identity_arms]: arms whose action passes the arm's own discriminant
     as its single argument;
   - [not_arms]: boolean arms whose action passes the negated discriminant. *)
let rebuild_arm uacc arm (action, use_id, arity, env_at_use)
    ( new_let_conts,
      arms,
      (mergeable_arms : mergeable_arms),
      identity_arms,
      not_arms ) =
  let action =
    Simplify_common.clear_demoted_trap_action_and_patch_unused_exn_bucket uacc
      action
  in
  match
    EB.rewrite_switch_arm uacc action ~use_id
      (Flambda_arity.With_subkinds.of_arity arity)
  with
  | Apply_cont action -> (
    let action =
      let cont = Apply_cont.continuation action in
      let cont_info_from_uenv = UE.find_continuation (UA.uenv uacc) cont in
      (* First try to absorb any [Apply_cont] expression that forms the
         entirety of the arm's action (via an intermediate zero-arity
         continuation without trap action) into the [Switch] expression
         itself. *)
      match cont_info_from_uenv with
      | Invalid _ -> None
      | Linearly_used_and_inlinable _ | Non_inlinable_zero_arity _
      | Non_inlinable_non_zero_arity _
      | Toplevel_or_function_return_or_exn_continuation _ -> (
        if not (Apply_cont.is_goto action)
        then Some action
        else
          (* If the zero-arity handler is itself just an [Apply_cont], jump
             straight to that (note the shadowing of [action] in the first
             branch); otherwise keep the original action. *)
          let check_handler ~handler ~action =
            match RE.to_apply_cont handler with
            | Some action -> Some action
            | None -> Some action
          in
          match cont_info_from_uenv with
          | Linearly_used_and_inlinable
              { handler;
                free_names_of_handler = _;
                params;
                cost_metrics_of_handler = _
              } ->
            (* [is_goto] guarantees there are no arguments. *)
            assert (Bound_parameters.is_empty params);
            check_handler ~handler ~action
          | Non_inlinable_zero_arity { handler = Known handler } ->
            check_handler ~handler ~action
          | Non_inlinable_zero_arity { handler = Unknown } -> Some action
          | Invalid _ -> None
          | Non_inlinable_non_zero_arity _
          | Toplevel_or_function_return_or_exn_continuation _ ->
            Misc.fatal_errorf
              "Inconsistency for %a between [Apply_cont.is_goto] and \
               continuation environment in [UA]:@ %a"
              Continuation.print cont UA.print uacc)
    in
    match action with
    | None ->
      (* The arm's continuation is invalid: drop the arm entirely. *)
      new_let_conts, arms, mergeable_arms, identity_arms, not_arms
    | Some action -> (
      (* CR mshinwell: Fix alias handling so that identity switches like
         those in id_switch.ml can be simplified by only using
         [mergeable_arms].  Then remove [identity_arms]. *)
      (* Record the arm and update the mergeability state.  An arm with a
         trap action can never be merged. *)
      let maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms =
        let arms = Targetint_31_63.Map.add arm action arms in
        if Option.is_some (Apply_cont.trap_action action)
        then new_let_conts, arms, Not_mergeable, identity_arms, not_arms
        else
          match mergeable_arms with
          | Not_mergeable ->
            new_let_conts, arms, Not_mergeable, identity_arms, not_arms
          | No_arms ->
            (* First arm: its continuation and argument aliases become the
               candidates for merging. *)
            let cont = Apply_cont.continuation action in
            let args =
              List.map
                (fun arg -> find_all_aliases env_at_use arg)
                (Apply_cont.args action)
            in
            ( new_let_conts,
              arms,
              Mergeable { cont; args },
              identity_arms,
              not_arms )
          | Mergeable { cont; args } ->
            if not (Continuation.equal cont (Apply_cont.continuation action))
            then new_let_conts, arms, Not_mergeable, identity_arms, not_arms
            else
              (* Same destination: intersect the alias sets per argument
                 position. *)
              let args =
                List.map2
                  (fun arg_set arg ->
                    Alias_set.inter (find_all_aliases env_at_use arg) arg_set)
                  args (Apply_cont.args action)
              in
              ( new_let_conts,
                arms,
                Mergeable { cont; args },
                identity_arms,
                not_arms )
      in
      (* Detect identity and boolean-NOT arms, which require a single
         (trap-free) constant argument equal to the discriminant or to its
         boolean negation. *)
      match Apply_cont.to_one_arg_without_trap_action action with
      | None -> maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms
      | Some arg ->
        let[@inline always] const arg =
          match Reg_width_const.descr arg with
          | Tagged_immediate arg ->
            if Targetint_31_63.equal arm arg
            then
              (* Arm [n] jumps with argument [n]: an identity arm. *)
              let identity_arms =
                Targetint_31_63.Map.add arm action identity_arms
              in
              maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms
            else if Targetint_31_63.equal arm Targetint_31_63.bool_true
                    && Targetint_31_63.equal arg Targetint_31_63.bool_false
                    || Targetint_31_63.equal arm Targetint_31_63.bool_false
                       && Targetint_31_63.equal arg Targetint_31_63.bool_true
            then
              (* Arm [true] jumps with [false] or vice versa: a NOT arm. *)
              let not_arms = Targetint_31_63.Map.add arm action not_arms in
              maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms
            else maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms
          | Naked_immediate _ | Naked_float _ | Naked_int32 _ | Naked_int64 _
          | Naked_nativeint _ ->
            maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms
        in
        Simple.pattern_match arg ~const ~name:(fun _ ~coercion:_ ->
            maybe_mergeable ~mergeable_arms ~identity_arms ~not_arms)))
  | New_wrapper new_let_cont ->
    (* The rewrite introduced a wrapper continuation: the arm becomes a
       direct goto to it, and the wrapper is bound around the [Switch]. *)
    let new_let_conts = new_let_cont :: new_let_conts in
    let action = Apply_cont.goto new_let_cont.cont in
    let arms = Targetint_31_63.Map.add arm action arms in
    new_let_conts, arms, Not_mergeable, identity_arms, not_arms
(* Restrict [alias_set] to aliases that are still live after simplification
   (constants always survive; names must appear in [required_names]) and
   pick the best remaining one, if any. *)
let filter_and_choose_alias required_names alias_set =
  alias_set
  |> Alias_set.filter ~f:(fun alias ->
         Simple.pattern_match alias
           ~name:(fun name ~coercion:_ -> Name.Set.mem name required_names)
           ~const:(fun _ -> true))
  |> Alias_set.find_best
(* Try to find a [Simple] known (via the CSE environment) to equal the
   result of [prim], restricted to names in [required_names] (i.e. names
   that survive to the rebuilt term).  Returns [None] when [prim] is not
   eligible for CSE, when no CSE equation is recorded, or when no suitable
   alias survives.

   Fix: the original match on [P.Eligible_for_cse.create prim] was
   non-exhaustive — the [None] branch (e.g. constant or otherwise
   CSE-ineligible primitive) was missing, which would raise
   [Match_failure] at runtime (and is rejected by warning 8 as error). *)
let find_cse_simple dacc required_names prim =
  match P.Eligible_for_cse.create prim with
  | None -> None (* Constant folding or irrelevant primitive. *)
  | Some with_fixed_value -> (
    match DE.find_cse (DA.denv dacc) with_fixed_value with
    | None -> None
    | Some simple ->
      filter_and_choose_alias required_names
        (find_all_aliases (DA.typing_env dacc) simple))
(* Rebuild a whole [Switch] after its arms have been simplified, applying
   three special-case reductions before falling back to a genuine [Switch]:
   1. all arms jump to the same continuation with compatible arguments
      ([switch_merged]) -> a single [Apply_cont];
   2. every arm returns its own discriminant ([switch_is_identity]) -> jump
      with the (re-tagged) scrutinee;
   3. a two-armed boolean switch where each arm returns the negation of its
      discriminant ([switch_is_boolean_not]) -> a [Boolean_not] primitive. *)
let rebuild_switch ~arms ~condition_dbg ~scrutinee ~scrutinee_ty
    ~dacc_before_switch uacc ~after_rebuild =
  let new_let_conts, arms, mergeable_arms, identity_arms, not_arms =
    Targetint_31_63.Map.fold (rebuild_arm uacc) arms
      ( [],
        Targetint_31_63.Map.empty,
        No_arms,
        Targetint_31_63.Map.empty,
        Targetint_31_63.Map.empty )
  in
  (* Case 1: every arm targets [cont].  Merging is only possible if, for
     each argument position, some common alias survives to the rebuilt
     term (the [compare_length_with] check below). *)
  let switch_merged =
    match mergeable_arms with
    | No_arms | Not_mergeable -> None
    | Mergeable { cont; args } ->
      let num_args = List.length args in
      let required_names = UA.required_names uacc in
      let args =
        List.filter_map (filter_and_choose_alias required_names) args
      in
      if List.compare_length_with args num_args = 0
      then Some (cont, args)
      else None
  in
  (* Case 2: every arm is an identity arm, and they all target the same
     continuation. *)
  let switch_is_identity =
    let arm_discrs = Targetint_31_63.Map.keys arms in
    let identity_arms_discrs = Targetint_31_63.Map.keys identity_arms in
    if not (Targetint_31_63.Set.equal arm_discrs identity_arms_discrs)
    then None
    else
      Targetint_31_63.Map.data identity_arms
      |> List.map Apply_cont.continuation
      |> Continuation.Set.of_list |> Continuation.Set.get_singleton
  in
  (* Case 3: check whether the switch is of a form that means the whole
     [Switch] is a boolean NOT: exactly the two boolean discriminants,
     every arm a NOT arm, all targeting one continuation. *)
  let switch_is_boolean_not =
    let arm_discrs = Targetint_31_63.Map.keys arms in
    let not_arms_discrs = Targetint_31_63.Map.keys not_arms in
    if (not (Targetint_31_63.Set.equal arm_discrs Targetint_31_63.all_bools))
       || not (Targetint_31_63.Set.equal arm_discrs not_arms_discrs)
    then None
    else
      Targetint_31_63.Map.data not_arms
      |> List.map Apply_cont.continuation
      |> Continuation.Set.of_list |> Continuation.Set.get_singleton
  in
  let body, uacc =
    if Targetint_31_63.Map.cardinal arms < 1
    then
      (* All arms were removed: the switch is unreachable.  (Branches
         dropped here wouldn't have been taken at runtime anyway, so only
         one removed branch is recorded.) *)
      let uacc = UA.notify_removed ~operation:Removed_operations.branch uacc in
      RE.create_invalid Zero_switch_arms, uacc
    else
      let dbg = Debuginfo.none in
      (* Fallback: rebuild an actual [Switch] expression. *)
      let[@inline] normal_case uacc =
        let expr, uacc =
          EB.create_switch uacc ~condition_dbg ~scrutinee ~arms
        in
        if Flambda_features.check_invariants ()
           && Simple.is_const scrutinee
           && Targetint_31_63.Map.cardinal arms > 1
        then
          Misc.fatal_errorf
            "[Switch] with constant scrutinee (type: %a) should have been \
             simplified away:@ %a"
            T.print scrutinee_ty
            (RE.print (UA.are_rebuilding_terms uacc))
            expr;
        expr, uacc
      in
      match switch_merged with
      | Some (dest, args) ->
        let uacc =
          UA.notify_removed ~operation:Removed_operations.branch uacc
        in
        let apply_cont = Apply_cont.create dest ~args ~dbg in
        let expr = RE.create_apply_cont apply_cont in
        let uacc = UA.add_free_names uacc (Apply_cont.free_names apply_cont) in
        expr, uacc
      | None -> (
        match switch_is_identity with
        | Some dest -> (
          let uacc =
            UA.notify_removed ~operation:Removed_operations.branch uacc
          in
          (* The destination expects the tagged value; recover it via CSE
             on [Tag_immediate] (see [simplify_switch], which binds it
             around the switch precisely for this purpose). *)
          let tagging_prim : P.t = Unary (Tag_immediate, scrutinee) in
          match
            find_cse_simple dacc_before_switch (UA.required_names uacc)
              tagging_prim
          with
          | None -> normal_case uacc
          | Some tagged_scrutinee ->
            let apply_cont =
              Apply_cont.create dest ~args:[tagged_scrutinee] ~dbg
            in
            let expr = RE.create_apply_cont apply_cont in
            let uacc =
              UA.add_free_names uacc (Apply_cont.free_names apply_cont)
            in
            expr, uacc)
        | None -> (
          match switch_is_boolean_not with
          | Some dest -> (
            let uacc =
              UA.notify_removed ~operation:Removed_operations.branch uacc
            in
            let not_scrutinee = Variable.create "not_scrutinee" in
            let not_scrutinee' = Simple.var not_scrutinee in
            let tagging_prim : P.t = Unary (Tag_immediate, scrutinee) in
            match
              find_cse_simple dacc_before_switch (UA.required_names uacc)
                tagging_prim
            with
            | None -> normal_case uacc
            | Some tagged_scrutinee ->
              (* let not_scrutinee = Boolean_not tagged_scrutinee in
                 goto dest (not_scrutinee) *)
              let do_tagging =
                Named.create_prim
                  (P.Unary (Boolean_not, tagged_scrutinee))
                  Debuginfo.none
              in
              let bound =
                VB.create not_scrutinee NM.normal |> Bound_pattern.singleton
              in
              let apply_cont =
                Apply_cont.create dest ~args:[not_scrutinee'] ~dbg
              in
              let body = RE.create_apply_cont apply_cont in
              let free_names_of_body = Apply_cont.free_names apply_cont in
              let expr =
                RE.create_let
                  (UA.are_rebuilding_terms uacc)
                  bound do_tagging ~body ~free_names_of_body
              in
              (* [not_scrutinee] is bound here, so it is excluded from the
                 free names propagated upwards. *)
              let uacc =
                UA.add_free_names uacc
                  (NO.union
                     (Named.free_names do_tagging)
                     (NO.diff free_names_of_body
                        ~without:(NO.singleton_variable not_scrutinee NM.normal)))
              in
              expr, uacc)
          | None -> normal_case uacc))
  in
  (* Bind any wrapper continuations created by [rebuild_arm] around the
     rebuilt body. *)
  let uacc, expr = EB.bind_let_conts uacc ~body new_let_conts in
  after_rebuild expr uacc
(* Simplify one arm of a [Switch] (fold function over the original arms).
   Refines the typing environment with the knowledge that the scrutinee
   equals [arm], simplifies the arm's [Apply_cont] arguments in that
   environment, records the continuation use, and accumulates the result
   into [arms].  Arms whose discriminant is incompatible with the
   scrutinee's type ([Bottom] meet) are dropped as unreachable. *)
let simplify_arm ~typing_env_at_use ~scrutinee_ty arm action (arms, dacc) =
  let shape = T.this_naked_immediate arm in
  match T.meet typing_env_at_use scrutinee_ty shape with
  | Bottom -> arms, dacc (* Unreachable arm: discard it. *)
  | Ok (_meet_ty, env_extension) ->
    (* Environment specialised to "scrutinee = arm" for this branch. *)
    let env_at_use = TE.add_env_extension typing_env_at_use env_extension in
    let denv_at_use = DE.with_typing_env (DA.denv dacc) env_at_use in
    let args = AC.args action in
    let use_kind =
      Simplify_common.apply_cont_use_kind ~context:Switch_branch action
    in
    let { S.simples = args; simple_tys = arg_types } =
      S.simplify_simples dacc args
    in
    let dacc, rewrite_id =
      DA.record_continuation_use dacc (AC.continuation action) use_kind
        ~env_at_use:denv_at_use ~arg_types
    in
    let arity = List.map T.kind arg_types |> Flambda_arity.create in
    let action = Apply_cont.update_args action ~args in
    (* Record the (simplified) arguments for the data-flow analysis. *)
    let dacc =
      DA.map_flow_acc dacc
        ~f:
          (Flow.Acc.add_apply_cont_args ~rewrite_id
             (Apply_cont.continuation action)
             args)
    in
    let arms =
      Targetint_31_63.Map.add arm (action, rewrite_id, arity, env_at_use) arms
    in
    arms, dacc
(* Downward pass over a [Switch]: simplify the scrutinee, simplify every
   arm via [simplify_arm], then schedule [rebuild_switch] for the upward
   pass. *)
let simplify_switch0 dacc switch ~down_to_up =
  let scrutinee = Switch.scrutinee switch in
  let scrutinee_ty =
    S.simplify_simple dacc scrutinee ~min_name_mode:NM.normal
  in
  (* Replace the scrutinee by its canonical alias. *)
  let scrutinee = T.get_alias_exn scrutinee_ty in
  (* Kept so that [rebuild_switch] can query CSE in the state that existed
     before the arms were simplified. *)
  let dacc_before_switch = dacc in
  let typing_env_at_use = DA.typing_env dacc in
  let arms, dacc =
    Targetint_31_63.Map.fold
      (simplify_arm ~typing_env_at_use ~scrutinee_ty)
      (Switch.arms switch)
      (Targetint_31_63.Map.empty, dacc)
  in
  (* With at most one arm left, the switch will disappear and the scrutinee
     is not a real use. *)
  let dacc =
    if Targetint_31_63.Map.cardinal arms <= 1
    then dacc
    else
      DA.map_flow_acc dacc
        ~f:(Flow.Acc.add_used_in_current_handler (Simple.free_names scrutinee))
  in
  let condition_dbg =
    DE.add_inlined_debuginfo (DA.denv dacc) (Switch.condition_dbg switch)
  in
  down_to_up dacc
    ~rebuild:
      (rebuild_switch ~arms ~condition_dbg ~scrutinee ~scrutinee_ty
         ~dacc_before_switch)
(* Entry point for simplifying a [Switch].  The switch is wrapped in
   [let tagged_scrutinee = Tag_immediate scrutinee in ...] before
   simplification, so that a CSE equation for [Tag_immediate] is available
   when [rebuild_switch] tries the identity / boolean-not reductions (see
   [find_cse_simple] there).  The wrapping [Let] is then simplified with
   the actual switch simplification ([simplify_switch0]) substituted for
   the body. *)
let simplify_switch ~simplify_let ~simplify_function_body dacc switch
    ~down_to_up =
  let tagged_scrutinee = Variable.create "tagged_scrutinee" in
  let tagging_prim =
    Named.create_prim
      (Unary (Tag_immediate, Switch.scrutinee switch))
      Debuginfo.none
  in
  let let_expr =
    Let.create
      (Bound_pattern.singleton (Bound_var.create tagged_scrutinee NM.normal))
      tagging_prim
      ~body:(Expr.create_switch switch)
      ~free_names_of_body:Unknown
  in
  (* Keep [tagged_scrutinee] alive through the data-flow analysis even
     though no syntactic use exists yet. *)
  let dacc =
    DA.map_flow_acc dacc
      ~f:
        (Flow.Acc.add_used_in_current_handler
           (NO.singleton_variable tagged_scrutinee NM.normal))
  in
  simplify_let
    ~simplify_expr:(fun dacc _body ~down_to_up ->
      simplify_switch0 dacc switch ~down_to_up)
    ~simplify_function_body dacc let_expr ~down_to_up
|
74ab81de0978102202045d54bda88694b6818df467cb05ab494484b9c940c753 | alexandergunnarson/quantum | core.cljc | (ns quantum.security.core
(:require
#?@(:clj [[less.awful.ssl :as las]]))
#?(:clj
(:import java.security.KeyStore
[javax.net.ssl TrustManagerFactory KeyManagerFactory]
[io.netty.handler.ssl SslProvider SslContextBuilder])))
;; Disabling developer tools (e.g. console) doesn't matter. This doesn't protect against hackers.
#?(:clj
(defn trust-manager-factory
  "An X.509 trust manager factory for a KeyStore.
   Uses the PKIX algorithm from the SunJSSE provider."
  [^KeyStore key-store]
  (let [factory (TrustManagerFactory/getInstance "PKIX" "SunJSSE")]
    ;; I'm concerned that getInstance might return the *same* factory each
    ;; time, so we'll defensively lock before mutating here:
    (locking factory
      (doto factory (.init key-store))))))
#?(:clj
(defn key-manager-factory
  "An X.509 key manager factory for a KeyStore.
   The single-arity form uses less-awful-ssl's default keystore password."
  ([key-store password]
    (let [factory (KeyManagerFactory/getInstance "SunX509" "SunJSSE")]
      ;; Same defensive locking as `trust-manager-factory`: guard against
      ;; `getInstance` handing back a shared instance before mutating it.
      (locking factory
        (doto factory (.init key-store password)))))
  ([key-store]
    (key-manager-factory key-store las/key-store-password))))
#?(:clj
(defn ssl-context-generator:netty
  "Returns a function that yields SSL contexts. Takes a PKCS8 key file, a
   certificate file, and a trusted CA certificate used to verify peers.
   The key/trust manager factories are created once and shared; a fresh
   Netty SslContext is built on each call of the returned function."
  [key-file cert-file ca-cert-file]
  (let [key-manager-factory   (key-manager-factory (las/key-store key-file cert-file))
        trust-manager-factory (trust-manager-factory (las/trust-store ca-cert-file))]
    (fn build-context []
      (.build
        (doto (SslContextBuilder/forServer key-manager-factory)
          (.sslProvider SslProvider/JDK)
          (.trustManager ^TrustManagerFactory trust-manager-factory)
          ;; NOTE(review): the key manager is already supplied via
          ;; `forServer` above; this discarded form is kept for reference.
          #_(.keyManager key-manager-factory)))))))
#?(:clj
(defn ssl-context:netty
  "Given a PKCS8 key file, a certificate file, and a trusted CA certificate
   used to verify peers, returns a Netty SSLContext."
  [key-file cert-file ca-cert-file]
  ;; Build a one-shot generator and invoke it immediately.
  (let [build-context (ssl-context-generator:netty key-file cert-file ca-cert-file)]
    (build-context))))
#?(:clj
(defn ssl-context
  "Builds an SSL context of the requested flavor from the given key,
   certificate, and CA certificate files: `:std` delegates to
   less-awful-ssl; `:netty` builds a Netty-specific context.
   Any other `type` throws an IllegalArgumentException."
  [type key-file cert-file ca-cert-file]
  (case type
    :std   (las/ssl-context key-file cert-file ca-cert-file)
    :netty (ssl-context:netty key-file cert-file ca-cert-file))))
| null | https://raw.githubusercontent.com/alexandergunnarson/quantum/0c655af439734709566110949f9f2f482e468509/src/quantum/security/core.cljc | clojure | Disabling developer tools (e.g. console) doesn't matter. This doesn't protect against hackers.
so we'll defensively lock before mutating here: | (ns quantum.security.core
(:require
#?@(:clj [[less.awful.ssl :as las]]))
#?(:clj
(:import java.security.KeyStore
[javax.net.ssl TrustManagerFactory KeyManagerFactory]
[io.netty.handler.ssl SslProvider SslContextBuilder])))
#?(:clj
(defn trust-manager-factory
"An X.509 trust manager factory for a KeyStore."
[^KeyStore key-store]
(let [factory (TrustManagerFactory/getInstance "PKIX" "SunJSSE")]
I 'm concerned that getInstance might return the * same * factory each time ,
(locking factory
(doto factory (.init key-store))))))
#?(:clj
(defn key-manager-factory
"An X.509 key manager factory for a KeyStore."
([key-store password]
(let [factory (KeyManagerFactory/getInstance "SunX509" "SunJSSE")]
(locking factory
(doto factory (.init key-store password)))))
([key-store]
(key-manager-factory key-store las/key-store-password))))
#?(:clj
(defn ssl-context-generator:netty
"Returns a function that yields SSL contexts. Takes a PKCS8 key file, a
certificate file, and a trusted CA certificate used to verify peers."
[key-file cert-file ca-cert-file]
(let [key-manager-factory (key-manager-factory (las/key-store key-file cert-file))
trust-manager-factory (trust-manager-factory (las/trust-store ca-cert-file))]
(fn build-context []
(.build
(doto (SslContextBuilder/forServer key-manager-factory)
(.sslProvider SslProvider/JDK)
(.trustManager ^TrustManagerFactory trust-manager-factory)
#_(.keyManager key-manager-factory)))))))
#?(:clj
(defn ssl-context:netty
"Given a PKCS8 key file, a certificate file, and a trusted CA certificate
used to verify peers, returns a Netty SSLContext."
[key-file cert-file ca-cert-file]
((ssl-context-generator:netty key-file cert-file ca-cert-file))))
#?(:clj
(defn ssl-context [type key-file cert-file ca-cert-file]
(condp = type
:std (las/ssl-context key-file cert-file ca-cert-file)
:netty (ssl-context:netty key-file cert-file ca-cert-file))))
|
e2adff0924fa815d8f9d900b44cd824c779141a3b434c510cccf3f09077db084 | FranklinChen/learn-you-some-erlang | recursive_tests.erl | -module(recursive_tests).
-include_lib("eunit/include/eunit.hrl").
%% those were not in the module, but yeah
%% Factorial: base cases, a composite case, and rejection of negatives.
%% ?_assertEqual is preferred over ?_assert(A == B): on failure it reports
%% both the expected and the actual value (consistent with the other
%% assertEqual-based tests in this module).
fac_test_() ->
    [?_assertEqual(24, recursive:fac(4)),
     ?_assertEqual(1, recursive:fac(0)),
     ?_assertEqual(1, recursive:fac(1)),
     ?_assertError(function_clause, recursive:fac(-1))].

%% The tail-recursive factorial must agree with the body-recursive one.
tail_fac_test_() ->
    [?_assertEqual(recursive:fac(4), recursive:tail_fac(4)),
     ?_assertEqual(recursive:fac(0), recursive:tail_fac(0)),
     ?_assertEqual(recursive:fac(1), recursive:tail_fac(1)),
     ?_assertError(function_clause, recursive:tail_fac(-1))].

%% List length on empty, singleton and longer lists.
len_test_() ->
    [?_assertEqual(1, recursive:len([a])),
     ?_assertEqual(0, recursive:len([])),
     ?_assertEqual(5, recursive:len([1,2,3,4,5]))].

%% The tail-recursive length must agree with the body-recursive one.
tail_len_test_() ->
    [?_assertEqual(recursive:len([a]), recursive:tail_len([a])),
     ?_assertEqual(recursive:len([]), recursive:tail_len([])),
     ?_assertEqual(recursive:len([1,2,3,4,5]), recursive:tail_len([1,2,3,4,5]))].
%% duplicate/2 builds a list of N copies of a term.
%% ?_assertEqual replaces ?_assert(A == B) for better failure reports.
duplicate_test_() ->
    [?_assertEqual([], recursive:duplicate(0,a)),
     ?_assertEqual([a], recursive:duplicate(1,a)),
     ?_assertEqual([a,a,a], recursive:duplicate(3,a))].

%% The tail-recursive duplicate must agree with the body-recursive one.
tail_duplicate_test_() ->
    [?_assertEqual(recursive:duplicate(0,a), recursive:tail_duplicate(0,a)),
     ?_assertEqual(recursive:duplicate(1,a), recursive:tail_duplicate(1,a)),
     ?_assertEqual(recursive:duplicate(3,a), recursive:tail_duplicate(3,a))].

%% reverse/1 on empty, singleton and longer lists.
reverse_test_() ->
    [?_assertEqual([], recursive:reverse([])),
     ?_assertEqual([1], recursive:reverse([1])),
     ?_assertEqual([3,2,1], recursive:reverse([1,2,3]))].

%% The tail-recursive reverse must agree with the body-recursive one.
tail_reverse_test_() ->
    [?_assertEqual(recursive:tail_reverse([]),
                   recursive:reverse([])),
     ?_assertEqual(recursive:tail_reverse([1]),
                   recursive:reverse([1])),
     ?_assertEqual(recursive:tail_reverse([1,2,3]),
                   recursive:reverse([1,2,3]))].
%% sublist/2 takes at most N leading elements (tolerates short lists).
%% ?_assertEqual replaces ?_assert(A == B) for better failure reports.
sublist_test_() ->
    [?_assertEqual([], recursive:sublist([1,2,3],0)),
     ?_assertEqual([1,2], recursive:sublist([1,2,3],2)),
     ?_assertEqual([], recursive:sublist([], 4))].

%% The tail-recursive sublist must agree with the body-recursive one.
tail_sublist_test_() ->
    [?_assertEqual(recursive:tail_sublist([1,2,3],0),
                   recursive:sublist([1,2,3],0)),
     ?_assertEqual(recursive:tail_sublist([1,2,3],2),
                   recursive:sublist([1,2,3],2)),
     ?_assertEqual(recursive:tail_sublist([], 4),
                   recursive:sublist([], 4))].

%% zip/2 requires lists of equal length.
zip_test_() ->
    [?_assertEqual([{a,1},{b,2},{c,3}], recursive:zip([a,b,c],[1,2,3])),
     ?_assertEqual([], recursive:zip([],[])),
     ?_assertError(function_clause, recursive:zip([1],[1,2]))].

%% lenient_zip/2 stops at the shorter list instead of failing.
lenient_zip_test_() ->
    [?_assertEqual([{a,1},{b,2},{c,3}],
                   recursive:lenient_zip([a,b,c],[1,2,3])),
     ?_assertEqual([], recursive:lenient_zip([],[])),
     ?_assertEqual([{a,1}], recursive:lenient_zip([a],[1,2]))].
%% exercises!
%% Tail-recursive zips must agree with their body-recursive counterparts.
tail_zip_test_() ->
    [?_assertEqual(recursive:tail_zip([a,b,c],[1,2,3]),
                   recursive:zip([a,b,c],[1,2,3])),
     ?_assertEqual(recursive:tail_zip([],[]),
                   recursive:zip([],[])),
     ?_assertError(function_clause, recursive:tail_zip([1],[1,2]))].

tail_lenient_zip_test_() ->
    [?_assertEqual(recursive:tail_lenient_zip([a,b,c],[1,2,3]),
                   recursive:lenient_zip([a,b,c],[1,2,3])),
     ?_assertEqual(recursive:tail_lenient_zip([],[]),
                   recursive:lenient_zip([],[])),
     ?_assertEqual(recursive:tail_lenient_zip([a],[1,2]),
                   recursive:lenient_zip([a],[1,2]))].
%% quick, sort!
%% All three quicksort variants: empty, singleton, duplicates, and strings
%% (lists of chars).  ?_assertEqual replaces ?_assert(A == B) for better
%% failure reports.
quicksort_test_() ->
    [?_assertEqual([], recursive:quicksort([])),
     ?_assertEqual([1], recursive:quicksort([1])),
     ?_assertEqual([1,2,2,4,6], recursive:quicksort([4,2,6,2,1])),
     ?_assertEqual(" JRaceeinqqsuu", recursive:quicksort("Jacques Requin"))].

lc_quicksort_test_() ->
    [?_assertEqual([], recursive:lc_quicksort([])),
     ?_assertEqual([1], recursive:lc_quicksort([1])),
     ?_assertEqual([1,2,2,4,6], recursive:lc_quicksort([4,2,6,2,1])),
     ?_assertEqual(" JRaceeinqqsuu", recursive:lc_quicksort("Jacques Requin"))].

bestest_qsort_test_() ->
    [?_assertEqual([], recursive:bestest_qsort([])),
     ?_assertEqual([1], recursive:bestest_qsort([1])),
     ?_assertEqual([1,2,2,4,6], recursive:bestest_qsort([4,2,6,2,1])),
     ?_assertEqual(" JRaceeinqqsuu", recursive:bestest_qsort("Jacques Requin"))].
exercises!
quick, sort!
| -module(recursive_tests).
-include_lib("eunit/include/eunit.hrl").
fac_test_() ->
[?_assert(24 == recursive:fac(4)),
?_assert(1 == recursive:fac(0)),
?_assert(1 == recursive:fac(1)),
?_assertError(function_clause, recursive:fac(-1))].
tail_fac_test_() ->
[?_assert(recursive:fac(4) == recursive:tail_fac(4)),
?_assert(recursive:fac(0) == recursive:tail_fac(0)),
?_assert(recursive:fac(1) == recursive:tail_fac(1)),
?_assertError(function_clause, recursive:tail_fac(-1))].
len_test_() ->
[?_assert(1 == recursive:len([a])),
?_assert(0 == recursive:len([])),
?_assert(5 == recursive:len([1,2,3,4,5]))].
tail_len_test_() ->
[?_assert(recursive:len([a]) == recursive:tail_len([a])),
?_assert(recursive:len([]) == recursive:tail_len([])),
?_assert(recursive:len([1,2,3,4,5]) == recursive:tail_len([1,2,3,4,5]))].
duplicate_test_() ->
[?_assert([] == recursive:duplicate(0,a)),
?_assert([a] == recursive:duplicate(1,a)),
?_assert([a,a,a] == recursive:duplicate(3,a))].
tail_duplicate_test_() ->
[?_assert(recursive:tail_duplicate(0,a) == recursive:duplicate(0,a)),
?_assert(recursive:tail_duplicate(1,a) == recursive:duplicate(1,a)),
?_assert(recursive:tail_duplicate(3,a) == recursive:duplicate(3,a))].
reverse_test_() ->
[?_assert([] == recursive:reverse([])),
?_assert([1] == recursive:reverse([1])),
?_assert([3,2,1] == recursive:reverse([1,2,3]))].
tail_reverse_test_() ->
[?_assertEqual(recursive:tail_reverse([]),
recursive:reverse([])),
?_assertEqual(recursive:tail_reverse([1]),
recursive:reverse([1])),
?_assertEqual(recursive:tail_reverse([1,2,3]),
recursive:reverse([1,2,3]))].
sublist_test_() ->
[?_assert([] == recursive:sublist([1,2,3],0)),
?_assert([1,2] == recursive:sublist([1,2,3],2)),
?_assert([] == recursive:sublist([], 4))].
tail_sublist_test_() ->
[?_assertEqual(recursive:tail_sublist([1,2,3],0),
recursive:sublist([1,2,3],0)),
?_assertEqual(recursive:tail_sublist([1,2,3],2),
recursive:sublist([1,2,3],2)),
?_assertEqual(recursive:tail_sublist([], 4),
recursive:sublist([], 4))].
zip_test_() ->
[?_assert([{a,1},{b,2},{c,3}] == recursive:zip([a,b,c],[1,2,3])),
?_assert([] == recursive:zip([],[])),
?_assertError(function_clause, recursive:zip([1],[1,2]))].
lenient_zip_test_() ->
[?_assertEqual([{a,1},{b,2},{c,3}],
recursive:lenient_zip([a,b,c],[1,2,3])),
?_assert([] == recursive:lenient_zip([],[])),
?_assert([{a,1}] == recursive:lenient_zip([a],[1,2]))].
tail_zip_test_() ->
[?_assertEqual(recursive:tail_zip([a,b,c],[1,2,3]),
recursive:zip([a,b,c],[1,2,3])),
?_assertEqual(recursive:tail_zip([],[]),
recursive:zip([],[])),
?_assertError(function_clause, recursive:tail_zip([1],[1,2]))].
tail_lenient_zip_test_() ->
[?_assertEqual(recursive:tail_lenient_zip([a,b,c],[1,2,3]),
recursive:lenient_zip([a,b,c],[1,2,3])),
?_assertEqual(recursive:tail_lenient_zip([],[]),
recursive:lenient_zip([],[])),
?_assertEqual(recursive:tail_lenient_zip([a],[1,2]),
recursive:lenient_zip([a],[1,2]))].
quicksort_test_() ->
[?_assert([] == recursive:quicksort([])),
?_assert([1] == recursive:quicksort([1])),
?_assert([1,2,2,4,6] == recursive:quicksort([4,2,6,2,1])),
?_assert(" JRaceeinqqsuu" == recursive:quicksort("Jacques Requin"))].
lc_quicksort_test_() ->
[?_assert([] == recursive:lc_quicksort([])),
?_assert([1] == recursive:lc_quicksort([1])),
?_assert([1,2,2,4,6] == recursive:lc_quicksort([4,2,6,2,1])),
?_assert(" JRaceeinqqsuu" == recursive:lc_quicksort("Jacques Requin"))].
bestest_qsort_test_() ->
[?_assert([] == recursive:bestest_qsort([])),
?_assert([1] == recursive:bestest_qsort([1])),
?_assert([1,2,2,4,6] == recursive:bestest_qsort([4,2,6,2,1])),
?_assert(" JRaceeinqqsuu" == recursive:bestest_qsort("Jacques Requin"))].
|
aad80ebecd48a9d82d79db279aeb0aafca14ab49eda1ca72d87b856c5a5a6377 | ku-fpg/haskino | Utils.hs | -------------------------------------------------------------------------------
-- |
-- Module : System.Hardware.Haskino.Utils
-- Based on System.Hardware.Arduino.Utils
Copyright : ( c ) University of Kansas
System . Hardware . Arduino ( c )
-- License : BSD3
-- Stability : experimental
--
Internal utilities
-------------------------------------------------------------------------------
module System.Hardware.Haskino.Utils where
import Data.Bits (shiftL, shiftR, (.&.), (.|.))
import qualified Data.ByteString as B
import Data.Char (isAlphaNum, isAscii, isSpace, chr, ord)
import Data.Int (Int32)
import Data.IORef (newIORef, readIORef, writeIORef)
import Data.List (intercalate)
import Data.Word (Word8, Word16, Word32)
import Data.Serialize (runPut, runGet)
import Data.Serialize.IEEE754 (putFloat32le, getFloat32le)
import Data.Time (getCurrentTime, utctDayTime)
import Numeric (showHex, showIntAtBase)
-- | A simple printer that can keep track of sequence numbers. Used for
--   debugging purposes.  When the flag is 'False' the returned action is a
--   no-op; otherwise each call prints a sequence number and a timestamp
--   (microseconds into the current day) before the message.
mkDebugPrinter :: Bool -> IO (String -> IO ())
mkDebugPrinter False = return (const (return ()))
mkDebugPrinter True  = do
  counter <- newIORef (1 :: Int)
  let printer msg = do
        n <- readIORef counter
        writeIORef counter (n + 1)
        now <- utctDayTime `fmap` getCurrentTime
        let precision = 1000000 :: Integer
            micros    = round (fromIntegral precision * toRational now)
        putStrLn ("[" ++ show n ++ ":" ++ show (micros :: Integer) ++ "] Haskino: " ++ msg)
  return printer
-- | Show a byte in a visible format: printable ASCII alphanumerics and
--   whitespace are shown as the character itself; everything else is shown
--   as two hex digits (zero-padded).
--
--   Fix: the visibility test used @isAlphaNum c && isSpace c@, which is
--   always 'False' (no character is both), so the character branch was
--   unreachable and every byte was printed as hex.  The conjunction is now
--   a disjunction, matching the documented intent.
showByte :: Word8 -> String
showByte i | isVisible = [c]
           | i <= 0xf  = '0' : showHex i ""
           | True      = showHex i ""
  where c = chr $ fromIntegral i
        isVisible = isAscii c && (isAlphaNum c || isSpace c)
-- | Show a list of bytes, comma-separated inside square brackets, each
--   rendered with 'showByte'.
showByteList :: [Word8] -> String
showByteList bs = '[' : intercalate ", " (map showByte bs) ++ "]"
-- | Show a number as a binary value.
--   NOTE(review): 'showIntAtBase' rejects negative input, so this is only
--   meaningful for non-negative numbers — confirm callers respect that.
showBin :: (Integral a, Show a) => a -> String
showBin n = showIntAtBase 2 (head . show) n ""
-- | Turn a lo/hi encoded Arduino string constant into a 'String',
--   decoding one character per byte.
getString :: [Word8] -> String
getString = map (chr . fromIntegral)
-- | Convert a word to its bytes, as would be required by Arduino comms.
--   Note: little-endian order (least-significant byte first), which is
--   Arduino native.
word32ToBytes :: Word32 -> [Word8]
word32ToBytes i =
  [ fromIntegral ((i `shiftR` s) .&. 0xFF) | s <- [0, 8, 16, 24] ]
-- | Inverse conversion for word32ToBytes.
--   Note: little-endian order (first tuple component is the
--   least-significant byte), which is Arduino native.
bytesToWord32 :: (Word8, Word8, Word8, Word8) -> Word32
bytesToWord32 (a, b, c, d) =
  -- Fold from the most-significant end: each step shifts the accumulator
  -- up a byte and ORs in the next (lower) byte.
  foldr (\byte acc -> acc `shiftL` 8 .|. fromIntegral byte) 0 [a, b, c, d]
-- | Assemble four little-endian bytes into a signed 'Int32' (the last
--   tuple component carries the sign bit).
bytesToInt32 :: (Word8, Word8, Word8, Word8) -> Int32
bytesToInt32 (a, b, c, d) =
  foldr (\byte acc -> acc `shiftL` 8 .|. fromIntegral byte) 0 [a, b, c, d]
-- | Convert a word to its bytes, as would be required by Arduino comms.
--   Note: little-endian order (least-significant byte first), which is
--   Arduino native.
word16ToBytes :: Word16 -> [Word8]
word16ToBytes i = [ fromIntegral ((i `shiftR` s) .&. 0xFF) | s <- [0, 8] ]
-- | Inverse conversion for word16ToBytes.
--   Note: little-endian order (first component is the low byte), which is
--   Arduino native.
bytesToWord16 :: (Word8, Word8) -> Word16
bytesToWord16 (lo, hi) = fromIntegral hi `shiftL` 8 .|. fromIntegral lo
-- | Convert a float to its bytes, as would be required by Arduino comms.
--   Note: IEEE-754 single precision, little-endian, which is Arduino
--   native.  (Serialization is delegated to the cereal package.)
floatToBytes :: Float -> [Word8]
floatToBytes = B.unpack . runPut . putFloat32le
-- | Inverse conversion for floatToBytes.
--   Note: Little endian format, which is Arduino native.
--   On a decode failure this deliberately falls back to @0.0@ (best-effort
--   comms decoding) rather than raising; with exactly four input bytes a
--   failure should not occur in practice.
bytesToFloat :: (Word8, Word8, Word8, Word8) -> Float
bytesToFloat (a,b,c,d) = case e of
                           Left _ -> 0.0  -- best-effort: never crash on decode
                           Right f -> f
   where
     bString = B.pack [a,b,c,d]
     e = runGet getFloat32le bString
-- | Encode a string as raw bytes, one byte per character.
--   NOTE(review): code points >= 256 wrap when narrowed to 'Word8' —
--   confirm callers only pass ASCII/Latin-1 text.
stringToBytes :: String -> [Word8]
stringToBytes = map (fromIntegral . ord)
-- | Decode raw bytes into a 'String', one character per byte.
bytesToString :: [Word8] -> String
bytesToString ws = [ chr (fromEnum w) | w <- ws ]
-- | Narrow any 'Enum' value to a 'Word8' via its enumeration index
--   (values above 255 wrap).
toW8 :: Enum a => a -> Word8
toW8 x = fromIntegral (fromEnum x)
| null | https://raw.githubusercontent.com/ku-fpg/haskino/9a0709c92c2da9b9371e292b00fd076e5539eb18/System/Hardware/Haskino/Utils.hs | haskell | -----------------------------------------------------------------------------
|
Module : System.Hardware.Haskino.Utils
Based on System.Hardware.Arduino.Utils
License : BSD3
Stability : experimental
-----------------------------------------------------------------------------
| A simple printer that can keep track of sequence numbers. Used for debugging purposes.
| Show a byte in a visible format.
| Show a list of bytes
| Show a number as a binary value
| Convert a word to it's bytes, as would be required by Arduino comms
| Note: Little endian format, which is Arduino native
| Inverse conversion for word32ToBytes
| Note: Little endian format, which is Arduino native
| Convert a word to it's bytes, as would be required by Arduino comms
| Note: Little endian format, which is Arduino native
| Inverse conversion for word16ToBytes
| Note: Little endian format, which is Arduino native
| Convert a float to it's bytes, as would be required by Arduino comms
| Note: Little endian format, which is Arduino native
| Inverse conversion for floatToBytes
| Note: Little endian format, which is Arduino native | Copyright : ( c ) University of Kansas
System . Hardware . Arduino ( c )
Internal utilities
module System.Hardware.Haskino.Utils where
import Data.Bits (shiftL, shiftR, (.&.), (.|.))
import qualified Data.ByteString as B
import Data.Char (isAlphaNum, isAscii, isSpace, chr, ord)
import Data.Int (Int32)
import Data.IORef (newIORef, readIORef, writeIORef)
import Data.List (intercalate)
import Data.Word (Word8, Word16, Word32)
import Data.Serialize (runPut, runGet)
import Data.Serialize.IEEE754 (putFloat32le, getFloat32le)
import Data.Time (getCurrentTime, utctDayTime)
import Numeric (showHex, showIntAtBase)
mkDebugPrinter :: Bool -> IO (String -> IO ())
mkDebugPrinter False = return (const (return ()))
mkDebugPrinter True = do
cnt <- newIORef (1::Int)
let f s = do i <- readIORef cnt
writeIORef cnt (i+1)
tick <- utctDayTime `fmap` getCurrentTime
let precision = 1000000 :: Integer
micro = round . (fromIntegral precision *) . toRational $ tick
putStrLn $ "[" ++ show i ++ ":" ++ show (micro :: Integer) ++ "] Haskino: " ++ s
return f
showByte :: Word8 -> String
showByte i | isVisible = [c]
| i <= 0xf = '0' : showHex i ""
| True = showHex i ""
where c = chr $ fromIntegral i
isVisible = isAscii c && isAlphaNum c && isSpace c
showByteList :: [Word8] -> String
showByteList bs = "[" ++ intercalate ", " (map showByte bs) ++ "]"
showBin :: (Integral a, Show a) => a -> String
showBin n = showIntAtBase 2 (head . show) n ""
| Turn a lo / hi encoded Arduino string constant into a string
getString :: [Word8] -> String
getString s = map (chr . fromIntegral) s
word32ToBytes :: Word32 -> [Word8]
word32ToBytes i = map fromIntegral [ i .&. 0xFF, (i `shiftR` 8) .&. 0xFF,
(i `shiftR` 16) .&. 0xFF,
(i `shiftR` 24) .&. 0xFF]
bytesToWord32 :: (Word8, Word8, Word8, Word8) -> Word32
bytesToWord32 (a, b, c, d) = fromIntegral d `shiftL` 24 .|.
fromIntegral c `shiftL` 16 .|.
fromIntegral b `shiftL` 8 .|.
fromIntegral a
bytesToInt32 :: (Word8, Word8, Word8, Word8) -> Int32
bytesToInt32 (a, b, c, d) = fromIntegral d `shiftL` 24 .|.
fromIntegral c `shiftL` 16 .|.
fromIntegral b `shiftL` 8 .|.
fromIntegral a
word16ToBytes :: Word16 -> [Word8]
word16ToBytes i = map fromIntegral [ i .&. 0xFF, (i `shiftR` 8) .&. 0xFF ]
bytesToWord16 :: (Word8, Word8) -> Word16
bytesToWord16 (a, b) = fromIntegral a .|. fromIntegral b `shiftL` 8
floatToBytes :: Float -> [Word8]
floatToBytes f = B.unpack $ runPut $ putFloat32le f
bytesToFloat :: (Word8, Word8, Word8, Word8) -> Float
bytesToFloat (a,b,c,d) = case e of
Left _ -> 0.0
Right f -> f
where
bString = B.pack [a,b,c,d]
e = runGet getFloat32le bString
stringToBytes :: String -> [Word8]
stringToBytes s = map (\d -> fromIntegral $ ord d) s
bytesToString :: [Word8] -> String
bytesToString = map (chr . fromEnum)
toW8 :: Enum a => a -> Word8
toW8 = fromIntegral . fromEnum
|
ee1a76253207b0a85aa96c84b7e48b9ad7f2da8933a07879686bf01a16e20a7c | lilactown/helix-todo-mvc | components.cljs | (ns todo-mvc.components
(:require
[helix.core :as hx :refer [$ <>]]
[helix.dom :as d]
[helix.hooks :as hooks]
[todo-mvc.lib :refer [defnc]])
(:require-macros
[todo-mvc.components]))
(defn enter-key? [ev]
(= (.-which ev) 13))
(defn escape-key? [ev]
(= (.-which ev) 27))
(defnc Title
[]
(d/h1 "Todos"))
(defnc AppFooter []
(d/footer
{:class "info"}
(d/p "Double click to edit a todo")
(d/p "Part of " (d/a {:href ""} "TodoMVC"))))
(defnc NewTodo
[{:keys [on-complete]}]
(let [[new-todo set-new-todo] (hooks/use-state "")
on-change #(set-new-todo (.. % -target -value))]
(d/input
{:class "new-todo"
:placeholder "What needs to be done?"
:autoFocus true
:value new-todo
:on-key-down #(when (enter-key? %)
(on-complete new-todo)
(set-new-todo ""))
:on-change on-change})))
(defn init-state [title]
{:editing? false
:title title})
(defmulti todo-actions (fn [state action] (first action)))
(defmethod todo-actions
::start-editing [state _]
(assoc state :editing? true))
(defmethod todo-actions
::stop-editing [state _]
(assoc state :editing? false))
(defmethod todo-actions
::update-title [state [_ new-title]]
(assoc state :title new-title))
(defmethod todo-actions
::reset [state [_ initial-title]]
(init-state initial-title))
(defnc TodoItem
[{:keys [id title completed? on-toggle on-destroy on-update-title]}]
(let [initial-title title
[{:keys [editing?
title]} dispatch] (hooks/use-reducer
todo-actions
initial-title
init-state)
input-ref (hooks/use-ref nil)
focus-input #(when-let [current (.-current input-ref)]
(.focus current))]
(hooks/use-layout-effect
:auto-deps
(when editing?
(focus-input)))
(d/li
{:class (cond
editing? "editing"
completed? "completed")}
(d/input
{:class "edit"
:value title
:on-change #(dispatch [::update-title (.. % -target -value)])
:ref input-ref
:on-key-down #(cond
(and (enter-key? %)
(= (.. % -target -value) "")) (on-destroy id)
(enter-key? %) (do (on-update-title id title)
(dispatch [::stop-editing]))
(escape-key? %) (do (dispatch [::reset initial-title])))
:on-blur #(when editing?
(on-update-title id title)
(dispatch [::stop-editing]))})
(d/div
{:class "view"}
(d/input
{:class "toggle"
:type "checkbox"
:checked completed?
:on-change #(on-toggle id)})
(d/label {:on-double-click #(dispatch [::start-editing])} title)
(d/button
{:class "destroy"
:on-click #(on-destroy id)})))))
| null | https://raw.githubusercontent.com/lilactown/helix-todo-mvc/bd1be0be6388264b70329817348c6cda616b6fdd/src/todo_mvc/components.cljs | clojure | (ns todo-mvc.components
(:require
[helix.core :as hx :refer [$ <>]]
[helix.dom :as d]
[helix.hooks :as hooks]
[todo-mvc.lib :refer [defnc]])
(:require-macros
[todo-mvc.components]))
(defn enter-key? [ev]
(= (.-which ev) 13))
(defn escape-key? [ev]
(= (.-which ev) 27))
(defnc Title
[]
(d/h1 "Todos"))
(defnc AppFooter []
(d/footer
{:class "info"}
(d/p "Double click to edit a todo")
(d/p "Part of " (d/a {:href ""} "TodoMVC"))))
(defnc NewTodo
[{:keys [on-complete]}]
(let [[new-todo set-new-todo] (hooks/use-state "")
on-change #(set-new-todo (.. % -target -value))]
(d/input
{:class "new-todo"
:placeholder "What needs to be done?"
:autoFocus true
:value new-todo
:on-key-down #(when (enter-key? %)
(on-complete new-todo)
(set-new-todo ""))
:on-change on-change})))
(defn init-state [title]
{:editing? false
:title title})
(defmulti todo-actions (fn [state action] (first action)))
(defmethod todo-actions
::start-editing [state _]
(assoc state :editing? true))
(defmethod todo-actions
::stop-editing [state _]
(assoc state :editing? false))
(defmethod todo-actions
::update-title [state [_ new-title]]
(assoc state :title new-title))
(defmethod todo-actions
::reset [state [_ initial-title]]
(init-state initial-title))
(defnc TodoItem
[{:keys [id title completed? on-toggle on-destroy on-update-title]}]
(let [initial-title title
[{:keys [editing?
title]} dispatch] (hooks/use-reducer
todo-actions
initial-title
init-state)
input-ref (hooks/use-ref nil)
focus-input #(when-let [current (.-current input-ref)]
(.focus current))]
(hooks/use-layout-effect
:auto-deps
(when editing?
(focus-input)))
(d/li
{:class (cond
editing? "editing"
completed? "completed")}
(d/input
{:class "edit"
:value title
:on-change #(dispatch [::update-title (.. % -target -value)])
:ref input-ref
:on-key-down #(cond
(and (enter-key? %)
(= (.. % -target -value) "")) (on-destroy id)
(enter-key? %) (do (on-update-title id title)
(dispatch [::stop-editing]))
(escape-key? %) (do (dispatch [::reset initial-title])))
:on-blur #(when editing?
(on-update-title id title)
(dispatch [::stop-editing]))})
(d/div
{:class "view"}
(d/input
{:class "toggle"
:type "checkbox"
:checked completed?
:on-change #(on-toggle id)})
(d/label {:on-double-click #(dispatch [::start-editing])} title)
(d/button
{:class "destroy"
:on-click #(on-destroy id)})))))
| |
77c2486a85c8eacae4851d84ccd8acb1ef1515e6d37cfa106c1a3f2420983c08 | mattmundell/nightshade | cdbuild.lisp | ;;; Build the boot CD core from the boot .assem files.
(in-package "LISP")
(or (fboundp 'genesis)
(load "target:compiler/generic/new-genesis"))
(defparameter lisp-files
`(,@(when (c:backend-featurep :pmax)
'("target:assembly/mips/boot.assem"))
,@(when (c:backend-featurep :sparc)
'("target:assembly/sparc/boot.assem"))
,@(when (c:backend-featurep :rt)
'("target:assembly/rt/boot.assem"))
,@(when (c:backend-featurep :hppa)
'("target:assembly/hppa/boot.assem"))
,@(when (c:backend-featurep :x86)
'("target:assembly/x86/boot.assem"))
,@(when (c:backend-featurep :alpha)
'("target:assembly/alpha/boot.assem"))
,@(when (c:backend-featurep :sgi)
'("target:assembly/mips/boot.assem"))))
(when (boundp '*target-page-size*)
(locally (declare (optimize (inhibit-warnings 3)))
(setf *target-page-size*
(c:backend-page-size c:*backend*))))
(build-cd-boot-image lisp-files)
| null | https://raw.githubusercontent.com/mattmundell/nightshade/7a67f9eac96414355de1463ec251b98237cb4009/src/tools/cdbuild.lisp | lisp | Build the boot CD core from the boot .assem files. |
(in-package "LISP")
(or (fboundp 'genesis)
(load "target:compiler/generic/new-genesis"))
(defparameter lisp-files
`(,@(when (c:backend-featurep :pmax)
'("target:assembly/mips/boot.assem"))
,@(when (c:backend-featurep :sparc)
'("target:assembly/sparc/boot.assem"))
,@(when (c:backend-featurep :rt)
'("target:assembly/rt/boot.assem"))
,@(when (c:backend-featurep :hppa)
'("target:assembly/hppa/boot.assem"))
,@(when (c:backend-featurep :x86)
'("target:assembly/x86/boot.assem"))
,@(when (c:backend-featurep :alpha)
'("target:assembly/alpha/boot.assem"))
,@(when (c:backend-featurep :sgi)
'("target:assembly/mips/boot.assem"))))
(when (boundp '*target-page-size*)
(locally (declare (optimize (inhibit-warnings 3)))
(setf *target-page-size*
(c:backend-page-size c:*backend*))))
(build-cd-boot-image lisp-files)
|
368eaea9115d7dd9b0d357d2c7602b55f85a1014a7497ab1caaef1e5ef56d99c | typedclojure/typedclojure | utils.clj | Copyright ( c ) , Rich Hickey & contributors .
;; The use and distribution terms for this software are covered by the
;; Eclipse Public License 1.0 (-1.0.php)
;; which can be found in the file epl-v10.html at the root of this distribution.
;; By using this software in any fashion, you are agreeing to be bound by
;; the terms of this license.
;; You must not remove this notice, or any other, from this software.
;; based on clojure.tools.reader.impl.utils
(ns ^:no-doc typed.clj.reader.impl.utils
(:refer-clojure :exclude [char reader-conditional tagged-literal]))
(defn char [x]
(when x
(clojure.core/char x)))
(def <=clojure-1-7-alpha5
(let [{:keys [minor qualifier]} *clojure-version*]
(or (< minor 7)
(and (= minor 7)
(= "alpha"
(when qualifier
(subs qualifier 0 (dec (count qualifier)))))
(<= (read-string (subs qualifier (dec (count qualifier))))
5)))))
(defmacro compile-when [cond & then]
(when (eval cond)
`(do ~@then)))
(defn ex-info? [ex]
(instance? clojure.lang.ExceptionInfo ex))
(compile-when <=clojure-1-7-alpha5
(defrecord TaggedLiteral [tag form])
(defn tagged-literal?
"Return true if the value is the data representation of a tagged literal"
[value]
(instance? clojure.tools.reader.impl.utils.TaggedLiteral value))
(defn tagged-literal
"Construct a data representation of a tagged literal from a
tag symbol and a form."
[tag form]
(clojure.tools.reader.impl.utils.TaggedLiteral. tag form))
(ns-unmap *ns* '->TaggedLiteral)
(ns-unmap *ns* 'map->TaggedLiteral)
(defmethod print-method clojure.tools.reader.impl.utils.TaggedLiteral [o ^java.io.Writer w]
(.write w "#")
(print-method (:tag o) w)
(.write w " ")
(print-method (:form o) w))
(defrecord ReaderConditional [splicing? form])
(ns-unmap *ns* '->ReaderConditional)
(ns-unmap *ns* 'map->ReaderConditional)
(defn reader-conditional?
"Return true if the value is the data representation of a reader conditional"
[value]
(instance? clojure.tools.reader.impl.utils.ReaderConditional value))
(defn reader-conditional
"Construct a data representation of a reader conditional.
If true, splicing? indicates read-cond-splicing."
[form splicing?]
(clojure.tools.reader.impl.utils.ReaderConditional. splicing? form))
(defmethod print-method clojure.tools.reader.impl.utils.ReaderConditional [o ^java.io.Writer w]
(.write w "#?")
(when (:splicing? o) (.write w "@"))
(print-method (:form o) w)))
(defn whitespace?
"Checks whether a given character is whitespace"
[ch]
(when ch
(or (Character/isWhitespace ^Character ch)
(identical? \, ch))))
(defn numeric?
"Checks whether a given character is numeric"
[^Character ch]
(when ch
(Character/isDigit ch)))
(defn newline?
"Checks whether the character is a newline"
[c]
(or (identical? \newline c)
(nil? c)))
(defn desugar-meta
"Resolves syntactical sugar in metadata" ;; could be combined with some other desugar?
[f]
(cond
(keyword? f) {f true}
(symbol? f) {:tag f}
(string? f) {:tag f}
:else f))
(defn make-var
"Returns an anonymous unbound Var"
[]
(with-local-vars [x nil] x))
(defn namespace-keys [ns keys]
(for [key keys]
(if (or (symbol? key)
(keyword? key))
(let [[key-ns key-name] ((juxt namespace name) key)
->key (if (symbol? key) symbol keyword)]
(cond
(nil? key-ns)
(->key ns key-name)
(= "_" key-ns)
(->key key-name)
:else
key))
key)))
(defn second' [[a b]]
(when-not a b))
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/45556897356f3c9cbc7b1b6b4df263086a9d5803/typed/clj.reader/src/typed/clj/reader/impl/utils.clj | clojure | The use and distribution terms for this software are covered by the
Eclipse Public License 1.0 (-1.0.php)
which can be found in the file epl-v10.html at the root of this distribution.
By using this software in any fashion, you are agreeing to be bound by
the terms of this license.
You must not remove this notice, or any other, from this software.
based on clojure.tools.reader.impl.utils
could be combined with some other desugar? | Copyright ( c ) , Rich Hickey & contributors .
(ns ^:no-doc typed.clj.reader.impl.utils
(:refer-clojure :exclude [char reader-conditional tagged-literal]))
(defn char [x]
(when x
(clojure.core/char x)))
(def <=clojure-1-7-alpha5
(let [{:keys [minor qualifier]} *clojure-version*]
(or (< minor 7)
(and (= minor 7)
(= "alpha"
(when qualifier
(subs qualifier 0 (dec (count qualifier)))))
(<= (read-string (subs qualifier (dec (count qualifier))))
5)))))
(defmacro compile-when [cond & then]
(when (eval cond)
`(do ~@then)))
(defn ex-info? [ex]
(instance? clojure.lang.ExceptionInfo ex))
(compile-when <=clojure-1-7-alpha5
(defrecord TaggedLiteral [tag form])
(defn tagged-literal?
"Return true if the value is the data representation of a tagged literal"
[value]
(instance? clojure.tools.reader.impl.utils.TaggedLiteral value))
(defn tagged-literal
"Construct a data representation of a tagged literal from a
tag symbol and a form."
[tag form]
(clojure.tools.reader.impl.utils.TaggedLiteral. tag form))
(ns-unmap *ns* '->TaggedLiteral)
(ns-unmap *ns* 'map->TaggedLiteral)
(defmethod print-method clojure.tools.reader.impl.utils.TaggedLiteral [o ^java.io.Writer w]
(.write w "#")
(print-method (:tag o) w)
(.write w " ")
(print-method (:form o) w))
(defrecord ReaderConditional [splicing? form])
(ns-unmap *ns* '->ReaderConditional)
(ns-unmap *ns* 'map->ReaderConditional)
(defn reader-conditional?
"Return true if the value is the data representation of a reader conditional"
[value]
(instance? clojure.tools.reader.impl.utils.ReaderConditional value))
(defn reader-conditional
"Construct a data representation of a reader conditional.
If true, splicing? indicates read-cond-splicing."
[form splicing?]
(clojure.tools.reader.impl.utils.ReaderConditional. splicing? form))
(defmethod print-method clojure.tools.reader.impl.utils.ReaderConditional [o ^java.io.Writer w]
(.write w "#?")
(when (:splicing? o) (.write w "@"))
(print-method (:form o) w)))
(defn whitespace?
"Checks whether a given character is whitespace"
[ch]
(when ch
(or (Character/isWhitespace ^Character ch)
(identical? \, ch))))
(defn numeric?
"Checks whether a given character is numeric"
[^Character ch]
(when ch
(Character/isDigit ch)))
(defn newline?
"Checks whether the character is a newline"
[c]
(or (identical? \newline c)
(nil? c)))
(defn desugar-meta
[f]
(cond
(keyword? f) {f true}
(symbol? f) {:tag f}
(string? f) {:tag f}
:else f))
(defn make-var
"Returns an anonymous unbound Var"
[]
(with-local-vars [x nil] x))
(defn namespace-keys [ns keys]
(for [key keys]
(if (or (symbol? key)
(keyword? key))
(let [[key-ns key-name] ((juxt namespace name) key)
->key (if (symbol? key) symbol keyword)]
(cond
(nil? key-ns)
(->key ns key-name)
(= "_" key-ns)
(->key key-name)
:else
key))
key)))
(defn second' [[a b]]
(when-not a b))
|
bef5347c5f8c1f68223cba33a2117ec508501478c95f65d44fb9736d6bb0c06d | aharisu/Gauche-CV | cv.scm | ;;;
;;; gauche_cv
;;;
MIT License
Copyright 2011 - 2012 aharisu
;;; All rights reserved.
;;;
;;; Permission is hereby granted, free of charge, to any person obtaining a copy
;;; of this software and associated documentation files (the "Software"), to deal
in the Software without restriction , including without limitation the rights
;;; to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software , and to permit persons to whom the Software is
;;; furnished to do so, subject to the following conditions:
;;;
;;; The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software .
;;;
;;;
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
;;; IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
;;; FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
;;; AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
;;; OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
;;; SOFTWARE.
;;;
;;;
;;; aharisu
;;;
;;;
(define-module cv
(extend cv.core cv.highgui cv.imgproc)
)
(select-module cv)
;;
;; Put your Scheme definitions here
;;
| null | https://raw.githubusercontent.com/aharisu/Gauche-CV/5e4c51501431c72270765121ea4d92693f11d60b/lib/cv.scm | scheme |
gauche_cv
All rights reserved.
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
aharisu
Put your Scheme definitions here
| MIT License
Copyright 2011 - 2012 aharisu
in the Software without restriction , including without limitation the rights
copies of the Software , and to permit persons to whom the Software is
copies or substantial portions of the Software .
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING FROM ,
(define-module cv
(extend cv.core cv.highgui cv.imgproc)
)
(select-module cv)
|
030f195f3c6c30b7d8002023c9bc2b58a0a83e1fd53993348395a92ff42284f5 | realworldocaml/examples | compare_poly.ml | let cmp a b =
if a > b then a else b
| null | https://raw.githubusercontent.com/realworldocaml/examples/32ea926861a0b728813a29b0e4cf20dd15eb486e/code/back-end/compare_poly.ml | ocaml | let cmp a b =
if a > b then a else b
| |
66302a63a97756ae268760ae5516bc5da59b6d145f38133b0aa5ccba589a1363 | ghc/packages-Cabal | setup.test.hs | import Test.Cabal.Prelude
-- Test executable depends on internal library.
main = setupAndCabalTest $ setup_build []
| null | https://raw.githubusercontent.com/ghc/packages-Cabal/6f22f2a789fa23edb210a2591d74ea6a5f767872/cabal-testsuite/PackageTests/BuildDeps/InternalLibrary1/setup.test.hs | haskell | Test executable depends on internal library. | import Test.Cabal.Prelude
main = setupAndCabalTest $ setup_build []
|
b7324f295e929c667a8613385ba0c4a20359f91149fc3f6156eef75b3ad1e8a2 | fakedata-haskell/fakedata | Basketball.hs | {-# LANGUAGE OverloadedStrings #-}
# LANGUAGE TemplateHaskell #
module Faker.Provider.Basketball where
import Config
import Control.Monad.Catch
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Vector (Vector)
import Data.Yaml
import Faker
import Faker.Internal
import Faker.Provider.TH
import Language.Haskell.TH
parseBasketball :: FromJSON a => FakerSettings -> Value -> Parser a
parseBasketball settings (Object obj) = do
en <- obj .: (getLocaleKey settings)
faker <- en .: "faker"
basketball <- faker .: "basketball"
pure basketball
parseBasketball settings val = fail $ "expected Object, but got " <> (show val)
parseBasketballField ::
(FromJSON a, Monoid a) => FakerSettings -> AesonKey -> Value -> Parser a
parseBasketballField settings txt val = do
basketball <- parseBasketball settings val
field <- basketball .:? txt .!= mempty
pure field
parseBasketballFields ::
(FromJSON a, Monoid a) => FakerSettings -> [AesonKey] -> Value -> Parser a
parseBasketballFields settings txts val = do
basketball <- parseBasketball settings val
helper basketball txts
where
helper :: (FromJSON a) => Value -> [AesonKey] -> Parser a
helper a [] = parseJSON a
helper (Object a) (x:xs) = do
field <- a .: x
helper field xs
helper a (x:xs) = fail $ "expect Object, but got " <> (show a)
$(genParser "basketball" "teams")
$(genProvider "basketball" "teams")
$(genParser "basketball" "players")
$(genProvider "basketball" "players")
$(genParser "basketball" "coaches")
$(genProvider "basketball" "coaches")
$(genParser "basketball" "positions")
$(genProvider "basketball" "positions")
| null | https://raw.githubusercontent.com/fakedata-haskell/fakedata/7b0875067386e9bb844c8b985c901c91a58842ff/src/Faker/Provider/Basketball.hs | haskell | # LANGUAGE OverloadedStrings # | # LANGUAGE TemplateHaskell #
module Faker.Provider.Basketball where
import Config
import Control.Monad.Catch
import Data.Monoid ((<>))
import Data.Text (Text)
import Data.Vector (Vector)
import Data.Yaml
import Faker
import Faker.Internal
import Faker.Provider.TH
import Language.Haskell.TH
parseBasketball :: FromJSON a => FakerSettings -> Value -> Parser a
parseBasketball settings (Object obj) = do
en <- obj .: (getLocaleKey settings)
faker <- en .: "faker"
basketball <- faker .: "basketball"
pure basketball
parseBasketball settings val = fail $ "expected Object, but got " <> (show val)
parseBasketballField ::
(FromJSON a, Monoid a) => FakerSettings -> AesonKey -> Value -> Parser a
parseBasketballField settings txt val = do
basketball <- parseBasketball settings val
field <- basketball .:? txt .!= mempty
pure field
parseBasketballFields ::
(FromJSON a, Monoid a) => FakerSettings -> [AesonKey] -> Value -> Parser a
parseBasketballFields settings txts val = do
basketball <- parseBasketball settings val
helper basketball txts
where
helper :: (FromJSON a) => Value -> [AesonKey] -> Parser a
helper a [] = parseJSON a
helper (Object a) (x:xs) = do
field <- a .: x
helper field xs
helper a (x:xs) = fail $ "expect Object, but got " <> (show a)
$(genParser "basketball" "teams")
$(genProvider "basketball" "teams")
$(genParser "basketball" "players")
$(genProvider "basketball" "players")
$(genParser "basketball" "coaches")
$(genProvider "basketball" "coaches")
$(genParser "basketball" "positions")
$(genProvider "basketball" "positions")
|
75d29a908224d7f784da38e4b2e3b24a8251dd49ba4b5a0ab81c32821e36bc16 | plaidfinch/ComonadSheet | Z2.hs | # LANGUAGE MultiParamTypeClasses , FlexibleInstances #
module Z2
( module Generic , Z2(..) , wrapZ2
) where
import Generic
import Z1
newtype Z2 c r a = Z2 { fromZ2 :: Z1 r (Z1 c a) }
wrapZ2 :: (Z1 r (Z1 c a) -> Z1 r' (Z1 c' a')) -> Z2 c r a -> Z2 c' r' a'
wrapZ2 = (Z2 .) . (. fromZ2)
instance Functor (Z2 c r) where
fmap = wrapZ2 . fmap . fmap
instance (Ord c, Ord r, Enum c, Enum r) => Applicative (Z2 c r) where
fs <*> xs = Z2 $ fmap (<*>) (fromZ2 fs) <*> (fromZ2 xs)
pure = Z2 . pure . pure
instance (Ord c, Ord r, Enum c, Enum r) => ComonadApply (Z2 c r) where
(<@>) = (<*>)
instance (Ord c, Ord r, Enum c, Enum r) => Comonad (Z2 c r) where
extract = view
duplicate = Z2 . widthWise . heightWise
where widthWise = fmap $ zipIterate zipL zipR <$> col <*> id
heightWise = zipIterate zipU zipD <$> row <*> id
instance (Ord c, Ord r, Enum c, Enum r) => Zipper1 (Z2 c r a) c where
zipL = wrapZ2 $ fmap zipL
zipR = wrapZ2 $ fmap zipR
col = index . view . fromZ2
instance (Ord c, Ord r, Enum c, Enum r) => Zipper2 (Z2 c r a) r where
zipU = wrapZ2 zipL
zipD = wrapZ2 zipR
row = index . fromZ2
instance (Ord c, Enum c, Ord r, Enum r) => RefOf (Ref c,Ref r) (Z2 c r a) [[a]] where
slice (c,r) (c',r') = slice r r' . fmap (slice c c') . fromZ2
insert list z = Z2 $ insert <$> (insert list (pure [])) <*> fromZ2 z
go (colRef,rowRef) = horizontal . vertical
where
horizontal = genericDeref zipL zipR col colRef
vertical = genericDeref zipU zipD row rowRef
instance (Ord c, Enum c, Ord r, Enum r) => AnyZipper (Z2 c r a) (c,r) a where
index = (,) <$> col <*> row
view = view . view . fromZ2
write = wrapZ2 . modify . write
reindex (c,r) = wrapZ2 (fmap (reindex c) . reindex r)
modifyCell :: (Ord c, Enum c, Ord r, Enum r) => (a -> a) -> Z2 c r a -> Z2 c r a
modifyCell f = write <$> f . view <*> id
modifyRow :: (Z1 c a -> Z1 c a) -> Z2 c r a -> Z2 c r a
modifyRow = wrapZ2 . modify
modifyCol :: (Enum r, Ord r) => (Z1 r a -> Z1 r a) -> Z2 c r a -> Z2 c r a
modifyCol f = writeCol <$> f . fmap view . fromZ2 <*> id
writeRow :: Z1 c a -> Z2 c r a -> Z2 c r a
writeRow = wrapZ2 . write
writeCol :: (Ord r, Enum r) => Z1 r a -> Z2 c r a -> Z2 c r a
writeCol c plane = Z2 $ write <$> c <*> fromZ2 plane
insertRowU, insertRowD :: Z1 c a -> Z2 c r a -> Z2 c r a
insertRowU = wrapZ2 . insertL
insertRowD = wrapZ2 . insertR
deleteRowD, deleteRowU :: Z2 c r a -> Z2 c r a
deleteRowD = wrapZ2 deleteR
deleteRowU = wrapZ2 deleteL
insertColL, insertColR :: (Ord r, Enum r) => Z1 r a -> Z2 c r a -> Z2 c r a
insertColL c plane = Z2 $ insertL <$> c <*> fromZ2 plane
insertColR c plane = Z2 $ insertR <$> c <*> fromZ2 plane
deleteColL, deleteColR :: (Ord r, Enum r) => Z2 c r a -> Z2 c r a
deleteColL = wrapZ2 $ fmap deleteL
deleteColR = wrapZ2 $ fmap deleteR
insertCellD, insertCellU :: (Ord r, Enum r) => a -> Z2 c r a -> Z2 c r a
insertCellD = modifyCol . insertR
insertCellU = modifyCol . insertL
insertCellR, insertCellL :: (Ord c, Enum c) => a -> Z2 c r a -> Z2 c r a
insertCellR = modifyRow . insertR
insertCellL = modifyRow . insertL
deleteCellD, deleteCellU :: (Ord r, Enum r) => Z2 c r a -> Z2 c r a
deleteCellD = modifyCol deleteR
deleteCellU = modifyCol deleteL
deleteCellR, deleteCellL :: (Ord c, Enum c) => Z2 c r a -> Z2 c r a
deleteCellR = modifyRow deleteR
deleteCellL = modifyRow deleteL
--TODO...
insertCellsR , insertCellsL , insertCellsU , insertCellsD
--insertColsR, insertColsL, insertRowsU, insertRowsD
| null | https://raw.githubusercontent.com/plaidfinch/ComonadSheet/1cc9a91dc311bc1c692df4faaea091238b7871c2/Old/Z2.hs | haskell | TODO...
insertColsR, insertColsL, insertRowsU, insertRowsD | # LANGUAGE MultiParamTypeClasses , FlexibleInstances #
module Z2
( module Generic , Z2(..) , wrapZ2
) where
import Generic
import Z1
newtype Z2 c r a = Z2 { fromZ2 :: Z1 r (Z1 c a) }
wrapZ2 :: (Z1 r (Z1 c a) -> Z1 r' (Z1 c' a')) -> Z2 c r a -> Z2 c' r' a'
wrapZ2 = (Z2 .) . (. fromZ2)
instance Functor (Z2 c r) where
fmap = wrapZ2 . fmap . fmap
instance (Ord c, Ord r, Enum c, Enum r) => Applicative (Z2 c r) where
fs <*> xs = Z2 $ fmap (<*>) (fromZ2 fs) <*> (fromZ2 xs)
pure = Z2 . pure . pure
instance (Ord c, Ord r, Enum c, Enum r) => ComonadApply (Z2 c r) where
(<@>) = (<*>)
instance (Ord c, Ord r, Enum c, Enum r) => Comonad (Z2 c r) where
extract = view
duplicate = Z2 . widthWise . heightWise
where widthWise = fmap $ zipIterate zipL zipR <$> col <*> id
heightWise = zipIterate zipU zipD <$> row <*> id
instance (Ord c, Ord r, Enum c, Enum r) => Zipper1 (Z2 c r a) c where
zipL = wrapZ2 $ fmap zipL
zipR = wrapZ2 $ fmap zipR
col = index . view . fromZ2
instance (Ord c, Ord r, Enum c, Enum r) => Zipper2 (Z2 c r a) r where
zipU = wrapZ2 zipL
zipD = wrapZ2 zipR
row = index . fromZ2
instance (Ord c, Enum c, Ord r, Enum r) => RefOf (Ref c,Ref r) (Z2 c r a) [[a]] where
slice (c,r) (c',r') = slice r r' . fmap (slice c c') . fromZ2
insert list z = Z2 $ insert <$> (insert list (pure [])) <*> fromZ2 z
go (colRef,rowRef) = horizontal . vertical
where
horizontal = genericDeref zipL zipR col colRef
vertical = genericDeref zipU zipD row rowRef
instance (Ord c, Enum c, Ord r, Enum r) => AnyZipper (Z2 c r a) (c,r) a where
index = (,) <$> col <*> row
view = view . view . fromZ2
write = wrapZ2 . modify . write
reindex (c,r) = wrapZ2 (fmap (reindex c) . reindex r)
modifyCell :: (Ord c, Enum c, Ord r, Enum r) => (a -> a) -> Z2 c r a -> Z2 c r a
modifyCell f = write <$> f . view <*> id
modifyRow :: (Z1 c a -> Z1 c a) -> Z2 c r a -> Z2 c r a
modifyRow = wrapZ2 . modify
modifyCol :: (Enum r, Ord r) => (Z1 r a -> Z1 r a) -> Z2 c r a -> Z2 c r a
modifyCol f = writeCol <$> f . fmap view . fromZ2 <*> id
writeRow :: Z1 c a -> Z2 c r a -> Z2 c r a
writeRow = wrapZ2 . write
writeCol :: (Ord r, Enum r) => Z1 r a -> Z2 c r a -> Z2 c r a
writeCol c plane = Z2 $ write <$> c <*> fromZ2 plane
insertRowU, insertRowD :: Z1 c a -> Z2 c r a -> Z2 c r a
insertRowU = wrapZ2 . insertL
insertRowD = wrapZ2 . insertR
deleteRowD, deleteRowU :: Z2 c r a -> Z2 c r a
deleteRowD = wrapZ2 deleteR
deleteRowU = wrapZ2 deleteL
insertColL, insertColR :: (Ord r, Enum r) => Z1 r a -> Z2 c r a -> Z2 c r a
insertColL c plane = Z2 $ insertL <$> c <*> fromZ2 plane
insertColR c plane = Z2 $ insertR <$> c <*> fromZ2 plane
deleteColL, deleteColR :: (Ord r, Enum r) => Z2 c r a -> Z2 c r a
deleteColL = wrapZ2 $ fmap deleteL
deleteColR = wrapZ2 $ fmap deleteR
insertCellD, insertCellU :: (Ord r, Enum r) => a -> Z2 c r a -> Z2 c r a
insertCellD = modifyCol . insertR
insertCellU = modifyCol . insertL
insertCellR, insertCellL :: (Ord c, Enum c) => a -> Z2 c r a -> Z2 c r a
insertCellR = modifyRow . insertR
insertCellL = modifyRow . insertL
deleteCellD, deleteCellU :: (Ord r, Enum r) => Z2 c r a -> Z2 c r a
deleteCellD = modifyCol deleteR
deleteCellU = modifyCol deleteL
deleteCellR, deleteCellL :: (Ord c, Enum c) => Z2 c r a -> Z2 c r a
deleteCellR = modifyRow deleteR
deleteCellL = modifyRow deleteL
insertCellsR , insertCellsL , insertCellsU , insertCellsD
|
5287dae6ed063b3e83a33bdea8457e3d8f4913c8b99380759ad000270c091d63 | magnusjonsson/tidder-icfpc-2008 | hello-world.scm | #! /usr/bin/env mzscheme
#lang scheme
(display "Hello world!")
(newline)
| null | https://raw.githubusercontent.com/magnusjonsson/tidder-icfpc-2008/84d68fd9ac358c38e7eff99117d9cdfa86c1864a/playground/scheme/hello-world.scm | scheme | #! /usr/bin/env mzscheme
#lang scheme
(display "Hello world!")
(newline)
| |
74d28c7f4cdaa78621514ddbbbeefd28d81137c67ff8d520ebe953e35bbacc1d | dmjio/compiler-in-haskell | PrettyPrinter.hs | module DLC.PrettyPrinter
(prettyPrintTransResult, prettyPrintClassDef, prettyPrintMethodDef)
where
import DLC.TAST
import Data.Char (chr)
import Data.Map
prettyPrintTransResult :: Int -> TransResult -> IO ()
prettyPrintTransResult il (cdMap, mdMap) = do
mapM_ (\cName -> prettyPrintClassDef il (cdMap ! cName)) (keys cdMap)
mapM_ (\mName -> prettyPrintMethodDef il (mdMap ! mName)) (keys mdMap)
prettyPrintClassDef :: Int -> TClassDef -> IO ()
prettyPrintClassDef il (cName, superClassName, caList, cmList) = do
indent il
putStr $ "class " ++ cName
if cName == "Object" then return () else putStr (" extends " ++ superClassName)
putStrLn " {"
mapM_ (ppClassAttrDef (il+1)) caList
mapM_ (ppClassMethodDef (il+1)) cmList
indent il
putStrLn "}"
prettyPrintMethodDef :: Int -> TMethodDef -> IO ()
prettyPrintMethodDef il mDef = do {indent il; ppMethodDef il mDef}
ppClassAttrDef :: Int -> TClassAttrDef -> IO ()
ppClassAttrDef il (acc, isStatic, varDef) = do
indent il
ppClassAccessModifier acc
putStr $ if isStatic then " static " else " "
ppVarDef varDef
putStrLn ";"
ppClassMethodDef :: Int -> TClassMethodDef -> IO ()
ppClassMethodDef il (acc, isStatic, mDef) = do
indent il
ppClassAccessModifier acc
putStr $ if isStatic then " static " else " "
ppMethodDef il mDef
take care ! it does n't indent on the first line ...
ppMethodDef :: Int -> TMethodDef -> IO ()
ppMethodDef il (fName, fType, fArgList, fBody) = do
ppType fType
putStr $ " " ++ fName ++ "("
case fArgList of
[] -> return ()
a:as -> do {ppArg a; mapM_ (\a -> do {putStr ", "; ppArg a}) as}
putStrLn ") {"
ppBodyList (il+1) fBody
indent il
putStrLn "}"
where
ppArg :: (String, TType) -> IO ()
ppArg (aName, aType) = do {ppType aType; putStr $ " " ++ aName}
-- no prefix indent, no suffix ";" and newline
ppVarDef :: TVarDef -> IO ()
ppVarDef (vName, vType, vExpr) = do
ppType vType
putStr $ " " ++ vName ++ " = "
ppExpr vExpr
ppClassAccessModifier :: TClassAccessModifier -> IO ()
ppClassAccessModifier acc =
putStr $ case acc of
TPublic -> "public"
TProtected -> "protected"
TPrivate -> "private"
ppType :: TType -> IO ()
ppType TVoid = putStr "void"
ppType TInt = putStr "int"
ppType TInt32 = putStr "int32"
ppType TByte = putStr "byte"
ppType TBool = putStr "bool"
ppType (TClass s) = putStr s
ppType (TArray aDepth tp) = do {ppType tp; mapM_ putStr $ take aDepth $ repeat "[]"}
report error for TUnknown ?
ppBody :: Int -> TBodyStmt -> IO ()
ppBody il b =
case b of
(TBSStmt st) -> ppStmt il st
(TBSExpr ep) -> do {indent il; ppExpr ep; putStrLn ";"}
ppBodyList :: Int -> [TBodyStmt] -> IO ()
-- ppBodyList _ [] = putStrLn ""
ppBodyList il bList = mapM_ (ppBody il) bList
ppStmt :: Int -> TStmt -> IO ()
ppStmt il st = do
indent il
case st of
(TStmtVarDef vDef) -> do {ppVarDef vDef; putStrLn ";"}
(TStmtPrint e) -> do {putStr "print("; ppExpr e; putStrLn ");"}
(TStmtIf c b1 b2) -> do
putStr "if ("
ppExpr c
putStrLn ") {"
ppBodyList (il+1) b1
indent il
putStrLn "} else {"
ppBodyList (il+1) b2
indent il
putStrLn "}"
(TStmtFor init cond c b) -> do
putStr "for ("
case init of
(Left e) -> ppExpr e
(Right (tp, vList)) -> do
ppType tp
pvlist vList
where
pvlist :: [(String, TExpr)] -> IO ()
pvlist [(v,e)] = do {(putStr $ " " ++ v ++ " = "); ppExpr e}
pvlist (a:as) = do {pvlist [a];
mapM_ (\a -> do {putStr ","; pvlist [a]}) as}
putStr "; "
ppExpr cond
putStr "; "
ppExpr c
putStrLn ") {"
ppBodyList (il+1) b
indent il
putStrLn "}"
(TStmtWhile e b) -> do
putStr "while ("
ppExpr e
putStrLn ") {"
ppBodyList (il+1) b
indent il
putStrLn "}"
(TStmtDoWhile b e) -> do
putStr "do {"
ppBodyList (il+1) b
indent il
putStr "} while ("
ppExpr e
putStrLn ");"
(TStmtReturn e) -> do {putStr "return "; ppExpr e; putStrLn ";"}
ppExpr :: TExpr -> IO ()
ppExpr (TExprFunCall maybeExpr fName exprList) = do
case maybeExpr of
(Just e) -> do {ppExpr e; putStr "."}
Nothing -> return ()
putStr $ fName ++ "("
pArgList exprList
putStr ")"
where
pArgList :: [TExpr] -> IO ()
pArgList [] = return ()
pArgList (a:as) = do {ppExpr a; pArgList' as}
pArgList' :: [TExpr] -> IO ()
pArgList' [] = return ()
pArgList' (a:as) = do {putStr ", "; ppExpr a; pArgList' as}
ppExpr (TExprAdd e1 e2) = do {ppExpr e1; putStr " + " ; ppExpr e2}
ppExpr (TExprMin e1 e2) = do {ppExpr e1; putStr " - " ; ppExpr e2}
ppExpr (TExprMul e1 e2) = do {ppExpr e1; putStr " * " ; ppExpr e2}
ppExpr (TExprDiv e1 e2) = do {ppExpr e1; putStr " / " ; ppExpr e2}
ppExpr (TExprNeg e) = do { putStr "-" ; ppExpr e }
ppExpr (TExprAnd e1 e2) = do {ppExpr e1; putStr " and "; ppExpr e2}
ppExpr (TExprOr e1 e2) = do {ppExpr e1; putStr " or " ; ppExpr e2}
ppExpr (TExprNot e) = do { putStr "not " ; ppExpr e }
ppExpr (TExprIncV e) = do { putStr "++" ; ppExpr e }
ppExpr (TExprDecV e) = do { putStr "--" ; ppExpr e }
ppExpr (TExprVInc e) = do {ppExpr e; putStr "++" }
ppExpr (TExprVDec e) = do {ppExpr e; putStr "--" }
ppExpr (TExprIncBy e1 e2) = do {ppExpr e1; putStr " += " ; ppExpr e2}
ppExpr (TExprDecBy e1 e2) = do {ppExpr e1; putStr " -= " ; ppExpr e2}
ppExpr (TExprEq e1 e2) = do {ppExpr e1; putStr " == " ; ppExpr e2}
ppExpr (TExprNeq e1 e2) = do {ppExpr e1; putStr " != " ; ppExpr e2}
ppExpr (TExprLeq e1 e2) = do {ppExpr e1; putStr " <= " ; ppExpr e2}
ppExpr (TExprGeq e1 e2) = do {ppExpr e1; putStr " >= " ; ppExpr e2}
ppExpr (TExprLe e1 e2) = do {ppExpr e1; putStr " < " ; ppExpr e2}
ppExpr (TExprGe e1 e2) = do {ppExpr e1; putStr " > " ; ppExpr e2}
ppExpr (TExprArrAccess e1 e2) = do {ppExpr e1; putStr "[" ; ppExpr e2; putStr "]"}
ppExpr (TExprDotAccess e1 s) = do {ppExpr e1; putStr $ "." ++ s}
ppExpr (TExprBool b) = putStr (if b then "true" else "false")
ppExpr (TExprVar v) = putStr v
ppExpr (TExprInt v) = putStr $ show v
ppExpr (TExprStr s) = putStr $ show s -- for parens and escaping
ppExpr (TExprChar c) = putStr $ show $ chr c
ppExpr TExprNull = putStr "null"
ppExpr (TExprConvType t e) = do {putStr "("; ppType t; putStr ")"; ppExpr e}
ppExpr (TExprAssign e1 e2) = do {ppExpr e1; putStr " = "; ppExpr e2}
ppExpr (TExprNewObj s) = putStr $ "new " ++ s ++ "()"
ppExpr (TExprNewArr t eList) = do {putStr "new "; ppType t;
mapM_ (\e -> do {putStr "["; ppExpr e; putStr "]"}) eList}
indent :: Int -> IO ()
indent n | n >= 1 = do {putStr " "; indent (n-1)}
| n == 0 = return ()
| null | https://raw.githubusercontent.com/dmjio/compiler-in-haskell/1dbd377d72290f1a3614710df877b1a95f4242b8/src/DLC/PrettyPrinter.hs | haskell | no prefix indent, no suffix ";" and newline
ppBodyList _ [] = putStrLn ""
for parens and escaping | module DLC.PrettyPrinter
(prettyPrintTransResult, prettyPrintClassDef, prettyPrintMethodDef)
where
import DLC.TAST
import Data.Char (chr)
import Data.Map
prettyPrintTransResult :: Int -> TransResult -> IO ()
prettyPrintTransResult il (cdMap, mdMap) = do
mapM_ (\cName -> prettyPrintClassDef il (cdMap ! cName)) (keys cdMap)
mapM_ (\mName -> prettyPrintMethodDef il (mdMap ! mName)) (keys mdMap)
prettyPrintClassDef :: Int -> TClassDef -> IO ()
prettyPrintClassDef il (cName, superClassName, caList, cmList) = do
indent il
putStr $ "class " ++ cName
if cName == "Object" then return () else putStr (" extends " ++ superClassName)
putStrLn " {"
mapM_ (ppClassAttrDef (il+1)) caList
mapM_ (ppClassMethodDef (il+1)) cmList
indent il
putStrLn "}"
prettyPrintMethodDef :: Int -> TMethodDef -> IO ()
prettyPrintMethodDef il mDef = do {indent il; ppMethodDef il mDef}
ppClassAttrDef :: Int -> TClassAttrDef -> IO ()
ppClassAttrDef il (acc, isStatic, varDef) = do
indent il
ppClassAccessModifier acc
putStr $ if isStatic then " static " else " "
ppVarDef varDef
putStrLn ";"
ppClassMethodDef :: Int -> TClassMethodDef -> IO ()
ppClassMethodDef il (acc, isStatic, mDef) = do
indent il
ppClassAccessModifier acc
putStr $ if isStatic then " static " else " "
ppMethodDef il mDef
take care ! it does n't indent on the first line ...
ppMethodDef :: Int -> TMethodDef -> IO ()
ppMethodDef il (fName, fType, fArgList, fBody) = do
ppType fType
putStr $ " " ++ fName ++ "("
case fArgList of
[] -> return ()
a:as -> do {ppArg a; mapM_ (\a -> do {putStr ", "; ppArg a}) as}
putStrLn ") {"
ppBodyList (il+1) fBody
indent il
putStrLn "}"
where
ppArg :: (String, TType) -> IO ()
ppArg (aName, aType) = do {ppType aType; putStr $ " " ++ aName}
ppVarDef :: TVarDef -> IO ()
ppVarDef (vName, vType, vExpr) = do
ppType vType
putStr $ " " ++ vName ++ " = "
ppExpr vExpr
ppClassAccessModifier :: TClassAccessModifier -> IO ()
ppClassAccessModifier acc =
putStr $ case acc of
TPublic -> "public"
TProtected -> "protected"
TPrivate -> "private"
ppType :: TType -> IO ()
ppType TVoid = putStr "void"
ppType TInt = putStr "int"
ppType TInt32 = putStr "int32"
ppType TByte = putStr "byte"
ppType TBool = putStr "bool"
ppType (TClass s) = putStr s
ppType (TArray aDepth tp) = do {ppType tp; mapM_ putStr $ take aDepth $ repeat "[]"}
report error for TUnknown ?
ppBody :: Int -> TBodyStmt -> IO ()
ppBody il b =
case b of
(TBSStmt st) -> ppStmt il st
(TBSExpr ep) -> do {indent il; ppExpr ep; putStrLn ";"}
ppBodyList :: Int -> [TBodyStmt] -> IO ()
ppBodyList il bList = mapM_ (ppBody il) bList
ppStmt :: Int -> TStmt -> IO ()
ppStmt il st = do
indent il
case st of
(TStmtVarDef vDef) -> do {ppVarDef vDef; putStrLn ";"}
(TStmtPrint e) -> do {putStr "print("; ppExpr e; putStrLn ");"}
(TStmtIf c b1 b2) -> do
putStr "if ("
ppExpr c
putStrLn ") {"
ppBodyList (il+1) b1
indent il
putStrLn "} else {"
ppBodyList (il+1) b2
indent il
putStrLn "}"
(TStmtFor init cond c b) -> do
putStr "for ("
case init of
(Left e) -> ppExpr e
(Right (tp, vList)) -> do
ppType tp
pvlist vList
where
pvlist :: [(String, TExpr)] -> IO ()
pvlist [(v,e)] = do {(putStr $ " " ++ v ++ " = "); ppExpr e}
pvlist (a:as) = do {pvlist [a];
mapM_ (\a -> do {putStr ","; pvlist [a]}) as}
putStr "; "
ppExpr cond
putStr "; "
ppExpr c
putStrLn ") {"
ppBodyList (il+1) b
indent il
putStrLn "}"
(TStmtWhile e b) -> do
putStr "while ("
ppExpr e
putStrLn ") {"
ppBodyList (il+1) b
indent il
putStrLn "}"
(TStmtDoWhile b e) -> do
putStr "do {"
ppBodyList (il+1) b
indent il
putStr "} while ("
ppExpr e
putStrLn ");"
(TStmtReturn e) -> do {putStr "return "; ppExpr e; putStrLn ";"}
ppExpr :: TExpr -> IO ()
ppExpr (TExprFunCall maybeExpr fName exprList) = do
case maybeExpr of
(Just e) -> do {ppExpr e; putStr "."}
Nothing -> return ()
putStr $ fName ++ "("
pArgList exprList
putStr ")"
where
pArgList :: [TExpr] -> IO ()
pArgList [] = return ()
pArgList (a:as) = do {ppExpr a; pArgList' as}
pArgList' :: [TExpr] -> IO ()
pArgList' [] = return ()
pArgList' (a:as) = do {putStr ", "; ppExpr a; pArgList' as}
ppExpr (TExprAdd e1 e2) = do {ppExpr e1; putStr " + " ; ppExpr e2}
ppExpr (TExprMin e1 e2) = do {ppExpr e1; putStr " - " ; ppExpr e2}
ppExpr (TExprMul e1 e2) = do {ppExpr e1; putStr " * " ; ppExpr e2}
ppExpr (TExprDiv e1 e2) = do {ppExpr e1; putStr " / " ; ppExpr e2}
ppExpr (TExprNeg e) = do { putStr "-" ; ppExpr e }
ppExpr (TExprAnd e1 e2) = do {ppExpr e1; putStr " and "; ppExpr e2}
ppExpr (TExprOr e1 e2) = do {ppExpr e1; putStr " or " ; ppExpr e2}
ppExpr (TExprNot e) = do { putStr "not " ; ppExpr e }
ppExpr (TExprIncV e) = do { putStr "++" ; ppExpr e }
ppExpr (TExprDecV e) = do { putStr "--" ; ppExpr e }
ppExpr (TExprVInc e) = do {ppExpr e; putStr "++" }
ppExpr (TExprVDec e) = do {ppExpr e; putStr "--" }
ppExpr (TExprIncBy e1 e2) = do {ppExpr e1; putStr " += " ; ppExpr e2}
ppExpr (TExprDecBy e1 e2) = do {ppExpr e1; putStr " -= " ; ppExpr e2}
ppExpr (TExprEq e1 e2) = do {ppExpr e1; putStr " == " ; ppExpr e2}
ppExpr (TExprNeq e1 e2) = do {ppExpr e1; putStr " != " ; ppExpr e2}
ppExpr (TExprLeq e1 e2) = do {ppExpr e1; putStr " <= " ; ppExpr e2}
ppExpr (TExprGeq e1 e2) = do {ppExpr e1; putStr " >= " ; ppExpr e2}
ppExpr (TExprLe e1 e2) = do {ppExpr e1; putStr " < " ; ppExpr e2}
ppExpr (TExprGe e1 e2) = do {ppExpr e1; putStr " > " ; ppExpr e2}
ppExpr (TExprArrAccess e1 e2) = do {ppExpr e1; putStr "[" ; ppExpr e2; putStr "]"}
ppExpr (TExprDotAccess e1 s) = do {ppExpr e1; putStr $ "." ++ s}
ppExpr (TExprBool b) = putStr (if b then "true" else "false")
ppExpr (TExprVar v) = putStr v
ppExpr (TExprInt v) = putStr $ show v
ppExpr (TExprChar c) = putStr $ show $ chr c
ppExpr TExprNull = putStr "null"
ppExpr (TExprConvType t e) = do {putStr "("; ppType t; putStr ")"; ppExpr e}
ppExpr (TExprAssign e1 e2) = do {ppExpr e1; putStr " = "; ppExpr e2}
ppExpr (TExprNewObj s) = putStr $ "new " ++ s ++ "()"
ppExpr (TExprNewArr t eList) = do {putStr "new "; ppType t;
mapM_ (\e -> do {putStr "["; ppExpr e; putStr "]"}) eList}
indent :: Int -> IO ()
indent n | n >= 1 = do {putStr " "; indent (n-1)}
| n == 0 = return ()
|
747bd882689311684f82b00026741cf6beae3c9a04ae45dfd49df4bf61f55bd9 | agda/agda | DisplayForm.hs | # LANGUAGE UndecidableInstances #
| Tools for ' DisplayTerm ' and ' DisplayForm ' .
module Agda.TypeChecking.DisplayForm (displayForm) where
import Control.Monad
import Control.Monad.Trans (lift)
import Control.Monad.Trans.Maybe
import Data.Monoid (All(..))
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Set as Set
import Agda.Syntax.Common
import Agda.Syntax.Internal
import Agda.Syntax.Internal.Names
import Agda.Syntax.Scope.Base (inverseScopeLookupName)
import Agda.TypeChecking.Monad
import Agda.TypeChecking.Substitute
import Agda.TypeChecking.Level
import Agda.TypeChecking.Reduce (instantiate)
import Agda.Utils.Functor
import Agda.Utils.List
import Agda.Utils.Maybe
import Agda.Utils.Pretty
import Agda.Utils.Impossible
-- | Get the arities of all display forms for a name.
displayFormArities :: (HasConstInfo m, ReadTCState m) => QName -> m [Int]
displayFormArities q = map (length . dfPats . dget) <$> getDisplayForms q
-- | Lift a local display form to an outer context. The substitution goes from the parent context to
the context of the local display form ( see Issue 958 ) . Current only handles pure extensions of
-- the parent context.
liftLocalDisplayForm :: Substitution -> DisplayForm -> Maybe DisplayForm
liftLocalDisplayForm IdS df = Just df
liftLocalDisplayForm (Wk n IdS) (Display m lhs rhs) =
-- We lift a display form by turning matches on free variables into pattern variables, which can
-- be done by simply adding to the dfPatternVars field.
Just $ Display (n + m) lhs rhs
liftLocalDisplayForm _ _ = Nothing
type MonadDisplayForm m =
( MonadReduce m
, ReadTCState m
, HasConstInfo m
, HasBuiltins m
, MonadDebug m
)
| Find a matching display form for @q es@.
In essence this tries to rewrite @q es@ with any
display form @q ps -- > dt@ and returns the instantiated
@dt@ if successful . First match wins .
displayForm :: MonadDisplayForm m => QName -> Elims -> m (Maybe DisplayTerm)
displayForm q es = do
-- Get display forms for name q.
odfs <- getDisplayForms q
if (null odfs) then do
reportSLn "tc.display.top" 101 $ "no displayForm for " ++ prettyShow q
return Nothing
else do
Display debug info about the .
unlessDebugPrinting $ reportSDoc "tc.display.top" 100 $ do
cps <- viewTC eCheckpoints
cxt <- getContextTelescope
return $ vcat
[ "displayForm for" <+> pretty q
, nest 2 $ "cxt =" <+> pretty cxt
, nest 2 $ "cps =" <+> vcat (map pretty (Map.toList cps))
, nest 2 $ "dfs =" <+> vcat (map pretty odfs) ]
-- Use only the display forms that can be opened in the current context.
dfs <- catMaybes <$> mapM (tryGetOpen liftLocalDisplayForm) odfs
scope <- getScope
Keep the display forms that match the application @q es@.
ms <- do
ms <- mapM (runMaybeT . (`matchDisplayForm` es)) dfs
return [ m | Just (d, m) <- ms, wellScoped scope d ]
-- Not safe when printing non-terminating terms.
-- (nfdfs, us) <- normalise (dfs, es)
unlessDebugPrinting $ reportSDoc "tc.display.top" 100 $ return $ vcat
[ "name :" <+> pretty q
, "displayForms:" <+> pretty dfs
, "arguments :" <+> pretty es
, "matches :" <+> pretty ms
, "result :" <+> pretty (listToMaybe ms)
]
Return the first display form that matches .
return $ listToMaybe ms
where
-- Look at the original display form, not the instantiated result when
-- checking if it's well-scoped. Otherwise we might pick up out of scope
-- identifiers coming from the source term.
wellScoped scope (Display _ _ d)
| isWithDisplay d = True
| otherwise = getAll $ namesIn' (All . inScope scope) d -- all names in d should be in scope
inScope scope x = not $ null $ inverseScopeLookupName x scope
isWithDisplay DWithApp{} = True
isWithDisplay _ = False
| Match a ' DisplayForm ' @q ps = v@ against @q es@.
-- Return the 'DisplayTerm' @v[us]@ if the match was successful,
-- i.e., @es / ps = Just us@.
matchDisplayForm :: MonadDisplayForm m
=> DisplayForm -> Elims -> MaybeT m (DisplayForm, DisplayTerm)
matchDisplayForm d@(Display n ps v) es
| length ps > length es = mzero
| otherwise = do
let (es0, es1) = splitAt (length ps) es
mm <- match (Window 0 n) ps es0
us <- forM [0 .. n - 1] $ \ i -> do
# 5294 : Fail if we do n't have bindings for all variables . This can
-- happen outside parameterised modules when some of the parameters
are not used in the lhs .
Just u <- return $ Map.lookup i mm
return u
return (d, substWithOrigin (parallelS $ map woThing us) us v `applyE` es1)
type MatchResult = Map Int (WithOrigin Term)
unionMatch :: Monad m => MatchResult -> MatchResult -> MaybeT m MatchResult
unionMatch m1 m2
| null (Map.intersection m1 m2) = return $ Map.union m1 m2
| otherwise = mzero -- Non-linear pattern, fail for now.
unionsMatch :: Monad m => [MatchResult] -> MaybeT m MatchResult
unionsMatch = foldM unionMatch Map.empty
data Window = Window {dbLo, dbHi :: Nat}
inWindow :: Window -> Nat -> Maybe Nat
inWindow (Window lo hi) n | lo <= n, n < hi = Just (n - lo)
| otherwise = Nothing
shiftWindow :: Window -> Window
shiftWindow (Window lo hi) = Window (lo + 1) (hi + 1)
-- | Class @Match@ for matching a term @p@ in the role of a pattern
-- against a term @v@.
--
Free variables inside the window in @p@ are pattern variables and
-- the result of matching is a map from pattern variables (shifted down to start at 0) to subterms
-- of @v@.
class Match a where
match :: MonadDisplayForm m => Window -> a -> a -> MaybeT m MatchResult
instance Match a => Match [a] where
match n xs ys = unionsMatch =<< zipWithM (match n) xs ys
instance Match a => Match (Arg a) where
match n p v = Map.map (setOrigin (getOrigin v)) <$> match n (unArg p) (unArg v)
instance Match a => Match (Elim' a) where
match n p v =
case (p, v) of
(Proj _ f, Proj _ f') | f == f' -> return Map.empty
_ | Just a <- isApplyElim p
, Just a' <- isApplyElim v -> match n a a'
-- we do not care to differentiate between Apply and IApply for
-- printing.
_ -> mzero
instance Match Term where
match w p v = lift (instantiate v) >>= \ v -> case (unSpine p, unSpine v) of
(Var i [], v) | Just j <- inWindow w i -> return $ Map.singleton j (WithOrigin Inserted v)
(Var i (_:_), v) | Just{} <- inWindow w i -> mzero -- Higher-order pattern, fail for now.
(Var i ps, Var j vs) | i == j -> match w ps vs
(Def c ps, Def d vs) | c == d -> match w ps vs
(Con c _ ps, Con d _ vs) | c == d -> match w ps vs
(Lit l, Lit l') | l == l' -> return Map.empty
(Lam h p, Lam h' v) | h == h' -> match (shiftWindow w) (unAbs p) (unAbs v)
(p, v) | p == v -> return Map.empty -- TODO: this is wrong (this is why we lifted the rhs before)
(p, Level l) -> match w p =<< reallyUnLevelView l
(Sort ps, Sort pv) -> match w ps pv
(p, Sort (Type v)) -> match w p =<< reallyUnLevelView v
_ -> mzero
instance Match Sort where
match w p v = case (p, v) of
(Type pl, Type vl) -> match w pl vl
_ | p == v -> return Map.empty
_ -> mzero
instance Match Level where
match w p v = do
p <- reallyUnLevelView p
v <- reallyUnLevelView v
match w p v
-- | Substitute terms with origin into display terms,
-- replacing variables along with their origins.
--
-- The purpose is to replace the pattern variables in a with-display form,
and only on the top level of the lhs . Thus , we are happy to fall back
-- to ordinary substitution where it does not matter.
This fixes issue # 2590 .
class SubstWithOrigin a where
substWithOrigin :: Substitution -> [WithOrigin Term] -> a -> a
instance SubstWithOrigin a => SubstWithOrigin [a] where
substWithOrigin rho ots = map (substWithOrigin rho ots)
instance (SubstWithOrigin a, SubstWithOrigin (Arg a)) => SubstWithOrigin (Elim' a) where
substWithOrigin rho ots (Apply arg) = Apply $ substWithOrigin rho ots arg
substWithOrigin rho ots e@Proj{} = e
substWithOrigin rho ots (IApply u v w) = IApply
(substWithOrigin rho ots u)
(substWithOrigin rho ots v)
(substWithOrigin rho ots w)
instance SubstWithOrigin (Arg Term) where
substWithOrigin rho ots (Arg ai v) =
case v of
-- pattern variable: replace origin if better
Var x [] -> case ots !!! x of
Just (WithOrigin o u) -> Arg (mapOrigin (replaceOrigin o) ai) u
Issue # 2717 , not _ _ IMPOSSIBLE _ _
-- constructor: recurse
Con c ci args -> Arg ai $ Con c ci $ substWithOrigin rho ots args
-- def: recurse
Def q es -> Arg ai $ Def q $ substWithOrigin rho ots es
-- otherwise: fall back to ordinary substitution
_ -> Arg ai $ applySubst rho v
where
replaceOrigin _ UserWritten = UserWritten
replaceOrigin o _ = o
instance SubstWithOrigin Term where
substWithOrigin rho ots v =
case v of
-- constructor: recurse
Con c ci args -> Con c ci $ substWithOrigin rho ots args
-- def: recurse
Def q es -> Def q $ substWithOrigin rho ots es
-- otherwise: fall back to oridinary substitution
_ -> applySubst rho v
Do not go into dot pattern , otherwise interaction test # 231 fails
instance SubstWithOrigin DisplayTerm where
substWithOrigin rho ots =
\case
DTerm' v es -> DTerm' (substWithOrigin rho ots v) $ substWithOrigin rho ots es
DDot' v es -> DDot' (substWithOrigin rho ots v) $ substWithOrigin rho ots es
DDef q es -> DDef q $ substWithOrigin rho ots es
DCon c ci args -> DCon c ci $ substWithOrigin rho ots args
DWithApp t ts es -> DWithApp
(substWithOrigin rho ots t)
(substWithOrigin rho ots ts)
(substWithOrigin rho ots es)
Do not go into dot pattern , otherwise interaction test # 231 fails
instance SubstWithOrigin (Arg DisplayTerm) where
substWithOrigin rho ots (Arg ai dt) =
case dt of
DTerm' v es -> substWithOrigin rho ots (Arg ai v) <&> (`DTerm'` substWithOrigin rho ots es)
DDot' v es -> Arg ai $ DDot' (applySubst rho v) $ substWithOrigin rho ots es
DDef q es -> Arg ai $ DDef q $ substWithOrigin rho ots es
DCon c ci args -> Arg ai $ DCon c ci $ substWithOrigin rho ots args
DWithApp t ts es -> Arg ai $ DWithApp
(substWithOrigin rho ots t)
(substWithOrigin rho ots ts)
(substWithOrigin rho ots es)
| null | https://raw.githubusercontent.com/agda/agda/9afe77020541b944331685ef2720a81bb312a925/src/full/Agda/TypeChecking/DisplayForm.hs | haskell | | Get the arities of all display forms for a name.
| Lift a local display form to an outer context. The substitution goes from the parent context to
the parent context.
We lift a display form by turning matches on free variables into pattern variables, which can
be done by simply adding to the dfPatternVars field.
> dt@ and returns the instantiated
Get display forms for name q.
Use only the display forms that can be opened in the current context.
Not safe when printing non-terminating terms.
(nfdfs, us) <- normalise (dfs, es)
Look at the original display form, not the instantiated result when
checking if it's well-scoped. Otherwise we might pick up out of scope
identifiers coming from the source term.
all names in d should be in scope
Return the 'DisplayTerm' @v[us]@ if the match was successful,
i.e., @es / ps = Just us@.
happen outside parameterised modules when some of the parameters
Non-linear pattern, fail for now.
| Class @Match@ for matching a term @p@ in the role of a pattern
against a term @v@.
the result of matching is a map from pattern variables (shifted down to start at 0) to subterms
of @v@.
we do not care to differentiate between Apply and IApply for
printing.
Higher-order pattern, fail for now.
TODO: this is wrong (this is why we lifted the rhs before)
| Substitute terms with origin into display terms,
replacing variables along with their origins.
The purpose is to replace the pattern variables in a with-display form,
to ordinary substitution where it does not matter.
pattern variable: replace origin if better
constructor: recurse
def: recurse
otherwise: fall back to ordinary substitution
constructor: recurse
def: recurse
otherwise: fall back to oridinary substitution | # LANGUAGE UndecidableInstances #
| Tools for ' DisplayTerm ' and ' DisplayForm ' .
module Agda.TypeChecking.DisplayForm (displayForm) where
import Control.Monad
import Control.Monad.Trans (lift)
import Control.Monad.Trans.Maybe
import Data.Monoid (All(..))
import Data.Map (Map)
import qualified Data.Map as Map
import qualified Data.Set as Set
import Agda.Syntax.Common
import Agda.Syntax.Internal
import Agda.Syntax.Internal.Names
import Agda.Syntax.Scope.Base (inverseScopeLookupName)
import Agda.TypeChecking.Monad
import Agda.TypeChecking.Substitute
import Agda.TypeChecking.Level
import Agda.TypeChecking.Reduce (instantiate)
import Agda.Utils.Functor
import Agda.Utils.List
import Agda.Utils.Maybe
import Agda.Utils.Pretty
import Agda.Utils.Impossible
displayFormArities :: (HasConstInfo m, ReadTCState m) => QName -> m [Int]
displayFormArities q = map (length . dfPats . dget) <$> getDisplayForms q
the context of the local display form ( see Issue 958 ) . Current only handles pure extensions of
liftLocalDisplayForm :: Substitution -> DisplayForm -> Maybe DisplayForm
liftLocalDisplayForm IdS df = Just df
liftLocalDisplayForm (Wk n IdS) (Display m lhs rhs) =
Just $ Display (n + m) lhs rhs
liftLocalDisplayForm _ _ = Nothing
type MonadDisplayForm m =
( MonadReduce m
, ReadTCState m
, HasConstInfo m
, HasBuiltins m
, MonadDebug m
)
| Find a matching display form for @q es@.
In essence this tries to rewrite @q es@ with any
@dt@ if successful . First match wins .
displayForm :: MonadDisplayForm m => QName -> Elims -> m (Maybe DisplayTerm)
displayForm q es = do
odfs <- getDisplayForms q
if (null odfs) then do
reportSLn "tc.display.top" 101 $ "no displayForm for " ++ prettyShow q
return Nothing
else do
Display debug info about the .
unlessDebugPrinting $ reportSDoc "tc.display.top" 100 $ do
cps <- viewTC eCheckpoints
cxt <- getContextTelescope
return $ vcat
[ "displayForm for" <+> pretty q
, nest 2 $ "cxt =" <+> pretty cxt
, nest 2 $ "cps =" <+> vcat (map pretty (Map.toList cps))
, nest 2 $ "dfs =" <+> vcat (map pretty odfs) ]
dfs <- catMaybes <$> mapM (tryGetOpen liftLocalDisplayForm) odfs
scope <- getScope
Keep the display forms that match the application @q es@.
ms <- do
ms <- mapM (runMaybeT . (`matchDisplayForm` es)) dfs
return [ m | Just (d, m) <- ms, wellScoped scope d ]
unlessDebugPrinting $ reportSDoc "tc.display.top" 100 $ return $ vcat
[ "name :" <+> pretty q
, "displayForms:" <+> pretty dfs
, "arguments :" <+> pretty es
, "matches :" <+> pretty ms
, "result :" <+> pretty (listToMaybe ms)
]
Return the first display form that matches .
return $ listToMaybe ms
where
wellScoped scope (Display _ _ d)
| isWithDisplay d = True
inScope scope x = not $ null $ inverseScopeLookupName x scope
isWithDisplay DWithApp{} = True
isWithDisplay _ = False
| Match a ' DisplayForm ' @q ps = v@ against @q es@.
matchDisplayForm :: MonadDisplayForm m
=> DisplayForm -> Elims -> MaybeT m (DisplayForm, DisplayTerm)
matchDisplayForm d@(Display n ps v) es
| length ps > length es = mzero
| otherwise = do
let (es0, es1) = splitAt (length ps) es
mm <- match (Window 0 n) ps es0
us <- forM [0 .. n - 1] $ \ i -> do
# 5294 : Fail if we do n't have bindings for all variables . This can
are not used in the lhs .
Just u <- return $ Map.lookup i mm
return u
return (d, substWithOrigin (parallelS $ map woThing us) us v `applyE` es1)
type MatchResult = Map Int (WithOrigin Term)
unionMatch :: Monad m => MatchResult -> MatchResult -> MaybeT m MatchResult
unionMatch m1 m2
| null (Map.intersection m1 m2) = return $ Map.union m1 m2
unionsMatch :: Monad m => [MatchResult] -> MaybeT m MatchResult
unionsMatch = foldM unionMatch Map.empty
data Window = Window {dbLo, dbHi :: Nat}
inWindow :: Window -> Nat -> Maybe Nat
inWindow (Window lo hi) n | lo <= n, n < hi = Just (n - lo)
| otherwise = Nothing
shiftWindow :: Window -> Window
shiftWindow (Window lo hi) = Window (lo + 1) (hi + 1)
Free variables inside the window in @p@ are pattern variables and
class Match a where
match :: MonadDisplayForm m => Window -> a -> a -> MaybeT m MatchResult
instance Match a => Match [a] where
match n xs ys = unionsMatch =<< zipWithM (match n) xs ys
instance Match a => Match (Arg a) where
match n p v = Map.map (setOrigin (getOrigin v)) <$> match n (unArg p) (unArg v)
instance Match a => Match (Elim' a) where
match n p v =
case (p, v) of
(Proj _ f, Proj _ f') | f == f' -> return Map.empty
_ | Just a <- isApplyElim p
, Just a' <- isApplyElim v -> match n a a'
_ -> mzero
instance Match Term where
match w p v = lift (instantiate v) >>= \ v -> case (unSpine p, unSpine v) of
(Var i [], v) | Just j <- inWindow w i -> return $ Map.singleton j (WithOrigin Inserted v)
(Var i ps, Var j vs) | i == j -> match w ps vs
(Def c ps, Def d vs) | c == d -> match w ps vs
(Con c _ ps, Con d _ vs) | c == d -> match w ps vs
(Lit l, Lit l') | l == l' -> return Map.empty
(Lam h p, Lam h' v) | h == h' -> match (shiftWindow w) (unAbs p) (unAbs v)
(p, Level l) -> match w p =<< reallyUnLevelView l
(Sort ps, Sort pv) -> match w ps pv
(p, Sort (Type v)) -> match w p =<< reallyUnLevelView v
_ -> mzero
instance Match Sort where
match w p v = case (p, v) of
(Type pl, Type vl) -> match w pl vl
_ | p == v -> return Map.empty
_ -> mzero
instance Match Level where
match w p v = do
p <- reallyUnLevelView p
v <- reallyUnLevelView v
match w p v
and only on the top level of the lhs . Thus , we are happy to fall back
This fixes issue # 2590 .
class SubstWithOrigin a where
substWithOrigin :: Substitution -> [WithOrigin Term] -> a -> a
instance SubstWithOrigin a => SubstWithOrigin [a] where
substWithOrigin rho ots = map (substWithOrigin rho ots)
instance (SubstWithOrigin a, SubstWithOrigin (Arg a)) => SubstWithOrigin (Elim' a) where
substWithOrigin rho ots (Apply arg) = Apply $ substWithOrigin rho ots arg
substWithOrigin rho ots e@Proj{} = e
substWithOrigin rho ots (IApply u v w) = IApply
(substWithOrigin rho ots u)
(substWithOrigin rho ots v)
(substWithOrigin rho ots w)
instance SubstWithOrigin (Arg Term) where
substWithOrigin rho ots (Arg ai v) =
case v of
Var x [] -> case ots !!! x of
Just (WithOrigin o u) -> Arg (mapOrigin (replaceOrigin o) ai) u
Issue # 2717 , not _ _ IMPOSSIBLE _ _
Con c ci args -> Arg ai $ Con c ci $ substWithOrigin rho ots args
Def q es -> Arg ai $ Def q $ substWithOrigin rho ots es
_ -> Arg ai $ applySubst rho v
where
replaceOrigin _ UserWritten = UserWritten
replaceOrigin o _ = o
instance SubstWithOrigin Term where
substWithOrigin rho ots v =
case v of
Con c ci args -> Con c ci $ substWithOrigin rho ots args
Def q es -> Def q $ substWithOrigin rho ots es
_ -> applySubst rho v
Do not go into dot pattern , otherwise interaction test # 231 fails
instance SubstWithOrigin DisplayTerm where
substWithOrigin rho ots =
\case
DTerm' v es -> DTerm' (substWithOrigin rho ots v) $ substWithOrigin rho ots es
DDot' v es -> DDot' (substWithOrigin rho ots v) $ substWithOrigin rho ots es
DDef q es -> DDef q $ substWithOrigin rho ots es
DCon c ci args -> DCon c ci $ substWithOrigin rho ots args
DWithApp t ts es -> DWithApp
(substWithOrigin rho ots t)
(substWithOrigin rho ots ts)
(substWithOrigin rho ots es)
Do not go into dot pattern , otherwise interaction test # 231 fails
instance SubstWithOrigin (Arg DisplayTerm) where
substWithOrigin rho ots (Arg ai dt) =
case dt of
DTerm' v es -> substWithOrigin rho ots (Arg ai v) <&> (`DTerm'` substWithOrigin rho ots es)
DDot' v es -> Arg ai $ DDot' (applySubst rho v) $ substWithOrigin rho ots es
DDef q es -> Arg ai $ DDef q $ substWithOrigin rho ots es
DCon c ci args -> Arg ai $ DCon c ci $ substWithOrigin rho ots args
DWithApp t ts es -> Arg ai $ DWithApp
(substWithOrigin rho ots t)
(substWithOrigin rho ots ts)
(substWithOrigin rho ots es)
|
6971b27d1204dc79b274cfbfa17d5d2b57feaff463e083eb8c08887dad9ea53b | soupi/chip-8 | Bits.hs | Bits Utilities
module CPU.Bits where
import Data.Word
import Data.Bits
import qualified Data.ByteString as BS
import qualified Numeric as Nume (showHex)
import Data.Char (toUpper)
mergeList16 :: [Word8] -> [Word16]
mergeList16 [] = []
mergeList16 [x] = [merge16 x 0]
mergeList16 (x:y:rest) = merge16 x y : mergeList16 rest
-- |
combining 2 Word8 to Word16
merge16 :: Word8 -> Word8 -> Word16
merge16 high low = shift (fromIntegral high) 8 .|. fromIntegral low
-- |
combining 2 Word8 to Word8
merge8 :: Word8 -> Word8 -> Word8
merge8 high low = shift (fromIntegral (word8to4 high)) 4 .|. fromIntegral (word8to4 low)
word8to4 :: Word8 -> Word8
word8to4 = (.&.) 0xF
-- |
Formatting for debugging purposes
showHex16 :: Word16 -> String
showHex16 = ("0x"++) . (\f (w,x,y,z) -> f w ++ f x ++ f y ++ f z) (map toUpper . flip Nume.showHex "") . match16
-- |
-- Formatting Word8 for debugging purposes
showHex8 :: Word8 -> String
showHex8 n =
"0x" ++
(\f (x,y) -> f x ++ f y) (map toUpper . flip Nume.showHex "")
(rotateR (n .&. 0xF0) 4, n .&. 0x0F)
-- |
-- a helper function for bit pattern matching
16 bits becomes 32 bits because there is n't a 4 bit data type
match16 :: Word16 -> (Word8, Word8, Word8, Word8)
match16 n =
(fromIntegral $ rotateL (n .&. 0xF000) 4
,fromIntegral $ rotateR (n .&. 0x0F00) 8
,fromIntegral $ rotateR (n .&. 0x00F0) 4
,fromIntegral (n .&. 0x000F)
)
bcd8 :: Word8 -> (Word8, Word8, Word8)
bcd8 n =
case map (read . (:[])) (show n) of
[z] -> (0,0,z)
[y,z] -> (0,y,z)
[x,y,z] -> (x,y,z)
_ -> error $ "Impossible pattern match for: " ++ show n
showFile :: BS.ByteString -> String
showFile =
unlines . zipWith (++) (map ((++ ": ") . showHex16) (filter even [0x200..])) . map showHex16 . mergeList16 . BS.unpack
| null | https://raw.githubusercontent.com/soupi/chip-8/b2ded9808cd5a071f0b797286bb040bc73e19b27/src/CPU/Bits.hs | haskell | |
|
|
|
Formatting Word8 for debugging purposes
|
a helper function for bit pattern matching | Bits Utilities
module CPU.Bits where
import Data.Word
import Data.Bits
import qualified Data.ByteString as BS
import qualified Numeric as Nume (showHex)
import Data.Char (toUpper)
mergeList16 :: [Word8] -> [Word16]
mergeList16 [] = []
mergeList16 [x] = [merge16 x 0]
mergeList16 (x:y:rest) = merge16 x y : mergeList16 rest
combining 2 Word8 to Word16
merge16 :: Word8 -> Word8 -> Word16
merge16 high low = shift (fromIntegral high) 8 .|. fromIntegral low
combining 2 Word8 to Word8
merge8 :: Word8 -> Word8 -> Word8
merge8 high low = shift (fromIntegral (word8to4 high)) 4 .|. fromIntegral (word8to4 low)
word8to4 :: Word8 -> Word8
word8to4 = (.&.) 0xF
Formatting for debugging purposes
showHex16 :: Word16 -> String
showHex16 = ("0x"++) . (\f (w,x,y,z) -> f w ++ f x ++ f y ++ f z) (map toUpper . flip Nume.showHex "") . match16
showHex8 :: Word8 -> String
showHex8 n =
"0x" ++
(\f (x,y) -> f x ++ f y) (map toUpper . flip Nume.showHex "")
(rotateR (n .&. 0xF0) 4, n .&. 0x0F)
16 bits becomes 32 bits because there is n't a 4 bit data type
match16 :: Word16 -> (Word8, Word8, Word8, Word8)
match16 n =
(fromIntegral $ rotateL (n .&. 0xF000) 4
,fromIntegral $ rotateR (n .&. 0x0F00) 8
,fromIntegral $ rotateR (n .&. 0x00F0) 4
,fromIntegral (n .&. 0x000F)
)
bcd8 :: Word8 -> (Word8, Word8, Word8)
bcd8 n =
case map (read . (:[])) (show n) of
[z] -> (0,0,z)
[y,z] -> (0,y,z)
[x,y,z] -> (x,y,z)
_ -> error $ "Impossible pattern match for: " ++ show n
showFile :: BS.ByteString -> String
showFile =
unlines . zipWith (++) (map ((++ ": ") . showHex16) (filter even [0x200..])) . map showHex16 . mergeList16 . BS.unpack
|
7f121c8a14a9c240a0f8df2caa3dcf94ee7100ee986130b4a283e479ddcf1147 | utkarshkukreti/bs-preact | WindowSize.ml | module P = Preact
module WindowSize = struct
type t =
{ width : float
; height : float
}
external innerWidth : float = "" [@@bs.val] [@@bs.scope "window"]
external innerHeight : float = "" [@@bs.val] [@@bs.scope "window"]
external addEventListener : string -> (unit -> unit) -> unit = ""
[@@bs.val] [@@bs.scope "window"]
external removeEventListener : string -> (unit -> unit) -> unit = ""
[@@bs.val] [@@bs.scope "window"]
let use =
fun [@preact.hook] () ->
let[@hook] size, setSize = P.useState { width = innerWidth; height = innerHeight } in
let[@hook] () =
P.useEffect
(fun () ->
let handler () = setSize { width = innerWidth; height = innerHeight } in
let () = addEventListener "resize" handler in
Some (fun () -> removeEventListener "resize" handler))
None
in
size
end
module Demo = struct
let make =
fun [@preact.component "Demo"] () ->
let[@hook] windowSize = WindowSize.use () in
P.div [] [ P.float windowSize.width; P.string " "; P.float windowSize.height ]
end
let () =
match P.find "main" with
| Some element -> P.render (Demo.make ()) element
| None -> Js.Console.error "<main> not found!"
| null | https://raw.githubusercontent.com/utkarshkukreti/bs-preact/61d10d40543e1f8fc83b8a82f6353bcb52489c91/examples/WindowSize.ml | ocaml | module P = Preact
module WindowSize = struct
type t =
{ width : float
; height : float
}
external innerWidth : float = "" [@@bs.val] [@@bs.scope "window"]
external innerHeight : float = "" [@@bs.val] [@@bs.scope "window"]
external addEventListener : string -> (unit -> unit) -> unit = ""
[@@bs.val] [@@bs.scope "window"]
external removeEventListener : string -> (unit -> unit) -> unit = ""
[@@bs.val] [@@bs.scope "window"]
let use =
fun [@preact.hook] () ->
let[@hook] size, setSize = P.useState { width = innerWidth; height = innerHeight } in
let[@hook] () =
P.useEffect
(fun () ->
let handler () = setSize { width = innerWidth; height = innerHeight } in
let () = addEventListener "resize" handler in
Some (fun () -> removeEventListener "resize" handler))
None
in
size
end
module Demo = struct
let make =
fun [@preact.component "Demo"] () ->
let[@hook] windowSize = WindowSize.use () in
P.div [] [ P.float windowSize.width; P.string " "; P.float windowSize.height ]
end
let () =
match P.find "main" with
| Some element -> P.render (Demo.make ()) element
| None -> Js.Console.error "<main> not found!"
| |
7c8c039a7a33007fc7a9436c7c1e1488d65874174ac874f42ae699c1c5f98db4 | b1412/clojure-web-admin | kit.clj | (ns clojure-web.db.kit
(:require [clojure-web.db.entity :refer [resource role-resource]]
[inflections.core :refer [plural titleize]]
[korma.core :as k]))
(defn get-res-for-entity
"Generate basic resources of an entity like follows
+----+----------------+--------+-------------------+
| id | uri | method | desc |
+----+----------------+--------+-------------------+
| 1 | /tasks/.* | GET | query tasks |
| 2 | /tasks/ | POST | create a new task |
| 3 | /tasks/[0-9]* | DELETE | delete a task |
| 4 | /tasks/[0-9]* | PUT | update a task |
| 5 | /tasks/columns | GET | get task columns |
+----+----------------+--------+-------------------+
"
[entity]
(let [table (:table entity)
plural-table (plural table )]
[{:uri (str "/" (plural table) "[/]?")
:method "GET"
:desc (str "query " (plural table ))
:key plural-table
:type "menu"
:label (titleize table)
:parent-id 0
:entity table}
{:uri (str "/" plural-table "[/]?")
:method "POST"
:desc (str "create a new " table)
:key (str "new-" table)
:type "button"
:label "New"
:entity table}
{:uri (str "/" (plural table) "/[0-9]+")
:method "GET"
:desc (str "get a specific" table )
:key (str "get-" table)
:type "button"
:label "Edit"
:entity table}
{:uri (str "/" plural-table "/[0-9]+")
:method "DELETE"
:desc (str "delete a specific " table)
:key (str "delete-" table)
:type "button"
:label "Delete"
:entity table}
{:uri (str "/" plural-table "/[0-9]+")
:method "PUT"
:key (str "edit-" table)
:desc (str "update a specific " table)
:type "button"
:label "Update"
:entity table}
{:uri (str "/" plural-table "/meta")
:method "GET"
:key (str table "-meta")
:desc (str "get metadata of " table)
:type "button"
:label "Metadata"
:entity table}
{:uri (str "/" plural-table "/charts")
:method "GET"
:key (str table "-charts")
:desc (str "view " table " charts")
:label "Charts"
:type "button"
:entity table}
{:uri (str "/" plural-table "/excel")
:method "POST"
:key (str "import-" table "-excel")
:desc (str "import excel of " (plural table))
:type "button"
:label "Import"
:entity table}
{:uri (str "/" plural-table "/excel")
:method "GET"
:key (str "export-" table "-excel")
:desc (str "export excel of " (plural table))
:type "button"
:label "Export"
:entity table}
{:uri (str "/" plural-table "/excel/template")
:method "GET"
:key (str "export-" table "-excel-template")
:desc (str "get excel template of " (plural table))
:type "button"
:entity table}]))
(defn insert-ress
[ress]
(let [new-keys (->> ress
(map #(-> (k/insert* resource)
(k/values %)
(k/insert)))
(map :generated-key))
parent (first new-keys)
children (rest new-keys)]
(->> children
(map #(-> (k/update* resource)
(k/set-fields {:parent_id parent})
(k/where {:id %})
(k/update))))))
(defn insert-res-for-entity [entity]
(->> (get-res-for-entity entity)
(insert-ress)))
(defn get-res-to-role [entity role-id]
(let [res (k/select resource (k/where {:entity (:table entity)}))]
(->> res
(map (comp
(partial merge {:role_id role-id
:scope "system"})
#(hash-map :resource_id (:id %)))))))
(defn insert-res-to-role
[entity role-id]
(let [data (get-res-to-role entity role-id)]
(k/insert role-resource (k/values data))))
| null | https://raw.githubusercontent.com/b1412/clojure-web-admin/018161dcdb364cc168d6f5a56ceb798005a0701f/src/clj/clojure_web/db/kit.clj | clojure | (ns clojure-web.db.kit
(:require [clojure-web.db.entity :refer [resource role-resource]]
[inflections.core :refer [plural titleize]]
[korma.core :as k]))
(defn get-res-for-entity
"Generate basic resources of an entity like follows
+----+----------------+--------+-------------------+
| id | uri | method | desc |
+----+----------------+--------+-------------------+
| 1 | /tasks/.* | GET | query tasks |
| 2 | /tasks/ | POST | create a new task |
| 3 | /tasks/[0-9]* | DELETE | delete a task |
| 4 | /tasks/[0-9]* | PUT | update a task |
| 5 | /tasks/columns | GET | get task columns |
+----+----------------+--------+-------------------+
"
[entity]
(let [table (:table entity)
plural-table (plural table )]
[{:uri (str "/" (plural table) "[/]?")
:method "GET"
:desc (str "query " (plural table ))
:key plural-table
:type "menu"
:label (titleize table)
:parent-id 0
:entity table}
{:uri (str "/" plural-table "[/]?")
:method "POST"
:desc (str "create a new " table)
:key (str "new-" table)
:type "button"
:label "New"
:entity table}
{:uri (str "/" (plural table) "/[0-9]+")
:method "GET"
:desc (str "get a specific" table )
:key (str "get-" table)
:type "button"
:label "Edit"
:entity table}
{:uri (str "/" plural-table "/[0-9]+")
:method "DELETE"
:desc (str "delete a specific " table)
:key (str "delete-" table)
:type "button"
:label "Delete"
:entity table}
{:uri (str "/" plural-table "/[0-9]+")
:method "PUT"
:key (str "edit-" table)
:desc (str "update a specific " table)
:type "button"
:label "Update"
:entity table}
{:uri (str "/" plural-table "/meta")
:method "GET"
:key (str table "-meta")
:desc (str "get metadata of " table)
:type "button"
:label "Metadata"
:entity table}
{:uri (str "/" plural-table "/charts")
:method "GET"
:key (str table "-charts")
:desc (str "view " table " charts")
:label "Charts"
:type "button"
:entity table}
{:uri (str "/" plural-table "/excel")
:method "POST"
:key (str "import-" table "-excel")
:desc (str "import excel of " (plural table))
:type "button"
:label "Import"
:entity table}
{:uri (str "/" plural-table "/excel")
:method "GET"
:key (str "export-" table "-excel")
:desc (str "export excel of " (plural table))
:type "button"
:label "Export"
:entity table}
{:uri (str "/" plural-table "/excel/template")
:method "GET"
:key (str "export-" table "-excel-template")
:desc (str "get excel template of " (plural table))
:type "button"
:entity table}]))
(defn insert-ress
[ress]
(let [new-keys (->> ress
(map #(-> (k/insert* resource)
(k/values %)
(k/insert)))
(map :generated-key))
parent (first new-keys)
children (rest new-keys)]
(->> children
(map #(-> (k/update* resource)
(k/set-fields {:parent_id parent})
(k/where {:id %})
(k/update))))))
(defn insert-res-for-entity [entity]
(->> (get-res-for-entity entity)
(insert-ress)))
(defn get-res-to-role [entity role-id]
(let [res (k/select resource (k/where {:entity (:table entity)}))]
(->> res
(map (comp
(partial merge {:role_id role-id
:scope "system"})
#(hash-map :resource_id (:id %)))))))
(defn insert-res-to-role
[entity role-id]
(let [data (get-res-to-role entity role-id)]
(k/insert role-resource (k/values data))))
| |
520a53ba8ff6cce1306fc1efe8605cc64e7993a2f1000da1565d062ea7f96866 | bobzhang/fan | ctypN.mli |
(** Utilities for Fan's deriving mechanism *)
open Astfn
type vrn =
| Sum
| TyVrnEq
| TyVrnSup
| TyVrnInf
| TyVrnInfSup
| TyAbstr
type col = {
col_label:string;
col_mutable:bool;
col_ctyp:ctyp
}
type ty_info = {
name_exp: exp; (* [meta_int] *)
[ test _ a3 ]
ep0: ep; (* _a3*)
id_ep: ep; (* (_a3,_b3) *)
id_eps: ep list ; (* [_a3;_b3] *)
ty: ctyp; (* int *)
}
type vbranch =
[ `variant of (string* ctyp list )
| `abbrev of ident ]
type branch =
[ `branch of (string * ctyp list) ]
type destination =
|Obj of kind
|Str_item
and kind =
| Fold
| Iter (* Iter style *)
| Map (* Map style *)
| Concrete of ctyp
type warning_type =
| Abstract of string
| Qualified of string
(* Feed to user to compose an expession node *)
type record_col = {
re_label: string ;
re_mutable: bool ;
re_info: ty_info;
}
type record_info = record_col list
(* types below are used to tell fan how to produce
function of type [ident -> ident]
*)
type basic_id_transform =
[ `Pre of string
| `Post of string
| `Fun of (string->string) ]
type rhs_basic_id_transform =
[ basic_id_transform
| `Exp of string -> exp ]
type full_id_transform =
[ basic_id_transform
| `Idents of vid list -> vid
(* decompose to a list of ident and compose as an ident *)
| `Id of vid -> vid
(* just pass the ident to user do ident transform *)
| `Last of string -> vid
(* pass the string, and << .$old$. .$return$. >> *)
| `Obj of (string -> string) ]
val arrow_of_list : ctyp list -> ctyp
val app_arrow : ctyp list -> ctyp -> ctyp
val ( <+ ) : string list -> ctyp -> ctyp
val ( + > ) : ctyp list - > ctyp - > ctyp
* { [
match { : stru < type ' a list = [ A of int | B of ' a ] | } with
% stru { type $ x } - > name_length_of_tydcl x
( " list",1 ) ] }
match {:stru< type 'a list = [A of int | B of 'a] |} with
%stru{ type $x } -> name_length_of_tydcl x
("list",1) ]} *)
val name_length_of_tydcl : typedecl -> string * int
val gen_ty_of_tydcl : off:int -> typedecl -> ctyp
* { [ of_id_len ~off:2 ( < : ident < Loc.t > > , 3 ) | > eprint ;
( ' all_c0 , ' all_c1 , ' all_c2 ) Loc.t ] }
('all_c0, 'all_c1, 'all_c2) Loc.t]} *)
val of_id_len : off:int -> ident * int -> ctyp
*
{ [
( % stru- { type ' a list = [ A of int | B of ' a ] } | >
function | % stru- { type $ x } - > name_length_of_tydcl x
| > of_name_len ~off:1 ) ;
list ' all_b0
( < : stru < type list = [ A of int | B ] > > | >
fun [ < : stru < type .$x$. > > - > name_length_of_tydcl x
| > of_name_len ~off:1 | > eprint ] ) ;
] }
{[
( %stru-{ type 'a list = [A of int | B of 'a] } |>
function | %stru-{ type $x } -> name_length_of_tydcl x
|> of_name_len ~off:1 );
list 'all_b0
( <:stru< type list = [A of int | B] >> |>
fun [ <:stru<type .$x$. >> -> name_length_of_tydcl x
|> of_name_len ~off:1 |> eprint ] );
]}
*)
val of_name_len : off:int -> string * int -> ctyp
val list_of_record : name_ctyp -> col list
val gen_tuple_n : ctyp -> int -> ctyp
val repeat_arrow_n : ctyp -> int -> ctyp
*
[ result ] is a keyword
{ [
let ( name , len ) =
( % stru { type list ' a ' b = [ A of int | B of ' a ] }
| > function % stru{type $ x } - > name_length_of_tydcl x )
let f = mk_method_type ~number:2 ~prefix:["fmt " ]
( % ident { $ lid : name } , len ) ;
open Fan_sig
f ( Obj Map)| > eprint ;
! ' all_a0 ' all_a1 ' all_b0 ' all_b1 .
( ' self_type - > ' fmt - > ' all_a0 - > ' all_a0 - > ' all_b0 ) - >
( ' self_type - > ' fmt - > ' all_a1 - > ' all_a1 - > ' all_b1 ) - >
' fmt - >
list ' all_a0 ' all_a1 - > list ' all_a0 ' all_a1 - > list ' all_b0 ' all_b1
f ( Obj Iter)| > eprint ;
! ' all_a0 ' all_a1 .
( ' self_type - > ' fmt - > ' all_a0 - > ' all_a0 - > ' result ) - >
( ' self_type - > ' fmt - > ' all_a1 - > ' all_a1 - > ' result ) - >
' fmt - > list ' all_a0 ' all_a1 - > list ' all_a0 ' all_a1 - > ' result
f ( Obj Fold ) | > eprint ;
! ' all_a0 ' all_a1 .
( ' self_type - > ' fmt - > ' all_a0 - > ' all_a0 - > ' self_type ) - >
( ' self_type - > ' fmt - > ' all_a1 - > ' all_a1 - > ' self_type ) - >
' fmt - > list ' all_a0 ' all_a1 - > list ' all_a0 ' all_a1 - > ' self_type
f Str_item | > eprint ;
! ' all_a0 ' all_a1 .
( ' fmt - > ' all_a0 - > ' all_a0 - > ' result ) - >
( ' fmt - > ' all_a1 - > ' all_a1 - > ' result ) - >
' fmt - > list ' all_a0 ' all_a1 - > list ' all_a0 ' all_a1 - > ' result ] }
[result] is a keyword
{[
let (name,len) =
(%stru{ type list 'a 'b = [A of int | B of 'a] }
|> function %stru{type $x } -> name_length_of_tydcl x)
let f = mk_method_type ~number:2 ~prefix:["fmt"]
(%ident{ $lid:name },len);
open Fan_sig
f (Obj Map)|> eprint;
! 'all_a0 'all_a1 'all_b0 'all_b1.
('self_type -> 'fmt -> 'all_a0 -> 'all_a0 -> 'all_b0) ->
('self_type -> 'fmt -> 'all_a1 -> 'all_a1 -> 'all_b1) ->
'fmt ->
list 'all_a0 'all_a1 -> list 'all_a0 'all_a1 -> list 'all_b0 'all_b1
f (Obj Iter)|> eprint;
! 'all_a0 'all_a1.
('self_type -> 'fmt -> 'all_a0 -> 'all_a0 -> 'result) ->
('self_type -> 'fmt -> 'all_a1 -> 'all_a1 -> 'result) ->
'fmt -> list 'all_a0 'all_a1 -> list 'all_a0 'all_a1 -> 'result
f (Obj Fold) |> eprint;
! 'all_a0 'all_a1.
('self_type -> 'fmt -> 'all_a0 -> 'all_a0 -> 'self_type) ->
('self_type -> 'fmt -> 'all_a1 -> 'all_a1 -> 'self_type) ->
'fmt -> list 'all_a0 'all_a1 -> list 'all_a0 'all_a1 -> 'self_type
f Str_item |> eprint;
! 'all_a0 'all_a1.
('fmt -> 'all_a0 -> 'all_a0 -> 'result) ->
('fmt -> 'all_a1 -> 'all_a1 -> 'result) ->
'fmt -> list 'all_a0 'all_a1 -> list 'all_a0 'all_a1 -> 'result]} *)
val mk_method_type :
number:int ->
prefix:string list -> ident * int -> destination -> (ctyp*ctyp)
val mk_method_type_of_name :
number:int ->
prefix:string list -> string * int -> destination -> (ctyp*ctyp)
val mk_dest_type : destination : destination - > ident * int - >
val mk_obj : string -> string -> clfield -> stru
val is_recursive : typedecl -> bool
val is_abstract : typedecl -> bool
val abstract_list : typedecl -> int option
val qualified_app_list : ctyp -> (ident * ctyp list) option
val reduce_data_ctors:
or_ctyp ->
'a -> compose:('e -> 'a -> 'a) -> (string -> ctyp list -> 'e) -> 'a
(* @raise Invalid_argument *)
(* val of_stru: stru -> typedecl *)
val view_sum: or_ctyp -> branch list
val view_variant: row_field -> vbranch list
(* val ty_name_of_tydcl : typedecl -> ctyp *)
(* val gen_quantifiers : arity:int -> int -> ctyp *)
val transform : full_id_transform -> vid -> exp
val basic_transform :
[< `Fun of string -> string | `Post of string | `Pre of string ] ->
string -> string
val right_transform :
[< `Exp of string -> exp
| `Fun of string -> string
| `Post of string
| `Pre of string ] ->
string -> exp
val gen_tuple_abbrev : arity:int ->
annot:ctyp ->
destination:destination -> ident -> exp -> case
val pp_print_warning_type: Format.formatter -> warning_type -> unit
| null | https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/cold/ctypN.mli | ocaml | * Utilities for Fan's deriving mechanism
[meta_int]
_a3
(_a3,_b3)
[_a3;_b3]
int
Iter style
Map style
Feed to user to compose an expession node
types below are used to tell fan how to produce
function of type [ident -> ident]
decompose to a list of ident and compose as an ident
just pass the ident to user do ident transform
pass the string, and << .$old$. .$return$. >>
@raise Invalid_argument
val of_stru: stru -> typedecl
val ty_name_of_tydcl : typedecl -> ctyp
val gen_quantifiers : arity:int -> int -> ctyp |
open Astfn
type vrn =
| Sum
| TyVrnEq
| TyVrnSup
| TyVrnInf
| TyVrnInfSup
| TyAbstr
type col = {
col_label:string;
col_mutable:bool;
col_ctyp:ctyp
}
type ty_info = {
[ test _ a3 ]
}
type vbranch =
[ `variant of (string* ctyp list )
| `abbrev of ident ]
type branch =
[ `branch of (string * ctyp list) ]
type destination =
|Obj of kind
|Str_item
and kind =
| Fold
| Concrete of ctyp
type warning_type =
| Abstract of string
| Qualified of string
type record_col = {
re_label: string ;
re_mutable: bool ;
re_info: ty_info;
}
type record_info = record_col list
type basic_id_transform =
[ `Pre of string
| `Post of string
| `Fun of (string->string) ]
type rhs_basic_id_transform =
[ basic_id_transform
| `Exp of string -> exp ]
type full_id_transform =
[ basic_id_transform
| `Idents of vid list -> vid
| `Id of vid -> vid
| `Last of string -> vid
| `Obj of (string -> string) ]
val arrow_of_list : ctyp list -> ctyp
val app_arrow : ctyp list -> ctyp -> ctyp
val ( <+ ) : string list -> ctyp -> ctyp
val ( + > ) : ctyp list - > ctyp - > ctyp
* { [
match { : stru < type ' a list = [ A of int | B of ' a ] | } with
% stru { type $ x } - > name_length_of_tydcl x
( " list",1 ) ] }
match {:stru< type 'a list = [A of int | B of 'a] |} with
%stru{ type $x } -> name_length_of_tydcl x
("list",1) ]} *)
val name_length_of_tydcl : typedecl -> string * int
val gen_ty_of_tydcl : off:int -> typedecl -> ctyp
* { [ of_id_len ~off:2 ( < : ident < Loc.t > > , 3 ) | > eprint ;
( ' all_c0 , ' all_c1 , ' all_c2 ) Loc.t ] }
('all_c0, 'all_c1, 'all_c2) Loc.t]} *)
val of_id_len : off:int -> ident * int -> ctyp
*
{ [
( % stru- { type ' a list = [ A of int | B of ' a ] } | >
function | % stru- { type $ x } - > name_length_of_tydcl x
| > of_name_len ~off:1 ) ;
list ' all_b0
( < : stru < type list = [ A of int | B ] > > | >
fun [ < : stru < type .$x$. > > - > name_length_of_tydcl x
| > of_name_len ~off:1 | > eprint ] ) ;
] }
{[
( %stru-{ type 'a list = [A of int | B of 'a] } |>
function | %stru-{ type $x } -> name_length_of_tydcl x
|> of_name_len ~off:1 );
list 'all_b0
( <:stru< type list = [A of int | B] >> |>
fun [ <:stru<type .$x$. >> -> name_length_of_tydcl x
|> of_name_len ~off:1 |> eprint ] );
]}
*)
val of_name_len : off:int -> string * int -> ctyp
val list_of_record : name_ctyp -> col list
val gen_tuple_n : ctyp -> int -> ctyp
val repeat_arrow_n : ctyp -> int -> ctyp
*
[ result ] is a keyword
{ [
let ( name , len ) =
( % stru { type list ' a ' b = [ A of int | B of ' a ] }
| > function % stru{type $ x } - > name_length_of_tydcl x )
let f = mk_method_type ~number:2 ~prefix:["fmt " ]
( % ident { $ lid : name } , len ) ;
open Fan_sig
f ( Obj Map)| > eprint ;
! ' all_a0 ' all_a1 ' all_b0 ' all_b1 .
( ' self_type - > ' fmt - > ' all_a0 - > ' all_a0 - > ' all_b0 ) - >
( ' self_type - > ' fmt - > ' all_a1 - > ' all_a1 - > ' all_b1 ) - >
' fmt - >
list ' all_a0 ' all_a1 - > list ' all_a0 ' all_a1 - > list ' all_b0 ' all_b1
f ( Obj Iter)| > eprint ;
! ' all_a0 ' all_a1 .
( ' self_type - > ' fmt - > ' all_a0 - > ' all_a0 - > ' result ) - >
( ' self_type - > ' fmt - > ' all_a1 - > ' all_a1 - > ' result ) - >
' fmt - > list ' all_a0 ' all_a1 - > list ' all_a0 ' all_a1 - > ' result
f ( Obj Fold ) | > eprint ;
! ' all_a0 ' all_a1 .
( ' self_type - > ' fmt - > ' all_a0 - > ' all_a0 - > ' self_type ) - >
( ' self_type - > ' fmt - > ' all_a1 - > ' all_a1 - > ' self_type ) - >
' fmt - > list ' all_a0 ' all_a1 - > list ' all_a0 ' all_a1 - > ' self_type
f Str_item | > eprint ;
! ' all_a0 ' all_a1 .
( ' fmt - > ' all_a0 - > ' all_a0 - > ' result ) - >
( ' fmt - > ' all_a1 - > ' all_a1 - > ' result ) - >
' fmt - > list ' all_a0 ' all_a1 - > list ' all_a0 ' all_a1 - > ' result ] }
[result] is a keyword
{[
let (name,len) =
(%stru{ type list 'a 'b = [A of int | B of 'a] }
|> function %stru{type $x } -> name_length_of_tydcl x)
let f = mk_method_type ~number:2 ~prefix:["fmt"]
(%ident{ $lid:name },len);
open Fan_sig
f (Obj Map)|> eprint;
! 'all_a0 'all_a1 'all_b0 'all_b1.
('self_type -> 'fmt -> 'all_a0 -> 'all_a0 -> 'all_b0) ->
('self_type -> 'fmt -> 'all_a1 -> 'all_a1 -> 'all_b1) ->
'fmt ->
list 'all_a0 'all_a1 -> list 'all_a0 'all_a1 -> list 'all_b0 'all_b1
f (Obj Iter)|> eprint;
! 'all_a0 'all_a1.
('self_type -> 'fmt -> 'all_a0 -> 'all_a0 -> 'result) ->
('self_type -> 'fmt -> 'all_a1 -> 'all_a1 -> 'result) ->
'fmt -> list 'all_a0 'all_a1 -> list 'all_a0 'all_a1 -> 'result
f (Obj Fold) |> eprint;
! 'all_a0 'all_a1.
('self_type -> 'fmt -> 'all_a0 -> 'all_a0 -> 'self_type) ->
('self_type -> 'fmt -> 'all_a1 -> 'all_a1 -> 'self_type) ->
'fmt -> list 'all_a0 'all_a1 -> list 'all_a0 'all_a1 -> 'self_type
f Str_item |> eprint;
! 'all_a0 'all_a1.
('fmt -> 'all_a0 -> 'all_a0 -> 'result) ->
('fmt -> 'all_a1 -> 'all_a1 -> 'result) ->
'fmt -> list 'all_a0 'all_a1 -> list 'all_a0 'all_a1 -> 'result]} *)
val mk_method_type :
number:int ->
prefix:string list -> ident * int -> destination -> (ctyp*ctyp)
val mk_method_type_of_name :
number:int ->
prefix:string list -> string * int -> destination -> (ctyp*ctyp)
val mk_dest_type : destination : destination - > ident * int - >
val mk_obj : string -> string -> clfield -> stru
val is_recursive : typedecl -> bool
val is_abstract : typedecl -> bool
val abstract_list : typedecl -> int option
val qualified_app_list : ctyp -> (ident * ctyp list) option
val reduce_data_ctors:
or_ctyp ->
'a -> compose:('e -> 'a -> 'a) -> (string -> ctyp list -> 'e) -> 'a
val view_sum: or_ctyp -> branch list
val view_variant: row_field -> vbranch list
val transform : full_id_transform -> vid -> exp
val basic_transform :
[< `Fun of string -> string | `Post of string | `Pre of string ] ->
string -> string
val right_transform :
[< `Exp of string -> exp
| `Fun of string -> string
| `Post of string
| `Pre of string ] ->
string -> exp
val gen_tuple_abbrev : arity:int ->
annot:ctyp ->
destination:destination -> ident -> exp -> case
val pp_print_warning_type: Format.formatter -> warning_type -> unit
|
de013287842c33bab0e47f878a59f655a46c0a5bdca89bc431bb59bdd9556aa5 | brendanhay/amazonka | SamlStatusEnum.hs | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
{-# LANGUAGE OverloadedStrings #-}
# LANGUAGE PatternSynonyms #
{-# LANGUAGE StrictData #-}
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - imports #
Derived from AWS service descriptions , licensed under Apache 2.0 .
-- |
-- Module : Amazonka.WorkSpaces.Types.SamlStatusEnum
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
-- Stability : auto-generated
Portability : non - portable ( GHC extensions )
module Amazonka.WorkSpaces.Types.SamlStatusEnum
( SamlStatusEnum
( ..,
SamlStatusEnum_DISABLED,
SamlStatusEnum_ENABLED,
SamlStatusEnum_ENABLED_WITH_DIRECTORY_LOGIN_FALLBACK
),
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
newtype SamlStatusEnum = SamlStatusEnum'
{ fromSamlStatusEnum ::
Data.Text
}
deriving stock
( Prelude.Show,
Prelude.Read,
Prelude.Eq,
Prelude.Ord,
Prelude.Generic
)
deriving newtype
( Prelude.Hashable,
Prelude.NFData,
Data.FromText,
Data.ToText,
Data.ToByteString,
Data.ToLog,
Data.ToHeader,
Data.ToQuery,
Data.FromJSON,
Data.FromJSONKey,
Data.ToJSON,
Data.ToJSONKey,
Data.FromXML,
Data.ToXML
)
pattern SamlStatusEnum_DISABLED :: SamlStatusEnum
pattern SamlStatusEnum_DISABLED = SamlStatusEnum' "DISABLED"
pattern SamlStatusEnum_ENABLED :: SamlStatusEnum
pattern SamlStatusEnum_ENABLED = SamlStatusEnum' "ENABLED"
pattern SamlStatusEnum_ENABLED_WITH_DIRECTORY_LOGIN_FALLBACK :: SamlStatusEnum
pattern SamlStatusEnum_ENABLED_WITH_DIRECTORY_LOGIN_FALLBACK = SamlStatusEnum' "ENABLED_WITH_DIRECTORY_LOGIN_FALLBACK"
# COMPLETE
SamlStatusEnum_DISABLED ,
SamlStatusEnum_ENABLED ,
SamlStatusEnum_ENABLED_WITH_DIRECTORY_LOGIN_FALLBACK ,
SamlStatusEnum '
#
SamlStatusEnum_DISABLED,
SamlStatusEnum_ENABLED,
SamlStatusEnum_ENABLED_WITH_DIRECTORY_LOGIN_FALLBACK,
SamlStatusEnum'
#-}
| null | https://raw.githubusercontent.com/brendanhay/amazonka/09f52b75d2cfdff221b439280d3279d22690d6a6/lib/services/amazonka-workspaces/gen/Amazonka/WorkSpaces/Types/SamlStatusEnum.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE StrictData #
|
Module : Amazonka.WorkSpaces.Types.SamlStatusEnum
Stability : auto-generated | # LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE PatternSynonyms #
# LANGUAGE NoImplicitPrelude #
# OPTIONS_GHC -fno - warn - unused - imports #
Derived from AWS service descriptions , licensed under Apache 2.0 .
Copyright : ( c ) 2013 - 2023
License : Mozilla Public License , v. 2.0 .
Maintainer : < brendan.g.hay+ >
Portability : non - portable ( GHC extensions )
module Amazonka.WorkSpaces.Types.SamlStatusEnum
( SamlStatusEnum
( ..,
SamlStatusEnum_DISABLED,
SamlStatusEnum_ENABLED,
SamlStatusEnum_ENABLED_WITH_DIRECTORY_LOGIN_FALLBACK
),
)
where
import qualified Amazonka.Core as Core
import qualified Amazonka.Data as Data
import qualified Amazonka.Prelude as Prelude
newtype SamlStatusEnum = SamlStatusEnum'
{ fromSamlStatusEnum ::
Data.Text
}
deriving stock
( Prelude.Show,
Prelude.Read,
Prelude.Eq,
Prelude.Ord,
Prelude.Generic
)
deriving newtype
( Prelude.Hashable,
Prelude.NFData,
Data.FromText,
Data.ToText,
Data.ToByteString,
Data.ToLog,
Data.ToHeader,
Data.ToQuery,
Data.FromJSON,
Data.FromJSONKey,
Data.ToJSON,
Data.ToJSONKey,
Data.FromXML,
Data.ToXML
)
pattern SamlStatusEnum_DISABLED :: SamlStatusEnum
pattern SamlStatusEnum_DISABLED = SamlStatusEnum' "DISABLED"
pattern SamlStatusEnum_ENABLED :: SamlStatusEnum
pattern SamlStatusEnum_ENABLED = SamlStatusEnum' "ENABLED"
pattern SamlStatusEnum_ENABLED_WITH_DIRECTORY_LOGIN_FALLBACK :: SamlStatusEnum
pattern SamlStatusEnum_ENABLED_WITH_DIRECTORY_LOGIN_FALLBACK = SamlStatusEnum' "ENABLED_WITH_DIRECTORY_LOGIN_FALLBACK"
# COMPLETE
SamlStatusEnum_DISABLED ,
SamlStatusEnum_ENABLED ,
SamlStatusEnum_ENABLED_WITH_DIRECTORY_LOGIN_FALLBACK ,
SamlStatusEnum '
#
SamlStatusEnum_DISABLED,
SamlStatusEnum_ENABLED,
SamlStatusEnum_ENABLED_WITH_DIRECTORY_LOGIN_FALLBACK,
SamlStatusEnum'
#-}
|
c86adc25f3536f2a72adb9616cce4c504d9cc848d1b2fe11dd96a702e05784ac | clojure-interop/java-jdk | HTMLEditorKit$HTMLFactory.clj | (ns javax.swing.text.html.HTMLEditorKit$HTMLFactory
"A factory to build views for HTML. The following
table describes what this factory will build by
default.
TagView created
HTML.Tag.CONTENTInlineView
HTML.Tag.IMPLIEDjavax.swing.text.html.ParagraphView
HTML.Tag.Pjavax.swing.text.html.ParagraphView
HTML.Tag.H1javax.swing.text.html.ParagraphView
HTML.Tag.H2javax.swing.text.html.ParagraphView
HTML.Tag.H3javax.swing.text.html.ParagraphView
HTML.Tag.H4javax.swing.text.html.ParagraphView
HTML.Tag.H5javax.swing.text.html.ParagraphView
HTML.Tag.H6javax.swing.text.html.ParagraphView
HTML.Tag.DTjavax.swing.text.html.ParagraphView
HTML.Tag.MENUListView
HTML.Tag.DIRListView
HTML.Tag.ULListView
HTML.Tag.OLListView
HTML.Tag.LIBlockView
HTML.Tag.DLBlockView
HTML.Tag.DDBlockView
HTML.Tag.BODYBlockView
HTML.Tag.HTMLBlockView
HTML.Tag.CENTERBlockView
HTML.Tag.DIVBlockView
HTML.Tag.BLOCKQUOTEBlockView
HTML.Tag.PREBlockView
HTML.Tag.BLOCKQUOTEBlockView
HTML.Tag.PREBlockView
HTML.Tag.IMGImageView
HTML.Tag.HRHRuleView
HTML.Tag.BRBRView
HTML.Tag.TABLEjavax.swing.text.html.TableView
HTML.Tag.INPUTFormView
HTML.Tag.SELECTFormView
HTML.Tag.TEXTAREAFormView
HTML.Tag.OBJECTObjectView
HTML.Tag.FRAMESETFrameSetView
HTML.Tag.FRAMEFrameView"
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.text.html HTMLEditorKit$HTMLFactory]))
(defn ->html-factory
"Constructor."
(^HTMLEditorKit$HTMLFactory []
(new HTMLEditorKit$HTMLFactory )))
(defn create
"Creates a view from an element.
elem - the element - `javax.swing.text.Element`
returns: the view - `javax.swing.text.View`"
(^javax.swing.text.View [^HTMLEditorKit$HTMLFactory this ^javax.swing.text.Element elem]
(-> this (.create elem))))
| null | https://raw.githubusercontent.com/clojure-interop/java-jdk/8d7a223e0f9a0965eb0332fad595cf7649d9d96e/javax.swing/src/javax/swing/text/html/HTMLEditorKit%24HTMLFactory.clj | clojure | (ns javax.swing.text.html.HTMLEditorKit$HTMLFactory
"A factory to build views for HTML. The following
table describes what this factory will build by
default.
TagView created
HTML.Tag.CONTENTInlineView
HTML.Tag.IMPLIEDjavax.swing.text.html.ParagraphView
HTML.Tag.Pjavax.swing.text.html.ParagraphView
HTML.Tag.H1javax.swing.text.html.ParagraphView
HTML.Tag.H2javax.swing.text.html.ParagraphView
HTML.Tag.H3javax.swing.text.html.ParagraphView
HTML.Tag.H4javax.swing.text.html.ParagraphView
HTML.Tag.H5javax.swing.text.html.ParagraphView
HTML.Tag.H6javax.swing.text.html.ParagraphView
HTML.Tag.DTjavax.swing.text.html.ParagraphView
HTML.Tag.MENUListView
HTML.Tag.DIRListView
HTML.Tag.ULListView
HTML.Tag.OLListView
HTML.Tag.LIBlockView
HTML.Tag.DLBlockView
HTML.Tag.DDBlockView
HTML.Tag.BODYBlockView
HTML.Tag.HTMLBlockView
HTML.Tag.CENTERBlockView
HTML.Tag.DIVBlockView
HTML.Tag.BLOCKQUOTEBlockView
HTML.Tag.PREBlockView
HTML.Tag.BLOCKQUOTEBlockView
HTML.Tag.PREBlockView
HTML.Tag.IMGImageView
HTML.Tag.HRHRuleView
HTML.Tag.BRBRView
HTML.Tag.TABLEjavax.swing.text.html.TableView
HTML.Tag.INPUTFormView
HTML.Tag.SELECTFormView
HTML.Tag.TEXTAREAFormView
HTML.Tag.OBJECTObjectView
HTML.Tag.FRAMESETFrameSetView
HTML.Tag.FRAMEFrameView"
(:refer-clojure :only [require comment defn ->])
(:import [javax.swing.text.html HTMLEditorKit$HTMLFactory]))
(defn ->html-factory
"Constructor."
(^HTMLEditorKit$HTMLFactory []
(new HTMLEditorKit$HTMLFactory )))
(defn create
"Creates a view from an element.
elem - the element - `javax.swing.text.Element`
returns: the view - `javax.swing.text.View`"
(^javax.swing.text.View [^HTMLEditorKit$HTMLFactory this ^javax.swing.text.Element elem]
(-> this (.create elem))))
| |
f6d1dfc79a00c2ad813dbc45ff7c240e68c683ad1ca67fe7f6791cecd660bc36 | bvaugon/ocapic | list.mli | (**************************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
(* en Automatique. *)
(* *)
(* All rights reserved. This file is distributed under the terms of *)
the GNU Lesser General Public License version 2.1 , with the
(* special exception on linking described in the file LICENSE. *)
(* *)
(**************************************************************************)
* List operations .
Some functions are flagged as not tail - recursive . A tail - recursive
function uses constant stack space , while a non - tail - recursive function
uses stack space proportional to the length of its list argument , which
can be a problem with very long lists . When the function takes several
list arguments , an approximate formula giving stack usage ( in some
unspecified constant unit ) is shown in parentheses .
The above considerations can usually be ignored if your lists are not
longer than about 10000 elements .
Some functions are flagged as not tail-recursive. A tail-recursive
function uses constant stack space, while a non-tail-recursive function
uses stack space proportional to the length of its list argument, which
can be a problem with very long lists. When the function takes several
list arguments, an approximate formula giving stack usage (in some
unspecified constant unit) is shown in parentheses.
The above considerations can usually be ignored if your lists are not
longer than about 10000 elements.
*)
val length : 'a list -> int
(** Return the length (number of elements) of the given list. *)
val cons : 'a -> 'a list -> 'a list
* [ cons x xs ] is [ x : : xs ]
@since 4.03.0
@since 4.03.0
*)
val hd : 'a list -> 'a
* Return the first element of the given list . Raise
[ Failure " hd " ] if the list is empty .
[Failure "hd"] if the list is empty. *)
val tl : 'a list -> 'a list
* Return the given list without its first element . Raise
[ Failure " tl " ] if the list is empty .
[Failure "tl"] if the list is empty. *)
val nth : 'a list -> int -> 'a
* Return the [ n]-th element of the given list .
The first element ( head of the list ) is at position 0 .
Raise [ Failure " nth " ] if the list is too short .
Raise [ Invalid_argument " List.nth " ] if [ n ] is negative .
The first element (head of the list) is at position 0.
Raise [Failure "nth"] if the list is too short.
Raise [Invalid_argument "List.nth"] if [n] is negative. *)
val rev : 'a list -> 'a list
(** List reversal. *)
val append : 'a list -> 'a list -> 'a list
* two lists . Same as the infix operator [ @ ] .
Not tail - recursive ( length of the first argument ) .
Not tail-recursive (length of the first argument). *)
val rev_append : 'a list -> 'a list -> 'a list
* [ l1 l2 ] reverses [ l1 ] and concatenates it to [ l2 ] .
This is equivalent to { ! List.rev } [ l1 @ l2 ] , but [ rev_append ] is
tail - recursive and more efficient .
This is equivalent to {!List.rev}[ l1 @ l2], but [rev_append] is
tail-recursive and more efficient. *)
val concat : 'a list list -> 'a list
* a list of lists . The elements of the argument are all
concatenated together ( in the same order ) to give the result .
Not tail - recursive
( length of the argument + length of the longest sub - list ) .
concatenated together (in the same order) to give the result.
Not tail-recursive
(length of the argument + length of the longest sub-list). *)
val flatten : 'a list list -> 'a list
(** Same as [concat]. Not tail-recursive
(length of the argument + length of the longest sub-list). *)
* { 6 Iterators }
val iter : ('a -> unit) -> 'a list -> unit
* [ List.iter f [ a1 ; ... ; an ] ] applies function [ f ] in turn to
[ a1 ; ... ; an ] . It is equivalent to
[ begin f a1 ; f a2 ; ... ; f an ; ( ) end ] .
[a1; ...; an]. It is equivalent to
[begin f a1; f a2; ...; f an; () end]. *)
val iteri : (int -> 'a -> unit) -> 'a list -> unit
* Same as { ! List.iter } , but the function is applied to the index of
the element as first argument ( counting from 0 ) , and the element
itself as second argument .
@since 4.00.0
the element as first argument (counting from 0), and the element
itself as second argument.
@since 4.00.0
*)
val map : ('a -> 'b) -> 'a list -> 'b list
* [ List.map f [ a1 ; ... ; an ] ] applies function [ f ] to [ a1 , ... , an ] ,
and builds the list [ [ f a1 ; ... ; f an ] ]
with the results returned by [ f ] . Not tail - recursive .
and builds the list [[f a1; ...; f an]]
with the results returned by [f]. Not tail-recursive. *)
val mapi : (int -> 'a -> 'b) -> 'a list -> 'b list
* Same as { ! } , but the function is applied to the index of
the element as first argument ( counting from 0 ) , and the element
itself as second argument . Not tail - recursive .
@since 4.00.0
the element as first argument (counting from 0), and the element
itself as second argument. Not tail-recursive.
@since 4.00.0
*)
val rev_map : ('a -> 'b) -> 'a list -> 'b list
* [ List.rev_map f l ] gives the same result as
{ ! List.rev } [ ( ] { ! } [ f l ) ] , but is tail - recursive and
more efficient .
{!List.rev}[ (]{!List.map}[ f l)], but is tail-recursive and
more efficient. *)
val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b list -> 'a
(** [List.fold_left f a [b1; ...; bn]] is
[f (... (f (f a b1) b2) ...) bn]. *)
val fold_right : ('a -> 'b -> 'b) -> 'a list -> 'b -> 'b
* [ f [ a1 ; ... ; an ] b ] is
[ f a1 ( f a2 ( ... ( f an b ) ... ) ) ] . Not tail - recursive .
[f a1 (f a2 (... (f an b) ...))]. Not tail-recursive. *)
* { 6 Iterators on two lists }
val iter2 : ('a -> 'b -> unit) -> 'a list -> 'b list -> unit
* [ List.iter2 f [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] calls in turn
[ f a1 b1 ; ... ; f an bn ] .
Raise [ Invalid_argument ] if the two lists are determined
to have different lengths .
[f a1 b1; ...; f an bn].
Raise [Invalid_argument] if the two lists are determined
to have different lengths. *)
val map2 : ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
* [ List.map2 f [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] is
[ [ f a1 b1 ; ... ; f an bn ] ] .
Raise [ Invalid_argument ] if the two lists are determined
to have different lengths . Not tail - recursive .
[[f a1 b1; ...; f an bn]].
Raise [Invalid_argument] if the two lists are determined
to have different lengths. Not tail-recursive. *)
val rev_map2 : ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
(** [List.rev_map2 f l1 l2] gives the same result as
{!List.rev}[ (]{!List.map2}[ f l1 l2)], but is tail-recursive and
more efficient. *)
val fold_left2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b list -> 'c list -> 'a
* [ List.fold_left2 f a [ b1 ; ... ; bn ] [ c1 ; ... ; cn ] ] is
[ f ( ... ( f ( f a b1 c1 ) b2 c2 ) ... ) bn cn ] .
Raise [ Invalid_argument ] if the two lists are determined
to have different lengths .
[f (... (f (f a b1 c1) b2 c2) ...) bn cn].
Raise [Invalid_argument] if the two lists are determined
to have different lengths. *)
val fold_right2 : ('a -> 'b -> 'c -> 'c) -> 'a list -> 'b list -> 'c -> 'c
* [ List.fold_right2 f [ a1 ; ... ; an ] [ b1 ; ... ; bn ] c ] is
[ f a1 b1 ( f a2 b2 ( ... ( f an bn c ) ... ) ) ] .
Raise [ Invalid_argument ] if the two lists are determined
to have different lengths . Not tail - recursive .
[f a1 b1 (f a2 b2 (... (f an bn c) ...))].
Raise [Invalid_argument] if the two lists are determined
to have different lengths. Not tail-recursive. *)
* { 6 List scanning }
val for_all : ('a -> bool) -> 'a list -> bool
(** [for_all p [a1; ...; an]] checks if all elements of the list
satisfy the predicate [p]. That is, it returns
[(p a1) && (p a2) && ... && (p an)]. *)
val exists : ('a -> bool) -> 'a list -> bool
* [ exists p [ a1 ; ... ; an ] ] checks if at least one element of
the list satisfies the predicate [ p ] . That is , it returns
[ ( p a1 ) || ( p a2 ) || ... || ( p an ) ] .
the list satisfies the predicate [p]. That is, it returns
[(p a1) || (p a2) || ... || (p an)]. *)
val for_all2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool
* Same as { ! , but for a two - argument predicate .
Raise [ Invalid_argument ] if the two lists are determined
to have different lengths .
Raise [Invalid_argument] if the two lists are determined
to have different lengths. *)
val exists2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool
* Same as { ! List.exists } , but for a two - argument predicate .
Raise [ Invalid_argument ] if the two lists are determined
to have different lengths .
Raise [Invalid_argument] if the two lists are determined
to have different lengths. *)
val mem : 'a -> 'a list -> bool
(** [mem a l] is true if and only if [a] is equal
to an element of [l]. *)
val memq : 'a -> 'a list -> bool
(** Same as {!List.mem}, but uses physical equality instead of structural
equality to compare list elements. *)
* { 6 List searching }
val find : ('a -> bool) -> 'a list -> 'a
* [ find p l ] returns the first element of the list [ l ]
that satisfies the predicate [ p ] .
Raise [ Not_found ] if there is no value that satisfies [ p ] in the
list [ l ] .
that satisfies the predicate [p].
Raise [Not_found] if there is no value that satisfies [p] in the
list [l]. *)
val filter : ('a -> bool) -> 'a list -> 'a list
(** [filter p l] returns all the elements of the list [l]
that satisfy the predicate [p]. The order of the elements
in the input list is preserved. *)
val find_all : ('a -> bool) -> 'a list -> 'a list
* [ find_all ] is another name for { ! } .
val partition : ('a -> bool) -> 'a list -> 'a list * 'a list
(** [partition p l] returns a pair of lists [(l1, l2)], where
[l1] is the list of all the elements of [l] that
satisfy the predicate [p], and [l2] is the list of all the
elements of [l] that do not satisfy [p].
The order of the elements in the input list is preserved. *)
* { 6 Association lists }
val assoc : 'a -> ('a * 'b) list -> 'b
(** [assoc a l] returns the value associated with key [a] in the list of
pairs [l]. That is,
[assoc a [ ...; (a,b); ...] = b]
if [(a,b)] is the leftmost binding of [a] in list [l].
Raise [Not_found] if there is no value associated with [a] in the
list [l]. *)
val assq : 'a -> ('a * 'b) list -> 'b
(** Same as {!List.assoc}, but uses physical equality instead of structural
equality to compare keys. *)
val mem_assoc : 'a -> ('a * 'b) list -> bool
(** Same as {!List.assoc}, but simply return true if a binding exists,
and false if no bindings exist for the given key. *)
val mem_assq : 'a -> ('a * 'b) list -> bool
* Same as { ! , but uses physical equality instead of
structural equality to compare keys .
structural equality to compare keys. *)
val remove_assoc : 'a -> ('a * 'b) list -> ('a * 'b) list
* [ remove_assoc a l ] returns the list of
pairs [ l ] without the first pair with key [ a ] , if any .
Not tail - recursive .
pairs [l] without the first pair with key [a], if any.
Not tail-recursive. *)
val remove_assq : 'a -> ('a * 'b) list -> ('a * 'b) list
(** Same as {!List.remove_assoc}, but uses physical equality instead
of structural equality to compare keys. Not tail-recursive. *)
* { 6 Lists of pairs }
val split : ('a * 'b) list -> 'a list * 'b list
* Transform a list of pairs into a pair of lists :
[ split [ ( a1,b1 ) ; ... ; ( an , bn ) ] ] is [ ( [ a1 ; ... ; an ] , [ b1 ; ... ; bn ] ) ] .
Not tail - recursive .
[split [(a1,b1); ...; (an,bn)]] is [([a1; ...; an], [b1; ...; bn])].
Not tail-recursive.
*)
val combine : 'a list -> 'b list -> ('a * 'b) list
* Transform a pair of lists into a list of pairs :
[ combine [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] is
[ [ ( a1,b1 ) ; ... ; ( an , bn ) ] ] .
Raise [ Invalid_argument ] if the two lists
have different lengths . Not tail - recursive .
[combine [a1; ...; an] [b1; ...; bn]] is
[[(a1,b1); ...; (an,bn)]].
Raise [Invalid_argument] if the two lists
have different lengths. Not tail-recursive. *)
* { 6 Sorting }
val sort : ('a -> 'a -> int) -> 'a list -> 'a list
* Sort a list in increasing order according to a comparison
function . The comparison function must return 0 if its arguments
compare as equal , a positive integer if the first is greater ,
and a negative integer if the first is smaller ( see Array.sort for
a complete specification ) . For example ,
{ ! Pervasives.compare } is a suitable comparison function .
The resulting list is sorted in increasing order .
[ List.sort ] is guaranteed to run in constant heap space
( in addition to the size of the result list ) and logarithmic
stack space .
The current implementation uses Merge Sort . It runs in constant
heap space and logarithmic stack space .
function. The comparison function must return 0 if its arguments
compare as equal, a positive integer if the first is greater,
and a negative integer if the first is smaller (see Array.sort for
a complete specification). For example,
{!Pervasives.compare} is a suitable comparison function.
The resulting list is sorted in increasing order.
[List.sort] is guaranteed to run in constant heap space
(in addition to the size of the result list) and logarithmic
stack space.
The current implementation uses Merge Sort. It runs in constant
heap space and logarithmic stack space.
*)
val stable_sort : ('a -> 'a -> int) -> 'a list -> 'a list
(** Same as {!List.sort}, but the sorting algorithm is guaranteed to
be stable (i.e. elements that compare equal are kept in their
original order) .
The current implementation uses Merge Sort. It runs in constant
heap space and logarithmic stack space.
*)
val fast_sort : ('a -> 'a -> int) -> 'a list -> 'a list
(** Same as {!List.sort} or {!List.stable_sort}, whichever is faster
on typical input. *)
val sort_uniq : ('a -> 'a -> int) -> 'a list -> 'a list
* Same as { ! List.sort } , but also remove duplicates .
@since 4.02.0
@since 4.02.0 *)
val merge : ('a -> 'a -> int) -> 'a list -> 'a list -> 'a list
* Merge two lists :
Assuming that [ l1 ] and [ l2 ] are sorted according to the
comparison function [ cmp ] , [ merge l1 l2 ] will return a
sorted list containting all the elements of [ l1 ] and [ l2 ] .
If several elements compare equal , the elements of [ l1 ] will be
before the elements of [ l2 ] .
Not tail - recursive ( sum of the lengths of the arguments ) .
Assuming that [l1] and [l2] are sorted according to the
comparison function [cmp], [merge cmp l1 l2] will return a
sorted list containting all the elements of [l1] and [l2].
If several elements compare equal, the elements of [l1] will be
before the elements of [l2].
Not tail-recursive (sum of the lengths of the arguments).
*)
| null | https://raw.githubusercontent.com/bvaugon/ocapic/a14cd9ec3f5022aeb5fe2264d595d7e8f1ddf58a/lib/list.mli | ocaml | ************************************************************************
OCaml
en Automatique.
All rights reserved. This file is distributed under the terms of
special exception on linking described in the file LICENSE.
************************************************************************
* Return the length (number of elements) of the given list.
* List reversal.
* Same as [concat]. Not tail-recursive
(length of the argument + length of the longest sub-list).
* [List.fold_left f a [b1; ...; bn]] is
[f (... (f (f a b1) b2) ...) bn].
* [List.rev_map2 f l1 l2] gives the same result as
{!List.rev}[ (]{!List.map2}[ f l1 l2)], but is tail-recursive and
more efficient.
* [for_all p [a1; ...; an]] checks if all elements of the list
satisfy the predicate [p]. That is, it returns
[(p a1) && (p a2) && ... && (p an)].
* [mem a l] is true if and only if [a] is equal
to an element of [l].
* Same as {!List.mem}, but uses physical equality instead of structural
equality to compare list elements.
* [filter p l] returns all the elements of the list [l]
that satisfy the predicate [p]. The order of the elements
in the input list is preserved.
* [partition p l] returns a pair of lists [(l1, l2)], where
[l1] is the list of all the elements of [l] that
satisfy the predicate [p], and [l2] is the list of all the
elements of [l] that do not satisfy [p].
The order of the elements in the input list is preserved.
* [assoc a l] returns the value associated with key [a] in the list of
pairs [l]. That is,
[assoc a [ ...; (a,b); ...] = b]
if [(a,b)] is the leftmost binding of [a] in list [l].
Raise [Not_found] if there is no value associated with [a] in the
list [l].
* Same as {!List.assoc}, but uses physical equality instead of structural
equality to compare keys.
* Same as {!List.assoc}, but simply return true if a binding exists,
and false if no bindings exist for the given key.
* Same as {!List.remove_assoc}, but uses physical equality instead
of structural equality to compare keys. Not tail-recursive.
* Same as {!List.sort}, but the sorting algorithm is guaranteed to
be stable (i.e. elements that compare equal are kept in their
original order) .
The current implementation uses Merge Sort. It runs in constant
heap space and logarithmic stack space.
* Same as {!List.sort} or {!List.stable_sort}, whichever is faster
on typical input. | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
the GNU Lesser General Public License version 2.1 , with the
* List operations .
Some functions are flagged as not tail - recursive . A tail - recursive
function uses constant stack space , while a non - tail - recursive function
uses stack space proportional to the length of its list argument , which
can be a problem with very long lists . When the function takes several
list arguments , an approximate formula giving stack usage ( in some
unspecified constant unit ) is shown in parentheses .
The above considerations can usually be ignored if your lists are not
longer than about 10000 elements .
Some functions are flagged as not tail-recursive. A tail-recursive
function uses constant stack space, while a non-tail-recursive function
uses stack space proportional to the length of its list argument, which
can be a problem with very long lists. When the function takes several
list arguments, an approximate formula giving stack usage (in some
unspecified constant unit) is shown in parentheses.
The above considerations can usually be ignored if your lists are not
longer than about 10000 elements.
*)
val length : 'a list -> int
val cons : 'a -> 'a list -> 'a list
* [ cons x xs ] is [ x : : xs ]
@since 4.03.0
@since 4.03.0
*)
val hd : 'a list -> 'a
* Return the first element of the given list . Raise
[ Failure " hd " ] if the list is empty .
[Failure "hd"] if the list is empty. *)
val tl : 'a list -> 'a list
* Return the given list without its first element . Raise
[ Failure " tl " ] if the list is empty .
[Failure "tl"] if the list is empty. *)
val nth : 'a list -> int -> 'a
* Return the [ n]-th element of the given list .
The first element ( head of the list ) is at position 0 .
Raise [ Failure " nth " ] if the list is too short .
Raise [ Invalid_argument " List.nth " ] if [ n ] is negative .
The first element (head of the list) is at position 0.
Raise [Failure "nth"] if the list is too short.
Raise [Invalid_argument "List.nth"] if [n] is negative. *)
val rev : 'a list -> 'a list
val append : 'a list -> 'a list -> 'a list
* two lists . Same as the infix operator [ @ ] .
Not tail - recursive ( length of the first argument ) .
Not tail-recursive (length of the first argument). *)
val rev_append : 'a list -> 'a list -> 'a list
* [ l1 l2 ] reverses [ l1 ] and concatenates it to [ l2 ] .
This is equivalent to { ! List.rev } [ l1 @ l2 ] , but [ rev_append ] is
tail - recursive and more efficient .
This is equivalent to {!List.rev}[ l1 @ l2], but [rev_append] is
tail-recursive and more efficient. *)
val concat : 'a list list -> 'a list
* a list of lists . The elements of the argument are all
concatenated together ( in the same order ) to give the result .
Not tail - recursive
( length of the argument + length of the longest sub - list ) .
concatenated together (in the same order) to give the result.
Not tail-recursive
(length of the argument + length of the longest sub-list). *)
val flatten : 'a list list -> 'a list
* { 6 Iterators }
val iter : ('a -> unit) -> 'a list -> unit
* [ List.iter f [ a1 ; ... ; an ] ] applies function [ f ] in turn to
[ a1 ; ... ; an ] . It is equivalent to
[ begin f a1 ; f a2 ; ... ; f an ; ( ) end ] .
[a1; ...; an]. It is equivalent to
[begin f a1; f a2; ...; f an; () end]. *)
val iteri : (int -> 'a -> unit) -> 'a list -> unit
* Same as { ! List.iter } , but the function is applied to the index of
the element as first argument ( counting from 0 ) , and the element
itself as second argument .
@since 4.00.0
the element as first argument (counting from 0), and the element
itself as second argument.
@since 4.00.0
*)
val map : ('a -> 'b) -> 'a list -> 'b list
* [ List.map f [ a1 ; ... ; an ] ] applies function [ f ] to [ a1 , ... , an ] ,
and builds the list [ [ f a1 ; ... ; f an ] ]
with the results returned by [ f ] . Not tail - recursive .
and builds the list [[f a1; ...; f an]]
with the results returned by [f]. Not tail-recursive. *)
val mapi : (int -> 'a -> 'b) -> 'a list -> 'b list
* Same as { ! } , but the function is applied to the index of
the element as first argument ( counting from 0 ) , and the element
itself as second argument . Not tail - recursive .
@since 4.00.0
the element as first argument (counting from 0), and the element
itself as second argument. Not tail-recursive.
@since 4.00.0
*)
val rev_map : ('a -> 'b) -> 'a list -> 'b list
* [ List.rev_map f l ] gives the same result as
{ ! List.rev } [ ( ] { ! } [ f l ) ] , but is tail - recursive and
more efficient .
{!List.rev}[ (]{!List.map}[ f l)], but is tail-recursive and
more efficient. *)
val fold_left : ('a -> 'b -> 'a) -> 'a -> 'b list -> 'a
val fold_right : ('a -> 'b -> 'b) -> 'a list -> 'b -> 'b
* [ f [ a1 ; ... ; an ] b ] is
[ f a1 ( f a2 ( ... ( f an b ) ... ) ) ] . Not tail - recursive .
[f a1 (f a2 (... (f an b) ...))]. Not tail-recursive. *)
* { 6 Iterators on two lists }
val iter2 : ('a -> 'b -> unit) -> 'a list -> 'b list -> unit
* [ List.iter2 f [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] calls in turn
[ f a1 b1 ; ... ; f an bn ] .
Raise [ Invalid_argument ] if the two lists are determined
to have different lengths .
[f a1 b1; ...; f an bn].
Raise [Invalid_argument] if the two lists are determined
to have different lengths. *)
val map2 : ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
* [ List.map2 f [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] is
[ [ f a1 b1 ; ... ; f an bn ] ] .
Raise [ Invalid_argument ] if the two lists are determined
to have different lengths . Not tail - recursive .
[[f a1 b1; ...; f an bn]].
Raise [Invalid_argument] if the two lists are determined
to have different lengths. Not tail-recursive. *)
val rev_map2 : ('a -> 'b -> 'c) -> 'a list -> 'b list -> 'c list
val fold_left2 : ('a -> 'b -> 'c -> 'a) -> 'a -> 'b list -> 'c list -> 'a
* [ List.fold_left2 f a [ b1 ; ... ; bn ] [ c1 ; ... ; cn ] ] is
[ f ( ... ( f ( f a b1 c1 ) b2 c2 ) ... ) bn cn ] .
Raise [ Invalid_argument ] if the two lists are determined
to have different lengths .
[f (... (f (f a b1 c1) b2 c2) ...) bn cn].
Raise [Invalid_argument] if the two lists are determined
to have different lengths. *)
val fold_right2 : ('a -> 'b -> 'c -> 'c) -> 'a list -> 'b list -> 'c -> 'c
* [ List.fold_right2 f [ a1 ; ... ; an ] [ b1 ; ... ; bn ] c ] is
[ f a1 b1 ( f a2 b2 ( ... ( f an bn c ) ... ) ) ] .
Raise [ Invalid_argument ] if the two lists are determined
to have different lengths . Not tail - recursive .
[f a1 b1 (f a2 b2 (... (f an bn c) ...))].
Raise [Invalid_argument] if the two lists are determined
to have different lengths. Not tail-recursive. *)
* { 6 List scanning }
val for_all : ('a -> bool) -> 'a list -> bool
val exists : ('a -> bool) -> 'a list -> bool
* [ exists p [ a1 ; ... ; an ] ] checks if at least one element of
the list satisfies the predicate [ p ] . That is , it returns
[ ( p a1 ) || ( p a2 ) || ... || ( p an ) ] .
the list satisfies the predicate [p]. That is, it returns
[(p a1) || (p a2) || ... || (p an)]. *)
val for_all2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool
* Same as { ! , but for a two - argument predicate .
Raise [ Invalid_argument ] if the two lists are determined
to have different lengths .
Raise [Invalid_argument] if the two lists are determined
to have different lengths. *)
val exists2 : ('a -> 'b -> bool) -> 'a list -> 'b list -> bool
* Same as { ! List.exists } , but for a two - argument predicate .
Raise [ Invalid_argument ] if the two lists are determined
to have different lengths .
Raise [Invalid_argument] if the two lists are determined
to have different lengths. *)
val mem : 'a -> 'a list -> bool
val memq : 'a -> 'a list -> bool
* { 6 List searching }
val find : ('a -> bool) -> 'a list -> 'a
* [ find p l ] returns the first element of the list [ l ]
that satisfies the predicate [ p ] .
Raise [ Not_found ] if there is no value that satisfies [ p ] in the
list [ l ] .
that satisfies the predicate [p].
Raise [Not_found] if there is no value that satisfies [p] in the
list [l]. *)
val filter : ('a -> bool) -> 'a list -> 'a list
val find_all : ('a -> bool) -> 'a list -> 'a list
* [ find_all ] is another name for { ! } .
val partition : ('a -> bool) -> 'a list -> 'a list * 'a list
* { 6 Association lists }
val assoc : 'a -> ('a * 'b) list -> 'b
val assq : 'a -> ('a * 'b) list -> 'b
val mem_assoc : 'a -> ('a * 'b) list -> bool
val mem_assq : 'a -> ('a * 'b) list -> bool
* Same as { ! , but uses physical equality instead of
structural equality to compare keys .
structural equality to compare keys. *)
val remove_assoc : 'a -> ('a * 'b) list -> ('a * 'b) list
* [ remove_assoc a l ] returns the list of
pairs [ l ] without the first pair with key [ a ] , if any .
Not tail - recursive .
pairs [l] without the first pair with key [a], if any.
Not tail-recursive. *)
val remove_assq : 'a -> ('a * 'b) list -> ('a * 'b) list
* { 6 Lists of pairs }
val split : ('a * 'b) list -> 'a list * 'b list
* Transform a list of pairs into a pair of lists :
[ split [ ( a1,b1 ) ; ... ; ( an , bn ) ] ] is [ ( [ a1 ; ... ; an ] , [ b1 ; ... ; bn ] ) ] .
Not tail - recursive .
[split [(a1,b1); ...; (an,bn)]] is [([a1; ...; an], [b1; ...; bn])].
Not tail-recursive.
*)
val combine : 'a list -> 'b list -> ('a * 'b) list
* Transform a pair of lists into a list of pairs :
[ combine [ a1 ; ... ; an ] [ b1 ; ... ; bn ] ] is
[ [ ( a1,b1 ) ; ... ; ( an , bn ) ] ] .
Raise [ Invalid_argument ] if the two lists
have different lengths . Not tail - recursive .
[combine [a1; ...; an] [b1; ...; bn]] is
[[(a1,b1); ...; (an,bn)]].
Raise [Invalid_argument] if the two lists
have different lengths. Not tail-recursive. *)
* { 6 Sorting }
val sort : ('a -> 'a -> int) -> 'a list -> 'a list
* Sort a list in increasing order according to a comparison
function . The comparison function must return 0 if its arguments
compare as equal , a positive integer if the first is greater ,
and a negative integer if the first is smaller ( see Array.sort for
a complete specification ) . For example ,
{ ! Pervasives.compare } is a suitable comparison function .
The resulting list is sorted in increasing order .
[ List.sort ] is guaranteed to run in constant heap space
( in addition to the size of the result list ) and logarithmic
stack space .
The current implementation uses Merge Sort . It runs in constant
heap space and logarithmic stack space .
function. The comparison function must return 0 if its arguments
compare as equal, a positive integer if the first is greater,
and a negative integer if the first is smaller (see Array.sort for
a complete specification). For example,
{!Pervasives.compare} is a suitable comparison function.
The resulting list is sorted in increasing order.
[List.sort] is guaranteed to run in constant heap space
(in addition to the size of the result list) and logarithmic
stack space.
The current implementation uses Merge Sort. It runs in constant
heap space and logarithmic stack space.
*)
val stable_sort : ('a -> 'a -> int) -> 'a list -> 'a list
val fast_sort : ('a -> 'a -> int) -> 'a list -> 'a list
val sort_uniq : ('a -> 'a -> int) -> 'a list -> 'a list
* Same as { ! List.sort } , but also remove duplicates .
@since 4.02.0
@since 4.02.0 *)
val merge : ('a -> 'a -> int) -> 'a list -> 'a list -> 'a list
* Merge two lists :
Assuming that [ l1 ] and [ l2 ] are sorted according to the
comparison function [ cmp ] , [ merge l1 l2 ] will return a
sorted list containting all the elements of [ l1 ] and [ l2 ] .
If several elements compare equal , the elements of [ l1 ] will be
before the elements of [ l2 ] .
Not tail - recursive ( sum of the lengths of the arguments ) .
Assuming that [l1] and [l2] are sorted according to the
comparison function [cmp], [merge cmp l1 l2] will return a
sorted list containting all the elements of [l1] and [l2].
If several elements compare equal, the elements of [l1] will be
before the elements of [l2].
Not tail-recursive (sum of the lengths of the arguments).
*)
|
f8dbfe045b1888044c22dddeb315c1f8c7dae7db1fa3925a4bc08a75c3938d89 | dsheets/gloc | glo.ml | Copyright ( c ) 2012 Ashima Arts . All rights reserved .
* Author :
* Use of this source code is governed by a BSD - style license that can be
* found in the LICENSE file .
* Author: David Sheets
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*)
open Printf
open Pp_lib
open Essl_lib
open Glo_lib
type require = Pdir of string | Edir of string * behavior | Vdir of int
type filenum = Num of int | Ref of int * string * string
exception DissolveError of string * unit pptok
let rec print_stream = function
| [ ] - > ( )
| ( Int _ ): : r - > print_endline " Int " ; print_stream r
| ( Float _ ): : r - > print_endline " Float " ; print_stream r
| ( Word _ ): : r - > print_endline " Word " ; print_stream r
| ( Call _ ): : r - > print_endline " Call " ; print_stream r
| ( Punc _ ): : r - > print_endline " Punc " ; print_stream r
| ( Comma _ ): : r - > print_endline " Comma " ; print_stream r
| ( Leftp _ ): : r - > print_endline " " ; print_stream r
| ( Rightp _ ): : r - > print_endline " Rightp " ; print_stream r
let rec print_stream = function
| [] -> ()
| (Int _)::r -> print_endline "Int"; print_stream r
| (Float _)::r -> print_endline "Float"; print_stream r
| (Word _)::r -> print_endline "Word"; print_stream r
| (Call _)::r -> print_endline "Call"; print_stream r
| (Punc _)::r -> print_endline "Punc"; print_stream r
| (Comma _)::r -> print_endline "Comma"; print_stream r
| (Leftp _)::r -> print_endline "Leftp"; print_stream r
| (Rightp _)::r -> print_endline "Rightp"; print_stream r
*)
let apply_to_pp_if fn ({v=(ce,tb,ofb)} as t) =
let tm, tb = match fn tb with
| tm, Some tb -> tm, tb
| tm, None -> tm, empty_pptok_expr tb
in
let fm, ofb = match ofb with
| None -> [], None
| Some fb -> fn fb
in tm@fm, Some (synth_pp_if { t with v=(ce,tb,ofb) })
let apply_to_pp_list fn ({v=ppel}) =
let ml, ppel = List.fold_left
(fun (ml,ppel) ppe -> match fn ppe with
| sml, None -> (sml@ml,ppel)
| sml, Some ppe -> (sml@ml,ppe::ppel)
)
([],[]) ppel
in ml, Some (fuse_pptok_expr (List.rev ppel))
let rec process_requires e = match e with
| Comments _ | Chunk _ | Def _ | Fun _ | Undef _
| Err _ | Line _ -> [], Some e
| Extension {v=({v=name},{v=behavior})} ->
[Edir (name, behavior)], None
| Pragma ({v=[Word {v=("STDGL",_)};
Word {v=("invariant",_)};
Leftp _;
Word {v=("all",_)};
Rightp _;
]} as t) -> [Pdir (snd (t.scan t.span.a))], None
| Pragma {v} -> [], Some e
| Version itt -> [Vdir itt.v.v], None
| If t -> apply_to_pp_if process_requires t
| List t -> apply_to_pp_list process_requires t
let line_ref_re = Re_str.regexp "\\([^ ]*\\)#n=\\([^ ]+\\)"
let extract_name_frag fn = function
| [] -> Num fn
| h::_ -> begin
try ignore (Re_str.search_forward line_ref_re h 0);
Ref (fn, Re_str.matched_group 1 h, Re_str.matched_group 2 h)
with Not_found -> Num fn
end
let rec process_line e = match e with
| Comments _ | Chunk _ | Def _ | Fun _ | Undef _
| Err _ | Pragma _ | Extension _ | Version _ -> [], Some e
| Line ({ v=(Some ft,_) } as t) ->
let _, postc = ft.comments in
let postc = List.flatten
(List.map (fun t -> List.map (fun t -> t.v) t.v) !postc)
in [extract_name_frag ft.v postc], Some (synth_pp_line_armored t)
| Line _ -> [], Some e
| If t -> apply_to_pp_if process_line t
| List t -> apply_to_pp_list process_line t
let directives_of_requires requires =
List.fold_left
(fun (pl,el,vo) -> function
| Pdir s -> (s::pl,el,vo)
| Edir (n,b) -> (pl,(n,string_of_behavior b)::el,vo)
| Vdir n -> (pl,el,Some n)
) ([],[],None) requires
TODO : enforce 1 - declare , 1 - define rule
(* Symbols in the outermost scope of [slenv] that are only declared, never
   defined, i.e. every binding fails Sl_lib.definitionp. *)
let prototypes slenv = Sl_lib.SymMap.fold
  (fun k bindings l ->
    if List.for_all (fun b -> not (Sl_lib.definitionp b)) bindings
    then k::l else l
  ) (List.hd (List.rev slenv.Sl_lib.ctxt)) []
(* Input symbols required by [envs]: each environment's prototypes plus its
   open symbols, minus builtins, with duplicates suppressed. *)
let get_insyms envs =
  let protos = List.fold_left (fun l e -> (prototypes e)@l) [] envs in
  List.fold_left
    (fun l e -> List.fold_left
      (fun l s -> (* TODO: inference *)
        if List.mem s l then l
        else if List.mem_assoc s builtins then l
        else s::l)
      l e.Sl_lib.opensyms)
    protos envs
(* [get_syms p envs] collects, across [envs], every symbol of the outermost
   scope (last element of Sl_lib.ctxt) that has at least one binding
   satisfying predicate [p], suppressing duplicates via List.mem. *)
let get_syms p envs = List.fold_left
  (fun l e -> Sl_lib.SymMap.fold
    (fun k bindings l -> (* TODO: inference *)
      (* [||] replaces the deprecated [or] keyword; semantics unchanged *)
      if (List.mem k l) || not (List.exists p bindings)
      then l else k::l)
    (List.hd (List.rev e.Sl_lib.ctxt)) l)
  [] envs
(* Symbols defined by the environments. *)
let get_outsyms = get_syms Sl_lib.definitionp
(* Uniform input symbols declared by the environments. *)
let get_inu = get_syms Sl_lib.uniformp
(* Attribute input symbols declared by the environments. *)
let get_ina = get_syms Sl_lib.attributep
(* Varying symbols declared by the environments. *)
let get_vary = get_syms Sl_lib.varyingp
(* Deduplicate [l] by the key [idx v], keeping the first occurrence and
   maintaining the original order. *)
let unique idx l = List.rev
  (snd (List.fold_left
          (fun (m,l) v ->
            if Sl_lib.SymMap.mem (idx v) m then (m,l)
            else (Sl_lib.SymMap.add (idx v) v m, v::l)
          ) (Sl_lib.SymMap.empty,[]) l))
(* Map extractor [f] over the environments of (env, ppexpr) pairs [ppl] and
   deduplicate the flattened results by key [idx]. *)
let env_map idx f ppl =
  unique idx (List.flatten (List.map (fun (env,_) -> f env) ppl))
(* Macros consumed from outside the environment. *)
let get_inmac env = List.map (fun t -> t.v) env.inmacros
(* All macros currently defined in the environment. *)
let get_opmac env = Env.fold (fun _ mac l -> mac::l) env.macros []
(* Parse token stream [s] as ESSL, returning the resulting Sl_lib
   environment. On parse error, reports the last hot token's span and
   scanned text (or __BOF__ when no token was consumed) via
   Essl_lib.EsslParseError. *)
let slenv_of_stream s =
  let hot = ref None in
  try parse_essl (essl_lexerfn hot s)
  with Essl.Error ->
    let span, scan = match !hot with Some t -> t.span, snd (t.scan t.span.a)
      | None -> {a=start_loc;z=start_loc}, "__BOF__" in
    raise (Essl_lib.EsslParseError ("'"^scan^"'",span))
(* Build a glo unit record from preprocessed expression [expr] and the list
   [ppl] of (preprocessor env, ppexpr) alternatives. Returns the unit paired
   with the ordered, deduplicated list of referenced file numbers.
   NOTE(review): dataset extraction had dropped the final [in] and the
   comment delimiters before the result tuple; both are restored here so
   the definition parses again. *)
let create_unit expr ppl =
  let id x = x in
  let inmac = env_map id get_inmac ppl in
  (* names of all macros defined across the alternatives *)
  let opmac = List.fold_right
    (function
      | {name=None} -> id
      | {name=Some n} -> fun l -> n::l)
    (env_map (fun {name} -> match name with None -> "" | Some s -> s)
       get_opmac ppl) [] in
  let envs = List.map
    (fun (_,ppexpr) -> slenv_of_stream (stream_of_pptok_expr ppexpr))
    ppl in
  let file_nums, expr = match process_line expr with
    | file_nums, Some e -> file_nums, e
    | file_nums, None -> file_nums, empty_pptok_expr expr
  in
  (* if the unit does not begin with a #line, file number 0 is implied *)
  let file_nums, start = match expr with
    | List {v=(Line {v=(Some _,_)})::_}
    | Line {v=(Some _,_)} ->
      file_nums, {file={src=0;input=0}; line={src=1;input=1}; col=0}
    | _ ->
      ((Num 0)::file_nums), {file={src=(-1);input=(-1)}; line={src=1;input=1}; col=0}
  in
  let (pdir,edir,vdir),expr = match process_requires expr with
    | requires, Some e -> directives_of_requires requires, e
    | requires, None -> directives_of_requires requires, empty_pptok_expr expr
  in
  (* TODO: rename GLOC_* to GLOC_GLOC_* *)
  ({pdir; edir; vdir;
    outu=[]; outa=[]; vary=get_vary envs; inu=get_inu envs; ina=get_ina envs;
    insym=get_insyms envs; outsym=get_outsyms envs;
    inmac; opmac; outmac=[]; bmac=[];
    source=(snd ((proj_pptok_expr expr).scan start))},
   List.rev
     (unique (function Num fn | Ref (fn,_,_) -> string_of_int fn) file_nums))
(* Fresh preprocessor environment for [lang]: no user macros, no
   extensions, no consumed input macros; builtin macros follow the
   language dialect. *)
let empty_ppenv lang =
  {macros=Env.empty;
   builtin_macros=builtin_macros_of_language lang;
   extensions=Env.empty;
   inmacros=[]}
(* Linkmap entry (file number string, link target) for a filenum.
   NOTE(review): the [fn] component of [Ref] is unbound in the result;
   presumably only the fragment matters for the link target — confirm
   against upstream before silencing the unused-variable warning. *)
let link_of_filenum = function
  | Num n -> (string_of_int n, sprintf "#n=%d" n)
  | Ref (n, fn, frag) -> (string_of_int n, sprintf "#n=%s" frag)
(* Compile [origexpr] plus its preprocessed alternatives [ppl] into a
   single-unit glo Leaf for the target of [lang].
   NOTE(review): parameter [fn] is unused here; it appears kept for
   signature parity with [dissolve] — confirm before removing. *)
let compile ?meta lang fn origexpr ppl =
  let target = Language.target_of_language lang in
  let body_unit, file_nums = create_unit origexpr ppl in
  let linkmap = List.map link_of_filenum file_nums in
  Leaf {glo=glo_version; target; meta; units=[|body_unit|]; linkmap}
(* (input line, column) where a declaration starts (its qualifier token). *)
let input_decl_a x =
  (x.Sl_lib.qualt.span.a.line.input,x.Sl_lib.qualt.span.a.col)
(* (input line, column) where a declaration ends (its symbol token). *)
let input_decl_z x =
  (x.Sl_lib.symt.span.z.line.input,x.Sl_lib.symt.span.z.col)
(* (input line, column) where a preprocessor token type starts. *)
let input_pptok_type_a pt =
  let t = proj_pptok_type pt in
  (t.span.a.line.input,t.span.a.col)
(* (input line, column) where a preprocessor token type ends. *)
let input_pptok_type_z pt =
  let t = proj_pptok_type pt in
  (t.span.z.line.input,t.span.z.col)
(* Order two declarations by their input spans. Normally compares start
   positions; when the start and end comparisons disagree in sign (one
   span appears nested in the other), the end comparison wins so the
   inner declaration sorts first. *)
let input_decl_cmp x y =
  let x_a = input_decl_a x in
  let x_z = input_decl_z x in
  let y_a = input_decl_a y in
  let y_z = input_decl_z y in
  let a = compare x_a y_a in
  let z = compare x_z y_z in
  if a>(-1) && z=(-1) || a<1 && z=1 then z else a
(* Reconstruct the source text of one symbol's bindings [bs] by re-scanning
   their qualifier, type and symbol tokens, then reparse that text to build
   a standalone glo unit for the symbol. Function bindings are emitted
   verbatim; other declarations get a trailing semicolon. *)
let unit_of_binding lang (n, bs) =
  let start = {file={src=(-1);input=(-1)}; line={src=1;input=1}; col=0} in
  let _, source = List.fold_left
    (fun (loc,s) {Sl_lib.qualt; Sl_lib.typet; Sl_lib.symt} ->
      let loc,qs = qualt.scan loc in
      (* user-defined types are rendered from their name rather than the
         original type token span *)
      let (loc,ts) = match Sl_lib.usertype_of_bind symt.v with
        | None -> (typet.scan loc)
        | Some ut ->
          (scan_of_string
             {typet.span with
               z={typet.span.a with
                 col=typet.span.a.col+(String.length ut)}}
             ([],ref []) ut loc)
      in
      let loc,ss = symt.scan loc in
      match symt.v with
        | Sl_lib.Fun (_,_,_) -> (loc,s^qs^ts^ss)
        | _ -> (loc,s^qs^ts^ss^";")
    ) (start,"") (List.rev bs)
  in
  let ppexpr = parse lang source in
  let slenv = slenv_of_stream (stream_of_pptok_expr ppexpr) in
  {pdir=[]; edir=[]; vdir=None;
   inu=get_inu [slenv]; outu=[];
   ina=get_ina [slenv]; outa=[];
   vary=get_vary [slenv];
   insym=get_insyms [slenv]; outsym=get_outsyms [slenv];
   inmac=[]; outmac=[]; opmac=[]; bmac=[];
   source}
(*
let streams = List.map ( fun b - > match b with
| ( _ , [ ] ) - > ( b , [ ] )
| ( n , bs ) - >
let s = input_decl_a ( List.hd bs ) in
let e = input_decl_z ( List.hd ( List.rev bs ) ) in
( b , ( fun pt l - >
let ta = input_pptok_type_a pt in
let tz = input_pptok_type_z pt in
if ta > = s & & tz < = e then pt::l else l
) c.v [ ] )
) bindings in
let streams = List.map (fun b -> match b with
| (_,[]) -> (b,[])
| (n,bs) ->
let s = input_decl_a (List.hd bs) in
let e = input_decl_z (List.hd (List.rev bs)) in
(b,List.fold_right
(fun pt l ->
let ta = input_pptok_type_a pt in
let tz = input_pptok_type_z pt in
if ta >= s && tz <= e then pt::l else l
) c.v [])
) bindings in
*)
(* Split preprocessed expression [origexpr] into a glom of per-declaration
   glo units: chunks are parsed and broken into one unit per top-level
   symbol, #define and conditionals become macro units, #pragma, #extension
   and #version accumulate as requires applied to every following unit, and
   meta comments open or close named glo sections. [fn] is the default file
   name for the first section. *)
let dissolve ?meta lang fn origexpr ppl =
  let target = Language.target_of_language lang in
  let pglo = {glo=glo_version; target; meta; units=[||]; linkmap=[]} in
  (* add unit [u], stamped with the pending require directives, to [oglo] *)
  let append_unit file_nums requires oglo u =
    let (pdir,edir,vdir) = directives_of_requires requires in
    {oglo with units=Array.append oglo.units [|{u with vdir; edir; pdir}|];
      linkmap=(List.map link_of_filenum file_nums)@oglo.linkmap}
  in
  let rec loop env (dfn,oglo) glom requires = function
    | (Comments c)::r -> comments env (dfn,oglo) glom c.v requires r
    | ((Chunk c) as pp)::r ->
      let ul, linkmap = begin
        try let slenv = slenv_of_stream (stream_of_pptok_expr pp) in
            let bindings = Sl_lib.SymMap.bindings
              (List.hd (List.rev slenv.Sl_lib.ctxt)) in
            List.map (unit_of_binding lang)
              (List.sort (* TODO: interleaved overloads *)
                 (fun (_,a) (_,b) ->
                   input_decl_cmp (List.hd a) (List.hd b))
                 bindings),
            List.fold_left (fun l -> function
              | (_,b::_) -> (Num b.Sl_lib.symt.span.a.file.src)::l
              | (_,[]) -> l
            ) [] bindings
(* NOTE(review): dataset extraction appears to have eaten the [with ... ->]
   handler of the [try] above together with a comment; the bare TODO line
   below is not valid OCaml. Restore the handler from upstream glo.ml. *)
TODO : interleaved ppdir
          let macros, _ = Pp.macro_expand env c.v in
          let ppl = Pp.preprocess_ppexpr env pp in
          maybe_fatal_error PPError ;
          let u,linkmap = create_unit pp ppl in
          [{u with opmac=[];
            inmac=List.map (function
              | {name=None} -> ""
              | {name=Some s} -> s)
              (unique (function {name=None} -> "" | {name=Some n} -> n) macros)}],
          linkmap
      end in
      loop env (dfn,List.fold_left
                  (fun glo u -> append_unit linkmap requires glo u)
                  oglo ul) glom requires r
    | ((If _) as pp)::r ->
      (* a conditional becomes one unit exporting the macros it may define *)
      let ppl = Pp.preprocess_ppexpr (empty_ppenv lang) pp in
      maybe_fatal_error PPError ;
      let u, file_nums = create_unit pp ppl in
      let u = {u with outmac=u.opmac; opmac=[]} in
      let o = env_map (fun {name} -> match name with None -> "" | Some s -> s)
        get_opmac ppl in
      loop {env with macros=List.fold_left
          (fun e m -> Env.add (match m.name with None -> "" | Some n -> n) m e)
          env.macros o}
        (dfn,append_unit file_nums requires oglo u)
        glom requires r
    | ((Def pptok) as pp)::r ->
      let env = Pp.define env pptok in
      let u, file_nums = create_unit pp
        [Pp.define (empty_ppenv lang) pptok,pp] in
      let u = {u with outmac=u.opmac; opmac=[]} in
      loop env (dfn,append_unit file_nums requires oglo u) glom requires r
    | ((Fun pptok) as pp)::r ->
      let env = Pp.defun env pptok in
      let u, file_nums = create_unit pp
        [Pp.defun (empty_ppenv lang) pptok,pp] in
      let u = {u with outmac=u.opmac; opmac=[]} in
      loop env (dfn,append_unit file_nums requires oglo u) glom requires r
    | (Undef {v=m})::r -> loop (Pp.undef env m.v) (dfn,oglo) glom requires r
    | (Err _)::r -> loop env (dfn,oglo) glom requires r
    | (Pragma t)::r ->
      loop env (dfn,oglo) glom ((Pdir (snd (t.scan t.span.a)))::requires) r
    | ((Version _) as pp)::r
    | ((Extension _) as pp)::r ->
      let req, _ = process_requires pp in
      loop env (dfn,oglo) glom (req@requires) r
    | ((Line ldt) as pp)::r ->
      (* an uncommented #line just updates the current file context; a
         commented one is first routed through the meta-comment handler *)
      let c, _ = ldt.comments in
      if c=[] then
        let file_num = List.hd (fst (process_line pp)) in (* Line is safe *)
        let linkmap = (link_of_filenum file_num)::oglo.linkmap in
        let dfn = match file_num with
          | Num _ | Ref (_,"",_) -> dfn | Ref (_,fn,_) -> fn
        in loop env (dfn,{oglo with linkmap}) glom requires r
      else
        comments env (dfn,oglo) glom c requires
          ((Line {ldt with comments=([],snd ldt.comments)})::r)
    | (List {v})::r -> loop env (dfn,oglo) glom requires (v@r)
    | [] -> List.rev ((dfn,Leaf {oglo with linkmap=unique fst oglo.linkmap})::glom)
  (* close the current section if non-empty, then act on any meta comment *)
  and comments env (dfn,oglo) glom c =
    let glom = if 0=(Array.length oglo.units) then glom
      else ((dfn,Leaf {oglo with linkmap=unique fst oglo.linkmap})::glom)
    in match extract_meta c with
      | NoMeta -> loop env (dfn,oglo) glom
      | EndMeta -> loop env (dfn,pglo) glom
      | NewMeta meta -> loop env (dfn,{pglo with meta=Some meta}) glom
  in let glo_alist = loop (empty_ppenv lang) (fn,pglo) [] [] [origexpr] in
  if 1=(List.length glo_alist) then snd (List.hd glo_alist)
  else Glom glo_alist
| null | https://raw.githubusercontent.com/dsheets/gloc/d5917c072ec314ae93a61344da2407f520fac1b5/src/glo.ml | ocaml | TODO: inference
TODO: inference
maintain order
TODO: interleaved overloads
Line is safe | Copyright ( c ) 2012 Ashima Arts . All rights reserved .
* Author :
* Use of this source code is governed by a BSD - style license that can be
* found in the LICENSE file .
* Author: David Sheets
* Use of this source code is governed by a BSD-style license that can be
* found in the LICENSE file.
*)
open Printf
open Pp_lib
open Essl_lib
open Glo_lib
type require = Pdir of string | Edir of string * behavior | Vdir of int
type filenum = Num of int | Ref of int * string * string
exception DissolveError of string * unit pptok
let rec print_stream = function
| [ ] - > ( )
| ( Int _ ): : r - > print_endline " Int " ; print_stream r
| ( Float _ ): : r - > print_endline " Float " ; print_stream r
| ( Word _ ): : r - > print_endline " Word " ; print_stream r
| ( Call _ ): : r - > print_endline " Call " ; print_stream r
| ( Punc _ ): : r - > print_endline " Punc " ; print_stream r
| ( Comma _ ): : r - > print_endline " Comma " ; print_stream r
| ( Leftp _ ): : r - > print_endline " " ; print_stream r
| ( Rightp _ ): : r - > print_endline " Rightp " ; print_stream r
let rec print_stream = function
| [] -> ()
| (Int _)::r -> print_endline "Int"; print_stream r
| (Float _)::r -> print_endline "Float"; print_stream r
| (Word _)::r -> print_endline "Word"; print_stream r
| (Call _)::r -> print_endline "Call"; print_stream r
| (Punc _)::r -> print_endline "Punc"; print_stream r
| (Comma _)::r -> print_endline "Comma"; print_stream r
| (Leftp _)::r -> print_endline "Leftp"; print_stream r
| (Rightp _)::r -> print_endline "Rightp"; print_stream r
*)
let apply_to_pp_if fn ({v=(ce,tb,ofb)} as t) =
let tm, tb = match fn tb with
| tm, Some tb -> tm, tb
| tm, None -> tm, empty_pptok_expr tb
in
let fm, ofb = match ofb with
| None -> [], None
| Some fb -> fn fb
in tm@fm, Some (synth_pp_if { t with v=(ce,tb,ofb) })
let apply_to_pp_list fn ({v=ppel}) =
let ml, ppel = List.fold_left
(fun (ml,ppel) ppe -> match fn ppe with
| sml, None -> (sml@ml,ppel)
| sml, Some ppe -> (sml@ml,ppe::ppel)
)
([],[]) ppel
in ml, Some (fuse_pptok_expr (List.rev ppel))
let rec process_requires e = match e with
| Comments _ | Chunk _ | Def _ | Fun _ | Undef _
| Err _ | Line _ -> [], Some e
| Extension {v=({v=name},{v=behavior})} ->
[Edir (name, behavior)], None
| Pragma ({v=[Word {v=("STDGL",_)};
Word {v=("invariant",_)};
Leftp _;
Word {v=("all",_)};
Rightp _;
]} as t) -> [Pdir (snd (t.scan t.span.a))], None
| Pragma {v} -> [], Some e
| Version itt -> [Vdir itt.v.v], None
| If t -> apply_to_pp_if process_requires t
| List t -> apply_to_pp_list process_requires t
let line_ref_re = Re_str.regexp "\\([^ ]*\\)#n=\\([^ ]+\\)"
let extract_name_frag fn = function
| [] -> Num fn
| h::_ -> begin
try ignore (Re_str.search_forward line_ref_re h 0);
Ref (fn, Re_str.matched_group 1 h, Re_str.matched_group 2 h)
with Not_found -> Num fn
end
let rec process_line e = match e with
| Comments _ | Chunk _ | Def _ | Fun _ | Undef _
| Err _ | Pragma _ | Extension _ | Version _ -> [], Some e
| Line ({ v=(Some ft,_) } as t) ->
let _, postc = ft.comments in
let postc = List.flatten
(List.map (fun t -> List.map (fun t -> t.v) t.v) !postc)
in [extract_name_frag ft.v postc], Some (synth_pp_line_armored t)
| Line _ -> [], Some e
| If t -> apply_to_pp_if process_line t
| List t -> apply_to_pp_list process_line t
let directives_of_requires requires =
List.fold_left
(fun (pl,el,vo) -> function
| Pdir s -> (s::pl,el,vo)
| Edir (n,b) -> (pl,(n,string_of_behavior b)::el,vo)
| Vdir n -> (pl,el,Some n)
) ([],[],None) requires
TODO : enforce 1 - declare , 1 - define rule
let prototypes slenv = Sl_lib.SymMap.fold
(fun k bindings l ->
if List.for_all (fun b -> not (Sl_lib.definitionp b)) bindings
then k::l else l
) (List.hd (List.rev slenv.Sl_lib.ctxt)) []
let get_insyms envs =
let protos = List.fold_left (fun l e -> (prototypes e)@l) [] envs in
List.fold_left
(fun l e -> List.fold_left
if List.mem s l then l
else if List.mem_assoc s builtins then l
else s::l)
l e.Sl_lib.opensyms)
protos envs
let get_syms p envs = List.fold_left
(fun l e -> Sl_lib.SymMap.fold
if (List.mem k l) or not (List.exists p bindings)
then l else k::l)
(List.hd (List.rev e.Sl_lib.ctxt)) l)
[] envs
let get_outsyms = get_syms Sl_lib.definitionp
let get_inu = get_syms Sl_lib.uniformp
let get_ina = get_syms Sl_lib.attributep
let get_vary = get_syms Sl_lib.varyingp
let unique idx l = List.rev
(snd (List.fold_left
(fun (m,l) v ->
if Sl_lib.SymMap.mem (idx v) m then (m,l)
else (Sl_lib.SymMap.add (idx v) v m, v::l)
) (Sl_lib.SymMap.empty,[]) l))
let env_map idx f ppl =
unique idx (List.flatten (List.map (fun (env,_) -> f env) ppl))
let get_inmac env = List.map (fun t -> t.v) env.inmacros
let get_opmac env = Env.fold (fun _ mac l -> mac::l) env.macros []
let slenv_of_stream s =
let hot = ref None in
try parse_essl (essl_lexerfn hot s)
with Essl.Error ->
let span, scan = match !hot with Some t -> t.span, snd (t.scan t.span.a)
| None -> {a=start_loc;z=start_loc}, "__BOF__" in
raise (Essl_lib.EsslParseError ("'"^scan^"'",span))
let create_unit expr ppl =
let id x = x in
let inmac = env_map id get_inmac ppl in
let opmac = List.fold_right
(function
| {name=None} -> id
| {name=Some n} -> fun l -> n::l)
(env_map (fun {name} -> match name with None -> "" | Some s -> s)
get_opmac ppl) [] in
let envs = List.map
(fun (_,ppexpr) -> slenv_of_stream (stream_of_pptok_expr ppexpr))
ppl in
let file_nums, expr = match process_line expr with
| file_nums, Some e -> file_nums, e
| file_nums, None -> file_nums, empty_pptok_expr expr
in
let file_nums, start = match expr with
| List {v=(Line {v=(Some _,_)})::_}
| Line {v=(Some _,_)} ->
file_nums, {file={src=0;input=0}; line={src=1;input=1}; col=0}
| _ ->
((Num 0)::file_nums), {file={src=(-1);input=(-1)}; line={src=1;input=1}; col=0}
in
let (pdir,edir,vdir),expr = match process_requires expr with
| requires, Some e -> directives_of_requires requires, e
| requires, None -> directives_of_requires requires, empty_pptok_expr expr
TODO : rename GLOC _ * to GLOC_GLOC _ *
({pdir; edir; vdir;
outu=[]; outa=[]; vary=get_vary envs; inu=get_inu envs; ina=get_ina envs;
insym=get_insyms envs; outsym=get_outsyms envs;
inmac; opmac; outmac=[]; bmac=[];
source=(snd ((proj_pptok_expr expr).scan start))},
List.rev
(unique (function Num fn | Ref (fn,_,_) -> string_of_int fn) file_nums))
let empty_ppenv lang =
{macros=Env.empty;
builtin_macros=builtin_macros_of_language lang;
extensions=Env.empty;
inmacros=[]}
let link_of_filenum = function
| Num n -> (string_of_int n, sprintf "#n=%d" n)
| Ref (n, fn, frag) -> (string_of_int n, sprintf "#n=%s" frag)
let compile ?meta lang fn origexpr ppl =
let target = Language.target_of_language lang in
let body_unit, file_nums = create_unit origexpr ppl in
let linkmap = List.map link_of_filenum file_nums in
Leaf {glo=glo_version; target; meta; units=[|body_unit|]; linkmap}
let input_decl_a x =
(x.Sl_lib.qualt.span.a.line.input,x.Sl_lib.qualt.span.a.col)
let input_decl_z x =
(x.Sl_lib.symt.span.z.line.input,x.Sl_lib.symt.span.z.col)
let input_pptok_type_a pt =
let t = proj_pptok_type pt in
(t.span.a.line.input,t.span.a.col)
let input_pptok_type_z pt =
let t = proj_pptok_type pt in
(t.span.z.line.input,t.span.z.col)
let input_decl_cmp x y =
let x_a = input_decl_a x in
let x_z = input_decl_z x in
let y_a = input_decl_a y in
let y_z = input_decl_z y in
let a = compare x_a y_a in
let z = compare x_z y_z in
if a>(-1) && z=(-1) || a<1 && z=1 then z else a
let unit_of_binding lang (n, bs) =
let start = {file={src=(-1);input=(-1)}; line={src=1;input=1}; col=0} in
let _, source = List.fold_left
(fun (loc,s) {Sl_lib.qualt; Sl_lib.typet; Sl_lib.symt} ->
let loc,qs = qualt.scan loc in
let (loc,ts) = match Sl_lib.usertype_of_bind symt.v with
| None -> (typet.scan loc)
| Some ut ->
(scan_of_string
{typet.span with
z={typet.span.a with
col=typet.span.a.col+(String.length ut)}}
([],ref []) ut loc)
in
let loc,ss = symt.scan loc in
match symt.v with
| Sl_lib.Fun (_,_,_) -> (loc,s^qs^ts^ss)
| _ -> (loc,s^qs^ts^ss^";")
) (start,"") (List.rev bs)
in
let ppexpr = parse lang source in
let slenv = slenv_of_stream (stream_of_pptok_expr ppexpr) in
{pdir=[]; edir=[]; vdir=None;
inu=get_inu [slenv]; outu=[];
ina=get_ina [slenv]; outa=[];
vary=get_vary [slenv];
insym=get_insyms [slenv]; outsym=get_outsyms [slenv];
inmac=[]; outmac=[]; opmac=[]; bmac=[];
source}
let streams = List.map ( fun b - > match b with
| ( _ , [ ] ) - > ( b , [ ] )
| ( n , bs ) - >
let s = input_decl_a ( List.hd bs ) in
let e = input_decl_z ( List.hd ( List.rev bs ) ) in
( b , ( fun pt l - >
let ta = input_pptok_type_a pt in
let tz = input_pptok_type_z pt in
if ta > = s & & tz < = e then pt::l else l
) c.v [ ] )
) bindings in
let streams = List.map (fun b -> match b with
| (_,[]) -> (b,[])
| (n,bs) ->
let s = input_decl_a (List.hd bs) in
let e = input_decl_z (List.hd (List.rev bs)) in
(b,List.fold_right
(fun pt l ->
let ta = input_pptok_type_a pt in
let tz = input_pptok_type_z pt in
if ta >= s && tz <= e then pt::l else l
) c.v [])
) bindings in
*)
let dissolve ?meta lang fn origexpr ppl =
let target = Language.target_of_language lang in
let pglo = {glo=glo_version; target; meta; units=[||]; linkmap=[]} in
let append_unit file_nums requires oglo u =
let (pdir,edir,vdir) = directives_of_requires requires in
{oglo with units=Array.append oglo.units [|{u with vdir; edir; pdir}|];
linkmap=(List.map link_of_filenum file_nums)@oglo.linkmap}
in
let rec loop env (dfn,oglo) glom requires = function
| (Comments c)::r -> comments env (dfn,oglo) glom c.v requires r
| ((Chunk c) as pp)::r ->
let ul, linkmap = begin
try let slenv = slenv_of_stream (stream_of_pptok_expr pp) in
let bindings = Sl_lib.SymMap.bindings
(List.hd (List.rev slenv.Sl_lib.ctxt)) in
List.map (unit_of_binding lang)
(fun (_,a) (_,b) ->
input_decl_cmp (List.hd a) (List.hd b))
bindings),
List.fold_left (fun l -> function
| (_,b::_) -> (Num b.Sl_lib.symt.span.a.file.src)::l
| (_,[]) -> l
) [] bindings
TODO : interleaved ppdir
let macros, _ = Pp.macro_expand env c.v in
let ppl = Pp.preprocess_ppexpr env pp in
maybe_fatal_error PPError ;
let u,linkmap = create_unit pp ppl in
[{u with opmac=[];
inmac=List.map (function
| {name=None} -> ""
| {name=Some s} -> s)
(unique (function {name=None} -> "" | {name=Some n} -> n) macros)}],
linkmap
end in
loop env (dfn,List.fold_left
(fun glo u -> append_unit linkmap requires glo u)
oglo ul) glom requires r
| ((If _) as pp)::r ->
let ppl = Pp.preprocess_ppexpr (empty_ppenv lang) pp in
maybe_fatal_error PPError ;
let u, file_nums = create_unit pp ppl in
let u = {u with outmac=u.opmac; opmac=[]} in
let o = env_map (fun {name} -> match name with None -> "" | Some s -> s)
get_opmac ppl in
loop {env with macros=List.fold_left
(fun e m -> Env.add (match m.name with None -> "" | Some n -> n) m e)
env.macros o}
(dfn,append_unit file_nums requires oglo u)
glom requires r
| ((Def pptok) as pp)::r ->
let env = Pp.define env pptok in
let u, file_nums = create_unit pp
[Pp.define (empty_ppenv lang) pptok,pp] in
let u = {u with outmac=u.opmac; opmac=[]} in
loop env (dfn,append_unit file_nums requires oglo u) glom requires r
| ((Fun pptok) as pp)::r ->
let env = Pp.defun env pptok in
let u, file_nums = create_unit pp
[Pp.defun (empty_ppenv lang) pptok,pp] in
let u = {u with outmac=u.opmac; opmac=[]} in
loop env (dfn,append_unit file_nums requires oglo u) glom requires r
| (Undef {v=m})::r -> loop (Pp.undef env m.v) (dfn,oglo) glom requires r
| (Err _)::r -> loop env (dfn,oglo) glom requires r
| (Pragma t)::r ->
loop env (dfn,oglo) glom ((Pdir (snd (t.scan t.span.a)))::requires) r
| ((Version _) as pp)::r
| ((Extension _) as pp)::r ->
let req, _ = process_requires pp in
loop env (dfn,oglo) glom (req@requires) r
| ((Line ldt) as pp)::r ->
let c, _ = ldt.comments in
if c=[] then
let linkmap = (link_of_filenum file_num)::oglo.linkmap in
let dfn = match file_num with
| Num _ | Ref (_,"",_) -> dfn | Ref (_,fn,_) -> fn
in loop env (dfn,{oglo with linkmap}) glom requires r
else
comments env (dfn,oglo) glom c requires
((Line {ldt with comments=([],snd ldt.comments)})::r)
| (List {v})::r -> loop env (dfn,oglo) glom requires (v@r)
| [] -> List.rev ((dfn,Leaf {oglo with linkmap=unique fst oglo.linkmap})::glom)
and comments env (dfn,oglo) glom c =
let glom = if 0=(Array.length oglo.units) then glom
else ((dfn,Leaf {oglo with linkmap=unique fst oglo.linkmap})::glom)
in match extract_meta c with
| NoMeta -> loop env (dfn,oglo) glom
| EndMeta -> loop env (dfn,pglo) glom
| NewMeta meta -> loop env (dfn,{pglo with meta=Some meta}) glom
in let glo_alist = loop (empty_ppenv lang) (fn,pglo) [] [] [origexpr] in
if 1=(List.length glo_alist) then snd (List.hd glo_alist)
else Glom glo_alist
|
f93a19db17e50f28b0c55459b14b292efabde69cfc8d15ca460f07a58e1bce93 | sdiehl/bulletproofs | Common.hs | module Test.Common
( commutes
, associates
, isIdentity
, isInverse
, distributes
) where
import Protolude
-- | True iff @op@ yields the same result regardless of argument order.
commutes :: Eq a => (a -> a -> a) -> a -> a -> Bool
commutes op x y = op x y == op y x
-- | True iff grouping does not affect the result of @op@ on @x@, @y@, @z@.
associates :: Eq a => (a -> a -> a) -> a -> a -> a -> Bool
associates op x y z = op x (op y z) == op (op x y) z
-- | True iff @e@ acts as a two-sided identity for @op@ at @x@.
isIdentity :: Eq a => (a -> a -> a) -> a -> a -> Bool
isIdentity op e x = (op x e == x) && (op e x == x)
-- | True iff @inv x@ is a two-sided inverse of @x@ under @op@ with
-- identity @e@.
isInverse :: Eq a => (a -> a -> a) -> (a -> a) -> a -> a -> Bool
isInverse op inv e x = (op x (inv x) == e) && (op (inv x) x == e)
-- | True iff @mult@ left-distributes over @add@ at @x@, @y@, @z@.
distributes :: Eq a => (a -> a -> a) -> (a -> a -> a) -> a -> a -> a -> Bool
distributes mult add x y z =
  mult x (add y z) == add (mult x y) (mult x z)
| null | https://raw.githubusercontent.com/sdiehl/bulletproofs/6cb356a2ad44dea139abb81214f1babab8456b91/test/Test/Common.hs | haskell | module Test.Common
( commutes
, associates
, isIdentity
, isInverse
, distributes
) where
import Protolude
commutes
:: Eq a
=> (a -> a -> a)
-> a -> a -> Bool
commutes op x y
= (x `op` y) == (y `op` x)
associates
:: Eq a
=> (a -> a -> a)
-> a -> a -> a -> Bool
associates op x y z
= (x `op` (y `op` z)) == ((x `op` y) `op` z)
isIdentity
:: Eq a
=> (a -> a -> a)
-> a
-> a
-> Bool
isIdentity op e x
= (x `op` e == x) && (e `op` x == x)
isInverse
:: Eq a
=> (a -> a -> a)
-> (a -> a)
-> a
-> a
-> Bool
isInverse op inv e x
= (x `op` inv x == e) && (inv x `op` x == e)
distributes
:: Eq a
=> (a -> a -> a)
-> (a -> a -> a)
-> a
-> a
-> a
-> Bool
distributes mult add x y z
= x `mult` (y `add` z) == (x `mult` y) `add` (x `mult` z)
| |
0e91dd0cf904ab6113f8a6cafec57c52938bd3a491ff13e7faba8ae92da999a5 | clojurecademy/clojurecademy | resume.clj | (ns clojurecademy.controller.course.resume
(:require [clojurecademy.dao.subject :as subject.dao]
[clojurecademy.dao.progress :as progress.dao]
[clojurecademy.dao.db :as db]
[clojurecademy.dao.chapter :as chapter.dao]
[clojurecademy.dao.sub-chapter :as sub-chapter.dao]
[clojurecademy.dao.course :as course.dao]
[clojurecademy.controller.course.common :as course.common]
[kezban.core :refer :all]))
(defn- get-biggest-indexed-entity
  "Given entity ids `e-ids` and an entity namespace keyword `e-keyword`
   (e.g. :chapter), returns the :db/id of the active entity with the
   largest index, or nil when none are active."
  [e-ids e-keyword]
  (let [combine-keys (comp keyword #(str (name e-keyword) "/" %))
        active-k (combine-keys "active?")
        index-k (combine-keys "index")]
    (->> e-ids
         (map db/entity)
         (filter active-k)
         (sort-by index-k #(compare %2 %1))
         first
         :db/id)))
(defn get-biggest-indexed-chapter-id
  "Id of the highest-indexed active chapter among the user's progress
   entries for `course-id`."
  [course-id user-id]
  (let [chapter-ids (progress.dao/find-progress-chapter-ids-by-course-id-and-user-id course-id user-id)]
    (-> chapter-ids (get-biggest-indexed-entity :chapter))))
(defn get-biggest-indexed-sub-chapter-id
  "Id of the highest-indexed active sub-chapter among the user's progress
   entries for `chapter-id`."
  [chapter-id user-id]
  (let [sub-chapter-ids (progress.dao/find-progress-sub-chapter-ids-by-chapter-id-and-user-id chapter-id user-id)]
    (-> sub-chapter-ids (get-biggest-indexed-entity :sub-chapter))))
(defn- get-biggest-indexed-subject-id
  "Id of the highest-indexed active subject among the user's progress
   entries for `sub-chapter-id`."
  [sub-chapter-id user-id]
  (let [subject-ids (progress.dao/find-progress-subject-ids-by-sub-chapter-id-and-user-id sub-chapter-id user-id)]
    (-> subject-ids (get-biggest-indexed-entity :subject))))
(defn- able-to-resume-sub-chapter-first-subject?
  "True when the user may start the first subject of `sub-chapter-id`:
   either it is the very first sub-chapter of the course, the user may
   access all subjects, or the last subject of the preceding sub-chapter
   (falling back to the preceding chapter's last sub-chapter) is done."
  [user sub-chapter-id course-id]
  (let [user-id (:db/id user)
        sub-chapter (db/entity sub-chapter-id)
        release-t (course.common/get-release user-id course-id)
        chapter (chapter.dao/find-chapter-by-sub-chapter-id release-t sub-chapter-id)]
    (if (or (= (:sub-chapter/index sub-chapter) (:chapter/index chapter) 0)
            (course.common/able-to-access-all-subjects? course-id user))
      true
      (if-let [pre-sub-chapter-id (sub-chapter.dao/find-previous-sub-chapter-id-by-sub-chapter-id release-t sub-chapter-id)]
        (let [max-indexed-subject-id (subject.dao/find-max-indexed-subject-id-by-sub-chapter-id release-t pre-sub-chapter-id)
              progress (progress.dao/find-progress-by-subject-id-and-user-id max-indexed-subject-id user-id)]
          (true? (:progress/done? progress)))
        ;; no previous sub-chapter in this chapter: check the last subject
        ;; of the previous chapter's last sub-chapter.
        ;; TODO: chain with ->> (refactor)
        (let [previous-chapter-id (chapter.dao/find-previous-chapter-id-by-chapter-id release-t (:db/id chapter))
              max-indexded-sub-chapter-id (sub-chapter.dao/find-max-indexed-sub-chapter-id-by-chapter-id release-t previous-chapter-id)
              max-indexed-subject-id (subject.dao/find-max-indexed-subject-id-by-sub-chapter-id release-t max-indexded-sub-chapter-id)
              progress (progress.dao/find-progress-by-subject-id-and-user-id max-indexed-subject-id user-id)]
          (true? (:progress/done? progress)))))))
(defn get-tracked-latest-subject-map
  "Render map for the user's most recently tracked subject in `course-id`,
   resuming from stored progress (saved code, completed sub-instructions,
   done flag). Returns `:no-ins? true` when the subject has no instruction."
  [user course-id]
  (let [user-id (:db/id user)
        chapter-id (get-biggest-indexed-chapter-id course-id user-id)
        sub-chapter-id (get-biggest-indexed-sub-chapter-id chapter-id user-id)
        subject-id (get-biggest-indexed-subject-id sub-chapter-id user-id)
        progress (progress.dao/find-progress-by-subject-id-and-user-id subject-id user-id)
        release-t (course.common/get-release user-id course-id)
        subject-and-childs (subject.dao/collect-subject-and-childs release-t subject-id)
        common-map (course.common/get-common-data-map release-t user subject-and-childs)]
    (if-let [instruction (:instruction subject-and-childs)]
      ;; NOTE: the original bound (course.common/get-initial-code ...) here
      ;; but never used it — when resuming, the user's saved code is shown.
      (let [sub-ins-text (course.common/get-sub-ins-texts (:sub-instructions subject-and-childs))
            completed-sub-ins (course.common/get-completed-sub-ins progress subject-and-childs)
            before-start (merge {:run-pre-tests? (:run-pre-tests? instruction)} (try-> instruction :rule read-string))]
        (assoc common-map :initial-code (:progress/code progress)
                          :sub-instructions sub-ins-text
                          :completed-sub-instructions completed-sub-ins
                          :before-start before-start
                          :done? (:progress/done? progress)))
      (assoc common-map :no-ins? true))))
(defn get-first-subject-of-sub-chapter-map
  "Render map for the first subject of `sub-chapter-id`, or
   `{:locked-subject true}` when the user has not unlocked it yet.
   Returns `:no-ins? true` when the subject has no instruction."
  [user sub-chapter-id course-id]
  (if-not (able-to-resume-sub-chapter-first-subject? user sub-chapter-id course-id)
    {:locked-subject true}
    (let [release-t (course.common/get-release (:db/id user) course-id)
          subject (subject.dao/find-first-subject-of-sub-chapter-by-sub-chapter-id release-t sub-chapter-id)
          subject-id (:db/id subject)
          subject-and-childs (subject.dao/collect-subject-and-childs release-t subject-id)
          common-map (course.common/get-common-data-map release-t user subject-and-childs)]
      (if-let [instruction (:instruction subject-and-childs)]
        (let [initial-code (course.common/get-initial-code subject-and-childs)
              sub-ins-text (course.common/get-sub-ins-texts (:sub-instructions subject-and-childs))
              before-start (merge {:run-pre-tests? (:run-pre-tests? instruction)} (try-> instruction :rule read-string))]
          (assoc common-map :initial-code initial-code :sub-instructions sub-ins-text :before-start before-start))
        (assoc common-map :no-ins? true)))))
(defn get-tracked-latest-subject-of-sub-chapter-map
  "Render map for the user's most recently tracked subject inside
   `sub-chapter-id`, resuming from stored progress. Returns
   `:no-ins? true` when the subject has no instruction."
  [user sub-chapter-id]
  (let [user-id (:db/id user)
        subject-id (get-biggest-indexed-subject-id sub-chapter-id user-id)
        course-id (course.dao/find-course-id-by-subject-id subject-id)
        progress (progress.dao/find-progress-by-subject-id-and-user-id subject-id user-id)
        release-t (course.common/get-release user-id course-id)
        subject-and-childs (subject.dao/collect-subject-and-childs release-t subject-id)
        common-map (course.common/get-common-data-map release-t user subject-and-childs)]
    (if-let [instruction (:instruction subject-and-childs)]
      ;; NOTE: the original bound (course.common/get-initial-code ...) here
      ;; but never used it — when resuming, the user's saved code is shown.
      (let [sub-ins-text (course.common/get-sub-ins-texts (:sub-instructions subject-and-childs))
            completed-sub-ins (course.common/get-completed-sub-ins progress subject-and-childs)
            before-start (merge {:run-pre-tests? (:run-pre-tests? instruction)} (try-> instruction :rule read-string))]
        (assoc common-map :initial-code (:progress/code progress)
                          :sub-instructions sub-ins-text
                          :completed-sub-instructions completed-sub-ins
                          :before-start before-start
                          :done? (:progress/done? progress)))
      (assoc common-map :no-ins? true))))
(defn get-first-subject-of-course-map
  "Render map for the very first subject of `course-id`, using the
   subject's initial code. Returns `:no-ins? true` when the subject has
   no instruction."
  [user course-id]
  (let [release-t (course.common/get-release (:db/id user) course-id)
        subject (subject.dao/find-first-subject-of-course-by-course-id release-t course-id)
        subject-id (:db/id subject)
        subject-and-childs (subject.dao/collect-subject-and-childs release-t subject-id)
        common-map (course.common/get-common-data-map release-t user subject-and-childs)]
    (if-let [instruction (:instruction subject-and-childs)]
      (let [initial-code (course.common/get-initial-code subject-and-childs)
            sub-ins-text (course.common/get-sub-ins-texts (:sub-instructions subject-and-childs))
            before-start (merge {:run-pre-tests? (:run-pre-tests? instruction)} (try-> instruction :rule read-string))]
        (assoc common-map :initial-code initial-code :sub-instructions sub-ins-text :before-start before-start))
      (assoc common-map :no-ins? true))))
(:require [clojurecademy.dao.subject :as subject.dao]
[clojurecademy.dao.progress :as progress.dao]
[clojurecademy.dao.db :as db]
[clojurecademy.dao.chapter :as chapter.dao]
[clojurecademy.dao.sub-chapter :as sub-chapter.dao]
[clojurecademy.dao.course :as course.dao]
[clojurecademy.controller.course.common :as course.common]
[kezban.core :refer :all]))
(defn- get-biggest-indexed-entity
[e-ids e-keyword]
(let [combine-keys (comp keyword #(str (name e-keyword) "/" %))
active-k (combine-keys "active?")
index-k (combine-keys "index")]
(->> e-ids
(map db/entity)
(filter active-k)
(sort-by index-k #(compare %2 %1))
first
:db/id)))
(defn get-biggest-indexed-chapter-id
[course-id user-id]
(let [chapter-ids (progress.dao/find-progress-chapter-ids-by-course-id-and-user-id course-id user-id)]
(-> chapter-ids (get-biggest-indexed-entity :chapter))))
(defn get-biggest-indexed-sub-chapter-id
[chapter-id user-id]
(let [sub-chapter-ids (progress.dao/find-progress-sub-chapter-ids-by-chapter-id-and-user-id chapter-id user-id)]
(-> sub-chapter-ids (get-biggest-indexed-entity :sub-chapter))))
(defn- get-biggest-indexed-subject-id
[sub-chapter-id user-id]
(let [subject-ids (progress.dao/find-progress-subject-ids-by-sub-chapter-id-and-user-id sub-chapter-id user-id)]
(-> subject-ids (get-biggest-indexed-entity :subject))))
(defn- able-to-resume-sub-chapter-first-subject?
[user sub-chapter-id course-id]
(let [user-id (:db/id user)
sub-chapter (db/entity sub-chapter-id)
release-t (course.common/get-release user-id course-id)
chapter (chapter.dao/find-chapter-by-sub-chapter-id release-t sub-chapter-id)]
(if (or (= (:sub-chapter/index sub-chapter) (:chapter/index chapter) 0)
(course.common/able-to-access-all-subjects? course-id user))
true
(if-let [pre-sub-chapter-id (sub-chapter.dao/find-previous-sub-chapter-id-by-sub-chapter-id release-t sub-chapter-id)]
(let [max-indexed-subject-id (subject.dao/find-max-indexed-subject-id-by-sub-chapter-id release-t pre-sub-chapter-id)
progress (progress.dao/find-progress-by-subject-id-and-user-id max-indexed-subject-id user-id)]
(true? (:progress/done? progress)))
(let [previous-chapter-id (chapter.dao/find-previous-chapter-id-by-chapter-id release-t (:db/id chapter))
max-indexded-sub-chapter-id (sub-chapter.dao/find-max-indexed-sub-chapter-id-by-chapter-id release-t previous-chapter-id)
max-indexed-subject-id (subject.dao/find-max-indexed-subject-id-by-sub-chapter-id release-t max-indexded-sub-chapter-id)
progress (progress.dao/find-progress-by-subject-id-and-user-id max-indexed-subject-id user-id)]
(true? (:progress/done? progress)))))))
(defn get-tracked-latest-subject-map
[user course-id]
(let [user-id (:db/id user)
chapter-id (get-biggest-indexed-chapter-id course-id user-id)
sub-chapter-id (get-biggest-indexed-sub-chapter-id chapter-id user-id)
subject-id (get-biggest-indexed-subject-id sub-chapter-id user-id)
progress (progress.dao/find-progress-by-subject-id-and-user-id subject-id user-id)
release-t (course.common/get-release user-id course-id)
subject-and-childs (subject.dao/collect-subject-and-childs release-t subject-id)
common-map (course.common/get-common-data-map release-t user subject-and-childs)]
(if-let [instruction (:instruction subject-and-childs)]
(let [initial-code (course.common/get-initial-code subject-and-childs)
sub-ins-text (course.common/get-sub-ins-texts (:sub-instructions subject-and-childs))
completed-sub-ins (course.common/get-completed-sub-ins progress subject-and-childs)
before-start (merge {:run-pre-tests? (:run-pre-tests? instruction)} (try-> instruction :rule read-string))]
(assoc common-map :initial-code (:progress/code progress)
:sub-instructions sub-ins-text
:completed-sub-instructions completed-sub-ins
:before-start before-start
:done? (:progress/done? progress)))
(assoc common-map :no-ins? true))))
(defn get-first-subject-of-sub-chapter-map
[user sub-chapter-id course-id]
(if-not (able-to-resume-sub-chapter-first-subject? user sub-chapter-id course-id)
{:locked-subject true}
(let [release-t (course.common/get-release (:db/id user) course-id)
subject (subject.dao/find-first-subject-of-sub-chapter-by-sub-chapter-id release-t sub-chapter-id)
subject-id (:db/id subject)
subject-and-childs (subject.dao/collect-subject-and-childs release-t subject-id)
common-map (course.common/get-common-data-map release-t user subject-and-childs)]
(if-let [instruction (:instruction subject-and-childs)]
(let [initial-code (course.common/get-initial-code subject-and-childs)
sub-ins-text (course.common/get-sub-ins-texts (:sub-instructions subject-and-childs))
before-start (merge {:run-pre-tests? (:run-pre-tests? instruction)} (try-> instruction :rule read-string))]
(assoc common-map :initial-code initial-code :sub-instructions sub-ins-text :before-start before-start))
(assoc common-map :no-ins? true)))))
(defn get-tracked-latest-subject-of-sub-chapter-map
[user sub-chapter-id]
(let [user-id (:db/id user)
subject-id (get-biggest-indexed-subject-id sub-chapter-id user-id)
course-id (course.dao/find-course-id-by-subject-id subject-id)
progress (progress.dao/find-progress-by-subject-id-and-user-id subject-id user-id)
release-t (course.common/get-release user-id course-id)
subject-and-childs (subject.dao/collect-subject-and-childs release-t subject-id)
common-map (course.common/get-common-data-map release-t user subject-and-childs)]
(if-let [instruction (:instruction subject-and-childs)]
(let [initial-code (course.common/get-initial-code subject-and-childs)
sub-ins-text (course.common/get-sub-ins-texts (:sub-instructions subject-and-childs))
completed-sub-ins (course.common/get-completed-sub-ins progress subject-and-childs)
before-start (merge {:run-pre-tests? (:run-pre-tests? instruction)} (try-> instruction :rule read-string))]
(assoc common-map :initial-code (:progress/code progress)
:sub-instructions sub-ins-text
:completed-sub-instructions completed-sub-ins
:before-start before-start
:done? (:progress/done? progress)))
(assoc common-map :no-ins? true))))
(defn get-first-subject-of-course-map
[user course-id]
(let [release-t (course.common/get-release (:db/id user) course-id)
subject (subject.dao/find-first-subject-of-course-by-course-id release-t course-id)
subject-id (:db/id subject)
subject-and-childs (subject.dao/collect-subject-and-childs release-t subject-id)
common-map (course.common/get-common-data-map release-t user subject-and-childs)]
(if-let [instruction (:instruction subject-and-childs)]
(let [initial-code (course.common/get-initial-code subject-and-childs)
sub-ins-text (course.common/get-sub-ins-texts (:sub-instructions subject-and-childs))
before-start (merge {:run-pre-tests? (:run-pre-tests? instruction)} (try-> instruction :rule read-string))]
(assoc common-map :initial-code initial-code :sub-instructions sub-ins-text :before-start before-start))
(assoc common-map :no-ins? true)))) |
7f1f42e55599888ba993308ed09660554866fe3bcbb3814f90841721c9c0b7bb | ocsigen/lwt | lwt_luv.ml | let from_unix : Unix.file_descr -> int = Obj.magic
class engine = object
inherit Lwt_engine.abstract
val loop = ref (Luv.Loop.default ())
method! fork =
Luv.Loop.fork !loop |> function
| Ok () -> ()
| Error e -> failwith (Printf.sprintf "Could not handle the fork, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
method private cleanup = Luv.Loop.stop !loop
method iter block =
match (block) with
| true -> Luv.Loop.run ~loop:!loop ~mode:`ONCE () |> ignore
| false -> Luv.Loop.run ~loop:!loop ~mode:`NOWAIT () |> ignore
method private register_readable fd f =
let p = Luv.Poll.init ~loop:!loop (from_unix fd) in
match p with
| Ok poll ->
let () = Luv.Poll.start poll [`READABLE;] (fun _ -> f ()) in
lazy(
Luv.Poll.stop poll |> function
| Ok () -> ()
| Error e -> failwith (Printf.sprintf "Could not stop read polling, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
)
| Result.Error e -> failwith (Printf.sprintf "Could not register fd for read polling, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
method private register_writable fd f =
let p = Luv.Poll.init ~loop:!loop (from_unix fd) in
match p with
| Ok poll ->
let () = Luv.Poll.start poll [`WRITABLE;] (fun _ -> f ()) in
lazy(
Luv.Poll.stop poll |> function
| Ok () -> ()
| Error e -> failwith (Printf.sprintf "Could not stop write polling, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
)
| Result.Error e -> failwith (Printf.sprintf "Could not register fd for write polling, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
method private register_timer delay repeat f =
let delay_ms = (int_of_float (delay *. 1000.)) in
let t = Luv.Timer.init ~loop:!loop () in
match t with
| Result.Error e -> failwith (Printf.sprintf "Could not initialize a timer, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
| Ok timer ->
let timer_fn = match repeat with
| true -> Luv.Timer.start ~repeat:delay_ms timer
| false -> Luv.Timer.start timer
in
match timer_fn delay_ms f with
| Ok () -> lazy(Luv.Timer.stop timer |> ignore)
| Result.Error e -> failwith (Printf.sprintf "Could not start a timer, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
end
| null | https://raw.githubusercontent.com/ocsigen/lwt/b3e7dd029dacbe37df9565c142c2206cfe6831c2/src/unix/luv/lwt_luv.ml | ocaml | let from_unix : Unix.file_descr -> int = Obj.magic
class engine = object
inherit Lwt_engine.abstract
val loop = ref (Luv.Loop.default ())
method! fork =
Luv.Loop.fork !loop |> function
| Ok () -> ()
| Error e -> failwith (Printf.sprintf "Could not handle the fork, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
method private cleanup = Luv.Loop.stop !loop
method iter block =
match (block) with
| true -> Luv.Loop.run ~loop:!loop ~mode:`ONCE () |> ignore
| false -> Luv.Loop.run ~loop:!loop ~mode:`NOWAIT () |> ignore
method private register_readable fd f =
let p = Luv.Poll.init ~loop:!loop (from_unix fd) in
match p with
| Ok poll ->
let () = Luv.Poll.start poll [`READABLE;] (fun _ -> f ()) in
lazy(
Luv.Poll.stop poll |> function
| Ok () -> ()
| Error e -> failwith (Printf.sprintf "Could not stop read polling, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
)
| Result.Error e -> failwith (Printf.sprintf "Could not register fd for read polling, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
method private register_writable fd f =
let p = Luv.Poll.init ~loop:!loop (from_unix fd) in
match p with
| Ok poll ->
let () = Luv.Poll.start poll [`WRITABLE;] (fun _ -> f ()) in
lazy(
Luv.Poll.stop poll |> function
| Ok () -> ()
| Error e -> failwith (Printf.sprintf "Could not stop write polling, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
)
| Result.Error e -> failwith (Printf.sprintf "Could not register fd for write polling, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
method private register_timer delay repeat f =
let delay_ms = (int_of_float (delay *. 1000.)) in
let t = Luv.Timer.init ~loop:!loop () in
match t with
| Result.Error e -> failwith (Printf.sprintf "Could not initialize a timer, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
| Ok timer ->
let timer_fn = match repeat with
| true -> Luv.Timer.start ~repeat:delay_ms timer
| false -> Luv.Timer.start timer
in
match timer_fn delay_ms f with
| Ok () -> lazy(Luv.Timer.stop timer |> ignore)
| Result.Error e -> failwith (Printf.sprintf "Could not start a timer, this is probably a error in Lwt, please open a issue on the repo. \nError message: %s" (Luv.Error.err_name e))
end
| |
80c3255fc397868582e31235228011da8cb028570207a983984566ba395d9106 | hhugo/merge-fmt | resolve_cmd.ml | open Base
open Stdio
open Common
type version =
| Common
| Theirs
| Ours
let string_of_version = function
| Common -> "common"
| Theirs -> "theirs"
| Ours -> "ours"
type rev = Object of string
type versions =
{ common : rev
; theirs : rev
; ours : rev
}
let conflict ~filename =
In_channel.with_file filename ~f:(fun ic ->
let rec loop n =
match In_channel.input_line ic with
| None -> n
| Some line ->
if String.is_prefix ~prefix:"<<<<<<<" line
then loop (Int.succ n)
else loop n
in
loop 0)
let ls ~echo () =
let ic = open_process_in ~echo "git ls-files -u" in
let rec loop acc =
match In_channel.input_line ic with
| None -> acc
| Some line -> (
match String.split_on_chars ~on:[ ' '; '\t' ] line with
| [ _; id; num; file ] -> loop ((file, (Int.of_string num, id)) :: acc)
| _ -> failwith "unexpected format")
in
let map = Map.of_alist_multi (module String) (loop []) in
Map.map map ~f:(fun l ->
let l = List.sort l ~compare:(Comparable.lift ~f:fst Int.compare) in
match l with
| [ (1, common); (2, ours); (3, theirs) ] ->
Ok
{ common = Object common
; ours = Object ours
; theirs = Object theirs
}
| _ -> Error "not a 3-way merge")
let show ~echo version versions =
let obj =
match version with
| Ours -> versions.ours
| Theirs -> versions.theirs
| Common -> versions.common
in
match obj with
| Object obj ->
open_process_in ~echo "git show %s" obj |> In_channel.input_all
let create_tmp ~echo fn version versions =
let content = show ~echo version versions in
let ext = Caml.Filename.extension fn in
let base =
if String.equal ext "" then fn else Caml.Filename.chop_extension fn
in
let fn' = sprintf "%s.%s%s" base (string_of_version version) ext in
let oc = Out_channel.create fn' in
Out_channel.output_string oc content;
Out_channel.close oc;
fn'
let merge ~echo ~ours ~common ~theirs ~output =
system ~echo "git merge-file -p %s %s %s > %s" ours common theirs output
let git_add ~echo ~filename = system ~echo "git add %s" filename
let fix ~echo ~filename ~versions ~formatter =
let ours = create_tmp ~echo filename Ours versions in
let theirs = create_tmp ~echo filename Theirs versions in
let common = create_tmp ~echo filename Common versions in
let x =
Fmters.run formatter ~echo ~filename:ours
|> Result.map_error ~f:(Fn.const ours)
and y =
Fmters.run formatter ~echo ~filename:theirs
|> Result.map_error ~f:(Fn.const theirs)
and z =
Fmters.run formatter ~echo ~filename:common
|> Result.map_error ~f:(Fn.const common)
in
match Result.combine_errors_unit [ x; y; z ] with
| Error l ->
eprintf "Failed to format %s\n%!" (String.concat ~sep:", " l);
Error ()
| Ok () -> (
match merge ~echo ~ours ~theirs ~common ~output:filename with
| Error _ -> Error ()
| Ok () ->
Unix.unlink ours;
Unix.unlink theirs;
Unix.unlink common;
Ok ())
let resolve config echo () =
let all = ls ~echo () in
if Map.is_empty all
then (
eprintf "Nothing to resolve\n%!";
Caml.exit 1);
Map.iteri all ~f:(fun ~key:filename ~data:versions ->
match versions with
| Ok versions -> (
match Fmters.find ~config ~filename ~name:None with
| Some formatter ->
let n1 = conflict ~filename in
Result.bind (fix ~echo ~filename ~versions ~formatter)
~f:(fun () -> git_add ~echo ~filename)
|> (ignore : (unit, unit) Result.t -> unit);
let n2 = conflict ~filename in
if n2 > n1
then eprintf "Resolved ?? %s\n%!" filename
else eprintf "Resolved %d/%d %s\n%!" (n1 - n2) n1 filename
| None -> eprintf "Ignore %s (no formatter register)\n%!" filename)
| Error reason -> eprintf "Ignore %s (%s)\n%!" filename reason);
let all = ls ~echo () in
if Map.is_empty all then Caml.exit 0 else Caml.exit 1
open Cmdliner
let cmd =
let doc = "Try to automatically resolve conflicts due to code formatting" in
( Term.(const resolve $ Fmters.Flags.t $ Flags.echo $ const ())
, Cmd.info ~doc "merge-fmt" )
| null | https://raw.githubusercontent.com/hhugo/merge-fmt/ddcddd85cbf1a596ad1759163ae2eb75673924f5/src/resolve_cmd.ml | ocaml | open Base
open Stdio
open Common
type version =
| Common
| Theirs
| Ours
let string_of_version = function
| Common -> "common"
| Theirs -> "theirs"
| Ours -> "ours"
type rev = Object of string
type versions =
{ common : rev
; theirs : rev
; ours : rev
}
let conflict ~filename =
In_channel.with_file filename ~f:(fun ic ->
let rec loop n =
match In_channel.input_line ic with
| None -> n
| Some line ->
if String.is_prefix ~prefix:"<<<<<<<" line
then loop (Int.succ n)
else loop n
in
loop 0)
let ls ~echo () =
let ic = open_process_in ~echo "git ls-files -u" in
let rec loop acc =
match In_channel.input_line ic with
| None -> acc
| Some line -> (
match String.split_on_chars ~on:[ ' '; '\t' ] line with
| [ _; id; num; file ] -> loop ((file, (Int.of_string num, id)) :: acc)
| _ -> failwith "unexpected format")
in
let map = Map.of_alist_multi (module String) (loop []) in
Map.map map ~f:(fun l ->
let l = List.sort l ~compare:(Comparable.lift ~f:fst Int.compare) in
match l with
| [ (1, common); (2, ours); (3, theirs) ] ->
Ok
{ common = Object common
; ours = Object ours
; theirs = Object theirs
}
| _ -> Error "not a 3-way merge")
let show ~echo version versions =
let obj =
match version with
| Ours -> versions.ours
| Theirs -> versions.theirs
| Common -> versions.common
in
match obj with
| Object obj ->
open_process_in ~echo "git show %s" obj |> In_channel.input_all
let create_tmp ~echo fn version versions =
let content = show ~echo version versions in
let ext = Caml.Filename.extension fn in
let base =
if String.equal ext "" then fn else Caml.Filename.chop_extension fn
in
let fn' = sprintf "%s.%s%s" base (string_of_version version) ext in
let oc = Out_channel.create fn' in
Out_channel.output_string oc content;
Out_channel.close oc;
fn'
let merge ~echo ~ours ~common ~theirs ~output =
system ~echo "git merge-file -p %s %s %s > %s" ours common theirs output
let git_add ~echo ~filename = system ~echo "git add %s" filename
let fix ~echo ~filename ~versions ~formatter =
let ours = create_tmp ~echo filename Ours versions in
let theirs = create_tmp ~echo filename Theirs versions in
let common = create_tmp ~echo filename Common versions in
let x =
Fmters.run formatter ~echo ~filename:ours
|> Result.map_error ~f:(Fn.const ours)
and y =
Fmters.run formatter ~echo ~filename:theirs
|> Result.map_error ~f:(Fn.const theirs)
and z =
Fmters.run formatter ~echo ~filename:common
|> Result.map_error ~f:(Fn.const common)
in
match Result.combine_errors_unit [ x; y; z ] with
| Error l ->
eprintf "Failed to format %s\n%!" (String.concat ~sep:", " l);
Error ()
| Ok () -> (
match merge ~echo ~ours ~theirs ~common ~output:filename with
| Error _ -> Error ()
| Ok () ->
Unix.unlink ours;
Unix.unlink theirs;
Unix.unlink common;
Ok ())
let resolve config echo () =
let all = ls ~echo () in
if Map.is_empty all
then (
eprintf "Nothing to resolve\n%!";
Caml.exit 1);
Map.iteri all ~f:(fun ~key:filename ~data:versions ->
match versions with
| Ok versions -> (
match Fmters.find ~config ~filename ~name:None with
| Some formatter ->
let n1 = conflict ~filename in
Result.bind (fix ~echo ~filename ~versions ~formatter)
~f:(fun () -> git_add ~echo ~filename)
|> (ignore : (unit, unit) Result.t -> unit);
let n2 = conflict ~filename in
if n2 > n1
then eprintf "Resolved ?? %s\n%!" filename
else eprintf "Resolved %d/%d %s\n%!" (n1 - n2) n1 filename
| None -> eprintf "Ignore %s (no formatter register)\n%!" filename)
| Error reason -> eprintf "Ignore %s (%s)\n%!" filename reason);
let all = ls ~echo () in
if Map.is_empty all then Caml.exit 0 else Caml.exit 1
open Cmdliner
let cmd =
let doc = "Try to automatically resolve conflicts due to code formatting" in
( Term.(const resolve $ Fmters.Flags.t $ Flags.echo $ const ())
, Cmd.info ~doc "merge-fmt" )
| |
0d5d6240344c8631d3050fefa6aa744cc3c94f1fb9074a1ced969ded051184e1 | Martoon-00/toy-compiler | Data.hs | module Toy.Execution.Data
( In
, Out
, InOut
, Meta (..)
, withEmptyInput
) where
import Data.Text (Text)
import Data.Text.Buildable (Buildable (..))
import Formatting ((%))
import qualified Formatting as F
import Toy.Base (Value)
type In = [Value]
type Out = [Value]
type InOut = (In, Out)
data Meta = Meta
{ metaName :: Text
, metaBody :: Text
}
instance Buildable Meta where
build Meta{..} =
F.bprint ("\n=== "%F.stext%" ===\n"%F.stext%"\n--^--^--\n")
metaName metaBody
withEmptyInput :: Out -> InOut
withEmptyInput = ([], )
| null | https://raw.githubusercontent.com/Martoon-00/toy-compiler/a325d56c367bbb673608d283197fcd51cf5960fa/src/Toy/Execution/Data.hs | haskell | module Toy.Execution.Data
( In
, Out
, InOut
, Meta (..)
, withEmptyInput
) where
import Data.Text (Text)
import Data.Text.Buildable (Buildable (..))
import Formatting ((%))
import qualified Formatting as F
import Toy.Base (Value)
type In = [Value]
type Out = [Value]
type InOut = (In, Out)
data Meta = Meta
{ metaName :: Text
, metaBody :: Text
}
instance Buildable Meta where
build Meta{..} =
F.bprint ("\n=== "%F.stext%" ===\n"%F.stext%"\n--^--^--\n")
metaName metaBody
withEmptyInput :: Out -> InOut
withEmptyInput = ([], )
| |
646f8bf44934510d9629e7c539b46f6fb8122900a1e77461d04ab1d5922581b4 | gigasquid/hyperdimensional-playground | fairytale_nouns.clj | (ns hyperdimensional-playground.fairytale-nouns
(:require [clojure.string :as string])
(:import (java.util Properties)
(edu.stanford.nlp.pipeline StanfordCoreNLP Annotation)
(edu.stanford.nlp.ling CoreAnnotations$SentencesAnnotation CoreAnnotations$TokensAnnotation
CoreAnnotations$TextAnnotation CoreAnnotations$PartOfSpeechAnnotation
CoreAnnotations$NamedEntityTagAnnotation)))
;;; get all the nouns in our corpus
(def props (Properties.))
(.setProperty props "annotators", "tokenize, ssplit, pos")
(def pipeline (StanfordCoreNLP. props))
(defn ->text-data [tokens sent-num]
(mapv (fn [t] {:sent-num sent-num
:token (.get t CoreAnnotations$TextAnnotation)
:pos (.get t CoreAnnotations$PartOfSpeechAnnotation)}) tokens))
(defn process-text [text]
(let [annotation (Annotation. text)
_ (.annotate pipeline annotation)
sentences (.get annotation CoreAnnotations$SentencesAnnotation)
sentence-tokens (mapv (fn [s] (.get s CoreAnnotations$TokensAnnotation)) sentences)
text-data (flatten (map-indexed (fn [i t] (->text-data t i)) sentence-tokens))]
text-data))
(defn filter-nouns [doc]
(let [nouns (filter #(contains? (hash-set "NN" "NNS" "NNP" "NNPS") (:pos %))
(process-text doc))]
(remove #{"and" "the" "is" "a" "i" "he" "she" "it"}
(set (map #(clojure.string/lower-case (:token %)) nouns)))))
(defn gather-nouns-from-book [book-str]
(let [book-text (slurp book-str)
docs (string/split book-text #"\s|\.|\,|\;|\!|\?")]
(doall (reduce (fn [nouns doc]
(clojure.set/union nouns (filter-nouns doc)))
#{}
docs))))
(comment
(def grimm-nouns (gather-nouns-from-book "resources/grimm_fairy_tales.txt"))
(def anderson-nouns (gather-nouns-from-book "resources/anderson_fairy_tales.txt"))
(def english-fairy-tale-nouns (gather-nouns-from-book "resources/english_fairy_tales.txt"))
(def every-child-nouns (gather-nouns-from-book "resources/fairy_tales_every_child_should_know.txt"))
(def firelight-nouns (gather-nouns-from-book "resources/firelight_fairy_book.txt"))
(def favorite-nouns (gather-nouns-from-book "resources/favorite_fairy_tales.txt"))
(def wonder-wings-nouns (gather-nouns-from-book "resources/wonder_wings.txt"))
(def old-fashioned-nouns (gather-nouns-from-book "resources/old_fashioned.txt"))
(def fairy-godmother-nouns (gather-nouns-from-book "resources/fairy_godmothers.txt"))
(def golden-spears-nouns (gather-nouns-from-book "resources/golden_spears.txt"))
(def red-cap-nouns (gather-nouns-from-book "resources/red_cap_tales.txt"))
(def fairy-tales-nouns (clojure.set/union grimm-nouns anderson-nouns english-fairy-tale-nouns
every-child-nouns firelight-nouns favorite-nouns
wonder-wings-nouns old-fashioned-nouns fairy-godmother-nouns
golden-spears-nouns red-cap-nouns))
(take 100 grimm-nouns)
(count grimm-nouns)
= > 136606
(spit "resources/fairy-tales-nouns.edn" fairy-tales-nouns)
(spit "resources/fairy-tales-nouns-grimm.edn" grimm-nouns)
(take 100 fairy-tales-nouns)
(def x (clojure.edn/read-string (slurp "resources/fairy-tales-nouns.edn")))
)
(def grimm-nouns (clojure.edn/read-string (slurp "resources/fairy-tales-nouns-grimm.edn")))
(def fairy-tales-nouns (set grimm-nouns))
construct a matrix of 10322 x 10,000 and use random columns of 20
;;;; to put frequenies
(def book-list ["resources/grimm_fairy_tales.txt"
"resources/anderson_fairy_tales.txt"
"resources/english_fairy_tales.txt"
"resources/fairy_tales_every_child_should_know.txt"
"resources/firelight_fairy_book.txt"
"resources/favorite_fairy_tales.txt"
"resources/wonder_wings.txt"
"resources/old_fashioned.txt"
"resources/fairy_godmothers.txt"
"resources/golden_spears.txt"
"resources/red_cap_tales.txt"])
| null | https://raw.githubusercontent.com/gigasquid/hyperdimensional-playground/ee83b9b38467f3ae60b82e70ec78db6563dbf17a/src/hyperdimensional_playground/fairytale_nouns.clj | clojure | get all the nouns in our corpus
to put frequenies | (ns hyperdimensional-playground.fairytale-nouns
(:require [clojure.string :as string])
(:import (java.util Properties)
(edu.stanford.nlp.pipeline StanfordCoreNLP Annotation)
(edu.stanford.nlp.ling CoreAnnotations$SentencesAnnotation CoreAnnotations$TokensAnnotation
CoreAnnotations$TextAnnotation CoreAnnotations$PartOfSpeechAnnotation
CoreAnnotations$NamedEntityTagAnnotation)))
(def props (Properties.))
(.setProperty props "annotators", "tokenize, ssplit, pos")
(def pipeline (StanfordCoreNLP. props))
(defn ->text-data [tokens sent-num]
(mapv (fn [t] {:sent-num sent-num
:token (.get t CoreAnnotations$TextAnnotation)
:pos (.get t CoreAnnotations$PartOfSpeechAnnotation)}) tokens))
(defn process-text [text]
(let [annotation (Annotation. text)
_ (.annotate pipeline annotation)
sentences (.get annotation CoreAnnotations$SentencesAnnotation)
sentence-tokens (mapv (fn [s] (.get s CoreAnnotations$TokensAnnotation)) sentences)
text-data (flatten (map-indexed (fn [i t] (->text-data t i)) sentence-tokens))]
text-data))
(defn filter-nouns [doc]
(let [nouns (filter #(contains? (hash-set "NN" "NNS" "NNP" "NNPS") (:pos %))
(process-text doc))]
(remove #{"and" "the" "is" "a" "i" "he" "she" "it"}
(set (map #(clojure.string/lower-case (:token %)) nouns)))))
(defn gather-nouns-from-book [book-str]
(let [book-text (slurp book-str)
docs (string/split book-text #"\s|\.|\,|\;|\!|\?")]
(doall (reduce (fn [nouns doc]
(clojure.set/union nouns (filter-nouns doc)))
#{}
docs))))
(comment
(def grimm-nouns (gather-nouns-from-book "resources/grimm_fairy_tales.txt"))
(def anderson-nouns (gather-nouns-from-book "resources/anderson_fairy_tales.txt"))
(def english-fairy-tale-nouns (gather-nouns-from-book "resources/english_fairy_tales.txt"))
(def every-child-nouns (gather-nouns-from-book "resources/fairy_tales_every_child_should_know.txt"))
(def firelight-nouns (gather-nouns-from-book "resources/firelight_fairy_book.txt"))
(def favorite-nouns (gather-nouns-from-book "resources/favorite_fairy_tales.txt"))
(def wonder-wings-nouns (gather-nouns-from-book "resources/wonder_wings.txt"))
(def old-fashioned-nouns (gather-nouns-from-book "resources/old_fashioned.txt"))
(def fairy-godmother-nouns (gather-nouns-from-book "resources/fairy_godmothers.txt"))
(def golden-spears-nouns (gather-nouns-from-book "resources/golden_spears.txt"))
(def red-cap-nouns (gather-nouns-from-book "resources/red_cap_tales.txt"))
(def fairy-tales-nouns (clojure.set/union grimm-nouns anderson-nouns english-fairy-tale-nouns
every-child-nouns firelight-nouns favorite-nouns
wonder-wings-nouns old-fashioned-nouns fairy-godmother-nouns
golden-spears-nouns red-cap-nouns))
(take 100 grimm-nouns)
(count grimm-nouns)
= > 136606
(spit "resources/fairy-tales-nouns.edn" fairy-tales-nouns)
(spit "resources/fairy-tales-nouns-grimm.edn" grimm-nouns)
(take 100 fairy-tales-nouns)
(def x (clojure.edn/read-string (slurp "resources/fairy-tales-nouns.edn")))
)
(def grimm-nouns (clojure.edn/read-string (slurp "resources/fairy-tales-nouns-grimm.edn")))
(def fairy-tales-nouns (set grimm-nouns))
construct a matrix of 10322 x 10,000 and use random columns of 20
(def book-list ["resources/grimm_fairy_tales.txt"
"resources/anderson_fairy_tales.txt"
"resources/english_fairy_tales.txt"
"resources/fairy_tales_every_child_should_know.txt"
"resources/firelight_fairy_book.txt"
"resources/favorite_fairy_tales.txt"
"resources/wonder_wings.txt"
"resources/old_fashioned.txt"
"resources/fairy_godmothers.txt"
"resources/golden_spears.txt"
"resources/red_cap_tales.txt"])
|
68c80ea2c3300f43f499a42a136e5b9cbfb6520c768dc80ff5b8ad02c67afe7f | inconvergent/weird | extra.lisp | (in-package :weir)
; TODO: copy properties?
; TODO: copy grps?
(veq:fvdef 3->2 (wer fx &key new)
(declare (weir wer) (function fx))
(weird:with-struct (weir- verts max-verts num-verts) wer
(let* ((new (if new new (make :max-verts max-verts)))
(new-verts (weir-verts new)))
(declare (weir new) (veq:fvec new-verts))
(setf (weir-num-verts new) num-verts)
(veq:f3$with-rows (num-verts verts)
(lambda (i (veq:varg 3 x))
(declare (veq:pn i) (veq:ff x))
(veq:2$vset (new-verts i) (funcall fx x))))
(itr-edges (wer e) (ladd-edge! new e))
new)))
(veq:fvdef* 2cut-to-area! (wer &key g (top 0f0) (left 0f0)
(bottom 1000f0) (right 1000f0))
(declare (weir wer) (veq:ff top left bottom right))
"
removes all edges (in g) outside envelope (ox oy), (w h).
all edges intersecting the envelope will be deleted, a new vert will be
inserted on the intersection. connected to the inside vert.
edges inside the envelope will be left as they are.
"
(labels
((inside (i)
(declare (veq:pn i))
(veq:f2let ((p (2$verts wer i)))
(and (> (:vref p 0) left) (> (:vref p 1) top)
(< (:vref p 0) right) (< (:vref p 1) bottom))))
(split-line (ai bi &aux (rev nil))
(declare (veq:pn ai bi) (boolean rev))
(unless (inside ai) (rotatef ai bi) (setf rev t)) ; swap indices
(veq:f2let ((a (2$verts wer ai))
(b (2$verts wer bi))
(ab (veq:f2- b a)))
(mvc #'values rev
(veq:f2lerp a b
(cond ((> (:vref b 0) right) (/ (- right (:vref a 0)) (:vref ab 0)))
((> (:vref b 1) bottom) (/ (- bottom (:vref a 1)) (:vref ab 1)))
((< (:vref b 0) left) (/ (- left (:vref a 0)) (:vref ab 0)))
(t (/ (- top (:vref a 1)) (:vref ab 1))))))))
(cutfx (line)
(declare (list line))
(case (length (remove-if-not #'inside line))
(0 (values :outside nil 0f0 0f0))
(1 (mvc #'values :split (apply #'split-line line)))
(t (values :keep nil 0f0 0f0)))))
(with (wer %)
(itr-edges (wer e :g g)
(with-gs (ae?)
(mvb (state rev px py) (cutfx e)
(declare (symbol state) (boolean rev) (veq:ff px py))
(case state
(:outside (% (ldel-edge? e :g g)))
(:split (% (ldel-edge? e :g g))
(% (2append-edge?
(if rev (second e) (first e)) (veq:f2 px py) :rel nil :g g)
:res ae?)
(% (set-edge-prop? ae? :cut))))))))))
| null | https://raw.githubusercontent.com/inconvergent/weird/106d154ec2cd0e4ec977c3672ba717d6305c1056/src/weir/extra.lisp | lisp | TODO: copy properties?
TODO: copy grps?
swap indices | (in-package :weir)
(veq:fvdef 3->2 (wer fx &key new)
(declare (weir wer) (function fx))
(weird:with-struct (weir- verts max-verts num-verts) wer
(let* ((new (if new new (make :max-verts max-verts)))
(new-verts (weir-verts new)))
(declare (weir new) (veq:fvec new-verts))
(setf (weir-num-verts new) num-verts)
(veq:f3$with-rows (num-verts verts)
(lambda (i (veq:varg 3 x))
(declare (veq:pn i) (veq:ff x))
(veq:2$vset (new-verts i) (funcall fx x))))
(itr-edges (wer e) (ladd-edge! new e))
new)))
(veq:fvdef* 2cut-to-area! (wer &key g (top 0f0) (left 0f0)
(bottom 1000f0) (right 1000f0))
(declare (weir wer) (veq:ff top left bottom right))
"
removes all edges (in g) outside envelope (ox oy), (w h).
all edges intersecting the envelope will be deleted, a new vert will be
inserted on the intersection. connected to the inside vert.
edges inside the envelope will be left as they are.
"
(labels
((inside (i)
(declare (veq:pn i))
(veq:f2let ((p (2$verts wer i)))
(and (> (:vref p 0) left) (> (:vref p 1) top)
(< (:vref p 0) right) (< (:vref p 1) bottom))))
(split-line (ai bi &aux (rev nil))
(declare (veq:pn ai bi) (boolean rev))
(veq:f2let ((a (2$verts wer ai))
(b (2$verts wer bi))
(ab (veq:f2- b a)))
(mvc #'values rev
(veq:f2lerp a b
(cond ((> (:vref b 0) right) (/ (- right (:vref a 0)) (:vref ab 0)))
((> (:vref b 1) bottom) (/ (- bottom (:vref a 1)) (:vref ab 1)))
((< (:vref b 0) left) (/ (- left (:vref a 0)) (:vref ab 0)))
(t (/ (- top (:vref a 1)) (:vref ab 1))))))))
(cutfx (line)
(declare (list line))
(case (length (remove-if-not #'inside line))
(0 (values :outside nil 0f0 0f0))
(1 (mvc #'values :split (apply #'split-line line)))
(t (values :keep nil 0f0 0f0)))))
(with (wer %)
(itr-edges (wer e :g g)
(with-gs (ae?)
(mvb (state rev px py) (cutfx e)
(declare (symbol state) (boolean rev) (veq:ff px py))
(case state
(:outside (% (ldel-edge? e :g g)))
(:split (% (ldel-edge? e :g g))
(% (2append-edge?
(if rev (second e) (first e)) (veq:f2 px py) :rel nil :g g)
:res ae?)
(% (set-edge-prop? ae? :cut))))))))))
|
ff33471a8ea5c535dda6fbca33c2c73b2792b3ffbd342aa4212701ca81fca8a3 | chef/opscoderl_httpc | oc_httpc_worker.erl | -*- erlang - indent - level : 4;indent - tabs - mode : nil ; fill - column : 92 -*-
%% ex: ts=4 sw=4 et
@author < >
Copyright 2013 Opscode , Inc. All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
-module(oc_httpc_worker).
-behaviour(gen_server).
%% API
-export([
start_link/3,
request/6,
verify_ca/3,
multi_request/3
]).
%% gen_server callbacks
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
-record(state, {ibrowse_options = [],
root_url,
ibrowse_pid,
current_connection_requests = 0,
max_connection_requests,
max_connection_duration, %% value in ms
retry_on_conn_closed,
born_on_time}).
-include_lib("ibrowse/include/ibrowse.hrl").
%%%===================================================================
%%% API
%%%===================================================================
start_link(RootUrl, IbrowseOptions, Config) ->
gen_server:start_link(?MODULE, [RootUrl, IbrowseOptions, Config], []).
request(Pid, Path, Headers, Method, Body, Timeout) when is_atom(Method) ->
try gen_server:call(Pid, {request, Path, Headers, Method, Body, Timeout}, Timeout) of
Result ->
Result
catch
exit:{timeout, _} ->
{error, req_timedout};
Other ->
throw(Other)
end.
multi_request(Pid, Fun, Timeout) ->
RequestFun = fun(Path, Headers, Method, Body) when is_atom(Method) ->
oc_httpc_worker:request(Pid, Path, Headers, Method, Body, Timeout)
end,
Fun(RequestFun).
%%%===================================================================
%%% Gen Server Callbacks
%%%===================================================================
init([RootUrl, IbrowseOptions, Config]) ->
process_flag(trap_exit, true),
RetryOnConnClosed = proplists:get_value(retry_on_conn_closed, Config, false),
MaxRequests = proplists:get_value(max_connection_request_limit, Config, 100),
MaxConnectionDuration = oc_time:convert_units(proplists:get_value(max_connection_duration, Config, {1, min}), ms),
#url{host = Host, port = Port} = create_ibrowse_url(RootUrl),
IbrowseOptions1 = handle_custom_ssl_options(IbrowseOptions),
ibrowse:add_config([{ignored, ignored}, {dest, Host, Port, 1, 1, IbrowseOptions1}]),
{ok, #state{root_url = RootUrl, ibrowse_options = IbrowseOptions1, ibrowse_pid = undefined,
retry_on_conn_closed = RetryOnConnClosed,
max_connection_requests = MaxRequests,
max_connection_duration = MaxConnectionDuration}}.
%%% handle_custom_ssl_options currently implements a custom verify mode, `verify_ca` which
ignores hostname mismatches , restoring the behavior of earlier Erlang versions . This is
useful for OTP 19 - 21 where now verifies the hostname but does so naively
%%% without additional configuration. This additional configuration is hard to provide
via a configuration file before OTP 22 .
%%%
This is not a stable API and will be removed when we upgrade our ecosystem to Erlang 22 .
handle_custom_ssl_options(Options) ->
case proplists:get_value(ssl_options, Options) of
undefined ->
Options;
SslOpts ->
case proplists:get_value(verify, SslOpts) of
verify_ca ->
SslOptsNoVerify = proplists:delete(verify, SslOpts),
SslNewOpts = [{verify, verify_peer}, {verify_fun, {fun oc_httpc_worker:verify_ca/3, []}} | SslOptsNoVerify],
lists:keyreplace(ssl_options, 1, Options, {ssl_options, SslNewOpts});
_ ->
Options
end
end.
verify_ca(_Cert, Event, UserState) ->
case Event of
{bad_cert, hostname_check_failed} ->
{valid, UserState};
{bad_cert, _} ->
{fail, Event};
{extension, _} ->
{unknown, UserState};
valid ->
{valid, UserState};
valid_peer ->
{valid, UserState}
end.
handle_call(Request, From, State = #state{ibrowse_pid = undefined}) ->
handle_call(Request, From, make_http_client_pid(State));
handle_call({request, Path, Headers, Method, Body, Timeout}, _From, State = #state{root_url = RootUrl,
ibrowse_options = IbrowseOptions,
retry_on_conn_closed = RetryOnConnClosed}) ->
NewState = refresh_connection_process(State),
ReqUrl = combine(RootUrl, Path),
Result = ibrowse:send_req_direct(NewState#state.ibrowse_pid, ReqUrl, Headers, Method, Body, IbrowseOptions, Timeout),
case {Result, RetryOnConnClosed} of
{{error, sel_conn_closed}, true} ->
lager:info("oc_httpc_worker: attempted request on closed connection (pid = ~p); opening new connection and retrying", [NewState#state.ibrowse_pid]),
NewState2 = reset_http_client_pid(State),
RetryResult = ibrowse:send_req_direct(NewState2#state.ibrowse_pid, ReqUrl, Headers, Method, Body, IbrowseOptions, Timeout),
{reply, RetryResult, NewState2};
_ ->
{reply, Result, NewState}
end;
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info({'EXIT', Pid, normal}, State = #state{ibrowse_pid = Pid}) ->
{noreply, make_http_client_pid(State)};
handle_info({'EXIT', Pid, _Reason}, State = #state{ibrowse_pid=Cur}) when Pid /= Cur ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
create_ibrowse_url(RootUrl) ->
ibrowse_lib:parse_url(RootUrl).
create_ssl_options(#url{protocol = Protocol}, Options) ->
case (Protocol == https) orelse
ibrowse_lib:get_value(is_ssl, Options, false) of
false -> {[], false};
true -> {ibrowse_lib:get_value(ssl_options, Options, []), true}
end.
enforce_trailing_slash(S) ->
Rev = lists:reverse(S),
case Rev of
[$/ | _Rest] ->
S;
RevNoSlash ->
lists:reverse([$/ | RevNoSlash])
end.
enforce_no_leading_slash(S) ->
case S of
[$/ | Rest] ->
enforce_no_leading_slash(Rest);
S ->
S
end.
combine(Root, Path) ->
enforce_trailing_slash(Root) ++ enforce_no_leading_slash(Path).
refresh_connection_process(State = #state{current_connection_requests = CurrentConnectionRequests,
max_connection_requests = MaxConnectionRequests})
when CurrentConnectionRequests >= MaxConnectionRequests ->
reset_http_client_pid(State);
refresh_connection_process(State = #state{born_on_time = BornOnTime,
max_connection_duration = MaxConnectionDuration,
current_connection_requests = CurrentConnectionRequests}) ->
Duration = (timer:now_diff(os:timestamp(), BornOnTime)/1000),
case Duration >= MaxConnectionDuration of
true ->
reset_http_client_pid(State);
false ->
State#state{current_connection_requests = CurrentConnectionRequests + 1}
end.
reset_http_client_pid(State) ->
clear_previous_connection(State),
make_http_client_pid(State).
clear_previous_connection(State = #state{ibrowse_pid = undefined}) ->
State;
clear_previous_connection(State = #state{ibrowse_pid = Pid}) ->
ibrowse_http_client:stop(Pid),
State.
make_http_client_pid(State = #state{root_url = RootUrl, ibrowse_options = IbrowseOptions}) ->
Url = create_ibrowse_url(RootUrl),
{ok, Pid} = ibrowse_http_client:start_link({undefined, Url, create_ssl_options(Url, IbrowseOptions)}),
State#state{ibrowse_pid = Pid, born_on_time = os:timestamp(), current_connection_requests = 0}.
| null | https://raw.githubusercontent.com/chef/opscoderl_httpc/58efd00f9e21f119890a3b105089878140ecd6fe/src/oc_httpc_worker.erl | erlang | ex: ts=4 sw=4 et
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
API
gen_server callbacks
value in ms
===================================================================
API
===================================================================
===================================================================
Gen Server Callbacks
===================================================================
handle_custom_ssl_options currently implements a custom verify mode, `verify_ca` which
without additional configuration. This additional configuration is hard to provide
| -*- erlang - indent - level : 4;indent - tabs - mode : nil ; fill - column : 92 -*-
@author < >
Copyright 2013 Opscode , Inc. All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(oc_httpc_worker).
-behaviour(gen_server).
-export([
start_link/3,
request/6,
verify_ca/3,
multi_request/3
]).
-export([init/1, handle_call/3, handle_cast/2, handle_info/2,
terminate/2, code_change/3]).
-define(SERVER, ?MODULE).
-record(state, {ibrowse_options = [],
root_url,
ibrowse_pid,
current_connection_requests = 0,
max_connection_requests,
retry_on_conn_closed,
born_on_time}).
-include_lib("ibrowse/include/ibrowse.hrl").
start_link(RootUrl, IbrowseOptions, Config) ->
gen_server:start_link(?MODULE, [RootUrl, IbrowseOptions, Config], []).
request(Pid, Path, Headers, Method, Body, Timeout) when is_atom(Method) ->
try gen_server:call(Pid, {request, Path, Headers, Method, Body, Timeout}, Timeout) of
Result ->
Result
catch
exit:{timeout, _} ->
{error, req_timedout};
Other ->
throw(Other)
end.
multi_request(Pid, Fun, Timeout) ->
RequestFun = fun(Path, Headers, Method, Body) when is_atom(Method) ->
oc_httpc_worker:request(Pid, Path, Headers, Method, Body, Timeout)
end,
Fun(RequestFun).
init([RootUrl, IbrowseOptions, Config]) ->
process_flag(trap_exit, true),
RetryOnConnClosed = proplists:get_value(retry_on_conn_closed, Config, false),
MaxRequests = proplists:get_value(max_connection_request_limit, Config, 100),
MaxConnectionDuration = oc_time:convert_units(proplists:get_value(max_connection_duration, Config, {1, min}), ms),
#url{host = Host, port = Port} = create_ibrowse_url(RootUrl),
IbrowseOptions1 = handle_custom_ssl_options(IbrowseOptions),
ibrowse:add_config([{ignored, ignored}, {dest, Host, Port, 1, 1, IbrowseOptions1}]),
{ok, #state{root_url = RootUrl, ibrowse_options = IbrowseOptions1, ibrowse_pid = undefined,
retry_on_conn_closed = RetryOnConnClosed,
max_connection_requests = MaxRequests,
max_connection_duration = MaxConnectionDuration}}.
ignores hostname mismatches , restoring the behavior of earlier Erlang versions . This is
useful for OTP 19 - 21 where now verifies the hostname but does so naively
via a configuration file before OTP 22 .
This is not a stable API and will be removed when we upgrade our ecosystem to Erlang 22 .
handle_custom_ssl_options(Options) ->
case proplists:get_value(ssl_options, Options) of
undefined ->
Options;
SslOpts ->
case proplists:get_value(verify, SslOpts) of
verify_ca ->
SslOptsNoVerify = proplists:delete(verify, SslOpts),
SslNewOpts = [{verify, verify_peer}, {verify_fun, {fun oc_httpc_worker:verify_ca/3, []}} | SslOptsNoVerify],
lists:keyreplace(ssl_options, 1, Options, {ssl_options, SslNewOpts});
_ ->
Options
end
end.
verify_ca(_Cert, Event, UserState) ->
case Event of
{bad_cert, hostname_check_failed} ->
{valid, UserState};
{bad_cert, _} ->
{fail, Event};
{extension, _} ->
{unknown, UserState};
valid ->
{valid, UserState};
valid_peer ->
{valid, UserState}
end.
handle_call(Request, From, State = #state{ibrowse_pid = undefined}) ->
handle_call(Request, From, make_http_client_pid(State));
handle_call({request, Path, Headers, Method, Body, Timeout}, _From, State = #state{root_url = RootUrl,
ibrowse_options = IbrowseOptions,
retry_on_conn_closed = RetryOnConnClosed}) ->
NewState = refresh_connection_process(State),
ReqUrl = combine(RootUrl, Path),
Result = ibrowse:send_req_direct(NewState#state.ibrowse_pid, ReqUrl, Headers, Method, Body, IbrowseOptions, Timeout),
case {Result, RetryOnConnClosed} of
{{error, sel_conn_closed}, true} ->
lager:info("oc_httpc_worker: attempted request on closed connection (pid = ~p); opening new connection and retrying", [NewState#state.ibrowse_pid]),
NewState2 = reset_http_client_pid(State),
RetryResult = ibrowse:send_req_direct(NewState2#state.ibrowse_pid, ReqUrl, Headers, Method, Body, IbrowseOptions, Timeout),
{reply, RetryResult, NewState2};
_ ->
{reply, Result, NewState}
end;
handle_call(_Request, _From, State) ->
Reply = ok,
{reply, Reply, State}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info({'EXIT', Pid, normal}, State = #state{ibrowse_pid = Pid}) ->
{noreply, make_http_client_pid(State)};
handle_info({'EXIT', Pid, _Reason}, State = #state{ibrowse_pid=Cur}) when Pid /= Cur ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
create_ibrowse_url(RootUrl) ->
ibrowse_lib:parse_url(RootUrl).
create_ssl_options(#url{protocol = Protocol}, Options) ->
case (Protocol == https) orelse
ibrowse_lib:get_value(is_ssl, Options, false) of
false -> {[], false};
true -> {ibrowse_lib:get_value(ssl_options, Options, []), true}
end.
enforce_trailing_slash(S) ->
Rev = lists:reverse(S),
case Rev of
[$/ | _Rest] ->
S;
RevNoSlash ->
lists:reverse([$/ | RevNoSlash])
end.
enforce_no_leading_slash(S) ->
case S of
[$/ | Rest] ->
enforce_no_leading_slash(Rest);
S ->
S
end.
combine(Root, Path) ->
enforce_trailing_slash(Root) ++ enforce_no_leading_slash(Path).
refresh_connection_process(State = #state{current_connection_requests = CurrentConnectionRequests,
max_connection_requests = MaxConnectionRequests})
when CurrentConnectionRequests >= MaxConnectionRequests ->
reset_http_client_pid(State);
refresh_connection_process(State = #state{born_on_time = BornOnTime,
max_connection_duration = MaxConnectionDuration,
current_connection_requests = CurrentConnectionRequests}) ->
Duration = (timer:now_diff(os:timestamp(), BornOnTime)/1000),
case Duration >= MaxConnectionDuration of
true ->
reset_http_client_pid(State);
false ->
State#state{current_connection_requests = CurrentConnectionRequests + 1}
end.
reset_http_client_pid(State) ->
clear_previous_connection(State),
make_http_client_pid(State).
clear_previous_connection(State = #state{ibrowse_pid = undefined}) ->
State;
clear_previous_connection(State = #state{ibrowse_pid = Pid}) ->
ibrowse_http_client:stop(Pid),
State.
make_http_client_pid(State = #state{root_url = RootUrl, ibrowse_options = IbrowseOptions}) ->
Url = create_ibrowse_url(RootUrl),
{ok, Pid} = ibrowse_http_client:start_link({undefined, Url, create_ssl_options(Url, IbrowseOptions)}),
State#state{ibrowse_pid = Pid, born_on_time = os:timestamp(), current_connection_requests = 0}.
|
c7aaadbc0a996f1a36f38e1475c777e711fe633a96826886a4207cdc47aa8340 | smaccoun/beam-servant | User.hs | {-# LANGUAGE TypeFamilies #-}
module Api.Endpoints.User where
import Api.Resource
import Config.AppConfig
import AppPrelude
import Control.Lens hiding (element)
import qualified Crypto.Scrypt as S
import Data.Text.Encoding (encodeUtf8)
import Data.UUID (UUID)
import Database.Beam
import Database.Crud
import Database.MasterEntity (baseTable, appId)
import Database.Schema (userTable)
import Database.Tables.User
import Database.Transaction
import Models.Credentials (Email (..), Password (..))
import Models.User (UserResponse)
import Pagination
import Servant
type UserAPI = RResourceAPI "users" PaginatedResult UserEntity UUID
userServer :: UserResponse -> ServerT UserAPI AppM
userServer _ = do
rResourceServer getUsers getUser
getUsers
:: (MonadIO m, MonadReader r m, HasDBConn r)
=> Maybe Limit
-> Maybe Offset
-> Maybe Order
-> m (PaginatedResult UserEntity)
getUsers mbLimit mbPage mbOrder = getEntities userTable mbLimit mbPage mbOrder
getUser :: (MonadIO m, MonadReader r m, HasDBConn r) => UUID -> m UserEntity
getUser userId' = getEntity userTable userId'
getUserByEmail
:: (MonadIO m, MonadReader r m, HasDBConn r) => Email -> m UserEntity
getUserByEmail (Email email') = do
userResult <- runQuerySingle $ select $ do
users <- all_ (userTable)
guard_ (users ^. baseTable ^. email ==. val_ email')
pure users
return $ userResult
createUser
:: (MonadIO m, MonadReader r m, HasDBConn r)
=> Email
-> Password
-> m UserEntity
createUser (Email email') (Password unencryptedPassword) = do
encryptedPassword <- liftIO
$ S.encryptPassIO S.defaultParams (S.Pass $ encodeUtf8 unencryptedPassword)
createEntity userTable (User email' encryptedPassword)
updatePassword
:: (MonadIO m, MonadReader r m, HasDBConn r)
=> UUID
-> S.EncryptedPass
-> m ()
updatePassword userUUID newPassword = runSqlM $ runUpdate $ update
userTable
(\u -> [u ^. baseTable ^. password <-. val_ newPassword])
(\u -> u ^. appId ==. val_ userUUID)
deleteUser :: (MonadIO m, MonadReader r m, HasDBConn r) => UUID -> m ()
deleteUser userUUID = deleteByID userTable userUUID
| null | https://raw.githubusercontent.com/smaccoun/beam-servant/1560f81c56ca7a324be1ee68208fb05c4dfea844/src/Api/Endpoints/User.hs | haskell | # LANGUAGE TypeFamilies # |
module Api.Endpoints.User where
import Api.Resource
import Config.AppConfig
import AppPrelude
import Control.Lens hiding (element)
import qualified Crypto.Scrypt as S
import Data.Text.Encoding (encodeUtf8)
import Data.UUID (UUID)
import Database.Beam
import Database.Crud
import Database.MasterEntity (baseTable, appId)
import Database.Schema (userTable)
import Database.Tables.User
import Database.Transaction
import Models.Credentials (Email (..), Password (..))
import Models.User (UserResponse)
import Pagination
import Servant
type UserAPI = RResourceAPI "users" PaginatedResult UserEntity UUID
userServer :: UserResponse -> ServerT UserAPI AppM
userServer _ = do
rResourceServer getUsers getUser
getUsers
:: (MonadIO m, MonadReader r m, HasDBConn r)
=> Maybe Limit
-> Maybe Offset
-> Maybe Order
-> m (PaginatedResult UserEntity)
getUsers mbLimit mbPage mbOrder = getEntities userTable mbLimit mbPage mbOrder
getUser :: (MonadIO m, MonadReader r m, HasDBConn r) => UUID -> m UserEntity
getUser userId' = getEntity userTable userId'
getUserByEmail
:: (MonadIO m, MonadReader r m, HasDBConn r) => Email -> m UserEntity
getUserByEmail (Email email') = do
userResult <- runQuerySingle $ select $ do
users <- all_ (userTable)
guard_ (users ^. baseTable ^. email ==. val_ email')
pure users
return $ userResult
createUser
:: (MonadIO m, MonadReader r m, HasDBConn r)
=> Email
-> Password
-> m UserEntity
createUser (Email email') (Password unencryptedPassword) = do
encryptedPassword <- liftIO
$ S.encryptPassIO S.defaultParams (S.Pass $ encodeUtf8 unencryptedPassword)
createEntity userTable (User email' encryptedPassword)
updatePassword
:: (MonadIO m, MonadReader r m, HasDBConn r)
=> UUID
-> S.EncryptedPass
-> m ()
updatePassword userUUID newPassword = runSqlM $ runUpdate $ update
userTable
(\u -> [u ^. baseTable ^. password <-. val_ newPassword])
(\u -> u ^. appId ==. val_ userUUID)
deleteUser :: (MonadIO m, MonadReader r m, HasDBConn r) => UUID -> m ()
deleteUser userUUID = deleteByID userTable userUUID
|
2dc711402fdea35fbb7fbd950bc5e000620c353febc83f34d30f9be3fc4d3674 | typedclojure/typedclojure | test_base_env.cljs | (ns cljs.core.typed.test.ympbyc.test-base-env
(:require-macros [cljs.core.typed :refer [ann ann-jsnominal] :as t])
(:require [cljs.core :refer [IVector ISeq ASeq List]]))
;;seq
(ann seq-vec (t/NonEmptySeqable number))
(def seq-vec (seq [1 2 3]))
(ann seq-empty (t/Option (t/NonEmptyASeq nil)))
(def seq-empty (seq []))
;;fst
(ann vec-fst number)
(def vec-fst (first [8]))
(ann seq-fst number)
(def seq-fst (first (seq [1 2 3])))
(ann fst-nil nil)
(def fst-nil (first nil))
;;rest
(ann vec-rest (ASeq number))
(def vec-rest (rest [1 2 3]))
(ann seq-rest (ASeq number))
(def seq-rest (rest (seq [1 2 3])))
(ann rest-empty (ASeq nil))
(def rest-empty (rest []))
;;last
(ann vec-last number)
(def vec-last (last [1 2 3]))
(ann seq-last number)
(def seq-last (last (seq [1 2 3])))
(ann last-nil (t/Option number))
(def last-nil (last []))
butlast
(ann vec-butlast (ASeq number))
(def vec-butlast (butlast [1 2 3]))
(ann seq-butlast (ASeq number))
(def vec-butlast (butlast (seq [1 2 3])))
(ann butlast-empty (ASeq nil))
(def butlast-empty (butlast []))
;;test if NonEmptySeqable is Seqable
(ann nonemp (t/All [x] [(t/NonEmptySeqable x) -> number]))
(defn foo [xs] 1)
(foo (seq [1 2 3]))
(ann second-vec number)
(def second-vec (second [1 2 3]))
(ann second-empty nil)
(def second-empty (second []))
(ann second-nil nil)
(def second-nil (second nil))
(ann second-seq (t/Option number))
(def second-seq (second (seq [1 2 3])))
(ann clj-to-jsjs t/Any)
(def clj-to-js (clj->js {:a 1}))
BUG : Use of js - obj triggers " js - op missing " in check_cljs
( - clj t / Any )
( def js - to - clj ( js->clj ( js - obj " a " 1 " b " 2 ) ) )
(ann cljs.core/nil? [t/Any -> boolean])
(ann nil-pred-t boolean)
(def nil-pred-t (nil? nil))
(ann nil-pred-f boolean)
(def nil-pred-f (nil? "nil"))
(ann ifn?-test-t boolean)
(def ifn?-test-t (ifn? (fn [x] x)))
(ann ifn?-test-f boolean)
(def ifn-test-f (ifn? "foo"))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; test vars in base-env-common
(ann id-test number)
(def id-test (identity 8))
int NYI
( ( ASeq int ) )
( def take - vec ( take 2 [ 1 2 3 4 ] ) )
;
;
( ( ASeq int ) )
( def drop ( drop 2 [ 1 2 3 4 ] ) )
;
( get - set ( t / Option int ) )
( def get - set ( get # { 1 2 3 } 2 ) )
(ann sym-test Symbol)
(def sym-test 'foo)
;BUG: subtyping fails
( - test ( t / Atom1 number ) )
( def atom - test ( atom 3 ) )
(ann set-test (t/Set number))
(def set-test #{5})
(ann number?-t boolean)
(def number?-t (number? 8))
(ann number?-f boolean)
(def number?-f (number? [1 2]))
(ann string?-t boolean)
(def string?-t (string? "hello"))
(ann seq?-t boolean)
(def seq-t (seq? (seq [1 2 3])))
(ann seq?-f boolean)
(def seq-f (seq? [1 2 3]))
;BUG: Use of `list` invokes an error
( [ t / Any t / Any - > ( t / Coll t / Any ) ] )
( cljs.core . List . EMPTY ( List t / Any ) ) ; ; this fails somehow
( boolean )
;(def list?-test (list? (list 1 2 3)))
(ann apply-test number)
(def apply-test (apply + [2 3]))
(ann apply-test-str string)
(def apply-test-str (apply str ["hello, " "world"]))
(ann conj-1 (IVector string))
(def conj-1 (conj ["foo"] "bar"))
(ann conj-2 (ASeq number))
(def conj-2 (conj (seq [3 4 5]) 1 2))
(ann conj-3 (ASeq (t/Vec number)))
(def conj-3 (conj (seq [[1] [2 3]]) [8] [9]))
;BUG: this throws assert failed
( ( t / Map Keyword number ) )
( def conj-4 ( { : foo 5 } [: bar 8 ] ) )
( ( t / Map Keyword t / Any ) )
( def conj-5 ( { : foo " bar " } { : baz 123 } ) )
(ann get-1 (t/Option number))
(def get-1 (get #{1 2 3} 3))
(ann get-2 boolean)
(def get-2 (get {:a true :b false} :c false))
(ann assoc-vec (t/Vec string))
(def assoc-vec (assoc ["foo" "bar"] 2 "baz"))
(ann assoc-map (t/Map (t/Vec number) string))
(def assoc-map (assoc {[2 3] "foo"} [4 5] "bar"))
(ann dissoc-1 (t/Map Keyword number))
(def dissoc-1 (dissoc {:foo 8 :bar 9} :foo))
(ann fn?-1 boolean)
(def fn?-1 (fn? (fn [x y] y)))
(ann fn?-2 boolean)
(def fn?-2 (fn? cljs.core/map))
(ann peek-1 (t/Map Keyword string))
(def peek-1 (peek [{:foo "bar" :baz "zot"} {:foo "bar"}]))
(ann pop-1 (t/Vec number))
(def pop-1 (pop [1 2 3]))
(ann disj-1 (t/Set number))
(def disj-1 (disj #{1 2 3 4} 3 4))
;;jsnominals
(ann-jsnominal js/Object
[[]
:fields {}
:methods {keys (Array string)
toString [-> string]}])
(ann-jsnominal js/Document
[[]
:fields {}
:methods
{getElementById [string -> (cljs.core.typed/Option js/HTMLElement)]
querySelector [string -> (cljs.core.typed/Option js/HTMLElement)]}
:ancestors #{js/Object}])
( / document js / Document )
(ann get-el [string -> (t/Option js/HTMLElement)])
(defn get-el [sel] (.querySelector js/document sel))
(ann inner-html [js/HTMLElement -> string])
(defn inner-html [el] (.-innerHTML el))
(ann inner-html-result string)
(def inner-html-result
(let [el (get-el "body")]
(if el (inner-html el) "")))
;;inheritance
(ann document-is-object string)
(def document-is-object (.toString js/document))
| null | https://raw.githubusercontent.com/typedclojure/typedclojure/514f2a46ae0145f34bef0400495079ba3292b82b/typed/cljs.checker/test/cljs/core/typed/test/ympbyc/test_base_env.cljs | clojure | seq
fst
rest
last
test if NonEmptySeqable is Seqable
test vars in base-env-common
BUG: subtyping fails
BUG: Use of `list` invokes an error
; this fails somehow
(def list?-test (list? (list 1 2 3)))
BUG: this throws assert failed
jsnominals
inheritance | (ns cljs.core.typed.test.ympbyc.test-base-env
(:require-macros [cljs.core.typed :refer [ann ann-jsnominal] :as t])
(:require [cljs.core :refer [IVector ISeq ASeq List]]))
(ann seq-vec (t/NonEmptySeqable number))
(def seq-vec (seq [1 2 3]))
(ann seq-empty (t/Option (t/NonEmptyASeq nil)))
(def seq-empty (seq []))
(ann vec-fst number)
(def vec-fst (first [8]))
(ann seq-fst number)
(def seq-fst (first (seq [1 2 3])))
(ann fst-nil nil)
(def fst-nil (first nil))
(ann vec-rest (ASeq number))
(def vec-rest (rest [1 2 3]))
(ann seq-rest (ASeq number))
(def seq-rest (rest (seq [1 2 3])))
(ann rest-empty (ASeq nil))
(def rest-empty (rest []))
(ann vec-last number)
(def vec-last (last [1 2 3]))
(ann seq-last number)
(def seq-last (last (seq [1 2 3])))
(ann last-nil (t/Option number))
(def last-nil (last []))
butlast
(ann vec-butlast (ASeq number))
(def vec-butlast (butlast [1 2 3]))
(ann seq-butlast (ASeq number))
(def vec-butlast (butlast (seq [1 2 3])))
(ann butlast-empty (ASeq nil))
(def butlast-empty (butlast []))
(ann nonemp (t/All [x] [(t/NonEmptySeqable x) -> number]))
(defn foo [xs] 1)
(foo (seq [1 2 3]))
(ann second-vec number)
(def second-vec (second [1 2 3]))
(ann second-empty nil)
(def second-empty (second []))
(ann second-nil nil)
(def second-nil (second nil))
(ann second-seq (t/Option number))
(def second-seq (second (seq [1 2 3])))
(ann clj-to-jsjs t/Any)
(def clj-to-js (clj->js {:a 1}))
BUG : Use of js - obj triggers " js - op missing " in check_cljs
( - clj t / Any )
( def js - to - clj ( js->clj ( js - obj " a " 1 " b " 2 ) ) )
(ann cljs.core/nil? [t/Any -> boolean])
(ann nil-pred-t boolean)
(def nil-pred-t (nil? nil))
(ann nil-pred-f boolean)
(def nil-pred-f (nil? "nil"))
(ann ifn?-test-t boolean)
(def ifn?-test-t (ifn? (fn [x] x)))
(ann ifn?-test-f boolean)
(def ifn-test-f (ifn? "foo"))
(ann id-test number)
(def id-test (identity 8))
int NYI
( ( ASeq int ) )
( def take - vec ( take 2 [ 1 2 3 4 ] ) )
( ( ASeq int ) )
( def drop ( drop 2 [ 1 2 3 4 ] ) )
( get - set ( t / Option int ) )
( def get - set ( get # { 1 2 3 } 2 ) )
(ann sym-test Symbol)
(def sym-test 'foo)
( - test ( t / Atom1 number ) )
( def atom - test ( atom 3 ) )
(ann set-test (t/Set number))
(def set-test #{5})
(ann number?-t boolean)
(def number?-t (number? 8))
(ann number?-f boolean)
(def number?-f (number? [1 2]))
(ann string?-t boolean)
(def string?-t (string? "hello"))
(ann seq?-t boolean)
(def seq-t (seq? (seq [1 2 3])))
(ann seq?-f boolean)
(def seq-f (seq? [1 2 3]))
( [ t / Any t / Any - > ( t / Coll t / Any ) ] )
( boolean )
(ann apply-test number)
(def apply-test (apply + [2 3]))
(ann apply-test-str string)
(def apply-test-str (apply str ["hello, " "world"]))
(ann conj-1 (IVector string))
(def conj-1 (conj ["foo"] "bar"))
(ann conj-2 (ASeq number))
(def conj-2 (conj (seq [3 4 5]) 1 2))
(ann conj-3 (ASeq (t/Vec number)))
(def conj-3 (conj (seq [[1] [2 3]]) [8] [9]))
( ( t / Map Keyword number ) )
( def conj-4 ( { : foo 5 } [: bar 8 ] ) )
( ( t / Map Keyword t / Any ) )
( def conj-5 ( { : foo " bar " } { : baz 123 } ) )
(ann get-1 (t/Option number))
(def get-1 (get #{1 2 3} 3))
(ann get-2 boolean)
(def get-2 (get {:a true :b false} :c false))
(ann assoc-vec (t/Vec string))
(def assoc-vec (assoc ["foo" "bar"] 2 "baz"))
(ann assoc-map (t/Map (t/Vec number) string))
(def assoc-map (assoc {[2 3] "foo"} [4 5] "bar"))
(ann dissoc-1 (t/Map Keyword number))
(def dissoc-1 (dissoc {:foo 8 :bar 9} :foo))
(ann fn?-1 boolean)
(def fn?-1 (fn? (fn [x y] y)))
(ann fn?-2 boolean)
(def fn?-2 (fn? cljs.core/map))
(ann peek-1 (t/Map Keyword string))
(def peek-1 (peek [{:foo "bar" :baz "zot"} {:foo "bar"}]))
(ann pop-1 (t/Vec number))
(def pop-1 (pop [1 2 3]))
(ann disj-1 (t/Set number))
(def disj-1 (disj #{1 2 3 4} 3 4))
(ann-jsnominal js/Object
[[]
:fields {}
:methods {keys (Array string)
toString [-> string]}])
(ann-jsnominal js/Document
[[]
:fields {}
:methods
{getElementById [string -> (cljs.core.typed/Option js/HTMLElement)]
querySelector [string -> (cljs.core.typed/Option js/HTMLElement)]}
:ancestors #{js/Object}])
( / document js / Document )
(ann get-el [string -> (t/Option js/HTMLElement)])
(defn get-el [sel] (.querySelector js/document sel))
(ann inner-html [js/HTMLElement -> string])
(defn inner-html [el] (.-innerHTML el))
(ann inner-html-result string)
(def inner-html-result
(let [el (get-el "body")]
(if el (inner-html el) "")))
(ann document-is-object string)
(def document-is-object (.toString js/document))
|
99860fb199d646c328721f7c6ee814c3fefd12ed1ce735f5ad68cf095ef48e87 | input-output-hk/ouroboros-network | RAWLock.hs | {-# LANGUAGE DeriveAnyClass #-}
{-# LANGUAGE DeriveGeneric #-}
{-# LANGUAGE DerivingStrategies #-}
# LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
-- | A Read-Append-Write (RAW) lock
--
-- Intended for qualified import
module Ouroboros.Consensus.Util.MonadSTM.RAWLock (
-- * Public API
RAWLock
, new
, poison
, read
, withAppendAccess
, withReadAccess
, withWriteAccess
-- * Exposed internals: non-bracketed acquire & release
, unsafeAcquireAppendAccess
, unsafeAcquireReadAccess
, unsafeAcquireWriteAccess
, unsafeReleaseAppendAccess
, unsafeReleaseReadAccess
, unsafeReleaseWriteAccess
) where
import Prelude hiding (read)
import Control.Monad.Except
import Data.Functor (($>))
import GHC.Generics (Generic)
import GHC.Stack (CallStack, HasCallStack, callStack)
import NoThunks.Class (AllowThunk (..))
import Ouroboros.Consensus.Util.IOLike
{-------------------------------------------------------------------------------
Public API
-------------------------------------------------------------------------------}
-- | A Read-Append-Write (RAW) lock
--
A RAW lock allows multiple concurrent readers , at most one appender , which
-- is allowed to run concurrently with the readers, and at most one writer,
-- which has exclusive access to the lock.
--
-- The following table summarises which roles are allowed to concurrently
access the RAW lock :
--
-- > │ Reader │ Appender │ Writer │
-- > ─────────┼────────┼──────────┼────────┤
-- > Reader │ V │ V │ X │
> Appender │ ░ ░ ░ ░ ░ ░ ░ ░ │ X │ X │
> Writer │ ░ ░ ░ ░ ░ ░ ░ ░ │ ░ ░ ░ ░ ░ ░ ░ ░ ░ ░ │ X │
--
-- It is important to realise that a RAW lock is intended to control access to
-- a piece of in-memory state that should remain in sync with some other state
-- that can only be modified using side-effects, e.g., the file system. If,
-- for example, you're only maintaining a counter shared by threads, then
simply use a ' TVar ' or an ' MVar ' .
--
-- = Example use case: log files
--
-- A RAW lock is useful, for example, to maintain an in-memory index of log
-- files stored on disk.
--
-- * To read data from a log file, you need \"read\" access to the index to
-- find out the file and offset where the requested piece of data is stored.
While holding the RAW lock as a reader , you can perform the IO operation
-- to read the data from the right log file. This can safely happen
-- concurrently with other read operations.
--
-- * To append data to the current log file, you need \"append\" access to the
-- index so you can append an entry to the index and even to add a new log
-- file to the index when necessary. While holding the RAW lock as an
appender , you can perform the IO operation to append the piece of data to
the current log file and , if necessary start a new log file . Only one
-- append can happen concurrently. However, reads can safely happen
-- concurrently with appends. Note that the in-memory index is only updated
-- /after/ writing to disk.
--
-- * To remove the oldest log files, you need \"write\" access to the index,
-- so you can remove files from the index. While holding the RAW lock as a
writer , you can perform the IO operations to delete the oldest log files .
-- No other operations can run concurrently with this operation: concurrent
-- reads might try to read from deleted files and a concurrent append could
-- try to append to a deleted file.
--
-- = Analogy: Chicken coop
--
-- Think of readers as chickens, the appender as the rooster, and the writer
-- as the fox. All of them want access to the chicken coop, i.e., the state
protected by the RAW lock .
--
-- We can allow multiple chickens (readers) together in the chicken coop, they
get along ( reasonably ) fine . We can also let one rooster ( appender ) in , but
not more than one , otherwise he would start fighting with the other rooster
-- (conflict with the other appender). We can only let the fox in when all
-- chickens and the rooster (if present) have left the chicken coop, otherwise
-- the fox would eat them (conflict with the appender and invalidate the
-- results of readers, e.g, closing resources readers try to access).
--
-- = Usage
--
To use the lock , use any of the three following operations :
--
-- * 'withReadAccess'
-- * 'withAppendAccess'
-- * 'withWriteAccess'
--
If the standard bracketing the above three operations use does n't suffice ,
use the following three acquire - release pairs :
--
-- * 'unsafeAcquireReadAccess' & 'unsafeReleaseReadAccess'
-- * 'unsafeAcquireAppendAccess' & 'unsafeReleaseAppendAccess'
-- * 'unsafeAcquireWriteAccess' & 'unsafeReleaseWriteAccess'
--
-- NOTE: an acquire __must__ be followed by the corresponding release,
-- otherwise the correctness of the lock is not guaranteed and a dead-lock can
-- happen.
--
-- NOTE: nested locking of the same lock is not allowed, as you might be
-- blocked on yourself.
--
-- = Notes
--
-- * Only use a RAW lock when it is safe to concurrently read and append.
--
-- * We do not guarantee fairness for appenders and writers. They will race
-- for access each time the RAW lock changes.
--
-- * When you have many writers and/or very frequent writes, readers and
appenders will starve . You could say we have " , as writers
-- win over readers and appenders. A RAW lock will not be the best fit in
-- such a scenario.
--
-- * When you have no writers and you only need a read-append lock, consider
-- using a @StrictMVar@ instead. The \"stale\" state can be used by the
-- readers.
--
* The state @st@ is always evaluated to WHNF and is subject to the
' ' check when enabled .
--
-- * All public functions are exception-safe.
--
newtype RAWLock m st = RAWLock (StrictTVar m (RAWState st))
-- | Create a new 'RAWLock'
new :: (IOLike m, NoThunks st) => st -> m (RAWLock m st)
new st = RAWLock <$> newTVarIO (emptyRAWState st)
| Access the state stored in the ' RAWLock ' as a reader .
--
-- Will block when there is a writer or when a writer is waiting to take the
-- lock.
withReadAccess :: forall m st a. IOLike m => RAWLock m st -> (st -> m a) -> m a
withReadAccess rawLock =
bracket
(atomically $ unsafeAcquireReadAccess rawLock)
(const (atomically $ unsafeReleaseReadAccess rawLock))
| Access the state stored in the ' RAWLock ' as an appender .
--
-- NOTE: it must be safe to run the given append action concurrently with
-- readers.
--
-- Will block when there is another appender, a writer, or when a writer is
-- waiting to take the lock.
withAppendAccess
:: forall m st a. IOLike m => RAWLock m st -> (st -> m (st, a)) -> m a
withAppendAccess rawLock k = snd . fst <$>
generalBracket
(atomically $ unsafeAcquireAppendAccess rawLock)
(\acquiredSt exitCase ->
atomically $ unsafeReleaseAppendAccess
rawLock
(stateToPutBack acquiredSt exitCase))
k
| Access the state stored in the ' RAWLock ' as a writer .
--
-- Will block when there is another writer or while there are readers and/or
-- an appender.
withWriteAccess
:: forall m st a. IOLike m => RAWLock m st -> (st -> m (st, a)) -> m a
withWriteAccess rawLock k = snd . fst <$>
generalBracket
(unsafeAcquireWriteAccess rawLock)
(\acquiredSt exitCase ->
unsafeReleaseWriteAccess
rawLock
(stateToPutBack acquiredSt exitCase))
k
-- | Internal helper
stateToPutBack
:: st -- ^ Acquired state
-> ExitCase (st, a)
-- ^ Result of 'generalBracket', containing the modified state in case of
-- success
-> st
stateToPutBack acquiredSt = \case
ExitCaseSuccess (modifiedSt, _a) -> modifiedSt
ExitCaseException _ex -> acquiredSt
ExitCaseAbort -> acquiredSt
| Read the contents of the ' RAWLock ' in an STM transaction .
--
-- Will retry when there is a writer.
--
-- In contrast to 'withReadAccess', this transaction will succeed when there
-- is a writer waiting to write, as there is no IO-operation during which the
-- lock must be held.
read :: IOLike m => RAWLock m st -> STM m st
read (RAWLock var) = readTVar var >>= \case
ReadAppend _readers _appender st -> return st
WaitingToWrite _readers _appender st -> return st
Writing -> retry
Poisoned (AllowThunk ex) -> throwSTM ex
-- | Poison the lock with the given exception. All subsequent access to the
-- lock will result in the given exception being thrown.
--
-- Unless the lock has already been poisoned, in which case the original
-- exception with which the lock was poisoned will be thrown.
poison
:: (IOLike m, Exception e, HasCallStack)
=> RAWLock m st -> (CallStack -> e) -> m (Maybe st)
poison (RAWLock var) mkEx = atomically $ do
rawSt <- readTVar var
(rawSt', mbSt) <-
withPoisoned (poisonPure (toException (mkEx callStack)) rawSt)
writeTVar var rawSt'
return mbSt
{-------------------------------------------------------------------------------
Exposed internals: non-bracketed acquire & release
-------------------------------------------------------------------------------}
withPoisoned :: MonadThrow m => Except SomeException a -> m a
withPoisoned = either throwIO return . runExcept
-- | Acquire the 'RAWLock' as a reader.
--
-- Will block when there is a writer or when a writer is waiting to take the
-- lock.
--
Composable with other ' STM ' transactions .
--
-- NOTE: __must__ be followed by a call to 'unsafeReleaseReadAccess'.
unsafeAcquireReadAccess :: IOLike m => RAWLock m st -> STM m st
unsafeAcquireReadAccess (RAWLock var) = do
rawSt <- readTVar var
withPoisoned (acquireReadAccessPure rawSt) >>= \case
Nothing -> retry
Just (rawSt', st) -> writeTVar var rawSt' $> st
-- | Release the 'RAWLock' as a reader.
--
-- Doesn't block.
--
Composable with other ' STM ' transactions .
--
-- NOTE: __must__ be preceded by a call to 'unsafeAcquireReadAccess'.
unsafeReleaseReadAccess :: IOLike m => RAWLock m st -> STM m ()
unsafeReleaseReadAccess (RAWLock var) = do
rawSt <- readTVar var
withPoisoned (releaseReadAccessPure rawSt) >>= writeTVar var
| Access the state stored in the ' RAWLock ' as an appender .
--
-- Will block when there is another appender, a writer, or when a writer is
-- waiting to take the lock.
--
Composable with other ' STM ' transactions .
--
-- NOTE: __must__ be followed by a call to 'unsafeReleaseAppendAccess'.
unsafeAcquireAppendAccess :: IOLike m => RAWLock m st -> STM m st
unsafeAcquireAppendAccess (RAWLock var) = do
rawSt <- readTVar var
withPoisoned (acquireAppendAccessPure rawSt) >>= \case
Nothing -> retry
Just (rawSt', st) -> writeTVar var rawSt' $> st
-- | Release the 'RAWLock' as an appender.
--
-- Doesn't block.
--
Composable with other ' STM ' transactions .
--
-- NOTE: __must__ be preceded by a call to 'unsafeAcquireAppendAccess'.
unsafeReleaseAppendAccess
:: IOLike m
=> RAWLock m st
-> st -- ^ State to store in the lock
-> STM m ()
unsafeReleaseAppendAccess (RAWLock var) st = do
rawSt <- readTVar var
withPoisoned (releaseAppendAccessPure st rawSt) >>= writeTVar var
| Access the state stored in the ' RAWLock ' as a writer .
--
-- Will block when there is another writer or while there are readers and\/or
-- an appender.
--
Does /not/ compose with other ' STM ' transactions .
--
-- NOTE: __must__ be followed by a call to 'unsafeReleaseWriteAccess'.
unsafeAcquireWriteAccess :: IOLike m => RAWLock m st -> m st
unsafeAcquireWriteAccess rawLock@(RAWLock var) = join $ atomically $ do
rawSt <- readTVar var
withPoisoned (acquireWriteAccessPure rawSt) >>= \case
Nothing -> retry
Just (rawSt', mbSt) -> do
writeTVar var rawSt'
-- We must update the value in the var, but we may or may not have
-- obtained the @st@ in it. We must commit the write either way.
case mbSt of
Just st -> return $ return st
-- Return a continuation that tries to acquire again
Nothing -> return $ unsafeAcquireWriteAccess rawLock
-- | Release the 'RAWLock' as a writer.
--
-- Doesn't block.
--
Does /not/ compose with other ' STM ' transactions .
--
-- NOTE: __must__ be preceded by a call to 'unsafeAcquireWriteAccess'.
unsafeReleaseWriteAccess
:: IOLike m
=> RAWLock m st
-> st -- ^ State to store in the lock
-> m ()
unsafeReleaseWriteAccess (RAWLock var) st = atomically $ do
rawSt <- readTVar var
withPoisoned (releaseWriteAccessPure st rawSt) >>= writeTVar var
{-------------------------------------------------------------------------------
Pure internals
-------------------------------------------------------------------------------}
-- | Any non-negative number of readers
newtype Readers = Readers Word
deriving newtype (Eq, Ord, Enum, Num, NoThunks)
| At most one appender
data Appender = NoAppender | Appender
deriving (Generic, NoThunks)
| The lock is implemented by a single ' StrictTVar ' , which stores a
' RAWState ' .
data RAWState st =
-- | Reading and/or appending is happening.
ReadAppend !Readers !Appender !st
| A writer ( or more than one ) has arrived . No new readers or a new
-- appender are allowed, they can only release, not acquire.
--
-- When the number of readers is 0 and there is no more appender, a writer
-- (multiple writers can race for this) will be able to get exclusive
-- access and will change the state to 'Writing'.
| WaitingToWrite !Readers !Appender !st
| No ( more ) readers or appender , the writer has exclusive access .
| Writing
-- | The lock has been poisoned: all subsequent acquires or releases will
-- throw the stored exception.
| Poisoned !(AllowThunk SomeException)
deriving (Generic, NoThunks)
| Create an initial , empty , unlocked ' RAWState ' : no readers , no appender ,
-- no writer (waiting).
emptyRAWState :: st -> RAWState st
emptyRAWState = ReadAppend (Readers 0) NoAppender
------------------------------------------------------------------------------
Pure internals : transitions between the ' RAWState 's
------------------------------------------------------------------------------
Pure internals: transitions between the 'RAWState's
-------------------------------------------------------------------------------}
acquireReadAccessPure
:: RAWState st -> Except SomeException (Maybe (RAWState st, st))
acquireReadAccessPure = \case
ReadAppend readers appender st
-> return $ Just (ReadAppend (succ readers) appender st, st)
WaitingToWrite {}
-> return Nothing
Writing
-> return Nothing
Poisoned (AllowThunk ex)
-> throwError ex
releaseReadAccessPure
:: RAWState st -> Except SomeException (RAWState st)
releaseReadAccessPure = \case
ReadAppend readers appender st
| 0 <- readers
-> error "releasing a reader without outstanding readers in ReadAppend"
| otherwise
-> return $ ReadAppend (pred readers) appender st
WaitingToWrite readers appender st
| 0 <- readers
-> error "releasing a reader without outstanding readers in WaitingToWrite"
| otherwise
-> return $ WaitingToWrite (pred readers) appender st
Writing
-> error "releasing a reader without outstanding readers in Writing"
Poisoned (AllowThunk ex)
-> throwError ex
acquireAppendAccessPure
:: RAWState st -> Except SomeException (Maybe (RAWState st, st))
acquireAppendAccessPure = \case
ReadAppend readers appender st
| NoAppender <- appender
-> return $ Just (ReadAppend readers Appender st, st)
| otherwise
-> return Nothing
WaitingToWrite {}
-> return Nothing
Writing
-> return Nothing
Poisoned (AllowThunk ex)
-> throwError ex
releaseAppendAccessPure
:: st -> RAWState st -> Except SomeException (RAWState st)
releaseAppendAccessPure st' = \case
ReadAppend readers appender _st
| NoAppender <- appender
-> error "releasing an appender without an outstanding appender in ReadAppend"
| otherwise
-> return $ ReadAppend readers NoAppender st'
WaitingToWrite readers appender _st
| NoAppender <- appender
-> error "releasing an appender without an outstanding appender in WaitingToWrite"
| otherwise
-> return $ WaitingToWrite readers NoAppender st'
Writing
-> error "releasing an appender without an outstanding appender in Writing"
Poisoned (AllowThunk ex)
-> throwError ex
acquireWriteAccessPure
:: RAWState st -> Except SomeException (Maybe (RAWState st, Maybe st))
acquireWriteAccessPure = \case
-- When there are no readers or appender in the 'ReadAppend' we can
-- directly go to the 'Writing' state, if not, we'll go to the
intermediary ' WaitingToWrite ' state until they have all released .
ReadAppend readers appender st
| 0 <- readers
, NoAppender <- appender
-> return $ Just (Writing, Just st)
| otherwise
-> return $ Just (WaitingToWrite readers appender st, Nothing)
WaitingToWrite readers appender st
| 0 <- readers
, NoAppender <- appender
-> return $ Just (Writing, Just st)
| otherwise
-> return Nothing
Writing
-> return Nothing
Poisoned (AllowThunk ex)
-> throwError ex
releaseWriteAccessPure
:: st -> RAWState st -> Except SomeException (RAWState st)
releaseWriteAccessPure st' = \case
ReadAppend _readers _appender _st
-> error "releasing a writer in ReadAppend"
WaitingToWrite _readers _appender _st
-> error "releasing a writer in WaitingToWrite"
Writing
-> return $ emptyRAWState st'
Poisoned (AllowThunk ex)
-> throwError ex
poisonPure
:: SomeException -> RAWState st -> Except SomeException (RAWState st, Maybe st)
poisonPure ex = \case
ReadAppend _readers _appender st
-> return (Poisoned (AllowThunk ex), Just st)
WaitingToWrite _readers _appender st
-> return (Poisoned (AllowThunk ex), Just st)
Writing
-> return (Poisoned (AllowThunk ex), Nothing)
Poisoned (AllowThunk prevEx)
-> throwError prevEx
| null | https://raw.githubusercontent.com/input-output-hk/ouroboros-network/c82309f403e99d916a76bb4d96d6812fb0a9db81/ouroboros-consensus/src/Ouroboros/Consensus/Util/MonadSTM/RAWLock.hs | haskell | # LANGUAGE DeriveAnyClass #
# LANGUAGE DeriveGeneric #
# LANGUAGE DerivingStrategies #
| A Read-Append-Write (RAW) lock
Intended for qualified import
* Public API
* Exposed internals: non-bracketed acquire & release
------------------------------------------------------------------------------
Public API
------------------------------------------------------------------------------
| A Read-Append-Write (RAW) lock
is allowed to run concurrently with the readers, and at most one writer,
which has exclusive access to the lock.
The following table summarises which roles are allowed to concurrently
> │ Reader │ Appender │ Writer │
> ─────────┼────────┼──────────┼────────┤
> Reader │ V │ V │ X │
It is important to realise that a RAW lock is intended to control access to
a piece of in-memory state that should remain in sync with some other state
that can only be modified using side-effects, e.g., the file system. If,
for example, you're only maintaining a counter shared by threads, then
= Example use case: log files
A RAW lock is useful, for example, to maintain an in-memory index of log
files stored on disk.
* To read data from a log file, you need \"read\" access to the index to
find out the file and offset where the requested piece of data is stored.
to read the data from the right log file. This can safely happen
concurrently with other read operations.
* To append data to the current log file, you need \"append\" access to the
index so you can append an entry to the index and even to add a new log
file to the index when necessary. While holding the RAW lock as an
append can happen concurrently. However, reads can safely happen
concurrently with appends. Note that the in-memory index is only updated
/after/ writing to disk.
* To remove the oldest log files, you need \"write\" access to the index,
so you can remove files from the index. While holding the RAW lock as a
No other operations can run concurrently with this operation: concurrent
reads might try to read from deleted files and a concurrent append could
try to append to a deleted file.
= Analogy: Chicken coop
Think of readers as chickens, the appender as the rooster, and the writer
as the fox. All of them want access to the chicken coop, i.e., the state
We can allow multiple chickens (readers) together in the chicken coop, they
(conflict with the other appender). We can only let the fox in when all
chickens and the rooster (if present) have left the chicken coop, otherwise
the fox would eat them (conflict with the appender and invalidate the
results of readers, e.g, closing resources readers try to access).
= Usage
* 'withReadAccess'
* 'withAppendAccess'
* 'withWriteAccess'
* 'unsafeAcquireReadAccess' & 'unsafeReleaseReadAccess'
* 'unsafeAcquireAppendAccess' & 'unsafeReleaseAppendAccess'
* 'unsafeAcquireWriteAccess' & 'unsafeReleaseWriteAccess'
NOTE: an acquire __must__ be followed by the corresponding release,
otherwise the correctness of the lock is not guaranteed and a dead-lock can
happen.
NOTE: nested locking of the same lock is not allowed, as you might be
blocked on yourself.
= Notes
* Only use a RAW lock when it is safe to concurrently read and append.
* We do not guarantee fairness for appenders and writers. They will race
for access each time the RAW lock changes.
* When you have many writers and/or very frequent writes, readers and
win over readers and appenders. A RAW lock will not be the best fit in
such a scenario.
* When you have no writers and you only need a read-append lock, consider
using a @StrictMVar@ instead. The \"stale\" state can be used by the
readers.
* All public functions are exception-safe.
| Create a new 'RAWLock'
Will block when there is a writer or when a writer is waiting to take the
lock.
NOTE: it must be safe to run the given append action concurrently with
readers.
Will block when there is another appender, a writer, or when a writer is
waiting to take the lock.
Will block when there is another writer or while there are readers and/or
an appender.
| Internal helper
^ Acquired state
^ Result of 'generalBracket', containing the modified state in case of
success
Will retry when there is a writer.
In contrast to 'withReadAccess', this transaction will succeed when there
is a writer waiting to write, as there is no IO-operation during which the
lock must be held.
| Poison the lock with the given exception. All subsequent access to the
lock will result in the given exception being thrown.
Unless the lock has already been poisoned, in which case the original
exception with which the lock was poisoned will be thrown.
------------------------------------------------------------------------------
Exposed internals: non-bracketed acquire & release
------------------------------------------------------------------------------
| Acquire the 'RAWLock' as a reader.
Will block when there is a writer or when a writer is waiting to take the
lock.
NOTE: __must__ be followed by a call to 'unsafeReleaseReadAccess'.
| Release the 'RAWLock' as a reader.
Doesn't block.
NOTE: __must__ be preceded by a call to 'unsafeAcquireReadAccess'.
Will block when there is another appender, a writer, or when a writer is
waiting to take the lock.
NOTE: __must__ be followed by a call to 'unsafeReleaseAppendAccess'.
| Release the 'RAWLock' as an appender.
Doesn't block.
NOTE: __must__ be preceded by a call to 'unsafeAcquireAppendAccess'.
^ State to store in the lock
Will block when there is another writer or while there are readers and\/or
an appender.
NOTE: __must__ be followed by a call to 'unsafeReleaseWriteAccess'.
We must update the value in the var, but we may or may not have
obtained the @st@ in it. We must commit the write either way.
Return a continuation that tries to acquire again
| Release the 'RAWLock' as a writer.
Doesn't block.
NOTE: __must__ be preceded by a call to 'unsafeAcquireWriteAccess'.
^ State to store in the lock
------------------------------------------------------------------------------
Pure internals
------------------------------------------------------------------------------
| Any non-negative number of readers
| Reading and/or appending is happening.
appender are allowed, they can only release, not acquire.
When the number of readers is 0 and there is no more appender, a writer
(multiple writers can race for this) will be able to get exclusive
access and will change the state to 'Writing'.
| The lock has been poisoned: all subsequent acquires or releases will
throw the stored exception.
no writer (waiting).
----------------------------------------------------------------------------
----------------------------------------------------------------------------
-----------------------------------------------------------------------------}
When there are no readers or appender in the 'ReadAppend' we can
directly go to the 'Writing' state, if not, we'll go to the | # LANGUAGE GeneralizedNewtypeDeriving #
# LANGUAGE LambdaCase #
# LANGUAGE ScopedTypeVariables #
module Ouroboros.Consensus.Util.MonadSTM.RAWLock (
RAWLock
, new
, poison
, read
, withAppendAccess
, withReadAccess
, withWriteAccess
, unsafeAcquireAppendAccess
, unsafeAcquireReadAccess
, unsafeAcquireWriteAccess
, unsafeReleaseAppendAccess
, unsafeReleaseReadAccess
, unsafeReleaseWriteAccess
) where
import Prelude hiding (read)
import Control.Monad.Except
import Data.Functor (($>))
import GHC.Generics (Generic)
import GHC.Stack (CallStack, HasCallStack, callStack)
import NoThunks.Class (AllowThunk (..))
import Ouroboros.Consensus.Util.IOLike
A RAW lock allows multiple concurrent readers , at most one appender , which
access the RAW lock :
> Appender │ ░ ░ ░ ░ ░ ░ ░ ░ │ X │ X │
> Writer │ ░ ░ ░ ░ ░ ░ ░ ░ │ ░ ░ ░ ░ ░ ░ ░ ░ ░ ░ │ X │
simply use a ' TVar ' or an ' MVar ' .
While holding the RAW lock as a reader , you can perform the IO operation
appender , you can perform the IO operation to append the piece of data to
the current log file and , if necessary start a new log file . Only one
writer , you can perform the IO operations to delete the oldest log files .
protected by the RAW lock .
get along ( reasonably ) fine . We can also let one rooster ( appender ) in , but
not more than one , otherwise he would start fighting with the other rooster
To use the lock , use any of the three following operations :
If the standard bracketing the above three operations use does n't suffice ,
use the following three acquire - release pairs :
appenders will starve . You could say we have " , as writers
* The state @st@ is always evaluated to WHNF and is subject to the
' ' check when enabled .
newtype RAWLock m st = RAWLock (StrictTVar m (RAWState st))
new :: (IOLike m, NoThunks st) => st -> m (RAWLock m st)
new st = RAWLock <$> newTVarIO (emptyRAWState st)
| Access the state stored in the ' RAWLock ' as a reader .
withReadAccess :: forall m st a. IOLike m => RAWLock m st -> (st -> m a) -> m a
withReadAccess rawLock =
bracket
(atomically $ unsafeAcquireReadAccess rawLock)
(const (atomically $ unsafeReleaseReadAccess rawLock))
| Access the state stored in the ' RAWLock ' as an appender .
withAppendAccess
:: forall m st a. IOLike m => RAWLock m st -> (st -> m (st, a)) -> m a
withAppendAccess rawLock k = snd . fst <$>
generalBracket
(atomically $ unsafeAcquireAppendAccess rawLock)
(\acquiredSt exitCase ->
atomically $ unsafeReleaseAppendAccess
rawLock
(stateToPutBack acquiredSt exitCase))
k
| Access the state stored in the ' RAWLock ' as a writer .
withWriteAccess
:: forall m st a. IOLike m => RAWLock m st -> (st -> m (st, a)) -> m a
withWriteAccess rawLock k = snd . fst <$>
generalBracket
(unsafeAcquireWriteAccess rawLock)
(\acquiredSt exitCase ->
unsafeReleaseWriteAccess
rawLock
(stateToPutBack acquiredSt exitCase))
k
stateToPutBack
-> ExitCase (st, a)
-> st
stateToPutBack acquiredSt = \case
ExitCaseSuccess (modifiedSt, _a) -> modifiedSt
ExitCaseException _ex -> acquiredSt
ExitCaseAbort -> acquiredSt
| Read the contents of the ' RAWLock ' in an STM transaction .
read :: IOLike m => RAWLock m st -> STM m st
read (RAWLock var) = readTVar var >>= \case
ReadAppend _readers _appender st -> return st
WaitingToWrite _readers _appender st -> return st
Writing -> retry
Poisoned (AllowThunk ex) -> throwSTM ex
poison
:: (IOLike m, Exception e, HasCallStack)
=> RAWLock m st -> (CallStack -> e) -> m (Maybe st)
poison (RAWLock var) mkEx = atomically $ do
rawSt <- readTVar var
(rawSt', mbSt) <-
withPoisoned (poisonPure (toException (mkEx callStack)) rawSt)
writeTVar var rawSt'
return mbSt
withPoisoned :: MonadThrow m => Except SomeException a -> m a
withPoisoned = either throwIO return . runExcept
Composable with other ' STM ' transactions .
unsafeAcquireReadAccess :: IOLike m => RAWLock m st -> STM m st
unsafeAcquireReadAccess (RAWLock var) = do
rawSt <- readTVar var
withPoisoned (acquireReadAccessPure rawSt) >>= \case
Nothing -> retry
Just (rawSt', st) -> writeTVar var rawSt' $> st
Composable with other ' STM ' transactions .
unsafeReleaseReadAccess :: IOLike m => RAWLock m st -> STM m ()
unsafeReleaseReadAccess (RAWLock var) = do
rawSt <- readTVar var
withPoisoned (releaseReadAccessPure rawSt) >>= writeTVar var
| Access the state stored in the ' RAWLock ' as an appender .
Composable with other ' STM ' transactions .
unsafeAcquireAppendAccess :: IOLike m => RAWLock m st -> STM m st
unsafeAcquireAppendAccess (RAWLock var) = do
rawSt <- readTVar var
withPoisoned (acquireAppendAccessPure rawSt) >>= \case
Nothing -> retry
Just (rawSt', st) -> writeTVar var rawSt' $> st
Composable with other ' STM ' transactions .
unsafeReleaseAppendAccess
:: IOLike m
=> RAWLock m st
-> STM m ()
unsafeReleaseAppendAccess (RAWLock var) st = do
rawSt <- readTVar var
withPoisoned (releaseAppendAccessPure st rawSt) >>= writeTVar var
| Access the state stored in the ' RAWLock ' as a writer .
Does /not/ compose with other ' STM ' transactions .
unsafeAcquireWriteAccess :: IOLike m => RAWLock m st -> m st
unsafeAcquireWriteAccess rawLock@(RAWLock var) = join $ atomically $ do
rawSt <- readTVar var
withPoisoned (acquireWriteAccessPure rawSt) >>= \case
Nothing -> retry
Just (rawSt', mbSt) -> do
writeTVar var rawSt'
case mbSt of
Just st -> return $ return st
Nothing -> return $ unsafeAcquireWriteAccess rawLock
Does /not/ compose with other ' STM ' transactions .
unsafeReleaseWriteAccess
:: IOLike m
=> RAWLock m st
-> m ()
unsafeReleaseWriteAccess (RAWLock var) st = atomically $ do
rawSt <- readTVar var
withPoisoned (releaseWriteAccessPure st rawSt) >>= writeTVar var
newtype Readers = Readers Word
deriving newtype (Eq, Ord, Enum, Num, NoThunks)
| At most one appender
data Appender = NoAppender | Appender
deriving (Generic, NoThunks)
| The lock is implemented by a single ' StrictTVar ' , which stores a
' RAWState ' .
data RAWState st =
ReadAppend !Readers !Appender !st
| A writer ( or more than one ) has arrived . No new readers or a new
| WaitingToWrite !Readers !Appender !st
| No ( more ) readers or appender , the writer has exclusive access .
| Writing
| Poisoned !(AllowThunk SomeException)
deriving (Generic, NoThunks)
| Create an initial , empty , unlocked ' RAWState ' : no readers , no appender ,
emptyRAWState :: st -> RAWState st
emptyRAWState = ReadAppend (Readers 0) NoAppender
Pure internals : transitions between the ' RAWState 's
Pure internals: transitions between the 'RAWState's
acquireReadAccessPure
:: RAWState st -> Except SomeException (Maybe (RAWState st, st))
acquireReadAccessPure = \case
ReadAppend readers appender st
-> return $ Just (ReadAppend (succ readers) appender st, st)
WaitingToWrite {}
-> return Nothing
Writing
-> return Nothing
Poisoned (AllowThunk ex)
-> throwError ex
releaseReadAccessPure
:: RAWState st -> Except SomeException (RAWState st)
releaseReadAccessPure = \case
ReadAppend readers appender st
| 0 <- readers
-> error "releasing a reader without outstanding readers in ReadAppend"
| otherwise
-> return $ ReadAppend (pred readers) appender st
WaitingToWrite readers appender st
| 0 <- readers
-> error "releasing a reader without outstanding readers in WaitingToWrite"
| otherwise
-> return $ WaitingToWrite (pred readers) appender st
Writing
-> error "releasing a reader without outstanding readers in Writing"
Poisoned (AllowThunk ex)
-> throwError ex
acquireAppendAccessPure
:: RAWState st -> Except SomeException (Maybe (RAWState st, st))
acquireAppendAccessPure = \case
ReadAppend readers appender st
| NoAppender <- appender
-> return $ Just (ReadAppend readers Appender st, st)
| otherwise
-> return Nothing
WaitingToWrite {}
-> return Nothing
Writing
-> return Nothing
Poisoned (AllowThunk ex)
-> throwError ex
releaseAppendAccessPure
:: st -> RAWState st -> Except SomeException (RAWState st)
releaseAppendAccessPure st' = \case
ReadAppend readers appender _st
| NoAppender <- appender
-> error "releasing an appender without an outstanding appender in ReadAppend"
| otherwise
-> return $ ReadAppend readers NoAppender st'
WaitingToWrite readers appender _st
| NoAppender <- appender
-> error "releasing an appender without an outstanding appender in WaitingToWrite"
| otherwise
-> return $ WaitingToWrite readers NoAppender st'
Writing
-> error "releasing an appender without an outstanding appender in Writing"
Poisoned (AllowThunk ex)
-> throwError ex
acquireWriteAccessPure
:: RAWState st -> Except SomeException (Maybe (RAWState st, Maybe st))
acquireWriteAccessPure = \case
intermediary ' WaitingToWrite ' state until they have all released .
ReadAppend readers appender st
| 0 <- readers
, NoAppender <- appender
-> return $ Just (Writing, Just st)
| otherwise
-> return $ Just (WaitingToWrite readers appender st, Nothing)
WaitingToWrite readers appender st
| 0 <- readers
, NoAppender <- appender
-> return $ Just (Writing, Just st)
| otherwise
-> return Nothing
Writing
-> return Nothing
Poisoned (AllowThunk ex)
-> throwError ex
releaseWriteAccessPure
:: st -> RAWState st -> Except SomeException (RAWState st)
releaseWriteAccessPure st' = \case
ReadAppend _readers _appender _st
-> error "releasing a writer in ReadAppend"
WaitingToWrite _readers _appender _st
-> error "releasing a writer in WaitingToWrite"
Writing
-> return $ emptyRAWState st'
Poisoned (AllowThunk ex)
-> throwError ex
poisonPure
:: SomeException -> RAWState st -> Except SomeException (RAWState st, Maybe st)
poisonPure ex = \case
ReadAppend _readers _appender st
-> return (Poisoned (AllowThunk ex), Just st)
WaitingToWrite _readers _appender st
-> return (Poisoned (AllowThunk ex), Just st)
Writing
-> return (Poisoned (AllowThunk ex), Nothing)
Poisoned (AllowThunk prevEx)
-> throwError prevEx
|
1721c5089c6db505f2b62861b2ad2b3462b66fb76d0beadec65a413bf0214259 | nmichel/ejpet | ejpet_scanner.erl | -module(ejpet_scanner).
-author('').
-export([tokenize/2]).
-define(is_hexa(C),
(C >= $A andalso C =< $F) orelse
(C >= $a andalso C =< $f) orelse
(C >= $0 andalso C =< $9)).
-record(config, {
string_apply_escape_sequence = true,
string_raw = false
}).
tokenize(Pattern, Options) when is_list(Pattern) ->
tokenize(list_to_binary(Pattern), Options);
tokenize(Pattern, Options) ->
Config = #config{string_apply_escape_sequence = proplists:get_value(string_apply_escape_sequence, Options, true),
string_raw = proplists:get_value(string_raw, Options, false)},
tokenize(Pattern, [state_root], [], Config).
tokenize(<<>>, [state_root | _], Acc, _Config) ->
lists:reverse(Acc);
tokenize(<<${, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [open_curvy_brace | Acc], Config);
tokenize(<<$}, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [close_curvy_brace | Acc], Config);
tokenize(<<$[, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [open_square_brace | Acc], Config);
tokenize(<<$], T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [close_square_brace | Acc], Config);
tokenize(<<$<, $!, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [open_angle_brace_bang | Acc], Config);
tokenize(<<$<, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [open_angle_brace | Acc], Config);
tokenize(<<$!, $>, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [close_angle_brace_bang | Acc], Config);
tokenize(<<$>, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [close_angle_brace | Acc], Config);
tokenize(<<$,, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [coma | Acc], Config);
tokenize(<<$:, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [column | Acc], Config);
tokenize(<<$_, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [underscore | Acc], Config);
tokenize(<<$*, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [star | Acc], Config);
tokenize(<<$(, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [open_paren | Acc], Config);
tokenize(<<$), T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [close_paren | Acc], Config);
tokenize(<<$/, $g, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [slash_g | Acc], Config);
tokenize(<<$\n, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, Acc, Config);
tokenize(<<$\r, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, Acc, Config);
tokenize(<<$\t, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, Acc, Config);
tokenize(<<$\s, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, Acc, Config);
tokenize(<<"true", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [true | Acc], Config);
tokenize(<<"false", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [false | Acc], Config);
tokenize(<<"null", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [null | Acc], Config);
tokenize(<<"string", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [string | Acc], Config);
tokenize(<<"number", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [number | Acc], Config);
tokenize(<<"boolean", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [boolean | Acc], Config);
tokenize(<<"regex", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [regex | Acc], Config);
tokenize(<<$?, $<, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, [{state_capture, []} | State], Acc, Config);
tokenize(<<$#, $", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, [{state_string, ""}, {state_pattern} | State], Acc, Config);
tokenize(<<$", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, [{state_string, ""} | State], Acc, Config);
tokenize(<<$!, $<, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, [{state_capture, []}, state_inject | State], Acc, Config);
tokenize(<<$>, T/binary>>, [{state_capture, Name = [_|_]} | Tail], Acc, Config) ->
tokenize(T, Tail, [{capture, lists:reverse(Name)} | Acc], Config);
tokenize(<<$_, T/binary>>, [{state_capture, Name} | Tail], Acc, Config) ->
tokenize(T, [{state_capture, [$_ | Name]} | Tail], Acc, Config);
tokenize(<<V, T/binary>>, [{state_capture, Name} | Tail], Acc, Config) when V >= $A, V =< $Z ->
tokenize(T, [{state_capture, [V | Name]} | Tail], Acc, Config);
tokenize(<<V, T/binary>>, [{state_capture, Name} | Tail], Acc, Config) when V >= $a, V =< $z ->
tokenize(T, [{state_capture, [V | Name]} | Tail], Acc, Config);
tokenize(<<V, T/binary>>, [{state_capture, Name} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_capture, [V | Name]} | Tail], Acc, Config);
%% state_inject is never on top, except when the wrapped capture has been parsed.
%% We just have to transform the token "capture", into "inject", and we are done.
%%
tokenize(T, [state_inject | Tail], [{capture, Name} | Acc], Config) ->
tokenize(T, Tail, [{inject, Name} | Acc], Config);
tokenize(<<$-, V, T/binary>>, State = [state_root | _], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_number, [V, $-]} | State], Acc, Config);
tokenize(<<$+, V, T/binary>>, State = [state_root | _], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_number, [V, $+]} | State], Acc, Config);
tokenize(<<V, T/binary>>, State = [state_root | _], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_number, [V]} | State], Acc, Config);
tokenize(<<V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_number, [V | Num]} | Tail], Acc, Config);
tokenize(<<$., V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_decimal, [V, $. | Num]} | Tail], Acc, Config);
tokenize(<<$e, $+, V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $e, $0, $. | Num]} | Tail], Acc, Config);
tokenize(<<$e, $-, V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $-, $e, $0, $. | Num]} | Tail], Acc, Config);
tokenize(<<$e, V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $e, $0, $. | Num]} | Tail], Acc, Config);
tokenize(<<$E, $+, V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $E, $0, $. | Num]} | Tail], Acc, Config);
tokenize(<<$E, $-, V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $-, $E, $0, $. | Num]} | Tail], Acc, Config);
tokenize(<<$E, V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $E, $0, $. | Num]} | Tail], Acc, Config);
tokenize(T, [{state_number, Num} | Tail], Acc, Config) ->
tokenize(T, Tail, [{number, list_to_integer(lists:reverse(Num))} | Acc], Config);
tokenize(<<V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_decimal, [V | Num]} | Tail], Acc, Config);
tokenize(<<$e, $+, V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $e | Num]} | Tail], Acc, Config);
tokenize(<<$e, $-, V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $-, $e | Num]} | Tail], Acc, Config);
tokenize(<<$e, V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $e | Num]} | Tail], Acc, Config);
tokenize(<<$E, $+, V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $E | Num]} | Tail], Acc, Config);
tokenize(<<$E, $-, V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $-, $E | Num]} | Tail], Acc, Config);
tokenize(<<$E, V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $E | Num]} | Tail], Acc, Config);
tokenize(T, [{state_decimal, Num} | Tail], Acc, Config) ->
tokenize(T, Tail, [{number, list_to_float(lists:reverse(Num))} | Acc], Config);
tokenize(<<V, T/binary>>, [{state_frac, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V | Num]} | Tail], Acc, Config);
tokenize(T, [{state_frac, Num} | Tail], Acc, Config) ->
tokenize(T, Tail, [{number, list_to_float(lists:reverse(Num))} | Acc], Config);
tokenize(<<$", T/binary>>, [{state_string, String}, {state_pattern} | Tail], Acc, Config) ->
tokenize(T, Tail, [{regex, unicode:characters_to_binary(lists:reverse(String))} | Acc], Config);
tokenize(<<$", T/binary>>, [{state_string, String} | Tail], Acc, Config) ->
tokenize(T, Tail, [{string, unicode:characters_to_binary(lists:reverse(String))} | Acc], Config);
tokenize(<<$\\, T/binary>>, [{state_string, String}, {state_pattern} | Tail], Acc, Config) -> % do not apply escape sequence in regex
tokenize(T, [{state_string, [$\\ | String]}, {state_pattern} | Tail], Acc, Config);
tokenize(<<$\\, $n, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\n | String]} | Tail], Acc, Config);
tokenize(<<$\\, $r, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\r | String]} | Tail], Acc, Config);
tokenize(<<$\\, $t, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\t | String]} | Tail], Acc, Config);
tokenize(<<$\\, $b, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\b | String]} | Tail], Acc, Config);
tokenize(<<$\\, $f, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\f | String]} | Tail], Acc, Config);
tokenize(<<$\\, $s, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\s | String]} | Tail], Acc, Config);
tokenize(<<$\\, $", T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$" | String]} | Tail], Acc, Config);
tokenize(<<$\\, $\\, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\\ | String]} | Tail], Acc, Config);
tokenize(<<$\\, $u, A, B, C, D, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true})
when ?is_hexa(A), ?is_hexa(B), ?is_hexa(C), ?is_hexa(D) ->
CodePoint is * NOT * tested nor filtered .
tokenize(T, [{state_string, [CodePoint | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#20, C < 16#D800 ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C > 16#DFFF, C < 16#FDD0 ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C > 16#FDEF, C < 16#FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#10000, C < 16#1FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#20000, C < 16#2FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#30000, C < 16#3FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#40000, C < 16#4FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#50000, C < 16#5FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#60000, C < 16#6FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#70000, C < 16#7FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#80000, C < 16#8FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#90000, C < 16#9FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#A0000, C < 16#AFFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#B0000, C < 16#BFFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#C0000, C < 16#CFFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#D0000, C < 16#DFFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#E0000, C < 16#EFFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#F0000, C < 16#FFFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#100000, C < 16#10FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config).
| null | https://raw.githubusercontent.com/nmichel/ejpet/f2cafee31582d1e538cd3623edf249d0d3fc1162/src/ejpet_scanner.erl | erlang | state_inject is never on top, except when the wrapped capture has been parsed.
We just have to transform the token "capture", into "inject", and we are done.
do not apply escape sequence in regex | -module(ejpet_scanner).
-author('').
-export([tokenize/2]).
-define(is_hexa(C),
(C >= $A andalso C =< $F) orelse
(C >= $a andalso C =< $f) orelse
(C >= $0 andalso C =< $9)).
-record(config, {
string_apply_escape_sequence = true,
string_raw = false
}).
tokenize(Pattern, Options) when is_list(Pattern) ->
tokenize(list_to_binary(Pattern), Options);
tokenize(Pattern, Options) ->
Config = #config{string_apply_escape_sequence = proplists:get_value(string_apply_escape_sequence, Options, true),
string_raw = proplists:get_value(string_raw, Options, false)},
tokenize(Pattern, [state_root], [], Config).
tokenize(<<>>, [state_root | _], Acc, _Config) ->
lists:reverse(Acc);
tokenize(<<${, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [open_curvy_brace | Acc], Config);
tokenize(<<$}, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [close_curvy_brace | Acc], Config);
tokenize(<<$[, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [open_square_brace | Acc], Config);
tokenize(<<$], T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [close_square_brace | Acc], Config);
tokenize(<<$<, $!, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [open_angle_brace_bang | Acc], Config);
tokenize(<<$<, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [open_angle_brace | Acc], Config);
tokenize(<<$!, $>, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [close_angle_brace_bang | Acc], Config);
tokenize(<<$>, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [close_angle_brace | Acc], Config);
tokenize(<<$,, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [coma | Acc], Config);
tokenize(<<$:, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [column | Acc], Config);
tokenize(<<$_, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [underscore | Acc], Config);
tokenize(<<$*, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [star | Acc], Config);
tokenize(<<$(, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [open_paren | Acc], Config);
tokenize(<<$), T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [close_paren | Acc], Config);
tokenize(<<$/, $g, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [slash_g | Acc], Config);
tokenize(<<$\n, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, Acc, Config);
tokenize(<<$\r, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, Acc, Config);
tokenize(<<$\t, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, Acc, Config);
tokenize(<<$\s, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, Acc, Config);
tokenize(<<"true", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [true | Acc], Config);
tokenize(<<"false", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [false | Acc], Config);
tokenize(<<"null", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [null | Acc], Config);
tokenize(<<"string", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [string | Acc], Config);
tokenize(<<"number", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [number | Acc], Config);
tokenize(<<"boolean", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [boolean | Acc], Config);
tokenize(<<"regex", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, State, [regex | Acc], Config);
tokenize(<<$?, $<, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, [{state_capture, []} | State], Acc, Config);
tokenize(<<$#, $", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, [{state_string, ""}, {state_pattern} | State], Acc, Config);
tokenize(<<$", T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, [{state_string, ""} | State], Acc, Config);
tokenize(<<$!, $<, T/binary>>, State = [state_root | _], Acc, Config) ->
tokenize(T, [{state_capture, []}, state_inject | State], Acc, Config);
tokenize(<<$>, T/binary>>, [{state_capture, Name = [_|_]} | Tail], Acc, Config) ->
tokenize(T, Tail, [{capture, lists:reverse(Name)} | Acc], Config);
tokenize(<<$_, T/binary>>, [{state_capture, Name} | Tail], Acc, Config) ->
tokenize(T, [{state_capture, [$_ | Name]} | Tail], Acc, Config);
tokenize(<<V, T/binary>>, [{state_capture, Name} | Tail], Acc, Config) when V >= $A, V =< $Z ->
tokenize(T, [{state_capture, [V | Name]} | Tail], Acc, Config);
tokenize(<<V, T/binary>>, [{state_capture, Name} | Tail], Acc, Config) when V >= $a, V =< $z ->
tokenize(T, [{state_capture, [V | Name]} | Tail], Acc, Config);
tokenize(<<V, T/binary>>, [{state_capture, Name} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_capture, [V | Name]} | Tail], Acc, Config);
tokenize(T, [state_inject | Tail], [{capture, Name} | Acc], Config) ->
tokenize(T, Tail, [{inject, Name} | Acc], Config);
tokenize(<<$-, V, T/binary>>, State = [state_root | _], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_number, [V, $-]} | State], Acc, Config);
tokenize(<<$+, V, T/binary>>, State = [state_root | _], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_number, [V, $+]} | State], Acc, Config);
tokenize(<<V, T/binary>>, State = [state_root | _], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_number, [V]} | State], Acc, Config);
tokenize(<<V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_number, [V | Num]} | Tail], Acc, Config);
tokenize(<<$., V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_decimal, [V, $. | Num]} | Tail], Acc, Config);
tokenize(<<$e, $+, V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $e, $0, $. | Num]} | Tail], Acc, Config);
tokenize(<<$e, $-, V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $-, $e, $0, $. | Num]} | Tail], Acc, Config);
tokenize(<<$e, V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $e, $0, $. | Num]} | Tail], Acc, Config);
tokenize(<<$E, $+, V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $E, $0, $. | Num]} | Tail], Acc, Config);
tokenize(<<$E, $-, V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $-, $E, $0, $. | Num]} | Tail], Acc, Config);
tokenize(<<$E, V, T/binary>>, [{state_number, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $E, $0, $. | Num]} | Tail], Acc, Config);
tokenize(T, [{state_number, Num} | Tail], Acc, Config) ->
tokenize(T, Tail, [{number, list_to_integer(lists:reverse(Num))} | Acc], Config);
tokenize(<<V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_decimal, [V | Num]} | Tail], Acc, Config);
tokenize(<<$e, $+, V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $e | Num]} | Tail], Acc, Config);
tokenize(<<$e, $-, V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $-, $e | Num]} | Tail], Acc, Config);
tokenize(<<$e, V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $e | Num]} | Tail], Acc, Config);
tokenize(<<$E, $+, V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $E | Num]} | Tail], Acc, Config);
tokenize(<<$E, $-, V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $-, $E | Num]} | Tail], Acc, Config);
tokenize(<<$E, V, T/binary>>, [{state_decimal, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V, $+, $E | Num]} | Tail], Acc, Config);
tokenize(T, [{state_decimal, Num} | Tail], Acc, Config) ->
tokenize(T, Tail, [{number, list_to_float(lists:reverse(Num))} | Acc], Config);
tokenize(<<V, T/binary>>, [{state_frac, Num} | Tail], Acc, Config) when V >= $0, V =< $9 ->
tokenize(T, [{state_frac, [V | Num]} | Tail], Acc, Config);
tokenize(T, [{state_frac, Num} | Tail], Acc, Config) ->
tokenize(T, Tail, [{number, list_to_float(lists:reverse(Num))} | Acc], Config);
tokenize(<<$", T/binary>>, [{state_string, String}, {state_pattern} | Tail], Acc, Config) ->
tokenize(T, Tail, [{regex, unicode:characters_to_binary(lists:reverse(String))} | Acc], Config);
tokenize(<<$", T/binary>>, [{state_string, String} | Tail], Acc, Config) ->
tokenize(T, Tail, [{string, unicode:characters_to_binary(lists:reverse(String))} | Acc], Config);
tokenize(T, [{state_string, [$\\ | String]}, {state_pattern} | Tail], Acc, Config);
tokenize(<<$\\, $n, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\n | String]} | Tail], Acc, Config);
tokenize(<<$\\, $r, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\r | String]} | Tail], Acc, Config);
tokenize(<<$\\, $t, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\t | String]} | Tail], Acc, Config);
tokenize(<<$\\, $b, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\b | String]} | Tail], Acc, Config);
tokenize(<<$\\, $f, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\f | String]} | Tail], Acc, Config);
tokenize(<<$\\, $s, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\s | String]} | Tail], Acc, Config);
tokenize(<<$\\, $", T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$" | String]} | Tail], Acc, Config);
tokenize(<<$\\, $\\, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true}) ->
tokenize(T, [{state_string, [$\\ | String]} | Tail], Acc, Config);
tokenize(<<$\\, $u, A, B, C, D, T/binary>>, [{state_string, String} | Tail], Acc, Config=#config{string_apply_escape_sequence = true})
when ?is_hexa(A), ?is_hexa(B), ?is_hexa(C), ?is_hexa(D) ->
CodePoint is * NOT * tested nor filtered .
tokenize(T, [{state_string, [CodePoint | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#20, C < 16#D800 ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C > 16#DFFF, C < 16#FDD0 ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C > 16#FDEF, C < 16#FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#10000, C < 16#1FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#20000, C < 16#2FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#30000, C < 16#3FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#40000, C < 16#4FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#50000, C < 16#5FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#60000, C < 16#6FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#70000, C < 16#7FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#80000, C < 16#8FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#90000, C < 16#9FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#A0000, C < 16#AFFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#B0000, C < 16#BFFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#C0000, C < 16#CFFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#D0000, C < 16#DFFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#E0000, C < 16#EFFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#F0000, C < 16#FFFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config);
tokenize(<<C/utf8, T/binary>>, [{state_string, String} | Tail], Acc, Config) when C >= 16#100000, C < 16#10FFFE ->
tokenize(T, [{state_string, [C | String]} | Tail], Acc, Config).
|
e4c5314c2784658774b8b748c9f6eff93d597874183789bc6681d7c0251eef0b | basho-labs/riak_nagios | check_node_tests.erl | -module(check_node_tests).
-include_lib("eunit/include/eunit.hrl").
-include_lib("eunit_helper/include/eunit_helper.hrl").
-export([run/2]).
run(_, _) ->
erlang:error(check_node_tests).
cleanup_each(_, _) ->
net_kernel:stop().
should_fail_when_using_an_unknown_check() ->
Options = [{check, unknown_check}] ++ default_options(),
?assertMatch({unknown, "Unknown check" ++ _, _}, check_node:run(Options, [], [])).
should_fail_gracefully_when_given_a_bad_cookie() ->
Options = [{cookie, "badcookie"}] ++ default_options(),
?assertMatch({unknown, "~w: ~w", [badarg, _]}, check_node:run(Options, [], [])).
should_fail_gracefully_when_check_crashes() ->
Checks = [{node_tests, check_node_tests}],
Options = [{check, node_tests}] ++ default_options(),
?assertMatch({unknown, "~w: ~w", [check_node_tests, _]}, check_node:run(Options, [], Checks)).
default_options() ->
[{name, 't@127.0.0.1'}, {node, 't@127.0.0.1'}, {cookie, c}].
| null | https://raw.githubusercontent.com/basho-labs/riak_nagios/9cc200e8cec7d6f71f445df5a7bd195f7a772e8a/test/check_node_tests.erl | erlang | -module(check_node_tests).
-include_lib("eunit/include/eunit.hrl").
-include_lib("eunit_helper/include/eunit_helper.hrl").
-export([run/2]).
run(_, _) ->
erlang:error(check_node_tests).
cleanup_each(_, _) ->
net_kernel:stop().
should_fail_when_using_an_unknown_check() ->
Options = [{check, unknown_check}] ++ default_options(),
?assertMatch({unknown, "Unknown check" ++ _, _}, check_node:run(Options, [], [])).
should_fail_gracefully_when_given_a_bad_cookie() ->
Options = [{cookie, "badcookie"}] ++ default_options(),
?assertMatch({unknown, "~w: ~w", [badarg, _]}, check_node:run(Options, [], [])).
should_fail_gracefully_when_check_crashes() ->
Checks = [{node_tests, check_node_tests}],
Options = [{check, node_tests}] ++ default_options(),
?assertMatch({unknown, "~w: ~w", [check_node_tests, _]}, check_node:run(Options, [], Checks)).
default_options() ->
[{name, 't@127.0.0.1'}, {node, 't@127.0.0.1'}, {cookie, c}].
| |
8b59d350a4eea7b514863e7ad5ffeaec8c67d0c47f35976ef025bde8f7c29082 | jyh/metaprl | czf_itt_set.ml | doc <:doc<
@module[Czf_itt_set]
The @tt{Czf_itt_set} module provides the basic definition
of sets and their elements. The @tt{set} term denotes
the type of all sets, defined using the $W$-type in
the module @hrefmodule[Itt_w], as follows:
$$@set @equiv @w{T; @univ{1}; T}.$$
That is, the @emph{sets} are pairs of a type $T @in @univ{1}$,
and a function $T @rightarrow @set$ that specifies the
elements of the set. Note that the type $T$ can be @emph{any}
type in $@univ{1}$; equality of sets is is general undecidable, and
their members can't necessarily be enumerated. This is a
@emph{constructive} theory, not a decidable theory. Of course,
there will be special cases where equality of sets is decidable.
The sets are defined with the terms $@collect{x; T; f[x]}$, where
$T$ is a type in $@univ{1}$, and $f[x]$ is a set for any index $x @in T$.
The sets $f[x]$ are the @emph{elements} of the set, and $T$ is
the a type used as their index. For example, the following set
is empty.
$$@{@} = @collect{x; <<void>>; x}$$
@noindent
The following set is the singleton set containing the empty
set.
$$@{@{@}@} = @collect{x; @unit; @{@}}$$
@noindent
The following set is equivalent.
$$@{@{@}@}' = @collect{x; @int; @{@}}$$
This raises an important point about equality.
The membership equality defined ion $W$-types
requires equality on the index type $T$ as well as the element
function $f$. The two sets $@{@{@}@}$ and $@{@{@}@}'$ are
@emph{not} equal in this type because they have the provably
different index types $@unit$ and $@int$, even though they have
the same elements.
One solution to this problem would be to use a quotient
construction using the quotient type defined in the
@hrefmodule[Itt_quotient] module. The @hrefmodule[Czf_itt_eq]
module defines extensional set equality $@equiv_{@i{ext}}$, and
we could potentially define the ``real'' sets with the following
type definition.
$$@i{real@_sets} @equiv (@quot{set; s_1; s_2; s_1 @equiv_{@i{ext}} s_2})$$
This type definition would require explicit functionality
reasoning on all functions over @i{real_set}. This construction,
however, makes these functionality proofs impossible because it
omits the computational content of the equivalence judgment,
which is a necessary part of the proof.
Alternative quotient formulations may be possible, but we have not
pursued this direction extensively. Instead, we introduce set
equality in the @hrefmodule[Czf_itt_eq] module, together with
explicit functionality predicates for set operators. In addition,
we prove the functionality properties for all the primitive
set operations.
One avenue for improvement in this theory would be to stratify
the set types to arbitrary type universes $@univ{i}$, which would
allow for higher-order reasoning on sets, classes, etc.
@docoff
----------------------------------------------------------------
@begin[license]
This file is part of MetaPRL, a modular, higher order
logical framework that provides a logical programming
environment for OCaml and other languages.
See the file doc/htmlman/default.html or visit /
for more information.
Copyright (C) 1998 Jason Hickey, Cornell University
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
Author: Jason Hickey
@email{}
@end[license]
>>
doc <:doc<
@parents
>>
extends Itt_theory
extends Itt_eta
doc docoff
extends Czf_itt_comment
open Lm_debug
open Lm_printf
open Basic_tactics
open Itt_dfun
open Itt_struct
open Itt_w
let debug_czf_set =
create_debug (**)
{ debug_name = "czf_set";
debug_description = "display czf_set operations";
debug_value = false
}
(************************************************************************
* TERMS *
************************************************************************)
doc <:doc<
@terms
The @tt{set} term defines the type of sets; the @tt{collect}
terms are the individual sets. The @tt[isset] term is the
well-formedness judgment for the $@set$ type. The @tt[set_ind] term
is the induction combinator for computation over sets. The
@i{s} argument represents the set; @i{T} is it's index type,
@i{f} is it's function value, and @i{g} is used to perform
recursive computations on the children.
>>
declare set
declare isset{'s}
declare collect{'T; x. 'a['x]}
declare set_ind{'s; T, f, g. 'b['T; 'f; 'g]}
doc docoff
(************************************************************************
* DEFINITIONS *
************************************************************************)
doc <:doc<
@rewrites
The following four rewrites give the primitive definitions
of the set constructions from the terms in the @Nuprl type theory.
>>
prim_rw unfold_set : set <--> w{univ[1:l]; x. 'x}
prim_rw unfold_isset : isset{'s} <--> ('s = 's in set)
prim_rw unfold_collect : collect{'T; x. 'a['x]} <--> tree{'T; lambda{x. 'a['x]}}
prim_rw unfold_set_ind : set_ind{'s; x, f, g. 'b['x; 'f; 'g]} <-->
tree_ind{'s; x, f, g. 'b['x; 'f; 'g]}
doc <:doc<
The @hrefterm[set_ind] term performs a pattern match;
the normal reduction sequence can be derived from the
computational behavior of the @hrefterm[tree_ind] term.
>>
interactive_rw reduce_set_ind {| reduce |} :
set_ind{collect{'T; x. 'A['x]}; a, f, g. 'b['a; 'f; 'g]}
<--> 'b['T; lambda{x. 'A['x]}; lambda{a2. set_ind{.'A['a2]; a, f, g. 'b['a; 'f; 'g]}}]
doc docoff
let fold_set = makeFoldC << set >> unfold_set
let fold_isset = makeFoldC << isset{'t} >> unfold_isset
let fold_collect = makeFoldC << collect{'T; x. 'a['x]} >> unfold_collect
let fold_set_ind = makeFoldC << set_ind{'s; a, f, g. 'b['a; 'f; 'g]} >> unfold_set_ind
(************************************************************************
* DISPLAY FORMS *
************************************************************************)
dform set_df : except_mode[src] :: set =
`"set"
dform isset_df : parens :: except_mode[src] :: "prec"[prec_apply] :: isset{'s} =
slot{'s} `" set"
dform collect_df : parens :: except_mode[src] :: "prec"[prec_apply] :: collect{'T; x. 'a} =
szone pushm[3] `"collect" " " slot{'x} `":" " " slot{'T} `"." hspace slot{'a} popm ezone
dform set_ind_df : parens :: "prec"[prec_tree_ind] :: set_ind{'z; a, f, g. 'body} =
szone pushm[3]
pushm[3] `"let set(" slot{'a} `", " slot{'f} `")." slot{'g} `" = " slot{'z} `" in" popm hspace
slot{'body} popm ezone
(************************************************************************
* RELATION TO ITT *
************************************************************************)
doc <:doc<
@rules
@modsubsection{Typehood and equality}
The @hrefterm[set] term is a type in the @Nuprl type theory.
The @tt{equal_set} and @tt[isset_assum] rules define the
@tt[isset] well-formedness judgment. The @tt[isset_assum]
is added to the @hreftactic[trivialT] tactic for use as
default reasoning.
>>
interactive set_type {| intro [] |} :
sequent { <H> >- "type"{set} }
* Equality from sethood .
* Equality from sethood.
*)
interactive equal_set :
sequent { <H> >- isset{'s} } -->
sequent { <H> >- 's = 's in set }
(*
* By assumption.
*)
interactive isset_assum {| nth_hyp |} 'H :
sequent { <H>; x: set; <J['x]> >- isset{'x} }
doc <:doc<
The @hrefterm[collect] terms are well-formed, if their
index type $T$ is a type in $@univ{1}$, and their element function
$a$ produces a set for any argument $x @in T$.
>>
interactive isset_collect {| intro [] |} :
sequent { <H> >- 'T = 'T in univ[1:l] } -->
sequent { <H>; y: 'T >- isset{'a['y]} } -->
sequent { <H> >- isset{collect{'T; x. 'a['x]}} }
interactive isset_collect2 {| intro [] |} :
sequent { <H> >- 'T = 'T in univ[1:l] } -->
sequent { <H>; y: 'T >- isset{'a['y]} } -->
sequent { <H> >- collect{'T; x. 'a['x]} IN set }
doc <:doc<
@docoff
(* This is how a set is constructed. *)
>>
interactive isset_apply {| intro [] |} :
sequent { <H> >- ('f 'a) IN set } -->
sequent { <H> >- isset{.'f 'a} }
doc <:doc<
@modsubsection{Elimination}
The elimination form performs induction on the
assumption $a@colon @set$. The inductive argument is this:
goal $C$ is true for any set $a$ if it is true for some
set $@collect{x; T; f(x)}$ where the induction hypothesis
is true on every child $f(x)$ for $x @in T$. By definition,
induction requires that tree representing the set be well-founded
(which is true for all $W$-types).
>>
interactive set_elim {| elim [ThinOption thinT] |} 'H :
sequent { <H>;
a: set;
<J['a]>;
T: univ[1:l];
f: 'T -> set;
w: (all x : 'T. 'C['f 'x]);
z: isset{collect{'T; x. 'f 'x}}
>- 'C[collect{'T; x. 'f 'x}]
} -->
sequent { <H>; a: set; <J['a]> >- 'C['a] }
doc <:doc<
@docoff
The next two rules allow any set argument to be replaced with
an } argument . These rules are never used .
an @tt{collect} argument. These rules are never used. *)
>>
interactive set_split_hyp 'H 's (bind{v. 'A['v]}) :
sequent { <H>; x: 'A['s]; <J['x]> >- isset{'s} } -->
sequent { <H>; x: 'A['s]; <J['x]>; z: set >- "type"{'A['z]} } -->
sequent { <H>;
x: 'A['s];
<J['x]>;
T: univ[1:l];
f: 'T -> set;
z: 'A[collect{'T; y. 'f 'y}]
>- 'C['z] } -->
sequent { <H>; x: 'A['s]; <J['x]> >- 'C['x] }
interactive set_split_concl 's (bind{v. 'C['v]}) :
sequent { <H> >- isset{'s} } -->
sequent { <H>; z: set >- "type"{'C['z]} } -->
sequent { <H>; T: univ[1:l]; f: 'T -> set >- 'C[collect{'T; y. 'f 'y}] } -->
sequent { <H> >- 'C['s] }
doc <:doc<
@modsubsection{Combinator equality}
The induction combinator computes a value of type $T$ if its
argument $z$ is a set, and the body $b[z, f, g]$ computes a value
of type $T$, for any type $z @in @univ{1}$, any function
$f @in z @rightarrow @set$, and a recursive invocation
$g @in x@colon @univ{1} @rightarrow x @rightarrow T$.
>>
interactive set_ind_equality2 {| intro [] |} :
["wf"] sequent { <H> >- 'z1 = 'z2 in set } -->
["main"] sequent { <H>; a1: univ[1:l]; f1: 'a1 -> set; g1: x: 'a1 -> 'T >-
'body1['a1; 'f1; 'g1] = 'body2['a1; 'f1; 'g1] in 'T } -->
sequent { <H> >- set_ind{'z1; a1, f1, g1. 'body1['a1; 'f1; 'g1]}
= set_ind{'z2; a2, f2, g2. 'body2['a2; 'f2; 'g2]}
in 'T }
doc docoff
(************************************************************************
* PRIMITIVES *
************************************************************************)
(*
* Isset.
*)
let isset_term = << isset{'s} >>
let isset_opname = opname_of_term isset_term
let is_isset_term = is_dep0_term isset_opname
let mk_isset_term = mk_dep0_term isset_opname
let dest_isset = dest_dep0_term isset_opname
let set_ind_term = << set_ind{'s; T, f, g. 'B['T; 'f; 'g]} >>
let set_ind_opname = opname_of_term set_ind_term
let is_set_ind_term = is_dep0_dep3_term set_ind_opname
let mk_set_ind_term = mk_dep0_dep3_term set_ind_opname
let dest_set_ind = dest_dep0_dep3_term set_ind_opname
(************************************************************************
* OTHER TACTICS *
************************************************************************)
(*
* Typehood of isset{'s1}
*)
let d_isset_typeT =
rw (addrC [Subterm 1] unfold_isset) 0 thenT dT 0
let isset_type_term = << "type"{isset{'s1}} >>
let resource intro += (isset_type_term, wrap_intro d_isset_typeT)
(*
* Equal sets.
*)
let eqSetT = equal_set
(*
* Assumption.
*)
let setAssumT = isset_assum
(*
* Split a set in a hyp or concl.
*)
let splitT t i = funT (fun p ->
if i = 0 then
let bind = var_subst_to_bind (Sequent.concl p) t in
set_split_concl t bind
thenLT [addHiddenLabelT "wf";
addHiddenLabelT "wf";
addHiddenLabelT "main"]
else
let hyp = nth_hyp p i in
let bind = var_subst_to_bind hyp t in
set_split_hyp (get_pos_hyp_num p i) t bind
thenLT [addHiddenLabelT "wf";
addHiddenLabelT "wf";
addHiddenLabelT "main"])
(*
* -*-
* Local Variables:
* Caml-master: "editor.run"
* End:
* -*-
*)
| null | https://raw.githubusercontent.com/jyh/metaprl/51ba0bbbf409ecb7f96f5abbeb91902fdec47a19/theories/czf/czf_itt_set.ml | ocaml |
***********************************************************************
* TERMS *
***********************************************************************
***********************************************************************
* DEFINITIONS *
***********************************************************************
***********************************************************************
* DISPLAY FORMS *
***********************************************************************
***********************************************************************
* RELATION TO ITT *
***********************************************************************
* By assumption.
This is how a set is constructed.
***********************************************************************
* PRIMITIVES *
***********************************************************************
* Isset.
***********************************************************************
* OTHER TACTICS *
***********************************************************************
* Typehood of isset{'s1}
* Equal sets.
* Assumption.
* Split a set in a hyp or concl.
* -*-
* Local Variables:
* Caml-master: "editor.run"
* End:
* -*-
| doc <:doc<
@module[Czf_itt_set]
The @tt{Czf_itt_set} module provides the basic definition
of sets and their elements. The @tt{set} term denotes
the type of all sets, defined using the $W$-type in
the module @hrefmodule[Itt_w], as follows:
$$@set @equiv @w{T; @univ{1}; T}.$$
That is, the @emph{sets} are pairs of a type $T @in @univ{1}$,
and a function $T @rightarrow @set$ that specifies the
elements of the set. Note that the type $T$ can be @emph{any}
type in $@univ{1}$; equality of sets is is general undecidable, and
their members can't necessarily be enumerated. This is a
@emph{constructive} theory, not a decidable theory. Of course,
there will be special cases where equality of sets is decidable.
The sets are defined with the terms $@collect{x; T; f[x]}$, where
$T$ is a type in $@univ{1}$, and $f[x]$ is a set for any index $x @in T$.
The sets $f[x]$ are the @emph{elements} of the set, and $T$ is
the a type used as their index. For example, the following set
is empty.
$$@{@} = @collect{x; <<void>>; x}$$
@noindent
The following set is the singleton set containing the empty
set.
$$@{@{@}@} = @collect{x; @unit; @{@}}$$
@noindent
The following set is equivalent.
$$@{@{@}@}' = @collect{x; @int; @{@}}$$
This raises an important point about equality.
The membership equality defined ion $W$-types
requires equality on the index type $T$ as well as the element
function $f$. The two sets $@{@{@}@}$ and $@{@{@}@}'$ are
@emph{not} equal in this type because they have the provably
different index types $@unit$ and $@int$, even though they have
the same elements.
One solution to this problem would be to use a quotient
construction using the quotient type defined in the
@hrefmodule[Itt_quotient] module. The @hrefmodule[Czf_itt_eq]
module defines extensional set equality $@equiv_{@i{ext}}$, and
we could potentially define the ``real'' sets with the following
type definition.
$$@i{real@_sets} @equiv (@quot{set; s_1; s_2; s_1 @equiv_{@i{ext}} s_2})$$
This type definition would require explicit functionality
reasoning on all functions over @i{real_set}. This construction,
however, makes these functionality proofs impossible because it
omits the computational content of the equivalence judgment,
which is a necessary part of the proof.
Alternative quotient formulations may be possible, but we have not
pursued this direction extensively. Instead, we introduce set
equality in the @hrefmodule[Czf_itt_eq] module, together with
explicit functionality predicates for set operators. In addition,
we prove the functionality properties for all the primitive
set operations.
One avenue for improvement in this theory would be to stratify
the set types to arbitrary type universes $@univ{i}$, which would
allow for higher-order reasoning on sets, classes, etc.
@docoff
----------------------------------------------------------------
@begin[license]
This file is part of MetaPRL, a modular, higher order
logical framework that provides a logical programming
environment for OCaml and other languages.
See the file doc/htmlman/default.html or visit /
for more information.
Copyright (C) 1998 Jason Hickey, Cornell University
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
Author: Jason Hickey
@email{}
@end[license]
>>
doc <:doc<
@parents
>>
extends Itt_theory
extends Itt_eta
doc docoff
extends Czf_itt_comment
open Lm_debug
open Lm_printf
open Basic_tactics
open Itt_dfun
open Itt_struct
open Itt_w
let debug_czf_set =
{ debug_name = "czf_set";
debug_description = "display czf_set operations";
debug_value = false
}
doc <:doc<
@terms
The @tt{set} term defines the type of sets; the @tt{collect}
terms are the individual sets. The @tt[isset] term is the
well-formedness judgment for the $@set$ type. The @tt[set_ind] term
is the induction combinator for computation over sets. The
@i{s} argument represents the set; @i{T} is it's index type,
@i{f} is it's function value, and @i{g} is used to perform
recursive computations on the children.
>>
declare set
declare isset{'s}
declare collect{'T; x. 'a['x]}
declare set_ind{'s; T, f, g. 'b['T; 'f; 'g]}
doc docoff
doc <:doc<
@rewrites
The following four rewrites give the primitive definitions
of the set constructions from the terms in the @Nuprl type theory.
>>
prim_rw unfold_set : set <--> w{univ[1:l]; x. 'x}
prim_rw unfold_isset : isset{'s} <--> ('s = 's in set)
prim_rw unfold_collect : collect{'T; x. 'a['x]} <--> tree{'T; lambda{x. 'a['x]}}
prim_rw unfold_set_ind : set_ind{'s; x, f, g. 'b['x; 'f; 'g]} <-->
tree_ind{'s; x, f, g. 'b['x; 'f; 'g]}
doc <:doc<
The @hrefterm[set_ind] term performs a pattern match;
the normal reduction sequence can be derived from the
computational behavior of the @hrefterm[tree_ind] term.
>>
interactive_rw reduce_set_ind {| reduce |} :
set_ind{collect{'T; x. 'A['x]}; a, f, g. 'b['a; 'f; 'g]}
<--> 'b['T; lambda{x. 'A['x]}; lambda{a2. set_ind{.'A['a2]; a, f, g. 'b['a; 'f; 'g]}}]
doc docoff
let fold_set = makeFoldC << set >> unfold_set
let fold_isset = makeFoldC << isset{'t} >> unfold_isset
let fold_collect = makeFoldC << collect{'T; x. 'a['x]} >> unfold_collect
let fold_set_ind = makeFoldC << set_ind{'s; a, f, g. 'b['a; 'f; 'g]} >> unfold_set_ind
dform set_df : except_mode[src] :: set =
`"set"
dform isset_df : parens :: except_mode[src] :: "prec"[prec_apply] :: isset{'s} =
slot{'s} `" set"
dform collect_df : parens :: except_mode[src] :: "prec"[prec_apply] :: collect{'T; x. 'a} =
szone pushm[3] `"collect" " " slot{'x} `":" " " slot{'T} `"." hspace slot{'a} popm ezone
dform set_ind_df : parens :: "prec"[prec_tree_ind] :: set_ind{'z; a, f, g. 'body} =
szone pushm[3]
pushm[3] `"let set(" slot{'a} `", " slot{'f} `")." slot{'g} `" = " slot{'z} `" in" popm hspace
slot{'body} popm ezone
doc <:doc<
@rules
@modsubsection{Typehood and equality}
The @hrefterm[set] term is a type in the @Nuprl type theory.
The @tt{equal_set} and @tt[isset_assum] rules define the
@tt[isset] well-formedness judgment. The @tt[isset_assum]
is added to the @hreftactic[trivialT] tactic for use as
default reasoning.
>>
interactive set_type {| intro [] |} :
sequent { <H> >- "type"{set} }
* Equality from sethood .
* Equality from sethood.
*)
interactive equal_set :
sequent { <H> >- isset{'s} } -->
sequent { <H> >- 's = 's in set }
interactive isset_assum {| nth_hyp |} 'H :
sequent { <H>; x: set; <J['x]> >- isset{'x} }
doc <:doc<
The @hrefterm[collect] terms are well-formed, if their
index type $T$ is a type in $@univ{1}$, and their element function
$a$ produces a set for any argument $x @in T$.
>>
interactive isset_collect {| intro [] |} :
sequent { <H> >- 'T = 'T in univ[1:l] } -->
sequent { <H>; y: 'T >- isset{'a['y]} } -->
sequent { <H> >- isset{collect{'T; x. 'a['x]}} }
interactive isset_collect2 {| intro [] |} :
sequent { <H> >- 'T = 'T in univ[1:l] } -->
sequent { <H>; y: 'T >- isset{'a['y]} } -->
sequent { <H> >- collect{'T; x. 'a['x]} IN set }
doc <:doc<
@docoff
>>
interactive isset_apply {| intro [] |} :
sequent { <H> >- ('f 'a) IN set } -->
sequent { <H> >- isset{.'f 'a} }
doc <:doc<
@modsubsection{Elimination}
The elimination form performs induction on the
assumption $a@colon @set$. The inductive argument is this:
goal $C$ is true for any set $a$ if it is true for some
set $@collect{x; T; f(x)}$ where the induction hypothesis
is true on every child $f(x)$ for $x @in T$. By definition,
induction requires that tree representing the set be well-founded
(which is true for all $W$-types).
>>
interactive set_elim {| elim [ThinOption thinT] |} 'H :
sequent { <H>;
a: set;
<J['a]>;
T: univ[1:l];
f: 'T -> set;
w: (all x : 'T. 'C['f 'x]);
z: isset{collect{'T; x. 'f 'x}}
>- 'C[collect{'T; x. 'f 'x}]
} -->
sequent { <H>; a: set; <J['a]> >- 'C['a] }
doc <:doc<
@docoff
The next two rules allow any set argument to be replaced with
an } argument . These rules are never used .
an @tt{collect} argument. These rules are never used. *)
>>
interactive set_split_hyp 'H 's (bind{v. 'A['v]}) :
sequent { <H>; x: 'A['s]; <J['x]> >- isset{'s} } -->
sequent { <H>; x: 'A['s]; <J['x]>; z: set >- "type"{'A['z]} } -->
sequent { <H>;
x: 'A['s];
<J['x]>;
T: univ[1:l];
f: 'T -> set;
z: 'A[collect{'T; y. 'f 'y}]
>- 'C['z] } -->
sequent { <H>; x: 'A['s]; <J['x]> >- 'C['x] }
interactive set_split_concl 's (bind{v. 'C['v]}) :
sequent { <H> >- isset{'s} } -->
sequent { <H>; z: set >- "type"{'C['z]} } -->
sequent { <H>; T: univ[1:l]; f: 'T -> set >- 'C[collect{'T; y. 'f 'y}] } -->
sequent { <H> >- 'C['s] }
doc <:doc<
@modsubsection{Combinator equality}
The induction combinator computes a value of type $T$ if its
argument $z$ is a set, and the body $b[z, f, g]$ computes a value
of type $T$, for any type $z @in @univ{1}$, any function
$f @in z @rightarrow @set$, and a recursive invocation
$g @in x@colon @univ{1} @rightarrow x @rightarrow T$.
>>
interactive set_ind_equality2 {| intro [] |} :
["wf"] sequent { <H> >- 'z1 = 'z2 in set } -->
["main"] sequent { <H>; a1: univ[1:l]; f1: 'a1 -> set; g1: x: 'a1 -> 'T >-
'body1['a1; 'f1; 'g1] = 'body2['a1; 'f1; 'g1] in 'T } -->
sequent { <H> >- set_ind{'z1; a1, f1, g1. 'body1['a1; 'f1; 'g1]}
= set_ind{'z2; a2, f2, g2. 'body2['a2; 'f2; 'g2]}
in 'T }
doc docoff
let isset_term = << isset{'s} >>
let isset_opname = opname_of_term isset_term
let is_isset_term = is_dep0_term isset_opname
let mk_isset_term = mk_dep0_term isset_opname
let dest_isset = dest_dep0_term isset_opname
let set_ind_term = << set_ind{'s; T, f, g. 'B['T; 'f; 'g]} >>
let set_ind_opname = opname_of_term set_ind_term
let is_set_ind_term = is_dep0_dep3_term set_ind_opname
let mk_set_ind_term = mk_dep0_dep3_term set_ind_opname
let dest_set_ind = dest_dep0_dep3_term set_ind_opname
let d_isset_typeT =
rw (addrC [Subterm 1] unfold_isset) 0 thenT dT 0
let isset_type_term = << "type"{isset{'s1}} >>
let resource intro += (isset_type_term, wrap_intro d_isset_typeT)
let eqSetT = equal_set
let setAssumT = isset_assum
let splitT t i = funT (fun p ->
if i = 0 then
let bind = var_subst_to_bind (Sequent.concl p) t in
set_split_concl t bind
thenLT [addHiddenLabelT "wf";
addHiddenLabelT "wf";
addHiddenLabelT "main"]
else
let hyp = nth_hyp p i in
let bind = var_subst_to_bind hyp t in
set_split_hyp (get_pos_hyp_num p i) t bind
thenLT [addHiddenLabelT "wf";
addHiddenLabelT "wf";
addHiddenLabelT "main"])
|
75f46b8802a175d6fba0dc8064c4d9072b6a17e11dc095bbe07cbfb4b13a84dc | thephoeron/quipper-language | Types.hs | This file is part of Quipper . Copyright ( C ) 2011 - 2014 . Please see the
-- file COPYRIGHT for a list of authors, copyright holders, licensing,
-- and other details. All rights reserved.
--
-- ======================================================================
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE UndecidableInstances #
# LANGUAGE TypeFamilies #
# LANGUAGE OverlappingInstances #
# LANGUAGE IncoherentInstances #
{-# LANGUAGE DeriveDataTypeable #-}
-- | This module defines the specialized datatypes of the Class Number algorithm, and basic utility functions on these types.
module Algorithms.CL.Types where
import Quipper
import Quipper.Internal
import Data.Typeable
import Data.Ratio
import QuipperLib.Arith hiding (q_mult_param)
import QuipperLib.FPReal
import Algorithms.CL.Auxiliary
-- ===========================================
-- * Type synonyms
-- $ First, we define some type synonyms for arithmetic types, selecting which will be used in the functions for the Class Number algorithm.
--
We use three different integer types . For interfacing with quantum computation , we use ' CLInt ' : = ' IntM ' . For efficient classical ( i.e. circuit - generation time ) computation on potentially large integers , we use ' CLIntP ' : = ' Integer ' , ’s arbitrary - precision integers . ( Δ , for instance , is taken to be a ' CLIntP ' ) . For small classical integers ( typically for register sizes ) , we use ' Int ' , ’s bounded - precision integers .
--
For the first two of these , we define type synonyms , so that they can be swapped out to other types if desired ( they are to a large extent modular ) . For ' Int ' we do not , since we make un - coerced use of built - in Haskell functions like ' length ' which give it specifically .
--
-- Where not dictated by these conventions, integer types are generalized, i.e., @(Integral a) =>@ …
--
-- Rational and real numbers have not yet been similarly stratified.
-- | Integers that may be passed into or received out of quantum computations.
type CLInt = IntM
-- | Integers that will be used for parameter computation only, potentially large.
type CLIntP = Integer
-- | Rational numbers for the Class Number code.
type CLRational = Rational
-- | Real numbers for the Class Number code.
type CLReal = FPReal
-- ===========================================
-- * Algebraic number fields
-- ===========================================
-- ** Discriminants
-- $ The functions of this subsection are needed only for circuit-generation-time classical computation, not for quantum circuit computation.
-- | Compute Δ, given /d/.
( See [ Jozsa 2003 ] , Prop . 6 et seq . We use Δ , or in code ' bigD ' , where uses /D/. )
bigD_of_d :: Integral a => a -> a
bigD_of_d d = case (d `mod` 4) of
1 -> d
_ -> 4*d
-- | Compute /d/, given Δ.
( Again , see [ 2003 ] , Prop . 6 et seq . )
d_of_bigD :: Integral a => a -> a
d_of_bigD bigD = case (bigD `mod` 4) of
0 -> bigD `div` 4
_ -> bigD
| Check if /d/ is a valid input to Hallgren ’s algorithm ,
-- i.e. correctly defines a real quadratic number field.
is_valid_d :: (Integral a) => a -> Bool
is_valid_d d = d > 1 && is_square_free d
| Check if /Δ/ is a valid input to Hallgren ’s algorithm ,
-- i.e. is the discriminant of a real quadratic number field.
-- (Cf. <>)
is_valid_bigD :: (Integral a) => a -> Bool
is_valid_bigD bigD = bigD > 1 && case (bigD `mod` 4) of
1 -> is_square_free bigD
0 -> (d `mod` 4 == 2 || d `mod` 4 == 3) && is_square_free d
where d = bigD `div` 4
_ -> False
-- | The (infinite, lazy) list of all valid inputs /d/,
i.e. of all square - free integers above 2 .
all_small_ds :: (Integral int) => [int]
all_small_ds = filter (\n -> is_valid_d n) [2..]
-- | The (infinite, lazy) list of all valid inputs Δ,
-- i.e. of all discriminants of real quadratic number fields.
all_bigDs :: (Integral int) => [int]
all_bigDs = map bigD_of_d all_small_ds
-- ===========================================
-- ** Field elements
-- | A data type describing a number in the algebraic number field K = ℚ[√Δ]: @'AlgNum' /a/ /b/ Δ@ represents /a/ + /b/√Δ.
--
-- In general, the type of coefficients may be any type of (classical or quantum)
numbers , i.e. an instance of the ' ' or ' QNum ' class .
-- Given this, the algebraic numbers with a fixed Δ will in turn be an instance
of ' ' or ' QNum ' .
--
A value @/a/ : : /x/@ may also be used as an @'AlgNumGen ' /x/@ ,
-- with no Δ specified, to represent simply /a/ + 0√Δ; this can be considered polymorphic
-- over all possible values of Δ.
--
-- This is similar to the use of 'IntM's or 'FPReal's of indeterminate size, although
-- unlike for them, we do not restrict this to the classical case. However, the
question of whether an ' AlgNumQ ' has specified is ( like e.g. the length of
-- a list) is a parameter property, known at circuit generation time, not a purely
-- quantum property.
data AlgNumGen a = AlgNum a a CLIntP | AlgNum_indet a deriving (Show)
-- | The specific instance of 'AlgNumGen' used for classical (parameter) computation.
type AlgNum = AlgNumGen CLRational
| Extract the first co - ordinate of an ' AlgNumGen '
fst_AlgNum :: AlgNumGen a -> a
fst_AlgNum (AlgNum u _ _) = u
fst_AlgNum (AlgNum_indet u) = u
| Extract the second co - ordinate of an ' AlgNumGen '
snd_AlgNum :: (Num a) => AlgNumGen a -> a
snd_AlgNum (AlgNum _ v _) = v
snd_AlgNum (AlgNum_indet _) = 0
instance (Eq a, Num a) => Eq (AlgNumGen a) where
(AlgNum a b bigD) == (AlgNum a' b' bigD') =
if bigD == bigD' then a == a' && b == b'
else error "Operation = on AlgNum: operands must have same Δ."
(AlgNum a b bigD) == (AlgNum_indet a') = (AlgNum a b bigD) == (AlgNum a' 0 bigD)
(AlgNum_indet a) == (AlgNum a' b' bigD') = (AlgNum a 0 bigD') == (AlgNum a' b' bigD')
(AlgNum_indet a) == (AlgNum_indet a') = a == a'
| Print a ' Number ' in human - readable ( though not - readable ) format , as e.g.
pretty_show_AlgNum :: Show a => AlgNumGen a -> String
pretty_show_AlgNum (AlgNum a b bigD) = (show a) ++ " + " ++ (show b) ++ " √" ++ show bigD
pretty_show_AlgNum (AlgNum_indet a) = show a
-- | Realize an algebraic number as a real number (of any 'Floating' type).
floating_of_AlgNum :: (Real a, Floating b) => AlgNumGen a -> b
floating_of_AlgNum (AlgNum a b bigD) = (realToFrac a) + (realToFrac b) * (sqrt $ fromIntegral bigD)
floating_of_AlgNum (AlgNum_indet a) = (realToFrac a)
| Coerce one algebraic number into the field of a second , if possible . If not possible ( i.e. if their Δ ’s mismatch ) , throw an error .
number_promote :: Num a => AlgNumGen a -> AlgNumGen b -> ErrMsg -> AlgNumGen a
number_promote (AlgNum a b bigD) (AlgNum _ _ bigD') e =
if bigD == bigD' then AlgNum a b bigD
else error $ e "mismatched Δ."
number_promote (AlgNum_indet a) (AlgNum _ _ bigD') _ = AlgNum a 0 bigD'
number_promote n (AlgNum_indet _) _ = n
instance (Ord a, Num a) => Ord (AlgNumGen a) where
compare (AlgNum a b bigD) (AlgNum a' b' bigD') =
if bigD == bigD' then
case (compare a a', compare b b') of
(EQ,y) -> y
(x,EQ) -> x
(GT,GT) -> GT
(LT,LT) -> LT
(GT,LT) -> compare ((a-a')^2) ((b-b')^2 * fromInteger bigD)
(LT,GT) -> compare ((b-b')^2 * fromInteger bigD) ((a-a')^2)
else
error "compare // AlgNumGen: mismatched Δ."
compare (AlgNum a b bigD) (AlgNum_indet a') = compare (AlgNum a b bigD) (AlgNum a' 0 bigD)
compare (AlgNum_indet a) (AlgNum a' b' bigD') = compare (AlgNum a 0 bigD') (AlgNum a' b' bigD')
compare (AlgNum_indet a) (AlgNum_indet a') = compare a a'
instance (Ord a, Num a) => Num (AlgNumGen a) where
(AlgNum a b bigD) + (AlgNum a' b' bigD') =
if bigD == bigD' then AlgNum (a+a') (b+b') bigD
else error "Operation + on AlgNum: operands must have same Δ."
(AlgNum a b bigD) + (AlgNum_indet a') = (AlgNum a b bigD) + (AlgNum a' 0 bigD)
(AlgNum_indet a) + (AlgNum a' b' bigD') = (AlgNum a 0 bigD') + (AlgNum a' b' bigD')
(AlgNum_indet a) + (AlgNum_indet a') = (AlgNum_indet (a + a'))
(AlgNum a b bigD) * (AlgNum a' b' bigD') =
if bigD == bigD' then AlgNum (a*a' + b*b'*(fromIntegral bigD)) (a*b' + a'*b) bigD
else error "Operation * on AlgNum: operands must have same Δ."
(AlgNum a b bigD) * (AlgNum_indet a') = (AlgNum a b bigD) * (AlgNum a' 0 bigD)
(AlgNum_indet a) * (AlgNum a' b' bigD') = (AlgNum a 0 bigD') * (AlgNum a' b' bigD')
(AlgNum_indet a) * (AlgNum_indet a') = (AlgNum_indet (a * a'))
(AlgNum a b bigD) - (AlgNum a' b' bigD') =
if bigD == bigD' then AlgNum (a-a') (b-b') bigD
else error "Operation - on AlgNum: operands must have same Δ."
(AlgNum a b bigD) - (AlgNum_indet a') = (AlgNum a b bigD) - (AlgNum a' 0 bigD)
(AlgNum_indet a) - (AlgNum a' b' bigD') = (AlgNum a 0 bigD') - (AlgNum a' b' bigD')
(AlgNum_indet a) - (AlgNum_indet a') = (AlgNum_indet (a - a'))
abs n = if (n >= 0) then n else -n
signum n = number_promote (if n > 0 then 1 else if n == 0 then 0 else (-1)) n
(const "CL.Types: internal error (signum // AlgNum)")
fromInteger = AlgNum_indet . fromInteger
instance (Real a) => Real (AlgNumGen a) where
toRational = toRational . floating_of_AlgNum
-- | Division of algebraic numbers.  'recip' rationalizes the
-- denominator: 1\/(a + b√Δ) = (a − b√Δ)\/(a² − b²Δ).
instance (Ord a, Fractional a) => Fractional (AlgNumGen a) where
    fromRational = AlgNum_indet . fromRational
    recip (AlgNum a b bigD) =
        -- c = a² − b²Δ is the norm of the operand; it is 0 only for the
        -- zero operand, or when Δ is a perfect square (hence not a valid
        -- discriminant).  The diagnostic message is only forced if the
        -- assertion fails.
        let c = (a^2) - (b^2 * (fromIntegral bigD))
            msg | a == 0 && b == 0   = "CL.Types: divide-by-zero error"
                | is_valid_bigD bigD = "CL.Types: internal error (AlgNum // recip)"
                -- Bug fix: was 'error "CL.Types: " ++ …', which parses as
                -- '(error "CL.Types: ") ++ …' and throws a truncated
                -- message without the offending Δ.  '$' restores the
                -- intended full message.
                | otherwise          = error $ "CL.Types: " ++ show bigD ++ " not a valid discriminant"
        in assert (c /= 0) msg
           (AlgNum (a/c) (-b/c) bigD)
    recip (AlgNum_indet a) = AlgNum_indet $ recip a
-- | 'properFraction' splits an algebraic number into integral and
-- fractional parts, using its floating-point realization to locate
-- the integral part.
instance (RealFrac a) => RealFrac (AlgNumGen a) where
    properFraction x = (whole, x - fromIntegral whole)
      where whole = truncate (floating_of_AlgNum x)
-- | The algebraic conjugate: sends /a/ + /b/ √Δ to /a/ - /b/ √Δ.
-- An indeterminate number (no √Δ component) is its own conjugate.
conjugate :: (Num a) => AlgNumGen a -> AlgNumGen a
conjugate (AlgNum a b bigD)  = AlgNum a (negate b) bigD
conjugate n@(AlgNum_indet _) = n
-- | Test whether an algebraic number is an algebraic integer.
--
-- (A number is an algebraic integer iff it can be written in the form
-- /m/ + /n/(Δ + √Δ)\/2, where /m/, /n/ are integers.
-- See [Jozsa 2003], proof of Prop. 14.)
is_alg_int :: (Ord a, RealFrac a) => AlgNumGen a -> Bool
is_alg_int (AlgNum_indet a) = is_int a
is_alg_int (AlgNum a b bigD) =
  -- Solving a + b √Δ = m + n(Δ + √Δ)/2 gives n = 2b, m = a − bΔ;
  -- the number is an algebraic integer iff both are integral.
  let n = 2 * b
      m = a - b * fromIntegral bigD
  in is_int n && is_int m
-- | Test whether an algebraic number is a unit of the ring of algebraic
-- integers, i.e. a nonzero number such that both it and its reciprocal
-- are algebraic integers.
is_unit :: (Ord a, RealFrac a) => AlgNumGen a -> Bool
is_unit n
  | n == 0    = False
  | otherwise = is_alg_int n && is_alg_int (recip n)
-- | The number ω associated to the field /K/:
-- (1 + √Δ)\/2 when Δ ≡ 1 (mod 4), and √Δ otherwise.
omega_of_bigD :: CLIntP -> AlgNum
omega_of_bigD bigD
  | bigD `mod` 4 == 1 = AlgNum (1/2) (1/2) bigD
  | otherwise         = AlgNum 0 1 bigD
-- ===========================================
-- * Ideals

-- | Data specifying an ideal in an algebraic number field. An ideal is described by a tuple
-- (Δ,/m/,/l/,/a/,/b/), representing the ideal
--
-- /m/\//l/ (/aZ/ + (/b/+√Δ)\/2 /Z/),
--
-- where moreover we assume and ensure always that the ideal is in /standard form/
-- ([Jozsa 2003], p.11, Prop. 16). Specifically,
--
-- * /a/,/k/,/l/ > 0;
--
-- * 4/a/ | /b/[sup 2] – Δ;
--
-- * /b/ = τ(/a/,/b/);
--
-- * gcd(/k/,/l/) = 1.
--
-- In particular, this gives us bounds on the size of /a/ and /b/,
-- and hence tells us the sizes needed for these registers (see 'length_for_ab' below).
-- | An ideal, generic over the leaf type /x/ of its four integer
-- registers: @Ideal Δ /m/ /l/ /a/ /b/@.  Δ is a classical parameter,
-- fixed at circuit-generation time.
data IdealX x = Ideal CLIntP (XInt x) (XInt x) (XInt x) (XInt x)
    deriving (Show, Eq, Typeable)

-- | Classical parameter specifying an ideal.
type Ideal = IdealX Bool

-- | Quantum circuit-type counterpart of 'Ideal'.
type IdealQ = IdealX Qubit

-- | Classical circuit-type counterpart of 'Ideal'.
type IdealC = IdealX Bit

-- Quantum/classical type-conversion and shape-type instances for ideals.
type instance QCType x y (IdealX z) = IdealX (QCType x y z)
type instance QTypeB Ideal = IdealQ
-- | Print a classical ideal as the constructor name followed by its
-- five components, separated by single spaces.
instance Show Ideal where
    show (Ideal bigD m l a b) =
      unwords ["Ideal", show bigD, show m, show l, show a, show b]
-- | 'QCData' instance for ideals: the four integer registers are
-- traversed component-wise.  The parameter Δ is classical and carried
-- through unchanged; combining two ideals with different Δ is an error.
-- The lazy (~) shape patterns avoid forcing the shape argument.
instance QCLeaf x => QCData (IdealX x) where
  qcdata_mapM ~(Ideal _ msh lsh ash bsh) f g (Ideal bigD m l a b) = do
    m' <- qcdata_mapM msh f g m
    l' <- qcdata_mapM lsh f g l
    a' <- qcdata_mapM ash f g a
    b' <- qcdata_mapM bsh f g b
    return (Ideal bigD m' l' a' b')
  qcdata_zip ~(Ideal _ msh lsh ash bsh) q c q' c' (Ideal bigD m l a b) (Ideal bigD' m' l' a' b') e
    | bigD /= bigD'
      = error (e "Ideal exponent mismatch")
    | otherwise
      = (Ideal bigD m'' l'' a'' b'')
    where
      m'' = qcdata_zip msh q c q' c' m m' errmsg
      l'' = qcdata_zip lsh q c q' c' l l' errmsg
      a'' = qcdata_zip ash q c q' c' a a' errmsg
      b'' = qcdata_zip bsh q c q' c' b b' errmsg
      -- Contextualize any error raised while zipping a component.
      errmsg x = e ("in Ideal: " ++ x)
  qcdata_promote (Ideal bigD m l a b) (Ideal bigD' m' l' a' b') e
    | bigD /= bigD'
      = error (e "Ideal exponent mismatch")
    | otherwise
      = (Ideal bigD m'' l'' a'' b'')
    where
      m'' = qcdata_promote m m' errmsg
      l'' = qcdata_promote l l' errmsg
      a'' = qcdata_promote a a' errmsg
      b'' = qcdata_promote b b' errmsg
      errmsg x = e ("in Ideal: " ++ x)
-- Labeling of IdealQ is (m,l,a,b).
instance QCLeaf x => Labelable (IdealX x) String where
  -- Label each register from the base name s with a per-component
  -- suffix via 'dotted_indexed' (presumably "s.m", "s.l", etc. —
  -- confirm against 'dotted_indexed').  Δ is a parameter: no label.
  label_rec (Ideal _ qm ql qa qb) s = do
    label_rec qm s `dotted_indexed` "m"
    label_rec ql s `dotted_indexed` "l"
    label_rec qa s `dotted_indexed` "a"
    label_rec qb s `dotted_indexed` "b"
-- We also provide an alternate labeling by a 4-tuple of strings, in
-- case this is ever useful (maybe for an ideal where the components
-- are called something other than /m/, /l/, /a/, and /b/).
instance Labelable IdealQ (String, String, String, String) where
  label_rec (Ideal _ qm ql qa qb) (sm, sl, sa, sb) = do
    label_rec qm sm
    label_rec ql sl
    label_rec qa sa
    label_rec qb sb
-- | Equality of classical ideals: component-wise comparison.
-- Comparing two ideals from fields with different Δ is a programming
-- error and is reported as such.
instance Eq Ideal where
  i1@(Ideal bigD m l a b) == i2@(Ideal bigD' m' l' a' b')
    = if (bigD /= bigD')
      then error error_string
      -- Bug fix: was "l' == l'", which is trivially True, so ideals
      -- differing only in the l component compared as equal.
      else (m == m' && l == l' && a == a' && b == b')
    where error_string = "Comparing two ideals of different Δ: " ++ (show i1) ++ "," ++ (show i2)
-- | Data specifying a reduced ideal, by a tuple (Δ,/a/,/b/); this
-- corresponds to the ideal specified by (Δ,1,/a/,/a/,/b/), i.e.,
-- /Z/ + (/b/+√Δ)\/2/a/ /Z/.
data IdealRedX x = IdealRed CLIntP (XInt x) (XInt x)
    deriving (Show, Typeable)
-- | Classical parameter specifying a reduced ideal.
type IdealRed = IdealRedX Bool

-- | Quantum circuit-type counterpart of 'IdealRed'.
type IdealRedQ = IdealRedX Qubit

-- | Classical circuit-type counterpart of 'IdealRed'.
type IdealRedC = IdealRedX Bit
-- | Print a classical reduced ideal as the constructor name followed
-- by its three components, separated by single spaces.
instance Show IdealRed where
    show (IdealRed bigD a b) =
      unwords ["IdealRed", show bigD, show a, show b]
-- | Equality of classical reduced ideals: component-wise comparison.
-- Comparing ideals from fields with different Δ is an error.
instance Eq IdealRed where
  i1@(IdealRed bigD a b) == i2@(IdealRed bigD' a' b')
    | bigD /= bigD' = error error_string
    | otherwise     = a == a' && b == b'
    where error_string = "Comparing two reduced ideals of different Δ: "
                         ++ (show i1) ++ "," ++ (show i2)
-- Quantum/classical type-conversion and shape-type instances for reduced ideals.
type instance QCType x y (IdealRedX z) = IdealRedX (QCType x y z)
type instance QTypeB IdealRed = IdealRedQ
-- | 'QCData' instance for reduced ideals: traverse the two integer
-- registers component-wise.  The parameter Δ is carried through
-- unchanged and must agree when zipping/promoting two reduced ideals.
instance QCLeaf x => QCData (IdealRedX x) where
  qcdata_mapM ~(IdealRed _ ash bsh) f g (IdealRed bigD a b) = do
    a' <- qcdata_mapM ash f g a
    b' <- qcdata_mapM bsh f g b
    return (IdealRed bigD a' b')
  qcdata_zip ~(IdealRed _ ash bsh) q c q' c' (IdealRed bigD a b) (IdealRed bigD' a' b') e
    | bigD /= bigD'
      = error (e "IdealRed exponent mismatch")
    | otherwise
      = (IdealRed bigD a'' b'')
    where
      a'' = qcdata_zip ash q c q' c' a a' errmsg
      b'' = qcdata_zip bsh q c q' c' b b' errmsg
      -- Contextualize any error raised while zipping a component.
      errmsg x = e ("in IdealRed: " ++ x)
  qcdata_promote (IdealRed bigD a b) (IdealRed bigD' a' b') e
    | bigD /= bigD'
      = error (e "IdealRed exponent mismatch")
    | otherwise
      = (IdealRed bigD a'' b'')
    where
      a'' = qcdata_promote a a' errmsg
      b'' = qcdata_promote b b' errmsg
      errmsg x = e ("in IdealRed: " ++ x)
-- Labeling of IdealRedQ is (a,b).
instance QCLeaf x => Labelable (IdealRedX x) String where
  -- Label both registers from the base name s, with per-component
  -- suffixes via 'dotted_indexed'.  Δ is a parameter: no label.
  label_rec (IdealRed _ qa qb) s = do
    label_rec qa s `dotted_indexed` "a"
    label_rec qb s `dotted_indexed` "b"

-- We also provide an alternate labeling by a pair of strings, in case
-- this is ever useful (maybe for an ideal where the two components
-- are called something other than /a/ and /b/).
instance QCLeaf x => Labelable (IdealRedX x) (String, String) where
  label_rec (IdealRed _ qa qb) (sa, sb) = do
    label_rec qa sa
    label_rec qb sb
-- | An ideal /I/, together with a distance δ for it — that is, /some/
-- representative, mod /R/, for δ(/I/) as defined on /G/ p.4.
-- Most functions described as acting on ideals need in fact to be seen
-- as a pair of an ideal and a distance for it.
type IdDist = (Ideal,FPReal)

-- | Quantum analogue of 'IdDist'.
type IdDistQ = (IdealQ,FPRealQ)

-- | A reduced ideal /I/, together with a distance δ for it.
type IdRedDist = (IdealRed,FPReal)

-- | Quantum analogue of 'IdRedDist'.
type IdRedDistQ = (IdealRedQ,FPRealQ)
-- ===========================================
-- ** Trivial access functions

-- | Extract the /d/ component from an 'IdealQ'.
d_of_Ideal :: IdealX a -> CLIntP
d_of_Ideal (Ideal bigD _ _ _ _) = d_of_bigD bigD

-- | Extract the /d/ component from an 'IdealRedQ'.
d_of_IdealRed :: IdealRedX a -> CLIntP
d_of_IdealRed (IdealRed bigD _ _) = d_of_bigD bigD

-- | Extract Δ from an 'IdealQ'.
bigD_of_Ideal :: IdealX a -> CLIntP
bigD_of_Ideal (Ideal bigD _ _ _ _) = bigD

-- | Extract Δ from an 'IdealRedQ'.
bigD_of_IdealRed :: IdealRedX a -> CLIntP
bigD_of_IdealRed (IdealRed bigD _ _) = bigD

-- | Extract the delta (distance) part from an ideal/distance pair.
delta :: IdDist -> CLReal
delta (Ideal _ _ _ _ _, del) = del
-- ===========================================
-- ** Assertions, coercions

-- $ Elements of the types 'Ideal', 'IdealRed', etc are assumed to satisfy certain extra conditions.
-- This section includes functions for checking that these conditions are satisfied, and for safely
-- coercing between these types.

-- | @'tau' Δ /b/ /a/@: the function τ(/b/,/a/). Gives the representative for /b/ mod /2a/, in a range dependent on /a/ and √Δ.
--
-- (This doesn't quite belong here, but is included as a prerequisite of the assertions).
tau :: (Integral int, Integral int') => int' -> int -> int -> int
tau bigD b a = mod_with_max b (2*a) upper
  where
    -- Upper end of the representative range: the larger of a and ⌊√Δ⌋.
    upper = max a root_bigD
    root_bigD = floor (sqrt (fromIntegral bigD))
-- | Return 'True' if the given ideal is in standard form. (Functions should /always/ keep ideals in standard form).
is_standard :: Ideal -> Bool
is_standard (Ideal bigD m l a b) =
  and [ a > 0
      , l > 0
      , m > 0
      , (bigD - (fromIntegral b)^2) `mod` (4 * (fromIntegral a)) == 0
      , b == tau bigD b a
      ]
-- | Test whether an 'Ideal' is reduced.  (An ideal \</m/,/l/,/a/,/b/\>
-- is reduced iff /m/ = 1, /l/ = /a/, /b/ ≥ 0 and /b/ + √Δ > 2/a/
-- ([Jozsa 2003], Prop. 20)).
is_reduced :: Ideal -> Bool
is_reduced (Ideal bigD m l a b) =
  let root_bigD = ceiling (sqrt (fromIntegral bigD))
  in (m == 1) && (l == a) && (b >= 0) && (b + root_bigD > 2 * a)
-- | Test whether an 'IdealRed' is really reduced.  (An ideal
-- \<1,/a/,/a/,/b/\> is reduced iff /b/ ≥ 0 and /b/ + √Δ > 2/a/
-- ([Jozsa 2003], Prop. 20)).
is_really_reduced :: IdealRed -> Bool
is_really_reduced (IdealRed bigD a b) =
  let root_bigD = ceiling (sqrt (fromIntegral bigD))
  in (b >= 0) && (b + root_bigD > 2 * a)
-- | Coerce an 'IdealRed' to an 'Ideal': the reduced ideal (Δ,/a/,/b/)
-- denotes the general ideal (Δ,1,/a/,/a/,/b/).
forget_reduced :: IdealRed -> Ideal
forget_reduced (IdealRed bigD a b) = Ideal bigD 1 a a b
-- | Coerce an 'Ideal' to an 'IdealRed', if it is reduced, or throw an
-- error otherwise.  Cf. [Jozsa 2003], Prop. 20.
to_reduced :: Ideal -> IdealRed
to_reduced ii@(Ideal bigD m l a b)
  | is_reduced ii = IdealRed bigD a b
  | otherwise     = error $ "to_reduced: (" ++ (show ii) ++ ") is not reduced."
-- | Throw an error if an 'Ideal' is not reduced; otherwise, the identity function.
assert_reduced :: Ideal -> a -> a
assert_reduced ii x = assert (is_reduced ii) msg x
  where msg = "assert_reduced: (" ++ (show ii) ++ ") is not reduced."
-- | Throw an error if an 'IdealRed' is not really reduced; otherwise, the identity function.
assert_really_reduced :: IdealRed -> a -> a
assert_really_reduced ii x = assert (is_really_reduced ii) msg x
  where msg = "assert_really_reduced: (" ++ (show ii) ++ ") is not reduced."
-- | Quantum analogue of 'tau'.  @'q_tau' Δ /qa/ /qb/@: compute the
-- representative for /qb/ mod 2/qa/, in a range dependent on /qa/ and √Δ.
-- The boxed circuit takes /qa/ first, then /qb/, and returns
-- (/qa/, /qb/, /t/), with /t/ the freshly-allocated result register.
q_tau :: CLIntP -> QDInt -> QDInt -> Circ (QDInt, QDInt, QDInt)
q_tau bigD = box ("tau, Δ = " ++ show bigD) $ \a b -> do
  let root_bigD = floor $ sqrt $ fromIntegral bigD
  t <- with_computed
    (do
      -- Compute max := (if a > ⌊√Δ⌋ then a else ⌊√Δ⌋), mirroring the
      -- local 'max' binding in the classical 'tau': copy a in when the
      -- comparison holds, otherwise set the constant ⌊√Δ⌋.
      (_, a_gt_rtD) <- q_gt_param a root_bigD
      max <- qinit $ qc_false a
      (max, _) <- controlled_not max a `controlled` a_gt_rtD
      max <- bool_controlled_not max (intm_promote root_bigD max "q_tau: internal error") `controlled` (a_gt_rtD .==. False)
      -- t := b mod 2a, with representatives bounded via max.
      (_, twice_a) <- q_mult_param 2 a
      (_, _, _, t) <- q_mod_with_max b twice_a max
      return t)
    -- Copy the result out so with_computed can uncompute the garbage.
    qc_copy
  return (a,b,t)
-- | Test whether a given 'IdealQ' is reduced.  \</m/,/l/,/a/,/b/\> is
-- reduced iff /m/ = 1, /l/ = /a/, /b/ ≥ 0 and /b/ + √Δ > 2/a/
-- ([Jozsa 2003], Prop. 20).  Returns the unchanged ideal together with
-- a fresh qubit holding the test result.
q_is_reduced :: IdealQ -> Circ (IdealQ,Qubit)
q_is_reduced = box "is_reduced" $ \qii ->
  let bigD = bigD_of_Ideal qii in
  with_computed_fun qii
    -- Forward computation: one ancilla qubit per condition of Prop. 20.
    -- (Note the inner bigD pattern shadows the outer binding; both hold
    -- the same parameter.)
    (\(Ideal bigD qm ql qa qb) -> do
      test1 <- qinit False
      test1 <- qnot test1 `controlled` qm .==. 1
      (ql,qa,test2) <- q_is_equal ql qa
      (qb,test3) <- q_ge_param qb 0
      -- qx combines 2a with the parameter ⌈√Δ⌉ (exact operand order of
      -- q_sub_param_in_place — TODO confirm); test4 then realizes the
      -- b + √Δ > 2a condition as a comparison of b against qx.
      (qa, q2a) <- q_mult_param 2 qa
      qx <- q_sub_param_in_place (ceiling $ sqrt $ fromIntegral bigD) q2a
      (qb, qx, test4) <- q_gt qb qx
      return ([test1,test2,test3,test4], (qm,ql,qa,qb,qx)))
    -- AND the four test qubits into a single fresh output qubit; the
    -- intermediate state is then uncomputed by with_computed_fun.
    (\(tests, rest) -> do
      test_out <- qinit False
      test_out <- qnot test_out `controlled` tests
      return ((tests, rest), test_out))
-- | Test whether a given 'IdealRedQ' is really reduced (as it should
-- always be, if code is written correctly).  An ideal \<1,/a/,/a/,/b/\>
-- is reduced iff /b/ ≥ 0 and /b/ + √Δ > 2/a/ ([Jozsa 2003], Prop. 20).
-- Returns the unchanged ideal together with a fresh test qubit.
q_is_really_reduced :: IdealRedQ -> Circ (IdealRedQ,Qubit)
q_is_really_reduced = box "is_really_reduced" $ \qii ->
  let bigD = bigD_of_IdealRed qii in
  with_computed_fun qii
    -- Forward computation: one ancilla qubit per condition of Prop. 20.
    (\(IdealRed bigD qa qb) -> do
      (qb,test1) <- q_ge_param qb 0
      -- qx combines 2a with the parameter ⌈√Δ⌉ (cf. q_is_reduced);
      -- test2 realizes the b + √Δ > 2a condition.
      (qa, q2a) <- q_mult_param 2 qa
      qx <- q_sub_param_in_place (ceiling $ sqrt $ fromIntegral bigD) q2a
      (qb, qx, test2) <- q_gt qb qx
      return ([test1,test2], (qa,qb,qx)))
    -- AND the two test qubits into one fresh output qubit; intermediate
    -- state is uncomputed by with_computed_fun.
    (\(tests, rest) -> do
      test_out <- qinit False
      test_out <- qnot test_out `controlled` tests
      return ((tests, rest), test_out))
-- | Coerce an 'IdealRedQ' to an 'IdealQ', initializing the extra
-- components appropriately: /m/ is initialized to the constant 1 (at
-- the same register length as /a/), and /l/ is a fresh copy of /a/,
-- yielding the general form \<1,/a/,/a/,/b/\> of a reduced ideal.
q_forget_reduced :: IdealRedQ -> Circ IdealQ
q_forget_reduced = box "forget_reduced" $ \(IdealRed bigD a b) -> do
  -- Register length for the new m register, taken from a's qubit list.
  let a_bits = qulist_of_qdint_bh a
      n = length a_bits
  m <- qinit (intm n 1)
  (a,l) <- qc_copy_fun a
  return (Ideal bigD m l a b)
-- | Coerce an 'IdealQ' to an 'IdealRedQ', assertively terminating the extra components
-- (and hence throwing an error at quantum runtime if the input is not reduced).
q_assert_reduced :: IdealQ -> Circ IdealRedQ
q_assert_reduced = box "assert_reduced" $ \x@(Ideal bigD m l a b) -> do
  -- Run q_forget_reduced in reverse: this assertively uncomputes m (= 1)
  -- and l (= a), leaving just the reduced registers (Δ, a, b).
  x_red <- reverse_generic q_forget_reduced (IdealRed bigD a b) x
  q_assert_really_reduced x_red
-- | Throw a (quantum-runtime) error if an 'IdealRedQ' is not really reduced; otherwise, do nothing.
--
-- Compare 'assert_reduced', 'q_is_really_reduced' in "Algorithms.CL.RegulatorQuantum", and [Jozsa 2003] Prop. 20.
q_assert_really_reduced :: IdealRedQ -> Circ IdealRedQ
q_assert_really_reduced = box "assert_really_reduced" $ \ii -> do
  (ii,test) <- q_is_really_reduced ii
  -- Assertively terminate the test qubit in state True: the
  -- quantum-runtime assertion that the ideal was indeed reduced.
  qterm True test
  return ii
-- ======================================================================
-- ** Bounds on coefficient sizes

-- $ Given Δ, how much space should be allocated for the coefficients of ideals? Most of these bounds are currently missing or uncertain, as documented below. Note these bounds are intended to be sufficient for the calculations occurring in this algorithm, /not/ for representing arbitrary ideals.
-- | Given Δ, return the size of integers to be used for the coefficients /a/, /b/ of reduced ideals.
--
-- Note: can we bound this more carefully?  In reduced ideals we always
-- have 0 ≤ /a/,/b/, and the outputs of ρ, ρ[sup –1] and dot-products of
-- reduced ideals keep |/a/| ≤ Δ; however, intermediate calculations may
-- involve larger values, so a few extra bits of headroom are allocated.
-- The padding of 3 bits is a seat-of-the-pants estimate.
length_for_ab :: CLIntP -> Int
length_for_ab bigD = 3 + ceiling (logBase 2 (fromIntegral bigD))
-- | Given Δ, return the size of integers to be used for the coefficients /m/, /l/ of general ideals.
--
-- TODO: bound this!  The literature ([Jozsa 2003]) does not discuss
-- bounds on the values of /m/ and /l/ that will appear, and we do not
-- yet have a bound.  For now we use the same length as for /a/ and /b/,
-- for convenience; this should be considered a dummy bound, quite
-- possibly not sufficient in general.
length_for_ml :: CLIntP -> Int
length_for_ml = length_for_ab
-- | Given Δ, return the precision /n/ = log[sub 2]/N/ to be used for
-- discretizing the quasi-periodic function /f/ to /f/[sub /N/].
--
-- (“Precision” here means the number of binary digits after the point).
--
-- Taken to ensure 1\//N/ < 3/(32 Δ log Δ).  (Cf. [Jozsa 2003], Prop. 36 (iii).)
n_of_bigD :: (Integral int) => CLIntP -> int
n_of_bigD bigD = ceiling (logBase 2 bound)
  where
    d     = fromIntegral bigD
    bound = 32 * d * log d / 3
-- | Given Δ, /n/, /l/ (as for 'fN', 'q_fN'), return the precision required
-- for intermediate distance calculations during the computation of /f/[sub /N/].
--
-- TODO: bound this more carefully.  [Jozsa 2003] asks for the final
-- output to be precision /n/, but does not discuss intermediate
-- precision, and we have not yet got a confident answer.  For now,
-- just a back-of-the-envelope estimate, which should be sufficient and
-- /O/(correct), but is almost certainly rather larger than necessary.
precision_for_fN :: CLIntP -> Int -> Int -> Int
precision_for_fN _ n l = 2 * (n + l)
-- | Set the 'IntM' coefficients of an 'Ideal' to the standard lengths, if they are not already fixed incompatibly. The standard lengths are determined by 'length_for_ml', 'length_for_ab'. (Compare 'intm_promote', etc.)
fix_sizes_Ideal :: Ideal -> Ideal
fix_sizes_Ideal (Ideal bigD m l a b) = (Ideal bigD (f m) (f l) (f a) (f b))
  where
    -- Promote each coefficient to an n-bit 'IntM'; 'intm_promote' raises
    -- the given error if a different length was already fixed.  (Message
    -- fix: it previously named a nonexistent "set_sizes_Ideal".)
    f x = intm_promote x (intm n 0) "fix_sizes_Ideal: lengths already fixed incompatibly"
    n = max (length_for_ml bigD) (length_for_ab bigD)
-- | Set the 'IntM' coefficients of an 'IdealRed' to the standard lengths, if they are not already fixed incompatibly. The standard lengths are determined by 'length_for_ml', 'length_for_ab'. (Compare 'intm_promote', etc.)
fix_sizes_IdealRed :: IdealRed -> IdealRed
fix_sizes_IdealRed (IdealRed bigD a b) = (IdealRed bigD (f a) (f b))
  where
    -- Promote each coefficient to an n-bit 'IntM'; 'intm_promote' raises
    -- the given error if a different length was already fixed.  (Message
    -- fix: it previously named "set_sizes_Ideal" instead of this function.)
    f x = intm_promote x (intm n 0) "fix_sizes_IdealRed: lengths already fixed incompatibly"
    n = max (length_for_ml bigD) (length_for_ab bigD)
| null | https://raw.githubusercontent.com/thephoeron/quipper-language/15e555343a15c07b9aa97aced1ada22414f04af6/Algorithms/CL/Types.hs | haskell | file COPYRIGHT for a list of authors, copyright holders, licensing,
and other details. All rights reserved.
======================================================================
# LANGUAGE DeriveDataTypeable #
| This module defines the specialized datatypes of the Class Number algorithm, and basic utility functions on these types.
===========================================
* Type synonyms
$ First, we define some type synonyms for arithmetic types, selecting which will be used in the functions for the Class Number algorithm.
Where not dictated by these conventions, integer types are generalized, i.e., @(Integral a) =>@ …
Rational and real numbers have not yet been similarly stratified.
| Integers that may be passed into or received out of quantum computations.
| Integers that will be used for parameter computation only, potentially large.
| Rational numbers for the Class Number code.
| Real numbers for the Class Number code.
===========================================
* Algebraic number fields
===========================================
** Discriminants
$ The functions of this subsection are needed only for circuit-generation-time classical computation, not for quantum circuit computation.
| Compute Δ, given /d/.
| Compute /d/, given Δ.
i.e. correctly defines a real quadratic number field.
i.e. is the discriminant of a real quadratic number field.
(Cf. <>)
| The (infinite, lazy) list of all valid inputs /d/,
| The (infinite, lazy) list of all valid inputs Δ,
i.e. of all discriminants of real quadratic number fields.
===========================================
** Field elements
| A data type describing a number in the algebraic number field K = ℚ[√Δ]: @'AlgNum' /a/ /b/ Δ@ represents /a/ + /b/√Δ.
In general, the type of coefficients may be any type of (classical or quantum)
Given this, the algebraic numbers with a fixed Δ will in turn be an instance
with no Δ specified, to represent simply /a/ + 0√Δ; this can be considered polymorphic
over all possible values of Δ.
This is similar to the use of 'IntM's or 'FPReal's of indeterminate size, although
unlike for them, we do not restrict this to the classical case. However, the
a list) is a parameter property, known at circuit generation time, not a purely
quantum property.
| The specific instance of 'AlgNumGen' used for classical (parameter) computation.
| Realize an algebraic number as a real number (of any 'Floating' type).
| The algebraic conjugate: sends /a/ + /b/ √Δ to /a/ - /b/ √Δ.
| Test whether an algebraic number is an algebraic integer.
solve for m, n in the equation [a + b √D = m + n(Δ + √Δ)/2]
| Test whether an algebraic number is a unit of the ring of algebraic integers.
===========================================
* Ideals
| Data specifying an ideal in an algebraic number field. An ideal is described by a tuple
/m/\//l/ (/aZ/ + (/b/+√Δ)\/2 /Z/),
* 4/a/ | /b/[sup 2] – Δ;
* /b/ = τ(/a/,/b/);
In particular, this gives us bounds on the size of /a/ and /b/,
and hence tells us the sizes needed for these registers (see 'length_for_ab' below).
| Classical parameter specifying an ideal.
| Classical circuit-type counterpart of 'Ideal'.
Labeling of IdealQ is (m,l,a,b).
case this is ever useful (maybe for an ideal where the components
| Data specifying a reduced ideal, by a tuple (Δ,/a/,/b/); this
corresponds to the ideal specified by (Δ,1,/a/,/a/,/b/), i.e.,
/Z/ + (/b/+√Δ)\/2/a/ /Z/.
| Classical parameter specifying a reduced ideal.
| Classical circuit-type counterpart of 'IdealRed'.
Labeling of IdealRedQ is (a,b).
We also provide an alternate labeling by a pair of strings, in case
are called something other than /a/ and /b/).
Most functions described as acting on ideals need in fact to be seen as a pair of an ideal and a distance for it.
| A reduced ideal /I/, together with a distance δ for it.
===========================================
** Trivial access functions
| Extract the /d/ component from an 'IdealRedQ'.
| Extract Δ from an 'IdealRedQ'.
| Extract the delta part from an ideal/distance pair.
===========================================
** Assertions, coercions
$ Elements of the types 'Ideal', 'IdealRed', etc are assumed to satisfy certain extra conditions.
This section includes functions for checking that these conditions are satisfied, and for safely
coercing between these types.
| @'tau' Δ /b/ /a/@: the function τ(/b/,/a/). Gives the representative for /b/ mod /2a/, in a range dependent on /a/ and √Δ.
(This doesn't quite belong here, but is included as a prerequisite of the assertions).
| Return 'True' if the given ideal is in standard form. (Functions should /always/ keep ideals in standard form).
| Coerce an 'IdealRed' to an 'Ideal'.
| Throw an error if an 'Ideal' is not reduced; otherwise, the identity function.
| Throw an error if an 'IdealRed' is not really reduced; otherwise, the identity function.
(and hence throwing an error at quantum runtime if the input is not reduced).
| Throw a (quantum-runtime) error if an 'IdealRedQ' is not really reduced; otherwise, do nothing.
======================================================================
** Bounds on coefficient sizes
| Given Δ, return the size of integers to be used for the coefficients /a/, /b/ of reduced ideals.
discretizing the quasi-periodic function /f/ to /f/[sub /N/].
(“Precision” here means the number of binary digits after the point).
for intermediate distance calculations during the computation of /f/[sub /N/].
| Set the 'IntM' coefficients of an 'Ideal' to the standard lengths, if they are not already fixed incompatibly. The standard lengths are determined by 'length_for_ml', 'length_for_ab'. (Compare 'intm_promote', etc.)
| Set the 'IntM' coefficients of an 'IdealRed' to the standard lengths, if they are not already fixed incompatibly. The standard lengths are determined by 'length_for_ml', 'length_for_ab'. (Compare 'intm_promote', etc.) | This file is part of Quipper . Copyright ( C ) 2011 - 2014 . Please see the
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE FlexibleInstances #
# LANGUAGE FlexibleContexts #
# LANGUAGE UndecidableInstances #
# LANGUAGE TypeFamilies #
# LANGUAGE OverlappingInstances #
# LANGUAGE IncoherentInstances #
module Algorithms.CL.Types where
import Quipper
import Quipper.Internal
import Data.Typeable
import Data.Ratio
import QuipperLib.Arith hiding (q_mult_param)
import QuipperLib.FPReal
import Algorithms.CL.Auxiliary
We use three different integer types . For interfacing with quantum computation , we use ' CLInt ' : = ' IntM ' . For efficient classical ( i.e. circuit - generation time ) computation on potentially large integers , we use ' CLIntP ' : = ' Integer ' , ’s arbitrary - precision integers . ( Δ , for instance , is taken to be a ' CLIntP ' ) . For small classical integers ( typically for register sizes ) , we use ' Int ' , ’s bounded - precision integers .
For the first two of these , we define type synonyms , so that they can be swapped out to other types if desired ( they are to a large extent modular ) . For ' Int ' we do not , since we make un - coerced use of built - in Haskell functions like ' length ' which give it specifically .
type CLInt = IntM
type CLIntP = Integer
type CLRational = Rational
type CLReal = FPReal
( See [ Jozsa 2003 ] , Prop . 6 et seq . We use Δ , or in code ' bigD ' , where uses /D/. )
bigD_of_d :: Integral a => a -> a
bigD_of_d d = case (d `mod` 4) of
1 -> d
_ -> 4*d
( Again , see [ 2003 ] , Prop . 6 et seq . )
d_of_bigD :: Integral a => a -> a
d_of_bigD bigD = case (bigD `mod` 4) of
0 -> bigD `div` 4
_ -> bigD
| Check if /d/ is a valid input to Hallgren ’s algorithm ,
is_valid_d :: (Integral a) => a -> Bool
is_valid_d d = d > 1 && is_square_free d
| Check if /Δ/ is a valid input to Hallgren ’s algorithm ,
is_valid_bigD :: (Integral a) => a -> Bool
is_valid_bigD bigD = bigD > 1 && case (bigD `mod` 4) of
1 -> is_square_free bigD
0 -> (d `mod` 4 == 2 || d `mod` 4 == 3) && is_square_free d
where d = bigD `div` 4
_ -> False
i.e. of all square - free integers above 2 .
all_small_ds :: (Integral int) => [int]
all_small_ds = filter (\n -> is_valid_d n) [2..]
all_bigDs :: (Integral int) => [int]
all_bigDs = map bigD_of_d all_small_ds
numbers , i.e. an instance of the ' ' or ' QNum ' class .
of ' ' or ' QNum ' .
A value @/a/ : : /x/@ may also be used as an @'AlgNumGen ' /x/@ ,
question of whether an ' AlgNumQ ' has specified is ( like e.g. the length of
data AlgNumGen a = AlgNum a a CLIntP | AlgNum_indet a deriving (Show)
type AlgNum = AlgNumGen CLRational
| Extract the first co - ordinate of an ' AlgNumGen '
fst_AlgNum :: AlgNumGen a -> a
fst_AlgNum (AlgNum u _ _) = u
fst_AlgNum (AlgNum_indet u) = u
| Extract the second co - ordinate of an ' AlgNumGen '
snd_AlgNum :: (Num a) => AlgNumGen a -> a
snd_AlgNum (AlgNum _ v _) = v
snd_AlgNum (AlgNum_indet _) = 0
instance (Eq a, Num a) => Eq (AlgNumGen a) where
(AlgNum a b bigD) == (AlgNum a' b' bigD') =
if bigD == bigD' then a == a' && b == b'
else error "Operation = on AlgNum: operands must have same Δ."
(AlgNum a b bigD) == (AlgNum_indet a') = (AlgNum a b bigD) == (AlgNum a' 0 bigD)
(AlgNum_indet a) == (AlgNum a' b' bigD') = (AlgNum a 0 bigD') == (AlgNum a' b' bigD')
(AlgNum_indet a) == (AlgNum_indet a') = a == a'
| Print a ' Number ' in human - readable ( though not - readable ) format , as e.g.
pretty_show_AlgNum :: Show a => AlgNumGen a -> String
pretty_show_AlgNum (AlgNum a b bigD) = (show a) ++ " + " ++ (show b) ++ " √" ++ show bigD
pretty_show_AlgNum (AlgNum_indet a) = show a
floating_of_AlgNum :: (Real a, Floating b) => AlgNumGen a -> b
floating_of_AlgNum (AlgNum a b bigD) = (realToFrac a) + (realToFrac b) * (sqrt $ fromIntegral bigD)
floating_of_AlgNum (AlgNum_indet a) = (realToFrac a)
| Coerce one algebraic number into the field of a second , if possible . If not possible ( i.e. if their Δ ’s mismatch ) , throw an error .
number_promote :: Num a => AlgNumGen a -> AlgNumGen b -> ErrMsg -> AlgNumGen a
number_promote (AlgNum a b bigD) (AlgNum _ _ bigD') e =
if bigD == bigD' then AlgNum a b bigD
else error $ e "mismatched Δ."
number_promote (AlgNum_indet a) (AlgNum _ _ bigD') _ = AlgNum a 0 bigD'
number_promote n (AlgNum_indet _) _ = n
instance (Ord a, Num a) => Ord (AlgNumGen a) where
compare (AlgNum a b bigD) (AlgNum a' b' bigD') =
if bigD == bigD' then
case (compare a a', compare b b') of
(EQ,y) -> y
(x,EQ) -> x
(GT,GT) -> GT
(LT,LT) -> LT
(GT,LT) -> compare ((a-a')^2) ((b-b')^2 * fromInteger bigD)
(LT,GT) -> compare ((b-b')^2 * fromInteger bigD) ((a-a')^2)
else
error "compare // AlgNumGen: mismatched Δ."
compare (AlgNum a b bigD) (AlgNum_indet a') = compare (AlgNum a b bigD) (AlgNum a' 0 bigD)
compare (AlgNum_indet a) (AlgNum a' b' bigD') = compare (AlgNum a 0 bigD') (AlgNum a' b' bigD')
compare (AlgNum_indet a) (AlgNum_indet a') = compare a a'
instance (Ord a, Num a) => Num (AlgNumGen a) where
(AlgNum a b bigD) + (AlgNum a' b' bigD') =
if bigD == bigD' then AlgNum (a+a') (b+b') bigD
else error "Operation + on AlgNum: operands must have same Δ."
(AlgNum a b bigD) + (AlgNum_indet a') = (AlgNum a b bigD) + (AlgNum a' 0 bigD)
(AlgNum_indet a) + (AlgNum a' b' bigD') = (AlgNum a 0 bigD') + (AlgNum a' b' bigD')
(AlgNum_indet a) + (AlgNum_indet a') = (AlgNum_indet (a + a'))
(AlgNum a b bigD) * (AlgNum a' b' bigD') =
if bigD == bigD' then AlgNum (a*a' + b*b'*(fromIntegral bigD)) (a*b' + a'*b) bigD
else error "Operation * on AlgNum: operands must have same Δ."
(AlgNum a b bigD) * (AlgNum_indet a') = (AlgNum a b bigD) * (AlgNum a' 0 bigD)
(AlgNum_indet a) * (AlgNum a' b' bigD') = (AlgNum a 0 bigD') * (AlgNum a' b' bigD')
(AlgNum_indet a) * (AlgNum_indet a') = (AlgNum_indet (a * a'))
(AlgNum a b bigD) - (AlgNum a' b' bigD') =
if bigD == bigD' then AlgNum (a-a') (b-b') bigD
else error "Operation - on AlgNum: operands must have same Δ."
(AlgNum a b bigD) - (AlgNum_indet a') = (AlgNum a b bigD) - (AlgNum a' 0 bigD)
(AlgNum_indet a) - (AlgNum a' b' bigD') = (AlgNum a 0 bigD') - (AlgNum a' b' bigD')
(AlgNum_indet a) - (AlgNum_indet a') = (AlgNum_indet (a - a'))
abs n = if (n >= 0) then n else -n
signum n = number_promote (if n > 0 then 1 else if n == 0 then 0 else (-1)) n
(const "CL.Types: internal error (signum // AlgNum)")
fromInteger = AlgNum_indet . fromInteger
instance (Real a) => Real (AlgNumGen a) where
toRational = toRational . floating_of_AlgNum
instance (Ord a, Fractional a) => Fractional (AlgNumGen a) where
fromRational = AlgNum_indet . fromRational
recip (AlgNum a b bigD) =
let c = (a^2) - (b^2 * (fromIntegral bigD))
in assert (c /= 0) (if (a == 0 && b == 0) then "CL.Types: divide-by-zero error"
else if is_valid_bigD bigD then "CL.Types: internal error (AlgNum // recip)"
else error "CL.Types: " ++ show bigD ++ " not a valid discriminant")
(AlgNum (a/c) (-b/c) bigD)
recip (AlgNum_indet a) = AlgNum_indet $ recip a
instance (RealFrac a) => RealFrac (AlgNumGen a) where
properFraction x = (x',x - fromIntegral x')
where x' = truncate $ floating_of_AlgNum x
conjugate :: (Num a) => AlgNumGen a -> AlgNumGen a
conjugate (AlgNum a b bigD) = (AlgNum a (-b) bigD)
conjugate (AlgNum_indet a) = (AlgNum_indet a)
( A number is an algebraic integer iff it can be written in the form /m/ + /n/(Δ + √Δ)\/2 , where /m/ , /n/ are integers .
See [ Jozsa 2003 ] , proof of Prop . 14 . )
is_alg_int :: (Ord a, RealFrac a) => AlgNumGen a -> Bool
is_alg_int (AlgNum a b bigD) = is_int n && is_int m
where
n = 2 * b
m = a - b * fromIntegral bigD
is_alg_int (AlgNum_indet a) = is_int a
is_unit :: (Ord a, RealFrac a) => AlgNumGen a -> Bool
is_unit n = if n == 0 then False else (is_alg_int n) && (is_alg_int (recip n))
| The number ω associated to the field /K/.
omega_of_bigD :: CLIntP -> AlgNum
omega_of_bigD bigD =
if (bigD `mod` 4 == 1)
then (AlgNum (1/2) (1/2) bigD)
else (AlgNum 0 1 bigD)
-- | A data structure holding an ideal as (Δ,/m/,/l/,/a/,/b/),
-- where moreover we assume and ensure always that the ideal is in
-- /standard form/ ([Jozsa 2003], p.11, Prop. 16).  Specifically,
--
-- * /a/,/k/,/l/ > 0;
-- * gcd(/k/,/l/) = 1.
data IdealX x = Ideal CLIntP (XInt x) (XInt x) (XInt x) (XInt x)
  deriving (Show, Eq, Typeable)

-- | Classical (boolean) version of the ideal type.
type Ideal = IdealX Bool
-- | Quantum circuit-type counterpart of 'Ideal'.
type IdealQ = IdealX Qubit
-- | Classical circuit-type counterpart of 'Ideal'.
type IdealC = IdealX Bit

-- Shape/type-translation instances for the Quipper QCData machinery.
type instance QCType x y (IdealX z) = IdealX (QCType x y z)
type instance QTypeB Ideal = IdealQ
-- Explicit rendering of classical ideals.
-- NOTE(review): 'IdealX' also derives Show (L. above); a separate
-- instance for the synonym 'Ideal' looks overlapping — confirm against
-- the upstream source.
instance Show Ideal where
  show (Ideal bigD m l a b) =
    "Ideal "
    ++ show bigD ++ " "
    ++ show m ++ " "
    ++ show l ++ " "
    ++ show a ++ " "
    ++ show b
-- QCData instance: structural map/zip/promote over the four integer
-- registers of an ideal.  Δ is a classical parameter and must agree
-- between the two operands of zip/promote.
instance QCLeaf x => QCData (IdealX x) where
  -- Map a monadic transformation over each register, using the (lazy)
  -- shape argument to size each sub-map.
  qcdata_mapM ~(Ideal _ msh lsh ash bsh) f g (Ideal bigD m l a b) = do
    m' <- qcdata_mapM msh f g m
    l' <- qcdata_mapM lsh f g l
    a' <- qcdata_mapM ash f g a
    b' <- qcdata_mapM bsh f g b
    return (Ideal bigD m' l' a' b')
  qcdata_zip ~(Ideal _ msh lsh ash bsh) q c q' c' (Ideal bigD m l a b) (Ideal bigD' m' l' a' b') e
    | bigD /= bigD'
    = error (e "Ideal exponent mismatch")
    | otherwise
    = (Ideal bigD m'' l'' a'' b'')
    where
      m'' = qcdata_zip msh q c q' c' m m' errmsg
      l'' = qcdata_zip lsh q c q' c' l l' errmsg
      a'' = qcdata_zip ash q c q' c' a a' errmsg
      b'' = qcdata_zip bsh q c q' c' b b' errmsg
      errmsg x = e ("in Ideal: " ++ x)
  qcdata_promote (Ideal bigD m l a b) (Ideal bigD' m' l' a' b') e
    | bigD /= bigD'
    = error (e "Ideal exponent mismatch")
    | otherwise
    = (Ideal bigD m'' l'' a'' b'')
    where
      m'' = qcdata_promote m m' errmsg
      l'' = qcdata_promote l l' errmsg
      a'' = qcdata_promote a a' errmsg
      b'' = qcdata_promote b b' errmsg
      errmsg x = e ("in Ideal: " ++ x)
-- Labeling of the four registers of an ideal, for circuit diagrams.
instance QCLeaf x => Labelable (IdealX x) String where
  label_rec (Ideal _ qm ql qa qb) s = do
    label_rec qm s `dotted_indexed` "m"
    label_rec ql s `dotted_indexed` "l"
    label_rec qa s `dotted_indexed` "a"
    label_rec qb s `dotted_indexed` "b"

-- We also provide an alternate labeling by a 4-tuple of strings, in
-- case the components are called something other than /m/, /l/, /a/, and /b/.
instance Labelable IdealQ (String, String, String, String) where
  label_rec (Ideal _ qm ql qa qb) (sm, sl, sa, sb) = do
    label_rec qm sm
    label_rec ql sl
    label_rec qa sa
    label_rec qb sb
-- | Equality of ideals; only defined for ideals over the same Δ —
-- comparing ideals of different Δ is a programming error.
instance Eq Ideal where
  i1@(Ideal bigD m l a b) == i2@(Ideal bigD' m' l' a' b')
    = if (bigD /= bigD')
      then error error_string
      -- BUGFIX: was "l' == l'" (always True), so the /l/ component was
      -- silently ignored in equality tests; compare l against l'.
      else (m == m' && l == l' && a == a' && b == b')
    where error_string = "Comparing two ideals of different Δ: " ++ (show i1) ++ "," ++ (show i2)
-- | A data structure holding a reduced ideal as (Δ,/a/,/b/).
data IdealRedX x = IdealRed CLIntP (XInt x) (XInt x)
  deriving (Show, Typeable)

-- | Classical (boolean) version of the reduced-ideal type.
type IdealRed = IdealRedX Bool
-- | Quantum circuit-type counterpart of 'IdealRed'.
type IdealRedQ = IdealRedX Qubit
-- | Classical circuit-type counterpart of 'IdealRed'.
type IdealRedC = IdealRedX Bit

-- Explicit rendering of classical reduced ideals.
instance Show IdealRed where
  show (IdealRed bigD a b) =
    "IdealRed "
    ++ show bigD ++ " "
    ++ show a ++ " "
    ++ show b
-- | Equality of reduced ideals; only defined for equal Δ.
instance Eq IdealRed where
  i1@(IdealRed bigD a b) == i2@(IdealRed bigD' a' b')
    = if (bigD /= bigD')
      then error error_string
      else (a == a' && b == b')
    where error_string = "Comparing two reduced ideals of different Δ: "
                         ++ (show i1) ++ "," ++ (show i2)

-- Shape/type-translation instances for the Quipper QCData machinery.
type instance QCType x y (IdealRedX z) = IdealRedX (QCType x y z)
type instance QTypeB IdealRed = IdealRedQ
-- QCData instance for reduced ideals: as for 'IdealX', but with only
-- the /a/ and /b/ registers; Δ must agree for zip/promote.
instance QCLeaf x => QCData (IdealRedX x) where
  qcdata_mapM ~(IdealRed _ ash bsh) f g (IdealRed bigD a b) = do
    a' <- qcdata_mapM ash f g a
    b' <- qcdata_mapM bsh f g b
    return (IdealRed bigD a' b')
  qcdata_zip ~(IdealRed _ ash bsh) q c q' c' (IdealRed bigD a b) (IdealRed bigD' a' b') e
    | bigD /= bigD'
    = error (e "IdealRed exponent mismatch")
    | otherwise
    = (IdealRed bigD a'' b'')
    where
      a'' = qcdata_zip ash q c q' c' a a' errmsg
      b'' = qcdata_zip bsh q c q' c' b b' errmsg
      errmsg x = e ("in IdealRed: " ++ x)
  qcdata_promote (IdealRed bigD a b) (IdealRed bigD' a' b') e
    | bigD /= bigD'
    = error (e "IdealRed exponent mismatch")
    | otherwise
    = (IdealRed bigD a'' b'')
    where
      a'' = qcdata_promote a a' errmsg
      b'' = qcdata_promote b b' errmsg
      errmsg x = e ("in IdealRed: " ++ x)
-- Labeling of the two registers of a reduced ideal.
instance QCLeaf x => Labelable (IdealRedX x) String where
  label_rec (IdealRed _ qa qb) s = do
    label_rec qa s `dotted_indexed` "a"
    label_rec qb s `dotted_indexed` "b"

-- Alternate labeling by a pair of strings, in case
-- this is ever useful (maybe for an ideal where the two components
-- have other names).
instance QCLeaf x => Labelable (IdealRedX x) (String, String) where
  label_rec (IdealRed _ qa qb) (sa, sb) = do
    label_rec qa sa
    label_rec qb sb
-- | An ideal /I/, together with a distance δ for it — that is, /some/
-- representative, mod /R/, for δ(/I/) as defined on /G/ p.4.
type IdDist = (Ideal,FPReal)
-- | Quantum analogue of 'IdDist'.
type IdDistQ = (IdealQ,FPRealQ)
-- | A reduced ideal together with a distance.
type IdRedDist = (IdealRed,FPReal)
-- | Quantum analogue of 'IdRedDist'.
type IdRedDistQ = (IdealRedQ,FPRealQ)

-- | Extract the /d/ component from an 'Ideal'.
d_of_Ideal :: IdealX a -> CLIntP
d_of_Ideal (Ideal bigD _ _ _ _) = d_of_bigD bigD

-- | Extract the /d/ component from an 'IdealRed'.
d_of_IdealRed :: IdealRedX a -> CLIntP
d_of_IdealRed (IdealRed bigD _ _) = d_of_bigD bigD

-- | Extract Δ from an 'Ideal'.
bigD_of_Ideal :: IdealX a -> CLIntP
bigD_of_Ideal (Ideal bigD _ _ _ _) = bigD

-- | Extract Δ from an 'IdealRed'.
bigD_of_IdealRed :: IdealRedX a -> CLIntP
bigD_of_IdealRed (IdealRed bigD _ _) = bigD
-- | Extract the distance component of an 'IdDist'.
delta :: IdDist -> CLReal
delta (Ideal _ _ _ _ _, del) = del

-- | @'tau' Δ /b/ /a/@: the representative of /b/ mod 2/a/, in a range
-- determined by the larger of /a/ and ⌊√Δ⌋ (via 'mod_with_max').
tau :: (Integral int, Integral int') => int' -> int -> int -> int
tau bigD b a = mod_with_max b (2*a) max
  where
    -- 'max' here shadows the Prelude function; it is the range bound.
    max = if a > root_bigD then a else root_bigD
    root_bigD = floor $ sqrt $ fromIntegral bigD
-- | Test whether an 'Ideal' is in standard form.
is_standard :: Ideal -> Bool
is_standard (Ideal bigD m l a b) =
  (a > 0) && (l > 0) && (m > 0)
  && ((bigD - (fromIntegral b)^2) `mod` (4 * (fromIntegral a)) == 0)
  && b == tau bigD b a

-- | Test whether an 'Ideal' is reduced.  (An ideal \</m/,/l/,/a/,/b/> is
-- reduced iff /m/ = 1, /l/ = /a/, /b/ ≥ 0 and /b/ + √Δ > 2/a/
-- ([Jozsa 2003], Prop. 20)).
is_reduced :: Ideal -> Bool
is_reduced (Ideal bigD m l a b) = (m == 1) && (l == a) && (b >= 0) && (b + root_bigD > 2 * a)
  where root_bigD = ceiling $ sqrt $ fromIntegral bigD

-- | Test whether an 'IdealRed' is really reduced.  (An ideal
-- \<1,/a/,/a/,/b/> is reduced iff /b/ ≥ 0 and /b/ + √Δ > 2/a/
-- ([Jozsa 2003], Prop. 20)).
is_really_reduced :: IdealRed -> Bool
is_really_reduced (IdealRed bigD a b) = (b >= 0) && (b + root_bigD > 2 * a)
  where root_bigD = ceiling $ sqrt $ fromIntegral bigD
-- | Forget that a reduced ideal is reduced, converting it to a general
-- 'Ideal' (with /m/ = 1 and /l/ = /a/).
forget_reduced :: IdealRed -> Ideal
forget_reduced (IdealRed bigD a b) = (Ideal bigD 1 a a b)

-- | Coerce an 'Ideal' to an 'IdealRed', if it is reduced, or throw an
-- error otherwise.  Cf. [Jozsa 2003], Prop. 20.
to_reduced :: Ideal -> IdealRed
to_reduced ii@(Ideal bigD m l a b) =
  if is_reduced ii then (IdealRed bigD a b)
  else error $ "to_reduced: (" ++ (show ii) ++ ") is not reduced."

-- | Assert (via the project-local 3-argument 'assert') that an 'Ideal'
-- is reduced, passing the third argument through unchanged.
assert_reduced :: Ideal -> a -> a
assert_reduced ii =
  assert (is_reduced ii) ("assert_reduced: (" ++ (show ii) ++ ") is not reduced.")

-- | As 'assert_reduced', for 'IdealRed'.
assert_really_reduced :: IdealRed -> a -> a
assert_really_reduced ii =
  assert (is_really_reduced ii) ("assert_really_reduced: (" ++ (show ii) ++ ") is not reduced.")
-- | Quantum analogue of 'tau'.  @'q_tau' Δ /qb/ /qa/@: compute the
-- representative for /qb/ mod 2/qa/, in a range dependent on /qa/ and √Δ.
q_tau :: CLIntP -> QDInt -> QDInt -> Circ (QDInt, QDInt, QDInt)
q_tau bigD = box ("tau, Δ = " ++ show bigD) $ \a b -> do
  let root_bigD = floor $ sqrt $ fromIntegral bigD
  -- Build t under 'with_computed' so intermediate registers are
  -- uncomputed, then copy the result out.
  t <- with_computed
    (do
      -- max := a when a > ⌊√Δ⌋, else ⌊√Δ⌋ (two controlled writes).
      (_, a_gt_rtD) <- q_gt_param a root_bigD
      max <- qinit $ qc_false a
      (max, _) <- controlled_not max a `controlled` a_gt_rtD
      max <- bool_controlled_not max (intm_promote root_bigD max "q_tau: internal error") `controlled` (a_gt_rtD .==. False)
      (_, twice_a) <- q_mult_param 2 a
      (_, _, _, t) <- q_mod_with_max b twice_a max
      return t)
    qc_copy
  return (a,b,t)
-- | Test whether a given 'IdealQ' is reduced.  \</m/,/l/,/a/,/b/> is
-- reduced iff /m/ = 1, /l/ = /a/, /b/ ≥ 0 and /b/ + √Δ > 2/a/
-- ([Jozsa 2003], Prop. 20).
q_is_reduced :: IdealQ -> Circ (IdealQ,Qubit)
q_is_reduced = box "is_reduced" $ \qii ->
  let bigD = bigD_of_Ideal qii in
  with_computed_fun qii
  -- First stage: compute the four sub-tests reversibly.
  (\(Ideal bigD qm ql qa qb) -> do
    test1 <- qinit False
    test1 <- qnot test1 `controlled` qm .==. 1
    (ql,qa,test2) <- q_is_equal ql qa
    (qb,test3) <- q_ge_param qb 0
    (qa, q2a) <- q_mult_param 2 qa
    -- qx := 2a − ⌈√Δ⌉, so test4 below checks b + √Δ > 2a.
    qx <- q_sub_param_in_place (ceiling $ sqrt $ fromIntegral bigD) q2a
    (qb, qx, test4) <- q_gt qb qx
    return ([test1,test2,test3,test4], (qm,ql,qa,qb,qx)))
  -- Second stage: AND the four sub-tests into the output qubit.
  (\(tests, rest) -> do
    test_out <- qinit False
    test_out <- qnot test_out `controlled` tests
    return ((tests, rest), test_out))
-- | Test whether a given 'IdealRedQ' is really reduced (as it should
-- always be, if code is written correctly).  An ideal \<1,/a/,/a/,/b/>
-- is reduced iff /b/ ≥ 0 and /b/ + √Δ > 2/a/ ([Jozsa 2003], Prop. 20).
q_is_really_reduced :: IdealRedQ -> Circ (IdealRedQ,Qubit)
q_is_really_reduced = box "is_really_reduced" $ \qii ->
  let bigD = bigD_of_IdealRed qii in
  with_computed_fun qii
  -- Compute the two sub-tests reversibly (cf. 'q_is_reduced').
  (\(IdealRed bigD qa qb) -> do
    (qb,test1) <- q_ge_param qb 0
    (qa, q2a) <- q_mult_param 2 qa
    qx <- q_sub_param_in_place (ceiling $ sqrt $ fromIntegral bigD) q2a
    (qb, qx, test2) <- q_gt qb qx
    return ([test1,test2], (qa,qb,qx)))
  -- AND them into the output qubit.
  (\(tests, rest) -> do
    test_out <- qinit False
    test_out <- qnot test_out `controlled` tests
    return ((tests, rest), test_out))
-- | Coerce an 'IdealRedQ' to an 'IdealQ', initializing the extra
-- components appropriately (/m/ = 1, /l/ a copy of /a/).
q_forget_reduced :: IdealRedQ -> Circ IdealQ
q_forget_reduced = box "forget_reduced" $ \(IdealRed bigD a b) -> do
  let a_bits = qulist_of_qdint_bh a
      n = length a_bits
  m <- qinit (intm n 1)
  (a,l) <- qc_copy_fun a
  return (Ideal bigD m l a b)

-- | Coerce an 'IdealQ' to an 'IdealRedQ', assertively terminating the
-- extra components.
q_assert_reduced :: IdealQ -> Circ IdealRedQ
q_assert_reduced = box "assert_reduced" $ \x@(Ideal bigD m l a b) -> do
  -- Run 'q_forget_reduced' in reverse to uncompute /m/ and /l/.
  x_red <- reverse_generic q_forget_reduced (IdealRed bigD a b) x
  q_assert_really_reduced x_red

-- | Assert that an 'IdealRedQ' is really reduced, by computing the test
-- qubit and assertively terminating it as True.
-- Compare 'assert_reduced', 'q_is_really_reduced' in
-- "Algorithms.CL.RegulatorQuantum", and [Jozsa 2003] Prop. 20.
q_assert_really_reduced :: IdealRedQ -> Circ IdealRedQ
q_assert_really_reduced = box "assert_really_reduced" $ \ii -> do
  (ii,test) <- q_is_really_reduced ii
  qterm True test
  return ii
-- $ Given Δ, how much space should be allocated for the coefficients of
-- ideals?  Most of these bounds are currently missing or uncertain, as
-- documented below.  Note these bounds are intended to be sufficient for
-- the calculations occurring in this algorithm, /not/ for representing
-- arbitrary ideals.

-- | Size of integers for the coefficients /a/, /b/ of ideals.
-- Note: can we bound this more carefully?  Intermediate calculations may
-- exceed Δ, so a few bits of padding are added; for now this padding is
-- a seat-of-the-pants estimate.
length_for_ab :: CLIntP -> Int
length_for_ab bigD = 3 + (ceiling $ logBase 2 $ fromIntegral bigD)

-- | Given Δ, return the size of integers to be used for the coefficients
-- /m/, /l/ of general ideals.
-- TODO: bound this!  Bounds on the values of /m/ and /l/ that will
-- appear are not discussed in the cited references; for now we reuse the
-- /a/,/b/ length — a dummy bound, quite possibly insufficient in general.
length_for_ml :: CLIntP -> Int
length_for_ml = length_for_ab

-- | Given Δ, return the precision /n/ = log[sub 2]/N/ to be used.
-- Taken to ensure 1\//N/ < 3/(32 Δ log Δ).  (Cf. [Jozsa 2003],
-- Prop. 36 (iii).)
n_of_bigD :: (Integral int) => CLIntP -> int
n_of_bigD bigD =
  ceiling $ logBase 2 $ 32 * (fromIntegral bigD) * (log $ fromIntegral bigD) / 3

-- | Given Δ, /n/, /l/ (as for 'fN', 'q_fN'), return the precision
-- required.  TODO: bound this more carefully; for now a back-of-the-
-- envelope estimate, sufficient and /O/(correct), but almost certainly
-- larger than necessary.
precision_for_fN :: CLIntP -> Int -> Int -> Int
precision_for_fN _ n l = 2 * (n + l)

-- | Pad all integer registers of an 'Ideal' to a common fixed length.
fix_sizes_Ideal :: Ideal -> Ideal
fix_sizes_Ideal (Ideal bigD m l a b) = (Ideal bigD (f m) (f l) (f a) (f b))
  where
    f x = intm_promote x (intm n 0) "set_sizes_Ideal: lengths already fixed incompatibly"
    n = max (length_for_ml bigD) (length_for_ab bigD)

-- | Pad all integer registers of an 'IdealRed' to a common fixed length.
fix_sizes_IdealRed :: IdealRed -> IdealRed
fix_sizes_IdealRed (IdealRed bigD a b) = (IdealRed bigD (f a) (f b))
  where
    f x = intm_promote x (intm n 0) "set_sizes_Ideal: lengths already fixed incompatibly"
    n = max (length_for_ml bigD) (length_for_ab bigD)
-- ac6d473d5212939f5556144af00a66d5663ed059cd8509914189eece11895c81 | iu-parfunc/HSBencher | Fusion.hs
{-# LANGUAGE NamedFieldPuns, RecordWildCards, ScopedTypeVariables, CPP, BangPatterns #-}
{-# LANGUAGE TupleSections, DeriveDataTypeable #-}
{-# LANGUAGE TypeFamilies #-}
-- | Google Fusion Table upload of benchmark data.
--
-- This module must be used in conjunction with the main "hsbencher" package,
-- e.g. "import HSBencher".
module HSBencher.Backend.Fusion
( -- * The plugin itself, what you probably want
defaultFusionPlugin
-- * Creating and finding tables
, getTableId, findTableId, makeTable, ensureColumns
-- * Details and configuration options.
, FusionConfig(..), stdRetry
, fusionSchema, resultToTuple
-- * Prepping and uploading tuples (rows)
, PreppedTuple, Schema
, authenticate, prepBenchResult, uploadRows
, uploadBenchResult
, FusionPlug(), FusionCmdLnFlag(..),
)
where
import Control.Monad.Reader
import Control.Concurrent (threadDelay)
import qualified Control.Exception as E
import Data.Maybe (fromJust, fromMaybe)
import Data.Dynamic
import Data.Default (Default(..))
import qualified Data.Set as S
import qualified Data.Map as M
import qualified Data.List as L
import qualified Data.ByteString.Char8 as B
import Data.Time.Clock
import Data.Time.Format ()
import Network.Google.OAuth2 (getCachedTokens, refreshTokens, OAuth2Client(..), OAuth2Tokens(..))
import Network.Google.FusionTables (createTable, createColumn, listTables, listColumns,
bulkImportRows,
TableId, CellType(..), TableMetadata(..), ColumnMetadata(..))
import Network.HTTP.Conduit (HttpException)
import HSBencher.Types
import HSBencher.Internal.Logging (log)
import HSBencher.Internal.Fusion
import Prelude hiding (log)
import System.Console.GetOpt (OptDescr(Option), ArgDescr(..))
-- | A default plugin.  This binding provides a future-proof way to get
-- a default instance of the plugin, in the eventuality that more
-- configuration options are added in the future.
defaultFusionPlugin :: FusionPlug
defaultFusionPlugin = FusionPlug

-- | This is the same as 'defaultFusionPlugin'.
instance Default FusionPlug where
  def = defaultFusionPlugin
----------------------------------------------------------------------------------------------------
-- #ifdef FUSION_TABLES
-- import Network.Google.OAuth2 (getCachedTokens, refreshTokens, OAuth2Client(..), OAuth2Tokens(..))
-- import Network.Google.FusionTables (createTable, createColumn, listColumns, insertRows,
--                                     TableId, CellType(..), TableMetadata(..))
-- import HSBencher.Fusion (getTableId, fusionPlugin)
-- #endif
----------------------------------------------------------------------------------------------------
-- defaultColumns =
-- ["Program","Args","Threads","Sched","Threads",
--  "MinTime","MedianTime","MaxTime", "MinTime_Prod","MedianTime_Prod","MaxTime_Prod"]
-- | The standard retry behavior when receiving HTTP network errors.  Note that this
-- can retry for quite a long while so it is only to be used from batch applications.
stdRetry :: String -> OAuth2Client -> OAuth2Tokens -> IO a ->
            BenchM (Maybe a)
stdRetry msg client toks action = do
  conf <- ask
  -- The hook logs the failure and attempts a (retried) token refresh
  -- before the next attempt of 'action'.
  let retryHook num exn = runReaderT (do
        datetime <- lift$ getDateTime
        log$ " [fusiontable] Retry #"++show num++" during <"++msg++"> due to HTTPException: " ++ show exn
        log$ " [fusiontable] ("++datetime++") Retrying, but first, attempt token refresh..."
        -- QUESTION: should we retry the refresh itself, it is NOT inside the exception handler.
        -- BUGFIX: a stray "liftIO$ client toks" applied the OAuth2Client
        -- record as a function (a type error, apparently a leftover of an
        -- edit); removed in favor of the retried refresh below.
        -- liftIO$ retryIORequest (refreshTokens client toks) (\_ -> return ()) [1,1]
        stdRetry "refresh tokens" client toks (refreshTokens client toks)
        return ()
        ) conf
  -- Escalating delays (seconds) followed by a long tail of 5s retries.
  liftIO$ retryIORequest action retryHook $
    [1,2,4,4,4,4,4,4,8,16] --- 32,64,
    ++ replicate 30 5
-- | Render the current UTC time as a String, via the 'Show' instance of
-- 'UTCTime' (e.g. "2014-01-01 12:00:00 UTC").
getDateTime :: IO String
getDateTime = do
  utc <- getCurrentTime
  -- Cleanup: the unused 'day = utctDay utc' binding was removed; we show
  -- the full UTCTime directly.
  return $ show utc
-- | Takes an idempotent IO action that includes a network request.  Catches
-- `HttpException`s and tries again a certain number of times.  The second argument
-- is a callback to invoke every time a retry occurs.
--
-- Takes a list of *seconds* to wait between retries.  A null list means no retries,
-- an infinite list will retry indefinitely.  The user can choose whatever temporal
-- pattern they desire (e.g. exponential backoff).
--
-- Once the retry list runs out, if it has not been successful, this function returns Nothing.
retryIORequest :: IO a -> (Int -> HttpException -> IO ()) -> [Double] -> IO (Maybe a)
retryIORequest req retryHook times = loop 0 times
  where
    loop _ [] = return Nothing
    loop !num (delay:tl) =
      E.catch (fmap Just req) $ \ (exn::HttpException) -> do
        retryHook num exn
        -- BUGFIX: the sleep had been lost (only its "Microseconds" comment
        -- survived), so 'delay' was never honored; wait 'delay' seconds.
        threadDelay (round $ delay * 1000 * 1000) -- Microseconds
        loop (num+1) tl
-- | Like 'fromJust', but failing with a caller-supplied error message.
fromJustErr :: String -> Maybe t -> t
fromJustErr msg = maybe (error msg) id
-- TODO: Should probably move these routines into some kind of
-- "Authenticated" monad which would provide a Reader for the auth
-- info.

-- | Get the table ID that has been cached on disk, or find the table in
-- the user's Google Drive, or create a new table if needed.
--
-- This is a simple shorthand for combining findTableId / makeTable / ensureColumns.
--
-- It adds columns if necessary and returns the permutation of columns
-- found server side.  It assumes the DEFAULT core table Schema and
-- will not work for creating CUSTOM columns on the server side.
-- Simply drop down to using the three finer grained routines if you want that.
getTableId :: OAuth2Client -> String -> BenchM (TableId, [String])
getTableId auth tablename = do
  x <- findTableId auth tablename
  tid <- case x of
           Nothing -> makeTable auth tablename
           Just iD -> return iD
  -- FIXME: this really should not mutate columns... should be deprecated.
  order <- ensureColumns auth tid fusionSchema
  return (tid, order)
-- | Look for a table by name, returning its ID if it is present.
findTableId :: OAuth2Client -> String -> BenchM (Maybe TableId)
findTableId auth tablename = do
  log$ " [fusiontable] Fetching access tokens, client ID/secret: "++show (clientId auth, clientSecret auth)
  toks <- liftIO$ getCachedTokens auth
  log$ " [fusiontable] Retrieved: "++show toks
  let atok = B.pack $ accessToken toks
  allTables <- fmap (fromJustErr "[fusiontable] getTableId, API call to listTables failed.") $
               stdRetry "listTables" auth toks $ listTables atok
  log$ " [fusiontable] Retrieved metadata on "++show (length allTables)++" tables"
  -- Select by exact name match.
  -- NOTE(review): this 'case' is non-exhaustive — two or more tables
  -- with the same name cause a pattern-match failure at runtime.
  case filter (\ t -> tab_name t == tablename) allTables of
    [] -> do log$ " [fusiontable] No table with name "++show tablename
             return Nothing
    [t] -> do let tid = (tab_tableId t)
              log$ " [fusiontable] Found one table with name "++show tablename ++", ID: "++show tid
              return (Just tid)
-- | Make a new table, returning its ID.
-- In the future this may provide failure recovery.  But for now it
-- simply produces an exception if anything goes wrong.
-- And in particular there is no way to deal with multiple clients
-- racing to perform a `makeTable` with the same name.
makeTable :: OAuth2Client -> String -> BenchM TableId
makeTable auth tablename = do
  toks <- liftIO$ getCachedTokens auth
  let atok = B.pack $ accessToken toks
  log$ " [fusiontable] No table with name "++show tablename ++" found, creating..."
  -- Partial match: fails loudly if the (retried) creation returns Nothing.
  Just TableMetadata{tab_tableId} <- stdRetry "createTable" auth toks $
                                     createTable atok tablename fusionSchema
  log$ " [fusiontable] Table created with ID "++show tab_tableId
  -- TODO: IF it exists but doesn't have all the columns, then add the necessary columns.
  return tab_tableId
-- | Make sure that a minimal set of columns are present.  This
-- routine creates columns that are missing and returns the
-- permutation of columns found on the server.
ensureColumns :: OAuth2Client -> TableId -> [(String, CellType)] -> BenchM [String]
ensureColumns auth tid ourSchema = do
  log$ " [fusiontable] ensureColumns: Ensuring schema: "++show ourSchema
  toks <- liftIO$ getCachedTokens auth
  log$ " [fusiontable] ensureColumns: Retrieved: "++show toks
  let ourColNames = map fst ourSchema
  let atok = B.pack $ accessToken toks
  let ourSet = S.fromList ourColNames
  log$ " [fusiontable] ensureColumns: Checking columns... "
  targetColNames <- fmap (map col_name) $ liftIO$ listColumns atok tid
  -- Diff our desired column set against what the server reports.
  let targetSet = S.fromList targetColNames
      missing = S.difference ourSet targetSet
      misslist = L.filter (`S.member` missing) ourColNames -- Keep the order.
      extra = S.difference targetSet ourSet
  unless (targetColNames == ourColNames) $
    log$ "WARNING: HSBencher upload schema (1) did not match server side schema (2):\n (1) "++
         show ourSchema ++"\n (2) " ++ show targetColNames
         ++ "\n HSBencher will try to make do..."
  -- Create any missing columns (always as STRING).
  unless (S.null missing) $ do
    log$ "WARNING: These fields are missing server-side, creating them: "++show misslist
    forM_ misslist $ \ colname -> do
      Just ColumnMetadata{col_name, col_columnId} <- stdRetry "createColumn" auth toks $
                                                     createColumn atok tid (colname, STRING)
      log$ "  -> Created column with name,id: "++show (col_name, col_columnId)
  unless (S.null extra) $ do
    log$ "WARNING: The fusion table has extra fields that HSBencher does not know about: "++
         show (S.toList extra)
    log$ "  Expect null-string entries in these fields! "
  -- For now we ASSUME that new columns are added to the end:
  -- TODO: We could do another read from the list of columns to confirm.
  return (targetColNames ++ misslist)
{-# DEPRECATED uploadBenchResult "this is subsumed by the Plugin interface and uploadRows" #-}
-- | Push the results from a single benchmark to the server.
uploadBenchResult :: BenchmarkResult -> BenchM ()
uploadBenchResult br = do
  (_toks,auth,tid) <- authenticate
  let schema = benchmarkResultToSchema br
  -- Make sure the server has every column of this result (incl. custom).
  order <- ensureColumns auth tid schema
  let row = prepBenchResult order br
  flg <- uploadRows [row]
  unless flg $ error "uploadBenchResult: failed to upload rows"
-- | Upload raw tuples that are already in the format expected on the server.
-- Returns True if the upload succeeded.
uploadRows :: [PreppedTuple] -> BenchM Bool
uploadRows rows = do
  (toks,auth,tid) <- authenticate
  ---------------------
  let colss = map (map fst) rows
      dats = map (map snd) rows
  case colss of
    [] -> return True
    -- All rows must share one schema; bulk import sends it once.
    (schema:rst) -> do
      unless (all (== schema) rst) $
        error ("uploadRows: not all Schemas matched: "++ show (schema, filter (/= schema) rst))
      -- It's easy to blow the URL size; we need the bulk import version.
      -- stdRetry "insertRows" authclient toks $ insertRows
      res <- stdRetry "bulkImportRows" auth toks $ bulkImportRows
             (B.pack$ accessToken toks) tid schema dats
      case res of
        Just _ -> do log$ " [fusiontable] Done uploading, run ID "++ (fromJust$ lookup "RUNID" (head rows))
                          ++ " date "++ (fromJust$ lookup "DATETIME" (head rows))
                     return True
        Nothing -> do log$ " [fusiontable] WARNING: Upload failed the maximum number of times.  Continuing with benchmarks anyway"
                      return False
-- | Check cached tokens, authenticate with server if necessary, and
-- return a bundle of the commonly needed information to speak to the
-- Fusion Table API.
authenticate :: BenchM (OAuth2Tokens, OAuth2Client, TableId)
authenticate = do
  conf <- ask
  let fusionConfig = getMyConf FusionPlug conf
  -- fusionConfig <- error "FINISHME - acquire config dynamically"
  let FusionConfig{fusionClientID, fusionClientSecret, fusionTableID, serverColumns} = fusionConfig
  -- NOTE(review): partial match; crashes if client ID/secret are unset.
  let (Just cid, Just sec) = (fusionClientID, fusionClientSecret)
      auth = OAuth2Client { clientId = cid, clientSecret = sec }
  toks <- liftIO$ getCachedTokens auth
  -- NOTE(review): 'atok' is bound but not used in this function.
  let atok = B.pack $ accessToken toks
  let tid = fromJust fusionTableID
  return (toks,auth,tid)

-- | Tuples in the format expected on the server.
type PreppedTuple = [(String,String)]

-- | The ordered set of column names that form a schema.
type Schema = [String] -- TODO: include types.
{-
NOTE(review): garbled remnant of a commented-out draft of 'ensureSchema';
a readable copy follows immediately below (also commented out).

-- | Ensure that a Schema is available on the server, creating columns
-- if necessary.
ensureSchema :: Schema -> BenchM ()
ensureSchema ourSchema = do
  let (Just cid, Just sec) = (fusionClientID, fusionClientSecret)
      authclient = OAuth2Client { clientId = cid, clientSecret = sec }
  -- FIXME: it's EXTREMELY inefficient to authenticate on every tuple upload:
  toks <- liftIO$ getCachedTokens authclient
  let atok = B.pack $ accessToken toks
  let tid = fromJust fusionTableID
  -- ////// Enable working with Custom tags
  let -- ourSchema = map fst $ benchmarkResultToSchema br
      ourSet = S.fromList ourSchema
  if null _CUSTOM
    then log$ " [fusiontable] Computed schema, no custom fields."
    else log$ " [fusiontable] Computed schema, including these custom fields: " ++ show _CUSTOM
  targetSchema <- fmap (map col_name) $ liftIO$ listColumns atok tid
  let targetSet = S.fromList targetSchema
      missing = S.difference ourSet targetSet
      misslist = L.filter (`S.member` missing) ourSchema
  log$ " [fusiontable] There were " ++ show (length misslist) ++ " columns missing"
  unless (S.null missing) $ do
    forM_ misslist $ \ colname -> do
      stdRetry "createColumn" authclient toks $
        createColumn atok tid (colname, STRING)
      -- Create with the correct type !? Above just states STRING.
  --- ////// END
-}
{-
-- | Ensure that a Schema is available on the server, creating columns
-- if necessary.
ensureSchema :: Schema -> BenchM ()
ensureSchema ourSchema = do
  let (Just cid, Just sec) = (fusionClientID, fusionClientSecret)
      authclient = OAuth2Client { clientId = cid, clientSecret = sec }
  -- FIXME: it's EXTREMELY inefficient to authenticate on every tuple upload:
  toks <- liftIO$ getCachedTokens authclient
  let atok = B.pack $ accessToken toks
  let tid = fromJust fusionTableID
  -- ////// Enable working with Custom tags
  let -- ourSchema = map fst $ benchmarkResultToSchema br
      ourSet = S.fromList ourSchema
  if null _CUSTOM
    then log$ " [fusiontable] Computed schema, no custom fields."
    else log$ " [fusiontable] Computed schema, including these custom fields: " ++ show _CUSTOM
  targetSchema <- fmap (map col_name) $ liftIO$ listColumns atok tid
  let targetSet = S.fromList targetSchema
      missing = S.difference ourSet targetSet
      misslist = L.filter (`S.member` missing) ourSchema
  log$ " [fusiontable] There were " ++ show (length misslist) ++ " columns missing"
  unless (S.null missing) $ do
    forM_ misslist $ \ colname -> do
      stdRetry "createColumn" authclient toks $
        createColumn atok tid (colname, STRING)
      -- Create with the correct type !? Above just states STRING.
  --- ////// END
-}
-- | Prepare a Benchmark result for upload, matching the given Schema
-- in order and contents, which may mean adding empty fields.
--
-- This function requires that the server Schema already contain all
-- columns occurring in the benchmark result; it errors otherwise.
prepBenchResult :: Schema -> BenchmarkResult -> PreppedTuple
prepBenchResult serverColumns br@BenchmarkResult{..} =
  -- BUGFIX: two stretches of dead, garbled commented-out code (an older
  -- monadic version that authenticated per call, plus per-row logging)
  -- had lost their comment delimiters, leaving dangling "-}" fragments
  -- that broke parsing; they are removed here (see version control
  -- history if needed).
  let
      ourData = M.fromList $ resultToTuple br
      ourCols = M.keysSet ourData
      targetSet = S.fromList serverColumns
      missing = S.difference ourCols targetSet
      -- Any field HSBencher doesn't know about just gets an empty string:
      tuple = [ (key, fromMaybe "" (M.lookup key ourData))
              | key <- serverColumns ]
  in if S.null missing
     then tuple
     else error $ "prepBenchResult: benchmark result contained columns absent on server: "++show missing
-- | A representation used for creating tables.  Must be isomorphic to
-- `BenchmarkResult`.  This could perhaps be generated automatically
-- (e.g. from a Generic instance, or even by creating a default
-- benchmarkResult and feeding it to resultToTuple).
--
-- Note, order is important here, because this is the preferred order we'd like to
-- have it in the Fusion table.
--
fusionSchema :: [(String, CellType)]
fusionSchema =
  [ ("PROGNAME",STRING)
  , ("VARIANT",STRING)
  , ("ARGS",STRING)
  , ("HOSTNAME",STRING)
  , ("MINTIME", NUMBER)
  , ("MEDIANTIME", NUMBER)
  , ("MAXTIME", NUMBER)
  , ("THREADS",NUMBER)
  , ("RETRIES",NUMBER)
  -- The run is identified by hostname_secondsSinceEpoch:
  , ("RUNID",STRING)
  , ("CI_BUILD_ID",STRING)
  , ("DATETIME",DATETIME)
  , ("MINTIME_PRODUCTIVITY", NUMBER)
  , ("MEDIANTIME_PRODUCTIVITY", NUMBER)
  , ("MAXTIME_PRODUCTIVITY", NUMBER)
  , ("ALLTIMES", STRING)
  , ("TRIALS", NUMBER)
  , ("COMPILER",STRING)
  , ("COMPILE_FLAGS",STRING)
  , ("RUNTIME_FLAGS",STRING)
  , ("ENV_VARS",STRING)
  , ("BENCH_VERSION", STRING)
  , ("BENCH_FILE", STRING)
  -- , ("OS",STRING)
  , ("UNAME",STRING)
  , ("PROCESSOR",STRING)
  , ("TOPOLOGY",STRING)
  , ("GIT_BRANCH",STRING)
  , ("GIT_HASH",STRING)
  , ("GIT_DEPTH",NUMBER)
  , ("WHO",STRING)
  , ("ETC_ISSUE",STRING)
  , ("LSPCI",STRING)
  , ("FULL_LOG",STRING)
  -- New fields: [2013.12.01]
  , ("MEDIANTIME_ALLOCRATE", STRING)
  , ("MEDIANTIME_MEMFOOTPRINT", STRING)
  -- New field: [2014.02.19]
  -- NOTE(review): the column entry this comment described ("in order of
  -- trials, like ALLTIMES") appears to have been lost during extraction;
  -- confirm against upstream before relying on this schema being complete.
  ]
-- FIXME: at least test that resultToTuple returns a list the same
-- length as fusionSchema.
-- | The schema for a particular result: the core 'fusionSchema' plus one
-- column per custom field carried in the result's _CUSTOM list.
benchmarkResultToSchema :: BenchmarkResult -> [(String, CellType)]
benchmarkResultToSchema bm = fusionSchema ++ map custom (_CUSTOM bm)
  where
    -- Numeric custom results become NUMBER columns; strings become STRING.
    custom (tag, IntResult _) = (tag,NUMBER)
    custom (tag, DoubleResult _) = (tag,NUMBER)
    custom (tag, StringResult _) = (tag, STRING)
-- | The type of Fusion table plugins.  Currently this is a singleton type; there is
-- really only one fusion plugin.
data FusionPlug = FusionPlug
  deriving (Eq,Show,Ord,Read)
-- Plugin wiring: name, CLI flags, initialization (table discovery or
-- creation plus auth check), and row upload.
instance Plugin FusionPlug where
  type PlugConf FusionPlug = FusionConfig
  type PlugFlag FusionPlug = FusionCmdLnFlag

  -- | Better be globally unique!  Careful.
  plugName _ = "fusion"
  -- plugName _ = "Google_FusionTable_Backend"
  plugCmdOpts _ = fusion_cli_options

  plugUploadRow _ cfg row = runReaderT (uploadBenchResult row) cfg

  -- Resolve the table ID (from config, or by name lookup/creation) and
  -- verify that cached auth tokens can be obtained.
  plugInitialize p gconf = do
   putStrLn " [fusiontable] Fusion table plugin initializing.. First, find config."
   gc2 <- let fc@FusionConfig{fusionClientID, fusionClientSecret, fusionTableID} =
                getMyConf p gconf in
          case (benchsetName gconf, fusionTableID) of
            (Nothing,Nothing) -> error "No way to find which fusion table to use! No name given and no explicit table ID."
            (_, Just _tid) -> return gconf
            (Just name,_) -> do
              case (fusionClientID, fusionClientSecret) of
                (Just cid, Just sec ) -> do
                  let auth = OAuth2Client { clientId=cid, clientSecret=sec }
                  (tid,cols) <- runReaderT (getTableId auth name) gconf
                  putStrLn$ " [fusiontable] -> Resolved name "++show name++" to table ID " ++show tid
                  return $! setMyConf p fc{ fusionTableID= Just tid, serverColumns= cols } gconf
                (_,_) -> error "When --fusion-upload is activated --clientid and --clientsecret are required (or equiv ENV vars)"
   let fc2 = getMyConf p gc2
   -- NOTE(review): partial match; fails if ID/secret are still unset here.
   let (Just cid, Just sec) = (fusionClientID fc2, fusionClientSecret fc2)
       authclient = OAuth2Client { clientId = cid, clientSecret = sec }
   putStrLn " [fusiontable] Second, lets retrieved cached auth tokens on the file system..."
   _toks <- getCachedTokens authclient
   -- TEMP: This should become another command line flag: --fusion-test to list the tables.
{-
   putStrLn " [fusiontable] Next, to test our connections, attempt to list tables:"
   strs <- fmap (map tab_name) (listTables (B.pack (accessToken toks)))
   putStrLn$" [fusiontable] All of users tables:\n"++ unlines (map ("  "++) strs)
-}
   return gc2

  -- Fold command-line flags into the plugin configuration.
  foldFlags _p flgs cnf0 =
    foldr ($) cnf0 (map doFlag flgs)
   where
     -- TODO: Move this one to the global config
     doFlag FusionTest r = r
     doFlag (ClientID cid) r = r { fusionClientID = Just cid }
     doFlag (ClientSecret s) r = r { fusionClientSecret = Just s }
     doFlag (FusionTables m) r =
       -- let r2 = r { doFusionUpload = True } in
       case m of
         Just tid -> r { fusionTableID = Just tid }
         Nothing -> r
-- | All the command line options understood by this plugin.
fusion_cli_options :: (String, [OptDescr FusionCmdLnFlag])
fusion_cli_options =
("Fusion Table Options:",
[ Option [] ["fusion-upload"] (OptArg FusionTables "TABLEID")
"enable fusion table upload. Optionally set TABLEID; otherwise create/discover it."
, Option [] ["clientid"] (ReqArg ClientID "ID")
("Use (and cache auth tokens for) Google client ID\n"++
"Alternatively set by env var HSBENCHER_GOOGLE_CLIENTID")
, Option [] ["clientsecret"] (ReqArg ClientSecret "STR")
("Use Google client secret\n"++
"Alternatively set by env var HSBENCHER_GOOGLE_CLIENTSECRET")
, Option [] ["fusion-test"] (NoArg FusionTest) "Test authentication and list tables if possible."
])
| command line options provided by the user initiating benchmarking .
data FusionCmdLnFlag =
FusionTables (Maybe TableId)
| ClientID String
| ClientSecret String
| FusionTest
deriving (Show,Read,Ord,Eq, Typeable)
| null | https://raw.githubusercontent.com/iu-parfunc/HSBencher/76782b75b3a4b276c45a2c159e0b4cb6bd8a2360/hsbencher-fusion/HSBencher/Backend/Fusion.hs | haskell | | Google Fusion Table upload of benchmark data.
This module must be used in conjunction with the main "hsbencher" package,
* The plugin itself, what you probably want
* Creating and finding tables
* Details and configuration options.
* Prepping and uploading tuples (rows)
| A default plugin. This binding provides future-proof way to get
a default instance of the plugin, in the eventuality that more
configuration options are added in the future.
| This is the same as defaultFusionPlugin
--------------------------------------------------------------------------------------------------
#ifdef FUSION_TABLES
#endif
--------------------------------------------------------------------------------------------------
defaultColumns =
["Program","Args","Threads","Sched","Threads",
| The standard retry behavior when receiving HTTP network errors. Note that this
can retry for quite a long while so it is only to be usedfrom batch applications.
QUESTION: should we retry the refresh itself, it is NOT inside the exception handler.
- 32,64,
secs = utctDayTime utc
return $ show day ++" "++show secs
| Takes an idempotent IO action that includes a network request. Catches
is a callback to invoke every time a retry occurs.
an infinite list will retry indefinitely. The user can choose whatever temporal
pattern they desire (e.g. exponential backoff).
Once the retry list runs out, if it has not been successful, this function returns Nothing.
TODO: Should probably move these routines into some kind of
"Authenticated" monad which would provide a Reader for the auth
info.
| Get the table ID that has been cached on disk, or find the the table in the users
It adds columns if necessary and returns the permutation of columns
will not work for creating CUSTOM columns on the server side.
FIXME: this really should not mutate columns... should be deprecated.
| Look for a table by name, returning its ID if it is present.
| Make a new table, returning its ID.
In the future this may provide failure recovery. But for now it
simply produces an exception if anything goes wrong.
And in particular there is no way to deal with multiple clients
racing to perform a `makeTable` with the same name.
TODO: IF it exists but doesn't have all the columns, then add the necessary columns.
| Make sure that a minimal set of columns are present. This
routine creates columns that are missing and returns the
permutation of columns found on the server.
Keep the order.
For now we ASSUME that new columns are added to the end:
TODO: We could do another read from the list of columns to confirm.
# DEPRECATED uploadBenchResult "this is subsumed by the Plugin interface and uploadRows" #
| Push the results from a single benchmark to the server.
| Upload raw tuples that are already in the format expected on the server.
Returns True if the upload succeeded.
-------------------
It's easy to blow the URL size; we need the bulk import version.
stdRetry "insertRows" authclient toks $ insertRows
| Check cached tokens, authenticate with server if necessary, and
return a bundle of the commonly needed information to speak to the
Fusion Table API.
| Tuples in the format expected on the server.
| The ordered set of column names that form a schema.
TODO: include types.
| Ensure that a Schema is available on the server , creating columns
if necessary .
FIXME : it 's EXTREMELY inefficient to authenticate on every tuple upload :
////// Enable working with Custom tags
ourSchema = map fst $ benchmarkResultToSchema br
Create with the correct type ! ? Above just states STRING .
- ////// END
| Ensure that a Schema is available on the server, creating columns
if necessary.
FIXME: it's EXTREMELY inefficient to authenticate on every tuple upload:
////// Enable working with Custom tags
ourSchema = map fst $ benchmarkResultToSchema br
Create with the correct type !? Above just states STRING.
- ////// END
| Prepare a Benchmark result for upload, matching the given Schema
in order and contents, which may mean adding empty fields.
in the benchmark result.
fusionConfig < - error " FINISHME - acquire config dynamically "
FIXME : it 's EXTREMELY inefficient to authenticate on every tuple upload :
fusionConfig <- error "FINISHME - acquire config dynamically"
FIXME: it's EXTREMELY inefficient to authenticate on every tuple upload:
| A representaton used for creating tables. Must be isomorphic to
benchmarkResult and feeding it to resultToTuple).
Note, order is important here, because this is the preferred order we'd like to
have it in the Fusion table.
The run is identified by hostname_secondsSinceEpoch:
, ("OS",STRING)
New fields: [2013.12.01]
FIMXE: at least test that resultToTuple returns lits the same
| The type of Fusion table plugins. Currently this is a singleton type; there is
| Better be globally unique! Careful.
plugName _ = "Google_FusionTable_Backend"
TEMP: This should become another command line flag: --fusion-list to list the tables.
TODO: Move this one to the global config
let r2 = r { doFusionUpload = True } in
| All the command line options understood by this plugin. | # LANGUAGE NamedFieldPuns , RecordWildCards , ScopedTypeVariables , CPP , BangPatterns #
# LANGUAGE TupleSections , DeriveDataTypeable #
# LANGUAGE TypeFamilies #
e.g. " import HSBencher " .
module HSBencher.Backend.Fusion
defaultFusionPlugin
, getTableId, findTableId, makeTable, ensureColumns
, FusionConfig(..), stdRetry
, fusionSchema, resultToTuple
, PreppedTuple, Schema
, authenticate, prepBenchResult, uploadRows
, uploadBenchResult
, FusionPlug(), FusionCmdLnFlag(..),
)
where
import Control.Monad.Reader
import Control.Concurrent (threadDelay)
import qualified Control.Exception as E
import Data.Maybe (fromJust, fromMaybe)
import Data.Dynamic
import Data.Default (Default(..))
import qualified Data.Set as S
import qualified Data.Map as M
import qualified Data.List as L
import qualified Data.ByteString.Char8 as B
import Data.Time.Clock
import Data.Time.Format ()
import Network.Google.OAuth2 (getCachedTokens, refreshTokens, OAuth2Client(..), OAuth2Tokens(..))
import Network.Google.FusionTables (createTable, createColumn, listTables, listColumns,
bulkImportRows,
TableId, CellType(..), TableMetadata(..), ColumnMetadata(..))
import Network.HTTP.Conduit (HttpException)
import HSBencher.Types
import HSBencher.Internal.Logging (log)
import HSBencher.Internal.Fusion
import Prelude hiding (log)
import System.Console.GetOpt (OptDescr(Option), ArgDescr(..))
defaultFusionPlugin :: FusionPlug
defaultFusionPlugin = FusionPlug
instance Default FusionPlug where
def = defaultFusionPlugin
import Network . Google . OAuth2 ( getCachedTokens , refreshTokens , OAuth2Client ( .. ) , OAuth2Tokens ( .. ) )
import Network . Google . FusionTables ( createTable , , listColumns , insertRows ,
TableId , CellType ( .. ) , TableMetadata ( .. ) )
import HSBencher . Fusion ( getTableId , fusionPlugin )
" MinTime","MedianTime","MaxTime " , " MinTime_Prod","MedianTime_Prod","MaxTime_Prod " ]
stdRetry :: String -> OAuth2Client -> OAuth2Tokens -> IO a ->
BenchM (Maybe a)
stdRetry msg client toks action = do
conf <- ask
let retryHook num exn = runReaderT (do
datetime <- lift$ getDateTime
log$ " [fusiontable] Retry #"++show num++" during <"++msg++"> due to HTTPException: " ++ show exn
log$ " [fusiontable] ("++datetime++") Retrying, but first, attempt token refresh..."
liftIO$ client toks
liftIO$ retryIORequest ( refreshTokens client toks ) ( \ _ - > return ( ) ) [ 1,1 ]
stdRetry "refresh tokens" client toks (refreshTokens client toks)
return ()
) conf
liftIO$ retryIORequest action retryHook $
++ replicate 30 5
getDateTime :: IO String
getDateTime = do
utc <- getCurrentTime
let day = utctDay utc
return $ show utc
` HttpException`s and tries a gain a certain number of times . The second argument
Takes a list of * seconds * to wait between retries . A null list means no retries ,
retryIORequest :: IO a -> (Int -> HttpException -> IO ()) -> [Double] -> IO (Maybe a)
retryIORequest req retryHook times = loop 0 times
where
loop _ [] = return Nothing
loop !num (delay:tl) =
E.catch (fmap Just req) $ \ (exn::HttpException) -> do
retryHook num exn
Microseconds
loop (num+1) tl
fromJustErr :: String -> Maybe t -> t
fromJustErr msg Nothing = error msg
fromJustErr _ (Just x) = x
Google Drive , or create a new table if needed .
This is a simple shorthand for combining findTableId / makeTable / ensureColumns .
found server side . It assumes the DEFAULT core table Schema and
Simple drop down to using the three finer grained routines if you want that .
getTableId :: OAuth2Client -> String -> BenchM (TableId, [String])
getTableId auth tablename = do
x <- findTableId auth tablename
tid <- case x of
Nothing -> makeTable auth tablename
Just iD -> return iD
order <- ensureColumns auth tid fusionSchema
return (tid, order)
findTableId :: OAuth2Client -> String -> BenchM (Maybe TableId)
findTableId auth tablename = do
log$ " [fusiontable] Fetching access tokens, client ID/secret: "++show (clientId auth, clientSecret auth)
toks <- liftIO$ getCachedTokens auth
log$ " [fusiontable] Retrieved: "++show toks
let atok = B.pack $ accessToken toks
allTables <- fmap (fromJustErr "[fusiontable] getTableId, API call to listTables failed.") $
stdRetry "listTables" auth toks $ listTables atok
log$ " [fusiontable] Retrieved metadata on "++show (length allTables)++" tables"
case filter (\ t -> tab_name t == tablename) allTables of
[] -> do log$ " [fusiontable] No table with name "++show tablename
return Nothing
[t] -> do let tid = (tab_tableId t)
log$ " [fusiontable] Found one table with name "++show tablename ++", ID: "++show tid
return (Just tid)
makeTable :: OAuth2Client -> String -> BenchM TableId
makeTable auth tablename = do
toks <- liftIO$ getCachedTokens auth
let atok = B.pack $ accessToken toks
log$ " [fusiontable] No table with name "++show tablename ++" found, creating..."
Just TableMetadata{tab_tableId} <- stdRetry "createTable" auth toks $
createTable atok tablename fusionSchema
log$ " [fusiontable] Table created with ID "++show tab_tableId
return tab_tableId
ensureColumns :: OAuth2Client -> TableId -> [(String, CellType)] -> BenchM [String]
ensureColumns auth tid ourSchema = do
log$ " [fusiontable] ensureColumns: Ensuring schema: "++show ourSchema
toks <- liftIO$ getCachedTokens auth
log$ " [fusiontable] ensureColumns: Retrieved: "++show toks
let ourColNames = map fst ourSchema
let atok = B.pack $ accessToken toks
let ourSet = S.fromList ourColNames
log$ " [fusiontable] ensureColumns: Checking columns... "
targetColNames <- fmap (map col_name) $ liftIO$ listColumns atok tid
let targetSet = S.fromList targetColNames
missing = S.difference ourSet targetSet
extra = S.difference targetSet ourSet
unless (targetColNames == ourColNames) $
log$ "WARNING: HSBencher upload schema (1) did not match server side schema (2):\n (1) "++
show ourSchema ++"\n (2) " ++ show targetColNames
++ "\n HSBencher will try to make do..."
unless (S.null missing) $ do
log$ "WARNING: These fields are missing server-side, creating them: "++show misslist
forM_ misslist $ \ colname -> do
Just ColumnMetadata{col_name, col_columnId} <- stdRetry "createColumn" auth toks $
createColumn atok tid (colname, STRING)
log$ " -> Created column with name,id: "++show (col_name, col_columnId)
unless (S.null extra) $ do
log$ "WARNING: The fusion table has extra fields that HSBencher does not know about: "++
show (S.toList extra)
log$ " Expect null-string entries in these fields! "
return (targetColNames ++ misslist)
uploadBenchResult :: BenchmarkResult -> BenchM ()
uploadBenchResult br = do
(_toks,auth,tid) <- authenticate
let schema = benchmarkResultToSchema br
order <- ensureColumns auth tid schema
let row = prepBenchResult order br
flg <- uploadRows [row]
unless flg $ error "uploadBenchResult: failed to upload rows"
uploadRows :: [PreppedTuple] -> BenchM Bool
uploadRows rows = do
(toks,auth,tid) <- authenticate
let colss = map (map fst) rows
dats = map (map snd) rows
case colss of
[] -> return True
(schema:rst) -> do
unless (all (== schema) rst) $
error ("uploadRows: not all Schemas matched: "++ show (schema, filter (/= schema) rst))
res <- stdRetry "bulkImportRows" auth toks $ bulkImportRows
(B.pack$ accessToken toks) tid schema dats
case res of
Just _ -> do log$ " [fusiontable] Done uploading, run ID "++ (fromJust$ lookup "RUNID" (head rows))
++ " date "++ (fromJust$ lookup "DATETIME" (head rows))
return True
Nothing -> do log$ " [fusiontable] WARNING: Upload failed the maximum number of times. Continuing with benchmarks anyway"
return False
authenticate :: BenchM (OAuth2Tokens, OAuth2Client, TableId)
authenticate = do
conf <- ask
let fusionConfig = getMyConf FusionPlug conf
fusionConfig < - error " FINISHME - acquire config dynamically "
let FusionConfig{fusionClientID, fusionClientSecret, fusionTableID, serverColumns} = fusionConfig
let (Just cid, Just sec) = (fusionClientID, fusionClientSecret)
auth = OAuth2Client { clientId = cid, clientSecret = sec }
toks <- liftIO$ getCachedTokens auth
let atok = B.pack $ accessToken toks
let tid = fromJust fusionTableID
return (toks,auth,tid)
type PreppedTuple = [(String,String)]
ensureSchema : : Schema - > BenchM ( )
ensureSchema ourSchema = do
let ( Just cid , Just sec ) = ( fusionClientID , )
authclient = OAuth2Client { clientId = cid , = sec }
toks < - liftIO$ getCachedTokens authclient
let = B.pack $ accessToken toks
let tid = fromJust fusionTableID
ourSet = S.fromList ourSchema
if null _ CUSTOM
then log$ " [ fusiontable ] Computed schema , no custom fields . "
else log$ " [ fusiontable ] Computed schema , including these custom fields : " + + show _ CUSTOM
targetSchema < - fmap ( map ) $ liftIO$ listColumns let targetSet = S.fromList targetSchema
missing = S.difference ourSet targetSet
= ( ` S.member ` missing ) ourSchema
log$ " [ fusiontable ] There were " + + show ( length misslist ) + + " columns missing "
unless ( S.null missing ) $ do
forM _ misslist $ \ colname - > do
stdRetry " createColumn " authclient toks $
( colname , STRING )
ensureSchema :: Schema -> BenchM ()
ensureSchema ourSchema = do
let (Just cid, Just sec) = (fusionClientID, fusionClientSecret)
authclient = OAuth2Client { clientId = cid, clientSecret = sec }
toks <- liftIO$ getCachedTokens authclient
let atok = B.pack $ accessToken toks
let tid = fromJust fusionTableID
ourSet = S.fromList ourSchema
if null _CUSTOM
then log$ " [fusiontable] Computed schema, no custom fields."
else log$ " [fusiontable] Computed schema, including these custom fields: " ++ show _CUSTOM
targetSchema <- fmap (map col_name) $ liftIO$ listColumns atok tid
let targetSet = S.fromList targetSchema
missing = S.difference ourSet targetSet
misslist = L.filter (`S.member` missing) ourSchema
log$ " [fusiontable] There were " ++ show (length misslist) ++ " columns missing"
unless (S.null missing) $ do
forM_ misslist $ \ colname -> do
stdRetry "createColumn" authclient toks $
createColumn atok tid (colname, STRING)
-}
This function requires that the already contain all columns
prepBenchResult :: Schema -> BenchmarkResult -> PreppedTuple
prepBenchResult serverColumns br@BenchmarkResult{..} =
conf < - ask
let fusionConfig = getMyConf FusionPlug conf
let , , fusionTableID , serverColumns } = fusionConfig
let ( Just cid , Just sec ) = ( fusionClientID , )
authclient = OAuth2Client { clientId = cid , = sec }
toks < - liftIO$ getCachedTokens authclient
let = B.pack $ accessToken toks
let tid = fromJust fusionTableID
conf <- ask
let fusionConfig = getMyConf FusionPlug conf
let FusionConfig{fusionClientID, fusionClientSecret, fusionTableID, serverColumns} = fusionConfig
let (Just cid, Just sec) = (fusionClientID, fusionClientSecret)
authclient = OAuth2Client { clientId = cid, clientSecret = sec }
toks <- liftIO$ getCachedTokens authclient
let atok = B.pack $ accessToken toks
let tid = fromJust fusionTableID
-}
let
ourData = M.fromList $ resultToTuple br
ourCols = M.keysSet ourData
targetSet = S.fromList serverColumns
missing = S.difference ourCols targetSet
Any field HSBencher does n't know about just gets an empty string :
tuple = [ (key, fromMaybe "" (M.lookup key ourData))
| key <- serverColumns ]
in if S.null missing
then tuple
else error $ "prepBenchResult: benchmark result contained columns absent on server: "++show missing
log$ " [ fusiontable ] Uploading row with " + + show ( length cols)++
" columns containing " + + show ( sum$ map length vals)++ " characters of data "
log$ " [ fusiontable ] Full row contents : " + + show ourData
return tuple
log$ " [fusiontable] Uploading row with "++show (length cols)++
" columns containing "++show (sum$ map length vals)++" characters of data"
log$ " [fusiontable] Full row contents: "++show ourData
return tuple
-}
` BenchmarkResult ` . This could perhaps be generated automatically
( e.g. from a Generic instance , or even by creating a default
fusionSchema :: [(String, CellType)]
fusionSchema =
[ ("PROGNAME",STRING)
, ("VARIANT",STRING)
, ("ARGS",STRING)
, ("HOSTNAME",STRING)
, ("MINTIME", NUMBER)
, ("MEDIANTIME", NUMBER)
, ("MAXTIME", NUMBER)
, ("THREADS",NUMBER)
, ("RETRIES",NUMBER)
, ("RUNID",STRING)
, ("CI_BUILD_ID",STRING)
, ("DATETIME",DATETIME)
, ("MINTIME_PRODUCTIVITY", NUMBER)
, ("MEDIANTIME_PRODUCTIVITY", NUMBER)
, ("MAXTIME_PRODUCTIVITY", NUMBER)
, ("ALLTIMES", STRING)
, ("TRIALS", NUMBER)
, ("COMPILER",STRING)
, ("COMPILE_FLAGS",STRING)
, ("RUNTIME_FLAGS",STRING)
, ("ENV_VARS",STRING)
, ("BENCH_VERSION", STRING)
, ("BENCH_FILE", STRING)
, ("UNAME",STRING)
, ("PROCESSOR",STRING)
, ("TOPOLOGY",STRING)
, ("GIT_BRANCH",STRING)
, ("GIT_HASH",STRING)
, ("GIT_DEPTH",NUMBER)
, ("WHO",STRING)
, ("ETC_ISSUE",STRING)
, ("LSPCI",STRING)
, ("FULL_LOG",STRING)
, ("MEDIANTIME_ALLOCRATE", STRING)
, ("MEDIANTIME_MEMFOOTPRINT", STRING)
New field : [ 2014.02.19 ]
In order of trials like ALLTIMES .
]
length as fusionSchema .
benchmarkResultToSchema :: BenchmarkResult -> [(String, CellType)]
benchmarkResultToSchema bm = fusionSchema ++ map custom (_CUSTOM bm)
where
custom (tag, IntResult _) = (tag,NUMBER)
custom (tag, DoubleResult _) = (tag,NUMBER)
custom (tag, StringResult _) = (tag, STRING)
really only one fusion plugin .
data FusionPlug = FusionPlug
deriving (Eq,Show,Ord,Read)
instance Plugin FusionPlug where
type PlugConf FusionPlug = FusionConfig
type PlugFlag FusionPlug = FusionCmdLnFlag
plugName _ = "fusion"
plugCmdOpts _ = fusion_cli_options
plugUploadRow _ cfg row = runReaderT (uploadBenchResult row) cfg
plugInitialize p gconf = do
putStrLn " [fusiontable] Fusion table plugin initializing.. First, find config."
gc2 <- let fc@FusionConfig{fusionClientID, fusionClientSecret, fusionTableID} =
getMyConf p gconf in
case (benchsetName gconf, fusionTableID) of
(Nothing,Nothing) -> error "No way to find which fusion table to use! No name given and no explicit table ID."
(_, Just _tid) -> return gconf
(Just name,_) -> do
case (fusionClientID, fusionClientSecret) of
(Just cid, Just sec ) -> do
let auth = OAuth2Client { clientId=cid, clientSecret=sec }
(tid,cols) <- runReaderT (getTableId auth name) gconf
putStrLn$ " [fusiontable] -> Resolved name "++show name++" to table ID " ++show tid
return $! setMyConf p fc{ fusionTableID= Just tid, serverColumns= cols } gconf
(_,_) -> error "When --fusion-upload is activated --clientid and --clientsecret are required (or equiv ENV vars)"
let fc2 = getMyConf p gc2
let (Just cid, Just sec) = (fusionClientID fc2, fusionClientSecret fc2)
authclient = OAuth2Client { clientId = cid, clientSecret = sec }
putStrLn " [fusiontable] Second, lets retrieved cached auth tokens on the file system..."
_toks <- getCachedTokens authclient
" [ fusiontable ] Next , to test our connections , attempt to list tables : "
< - fmap ( map tab_name ) ( listTables ( B.pack ( ) ) )
putStrLn$ " [ fusiontable ] All of users ( map ( " " + + ) strs )
putStrLn " [fusiontable] Next, to test our connections, attempt to list tables:"
strs <- fmap (map tab_name) (listTables (B.pack (accessToken toks)))
putStrLn$" [fusiontable] All of users tables:\n"++ unlines (map (" "++) strs)
-}
return gc2
foldFlags _p flgs cnf0 =
foldr ($) cnf0 (map doFlag flgs)
where
doFlag FusionTest r = r
doFlag (ClientID cid) r = r { fusionClientID = Just cid }
doFlag (ClientSecret s) r = r { fusionClientSecret = Just s }
doFlag (FusionTables m) r =
case m of
Just tid -> r { fusionTableID = Just tid }
Nothing -> r
fusion_cli_options :: (String, [OptDescr FusionCmdLnFlag])
fusion_cli_options =
("Fusion Table Options:",
[ Option [] ["fusion-upload"] (OptArg FusionTables "TABLEID")
"enable fusion table upload. Optionally set TABLEID; otherwise create/discover it."
, Option [] ["clientid"] (ReqArg ClientID "ID")
("Use (and cache auth tokens for) Google client ID\n"++
"Alternatively set by env var HSBENCHER_GOOGLE_CLIENTID")
, Option [] ["clientsecret"] (ReqArg ClientSecret "STR")
("Use Google client secret\n"++
"Alternatively set by env var HSBENCHER_GOOGLE_CLIENTSECRET")
, Option [] ["fusion-test"] (NoArg FusionTest) "Test authentication and list tables if possible."
])
| command line options provided by the user initiating benchmarking .
data FusionCmdLnFlag =
FusionTables (Maybe TableId)
| ClientID String
| ClientSecret String
| FusionTest
deriving (Show,Read,Ord,Eq, Typeable)
|
6ef10265c65f2d136cae7ace0642907d3f166f7cd3d99793d50895850d675bd9 | luksamuk/sonic-lisp-engine | animations.lisp | ;;;; animations.lisp
Copyright ( c ) 2018 - 2020 < >
This file is distributed under the MIT License .
;;;; See LICENSE for details.
(in-package :sonic-lisp)
(defstruct animation-props
keyframes
(time-per-frame 0.16 :type single-float)
(loopback nil))
(defclass animator ()
((%atlas :initarg :atlas
:reader atlas)
(%atlas-size :initarg :atlas-size
:reader atlas-size
:initform (gamekit:vec2 360 360))
(%fpl :initarg :frames-per-line
:reader frames-per-line
:initform 6)
(%curr-anim :accessor anim-name
:initform nil)
(%anim-timer :accessor anim-timer
:initform 0)
(%frame :accessor frame
:initform 0)
(%anims :initarg :animations
:accessor animations
:initform nil)))
(defgeneric (setf animation) (animation-name animator))
(defgeneric register-animation
(animator &key name keyframes time-per-frame loopback-index))
(defgeneric update-animation (animator dt))
(defgeneric draw-animation (animator position))
(defmethod (setf animation) (animation-name (animator animator))
;; Only set to a registered animation
(when (or (eq animation-name :keep)
(and (animations animator)
(gethash animation-name (animations animator))))
;; Reset animation data only when not attributing to
;; same animation
(unless (or (eql animation-name (anim-name animator))
(eq animation-name :keep))
(setf (frame animator) 0
(anim-timer animator) 0
(anim-name animator) animation-name))))
(defmethod register-animation ((animator animator)
&key
name
keyframes
(time-per-frame 0.16)
(loopback-index 0))
(let ((keyframes (make-array (length keyframes)
:initial-contents keyframes)))
Initialize animations table if not initialized
(unless (animations animator)
(setf (animations animator) (make-hash-table)))
(setf (gethash name (animations animator))
(make-animation-props
:keyframes keyframes
:time-per-frame time-per-frame
:loopback loopback-index))))
(defmethod update-animation ((animator animator) dt)
(let ((props (gethash (anim-name animator)
(animations animator)))
(tpf nil))
(when props
(incf (anim-timer animator) dt)
;; If we surpassed the frame duration for the
;; animation, calculate the amount of frames
;; to skip and then wrap the timer around.
(setf tpf (animation-props-time-per-frame props))
(when (>= (anim-timer animator)
(animation-props-time-per-frame props))
(let ((frames-skipped
(floor (/ (anim-timer animator) tpf)))
(num-frames
(length (animation-props-keyframes props))))
;; Restore timer
(setf (anim-timer animator)
(rem (anim-timer animator) tpf))
;; Increment current frame
(incf (frame animator) frames-skipped)
;; If beyond last frame, wrap around
(when (>= (frame animator) num-frames)
;; We need to determine at what frame should we
;; stop; take the loopback frame into account
;; and consider only the [loopback, last-frame]
;; range for another remainder operation.
(let* ((loopback-frame
(animation-props-loopback props))
(loopback-range (- num-frames loopback-frame)))
(setf (frame animator)
(+ loopback-frame (rem (frame animator)
loopback-range))))))))))
(defmethod draw-animation ((animator animator) (pos gamekit:vec2))
(let ((props
(gethash (anim-name animator) (animations animator))))
(when props
Take the index of the frame on the keyfranes , then
;; convert it to a proper X and Y position on the texture
;; atlas
(let* ((frame
(aref (animation-props-keyframes props)
(frame animator)))
(frame-x-index
(rem frame (frames-per-line animator)))
(frame-y-index
(floor (/ frame (frames-per-line animator))))
(frame-size
(/ (gamekit:x (atlas-size animator))
(frames-per-line animator))))
(gamekit:draw-image
;; Position on matrix
pos
;; Pass on animation atlas
(atlas animator)
;; Position on atlas
:origin
(gamekit:vec2
(* frame-x-index frame-size)
(- (- (gamekit:y (atlas-size animator))
frame-size)
(* frame-y-index frame-size)))
;; Size of frame square
:width frame-size
:height frame-size)))))
| null | https://raw.githubusercontent.com/luksamuk/sonic-lisp-engine/28987769a8c60411862a415010e44c495c4ea875/animations.lisp | lisp | animations.lisp
See LICENSE for details.
Only set to a registered animation
Reset animation data only when not attributing to
same animation
If we surpassed the frame duration for the
animation, calculate the amount of frames
to skip and then wrap the timer around.
Restore timer
Increment current frame
If beyond last frame, wrap around
We need to determine at what frame should we
stop; take the loopback frame into account
and consider only the [loopback, last-frame]
range for another remainder operation.
convert it to a proper X and Y position on the texture
atlas
Position on matrix
Pass on animation atlas
Position on atlas
Size of frame square | Copyright ( c ) 2018 - 2020 < >
This file is distributed under the MIT License .
(in-package :sonic-lisp)
(defstruct animation-props
keyframes
(time-per-frame 0.16 :type single-float)
(loopback nil))
(defclass animator ()
((%atlas :initarg :atlas
:reader atlas)
(%atlas-size :initarg :atlas-size
:reader atlas-size
:initform (gamekit:vec2 360 360))
(%fpl :initarg :frames-per-line
:reader frames-per-line
:initform 6)
(%curr-anim :accessor anim-name
:initform nil)
(%anim-timer :accessor anim-timer
:initform 0)
(%frame :accessor frame
:initform 0)
(%anims :initarg :animations
:accessor animations
:initform nil)))
(defgeneric (setf animation) (animation-name animator))
(defgeneric register-animation
(animator &key name keyframes time-per-frame loopback-index))
(defgeneric update-animation (animator dt))
(defgeneric draw-animation (animator position))
(defmethod (setf animation) (animation-name (animator animator))
(when (or (eq animation-name :keep)
(and (animations animator)
(gethash animation-name (animations animator))))
(unless (or (eql animation-name (anim-name animator))
(eq animation-name :keep))
(setf (frame animator) 0
(anim-timer animator) 0
(anim-name animator) animation-name))))
(defmethod register-animation ((animator animator)
&key
name
keyframes
(time-per-frame 0.16)
(loopback-index 0))
(let ((keyframes (make-array (length keyframes)
:initial-contents keyframes)))
Initialize animations table if not initialized
(unless (animations animator)
(setf (animations animator) (make-hash-table)))
(setf (gethash name (animations animator))
(make-animation-props
:keyframes keyframes
:time-per-frame time-per-frame
:loopback loopback-index))))
(defmethod update-animation ((animator animator) dt)
(let ((props (gethash (anim-name animator)
(animations animator)))
(tpf nil))
(when props
(incf (anim-timer animator) dt)
(setf tpf (animation-props-time-per-frame props))
(when (>= (anim-timer animator)
(animation-props-time-per-frame props))
(let ((frames-skipped
(floor (/ (anim-timer animator) tpf)))
(num-frames
(length (animation-props-keyframes props))))
(setf (anim-timer animator)
(rem (anim-timer animator) tpf))
(incf (frame animator) frames-skipped)
(when (>= (frame animator) num-frames)
(let* ((loopback-frame
(animation-props-loopback props))
(loopback-range (- num-frames loopback-frame)))
(setf (frame animator)
(+ loopback-frame (rem (frame animator)
loopback-range))))))))))
(defmethod draw-animation ((animator animator) (pos gamekit:vec2))
(let ((props
(gethash (anim-name animator) (animations animator))))
(when props
Take the index of the frame on the keyfranes , then
(let* ((frame
(aref (animation-props-keyframes props)
(frame animator)))
(frame-x-index
(rem frame (frames-per-line animator)))
(frame-y-index
(floor (/ frame (frames-per-line animator))))
(frame-size
(/ (gamekit:x (atlas-size animator))
(frames-per-line animator))))
(gamekit:draw-image
pos
(atlas animator)
:origin
(gamekit:vec2
(* frame-x-index frame-size)
(- (- (gamekit:y (atlas-size animator))
frame-size)
(* frame-y-index frame-size)))
:width frame-size
:height frame-size)))))
|
2889ea93f9254b01845da1a60f4eb7a00a47c64f2191f3945ba874ce7b0177dc | ocaml-sf/learn-ocaml-corpus | update1.ml | let rec cons : 'a . 'a -> 'a seq -> 'a seq =
fun x ys ->
match ys with
| Nil ->
One (x, Nil)
| Zero ys ->
One (x, ys)
| One (y, ys) ->
Zero (cons (x, y) ys)
let rec fupdate : 'a . int -> ('a -> 'a) -> 'a seq -> 'a seq =
fun i f xs ->
match xs with
| Nil ->
assert false (* cannot happen; [i] is within bounds *)
| One (x, xs) ->
if i = 0 then
One (f x, xs)
else
fupdate (i - 1) f (Zero xs) (* wrong *)
| Zero xs ->
let f' =
if i mod 2 = 0 then
fun (x0, x1) -> (f x0, x1)
else
fun (x0, x1) -> (x0, f x1)
in
Zero (fupdate (i / 2) f' xs)
let update i y xs =
fupdate i (fun _ -> y) xs
| null | https://raw.githubusercontent.com/ocaml-sf/learn-ocaml-corpus/7dcf4d72b49863a3e37e41b3c3097aa4c6101a69/exercises/fpottier/random_access_lists/wrong/update1.ml | ocaml | cannot happen; [i] is within bounds
wrong | let rec cons : 'a . 'a -> 'a seq -> 'a seq =
fun x ys ->
match ys with
| Nil ->
One (x, Nil)
| Zero ys ->
One (x, ys)
| One (y, ys) ->
Zero (cons (x, y) ys)
let rec fupdate : 'a . int -> ('a -> 'a) -> 'a seq -> 'a seq =
fun i f xs ->
match xs with
| Nil ->
| One (x, xs) ->
if i = 0 then
One (f x, xs)
else
| Zero xs ->
let f' =
if i mod 2 = 0 then
fun (x0, x1) -> (f x0, x1)
else
fun (x0, x1) -> (x0, f x1)
in
Zero (fupdate (i / 2) f' xs)
let update i y xs =
fupdate i (fun _ -> y) xs
|
ebb3c745c41af9bfd9367361952bc0c980d4f67baf2f78b72d6764f16b43118a | philnguyen/soft-contract | issue-101.rkt | #lang racket/base
(require racket/contract)
(define-syntax-rule (define2 x ctc e)
(begin
(define x e)
(provide (contract-out [x ctc]))))
(define2 g (between/c 1 10) 8)
| null | https://raw.githubusercontent.com/philnguyen/soft-contract/5e07dc2d622ee80b961f4e8aebd04ce950720239/soft-contract/test/programs/safe/issues/issue-101.rkt | racket | #lang racket/base
(require racket/contract)
(define-syntax-rule (define2 x ctc e)
(begin
(define x e)
(provide (contract-out [x ctc]))))
(define2 g (between/c 1 10) 8)
| |
9968791b598eca058f4d850724be0e069d3707e2b293d24b00134df9a537595f | nvim-treesitter/nvim-treesitter | locals.scm | (var_declaration
declarators: (var_declarators
(var (identifier)) @definition.var))
(var_assignment
variables: (assignment_variables
(var (identifier) @definition.var) @definition.associated))
(arg name: (identifier) @definition.parameter)
(anon_function) @scope
((function_statement
(function_name) @definition.function) @scope)
(program) @scope
(if_statement) @scope
(generic_for_statement (for_body) @scope)
(numeric_for_statement (for_body) @scope)
(repeat_statement) @scope
(while_statement (while_body) @scope)
(do_statement) @scope
(identifier) @reference
| null | https://raw.githubusercontent.com/nvim-treesitter/nvim-treesitter/f96c409e3d2b5c4f8fe76f3e049958a293f13167/queries/teal/locals.scm | scheme | (var_declaration
declarators: (var_declarators
(var (identifier)) @definition.var))
(var_assignment
variables: (assignment_variables
(var (identifier) @definition.var) @definition.associated))
(arg name: (identifier) @definition.parameter)
(anon_function) @scope
((function_statement
(function_name) @definition.function) @scope)
(program) @scope
(if_statement) @scope
(generic_for_statement (for_body) @scope)
(numeric_for_statement (for_body) @scope)
(repeat_statement) @scope
(while_statement (while_body) @scope)
(do_statement) @scope
(identifier) @reference
| |
7621df63e77316cc031ab7993c98f9a230f244bee057a680644872b944b76afd | PEZ/reagent-bidi-accountant-example | server.clj | (ns routing-example.server)
(defn handler [request]
{:status 200
:body (slurp "resources/public/index.html")})
| null | https://raw.githubusercontent.com/PEZ/reagent-bidi-accountant-example/de0ffe9ca5eb0a83e6bedea741b4335feb0ee255/dev/routing_example/server.clj | clojure | (ns routing-example.server)
(defn handler [request]
{:status 200
:body (slurp "resources/public/index.html")})
| |
40618522784f0e935017e12323fb7d0b7993a1c9b3e023253bb962d2b28acfa2 | lava-jato-the-game/lava-jato-the-game | client.cljs | (ns lava-jato-the-game.client
(:require [com.fulcrologic.fulcro.components :as comp :refer [defsc]]
[com.fulcrologic.fulcro.application :as fa]
[goog.dom :as gdom]
[goog.object :as gobj]
[goog.events :as gevt]
[goog.history.EventType :as history.EventType]
[com.fulcrologic.fulcro.networking.http-remote :as fnh]
[com.fulcrologic.fulcro.dom :as dom]
[com.fulcrologic.fulcro.routing.dynamic-routing :as dr]
[com.fulcrologic.fulcro.data-fetch :as df]
[com.fulcrologic.fulcro.mutations :as fm]
[clojure.string :as string])
(:import (goog.history Html5History)))
(defsc Player [this {:player/keys [id name]}]
{:ident [:player/id :player/id]
:query [:player/id
:player/name]}
(dom/div
(dom/hr)
(dom/code "Player name: ") (dom/span name)
(dom/br)
(dom/code "ID: " (dom/code (str id)))
(dom/hr)))
(def ui-player (comp/factory Player {:keyfn :player/id}))
(defsc Party [this {:party/keys [id name description]}]
{:query [:party/id
:party/name
:party/description]
:ident [:party/id :party/id]}
(dom/div
(dom/h2 name)
(dom/code (str id))
(dom/div description)))
(def ui-party (comp/factory Party {:keyfn :party/id}))
(defsc Character [this {:character/keys [id name player party]}]
{:query [:character/id
:character/name
{:character/player (comp/get-query Player)}
{:character/party (comp/get-query Party)}]
:ident [:character/id :character/id]}
(dom/div
(dom/h2 name)
(dom/code (str id))
(ui-player player)
(ui-party party)))
(def ui-character (comp/factory Character {:keyfn :character/id}))
(fm/defmutation player/login
[_]
(action [{:keys [state]}]
(swap! state (fn [st]
(assoc-in st [::login ::login :ui/loading?] true))))
(remote [env]
(fm/returning env Player)))
(defsc Home [this {:ui/keys [profile]}]
{:query [{:ui/profile (comp/get-query Character)}]
:ident (fn [] [::home ::home])
:route-segment ["home"]
:initial-state (fn [_] {})}
(dom/div
(dom/button {:onClick #(df/load! this :lava-jato-the-game.api/me Character
{:target [::home ::home :ui/profile]})}
"load")
(when profile
(ui-character profile))))
(defsc Login [this {:player/keys [username password]
:ui/keys [loading?]}]
{:query [:player/username
:player/password
:ui/loading?]
:ident (fn [] [::login ::login])
:route-segment ["login"]
:initial-state {:player/username ""
:ui/loading? false
:player/password ""}}
(let [on-login #(comp/transact! this `[(player/login ~{:username username
:password password})])]
(dom/form
{:onSubmit (fn [e]
(.preventDefault e)
(on-login))}
(dom/label "username")
(dom/input {:value username
:disabled loading?
:onChange #(fm/set-value! this :player/username (-> % .-target .-value))})
(dom/br)
(dom/label "password")
(dom/input {:value password
:disabled loading?
:type "password"
:onChange #(fm/set-value! this :player/password (-> % .-target .-value))})
(dom/br)
(dom/button
{:disabled loading?
#_#_:onClick on-login}
"login"))))
(dr/defrouter RootRouter [this props]
{:router-targets [Home Login]})
(def ui-root-router (comp/factory RootRouter))
(defsc Root [this {:>/keys [root-router]}]
{:query [{:>/root-router (comp/get-query RootRouter)}]
:initial-state (fn [_]
{:>/root-router (comp/get-initial-state RootRouter _)})}
(ui-root-router root-router))
(defonce SPA (atom nil))
(defn ^:export main
[]
(let [csrf-token (-> (gdom/getDocument)
(gobj/getValueByKeys "body" "dataset" "csrfToken"))
history (new Html5History)
client-did-mount (fn [app]
(doto history
(gevt/listen history.EventType/NAVIGATE #(when-let [token (.-token %)]
(dr/change-route app (-> (string/split token #"/")
rest
vec))))
(.setEnabled true)))
app (fa/fulcro-app
{:client-did-mount client-did-mount
:remotes {:remote (fnh/fulcro-http-remote {:request-middleware (-> (fnh/wrap-csrf-token csrf-token)
(fnh/wrap-fulcro-request))})}})]
(fa/mount! app Root "app")
(reset! SPA app)))
| null | https://raw.githubusercontent.com/lava-jato-the-game/lava-jato-the-game/05c324ecc6ef4eab20042ee478f3ff2da957d0d0/src/main/lava_jato_the_game/client.cljs | clojure | (ns lava-jato-the-game.client
(:require [com.fulcrologic.fulcro.components :as comp :refer [defsc]]
[com.fulcrologic.fulcro.application :as fa]
[goog.dom :as gdom]
[goog.object :as gobj]
[goog.events :as gevt]
[goog.history.EventType :as history.EventType]
[com.fulcrologic.fulcro.networking.http-remote :as fnh]
[com.fulcrologic.fulcro.dom :as dom]
[com.fulcrologic.fulcro.routing.dynamic-routing :as dr]
[com.fulcrologic.fulcro.data-fetch :as df]
[com.fulcrologic.fulcro.mutations :as fm]
[clojure.string :as string])
(:import (goog.history Html5History)))
(defsc Player [this {:player/keys [id name]}]
{:ident [:player/id :player/id]
:query [:player/id
:player/name]}
(dom/div
(dom/hr)
(dom/code "Player name: ") (dom/span name)
(dom/br)
(dom/code "ID: " (dom/code (str id)))
(dom/hr)))
(def ui-player (comp/factory Player {:keyfn :player/id}))
(defsc Party [this {:party/keys [id name description]}]
{:query [:party/id
:party/name
:party/description]
:ident [:party/id :party/id]}
(dom/div
(dom/h2 name)
(dom/code (str id))
(dom/div description)))
(def ui-party (comp/factory Party {:keyfn :party/id}))
(defsc Character [this {:character/keys [id name player party]}]
{:query [:character/id
:character/name
{:character/player (comp/get-query Player)}
{:character/party (comp/get-query Party)}]
:ident [:character/id :character/id]}
(dom/div
(dom/h2 name)
(dom/code (str id))
(ui-player player)
(ui-party party)))
(def ui-character (comp/factory Character {:keyfn :character/id}))
(fm/defmutation player/login
[_]
(action [{:keys [state]}]
(swap! state (fn [st]
(assoc-in st [::login ::login :ui/loading?] true))))
(remote [env]
(fm/returning env Player)))
(defsc Home [this {:ui/keys [profile]}]
{:query [{:ui/profile (comp/get-query Character)}]
:ident (fn [] [::home ::home])
:route-segment ["home"]
:initial-state (fn [_] {})}
(dom/div
(dom/button {:onClick #(df/load! this :lava-jato-the-game.api/me Character
{:target [::home ::home :ui/profile]})}
"load")
(when profile
(ui-character profile))))
(defsc Login [this {:player/keys [username password]
:ui/keys [loading?]}]
{:query [:player/username
:player/password
:ui/loading?]
:ident (fn [] [::login ::login])
:route-segment ["login"]
:initial-state {:player/username ""
:ui/loading? false
:player/password ""}}
(let [on-login #(comp/transact! this `[(player/login ~{:username username
:password password})])]
(dom/form
{:onSubmit (fn [e]
(.preventDefault e)
(on-login))}
(dom/label "username")
(dom/input {:value username
:disabled loading?
:onChange #(fm/set-value! this :player/username (-> % .-target .-value))})
(dom/br)
(dom/label "password")
(dom/input {:value password
:disabled loading?
:type "password"
:onChange #(fm/set-value! this :player/password (-> % .-target .-value))})
(dom/br)
(dom/button
{:disabled loading?
#_#_:onClick on-login}
"login"))))
(dr/defrouter RootRouter [this props]
{:router-targets [Home Login]})
(def ui-root-router (comp/factory RootRouter))
(defsc Root [this {:>/keys [root-router]}]
{:query [{:>/root-router (comp/get-query RootRouter)}]
:initial-state (fn [_]
{:>/root-router (comp/get-initial-state RootRouter _)})}
(ui-root-router root-router))
(defonce SPA (atom nil))
(defn ^:export main
[]
(let [csrf-token (-> (gdom/getDocument)
(gobj/getValueByKeys "body" "dataset" "csrfToken"))
history (new Html5History)
client-did-mount (fn [app]
(doto history
(gevt/listen history.EventType/NAVIGATE #(when-let [token (.-token %)]
(dr/change-route app (-> (string/split token #"/")
rest
vec))))
(.setEnabled true)))
app (fa/fulcro-app
{:client-did-mount client-did-mount
:remotes {:remote (fnh/fulcro-http-remote {:request-middleware (-> (fnh/wrap-csrf-token csrf-token)
(fnh/wrap-fulcro-request))})}})]
(fa/mount! app Root "app")
(reset! SPA app)))
| |
b430e0a3343d3adee0e36aef8f783ddb35a5fdfdd8469e64dbc144c80f83b09c | wireless-net/erlang-nommu | erts_alloc_config.erl | %%
%% %CopyrightBegin%
%%
Copyright Ericsson AB 2007 - 2011 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
The Initial Developer of the Original Code is Ericsson AB .
%%
%% %CopyrightEnd%
%%
%%%-------------------------------------------------------------------
%%% File : erts_alloc_config.erl
Author :
%%% Description : Generate an erts_alloc configuration suitable for
%%% a limited amount of runtime scenarios.
%%%
Created : 9 May 2007 by
%%%-------------------------------------------------------------------
-module(erts_alloc_config).
-record(state, {have_scenario = false,
alloc}).
-record(alloc, {name,
enabled,
need_config_change,
alloc_util,
instances,
strategy,
acul,
low_mbc_blocks_size,
high_mbc_blocks_size,
sbct,
segments}).
-record(conf,
{segments,
format_to}).
-record(segment, {size,number}).
-define(PRINT_WITDH, 76).
-define(SERVER, '__erts_alloc_config__').
-define(KB, 1024).
-define(MB, 1048576).
-define(B2KB(B), ((((B) - 1) div ?KB) + 1)).
-define(ROUNDUP(V, R), ((((V) - 1) div (R)) + 1)*(R)).
-define(LARGE_GROWTH_ABS_LIMIT, 20*?MB).
-define(MBC_MSEG_LIMIT, 150).
-define(FRAG_FACT, 1.25).
-define(GROWTH_SEG_FACT, 2).
-define(MIN_SEG_SIZE, 1*?MB).
-define(SMALL_GROWTH_SEGS, 5).
-define(ALLOC_UTIL_ALLOCATOR(A),
A == binary_alloc;
A == std_alloc;
A == ets_alloc;
A == fix_alloc;
A == eheap_alloc;
A == ll_alloc;
A == sl_alloc;
A == temp_alloc;
A == driver_alloc).
-define(ALLOCATORS,
[binary_alloc,
ets_alloc,
eheap_alloc,
fix_alloc,
ll_alloc,
mseg_alloc,
sl_alloc,
std_alloc,
sys_alloc,
temp_alloc,
driver_alloc]).
-define(MMBCS_DEFAULTS,
[{binary_alloc, 131072},
{std_alloc, 131072},
{ets_alloc, 131072},
{fix_alloc, 131072},
{eheap_alloc, 524288},
{ll_alloc, 131072},
{sl_alloc, 131072},
{temp_alloc, 131072},
{driver_alloc, 131072}]).
%%%
%%% Exported interface
%%%
-export([save_scenario/0,
make_config/0,
make_config/1,
stop/0]).
%% Test and debug export
-export([state/0]).
save_scenario() ->
req(save_scenario).
make_config() ->
make_config(group_leader()).
make_config(FileName) when is_list(FileName) ->
case file:open(FileName, [write]) of
{ok, IODev} ->
Res = req({make_config, IODev}),
file:close(IODev),
Res;
Error ->
Error
end;
make_config(IODev) ->
req({make_config, IODev}).
stop() ->
req(stop).
%% state() is intentionally undocumented, and is for testing
%% and debugging only...
state() ->
req(state).
%%%
%%% Server
%%%
req(Req) ->
Ref = make_ref(),
ReqMsg = {request, self(), Ref, Req},
req(ReqMsg, Ref, true).
req(ReqMsg, Ref, TryStart) ->
req(ReqMsg, Ref, TryStart, erlang:monitor(process, ?SERVER)).
req(ReqMsg, Ref, TryStart, Mon) ->
(catch ?SERVER ! ReqMsg),
receive
{response, Ref, Res} ->
erlang:demonitor(Mon, [flush]),
Res;
{'DOWN', Mon, _, _, noproc} ->
case TryStart of
true -> start_server(Ref, ReqMsg);
false -> {error, server_died}
end;
{'DOWN', Mon, _, _, Reason} ->
{error, Reason}
end.
start_server(Ref, ReqMsg) ->
Starter = self(),
Pid = spawn(fun () ->
register(?SERVER, self()),
Starter ! {Ref, self(), started},
server_loop(make_state())
end),
Mon = erlang:monitor(process, Pid),
receive
{Ref, Pid, started} ->
req(ReqMsg, Ref, false, Mon);
{'DOWN', Mon, _, _, _} ->
req(ReqMsg, Ref, false)
end.
server_loop(State) ->
NewState = receive
{request, From, Ref, save_scenario} ->
Alloc = save_scenario(State#state.alloc),
From ! {response, Ref, ok},
State#state{alloc = Alloc, have_scenario = true};
{request, From, Ref, {make_config, IODev}} ->
case State#state.have_scenario of
true ->
Conf = #conf{segments = ?MBC_MSEG_LIMIT,
format_to = IODev},
Res = mk_config(Conf, State#state.alloc),
From ! {response, Ref, Res};
_ ->
From ! {response, Ref, no_scenario_saved}
end,
State;
{request, From, Ref, stop} ->
From ! {response, Ref, ok},
exit(normal);
{request, From, Ref, state} ->
From ! {response, Ref, State},
State;
{request, From, Ref, Req} ->
From ! {response, Ref, {unknown_request, Req}},
State;
_ ->
State
end,
server_loop(NewState).
carrier_migration_support(aoff) ->
true;
carrier_migration_support(aoffcbf) ->
true;
carrier_migration_support(aoffcaobf) ->
true;
carrier_migration_support(_) ->
false.
allocator_instances(ll_alloc, Strategy) ->
case carrier_migration_support(Strategy) of
true -> erlang:system_info(schedulers);
false -> 1
end;
allocator_instances(_A, undefined) ->
1;
allocator_instances(_A, _Strategy) ->
erlang:system_info(schedulers).
strategy(temp_alloc, _AI) ->
af;
strategy(A, AI) ->
try
{A, OptList} = lists:keyfind(A, 1, AI),
{as, S} = lists:keyfind(as, 1, OptList),
S
catch
_ : _ ->
undefined
end.
strategy_str(af) ->
"A fit";
strategy_str(gf) ->
"Good fit";
strategy_str(bf) ->
"Best fit";
strategy_str(aobf) ->
"Address order best fit";
strategy_str(aoff) ->
"Address order first fit";
strategy_str(aoffcbf) ->
"Address order first fit carrier best fit";
strategy_str(aoffcaobf) ->
"Address order first fit carrier adress order best fit".
default_acul(A, S) ->
case carrier_migration_support(S) of
false ->
0;
true ->
case A of
ll_alloc -> 85;
eheap_alloc -> 45;
_ -> 60
end
end.
make_state() ->
{_, _, _, AI} = erlang:system_info(allocator),
#state{alloc = lists:map(fun (A) ->
S = strategy(A, AI),
#alloc{name = A,
strategy = S,
acul = default_acul(A, S),
instances = allocator_instances(A, S)}
end,
?ALLOCATORS)}.
%%
%% Save scenario
%%
ai_value(Key1, Key2, AI) ->
case lists:keysearch(Key1, 1, AI) of
{value, {Key1, Value1}} ->
case lists:keysearch(Key2, 1, Value1) of
{value, Result} -> Result;
_ -> undefined
end;
_ -> undefined
end.
chk_mbcs_blocks_size(#alloc{low_mbc_blocks_size = undefined,
high_mbc_blocks_size = undefined} = Alc,
Min,
Max) ->
Alc#alloc{low_mbc_blocks_size = Min,
high_mbc_blocks_size = Max,
enabled = true};
chk_mbcs_blocks_size(#alloc{low_mbc_blocks_size = LowBS,
high_mbc_blocks_size = HighBS} = Alc,
Min,
Max) ->
true = is_integer(LowBS),
true = is_integer(HighBS),
Alc1 = case Min < LowBS of
true -> Alc#alloc{low_mbc_blocks_size = Min};
false -> Alc
end,
case Max > HighBS of
true -> Alc1#alloc{high_mbc_blocks_size = Max};
false -> Alc1
end.
set_alloc_util(#alloc{alloc_util = AU} = Alc, AU) ->
Alc;
set_alloc_util(Alc, Val) ->
Alc#alloc{alloc_util = Val}.
chk_sbct(#alloc{sbct = undefined} = Alc, AI) ->
case ai_value(options, sbct, AI) of
{sbct, Bytes} when is_integer(Bytes) -> Alc#alloc{sbct = b2kb(Bytes)};
_ -> Alc
end;
chk_sbct(Alc, _AI) ->
Alc.
save_scenario(AlcList) ->
%% The high priority is not really necessary. It is
%% used since it will make retrieval of allocator
%% information less spread out in time on a highly
%% loaded system.
OP = process_flag(priority, high),
Res = do_save_scenario(AlcList),
process_flag(priority, OP),
Res.
save_ai2(Alc, AI) ->
Alc1 = chk_sbct(Alc, AI),
case ai_value(mbcs, blocks_size, AI) of
{blocks_size, MinBS, _, MaxBS} ->
set_alloc_util(chk_mbcs_blocks_size(Alc1, MinBS, MaxBS), true);
_ ->
set_alloc_util(Alc, false)
end.
save_ai(Alc, [{instance, 0, AI}]) ->
save_ai2(Alc, AI);
save_ai(Alc, [{instance, _, _}, {instance, _, _}| _]) ->
Alc#alloc{enabled = true, need_config_change = true};
save_ai(Alc, AI) ->
save_ai2(Alc, AI). % Non erts_alloc_util allocator
do_save_scenario(AlcList) ->
lists:map(fun (#alloc{enabled = false} = Alc) ->
Alc;
(#alloc{name = Name} = Alc) ->
case erlang:system_info({allocator, Name}) of
undefined ->
exit({bad_allocator_name, Name});
false ->
Alc#alloc{enabled = false};
AI when is_list(AI) ->
save_ai(Alc, AI)
end
end,
AlcList).
%%
%% Make configuration
%%
conf_size(Bytes) when is_integer(Bytes), Bytes < 0 ->
exit({bad_value, Bytes});
conf_size(Bytes) when is_integer(Bytes), Bytes < 1*?MB ->
?ROUNDUP(?B2KB(Bytes), 256);
conf_size(Bytes) when is_integer(Bytes), Bytes < 10*?MB ->
?ROUNDUP(?B2KB(Bytes), ?B2KB(1*?MB));
conf_size(Bytes) when is_integer(Bytes), Bytes < 100*?MB ->
?ROUNDUP(?B2KB(Bytes), ?B2KB(2*?MB));
conf_size(Bytes) when is_integer(Bytes), Bytes < 256*?MB ->
?ROUNDUP(?B2KB(Bytes), ?B2KB(5*?MB));
conf_size(Bytes) when is_integer(Bytes) ->
?ROUNDUP(?B2KB(Bytes), ?B2KB(10*?MB)).
sbct(#conf{format_to = FTO}, #alloc{name = A, sbct = SBCT}) ->
fc(FTO, "Sbc threshold size of ~p kilobytes.", [SBCT]),
format(FTO, " +M~csbct ~p~n", [alloc_char(A), SBCT]).
default_mmbcs(temp_alloc = A, _Insts) ->
{value, {A, MMBCS_Default}} = lists:keysearch(A, 1, ?MMBCS_DEFAULTS),
MMBCS_Default;
default_mmbcs(A, Insts) ->
{value, {A, MMBCS_Default}} = lists:keysearch(A, 1, ?MMBCS_DEFAULTS),
I = case Insts > 4 of
true -> 4;
_ -> Insts
end,
?ROUNDUP(MMBCS_Default div I, ?B2KB(1*?KB)).
mmbcs(#conf{format_to = FTO},
#alloc{name = A, instances = Insts, low_mbc_blocks_size = BlocksSize}) ->
BS = case A of
temp_alloc -> BlocksSize;
_ -> BlocksSize div Insts
end,
DefMMBCS = default_mmbcs(A, Insts),
case {Insts, BS > DefMMBCS} of
{1, true} ->
MMBCS = conf_size(BS),
fc(FTO, "Main mbc size of ~p kilobytes.", [MMBCS]),
format(FTO, " +M~cmmbcs ~p~n", [alloc_char(A), MMBCS]);
_ ->
MMBCS = ?B2KB(DefMMBCS),
fc(FTO, "Main mbc size of ~p kilobytes.", [MMBCS]),
format(FTO, " +M~cmmbcs ~p~n", [alloc_char(A), MMBCS]),
ok
end.
smbcs_lmbcs(#conf{format_to = FTO},
#alloc{name = A, segments = Segments}) ->
MBCS = Segments#segment.size,
AC = alloc_char(A),
fc(FTO, "Mseg mbc size of ~p kilobytes.", [MBCS]),
format(FTO, " +M~csmbcs ~p +M~clmbcs ~p~n", [AC, MBCS, AC, MBCS]),
ok.
alloc_char(binary_alloc) -> $B;
alloc_char(std_alloc) -> $D;
alloc_char(ets_alloc) -> $E;
alloc_char(fix_alloc) -> $F;
alloc_char(eheap_alloc) -> $H;
alloc_char(ll_alloc) -> $L;
alloc_char(mseg_alloc) -> $M;
alloc_char(driver_alloc) -> $R;
alloc_char(sl_alloc) -> $S;
alloc_char(temp_alloc) -> $T;
alloc_char(sys_alloc) -> $Y;
alloc_char(Alloc) ->
exit({bad_allocator, Alloc}).
conf_alloc(#conf{format_to = FTO},
#alloc{name = A, enabled = false}) ->
fcl(FTO, A),
fcp(FTO,
"WARNING: ~p has been disabled. Consider enabling ~p by passing "
"the \"+M~ce true\" command line argument and rerun "
"erts_alloc_config.",
[A, A, alloc_char(A)]);
conf_alloc(#conf{format_to = FTO},
#alloc{name = A, need_config_change = true}) ->
fcl(FTO, A),
fcp(FTO,
"WARNING: ~p has been configured in a way that prevents "
"erts_alloc_config from creating a configuration. The configuration "
"will be automatically adjusted to fit erts_alloc_config if you "
"use the \"+Mea config\" command line argument while running "
"erts_alloc_config.",
[A]);
conf_alloc(#conf{format_to = FTO} = Conf,
#alloc{name = A, alloc_util = true} = Alc) ->
fcl(FTO, A),
chk_xnote(Conf, Alc),
au_conf_alloc(Conf, Alc),
format(FTO, "#~n", []);
conf_alloc(#conf{format_to = FTO} = Conf, #alloc{name = A} = Alc) ->
fcl(FTO, A),
chk_xnote(Conf, Alc).
chk_xnote(#conf{format_to = FTO},
#alloc{name = sys_alloc}) ->
fcp(FTO, "Cannot be configured. Default malloc implementation used.");
chk_xnote(#conf{format_to = FTO},
#alloc{name = mseg_alloc}) ->
fcp(FTO, "Default configuration used.");
chk_xnote(#conf{format_to = FTO},
#alloc{name = ll_alloc}) ->
fcp(FTO,
"Note, blocks allocated with ll_alloc are very "
"seldom deallocated. Placing blocks in mseg "
"carriers is therefore very likely only a waste "
"of resources.");
chk_xnote(#conf{}, #alloc{}) ->
ok.
au_conf_alloc(#conf{format_to = FTO} = Conf,
#alloc{name = A,
alloc_util = true,
instances = Insts,
acul = Acul,
strategy = Strategy,
low_mbc_blocks_size = Low,
high_mbc_blocks_size = High} = Alc) ->
fcp(FTO, "Usage of mbcs: ~p - ~p kilobytes", [?B2KB(Low), ?B2KB(High)]),
case Insts of
1 ->
fc(FTO, "One instance used."),
format(FTO, " +M~ct false~n", [alloc_char(A)]);
_ ->
fc(FTO, "~p + 1 instances used.",
[Insts]),
format(FTO, " +M~ct true~n", [alloc_char(A)]),
case Strategy of
undefined ->
ok;
_ ->
fc(FTO, "Allocation strategy: ~s.",
[strategy_str(Strategy)]),
format(FTO, " +M~cas ~s~n", [alloc_char(A),
atom_to_list(Strategy)])
end,
case carrier_migration_support(Strategy) of
false ->
ok;
true ->
fc(FTO, "Abandon carrier utilization limit of ~p%.", [Acul]),
format(FTO, " +M~cacul ~p~n", [alloc_char(A), Acul])
end
end,
mmbcs(Conf, Alc),
smbcs_lmbcs(Conf, Alc),
sbct(Conf, Alc).
calc_seg_size(Growth, Segs) ->
conf_size(round(Growth*?FRAG_FACT*?GROWTH_SEG_FACT) div Segs).
calc_growth_segments(Conf, AlcList0) ->
CalcSmall = fun (#alloc{name = ll_alloc, instances = 1} = Alc, Acc) ->
{Alc#alloc{segments = #segment{size = conf_size(0),
number = 0}},
Acc};
(#alloc{alloc_util = true,
instances = Insts,
low_mbc_blocks_size = LowMBC,
high_mbc_blocks_size = High} = Alc,
{SL, AL}) ->
Low = case Insts of
1 -> LowMBC;
_ -> 0
end,
Growth = High - Low,
case Growth >= ?LARGE_GROWTH_ABS_LIMIT of
true ->
{Alc, {SL, AL+1}};
false ->
Segs = ?SMALL_GROWTH_SEGS,
SegSize = calc_seg_size(Growth, Segs),
{Alc#alloc{segments
= #segment{size = SegSize,
number = Segs}},
{SL - Segs, AL}}
end;
(Alc, Acc) -> {Alc, Acc}
end,
{AlcList1, {SegsLeft, AllocsLeft}}
= lists:mapfoldl(CalcSmall, {Conf#conf.segments, 0}, AlcList0),
case AllocsLeft of
0 ->
AlcList1;
_ ->
SegsPerAlloc = case (SegsLeft div AllocsLeft) + 1 of
SPA when SPA < ?SMALL_GROWTH_SEGS ->
?SMALL_GROWTH_SEGS;
SPA ->
SPA
end,
CalcLarge = fun (#alloc{alloc_util = true,
segments = undefined,
instances = Insts,
low_mbc_blocks_size = LowMBC,
high_mbc_blocks_size = High} = Alc) ->
Low = case Insts of
1 -> LowMBC;
_ -> 0
end,
Growth = High - Low,
SegSize = calc_seg_size(Growth,
SegsPerAlloc),
Alc#alloc{segments
= #segment{size = SegSize,
number = SegsPerAlloc}};
(Alc) ->
Alc
end,
lists:map(CalcLarge, AlcList1)
end.
mk_config(#conf{format_to = FTO} = Conf, AlcList) ->
format_header(FTO),
Res = lists:foreach(fun (Alc) -> conf_alloc(Conf, Alc) end,
calc_growth_segments(Conf, AlcList)),
format_footer(FTO),
Res.
format_header(FTO) ->
{Y,Mo,D} = erlang:date(),
{H,Mi,S} = erlang:time(),
fcl(FTO),
fcl(FTO, "erts_alloc configuration"),
fcl(FTO),
fcp(FTO,
"This erts_alloc configuration was automatically "
"generated at ~w-~2..0w-~2..0w ~2..0w:~2..0w.~2..0w by "
"erts_alloc_config.",
[Y, Mo, D, H, Mi, S]),
fcp(FTO,
"~s was used when generating the configuration.",
[string:strip(erlang:system_info(system_version), both, $\n)]),
case erlang:system_info(schedulers) of
1 -> ok;
Schdlrs ->
fcp(FTO,
"NOTE: This configuration was made for ~p schedulers. "
"It is very important that ~p schedulers are used.",
[Schdlrs, Schdlrs])
end,
fcp(FTO,
"This configuration is intended as a suggestion and "
"may need to be adjusted manually. Instead of modifying "
"this file, you are advised to write another configuration "
"file and override values that you want to change. "
"Doing it this way simplifies things when you want to "
"rerun erts_alloc_config."),
fcp(FTO,
"This configuration is based on the actual use of "
"multi-block carriers (mbcs) for a set of different "
"runtime scenarios. Note that this configuration may "
"perform bad, ever horrible, for other runtime "
"scenarios."),
fcp(FTO,
"You are advised to rerun erts_alloc_config if the "
"applications run when the configuration was made "
"are changed, or if the load on the applications have "
"changed since the configuration was made. You are also "
"advised to rerun erts_alloc_config if the Erlang runtime "
"system used is changed."),
fcp(FTO,
"Note, that the singel-block carrier (sbc) parameters "
"very much effects the use of mbcs. Therefore, if you "
"change the sbc parameters, you are advised to rerun "
"erts_alloc_config."),
fcp(FTO,
"For more information see the erts_alloc_config(3) "
"documentation."),
ok.
format_footer(FTO) ->
fcl(FTO).
%%%
%%% Misc.
%%%
b2kb(B) when is_integer(B) ->
MaxKB = (1 bsl erlang:system_info(wordsize)*8) div 1024,
case ?B2KB(B) of
KB when KB > MaxKB -> MaxKB;
KB -> KB
end.
format(false, _Frmt) ->
ok;
format(IODev, Frmt) ->
io:format(IODev, Frmt, []).
format(false, _Frmt, _Args) ->
ok;
format(IODev, Frmt, Args) ->
io:format(IODev, Frmt, Args).
fcp : format comment
fcp(IODev, Frmt, Args) ->
fc(IODev, Frmt, Args),
format(IODev, "#~n").
fcp(IODev, Frmt) ->
fc(IODev, Frmt),
format(IODev, "#~n").
%% fc: format comment
fc(IODev, Frmt, Args) ->
fc(IODev, lists:flatten(io_lib:format(Frmt, Args))).
fc(IODev, String) ->
fc_aux(IODev, string:tokens(String, " "), 0).
fc_aux(_IODev, [], 0) ->
ok;
fc_aux(IODev, [], _Len) ->
format(IODev, "~n");
fc_aux(IODev, [T|Ts], 0) ->
Len = 2 + length(T),
format(IODev, "# ~s", [T]),
fc_aux(IODev, Ts, Len);
fc_aux(IODev, [T|_Ts] = ATs, Len) when (length(T) + Len) >= ?PRINT_WITDH ->
format(IODev, "~n"),
fc_aux(IODev, ATs, 0);
fc_aux(IODev, [T|Ts], Len) ->
NewLen = Len + 1 + length(T),
format(IODev, " ~s", [T]),
fc_aux(IODev, Ts, NewLen).
%% fcl: format comment line
fcl(FTO) ->
EndStr = "# ",
Precision = length(EndStr),
FieldWidth = -1*(?PRINT_WITDH),
format(FTO, "~*.*.*s~n", [FieldWidth, Precision, $-, EndStr]).
fcl(FTO, A) when is_atom(A) ->
fcl(FTO, atom_to_list(A));
fcl(FTO, Str) when is_list(Str) ->
Str2 = "# --- " ++ Str ++ " ",
Precision = length(Str2),
FieldWidth = -1*(?PRINT_WITDH),
format(FTO, "~*.*.*s~n", [FieldWidth, Precision, $-, Str2]).
| null | https://raw.githubusercontent.com/wireless-net/erlang-nommu/79f32f81418e022d8ad8e0e447deaea407289926/lib/runtime_tools/src/erts_alloc_config.erl | erlang |
%CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
-------------------------------------------------------------------
File : erts_alloc_config.erl
Description : Generate an erts_alloc configuration suitable for
a limited amount of runtime scenarios.
-------------------------------------------------------------------
Exported interface
Test and debug export
state() is intentionally undocumented, and is for testing
and debugging only...
Server
Save scenario
The high priority is not really necessary. It is
used since it will make retrieval of allocator
information less spread out in time on a highly
loaded system.
Non erts_alloc_util allocator
Make configuration
Misc.
fc: format comment
fcl: format comment line | Copyright Ericsson AB 2007 - 2011 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
The Initial Developer of the Original Code is Ericsson AB .
Author :
Created : 9 May 2007 by
-module(erts_alloc_config).
-record(state, {have_scenario = false,
alloc}).
-record(alloc, {name,
enabled,
need_config_change,
alloc_util,
instances,
strategy,
acul,
low_mbc_blocks_size,
high_mbc_blocks_size,
sbct,
segments}).
-record(conf,
{segments,
format_to}).
-record(segment, {size,number}).
-define(PRINT_WITDH, 76).
-define(SERVER, '__erts_alloc_config__').
-define(KB, 1024).
-define(MB, 1048576).
-define(B2KB(B), ((((B) - 1) div ?KB) + 1)).
-define(ROUNDUP(V, R), ((((V) - 1) div (R)) + 1)*(R)).
-define(LARGE_GROWTH_ABS_LIMIT, 20*?MB).
-define(MBC_MSEG_LIMIT, 150).
-define(FRAG_FACT, 1.25).
-define(GROWTH_SEG_FACT, 2).
-define(MIN_SEG_SIZE, 1*?MB).
-define(SMALL_GROWTH_SEGS, 5).
-define(ALLOC_UTIL_ALLOCATOR(A),
A == binary_alloc;
A == std_alloc;
A == ets_alloc;
A == fix_alloc;
A == eheap_alloc;
A == ll_alloc;
A == sl_alloc;
A == temp_alloc;
A == driver_alloc).
-define(ALLOCATORS,
[binary_alloc,
ets_alloc,
eheap_alloc,
fix_alloc,
ll_alloc,
mseg_alloc,
sl_alloc,
std_alloc,
sys_alloc,
temp_alloc,
driver_alloc]).
-define(MMBCS_DEFAULTS,
[{binary_alloc, 131072},
{std_alloc, 131072},
{ets_alloc, 131072},
{fix_alloc, 131072},
{eheap_alloc, 524288},
{ll_alloc, 131072},
{sl_alloc, 131072},
{temp_alloc, 131072},
{driver_alloc, 131072}]).
-export([save_scenario/0,
make_config/0,
make_config/1,
stop/0]).
-export([state/0]).
save_scenario() ->
req(save_scenario).
make_config() ->
make_config(group_leader()).
make_config(FileName) when is_list(FileName) ->
case file:open(FileName, [write]) of
{ok, IODev} ->
Res = req({make_config, IODev}),
file:close(IODev),
Res;
Error ->
Error
end;
make_config(IODev) ->
req({make_config, IODev}).
stop() ->
req(stop).
state() ->
req(state).
req(Req) ->
Ref = make_ref(),
ReqMsg = {request, self(), Ref, Req},
req(ReqMsg, Ref, true).
req(ReqMsg, Ref, TryStart) ->
req(ReqMsg, Ref, TryStart, erlang:monitor(process, ?SERVER)).
req(ReqMsg, Ref, TryStart, Mon) ->
(catch ?SERVER ! ReqMsg),
receive
{response, Ref, Res} ->
erlang:demonitor(Mon, [flush]),
Res;
{'DOWN', Mon, _, _, noproc} ->
case TryStart of
true -> start_server(Ref, ReqMsg);
false -> {error, server_died}
end;
{'DOWN', Mon, _, _, Reason} ->
{error, Reason}
end.
start_server(Ref, ReqMsg) ->
Starter = self(),
Pid = spawn(fun () ->
register(?SERVER, self()),
Starter ! {Ref, self(), started},
server_loop(make_state())
end),
Mon = erlang:monitor(process, Pid),
receive
{Ref, Pid, started} ->
req(ReqMsg, Ref, false, Mon);
{'DOWN', Mon, _, _, _} ->
req(ReqMsg, Ref, false)
end.
server_loop(State) ->
NewState = receive
{request, From, Ref, save_scenario} ->
Alloc = save_scenario(State#state.alloc),
From ! {response, Ref, ok},
State#state{alloc = Alloc, have_scenario = true};
{request, From, Ref, {make_config, IODev}} ->
case State#state.have_scenario of
true ->
Conf = #conf{segments = ?MBC_MSEG_LIMIT,
format_to = IODev},
Res = mk_config(Conf, State#state.alloc),
From ! {response, Ref, Res};
_ ->
From ! {response, Ref, no_scenario_saved}
end,
State;
{request, From, Ref, stop} ->
From ! {response, Ref, ok},
exit(normal);
{request, From, Ref, state} ->
From ! {response, Ref, State},
State;
{request, From, Ref, Req} ->
From ! {response, Ref, {unknown_request, Req}},
State;
_ ->
State
end,
server_loop(NewState).
carrier_migration_support(aoff) ->
true;
carrier_migration_support(aoffcbf) ->
true;
carrier_migration_support(aoffcaobf) ->
true;
carrier_migration_support(_) ->
false.
allocator_instances(ll_alloc, Strategy) ->
case carrier_migration_support(Strategy) of
true -> erlang:system_info(schedulers);
false -> 1
end;
allocator_instances(_A, undefined) ->
1;
allocator_instances(_A, _Strategy) ->
erlang:system_info(schedulers).
strategy(temp_alloc, _AI) ->
af;
strategy(A, AI) ->
try
{A, OptList} = lists:keyfind(A, 1, AI),
{as, S} = lists:keyfind(as, 1, OptList),
S
catch
_ : _ ->
undefined
end.
strategy_str(af) ->
"A fit";
strategy_str(gf) ->
"Good fit";
strategy_str(bf) ->
"Best fit";
strategy_str(aobf) ->
"Address order best fit";
strategy_str(aoff) ->
"Address order first fit";
strategy_str(aoffcbf) ->
"Address order first fit carrier best fit";
strategy_str(aoffcaobf) ->
"Address order first fit carrier adress order best fit".
default_acul(A, S) ->
case carrier_migration_support(S) of
false ->
0;
true ->
case A of
ll_alloc -> 85;
eheap_alloc -> 45;
_ -> 60
end
end.
%% Build the initial #state{}: one #alloc{} record per allocator in
%% ?ALLOCATORS, seeded with its currently configured strategy, the
%% default ACUL for that strategy, and the instance count to configure.
make_state() ->
    {_, _, _, AI} = erlang:system_info(allocator),
    #state{alloc = lists:map(fun (A) ->
                                     S = strategy(A, AI),
                                     #alloc{name = A,
                                            strategy = S,
                                            acul = default_acul(A, S),
                                            instances = allocator_instances(A, S)}
                             end,
                             ?ALLOCATORS)}.
%% Two-level lookup in the allocator-info structure: find the Key1
%% section (must be a {Key1, List} pair), then return the whole tuple
%% keyed by Key2 inside it; 'undefined' when either level is missing.
ai_value(Key1, Key2, AI) ->
    case lists:keyfind(Key1, 1, AI) of
        {Key1, Value1} ->
            case lists:keyfind(Key2, 1, Value1) of
                false -> undefined;
                Result -> Result
            end;
        _ ->
            undefined
    end.
%% Merge a newly sampled [Min, Max] mbcs blocks-size interval into the
%% interval stored in the #alloc{} record. The first sample initializes
%% the interval and enables the allocator; later samples only widen it.
chk_mbcs_blocks_size(#alloc{low_mbc_blocks_size = undefined,
                            high_mbc_blocks_size = undefined} = Alc,
                     Min,
                     Max) ->
    Alc#alloc{low_mbc_blocks_size = Min,
              high_mbc_blocks_size = Max,
              enabled = true};
chk_mbcs_blocks_size(#alloc{low_mbc_blocks_size = LowBS,
                            high_mbc_blocks_size = HighBS} = Alc,
                     Min,
                     Max) ->
    %% Once initialized, both bounds must be integers.
    true = is_integer(LowBS),
    true = is_integer(HighBS),
    Alc#alloc{low_mbc_blocks_size = erlang:min(Min, LowBS),
              high_mbc_blocks_size = erlang:max(Max, HighBS)}.
%% Set the alloc_util flag on an #alloc{}; the first clause avoids a
%% record copy when the value is already the requested one.
set_alloc_util(#alloc{alloc_util = AU} = Alc, AU) ->
    Alc;
set_alloc_util(Alc, Val) ->
    Alc#alloc{alloc_util = Val}.
%% Record the single-block carrier threshold (the sbct option, reported
%% in bytes) the first time it is seen, converted to a clamped KB value.
%% Subsequent samples keep the first recorded value.
chk_sbct(#alloc{sbct = undefined} = Alc, AI) ->
    case ai_value(options, sbct, AI) of
        {sbct, Bytes} when is_integer(Bytes) -> Alc#alloc{sbct = b2kb(Bytes)};
        _ -> Alc
    end;
chk_sbct(Alc, _AI) ->
    Alc.
%% Sample current usage of all allocators into AlcList. Runs at 'high'
%% process priority so the individual samples are taken close together
%% in time; the original priority is restored before returning.
save_scenario(AlcList) ->
    OP = process_flag(priority, high),
    Res = do_save_scenario(AlcList),
    process_flag(priority, OP),
    Res.
%% Merge the instance info AI of one allocator into its #alloc{} record:
%% pick up the sbc threshold and widen the mbcs blocks-size interval.
%% An allocator without mbcs blocks_size data is flagged as not being an
%% alloc_util allocator.
save_ai2(Alc, AI) ->
    Alc1 = chk_sbct(Alc, AI),
    case ai_value(mbcs, blocks_size, AI) of
        {blocks_size, MinBS, _, MaxBS} ->
            set_alloc_util(chk_mbcs_blocks_size(Alc1, MinBS, MaxBS), true);
        _ ->
            %% NOTE(review): this branch drops the sbct recorded in Alc1
            %% and keeps the original Alc -- presumably intentional since
            %% sbct only matters for alloc_util allocators; confirm.
            set_alloc_util(Alc, false)
    end.
%% Dispatch on the per-instance info list of one allocator.
save_ai(Alc, [{instance, 0, AI}]) ->
    %% Single (instance 0 only) case: merge its info directly.
    save_ai2(Alc, AI);
save_ai(Alc, [{instance, _, _}, {instance, _, _}| _]) ->
    %% Multiple instances: measurements need single-instance data, so a
    %% configuration change is required before a scenario can be saved.
    Alc#alloc{enabled = true, need_config_change = true};
save_ai(Alc, AI) ->
    %% Non multi-instance alloc_util allocator: AI is the info list
    %% itself. (Restored clause body that was missing, leaving the
    %% clause head dangling -- a syntax error.)
    save_ai2(Alc, AI).
%% Refresh every still-enabled allocator from erlang:system_info/1.
%% An unknown allocator name is fatal; an allocator the emulator reports
%% as 'false' (disabled) is switched off in our state as well.
do_save_scenario(AlcList) ->
    lists:map(fun (#alloc{enabled = false} = Alc) ->
                      Alc;
                  (#alloc{name = Name} = Alc) ->
                      case erlang:system_info({allocator, Name}) of
                          undefined ->
                              exit({bad_allocator_name, Name});
                          false ->
                              Alc#alloc{enabled = false};
                          AI when is_list(AI) ->
                              save_ai(Alc, AI)
                      end
              end,
              AlcList).
%% Round a byte count up to a whole-kilobyte configuration value; the
%% rounding granularity grows with the size (256 KB steps below 1 MB,
%% up to 10 MB steps at 256 MB and above). Negative sizes are fatal.
conf_size(Bytes) when is_integer(Bytes), Bytes < 0 ->
    exit({bad_value, Bytes});
conf_size(Bytes) when is_integer(Bytes), Bytes < 1*?MB ->
    ?ROUNDUP(?B2KB(Bytes), 256);
conf_size(Bytes) when is_integer(Bytes), Bytes < 10*?MB ->
    ?ROUNDUP(?B2KB(Bytes), ?B2KB(1*?MB));
conf_size(Bytes) when is_integer(Bytes), Bytes < 100*?MB ->
    ?ROUNDUP(?B2KB(Bytes), ?B2KB(2*?MB));
conf_size(Bytes) when is_integer(Bytes), Bytes < 256*?MB ->
    ?ROUNDUP(?B2KB(Bytes), ?B2KB(5*?MB));
conf_size(Bytes) when is_integer(Bytes) ->
    ?ROUNDUP(?B2KB(Bytes), ?B2KB(10*?MB)).
%% Emit the single-block carrier threshold flag (+M<x>sbct, in KB) for
%% one allocator, preceded by an explanatory comment.
sbct(#conf{format_to = FTO}, #alloc{name = A, sbct = SBCT}) ->
    fc(FTO, "Sbc threshold size of ~p kilobytes.", [SBCT]),
    format(FTO, " +M~csbct ~p~n", [alloc_char(A), SBCT]).
%% Default main multi-block carrier size (bytes) for allocator A when
%% Insts instances are used: the per-allocator default divided over at
%% most four instances, rounded up to whole kilobytes. temp_alloc keeps
%% its default untouched.
default_mmbcs(temp_alloc = A, _Insts) ->
    {value, {A, MMBCS_Default}} = lists:keysearch(A, 1, ?MMBCS_DEFAULTS),
    MMBCS_Default;
default_mmbcs(A, Insts) ->
    {value, {A, MMBCS_Default}} = lists:keysearch(A, 1, ?MMBCS_DEFAULTS),
    Divisor = erlang:min(Insts, 4),
    ?ROUNDUP(MMBCS_Default div Divisor, ?B2KB(1*?KB)).
%% Emit the main multi-block carrier size flag (+M<x>mmbcs, in KB).
%% For a single-instance allocator whose observed usage exceeds the
%% default, the measured size is used; otherwise the computed default is
%% written out explicitly (it may differ from the emulator's built-in
%% default when the instance count differs).
mmbcs(#conf{format_to = FTO},
      #alloc{name = A, instances = Insts, low_mbc_blocks_size = BlocksSize}) ->
    %% temp_alloc keeps the total; others are divided per instance.
    BS = case A of
             temp_alloc -> BlocksSize;
             _ -> BlocksSize div Insts
         end,
    DefMMBCS = default_mmbcs(A, Insts),
    case {Insts, BS > DefMMBCS} of
        {1, true} ->
            MMBCS = conf_size(BS),
            fc(FTO, "Main mbc size of ~p kilobytes.", [MMBCS]),
            format(FTO, " +M~cmmbcs ~p~n", [alloc_char(A), MMBCS]);
        _ ->
            MMBCS = ?B2KB(DefMMBCS),
            fc(FTO, "Main mbc size of ~p kilobytes.", [MMBCS]),
            format(FTO, " +M~cmmbcs ~p~n", [alloc_char(A), MMBCS]),
            ok
    end.
%% Emit smallest and largest mbc size flags (+M<x>smbcs / +M<x>lmbcs);
%% both are pinned to the computed mseg segment size so all multi-block
%% carriers end up the same size.
smbcs_lmbcs(#conf{format_to = FTO},
            #alloc{name = A, segments = Segments}) ->
    MBCS = Segments#segment.size,
    AC = alloc_char(A),
    fc(FTO, "Mseg mbc size of ~p kilobytes.", [MBCS]),
    format(FTO, " +M~csmbcs ~p +M~clmbcs ~p~n", [AC, MBCS, AC, MBCS]),
    ok.
%% Command-line letter identifying allocator A in +M<char> flags;
%% an unknown allocator name is fatal.
alloc_char(Alloc) ->
    Table = [{binary_alloc, $B},
             {std_alloc,    $D},
             {ets_alloc,    $E},
             {fix_alloc,    $F},
             {eheap_alloc,  $H},
             {ll_alloc,     $L},
             {mseg_alloc,   $M},
             {driver_alloc, $R},
             {sl_alloc,     $S},
             {temp_alloc,   $T},
             {sys_alloc,    $Y}],
    case lists:keyfind(Alloc, 1, Table) of
        {Alloc, Char} -> Char;
        false -> exit({bad_allocator, Alloc})
    end.
%% Emit the configuration section for one allocator. Allocators that
%% are disabled, or that must be reconfigured before they can be
%% measured, only get a warning paragraph; alloc_util allocators get the
%% full parameter set; the rest just get their section header and note.
conf_alloc(#conf{format_to = FTO},
           #alloc{name = A, enabled = false}) ->
    fcl(FTO, A),
    fcp(FTO,
        "WARNING: ~p has been disabled. Consider enabling ~p by passing "
        "the \"+M~ce true\" command line argument and rerun "
        "erts_alloc_config.",
        [A, A, alloc_char(A)]);
conf_alloc(#conf{format_to = FTO},
           #alloc{name = A, need_config_change = true}) ->
    fcl(FTO, A),
    fcp(FTO,
        "WARNING: ~p has been configured in a way that prevents "
        "erts_alloc_config from creating a configuration. The configuration "
        "will be automatically adjusted to fit erts_alloc_config if you "
        "use the \"+Mea config\" command line argument while running "
        "erts_alloc_config.",
        [A]);
conf_alloc(#conf{format_to = FTO} = Conf,
           #alloc{name = A, alloc_util = true} = Alc) ->
    fcl(FTO, A),
    chk_xnote(Conf, Alc),
    au_conf_alloc(Conf, Alc),
    format(FTO, "#~n", []);
conf_alloc(#conf{format_to = FTO} = Conf, #alloc{name = A} = Alc) ->
    fcl(FTO, A),
    chk_xnote(Conf, Alc).
%% Emit an extra informational note for allocators with special
%% properties; silent for all others.
chk_xnote(#conf{format_to = FTO},
          #alloc{name = sys_alloc}) ->
    fcp(FTO, "Cannot be configured. Default malloc implementation used.");
chk_xnote(#conf{format_to = FTO},
          #alloc{name = mseg_alloc}) ->
    fcp(FTO, "Default configuration used.");
chk_xnote(#conf{format_to = FTO},
          #alloc{name = ll_alloc}) ->
    fcp(FTO,
        "Note, blocks allocated with ll_alloc are very "
        "seldom deallocated. Placing blocks in mseg "
        "carriers is therefore very likely only a waste "
        "of resources.");
chk_xnote(#conf{}, #alloc{}) ->
    ok.
%% Emit the full alloc_util parameter set for one allocator: observed
%% mbcs usage interval, threading (+M<x>t), strategy (+M<x>as), the
%% abandon-carrier limit (+M<x>acul) when the strategy supports
%% migration, and finally mmbcs/smbcs/lmbcs/sbct.
au_conf_alloc(#conf{format_to = FTO} = Conf,
              #alloc{name = A,
                     alloc_util = true,
                     instances = Insts,
                     acul = Acul,
                     strategy = Strategy,
                     low_mbc_blocks_size = Low,
                     high_mbc_blocks_size = High} = Alc) ->
    fcp(FTO, "Usage of mbcs: ~p - ~p kilobytes", [?B2KB(Low), ?B2KB(High)]),
    case Insts of
        1 ->
            fc(FTO, "One instance used."),
            format(FTO, " +M~ct false~n", [alloc_char(A)]);
        _ ->
            %% NOTE(review): this prints "~p + 1 instances used." with
            %% [Insts], i.e. literally "N + 1" -- presumably counting an
            %% extra shared instance; confirm against erts_alloc docs.
            fc(FTO, "~p + 1 instances used.",
               [Insts]),
            format(FTO, " +M~ct true~n", [alloc_char(A)]),
            case Strategy of
                undefined ->
                    ok;
                _ ->
                    fc(FTO, "Allocation strategy: ~s.",
                       [strategy_str(Strategy)]),
                    format(FTO, " +M~cas ~s~n", [alloc_char(A),
                                                 atom_to_list(Strategy)])
            end,
            case carrier_migration_support(Strategy) of
                false ->
                    ok;
                true ->
                    fc(FTO, "Abandon carrier utilization limit of ~p%.", [Acul]),
                    format(FTO, " +M~cacul ~p~n", [alloc_char(A), Acul])
            end
    end,
    mmbcs(Conf, Alc),
    smbcs_lmbcs(Conf, Alc),
    sbct(Conf, Alc).
%% Segment size (KB) that accommodates the observed growth spread over
%% Segs segments, padded by the fragmentation and growth factors.
calc_seg_size(Growth, Segs) ->
    conf_size(round(Growth*?FRAG_FACT*?GROWTH_SEG_FACT) div Segs).
%% Distribute the available mseg segments (Conf#conf.segments) over the
%% alloc_util allocators according to their observed mbcs growth
%% (High - Low; for multi-instance allocators Low is taken as 0).
%% Pass 1 (CalcSmall): single-instance ll_alloc gets zero growth
%% segments; allocators below ?LARGE_GROWTH_ABS_LIMIT get a fixed
%% ?SMALL_GROWTH_SEGS segments each, deducted from the budget; large
%% growers are only counted. Pass 2 (CalcLarge): the remaining budget is
%% split evenly over the large growers, but never fewer than
%% ?SMALL_GROWTH_SEGS segments per allocator.
calc_growth_segments(Conf, AlcList0) ->
    CalcSmall = fun (#alloc{name = ll_alloc, instances = 1} = Alc, Acc) ->
                        {Alc#alloc{segments = #segment{size = conf_size(0),
                                                       number = 0}},
                         Acc};
                    (#alloc{alloc_util = true,
                            instances = Insts,
                            low_mbc_blocks_size = LowMBC,
                            high_mbc_blocks_size = High} = Alc,
                     {SL, AL}) ->
                        Low = case Insts of
                                  1 -> LowMBC;
                                  _ -> 0
                              end,
                        Growth = High - Low,
                        case Growth >= ?LARGE_GROWTH_ABS_LIMIT of
                            true ->
                                %% Large grower: handled in pass 2.
                                {Alc, {SL, AL+1}};
                            false ->
                                Segs = ?SMALL_GROWTH_SEGS,
                                SegSize = calc_seg_size(Growth, Segs),
                                {Alc#alloc{segments
                                           = #segment{size = SegSize,
                                                      number = Segs}},
                                 {SL - Segs, AL}}
                        end;
                    (Alc, Acc) -> {Alc, Acc}
                end,
    {AlcList1, {SegsLeft, AllocsLeft}}
        = lists:mapfoldl(CalcSmall, {Conf#conf.segments, 0}, AlcList0),
    case AllocsLeft of
        0 ->
            AlcList1;
        _ ->
            SegsPerAlloc = case (SegsLeft div AllocsLeft) + 1 of
                               SPA when SPA < ?SMALL_GROWTH_SEGS ->
                                   ?SMALL_GROWTH_SEGS;
                               SPA ->
                                   SPA
                           end,
            %% Pass 2 only touches allocators pass 1 left without a
            %% #segment{} (segments == undefined).
            CalcLarge = fun (#alloc{alloc_util = true,
                                    segments = undefined,
                                    instances = Insts,
                                    low_mbc_blocks_size = LowMBC,
                                    high_mbc_blocks_size = High} = Alc) ->
                                Low = case Insts of
                                          1 -> LowMBC;
                                          _ -> 0
                                      end,
                                Growth = High - Low,
                                SegSize = calc_seg_size(Growth,
                                                        SegsPerAlloc),
                                Alc#alloc{segments
                                          = #segment{size = SegSize,
                                                     number = SegsPerAlloc}};
                            (Alc) ->
                                Alc
                        end,
            lists:map(CalcLarge, AlcList1)
    end.
%% Emit a complete configuration for AlcList to the configured output
%% device: header, one section per allocator (growth segments are
%% distributed first), then the footer. Always returns ok.
mk_config(#conf{format_to = FTO} = Conf, AlcList) ->
    format_header(FTO),
    lists:foreach(fun (Alc) -> conf_alloc(Conf, Alc) end,
                  calc_growth_segments(Conf, AlcList)),
    format_footer(FTO),
    ok.
%% Emit the explanatory header of the generated configuration:
%% generation timestamp, emulator version, scheduler-count caveat and
%% general usage advice. Fixed typos in the emitted text ("ever
%% horrible" -> "even horrible", "singel-block" -> "single-block",
%% "effects" -> "affects").
format_header(FTO) ->
    {Y,Mo,D} = erlang:date(),
    {H,Mi,S} = erlang:time(),
    fcl(FTO),
    fcl(FTO, "erts_alloc configuration"),
    fcl(FTO),
    fcp(FTO,
        "This erts_alloc configuration was automatically "
        "generated at ~w-~2..0w-~2..0w ~2..0w:~2..0w.~2..0w by "
        "erts_alloc_config.",
        [Y, Mo, D, H, Mi, S]),
    fcp(FTO,
        "~s was used when generating the configuration.",
        [string:strip(erlang:system_info(system_version), both, $\n)]),
    case erlang:system_info(schedulers) of
        1 -> ok;
        Schdlrs ->
            fcp(FTO,
                "NOTE: This configuration was made for ~p schedulers. "
                "It is very important that ~p schedulers are used.",
                [Schdlrs, Schdlrs])
    end,
    fcp(FTO,
        "This configuration is intended as a suggestion and "
        "may need to be adjusted manually. Instead of modifying "
        "this file, you are advised to write another configuration "
        "file and override values that you want to change. "
        "Doing it this way simplifies things when you want to "
        "rerun erts_alloc_config."),
    fcp(FTO,
        "This configuration is based on the actual use of "
        "multi-block carriers (mbcs) for a set of different "
        "runtime scenarios. Note that this configuration may "
        "perform bad, even horrible, for other runtime "
        "scenarios."),
    fcp(FTO,
        "You are advised to rerun erts_alloc_config if the "
        "applications run when the configuration was made "
        "are changed, or if the load on the applications have "
        "changed since the configuration was made. You are also "
        "advised to rerun erts_alloc_config if the Erlang runtime "
        "system used is changed."),
    fcp(FTO,
        "Note, that the single-block carrier (sbc) parameters "
        "very much affects the use of mbcs. Therefore, if you "
        "change the sbc parameters, you are advised to rerun "
        "erts_alloc_config."),
    fcp(FTO,
        "For more information see the erts_alloc_config(3) "
        "documentation."),
    ok.
%% Emit the closing full-width separator line.
format_footer(FTO) ->
    fcl(FTO).
%% Convert bytes to kilobytes (?B2KB rounds up), clamped to the largest
%% kilobyte count representable in a machine word.
b2kb(B) when is_integer(B) ->
    MaxKB = (1 bsl erlang:system_info(wordsize)*8) div 1024,
    case ?B2KB(B) of
        KB when KB > MaxKB -> MaxKB;
        KB -> KB
    end.
%% Write Frmt to IODev; the device 'false' means output is disabled.
format(false, _Frmt) ->
    ok;
format(IODev, Frmt) ->
    io:format(IODev, Frmt, []).
%% Like format/2 but with format arguments; 'false' disables output.
format(false, _Frmt, _Args) ->
    ok;
format(IODev, Frmt, Args) ->
    io:format(IODev, Frmt, Args).
%% fcp: format comment paragraph
%% Format a comment paragraph with arguments: the word-wrapped comment
%% text followed by a bare "#" line.
fcp(IODev, Frmt, Args) ->
    fc(IODev, Frmt, Args),
    format(IODev, "#~n").
%% Format a comment paragraph (no arguments), ending with a "#" line.
fcp(IODev, Frmt) ->
    fc(IODev, Frmt),
    format(IODev, "#~n").
%% Render Frmt/Args to a flat string, then word-wrap it as a comment.
fc(IODev, Frmt, Args) ->
    fc(IODev, lists:flatten(io_lib:format(Frmt, Args))).
%% Word-wrap String into "# "-prefixed comment lines.
fc(IODev, String) ->
    fc_aux(IODev, string:tokens(String, " "), 0).
%% Greedy word-wrapper: emits words onto "# "-prefixed lines, breaking
%% before a word would reach ?PRINT_WITDH columns (macro name [sic],
%% defined elsewhere). Len tracks the current line width; 0 means we are
%% at the start of a fresh line.
fc_aux(_IODev, [], 0) ->
    ok;
fc_aux(IODev, [], _Len) ->
    format(IODev, "~n");
fc_aux(IODev, [T|Ts], 0) ->
    Len = 2 + length(T),
    format(IODev, "# ~s", [T]),
    fc_aux(IODev, Ts, Len);
fc_aux(IODev, [T|_Ts] = ATs, Len) when (length(T) + Len) >= ?PRINT_WITDH ->
    format(IODev, "~n"),
    fc_aux(IODev, ATs, 0);
fc_aux(IODev, [T|Ts], Len) ->
    NewLen = Len + 1 + length(T),
    format(IODev, " ~s", [T]),
    fc_aux(IODev, Ts, NewLen).
%% Emit a full-width "# ----" separator line.
fcl(FTO) ->
    EndStr = "# ",
    Precision = length(EndStr),
    FieldWidth = -1*(?PRINT_WITDH),
    format(FTO, "~*.*.*s~n", [FieldWidth, Precision, $-, EndStr]).
%% Emit a "# --- Title ----" section separator; atoms are converted to
%% their string form first.
fcl(FTO, A) when is_atom(A) ->
    fcl(FTO, atom_to_list(A));
fcl(FTO, Str) when is_list(Str) ->
    Str2 = "# --- " ++ Str ++ " ",
    Precision = length(Str2),
    FieldWidth = -1*(?PRINT_WITDH),
    format(FTO, "~*.*.*s~n", [FieldWidth, Precision, $-, Str2]).
|
c9d2e655405874246aef07f6547f8c2365aa66c931dd4b50a356147197360d0b | iustin/corydalis | Settings.hs |
{-

Copyright (C) 2013 Iustin Pop

This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.

This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.

You should have received a copy of the GNU Affero General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.

-}
{-# LANGUAGE CPP #-}
{-# LANGUAGE NoImplicitPrelude #-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecordWildCards #-}
-- | Settings are centralized, as much as possible, into this file. This
-- includes database connection settings, static file locations, etc.
-- In addition, you can configure a number of different aspects of Yesod
-- by overriding methods in the Yesod typeclass. That instance is
-- declared in the Foundation.hs file.
module Settings
( AppSettings(..)
, widgetFile
, combineStylesheets
, combineScripts
) where
import ClassyPrelude.Yesod
import Data.Aeson (withObject, (.!=), (.:?))
import Database.Persist.Sqlite (SqliteConf)
import Language.Haskell.TH.Syntax (Exp, Name, Q)
import Network.Wai.Handler.Warp (HostPreference)
import Yesod.Default.Util
import Types
-- | Runtime settings to configure this application. These settings can be
-- loaded from various sources: defaults, environment variables, config files,
-- theoretically even a database.
data AppSettings = AppSettings
  { appStaticDir :: String
  -- ^ Directory from which to serve static files.
  , appDatabaseConf :: SqliteConf
  -- ^ Configuration settings for accessing the database.
  , appRoot :: Maybe Text
  -- ^ Base for all generated URLs. If @Nothing@, determined
  -- from the request headers.
  , appHost :: HostPreference
  -- ^ Host/interface the server should bind to.
  , appPort :: Int
  -- ^ Port to listen on
  , appHttps :: Bool
  -- ^ Whether to run HTTPS or not on the configured host/port. This
  -- is mostly available for reverse proxies, where adding TLS
  -- doesn't add much additional security. For direct access
  -- (without a proxy), this is always recommended.
  , appSecureSessions :: Bool
  -- ^ Enable secure cookies, and set a Strict-Transport-Security
  -- header on the connections. When https is set, this is
  -- overridden, and when https is unset, this can be helpful in case
  -- of reverse proxying.
  , appIpFromHeader :: Bool
  -- ^ Get the IP address from the header when logging. Useful when sitting
  -- behind a reverse proxy.
  , appLoginMessage :: Maybe Text
  -- ^ Extra message to show on login page.
  , appHomeMessage :: Maybe Text
  -- ^ Extra message to show on the main page. Useful for example
  -- for demo or public sites.
  , appDetailedRequestLogging :: Bool
  -- ^ Use detailed request logging system
  , appShouldLogAll :: Bool
  -- ^ Should all log messages be displayed?
  , appConfig :: Config
  -- ^ Picture-related configuration
  }
-- | Whether this binary was built in development mode; set at compile
-- time via CPP from the build flags.
isDevel :: Bool
isDevel =
#ifdef DEVELOPMENT
  True
#else
  False
#endif
-- | Parse 'AppSettings' from a JSON/YAML object. Fields marked with
-- @.:?@ are optional; the logging flags default to the build flavour
-- (verbose in development, quiet otherwise).
instance FromJSON AppSettings where
  parseJSON = withObject "AppSettings" $ \o -> do
    let defaultDev = isDevel
    appStaticDir              <- o .: "static-dir"
    appDatabaseConf           <- o .: "database"
    appRoot                   <- o .:? "approot"
    appHost                   <- fromString <$> o .: "host"
    appPort                   <- o .: "port"
    appHttps                  <- o .: "https"
    appSecureSessions         <- o .: "secure-sessions"
    appIpFromHeader           <- o .: "ip-from-header"
    appLoginMessage           <- o .:? "login-msg"
    appHomeMessage            <- o .:? "home-msg"
    appDetailedRequestLogging <- o .:? "detailed-logging" .!= defaultDev
    appShouldLogAll           <- o .:? "should-log-all"   .!= defaultDev
    appConfig                 <- o .: "config"
    return AppSettings {..}
-- | Settings for 'widgetFile', such as which template languages to support and
-- default Hamlet settings.
--
-- For more information on modifying behavior, see:
--
-- https://github.com/yesodweb/yesod/wiki/Overriding-widgetFile
-- | Template settings used by 'widgetFile'; the library default.
widgetFileSettings :: WidgetFileSettings
widgetFileSettings = def
-- | How static files should be combined.
-- | How static files should be combined; the library default.
combineSettings :: CombineSettings
combineSettings = def
-- The rest of this file contains settings which rarely need changing by a
-- user.
-- | Load a widget's templates by name. Development builds reload the
-- templates on change; production builds compile them in.
widgetFile :: String -> Q Exp
widgetFile =
#ifdef DEVELOPMENT
  widgetFileReload
#else
  widgetFileNoReload
#endif
  widgetFileSettings
-- The following two functions can be used to combine multiple CSS or JS files
-- at compile time to decrease the number of http requests.
-- Sample usage (inside a Widget):
--
-- > $(combineStylesheets 'StaticR [style1_css, style2_css])
-- | Combine several CSS routes into a single request (combining is a
-- no-op in development builds).
combineStylesheets :: Name -> [Route Static] -> Q Exp
combineStylesheets =
  combineStylesheets' isDevel combineSettings
-- | Combine several JS routes into a single request (combining is a
-- no-op in development builds).
combineScripts :: Name -> [Route Static] -> Q Exp
combineScripts =
  combineScripts' isDevel combineSettings
| null | https://raw.githubusercontent.com/iustin/corydalis/43f8bf004904847fad43c428a9e1b20e67da964d/src/Settings.hs | haskell | # LANGUAGE OverloadedStrings #
| Settings are centralized, as much as possible, into this file. This
includes database connection settings, static file locations, etc.
| Runtime settings to configure this application. These settings can be
loaded from various sources: defaults, environment variables, config files,
theoretically even a database.
^ Directory from which to serve static files.
^ Configuration settings for accessing the database.
^ Base for all generated URLs. If @Nothing@, determined
from the request headers.
^ Host/interface the server should bind to.
^ Port to listen on
^ Whether to run HTTPS or not on the configured host/port. This
doesn't much any additional security. For direct access
(without a proxy), this is always recommended.
header on the connections. When https is set, this is
overriden, and when https is unset, this can be helpful in case
of reverse proxying.
^ Get the IP address from the header when logging. Useful when sitting
behind a reverse proxy.
^ Extra message to show on login page.
^ Extra message to show on the main page. Useful for example
for demo or public sites.
^ Use detailed request logging system
^ Should all log messages be displayed?
^ Picture-related configuration
| Settings for 'widgetFile', such as which template languages to support and
For more information on modifying behavior, see:
-widgetFile
| How static files should be combined.
The rest of this file contains settings which rarely need changing by a
user.
at compile time to decrease the number of http requests.
Sample usage (inside a Widget):
|
Copyright ( C ) 2013
This program is free software : you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
along with this program . If not , see < / > .
Copyright (C) 2013 Iustin Pop
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see </>.
-}
# LANGUAGE CPP #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE RecordWildCards #
In addition , you can configure a number of different aspects of Yesod
by overriding methods in the Yesod typeclass . That instance is
declared in the Foundation.hs file .
module Settings
( AppSettings(..)
, widgetFile
, combineStylesheets
, combineScripts
) where
import ClassyPrelude.Yesod
import Data.Aeson (withObject, (.!=), (.:?))
import Database.Persist.Sqlite (SqliteConf)
import Language.Haskell.TH.Syntax (Exp, Name, Q)
import Network.Wai.Handler.Warp (HostPreference)
import Yesod.Default.Util
import Types
data AppSettings = AppSettings
{ appStaticDir :: String
, appDatabaseConf :: SqliteConf
, appRoot :: Maybe Text
, appHost :: HostPreference
, appPort :: Int
, appHttps :: Bool
is mostly available for reverse proxies , where adding TLS
, appSecureSessions :: Bool
^ Enable secure cookies , and set a Strict - Transport - Security
, appIpFromHeader :: Bool
, appLoginMessage :: Maybe Text
, appHomeMessage :: Maybe Text
, appDetailedRequestLogging :: Bool
, appShouldLogAll :: Bool
, appConfig :: Config
}
isDevel :: Bool
isDevel =
#ifdef DEVELOPMENT
True
#else
False
#endif
instance FromJSON AppSettings where
parseJSON = withObject "AppSettings" $ \o -> do
let defaultDev = isDevel
appStaticDir <- o .: "static-dir"
appDatabaseConf <- o .: "database"
appRoot <- o .:? "approot"
appHost <- fromString <$> o .: "host"
appPort <- o .: "port"
appHttps <- o .: "https"
appSecureSessions <- o .: "secure-sessions"
appIpFromHeader <- o .: "ip-from-header"
appLoginMessage <- o .:? "login-msg"
appHomeMessage <- o .:? "home-msg"
appDetailedRequestLogging <- o .:? "detailed-logging" .!= defaultDev
appShouldLogAll <- o .:? "should-log-all" .!= defaultDev
appConfig <- o .: "config"
return AppSettings {..}
default Hamlet settings .
widgetFileSettings :: WidgetFileSettings
widgetFileSettings = def
combineSettings :: CombineSettings
combineSettings = def
widgetFile :: String -> Q Exp
widgetFile =
#ifdef DEVELOPMENT
widgetFileReload
#else
widgetFileNoReload
#endif
widgetFileSettings
The following two functions can be used to combine multiple CSS or JS files
> $ ( combineStylesheets ' StaticR [ style1_css , style2_css ] )
combineStylesheets :: Name -> [Route Static] -> Q Exp
combineStylesheets =
combineStylesheets' isDevel combineSettings
combineScripts :: Name -> [Route Static] -> Q Exp
combineScripts =
combineScripts' isDevel combineSettings
|
baffa520937a2a5c3c994bbc1b3152db6d2f3504e36240221f4b4f049f0517c2 | nd/bird | 9.2.4.hs | --it is in my opinion | null | https://raw.githubusercontent.com/nd/bird/06dba97af7cfb11f558eaeb31a75bd04cacf7201/ch09/9.2.4.hs | haskell | it is in my opinion | |
459dda73dcc6b3fb834fef2f9e7c5782c1042724de1931860fec09a2d4bb3433 | jayrbolton/coursework | Lexer.hs | module Lexer where
import Data.Char
-- | Tokens of the Clite language.
data Token =
  {- Data types -}
  Bool | Int | Float | Char |
  {- Constants -}
  True | False |
  {- Loops -}
  While |
  {- Conditionals -}
  If | Else |
  {- Functions -}
  Main |
  {- Punctuation -}
  LeftBrace | RightBrace | LeftBracket | RightBracket |
  LeftParen | RightParen | Semicolon | Comma |
  {- Operators -}
  Assign | Equals | Less | LessEqual | Greater | GreaterEqual |
  Not | NotEqual | Plus | Minus | Multiply | Divide | And | Or |
  {- Values -}
  Identifier String | IntLiteral Int | FloatLiteral Float | CharLiteral Char |
  {- Formatting -}
  Eof
  deriving (Eq, Ord, Show, Read)
-- Sample inputs for manual testing in GHCi.
test1 = "int main() { }"
test2 = "int main() { \nint x;\nbool true;\nfloat 4.44;\n}"
-- Reserved words and the single-character symbols of the language.
keywords = ["bool", "int", "float", "char", "true", "false", "while", "if", "else", "main"]
symbols = ['{', '}', '[', ']', '(', ')', ';', ',', ':', '=', '<', '>', '!', '+', '-', '*', '/', '&', '|', '\'']
-- | Tokenize a Clite source string, ending with 'Eof'.
-- NOTE(review): w is taken with 'span' up to the first space OR symbol,
-- and '<', '>', '!' and the quote are themselves in 'symbols'; so for
-- input starting with those characters w is "" and the two-character
-- helpers (getLess/getGreater/getNot/getCharLit) are called with the
-- empty string, which is a pattern-match failure. TODO confirm and fix.
scan :: String -> [Token]
scan [] = [Eof]
scan s@(x:xs)
  | isSpace x = scan xs
  | isControl x = scan xs
  | isUpper x = getIdent w : scan rest
  | isDigit x = getNum w : scan rest
  | x == '{' = LeftBrace : scan xs
  | x == '}' = RightBrace : scan xs
  | x == '[' = LeftBracket : scan xs
  | x == ']' = RightBracket : scan xs
  | x == '(' = LeftParen : scan xs
  | x == ')' = RightParen : scan xs
  | x == ';' = Semicolon : scan xs
  | x == ',' = Comma : scan xs
  | x == ':' = Assign : scan xs
  | x == '=' = Equals : scan xs
  | x == '<' = getLess w : scan rest
  | x == '>' = getGreater w : scan rest
  | x == '!' = getNot w : scan rest
  | x == '+' = Plus : scan xs
  | x == '-' = Minus : scan xs
  | x == '*' = Multiply : scan xs
  | x == '/' = Divide : scan xs
  | x == '&' = And : scan xs
  | x == '|' = Or : scan xs
  | x == '\'' = getCharLit w : scan rest
  | otherwise = getIdent w : scan rest
  where
    -- w: the longest prefix containing no space and no symbol character.
    (w, rest) = span (\x -> x /= ' ' && not (x `elem` symbols)) s
-- | Classify a word: reserved keyword or plain identifier.
getIdent :: String -> Token
getIdent s
  | s `elem` keywords = getKey s
  | otherwise = Identifier s
-- | Turn a keyword into its token by capitalizing it and using the
-- derived 'Read' instance (e.g. "while" -> While). Partial: crashes on
-- the empty string.
getKey :: String -> Token
getKey (x:xs) = read ((toUpper x):xs) :: Token
-- | Numeric literal: Float when it contains a '.', Int otherwise.
-- Malformed numbers make 'read' fail at runtime.
getNum :: String -> Token
getNum s
  | '.' `elem` s = FloatLiteral (read s :: Float)
  | otherwise = IntLiteral (read s :: Int)
-- | "<" or "<=". Partial: any other argument (including "") has no
-- matching guard -- see the note on 'scan'.
getLess :: String -> Token
getLess s
  | s == "<" = Less
  | s == "<=" = LessEqual
-- | ">" or ">=". Partial: any other argument has no matching guard --
-- see the note on 'scan'.
getGreater :: String -> Token
getGreater s
  | s == ">" = Greater
  | s == ">=" = GreaterEqual
-- | "!" or "!=". Partial: any other argument has no matching guard --
-- see the note on 'scan'.
getNot :: String -> Token
getNot s
  | s == "!" = Not
  | s == "!=" = NotEqual
-- | Character literal: returns the second character of the lexeme
-- (the one after the opening quote). Partial on shorter input.
getCharLit :: String -> Token
getCharLit (_:x:_) = CharLiteral x
| null | https://raw.githubusercontent.com/jayrbolton/coursework/f0da276527d42a6751fb8d29c76de35ce358fe65/computability_and_formal_languages/Kingston_backup/cnc/Haskell/hws/Clite/Lexer.hs | haskell | Data types
Constants
Loops
Conditionals
Functions
Punctuation
Values
Formatting | module Lexer where
import Data.Char
data Token =
Bool | Int | Float | Char |
True | False |
While |
If | Else |
Main |
LeftBrace | RightBrace | LeftBracket | RightBracket |
LeftParen | RightParen | Semicolon | Comma |
Operators
Assign | Equals | Less | LessEqual | Greater | GreaterEqual |
Not | NotEqual | Plus | Minus | Multiply | Divide | And | Or |
Identifier String | IntLiteral Int | FloatLiteral Float | CharLiteral Char |
Eof
deriving (Eq, Ord, Show, Read)
test1 = "int main() { }"
test2 = "int main() { \nint x;\nbool true;\nfloat 4.44;\n}"
keywords = ["bool", "int", "float", "char", "true", "false", "while", "if", "else", "main"]
symbols = ['{', '}', '[', ']', '(', ')', ';', ',', ':', '=', '<', '>', '!', '+', '-', '*', '/', '&', '|', '\'']
scan :: String -> [Token]
scan [] = [Eof]
scan s@(x:xs)
| isSpace x = scan xs
| isControl x = scan xs
| isUpper x = getIdent w : scan rest
| isDigit x = getNum w : scan rest
| x == '{' = LeftBrace : scan xs
| x == '}' = RightBrace : scan xs
| x == '[' = LeftBracket : scan xs
| x == ']' = RightBracket : scan xs
| x == '(' = LeftParen : scan xs
| x == ')' = RightParen : scan xs
| x == ';' = Semicolon : scan xs
| x == ',' = Comma : scan xs
| x == ':' = Assign : scan xs
| x == '=' = Equals : scan xs
| x == '<' = getLess w : scan rest
| x == '>' = getGreater w : scan rest
| x == '!' = getNot w : scan rest
| x == '+' = Plus : scan xs
| x == '-' = Minus : scan xs
| x == '*' = Multiply : scan xs
| x == '/' = Divide : scan xs
| x == '&' = And : scan xs
| x == '|' = Or : scan xs
| x == '\'' = getCharLit w : scan rest
| otherwise = getIdent w : scan rest
where
(w, rest) = span (\x -> x /= ' ' && not (x `elem` symbols)) s
getIdent :: String -> Token
getIdent s
| s `elem` keywords = getKey s
| otherwise = Identifier s
getKey :: String -> Token
getKey (x:xs) = read ((toUpper x):xs) :: Token
getNum :: String -> Token
getNum s
| '.' `elem` s = FloatLiteral (read s :: Float)
| otherwise = IntLiteral (read s :: Int)
getLess :: String -> Token
getLess s
| s == "<" = Less
| s == "<=" = LessEqual
getGreater :: String -> Token
getGreater s
| s == ">" = Greater
| s == ">=" = GreaterEqual
getNot :: String -> Token
getNot s
| s == "!" = Not
| s == "!=" = NotEqual
getCharLit :: String -> Token
getCharLit (_:x:_) = CharLiteral x
|
664eebbd7df4b5e393190219e105d15148eca173cfa42fef822498ddb4a33593 | akazukin5151/kpxhs | ExitDialogEvents.hs | module ViewEvents.ExitDialogEvents (exitEvent) where
import qualified Brick.Main as M
import qualified Brick.Types as T
import Brick.Widgets.Dialog (handleDialogEvent)
import qualified Brick.Widgets.Dialog as D
import Control.Monad.IO.Class (MonadIO (liftIO))
import Data.Functor ((<&>))
import qualified Graphics.Vty as V
import Lens.Micro ((&), (.~), (^.))
import Types
( ExitDialog (Cancel, Clear, Exit)
, Field
, State
, activeView
, exitDialog
, previousView
)
import ViewEvents.Common (updateFooter)
import ViewEvents.Copy (clearClipboard)
-- | Top-level handler for the exit-confirmation dialog: Enter confirms
-- the current selection, other vty events go to the dialog widget, and
-- non-vty events are ignored.
exitEvent :: State -> T.BrickEvent Field e -> T.EventM Field (T.Next State)
exitEvent st (T.VtyEvent (V.EvKey V.KEnter [])) = handleEnter st
exitEvent st (T.VtyEvent e) = handleDialog st e
exitEvent st _ = M.continue st
-- | Feed a raw vty event to the dialog and continue with the updated
-- state.
handleDialog :: State -> V.Event -> T.EventM Field (T.Next State)
handleDialog st e =
  handleDialogEventEsc e st >>= M.continue
-- | Like 'handleDialogEvent', but Esc cancels the dialog (returning to
-- the previous view) instead of being passed through to the widget.
handleDialogEventEsc :: V.Event -> State -> T.EventM n State
handleDialogEventEsc ev st =
  case ev of
    V.EvKey V.KEsc [] -> pure $ cancelExit st
    _ -> handleDialogEvent ev (st^.exitDialog) <&> setDialog
  where
    -- handleDialogEvent returns EventM (Dialog a);
    -- setDialog transforms that inner Dialog back into a State
    -- (restored the comment markers these two lines had lost).
    setDialog :: D.Dialog ExitDialog -> State
    setDialog x = st & exitDialog .~ x
-- | Dismiss the dialog: switch back to the previously active view and
-- refresh the footer.
cancelExit :: State -> State
cancelExit st = st & activeView .~ (st^.previousView)
                   & updateFooter
-- | Act on the confirmed dialog choice: Exit quits, Clear wipes the
-- clipboard before quitting, Cancel returns to the previous view.
handleEnter :: State -> T.EventM Field (T.Next State)
handleEnter st =
  case D.dialogSelection (st^.exitDialog) of
    Just Clear -> liftIO clearClipboard *> M.halt st
    Just Cancel -> M.continue $ cancelExit st
    Just Exit -> M.halt st
    _ -> M.continue st
| null | https://raw.githubusercontent.com/akazukin5151/kpxhs/a8fd8eae38e145a2d7e6eccd8e9e40cb15d974c6/src/kpxhs/ViewEvents/ExitDialogEvents.hs | haskell | module ViewEvents.ExitDialogEvents (exitEvent) where
import qualified Brick.Main as M
import qualified Brick.Types as T
import Brick.Widgets.Dialog (handleDialogEvent)
import qualified Brick.Widgets.Dialog as D
import Control.Monad.IO.Class (MonadIO (liftIO))
import Data.Functor ((<&>))
import qualified Graphics.Vty as V
import Lens.Micro ((&), (.~), (^.))
import Types
( ExitDialog (Cancel, Clear, Exit)
, Field
, State
, activeView
, exitDialog
, previousView
)
import ViewEvents.Common (updateFooter)
import ViewEvents.Copy (clearClipboard)
exitEvent :: State -> T.BrickEvent Field e -> T.EventM Field (T.Next State)
exitEvent st (T.VtyEvent (V.EvKey V.KEnter [])) = handleEnter st
exitEvent st (T.VtyEvent e) = handleDialog st e
exitEvent st _ = M.continue st
handleDialog :: State -> V.Event -> T.EventM Field (T.Next State)
handleDialog st e =
handleDialogEventEsc e st >>= M.continue
handleDialogEventEsc :: V.Event -> State -> T.EventM n State
handleDialogEventEsc ev st =
case ev of
V.EvKey V.KEsc [] -> pure $ cancelExit st
_ -> handleDialogEvent ev (st^.exitDialog) <&> setDialog
where
handleDialogEvent returns EventM ( Dialog a )
setDialog transforms that inner Dialog back into a State
setDialog :: D.Dialog ExitDialog -> State
setDialog x = st & exitDialog .~ x
cancelExit :: State -> State
cancelExit st = st & activeView .~ (st^.previousView)
& updateFooter
handleEnter :: State -> T.EventM Field (T.Next State)
handleEnter st =
case D.dialogSelection (st^.exitDialog) of
Just Clear -> liftIO clearClipboard *> M.halt st
Just Cancel -> M.continue $ cancelExit st
Just Exit -> M.halt st
_ -> M.continue st
| |
b17076be80b1f4c1eb432cda3b75446dc72eab93403498e9d3c9c418fb296b2d | qfpl/reflex-tutorial | Run.hs | {-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE RecursiveDo #-}
{-# LANGUAGE GADTs #-}
module Ex06.Run (
host
) where
import Control.Monad.Fix (MonadFix)
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as Text
import Reflex.Dom.Core
import qualified Util.Bootstrap as B
import Ex06.Common
-- | Render an amount of money with a dollar prefix, e.g. @3@ -> @"$3"@.
moneyDisplay ::
  Money ->
  Text
moneyDisplay =
  ("$" <>) . Text.pack . show
-- | Wrap a widget in a Bootstrap @container@ div.
grid ::
  MonadWidget t m =>
  m a ->
  m a
grid =
  elClass "div" "container"
-- | Lay four widgets out as one Bootstrap row (columns 3/1/1/1 wide),
-- returning the result of the last widget only.
row ::
  MonadWidget t m =>
  m a ->
  m b ->
  m c ->
  m d ->
  m d
row ma mb mc md = elClass "div" "row" $
  (\_ _ _ x -> x)
    <$> elClass "div" "col-md-3" ma
    <*> elClass "div" "col-md-1" mb
    <*> elClass "div" "col-md-1" mc
    <*> elClass "div" "col-md-1" md
radioButton ::
( MonadWidget t m
, Eq a
) =>
Text ->
Dynamic t a ->
Dynamic t a ->
m (Event t a)
radioButton name dValue dSelected =
let
attrs =
"type" =: "radio" <>
"name" =: name
mkAttrs a n =
if a == n
then "checked" =: ""
else mempty
dynAttrs = mkAttrs <$> dValue <*> dSelected
in do
(e, _) <- elDynAttr' "input" (pure attrs <> dynAttrs) $ pure ()
let eClick = domEvent Click e
pure $ current dValue <@ eClick
stockWidget ::
MonadWidget t m =>
Dynamic t Stock ->
Dynamic t Text ->
m (Event t Text)
stockWidget dStock dSelected =
let
r1 = dynText $ pName . sProduct <$> dStock
r2 = dynText $ Text.pack . show . sQuantity <$> dStock
r3 = dynText $ moneyDisplay . pCost . sProduct <$> dStock
r4 = radioButton "stock" ((pName . sProduct) <$> dStock) dSelected
in
row r1 r2 r3 r4
mkStock ::
( Reflex t
, MonadHold t m
, MonadFix m
) =>
Int ->
Product ->
Event t Text ->
m (Dynamic t Stock)
mkStock i p e = mdo
let
dNonZero = (0 <) <$> dQuantity
eSub = gate (current dNonZero) e
dQuantity <- foldDyn ($) i $
subtract 1 <$ ffilter (== pName p) eSub
pure $ Stock p <$> dQuantity
host ::
MonadWidget t m =>
Ex06Fn t m ->
m ()
host fn = B.panel . divClass "card my-2" . divClass "card-body" . grid $ mdo
dCarrot <- mkStock 5 carrot eVend
dCelery <- mkStock 5 celery eVend
dCucumber <- mkStock 5 cucumber eVend
input <- mdo
eCarrot <-
stockWidget dCarrot dSelected
eCelery <-
stockWidget dCelery dSelected
eCucumber <-
stockWidget dCucumber dSelected
dSelected <-
holdDyn (pName carrot) .
leftmost $ [eCarrot, eCelery, eCucumber]
pure $
Inputs
dMoney
dCarrot
dCelery
dCucumber
dSelected
eBuy
eRefund
eBuy <- buyRow
eAdd <- moneyRow dMoney
dMoney <- trackMoney $ MoneyInputs eAdd eSpend eRefund
outputs <- fn input
let
eVend = oeVend outputs
eSpend = oeSpend outputs
dChange = odChange outputs
dVend = odVend outputs
eRefund <- changeRow dChange
vendRow dVend
pure ()
buyRow ::
MonadWidget t m =>
m (Event t ())
buyRow =
let
rBlank = pure ()
in
row rBlank rBlank rBlank $
B.button "Buy"
data MoneyInputs t =
MoneyInputs
(Event t ()) -- add
(Event t Money) -- spend
(Event t ()) -- refund
trackMoney ::
( Reflex t
, MonadFix m
, MonadHold t m
) =>
MoneyInputs t ->
m (Dynamic t Money)
trackMoney (MoneyInputs eAdd eSpend eRefund) =
foldDyn ($) 0 . mergeWith (.) $ [
(+ 1) <$ eAdd
, flip (-) <$> eSpend
, const 0 <$ eRefund
]
moneyRow ::
( MonadWidget t m
) =>
Dynamic t Money ->
m (Event t ())
moneyRow dMoney =
let
r1 = text "Money inserted:"
r2 = pure ()
r3 = dynText $ moneyDisplay <$> dMoney
r4 = B.button "Add money"
in
row r1 r2 r3 r4
changeRow ::
( MonadWidget t m
) =>
Dynamic t Money ->
m (Event t ())
changeRow dChange =
let
r1 = text "Change:"
r2 = pure ()
r3 = dynText $ moneyDisplay <$> dChange
r4 = B.button "Refund"
in
row r1 r2 r3 r4
vendRow ::
( MonadWidget t m
) =>
Dynamic t Text ->
m ()
vendRow dVend =
let
r1 = text "Tray:"
rBlank = pure ()
r3 = dynText dVend
in
row r1 rBlank r3 rBlank
| null | https://raw.githubusercontent.com/qfpl/reflex-tutorial/07c1e6fab387cbeedd031630ba6a5cd946cc612e/code/exercises/src/Ex06/Run.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE GADTs #
add
spend
refund | # LANGUAGE RecursiveDo #
module Ex06.Run (
host
) where
import Control.Monad.Fix (MonadFix)
import Data.Monoid ((<>))
import Data.Text (Text)
import qualified Data.Text as Text
import Reflex.Dom.Core
import qualified Util.Bootstrap as B
import Ex06.Common
moneyDisplay ::
Money ->
Text
moneyDisplay =
("$" <>) . Text.pack . show
grid ::
MonadWidget t m =>
m a ->
m a
grid =
elClass "div" "container"
row ::
MonadWidget t m =>
m a ->
m b ->
m c ->
m d ->
m d
row ma mb mc md = elClass "div" "row" $
(\_ _ _ x -> x)
<$> elClass "div" "col-md-3" ma
<*> elClass "div" "col-md-1" mb
<*> elClass "div" "col-md-1" mc
<*> elClass "div" "col-md-1" md
radioButton ::
( MonadWidget t m
, Eq a
) =>
Text ->
Dynamic t a ->
Dynamic t a ->
m (Event t a)
radioButton name dValue dSelected =
let
attrs =
"type" =: "radio" <>
"name" =: name
mkAttrs a n =
if a == n
then "checked" =: ""
else mempty
dynAttrs = mkAttrs <$> dValue <*> dSelected
in do
(e, _) <- elDynAttr' "input" (pure attrs <> dynAttrs) $ pure ()
let eClick = domEvent Click e
pure $ current dValue <@ eClick
stockWidget ::
MonadWidget t m =>
Dynamic t Stock ->
Dynamic t Text ->
m (Event t Text)
stockWidget dStock dSelected =
let
r1 = dynText $ pName . sProduct <$> dStock
r2 = dynText $ Text.pack . show . sQuantity <$> dStock
r3 = dynText $ moneyDisplay . pCost . sProduct <$> dStock
r4 = radioButton "stock" ((pName . sProduct) <$> dStock) dSelected
in
row r1 r2 r3 r4
mkStock ::
( Reflex t
, MonadHold t m
, MonadFix m
) =>
Int ->
Product ->
Event t Text ->
m (Dynamic t Stock)
mkStock i p e = mdo
let
dNonZero = (0 <) <$> dQuantity
eSub = gate (current dNonZero) e
dQuantity <- foldDyn ($) i $
subtract 1 <$ ffilter (== pName p) eSub
pure $ Stock p <$> dQuantity
host ::
MonadWidget t m =>
Ex06Fn t m ->
m ()
host fn = B.panel . divClass "card my-2" . divClass "card-body" . grid $ mdo
dCarrot <- mkStock 5 carrot eVend
dCelery <- mkStock 5 celery eVend
dCucumber <- mkStock 5 cucumber eVend
input <- mdo
eCarrot <-
stockWidget dCarrot dSelected
eCelery <-
stockWidget dCelery dSelected
eCucumber <-
stockWidget dCucumber dSelected
dSelected <-
holdDyn (pName carrot) .
leftmost $ [eCarrot, eCelery, eCucumber]
pure $
Inputs
dMoney
dCarrot
dCelery
dCucumber
dSelected
eBuy
eRefund
eBuy <- buyRow
eAdd <- moneyRow dMoney
dMoney <- trackMoney $ MoneyInputs eAdd eSpend eRefund
outputs <- fn input
let
eVend = oeVend outputs
eSpend = oeSpend outputs
dChange = odChange outputs
dVend = odVend outputs
eRefund <- changeRow dChange
vendRow dVend
pure ()
buyRow ::
MonadWidget t m =>
m (Event t ())
buyRow =
let
rBlank = pure ()
in
row rBlank rBlank rBlank $
B.button "Buy"
data MoneyInputs t =
MoneyInputs
trackMoney ::
( Reflex t
, MonadFix m
, MonadHold t m
) =>
MoneyInputs t ->
m (Dynamic t Money)
trackMoney (MoneyInputs eAdd eSpend eRefund) =
foldDyn ($) 0 . mergeWith (.) $ [
(+ 1) <$ eAdd
, flip (-) <$> eSpend
, const 0 <$ eRefund
]
moneyRow ::
( MonadWidget t m
) =>
Dynamic t Money ->
m (Event t ())
moneyRow dMoney =
let
r1 = text "Money inserted:"
r2 = pure ()
r3 = dynText $ moneyDisplay <$> dMoney
r4 = B.button "Add money"
in
row r1 r2 r3 r4
changeRow ::
( MonadWidget t m
) =>
Dynamic t Money ->
m (Event t ())
changeRow dChange =
let
r1 = text "Change:"
r2 = pure ()
r3 = dynText $ moneyDisplay <$> dChange
r4 = B.button "Refund"
in
row r1 r2 r3 r4
vendRow ::
( MonadWidget t m
) =>
Dynamic t Text ->
m ()
vendRow dVend =
let
r1 = text "Tray:"
rBlank = pure ()
r3 = dynText dVend
in
row r1 rBlank r3 rBlank
|
66f1e522c5f5e4426cbf0e8eae0d1f147869c680bea12905bffd38a7d01debcf | pixlsus/registry.gimp.org_static | sg-luminosity-masks.scm | ; Create Luminosity Masks for an image
Originally authored by < >
re - coded by to honor selection and layer offsets
; Will isolate light, mid, and dark tones in an image as channel masks
Adapted from tutorial by ( originally for PS )
; -1.html
; This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation ; either version 2 of the License , or
; (at your option) any later version.
;
; This program is distributed in the hope that it will be useful,
; but WITHOUT ANY WARRANTY; without even the implied warranty of
; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
; GNU General Public License for more details.
(define (script-fu-sg-luminosity-masks image drawable)
(gimp-image-undo-group-start image)
(let ((orig-sel (car (gimp-selection-save image)))
(L (car (gimp-selection-save image)))
(LL #f)
(LLL #f)
(D #f)
(DD #f)
(DDD #f)
(M #f)
(MM #f)
(MMM #f)
(masks '())
(name (car (gimp-drawable-get-name drawable))) )
(set! masks (cons L masks))
(gimp-drawable-set-name L (string-append "L-" name))
(gimp-selection-none image)
; paste uses luminosity desaturation method
(let ((buffer (car (gimp-edit-named-copy drawable "temp"))))
(gimp-floating-sel-anchor (car (gimp-edit-named-paste L buffer TRUE)))
(gimp-buffer-delete buffer) )
(set! D (car (gimp-channel-copy L)))
(gimp-image-insert-channel image D 0 1)
(gimp-drawable-set-name D (string-append "D-" name))
(gimp-invert D)
(set! masks (cons D masks))
(set! DD (car (gimp-channel-copy D)))
(gimp-image-insert-channel image DD 0 2)
(gimp-drawable-set-name DD (string-append "DD-" name))
(gimp-channel-combine-masks DD L CHANNEL-OP-SUBTRACT 0 0)
(set! masks (cons DD masks))
(set! DDD (car (gimp-channel-copy DD)))
(gimp-image-insert-channel image DDD 0 3)
(gimp-drawable-set-name DDD (string-append "DDD-" name))
(gimp-channel-combine-masks DDD L CHANNEL-OP-SUBTRACT 0 0)
(set! masks (cons DDD masks))
(set! LL (car (gimp-channel-copy L)))
(gimp-image-insert-channel image LL 0 1)
(gimp-drawable-set-name LL (string-append "LL-" name))
(gimp-channel-combine-masks LL D CHANNEL-OP-SUBTRACT 0 0)
(set! masks (cons LL masks))
(set! LLL (car (gimp-channel-copy LL)))
(gimp-image-insert-channel image LLL 0 2)
(gimp-drawable-set-name LLL (string-append "LLL-" name))
(gimp-channel-combine-masks LLL D CHANNEL-OP-SUBTRACT 0 0)
(set! masks (cons LLL masks))
(set! M (car (gimp-channel-copy L)))
(gimp-image-insert-channel image M 0 3)
(gimp-drawable-set-name M (string-append "M-" name))
(gimp-channel-combine-masks M D CHANNEL-OP-INTERSECT 0 0)
(set! masks (cons M masks))
(set! MM (car (gimp-channel-copy LL)))
(gimp-image-insert-channel image MM 0 3)
(gimp-drawable-set-name MM (string-append "MM-" name))
(gimp-invert MM)
(gimp-channel-combine-masks MM DD CHANNEL-OP-SUBTRACT 0 0)
(set! masks (cons MM masks))
(set! MMM (car (gimp-channel-copy LLL)))
(gimp-image-insert-channel image MMM 0 3)
(gimp-invert MMM)
(gimp-drawable-set-name MMM (string-append "MMM-" name))
(gimp-channel-combine-masks MMM DDD CHANNEL-OP-SUBTRACT 0 0)
(set! masks (cons MMM masks))
(gimp-selection-load orig-sel)
(if (or (= (car (gimp-selection-is-empty image)) TRUE)
(= (car (gimp-drawable-mask-intersect drawable)) FALSE) )
(gimp-selection-all image) )
(gimp-rect-select image
(car (gimp-drawable-offsets drawable))
(cadr (gimp-drawable-offsets drawable))
(car (gimp-drawable-width drawable))
(car (gimp-drawable-height drawable))
CHANNEL-OP-INTERSECT 0 0 )
(gimp-selection-invert image)
(unless (= (car (gimp-selection-is-empty image)) TRUE)
(map (lambda (x) (gimp-edit-fill x WHITE-FILL)
(gimp-invert x) )
masks ))
(gimp-selection-load orig-sel)
(gimp-image-remove-channel image orig-sel)
)
(gimp-image-set-active-layer image drawable)
(gimp-image-undo-group-end image)
(gimp-displays-flush)
)
(script-fu-register "script-fu-sg-luminosity-masks"
"Luminosity Masks (saulgoode)"
"Create Luminosity Masks of Layer"
"saul goode"
"saul goode"
"Nov 2013"
"RGB*, GRAY*"
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0
)
(script-fu-menu-register "script-fu-sg-luminosity-masks" "<Image>/Filters/Generic")
| null | https://raw.githubusercontent.com/pixlsus/registry.gimp.org_static/ffcde7400f402728373ff6579947c6ffe87d1a5e/registry.gimp.org/files/sg-luminosity-masks.scm | scheme | Create Luminosity Masks for an image
Will isolate light, mid, and dark tones in an image as channel masks
-1.html
This program is free software; you can redistribute it and/or modify
either version 2 of the License , or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
paste uses luminosity desaturation method | Originally authored by < >
re - coded by to honor selection and layer offsets
Adapted from tutorial by ( originally for PS )
it under the terms of the GNU General Public License as published by
(define (script-fu-sg-luminosity-masks image drawable)
(gimp-image-undo-group-start image)
(let ((orig-sel (car (gimp-selection-save image)))
(L (car (gimp-selection-save image)))
(LL #f)
(LLL #f)
(D #f)
(DD #f)
(DDD #f)
(M #f)
(MM #f)
(MMM #f)
(masks '())
(name (car (gimp-drawable-get-name drawable))) )
(set! masks (cons L masks))
(gimp-drawable-set-name L (string-append "L-" name))
(gimp-selection-none image)
(let ((buffer (car (gimp-edit-named-copy drawable "temp"))))
(gimp-floating-sel-anchor (car (gimp-edit-named-paste L buffer TRUE)))
(gimp-buffer-delete buffer) )
(set! D (car (gimp-channel-copy L)))
(gimp-image-insert-channel image D 0 1)
(gimp-drawable-set-name D (string-append "D-" name))
(gimp-invert D)
(set! masks (cons D masks))
(set! DD (car (gimp-channel-copy D)))
(gimp-image-insert-channel image DD 0 2)
(gimp-drawable-set-name DD (string-append "DD-" name))
(gimp-channel-combine-masks DD L CHANNEL-OP-SUBTRACT 0 0)
(set! masks (cons DD masks))
(set! DDD (car (gimp-channel-copy DD)))
(gimp-image-insert-channel image DDD 0 3)
(gimp-drawable-set-name DDD (string-append "DDD-" name))
(gimp-channel-combine-masks DDD L CHANNEL-OP-SUBTRACT 0 0)
(set! masks (cons DDD masks))
(set! LL (car (gimp-channel-copy L)))
(gimp-image-insert-channel image LL 0 1)
(gimp-drawable-set-name LL (string-append "LL-" name))
(gimp-channel-combine-masks LL D CHANNEL-OP-SUBTRACT 0 0)
(set! masks (cons LL masks))
(set! LLL (car (gimp-channel-copy LL)))
(gimp-image-insert-channel image LLL 0 2)
(gimp-drawable-set-name LLL (string-append "LLL-" name))
(gimp-channel-combine-masks LLL D CHANNEL-OP-SUBTRACT 0 0)
(set! masks (cons LLL masks))
(set! M (car (gimp-channel-copy L)))
(gimp-image-insert-channel image M 0 3)
(gimp-drawable-set-name M (string-append "M-" name))
(gimp-channel-combine-masks M D CHANNEL-OP-INTERSECT 0 0)
(set! masks (cons M masks))
(set! MM (car (gimp-channel-copy LL)))
(gimp-image-insert-channel image MM 0 3)
(gimp-drawable-set-name MM (string-append "MM-" name))
(gimp-invert MM)
(gimp-channel-combine-masks MM DD CHANNEL-OP-SUBTRACT 0 0)
(set! masks (cons MM masks))
(set! MMM (car (gimp-channel-copy LLL)))
(gimp-image-insert-channel image MMM 0 3)
(gimp-invert MMM)
(gimp-drawable-set-name MMM (string-append "MMM-" name))
(gimp-channel-combine-masks MMM DDD CHANNEL-OP-SUBTRACT 0 0)
(set! masks (cons MMM masks))
(gimp-selection-load orig-sel)
(if (or (= (car (gimp-selection-is-empty image)) TRUE)
(= (car (gimp-drawable-mask-intersect drawable)) FALSE) )
(gimp-selection-all image) )
(gimp-rect-select image
(car (gimp-drawable-offsets drawable))
(cadr (gimp-drawable-offsets drawable))
(car (gimp-drawable-width drawable))
(car (gimp-drawable-height drawable))
CHANNEL-OP-INTERSECT 0 0 )
(gimp-selection-invert image)
(unless (= (car (gimp-selection-is-empty image)) TRUE)
(map (lambda (x) (gimp-edit-fill x WHITE-FILL)
(gimp-invert x) )
masks ))
(gimp-selection-load orig-sel)
(gimp-image-remove-channel image orig-sel)
)
(gimp-image-set-active-layer image drawable)
(gimp-image-undo-group-end image)
(gimp-displays-flush)
)
(script-fu-register "script-fu-sg-luminosity-masks"
"Luminosity Masks (saulgoode)"
"Create Luminosity Masks of Layer"
"saul goode"
"saul goode"
"Nov 2013"
"RGB*, GRAY*"
SF-IMAGE "Image" 0
SF-DRAWABLE "Drawable" 0
)
(script-fu-menu-register "script-fu-sg-luminosity-masks" "<Image>/Filters/Generic")
|
296ad626650cfa40ed38e362efdbfc4afa8f9862f9d92075549be8cf5eb8197d | lesguillemets/sicp-haskell | NewtonsMethod.hs | module NewtonsMethod where
newtonsMethod :: (Double -> Double) -> Double -> Double
newtonsMethod g guess = fixP (newtonTransform g) guess
newtonTransform :: (Double -> Double) -> Double -> Double
newtonTransform g x = x - (g x / deriv g x)
deriv :: (Double -> Double) -> Double -> Double
deriv g = \x -> (g (x+dx) - g x) / dx where dx = 0.00001
averaageDamp :: (Num a, Fractional a) => (a -> a) -> a -> a
averaageDamp f x = average x (f x)
fixP :: (Double -> Double) -> Double -> Double
fixP = fixedPoint' torelance
fixedPoint' :: (Num a, Ord a) => a -> (a -> a) -> a -> a
fixedPoint' tore f guess =
let next = f guess
in
if closerThan tore guess next
then next
else fixedPoint' tore f next
average :: (Num a, Fractional a) => a -> a -> a
average = ((/2) . ) . (+)
torelance :: Double
torelance = 0.0000001
closerThan :: (Num a, Ord a) => a -> a -> a -> Bool
closerThan tor v0 v1 = abs (v1 - v0) < tor
| null | https://raw.githubusercontent.com/lesguillemets/sicp-haskell/df524a1e28c45fb16a56f539cad8babc881d0431/exercise/chap01/sect3/NewtonsMethod.hs | haskell | module NewtonsMethod where
newtonsMethod :: (Double -> Double) -> Double -> Double
newtonsMethod g guess = fixP (newtonTransform g) guess
newtonTransform :: (Double -> Double) -> Double -> Double
newtonTransform g x = x - (g x / deriv g x)
deriv :: (Double -> Double) -> Double -> Double
deriv g = \x -> (g (x+dx) - g x) / dx where dx = 0.00001
averaageDamp :: (Num a, Fractional a) => (a -> a) -> a -> a
averaageDamp f x = average x (f x)
fixP :: (Double -> Double) -> Double -> Double
fixP = fixedPoint' torelance
fixedPoint' :: (Num a, Ord a) => a -> (a -> a) -> a -> a
fixedPoint' tore f guess =
let next = f guess
in
if closerThan tore guess next
then next
else fixedPoint' tore f next
average :: (Num a, Fractional a) => a -> a -> a
average = ((/2) . ) . (+)
torelance :: Double
torelance = 0.0000001
closerThan :: (Num a, Ord a) => a -> a -> a -> Bool
closerThan tor v0 v1 = abs (v1 - v0) < tor
| |
4896aba95a792e9575e305a64deaf145eec56adae578ddde751fd228a4a5e3ad | EFanZh/EOPL-Exercises | exercise-2.16-test.rkt | #lang racket/base
(require rackunit)
(require "../solutions/exercise-2.16.rkt")
(let ([var-exp-example (var-exp 'a)]
[lambda-exp-example (lambda-exp 'a (var-exp 'b))]
[app-exp-example (app-exp (var-exp 'a) (var-exp 'b))])
(check-true (var-exp? var-exp-example))
(check-false (var-exp? lambda-exp-example))
(check-false (var-exp? app-exp-example))
(check-false (lambda-exp? var-exp-example))
(check-true (lambda-exp? lambda-exp-example))
(check-false (lambda-exp? app-exp-example))
(check-false (app-exp? var-exp-example))
(check-false (app-exp? lambda-exp-example))
(check-true (app-exp? app-exp-example))
(check-eqv? (var-exp->var var-exp-example) 'a)
(check-eqv? (lambda-exp->bound-var lambda-exp-example) 'a)
(check-eqv? (var-exp->var (lambda-exp->body lambda-exp-example)) 'b)
(check-eqv? (var-exp->var (app-exp->rator app-exp-example)) 'a)
(check-eqv? (var-exp->var (app-exp->rand app-exp-example)) 'b))
| null | https://raw.githubusercontent.com/EFanZh/EOPL-Exercises/11667f1e84a1a3e300c2182630b56db3e3d9246a/tests/exercise-2.16-test.rkt | racket | #lang racket/base
(require rackunit)
(require "../solutions/exercise-2.16.rkt")
(let ([var-exp-example (var-exp 'a)]
[lambda-exp-example (lambda-exp 'a (var-exp 'b))]
[app-exp-example (app-exp (var-exp 'a) (var-exp 'b))])
(check-true (var-exp? var-exp-example))
(check-false (var-exp? lambda-exp-example))
(check-false (var-exp? app-exp-example))
(check-false (lambda-exp? var-exp-example))
(check-true (lambda-exp? lambda-exp-example))
(check-false (lambda-exp? app-exp-example))
(check-false (app-exp? var-exp-example))
(check-false (app-exp? lambda-exp-example))
(check-true (app-exp? app-exp-example))
(check-eqv? (var-exp->var var-exp-example) 'a)
(check-eqv? (lambda-exp->bound-var lambda-exp-example) 'a)
(check-eqv? (var-exp->var (lambda-exp->body lambda-exp-example)) 'b)
(check-eqv? (var-exp->var (app-exp->rator app-exp-example)) 'a)
(check-eqv? (var-exp->var (app-exp->rand app-exp-example)) 'b))
| |
058785537048bad76958aacdea4578e66ad1885f787bf66b3ce8e496dcd4be8f | wargrey/graphics | constants.rkt | #lang typed/racket/base
(provide (all-defined-out))
(require "color.rkt")
(require "paint.rkt")
(require "font.rkt")
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
#;(void transparent hilite black)
(define white : FlRGBA (rgb* 'white))
(define gray : FlRGBA (rgb* 'gray))
(define brown : FlRGBA (rgb* 'brown))
(define magenta : FlRGBA (rgb* 'magenta))
(define red : FlRGBA (rgb* 'red))
(define orange : FlRGBA (rgb* 'orange))
(define yellow : FlRGBA (rgb* 'yellow))
(define green : FlRGBA (rgb* 'green))
(define blue : FlRGBA (rgb* 'blue))
(define cyan : FlRGBA (rgb* 'cyan))
(define purple : FlRGBA (rgb* 'purple))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define solid : Stroke (desc-stroke (default-stroke) #:dash 'solid))
(define dot : Stroke (desc-stroke (default-stroke) #:dash 'dot))
(define dot-dash : Stroke (desc-stroke (default-stroke) #:dash 'dot-dash))
(define short-dash : Stroke (desc-stroke (default-stroke) #:dash 'short-dash))
(define long-dash : Stroke (desc-stroke (default-stroke) #:dash 'long-dash))
(define solid-frame : Stroke (desc-stroke (default-border) #:dash 'solid))
(define dot-frame : Stroke (desc-stroke (default-border) #:dash 'dot))
(define dot-dash-frame : Stroke (desc-stroke (default-border) #:dash 'dot-dash))
(define short-dash-frame : Stroke (desc-stroke (default-border) #:dash 'short-dash))
(define long-dash-frame : Stroke (desc-stroke (default-border) #:dash 'long-dash))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(define sans-serif : Font (desc-font #:family 'sans-serif))
(define serif : Font (desc-font #:family 'serif))
(define monospace : Font (desc-font #:family 'monospace))
(define fantasy : Font (desc-font #:family 'fantasy))
(define cursive : Font (desc-font #:family 'cursive))
(define system-ui : Font (desc-font #:family 'system-ui))
(define emoji : Font (desc-font #:family 'emoji))
(define math : Font (desc-font #:family 'math))
(define fangsong : Font (desc-font #:family 'fangsong))
| null | https://raw.githubusercontent.com/wargrey/graphics/203b69dca12a5f3db384ef7096bf41f87360a2f0/bitmap/constants.rkt | racket |
(void transparent hilite black)
| #lang typed/racket/base
(provide (all-defined-out))
(require "color.rkt")
(require "paint.rkt")
(require "font.rkt")
(define white : FlRGBA (rgb* 'white))
(define gray : FlRGBA (rgb* 'gray))
(define brown : FlRGBA (rgb* 'brown))
(define magenta : FlRGBA (rgb* 'magenta))
(define red : FlRGBA (rgb* 'red))
(define orange : FlRGBA (rgb* 'orange))
(define yellow : FlRGBA (rgb* 'yellow))
(define green : FlRGBA (rgb* 'green))
(define blue : FlRGBA (rgb* 'blue))
(define cyan : FlRGBA (rgb* 'cyan))
(define purple : FlRGBA (rgb* 'purple))
(define solid : Stroke (desc-stroke (default-stroke) #:dash 'solid))
(define dot : Stroke (desc-stroke (default-stroke) #:dash 'dot))
(define dot-dash : Stroke (desc-stroke (default-stroke) #:dash 'dot-dash))
(define short-dash : Stroke (desc-stroke (default-stroke) #:dash 'short-dash))
(define long-dash : Stroke (desc-stroke (default-stroke) #:dash 'long-dash))
(define solid-frame : Stroke (desc-stroke (default-border) #:dash 'solid))
(define dot-frame : Stroke (desc-stroke (default-border) #:dash 'dot))
(define dot-dash-frame : Stroke (desc-stroke (default-border) #:dash 'dot-dash))
(define short-dash-frame : Stroke (desc-stroke (default-border) #:dash 'short-dash))
(define long-dash-frame : Stroke (desc-stroke (default-border) #:dash 'long-dash))
(define sans-serif : Font (desc-font #:family 'sans-serif))
(define serif : Font (desc-font #:family 'serif))
(define monospace : Font (desc-font #:family 'monospace))
(define fantasy : Font (desc-font #:family 'fantasy))
(define cursive : Font (desc-font #:family 'cursive))
(define system-ui : Font (desc-font #:family 'system-ui))
(define emoji : Font (desc-font #:family 'emoji))
(define math : Font (desc-font #:family 'math))
(define fangsong : Font (desc-font #:family 'fangsong))
|
bf6185a74cd127c08c77dd93cbc440923b91e184d98ccdaef7930711ead12d6f | gretay-js/ocamlfdo | report.ml | open Core
let verbose = ref true
let extension = "fdo.org"
let last_id = ref 0
let names = String.Table.create ()
let save_names = false
let get_id name =
if save_names
then (
match Hashtbl.find names name with
| None ->
Hashtbl.add_exn names ~key:name ~data:!last_id;
incr last_id;
!last_id - 1
| Some id -> id)
else (
incr last_id;
!last_id - 1)
;;
(* depending on whether the name is write-only, or need to be reused, we can
save ids associated with each name. *)
let get_filename ~name ~title ~sub =
let filename = sprintf "%s-%s.%s" name title sub in
if String.length name < 255
then filename
else sprintf "%s-%d-%s.%s" (String.prefix name 200) (get_id name) title sub
;;
let filename = sprintf "summary.%s" extension
let enabled = ref false
let percent part total =
if total > 0 then Float.(100. *. (of_int part /. of_int total)) else 0.
;;
let timestamp () = Time.to_string (Time.now ())
let log msg =
if !verbose then printf "%s" msg;
if !enabled
then
Out_channel.with_file ~append:true ~binary:false filename ~f:(fun oc ->
Printf.fprintf
oc
"%s%s"
msg
(if String.is_suffix msg ~suffix:"\n" then "" else "\n"))
;;
let logf fmt =
Format.kasprintf (fun msg -> log msg) ("@?%s: " ^^ fmt ^^ "@.") (timestamp ())
;;
let start () =
if !verbose then printf "Creating summary file %s\n" filename;
enabled := true
;;
let finish () =
if !verbose then printf "Written summary to %s\n" filename;
enabled := false
;;
module Hint = struct
type t =
| Old_profile
| Mismatch
let to_fmt = function
| Mismatch ->
format_of_string
"Cannot apply the profile to code because the source code changed and md5 check \
is disabled.\n\
Try generating a new profile or use command line flag -on-md5-mismatch skip \
-on-md5-missing skip."
| Old_profile ->
format_of_string
"Profile format may have changed.\n\
If you are using an old profile, try generating a new one."
;;
end
let user_error ?(hint = None) ?(exn = None) fmt =
let fmt_hint =
match hint with
| None -> fmt
| Some h -> fmt ^^ "\nHint: " ^^ Hint.to_fmt h
in
Format.kfprintf
(fun _ ->
match exn with
| None -> exit 321
| Some exn -> raise exn)
Format.err_formatter
("@?Error: " ^^ fmt_hint ^^ "@.")
;;
| null | https://raw.githubusercontent.com/gretay-js/ocamlfdo/5866fe9c2bfea03bc7efb033cc7b91a3a25cf520/src/report.ml | ocaml | depending on whether the name is write-only, or need to be reused, we can
save ids associated with each name. | open Core
let verbose = ref true
let extension = "fdo.org"
let last_id = ref 0
let names = String.Table.create ()
let save_names = false
let get_id name =
if save_names
then (
match Hashtbl.find names name with
| None ->
Hashtbl.add_exn names ~key:name ~data:!last_id;
incr last_id;
!last_id - 1
| Some id -> id)
else (
incr last_id;
!last_id - 1)
;;
let get_filename ~name ~title ~sub =
let filename = sprintf "%s-%s.%s" name title sub in
if String.length name < 255
then filename
else sprintf "%s-%d-%s.%s" (String.prefix name 200) (get_id name) title sub
;;
let filename = sprintf "summary.%s" extension
let enabled = ref false
let percent part total =
if total > 0 then Float.(100. *. (of_int part /. of_int total)) else 0.
;;
let timestamp () = Time.to_string (Time.now ())
let log msg =
if !verbose then printf "%s" msg;
if !enabled
then
Out_channel.with_file ~append:true ~binary:false filename ~f:(fun oc ->
Printf.fprintf
oc
"%s%s"
msg
(if String.is_suffix msg ~suffix:"\n" then "" else "\n"))
;;
let logf fmt =
Format.kasprintf (fun msg -> log msg) ("@?%s: " ^^ fmt ^^ "@.") (timestamp ())
;;
let start () =
if !verbose then printf "Creating summary file %s\n" filename;
enabled := true
;;
let finish () =
if !verbose then printf "Written summary to %s\n" filename;
enabled := false
;;
module Hint = struct
type t =
| Old_profile
| Mismatch
let to_fmt = function
| Mismatch ->
format_of_string
"Cannot apply the profile to code because the source code changed and md5 check \
is disabled.\n\
Try generating a new profile or use command line flag -on-md5-mismatch skip \
-on-md5-missing skip."
| Old_profile ->
format_of_string
"Profile format may have changed.\n\
If you are using an old profile, try generating a new one."
;;
end
let user_error ?(hint = None) ?(exn = None) fmt =
let fmt_hint =
match hint with
| None -> fmt
| Some h -> fmt ^^ "\nHint: " ^^ Hint.to_fmt h
in
Format.kfprintf
(fun _ ->
match exn with
| None -> exit 321
| Some exn -> raise exn)
Format.err_formatter
("@?Error: " ^^ fmt_hint ^^ "@.")
;;
|
0f5ed126c5aed7bc14696f297dfc5e1a2d25366697158207989428a2e61d0596 | marcoheisig/adventofcode | day-09.lisp | (defpackage :adventofcode-2019-day-9
(:use :cl :intcode-computer))
(in-package :adventofcode-2019-day-9)
(defun solve-day-09-part-1 ()
(run-intcode-computer
(make-intcode-computer
:input (constantly 1)
:program (read-intcode-computer-program "input"))))
(defun solve-day-09-part-2 ()
(run-intcode-computer
(make-intcode-computer
:input (constantly 2)
:program (read-intcode-computer-program "input"))))
| null | https://raw.githubusercontent.com/marcoheisig/adventofcode/e96a0da17cd79f424af984ed2648b49ffdacb893/2019/day-09/day-09.lisp | lisp | (defpackage :adventofcode-2019-day-9
(:use :cl :intcode-computer))
(in-package :adventofcode-2019-day-9)
(defun solve-day-09-part-1 ()
(run-intcode-computer
(make-intcode-computer
:input (constantly 1)
:program (read-intcode-computer-program "input"))))
(defun solve-day-09-part-2 ()
(run-intcode-computer
(make-intcode-computer
:input (constantly 2)
:program (read-intcode-computer-program "input"))))
| |
8037e105f1d4c17ae593013ecc585dadf89535b2ea9bc2fb6327ce57863b2f5d | ghcjs/jsaddle-dom | MediaStreamAudioSourceNode.hs | # LANGUAGE PatternSynonyms #
-- For HasCallStack compatibility
{-# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures #-}
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.MediaStreamAudioSourceNode
(getMediaStream, MediaStreamAudioSourceNode(..),
gTypeMediaStreamAudioSourceNode)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/MediaStreamAudioSourceNode.mediaStream Mozilla MediaStreamAudioSourceNode.mediaStream documentation >
getMediaStream ::
(MonadDOM m) => MediaStreamAudioSourceNode -> m MediaStream
getMediaStream self
= liftDOM ((self ^. js "mediaStream") >>= fromJSValUnchecked)
| null | https://raw.githubusercontent.com/ghcjs/jsaddle-dom/5f5094277d4b11f3dc3e2df6bb437b75712d268f/src/JSDOM/Generated/MediaStreamAudioSourceNode.hs | haskell | For HasCallStack compatibility
# LANGUAGE ImplicitParams, ConstraintKinds, KindSignatures # | # LANGUAGE PatternSynonyms #
# OPTIONS_GHC -fno - warn - unused - imports #
module JSDOM.Generated.MediaStreamAudioSourceNode
(getMediaStream, MediaStreamAudioSourceNode(..),
gTypeMediaStreamAudioSourceNode)
where
import Prelude ((.), (==), (>>=), return, IO, Int, Float, Double, Bool(..), Maybe, maybe, fromIntegral, round, realToFrac, fmap, Show, Read, Eq, Ord, Maybe(..))
import qualified Prelude (error)
import Data.Typeable (Typeable)
import Data.Traversable (mapM)
import Language.Javascript.JSaddle (JSM(..), JSVal(..), JSString, strictEqual, toJSVal, valToStr, valToNumber, valToBool, js, jss, jsf, jsg, function, asyncFunction, new, array, jsUndefined, (!), (!!))
import Data.Int (Int64)
import Data.Word (Word, Word64)
import JSDOM.Types
import Control.Applicative ((<$>))
import Control.Monad (void)
import Control.Lens.Operators ((^.))
import JSDOM.EventTargetClosures (EventName, unsafeEventName, unsafeEventNameAsync)
import JSDOM.Enums
| < -US/docs/Web/API/MediaStreamAudioSourceNode.mediaStream Mozilla MediaStreamAudioSourceNode.mediaStream documentation >
getMediaStream ::
(MonadDOM m) => MediaStreamAudioSourceNode -> m MediaStream
getMediaStream self
= liftDOM ((self ^. js "mediaStream") >>= fromJSValUnchecked)
|
c11d0ebafad2e35a24f404a2578fa8476577b0f0b740ca0e920f8e0752761443 | pokepay/aws-sdk-lisp | streams.dynamodb.lisp | ;; DO NOT EDIT: File is generated by AWS-SDK/GENERATOR.
(uiop:define-package #:aws-sdk/services/streams.dynamodb
(:use)
(:use-reexport #:aws-sdk/services/streams.dynamodb/api))
| null | https://raw.githubusercontent.com/pokepay/aws-sdk-lisp/836b87df520391478a19462443342dc56f4b3f2c/services/streams.dynamodb.lisp | lisp | DO NOT EDIT: File is generated by AWS-SDK/GENERATOR. |
(uiop:define-package #:aws-sdk/services/streams.dynamodb
(:use)
(:use-reexport #:aws-sdk/services/streams.dynamodb/api))
|
510831c33a098cf1364c92dd5e7f8601cc0b4871c6540bc747daa28381f08480 | binghe/OSCAR | pc-examples.lisp | ;; This runs on OSCAR_3.31
#| To run these problems, first load Perception-Causes_3.31a.lisp.
Then load this file. To run problem n, execute (simulate-oscar n). |#
(in-package "OSCAR")
(setf *simulation-problems* nil)
;======================================================================
(make-simulation-problem
:number 1
:message
"This is the perceptual updating problem. First, Fred looks red to me. Later, Fred looks blue to me.
What should I conclude about the color of Fred?"
:reasons
*PERCEPTION*
*TEMPORAL-PROJECTION*
*incompatible-colors*
:inputs
(1 "(the color of Fred is red)" 1.0)
(30 "(the color of Fred is blue)" 1.0)
:interests
("(? x)((the color of Fred is x) at 50)" 0.2)
)
;======================================================================
(make-simulation-problem
:number 2
:message
"This is the perceptual updating problem. First, Fred looks red to me. Later, Fred looks blue to me.
What should I conclude about the color of Fred?"
:reasons
*INDEXICAL-PERCEPTION*
*indexical-incompatible-colors*
:inputs
(1 "(the color of Fred is red)" 1.0)
(30 "(the color of Fred is blue)" 1.0)
:interests
("(? x)(the color of Fred is x)" 0.75)
)
;======================================================================
(make-simulation-problem
:number 3
:message
"First, Fred looks red to me. Later, I am informed by Merrill that I am then
wearing blue-tinted glasses. Later still, Fred looks blue to me. All along, I know that the
probability is not high of Fred being blue given that Fred looks blue to me, but I am
wearing blue tinted glasses. What should I conclude about the color of Fred?"
:reasons
*PERCEPTION*
*RELIABLE-INFORMANT*
*PERCEPTUAL-RELIABILITY*
*TEMPORAL-PROJECTION*
*INCOMPATIBLE-COLORS*
:inputs
(1 "(the color of Fred is red)" 0.8)
(20 "(Merrill reports that I_am_wearing_blue_tinted_glasses)" 1.0)
(30 "(the color of Fred is blue)" 0.8)
:premises
("((the probability of (the color of Fred is blue) given
((I have a percept with content (the color of Fred is blue)) & I_am_wearing_blue_tinted_glasses)) <= .8)"
1.0)
("(Merrill is a reliable informant)" 1.0)
:interests
("(? x)((the color of Fred is x) at 50)" 0.55)
)
;======================================================================
;; This requires the temporal-projectibility of ~my_surroundings_are_illuminated_by_red_light.
(make-simulation-problem
:number 4
:message
"This illustrates the use of discounted-perception and perceptual-unreliability."
:reasons
*perception*
*discounted-perception*
*perceptual-reliability*
*perceptual-unreliability*
*temporal-projection*
neg-at-intro
:inputs
(10 "(the color of Fred is red)" 1.0)
:premises
("((the probability of (the color of Fred is red) given
((I have a percept with content (the color of Fred is red))
& my_surroundings_are_illuminated_by_red_light)) <= .7)"
1.0)
("((the probability of (the color of Fred is red) given
((I have a percept with content (the color of Fred is red)) & I_am_wearing_red_tinted_glasses)) <= .8)"
1.0)
("(I_am_wearing_red_tinted_glasses at 1)" 1.0 15)
("(my_surroundings_are_illuminated_by_red_light at 1)" 1.0 30)
("(~my_surroundings_are_illuminated_by_red_light at 8)" 1.0 50)
:interests
("((the color of Fred is red) at 10)" 0.5)
)
;======================================================================
;; This requires the temporal-projectibility of ~my_surroundings_are_illuminated_by_red_light.
(make-simulation-problem
:number 5
:message
"This illustrates the use of discounted-indexical-perception and indexical-perceptual-unreliability."
:reasons
*indexical-perception*
*discounted-indexical-perception*
*indexical-perceptual-reliability*
*indexical-perceptual-unreliability*
*temporal-projection*
neg-at-intro
:inputs
(10 "(the color of Fred is red)" 1.0)
:premises
("((the probability of (the color of Fred is red) given
((I have a percept with content (the color of Fred is red))
& my_surroundings_are_illuminated_by_red_light)) <= .7)"
1.0)
("((the probability of (the color of Fred is red) given
((I have a percept with content (the color of Fred is red)) & I_am_wearing_red_tinted_glasses)) <= .8)"
1.0)
("(I_am_wearing_red_tinted_glasses at 1)" 1.0 15)
("(my_surroundings_are_illuminated_by_red_light at 1)" 1.0 30)
("(~my_surroundings_are_illuminated_by_red_light at 8)" 1.0 50)
:interests
("(the color of Fred is red)" 0.5)
)
;======================================================================
(make-simulation-problem
:number 6
:message
"This is the Yale Shooting Problem. I know that the gun being fired while loaded
will cause Jones to become dead. I know that the gun is initially loaded, and Jones is initially
alive. Later, the gun is fired. Should I conclude that Jones becomes dead?"
:reasons
neg-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER+*
*CAUSAL-IMPLICATION*
neg-at-intro
:inputs
:premises
("(the_gun_is_loaded at 20)" 1.0)
("((Jones is alive) at 20)" 1.0)
("(the_gun_is_fired at 30)" 1.0)
("(all x)(all time)(((x is dead) at time) <-> ~((x is alive) at time))" 1.0)
("(the_gun_is_fired when the_gun_is_loaded is causally sufficient for
(Jones is dead) after an interval 10)" 1.0)
:interests
("((Jones is alive) at 50)" 0.75)
("((Jones is dead) at 50)" 0.75)
)
;======================================================================
(make-simulation-problem
:number 7
:message
"This is the solved Yale Shooting Problem. I know that the gun being fired while loaded
will cause Jones to become dead. I know that the gun is initially loaded, and Jones is initially
alive. Later, the gun is fired. Should I conclude that Jones becomes dead?"
:reasons
neg-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER*
*CAUSAL-IMPLICATION*
neg-at-intro
:inputs
:premises
("(the_gun_is_loaded at 20)" 1.0)
("((Jones is alive) at 20)" 1.0)
("(the_gun_is_fired at 30)" 1.0)
("(the_gun_is_fired when the_gun_is_loaded is causally sufficient for
~(Jones is alive) after an interval 10)" 1.0)
:interests
("(? ((Jones is alive) at 50))" 0.75)
)
;======================================================================
(make-simulation-problem
:number 13
:message
"This illustrates sequential causation. This requires causal undercutting for
causal implication. I know that the gun being fired while loaded
will cause Jones to become dead. I know that the gun is initially loaded, and Jones is initially
alive. Later, the gun is fired. But I also know that he will be resuscitated later, and then he will
be alive. Should I conclude that Jones becomes dead? This version is solved incorrectly."
:reasons
neg-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER*
*CAUSAL-IMPLICATION*
neg-at-intro
neg-at-intro2
:inputs
:premises
("(the_gun_is_loaded at 20)" 1.0)
("((Jones is alive) at 20)" 1.0)
("(the_gun_is_fired at 30)" 1.0)
("(Jones_is_resuscitated at 45)" 1.0)
("(the_gun_is_fired when the_gun_is_loaded is causally sufficient for
~(Jones is alive) after an interval 10)" 1.0)
("(Jones_is_resuscitated when ~(Jones is alive) is causally sufficient for
(Jones is alive) after an interval 5)" 1.0)
:interests
("(? ((Jones is alive) at 60))" 0.75)
)
;======================================================================
(make-simulation-problem
:number 14
:message
"This illustrates sequential causation. This requires causal undercutting for
causal implication. I know that the gun being fired while loaded
will cause Jones to become dead. I know that the gun is initially loaded, and Jones is initially
alive. Later, the gun is fired. But I also know that he will be resuscitated later, and then he will
be alive. Should I conclude that Jones becomes dead?"
:reasons
neg-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER*
*CAUSAL-IMPLICATION*
*CAUSAL-UNDERCUTTER-FOR-CAUSAL-IMPLICATION*
neg-at-intro
neg-at-intro2
:inputs
:premises
("(the_gun_is_loaded at 20)" 1.0)
("((Jones is alive) at 20)" 1.0)
("(the_gun_is_fired at 30)" 1.0)
("(Jones_is_resuscitated at 45)" 1.0)
("(the_gun_is_fired when the_gun_is_loaded is causally sufficient for
~(Jones is alive) after an interval 10)" 1.0)
("(Jones_is_resuscitated when ~(Jones is alive) is causally sufficient for
(Jones is alive) after an interval 5)" 1.0)
:interests
("(? ((Jones is alive) at 60))" 0.75)
)
;======================================================================
(make-simulation-problem
:number 8
:message
"This is the indexical Yale Shooting Problem. I know that the gun being fired while loaded
will cause Jones to become dead. I know that the gun is initially loaded, and Jones is initially
alive. Later, the gun is fired. Should I conclude that Jones becomes dead?"
:reasons
*INDEXICAL-TEMPORAL-PROJECTION*
*TEMPORAL-PROJECTION*
*INDEXICAL-CAUSAL-UNDERCUTTER*
*INDEXICAL-CAUSAL-IMPLICATION*
:start-time 50
:inputs
:premises
("((Jones is alive) at 20)" 1.0)
("(the_gun_is_loaded at 20)" 1.0)
("(the_gun_is_fired at 30)" 1.0)
("(the_gun_is_fired when the_gun_is_loaded is causally sufficient for
~(Jones is alive) after an interval 10)" 1.0)
:interests
("(? (Jones is alive))" 0.75)
)
;======================================================================
(make-simulation-problem
:number 9
:message
"1. An interest in whether b1 and b2 collide at 10 generates an interest in their positions at 10.
Because we know their positions at 0, we adopt interest in their velocities between 0 and 10.
2. We know the velocities at 0, and temporal-projection leads to an inference that those velocities
remain unchanged between 0 and 10. From that we can compute the positions at 10, and infer
that b1 and b2 collide at 10.
3. However, temporal projection also leads to an inference that the positions at 10 are the
same as those at 0. Because the velocities at 0 are nonzero, causal undercutting defeats this
inference, leaving us with a unique conclusion regarding the positions at 10 (they are at (5.0 3.0)).
"
:reasons
neg-at-elimination
&-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER*
*COLLISION*
*NEW-POSITION*
*POSITION-INCOMPATIBILITY-1*
*POSITION-INCOMPATIBILITY-2*
strict-arithmetical-inequality
arithmetical-inequality
is-past-or-present
neg-at-intro
arithmetical-nonequality
inequality-transitivity
pair-nonidentity
pair-nonidentity-at-time
&-at-intro
arithmetical-equality
:inputs
:premises
("((the position of b1 is (0.0 3.0)) at 0)" 1.0)
("((the position of b2 is (1.0 0.0)) at 0)" 1.0)
("(all b)(all x)(all y)(all vx)(all vy)
((the velocity of b is (vx vy))
when ((the position of b is (x y)) & ~((vx vy) = (0.0 0.0)))
is causally sufficient for ~(the position of b is (x y)) after an interval 0)" 1.0)
("((the velocity of b1 is (.5 0.0)) at 0)" 1.0)
("((the velocity of b2 is (.4 .3)) at 0)" 1.0)
("(5.0 = (0.0 + (0.5 * (10 - 0))))" 1.0)
("(3.0 = (3.0 + (0.0 * (10 - 0))))" 1.0)
("(5.0 = (1.0 + (0.4 * (10 - 0))))" 1.0)
("(3.0 = (0.0 + (0.3 * (10 - 0))))" 1.0)
:interests
( " ( ? ( ( b1 and b2 collide ) at 10 ) ) " 0.75 )
("(? x)(? y) ((the position of b1 is (x y)) at 10)" 0.75)
)
;======================================================================
(make-simulation-problem
:number 10
:message
"
1. An interest in whether b1 and b2 collide at 10 generates an interest in their positions at 10.
Because we know their positions at 0, we adopt interest in their velocities between 0 and 10.
2. We know the velocities at 0, and temporal-projection leads to an inference that those velocities
remain unchanged between 0 and 10. From that we can compute the positions at 10, and infer
that b1 and b2 collide at 10.
3. However, temporal projection also leads to an inference that the positions at 10 are the
same as those at 0. Because the velocities at 0 are nonzero, causal undercutting defeats this
inference, leaving us with a unique conclusion regarding the positions at 10 (they are at (5.0 3.0)).
"
:reasons
neg-at-elimination
&-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER*
*COLLISION*
*NEW-POSITION*
*POSITION-INCOMPATIBILITY-1*
*POSITION-INCOMPATIBILITY-2*
strict-arithmetical-inequality
arithmetical-inequality
is-past-or-present
neg-at-intro
arithmetical-nonequality
inequality-transitivity
pair-nonidentity
pair-nonidentity-at-time
&-at-intro
arithmetical-equality
; *CAUSAL-IMPLICATION*
; COLLISION-SYMMETRY
; *CAUSAL-UNDERCUTTER+*
:inputs
:premises
("((the position of b1 is (0.0 3.0)) at 0)" 1.0)
("((the position of b2 is (1.0 0.0)) at 0)" 1.0)
("(all b)(all x)(all y)(all vx)(all vy)
((the velocity of b is (vx vy))
when ((the position of b is (x y)) & ~((vx vy) = (0.0 0.0)))
is causally sufficient for ~(the position of b is (x y)) after an interval 0)" 1.0)
("((the velocity of b1 is (.5 0.0)) at 0)" 1.0)
("((the velocity of b2 is (.4 .3)) at 0)" 1.0)
("(5.0 = (0.0 + (0.5 * (10 - 0))))" 1.0)
("(3.0 = (3.0 + (0.0 * (10 - 0))))" 1.0)
("(5.0 = (1.0 + (0.4 * (10 - 0))))" 1.0)
("(3.0 = (0.0 + (0.3 * (10 - 0))))" 1.0)
( " ( ( 0 + 0 ) < 10 ) " 1.0 )
:interests
("(? ((b1 and b2 collide) at 10))" 0.75)
)
;======================================================================
(make-simulation-problem
:number 11
:message
"1. We are given the velocities of b1 and b2 at 0, and are told they collide at (5 3) at 10.
We are interested in the velocity of b1 at 20.
2. By causal-implication, we can infer that the velocity of b1 at 20 is (.4 .3).
3. By temporal projection, we can also infer that the velocity of b1 at 20 is (.5 .0). But this
is defeated by causal-undercutter+, because we also know that if the velocity is (.4 .3) then
it is not (.5 .0).
"
:reasons
neg-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER+*
*CAUSAL-UNDERCUTTER*
*CAUSAL-IMPLICATION*
*COLLISION*
*NEW-POSITION*
strict-arithmetical-inequality
arithmetical-inequality
is-past-or-present
neg-at-intro
arithmetical-nonequality
inequality-transitivity
:inputs
:premises
("((the velocity of b1 is (.5 0.0)) at 10)" 1.0)
("((the velocity of b2 is (.4 .3)) at 10)" 1.0)
("(b1 is a dimensionless billiard ball)" 1.0)
("(b2 is a dimensionless billiard ball)" 1.0)
("((b1 and b2 collide) at 10)" 1.0)
("(((.5 expt 2) + (0.0 expt 2)) = ((.4 expt 2) + (.3 expt 2)))" 1.0)
("(same-mass b1 b2)" 1.0)
("(all b)(all time) (((the velocity of b is (0.4 0.3)) at time)
-> ~((the velocity of b is (0.5 0.0)) at time))" 1.0)
("(all b)(all time) (((the velocity of b is (0.5 0.0)) at time)
-> ~((the velocity of b is (0.4 0.3)) at time))" 1.0)
("(all b1)(all b2)(all v1x)(all v1y)(all v2x)(all v2y)
((((b1 is a dimensionless billiard ball) & (b2 is a dimensionless billiard ball))
& ((same-mass b1 b2) & (((v1x expt 2) + (v1y expt 2)) = ((v2x expt 2) + (v2y expt 2)))))
->
((b1 and b2 collide)
when (the velocity of b2 is (v2x v2y))
is causally sufficient for (the velocity of b1 is (v2x v2y))
after an interval 0))" 1.0)
("(all b1)(all b2)(all v1x)(all v1y)(all v2x)(all v2y)
((((b1 is a dimensionless billiard ball) & (b2 is a dimensionless billiard ball))
& ((same-mass b1 b2) & (((v1x expt 2) + (v1y expt 2)) = ((v2x expt 2) + (v2y expt 2)))))
->
((b1 and b2 collide)
when (the velocity of b1 is (v2x v2y))
is causally sufficient for (the velocity of b2 is (v2x v2y))
after an interval 0))" 1.0)
:interests
("(? x)(? y) ((the velocity of b1 is (x y)) at 20)" 0.75)
)
;======================================================================
(make-simulation-problem
:number 12
:message
" This is the Extended Prediction Problem.
1. We are given the velocities of b1 and b2 at 0, and are told they collide at (5 3) at 10.
We are interested in the position of b1 at 20. Given knowledge of the position of b1 at 10,
this generates an interest in the velocity of b1 between 10 and 20.
2. By causal-implication, we can infer that the velocity of b1 between 10 and 20 is (.4 .3).
From this we can compute that the position of b1 at 20 is (9.0 6.0).
3. By temporal projection, we can also infer that the velocity of b1 at 20 is (.5 .0). But this
is defeated by causal-undercutter, because we also know that if the velocity is (.4 .3) then
it is not (.5 .0).
4. By temporal projection, we can infer that the position of b1 at 20 is the same as at 0.
But this is defeated by causal-undercutter, because we know that the velocity of b1 at 0
is nonzero.
5. By temporal projection, we can infer that the position of b1 at 20 is the same as at 10.
This is defeated in the same fashion as (4), because we know the velocity of
b1 between 0 and 10, and we are given that 10 is between 0 and 10.
"
:reasons
*CAUSAL-IMPLICATION*
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER*
*COLLISION*
*NEW-POSITION*
*POSITION-INCOMPATIBILITY*
pair-nonidentity
pair-nonidentity-at-time
&-at-intro
:inputs
:premises
("((the position of b1 is (0.0 3.0)) at 0)" 1.0)
("((the position of b2 is (1.0 0.0)) at 0)" 1.0)
("((the velocity of b1 is (.5 0.0)) at 0)" 1.0)
("((the velocity of b2 is (.4 .3)) at 0)" 1.0)
("(b1 is a dimensionless billiard ball)" 1.0)
("(b2 is a dimensionless billiard ball)" 1.0)
("(all b)(all x)(all y)(all vx)(all vy)
((the position of b is (x y))
when ((the velocity of b is (vx vy)) & ~((vx vy) = (0.0 0.0)))
is causally sufficient for ~(the position of b is (x y)) after an interval 0)" 1.0)
("(all b1)(all b2)(all v1x)(all v1y)(all v2x)(all v2y)
((((b1 is a dimensionless billiard ball) & (b2 is a dimensionless billiard ball))
& ((same-mass b1 b2) & (((v1x expt 2) + (v1y expt 2)) = ((v2x expt 2) + (v2y expt 2)))))
->
((b1 and b2 collide)
when (the velocity of b2 is (v2x v2y))
is causally sufficient for (the velocity of b1 is (v2x v2y))
after an interval 0))" 1.0)
("(same-mass b1 b2)" 1.0)
("(5.0 = (0.0 + (0.5 * (10 - 0))))" 1.0)
("(3.0 = (3.0 + (0.0 * (10 - 0))))" 1.0)
("(5.0 = (1.0 + (0.4 * (10 - 0))))" 1.0)
("(3.0 = (0.0 + (0.3 * (10 - 0))))" 1.0)
("(9.0 = (5.0 + (0.4 * (20 - 10))))" 1.0)
("(6.0 = (3.0 + (0.3 * (20 - 10))))" 1.0)
("(((.5 expt 2) + (0.0 expt 2)) = ((.4 expt 2) + (.3 expt 2)))" 1.0)
:interests
("(? ((b1 and b2 collide) at 10))" 0.75)
( " ( ? x ) ( ? y ) ( ( the velocity of b1 is ( x y ) ) throughout ( clopen 10 20 ) ) " 0.75 )
("(? x)(? y) ((the position of b1 is (x y)) at 20)" 0.75)
)
;======================================================================
| null | https://raw.githubusercontent.com/binghe/OSCAR/049f3ea3cda8f7de1d58174d6fca019dbf2176df/pc-examples.lisp | lisp | This runs on OSCAR_3.31
To run these problems, first load Perception-Causes_3.31a.lisp.
Then load this file. To run problem n, execute (simulate-oscar n).
======================================================================
======================================================================
======================================================================
======================================================================
This requires the temporal-projectibility of ~my_surroundings_are_illuminated_by_red_light.
======================================================================
This requires the temporal-projectibility of ~my_surroundings_are_illuminated_by_red_light.
======================================================================
======================================================================
======================================================================
======================================================================
======================================================================
======================================================================
======================================================================
*CAUSAL-IMPLICATION*
COLLISION-SYMMETRY
*CAUSAL-UNDERCUTTER+*
======================================================================
======================================================================
====================================================================== |
(in-package "OSCAR")
(setf *simulation-problems* nil)
(make-simulation-problem
:number 1
:message
"This is the perceptual updating problem. First, Fred looks red to me. Later, Fred looks blue to me.
What should I conclude about the color of Fred?"
:reasons
*PERCEPTION*
*TEMPORAL-PROJECTION*
*incompatible-colors*
:inputs
(1 "(the color of Fred is red)" 1.0)
(30 "(the color of Fred is blue)" 1.0)
:interests
("(? x)((the color of Fred is x) at 50)" 0.2)
)
(make-simulation-problem
:number 2
:message
"This is the perceptual updating problem. First, Fred looks red to me. Later, Fred looks blue to me.
What should I conclude about the color of Fred?"
:reasons
*INDEXICAL-PERCEPTION*
*indexical-incompatible-colors*
:inputs
(1 "(the color of Fred is red)" 1.0)
(30 "(the color of Fred is blue)" 1.0)
:interests
("(? x)(the color of Fred is x)" 0.75)
)
(make-simulation-problem
:number 3
:message
"First, Fred looks red to me. Later, I am informed by Merrill that I am then
wearing blue-tinted glasses. Later still, Fred looks blue to me. All along, I know that the
probability is not high of Fred being blue given that Fred looks blue to me, but I am
wearing blue tinted glasses. What should I conclude about the color of Fred?"
:reasons
*PERCEPTION*
*RELIABLE-INFORMANT*
*PERCEPTUAL-RELIABILITY*
*TEMPORAL-PROJECTION*
*INCOMPATIBLE-COLORS*
:inputs
(1 "(the color of Fred is red)" 0.8)
(20 "(Merrill reports that I_am_wearing_blue_tinted_glasses)" 1.0)
(30 "(the color of Fred is blue)" 0.8)
:premises
("((the probability of (the color of Fred is blue) given
((I have a percept with content (the color of Fred is blue)) & I_am_wearing_blue_tinted_glasses)) <= .8)"
1.0)
("(Merrill is a reliable informant)" 1.0)
:interests
("(? x)((the color of Fred is x) at 50)" 0.55)
)
(make-simulation-problem
:number 4
:message
"This illustrates the use of discounted-perception and perceptual-unreliability."
:reasons
*perception*
*discounted-perception*
*perceptual-reliability*
*perceptual-unreliability*
*temporal-projection*
neg-at-intro
:inputs
(10 "(the color of Fred is red)" 1.0)
:premises
("((the probability of (the color of Fred is red) given
((I have a percept with content (the color of Fred is red))
& my_surroundings_are_illuminated_by_red_light)) <= .7)"
1.0)
("((the probability of (the color of Fred is red) given
((I have a percept with content (the color of Fred is red)) & I_am_wearing_red_tinted_glasses)) <= .8)"
1.0)
("(I_am_wearing_red_tinted_glasses at 1)" 1.0 15)
("(my_surroundings_are_illuminated_by_red_light at 1)" 1.0 30)
("(~my_surroundings_are_illuminated_by_red_light at 8)" 1.0 50)
:interests
("((the color of Fred is red) at 10)" 0.5)
)
(make-simulation-problem
:number 5
:message
"This illustrates the use of discounted-indexical-perception and indexical-perceptual-unreliability."
:reasons
*indexical-perception*
*discounted-indexical-perception*
*indexical-perceptual-reliability*
*indexical-perceptual-unreliability*
*temporal-projection*
neg-at-intro
:inputs
(10 "(the color of Fred is red)" 1.0)
:premises
("((the probability of (the color of Fred is red) given
((I have a percept with content (the color of Fred is red))
& my_surroundings_are_illuminated_by_red_light)) <= .7)"
1.0)
("((the probability of (the color of Fred is red) given
((I have a percept with content (the color of Fred is red)) & I_am_wearing_red_tinted_glasses)) <= .8)"
1.0)
("(I_am_wearing_red_tinted_glasses at 1)" 1.0 15)
("(my_surroundings_are_illuminated_by_red_light at 1)" 1.0 30)
("(~my_surroundings_are_illuminated_by_red_light at 8)" 1.0 50)
:interests
("(the color of Fred is red)" 0.5)
)
(make-simulation-problem
:number 6
:message
"This is the Yale Shooting Problem. I know that the gun being fired while loaded
will cause Jones to become dead. I know that the gun is initially loaded, and Jones is initially
alive. Later, the gun is fired. Should I conclude that Jones becomes dead?"
:reasons
neg-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER+*
*CAUSAL-IMPLICATION*
neg-at-intro
:inputs
:premises
("(the_gun_is_loaded at 20)" 1.0)
("((Jones is alive) at 20)" 1.0)
("(the_gun_is_fired at 30)" 1.0)
("(all x)(all time)(((x is dead) at time) <-> ~((x is alive) at time))" 1.0)
("(the_gun_is_fired when the_gun_is_loaded is causally sufficient for
(Jones is dead) after an interval 10)" 1.0)
:interests
("((Jones is alive) at 50)" 0.75)
("((Jones is dead) at 50)" 0.75)
)
(make-simulation-problem
:number 7
:message
"This is the solved Yale Shooting Problem. I know that the gun being fired while loaded
will cause Jones to become dead. I know that the gun is initially loaded, and Jones is initially
alive. Later, the gun is fired. Should I conclude that Jones becomes dead?"
:reasons
neg-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER*
*CAUSAL-IMPLICATION*
neg-at-intro
:inputs
:premises
("(the_gun_is_loaded at 20)" 1.0)
("((Jones is alive) at 20)" 1.0)
("(the_gun_is_fired at 30)" 1.0)
("(the_gun_is_fired when the_gun_is_loaded is causally sufficient for
~(Jones is alive) after an interval 10)" 1.0)
:interests
("(? ((Jones is alive) at 50))" 0.75)
)
(make-simulation-problem
:number 13
:message
"This illustrates sequential causation. This requires causal undercutting for
causal implication. I know that the gun being fired while loaded
will cause Jones to become dead. I know that the gun is initially loaded, and Jones is initially
alive. Later, the gun is fired. But I also know that he will be resuscitated later, and then he will
be alive. Should I conclude that Jones becomes dead? This version is solved incorrectly."
:reasons
neg-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER*
*CAUSAL-IMPLICATION*
neg-at-intro
neg-at-intro2
:inputs
:premises
("(the_gun_is_loaded at 20)" 1.0)
("((Jones is alive) at 20)" 1.0)
("(the_gun_is_fired at 30)" 1.0)
("(Jones_is_resuscitated at 45)" 1.0)
("(the_gun_is_fired when the_gun_is_loaded is causally sufficient for
~(Jones is alive) after an interval 10)" 1.0)
("(Jones_is_resuscitated when ~(Jones is alive) is causally sufficient for
(Jones is alive) after an interval 5)" 1.0)
:interests
("(? ((Jones is alive) at 60))" 0.75)
)
(make-simulation-problem
:number 14
:message
"This illustrates sequential causation. This requires causal undercutting for
causal implication. I know that the gun being fired while loaded
will cause Jones to become dead. I know that the gun is initially loaded, and Jones is initially
alive. Later, the gun is fired. But I also know that he will be resuscitated later, and then he will
be alive. Should I conclude that Jones becomes dead?"
:reasons
neg-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER*
*CAUSAL-IMPLICATION*
*CAUSAL-UNDERCUTTER-FOR-CAUSAL-IMPLICATION*
neg-at-intro
neg-at-intro2
:inputs
:premises
("(the_gun_is_loaded at 20)" 1.0)
("((Jones is alive) at 20)" 1.0)
("(the_gun_is_fired at 30)" 1.0)
("(Jones_is_resuscitated at 45)" 1.0)
("(the_gun_is_fired when the_gun_is_loaded is causally sufficient for
~(Jones is alive) after an interval 10)" 1.0)
("(Jones_is_resuscitated when ~(Jones is alive) is causally sufficient for
(Jones is alive) after an interval 5)" 1.0)
:interests
("(? ((Jones is alive) at 60))" 0.75)
)
(make-simulation-problem
:number 8
:message
"This is the indexical Yale Shooting Problem. I know that the gun being fired while loaded
will cause Jones to become dead. I know that the gun is initially loaded, and Jones is initially
alive. Later, the gun is fired. Should I conclude that Jones becomes dead?"
:reasons
*INDEXICAL-TEMPORAL-PROJECTION*
*TEMPORAL-PROJECTION*
*INDEXICAL-CAUSAL-UNDERCUTTER*
*INDEXICAL-CAUSAL-IMPLICATION*
:start-time 50
:inputs
:premises
("((Jones is alive) at 20)" 1.0)
("(the_gun_is_loaded at 20)" 1.0)
("(the_gun_is_fired at 30)" 1.0)
("(the_gun_is_fired when the_gun_is_loaded is causally sufficient for
~(Jones is alive) after an interval 10)" 1.0)
:interests
("(? (Jones is alive))" 0.75)
)
(make-simulation-problem
:number 9
:message
"1. An interest in whether b1 and b2 collide at 10 generates an interest in their positions at 10.
Because we know their positions at 0, we adopt interest in their velocities between 0 and 10.
2. We know the velocities at 0, and temporal-projection leads to an inference that those velocities
remain unchanged between 0 and 10. From that we can compute the positions at 10, and infer
that b1 and b2 collide at 10.
3. However, temporal projection also leads to an inference that the positions at 10 are the
same as those at 0. Because the velocities at 0 are nonzero, causal undercutting defeats this
inference, leaving us with a unique conclusion regarding the positions at 10 (they are at (5.0 3.0)).
"
:reasons
neg-at-elimination
&-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER*
*COLLISION*
*NEW-POSITION*
*POSITION-INCOMPATIBILITY-1*
*POSITION-INCOMPATIBILITY-2*
strict-arithmetical-inequality
arithmetical-inequality
is-past-or-present
neg-at-intro
arithmetical-nonequality
inequality-transitivity
pair-nonidentity
pair-nonidentity-at-time
&-at-intro
arithmetical-equality
:inputs
:premises
("((the position of b1 is (0.0 3.0)) at 0)" 1.0)
("((the position of b2 is (1.0 0.0)) at 0)" 1.0)
("(all b)(all x)(all y)(all vx)(all vy)
((the velocity of b is (vx vy))
when ((the position of b is (x y)) & ~((vx vy) = (0.0 0.0)))
is causally sufficient for ~(the position of b is (x y)) after an interval 0)" 1.0)
("((the velocity of b1 is (.5 0.0)) at 0)" 1.0)
("((the velocity of b2 is (.4 .3)) at 0)" 1.0)
("(5.0 = (0.0 + (0.5 * (10 - 0))))" 1.0)
("(3.0 = (3.0 + (0.0 * (10 - 0))))" 1.0)
("(5.0 = (1.0 + (0.4 * (10 - 0))))" 1.0)
("(3.0 = (0.0 + (0.3 * (10 - 0))))" 1.0)
:interests
( " ( ? ( ( b1 and b2 collide ) at 10 ) ) " 0.75 )
("(? x)(? y) ((the position of b1 is (x y)) at 10)" 0.75)
)
(make-simulation-problem
:number 10
:message
"
1. An interest in whether b1 and b2 collide at 10 generates an interest in their positions at 10.
Because we know their positions at 0, we adopt interest in their velocities between 0 and 10.
2. We know the velocities at 0, and temporal-projection leads to an inference that those velocities
remain unchanged between 0 and 10. From that we can compute the positions at 10, and infer
that b1 and b2 collide at 10.
3. However, temporal projection also leads to an inference that the positions at 10 are the
same as those at 0. Because the velocities at 0 are nonzero, causal undercutting defeats this
inference, leaving us with a unique conclusion regarding the positions at 10 (they are at (5.0 3.0)).
"
:reasons
neg-at-elimination
&-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER*
*COLLISION*
*NEW-POSITION*
*POSITION-INCOMPATIBILITY-1*
*POSITION-INCOMPATIBILITY-2*
strict-arithmetical-inequality
arithmetical-inequality
is-past-or-present
neg-at-intro
arithmetical-nonequality
inequality-transitivity
pair-nonidentity
pair-nonidentity-at-time
&-at-intro
arithmetical-equality
:inputs
:premises
("((the position of b1 is (0.0 3.0)) at 0)" 1.0)
("((the position of b2 is (1.0 0.0)) at 0)" 1.0)
("(all b)(all x)(all y)(all vx)(all vy)
((the velocity of b is (vx vy))
when ((the position of b is (x y)) & ~((vx vy) = (0.0 0.0)))
is causally sufficient for ~(the position of b is (x y)) after an interval 0)" 1.0)
("((the velocity of b1 is (.5 0.0)) at 0)" 1.0)
("((the velocity of b2 is (.4 .3)) at 0)" 1.0)
("(5.0 = (0.0 + (0.5 * (10 - 0))))" 1.0)
("(3.0 = (3.0 + (0.0 * (10 - 0))))" 1.0)
("(5.0 = (1.0 + (0.4 * (10 - 0))))" 1.0)
("(3.0 = (0.0 + (0.3 * (10 - 0))))" 1.0)
( " ( ( 0 + 0 ) < 10 ) " 1.0 )
:interests
("(? ((b1 and b2 collide) at 10))" 0.75)
)
(make-simulation-problem
:number 11
:message
"1. We are given the velocities of b1 and b2 at 0, and are told they collide at (5 3) at 10.
We are interested in the velocity of b1 at 20.
2. By causal-implication, we can infer that the velocity of b1 at 20 is (.4 .3).
3. By temporal projection, we can also infer that the velocity of b1 at 20 is (.5 .0). But this
is defeated by causal-undercutter+, because we also know that if the velocity is (.4 .3) then
it is not (.5 .0).
"
:reasons
neg-at-elimination
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER+*
*CAUSAL-UNDERCUTTER*
*CAUSAL-IMPLICATION*
*COLLISION*
*NEW-POSITION*
strict-arithmetical-inequality
arithmetical-inequality
is-past-or-present
neg-at-intro
arithmetical-nonequality
inequality-transitivity
:inputs
:premises
("((the velocity of b1 is (.5 0.0)) at 10)" 1.0)
("((the velocity of b2 is (.4 .3)) at 10)" 1.0)
("(b1 is a dimensionless billiard ball)" 1.0)
("(b2 is a dimensionless billiard ball)" 1.0)
("((b1 and b2 collide) at 10)" 1.0)
("(((.5 expt 2) + (0.0 expt 2)) = ((.4 expt 2) + (.3 expt 2)))" 1.0)
("(same-mass b1 b2)" 1.0)
("(all b)(all time) (((the velocity of b is (0.4 0.3)) at time)
-> ~((the velocity of b is (0.5 0.0)) at time))" 1.0)
("(all b)(all time) (((the velocity of b is (0.5 0.0)) at time)
-> ~((the velocity of b is (0.4 0.3)) at time))" 1.0)
("(all b1)(all b2)(all v1x)(all v1y)(all v2x)(all v2y)
((((b1 is a dimensionless billiard ball) & (b2 is a dimensionless billiard ball))
& ((same-mass b1 b2) & (((v1x expt 2) + (v1y expt 2)) = ((v2x expt 2) + (v2y expt 2)))))
->
((b1 and b2 collide)
when (the velocity of b2 is (v2x v2y))
is causally sufficient for (the velocity of b1 is (v2x v2y))
after an interval 0))" 1.0)
("(all b1)(all b2)(all v1x)(all v1y)(all v2x)(all v2y)
((((b1 is a dimensionless billiard ball) & (b2 is a dimensionless billiard ball))
& ((same-mass b1 b2) & (((v1x expt 2) + (v1y expt 2)) = ((v2x expt 2) + (v2y expt 2)))))
->
((b1 and b2 collide)
when (the velocity of b1 is (v2x v2y))
is causally sufficient for (the velocity of b2 is (v2x v2y))
after an interval 0))" 1.0)
:interests
("(? x)(? y) ((the velocity of b1 is (x y)) at 20)" 0.75)
)
(make-simulation-problem
:number 12
:message
" This is the Extended Prediction Problem.
1. We are given the velocities of b1 and b2 at 0, and are told they collide at (5 3) at 10.
We are interested in the position of b1 at 20. Given knowledge of the position of b1 at 10,
this generates an interest in the velocity of b1 between 10 and 20.
2. By causal-implication, we can infer that the velocity of b1 between 10 and 20 is (.4 .3).
From this we can compute that the position of b1 at 20 is (9.0 6.0).
3. By temporal projection, we can also infer that the velocity of b1 at 20 is (.5 .0). But this
is defeated by causal-undercutter, because we also know that if the velocity is (.4 .3) then
it is not (.5 .0).
4. By temporal projection, we can infer that the position of b1 at 20 is the same as at 0.
But this is defeated by causal-undercutter, because we know that the velocity of b1 at 0
is nonzero.
5. By temporal projection, we can infer that the position of b1 at 20 is the same as at 10.
This is defeated in the same fashion as (4), because we know the velocity of
b1 between 0 and 10, and we are given that 10 is between 0 and 10.
"
:reasons
*CAUSAL-IMPLICATION*
*TEMPORAL-PROJECTION*
*CAUSAL-UNDERCUTTER*
*COLLISION*
*NEW-POSITION*
*POSITION-INCOMPATIBILITY*
pair-nonidentity
pair-nonidentity-at-time
&-at-intro
:inputs
:premises
("((the position of b1 is (0.0 3.0)) at 0)" 1.0)
("((the position of b2 is (1.0 0.0)) at 0)" 1.0)
("((the velocity of b1 is (.5 0.0)) at 0)" 1.0)
("((the velocity of b2 is (.4 .3)) at 0)" 1.0)
("(b1 is a dimensionless billiard ball)" 1.0)
("(b2 is a dimensionless billiard ball)" 1.0)
("(all b)(all x)(all y)(all vx)(all vy)
((the position of b is (x y))
when ((the velocity of b is (vx vy)) & ~((vx vy) = (0.0 0.0)))
is causally sufficient for ~(the position of b is (x y)) after an interval 0)" 1.0)
("(all b1)(all b2)(all v1x)(all v1y)(all v2x)(all v2y)
((((b1 is a dimensionless billiard ball) & (b2 is a dimensionless billiard ball))
& ((same-mass b1 b2) & (((v1x expt 2) + (v1y expt 2)) = ((v2x expt 2) + (v2y expt 2)))))
->
((b1 and b2 collide)
when (the velocity of b2 is (v2x v2y))
is causally sufficient for (the velocity of b1 is (v2x v2y))
after an interval 0))" 1.0)
("(same-mass b1 b2)" 1.0)
("(5.0 = (0.0 + (0.5 * (10 - 0))))" 1.0)
("(3.0 = (3.0 + (0.0 * (10 - 0))))" 1.0)
("(5.0 = (1.0 + (0.4 * (10 - 0))))" 1.0)
("(3.0 = (0.0 + (0.3 * (10 - 0))))" 1.0)
("(9.0 = (5.0 + (0.4 * (20 - 10))))" 1.0)
("(6.0 = (3.0 + (0.3 * (20 - 10))))" 1.0)
("(((.5 expt 2) + (0.0 expt 2)) = ((.4 expt 2) + (.3 expt 2)))" 1.0)
:interests
("(? ((b1 and b2 collide) at 10))" 0.75)
( " ( ? x ) ( ? y ) ( ( the velocity of b1 is ( x y ) ) throughout ( clopen 10 20 ) ) " 0.75 )
("(? x)(? y) ((the position of b1 is (x y)) at 20)" 0.75)
)
|
b8f50a12466e84e7aacc635f54a32e17aea99a03a122cac598dbe1757b0b0124 | input-output-hk/cardano-wallet | Wallets.hs | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DataKinds #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleContexts #
# LANGUAGE OverloadedLabels #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
module Test.Integration.Scenario.CLI.Byron.Wallets
( spec
) where
import Prelude
import Cardano.Wallet.Api.Types
( ApiByronWallet, ApiUtxoStatistics )
import Cardano.Wallet.Primitive.Passphrase
( PassphraseMaxLength (..), PassphraseMinLength (..) )
import Cardano.Wallet.Primitive.SyncProgress
( SyncProgress (..) )
import Control.Monad
( forM_ )
import Control.Monad.IO.Class
( liftIO )
import Control.Monad.Trans.Resource
( runResourceT )
import Data.Generics.Internal.VL.Lens
( view, (^.) )
import Data.Maybe
( isJust )
import Data.Proxy
( Proxy (..) )
import Data.Quantity
( Quantity (..) )
import System.Command
( Exit (..), Stderr (..), Stdout (..) )
import System.Exit
( ExitCode (..) )
import Test.Hspec
( SpecWith, describe, it, runIO )
import Test.Hspec.Expectations.Lifted
( shouldBe, shouldContain, shouldNotBe, shouldSatisfy )
import Test.Integration.Framework.DSL
( Context (..)
, MnemonicLength (..)
, createWalletViaCLI
, deleteWalletViaCLI
, emptyIcarusWallet
, emptyRandomWallet
, eventually
, expectCliField
, expectCliListField
, expectValidJSON
, expectWalletUTxO
, fixturePassphrase
, genMnemonics
, getWalletUtxoStatisticsViaCLI
, getWalletViaCLI
, listWalletsViaCLI
, updateWalletNameViaCLI
, updateWalletPassphraseViaCLI
, verify
, walletId
)
import Test.Integration.Framework.TestData
( arabicWalletName
, cmdOk
, errMsg400NumberOfWords
, errMsg403WrongPass
, errMsg404NoWallet
, errMsg409WalletExists
, russianWalletName
, wildcardsWalletName
)
import qualified Data.Text as T
spec :: SpecWith Context
spec = describe "BYRON_CLI_WALLETS" $ do
describe "CLI_BYRON_GET_04, CLI_BYRON_DELETE_01, BYRON_RESTORE_02, BYRON_RESTORE_03 -\
\ Deleted wallet is not available, but can be restored" $ do
let matrix = [ ("random", genMnemonics M12)
, ("icarus", genMnemonics M15)
]
forM_ matrix $ \(style, genM) -> it style $ \ctx -> runResourceT $ do
mnemonic <- liftIO genM
let args =
[ "Name of the wallet"
, "--wallet-style", style
]
--create
(c, out, err) <- createWalletViaCLI @_ @IO ctx
args (unwords $ T.unpack <$> mnemonic)
"\n" "secure-passphrase"
T.unpack err `shouldContain` cmdOk
c `shouldBe` ExitSuccess
j <- expectValidJSON (Proxy @ApiByronWallet) out
let wid = T.unpack $ j ^. walletId
--delete
(Exit cd, Stdout outd, Stderr errd) <- deleteWalletViaCLI ctx wid
outd`shouldBe` "\n"
cd `shouldBe` ExitSuccess
errd `shouldContain` cmdOk
--not available
(Exit c2, Stdout out2, Stderr err2) <- getWalletViaCLI ctx wid
out2 `shouldBe` mempty
c2 `shouldBe` ExitFailure 1
err2 `shouldContain` errMsg404NoWallet (T.pack wid)
--re-create
(c3, out3, err3) <- createWalletViaCLI ctx
args (unwords $ T.unpack <$> mnemonic)
"\n" "secure-passphrase-restored"
c3 `shouldBe` ExitSuccess
T.unpack err3 `shouldContain` cmdOk
jr <- expectValidJSON (Proxy @ApiByronWallet) out3
verify jr [ expectCliField walletId (`shouldBe` T.pack wid) ]
--re-create again? No!
(c4, out4, err4) <- createWalletViaCLI ctx
args (unwords $ T.unpack <$> mnemonic)
"\n" "secure-passphrase-restored-again"
c4 `shouldBe` ExitFailure 1
T.unpack err4 `shouldContain` (errMsg409WalletExists wid)
out4 `shouldBe` mempty
describe "CLI_BYRON_RESTORE_01, CLI_BYRON_GET_01, CLI_BYRON_LIST_01 -\
\Restore a wallet" $ do
let scenarioSuccess style mnemonic ctx = runResourceT @IO $ do
let name = "Name of the wallet"
let args =
[ name
, "--wallet-style", style
]
let expectations =
[ expectCliField (#name . #getApiT . #getWalletName)
(`shouldBe` (T.pack name))
, expectCliField (#balance . #available)
(`shouldBe` Quantity 0)
, expectCliField (#balance . #total)
(`shouldBe` Quantity 0)
, expectCliField #passphrase (`shouldNotBe` Nothing)
]
-- create
(c, out, err) <- createWalletViaCLI ctx
args (unwords $ T.unpack <$> mnemonic)
"\n" "secure-passphrase"
T.unpack err `shouldContain` cmdOk
c `shouldBe` ExitSuccess
j <- expectValidJSON (Proxy @ApiByronWallet) out
liftIO $ verify j expectations
let wid = T.unpack $ j ^. walletId
eventually "wallet is available and ready" $ do
-- get
(Exit c2, Stdout out2, Stderr err2) <- getWalletViaCLI ctx wid
c2 `shouldBe` ExitSuccess
err2 `shouldContain` cmdOk
jg <- expectValidJSON (Proxy @ApiByronWallet) out2
verify jg $
(expectCliField (#state . #getApiT) (`shouldBe` Ready)) : expectations
-- list
(Exit c3, Stdout out3, Stderr err3) <- listWalletsViaCLI ctx
c3 `shouldBe` ExitSuccess
err3 `shouldBe` cmdOk
jl <- expectValidJSON (Proxy @[ApiByronWallet]) out3
length jl `shouldBe` 1
expectCliListField 0 walletId (`shouldBe` T.pack wid) jl
let scenarioFailure style mnemonic ctx = runResourceT @IO $ do
let args =
[ "The wallet that didn't exist"
, "--wallet-style", style
]
(c, out, err) <- createWalletViaCLI ctx
args (unwords $ T.unpack <$> mnemonic)
"\n" "secure-passphrase"
T.unpack err `shouldContain` errMsg400NumberOfWords
c `shouldBe` ExitFailure 1
out `shouldBe` mempty
let it' style genMnemonicIO test = do
mnemonic <- runIO genMnemonicIO
flip it (test style mnemonic) $ unwords
[ style
, show (length mnemonic)
, "words"
]
it' "random" (genMnemonics M9) scenarioFailure -- ❌
it' "random" (genMnemonics M12) scenarioSuccess -- ✔️
it' "random" (genMnemonics M15) scenarioSuccess -- ✔️
it' "random" (genMnemonics M18) scenarioSuccess -- ✔️
it' "random" (genMnemonics M21) scenarioSuccess -- ✔️
it' "random" (genMnemonics M24) scenarioSuccess -- ✔️
it' "icarus" (genMnemonics M9) scenarioFailure -- ❌
it' "icarus" (genMnemonics M12) scenarioSuccess -- ✔️
it' "icarus" (genMnemonics M15) scenarioSuccess -- ✔️
it' "icarus" (genMnemonics M18) scenarioSuccess -- ✔️
it' "icarus" (genMnemonics M21) scenarioSuccess -- ✔️
it' "icarus" (genMnemonics M24) scenarioSuccess -- ✔️
it' "trezor" (genMnemonics M9) scenarioFailure -- ❌
it' "trezor" (genMnemonics M12) scenarioSuccess -- ✔️
it' "trezor" (genMnemonics M15) scenarioSuccess -- ✔️
it' "trezor" (genMnemonics M18) scenarioSuccess -- ✔️
it' "trezor" (genMnemonics M21) scenarioSuccess -- ✔️
it' "trezor" (genMnemonics M24) scenarioSuccess -- ✔️
it' "ledger" (genMnemonics M9) scenarioFailure -- ❌
it' "ledger" (genMnemonics M12) scenarioSuccess -- ✔️
it' "ledger" (genMnemonics M15) scenarioSuccess -- ✔️
it' "ledger" (genMnemonics M18) scenarioSuccess -- ✔️
it' "ledger" (genMnemonics M21) scenarioSuccess -- ✔️
it' "ledger" (genMnemonics M24) scenarioSuccess -- ✔️
describe "CLI_BYRON_RESTORE_06 - Passphrase" $ do
let minLength = passphraseMinLength (Proxy @"user")
let maxLength = passphraseMaxLength (Proxy @"user")
let matrix =
[ ( show minLength ++ " char long"
, T.pack (replicate minLength 'ź')
)
, ( show maxLength ++ " char long"
, T.pack (replicate maxLength 'ą')
)
, ( "Russian passphrase", russianWalletName )
, ( "Arabic passphrase", arabicWalletName )
, ( "Wildcards passphrase", wildcardsWalletName )
]
forM_ matrix $ \(title, passphrase) -> it title $
\ctx -> runResourceT @IO $ do
let args =
[ "Name of the wallet"
, "--wallet-style", "random"
]
mnemonic <- liftIO $ genMnemonics M12
(c, out, err) <- createWalletViaCLI ctx
args (unwords $ T.unpack <$> mnemonic)
"\n" (T.unpack passphrase)
T.unpack err `shouldContain` cmdOk
_ <- expectValidJSON (Proxy @ApiByronWallet) out
c `shouldBe` ExitSuccess
it "CLI_BYRON_UPDATE_NAME_01 - Update names of wallets" $ \ctx ->
forM_ [ emptyRandomWallet, emptyIcarusWallet ] $
\emptyByronWallet -> runResourceT @IO $ do
wid <- fmap (T.unpack . view walletId) (emptyByronWallet ctx)
let updatedName = "Name is updated"
(Exit c, Stdout out, Stderr err) <-
updateWalletNameViaCLI ctx [wid, updatedName]
c `shouldBe` ExitSuccess
err `shouldBe` cmdOk
ju <- expectValidJSON (Proxy @ApiByronWallet) out
expectCliField
(#name . #getApiT . #getWalletName)
(`shouldBe` T.pack updatedName) ju
it "CLI_BYRON_UPDATE_NAME_02 - When updated name too long" $ \ctx ->
forM_ [ emptyRandomWallet, emptyIcarusWallet ] $
\emptyByronWallet -> runResourceT @IO $ do
wid <- fmap (T.unpack . view walletId) (emptyByronWallet ctx)
let updatedName = replicate 500 'o'
(Exit c, Stdout out, Stderr err) <-
updateWalletNameViaCLI ctx [wid, updatedName]
c `shouldBe` ExitFailure 1
err `shouldContain` "name is too long: expected at most 255 characters"
out `shouldBe` mempty
it "CLI_BYRON_UTXO_01 - Wallet's inactivity is reflected in utxo" $ \ctx ->
forM_ [ emptyRandomWallet, emptyIcarusWallet ]
$ \emptyByronWallet -> runResourceT @IO $ do
wid <- fmap (T.unpack . view walletId) (emptyByronWallet ctx)
(Exit c, Stdout o, Stderr e) <- getWalletUtxoStatisticsViaCLI ctx wid
c `shouldBe` ExitSuccess
e `shouldBe` cmdOk
utxoStats <- expectValidJSON (Proxy @ApiUtxoStatistics) o
expectWalletUTxO [] (Right utxoStats)
it "CLI_BYRON_UPDATE_PASS_01 - change passphrase" $ \ctx ->
forM_ [ emptyRandomWallet, emptyIcarusWallet ] $
\emptyByronWallet -> runResourceT @IO $ do
wid <- fmap (T.unpack . view walletId) (emptyByronWallet ctx)
Stdout out <- getWalletViaCLI ctx wid
expectValidJSON (Proxy @ApiByronWallet) out
>>= flip verify [ expectCliField #passphrase (`shouldSatisfy` isJust) ]
let oldPass = T.unpack fixturePassphrase
let newPass = "cardano-wallet-new-pass"
(c, o, e) <-
updateWalletPassphraseViaCLI ctx wid oldPass newPass newPass
c `shouldBe` ExitSuccess
o `shouldBe` "\n"
T.unpack e `shouldContain` cmdOk
it "CLI_BYRON_UPDATE_PASS_02 - Old passphrase incorrect" $ \ctx ->
forM_ [ emptyRandomWallet, emptyIcarusWallet ] $
\emptyByronWallet -> runResourceT @IO $ do
wid <- fmap (T.unpack . view walletId) (emptyByronWallet ctx)
Stdout out <- getWalletViaCLI ctx wid
expectValidJSON (Proxy @ApiByronWallet) out
>>= flip verify [ expectCliField #passphrase (`shouldSatisfy` isJust) ]
let oldPass = "incorrect-passphrase"
let newPass = "cardano-wallet-new-pass"
(c, o, e) <-
updateWalletPassphraseViaCLI ctx wid oldPass newPass newPass
c `shouldBe` ExitFailure 1
o `shouldBe` mempty
T.unpack e `shouldContain` errMsg403WrongPass
describe "CLI_BYRON_UPDATE_PASS_03 - Pass length incorrect" $ do
let minLength = passphraseMinLength (Proxy @"user")
let maxLength = passphraseMaxLength (Proxy @"user")
let passTooShort = replicate (minLength - 1) 'o'
let errMsgTooShort = "passphrase is too short: expected at least 10 characters"
let passTooLong = replicate (maxLength + 1) 'o'
let errMsgTooLong = "passphrase is too long: expected at most 255 characters"
let passOK = T.unpack fixturePassphrase
let matrix = [ ("old pass too short", passTooShort, passOK, errMsgTooShort)
, ("old pass too long", passTooLong, passOK, errMsgTooLong)
, ("new pass too short", passOK, passTooShort, errMsgTooShort)
, ("new pass too long", passOK, passTooLong, errMsgTooLong)
]
forM_ matrix $ \(title, oldPass, newPass, errMsg) -> it title $ \ctx -> runResourceT @IO $ do
forM_ [ emptyRandomWallet, emptyIcarusWallet ] $ \emptyByronWallet -> do
wid <- fmap (T.unpack . view walletId) (emptyByronWallet ctx)
Stdout out <- getWalletViaCLI ctx wid
expectValidJSON (Proxy @ApiByronWallet) out
>>= flip verify [ expectCliField #passphrase (`shouldSatisfy` isJust) ]
(c, o, e) <-
updateWalletPassphraseViaCLI ctx wid oldPass newPass newPass
T.unpack e `shouldContain` errMsg
c `shouldBe` ExitFailure 1
o `shouldBe` mempty
| null | https://raw.githubusercontent.com/input-output-hk/cardano-wallet/7b0192110fe226f992bca6198b8ee83fa4a37f46/lib/wallet/integration/src/Test/Integration/Scenario/CLI/Byron/Wallets.hs | haskell | create
delete
not available
re-create
re-create again? No!
create
get
list
❌
✔️
✔️
✔️
✔️
✔️
❌
✔️
✔️
✔️
✔️
✔️
❌
✔️
✔️
✔️
✔️
✔️
❌
✔️
✔️
✔️
✔️
✔️ | # LANGUAGE AllowAmbiguousTypes #
# LANGUAGE DataKinds #
# LANGUAGE DuplicateRecordFields #
# LANGUAGE FlexibleContexts #
# LANGUAGE OverloadedLabels #
# LANGUAGE ScopedTypeVariables #
# LANGUAGE TypeApplications #
# LANGUAGE TypeFamilies #
module Test.Integration.Scenario.CLI.Byron.Wallets
( spec
) where
import Prelude
import Cardano.Wallet.Api.Types
( ApiByronWallet, ApiUtxoStatistics )
import Cardano.Wallet.Primitive.Passphrase
( PassphraseMaxLength (..), PassphraseMinLength (..) )
import Cardano.Wallet.Primitive.SyncProgress
( SyncProgress (..) )
import Control.Monad
( forM_ )
import Control.Monad.IO.Class
( liftIO )
import Control.Monad.Trans.Resource
( runResourceT )
import Data.Generics.Internal.VL.Lens
( view, (^.) )
import Data.Maybe
( isJust )
import Data.Proxy
( Proxy (..) )
import Data.Quantity
( Quantity (..) )
import System.Command
( Exit (..), Stderr (..), Stdout (..) )
import System.Exit
( ExitCode (..) )
import Test.Hspec
( SpecWith, describe, it, runIO )
import Test.Hspec.Expectations.Lifted
( shouldBe, shouldContain, shouldNotBe, shouldSatisfy )
import Test.Integration.Framework.DSL
( Context (..)
, MnemonicLength (..)
, createWalletViaCLI
, deleteWalletViaCLI
, emptyIcarusWallet
, emptyRandomWallet
, eventually
, expectCliField
, expectCliListField
, expectValidJSON
, expectWalletUTxO
, fixturePassphrase
, genMnemonics
, getWalletUtxoStatisticsViaCLI
, getWalletViaCLI
, listWalletsViaCLI
, updateWalletNameViaCLI
, updateWalletPassphraseViaCLI
, verify
, walletId
)
import Test.Integration.Framework.TestData
( arabicWalletName
, cmdOk
, errMsg400NumberOfWords
, errMsg403WrongPass
, errMsg404NoWallet
, errMsg409WalletExists
, russianWalletName
, wildcardsWalletName
)
import qualified Data.Text as T
spec :: SpecWith Context
spec = describe "BYRON_CLI_WALLETS" $ do
describe "CLI_BYRON_GET_04, CLI_BYRON_DELETE_01, BYRON_RESTORE_02, BYRON_RESTORE_03 -\
\ Deleted wallet is not available, but can be restored" $ do
let matrix = [ ("random", genMnemonics M12)
, ("icarus", genMnemonics M15)
]
forM_ matrix $ \(style, genM) -> it style $ \ctx -> runResourceT $ do
mnemonic <- liftIO genM
let args =
[ "Name of the wallet"
, "--wallet-style", style
]
(c, out, err) <- createWalletViaCLI @_ @IO ctx
args (unwords $ T.unpack <$> mnemonic)
"\n" "secure-passphrase"
T.unpack err `shouldContain` cmdOk
c `shouldBe` ExitSuccess
j <- expectValidJSON (Proxy @ApiByronWallet) out
let wid = T.unpack $ j ^. walletId
(Exit cd, Stdout outd, Stderr errd) <- deleteWalletViaCLI ctx wid
outd`shouldBe` "\n"
cd `shouldBe` ExitSuccess
errd `shouldContain` cmdOk
(Exit c2, Stdout out2, Stderr err2) <- getWalletViaCLI ctx wid
out2 `shouldBe` mempty
c2 `shouldBe` ExitFailure 1
err2 `shouldContain` errMsg404NoWallet (T.pack wid)
(c3, out3, err3) <- createWalletViaCLI ctx
args (unwords $ T.unpack <$> mnemonic)
"\n" "secure-passphrase-restored"
c3 `shouldBe` ExitSuccess
T.unpack err3 `shouldContain` cmdOk
jr <- expectValidJSON (Proxy @ApiByronWallet) out3
verify jr [ expectCliField walletId (`shouldBe` T.pack wid) ]
(c4, out4, err4) <- createWalletViaCLI ctx
args (unwords $ T.unpack <$> mnemonic)
"\n" "secure-passphrase-restored-again"
c4 `shouldBe` ExitFailure 1
T.unpack err4 `shouldContain` (errMsg409WalletExists wid)
out4 `shouldBe` mempty
describe "CLI_BYRON_RESTORE_01, CLI_BYRON_GET_01, CLI_BYRON_LIST_01 -\
\Restore a wallet" $ do
let scenarioSuccess style mnemonic ctx = runResourceT @IO $ do
let name = "Name of the wallet"
let args =
[ name
, "--wallet-style", style
]
let expectations =
[ expectCliField (#name . #getApiT . #getWalletName)
(`shouldBe` (T.pack name))
, expectCliField (#balance . #available)
(`shouldBe` Quantity 0)
, expectCliField (#balance . #total)
(`shouldBe` Quantity 0)
, expectCliField #passphrase (`shouldNotBe` Nothing)
]
(c, out, err) <- createWalletViaCLI ctx
args (unwords $ T.unpack <$> mnemonic)
"\n" "secure-passphrase"
T.unpack err `shouldContain` cmdOk
c `shouldBe` ExitSuccess
j <- expectValidJSON (Proxy @ApiByronWallet) out
liftIO $ verify j expectations
let wid = T.unpack $ j ^. walletId
eventually "wallet is available and ready" $ do
(Exit c2, Stdout out2, Stderr err2) <- getWalletViaCLI ctx wid
c2 `shouldBe` ExitSuccess
err2 `shouldContain` cmdOk
jg <- expectValidJSON (Proxy @ApiByronWallet) out2
verify jg $
(expectCliField (#state . #getApiT) (`shouldBe` Ready)) : expectations
(Exit c3, Stdout out3, Stderr err3) <- listWalletsViaCLI ctx
c3 `shouldBe` ExitSuccess
err3 `shouldBe` cmdOk
jl <- expectValidJSON (Proxy @[ApiByronWallet]) out3
length jl `shouldBe` 1
expectCliListField 0 walletId (`shouldBe` T.pack wid) jl
let scenarioFailure style mnemonic ctx = runResourceT @IO $ do
let args =
[ "The wallet that didn't exist"
, "--wallet-style", style
]
(c, out, err) <- createWalletViaCLI ctx
args (unwords $ T.unpack <$> mnemonic)
"\n" "secure-passphrase"
T.unpack err `shouldContain` errMsg400NumberOfWords
c `shouldBe` ExitFailure 1
out `shouldBe` mempty
let it' style genMnemonicIO test = do
mnemonic <- runIO genMnemonicIO
flip it (test style mnemonic) $ unwords
[ style
, show (length mnemonic)
, "words"
]
describe "CLI_BYRON_RESTORE_06 - Passphrase" $ do
let minLength = passphraseMinLength (Proxy @"user")
let maxLength = passphraseMaxLength (Proxy @"user")
let matrix =
[ ( show minLength ++ " char long"
, T.pack (replicate minLength 'ź')
)
, ( show maxLength ++ " char long"
, T.pack (replicate maxLength 'ą')
)
, ( "Russian passphrase", russianWalletName )
, ( "Arabic passphrase", arabicWalletName )
, ( "Wildcards passphrase", wildcardsWalletName )
]
forM_ matrix $ \(title, passphrase) -> it title $
\ctx -> runResourceT @IO $ do
let args =
[ "Name of the wallet"
, "--wallet-style", "random"
]
mnemonic <- liftIO $ genMnemonics M12
(c, out, err) <- createWalletViaCLI ctx
args (unwords $ T.unpack <$> mnemonic)
"\n" (T.unpack passphrase)
T.unpack err `shouldContain` cmdOk
_ <- expectValidJSON (Proxy @ApiByronWallet) out
c `shouldBe` ExitSuccess
it "CLI_BYRON_UPDATE_NAME_01 - Update names of wallets" $ \ctx ->
forM_ [ emptyRandomWallet, emptyIcarusWallet ] $
\emptyByronWallet -> runResourceT @IO $ do
wid <- fmap (T.unpack . view walletId) (emptyByronWallet ctx)
let updatedName = "Name is updated"
(Exit c, Stdout out, Stderr err) <-
updateWalletNameViaCLI ctx [wid, updatedName]
c `shouldBe` ExitSuccess
err `shouldBe` cmdOk
ju <- expectValidJSON (Proxy @ApiByronWallet) out
expectCliField
(#name . #getApiT . #getWalletName)
(`shouldBe` T.pack updatedName) ju
it "CLI_BYRON_UPDATE_NAME_02 - When updated name too long" $ \ctx ->
forM_ [ emptyRandomWallet, emptyIcarusWallet ] $
\emptyByronWallet -> runResourceT @IO $ do
wid <- fmap (T.unpack . view walletId) (emptyByronWallet ctx)
let updatedName = replicate 500 'o'
(Exit c, Stdout out, Stderr err) <-
updateWalletNameViaCLI ctx [wid, updatedName]
c `shouldBe` ExitFailure 1
err `shouldContain` "name is too long: expected at most 255 characters"
out `shouldBe` mempty
it "CLI_BYRON_UTXO_01 - Wallet's inactivity is reflected in utxo" $ \ctx ->
forM_ [ emptyRandomWallet, emptyIcarusWallet ]
$ \emptyByronWallet -> runResourceT @IO $ do
wid <- fmap (T.unpack . view walletId) (emptyByronWallet ctx)
(Exit c, Stdout o, Stderr e) <- getWalletUtxoStatisticsViaCLI ctx wid
c `shouldBe` ExitSuccess
e `shouldBe` cmdOk
utxoStats <- expectValidJSON (Proxy @ApiUtxoStatistics) o
expectWalletUTxO [] (Right utxoStats)
it "CLI_BYRON_UPDATE_PASS_01 - change passphrase" $ \ctx ->
forM_ [ emptyRandomWallet, emptyIcarusWallet ] $
\emptyByronWallet -> runResourceT @IO $ do
wid <- fmap (T.unpack . view walletId) (emptyByronWallet ctx)
Stdout out <- getWalletViaCLI ctx wid
expectValidJSON (Proxy @ApiByronWallet) out
>>= flip verify [ expectCliField #passphrase (`shouldSatisfy` isJust) ]
let oldPass = T.unpack fixturePassphrase
let newPass = "cardano-wallet-new-pass"
(c, o, e) <-
updateWalletPassphraseViaCLI ctx wid oldPass newPass newPass
c `shouldBe` ExitSuccess
o `shouldBe` "\n"
T.unpack e `shouldContain` cmdOk
it "CLI_BYRON_UPDATE_PASS_02 - Old passphrase incorrect" $ \ctx ->
forM_ [ emptyRandomWallet, emptyIcarusWallet ] $
\emptyByronWallet -> runResourceT @IO $ do
wid <- fmap (T.unpack . view walletId) (emptyByronWallet ctx)
Stdout out <- getWalletViaCLI ctx wid
expectValidJSON (Proxy @ApiByronWallet) out
>>= flip verify [ expectCliField #passphrase (`shouldSatisfy` isJust) ]
let oldPass = "incorrect-passphrase"
let newPass = "cardano-wallet-new-pass"
(c, o, e) <-
updateWalletPassphraseViaCLI ctx wid oldPass newPass newPass
c `shouldBe` ExitFailure 1
o `shouldBe` mempty
T.unpack e `shouldContain` errMsg403WrongPass
describe "CLI_BYRON_UPDATE_PASS_03 - Pass length incorrect" $ do
let minLength = passphraseMinLength (Proxy @"user")
let maxLength = passphraseMaxLength (Proxy @"user")
let passTooShort = replicate (minLength - 1) 'o'
let errMsgTooShort = "passphrase is too short: expected at least 10 characters"
let passTooLong = replicate (maxLength + 1) 'o'
let errMsgTooLong = "passphrase is too long: expected at most 255 characters"
let passOK = T.unpack fixturePassphrase
let matrix = [ ("old pass too short", passTooShort, passOK, errMsgTooShort)
, ("old pass too long", passTooLong, passOK, errMsgTooLong)
, ("new pass too short", passOK, passTooShort, errMsgTooShort)
, ("new pass too long", passOK, passTooLong, errMsgTooLong)
]
forM_ matrix $ \(title, oldPass, newPass, errMsg) -> it title $ \ctx -> runResourceT @IO $ do
forM_ [ emptyRandomWallet, emptyIcarusWallet ] $ \emptyByronWallet -> do
wid <- fmap (T.unpack . view walletId) (emptyByronWallet ctx)
Stdout out <- getWalletViaCLI ctx wid
expectValidJSON (Proxy @ApiByronWallet) out
>>= flip verify [ expectCliField #passphrase (`shouldSatisfy` isJust) ]
(c, o, e) <-
updateWalletPassphraseViaCLI ctx wid oldPass newPass newPass
T.unpack e `shouldContain` errMsg
c `shouldBe` ExitFailure 1
o `shouldBe` mempty
|
c6295dc77abdcfd3e1d16fc5f7633ff41bec9bfe1e72bd6da1bc41134e461105 | wireless-net/erlang-nommu | system_information.erl | %% %CopyrightBegin%
%%
Copyright Ericsson AB 2013 . All Rights Reserved .
%%
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
%% compliance with the License. You should have received a copy of the
%% Erlang Public License along with this software. If not, it can be
%% retrieved online at /.
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and limitations
%% under the License.
%%
%% %CopyrightEnd%
%%
%% The main purpose of system_information is to aggregate all information
%% deemed useful for investigation, i.e. system_information:report/0.
%% The server and all other utilities surrounding this is for inspecting
%% reported values. Functions will be added to this as time goes by.
-module(system_information).
-behaviour(gen_server).
%% API
-export([
report/0,
from_file/1,
to_file/1
]).
-export([
start/0, stop/0,
load_report/0, load_report/2,
applications/0, applications/1,
application/1, application/2,
environment/0, environment/1,
module/1, module/2,
modules/1,
sanity_check/0
]).
%% gen_server callbacks
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-define(SERVER, ?MODULE).
%% change version if parsing of file changes
-define(REPORT_FILE_VSN, "1.0").
-record(state, {
report
}).
%%===================================================================
%% API
%%===================================================================
start() ->
gen_server:start({local, ?SERVER}, ?MODULE, [], []).
stop() ->
gen_server:call(?SERVER, stop).
load_report() -> load_report(data, report()).
load_report(file, File) -> load_report(data, from_file(File));
load_report(data, Report) ->
start(), gen_server:call(?SERVER, {load_report, Report}).
report() -> [
{init_arguments, init:get_arguments()},
{code_paths, code:get_path()},
{code, code()},
{system_info, erlang_system_info()},
{erts_compile_info, erlang:system_info(compile_info)},
{beam_dynamic_libraries, get_dynamic_libraries()},
{environment_erts, os_getenv_erts_specific()},
{environment, [split_env(Env) || Env <- os:getenv()]},
{sanity_check, sanity_check()}
].
-spec to_file(FileName) -> ok | {error, Reason} when
FileName :: file:name_all(),
Reason :: file:posix() | badarg | terminated | system_limit.
to_file(File) ->
file:write_file(File, iolist_to_binary([
io_lib:format("{system_information_version, ~p}.~n", [
?REPORT_FILE_VSN
]),
io_lib:format("{system_information, ~p}.~n", [
report()
])
])).
from_file(File) ->
case file:consult(File) of
{ok, Data} ->
case get_value([system_information_version], Data) of
?REPORT_FILE_VSN ->
get_value([system_information], Data);
Vsn ->
erlang:error({unknown_version, Vsn})
end;
_ ->
erlang:error(bad_report_file)
end.
applications() -> applications([]).
applications(Opts) when is_list(Opts) ->
gen_server:call(?SERVER, {applications, Opts}).
application(App) when is_atom(App) -> application(App, []).
application(App, Opts) when is_atom(App), is_list(Opts) ->
gen_server:call(?SERVER, {application, App, Opts}).
environment() -> environment([]).
environment(Opts) when is_list(Opts) ->
gen_server:call(?SERVER, {environment, Opts}).
module(M) when is_atom(M) -> module(M, []).
module(M, Opts) when is_atom(M), is_list(Opts) ->
gen_server:call(?SERVER, {module, M, Opts}).
modules(Opt) when is_atom(Opt) ->
gen_server:call(?SERVER, {modules, Opt}).
-spec sanity_check() -> ok | {failed, Failures} when
Application :: atom(),
ApplicationVersion :: string(),
MissingRuntimeDependencies :: {missing_runtime_dependencies,
ApplicationVersion,
[ApplicationVersion]},
InvalidApplicationVersion :: {invalid_application_version,
ApplicationVersion},
InvalidAppFile :: {invalid_app_file, Application},
Failure :: MissingRuntimeDependencies
| InvalidApplicationVersion
| InvalidAppFile,
Failures :: [Failure].
sanity_check() ->
case check_runtime_dependencies() of
[] -> ok;
Issues -> {failed, Issues}
end.
%%===================================================================
%% gen_server callbacks
%%===================================================================
init([]) ->
{ok, #state{}}.
%% Stop the server.
handle_call(stop, _From, S) ->
    {stop, normal, ok, S};
%% Install a (possibly saved) report in the server state.
handle_call({load_report, Report}, _From, S) ->
    Version = get_value([system_info, system_version], Report),
    io:format("Loaded report from system version: ~s~n", [Version]),
    {reply, ok, S#state{ report = Report }};
%% Guard clause: every request below requires a loaded report. Clause
%% order matters — this must stay before the report-using clauses.
handle_call(_Req, _From, #state{ report = undefined } = S) ->
    {reply, {error, report_not_loaded}, S};
%% Print all applications recorded in the report's code section.
handle_call({applications, Opts}, _From, #state{ report = Report } = S) ->
    ok = print_applications(get_value([code], Report), Opts),
    {reply, ok, S};
%% Print a single application; its info is looked up among the
%% {application, ...} entries of the code section.
handle_call({application, App, Opts}, _From, #state{ report = Report } = S) ->
    Data = get_value([App], [AppInfo||{application, AppInfo}<-get_value([code], Report)]),
    ok = print_application({App, Data}, Opts),
    {reply, ok, S};
%% Print the environment; 'full' selects the complete environment
%% section, otherwise only the ERTS-specific variables are shown.
handle_call({environment, Opts}, _From, #state{ report = Report } = S) ->
    Choices = case proplists:get_bool(full, Opts) of
        true -> [environment];
        false -> [environment_erts]
    end,
    ok = print_environments(get_value(Choices, Report), Opts),
    {reply, ok, S};
%% Print details for one module wherever it occurs in the code section.
handle_call({module, M, Opts}, _From, #state{ report = Report } = S) ->
    Mods = find_modules_from_code(M, get_value([code], Report)),
    print_modules_from_code(M, Mods, Opts),
    {reply, ok, S};
%% Dump all native-compiled modules found in the report.
handle_call({modules, native}, _From, #state{ report = Report } = S) ->
    Codes = get_native_modules_from_code(get_value([code],Report)),
    io:format("~p~n", [Codes]),
    {reply, ok, S};
%% Catch-all: unknown requests are acknowledged and ignored.
handle_call(_Request, _From, State) ->
    {reply, ok, State}.
%% No casts are used; ignore them.
handle_cast(_Msg, State) ->
    {noreply, State}.
%% No out-of-band messages are expected; ignore them.
handle_info(_Info, State) ->
    {noreply, State}.
%% Nothing to clean up on termination.
terminate(_Reason, _State) ->
    ok.
%% No state migration needed across code upgrades.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%%===================================================================
%% Internal functions
%%===================================================================
%% handle report values
%% Walk a nested proplist: look up each key of Ks in turn, descending
%% into the value found at each level. A missing key yields [] at that
%% level; an empty key list returns Data unchanged.
get_value(Ks, Data) ->
    lists:foldl(fun (K, Acc) ->
                        proplists:get_value(K, Acc, [])
                end, Data, Ks).
%% Scan the report's code section for module M. For a bare code-path
%% entry the result item is {Path, Matches}; for an application entry it
%% is {App, Vsn, Path, Matches}. Entries without a match are dropped.
find_modules_from_code(M, [{code, Info}|Codes]) ->
    case find_modules(M, get_value([modules], Info)) of
        [] -> find_modules_from_code(M, Codes);
        Mods ->
            Path = get_value([path], Info),
            [{Path, Mods}|find_modules_from_code(M, Codes)]
    end;
find_modules_from_code(M, [{application, {App, Info}}|Codes]) ->
    case find_modules(M, get_value([modules], Info)) of
        [] -> find_modules_from_code(M, Codes);
        Mods ->
            Path = get_value([path], Info),
            Vsn = get_value([vsn], Info),
            [{App, Vsn, Path, Mods}|find_modules_from_code(M, Codes)]
    end;
find_modules_from_code(_, []) -> [].
%% Collect every {M, _} entry for module M from a module proplist,
%% preserving the original order; elements that are not 2-tuples are
%% skipped, matching the original clause behavior.
find_modules(M, Mods) ->
    [Info || {Mod, _} = Info <- Mods, Mod =:= M].
%% Scan the report's code section for native-compiled modules. Shapes of
%% the result items mirror find_modules_from_code/2: {App, Vsn, Path,
%% Mods} for applications, {Path, Mods} for bare code paths; entries
%% without native modules are dropped.
get_native_modules_from_code([{application, {App, Info}}|Cs]) ->
    case get_native_modules(get_value([modules], Info)) of
        [] -> get_native_modules_from_code(Cs);
        Mods ->
            Path = get_value([path], Info),
            Vsn = get_value([vsn], Info),
            [{App, Vsn, Path, Mods}|get_native_modules_from_code(Cs)]
    end;
get_native_modules_from_code([{code, Info}|Cs]) ->
    case get_native_modules(get_value([modules], Info)) of
        [] -> get_native_modules_from_code(Cs);
        Mods ->
            Path = get_value([path], Info),
            [{Path, Mods}|get_native_modules_from_code(Cs)]
    end;
get_native_modules_from_code([]) -> [].
%% Names of the modules whose info proplist marks them as native
%% compiled. As in the original clause order, any 'native' value other
%% than false — including a missing key (undefined) — counts as native.
get_native_modules(Mods) ->
    [Mod || {Mod, Info} <- Mods, proplists:get_value(native, Info) =/= false].
%% print information
%% Print every application entry of the code section; bare code-path
%% entries are skipped.
print_applications([{application, App}|Apps], Opts) ->
    print_application(App, Opts),
    print_applications(Apps, Opts);
print_applications([{code,_}|Apps], Opts) ->
    print_applications(Apps, Opts);
print_applications([], _) ->
    ok.
%% Print "name-vsn" for one application; with 'full' in Opts also print
%% per-module details.
print_application({App, Info}, Opts) ->
    Vsn = get_value([vsn], Info),
    io:format(" * ~w-~s~n", [App, Vsn]),
    case proplists:get_bool(full, Opts) of
        true ->
            _ = [ begin
                      print_module(Minfo)
                  end || Minfo <- get_value([modules], Info) ],
            ok;
        false ->
            ok
    end.
%% Print each {Key, Value} environment pair on its own line.
print_environments([Env|Envs],Opts) ->
    print_environment(Env,Opts),
    print_environments(Envs,Opts);
print_environments([],_) ->
    ok.
%% Unset variables are stored as {Key, false} and deliberately skipped.
print_environment({_Key, false},_) -> ok;
print_environment({Key, Value},_) ->
    io:format(" - ~s = ~ts~n", [Key, Value]).
%% Print the find_modules_from_code/2 results for module M. Without
%% 'full' in Opts only the first occurrence is printed; with it, all.
print_modules_from_code(M, [Info|Ms], Opts) ->
    print_module_from_code(M, Info),
    case proplists:get_bool(full, Opts) of
        true -> print_modules_from_code(M, Ms, Opts);
        false -> ok
    end;
print_modules_from_code(_, [], _) ->
    ok.
%% Print one occurrence of module M. The first clause handles a bare
%% code-path hit, the second an application hit; both expect exactly one
%% matching module entry in the list.
print_module_from_code(M, {Path, [{M,ModInfo}]}) ->
    io:format(" from path \"~ts\" (no application):~n", [Path]),
    io:format("     - compiler: ~s~n", [get_value([compiler], ModInfo)]),
    io:format("     -      md5: ~s~n", [get_value([md5], ModInfo)]),
    io:format("     -   native: ~w~n", [get_value([native], ModInfo)]),
    io:format("     -   loaded: ~w~n", [get_value([loaded], ModInfo)]),
    ok;
print_module_from_code(M, {App,Vsn,Path,[{M,ModInfo}]}) ->
    io:format(" from path \"~ts\" (~w-~s):~n", [Path,App,Vsn]),
    io:format("     - compiler: ~s~n", [get_value([compiler], ModInfo)]),
    io:format("     -      md5: ~s~n", [get_value([md5], ModInfo)]),
    io:format("     -   native: ~w~n", [get_value([native], ModInfo)]),
    io:format("     -   loaded: ~w~n", [get_value([loaded], ModInfo)]),
    ok.
%% Print the detail lines for one {Mod, ModInfo} entry.
print_module({Mod, ModInfo}) ->
    io:format("   - ~w:~n", [Mod]),
    io:format("     - compiler: ~s~n", [get_value([compiler], ModInfo)]),
    io:format("     -      md5: ~s~n", [get_value([md5], ModInfo)]),
    io:format("     -   native: ~w~n", [get_value([native], ModInfo)]),
    io:format("     -   loaded: ~w~n", [get_value([loaded], ModInfo)]),
    ok.
%% get useful information from erlang:
%% Snapshot a fixed set of erlang:system_info/1 items for the report.
erlang_system_info() ->
    erlang_system_info([
        allocator,
        check_io,
        otp_release,
        port_limit,
        process_limit,
        % procs, % not needed
        smp_support,
        system_version,
        system_architecture,
        threads,
        thread_pool_size,
        {wordsize,internal},
        {wordsize,external},
        {cpu_topology, defined},
        {cpu_topology, detected},
        scheduler_bind_type,
        scheduler_bindings,
        compat_rel,
        schedulers_state,
        build_type,
        logical_processors,
        logical_processors_online,
        logical_processors_available,
        driver_version,
        emu_args,
        ethread_info,
        beam_jump_table,
        taints
    ]).
%% Pair each requested item with its erlang:system_info/1 result,
%% preserving the requested order.
erlang_system_info(Types) ->
    [{Type, erlang:system_info(Type)} || Type <- Types].
%% get known useful erts environment
%% Collect ERTS-related environment variables: a fixed allowlist of names
%% known to influence the runtime system, heart, run_erl and the built-in
%% drivers. Unset variables are reported as {Key, false}.
os_getenv_erts_specific() ->
    os_getenv_erts_specific([
        "BINDIR",
        "DIALYZER_EMULATOR",
        "CERL_DETACHED_PROG",
        "EMU",
        "ERL_CONSOLE_MODE",
        "ERL_CRASH_DUMP",
        "ERL_CRASH_DUMP_NICE",
        "ERL_CRASH_DUMP_SECONDS",
        "ERL_EPMD_PORT",
        "ERL_EMULATOR_DLL",
        "ERL_FULLSWEEP_AFTER",
        "ERL_LIBS",
        "ERL_MALLOC_LIB",
        "ERL_MAX_PORTS",
        "ERL_MAX_ETS_TABLES",
        "ERL_NO_VFORK",
        "ERL_NO_KERNEL_POLL",
        "ERL_THREAD_POOL_SIZE",
        "ERLC_EMULATOR",
        "ESCRIPT_EMULATOR",
        "HOME",
        "HOMEDRIVE",
        "HOMEPATH",
        "LANG",
        "LC_ALL",
        "LC_CTYPE",
        "PATH",
        "PROGNAME",
        "RELDIR",
        "ROOTDIR",
        "TERM",
        "USER", %% NOTE(review): restored from a corrupted entry (" ") -- confirm against upstream
        %% heart
        "COMSPEC",
        "HEART_COMMAND",
        %% run_erl
        "RUN_ERL_LOG_ALIVE_MINUTES",
        "RUN_ERL_LOG_ACTIVITY_MINUTES",
        "RUN_ERL_LOG_ALIVE_FORMAT",
        "RUN_ERL_LOG_ALIVE_IN_UTC",
        "RUN_ERL_LOG_GENERATIONS",
        "RUN_ERL_LOG_MAXSIZE",
        "RUN_ERL_DISABLE_FLOWCNTRL",
        %% driver related
        "CALLER_DRV_USE_OUTPUTV",
        "ERL_INET_GETHOST_DEBUG",
        "ERL_EFILE_THREAD_SHORT_CIRCUIT",
        "ERL_WINDOW_TITLE",
        "ERL_ABORT_ON_FAILURE",
        "TTYSL_DEBUG_LOG"
    ]).
%% Pair each variable name with os:getenv/1's result (false when unset).
os_getenv_erts_specific([]) -> [];
os_getenv_erts_specific([Key|Keys]) ->
    [{Key, os:getenv(Key)}|os_getenv_erts_specific(Keys)].
%% Split an "KEY=VALUE" environment string on the first '=' into
%% {Key, Value}. A string with no '=' is returned unchanged (should not
%% happen for os:getenv/0 output).
split_env(Env) ->
    case lists:splitwith(fun (C) -> C =/= $= end, Env) of
        {Key, [$= | Value]} -> {Key, Value};
        {NoEquals, []} -> NoEquals
    end.
%% get applications
%% Build the report's code section by walking the code path in order.
code() ->
    % order is important
    get_code_from_paths(code:get_path()).
%% Classify each code path: directories holding a .app file become
%% {application, ...} entries, all others plain {code, ...} entries.
get_code_from_paths([]) -> [];
get_code_from_paths([Path|Paths]) ->
    case is_application_path(Path) of
        true ->
            [{application, get_application_from_path(Path)}|get_code_from_paths(Paths)];
        false ->
            [{code, [
                {path, Path},
                {modules, get_modules_from_path(Path)}
            ]}|get_code_from_paths(Paths)]
    end.
%% A directory counts as an application directory when it contains at
%% least one .app file.
is_application_path(Path) ->
    filelib:wildcard(filename:join(Path, "*.app")) =/= [].
%% Read the first .app file in Path and assemble the application entry
%% for the report. NOTE(review): a malformed or unreadable .app file
%% makes file:consult/1 fall through the case and crash the caller —
%% presumably acceptable for report generation; confirm.
get_application_from_path(Path) ->
    [Appfile|_] = filelib:wildcard(filename:join(Path, "*.app")),
    case file:consult(Appfile) of
        {ok, [{application, App, Info}]} ->
            {App, [
                {description, proplists:get_value(description, Info, [])},
                {vsn, proplists:get_value(vsn, Info, [])},
                {path, Path},
                {runtime_dependencies,
                 proplists:get_value(runtime_dependencies, Info, [])},
                {modules, get_modules_from_path(Path)}
            ]}
    end.
%% Describe every .beam file in Path: loaded state, native-code status,
%% compiler version and MD5 checksum of the beam contents.
get_modules_from_path(Path) ->
    [
        begin
            {ok,{Mod, Md5}} = beam_lib:md5(Beam),
            %% code:is_loaded/1 returns {file, _} when loaded.
            Loaded = case code:is_loaded(Mod) of
                false -> false;
                _ -> true
            end,
            {Mod, [
                {loaded, Loaded},
                {native, beam_is_native_compiled(Beam)},
                {compiler, get_compiler_version(Beam)},
                {md5, hexstring(Md5)}
            ]}
        end || Beam <- filelib:wildcard(filename:join(Path, "*.beam"))
    ].
%% Render a binary as a lowercase hexadecimal string, two digits per byte.
hexstring(Bin) when is_binary(Bin) ->
    lists:flatten([[hex_digit(B div 16), hex_digit(B rem 16)] || <<B>> <= Bin]).

%% Map a nibble (0..15) to its lowercase hex character.
hex_digit(D) when D >= 0, D =< 9 -> $0 + D;
hex_digit(D) when D >= 10, D =< 15 -> $a + D - 10.
%% inspect beam files for information
%% Read the compiler version string from the beam's compile_info chunk;
%% undefined when the chunk is absent or the file is unreadable.
get_compiler_version(Beam) ->
    case beam_lib:chunks(Beam, [compile_info]) of
        {ok,{_,[{compile_info, Info}]}} ->
            proplists:get_value(version, Info);
        _ -> undefined
    end.
%% we don't know the specific chunk names of native code
%% we don't want to load the code to check it
%% Inspect the beam's chunk list for HiPE native-code chunks without
%% loading the module. Returns false, {true, Arch} for a single
%% architecture, or {true, [Arch]} if several native chunks are present.
beam_is_native_compiled(Beam) ->
    Chunks = get_value([chunks], beam_lib:info(Beam)),
    case check_known_hipe_chunks(Chunks) of
        [] -> false;
        [Arch] -> {true, Arch};
        Archs -> {true, Archs}
    end.
%% Filter a beam_lib chunk list ({Tag, Offset, Size} triples) down to the
%% architectures of any recognized HiPE native-code chunks.
check_known_hipe_chunks([{Tag,_,_}|Cs]) ->
    case is_chunk_tag_hipe_arch(Tag) of
        false -> check_known_hipe_chunks(Cs);
        {true, Arch} -> [Arch|check_known_hipe_chunks(Cs)]
    end;
check_known_hipe_chunks([]) -> [].
%% these values are taken from hipe_unified_loader
%% perhaps these should be exported in that module?
-define(HS8P_TAG,"HS8P").
-define(HPPC_TAG,"HPPC").
-define(HP64_TAG,"HP64").
-define(HARM_TAG,"HARM").
-define(HX86_TAG,"HX86").
-define(HA64_TAG,"HA64").

%% Map a beam chunk tag onto {true, Arch} when it names a known HiPE
%% native-code chunk, or false for any other chunk tag.
%% NOTE(review): architecture atoms follow HiPE naming (cf.
%% erlang:system_info(hipe_architecture)) — confirm against upstream.
is_chunk_tag_hipe_arch(Tag) ->
    case Tag of
        ?HA64_TAG -> {true, amd64};      %% HiPE, x86_64, (implicit: 64-bit, Unix)
        ?HARM_TAG -> {true, arm};        %% HiPE, arm, v5 (implicit: 32-bit, Linux)
        ?HPPC_TAG -> {true, powerpc};    %% HiPE, PowerPC (implicit: 32-bit, Linux)
        ?HP64_TAG -> {true, ppc64};      %% HiPE, ppc64 (implicit: 64-bit, Linux)
        ?HS8P_TAG -> {true, ultrasparc}; %% HiPE, SPARC, V8+ (implicit: 32-bit)
        ?HX86_TAG -> {true, x86};        %% HiPE, x86 (implicit: 32-bit)
        %% Future: ?HSV9_TAG -> {true, sparc64};   %% HiPE, SPARC, V9 (implicit: 64-bit)
        %% Future: ?HW32_TAG -> {true, x86_win32}; %% HiPE, x86, Win32
        _ -> false
    end.
%% Capture the dynamic libraries linked into the running emulator binary
%% using otool (Darwin) or ldd (other unices); result is raw shell output.
%% NOTE(review): assumes BINDIR is set (true inside a normal ERTS start);
%% os:getenv/1 returning false would crash filename:join/1 — confirm.
get_dynamic_libraries() ->
    Beam = filename:join([os:getenv("BINDIR"),get_beam_name()]),
    case os:type() of
        {unix, darwin} -> os:cmd("otool -L " ++ Beam);
        _ -> os:cmd("ldd " ++ Beam)
    end.
%% Reconstruct the emulator executable name ("beam", "beam.smp",
%% "beam.debug.smp", ...) from build type, SMP support and the EMU
%% environment variable.
get_beam_name() ->
    Type = case erlang:system_info(build_type) of
        opt -> "";
        TypeName -> "." ++ atom_to_list(TypeName)
    end,
    Flavor = case erlang:system_info(smp_support) of
        false -> "";
        true -> ".smp"
    end,
    Beam = case os:getenv("EMU") of
        false -> "beam";
        Value -> Value
    end,
    Beam ++ Type ++ Flavor.
%% Check runtime dependencies...
%% Parse a dotted version string ("1.2.3") into an integer tuple
%% ({1,2,3}); the tuple arity follows the number of components.
vsnstr2vsn(VsnStr) ->
    list_to_tuple([list_to_integer(Part)
                   || Part <- string:tokens(VsnStr, ".")]).
%% Convert runtime-dependency strings ("kernel-3.0") into
%% {App :: atom(), Vsn :: tuple()} pairs, preserving order.
rtdepstrs2rtdeps(RTDepStrs) ->
    [begin
         [AppStr, VsnStr] = string:tokens(RTDep, "-"),
         {list_to_atom(AppStr), vsnstr2vsn(VsnStr)}
     end || RTDep <- RTDepStrs].
%% Build a gb_tree mapping App -> {Vsn, RuntimeDeps} for every readable
%% application. Apps whose .app file cannot be found, parsed or whose
%% version string is malformed are silently skipped (the catch-all).
build_app_table([], AppTab) ->
    AppTab;
build_app_table([App | Apps], AppTab0) ->
    AppTab1 = try
                  %% We may have multiple application versions installed
                  %% of the same application! It is therefore important
                  %% to look up the application version that actually will
                  %% be used via code server.
                  AppFile = code:where_is_file(atom_to_list(App) ++ ".app"),
                  {ok, [{application, App, Info}]} = file:consult(AppFile),
                  VsnStr = proplists:get_value(vsn, Info),
                  Vsn = vsnstr2vsn(VsnStr),
                  RTDepStrs = proplists:get_value(runtime_dependencies,
                                                  Info, []),
                  RTDeps = rtdepstrs2rtdeps(RTDepStrs),
                  gb_trees:insert(App, {Vsn, RTDeps}, AppTab0)
              catch
                  _ : _ ->
                      AppTab0
              end,
    build_app_table(Apps, AppTab1).
%% True when version tuple Vsn satisfies minimum requirement VsnReq.
%% Clause order is the algorithm: exact match first; then 1- and 2-tuples
%% are normalized to 3-tuples by zero-padding; then lexicographic
%% major/minor/patch comparison; tuples longer than three elements fall
%% through to the general-prefix comparison.
meets_min_req(Vsn, Vsn) ->
    true;
meets_min_req({X}, VsnReq) ->
    meets_min_req({X, 0, 0}, VsnReq);
meets_min_req({X, Y}, VsnReq) ->
    meets_min_req({X, Y, 0}, VsnReq);
meets_min_req(Vsn, {X}) ->
    meets_min_req(Vsn, {X, 0, 0});
meets_min_req(Vsn, {X, Y}) ->
    meets_min_req(Vsn, {X, Y, 0});
meets_min_req({X, _Y, _Z}, {XReq, _YReq, _ZReq}) when X > XReq ->
    true;
meets_min_req({X, Y, _Z}, {X, YReq, _ZReq}) when Y > YReq ->
    true;
meets_min_req({X, Y, Z}, {X, Y, ZReq}) when Z > ZReq ->
    true;
meets_min_req({_X, _Y, _Z}, {_XReq, _YReq, _ZReq}) ->
    false;
meets_min_req(Vsn, VsnReq) ->
    gp_meets_min_req(mk_gp_vsn_list(Vsn), mk_gp_vsn_list(VsnReq)).
%% General-prefix comparison for version lists longer than three
%% elements (produced by mk_gp_vsn_list/1). True when the first three
%% components already decide it, otherwise the branch tails are compared
%% element by element.
gp_meets_min_req([X, Y, Z | _Vs], [X, Y, Z]) ->
    true;
gp_meets_min_req([X, Y, Z | _Vs], [XReq, YReq, ZReq]) ->
    meets_min_req({X, Y, Z}, {XReq, YReq, ZReq});
gp_meets_min_req([X, Y, Z | Vs], [X, Y, Z | VReqs]) ->
    gp_meets_min_req_tail(Vs, VReqs);
gp_meets_min_req(_Vsn, _VReq) ->
    %% Versions on different version branches, i.e., the minimum
    %% required functionality is not included in Vsn.
    false.

%% Compare the remaining components of two versions on the same branch.
gp_meets_min_req_tail([V | Vs], [V | VReqs]) ->
    gp_meets_min_req_tail(Vs, VReqs);
gp_meets_min_req_tail([], []) ->
    true;
gp_meets_min_req_tail([_V | _Vs], []) ->
    true;
gp_meets_min_req_tail([V | _Vs], [VReq]) when V > VReq ->
    true;
gp_meets_min_req_tail(_Vs, _VReqs) ->
    %% Versions on different version branches, i.e., the minimum
    %% required functionality is not included in Vsn.
    false.
%% Normalize a version tuple (arity >= 3) into a list whose tail has
%% trailing zeroes stripped, for general-prefix comparison.
mk_gp_vsn_list(Vsn) ->
    [X, Y, Z | Tail] = tuple_to_list(Vsn),
    [X, Y, Z | remove_trailing_zeroes(Tail)].
%% Strip the trailing run of zeroes from a list of version components;
%% interior zeroes are preserved.
remove_trailing_zeroes(Vs) ->
    lists:reverse(lists:dropwhile(fun (V) -> V =:= 0 end,
                                  lists:reverse(Vs))).
%% Format an {App, Vsn} pair as a flat "app-X.Y.Z" string.
mk_app_vsn_str({App, Vsn}) ->
    mk_app_vsn_str(App, Vsn).

%% Format an application name and version tuple as "app-X.Y.Z".
mk_app_vsn_str(App, Vsn) ->
    Parts = [integer_to_list(Part) || Part <- tuple_to_list(Vsn)],
    lists:flatten([atom_to_list(App), $-, string:join(Parts, ".")]).
%% The application versions shipped with OTP 17.0, as an orddict keyed by
%% application name. Used as the minimum acceptable versions during the
%% sanity check.
otp_17_0_vsns_orddict() ->
    [{asn1,{3,0}},
     {common_test,{1,8}},
     {compiler,{5,0}},
     {cosEvent,{2,1,15}},
     {cosEventDomain,{1,1,14}},
     {cosFileTransfer,{1,1,16}},
     {cosNotification,{1,1,21}},
     {cosProperty,{1,1,17}},
     {cosTime,{1,1,14}},
     {cosTransactions,{1,2,14}},
     {crypto,{3,3}},
     {debugger,{4,0}},
     {dialyzer,{2,7}},
     {diameter,{1,6}},
     {edoc,{0,7,13}},
     {eldap,{1,0,3}},
     {erl_docgen,{0,3,5}},
     {erl_interface,{3,7,16}},
     {erts,{6,0}},
     {et,{1,5}},
     {eunit,{2,2,7}},
     {gs,{1,5,16}},
     {hipe,{3,10,3}},
     {ic,{4,3,5}},
     {inets,{5,10}},
     {jinterface,{1,5,9}},
     {kernel,{3,0}},
     {megaco,{3,17,1}},
     {mnesia,{4,12}},
     {observer,{2,0}},
     {odbc,{2,10,20}},
     {orber,{3,6,27}},
     {os_mon,{2,2,15}},
     {ose,{1,0}},
     {otp_mibs,{1,0,9}},
     {parsetools,{2,0,11}},
     {percept,{0,8,9}},
     {public_key,{0,22}},
     {reltool,{0,6,5}},
     {runtime_tools,{1,8,14}},
     {sasl,{2,4}},
     {snmp,{4,25,1}},
     {ssh,{3,0,1}},
     {ssl,{5,3,4}},
     {stdlib,{2,0}},
     {syntax_tools,{1,6,14}},
     {test_server,{3,7}},
     {tools,{2,6,14}},
     {typer,{0,9,6}},
     {webtool,{0,8,10}},
     {wx,{1,2}},
     {xmerl,{1,3,7}}].
%% The OTP 17.0 version table as a gb_tree for fast lookup.
otp_17_0_vsns_tab() ->
    gb_trees:from_orddict(otp_17_0_vsns_orddict()).
%% True when App is present in AppTab at a version meeting DepVsn;
%% an absent application counts as an unmet dependency (false).
check_runtime_dependency({App, DepVsn}, AppTab) ->
    case gb_trees:lookup(App, AppTab) of
        none ->
            false;
        {value, {Vsn, _}} ->
            meets_min_req(Vsn, DepVsn)
    end.
%% Collect the sanity-check issues for one application: an invalid/missing
%% .app file, missing runtime dependencies, and (for OTP applications) a
%% version below the OTP 17.0 baseline. Returns a (possibly empty) list
%% of issue tuples.
check_runtime_dependencies(App, AppTab, OtpMinVsnTab) ->
    case gb_trees:lookup(App, AppTab) of
        none ->
            [{invalid_app_file, App}];
        {value, {Vsn, RTDeps}} ->
            %% Accumulate the formatted names of unmet dependencies.
            RTD = case lists:foldl(
                         fun (RTDep, Acc) ->
                                 case check_runtime_dependency(RTDep, AppTab) of
                                     true ->
                                         Acc;
                                     false ->
                                         [mk_app_vsn_str(RTDep) | Acc]
                                 end
                         end,
                         [],
                         RTDeps) of
                      [] ->
                          [];
                      MissingDeps ->
                          [{missing_runtime_dependencies,
                            mk_app_vsn_str(App, Vsn),
                            MissingDeps}]
                  end,
            %% Additionally require the OTP 17.0 minimum version when the
            %% app is in the baseline table.
            case gb_trees:lookup(App, OtpMinVsnTab) of
                none ->
                    RTD;
                {value, MinVsn} ->
                    case meets_min_req(Vsn, MinVsn) of
                        true ->
                            RTD;
                        false ->
                            [{invalid_application_version,
                              mk_app_vsn_str(App, Vsn)} | RTD]
                    end
            end
    end.
%% Derive the application name (as an atom) from an .app file path.
app_file_to_app(AF) ->
    list_to_atom(filename:basename(AF, ".app")).

%% All applications found on the current code path, sorted and
%% deduplicated.
get_apps() ->
    get_apps(code:get_path(), []).

get_apps([], Apps) ->
    lists:usort(Apps);
get_apps([Path|Paths], Apps) ->
    case filelib:wildcard(filename:join(Path, "*.app")) of
        [] ->
            %% Not app or invalid app
            get_apps(Paths, Apps);
        [AppFile] ->
            get_apps(Paths, [app_file_to_app(AppFile) | Apps]);
        [_AppFile| _] = AppFiles ->
            %% Strange with multiple .app files... Lets put them
            %% all in the list and see what we get...
            %% BUG FIX: this branch used to return immediately, dropping
            %% all remaining code paths and skipping the final usort;
            %% keep traversing instead.
            get_apps(Paths, [app_file_to_app(AF) || AF <- AppFiles] ++ Apps)
    end.
%% Run the full sanity check: build the installed-application table and
%% fold the per-application issue lists into one list (empty when clean).
check_runtime_dependencies() ->
    OtpMinVsnTab = otp_17_0_vsns_tab(),
    Apps = get_apps(),
    AppTab = build_app_table(Apps, gb_trees:empty()),
    lists:foldl(fun (App, Acc) ->
                        case check_runtime_dependencies(App,
                                                        AppTab,
                                                        OtpMinVsnTab) of
                            [] -> Acc;
                            Issues -> Issues ++ Acc
                        end
                end,
                [],
                Apps).
%% End of runtime dependency checks
| null | https://raw.githubusercontent.com/wireless-net/erlang-nommu/79f32f81418e022d8ad8e0e447deaea407289926/lib/runtime_tools/src/system_information.erl | erlang | %CopyrightBegin%
compliance with the License. You should have received a copy of the
Erlang Public License along with this software. If not, it can be
retrieved online at /.
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and limitations
under the License.
%CopyrightEnd%
The main purpose of system_information is to aggregate all information
deemed useful for investigation, i.e. system_information:report/0.
The server and all other utilities surrounding this is for inspecting
reported values. Functions will be added to this as time goes by.
API
gen_server callbacks
change version if parsing of file changes
===================================================================
API
===================================================================
===================================================================
gen_server callbacks
===================================================================
===================================================================
===================================================================
handle report values
print information
procs, % not needed
get known useful erts environment
heart
run_erl
should not happen.
get applications
order is important
inspect beam files for information
we don't know the specific chunk names of native code
we don't want to load the code to check it
these values are taken from hipe_unified_loader
perhaps these should be exported in that module?
% HiPE , SPARC , V9 ( implicit : 64 - bit )
% HiPE , x86 , Win32
Check runtime dependencies...
We may have multiple application versions installed
of the same application! It is therefore important
to look up the application version that actually will
be used via code server.
Versions on different version branches, i.e., the minimum
Versions on different version branches, i.e., the minimum
Not app or invalid app
all in the list and see what we get...
End of runtime dependency checks | Copyright Ericsson AB 2013 . All Rights Reserved .
The contents of this file are subject to the Erlang Public License ,
Version 1.1 , ( the " License " ) ; you may not use this file except in
Software distributed under the License is distributed on an " AS IS "
-module(system_information).
-behaviour(gen_server).
-export([
report/0,
from_file/1,
to_file/1
]).
-export([
start/0, stop/0,
load_report/0, load_report/2,
applications/0, applications/1,
application/1, application/2,
environment/0, environment/1,
module/1, module/2,
modules/1,
sanity_check/0
]).
-export([
init/1,
handle_call/3,
handle_cast/2,
handle_info/2,
terminate/2,
code_change/3
]).
-define(SERVER, ?MODULE).
-define(REPORT_FILE_VSN, "1.0").
-record(state, {
report
}).
start() ->
gen_server:start({local, ?SERVER}, ?MODULE, [], []).
stop() ->
gen_server:call(?SERVER, stop).
load_report() -> load_report(data, report()).
load_report(file, File) -> load_report(data, from_file(File));
load_report(data, Report) ->
start(), gen_server:call(?SERVER, {load_report, Report}).
report() -> [
{init_arguments, init:get_arguments()},
{code_paths, code:get_path()},
{code, code()},
{system_info, erlang_system_info()},
{erts_compile_info, erlang:system_info(compile_info)},
{beam_dynamic_libraries, get_dynamic_libraries()},
{environment_erts, os_getenv_erts_specific()},
{environment, [split_env(Env) || Env <- os:getenv()]},
{sanity_check, sanity_check()}
].
-spec to_file(FileName) -> ok | {error, Reason} when
FileName :: file:name_all(),
Reason :: file:posix() | badarg | terminated | system_limit.
to_file(File) ->
file:write_file(File, iolist_to_binary([
io_lib:format("{system_information_version, ~p}.~n", [
?REPORT_FILE_VSN
]),
io_lib:format("{system_information, ~p}.~n", [
report()
])
])).
from_file(File) ->
case file:consult(File) of
{ok, Data} ->
case get_value([system_information_version], Data) of
?REPORT_FILE_VSN ->
get_value([system_information], Data);
Vsn ->
erlang:error({unknown_version, Vsn})
end;
_ ->
erlang:error(bad_report_file)
end.
applications() -> applications([]).
applications(Opts) when is_list(Opts) ->
gen_server:call(?SERVER, {applications, Opts}).
application(App) when is_atom(App) -> application(App, []).
application(App, Opts) when is_atom(App), is_list(Opts) ->
gen_server:call(?SERVER, {application, App, Opts}).
environment() -> environment([]).
environment(Opts) when is_list(Opts) ->
gen_server:call(?SERVER, {environment, Opts}).
module(M) when is_atom(M) -> module(M, []).
module(M, Opts) when is_atom(M), is_list(Opts) ->
gen_server:call(?SERVER, {module, M, Opts}).
modules(Opt) when is_atom(Opt) ->
gen_server:call(?SERVER, {modules, Opt}).
-spec sanity_check() -> ok | {failed, Failures} when
Application :: atom(),
ApplicationVersion :: string(),
MissingRuntimeDependencies :: {missing_runtime_dependencies,
ApplicationVersion,
[ApplicationVersion]},
InvalidApplicationVersion :: {invalid_application_version,
ApplicationVersion},
InvalidAppFile :: {invalid_app_file, Application},
Failure :: MissingRuntimeDependencies
| InvalidApplicationVersion
| InvalidAppFile,
Failures :: [Failure].
sanity_check() ->
case check_runtime_dependencies() of
[] -> ok;
Issues -> {failed, Issues}
end.
init([]) ->
{ok, #state{}}.
handle_call(stop, _From, S) ->
{stop, normal, ok, S};
handle_call({load_report, Report}, _From, S) ->
Version = get_value([system_info, system_version], Report),
io:format("Loaded report from system version: ~s~n", [Version]),
{reply, ok, S#state{ report = Report }};
handle_call(_Req, _From, #state{ report = undefined } = S) ->
{reply, {error, report_not_loaded}, S};
handle_call({applications, Opts}, _From, #state{ report = Report } = S) ->
ok = print_applications(get_value([code], Report), Opts),
{reply, ok, S};
handle_call({application, App, Opts}, _From, #state{ report = Report } = S) ->
Data = get_value([App], [AppInfo||{application, AppInfo}<-get_value([code], Report)]),
ok = print_application({App, Data}, Opts),
{reply, ok, S};
handle_call({environment, Opts}, _From, #state{ report = Report } = S) ->
Choices = case proplists:get_bool(full, Opts) of
true -> [environment];
false -> [environment_erts]
end,
ok = print_environments(get_value(Choices, Report), Opts),
{reply, ok, S};
handle_call({module, M, Opts}, _From, #state{ report = Report } = S) ->
Mods = find_modules_from_code(M, get_value([code], Report)),
print_modules_from_code(M, Mods, Opts),
{reply, ok, S};
handle_call({modules, native}, _From, #state{ report = Report } = S) ->
Codes = get_native_modules_from_code(get_value([code],Report)),
io:format("~p~n", [Codes]),
{reply, ok, S};
handle_call(_Request, _From, State) ->
{reply, ok, State}.
handle_cast(_Msg, State) ->
{noreply, State}.
handle_info(_Info, State) ->
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
Internal functions
get_value([], Data) -> Data;
get_value([K|Ks], Data) ->
get_value(Ks, proplists:get_value(K, Data, [])).
find_modules_from_code(M, [{code, Info}|Codes]) ->
case find_modules(M, get_value([modules], Info)) of
[] -> find_modules_from_code(M, Codes);
Mods ->
Path = get_value([path], Info),
[{Path, Mods}|find_modules_from_code(M, Codes)]
end;
find_modules_from_code(M, [{application, {App, Info}}|Codes]) ->
case find_modules(M, get_value([modules], Info)) of
[] -> find_modules_from_code(M, Codes);
Mods ->
Path = get_value([path], Info),
Vsn = get_value([vsn], Info),
[{App, Vsn, Path, Mods}|find_modules_from_code(M, Codes)]
end;
find_modules_from_code(_, []) -> [].
find_modules(M, [{M, _}=Info|Ms]) -> [Info|find_modules(M,Ms)];
find_modules(M, [_|Ms]) -> find_modules(M, Ms);
find_modules(_, []) -> [].
get_native_modules_from_code([{application, {App, Info}}|Cs]) ->
case get_native_modules(get_value([modules], Info)) of
[] -> get_native_modules_from_code(Cs);
Mods ->
Path = get_value([path], Info),
Vsn = get_value([vsn], Info),
[{App, Vsn, Path, Mods}|get_native_modules_from_code(Cs)]
end;
get_native_modules_from_code([{code, Info}|Cs]) ->
case get_native_modules(get_value([modules], Info)) of
[] -> get_native_modules_from_code(Cs);
Mods ->
Path = get_value([path], Info),
[{Path, Mods}|get_native_modules_from_code(Cs)]
end;
get_native_modules_from_code([]) -> [].
get_native_modules([]) -> [];
get_native_modules([{Mod, Info}|Ms]) ->
case proplists:get_value(native, Info) of
false -> get_native_modules(Ms);
_ -> [Mod|get_native_modules(Ms)]
end.
print_applications([{application, App}|Apps], Opts) ->
print_application(App, Opts),
print_applications(Apps, Opts);
print_applications([{code,_}|Apps], Opts) ->
print_applications(Apps, Opts);
print_applications([], _) ->
ok.
print_application({App, Info}, Opts) ->
Vsn = get_value([vsn], Info),
io:format(" * ~w-~s~n", [App, Vsn]),
case proplists:get_bool(full, Opts) of
true ->
_ = [ begin
print_module(Minfo)
end || Minfo <- get_value([modules], Info) ],
ok;
false ->
ok
end.
print_environments([Env|Envs],Opts) ->
print_environment(Env,Opts),
print_environments(Envs,Opts);
print_environments([],_) ->
ok.
print_environment({_Key, false},_) -> ok;
print_environment({Key, Value},_) ->
io:format(" - ~s = ~ts~n", [Key, Value]).
print_modules_from_code(M, [Info|Ms], Opts) ->
print_module_from_code(M, Info),
case proplists:get_bool(full, Opts) of
true -> print_modules_from_code(M, Ms, Opts);
false -> ok
end;
print_modules_from_code(_, [], _) ->
ok.
print_module_from_code(M, {Path, [{M,ModInfo}]}) ->
io:format(" from path \"~ts\" (no application):~n", [Path]),
io:format(" - compiler: ~s~n", [get_value([compiler], ModInfo)]),
io:format(" - md5: ~s~n", [get_value([md5], ModInfo)]),
io:format(" - native: ~w~n", [get_value([native], ModInfo)]),
io:format(" - loaded: ~w~n", [get_value([loaded], ModInfo)]),
ok;
print_module_from_code(M, {App,Vsn,Path,[{M,ModInfo}]}) ->
io:format(" from path \"~ts\" (~w-~s):~n", [Path,App,Vsn]),
io:format(" - compiler: ~s~n", [get_value([compiler], ModInfo)]),
io:format(" - md5: ~s~n", [get_value([md5], ModInfo)]),
io:format(" - native: ~w~n", [get_value([native], ModInfo)]),
io:format(" - loaded: ~w~n", [get_value([loaded], ModInfo)]),
ok.
print_module({Mod, ModInfo}) ->
io:format(" - ~w:~n", [Mod]),
io:format(" - compiler: ~s~n", [get_value([compiler], ModInfo)]),
io:format(" - md5: ~s~n", [get_value([md5], ModInfo)]),
io:format(" - native: ~w~n", [get_value([native], ModInfo)]),
io:format(" - loaded: ~w~n", [get_value([loaded], ModInfo)]),
ok.
get useful information from erlang :
erlang_system_info() ->
erlang_system_info([
allocator,
check_io,
otp_release,
port_limit,
process_limit,
smp_support,
system_version,
system_architecture,
threads,
thread_pool_size,
{wordsize,internal},
{wordsize,external},
{cpu_topology, defined},
{cpu_topology, detected},
scheduler_bind_type,
scheduler_bindings,
compat_rel,
schedulers_state,
build_type,
logical_processors,
logical_processors_online,
logical_processors_available,
driver_version,
emu_args,
ethread_info,
beam_jump_table,
taints
]).
erlang_system_info([]) -> [];
erlang_system_info([Type|Types]) ->
[{Type, erlang:system_info(Type)}|erlang_system_info(Types)].
os_getenv_erts_specific() ->
os_getenv_erts_specific([
"BINDIR",
"DIALYZER_EMULATOR",
"CERL_DETACHED_PROG",
"EMU",
"ERL_CONSOLE_MODE",
"ERL_CRASH_DUMP",
"ERL_CRASH_DUMP_NICE",
"ERL_CRASH_DUMP_SECONDS",
"ERL_EPMD_PORT",
"ERL_EMULATOR_DLL",
"ERL_FULLSWEEP_AFTER",
"ERL_LIBS",
"ERL_MALLOC_LIB",
"ERL_MAX_PORTS",
"ERL_MAX_ETS_TABLES",
"ERL_NO_VFORK",
"ERL_NO_KERNEL_POLL",
"ERL_THREAD_POOL_SIZE",
"ERLC_EMULATOR",
"ESCRIPT_EMULATOR",
"HOME",
"HOMEDRIVE",
"HOMEPATH",
"LANG",
"LC_ALL",
"LC_CTYPE",
"PATH",
"PROGNAME",
"RELDIR",
"ROOTDIR",
"TERM",
" " ,
"COMSPEC",
"HEART_COMMAND",
"RUN_ERL_LOG_ALIVE_MINUTES",
"RUN_ERL_LOG_ACTIVITY_MINUTES",
"RUN_ERL_LOG_ALIVE_FORMAT",
"RUN_ERL_LOG_ALIVE_IN_UTC",
"RUN_ERL_LOG_GENERATIONS",
"RUN_ERL_LOG_MAXSIZE",
"RUN_ERL_DISABLE_FLOWCNTRL",
driver
"CALLER_DRV_USE_OUTPUTV",
"ERL_INET_GETHOST_DEBUG",
"ERL_EFILE_THREAD_SHORT_CIRCUIT",
"ERL_WINDOW_TITLE",
"ERL_ABORT_ON_FAILURE",
"TTYSL_DEBUG_LOG"
]).
os_getenv_erts_specific([]) -> [];
os_getenv_erts_specific([Key|Keys]) ->
[{Key, os:getenv(Key)}|os_getenv_erts_specific(Keys)].
split_env(Env) ->
split_env(Env, []).
split_env([$=|Vs], Key) -> {lists:reverse(Key), Vs};
split_env([I|Vs], Key) -> split_env(Vs, [I|Key]);
code() ->
get_code_from_paths(code:get_path()).
get_code_from_paths([]) -> [];
get_code_from_paths([Path|Paths]) ->
case is_application_path(Path) of
true ->
[{application, get_application_from_path(Path)}|get_code_from_paths(Paths)];
false ->
[{code, [
{path, Path},
{modules, get_modules_from_path(Path)}
]}|get_code_from_paths(Paths)]
end.
is_application_path(Path) ->
case filelib:wildcard(filename:join(Path, "*.app")) of
[] -> false;
_ -> true
end.
get_application_from_path(Path) ->
[Appfile|_] = filelib:wildcard(filename:join(Path, "*.app")),
case file:consult(Appfile) of
{ok, [{application, App, Info}]} ->
{App, [
{description, proplists:get_value(description, Info, [])},
{vsn, proplists:get_value(vsn, Info, [])},
{path, Path},
{runtime_dependencies,
proplists:get_value(runtime_dependencies, Info, [])},
{modules, get_modules_from_path(Path)}
]}
end.
get_modules_from_path(Path) ->
[
begin
{ok,{Mod, Md5}} = beam_lib:md5(Beam),
Loaded = case code:is_loaded(Mod) of
false -> false;
_ -> true
end,
{Mod, [
{loaded, Loaded},
{native, beam_is_native_compiled(Beam)},
{compiler, get_compiler_version(Beam)},
{md5, hexstring(Md5)}
]}
end || Beam <- filelib:wildcard(filename:join(Path, "*.beam"))
].
hexstring(Bin) when is_binary(Bin) ->
lists:flatten([io_lib:format("~2.16.0b", [V]) || <<V>> <= Bin]).
get_compiler_version(Beam) ->
case beam_lib:chunks(Beam, [compile_info]) of
{ok,{_,[{compile_info, Info}]}} ->
proplists:get_value(version, Info);
_ -> undefined
end.
beam_is_native_compiled(Beam) ->
Chunks = get_value([chunks], beam_lib:info(Beam)),
case check_known_hipe_chunks(Chunks) of
[] -> false;
[Arch] -> {true, Arch};
Archs -> {true, Archs}
end.
check_known_hipe_chunks([{Tag,_,_}|Cs]) ->
case is_chunk_tag_hipe_arch(Tag) of
false -> check_known_hipe_chunks(Cs);
{true, Arch} -> [Arch|check_known_hipe_chunks(Cs)]
end;
check_known_hipe_chunks([]) -> [].
-define(HS8P_TAG,"HS8P").
-define(HPPC_TAG,"HPPC").
-define(HP64_TAG,"HP64").
-define(HARM_TAG,"HARM").
-define(HX86_TAG,"HX86").
-define(HA64_TAG,"HA64").
is_chunk_tag_hipe_arch(Tag) ->
case Tag of
HiPE , x86_64 , ( implicit : 64 - bit , Unix )
HiPE , arm , v5 ( implicit : 32 - bit , Linux )
HiPE , PowerPC ( implicit : 32 - bit , Linux )
HiPE , ppc64 ( implicit : 64 - bit , Linux )
HiPE , SPARC , V8 + ( implicit : 32 - bit )
_ -> false
end.
get_dynamic_libraries() ->
Beam = filename:join([os:getenv("BINDIR"),get_beam_name()]),
case os:type() of
{unix, darwin} -> os:cmd("otool -L " ++ Beam);
_ -> os:cmd("ldd " ++ Beam)
end.
get_beam_name() ->
Type = case erlang:system_info(build_type) of
opt -> "";
TypeName -> "." ++ atom_to_list(TypeName)
end,
Flavor = case erlang:system_info(smp_support) of
false -> "";
true -> ".smp"
end,
Beam = case os:getenv("EMU") of
false -> "beam";
Value -> Value
end,
Beam ++ Type ++ Flavor.
vsnstr2vsn(VsnStr) ->
list_to_tuple(lists:map(fun (Part) ->
list_to_integer(Part)
end,
string:tokens(VsnStr, "."))).
rtdepstrs2rtdeps([]) ->
[];
rtdepstrs2rtdeps([RTDep | RTDeps]) ->
[AppStr, VsnStr] = string:tokens(RTDep, "-"),
[{list_to_atom(AppStr), vsnstr2vsn(VsnStr)} | rtdepstrs2rtdeps(RTDeps)].
build_app_table([], AppTab) ->
AppTab;
build_app_table([App | Apps], AppTab0) ->
AppTab1 = try
AppFile = code:where_is_file(atom_to_list(App) ++ ".app"),
{ok, [{application, App, Info}]} = file:consult(AppFile),
VsnStr = proplists:get_value(vsn, Info),
Vsn = vsnstr2vsn(VsnStr),
RTDepStrs = proplists:get_value(runtime_dependencies,
Info, []),
RTDeps = rtdepstrs2rtdeps(RTDepStrs),
gb_trees:insert(App, {Vsn, RTDeps}, AppTab0)
catch
_ : _ ->
AppTab0
end,
build_app_table(Apps, AppTab1).
meets_min_req(Vsn, Vsn) ->
true;
meets_min_req({X}, VsnReq) ->
meets_min_req({X, 0, 0}, VsnReq);
meets_min_req({X, Y}, VsnReq) ->
meets_min_req({X, Y, 0}, VsnReq);
meets_min_req(Vsn, {X}) ->
meets_min_req(Vsn, {X, 0, 0});
meets_min_req(Vsn, {X, Y}) ->
meets_min_req(Vsn, {X, Y, 0});
meets_min_req({X, _Y, _Z}, {XReq, _YReq, _ZReq}) when X > XReq ->
true;
meets_min_req({X, Y, _Z}, {X, YReq, _ZReq}) when Y > YReq ->
true;
meets_min_req({X, Y, Z}, {X, Y, ZReq}) when Z > ZReq ->
true;
meets_min_req({_X, _Y, _Z}, {_XReq, _YReq, _ZReq}) ->
false;
meets_min_req(Vsn, VsnReq) ->
gp_meets_min_req(mk_gp_vsn_list(Vsn), mk_gp_vsn_list(VsnReq)).
gp_meets_min_req([X, Y, Z | _Vs], [X, Y, Z]) ->
true;
gp_meets_min_req([X, Y, Z | _Vs], [XReq, YReq, ZReq]) ->
meets_min_req({X, Y, Z}, {XReq, YReq, ZReq});
gp_meets_min_req([X, Y, Z | Vs], [X, Y, Z | VReqs]) ->
gp_meets_min_req_tail(Vs, VReqs);
gp_meets_min_req(_Vsn, _VReq) ->
required functionality is not included in Vsn .
false.
gp_meets_min_req_tail([V | Vs], [V | VReqs]) ->
gp_meets_min_req_tail(Vs, VReqs);
gp_meets_min_req_tail([], []) ->
true;
gp_meets_min_req_tail([_V | _Vs], []) ->
true;
gp_meets_min_req_tail([V | _Vs], [VReq]) when V > VReq ->
true;
gp_meets_min_req_tail(_Vs, _VReqs) ->
required functionality is not included in Vsn .
false.
mk_gp_vsn_list(Vsn) ->
[X, Y, Z | Tail] = tuple_to_list(Vsn),
[X, Y, Z | remove_trailing_zeroes(Tail)].
remove_trailing_zeroes([]) ->
[];
remove_trailing_zeroes([0 | Vs]) ->
case remove_trailing_zeroes(Vs) of
[] -> [];
NewVs -> [0 | NewVs]
end;
remove_trailing_zeroes([V | Vs]) ->
[V | remove_trailing_zeroes(Vs)].
mk_app_vsn_str({App, Vsn}) ->
mk_app_vsn_str(App, Vsn).
mk_app_vsn_str(App, Vsn) ->
VsnList = tuple_to_list(Vsn),
lists:flatten([atom_to_list(App),
$-,
integer_to_list(hd(VsnList)),
lists:map(fun (Part) ->
[$., integer_to_list(Part)]
end, tl(VsnList))]).
otp_17_0_vsns_orddict() ->
[{asn1,{3,0}},
{common_test,{1,8}},
{compiler,{5,0}},
{cosEvent,{2,1,15}},
{cosEventDomain,{1,1,14}},
{cosFileTransfer,{1,1,16}},
{cosNotification,{1,1,21}},
{cosProperty,{1,1,17}},
{cosTime,{1,1,14}},
{cosTransactions,{1,2,14}},
{crypto,{3,3}},
{debugger,{4,0}},
{dialyzer,{2,7}},
{diameter,{1,6}},
{edoc,{0,7,13}},
{eldap,{1,0,3}},
{erl_docgen,{0,3,5}},
{erl_interface,{3,7,16}},
{erts,{6,0}},
{et,{1,5}},
{eunit,{2,2,7}},
{gs,{1,5,16}},
{hipe,{3,10,3}},
{ic,{4,3,5}},
{inets,{5,10}},
{jinterface,{1,5,9}},
{kernel,{3,0}},
{megaco,{3,17,1}},
{mnesia,{4,12}},
{observer,{2,0}},
{odbc,{2,10,20}},
{orber,{3,6,27}},
{os_mon,{2,2,15}},
{ose,{1,0}},
{otp_mibs,{1,0,9}},
{parsetools,{2,0,11}},
{percept,{0,8,9}},
{public_key,{0,22}},
{reltool,{0,6,5}},
{runtime_tools,{1,8,14}},
{sasl,{2,4}},
{snmp,{4,25,1}},
{ssh,{3,0,1}},
{ssl,{5,3,4}},
{stdlib,{2,0}},
{syntax_tools,{1,6,14}},
{test_server,{3,7}},
{tools,{2,6,14}},
{typer,{0,9,6}},
{webtool,{0,8,10}},
{wx,{1,2}},
{xmerl,{1,3,7}}].
otp_17_0_vsns_tab() ->
gb_trees:from_orddict(otp_17_0_vsns_orddict()).
check_runtime_dependency({App, DepVsn}, AppTab) ->
case gb_trees:lookup(App, AppTab) of
none ->
false;
{value, {Vsn, _}} ->
meets_min_req(Vsn, DepVsn)
end.
check_runtime_dependencies(App, AppTab, OtpMinVsnTab) ->
case gb_trees:lookup(App, AppTab) of
none ->
[{invalid_app_file, App}];
{value, {Vsn, RTDeps}} ->
RTD = case lists:foldl(
fun (RTDep, Acc) ->
case check_runtime_dependency(RTDep, AppTab) of
true ->
Acc;
false ->
[mk_app_vsn_str(RTDep) | Acc]
end
end,
[],
RTDeps) of
[] ->
[];
MissingDeps ->
[{missing_runtime_dependencies,
mk_app_vsn_str(App, Vsn),
MissingDeps}]
end,
case gb_trees:lookup(App, OtpMinVsnTab) of
none ->
RTD;
{value, MinVsn} ->
case meets_min_req(Vsn, MinVsn) of
true ->
RTD;
false ->
[{invalid_application_version,
mk_app_vsn_str(App, Vsn)} | RTD]
end
end
end.
app_file_to_app(AF) ->
list_to_atom(filename:basename(AF, ".app")).
get_apps() ->
get_apps(code:get_path(), []).
get_apps([], Apps) ->
lists:usort(Apps);
get_apps([Path|Paths], Apps) ->
case filelib:wildcard(filename:join(Path, "*.app")) of
[] ->
get_apps(Paths, Apps);
[AppFile] ->
get_apps(Paths, [app_file_to_app(AppFile) | Apps]);
[_AppFile| _] = AppFiles ->
Strange with .app files ... Lets put them
lists:map(fun (AF) ->
app_file_to_app(AF)
end, AppFiles) ++ Apps
end.
check_runtime_dependencies() ->
OtpMinVsnTab = otp_17_0_vsns_tab(),
Apps = get_apps(),
AppTab = build_app_table(Apps, gb_trees:empty()),
lists:foldl(fun (App, Acc) ->
case check_runtime_dependencies(App,
AppTab,
OtpMinVsnTab) of
[] -> Acc;
Issues -> Issues ++ Acc
end
end,
[],
Apps).
|
232a899d808513f7d361235cad2ddf5aa2ff45d96f58e522670338099ad21081 | monadbobo/ocaml-core | file_tail.mli | File_tail is useful for pulling data from a file that is being appended to by another
process . Creating a file tail returns the reader half of a pipe whose writer half is
populated by a background process that roughly does the following loop .
loop :
stat to find out if data is available
read data ( repeatedly [ open , seek , read , close ] until all data is read )
wait for some time
process. Creating a file tail returns the reader half of a pipe whose writer half is
populated by a background process that roughly does the following loop.
loop:
stat to find out if data is available
read data (repeatedly [ open, seek, read, close ] until all data is read)
wait for some time
*)
open Core.Std
open Import
module Error : sig
(* Errors are written to the pipe, and are nonrecoverable. After an error, the pipe
will always be closed. *)
type t =
* [ File_replaced ] occurs if the inode of the file changed and the file tail was
configured with [ ignore_inode_change = false ] .
configured with [ignore_inode_change = false]. *)
| File_replaced
* [ File_shrank ] occurs if [ stat ] detects that the length of the file decreases from
one call to the next .
one call to the next. *)
| File_shrank
(** [Read_failed] occurs if some aspect of the open-seek-read-close used to get data
fails. *)
| Read_failed of exn
(** [Stat_failed] occurs if [stat] fails. *)
| Stat_failed of exn
with sexp_of
val to_string_hum : t -> string
end
module Warning : sig
type t =
(** [Did_not_reach_eof_for span] occurs if it has been longer than
[eof_latency_tolerance] since [stat] detected that there is new data in the file and
the file tail processed all the new data. The [span] is how long it has been since
[stat] detected new data in the file. *)
| Did_not_reach_eof_for of Time.Span.t
* [ Reached_eof ] occurs whenever the file tail reaches the end of file , irrespective of
whether there has previously been a [ Did_not_reach_eof_for ] warning .
whether there has previously been a [Did_not_reach_eof_for] warning. *)
| Reached_eof
(** [Delayed_due_to_null_reads_for span] occurs when the file tail is unable to get data
from the file, because the data being read has null ('\000') characters. The span
is how long it has been attempting to read and been getting nulls. This warning
will only occur if [retry_null_reads = true]. This warning will be repeated until
the null reads stop. *)
| Delayed_due_to_null_reads_for of Time.Span.t
(** [No_longer_delayed_due_to_null_reads] occurs after a nonempty sequence of
[Delayed_due_to_null_reads_for] warnings, once the file tail gets a read that does
not contain null reads. *)
| No_longer_delayed_due_to_null_reads
with sexp_of
val to_string_hum : t -> string
end
module Update : sig
type t =
(** [Data string] contains a chunk of data from the file. If [break_on_lines], then
data will be a single line (without the terminating newline). *)
| Data of string
(** Warnings do not close the stream and whatever is reading can keep on doing so.
The [string] is the file name. *)
| Warning of string * Warning.t
(** Errors cause the stream to be closed. The [string] is the file name. *)
| Error of string * Error.t
with sexp_of
val to_string_hum : t -> string
end
* [ create file ] creates a [ File_tail.t ] that will immediately begin reading [ file ] , and
then will start the stat - read loop .
[ read_buf_len ] sets the size of the internal buffer used for making read system calls .
[ read_delay ] sets how long the stat - read loop waits each time after it reaches eof
before stat'ing again . Setting [ read_delay ] too low could cause unecessary load .
If [ retry_null_reads = true ] , then reads that return data with null ( ' \000 ' )
characters are ignored and cause the system to delay 0.2s and attempt the read again .
If [ retry_null_reads = false ] , then the file tail will process data with nulls just as
it would any other data .
If [ break_on_lines = true ] , the file tail will break data into lines on ' \n ' . If not ,
the fill tail will return chunks of data from the end of the file as they are
available .
If [ ignore_inode_change = true ] , the file tail will silently press on when the
[ file ] 's inode changes . If not , an inode change will cause the file tail to report an
error and stop . CIFS changes inodes of mounted files few times a day and we need
[ ignore_inode_change = true ] option to keep tailers watching files on it alive .
[ start_at ] determines the file position at which the file tail starts .
[ eof_latency_tolerance ] affects the [ Did_not_reach_eof_for ] warning .
[ null_read_tolerance ] determines how long the tailing must observe null reads
before it will report a [ Delayed_due_to_null_reads_for ] warning .
then will start the stat-read loop.
[read_buf_len] sets the size of the internal buffer used for making read system calls.
[read_delay] sets how long the stat-read loop waits each time after it reaches eof
before stat'ing again. Setting [read_delay] too low could cause unecessary load.
If [retry_null_reads = true], then reads that return data with null ('\000')
characters are ignored and cause the system to delay 0.2s and attempt the read again.
If [retry_null_reads = false], then the file tail will process data with nulls just as
it would any other data.
If [break_on_lines = true], the file tail will break data into lines on '\n'. If not,
the fill tail will return chunks of data from the end of the file as they are
available.
If [ignore_inode_change = true], the file tail will silently press on when the
[file]'s inode changes. If not, an inode change will cause the file tail to report an
error and stop. CIFS changes inodes of mounted files few times a day and we need
[ignore_inode_change = true] option to keep tailers watching files on it alive.
[start_at] determines the file position at which the file tail starts.
[eof_latency_tolerance] affects the [Did_not_reach_eof_for] warning.
[null_read_tolerance] determines how long the tailing must observe null reads
before it will report a [Delayed_due_to_null_reads_for] warning.
*)
val create :
default 32k
default 0.5s
-> ?retry_null_reads:bool (* default [true] *)
-> ?break_on_lines:bool (* default [true] *)
-> ?ignore_inode_change:bool (* default [false] *)
-> ?start_at:[ `Beginning | `End | `Pos of Int64.t ] (* default [`Beginning] *)
default 5s
default 0s
-> string
-> Update.t Pipe.Reader.t
| null | https://raw.githubusercontent.com/monadbobo/ocaml-core/9c1c06e7a1af7e15b6019a325d7dbdbd4cdb4020/base/async/extra/lib/file_tail.mli | ocaml | Errors are written to the pipe, and are nonrecoverable. After an error, the pipe
will always be closed.
* [Read_failed] occurs if some aspect of the open-seek-read-close used to get data
fails.
* [Stat_failed] occurs if [stat] fails.
* [Did_not_reach_eof_for span] occurs if it has been longer than
[eof_latency_tolerance] since [stat] detected that there is new data in the file and
the file tail processed all the new data. The [span] is how long it has been since
[stat] detected new data in the file.
* [Delayed_due_to_null_reads_for span] occurs when the file tail is unable to get data
from the file, because the data being read has null ('\000') characters. The span
is how long it has been attempting to read and been getting nulls. This warning
will only occur if [retry_null_reads = true]. This warning will be repeated until
the null reads stop.
* [No_longer_delayed_due_to_null_reads] occurs after a nonempty sequence of
[Delayed_due_to_null_reads_for] warnings, once the file tail gets a read that does
not contain null reads.
* [Data string] contains a chunk of data from the file. If [break_on_lines], then
data will be a single line (without the terminating newline).
* Warnings do not close the stream and whatever is reading can keep on doing so.
The [string] is the file name.
* Errors cause the stream to be closed. The [string] is the file name.
default [true]
default [true]
default [false]
default [`Beginning] | File_tail is useful for pulling data from a file that is being appended to by another
process . Creating a file tail returns the reader half of a pipe whose writer half is
populated by a background process that roughly does the following loop .
loop :
stat to find out if data is available
read data ( repeatedly [ open , seek , read , close ] until all data is read )
wait for some time
process. Creating a file tail returns the reader half of a pipe whose writer half is
populated by a background process that roughly does the following loop.
loop:
stat to find out if data is available
read data (repeatedly [ open, seek, read, close ] until all data is read)
wait for some time
*)
open Core.Std
open Import
module Error : sig
type t =
* [ File_replaced ] occurs if the inode of the file changed and the file tail was
configured with [ ignore_inode_change = false ] .
configured with [ignore_inode_change = false]. *)
| File_replaced
* [ File_shrank ] occurs if [ stat ] detects that the length of the file decreases from
one call to the next .
one call to the next. *)
| File_shrank
| Read_failed of exn
| Stat_failed of exn
with sexp_of
val to_string_hum : t -> string
end
module Warning : sig
type t =
| Did_not_reach_eof_for of Time.Span.t
* [ Reached_eof ] occurs whenever the file tail reaches the end of file , irrespective of
whether there has previously been a [ Did_not_reach_eof_for ] warning .
whether there has previously been a [Did_not_reach_eof_for] warning. *)
| Reached_eof
| Delayed_due_to_null_reads_for of Time.Span.t
| No_longer_delayed_due_to_null_reads
with sexp_of
val to_string_hum : t -> string
end
module Update : sig
type t =
| Data of string
| Warning of string * Warning.t
| Error of string * Error.t
with sexp_of
val to_string_hum : t -> string
end
* [ create file ] creates a [ File_tail.t ] that will immediately begin reading [ file ] , and
then will start the stat - read loop .
[ read_buf_len ] sets the size of the internal buffer used for making read system calls .
[ read_delay ] sets how long the stat - read loop waits each time after it reaches eof
before stat'ing again . Setting [ read_delay ] too low could cause unecessary load .
If [ retry_null_reads = true ] , then reads that return data with null ( ' \000 ' )
characters are ignored and cause the system to delay 0.2s and attempt the read again .
If [ retry_null_reads = false ] , then the file tail will process data with nulls just as
it would any other data .
If [ break_on_lines = true ] , the file tail will break data into lines on ' \n ' . If not ,
the fill tail will return chunks of data from the end of the file as they are
available .
If [ ignore_inode_change = true ] , the file tail will silently press on when the
[ file ] 's inode changes . If not , an inode change will cause the file tail to report an
error and stop . CIFS changes inodes of mounted files few times a day and we need
[ ignore_inode_change = true ] option to keep tailers watching files on it alive .
[ start_at ] determines the file position at which the file tail starts .
[ eof_latency_tolerance ] affects the [ Did_not_reach_eof_for ] warning .
[ null_read_tolerance ] determines how long the tailing must observe null reads
before it will report a [ Delayed_due_to_null_reads_for ] warning .
then will start the stat-read loop.
[read_buf_len] sets the size of the internal buffer used for making read system calls.
[read_delay] sets how long the stat-read loop waits each time after it reaches eof
before stat'ing again. Setting [read_delay] too low could cause unecessary load.
If [retry_null_reads = true], then reads that return data with null ('\000')
characters are ignored and cause the system to delay 0.2s and attempt the read again.
If [retry_null_reads = false], then the file tail will process data with nulls just as
it would any other data.
If [break_on_lines = true], the file tail will break data into lines on '\n'. If not,
the fill tail will return chunks of data from the end of the file as they are
available.
If [ignore_inode_change = true], the file tail will silently press on when the
[file]'s inode changes. If not, an inode change will cause the file tail to report an
error and stop. CIFS changes inodes of mounted files few times a day and we need
[ignore_inode_change = true] option to keep tailers watching files on it alive.
[start_at] determines the file position at which the file tail starts.
[eof_latency_tolerance] affects the [Did_not_reach_eof_for] warning.
[null_read_tolerance] determines how long the tailing must observe null reads
before it will report a [Delayed_due_to_null_reads_for] warning.
*)
val create :
default 32k
default 0.5s
default 5s
default 0s
-> string
-> Update.t Pipe.Reader.t
|
3d94ffd908f51e11f91d344d840e183c2a090e25ab1e9b0e6efba7512edbe08b | helium/erlang-tpke | dealer_test.erl | -module(dealer_test).
-include_lib("eunit/include/eunit.hrl").
first_secret_equality_test() ->
{ok, Dealer} = dealer:new(),
{ok, Group} = dealer:group(Dealer),
TODO make this work over the MNT224 curve
Group = erlang_pbc : group_new('MNT224 ' ) ,
Element = erlang_pbc:element_new('Zr', Group),
K = 5,
Coefficients = [erlang_pbc:element_random(Element) || _ <- lists:seq(1, K)],
Secret = hd(Coefficients),
FirstSecret = dealer:share_secret(0, Coefficients),
?assert(erlang_pbc:element_cmp(Secret, FirstSecret)).
zero_reconstruction_test() ->
{ok, Dealer} = dealer:new(),
{ok, Group} = dealer:group(Dealer),
{ok, {PubKey, _PrivateKeys}} = dealer:deal(Dealer),
Element = erlang_pbc:element_new('Zr', Group),
K = 5,
Coefficients = [erlang_pbc:element_random(Element) || _ <- lists:seq(1, K)],
FirstSecret = dealer:share_secret(0, Coefficients),
Set = ordsets:from_list(lists:seq(0, K-1)),
Bits = [ erlang_pbc:element_mul(tpke_pubkey:lagrange(PubKey, Set, J), dealer:share_secret(J+1, Coefficients)) || J <- ordsets:to_list(Set)],
SumBits = lists:foldl(fun erlang_pbc:element_add/2, hd(Bits), tl(Bits)),
?assert(erlang_pbc:element_cmp(FirstSecret, SumBits)).
| null | https://raw.githubusercontent.com/helium/erlang-tpke/0d9f8c5b30c10c73727402465963e6a33e680a56/test/dealer_test.erl | erlang | -module(dealer_test).
-include_lib("eunit/include/eunit.hrl").
first_secret_equality_test() ->
{ok, Dealer} = dealer:new(),
{ok, Group} = dealer:group(Dealer),
TODO make this work over the MNT224 curve
Group = erlang_pbc : group_new('MNT224 ' ) ,
Element = erlang_pbc:element_new('Zr', Group),
K = 5,
Coefficients = [erlang_pbc:element_random(Element) || _ <- lists:seq(1, K)],
Secret = hd(Coefficients),
FirstSecret = dealer:share_secret(0, Coefficients),
?assert(erlang_pbc:element_cmp(Secret, FirstSecret)).
zero_reconstruction_test() ->
{ok, Dealer} = dealer:new(),
{ok, Group} = dealer:group(Dealer),
{ok, {PubKey, _PrivateKeys}} = dealer:deal(Dealer),
Element = erlang_pbc:element_new('Zr', Group),
K = 5,
Coefficients = [erlang_pbc:element_random(Element) || _ <- lists:seq(1, K)],
FirstSecret = dealer:share_secret(0, Coefficients),
Set = ordsets:from_list(lists:seq(0, K-1)),
Bits = [ erlang_pbc:element_mul(tpke_pubkey:lagrange(PubKey, Set, J), dealer:share_secret(J+1, Coefficients)) || J <- ordsets:to_list(Set)],
SumBits = lists:foldl(fun erlang_pbc:element_add/2, hd(Bits), tl(Bits)),
?assert(erlang_pbc:element_cmp(FirstSecret, SumBits)).
| |
a42c4f1ccd99486fdb9ec2cf5bcc84e9b43b5835022cac6aa0880fff344c0b4c | gedge-platform/gedge-platform | jose_jwe_enc_c20p.erl | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
%% vim: ts=4 sw=4 ft=erlang noet
%%%-------------------------------------------------------------------
@author < >
2014 - 2019 ,
%%% @doc
%%%
%%% @end
Created : 31 May 2016 by < >
%%%-------------------------------------------------------------------
-module(jose_jwe_enc_c20p).
-behaviour(jose_jwe).
-behaviour(jose_jwe_enc).
%% jose_jwe callbacks
-export([from_map/1]).
-export([to_map/2]).
%% jose_jwe_enc callbacks
-export([algorithm/1]).
-export([bits/1]).
-export([block_decrypt/4]).
-export([block_encrypt/4]).
-export([next_cek/1]).
-export([next_iv/1]).
%% API
-export([cipher_supported/0]).
%% Types
-type enc() :: {chacha20_poly1305, 256}.
-export_type([enc/0]).
Macros
-define(CHACHA20_POLY1305, {chacha20_poly1305, 256}).
%%====================================================================
%% jose_jwe callbacks
%%====================================================================
from_map(F = #{ <<"enc">> := <<"C20P">> }) ->
{?CHACHA20_POLY1305, maps:remove(<<"enc">>, F)}.
to_map(?CHACHA20_POLY1305, F) ->
F#{ <<"enc">> => <<"C20P">> }.
%%====================================================================
%% jose_jwe_enc callbacks
%%====================================================================
algorithm(?CHACHA20_POLY1305) -> <<"C20P">>.
bits(?CHACHA20_POLY1305) -> 256.
block_decrypt({AAD, CipherText, CipherTag}, CEK, IV, ?CHACHA20_POLY1305) ->
jose_jwa:block_decrypt(?CHACHA20_POLY1305, CEK, IV, {AAD, CipherText, CipherTag}).
block_encrypt({AAD, PlainText}, CEK, IV, ?CHACHA20_POLY1305) ->
jose_jwa:block_encrypt(?CHACHA20_POLY1305, CEK, IV, {AAD, PlainText}).
next_cek(?CHACHA20_POLY1305) ->
crypto:strong_rand_bytes(32).
next_iv(?CHACHA20_POLY1305) ->
crypto:strong_rand_bytes(12).
%%====================================================================
%% API functions
%%====================================================================
cipher_supported() ->
[chacha20_poly1305].
%%%-------------------------------------------------------------------
Internal functions
%%%-------------------------------------------------------------------
| null | https://raw.githubusercontent.com/gedge-platform/gedge-platform/97c1e87faf28ba2942a77196b6be0a952bff1c3e/gs-broker/broker-server/deps/jose/src/jwe/jose_jwe_enc_c20p.erl | erlang | vim: ts=4 sw=4 ft=erlang noet
-------------------------------------------------------------------
@doc
@end
-------------------------------------------------------------------
jose_jwe callbacks
jose_jwe_enc callbacks
API
Types
====================================================================
jose_jwe callbacks
====================================================================
====================================================================
jose_jwe_enc callbacks
====================================================================
====================================================================
API functions
====================================================================
-------------------------------------------------------------------
------------------------------------------------------------------- | -*- mode : erlang ; tab - width : 4 ; indent - tabs - mode : 1 ; st - rulers : [ 70 ] -*-
@author < >
2014 - 2019 ,
Created : 31 May 2016 by < >
-module(jose_jwe_enc_c20p).
-behaviour(jose_jwe).
-behaviour(jose_jwe_enc).
-export([from_map/1]).
-export([to_map/2]).
-export([algorithm/1]).
-export([bits/1]).
-export([block_decrypt/4]).
-export([block_encrypt/4]).
-export([next_cek/1]).
-export([next_iv/1]).
-export([cipher_supported/0]).
-type enc() :: {chacha20_poly1305, 256}.
-export_type([enc/0]).
Macros
-define(CHACHA20_POLY1305, {chacha20_poly1305, 256}).
from_map(F = #{ <<"enc">> := <<"C20P">> }) ->
{?CHACHA20_POLY1305, maps:remove(<<"enc">>, F)}.
to_map(?CHACHA20_POLY1305, F) ->
F#{ <<"enc">> => <<"C20P">> }.
algorithm(?CHACHA20_POLY1305) -> <<"C20P">>.
bits(?CHACHA20_POLY1305) -> 256.
block_decrypt({AAD, CipherText, CipherTag}, CEK, IV, ?CHACHA20_POLY1305) ->
jose_jwa:block_decrypt(?CHACHA20_POLY1305, CEK, IV, {AAD, CipherText, CipherTag}).
block_encrypt({AAD, PlainText}, CEK, IV, ?CHACHA20_POLY1305) ->
jose_jwa:block_encrypt(?CHACHA20_POLY1305, CEK, IV, {AAD, PlainText}).
next_cek(?CHACHA20_POLY1305) ->
crypto:strong_rand_bytes(32).
next_iv(?CHACHA20_POLY1305) ->
crypto:strong_rand_bytes(12).
cipher_supported() ->
[chacha20_poly1305].
Internal functions
|
7a04eb4d8a964201db8adb99e35c3b6dbecea51edc2493b8bd5bbb7560cf31fe | sullyj3/adventofcode2022 | Day01.hs | module Day01 where
import AOC
import Data.Maybe (fromJust)
import Data.Text (Text)
import qualified Data.Text as T
import Utils (tRead)
solution = Solution {..}
where
parse = id
part1 = const ()
part2 = const ()
main = aocMain "inputs/01.txt" solution
| null | https://raw.githubusercontent.com/sullyj3/adventofcode2022/beff93b2f464ccd149f5a9d7bbae648aa9429eab/src/Day01.hs | haskell | module Day01 where
import AOC
import Data.Maybe (fromJust)
import Data.Text (Text)
import qualified Data.Text as T
import Utils (tRead)
solution = Solution {..}
where
parse = id
part1 = const ()
part2 = const ()
main = aocMain "inputs/01.txt" solution
| |
ae58e638db58e7eb1cf8c9c833351105711843094471f4cd7af77797261e6c23 | rudymatela/conjure | Prim.hs | -- |
-- Module : Conjure.Prim
Copyright : ( c ) 2021
License : 3 - Clause BSD ( see the file LICENSE )
Maintainer : < >
--
This module is part of " Conjure " .
--
-- The 'Prim' type and utilities involving it.
--
-- You are probably better off importing "Conjure".
module Conjure.Prim
( Prim (..)
, prim
, pr
, prif
, primOrdCaseFor
, cjHoles
, cjTiersFor
, cjAreEqual
, cjMkEquation
)
where
import Conjure.Conjurable
import Conjure.Expr
import Conjure.Utils
import Test.LeanCheck.Error (errorToFalse)
import Test.LeanCheck.Utils
import Test.Speculate.Expr
-- | A primtive expression (paired with instance reification).
type Prim = (Expr, Reification)
-- | Provides a primitive value to Conjure.
-- To be used on 'Show' instances.
-- (cf. 'prim')
pr :: (Conjurable a, Show a) => a -> Prim
pr x = (val x, conjureType x)
-- | Provides a primitive value to Conjure.
-- To be used on values that are not 'Show' instances
-- such as functions.
-- (cf. 'pr')
prim :: Conjurable a => String -> a -> Prim
prim s x = (value s x, conjureType x)
-- | Provides an if condition bound to the given return type.
prif :: Conjurable a => a -> Prim
prif x = (ifFor x, conjureType x)
-- | Provides a case condition bound to the given return type.
primOrdCaseFor :: Conjurable a => a -> Prim
primOrdCaseFor x = (caseForOrd x, conjureType x)
-- the following functions mirror their "conjure" counterparts from
Conjure . but need a list of Prims instead of a
-- representative.
| Computes a list of ' 's from a list of ' Prim 's .
--
-- This function mirrors functionality of 'conjureReification'.
cjReification :: [Prim] -> [Reification1]
cjReification ps = nubOn (\(eh,_,_,_,_,_) -> eh)
$ foldr (.) id (map snd ps) [conjureReification1 bool]
| Computes a list of holes encoded as ' 's from a list of ' Prim 's .
--
-- This function mirrors functionality from 'conjureHoles'.
cjHoles :: [Prim] -> [Expr]
cjHoles ps = [eh | (eh,_,Just _,_,_,_) <- cjReification ps]
| Computes a function that equates two ' 's from a list of ' Prim 's .
--
-- This function mirrors functionality from 'conjureMkEquation'.
cjMkEquation :: [Prim] -> Expr -> Expr -> Expr
cjMkEquation ps = mkEquation [eq | (_,Just eq,_,_,_,_) <- cjReification ps]
| Given a list of ' Prim 's ,
computes a function that checks whether two ' 's are equal
-- up to a given number of tests.
cjAreEqual :: [Prim] -> Int -> Expr -> Expr -> Bool
cjAreEqual ps maxTests = (===)
where
(-==-) = cjMkEquation ps
e1 === e2 = isTrue $ e1 -==- e2
isTrue = all (errorToFalse . eval False) . gs
gs = take maxTests . grounds (cjTiersFor ps)
| Given a list of ' Prim 's ,
-- returns a function that given an 'Expr'
will return tiers of test ' ' values .
--
-- This is used in 'cjAreEqual'.
cjTiersFor :: [Prim] -> Expr -> [[Expr]]
cjTiersFor ps e = tf allTiers
where
allTiers :: [ [[Expr]] ]
allTiers = [etiers | (_,_,Just etiers,_,_,_) <- cjReification ps]
tf [] = [[e]] -- no tiers found, keep variable
tf (etiers:etc) = case etiers of
((e':_):_) | typ e' == typ e -> etiers
_ -> tf etc
| null | https://raw.githubusercontent.com/rudymatela/conjure/9d2167e758e72b6be5970bc33f6b9e893fc5874f/src/Conjure/Prim.hs | haskell | |
Module : Conjure.Prim
The 'Prim' type and utilities involving it.
You are probably better off importing "Conjure".
| A primtive expression (paired with instance reification).
| Provides a primitive value to Conjure.
To be used on 'Show' instances.
(cf. 'prim')
| Provides a primitive value to Conjure.
To be used on values that are not 'Show' instances
such as functions.
(cf. 'pr')
| Provides an if condition bound to the given return type.
| Provides a case condition bound to the given return type.
the following functions mirror their "conjure" counterparts from
representative.
This function mirrors functionality of 'conjureReification'.
This function mirrors functionality from 'conjureHoles'.
This function mirrors functionality from 'conjureMkEquation'.
up to a given number of tests.
returns a function that given an 'Expr'
This is used in 'cjAreEqual'.
no tiers found, keep variable | Copyright : ( c ) 2021
License : 3 - Clause BSD ( see the file LICENSE )
Maintainer : < >
This module is part of " Conjure " .
module Conjure.Prim
( Prim (..)
, prim
, pr
, prif
, primOrdCaseFor
, cjHoles
, cjTiersFor
, cjAreEqual
, cjMkEquation
)
where
import Conjure.Conjurable
import Conjure.Expr
import Conjure.Utils
import Test.LeanCheck.Error (errorToFalse)
import Test.LeanCheck.Utils
import Test.Speculate.Expr
type Prim = (Expr, Reification)
pr :: (Conjurable a, Show a) => a -> Prim
pr x = (val x, conjureType x)
prim :: Conjurable a => String -> a -> Prim
prim s x = (value s x, conjureType x)
prif :: Conjurable a => a -> Prim
prif x = (ifFor x, conjureType x)
primOrdCaseFor :: Conjurable a => a -> Prim
primOrdCaseFor x = (caseForOrd x, conjureType x)
Conjure . but need a list of Prims instead of a
| Computes a list of ' 's from a list of ' Prim 's .
cjReification :: [Prim] -> [Reification1]
cjReification ps = nubOn (\(eh,_,_,_,_,_) -> eh)
$ foldr (.) id (map snd ps) [conjureReification1 bool]
| Computes a list of holes encoded as ' 's from a list of ' Prim 's .
cjHoles :: [Prim] -> [Expr]
cjHoles ps = [eh | (eh,_,Just _,_,_,_) <- cjReification ps]
| Computes a function that equates two ' 's from a list of ' Prim 's .
cjMkEquation :: [Prim] -> Expr -> Expr -> Expr
cjMkEquation ps = mkEquation [eq | (_,Just eq,_,_,_,_) <- cjReification ps]
| Given a list of ' Prim 's ,
computes a function that checks whether two ' 's are equal
cjAreEqual :: [Prim] -> Int -> Expr -> Expr -> Bool
cjAreEqual ps maxTests = (===)
where
(-==-) = cjMkEquation ps
e1 === e2 = isTrue $ e1 -==- e2
isTrue = all (errorToFalse . eval False) . gs
gs = take maxTests . grounds (cjTiersFor ps)
| Given a list of ' Prim 's ,
will return tiers of test ' ' values .
cjTiersFor :: [Prim] -> Expr -> [[Expr]]
cjTiersFor ps e = tf allTiers
where
allTiers :: [ [[Expr]] ]
allTiers = [etiers | (_,_,Just etiers,_,_,_) <- cjReification ps]
tf (etiers:etc) = case etiers of
((e':_):_) | typ e' == typ e -> etiers
_ -> tf etc
|
5895b1c93e28d29b28f1ae682559f56046031bc52dfe438871b9c015b88f3a9f | screenshotbot/screenshotbot-oss | help.lisp | ;; Copyright 2018-Present Modern Interpreters Inc.
;;
This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(uiop:define-package :screenshotbot/sdk/help
(:use #:cl
#:alexandria)
(:export
#:help))
(in-package :screenshotbot/sdk/help)
(defun help ()
(format t "Screenshotbot Recorder script~%~%")
(format t "Usage: recorder [options]~%~%")
(format t "Use this script from your CI pipelines or locally to
upload screenshots and generate reports~%~%")
(format t "Options:~%~%")
(loop for (name . flag) in (sort com.google.flag::*registered-flags* #'string< :key #'car) do
(let* ((lines (mapcar #'str:trim (str:lines (com.google.flag::help flag))))
(lines (loop for line in lines
for start from 0
if (= start 0)
collect line
else
collect (str:concat " " line)))
(lines (cond
((< (length name) 22)
lines)
(t
(list* "" lines)))))
(format t " --~22A~40A~%" name
(or (car lines) ""))
(loop for l in (cdr lines) do
(format t "~25A~A~%" " " l))))
(format t "~%Copyright 2020-2022 Modern Interpreters Inc.~%")
(format t "Please reach out to for any questions~%"))
| null | https://raw.githubusercontent.com/screenshotbot/screenshotbot-oss/da6a181de5222dc78f19951e32efa3a46277c32e/src/screenshotbot/sdk/help.lisp | lisp | Copyright 2018-Present Modern Interpreters Inc.
| This Source Code Form is subject to the terms of the Mozilla Public
License , v. 2.0 . If a copy of the MPL was not distributed with this
file , You can obtain one at /.
(uiop:define-package :screenshotbot/sdk/help
(:use #:cl
#:alexandria)
(:export
#:help))
(in-package :screenshotbot/sdk/help)
(defun help ()
(format t "Screenshotbot Recorder script~%~%")
(format t "Usage: recorder [options]~%~%")
(format t "Use this script from your CI pipelines or locally to
upload screenshots and generate reports~%~%")
(format t "Options:~%~%")
(loop for (name . flag) in (sort com.google.flag::*registered-flags* #'string< :key #'car) do
(let* ((lines (mapcar #'str:trim (str:lines (com.google.flag::help flag))))
(lines (loop for line in lines
for start from 0
if (= start 0)
collect line
else
collect (str:concat " " line)))
(lines (cond
((< (length name) 22)
lines)
(t
(list* "" lines)))))
(format t " --~22A~40A~%" name
(or (car lines) ""))
(loop for l in (cdr lines) do
(format t "~25A~A~%" " " l))))
(format t "~%Copyright 2020-2022 Modern Interpreters Inc.~%")
(format t "Please reach out to for any questions~%"))
|
6109f07c79fa57dcdcadde97675a8e8f416af0486ba39f605f14270dff779fe7 | thierry-martinez/stdcompat | weak.mli | type 'a t
val create : int -> 'a t
val length : 'a t -> int
val set : 'a t -> int -> 'a option -> unit
val get : 'a t -> int -> 'a option
val get_copy : 'a t -> int -> 'a option
val check : 'a t -> int -> bool
val fill : 'a t -> int -> int -> 'a option -> unit
val blit : 'a t -> int -> 'a t -> int -> int -> unit
module type S =
sig
type data
and t
val create : int -> t
val clear : t -> unit
val merge : t -> data -> data
val add : t -> data -> unit
val remove : t -> data -> unit
val find : t -> data -> data
val find_all : t -> data -> data list
val mem : t -> data -> bool
val iter : (data -> unit) -> t -> unit
val fold : (data -> 'a -> 'a) -> t -> 'a -> 'a
val count : t -> int
val stats : t -> (int * int * int * int * int * int)
end
module Make :
functor (H : Hashtbl.HashedType) ->
sig
type data = H.t
and t
val create : int -> t
val clear : t -> unit
val merge : t -> data -> data
val add : t -> data -> unit
val remove : t -> data -> unit
val find : t -> data -> data
val find_all : t -> data -> data list
val mem : t -> data -> bool
val iter : (data -> unit) -> t -> unit
val fold : (data -> 'a -> 'a) -> t -> 'a -> 'a
val count : t -> int
val stats : t -> (int * int * int * int * int * int)
end
| null | https://raw.githubusercontent.com/thierry-martinez/stdcompat/83d786cdb17fae0caadf5c342e283c3dcfee2279/interfaces/3.07/weak.mli | ocaml | type 'a t
val create : int -> 'a t
val length : 'a t -> int
val set : 'a t -> int -> 'a option -> unit
val get : 'a t -> int -> 'a option
val get_copy : 'a t -> int -> 'a option
val check : 'a t -> int -> bool
val fill : 'a t -> int -> int -> 'a option -> unit
val blit : 'a t -> int -> 'a t -> int -> int -> unit
module type S =
sig
type data
and t
val create : int -> t
val clear : t -> unit
val merge : t -> data -> data
val add : t -> data -> unit
val remove : t -> data -> unit
val find : t -> data -> data
val find_all : t -> data -> data list
val mem : t -> data -> bool
val iter : (data -> unit) -> t -> unit
val fold : (data -> 'a -> 'a) -> t -> 'a -> 'a
val count : t -> int
val stats : t -> (int * int * int * int * int * int)
end
module Make :
functor (H : Hashtbl.HashedType) ->
sig
type data = H.t
and t
val create : int -> t
val clear : t -> unit
val merge : t -> data -> data
val add : t -> data -> unit
val remove : t -> data -> unit
val find : t -> data -> data
val find_all : t -> data -> data list
val mem : t -> data -> bool
val iter : (data -> unit) -> t -> unit
val fold : (data -> 'a -> 'a) -> t -> 'a -> 'a
val count : t -> int
val stats : t -> (int * int * int * int * int * int)
end
| |
6284f7120b47e83d01ee189fed6ed0371dcd1d2c31b47629768f2424d0886ec3 | SamB/coq | lib.mli | (************************************************************************)
v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
(* // * This file is distributed under the terms of the *)
(* * GNU Lesser General Public License Version 2.1 *)
(************************************************************************)
(*i $Id$ i*)
(*s This module provides a general mechanism to keep a trace of all operations
and to backtrack (undo) those operations. It provides also the section
mechanism (at a low level; discharge is not known at this step). *)
type node =
| Leaf of Libobject.obj
| CompilingLibrary of Libnames.object_prefix
| OpenedModule of bool option * Libnames.object_prefix * Summary.frozen
| ClosedModule of library_segment
| OpenedModtype of Libnames.object_prefix * Summary.frozen
| ClosedModtype of library_segment
| OpenedSection of Libnames.object_prefix * Summary.frozen
| ClosedSection of library_segment
| FrozenState of Summary.frozen
and library_segment = (Libnames.object_name * node) list
type lib_objects = (Names.identifier * Libobject.obj) list
(*s Object iteratation functions. *)
val open_objects : int -> Libnames.object_prefix -> lib_objects -> unit
val load_objects : int -> Libnames.object_prefix -> lib_objects -> unit
val subst_objects : Libnames.object_prefix -> Mod_subst.substitution -> lib_objects -> lib_objects
[ classify_segment seg ] verifies that there are no OpenedThings ,
clears ClosedSections and FrozenStates and divides Leafs according
to their answers to the [ classify_object ] function in three groups :
[ Substitute ] , [ Keep ] , [ Anticipate ] respectively . The order of each
returned list is the same as in the input list .
clears ClosedSections and FrozenStates and divides Leafs according
to their answers to the [classify_object] function in three groups:
[Substitute], [Keep], [Anticipate] respectively. The order of each
returned list is the same as in the input list. *)
val classify_segment :
library_segment -> lib_objects * lib_objects * Libobject.obj list
[ segment_of_objects prefix objs ] forms a list of Leafs
val segment_of_objects :
Libnames.object_prefix -> lib_objects -> library_segment
s Adding operations ( which call the [ cache ] method , and getting the
current list of operations ( most recent ones coming first ) .
current list of operations (most recent ones coming first). *)
val add_leaf : Names.identifier -> Libobject.obj -> Libnames.object_name
val add_absolutely_named_leaf : Libnames.object_name -> Libobject.obj -> unit
val add_anonymous_leaf : Libobject.obj -> unit
(* this operation adds all objects with the same name and calls [load_object]
for each of them *)
val add_leaves : Names.identifier -> Libobject.obj list -> Libnames.object_name
val add_frozen_state : unit -> unit
(* Adds a "dummy" entry in lib_stk with a unique new label number. *)
val mark_end_of_command : unit -> unit
(* Returns the current label number *)
val current_command_label : unit -> int
(* [reset_label n ] resets [lib_stk] to the label n registered by
[mark_end_of_command()]. That is it forgets the label and anything
registered after it. *)
val reset_label : int -> unit
(*s The function [contents_after] returns the current library segment,
starting from a given section path. If not given, the entire segment
is returned. *)
val contents_after : Libnames.object_name option -> library_segment
(*s Functions relative to current path *)
(* User-side names *)
val cwd : unit -> Names.dir_path
val current_dirpath : bool -> Names.dir_path
val make_path : Names.identifier -> Libnames.section_path
val path_of_include : unit -> Libnames.section_path
(* Kernel-side names *)
val current_prefix : unit -> Names.module_path * Names.dir_path
val make_kn : Names.identifier -> Names.kernel_name
val make_con : Names.identifier -> Names.constant
(* Are we inside an opened section *)
val sections_are_opened : unit -> bool
val sections_depth : unit -> int
(* Are we inside an opened module type *)
val is_modtype : unit -> bool
val is_module : unit -> bool
val current_mod_id : unit -> Names.module_ident
(* Returns the most recent OpenedThing node *)
val what_is_opened : unit -> Libnames.object_name * node
(*s Modules and module types *)
val start_module :
bool option -> Names.module_ident -> Names.module_path -> Summary.frozen -> Libnames.object_prefix
val end_module : Names.module_ident
-> Libnames.object_name * Libnames.object_prefix * Summary.frozen * library_segment
val start_modtype :
Names.module_ident -> Names.module_path -> Summary.frozen -> Libnames.object_prefix
val end_modtype : Names.module_ident
-> Libnames.object_name * Libnames.object_prefix * Summary.frozen * library_segment
(* [Lib.add_frozen_state] must be called after each of the above functions *)
(*s Compilation units *)
val start_compilation : Names.dir_path -> Names.module_path -> unit
val end_compilation : Names.dir_path -> Libnames.object_prefix * library_segment
The function [ library_dp ] returns the [ dir_path ] of the current
compiling library ( or [ default_library ] )
compiling library (or [default_library]) *)
val library_dp : unit -> Names.dir_path
(* Extract the library part of a name even if in a section *)
val dp_of_mp : Names.module_path -> Names.dir_path
val split_mp : Names.module_path -> Names.dir_path * Names.dir_path
val split_modpath : Names.module_path -> Names.dir_path * Names.identifier list
val library_part : Libnames.global_reference -> Names.dir_path
val remove_section_part : Libnames.global_reference -> Names.dir_path
(*s Sections *)
val open_section : Names.identifier -> unit
val close_section : Names.identifier -> unit
(*s Backtracking (undo). *)
val reset_to : Libnames.object_name -> unit
val reset_name : Names.identifier Util.located -> unit
val remove_name : Names.identifier Util.located -> unit
val reset_mod : Names.identifier Util.located -> unit
val reset_to_state : Libnames.object_name -> unit
val has_top_frozen_state : unit -> Libnames.object_name option
(* [back n] resets to the place corresponding to the $n$-th call of
[mark_end_of_command] (counting backwards) *)
val back : int -> unit
(*s We can get and set the state of the operations (used in [States]). *)
type frozen
val freeze : unit -> frozen
val unfreeze : frozen -> unit
val init : unit -> unit
val declare_initial_state : unit -> unit
val reset_initial : unit -> unit
(* XML output hooks *)
val set_xml_open_section : (Names.identifier -> unit) -> unit
val set_xml_close_section : (Names.identifier -> unit) -> unit
(*s Section management for discharge *)
val section_segment_of_constant : Names.constant -> Sign.named_context
val section_segment_of_mutual_inductive: Names.mutual_inductive -> Sign.named_context
val section_instance : Libnames.global_reference -> Names.identifier array
val add_section_variable : Names.identifier -> bool -> Term.types option -> unit
val add_section_constant : Names.constant -> Sign.named_context -> unit
val add_section_kn : Names.kernel_name -> Sign.named_context -> unit
val replacement_context : unit ->
(Names.identifier array Names.Cmap.t * Names.identifier array Names.KNmap.t)
(*s Discharge: decrease the section level if in the current section *)
val discharge_kn : Names.kernel_name -> Names.kernel_name
val discharge_con : Names.constant -> Names.constant
val discharge_global : Libnames.global_reference -> Libnames.global_reference
val discharge_inductive : Names.inductive -> Names.inductive
| null | https://raw.githubusercontent.com/SamB/coq/8f84aba9ae83a4dc43ea6e804227ae8cae8086b1/library/lib.mli | ocaml | **********************************************************************
// * This file is distributed under the terms of the
* GNU Lesser General Public License Version 2.1
**********************************************************************
i $Id$ i
s This module provides a general mechanism to keep a trace of all operations
and to backtrack (undo) those operations. It provides also the section
mechanism (at a low level; discharge is not known at this step).
s Object iteratation functions.
this operation adds all objects with the same name and calls [load_object]
for each of them
Adds a "dummy" entry in lib_stk with a unique new label number.
Returns the current label number
[reset_label n ] resets [lib_stk] to the label n registered by
[mark_end_of_command()]. That is it forgets the label and anything
registered after it.
s The function [contents_after] returns the current library segment,
starting from a given section path. If not given, the entire segment
is returned.
s Functions relative to current path
User-side names
Kernel-side names
Are we inside an opened section
Are we inside an opened module type
Returns the most recent OpenedThing node
s Modules and module types
[Lib.add_frozen_state] must be called after each of the above functions
s Compilation units
Extract the library part of a name even if in a section
s Sections
s Backtracking (undo).
[back n] resets to the place corresponding to the $n$-th call of
[mark_end_of_command] (counting backwards)
s We can get and set the state of the operations (used in [States]).
XML output hooks
s Section management for discharge
s Discharge: decrease the section level if in the current section | v * The Coq Proof Assistant / The Coq Development Team
< O _ _ _ , , * CNRS - Ecole Polytechnique - INRIA Futurs - Universite Paris Sud
\VV/ * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
type node =
| Leaf of Libobject.obj
| CompilingLibrary of Libnames.object_prefix
| OpenedModule of bool option * Libnames.object_prefix * Summary.frozen
| ClosedModule of library_segment
| OpenedModtype of Libnames.object_prefix * Summary.frozen
| ClosedModtype of library_segment
| OpenedSection of Libnames.object_prefix * Summary.frozen
| ClosedSection of library_segment
| FrozenState of Summary.frozen
and library_segment = (Libnames.object_name * node) list
type lib_objects = (Names.identifier * Libobject.obj) list
val open_objects : int -> Libnames.object_prefix -> lib_objects -> unit
val load_objects : int -> Libnames.object_prefix -> lib_objects -> unit
val subst_objects : Libnames.object_prefix -> Mod_subst.substitution -> lib_objects -> lib_objects
[ classify_segment seg ] verifies that there are no OpenedThings ,
clears ClosedSections and FrozenStates and divides Leafs according
to their answers to the [ classify_object ] function in three groups :
[ Substitute ] , [ Keep ] , [ Anticipate ] respectively . The order of each
returned list is the same as in the input list .
clears ClosedSections and FrozenStates and divides Leafs according
to their answers to the [classify_object] function in three groups:
[Substitute], [Keep], [Anticipate] respectively. The order of each
returned list is the same as in the input list. *)
val classify_segment :
library_segment -> lib_objects * lib_objects * Libobject.obj list
[ segment_of_objects prefix objs ] forms a list of Leafs
val segment_of_objects :
Libnames.object_prefix -> lib_objects -> library_segment
s Adding operations ( which call the [ cache ] method , and getting the
current list of operations ( most recent ones coming first ) .
current list of operations (most recent ones coming first). *)
val add_leaf : Names.identifier -> Libobject.obj -> Libnames.object_name
val add_absolutely_named_leaf : Libnames.object_name -> Libobject.obj -> unit
val add_anonymous_leaf : Libobject.obj -> unit
val add_leaves : Names.identifier -> Libobject.obj list -> Libnames.object_name
val add_frozen_state : unit -> unit
val mark_end_of_command : unit -> unit
val current_command_label : unit -> int
val reset_label : int -> unit
val contents_after : Libnames.object_name option -> library_segment
val cwd : unit -> Names.dir_path
val current_dirpath : bool -> Names.dir_path
val make_path : Names.identifier -> Libnames.section_path
val path_of_include : unit -> Libnames.section_path
val current_prefix : unit -> Names.module_path * Names.dir_path
val make_kn : Names.identifier -> Names.kernel_name
val make_con : Names.identifier -> Names.constant
val sections_are_opened : unit -> bool
val sections_depth : unit -> int
val is_modtype : unit -> bool
val is_module : unit -> bool
val current_mod_id : unit -> Names.module_ident
val what_is_opened : unit -> Libnames.object_name * node
val start_module :
bool option -> Names.module_ident -> Names.module_path -> Summary.frozen -> Libnames.object_prefix
val end_module : Names.module_ident
-> Libnames.object_name * Libnames.object_prefix * Summary.frozen * library_segment
val start_modtype :
Names.module_ident -> Names.module_path -> Summary.frozen -> Libnames.object_prefix
val end_modtype : Names.module_ident
-> Libnames.object_name * Libnames.object_prefix * Summary.frozen * library_segment
val start_compilation : Names.dir_path -> Names.module_path -> unit
val end_compilation : Names.dir_path -> Libnames.object_prefix * library_segment
The function [ library_dp ] returns the [ dir_path ] of the current
compiling library ( or [ default_library ] )
compiling library (or [default_library]) *)
val library_dp : unit -> Names.dir_path
val dp_of_mp : Names.module_path -> Names.dir_path
val split_mp : Names.module_path -> Names.dir_path * Names.dir_path
val split_modpath : Names.module_path -> Names.dir_path * Names.identifier list
val library_part : Libnames.global_reference -> Names.dir_path
val remove_section_part : Libnames.global_reference -> Names.dir_path
val open_section : Names.identifier -> unit
val close_section : Names.identifier -> unit
val reset_to : Libnames.object_name -> unit
val reset_name : Names.identifier Util.located -> unit
val remove_name : Names.identifier Util.located -> unit
val reset_mod : Names.identifier Util.located -> unit
val reset_to_state : Libnames.object_name -> unit
val has_top_frozen_state : unit -> Libnames.object_name option
val back : int -> unit
type frozen
val freeze : unit -> frozen
val unfreeze : frozen -> unit
val init : unit -> unit
val declare_initial_state : unit -> unit
val reset_initial : unit -> unit
val set_xml_open_section : (Names.identifier -> unit) -> unit
val set_xml_close_section : (Names.identifier -> unit) -> unit
val section_segment_of_constant : Names.constant -> Sign.named_context
val section_segment_of_mutual_inductive: Names.mutual_inductive -> Sign.named_context
val section_instance : Libnames.global_reference -> Names.identifier array
val add_section_variable : Names.identifier -> bool -> Term.types option -> unit
val add_section_constant : Names.constant -> Sign.named_context -> unit
val add_section_kn : Names.kernel_name -> Sign.named_context -> unit
val replacement_context : unit ->
(Names.identifier array Names.Cmap.t * Names.identifier array Names.KNmap.t)
val discharge_kn : Names.kernel_name -> Names.kernel_name
val discharge_con : Names.constant -> Names.constant
val discharge_global : Libnames.global_reference -> Libnames.global_reference
val discharge_inductive : Names.inductive -> Names.inductive
|
801ddf42af149916ab4570ddedd304790a7d5fb110fd37a3cf633b2e85973fb4 | quil-lang/qvm | ping.lisp | api / ping.lisp
;;;;
Author :
(in-package #:qvm-app)
(defun handle-ping ()
(format nil "pong ~D" (get-universal-time)))
| null | https://raw.githubusercontent.com/quil-lang/qvm/de95ead6e7df70a1f8e0212455a802bd0cef201c/app/src/api/ping.lisp | lisp | api / ping.lisp
Author :
(in-package #:qvm-app)
(defun handle-ping ()
(format nil "pong ~D" (get-universal-time)))
| |
7a9be8d824a7beb76f17f1080a4cd12b4e94b7283b15a802c763cad2f59c656e | cronburg/antlr-haskell | Common.hs | |
Module : Text . Description : Haskell - level helper functions used throughout Text . ANTLR
Copyright : ( c ) , 2018
License : BSD3
Maintainer :
Stability : experimental
Portability : POSIX
Module : Text.ANTLR.Common
Description : Haskell-level helper functions used throughout Text.ANTLR
Copyright : (c) Karl Cronburg, 2018
License : BSD3
Maintainer :
Stability : experimental
Portability : POSIX
-}
module Text.ANTLR.Common where
concatWith cs [] = []
concatWith cs [x] = x
concatWith cs (x:xs) = x ++ cs ++ concatWith cs xs
| null | https://raw.githubusercontent.com/cronburg/antlr-haskell/7a9367038eaa58f9764f2ff694269245fbebc155/src/Text/ANTLR/Common.hs | haskell | |
Module : Text . Description : Haskell - level helper functions used throughout Text . ANTLR
Copyright : ( c ) , 2018
License : BSD3
Maintainer :
Stability : experimental
Portability : POSIX
Module : Text.ANTLR.Common
Description : Haskell-level helper functions used throughout Text.ANTLR
Copyright : (c) Karl Cronburg, 2018
License : BSD3
Maintainer :
Stability : experimental
Portability : POSIX
-}
module Text.ANTLR.Common where
concatWith cs [] = []
concatWith cs [x] = x
concatWith cs (x:xs) = x ++ cs ++ concatWith cs xs
| |
affcd6971cd49425d0ef9a6a6c24e6c1af19f2c2195f0817303f3a9a3cae9ea6 | scmlab/gcl | Pretty.hs | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
{-# LANGUAGE OverloadedStrings #-}
module Pretty
( module Prettyprinter
, module Pretty.Util
) where
import Error ( Error )
import Prelude hiding ( Ordering(..) )
import Pretty.Abstract ( )
import Pretty.Concrete ( )
import Pretty.Error ( )
import Pretty.Predicate ( )
import Pretty.Util
import Prettyprinter
--------------------------------------------------------------------------------
-- | Misc
instance {-# OVERLAPPING #-} (Pretty a) => Pretty (Either Error a) where
pretty (Left a) = "Error" <+> pretty a
pretty (Right b) = pretty b
instance (Pretty a, Pretty b, Pretty c, Pretty d) => Pretty (a, b, c, d) where
pretty (a, b, c, d) =
"("
<> pretty a
<> ", "
<> pretty b
<> ", "
<> pretty c
<> ", "
<> pretty d
<> ")"
| null | https://raw.githubusercontent.com/scmlab/gcl/3d3aefb513ce6d3821265d77b6f3f0ffba0f4c66/src/Pretty.hs | haskell | # LANGUAGE OverloadedStrings #
------------------------------------------------------------------------------
| Misc
# OVERLAPPING # | # LANGUAGE FlexibleInstances #
# LANGUAGE MultiParamTypeClasses #
module Pretty
( module Prettyprinter
, module Pretty.Util
) where
import Error ( Error )
import Prelude hiding ( Ordering(..) )
import Pretty.Abstract ( )
import Pretty.Concrete ( )
import Pretty.Error ( )
import Pretty.Predicate ( )
import Pretty.Util
import Prettyprinter
pretty (Left a) = "Error" <+> pretty a
pretty (Right b) = pretty b
instance (Pretty a, Pretty b, Pretty c, Pretty d) => Pretty (a, b, c, d) where
pretty (a, b, c, d) =
"("
<> pretty a
<> ", "
<> pretty b
<> ", "
<> pretty c
<> ", "
<> pretty d
<> ")"
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.