_id stringlengths 64 64 | repository stringlengths 6 84 | name stringlengths 4 110 | content stringlengths 0 248k | license null | download_url stringlengths 89 454 | language stringclasses 7
values | comments stringlengths 0 74.6k | code stringlengths 0 248k |
|---|---|---|---|---|---|---|---|---|
db902fc6108eba6a42a580fd1412a5e11fe5dee002b46d6aef8f8103cb7ce7ae | FranklinChen/hugs98-plus-Sep2006 | Process.hs | # OPTIONS_GHC -cpp -fffi #
-----------------------------------------------------------------------------
-- |
-- Module : System.Process
Copyright : ( c ) The University of Glasgow 2004
-- License : BSD-style (see the file libraries/base/LICENSE)
--
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
Operations for creating and interacting with sub - processes .
--
-----------------------------------------------------------------------------
ToDo :
-- * Flag to control whether exiting the parent also kills the child.
* Windows impl of runProcess should close the Handles .
* Add system / replacements
NOTES on createPipe :
createPipe is no longer exported , because of the following problems :
- it was n't used to implement runInteractiveProcess on Unix , because
the file descriptors for the unused ends of the pipe need to be closed
in the child process .
- on Windows , a special version of createPipe is needed that sets
the inheritance flags correctly on the ends of the pipe ( see
mkAnonPipe below ) .
createPipe is no longer exported, because of the following problems:
- it wasn't used to implement runInteractiveProcess on Unix, because
the file descriptors for the unused ends of the pipe need to be closed
in the child process.
- on Windows, a special version of createPipe is needed that sets
the inheritance flags correctly on the ends of the pipe (see
mkAnonPipe below).
-}
module System.Process (
-- * Running sub-processes
ProcessHandle,
runCommand,
runProcess,
runInteractiveCommand,
runInteractiveProcess,
-- * Process completion
waitForProcess,
getProcessExitCode,
terminateProcess,
) where
import Prelude
import System.Process.Internals
import Foreign
import Foreign.C
import System.IO ( IOMode(..), Handle, hClose )
import System.Exit ( ExitCode(..) )
import System.Posix.Internals
import GHC.IOBase ( FD )
import GHC.Handle ( openFd )
-- ----------------------------------------------------------------------------
{- | Runs a command using the shell.
-}
runCommand
:: String
-> IO ProcessHandle
runCommand string = do
(cmd,args) <- commandToProcess string
#if !defined(mingw32_HOST_OS) && !defined(__MINGW32__)
runProcessPosix "runCommand" cmd args Nothing Nothing Nothing Nothing Nothing
Nothing Nothing
#else
runProcessWin32 "runCommand" cmd [] Nothing Nothing Nothing Nothing Nothing args
#endif
-- ----------------------------------------------------------------------------
-- runProcess
| Runs a raw command , optionally specifying ' Handle 's from which to
take the @stdin@ , @stdout@ and @stderr@ channels for the new
process .
Any ' Handle 's passed to ' runProcess ' are placed immediately in the
closed state .
take the @stdin@, @stdout@ and @stderr@ channels for the new
process.
Any 'Handle's passed to 'runProcess' are placed immediately in the
closed state.
-}
runProcess
:: FilePath -- ^ Filename of the executable
-> [String] -- ^ Arguments to pass to the executable
-> Maybe FilePath -- ^ Optional path to the working directory
-> Maybe [(String,String)] -- ^ Optional environment (otherwise inherit)
-> Maybe Handle -- ^ Handle to use for @stdin@
-> Maybe Handle -- ^ Handle to use for @stdout@
^ Handle to use for @stderr@
-> IO ProcessHandle
runProcess cmd args mb_cwd mb_env mb_stdin mb_stdout mb_stderr = do
#if !defined(mingw32_HOST_OS) && !defined(__MINGW32__)
h <- runProcessPosix "runProcess" cmd args mb_cwd mb_env
mb_stdin mb_stdout mb_stderr
Nothing Nothing
#else
h <- runProcessWin32 "runProcess" cmd args mb_cwd mb_env
mb_stdin mb_stdout mb_stderr ""
#endif
maybe (return ()) hClose mb_stdin
maybe (return ()) hClose mb_stdout
maybe (return ()) hClose mb_stderr
return h
-- ----------------------------------------------------------------------------
-- runInteractiveCommand
| Runs a command using the shell , and returns ' Handle 's that may
be used to communicate with the process via its @stdin@ , @stdout@ ,
and @stderr@ respectively .
be used to communicate with the process via its @stdin@, @stdout@,
and @stderr@ respectively.
-}
runInteractiveCommand
:: String
-> IO (Handle,Handle,Handle,ProcessHandle)
runInteractiveCommand string = do
(cmd,args) <- commandToProcess string
#if !defined(mingw32_HOST_OS) && !defined(__MINGW32__)
runInteractiveProcess1 "runInteractiveCommand" cmd args Nothing Nothing
#else
runInteractiveProcess1 "runInteractiveCommand" cmd [] Nothing Nothing args
#endif
-- ----------------------------------------------------------------------------
-- runInteractiveProcess
| Runs a raw command , and returns ' Handle 's that may be used to communicate
with the process via its @stdin@ , @stdout@ and @stderr@ respectively .
For example , to start a process and feed a string to its stdin :
> ( inp , out , err , pid ) < - runInteractiveProcess " ... "
> forkIO ( hPutStr inp str )
with the process via its @stdin@, @stdout@ and @stderr@ respectively.
For example, to start a process and feed a string to its stdin:
> (inp,out,err,pid) <- runInteractiveProcess "..."
> forkIO (hPutStr inp str)
-}
runInteractiveProcess
:: FilePath -- ^ Filename of the executable
-> [String] -- ^ Arguments to pass to the executable
-> Maybe FilePath -- ^ Optional path to the working directory
-> Maybe [(String,String)] -- ^ Optional environment (otherwise inherit)
-> IO (Handle,Handle,Handle,ProcessHandle)
#if !defined(mingw32_HOST_OS) && !defined(__MINGW32__)
runInteractiveProcess cmd args mb_cwd mb_env =
runInteractiveProcess1 "runInteractiveProcess" cmd args mb_cwd mb_env
runInteractiveProcess1 fun cmd args mb_cwd mb_env = do
withFilePathException cmd $
alloca $ \ pfdStdInput ->
alloca $ \ pfdStdOutput ->
alloca $ \ pfdStdError ->
maybeWith withCEnvironment mb_env $ \pEnv ->
maybeWith withCString mb_cwd $ \pWorkDir ->
withMany withCString (cmd:args) $ \cstrs ->
withArray0 nullPtr cstrs $ \pargs -> do
proc_handle <- throwErrnoIfMinus1 fun
(c_runInteractiveProcess pargs pWorkDir pEnv
pfdStdInput pfdStdOutput pfdStdError)
hndStdInput <- fdToHandle pfdStdInput WriteMode
hndStdOutput <- fdToHandle pfdStdOutput ReadMode
hndStdError <- fdToHandle pfdStdError ReadMode
ph <- mkProcessHandle proc_handle
return (hndStdInput, hndStdOutput, hndStdError, ph)
foreign import ccall unsafe "runInteractiveProcess"
c_runInteractiveProcess
:: Ptr CString
-> CString
-> Ptr CString
-> Ptr FD
-> Ptr FD
-> Ptr FD
-> IO PHANDLE
#else
runInteractiveProcess cmd args mb_cwd mb_env =
runInteractiveProcess1 "runInteractiveProcess" cmd args mb_cwd mb_env ""
runInteractiveProcess1 fun cmd args workDir env extra_cmdline
= withFilePathException cmd $ do
let cmdline = translate cmd ++
concat (map ((' ':) . translate) args) ++
(if null extra_cmdline then "" else ' ':extra_cmdline)
withCString cmdline $ \pcmdline ->
alloca $ \ pfdStdInput ->
alloca $ \ pfdStdOutput ->
alloca $ \ pfdStdError -> do
maybeWith withCEnvironment env $ \pEnv -> do
maybeWith withCString workDir $ \pWorkDir -> do
proc_handle <- throwErrnoIfMinus1 fun $
c_runInteractiveProcess pcmdline pWorkDir pEnv
pfdStdInput pfdStdOutput pfdStdError
hndStdInput <- fdToHandle pfdStdInput WriteMode
hndStdOutput <- fdToHandle pfdStdOutput ReadMode
hndStdError <- fdToHandle pfdStdError ReadMode
ph <- mkProcessHandle proc_handle
return (hndStdInput, hndStdOutput, hndStdError, ph)
foreign import ccall unsafe "runInteractiveProcess"
c_runInteractiveProcess
:: CString
-> CString
-> Ptr ()
-> Ptr FD
-> Ptr FD
-> Ptr FD
-> IO PHANDLE
#endif
fdToHandle :: Ptr FD -> IOMode -> IO Handle
fdToHandle pfd mode = do
fd <- peek pfd
openFd fd (Just Stream)
False{-not a socket-}
("fd:" ++ show fd) mode True{-binary-}
-- ----------------------------------------------------------------------------
waitForProcess
| Waits for the specified process to terminate , and returns its exit code .
GHC Note : in order to call @waitForProcess@ without blocking all the
other threads in the system , you must compile the program with
@-threaded@.
GHC Note: in order to call @waitForProcess@ without blocking all the
other threads in the system, you must compile the program with
@-threaded@.
-}
waitForProcess
:: ProcessHandle
-> IO ExitCode
waitForProcess ph = do
p_ <- withProcessHandle ph $ \p_ -> return (p_,p_)
case p_ of
ClosedHandle e -> return e
OpenHandle h -> do
do n't hold the MVar while we call c_waitForProcess ...
-- (XXX but there's a small race window here during which another
thread could close the handle or call waitForProcess )
code <- throwErrnoIfMinus1 "waitForProcess" (c_waitForProcess h)
withProcessHandle ph $ \p_ ->
case p_ of
ClosedHandle e -> return (p_,e)
OpenHandle ph -> do
closePHANDLE ph
let e = if (code == 0)
then ExitSuccess
else (ExitFailure (fromIntegral code))
return (ClosedHandle e, e)
-- ----------------------------------------------------------------------------
-- terminateProcess
-- | Attempts to terminate the specified process. This function should
-- not be used under normal circumstances - no guarantees are given regarding
-- how cleanly the process is terminated. To check whether the process
-- has indeed terminated, use 'getProcessExitCode'.
--
On Unix systems , ' terminateProcess ' sends the process the SIGKILL signal .
On Windows systems , the Win32 @TerminateProcess@ function is called , passing
an exit code of 1 .
terminateProcess :: ProcessHandle -> IO ()
terminateProcess ph = do
withProcessHandle_ ph $ \p_ ->
case p_ of
ClosedHandle _ -> return p_
OpenHandle h -> do
throwErrnoIfMinus1_ "terminateProcess" $ c_terminateProcess h
return p_
-- does not close the handle, we might want to try terminating it
-- again, or get its exit code.
-- ----------------------------------------------------------------------------
-- getProcessExitCode
|
This is a non - blocking version of ' waitForProcess ' . If the process is
still running , ' Nothing ' is returned . If the process has exited , then
@'Just ' e@ is returned where @e@ is the exit code of the process .
Subsequent calls to @getProcessExitStatus@ always return @'Just '
' ExitSuccess'@ , regardless of what the original exit code was .
This is a non-blocking version of 'waitForProcess'. If the process is
still running, 'Nothing' is returned. If the process has exited, then
@'Just' e@ is returned where @e@ is the exit code of the process.
Subsequent calls to @getProcessExitStatus@ always return @'Just'
'ExitSuccess'@, regardless of what the original exit code was.
-}
getProcessExitCode :: ProcessHandle -> IO (Maybe ExitCode)
getProcessExitCode ph = do
withProcessHandle ph $ \p_ ->
case p_ of
ClosedHandle e -> return (p_, Just e)
OpenHandle h ->
alloca $ \pExitCode -> do
res <- throwErrnoIfMinus1 "getProcessExitCode" $
c_getProcessExitCode h pExitCode
code <- peek pExitCode
if res == 0
then return (p_, Nothing)
else do
closePHANDLE h
let e | code == 0 = ExitSuccess
| otherwise = ExitFailure (fromIntegral code)
return (ClosedHandle e, Just e)
-- ----------------------------------------------------------------------------
Interface to C bits
foreign import ccall unsafe "terminateProcess"
c_terminateProcess
:: PHANDLE
-> IO CInt
foreign import ccall unsafe "getProcessExitCode"
c_getProcessExitCode
:: PHANDLE
-> Ptr CInt
-> IO CInt
NB . safe - can block
c_waitForProcess
:: PHANDLE
-> IO CInt
| null | https://raw.githubusercontent.com/FranklinChen/hugs98-plus-Sep2006/54ab69bd6313adbbed1d790b46aca2a0305ea67e/packages/base/System/Process.hs | haskell | ---------------------------------------------------------------------------
|
Module : System.Process
License : BSD-style (see the file libraries/base/LICENSE)
Maintainer :
Stability : experimental
Portability : portable
---------------------------------------------------------------------------
* Flag to control whether exiting the parent also kills the child.
* Running sub-processes
* Process completion
----------------------------------------------------------------------------
| Runs a command using the shell.
----------------------------------------------------------------------------
runProcess
^ Filename of the executable
^ Arguments to pass to the executable
^ Optional path to the working directory
^ Optional environment (otherwise inherit)
^ Handle to use for @stdin@
^ Handle to use for @stdout@
----------------------------------------------------------------------------
runInteractiveCommand
----------------------------------------------------------------------------
runInteractiveProcess
^ Filename of the executable
^ Arguments to pass to the executable
^ Optional path to the working directory
^ Optional environment (otherwise inherit)
not a socket
binary
----------------------------------------------------------------------------
(XXX but there's a small race window here during which another
----------------------------------------------------------------------------
terminateProcess
| Attempts to terminate the specified process. This function should
not be used under normal circumstances - no guarantees are given regarding
how cleanly the process is terminated. To check whether the process
has indeed terminated, use 'getProcessExitCode'.
does not close the handle, we might want to try terminating it
again, or get its exit code.
----------------------------------------------------------------------------
getProcessExitCode
---------------------------------------------------------------------------- | # OPTIONS_GHC -cpp -fffi #
Copyright : ( c ) The University of Glasgow 2004
Operations for creating and interacting with sub - processes .
ToDo :
* Windows impl of runProcess should close the Handles .
* Add system / replacements
NOTES on createPipe :
createPipe is no longer exported , because of the following problems :
- it was n't used to implement runInteractiveProcess on Unix , because
the file descriptors for the unused ends of the pipe need to be closed
in the child process .
- on Windows , a special version of createPipe is needed that sets
the inheritance flags correctly on the ends of the pipe ( see
mkAnonPipe below ) .
createPipe is no longer exported, because of the following problems:
- it wasn't used to implement runInteractiveProcess on Unix, because
the file descriptors for the unused ends of the pipe need to be closed
in the child process.
- on Windows, a special version of createPipe is needed that sets
the inheritance flags correctly on the ends of the pipe (see
mkAnonPipe below).
-}
module System.Process (
ProcessHandle,
runCommand,
runProcess,
runInteractiveCommand,
runInteractiveProcess,
waitForProcess,
getProcessExitCode,
terminateProcess,
) where
import Prelude
import System.Process.Internals
import Foreign
import Foreign.C
import System.IO ( IOMode(..), Handle, hClose )
import System.Exit ( ExitCode(..) )
import System.Posix.Internals
import GHC.IOBase ( FD )
import GHC.Handle ( openFd )
runCommand
:: String
-> IO ProcessHandle
runCommand string = do
(cmd,args) <- commandToProcess string
#if !defined(mingw32_HOST_OS) && !defined(__MINGW32__)
runProcessPosix "runCommand" cmd args Nothing Nothing Nothing Nothing Nothing
Nothing Nothing
#else
runProcessWin32 "runCommand" cmd [] Nothing Nothing Nothing Nothing Nothing args
#endif
| Runs a raw command , optionally specifying ' Handle 's from which to
take the @stdin@ , @stdout@ and @stderr@ channels for the new
process .
Any ' Handle 's passed to ' runProcess ' are placed immediately in the
closed state .
take the @stdin@, @stdout@ and @stderr@ channels for the new
process.
Any 'Handle's passed to 'runProcess' are placed immediately in the
closed state.
-}
runProcess
^ Handle to use for @stderr@
-> IO ProcessHandle
runProcess cmd args mb_cwd mb_env mb_stdin mb_stdout mb_stderr = do
#if !defined(mingw32_HOST_OS) && !defined(__MINGW32__)
h <- runProcessPosix "runProcess" cmd args mb_cwd mb_env
mb_stdin mb_stdout mb_stderr
Nothing Nothing
#else
h <- runProcessWin32 "runProcess" cmd args mb_cwd mb_env
mb_stdin mb_stdout mb_stderr ""
#endif
maybe (return ()) hClose mb_stdin
maybe (return ()) hClose mb_stdout
maybe (return ()) hClose mb_stderr
return h
| Runs a command using the shell , and returns ' Handle 's that may
be used to communicate with the process via its @stdin@ , @stdout@ ,
and @stderr@ respectively .
be used to communicate with the process via its @stdin@, @stdout@,
and @stderr@ respectively.
-}
runInteractiveCommand
:: String
-> IO (Handle,Handle,Handle,ProcessHandle)
runInteractiveCommand string = do
(cmd,args) <- commandToProcess string
#if !defined(mingw32_HOST_OS) && !defined(__MINGW32__)
runInteractiveProcess1 "runInteractiveCommand" cmd args Nothing Nothing
#else
runInteractiveProcess1 "runInteractiveCommand" cmd [] Nothing Nothing args
#endif
| Runs a raw command , and returns ' Handle 's that may be used to communicate
with the process via its @stdin@ , @stdout@ and @stderr@ respectively .
For example , to start a process and feed a string to its stdin :
> ( inp , out , err , pid ) < - runInteractiveProcess " ... "
> forkIO ( hPutStr inp str )
with the process via its @stdin@, @stdout@ and @stderr@ respectively.
For example, to start a process and feed a string to its stdin:
> (inp,out,err,pid) <- runInteractiveProcess "..."
> forkIO (hPutStr inp str)
-}
runInteractiveProcess
-> IO (Handle,Handle,Handle,ProcessHandle)
#if !defined(mingw32_HOST_OS) && !defined(__MINGW32__)
runInteractiveProcess cmd args mb_cwd mb_env =
runInteractiveProcess1 "runInteractiveProcess" cmd args mb_cwd mb_env
runInteractiveProcess1 fun cmd args mb_cwd mb_env = do
withFilePathException cmd $
alloca $ \ pfdStdInput ->
alloca $ \ pfdStdOutput ->
alloca $ \ pfdStdError ->
maybeWith withCEnvironment mb_env $ \pEnv ->
maybeWith withCString mb_cwd $ \pWorkDir ->
withMany withCString (cmd:args) $ \cstrs ->
withArray0 nullPtr cstrs $ \pargs -> do
proc_handle <- throwErrnoIfMinus1 fun
(c_runInteractiveProcess pargs pWorkDir pEnv
pfdStdInput pfdStdOutput pfdStdError)
hndStdInput <- fdToHandle pfdStdInput WriteMode
hndStdOutput <- fdToHandle pfdStdOutput ReadMode
hndStdError <- fdToHandle pfdStdError ReadMode
ph <- mkProcessHandle proc_handle
return (hndStdInput, hndStdOutput, hndStdError, ph)
foreign import ccall unsafe "runInteractiveProcess"
c_runInteractiveProcess
:: Ptr CString
-> CString
-> Ptr CString
-> Ptr FD
-> Ptr FD
-> Ptr FD
-> IO PHANDLE
#else
runInteractiveProcess cmd args mb_cwd mb_env =
runInteractiveProcess1 "runInteractiveProcess" cmd args mb_cwd mb_env ""
runInteractiveProcess1 fun cmd args workDir env extra_cmdline
= withFilePathException cmd $ do
let cmdline = translate cmd ++
concat (map ((' ':) . translate) args) ++
(if null extra_cmdline then "" else ' ':extra_cmdline)
withCString cmdline $ \pcmdline ->
alloca $ \ pfdStdInput ->
alloca $ \ pfdStdOutput ->
alloca $ \ pfdStdError -> do
maybeWith withCEnvironment env $ \pEnv -> do
maybeWith withCString workDir $ \pWorkDir -> do
proc_handle <- throwErrnoIfMinus1 fun $
c_runInteractiveProcess pcmdline pWorkDir pEnv
pfdStdInput pfdStdOutput pfdStdError
hndStdInput <- fdToHandle pfdStdInput WriteMode
hndStdOutput <- fdToHandle pfdStdOutput ReadMode
hndStdError <- fdToHandle pfdStdError ReadMode
ph <- mkProcessHandle proc_handle
return (hndStdInput, hndStdOutput, hndStdError, ph)
foreign import ccall unsafe "runInteractiveProcess"
c_runInteractiveProcess
:: CString
-> CString
-> Ptr ()
-> Ptr FD
-> Ptr FD
-> Ptr FD
-> IO PHANDLE
#endif
fdToHandle :: Ptr FD -> IOMode -> IO Handle
fdToHandle pfd mode = do
fd <- peek pfd
openFd fd (Just Stream)
waitForProcess
| Waits for the specified process to terminate , and returns its exit code .
GHC Note : in order to call @waitForProcess@ without blocking all the
other threads in the system , you must compile the program with
@-threaded@.
GHC Note: in order to call @waitForProcess@ without blocking all the
other threads in the system, you must compile the program with
@-threaded@.
-}
waitForProcess
:: ProcessHandle
-> IO ExitCode
waitForProcess ph = do
p_ <- withProcessHandle ph $ \p_ -> return (p_,p_)
case p_ of
ClosedHandle e -> return e
OpenHandle h -> do
do n't hold the MVar while we call c_waitForProcess ...
thread could close the handle or call waitForProcess )
code <- throwErrnoIfMinus1 "waitForProcess" (c_waitForProcess h)
withProcessHandle ph $ \p_ ->
case p_ of
ClosedHandle e -> return (p_,e)
OpenHandle ph -> do
closePHANDLE ph
let e = if (code == 0)
then ExitSuccess
else (ExitFailure (fromIntegral code))
return (ClosedHandle e, e)
On Unix systems , ' terminateProcess ' sends the process the SIGKILL signal .
On Windows systems , the Win32 @TerminateProcess@ function is called , passing
an exit code of 1 .
terminateProcess :: ProcessHandle -> IO ()
terminateProcess ph = do
withProcessHandle_ ph $ \p_ ->
case p_ of
ClosedHandle _ -> return p_
OpenHandle h -> do
throwErrnoIfMinus1_ "terminateProcess" $ c_terminateProcess h
return p_
|
This is a non - blocking version of ' waitForProcess ' . If the process is
still running , ' Nothing ' is returned . If the process has exited , then
@'Just ' e@ is returned where @e@ is the exit code of the process .
Subsequent calls to @getProcessExitStatus@ always return @'Just '
' ExitSuccess'@ , regardless of what the original exit code was .
This is a non-blocking version of 'waitForProcess'. If the process is
still running, 'Nothing' is returned. If the process has exited, then
@'Just' e@ is returned where @e@ is the exit code of the process.
Subsequent calls to @getProcessExitStatus@ always return @'Just'
'ExitSuccess'@, regardless of what the original exit code was.
-}
getProcessExitCode :: ProcessHandle -> IO (Maybe ExitCode)
getProcessExitCode ph = do
withProcessHandle ph $ \p_ ->
case p_ of
ClosedHandle e -> return (p_, Just e)
OpenHandle h ->
alloca $ \pExitCode -> do
res <- throwErrnoIfMinus1 "getProcessExitCode" $
c_getProcessExitCode h pExitCode
code <- peek pExitCode
if res == 0
then return (p_, Nothing)
else do
closePHANDLE h
let e | code == 0 = ExitSuccess
| otherwise = ExitFailure (fromIntegral code)
return (ClosedHandle e, Just e)
Interface to C bits
foreign import ccall unsafe "terminateProcess"
c_terminateProcess
:: PHANDLE
-> IO CInt
foreign import ccall unsafe "getProcessExitCode"
c_getProcessExitCode
:: PHANDLE
-> Ptr CInt
-> IO CInt
NB . safe - can block
c_waitForProcess
:: PHANDLE
-> IO CInt
|
7c3903332e76dd82697e86d2fd7318d699f720caa8d1ef91c386ece0f3e08e5a | google/lisp-koans | std-method-comb.lisp | Copyright 2013 Google Inc.
;;;
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
;;; you may not use this file except in compliance with the License.
;;; You may obtain a copy of the License at
;;;
;;; -2.0
;;;
;;; Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
;;; WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
;;; See the License for the specific language governing permissions and
;;; limitations under the License.
(defclass access-counter ()
((value :accessor value :initarg :value)
(access-count :reader access-count :initform 0)))
;;; The generated reader, writer, and accessor functions are generic functions.
;;; The methods of a generic function are combined using a method combination;
;;; by default, the standard method combination is used.
This allows us to define : and : AFTER methods whose code is executed
;;; before or after the primary method, and whose return values are discarded.
;;; The :BEFORE and :AFTER keywords used in this context are called qualifiers.
(defmethod value :after ((object access-counter))
(incf (slot-value object 'access-count)))
(defmethod (setf value) :after (new-value (object access-counter))
(incf (slot-value object 'access-count)))
(define-test defmethod-after
(let ((counter (make-instance 'access-counter :value 42)))
(assert-equal ____ (access-count counter))
(assert-equal ____ (value counter))
(assert-equal ____ (access-count counter))
(setf (value counter) 24)
(assert-equal ____ (access-count counter))
(assert-equal ____ (value counter))
(assert-equal ____ (access-count counter))
We read the value three more times and discard the result .
(value counter)
(value counter)
(value counter)
(assert-equal ____ (access-count counter))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; In addition to :BEFORE and :AFTER methods is also possible to write :AROUND
;;; methods, which execute instead of the primary methods. In such context, it
;;; is possible to call the primary method via CALL-NEXT-METHOD.
;;; In the standard method combination, the :AROUND method, if one exists, is
executed first , and it may choose whether and how to call next methods .
(defgeneric grab-lollipop ()
(:method () :lollipop))
(defgeneric grab-lollipop-while-mom-is-nearby (was-nice-p)
(:method :around (was-nice-p) (if was-nice-p (call-next-method) :no-lollipop))
(:method (was-nice-p) (declare (ignore was-nice-p)) :lollipop))
(define-test lollipop
(assert-equal ____ (grab-lollipop))
(assert-equal ____ (grab-lollipop-while-mom-is-nearby t))
(assert-equal ____ (grab-lollipop-while-mom-is-nearby nil)))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defclass countdown ()
;; The countdown object represents an ongoing countdown. Each time the
REMAINING - TIME function is called , it should return a number one less than
the previous time that it returned . If the countdown hits zero , : BANG
;; should be returned instead.
((remaining-time :reader remaining-time :initarg :time)))
(defmethod remaining-time :around ((object countdown))
(let ((time (call-next-method)))
(if (< 0 time)
PROG1 returns the value of the first expression in the sequence .
DECF is similar to INCF . It decreases the value stored in the place
;; and returns the decreased value.
(prog1
time
(decf (slot-value object 'remaining-time)))
:bang)))
(define-test countdown
(let ((countdown (make-instance 'countdown :time 4)))
(assert-equal 4 (remaining-time countdown))
(assert-equal ____ (remaining-time countdown))
(assert-equal ____ (remaining-time countdown))
(assert-equal ____ (remaining-time countdown))
(assert-equal ____ (remaining-time countdown))
(assert-equal ____ (remaining-time countdown))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; It is possible for multiple :BEFORE, :AFTER, :AROUND, or primary methods to
;;; be executed in a single method call.
(defclass object ()
((counter :accessor counter :initform 0)))
(defclass bigger-object (object) ())
(defgeneric frobnicate (x)
(:method :around ((x bigger-object))
(incf (counter x) 8)
(call-next-method))
(:method :around ((x object))
(incf (counter x) 70)
(call-next-method))
(:method :before ((x bigger-object))
(incf (counter x) 600))
(:method :before ((x object))
(incf (counter x) 5000))
(:method ((x bigger-object))
(incf (counter x) 40000)
(call-next-method))
(:method ((x object))
(incf (counter x) 300000))
(:method :after ((x object))
(incf (counter x) 2000000))
(:method :after ((x bigger-object))
(incf (counter x) 10000000)))
(define-test multiple-methods
(let ((object (make-instance 'object)))
(frobnicate object)
(assert-equal ____ (counter object)))
(let ((object (make-instance 'bigger-object)))
(frobnicate object)
(assert-equal ____ (counter object))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;; The method order of the standard combination is as follows:
First , the most specific : AROUND method is executed .
Second , all : BEFORE methods are executed , most specific first .
;;; Third, the most specific primary method is executed.
;;; Fourth, all :AFTER methods are executed, most specific last.
(defgeneric calculate (x)
(:method :around ((x bigger-object))
(setf (counter x) 40)
(call-next-method))
(:method :around ((x object))
(incf (counter x) 24)
(call-next-method))
(:method :before ((x bigger-object))
(setf (counter x) (mod (counter x) 6)))
(:method :before ((x object))
(setf (counter x) (/ (counter x) 4)))
(:method ((x bigger-object))
(setf (counter x) (* (counter x) (counter x)))
(call-next-method))
(:method ((x object))
(decf (counter x) 100))
(:method :after ((x object))
(setf (counter x) (/ 1 (counter x))))
(:method :after ((x bigger-object))
(incf (counter x) 2)))
(define-test standard-method-combination-order
(let ((object (make-instance 'object)))
(calculate object)
(assert-equal ____ (counter object)))
(let ((object (make-instance 'bigger-object)))
(calculate object)
(assert-equal ____ (counter object))))
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
(defclass programmer () ())
(defclass senior-programmer (programmer) ())
(defclass full-stack-programmer (programmer) ())
(defclass senior-full-stack-programmer (senior-programmer
full-stack-programmer)
())
The : BEFORE , : AFTER , and : AROUND methods are only available in the standard
;;; method combination. It is possible to use other method combinations, such as
;;; +.
(defgeneric salary-at-company-a (programmer)
(:method-combination +)
(:method + ((programmer programmer)) 120000)
(:method + ((programmer senior-programmer)) 200000)
(:method + ((programmer full-stack-programmer)) 48000))
(define-test salary-at-company-a
(let ((programmer (make-instance 'programmer)))
(assert-equal ____ (salary-at-company-a programmer)))
(let ((programmer (make-instance 'senior-programmer)))
(assert-equal ____ (salary-at-company-a programmer)))
(let ((programmer (make-instance 'full-stack-programmer)))
(assert-equal ____ (salary-at-company-a programmer)))
(let ((programmer (make-instance 'senior-full-stack-programmer)))
(assert-equal ____ (salary-at-company-a programmer))))
;;; It is also possible to define custom method combinations.
(define-method-combination multiply :operator *)
(defgeneric salary-at-company-b (programmer)
(:method-combination multiply)
(:method multiply ((programmer programmer)) 120000)
(:method multiply ((programmer senior-programmer)) 2)
(:method multiply ((programmer full-stack-programmer)) 7/5))
(define-test salary-at-company-b
(let ((programmer (make-instance 'programmer)))
(assert-equal ____ (salary-at-company-b programmer)))
(let ((programmer (make-instance 'senior-programmer)))
(assert-equal ____ (salary-at-company-b programmer)))
(let ((programmer (make-instance 'full-stack-programmer)))
(assert-equal ____ (salary-at-company-b programmer)))
(let ((programmer (make-instance 'senior-full-stack-programmer)))
(assert-equal ____ (salary-at-company-b programmer))))
| null | https://raw.githubusercontent.com/google/lisp-koans/57b901f8d4b16d66696896a745110b7561120da3/koans/std-method-comb.lisp | lisp |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
The generated reader, writer, and accessor functions are generic functions.
The methods of a generic function are combined using a method combination;
by default, the standard method combination is used.
before or after the primary method, and whose return values are discarded.
The :BEFORE and :AFTER keywords used in this context are called qualifiers.
In addition to :BEFORE and :AFTER methods is also possible to write :AROUND
methods, which execute instead of the primary methods. In such context, it
is possible to call the primary method via CALL-NEXT-METHOD.
In the standard method combination, the :AROUND method, if one exists, is
The countdown object represents an ongoing countdown. Each time the
should be returned instead.
and returns the decreased value.
It is possible for multiple :BEFORE, :AFTER, :AROUND, or primary methods to
be executed in a single method call.
The method order of the standard combination is as follows:
Third, the most specific primary method is executed.
Fourth, all :AFTER methods are executed, most specific last.
method combination. It is possible to use other method combinations, such as
+.
It is also possible to define custom method combinations. | Copyright 2013 Google Inc.
distributed under the License is distributed on an " AS IS " BASIS ,
(defclass access-counter ()
((value :accessor value :initarg :value)
(access-count :reader access-count :initform 0)))
This allows us to define : and : AFTER methods whose code is executed
(defmethod value :after ((object access-counter))
(incf (slot-value object 'access-count)))
(defmethod (setf value) :after (new-value (object access-counter))
(incf (slot-value object 'access-count)))
(define-test defmethod-after
(let ((counter (make-instance 'access-counter :value 42)))
(assert-equal ____ (access-count counter))
(assert-equal ____ (value counter))
(assert-equal ____ (access-count counter))
(setf (value counter) 24)
(assert-equal ____ (access-count counter))
(assert-equal ____ (value counter))
(assert-equal ____ (access-count counter))
We read the value three more times and discard the result .
(value counter)
(value counter)
(value counter)
(assert-equal ____ (access-count counter))))
executed first , and it may choose whether and how to call next methods .
(defgeneric grab-lollipop ()
(:method () :lollipop))
(defgeneric grab-lollipop-while-mom-is-nearby (was-nice-p)
(:method :around (was-nice-p) (if was-nice-p (call-next-method) :no-lollipop))
(:method (was-nice-p) (declare (ignore was-nice-p)) :lollipop))
(define-test lollipop
(assert-equal ____ (grab-lollipop))
(assert-equal ____ (grab-lollipop-while-mom-is-nearby t))
(assert-equal ____ (grab-lollipop-while-mom-is-nearby nil)))
(defclass countdown ()
REMAINING - TIME function is called , it should return a number one less than
the previous time that it returned . If the countdown hits zero , : BANG
((remaining-time :reader remaining-time :initarg :time)))
(defmethod remaining-time :around ((object countdown))
(let ((time (call-next-method)))
(if (< 0 time)
PROG1 returns the value of the first expression in the sequence .
DECF is similar to INCF . It decreases the value stored in the place
(prog1
time
(decf (slot-value object 'remaining-time)))
:bang)))
(define-test countdown
(let ((countdown (make-instance 'countdown :time 4)))
(assert-equal 4 (remaining-time countdown))
(assert-equal ____ (remaining-time countdown))
(assert-equal ____ (remaining-time countdown))
(assert-equal ____ (remaining-time countdown))
(assert-equal ____ (remaining-time countdown))
(assert-equal ____ (remaining-time countdown))))
(defclass object ()
((counter :accessor counter :initform 0)))
(defclass bigger-object (object) ())
(defgeneric frobnicate (x)
(:method :around ((x bigger-object))
(incf (counter x) 8)
(call-next-method))
(:method :around ((x object))
(incf (counter x) 70)
(call-next-method))
(:method :before ((x bigger-object))
(incf (counter x) 600))
(:method :before ((x object))
(incf (counter x) 5000))
(:method ((x bigger-object))
(incf (counter x) 40000)
(call-next-method))
(:method ((x object))
(incf (counter x) 300000))
(:method :after ((x object))
(incf (counter x) 2000000))
(:method :after ((x bigger-object))
(incf (counter x) 10000000)))
(define-test multiple-methods
(let ((object (make-instance 'object)))
(frobnicate object)
(assert-equal ____ (counter object)))
(let ((object (make-instance 'bigger-object)))
(frobnicate object)
(assert-equal ____ (counter object))))
First , the most specific : AROUND method is executed .
Second , all : BEFORE methods are executed , most specific first .
(defgeneric calculate (x)
(:method :around ((x bigger-object))
(setf (counter x) 40)
(call-next-method))
(:method :around ((x object))
(incf (counter x) 24)
(call-next-method))
(:method :before ((x bigger-object))
(setf (counter x) (mod (counter x) 6)))
(:method :before ((x object))
(setf (counter x) (/ (counter x) 4)))
(:method ((x bigger-object))
(setf (counter x) (* (counter x) (counter x)))
(call-next-method))
(:method ((x object))
(decf (counter x) 100))
(:method :after ((x object))
(setf (counter x) (/ 1 (counter x))))
(:method :after ((x bigger-object))
(incf (counter x) 2)))
(define-test standard-method-combination-order
(let ((object (make-instance 'object)))
(calculate object)
(assert-equal ____ (counter object)))
(let ((object (make-instance 'bigger-object)))
(calculate object)
(assert-equal ____ (counter object))))
(defclass programmer () ())
(defclass senior-programmer (programmer) ())
(defclass full-stack-programmer (programmer) ())
(defclass senior-full-stack-programmer (senior-programmer
full-stack-programmer)
())
The : BEFORE , : AFTER , and : AROUND methods are only available in the standard
(defgeneric salary-at-company-a (programmer)
(:method-combination +)
(:method + ((programmer programmer)) 120000)
(:method + ((programmer senior-programmer)) 200000)
(:method + ((programmer full-stack-programmer)) 48000))
(define-test salary-at-company-a
(let ((programmer (make-instance 'programmer)))
(assert-equal ____ (salary-at-company-a programmer)))
(let ((programmer (make-instance 'senior-programmer)))
(assert-equal ____ (salary-at-company-a programmer)))
(let ((programmer (make-instance 'full-stack-programmer)))
(assert-equal ____ (salary-at-company-a programmer)))
(let ((programmer (make-instance 'senior-full-stack-programmer)))
(assert-equal ____ (salary-at-company-a programmer))))
(define-method-combination multiply :operator *)
(defgeneric salary-at-company-b (programmer)
(:method-combination multiply)
(:method multiply ((programmer programmer)) 120000)
(:method multiply ((programmer senior-programmer)) 2)
(:method multiply ((programmer full-stack-programmer)) 7/5))
(define-test salary-at-company-b
(let ((programmer (make-instance 'programmer)))
(assert-equal ____ (salary-at-company-b programmer)))
(let ((programmer (make-instance 'senior-programmer)))
(assert-equal ____ (salary-at-company-b programmer)))
(let ((programmer (make-instance 'full-stack-programmer)))
(assert-equal ____ (salary-at-company-b programmer)))
(let ((programmer (make-instance 'senior-full-stack-programmer)))
(assert-equal ____ (salary-at-company-b programmer))))
|
24d2a44d42d96b2fee5231866abb932dcaf9ae51193768823bc59371b832e3b5 | input-output-hk/cardano-sl | EpochIndex.hs | # LANGUAGE RecordWildCards #
module Pos.Core.Slotting.EpochIndex
( EpochIndex (..)
, HasEpochIndex (..)
, isBootstrapEra
) where
import Universum
import Control.Lens (choosing)
import qualified Data.Aeson as Aeson (FromJSON (..), ToJSON (..))
import Data.Ix (Ix)
import Data.SafeCopy (base, deriveSafeCopySimple)
import Formatting (bprint, int, (%))
import qualified Formatting.Buildable as Buildable
import Servant.API (FromHttpApiData)
import Text.JSON.Canonical (FromJSON (..), ReportSchemaErrors,
ToJSON (..))
import Pos.Binary.Class (Bi (..))
import Pos.Util.Json.Canonical ()
import Pos.Util.Some (Some, liftLensSome)
-- | Index of epoch.
newtype EpochIndex = EpochIndex
{ getEpochIndex :: Word64
} deriving (Show, Eq, Ord, Num, Enum, Ix, Integral, Real, Generic, Hashable, Bounded, Typeable, NFData)
instance Buildable EpochIndex where
build = bprint ("#"%int)
instance Bi EpochIndex where
encode (EpochIndex epoch) = encode epoch
decode = EpochIndex <$> decode
deriving instance FromHttpApiData EpochIndex
Note that it will be encoded as string , because ' EpochIndex '
does n't necessary fit into JS number .
instance Monad m => ToJSON m EpochIndex where
toJSON = toJSON . getEpochIndex
deriving instance Aeson.FromJSON EpochIndex
deriving instance Aeson.ToJSON EpochIndex
class HasEpochIndex a where
epochIndexL :: Lens' a EpochIndex
instance HasEpochIndex (Some HasEpochIndex) where
epochIndexL = liftLensSome epochIndexL
instance (HasEpochIndex a, HasEpochIndex b) =>
HasEpochIndex (Either a b) where
epochIndexL = choosing epochIndexL epochIndexL
instance ReportSchemaErrors m => FromJSON m EpochIndex where
fromJSON = fmap EpochIndex . fromJSON
-- | Bootstrap era is ongoing until stakes are unlocked. The reward era starts
-- from the epoch specified as the epoch that unlocks stakes:
--
-- @
-- [unlock stake epoch]
-- /
Epoch : ... E-3 E-2 E-1 E+0 E+1 E+2 E+3 ...
-- ------------------ | -----------------------
era Reward era
-- @
--
--
-- | This function has been stubbed out to always return True, since
-- this codebase will not be decentralized.
isBootstrapEra
:: EpochIndex -- ^ Unlock stake epoch
-> EpochIndex -- ^ Epoch in question (for which we determine whether it
-- belongs to the bootstrap era).
-> Bool
isBootstrapEra _unlockStakeEpoch _epoch = True
deriveSafeCopySimple 0 'base ''EpochIndex
| null | https://raw.githubusercontent.com/input-output-hk/cardano-sl/1499214d93767b703b9599369a431e67d83f10a2/core/src/Pos/Core/Slotting/EpochIndex.hs | haskell | | Index of epoch.
| Bootstrap era is ongoing until stakes are unlocked. The reward era starts
from the epoch specified as the epoch that unlocks stakes:
@
[unlock stake epoch]
/
------------------ | -----------------------
@
| This function has been stubbed out to always return True, since
this codebase will not be decentralized.
^ Unlock stake epoch
^ Epoch in question (for which we determine whether it
belongs to the bootstrap era). | # LANGUAGE RecordWildCards #
module Pos.Core.Slotting.EpochIndex
( EpochIndex (..)
, HasEpochIndex (..)
, isBootstrapEra
) where
import Universum
import Control.Lens (choosing)
import qualified Data.Aeson as Aeson (FromJSON (..), ToJSON (..))
import Data.Ix (Ix)
import Data.SafeCopy (base, deriveSafeCopySimple)
import Formatting (bprint, int, (%))
import qualified Formatting.Buildable as Buildable
import Servant.API (FromHttpApiData)
import Text.JSON.Canonical (FromJSON (..), ReportSchemaErrors,
ToJSON (..))
import Pos.Binary.Class (Bi (..))
import Pos.Util.Json.Canonical ()
import Pos.Util.Some (Some, liftLensSome)
newtype EpochIndex = EpochIndex
{ getEpochIndex :: Word64
} deriving (Show, Eq, Ord, Num, Enum, Ix, Integral, Real, Generic, Hashable, Bounded, Typeable, NFData)
instance Buildable EpochIndex where
build = bprint ("#"%int)
instance Bi EpochIndex where
encode (EpochIndex epoch) = encode epoch
decode = EpochIndex <$> decode
deriving instance FromHttpApiData EpochIndex
Note that it will be encoded as string , because ' EpochIndex '
does n't necessary fit into JS number .
instance Monad m => ToJSON m EpochIndex where
toJSON = toJSON . getEpochIndex
deriving instance Aeson.FromJSON EpochIndex
deriving instance Aeson.ToJSON EpochIndex
class HasEpochIndex a where
epochIndexL :: Lens' a EpochIndex
instance HasEpochIndex (Some HasEpochIndex) where
epochIndexL = liftLensSome epochIndexL
instance (HasEpochIndex a, HasEpochIndex b) =>
HasEpochIndex (Either a b) where
epochIndexL = choosing epochIndexL epochIndexL
instance ReportSchemaErrors m => FromJSON m EpochIndex where
fromJSON = fmap EpochIndex . fromJSON
Epoch : ... E-3 E-2 E-1 E+0 E+1 E+2 E+3 ...
era Reward era
isBootstrapEra
-> Bool
isBootstrapEra _unlockStakeEpoch _epoch = True
deriveSafeCopySimple 0 'base ''EpochIndex
|
9d46243b303d324cf3e74f808cb2739bbec99e9db3bac46099d3ad0aa1b46dc6 | openmusic-project/RQ | interface.lisp | ;;; Interfaces for k-best quantification
;; Most of the functions here are now obsolete (everything replaced by the GUI)
(use-package :om :rq)
(in-package :rq)
(export '(k-best-quantify) :rq)
(defun to-input (onsets durations schema sig tempo)
"Takes a list of onsets and a list of durations (of same length) and returns an input object"
(let ((mesdur (* (* (first sig) (/ 4 (second sig))) (/ 60 tempo) 1000)))
;creates the output list and the mask from the onsets and durations
(labels ((out-mask (onsets durations output mask)
(if (or (null onsets) (null durations))
(values (append output (list mesdur)) (append mask (list -1)))
(let ((on1 (first onsets))
(dur1 (first durations))
(on2 (second onsets))
(dur2 (second durations)))
(if (or (null on2) (null dur2)) ;on1 is the last note
(out-mask (cdr onsets) (cdr durations) (append output (list on1 (+ on1 dur1))) (append mask (list 1 -1)))
if there is a silence between the current note and the next one
(out-mask (cdr onsets) (cdr durations) (append output (list on1 (+ on1 dur1))) (append mask (list 1 -1)))
(out-mask (cdr onsets) (cdr durations) (append output (list on1)) (append mask (list 1)))))))))
(multiple-value-bind (output mask) (out-mask onsets durations nil nil)
(input-make schema (coerce output 'vector) (coerce mask 'vector))))))
(defun cut-measures (onsets durations signatures tempo)
" cuts the onsets and durations lists in measures
the note series has to be monophonic, otherwise, the results are not guaranteed.
output onsets are given as the duration between the beginning of the measure and the onset of the note
OUTPUTS :
on-mes holds the onsets grouped in measures
dur-mes holds the durations grouped in measures
slur is a list : the nth element is 1 if the last note of the nth measure is slurred with the next one, nil otherwise"
(labels ((make-measures (onsets durations sig temp &optional (on-mes nil) (dur-mes nil) (slur nil) (sigs nil) (temps nil))
(let* ((current-sig (first sig))
(current-temp (first temp))
(mes-duration (* (* (first current-sig) (/ 4 (second current-sig))) (/ 60 current-temp) 1000)) ;duration of a measure in ms
(end mes-duration) ;the beginning of the measure is always 0
index of the first element of the next measures
(last-on (if index (nth (1- index) onsets) (car (last onsets)))) ;if there are no elements in the next measures (ie current measure is last), last on is the last onset, otherwise it is the index-th element of onsets
(last-dur (if index (nth (1- index) durations) (car (last durations)))) ;idem
(exceed (> (+ last-on last-dur) end)) ;true iff the last note of the measure lasts after the end of the measure
if there are not enough signatures or tempo given , we continue with the last one
sig
(rest sig)))
(new-temp (if (null (rest temp))
temp
(rest temp))))
(if (null exceed)
(if (null index)
; the current measure is the last : there are no notes after the end of the measure
(values (append on-mes (list onsets)) (append dur-mes (list durations)) (append slur (list nil)) (append sigs (list current-sig)) (append temps (list current-temp)))
;the current measure is not the last, we iterate
(make-measures (update-onsets (subseq onsets index) mes-duration) (subseq durations index) new-sig new-temp (append on-mes (list (subseq onsets 0 index))) (append dur-mes (list (subseq durations 0 index))) (append slur (list nil)) (append sigs (list current-sig)) (append temps (list current-temp)) ))
if the last note is cut by the measure bar , we cut it into two notes , and put 1 in slur ( the measures are slurred )
(let* ((new-dur (- end last-on))
(excess (- last-dur new-dur)))
(if (null index)
;if there are no notes after, we just add a measure for the end of the note
(values (append on-mes (list onsets) (list (list 0))) (append dur-mes (list (append (butlast durations) (list new-dur))) (list (list excess))) (append slur (list 1 nil)) (append sigs (list current-sig (car new-sig))) (append temps (list current-temp (car new-temp))) )
;if there are notes after, we iterate
(make-measures (append (list 0) (update-onsets (subseq onsets index) mes-duration)) (append (list excess) (subseq durations index)) new-sig new-temp (append on-mes (list (subseq onsets 0 index))) (append dur-mes (list (append (subseq durations 0 (1- index)) (list new-dur)))) (append slur (list 1)) (append sigs (list current-sig)) (append temps (list current-temp))))))))
(update-onsets (onsets measure)
(mapcar #'(lambda (x) (- x measure)) onsets)))
(if (listp (car signatures)) ;if a list of signatures is given
(if (listp tempo) ;if a list of tempo is given
() ;we do nothing
if a list of signatures is provided , but only 1 tempo is given
(setq tempo (make-list (length signatures) :initial-element tempo)))
if only one signature is given
(if (listp tempo)
(setq signatures (make-list (length tempo) :initial-element signatures))
if only one signature and one tempo is given
(progn
(setq signatures (list signatures))
(setq tempo (list tempo)))))
(make-measures onsets (remove-overlap onsets (normalize-durations durations)) signatures tempo)))
(defun normalize-durations (durations &optional (buffer nil))
"if durations is the output of a chord-seq, it is a list of lists of length 1, otherwise, the input is a normal list
This function transforms the durations output of a chord-seq into a normal list
The chord-seq has to be monophonic"
(if (null durations)
buffer
(let ((current (car durations)))
(if (listp current)
(normalize-durations (rest durations) (append buffer (list (reduce #'max current))))
(normalize-durations (rest durations) (append buffer (list current)))))))
(defun remove-overlap (onsets durations &optional (dur-buf nil))
(let ((on1 (first onsets))
(dur1 (first durations))
(on2 (second onsets))
(dur2 (second durations)))
(if (or (null on2) (null dur2))
(append dur-buf (list dur1))
(if (< (+ on1 dur1) on2) ;if there is a silence between the current note and the next one)
(remove-overlap (rest onsets) (rest durations) (append dur-buf (list dur1)))
(remove-overlap (rest onsets) (rest durations) (append dur-buf (list (- on2 on1))))))))
(defun create-inputs (onsets durations schema sigs tempo schema-measure?)
"returns a list of input objects, each corresponding to a measure"
(labels ((create-schemas (siglist schema)
(mapcar #'(lambda (sig) (append (list (car sig)) schema)) siglist)))
(multiple-value-bind (onlist durlist slurs siglist templist) (cut-measures onsets durations sigs tempo)
(if schema-measure?
(values (mapcar #'to-input onlist durlist (make-list (length onlist) :initial-element schema) siglist templist) slurs siglist templist)
(values (mapcar #'to-input onlist durlist (create-schemas siglist schema) siglist templist) slurs siglist templist)))))
(om::defmethod! k-best-quantify (onsets durations k schema sigs tempo &optional (precision 0.5) (schema-measure? nil))
:initvals '((0) (1000) 10 '(4 2 2) '(4 4) 60 0.5 nil)
:indoc '("onsets" "durations" "number of solutions" "schema" "signatures" "tempo" "precision (0.0-1.0)" "schema-measure?")
:icon '(252)
:doc "Quantizes a list of onsets and a list of durations. The output is given as a list of possible rhythm trees.
<onsets> : a list of onsets in 1/1000th of a second
<dusations> : a list of durations in 1/1000th of a second
<k> : number of output solutions
<schema> : a subdivision schema applied to each time unit (by default : each quarter note). A subdivision schema is given in the form of a list. For example : (2 2 3) means that each time unit is divided in two, then each part is again divided in two, and finally, each of these parts is divided in 3.
If an element of the list is a list itself, it means that the choice is given between the values of the list. For example : (2 (2 3 5) 3) means (2 2 3) or (2 3 3) or (2 5 3).
If an element of the list is a list of lists, it means that the choice is given between various successions of subdivisions. For example : ( ((2 3) (3 2)) ) means (2 3) or (3 2).
Example : ((2 3) ((2 3) ((3 5) 2))) means (2 2 3) or (2 3 2) or (2 5 2) or (3 2 3) or (3 3 2) or (3 5 2).
Non prime numbers can be used as, for exemple, dividing in 2 then in 2 gives a different notation than dividing in 4.
<sigs> : the signature to be used for each measure. If there are various signatures, a list of signatures can be given (ex : ((3 4) (4 4)) means the signature of the first measure is (3 4), and the signature of the following measures is (4 4))
<tempo> : the tempo to be used. If the tempo changes from measure to measure, a list of tempo can be given (ex : (60 50) means the tempo of the first measure is 60, and the tempo of the following measures is 50)
<precision> : a float in (0.0 .. 1.0). Smaller values mean 'simplicity' while bigger values mean 'precision'.
<schema-measure?> : a boolean. If true, the schema is applied to each measure. If false, the schema is applied to each pulse. False by default.
"
(labels ((add-signatures (trees signatures &optional (buffer nil))
(if (null trees)
buffer
(add-signatures (cdr trees) (cdr signatures) (append buffer (list (loop for tree in (car trees) collect (list (car signatures) tree))))))))
(multiple-value-bind (inputs slurs siglist templist) (create-inputs (butlast onsets) durations schema sigs tempo schema-measure?)
(let ((trees nil)
(N (length inputs))
we add a 0 to math the indexes : a 1 in slurs now means the current measure is slurred with the previous , and not that the next is slurred with the current .
(if (equalp *weight-precision* precision)
()
(setf *weight-precision* precision))
;for each measure, computation of the k best solutions
(setq trees (loop for n from 0 to (1- N)
collect
(let ((ktable (init (transitions (nth n inputs)))))
(loop for i from 1 to k
collect (rq-to-omtree ktable (nth n inputs) i (nth n slurs))))))
;we add the signatures
(setq trees (add-signatures trees siglist))
we have a list of size n_measures , containing lists of , we want a list o length k , containing lists of size n_measure
;In other words, we want to have a list indexed by the rank of the solution, not by the number of the measure.
(setq trees (loop for i from 0 to (1- k) collect
(loop for tree in trees collect
(nth i tree))))
(mapcar #'(lambda (x) (append (list '?) (list x))) trees)))))
(defun rq-to-omtree (ktable inst k flag &optional (slur nil) (gracenotes 0) (path nil) (previous -1))
"Function to reconstruct a rhythm tree from the hashtable
The slur parameter is nil when the current measure is not slurred with the previous one, an integer when it is"
(let ((rootp (or path (path-empty inst)))
nil when the first note has not been slurred yet , t when if has .
(grace (if gracenotes gracenotes 0)) ;number of grace notes in the previous step
prev : sign of the last input in the previous subdivision ( 1 : note , -1 : silence ) . By default : we begin with a silence
(labels ((to-omtree (inst p k flag)
(multiple-value-bind (run w) (rq-best ktable inst p k flag)
(let ((r (length run))
(y nil)) ;buffer to hold the sub-tree
(if (> r 0)
( list 1 ( mapcar # ' ( lambda ( k i ) ( to - omtree inst ( path - addi p r i ) k ) ) run ( from-1 - to r ) ) )
(list 1 (loop for i from 1 to r
for krun in run
collect (to-omtree inst (path-addi p r i) (car krun) (cdr krun))))
;if the node considered is a leaf
(multiple-value-bind (ll rr) (align ktable p)
;processing ll
(if (null ll) ;if there are no inputs aligned to the left
(if (> grace 0) ;if there were notes aligned to the right in the previous subdivision
if there is only one note
(setq y 1)
(setq y (list 1 (append (make-list (1- grace) :initial-element 0) (list 1)))))
;if there is no input in the subdivision and there was no note aligned to the right before
(if (equalp prev 1)
(setq y 1.0) ;if there was a note before, tie it to the current
(setq y -1))) ;if there was a silence, return a silence
;if there are inputs aligned to the left
(if (and (equalp grace 0) (equalp (length ll) 1))
(setq y (* (input-sign inst (car ll)) 1)) ;if there is only 1 input aligned to the left
(let ((ll0 (remove-silences ll inst)))
if there are only rests aligned to the left ( at least 1 )
(if (> grace 0) ;if there were notes aligned to the right in the previous subdivision
if there is only one note
(setq y 1)
(setq y (list 1 (append (make-list (1- grace) :initial-element 0) (list 1)))))
;if there are only rests in the subdivision and there was no note aligned to the right before
(setq y -1))
;if there are notes aligned to the left
(if (and (equalp (length ll0) 1) (equalp grace 0)) ;if there is exactly one note aligned to the left
(setq y 1)
if there is at least 1 note aligned to the left or 1 note aligned to the right before
(setq y (list 1 (append (make-list (1- (+ grace (length ll0))) :initial-element 0) (list 1)))))))))
if the current note is the first and if it has to be slurred , we slur it and remember that the first note has already been slurred
(if (and (null slurred) slur)
(progn
(if (listp y) ;if the note to slur has grace-notes : the exceeding part of the previous measure was aligned to the left so we delete it
(if (equalp (second y) '(0 1))
(setq y 1)
(setq y (list 1 (rest (second y)))))
(setq y (float y)))
(setq slurred t)))
;otherwise, we do nothing
;processing rr
(if (null rr)
(if (not (null ll))
(setq prev (input-sign inst (car (last ll)))) ;if there are no notes aligned to the right, but there are some to the left
;if there are no notes in the subdivision, we do nothing
)
(setq prev (input-sign inst (car (last rr)))))
(setq grace (length (remove-silences rr inst)))
y))))))
(if (weight-infinityp (nth-value 1 (rq-best ktable inst rootp k flag)))
(setq out -1)
(setq out (to-omtree inst rootp k flag)))
(list out))))
(defun rq-remove-top-level (tree)
we remove the first top - level ( 1 ( ... ) ) , except when we only have a 1
tree
(second (car tree))))
(defun output (tree &optional (sig '(1 4)))
(list '? (list (list sig tree))))
(defun get-nth-measure (tree n)
(nth n (second tree)))
(defun concatenate-measures (&rest trees)
(list '? trees))
| null | https://raw.githubusercontent.com/openmusic-project/RQ/d6b1274a4462c1500dfc2edab81a4425a6dfcda7/src/algorithm/interface.lisp | lisp | Interfaces for k-best quantification
Most of the functions here are now obsolete (everything replaced by the GUI)
creates the output list and the mask from the onsets and durations
on1 is the last note
duration of a measure in ms
the beginning of the measure is always 0
if there are no elements in the next measures (ie current measure is last), last on is the last onset, otherwise it is the index-th element of onsets
idem
true iff the last note of the measure lasts after the end of the measure
the current measure is the last : there are no notes after the end of the measure
the current measure is not the last, we iterate
if there are no notes after, we just add a measure for the end of the note
if there are notes after, we iterate
if a list of signatures is given
if a list of tempo is given
we do nothing
if there is a silence between the current note and the next one)
for each measure, computation of the k best solutions
we add the signatures
In other words, we want to have a list indexed by the rank of the solution, not by the number of the measure.
number of grace notes in the previous step
buffer to hold the sub-tree
if the node considered is a leaf
processing ll
if there are no inputs aligned to the left
if there were notes aligned to the right in the previous subdivision
if there is no input in the subdivision and there was no note aligned to the right before
if there was a note before, tie it to the current
if there was a silence, return a silence
if there are inputs aligned to the left
if there is only 1 input aligned to the left
if there were notes aligned to the right in the previous subdivision
if there are only rests in the subdivision and there was no note aligned to the right before
if there are notes aligned to the left
if there is exactly one note aligned to the left
if the note to slur has grace-notes : the exceeding part of the previous measure was aligned to the left so we delete it
otherwise, we do nothing
processing rr
if there are no notes aligned to the right, but there are some to the left
if there are no notes in the subdivision, we do nothing |
(use-package :om :rq)
(in-package :rq)
(export '(k-best-quantify) :rq)
(defun to-input (onsets durations schema sig tempo)
"Takes a list of onsets and a list of durations (of same length) and returns an input object"
(let ((mesdur (* (* (first sig) (/ 4 (second sig))) (/ 60 tempo) 1000)))
(labels ((out-mask (onsets durations output mask)
(if (or (null onsets) (null durations))
(values (append output (list mesdur)) (append mask (list -1)))
(let ((on1 (first onsets))
(dur1 (first durations))
(on2 (second onsets))
(dur2 (second durations)))
(out-mask (cdr onsets) (cdr durations) (append output (list on1 (+ on1 dur1))) (append mask (list 1 -1)))
if there is a silence between the current note and the next one
(out-mask (cdr onsets) (cdr durations) (append output (list on1 (+ on1 dur1))) (append mask (list 1 -1)))
(out-mask (cdr onsets) (cdr durations) (append output (list on1)) (append mask (list 1)))))))))
(multiple-value-bind (output mask) (out-mask onsets durations nil nil)
(input-make schema (coerce output 'vector) (coerce mask 'vector))))))
(defun cut-measures (onsets durations signatures tempo)
" cuts the onsets and durations lists in measures
the note series has to be monophonic, otherwise, the results are not guaranteed.
output onsets are given as the duration between the beginning of the measure and the onset of the note
OUTPUTS :
on-mes holds the onsets grouped in measures
dur-mes holds the durations grouped in measures
slur is a list : the nth element is 1 if the last note of the nth measure is slurred with the next one, nil otherwise"
(labels ((make-measures (onsets durations sig temp &optional (on-mes nil) (dur-mes nil) (slur nil) (sigs nil) (temps nil))
(let* ((current-sig (first sig))
(current-temp (first temp))
index of the first element of the next measures
if there are not enough signatures or tempo given , we continue with the last one
sig
(rest sig)))
(new-temp (if (null (rest temp))
temp
(rest temp))))
(if (null exceed)
(if (null index)
(values (append on-mes (list onsets)) (append dur-mes (list durations)) (append slur (list nil)) (append sigs (list current-sig)) (append temps (list current-temp)))
(make-measures (update-onsets (subseq onsets index) mes-duration) (subseq durations index) new-sig new-temp (append on-mes (list (subseq onsets 0 index))) (append dur-mes (list (subseq durations 0 index))) (append slur (list nil)) (append sigs (list current-sig)) (append temps (list current-temp)) ))
if the last note is cut by the measure bar , we cut it into two notes , and put 1 in slur ( the measures are slurred )
(let* ((new-dur (- end last-on))
(excess (- last-dur new-dur)))
(if (null index)
(values (append on-mes (list onsets) (list (list 0))) (append dur-mes (list (append (butlast durations) (list new-dur))) (list (list excess))) (append slur (list 1 nil)) (append sigs (list current-sig (car new-sig))) (append temps (list current-temp (car new-temp))) )
(make-measures (append (list 0) (update-onsets (subseq onsets index) mes-duration)) (append (list excess) (subseq durations index)) new-sig new-temp (append on-mes (list (subseq onsets 0 index))) (append dur-mes (list (append (subseq durations 0 (1- index)) (list new-dur)))) (append slur (list 1)) (append sigs (list current-sig)) (append temps (list current-temp))))))))
(update-onsets (onsets measure)
(mapcar #'(lambda (x) (- x measure)) onsets)))
if a list of signatures is provided , but only 1 tempo is given
(setq tempo (make-list (length signatures) :initial-element tempo)))
if only one signature is given
(if (listp tempo)
(setq signatures (make-list (length tempo) :initial-element signatures))
if only one signature and one tempo is given
(progn
(setq signatures (list signatures))
(setq tempo (list tempo)))))
(make-measures onsets (remove-overlap onsets (normalize-durations durations)) signatures tempo)))
(defun normalize-durations (durations &optional (buffer nil))
"if durations is the output of a chord-seq, it is a list of lists of length 1, otherwise, the input is a normal list
This function transforms the durations output of a chord-seq into a normal list
The chord-seq has to be monophonic"
(if (null durations)
buffer
(let ((current (car durations)))
(if (listp current)
(normalize-durations (rest durations) (append buffer (list (reduce #'max current))))
(normalize-durations (rest durations) (append buffer (list current)))))))
(defun remove-overlap (onsets durations &optional (dur-buf nil))
  "Clip every duration that overlaps the next onset, so the notes become
strictly sequential.  DUR-BUF accumulates the corrected durations.
NOTE(review): the conditional of the inner IF was lost in extraction; it
has been reconstructed from the two branch bodies (keep the duration when
the note ends at or before the next onset, otherwise clip it to the next
onset) -- confirm against the upstream source."
  (let ((on1 (first onsets))
        (dur1 (first durations))
        (on2 (second onsets))
        (dur2 (second durations)))
    (if (or (null on2) (null dur2))
        ;; last note: nothing left to overlap with
        (append dur-buf (list dur1))
        (if (<= (+ on1 dur1) on2)
            ;; no overlap: keep the duration unchanged
            (remove-overlap (rest onsets) (rest durations)
                            (append dur-buf (list dur1)))
            ;; overlap: clip the duration to the next onset
            (remove-overlap (rest onsets) (rest durations)
                            (append dur-buf (list (- on2 on1))))))))
(defun create-inputs (onsets durations schema sigs tempo schema-measure?)
  "returns a list of input objects, each corresponding to a measure"
  ;; NOTE(review): relies on CUT-MEASURES and TO-INPUT, defined elsewhere
  ;; in this project.
  (labels ((create-schemas (siglist schema)
             ;; prepend (car sig) of each measure's signature to the shared schema
             (mapcar #'(lambda (sig) (append (list (car sig)) schema)) siglist)))
    (multiple-value-bind (onlist durlist slurs siglist templist) (cut-measures onsets durations sigs tempo)
      (if schema-measure?
          ;; schema applies to whole measures: one copy of SCHEMA per measure
          (values (mapcar #'to-input onlist durlist (make-list (length onlist) :initial-element schema) siglist templist) slurs siglist templist)
          ;; otherwise derive each measure's schema from its signature
          (values (mapcar #'to-input onlist durlist (create-schemas siglist schema) siglist templist) slurs siglist templist)))))
;; NOTE(review): the `;;` comment markers in this method were stripped in
;; extraction, and the LET binding described by the "add a 0" comment was
;; lost; both have been reconstructed -- confirm against upstream.
(om::defmethod! k-best-quantify (onsets durations k schema sigs tempo &optional (precision 0.5) (schema-measure? nil))
  :initvals '((0) (1000) 10 '(4 2 2) '(4 4) 60 0.5 nil)
  :indoc '("onsets" "durations" "number of solutions" "schema" "signatures" "tempo" "precision (0.0-1.0)" "schema-measure?")
  :icon '(252)
  :doc "Quantizes a list of onsets and a list of durations. The output is given as a list of possible rhythm trees.
<onsets> : a list of onsets in 1/1000th of a second
<durations> : a list of durations in 1/1000th of a second
<k> : number of output solutions
<schema> : a subdivision schema applied to each time unit (by default : each quarter note). A subdivision schema is given in the form of a list. For example : (2 2 3) means that each time unit is divided in two, then each part is again divided in two, and finally, each of these parts is divided in 3.
If an element of the list is a list itself, it means that the choice is given between the values of the list. For example : (2 (2 3 5) 3) means (2 2 3) or (2 3 3) or (2 5 3).
If an element of the list is a list of lists, it means that the choice is given between various successions of subdivisions. For example : ( ((2 3) (3 2)) ) means (2 3) or (3 2).
Example : ((2 3) ((2 3) ((3 5) 2))) means (2 2 3) or (2 3 2) or (2 5 2) or (3 2 3) or (3 3 2) or (3 5 2).
Non prime numbers can be used as, for exemple, dividing in 2 then in 2 gives a different notation than dividing in 4.
<sigs> : the signature to be used for each measure. If there are various signatures, a list of signatures can be given (ex : ((3 4) (4 4)) means the signature of the first measure is (3 4), and the signature of the following measures is (4 4))
<tempo> : the tempo to be used. If the tempo changes from measure to measure, a list of tempo can be given (ex : (60 50) means the tempo of the first measure is 60, and the tempo of the following measures is 50)
<precision> : a float in (0.0 .. 1.0). Smaller values mean 'simplicity' while bigger values mean 'precision'.
<schema-measure?> : a boolean. If true, the schema is applied to each measure. If false, the schema is applied to each pulse. False by default.
"
  (labels ((add-signatures (trees signatures &optional (buffer nil))
             ;; pair each measure's candidate trees with that measure's signature
             (if (null trees)
                 buffer
                 (add-signatures (cdr trees) (cdr signatures) (append buffer (list (loop for tree in (car trees) collect (list (car signatures) tree))))))))
    (multiple-value-bind (inputs slurs siglist templist) (create-inputs (butlast onsets) durations schema sigs tempo schema-measure?)
      (let ((trees nil)
            (N (length inputs))
            ;; we add a 0 to match the indexes: a 1 in slurs now means the
            ;; current measure is slurred with the previous, and not that
            ;; the next is slurred with the current.
            ;; NOTE(review): reconstructed binding -- confirm upstream.
            (slurs (append (list 0) slurs)))
        (if (equalp *weight-precision* precision)
            ()
            (setf *weight-precision* precision))
        ;; for each measure, compute its k best rhythm trees
        (setq trees (loop for n from 0 to (1- N)
                          collect
                          (let ((ktable (init (transitions (nth n inputs)))))
                            (loop for i from 1 to k
                                  collect (rq-to-omtree ktable (nth n inputs) i (nth n slurs))))))
        (setq trees (add-signatures trees siglist))
        ;; we have a list of size n_measures containing lists of size k;
        ;; we want a list of length k containing lists of size n_measures
        (setq trees (loop for i from 0 to (1- k) collect
                          (loop for tree in trees collect
                                (nth i tree))))
        ;; wrap each solution as a complete OM rhythm tree (? (...))
        (mapcar #'(lambda (x) (append (list '?) (list x))) trees)))))
;; NOTE(review): this definition was damaged in extraction and does not
;; read back correctly as written:
;; - the LET binding list opened at (rootp ...) is never closed, and the
;;   bindings implied by the stray prose below (slurred, prev, grace, y,
;;   out) are missing;
;; - the bare prose lines inside the body were originally `;;` comments;
;; - the form (if (not (null ll)) ) is missing its branch;
;; - the spaced-out (list 1 (mapcar ...)) line was a commented-out variant.
;; Restore this function from the upstream project before using it.
(defun rq-to-omtree (ktable inst k flag &optional (slur nil) (gracenotes 0) (path nil) (previous -1))
"Function to reconstruct a rhythm tree from the hashtable
The slur parameter is nil when the current measure is not slurred with the previous one, an integer when it is"
(let ((rootp (or path (path-empty inst)))
nil when the first note has not been slurred yet , t when if has .
prev : sign of the last input in the previous subdivision ( 1 : note , -1 : silence ) . By default : we begin with a silence
(labels ((to-omtree (inst p k flag)
(multiple-value-bind (run w) (rq-best ktable inst p k flag)
(let ((r (length run))
(if (> r 0)
( list 1 ( mapcar # ' ( lambda ( k i ) ( to - omtree inst ( path - addi p r i ) k ) ) run ( from-1 - to r ) ) )
(list 1 (loop for i from 1 to r
for krun in run
collect (to-omtree inst (path-addi p r i) (car krun) (cdr krun))))
(multiple-value-bind (ll rr) (align ktable p)
if there is only one note
(setq y 1)
(setq y (list 1 (append (make-list (1- grace) :initial-element 0) (list 1)))))
(if (equalp prev 1)
(if (and (equalp grace 0) (equalp (length ll) 1))
(let ((ll0 (remove-silences ll inst)))
if there are only rests aligned to the left ( at least 1 )
if there is only one note
(setq y 1)
(setq y (list 1 (append (make-list (1- grace) :initial-element 0) (list 1)))))
(setq y -1))
(setq y 1)
if there is at least 1 note aligned to the left or 1 note aligned to the right before
(setq y (list 1 (append (make-list (1- (+ grace (length ll0))) :initial-element 0) (list 1)))))))))
if the current note is the first and if it has to be slurred , we slur it and remember that the first note has already been slurred
(if (and (null slurred) slur)
(progn
(if (equalp (second y) '(0 1))
(setq y 1)
(setq y (list 1 (rest (second y)))))
(setq y (float y)))
(setq slurred t)))
(if (null rr)
(if (not (null ll))
)
(setq prev (input-sign inst (car (last rr)))))
(setq grace (length (remove-silences rr inst)))
y))))))
(if (weight-infinityp (nth-value 1 (rq-best ktable inst rootp k flag)))
(setq out -1)
(setq out (to-omtree inst rootp k flag)))
(list out))))
(defun rq-remove-top-level (tree)
  ;; we remove the first top-level (1 (...)), except when we only have a 1
  ;; NOTE(review): the IF condition line was lost in extraction; it has
  ;; been reconstructed from the branch bodies (rq-to-omtree returns a
  ;; one-element list whose element is either a number or a (1 (...))
  ;; subtree) -- confirm against the upstream source.
  (if (numberp (car tree))
      tree
      (second (car tree))))
(defun output (tree &optional (sig '(1 4)))
  "Wrap TREE as a complete OpenMusic rhythm tree containing a single
measure with time signature SIG (default (1 4))."
  (let ((measure (list sig tree)))
    (list '? (list measure))))
(defun get-nth-measure (tree n)
  "Return the Nth measure (0-based) of rhythm tree TREE, whose second
element is the list of measures."
  (let ((measures (second tree)))
    (nth n measures)))
(defun concatenate-measures (&rest measures)
  "Build a rhythm tree whose measure list is MEASURES, in the given order."
  (cons '? (list measures)))
|
ee77f5950d2db661aac6d9c394aa92165b0c28ea8c318f2274832e50bad0b28f | typelead/intellij-eta | Operator00001.hs | module Operator00001 where
renderNode (s, a) = text (label a) # bold # font "sans-serif"
| null | https://raw.githubusercontent.com/typelead/intellij-eta/ee66d621aa0bfdf56d7d287279a9a54e89802cf9/plugin/src/test/resources/fixtures/eta/sources/Operator00001.hs | haskell | module Operator00001 where
renderNode (s, a) = text (label a) # bold # font "sans-serif"
| |
2f6a7ab96da1df9e2fb8747a2db370bbc775ae866e260c31b8a2551acba7ba75 | circuithub/rel8 | ADT.hs | # language AllowAmbiguousTypes #
# language BlockArguments #
# language DataKinds #
{-# language FlexibleContexts #-}
# language FlexibleInstances #
# language MultiParamTypeClasses #
# language RankNTypes #
{-# language ScopedTypeVariables #-}
# language StandaloneKindSignatures #
{-# language TupleSections #-}
# language TypeApplications #
{-# language TypeFamilies #-}
# language TypeOperators #
{-# language UndecidableInstances #-}
module Rel8.Generic.Construction.ADT
( GConstructableADT
, GBuildADT, gbuildADT, gunbuildADT
, GConstructADT, gconstructADT, gdeconstructADT
, GFields, RepresentableFields, gftabulate, gfindex
, GConstructors, RepresentableConstructors, gctabulate, gcindex
, GConstructorADT, GMakeableADT, gmakeADT
)
where
-- base
import Data.Bifunctor ( first )
import Data.Functor.Identity ( runIdentity )
import Data.Kind ( Constraint, Type )
import Data.List.NonEmpty ( NonEmpty )
import Data.Proxy ( Proxy( Proxy ) )
import GHC.Generics
( (:+:), (:*:)( (:*:) ), M1, U1
, C, D
, Meta( MetaData, MetaCons )
)
import GHC.TypeLits
( ErrorMessage( (:<>:), Text ), TypeError
, Symbol, KnownSymbol, symbolVal
)
import Prelude hiding ( null )
-- rel8
import Rel8.FCF ( Exp )
import Rel8.Generic.Construction.Record
( GConstruct, GConstructable, gconstruct, gdeconstruct
, GFields, Representable, gtabulate, gindex
, FromColumns, ToColumns
)
import Rel8.Generic.Table.ADT ( GColumnsADT, GColumnsADT' )
import Rel8.Generic.Table.Record ( GColumns )
import Rel8.Schema.HTable ( HTable )
import Rel8.Schema.HTable.Identity ( HIdentity )
import Rel8.Schema.HTable.Label ( HLabel, hlabel, hunlabel )
import Rel8.Schema.HTable.Nullify ( HNullify, hnulls, hnullify, hunnullify )
import Rel8.Schema.HTable.Product ( HProduct( HProduct ) )
import Rel8.Schema.Null ( Nullify )
import Rel8.Schema.Spec ( Spec )
import qualified Rel8.Schema.Kind as K
import Rel8.Type.Tag ( Tag( Tag ) )
-- text
import Data.Text ( pack )
-- | How to produce an absent (all-null) column for any 'Spec' in the
-- given context.  Used (via 'hnulls') to fill in the columns of
-- constructors that are not selected.
type Null :: K.Context -> Type
type Null context = forall a. Spec a -> context (Nullify a)


-- | How to make an existing column nullable in the given context.
type Nullifier :: K.Context -> Type
type Nullifier context = forall a. Spec a -> context a -> context (Nullify a)


-- | Inverse of 'Nullifier': recover the underlying column from its
-- nullable form.
type Unnullifier :: K.Context -> Type
type Unnullifier context = forall a. Spec a -> context (Nullify a) -> context a
-- | Custom type error reported when the requested constructor does not
-- exist on the given datatype.
type NoConstructor :: Symbol -> Symbol -> ErrorMessage
type NoConstructor datatype constructor =
  ( 'Text "The type `" ':<>:
    'Text datatype ':<>:
    'Text "` has no constructor `" ':<>:
    'Text constructor ':<>:
    'Text "`."
  )


-- | Select, from a generic representation, the representation of the
-- constructor with the given name, reporting a 'NoConstructor' type
-- error when no constructor matches.
type GConstructorADT :: Symbol -> (Type -> Type) -> Type -> Type
type family GConstructorADT name rep where
  GConstructorADT name (M1 D ('MetaData datatype _ _ _) rep) =
    GConstructorADT' name rep (TypeError (NoConstructor datatype name))


-- | Worker for 'GConstructorADT': walk the sum structure left to right,
-- producing @fallback@ when the named constructor is not found.
type GConstructorADT' :: Symbol -> (Type -> Type) -> (Type -> Type) -> Type -> Type
type family GConstructorADT' name rep fallback where
  GConstructorADT' name (M1 D _ rep) fallback =
    GConstructorADT' name rep fallback
  GConstructorADT' name (a :+: b) fallback =
    GConstructorADT' name a (GConstructorADT' name b fallback)
  GConstructorADT' name (M1 C ('MetaCons name _ _) rep) _ = rep
  GConstructorADT' _ _ fallback = fallback
-- | The type of a function that takes one curried argument list per
-- constructor of @rep@ (each producing an @r@) and finally returns an
-- @x@.
type GConstructADT
  :: (Type -> Exp Type)
  -> (Type -> Type) -> Type -> Type -> Type
type family GConstructADT f rep r x where
  GConstructADT f (M1 D _ rep) r x = GConstructADT f rep r x
  GConstructADT f (a :+: b) r x = GConstructADT f a r (GConstructADT f b r x)
  GConstructADT f (M1 C _ rep) r x = GConstruct f rep r -> x


-- | One eliminator per constructor: a product ((':*:')) of functions,
-- each consuming that constructor's fields ('GFields').
type GConstructors :: (Type -> Exp Type) -> (Type -> Type) -> Type -> Type
type family GConstructors f rep where
  GConstructors f (M1 D _ rep) = GConstructors f rep
  GConstructors f (a :+: b) = GConstructors f a :*: GConstructors f b
  GConstructors f (M1 C _ rep) = (->) (GFields f rep)


-- | Convert between the curried form ('GConstructADT') and the
-- product-of-eliminators form ('GConstructors').
type RepresentableConstructors :: (Type -> Exp Type) -> (Type -> Type) -> Constraint
class RepresentableConstructors f rep where
  gctabulate :: (GConstructors f rep r -> a) -> GConstructADT f rep r a
  gcindex :: GConstructADT f rep r a -> GConstructors f rep r -> a


instance RepresentableConstructors f rep => RepresentableConstructors f (M1 D meta rep) where
  gctabulate = gctabulate @f @rep
  gcindex = gcindex @f @rep


instance (RepresentableConstructors f a, RepresentableConstructors f b) =>
  RepresentableConstructors f (a :+: b)
 where
  gctabulate f =
    gctabulate @f @a \a -> gctabulate @f @b \b -> f (a :*: b)
  gcindex f (a :*: b) = gcindex @f @b (gcindex @f @a f a) b


instance Representable f rep => RepresentableConstructors f (M1 C meta rep) where
  gctabulate f = f . gindex @f @rep
  gcindex f = f . gtabulate @f @rep
-- | Like 'GConstructADT', but taking the fields of /every/ constructor:
-- one curried argument list per constructor, in declaration order.
type GBuildADT :: (Type -> Exp Type) -> (Type -> Type) -> Type -> Type
type family GBuildADT f rep r where
  GBuildADT f (M1 D _ rep) r = GBuildADT f rep r
  GBuildADT f (a :+: b) r = GBuildADT f a (GBuildADT f b r)
  GBuildADT f (M1 C _ rep) r = GConstruct f rep r


-- | The fields of all constructors of @rep@, collected as nested pairs.
type GFieldsADT :: (Type -> Exp Type) -> (Type -> Type) -> Type
type family GFieldsADT f rep where
  GFieldsADT f (M1 D _ rep) = GFieldsADT f rep
  GFieldsADT f (a :+: b) = (GFieldsADT f a, GFieldsADT f b)
  GFieldsADT f (M1 C _ rep) = GFields f rep


-- | Convert between the curried form ('GBuildADT') and the nested-pairs
-- form ('GFieldsADT').
type RepresentableFields :: (Type -> Exp Type) -> (Type -> Type) -> Constraint
class RepresentableFields f rep where
  gftabulate :: (GFieldsADT f rep -> a) -> GBuildADT f rep a
  gfindex :: GBuildADT f rep a -> GFieldsADT f rep -> a


instance RepresentableFields f rep => RepresentableFields f (M1 D meta rep) where
  gftabulate = gftabulate @f @rep
  gfindex = gfindex @f @rep


instance (RepresentableFields f a, RepresentableFields f b) => RepresentableFields f (a :+: b) where
  gftabulate f =
    gftabulate @f @a \a -> gftabulate @f @b \b -> f (a, b)
  gfindex f (a, b) = gfindex @f @b (gfindex @f @a f a) b


instance Representable f rep => RepresentableFields f (M1 C meta rep) where
  gftabulate = gtabulate @f @rep
  gfindex = gindex @f @rep
-- | Conversion between the Haskell-level fields of a generic ADT and its
-- column representation: a @"tag"@ column ('HIdentity' 'Tag') alongside
-- one group of nullable columns per constructor.
type GConstructableADT
  :: (Type -> Exp Constraint)
  -> (Type -> Exp K.HTable)
  -> (Type -> Exp Type)
  -> K.Context -> (Type -> Type) -> Constraint
class GConstructableADT _Table _Columns f context rep where
  -- | Build the full column structure from a tag column and the fields
  -- of every constructor, nullifying each constructor's columns with a
  -- tag-dependent 'Nullifier'.
  gbuildADT :: ()
    => ToColumns _Table _Columns f context
    -> (Tag -> Nullifier context)
    -> HIdentity Tag context
    -> GFieldsADT f rep
    -> GColumnsADT _Columns rep context

  -- | Inverse of 'gbuildADT': split the columns back into the tag column
  -- and the per-constructor fields.
  gunbuildADT :: ()
    => FromColumns _Table _Columns f context
    -> Unnullifier context
    -> GColumnsADT _Columns rep context
    -> (HIdentity Tag context, GFieldsADT f rep)

  -- | One builder per constructor; each produces the whole column
  -- structure, with the other constructors' columns filled with nulls.
  gconstructADT :: ()
    => ToColumns _Table _Columns f context
    -> Null context
    -> Nullifier context
    -> (Tag -> HIdentity Tag context)
    -> GConstructors f rep (GColumnsADT _Columns rep context)

  -- | Eliminate the column structure with one case per constructor,
  -- returning the tag column together with one @('Tag', r)@ pair per
  -- constructor.
  gdeconstructADT :: ()
    => FromColumns _Table _Columns f context
    -> Unnullifier context
    -> GConstructors f rep r
    -> GColumnsADT _Columns rep context
    -> (HIdentity Tag context, NonEmpty (Tag, r))


-- | Datatype wrapper: label the tag column @"tag"@ and delegate to the
-- accumulating worker class 'GConstructableADT''.
instance
  ( htable ~ HLabel "tag" (HIdentity Tag)
  , GConstructableADT' _Table _Columns f context htable rep
  )
  => GConstructableADT _Table _Columns f context (M1 D meta rep)
 where
  gbuildADT toColumns nullifier =
    gbuildADT' @_Table @_Columns @f @context @htable @rep toColumns nullifier .
    hlabel
  gunbuildADT fromColumns unnullifier =
    first hunlabel .
    gunbuildADT' @_Table @_Columns @f @context @htable @rep fromColumns unnullifier
  gconstructADT toColumns null nullifier mk =
    gconstructADT' @_Table @_Columns @f @context @htable @rep toColumns null nullifier
      (hlabel . mk)
  gdeconstructADT fromColumns unnullifier cases =
    first hunlabel .
    gdeconstructADT' @_Table @_Columns @f @context @htable @rep fromColumns unnullifier cases
-- | Worker class for 'GConstructableADT': the @htable@ parameter
-- accumulates the columns built so far (starting from the labelled tag
-- column) as the sum structure is traversed left to right.
type GConstructableADT'
  :: (Type -> Exp Constraint)
  -> (Type -> Exp K.HTable)
  -> (Type -> Exp Type)
  -> K.Context -> K.HTable -> (Type -> Type) -> Constraint
class GConstructableADT' _Table _Columns f context htable rep where
  gbuildADT' :: ()
    => ToColumns _Table _Columns f context
    -> (Tag -> Nullifier context)
    -> htable context
    -> GFieldsADT f rep
    -> GColumnsADT' _Columns htable rep context
  gunbuildADT' :: ()
    => FromColumns _Table _Columns f context
    -> Unnullifier context
    -> GColumnsADT' _Columns htable rep context
    -> (htable context, GFieldsADT f rep)
  gconstructADT' :: ()
    => ToColumns _Table _Columns f context
    -> Null context
    -> Nullifier context
    -> (Tag -> htable context)
    -> GConstructors f rep (GColumnsADT' _Columns htable rep context)
  gdeconstructADT' :: ()
    => FromColumns _Table _Columns f context
    -> Unnullifier context
    -> GConstructors f rep r
    -> GColumnsADT' _Columns htable rep context
    -> (htable context, NonEmpty (Tag, r))

  -- | Extend the accumulated columns with null columns for every
  -- constructor of @rep@.
  gfill :: ()
    => Null context
    -> htable context
    -> GColumnsADT' _Columns htable rep context
-- | Sums: process the left alternatives first, threading the accumulated
-- column structure into the right alternatives.
instance
  ( htable' ~ GColumnsADT' _Columns htable a
  , Functor (GConstructors f a)
  , GConstructableADT' _Table _Columns f context htable a
  , GConstructableADT' _Table _Columns f context htable' b
  )
  => GConstructableADT' _Table _Columns f context htable (a :+: b)
 where
  gbuildADT' toColumns nullifier htable (a, b) =
    gbuildADT' @_Table @_Columns @f @context @htable' @b toColumns nullifier
      (gbuildADT' @_Table @_Columns @f @context @htable @a toColumns nullifier htable a)
      b
  gunbuildADT' fromColumns unnullifier columns =
    case gunbuildADT' @_Table @_Columns @f @context @htable' @b fromColumns unnullifier columns of
      (htable', b) ->
        case gunbuildADT' @_Table @_Columns @f @context @htable @a fromColumns unnullifier htable' of
          (htable, a) -> (htable, (a, b))
  -- Each left constructor must also fill the right constructors' columns
  -- with nulls (and vice versa) -- hence the uses of 'gfill' below.
  gconstructADT' toColumns null nullifier mk =
    fmap (gfill @_Table @_Columns @f @context @htable' @b null) (gconstructADT' @_Table @_Columns @f @context @htable @a toColumns null nullifier mk) :*:
    gconstructADT' @_Table @_Columns @f @context @htable' @b toColumns null nullifier (gfill @_Table @_Columns @f @context @htable @a null . mk)
  gdeconstructADT' fromColumns unnullifier (a :*: b) columns =
    case gdeconstructADT' @_Table @_Columns @f @context @htable' @b fromColumns unnullifier b columns of
      (htable', cases) ->
        case gdeconstructADT' @_Table @_Columns @f @context @htable @a fromColumns unnullifier a htable' of
          (htable, cases') -> (htable, cases' <> cases)
  gfill null =
    gfill @_Table @_Columns @f @context @htable' @b null .
    gfill @_Table @_Columns @f @context @htable @a null


-- | Nullary constructors contribute no columns of their own: only the
-- tag distinguishes them.
instance (meta ~ 'MetaCons label _fixity _isRecord, KnownSymbol label) =>
  GConstructableADT' _Table _Columns f context htable (M1 C meta U1)
 where
  gbuildADT' _ _ = const
  gunbuildADT' _ _ = (, ())
  gconstructADT' _ _ _ f _ = f tag
    where
      tag = Tag $ pack $ symbolVal (Proxy @label)
  gdeconstructADT' _ _ r htable = (htable, pure (tag, r ()))
    where
      tag = Tag $ pack $ symbolVal (Proxy @label)
  gfill _ = id
-- | Constructors with fields: append one labelled group of nullable
-- columns for this constructor to the accumulated structure.
instance {-# OVERLAPPABLE #-}
  ( HTable (GColumns _Columns rep)
  , KnownSymbol label
  , meta ~ 'MetaCons label _fixity _isRecord
  , GConstructable _Table _Columns f context rep
  , GColumnsADT' _Columns htable (M1 C meta rep) ~
      HProduct htable (HLabel label (HNullify (GColumns _Columns rep)))
  )
  => GConstructableADT' _Table _Columns f context htable (M1 C meta rep)
 where
  gbuildADT' toColumns nullifier htable =
    HProduct htable .
    hlabel .
    hnullify (nullifier tag) .
    gconstruct @_Table @_Columns @f @context @rep toColumns
    where
      tag = Tag $ pack $ symbolVal (Proxy @label)
  gunbuildADT' fromColumns unnullifier (HProduct htable a) =
    ( htable
    , gdeconstruct @_Table @_Columns @f @context @rep fromColumns $
        runIdentity $
        hunnullify (\spec -> pure . unnullifier spec) $
        hunlabel
        a
    )
  gconstructADT' toColumns _ nullifier mk =
    HProduct htable .
    hlabel .
    hnullify nullifier .
    gconstruct @_Table @_Columns @f @context @rep toColumns
    where
      tag = Tag $ pack $ symbolVal (Proxy @label)
      htable = mk tag
  gdeconstructADT' fromColumns unnullifier r (HProduct htable columns) =
    ( htable
    , pure (tag, r a)
    )
    where
      a = gdeconstruct @_Table @_Columns @f @context @rep fromColumns $
          runIdentity $
          hunnullify (\spec -> pure . unnullifier spec) $
          hunlabel
          columns
      tag = Tag $ pack $ symbolVal (Proxy @label)
  gfill null htable = HProduct htable (hlabel (hnulls null))
-- | Build the full column structure of a generic ADT from the fields of
-- a single, statically chosen constructor (selected by @name@).
type GMakeableADT
  :: (Type -> Exp Constraint)
  -> (Type -> Exp K.HTable)
  -> (Type -> Exp Type)
  -> K.Context -> Symbol -> (Type -> Type) -> Constraint
class GMakeableADT _Table _Columns f context name rep where
  gmakeADT :: ()
    => ToColumns _Table _Columns f context
    -> Null context
    -> Nullifier context
    -> (Tag -> HIdentity Tag context)
    -> GFields f (GConstructorADT name rep)
    -> GColumnsADT _Columns rep context


-- | Datatype wrapper: set the labelled tag column to @name@ and delegate
-- to the worker class 'GMakeableADT''.
instance
  ( htable ~ HLabel "tag" (HIdentity Tag)
  , meta ~ 'MetaData datatype _module _package _newtype
  , fallback ~ TypeError (NoConstructor datatype name)
  , fields ~ GFields f (GConstructorADT' name rep fallback)
  , GMakeableADT' _Table _Columns f context htable name rep fields
  , KnownSymbol name
  )
  => GMakeableADT _Table _Columns f context name (M1 D meta rep)
 where
  gmakeADT toColumns null nullifier wrap =
    gmakeADT'
      @_Table @_Columns @f @context @htable @name @rep @fields
      toColumns null nullifier htable
    where
      tag = Tag $ pack $ symbolVal (Proxy @name)
      htable = hlabel (wrap tag)
-- | Worker class for 'GMakeableADT': walk the sum structure, emitting
-- the chosen constructor's columns and null columns for every other
-- constructor.
type GMakeableADT'
  :: (Type -> Exp Constraint)
  -> (Type -> Exp K.HTable)
  -> (Type -> Exp Type)
  -> K.Context -> K.HTable -> Symbol -> (Type -> Type) -> Type -> Constraint
class GMakeableADT' _Table _Columns f context htable name rep fields where
  gmakeADT' :: ()
    => ToColumns _Table _Columns f context
    -> Null context
    -> Nullifier context
    -> htable context
    -> fields
    -> GColumnsADT' _Columns htable rep context


instance
  ( htable' ~ GColumnsADT' _Columns htable a
  , GMakeableADT' _Table _Columns f context htable name a fields
  , GMakeableADT' _Table _Columns f context htable' name b fields
  )
  => GMakeableADT' _Table _Columns f context htable name (a :+: b) fields
 where
  gmakeADT' toColumns null nullifier htable x =
    gmakeADT' @_Table @_Columns @f @context @htable' @name @b @fields
      toColumns null nullifier
      (gmakeADT'
        @_Table @_Columns @f @context @htable @name @a @fields toColumns
        null nullifier htable x)
      x


-- | The named constructor is nullary: no columns to add.
instance {-# OVERLAPPING #-}
  GMakeableADT' _Table _Columns f context htable name (M1 C ('MetaCons name _fixity _isRecord) U1) fields
 where
  gmakeADT' _ _ _ = const


-- | Any other nullary constructor: no columns to add either.
instance {-# OVERLAPS #-}
  GMakeableADT' _Table _Columns f context htable name (M1 C ('MetaCons label _fixity _isRecord) U1) fields
 where
  gmakeADT' _ _ _ = const


-- | The named constructor, with fields: emit its columns, nullified.
instance {-# OVERLAPS #-}
  ( HTable (GColumns _Columns rep)
  , GConstructable _Table _Columns f context rep
  , fields ~ GFields f rep
  , GColumnsADT' _Columns htable (M1 C ('MetaCons name _fixity _isRecord) rep) ~
      HProduct htable (HLabel name (HNullify (GColumns _Columns rep)))
  )
  => GMakeableADT' _Table _Columns f context htable name (M1 C ('MetaCons name _fixity _isRecord) rep) fields
 where
  gmakeADT' toColumns _ nullifier htable =
    HProduct htable .
    hlabel .
    hnullify nullifier .
    gconstruct @_Table @_Columns @f @context @rep toColumns


-- | Any other constructor with fields: fill its columns with nulls.
instance {-# OVERLAPPABLE #-}
  ( HTable (GColumns _Columns rep)
  , GColumnsADT' _Columns htable (M1 C ('MetaCons label _fixity _isRecord) rep) ~
      HProduct htable (HLabel label (HNullify (GColumns _Columns rep)))
  )
  => GMakeableADT' _Table _Columns f context htable name (M1 C ('MetaCons label _fixity _isRecord) rep) fields
 where
  gmakeADT' _ null _ htable _ =
    HProduct htable $
    hlabel $
    hnulls null
| null | https://raw.githubusercontent.com/circuithub/rel8/7b9676bb3d4caaebae14cb431c6b8d026758c306/src/Rel8/Generic/Construction/ADT.hs | haskell | # language FlexibleContexts #
# language ScopedTypeVariables #
# language TupleSections #
# language TypeFamilies #
# language UndecidableInstances #
base
rel8
text
# OVERLAPPABLE #
# OVERLAPPING #
# OVERLAPS #
# OVERLAPS #
# OVERLAPPABLE # | # language AllowAmbiguousTypes #
# language BlockArguments #
# language DataKinds #
# language FlexibleInstances #
# language MultiParamTypeClasses #
# language RankNTypes #
# language StandaloneKindSignatures #
# language TypeApplications #
# language TypeOperators #
module Rel8.Generic.Construction.ADT
( GConstructableADT
, GBuildADT, gbuildADT, gunbuildADT
, GConstructADT, gconstructADT, gdeconstructADT
, GFields, RepresentableFields, gftabulate, gfindex
, GConstructors, RepresentableConstructors, gctabulate, gcindex
, GConstructorADT, GMakeableADT, gmakeADT
)
where
import Data.Bifunctor ( first )
import Data.Functor.Identity ( runIdentity )
import Data.Kind ( Constraint, Type )
import Data.List.NonEmpty ( NonEmpty )
import Data.Proxy ( Proxy( Proxy ) )
import GHC.Generics
( (:+:), (:*:)( (:*:) ), M1, U1
, C, D
, Meta( MetaData, MetaCons )
)
import GHC.TypeLits
( ErrorMessage( (:<>:), Text ), TypeError
, Symbol, KnownSymbol, symbolVal
)
import Prelude hiding ( null )
import Rel8.FCF ( Exp )
import Rel8.Generic.Construction.Record
( GConstruct, GConstructable, gconstruct, gdeconstruct
, GFields, Representable, gtabulate, gindex
, FromColumns, ToColumns
)
import Rel8.Generic.Table.ADT ( GColumnsADT, GColumnsADT' )
import Rel8.Generic.Table.Record ( GColumns )
import Rel8.Schema.HTable ( HTable )
import Rel8.Schema.HTable.Identity ( HIdentity )
import Rel8.Schema.HTable.Label ( HLabel, hlabel, hunlabel )
import Rel8.Schema.HTable.Nullify ( HNullify, hnulls, hnullify, hunnullify )
import Rel8.Schema.HTable.Product ( HProduct( HProduct ) )
import Rel8.Schema.Null ( Nullify )
import Rel8.Schema.Spec ( Spec )
import qualified Rel8.Schema.Kind as K
import Rel8.Type.Tag ( Tag( Tag ) )
import Data.Text ( pack )
type Null :: K.Context -> Type
type Null context = forall a. Spec a -> context (Nullify a)
type Nullifier :: K.Context -> Type
type Nullifier context = forall a. Spec a -> context a -> context (Nullify a)
type Unnullifier :: K.Context -> Type
type Unnullifier context = forall a. Spec a -> context (Nullify a) -> context a
type NoConstructor :: Symbol -> Symbol -> ErrorMessage
type NoConstructor datatype constructor =
( 'Text "The type `" ':<>:
'Text datatype ':<>:
'Text "` has no constructor `" ':<>:
'Text constructor ':<>:
'Text "`."
)
type GConstructorADT :: Symbol -> (Type -> Type) -> Type -> Type
type family GConstructorADT name rep where
GConstructorADT name (M1 D ('MetaData datatype _ _ _) rep) =
GConstructorADT' name rep (TypeError (NoConstructor datatype name))
type GConstructorADT' :: Symbol -> (Type -> Type) -> (Type -> Type) -> Type -> Type
type family GConstructorADT' name rep fallback where
GConstructorADT' name (M1 D _ rep) fallback =
GConstructorADT' name rep fallback
GConstructorADT' name (a :+: b) fallback =
GConstructorADT' name a (GConstructorADT' name b fallback)
GConstructorADT' name (M1 C ('MetaCons name _ _) rep) _ = rep
GConstructorADT' _ _ fallback = fallback
type GConstructADT
:: (Type -> Exp Type)
-> (Type -> Type) -> Type -> Type -> Type
type family GConstructADT f rep r x where
GConstructADT f (M1 D _ rep) r x = GConstructADT f rep r x
GConstructADT f (a :+: b) r x = GConstructADT f a r (GConstructADT f b r x)
GConstructADT f (M1 C _ rep) r x = GConstruct f rep r -> x
type GConstructors :: (Type -> Exp Type) -> (Type -> Type) -> Type -> Type
type family GConstructors f rep where
GConstructors f (M1 D _ rep) = GConstructors f rep
GConstructors f (a :+: b) = GConstructors f a :*: GConstructors f b
GConstructors f (M1 C _ rep) = (->) (GFields f rep)
type RepresentableConstructors :: (Type -> Exp Type) -> (Type -> Type) -> Constraint
class RepresentableConstructors f rep where
gctabulate :: (GConstructors f rep r -> a) -> GConstructADT f rep r a
gcindex :: GConstructADT f rep r a -> GConstructors f rep r -> a
instance RepresentableConstructors f rep => RepresentableConstructors f (M1 D meta rep) where
gctabulate = gctabulate @f @rep
gcindex = gcindex @f @rep
instance (RepresentableConstructors f a, RepresentableConstructors f b) =>
RepresentableConstructors f (a :+: b)
where
gctabulate f =
gctabulate @f @a \a -> gctabulate @f @b \b -> f (a :*: b)
gcindex f (a :*: b) = gcindex @f @b (gcindex @f @a f a) b
instance Representable f rep => RepresentableConstructors f (M1 C meta rep) where
gctabulate f = f . gindex @f @rep
gcindex f = f . gtabulate @f @rep
type GBuildADT :: (Type -> Exp Type) -> (Type -> Type) -> Type -> Type
type family GBuildADT f rep r where
GBuildADT f (M1 D _ rep) r = GBuildADT f rep r
GBuildADT f (a :+: b) r = GBuildADT f a (GBuildADT f b r)
GBuildADT f (M1 C _ rep) r = GConstruct f rep r
type GFieldsADT :: (Type -> Exp Type) -> (Type -> Type) -> Type
type family GFieldsADT f rep where
GFieldsADT f (M1 D _ rep) = GFieldsADT f rep
GFieldsADT f (a :+: b) = (GFieldsADT f a, GFieldsADT f b)
GFieldsADT f (M1 C _ rep) = GFields f rep
type RepresentableFields :: (Type -> Exp Type) -> (Type -> Type) -> Constraint
class RepresentableFields f rep where
gftabulate :: (GFieldsADT f rep -> a) -> GBuildADT f rep a
gfindex :: GBuildADT f rep a -> GFieldsADT f rep -> a
instance RepresentableFields f rep => RepresentableFields f (M1 D meta rep) where
gftabulate = gftabulate @f @rep
gfindex = gfindex @f @rep
instance (RepresentableFields f a, RepresentableFields f b) => RepresentableFields f (a :+: b) where
gftabulate f =
gftabulate @f @a \a -> gftabulate @f @b \b -> f (a, b)
gfindex f (a, b) = gfindex @f @b (gfindex @f @a f a) b
instance Representable f rep => RepresentableFields f (M1 C meta rep) where
gftabulate = gtabulate @f @rep
gfindex = gindex @f @rep
type GConstructableADT
:: (Type -> Exp Constraint)
-> (Type -> Exp K.HTable)
-> (Type -> Exp Type)
-> K.Context -> (Type -> Type) -> Constraint
class GConstructableADT _Table _Columns f context rep where
gbuildADT :: ()
=> ToColumns _Table _Columns f context
-> (Tag -> Nullifier context)
-> HIdentity Tag context
-> GFieldsADT f rep
-> GColumnsADT _Columns rep context
gunbuildADT :: ()
=> FromColumns _Table _Columns f context
-> Unnullifier context
-> GColumnsADT _Columns rep context
-> (HIdentity Tag context, GFieldsADT f rep)
gconstructADT :: ()
=> ToColumns _Table _Columns f context
-> Null context
-> Nullifier context
-> (Tag -> HIdentity Tag context)
-> GConstructors f rep (GColumnsADT _Columns rep context)
gdeconstructADT :: ()
=> FromColumns _Table _Columns f context
-> Unnullifier context
-> GConstructors f rep r
-> GColumnsADT _Columns rep context
-> (HIdentity Tag context, NonEmpty (Tag, r))
instance
( htable ~ HLabel "tag" (HIdentity Tag)
, GConstructableADT' _Table _Columns f context htable rep
)
=> GConstructableADT _Table _Columns f context (M1 D meta rep)
where
gbuildADT toColumns nullifier =
gbuildADT' @_Table @_Columns @f @context @htable @rep toColumns nullifier .
hlabel
gunbuildADT fromColumns unnullifier =
first hunlabel .
gunbuildADT' @_Table @_Columns @f @context @htable @rep fromColumns unnullifier
gconstructADT toColumns null nullifier mk =
gconstructADT' @_Table @_Columns @f @context @htable @rep toColumns null nullifier
(hlabel . mk)
gdeconstructADT fromColumns unnullifier cases =
first hunlabel .
gdeconstructADT' @_Table @_Columns @f @context @htable @rep fromColumns unnullifier cases
type GConstructableADT'
:: (Type -> Exp Constraint)
-> (Type -> Exp K.HTable)
-> (Type -> Exp Type)
-> K.Context -> K.HTable -> (Type -> Type) -> Constraint
class GConstructableADT' _Table _Columns f context htable rep where
gbuildADT' :: ()
=> ToColumns _Table _Columns f context
-> (Tag -> Nullifier context)
-> htable context
-> GFieldsADT f rep
-> GColumnsADT' _Columns htable rep context
gunbuildADT' :: ()
=> FromColumns _Table _Columns f context
-> Unnullifier context
-> GColumnsADT' _Columns htable rep context
-> (htable context, GFieldsADT f rep)
gconstructADT' :: ()
=> ToColumns _Table _Columns f context
-> Null context
-> Nullifier context
-> (Tag -> htable context)
-> GConstructors f rep (GColumnsADT' _Columns htable rep context)
gdeconstructADT' :: ()
=> FromColumns _Table _Columns f context
-> Unnullifier context
-> GConstructors f rep r
-> GColumnsADT' _Columns htable rep context
-> (htable context, NonEmpty (Tag, r))
gfill :: ()
=> Null context
-> htable context
-> GColumnsADT' _Columns htable rep context
instance
( htable' ~ GColumnsADT' _Columns htable a
, Functor (GConstructors f a)
, GConstructableADT' _Table _Columns f context htable a
, GConstructableADT' _Table _Columns f context htable' b
)
=> GConstructableADT' _Table _Columns f context htable (a :+: b)
where
gbuildADT' toColumns nullifier htable (a, b) =
gbuildADT' @_Table @_Columns @f @context @htable' @b toColumns nullifier
(gbuildADT' @_Table @_Columns @f @context @htable @a toColumns nullifier htable a)
b
gunbuildADT' fromColumns unnullifier columns =
case gunbuildADT' @_Table @_Columns @f @context @htable' @b fromColumns unnullifier columns of
(htable', b) ->
case gunbuildADT' @_Table @_Columns @f @context @htable @a fromColumns unnullifier htable' of
(htable, a) -> (htable, (a, b))
gconstructADT' toColumns null nullifier mk =
fmap (gfill @_Table @_Columns @f @context @htable' @b null) (gconstructADT' @_Table @_Columns @f @context @htable @a toColumns null nullifier mk) :*:
gconstructADT' @_Table @_Columns @f @context @htable' @b toColumns null nullifier (gfill @_Table @_Columns @f @context @htable @a null . mk)
gdeconstructADT' fromColumns unnullifier (a :*: b) columns =
case gdeconstructADT' @_Table @_Columns @f @context @htable' @b fromColumns unnullifier b columns of
(htable', cases) ->
case gdeconstructADT' @_Table @_Columns @f @context @htable @a fromColumns unnullifier a htable' of
(htable, cases') -> (htable, cases' <> cases)
gfill null =
gfill @_Table @_Columns @f @context @htable' @b null .
gfill @_Table @_Columns @f @context @htable @a null
instance (meta ~ 'MetaCons label _fixity _isRecord, KnownSymbol label) =>
GConstructableADT' _Table _Columns f context htable (M1 C meta U1)
where
gbuildADT' _ _ = const
gunbuildADT' _ _ = (, ())
gconstructADT' _ _ _ f _ = f tag
where
tag = Tag $ pack $ symbolVal (Proxy @label)
gdeconstructADT' _ _ r htable = (htable, pure (tag, r ()))
where
tag = Tag $ pack $ symbolVal (Proxy @label)
gfill _ = id
( HTable (GColumns _Columns rep)
, KnownSymbol label
, meta ~ 'MetaCons label _fixity _isRecord
, GConstructable _Table _Columns f context rep
, GColumnsADT' _Columns htable (M1 C meta rep) ~
HProduct htable (HLabel label (HNullify (GColumns _Columns rep)))
)
=> GConstructableADT' _Table _Columns f context htable (M1 C meta rep)
where
gbuildADT' toColumns nullifier htable =
HProduct htable .
hlabel .
hnullify (nullifier tag) .
gconstruct @_Table @_Columns @f @context @rep toColumns
where
tag = Tag $ pack $ symbolVal (Proxy @label)
gunbuildADT' fromColumns unnullifier (HProduct htable a) =
( htable
, gdeconstruct @_Table @_Columns @f @context @rep fromColumns $
runIdentity $
hunnullify (\spec -> pure . unnullifier spec) $
hunlabel
a
)
gconstructADT' toColumns _ nullifier mk =
HProduct htable .
hlabel .
hnullify nullifier .
gconstruct @_Table @_Columns @f @context @rep toColumns
where
tag = Tag $ pack $ symbolVal (Proxy @label)
htable = mk tag
gdeconstructADT' fromColumns unnullifier r (HProduct htable columns) =
( htable
, pure (tag, r a)
)
where
a = gdeconstruct @_Table @_Columns @f @context @rep fromColumns $
runIdentity $
hunnullify (\spec -> pure . unnullifier spec) $
hunlabel
columns
tag = Tag $ pack $ symbolVal (Proxy @label)
gfill null htable = HProduct htable (hlabel (hnulls null))
type GMakeableADT
:: (Type -> Exp Constraint)
-> (Type -> Exp K.HTable)
-> (Type -> Exp Type)
-> K.Context -> Symbol -> (Type -> Type) -> Constraint
class GMakeableADT _Table _Columns f context name rep where
gmakeADT :: ()
=> ToColumns _Table _Columns f context
-> Null context
-> Nullifier context
-> (Tag -> HIdentity Tag context)
-> GFields f (GConstructorADT name rep)
-> GColumnsADT _Columns rep context
instance
( htable ~ HLabel "tag" (HIdentity Tag)
, meta ~ 'MetaData datatype _module _package _newtype
, fallback ~ TypeError (NoConstructor datatype name)
, fields ~ GFields f (GConstructorADT' name rep fallback)
, GMakeableADT' _Table _Columns f context htable name rep fields
, KnownSymbol name
)
=> GMakeableADT _Table _Columns f context name (M1 D meta rep)
where
gmakeADT toColumns null nullifier wrap =
gmakeADT'
@_Table @_Columns @f @context @htable @name @rep @fields
toColumns null nullifier htable
where
tag = Tag $ pack $ symbolVal (Proxy @name)
htable = hlabel (wrap tag)
type GMakeableADT'
:: (Type -> Exp Constraint)
-> (Type -> Exp K.HTable)
-> (Type -> Exp Type)
-> K.Context -> K.HTable -> Symbol -> (Type -> Type) -> Type -> Constraint
class GMakeableADT' _Table _Columns f context htable name rep fields where
gmakeADT' :: ()
=> ToColumns _Table _Columns f context
-> Null context
-> Nullifier context
-> htable context
-> fields
-> GColumnsADT' _Columns htable rep context
instance
( htable' ~ GColumnsADT' _Columns htable a
, GMakeableADT' _Table _Columns f context htable name a fields
, GMakeableADT' _Table _Columns f context htable' name b fields
)
=> GMakeableADT' _Table _Columns f context htable name (a :+: b) fields
where
gmakeADT' toColumns null nullifier htable x =
gmakeADT' @_Table @_Columns @f @context @htable' @name @b @fields
toColumns null nullifier
(gmakeADT'
@_Table @_Columns @f @context @htable @name @a @fields toColumns
null nullifier htable x)
x
GMakeableADT' _Table _Columns f context htable name (M1 C ('MetaCons name _fixity _isRecord) U1) fields
where
gmakeADT' _ _ _ = const
GMakeableADT' _Table _Columns f context htable name (M1 C ('MetaCons label _fixity _isRecord) U1) fields
where
gmakeADT' _ _ _ = const
( HTable (GColumns _Columns rep)
, GConstructable _Table _Columns f context rep
, fields ~ GFields f rep
, GColumnsADT' _Columns htable (M1 C ('MetaCons name _fixity _isRecord) rep) ~
HProduct htable (HLabel name (HNullify (GColumns _Columns rep)))
)
=> GMakeableADT' _Table _Columns f context htable name (M1 C ('MetaCons name _fixity _isRecord) rep) fields
where
gmakeADT' toColumns _ nullifier htable =
HProduct htable .
hlabel .
hnullify nullifier .
gconstruct @_Table @_Columns @f @context @rep toColumns
( HTable (GColumns _Columns rep)
, GColumnsADT' _Columns htable (M1 C ('MetaCons label _fixity _isRecord) rep) ~
HProduct htable (HLabel label (HNullify (GColumns _Columns rep)))
)
=> GMakeableADT' _Table _Columns f context htable name (M1 C ('MetaCons label _fixity _isRecord) rep) fields
where
gmakeADT' _ null _ htable _ =
HProduct htable $
hlabel $
hnulls null
|
3480dbd6c6bd0891b76a574278f758894882406f41325ba3ae619e78990a3a44 | technomancy/leiningen | clj_test.clj | (ns clj-test
(:use [clojure.test]
[selectors :only [record-ran]]))
(deftest clojure-test
(record-ran :clj-test)
(is true))
| null | https://raw.githubusercontent.com/technomancy/leiningen/24fb93936133bd7fc30c393c127e9e69bb5f2392/test_projects/sample-reader-cond/test/clj_test.clj | clojure | (ns clj-test
(:use [clojure.test]
[selectors :only [record-ran]]))
(deftest clojure-test
(record-ran :clj-test)
(is true))
| |
49fb3f8320361ed910f3bfff4821db70d336c8725ec739f0cc431fee538c7d32 | quark-lang/quark | Literal.hs | module Core.Parser.AST.Literal where
import Core.Utility.Color
data Literal
= Integer Integer
| String String
| Float Float
| Char Char
deriving Eq
instance Show Literal where
show (Integer i) = bYellow $ show i
show (String s) = bGreen $ show s
show (Float s) = bYellow $ show s
show (Char c) = bGreen $ show c | null | https://raw.githubusercontent.com/quark-lang/quark/151f66399b61ba438879f22d6f0e4f21ae4ce66c/app/Core/Parser/AST/Literal.hs | haskell | module Core.Parser.AST.Literal where
import Core.Utility.Color
data Literal
= Integer Integer
| String String
| Float Float
| Char Char
deriving Eq
instance Show Literal where
show (Integer i) = bYellow $ show i
show (String s) = bGreen $ show s
show (Float s) = bYellow $ show s
show (Char c) = bGreen $ show c | |
1c4b20a6298c066b403fdae9bc26b53eda834f132ad8878c0668e4bb34e23816 | YoEight/lambda-database-experiment | Settings.hs | --------------------------------------------------------------------------------
-- |
-- Module : Lambda.Node.Settings
Copyright : ( C ) 2017
-- License : (see the file LICENSE)
--
Maintainer : < >
-- Stability : provisional
-- Portability : non-portable
--
--------------------------------------------------------------------------------
module Lambda.Node.Settings where
--------------------------------------------------------------------------------
import Lambda.Logger
import Lambda.Prelude
import Options.Applicative
import Network
import Text.PrettyPrint hiding ((<>))
--------------------------------------------------------------------------------
data Settings =
Settings
{ heartbeatInterval :: !NominalDiffTime
, heartbeatTimeout :: !NominalDiffTime
, connectionSettings :: !ConnectionSettings
}
--------------------------------------------------------------------------------
instance PrettyPrint Settings where
pprint Settings{..} =
vcat [ text "heartbeat-interval: " <+> text (show heartbeatInterval)
, text "heartbeat-timeout:" <+> text (show heartbeatTimeout)
, text "Connection settings:"
, nest 5 (ppConnectionSettings connectionSettings)
]
--------------------------------------------------------------------------------
instance AppSettings Settings where
settingsParser = parseSettings
description _ =
fullDesc <> header "LDE - Lambda Database Experiment."
<> progDesc "Starts the LDE server."
--------------------------------------------------------------------------------
parseSettings :: Parser Settings
parseSettings = Settings <$> parseHeartbeatInterval
<*> parseHeartbeatTimeout
<*> parseConnectionSettings
--------------------------------------------------------------------------------
parseHeartbeatInterval :: Parser NominalDiffTime
parseHeartbeatInterval = option (maybeReader check) go
where
go = long "heartbeat-interval" <> metavar "HEARTBEAT_INTERVAL"
<> help "Heartbeat interval: Delay in which \
\the server start to worry if it \
\has no news from the client."
<> value 0.5
<> showDefault
check input =
fmap realToFrac (readMay input :: Maybe Double)
--------------------------------------------------------------------------------
parseHeartbeatTimeout :: Parser NominalDiffTime
parseHeartbeatTimeout = option (maybeReader check) go
where
go = long "heartbeat-timeout" <> metavar "HEARTBEAT_TIMEOUT"
<> help "Heartbeat timeout: Delay that a \
\client has to send a heartbeat \
\response."
<> value 0.75
<> showDefault
check input =
fmap realToFrac (readMay input :: Maybe Double)
--------------------------------------------------------------------------------
data ConnectionSettings =
ConnectionSettings
{ portNumber :: !PortNumber
, hostname :: !String
}
--------------------------------------------------------------------------------
ppConnectionSettings :: ConnectionSettings -> Doc
ppConnectionSettings ConnectionSettings{..} =
vcat [ text "host:" <+> text hostname
, text "port:" <+> text (show portNumber)
]
--------------------------------------------------------------------------------
parseConnectionSettings :: Parser ConnectionSettings
parseConnectionSettings =
ConnectionSettings <$> parsePort
<*> parseHost
--------------------------------------------------------------------------------
parseHost :: Parser String
parseHost = strOption go
where
go = long "host" <> metavar "HOST"
<> help "Server hostname address."
<> value "127.0.0.1"
<> showDefault
--------------------------------------------------------------------------------
parsePort :: Parser PortNumber
parsePort = option (eitherReader check) go
where
go = long "port" <> metavar "PORT"
<> help "Server port."
<> value 1113
<> showDefault
check input =
case readMay input of
Nothing -> Left "Invalid port number."
Just port
| port > 0 && port < 65535 -> Right port
| otherwise -> Left [i|Port should be ]0-65535[|]
| null | https://raw.githubusercontent.com/YoEight/lambda-database-experiment/da4fab8bd358fb8fb78412c805d6f5bc05854432/lambda-node/library/Lambda/Node/Settings.hs | haskell | ------------------------------------------------------------------------------
|
Module : Lambda.Node.Settings
License : (see the file LICENSE)
Stability : provisional
Portability : non-portable
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------
------------------------------------------------------------------------------ | Copyright : ( C ) 2017
Maintainer : < >
module Lambda.Node.Settings where
import Lambda.Logger
import Lambda.Prelude
import Options.Applicative
import Network
import Text.PrettyPrint hiding ((<>))
data Settings =
Settings
{ heartbeatInterval :: !NominalDiffTime
, heartbeatTimeout :: !NominalDiffTime
, connectionSettings :: !ConnectionSettings
}
instance PrettyPrint Settings where
pprint Settings{..} =
vcat [ text "heartbeat-interval: " <+> text (show heartbeatInterval)
, text "heartbeat-timeout:" <+> text (show heartbeatTimeout)
, text "Connection settings:"
, nest 5 (ppConnectionSettings connectionSettings)
]
instance AppSettings Settings where
settingsParser = parseSettings
description _ =
fullDesc <> header "LDE - Lambda Database Experiment."
<> progDesc "Starts the LDE server."
parseSettings :: Parser Settings
parseSettings = Settings <$> parseHeartbeatInterval
<*> parseHeartbeatTimeout
<*> parseConnectionSettings
parseHeartbeatInterval :: Parser NominalDiffTime
parseHeartbeatInterval = option (maybeReader check) go
where
go = long "heartbeat-interval" <> metavar "HEARTBEAT_INTERVAL"
<> help "Heartbeat interval: Delay in which \
\the server start to worry if it \
\has no news from the client."
<> value 0.5
<> showDefault
check input =
fmap realToFrac (readMay input :: Maybe Double)
parseHeartbeatTimeout :: Parser NominalDiffTime
parseHeartbeatTimeout = option (maybeReader check) go
where
go = long "heartbeat-timeout" <> metavar "HEARTBEAT_TIMEOUT"
<> help "Heartbeat timeout: Delay that a \
\client has to send a heartbeat \
\response."
<> value 0.75
<> showDefault
check input =
fmap realToFrac (readMay input :: Maybe Double)
data ConnectionSettings =
ConnectionSettings
{ portNumber :: !PortNumber
, hostname :: !String
}
ppConnectionSettings :: ConnectionSettings -> Doc
ppConnectionSettings ConnectionSettings{..} =
vcat [ text "host:" <+> text hostname
, text "port:" <+> text (show portNumber)
]
parseConnectionSettings :: Parser ConnectionSettings
parseConnectionSettings =
ConnectionSettings <$> parsePort
<*> parseHost
parseHost :: Parser String
parseHost = strOption go
where
go = long "host" <> metavar "HOST"
<> help "Server hostname address."
<> value "127.0.0.1"
<> showDefault
parsePort :: Parser PortNumber
parsePort = option (eitherReader check) go
where
go = long "port" <> metavar "PORT"
<> help "Server port."
<> value 1113
<> showDefault
check input =
case readMay input of
Nothing -> Left "Invalid port number."
Just port
| port > 0 && port < 65535 -> Right port
| otherwise -> Left [i|Port should be ]0-65535[|]
|
764c4b345e492fb45530878e318913e63cee9040fe9d980e9b85a3a8e86ddacc | s-zeng/cs442-ocaml-starter | cs442.ml | open! Core
module Twosum = Twosum
let main () = print_endline "HUZZAH!"
| null | https://raw.githubusercontent.com/s-zeng/cs442-ocaml-starter/196f9ea14593d49e8f6edf43400c578e2dd17fd0/src/cs442.ml | ocaml | open! Core
module Twosum = Twosum
let main () = print_endline "HUZZAH!"
| |
1a07fa19057d49354485841c1b3a18fba392bf6c3bd2d678ad17e26eaf07a673 | serokell/tzbot | MessageBlock.hs | SPDX - FileCopyrightText : 2022 >
--
SPDX - License - Identifier : MPL-2.0
{- | This module contains datatypes that are used to parse the message blocks
- that the message objects are shipped with. They seem to be not properly
- documented and we have to be safe and assume that some unknown objects
- can appear.
- See:
- * -09-what-they-see-is-what-you-get-and-more-and-less
- *
-}
module TzBot.Slack.API.MessageBlock
( -- * Block datatype
MessageBlock
-- * Extract errors (or, more precisely, warnings)
, ExtractError (..)
, UnknownBlockElementLevel1Type (..)
, UnknownBlockElementLevel2Error (..)
-- * Functions
, extractPieces
, splitExtractErrors
) where
import Universum
import Control.Monad.Trans.Writer.CPS (Writer, runWriter, tell)
import Data.Aeson
(FromJSON(..), Options(..), SumEncoding(..), ToJSON(toJSON), Value, camelTo2, defaultOptions,
genericParseJSON, genericToJSON)
import Data.Aeson.Lens (AsPrimitive(_String), key)
import Data.Char (isLower)
import Data.String.Conversions (cs)
import Data.Text.Internal.Builder (Builder, fromText, toLazyText)
import Deriving.Aeson (CamelToSnake, ConstructorTagModifier, CustomJSON(..), StripPrefix)
import TzBot.Util
newtype MessageBlock = MessageBlock
{ mbElements :: [BlockElementLevel1]
} deriving stock (Eq, Show, Generic)
deriving (FromJSON, ToJSON) via RecordWrapper MessageBlock
data BlockElementLevel1
= BEL1List RichTextList
| BEL1Plain PlainBlockElementLevel1
deriving stock (Eq, Show, Generic)
deriving (FromJSON, ToJSON) via SumWrapper BlockElementLevel1
data RichTextList = RichTextList
{ rtlElements :: [BlockElementLevel1]
} deriving stock (Eq, Show, Generic)
deriving (FromJSON, ToJSON) via TypedWrapper RichTextList
data BlockElementType
= BETRichTextSection -- ^ Simple text section
^ code block
| BETRichTextQuote -- ^ Slack quote
deriving stock (Eq, Show, Generic)
deriving (FromJSON, ToJSON) via CustomJSON '[ConstructorTagModifier '[StripPrefix "BET", CamelToSnake]] BlockElementType
data PlainBlockElementLevel1 = PlainBlockElementLevel1
{ beType :: WithUnknown BlockElementType
, beElements :: Maybe [WithUnknown ElementText]
^ Level 2 elements
} deriving stock (Eq, Show, Generic)
deriving (FromJSON, ToJSON) via RecordWrapper PlainBlockElementLevel1
----
data Style = Style
{ styCode :: Maybe Bool
, styStrike :: Maybe Bool
, styItalic :: Maybe Bool
, styBold :: Maybe Bool
} deriving stock (Eq, Show, Generic)
deriving (FromJSON, ToJSON) via RecordWrapper Style
--
| Here it 's the only level 2 element because we are not interested
-- in others at the current moment.
data ElementText = ElementText
{ etText :: Text
, etStyle :: Maybe Style
} deriving stock (Eq, Show, Generic)
blockElementOptions :: Options
blockElementOptions = defaultOptions
{ fieldLabelModifier = camelTo2 '_' . dropWhile isLower
, constructorTagModifier = camelTo2 '_' . stripPrefixIfPresent "Element"
, tagSingleConstructors = True
, sumEncoding = TaggedObject "type" "contents"
, omitNothingFields = True
}
instance FromJSON ElementText where
parseJSON = genericParseJSON blockElementOptions
instance ToJSON ElementText where
toJSON = genericToJSON blockElementOptions
----
data ExtractError
= EEUnknownBlockElementLevel1Type UnknownBlockElementLevel1Type
| EEUnknownBlockElementLevel2 UnknownBlockElementLevel2Error
deriving stock (Eq, Show)
splitExtractErrors
:: [ExtractError]
-> ([UnknownBlockElementLevel1Type], [UnknownBlockElementLevel2Error])
splitExtractErrors = partitionEithers . map f
where
f (EEUnknownBlockElementLevel1Type val) = Left val
f (EEUnknownBlockElementLevel2 l2Val) = Right l2Val
newtype UnknownBlockElementLevel1Type = UnknownBlockElementLevel1Type
{ ubeltValue :: Value
} deriving stock (Eq, Show)
data UnknownBlockElementLevel2Error = UnknownBlockElementLevel2Error
{ ubeType :: Text
, ubeValue :: Value
} deriving stock (Eq, Show)
| This function has two main tasks :
1 . Analyze the Slack - provided structure of the incoming message ;
2 . Ignore code blocks .
--
-- Also since the message blocks are not documented, it collects unrecognized
values of level1 / level2 block elements .
extractPieces :: [MessageBlock] -> ([Text], [ExtractError])
extractPieces mBlocks = runWriter $ concat <$> mapM goMessageBlock mBlocks
where
goMessageBlock :: MessageBlock -> Writer [ExtractError] [Text]
goMessageBlock MessageBlock {..} = concat <$> mapM goBlockElementLevel1 mbElements
goBlockElementLevel1 :: BlockElementLevel1 -> Writer [ExtractError] [Text]
goBlockElementLevel1 = \case
BEL1List RichTextList {..} -> concat <$> mapM goBlockElementLevel1 rtlElements
BEL1Plain PlainBlockElementLevel1 {..} -> do
whenLeft (unUnknown beType) \val ->
tell [EEUnknownBlockElementLevel1Type $ UnknownBlockElementLevel1Type val]
-- ignore multiline code block
case beType of
WithUnknown (Right BETRichTextPreformatted) -> pure []
_ -> maybe (pure []) goBlockElementLevel2 beElements
goBlockElementLevel2 :: [WithUnknown ElementText] -> Writer [ExtractError] [Text]
goBlockElementLevel2 els = reverse <$> go Nothing [] els
where
go :: Maybe Builder -> [Text] -> [WithUnknown ElementText] -> Writer [ExtractError] [Text]
go mbCurPiece prevPieces (e:es) = case unUnknown e of
Left val -> do
let _type = fromMaybe "unknown" (val ^? key "type" . _String)
tell [EEUnknownBlockElementLevel2 $ UnknownBlockElementLevel2Error _type val]
go Nothing (prependMbCurrentToPrevious mbCurPiece prevPieces) es
Right ElementText {..} -> do
let etTextB = fromText etText
if (etStyle >>= styCode) == Just True
-- ignore simple code block
then go Nothing (prependMbCurrentToPrevious mbCurPiece prevPieces) es
else go (Just $ maybe etTextB (<> etTextB) mbCurPiece) prevPieces es
go mbCurPiece prevPieces [] =
pure $ prependMbCurrentToPrevious mbCurPiece prevPieces
prependMbCurrentToPrevious :: Maybe Builder -> [Text] -> [Text]
prependMbCurrentToPrevious mbCurPiece prevPieces =
maybe prevPieces ((: prevPieces) . cs . toLazyText) mbCurPiece
| null | https://raw.githubusercontent.com/serokell/tzbot/5cdf8ce17784bd41d2e10f4131ac0b0bd11d0ec4/src/TzBot/Slack/API/MessageBlock.hs | haskell |
| This module contains datatypes that are used to parse the message blocks
- that the message objects are shipped with. They seem to be not properly
- documented and we have to be safe and assume that some unknown objects
- can appear.
- See:
- * -09-what-they-see-is-what-you-get-and-more-and-less
- *
* Block datatype
* Extract errors (or, more precisely, warnings)
* Functions
^ Simple text section
^ Slack quote
--
in others at the current moment.
--
Also since the message blocks are not documented, it collects unrecognized
ignore multiline code block
ignore simple code block | SPDX - FileCopyrightText : 2022 >
SPDX - License - Identifier : MPL-2.0
module TzBot.Slack.API.MessageBlock
MessageBlock
, ExtractError (..)
, UnknownBlockElementLevel1Type (..)
, UnknownBlockElementLevel2Error (..)
, extractPieces
, splitExtractErrors
) where
import Universum
import Control.Monad.Trans.Writer.CPS (Writer, runWriter, tell)
import Data.Aeson
(FromJSON(..), Options(..), SumEncoding(..), ToJSON(toJSON), Value, camelTo2, defaultOptions,
genericParseJSON, genericToJSON)
import Data.Aeson.Lens (AsPrimitive(_String), key)
import Data.Char (isLower)
import Data.String.Conversions (cs)
import Data.Text.Internal.Builder (Builder, fromText, toLazyText)
import Deriving.Aeson (CamelToSnake, ConstructorTagModifier, CustomJSON(..), StripPrefix)
import TzBot.Util
newtype MessageBlock = MessageBlock
{ mbElements :: [BlockElementLevel1]
} deriving stock (Eq, Show, Generic)
deriving (FromJSON, ToJSON) via RecordWrapper MessageBlock
data BlockElementLevel1
= BEL1List RichTextList
| BEL1Plain PlainBlockElementLevel1
deriving stock (Eq, Show, Generic)
deriving (FromJSON, ToJSON) via SumWrapper BlockElementLevel1
data RichTextList = RichTextList
{ rtlElements :: [BlockElementLevel1]
} deriving stock (Eq, Show, Generic)
deriving (FromJSON, ToJSON) via TypedWrapper RichTextList
data BlockElementType
^ code block
deriving stock (Eq, Show, Generic)
deriving (FromJSON, ToJSON) via CustomJSON '[ConstructorTagModifier '[StripPrefix "BET", CamelToSnake]] BlockElementType
data PlainBlockElementLevel1 = PlainBlockElementLevel1
{ beType :: WithUnknown BlockElementType
, beElements :: Maybe [WithUnknown ElementText]
^ Level 2 elements
} deriving stock (Eq, Show, Generic)
deriving (FromJSON, ToJSON) via RecordWrapper PlainBlockElementLevel1
data Style = Style
{ styCode :: Maybe Bool
, styStrike :: Maybe Bool
, styItalic :: Maybe Bool
, styBold :: Maybe Bool
} deriving stock (Eq, Show, Generic)
deriving (FromJSON, ToJSON) via RecordWrapper Style
| Here it 's the only level 2 element because we are not interested
data ElementText = ElementText
{ etText :: Text
, etStyle :: Maybe Style
} deriving stock (Eq, Show, Generic)
blockElementOptions :: Options
blockElementOptions = defaultOptions
{ fieldLabelModifier = camelTo2 '_' . dropWhile isLower
, constructorTagModifier = camelTo2 '_' . stripPrefixIfPresent "Element"
, tagSingleConstructors = True
, sumEncoding = TaggedObject "type" "contents"
, omitNothingFields = True
}
instance FromJSON ElementText where
parseJSON = genericParseJSON blockElementOptions
instance ToJSON ElementText where
toJSON = genericToJSON blockElementOptions
data ExtractError
= EEUnknownBlockElementLevel1Type UnknownBlockElementLevel1Type
| EEUnknownBlockElementLevel2 UnknownBlockElementLevel2Error
deriving stock (Eq, Show)
splitExtractErrors
:: [ExtractError]
-> ([UnknownBlockElementLevel1Type], [UnknownBlockElementLevel2Error])
splitExtractErrors = partitionEithers . map f
where
f (EEUnknownBlockElementLevel1Type val) = Left val
f (EEUnknownBlockElementLevel2 l2Val) = Right l2Val
newtype UnknownBlockElementLevel1Type = UnknownBlockElementLevel1Type
{ ubeltValue :: Value
} deriving stock (Eq, Show)
data UnknownBlockElementLevel2Error = UnknownBlockElementLevel2Error
{ ubeType :: Text
, ubeValue :: Value
} deriving stock (Eq, Show)
| This function has two main tasks :
1 . Analyze the Slack - provided structure of the incoming message ;
2 . Ignore code blocks .
values of level1 / level2 block elements .
extractPieces :: [MessageBlock] -> ([Text], [ExtractError])
extractPieces mBlocks = runWriter $ concat <$> mapM goMessageBlock mBlocks
where
goMessageBlock :: MessageBlock -> Writer [ExtractError] [Text]
goMessageBlock MessageBlock {..} = concat <$> mapM goBlockElementLevel1 mbElements
goBlockElementLevel1 :: BlockElementLevel1 -> Writer [ExtractError] [Text]
goBlockElementLevel1 = \case
BEL1List RichTextList {..} -> concat <$> mapM goBlockElementLevel1 rtlElements
BEL1Plain PlainBlockElementLevel1 {..} -> do
whenLeft (unUnknown beType) \val ->
tell [EEUnknownBlockElementLevel1Type $ UnknownBlockElementLevel1Type val]
case beType of
WithUnknown (Right BETRichTextPreformatted) -> pure []
_ -> maybe (pure []) goBlockElementLevel2 beElements
goBlockElementLevel2 :: [WithUnknown ElementText] -> Writer [ExtractError] [Text]
goBlockElementLevel2 els = reverse <$> go Nothing [] els
where
go :: Maybe Builder -> [Text] -> [WithUnknown ElementText] -> Writer [ExtractError] [Text]
go mbCurPiece prevPieces (e:es) = case unUnknown e of
Left val -> do
let _type = fromMaybe "unknown" (val ^? key "type" . _String)
tell [EEUnknownBlockElementLevel2 $ UnknownBlockElementLevel2Error _type val]
go Nothing (prependMbCurrentToPrevious mbCurPiece prevPieces) es
Right ElementText {..} -> do
let etTextB = fromText etText
if (etStyle >>= styCode) == Just True
then go Nothing (prependMbCurrentToPrevious mbCurPiece prevPieces) es
else go (Just $ maybe etTextB (<> etTextB) mbCurPiece) prevPieces es
go mbCurPiece prevPieces [] =
pure $ prependMbCurrentToPrevious mbCurPiece prevPieces
prependMbCurrentToPrevious :: Maybe Builder -> [Text] -> [Text]
prependMbCurrentToPrevious mbCurPiece prevPieces =
maybe prevPieces ((: prevPieces) . cs . toLazyText) mbCurPiece
|
47b6af6b2852b399bb39b83d750e169c4922c45238d2c09a44375d39af99e380 | jeromesimeon/Galax | code_util_matching.ml | (***********************************************************************)
(* *)
(* GALAX *)
(* XQuery Engine *)
(* *)
Copyright 2001 - 2007 .
(* Distributed only by permission. *)
(* *)
(***********************************************************************)
$ I d : , v 1.14 2007/07/13 18:24:42 mff Exp $
(* Module: Code_util_matching
Description:
This module contains some auxiliary evaluation code for type
matching.
*)
open Error
open Datatypes
open Datatypes_util
open Xquery_ast
open Xquery_algebra_ast
open Xquery_common_ast
open Dm_types
open Physical_value
open Physical_item
open Norm_context
open Typing_context
open Xquery_algebra_ast
(******************)
(* Error messages *)
(******************)
let raise_type_matching_error stat_ctxt ic sequencetype =
let s = Serialization.bserialize_datamodel (Norm_context.processing_context_from_norm_context(Typing_context.norm_context_from_stat_context stat_ctxt)) ic in
raise (error_with_file_location sequencetype.pasequencetype_loc
(Query (Type_Error (Print_top.bprintf_asequencetype ("Type of value '\n"^s^"\n' does not match sequence type: ") sequencetype))))
(*****************************************)
(* Auxiliary functions for type matching *)
(*****************************************)
let item_matches_named_type cxschema dtk (singlearg : item) : bool =
(* Note:
New type matching code with named typing!
- Jerome 05/22/2004
*)
match dtk with
| AITItem -> true
| _ ->
begin
match item_kind singlearg with
| AtomicValueKind ->
begin
match dtk with
| AITAtomic a ->
begin
Schema_judge.check_declared_type cxschema a;
let at1 = (getAtomicValue singlearg)#atomic_type() in
Schema_judge.derives_from cxschema at1 a
end
| AITNumeric ->
let at1 = (getAtomicValue singlearg)#getAtomicValueKind() in
(Datatypes_util.atomic_is_numeric at1)
| _ -> false
end
| NodeKind ->
New code to support kind tests 09/06/2005 - Jerome
match dtk with
| AITKindTest AAnyKind ->
true
| AITKindTest dtk ->
let n = getNode singlearg in
Dm_step.item_matches_kind_test (n#get_access_ops_dm) cxschema dtk n
| _ -> false
end
end
(* dynamic_type_check checks dynamically that a value matches a sequence type. *)
let dynamic_type_check stat_ctxt dt input_cursor =
1 . Extract the item type as well as the bounds for the Sequence Type
let (adtk,b1,b2) = Typing_util.factor_asequencetype dt in
let norm_ctxt = norm_context_from_stat_context stat_ctxt in
let cxschema = cxschema_from_norm_context norm_ctxt in
2 . Auxiliary function to check the bound once we know the
cardinality of the sequence
cardinality of the sequence *)
let check_bound counter =
let b = Occurrence.occurs counter
in (Occurrence.le b b2) && (Occurrence.le b1 b)
in
3 . Deal with the empty sequence first
if Cursor.cursor_is_empty input_cursor
then
begin
if
(match adtk with
| AITEmpty -> true
| _ -> false) || check_bound 0
then
input_cursor
else
raise_type_matching_error stat_ctxt input_cursor dt
end
else
4 . Now there is at least one item in the sequence
match adtk with
| AITEmpty ->
raise_type_matching_error stat_ctxt input_cursor dt
| _ ->
let counter = ref 0 in
let item_match_fun x =
incr counter;
if (item_matches_named_type cxschema adtk x)
then x
else raise_type_matching_error stat_ctxt input_cursor dt
in
let matched_cursor = Cursor.cursor_map item_match_fun input_cursor in
let check_bound_fun x =
begin
if (Cursor.cursor_is_empty input_cursor)
then
if check_bound !counter
then ()
else raise_type_matching_error stat_ctxt input_cursor dt
else
()
end;
x
in
Cursor.cursor_map check_bound_fun matched_cursor
let dynamic_opttype_check stat_ctxt odt input_cursor =
match odt with
| None -> input_cursor
| Some dt -> dynamic_type_check stat_ctxt dt input_cursor
let dynamic_type_check_item stat_ctxt dt input_item =
let input_cursor = Cursor.cursor_of_singleton input_item in
ignore(dynamic_type_check stat_ctxt dt input_cursor)
let dynamic_opttype_check_item stat_ctxt dt input_item =
let input_cursor = Cursor.cursor_of_singleton input_item in
ignore(dynamic_opttype_check stat_ctxt dt input_cursor)
let boolean_dynamic_type_check stat_ctxt dt input_item_list =
let input_cursor = Cursor.cursor_of_list input_item_list in
try
Cursor.cursor_iter (fun x -> ()) (dynamic_type_check stat_ctxt dt input_cursor);
true
with
| (Query (Type_Error _)) ->
false
| null | https://raw.githubusercontent.com/jeromesimeon/Galax/bc565acf782c140291911d08c1c784c9ac09b432/code_util/code_util_matching.ml | ocaml | *********************************************************************
GALAX
XQuery Engine
Distributed only by permission.
*********************************************************************
Module: Code_util_matching
Description:
This module contains some auxiliary evaluation code for type
matching.
****************
Error messages
****************
***************************************
Auxiliary functions for type matching
***************************************
Note:
New type matching code with named typing!
- Jerome 05/22/2004
dynamic_type_check checks dynamically that a value matches a sequence type. | Copyright 2001 - 2007 .
$ I d : , v 1.14 2007/07/13 18:24:42 mff Exp $
open Error
open Datatypes
open Datatypes_util
open Xquery_ast
open Xquery_algebra_ast
open Xquery_common_ast
open Dm_types
open Physical_value
open Physical_item
open Norm_context
open Typing_context
open Xquery_algebra_ast
let raise_type_matching_error stat_ctxt ic sequencetype =
let s = Serialization.bserialize_datamodel (Norm_context.processing_context_from_norm_context(Typing_context.norm_context_from_stat_context stat_ctxt)) ic in
raise (error_with_file_location sequencetype.pasequencetype_loc
(Query (Type_Error (Print_top.bprintf_asequencetype ("Type of value '\n"^s^"\n' does not match sequence type: ") sequencetype))))
let item_matches_named_type cxschema dtk (singlearg : item) : bool =
match dtk with
| AITItem -> true
| _ ->
begin
match item_kind singlearg with
| AtomicValueKind ->
begin
match dtk with
| AITAtomic a ->
begin
Schema_judge.check_declared_type cxschema a;
let at1 = (getAtomicValue singlearg)#atomic_type() in
Schema_judge.derives_from cxschema at1 a
end
| AITNumeric ->
let at1 = (getAtomicValue singlearg)#getAtomicValueKind() in
(Datatypes_util.atomic_is_numeric at1)
| _ -> false
end
| NodeKind ->
New code to support kind tests 09/06/2005 - Jerome
match dtk with
| AITKindTest AAnyKind ->
true
| AITKindTest dtk ->
let n = getNode singlearg in
Dm_step.item_matches_kind_test (n#get_access_ops_dm) cxschema dtk n
| _ -> false
end
end
let dynamic_type_check stat_ctxt dt input_cursor =
1 . Extract the item type as well as the bounds for the Sequence Type
let (adtk,b1,b2) = Typing_util.factor_asequencetype dt in
let norm_ctxt = norm_context_from_stat_context stat_ctxt in
let cxschema = cxschema_from_norm_context norm_ctxt in
2 . Auxiliary function to check the bound once we know the
cardinality of the sequence
cardinality of the sequence *)
let check_bound counter =
let b = Occurrence.occurs counter
in (Occurrence.le b b2) && (Occurrence.le b1 b)
in
3 . Deal with the empty sequence first
if Cursor.cursor_is_empty input_cursor
then
begin
if
(match adtk with
| AITEmpty -> true
| _ -> false) || check_bound 0
then
input_cursor
else
raise_type_matching_error stat_ctxt input_cursor dt
end
else
4 . Now there is at least one item in the sequence
match adtk with
| AITEmpty ->
raise_type_matching_error stat_ctxt input_cursor dt
| _ ->
let counter = ref 0 in
let item_match_fun x =
incr counter;
if (item_matches_named_type cxschema adtk x)
then x
else raise_type_matching_error stat_ctxt input_cursor dt
in
let matched_cursor = Cursor.cursor_map item_match_fun input_cursor in
let check_bound_fun x =
begin
if (Cursor.cursor_is_empty input_cursor)
then
if check_bound !counter
then ()
else raise_type_matching_error stat_ctxt input_cursor dt
else
()
end;
x
in
Cursor.cursor_map check_bound_fun matched_cursor
let dynamic_opttype_check stat_ctxt odt input_cursor =
match odt with
| None -> input_cursor
| Some dt -> dynamic_type_check stat_ctxt dt input_cursor
let dynamic_type_check_item stat_ctxt dt input_item =
let input_cursor = Cursor.cursor_of_singleton input_item in
ignore(dynamic_type_check stat_ctxt dt input_cursor)
let dynamic_opttype_check_item stat_ctxt dt input_item =
let input_cursor = Cursor.cursor_of_singleton input_item in
ignore(dynamic_opttype_check stat_ctxt dt input_cursor)
let boolean_dynamic_type_check stat_ctxt dt input_item_list =
let input_cursor = Cursor.cursor_of_list input_item_list in
try
Cursor.cursor_iter (fun x -> ()) (dynamic_type_check stat_ctxt dt input_cursor);
true
with
| (Query (Type_Error _)) ->
false
|
68667d57a5d3382b3f7a84d86c62b847c91250546590db56c0342e22ab1c2481 | iustin/corydalis | FlaggedImages.hs |
Copyright ( C ) 2013
This program is free software : you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
along with this program . If not , see < / > .
Copyright (C) 2013 Iustin Pop
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see </>.
-}
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE OverloadedStrings #
{-# LANGUAGE QuasiQuotes #-}
# LANGUAGE RecordWildCards #
{-# LANGUAGE TemplateHaskell #-}
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
module Handler.FlaggedImages
( getFlaggedImagesR
, getFlaggedImagesListR
, putImageFlagR
, deleteImageFlagR
)
where
import qualified Data.Text as Text
import Database.Esqueleto.Legacy ((?.), (^.))
import qualified Database.Esqueleto.Legacy as E
import Handler.Utils
import Handler.Widgets
import Import
import Pics
getFlaggedImagesR :: Handler Html
getFlaggedImagesR = do
flagged <- runDB
$ E.select
$ E.from $ \(flaggedimage `E.LeftOuterJoin` user) -> do
E.on $ E.just (flaggedimage ^. FlaggedImageAuthorId) E.==. user ?. UserId
return ( flaggedimage ^. FlaggedImageFolder
, flaggedimage ^. FlaggedImageName
, user ?. UserName
)
pics <- getPics
let flagged' = map (\(E.Value dir, E.Value name, E.Value author) ->
let img = lookupImage pics dir name
in (dir, name, author, img)) flagged
defaultLayout $ do
setHtmlTitle "listing flagged images"
$(widgetFile "flaggedimages")
getFlaggedImagesListR :: Handler Text
getFlaggedImagesListR = do
flagged <- runDB $
selectList [] [Asc FlaggedImageFolder, Asc FlaggedImageName]
let flagged' = map entityVal flagged
return . Text.unlines . map flaggedImageFolder $ flagged'
flagImageMsg :: Bool -> Text
flagImageMsg True = "Image flagged"
flagImageMsg False = "Image already flagged!"
unFlagImageMsg :: Bool -> Text
unFlagImageMsg True = "Image flag removed"
unFlagImageMsg False = "Image was not flagged!"
flagImage :: Text -> ImageName -> Handler Bool
flagImage folder iname = do
_ <- getImage folder iname
cuser <- requireAuthId
r <- runDB $ insertUnique $ FlaggedImage folder iname cuser
return $ isJust r
unFlagImage :: Text -> ImageName -> Handler Bool
unFlagImage folder iname = runDB $ do
let u = UniqueFlaggedImage folder iname
fi <- getBy u
case fi of
Just (Entity fii _) -> delete fii >> return True
Nothing -> return False
flagHtml :: Text -> ImageName -> Text -> Text -> Handler Html
flagHtml folder iname msg kind = do
setMessage $ toHtml msg
setSession msgTypeKey kind
setUltDestReferer
redirectUltDest $ ImageR folder iname
flagJson :: Text -> Handler Value
flagJson msg = return $ object ["text" .= msg]
flagHandler
:: (Text -> ImageName -> Handler Bool)
-> (Bool -> Text)
-> Text
-> ImageName
-> Handler TypedContent
flagHandler action msggen folder iname = do
r <- action folder iname
let msg = msggen r
kind = if r then msgSuccess else msgWarning
selectRep $ do
provideRep $ flagJson msg
provideRep $ flagHtml folder iname msg kind
putImageFlagR :: Text -> ImageName -> Handler TypedContent
putImageFlagR = flagHandler flagImage flagImageMsg
deleteImageFlagR :: Text -> ImageName -> Handler TypedContent
deleteImageFlagR = flagHandler unFlagImage unFlagImageMsg
| null | https://raw.githubusercontent.com/iustin/corydalis/eb9641cab57da3800e5d6ff82682a0451e25c793/src/Handler/FlaggedImages.hs | haskell | # LANGUAGE QuasiQuotes #
# LANGUAGE TemplateHaskell # |
Copyright ( C ) 2013
This program is free software : you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation , either version 3 of the License , or
( at your option ) any later version .
This program is distributed in the hope that it will be useful ,
but WITHOUT ANY WARRANTY ; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
GNU Affero General Public License for more details .
You should have received a copy of the GNU Affero General Public License
along with this program . If not , see < / > .
Copyright (C) 2013 Iustin Pop
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU Affero General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Affero General Public License for more details.
You should have received a copy of the GNU Affero General Public License
along with this program. If not, see </>.
-}
# LANGUAGE MultiParamTypeClasses #
# LANGUAGE NoImplicitPrelude #
# LANGUAGE OverloadedStrings #
# LANGUAGE RecordWildCards #
# LANGUAGE TupleSections #
# LANGUAGE TypeFamilies #
# LANGUAGE ViewPatterns #
module Handler.FlaggedImages
( getFlaggedImagesR
, getFlaggedImagesListR
, putImageFlagR
, deleteImageFlagR
)
where
import qualified Data.Text as Text
import Database.Esqueleto.Legacy ((?.), (^.))
import qualified Database.Esqueleto.Legacy as E
import Handler.Utils
import Handler.Widgets
import Import
import Pics
getFlaggedImagesR :: Handler Html
getFlaggedImagesR = do
flagged <- runDB
$ E.select
$ E.from $ \(flaggedimage `E.LeftOuterJoin` user) -> do
E.on $ E.just (flaggedimage ^. FlaggedImageAuthorId) E.==. user ?. UserId
return ( flaggedimage ^. FlaggedImageFolder
, flaggedimage ^. FlaggedImageName
, user ?. UserName
)
pics <- getPics
let flagged' = map (\(E.Value dir, E.Value name, E.Value author) ->
let img = lookupImage pics dir name
in (dir, name, author, img)) flagged
defaultLayout $ do
setHtmlTitle "listing flagged images"
$(widgetFile "flaggedimages")
getFlaggedImagesListR :: Handler Text
getFlaggedImagesListR = do
flagged <- runDB $
selectList [] [Asc FlaggedImageFolder, Asc FlaggedImageName]
let flagged' = map entityVal flagged
return . Text.unlines . map flaggedImageFolder $ flagged'
flagImageMsg :: Bool -> Text
flagImageMsg True = "Image flagged"
flagImageMsg False = "Image already flagged!"
unFlagImageMsg :: Bool -> Text
unFlagImageMsg True = "Image flag removed"
unFlagImageMsg False = "Image was not flagged!"
flagImage :: Text -> ImageName -> Handler Bool
flagImage folder iname = do
_ <- getImage folder iname
cuser <- requireAuthId
r <- runDB $ insertUnique $ FlaggedImage folder iname cuser
return $ isJust r
unFlagImage :: Text -> ImageName -> Handler Bool
unFlagImage folder iname = runDB $ do
let u = UniqueFlaggedImage folder iname
fi <- getBy u
case fi of
Just (Entity fii _) -> delete fii >> return True
Nothing -> return False
flagHtml :: Text -> ImageName -> Text -> Text -> Handler Html
flagHtml folder iname msg kind = do
setMessage $ toHtml msg
setSession msgTypeKey kind
setUltDestReferer
redirectUltDest $ ImageR folder iname
flagJson :: Text -> Handler Value
flagJson msg = return $ object ["text" .= msg]
flagHandler
:: (Text -> ImageName -> Handler Bool)
-> (Bool -> Text)
-> Text
-> ImageName
-> Handler TypedContent
flagHandler action msggen folder iname = do
r <- action folder iname
let msg = msggen r
kind = if r then msgSuccess else msgWarning
selectRep $ do
provideRep $ flagJson msg
provideRep $ flagHtml folder iname msg kind
putImageFlagR :: Text -> ImageName -> Handler TypedContent
putImageFlagR = flagHandler flagImage flagImageMsg
deleteImageFlagR :: Text -> ImageName -> Handler TypedContent
deleteImageFlagR = flagHandler unFlagImage unFlagImageMsg
|
f987ffc4f03d4eaa433e0021687ab04ad4a87f744663c17bdad6c059926fbdae | c-cube/qcheck | QCheck_runner.ml |
include QCheck_base_runner
include QCheck_ounit
| null | https://raw.githubusercontent.com/c-cube/qcheck/063c1d74795a24eb77fa661d218c4715382df566/src/QCheck_runner.ml | ocaml |
include QCheck_base_runner
include QCheck_ounit
| |
4da8cbc3bc712e6d5a2fd731a157dcbd45eb140a0b9b2665ec9cfc0825f53809 | roosta/herb | global.cljs | (ns site.tutorials.global
(:require
[garden.units :refer [em px rem]]
[site.components.code :refer [code]]
[site.components.paper :refer [paper]]
[site.components.text :refer [text]]
[site.snippets.global :as syntax]
[herb.core :as herb :refer [<class <id]]
[reagent.core :as r])
(:require-macros
[site.macros :as macros])
)
(defn main []
(let [e1 (macros/example-src "global.cljs")
e2 (macros/example-src "global.html")]
[paper {:id "global"}
[text {:variant :heading}
"Global styles"]
[text
"Even though Herb is all about scoped CSS sometimes you can't get around
needing to target spesific elements using a selector. That's where the
macro `defglobal` comes in"]
[text
"`defglobal` provides an interface to Gardens selector syntax, and also
ensures that the style is added to the DOM:"]
[code {:lang :clojure}
e1]
[code {:lang :html}
e2]
[text
[:a {:href ""} "Garden"]
" syntax applies to the selectors, and no concession is made to ensure
that there does not exist duplicates."] ]))
| null | https://raw.githubusercontent.com/roosta/herb/64afb133a7bf51d7171a3c5260584c09dbe4e504/site/src/site/tutorials/global.cljs | clojure | (ns site.tutorials.global
(:require
[garden.units :refer [em px rem]]
[site.components.code :refer [code]]
[site.components.paper :refer [paper]]
[site.components.text :refer [text]]
[site.snippets.global :as syntax]
[herb.core :as herb :refer [<class <id]]
[reagent.core :as r])
(:require-macros
[site.macros :as macros])
)
(defn main []
(let [e1 (macros/example-src "global.cljs")
e2 (macros/example-src "global.html")]
[paper {:id "global"}
[text {:variant :heading}
"Global styles"]
[text
"Even though Herb is all about scoped CSS sometimes you can't get around
needing to target spesific elements using a selector. That's where the
macro `defglobal` comes in"]
[text
"`defglobal` provides an interface to Gardens selector syntax, and also
ensures that the style is added to the DOM:"]
[code {:lang :clojure}
e1]
[code {:lang :html}
e2]
[text
[:a {:href ""} "Garden"]
" syntax applies to the selectors, and no concession is made to ensure
that there does not exist duplicates."] ]))
| |
dcecd33da3b43f3ffb0488d2bbb0cefc0c01bdbc22715623e9423abb9c1a0ecf | exercism/ocaml | change.ml | let make_change ~target ~coins =
failwith "'make_change' is missing" | null | https://raw.githubusercontent.com/exercism/ocaml/bfd6121f757817865a34db06c3188b5e0ccab518/exercises/practice/change/change.ml | ocaml | let make_change ~target ~coins =
failwith "'make_change' is missing" | |
05ae12ae647faace9c7da8740118b7e04e5c446ebbcc121b5434bf39918121b3 | codegouvfr/guide-juridique-logiciel-libre | test_runner.cljs | (ns choices.test-runner
(:require
[choices.core-test]
[figwheel.main.testing :refer [run-tests-async]]))
(defn -main [& args]
(run-tests-async 5000))
| null | https://raw.githubusercontent.com/codegouvfr/guide-juridique-logiciel-libre/62118cfca9a8e1cc36e2d4af6fcd447bc11cda18/test/choices/test_runner.cljs | clojure | (ns choices.test-runner
(:require
[choices.core-test]
[figwheel.main.testing :refer [run-tests-async]]))
(defn -main [& args]
(run-tests-async 5000))
| |
d6c4c68f9e49707249219b4f09276ae3bdcdfb1fdaba0a266fc8d54a50a68851 | dleslie/allegro-egg | fixed.scm | (cond-expand
(windows
(define fix-to-rad (foreign-value "al_fixtorad_r" integer32))
(define rad-to-fix (foreign-value "al_radtofix_r" integer32)))
(else #f))
| null | https://raw.githubusercontent.com/dleslie/allegro-egg/0435fb891dda5c64e95aa9dedccddd31b17e27da/fixed.scm | scheme | (cond-expand
(windows
(define fix-to-rad (foreign-value "al_fixtorad_r" integer32))
(define rad-to-fix (foreign-value "al_radtofix_r" integer32)))
(else #f))
| |
1d9306e9dcd938c42ba10de725eb6f17e32612ab61aa67e2c23b26e98e2144c7 | helium/blockchain-core | blockchain_txn_consensus_group_v1.erl | %%%-------------------------------------------------------------------
%% @doc
%% == Blockchain Transaction Genesis Consensus Group ==
%% @end
%%%-------------------------------------------------------------------
-module(blockchain_txn_consensus_group_v1).
-behavior(blockchain_txn).
-behavior(blockchain_json).
-include("blockchain_json.hrl").
-include("blockchain.hrl").
-include_lib("helium_proto/include/blockchain_txn_consensus_group_v1_pb.hrl").
-export([
new/4,
hash/1,
sign/2,
members/1,
proof/1,
height/1,
delay/1,
fee/1,
fee_payer/2,
is_valid/2,
absorb/2,
print/1,
json_type/0,
to_json/2
]).
-include("blockchain_vars.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-type txn_consensus_group() :: #blockchain_txn_consensus_group_v1_pb{}.
-export_type([txn_consensus_group/0]).
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec new([libp2p_crypto:pubkey_bin()], binary(), pos_integer(), non_neg_integer()) -> txn_consensus_group().
new(_Members, _Proof, 0, _Delay) ->
error(blowupyay);
new(Members, Proof, Height, Delay) ->
#blockchain_txn_consensus_group_v1_pb{members = Members,
proof = Proof,
height = Height,
delay = Delay}.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec hash(txn_consensus_group()) -> blockchain_txn:hash().
hash(Txn) ->
EncodedTxn = blockchain_txn_consensus_group_v1_pb:encode_msg(Txn),
crypto:hash(sha256, EncodedTxn).
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec sign(txn_consensus_group(), libp2p_crypto:sig_fun()) -> txn_consensus_group().
sign(Txn, _SigFun) ->
Txn.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec members(txn_consensus_group()) -> [libp2p_crypto:pubkey_bin()].
members(Txn) ->
Txn#blockchain_txn_consensus_group_v1_pb.members.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec proof(txn_consensus_group()) -> binary().
proof(Txn) ->
Txn#blockchain_txn_consensus_group_v1_pb.proof.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec height(txn_consensus_group()) -> pos_integer().
height(Txn) ->
Txn#blockchain_txn_consensus_group_v1_pb.height.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec delay(txn_consensus_group()) -> non_neg_integer().
delay(Txn) ->
Txn#blockchain_txn_consensus_group_v1_pb.delay.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec fee(txn_consensus_group()) -> 0.
fee(_Txn) ->
0.
-spec fee_payer(txn_consensus_group(), blockchain_ledger_v1:ledger()) -> libp2p_crypto:pubkey_bin() | undefined.
fee_payer(_Txn, _Ledger) ->
undefined.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec is_valid(txn_consensus_group(), blockchain:blockchain()) -> {error, atom()} | {error, {atom(), any()}}.
is_valid(Txn, Chain) ->
Ledger = blockchain:ledger(Chain),
Members = ?MODULE:members(Txn),
Delay = ?MODULE:delay(Txn),
Proof0 = ?MODULE:proof(Txn),
try
case Members of
[] ->
throw({error, no_members});
_ ->
ok
end,
TxnHeight = ?MODULE:height(Txn),
case blockchain_ledger_v1:current_height(Ledger) of
%% no chain, genesis block
{ok, 0} ->
ok;
{ok, CurrHeight} ->
{ok, #block_info_v2{election_info={_, LastElectionHeight}}} = blockchain:get_block_info(CurrHeight, Chain),
case blockchain_ledger_v1:election_height(Ledger) of
%% no chain, genesis block
{error, not_found} ->
ok;
{ok, BaseHeight} when TxnHeight > BaseHeight ->
ok;
{ok, BaseHeight} ->
throw({error, {duplicate_group, {?MODULE:height(Txn), BaseHeight}}})
end,
{ok, ElectionInterval} = ?get_var(?election_interval, Ledger),
The next election should be at least ElectionInterval blocks past the last election
%% This check prevents elections ahead of schedule
case TxnHeight >= LastElectionHeight + ElectionInterval of
true ->
Proof = binary_to_term(Proof0),
EffectiveHeight = LastElectionHeight + ElectionInterval + Delay,
{ok, Block} = blockchain:get_block(EffectiveHeight, Chain),
{ok, RestartInterval} = ?get_var(?election_restart_interval, Ledger),
IntervalRange =
case ?get_var(?election_restart_interval_range, Ledger) of
{ok, IR} -> IR;
_ -> 1
end,
%% The next election should occur within RestartInterval blocks of when the election started
NextRestart = LastElectionHeight + ElectionInterval + Delay +
(RestartInterval * IntervalRange),
case CurrHeight > NextRestart of
true ->
throw({error, {txn_too_old, {CurrHeight, NextRestart}}});
_ ->
ok
end,
{ok, N} = ?get_var(?num_consensus_members, Ledger),
case length(Members) == N of
true -> ok;
_ -> throw({error, {wrong_members_size, {N, length(Members)}}})
end,
%% if we're on validators make sure that everyone is staked
case ?get_var(?election_version, Ledger) of
{ok, N} when N >= 5 ->
case lists:all(fun(M) ->
{ok, V} = blockchain_ledger_v1:get_validator(M, Ledger),
blockchain_ledger_validator_v1:status(V) == staked end,
Members) of
true -> ok;
false -> throw({error, not_all_validators_staked})
end;
_ -> ok
end,
Hash = blockchain_block:hash_block(Block),
{ok, OldLedger} = blockchain:ledger_at(EffectiveHeight, Chain),
case verify_proof(Proof, Members, Hash, Delay, OldLedger) of
ok -> ok;
{error, _} = VerifyErr -> throw(VerifyErr)
end;
_ ->
throw({error, {election_too_early, {TxnHeight,
LastElectionHeight + ElectionInterval}}})
end
end
catch throw:E ->
E
end.
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
-spec absorb(txn_consensus_group(), blockchain:blockchain()) -> ok | {error, atom()} | {error, {atom(), any()}}.
absorb(Txn, Chain) ->
Height = ?MODULE:height(Txn),
Ledger = blockchain:ledger(Chain),
Members = ?MODULE:members(Txn),
{Gen, Check} =
case blockchain_ledger_v1:election_height(Ledger) of
%% no chain, genesis block
{error, not_found} ->
{true, ok};
{ok, BaseHeight} when Height > BaseHeight ->
{false, ok};
{ok, BaseHeight} ->
{false, {error, {duplicate_group, {?MODULE:height(Txn), BaseHeight}}}}
end,
case Check of
ok ->
case ?get_var(?election_version, Ledger) of
{ok, N} when N >= 5 andalso Gen == false ->
{ok, PenaltyLimit} = ?get_var(?penalty_history_limit, Ledger),
{ok, TenurePenalty} = ?get_var(?tenure_penalty, Ledger),
{ok, OldMembers0} = blockchain_ledger_v1:consensus_members(Ledger),
{ok, CurrHeight} = blockchain_ledger_v1:current_height(Ledger),
OldMembers = lists:filter(fun(X) -> is_validator(X, Ledger) end, OldMembers0),
EpochPenalties =
case OldMembers == OldMembers0 of
%% no gateways to mess up the adjustment
true ->
blockchain_election:validator_penalties(OldMembers, Ledger);
false -> #{}
end,
from election version 7 , apply tenure penalty to the old group at the end of the epoch which
%% is the same time that performance penalties are applied. otherwise maintain old logic of
%% applying at start of round. this change allows tenure penalty to be used as part of the current
%% group's penalty during election rather than being included in the penalty history on the ledger
TenureMembers = case N of N0 when N0 >= 7 -> OldMembers; _ -> Members end,
lists:foreach(
fun(M) ->
{ok, V} = blockchain_ledger_v1:get_validator(M, Ledger),
V1 = blockchain_ledger_validator_v1:add_penalty(V, CurrHeight,
tenure,
TenurePenalty,
PenaltyLimit),
blockchain_ledger_v1:update_validator(M, V1, Ledger)
end,
TenureMembers),
%% persist performance penalties for all validators in the last epoch
lists:foreach(
fun(M) ->
{ok, V} = blockchain_ledger_v1:get_validator(M, Ledger),
V1 = case maps:get(M, EpochPenalties, none) of
none -> V;
0.0 -> V;
Penalty ->
blockchain_ledger_validator_v1:add_penalty(V,
CurrHeight,
performance,
Penalty,
PenaltyLimit)
end,
blockchain_ledger_v1:update_validator(M, V1, Ledger)
end,
OldMembers);
_ -> ok
end,
{ok, Epoch} = blockchain_ledger_v1:election_epoch(Ledger),
ok = blockchain_ledger_v1:election_epoch(Epoch + 1, Ledger),
ok = blockchain_ledger_v1:consensus_members(Members, Ledger),
ok = blockchain_ledger_v1:election_height(Height, Ledger);
{error, _} = Err ->
Err
end.
is_validator(Addr, Ledger) ->
case blockchain_ledger_v1:get_validator(Addr, Ledger) of
{ok, _V} -> true;
_ -> false
end.
-spec print(txn_consensus_group()) -> iodata().
print(undefined) -> <<"type=group, undefined">>;
print(#blockchain_txn_consensus_group_v1_pb{height = Height,
delay = Delay,
members = Members,
proof = Proof}) ->
io_lib:format("type=group height=~p delay=~p members=~p proof_hash=~p",
[Height,
Delay,
lists:map(fun blockchain_utils:addr2name/1, Members),
erlang:phash2(Proof)]).
json_type() ->
<<"consensus_group_v1">>.
-spec to_json(txn_consensus_group(), blockchain_json:opts()) -> blockchain_json:json_object().
to_json(Txn, _Opts) ->
#{
type => ?MODULE:json_type(),
hash => ?BIN_TO_B64(hash(Txn)),
members => [?BIN_TO_B58(M) || M <- members(Txn)],
proof => ?BIN_TO_B64(proof(Txn)),
height => height(Txn),
delay => delay(Txn)
}.
%% ------------------------------------------------------------------
%% Internal Function Definitions
%% ------------------------------------------------------------------
%%--------------------------------------------------------------------
%% @doc
%% @end
%%--------------------------------------------------------------------
verify_proof(Proof, Members, Hash, Delay, OldLedger) ->
%% verify that the list is the proper list
L = length(Members),
HashMembers = blockchain_election:new_group(OldLedger, Hash, L, Delay),
%% clean up ledger context
blockchain_ledger_v1:delete_context(OldLedger),
Artifact = term_to_binary(Members),
case HashMembers of
Members ->
%% verify all the signatures
%% verify that the signatories are all in the members list
case lists:all(fun({Addr, Sig}) ->
lists:member(Addr, Members) andalso
libp2p_crypto:verify(Artifact, Sig,
libp2p_crypto:bin_to_pubkey(Addr))
end, Proof) andalso
lists:all(fun(M) ->
lists:keymember(M, 1, Proof)
end, Members) of
true ->
ok;
_ ->
{error, group_verification_failed}
end;
_ ->
lager:info("groups didn't match: ~p ~p ~ntxn ~p ~nhash ~p",
[length(Members), length(HashMembers),
lists:map(fun blockchain_utils:addr2name/1, Members),
lists:map(fun blockchain_utils:addr2name/1, HashMembers)]),
{error, group_mismatch}
end.
%% ------------------------------------------------------------------
EUNIT Tests
%% ------------------------------------------------------------------
-ifdef(TEST).
new_test() ->
Tx = #blockchain_txn_consensus_group_v1_pb{members = [<<"1">>],
proof = <<"proof">>,
height = 1,
delay = 0},
?assertEqual(Tx, new([<<"1">>], <<"proof">>, 1, 0)).
members_test() ->
Tx = new([<<"1">>], <<"proof">>, 1, 0),
?assertEqual([<<"1">>], members(Tx)).
to_json_test() ->
Tx = new([<<"1">>], <<"proof">>, 1, 0),
Json = to_json(Tx, []),
?assert(lists:all(fun(K) -> maps:is_key(K, Json) end,
[type, hash, members, proof, height, delay])).
-endif.
| null | https://raw.githubusercontent.com/helium/blockchain-core/a34308a576a40594d5057d29570f419516dc87b1/src/transactions/v1/blockchain_txn_consensus_group_v1.erl | erlang | -------------------------------------------------------------------
@doc
== Blockchain Transaction Genesis Consensus Group ==
@end
-------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
no chain, genesis block
no chain, genesis block
This check prevents elections ahead of schedule
The next election should occur within RestartInterval blocks of when the election started
if we're on validators make sure that everyone is staked
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
no chain, genesis block
no gateways to mess up the adjustment
is the same time that performance penalties are applied. otherwise maintain old logic of
applying at start of round. this change allows tenure penalty to be used as part of the current
group's penalty during election rather than being included in the penalty history on the ledger
persist performance penalties for all validators in the last epoch
------------------------------------------------------------------
Internal Function Definitions
------------------------------------------------------------------
--------------------------------------------------------------------
@doc
@end
--------------------------------------------------------------------
verify that the list is the proper list
clean up ledger context
verify all the signatures
verify that the signatories are all in the members list
------------------------------------------------------------------
------------------------------------------------------------------ | -module(blockchain_txn_consensus_group_v1).
-behavior(blockchain_txn).
-behavior(blockchain_json).
-include("blockchain_json.hrl").
-include("blockchain.hrl").
-include_lib("helium_proto/include/blockchain_txn_consensus_group_v1_pb.hrl").
-export([
new/4,
hash/1,
sign/2,
members/1,
proof/1,
height/1,
delay/1,
fee/1,
fee_payer/2,
is_valid/2,
absorb/2,
print/1,
json_type/0,
to_json/2
]).
-include("blockchain_vars.hrl").
-ifdef(TEST).
-include_lib("eunit/include/eunit.hrl").
-endif.
-type txn_consensus_group() :: #blockchain_txn_consensus_group_v1_pb{}.
-export_type([txn_consensus_group/0]).
%% @doc Build a consensus-group transaction from the elected member list,
%% the serialized election proof, the election height and the restart delay.
%% A height of 0 is never legal (there is no block 0 election), so it
%% crashes immediately rather than producing an invalid transaction.
-spec new([libp2p_crypto:pubkey_bin()], binary(), pos_integer(), non_neg_integer()) -> txn_consensus_group().
new(_Members, _Proof, 0, _Delay) ->
    error(blowupyay);
new(NewMembers, ElectionProof, ElectionHeight, RestartDelay) ->
    #blockchain_txn_consensus_group_v1_pb{
       members = NewMembers,
       proof = ElectionProof,
       height = ElectionHeight,
       delay = RestartDelay
      }.
%% @doc SHA-256 hash of the protobuf encoding of the transaction.
-spec hash(txn_consensus_group()) -> blockchain_txn:hash().
hash(Txn) ->
    crypto:hash(sha256, blockchain_txn_consensus_group_v1_pb:encode_msg(Txn)).
%% @doc Consensus-group transactions are not signed by a single party (the
%% election proof carries the member signatures instead), so signing is a
%% no-op that returns the transaction unchanged.
-spec sign(txn_consensus_group(), libp2p_crypto:sig_fun()) -> txn_consensus_group().
sign(Txn, _SigFun) ->
    Txn.
%% @doc The list of public-key binaries elected into the new consensus group.
-spec members(txn_consensus_group()) -> [libp2p_crypto:pubkey_bin()].
members(Txn) ->
    Txn#blockchain_txn_consensus_group_v1_pb.members.
%% @doc The serialized (term_to_binary) election proof: a list of
%% {Address, Signature} pairs over the member list.
-spec proof(txn_consensus_group()) -> binary().
proof(Txn) ->
    Txn#blockchain_txn_consensus_group_v1_pb.proof.
%% @doc The block height at which this election took place.
-spec height(txn_consensus_group()) -> pos_integer().
height(Txn) ->
    Txn#blockchain_txn_consensus_group_v1_pb.height.
%% @doc The restart delay: how many blocks past the nominal election height
%% the election was re-run (0 when the first attempt succeeded).
-spec delay(txn_consensus_group()) -> non_neg_integer().
delay(Txn) ->
    Txn#blockchain_txn_consensus_group_v1_pb.delay.
%% @doc Election transactions are system transactions and carry no fee.
-spec fee(txn_consensus_group()) -> 0.
fee(_Txn) ->
    0.
%% @doc No fee, therefore no fee payer.
-spec fee_payer(txn_consensus_group(), blockchain_ledger_v1:ledger()) -> libp2p_crypto:pubkey_bin() | undefined.
fee_payer(_Txn, _Ledger) ->
    undefined.
%% @doc Validate a consensus-group (election) transaction against the chain.
%% Checks, in order: the member list is non-empty; the election is not a
%% duplicate (height must exceed the last recorded election height); the
%% election is not ahead of schedule nor past its restart window; the group
%% has exactly num_consensus_members members; on election v5+ every member
%% is a staked validator; and the election proof verifies against the
%% ledger state at the effective election height.
%%
%% NOTE: the spec previously omitted `ok', which this function returns on
%% success, and the election_version case reused the already-bound variable
%% N (the group size), so the staking check only ran when election_version
%% happened to equal num_consensus_members. Both are fixed here.
-spec is_valid(txn_consensus_group(), blockchain:blockchain()) -> ok | {error, atom()} | {error, {atom(), any()}}.
is_valid(Txn, Chain) ->
    Ledger = blockchain:ledger(Chain),
    Members = ?MODULE:members(Txn),
    Delay = ?MODULE:delay(Txn),
    Proof0 = ?MODULE:proof(Txn),
    try
        case Members of
            [] ->
                throw({error, no_members});
            _ ->
                ok
        end,
        TxnHeight = ?MODULE:height(Txn),
        case blockchain_ledger_v1:current_height(Ledger) of
            {ok, 0} ->
                %% no chain yet: this is the genesis block, nothing to check
                ok;
            {ok, CurrHeight} ->
                {ok, #block_info_v2{election_info={_, LastElectionHeight}}} = blockchain:get_block_info(CurrHeight, Chain),
                case blockchain_ledger_v1:election_height(Ledger) of
                    {error, not_found} ->
                        ok;
                    {ok, BaseHeight} when TxnHeight > BaseHeight ->
                        ok;
                    {ok, BaseHeight} ->
                        throw({error, {duplicate_group, {?MODULE:height(Txn), BaseHeight}}})
                end,
                {ok, ElectionInterval} = ?get_var(?election_interval, Ledger),
                %% The next election must be at least ElectionInterval blocks past
                %% the last one; this prevents elections ahead of schedule.
                case TxnHeight >= LastElectionHeight + ElectionInterval of
                    true ->
                        Proof = binary_to_term(Proof0),
                        EffectiveHeight = LastElectionHeight + ElectionInterval + Delay,
                        {ok, Block} = blockchain:get_block(EffectiveHeight, Chain),
                        {ok, RestartInterval} = ?get_var(?election_restart_interval, Ledger),
                        IntervalRange =
                            case ?get_var(?election_restart_interval_range, Ledger) of
                                {ok, IR} -> IR;
                                _ -> 1
                            end,
                        %% The election must land within RestartInterval * IntervalRange
                        %% blocks of when it started; otherwise the txn is stale.
                        NextRestart = LastElectionHeight + ElectionInterval + Delay +
                            (RestartInterval * IntervalRange),
                        case CurrHeight > NextRestart of
                            true ->
                                throw({error, {txn_too_old, {CurrHeight, NextRestart}}});
                            _ ->
                                ok
                        end,
                        {ok, N} = ?get_var(?num_consensus_members, Ledger),
                        case length(Members) == N of
                            true -> ok;
                            _ -> throw({error, {wrong_members_size, {N, length(Members)}}})
                        end,
                        %% On validator-based elections (v5+) every member must be
                        %% a staked validator. Use a fresh variable (EV) here:
                        %% reusing N would turn the pattern into an equality check
                        %% against the group size bound above.
                        case ?get_var(?election_version, Ledger) of
                            {ok, EV} when EV >= 5 ->
                                case lists:all(fun(M) ->
                                                       {ok, V} = blockchain_ledger_v1:get_validator(M, Ledger),
                                                       blockchain_ledger_validator_v1:status(V) == staked
                                               end,
                                               Members) of
                                    true -> ok;
                                    false -> throw({error, not_all_validators_staked})
                                end;
                            _ -> ok
                        end,
                        Hash = blockchain_block:hash_block(Block),
                        {ok, OldLedger} = blockchain:ledger_at(EffectiveHeight, Chain),
                        case verify_proof(Proof, Members, Hash, Delay, OldLedger) of
                            ok -> ok;
                            {error, _} = VerifyErr -> throw(VerifyErr)
                        end;
                    _ ->
                        throw({error, {election_too_early, {TxnHeight,
                                                            LastElectionHeight + ElectionInterval}}})
                end
        end
    catch throw:E ->
            E
    end.
%% @doc Absorb a validated consensus-group transaction into the ledger:
%% bump the election epoch, record the new members and the election height,
%% and (election v5+, non-genesis) write tenure and performance penalties
%% for the validators of the epoch that just ended.
%% Returns {error, {duplicate_group, _}} if this election height was
%% already absorbed.
-spec absorb(txn_consensus_group(), blockchain:blockchain()) -> ok | {error, atom()} | {error, {atom(), any()}}.
absorb(Txn, Chain) ->
    Height = ?MODULE:height(Txn),
    Ledger = blockchain:ledger(Chain),
    Members = ?MODULE:members(Txn),
    {Gen, Check} =
        case blockchain_ledger_v1:election_height(Ledger) of
            {error, not_found} ->
                %% no chain yet: genesis block
                {true, ok};
            {ok, BaseHeight} when Height > BaseHeight ->
                {false, ok};
            {ok, BaseHeight} ->
                {false, {error, {duplicate_group, {?MODULE:height(Txn), BaseHeight}}}}
        end,
    case Check of
        ok ->
            case ?get_var(?election_version, Ledger) of
                {ok, N} when N >= 5 andalso Gen == false ->
                    {ok, PenaltyLimit} = ?get_var(?penalty_history_limit, Ledger),
                    {ok, TenurePenalty} = ?get_var(?tenure_penalty, Ledger),
                    {ok, OldMembers0} = blockchain_ledger_v1:consensus_members(Ledger),
                    {ok, CurrHeight} = blockchain_ledger_v1:current_height(Ledger),
                    OldMembers = lists:filter(fun(X) -> is_validator(X, Ledger) end, OldMembers0),
                    %% only compute performance penalties when the outgoing group
                    %% was all validators: no gateways to mess up the adjustment
                    EpochPenalties =
                        case OldMembers == OldMembers0 of
                            true ->
                                blockchain_election:validator_penalties(OldMembers, Ledger);
                            false -> #{}
                        end,
                    %% From election version 7, apply the tenure penalty to the
                    %% *old* group at the end of the epoch, which is the same
                    %% time performance penalties are applied. Earlier versions
                    %% apply it to the incoming group at the start of the round.
                    %% This lets the tenure penalty count toward the current
                    %% group's election penalty rather than only entering the
                    %% on-ledger penalty history.
                    TenureMembers = case N of N0 when N0 >= 7 -> OldMembers; _ -> Members end,
                    lists:foreach(
                      fun(M) ->
                              {ok, V} = blockchain_ledger_v1:get_validator(M, Ledger),
                              V1 = blockchain_ledger_validator_v1:add_penalty(V, CurrHeight,
                                                                              tenure,
                                                                              TenurePenalty,
                                                                              PenaltyLimit),
                              blockchain_ledger_v1:update_validator(M, V1, Ledger)
                      end,
                      TenureMembers),
                    %% persist performance penalties for all validators of the
                    %% epoch that just ended
                    lists:foreach(
                      fun(M) ->
                              {ok, V} = blockchain_ledger_v1:get_validator(M, Ledger),
                              V1 = case maps:get(M, EpochPenalties, none) of
                                       none -> V;
                                       0.0 -> V;
                                       Penalty ->
                                           blockchain_ledger_validator_v1:add_penalty(V,
                                                                                      CurrHeight,
                                                                                      performance,
                                                                                      Penalty,
                                                                                      PenaltyLimit)
                                   end,
                              blockchain_ledger_v1:update_validator(M, V1, Ledger)
                      end,
                      OldMembers);
                _ -> ok
            end,
            {ok, Epoch} = blockchain_ledger_v1:election_epoch(Ledger),
            ok = blockchain_ledger_v1:election_epoch(Epoch + 1, Ledger),
            ok = blockchain_ledger_v1:consensus_members(Members, Ledger),
            ok = blockchain_ledger_v1:election_height(Height, Ledger);
        {error, _} = Err ->
            Err
    end.
%% @doc True when Addr is registered as a validator on Ledger; any lookup
%% failure (including not_found) counts as "not a validator".
is_validator(Addr, Ledger) ->
    case blockchain_ledger_v1:get_validator(Addr, Ledger) of
        {ok, _V} -> true;
        _ -> false
    end.
%% @doc Human-readable one-line rendering of the transaction for logs.
%% Member addresses are shown as animal names and the proof is collapsed
%% to a small hash so the line stays short.
-spec print(txn_consensus_group()) -> iodata().
print(undefined) -> <<"type=group, undefined">>;
print(#blockchain_txn_consensus_group_v1_pb{height = Height,
                                            delay = Delay,
                                            members = Members,
                                            proof = Proof}) ->
    MemberNames = [blockchain_utils:addr2name(M) || M <- Members],
    io_lib:format("type=group height=~p delay=~p members=~p proof_hash=~p",
                  [Height, Delay, MemberNames, erlang:phash2(Proof)]).
%% @doc The JSON discriminator string for this transaction type.
json_type() ->
    <<"consensus_group_v1">>.
%% @doc JSON-ready map view of the transaction: hash and proof are
%% base64-encoded, member addresses base58-encoded.
-spec to_json(txn_consensus_group(), blockchain_json:opts()) -> blockchain_json:json_object().
to_json(Txn, _Opts) ->
    MembersB58 = [?BIN_TO_B58(M) || M <- members(Txn)],
    #{
      type => ?MODULE:json_type(),
      hash => ?BIN_TO_B64(hash(Txn)),
      members => MembersB58,
      proof => ?BIN_TO_B64(proof(Txn)),
      height => height(Txn),
      delay => delay(Txn)
     }.
%% @doc Verify an election proof. Re-runs the deterministic election on the
%% ledger snapshot (OldLedger) for the given block Hash and Delay and checks
%% that it produces exactly Members; then checks that every signature in the
%% proof is from a member and verifies over the serialized member list, and
%% that every member contributed a signature.
verify_proof(Proof, Members, Hash, Delay, OldLedger) ->
    ExpectedMembers =
        blockchain_election:new_group(OldLedger, Hash, length(Members), Delay),
    %% drop the ledger snapshot context now that the election has been re-run
    blockchain_ledger_v1:delete_context(OldLedger),
    Artifact = term_to_binary(Members),
    case ExpectedMembers of
        Members ->
            SigsOk =
                lists:all(fun({Addr, Sig}) ->
                                  lists:member(Addr, Members) andalso
                                      libp2p_crypto:verify(Artifact, Sig,
                                                           libp2p_crypto:bin_to_pubkey(Addr))
                          end, Proof),
            EveryoneSigned =
                lists:all(fun(M) -> lists:keymember(M, 1, Proof) end, Members),
            case SigsOk andalso EveryoneSigned of
                true ->
                    ok;
                false ->
                    {error, group_verification_failed}
            end;
        _ ->
            lager:info("groups didn't match: ~p ~p ~ntxn ~p ~nhash ~p",
                       [length(Members), length(ExpectedMembers),
                        [blockchain_utils:addr2name(M) || M <- Members],
                        [blockchain_utils:addr2name(M) || M <- ExpectedMembers]]),
            {error, group_mismatch}
    end.
%% EUNIT Tests
-ifdef(TEST).
%% new/4 builds the protobuf record with all four fields populated.
new_test() ->
    Tx = #blockchain_txn_consensus_group_v1_pb{members = [<<"1">>],
                                               proof = <<"proof">>,
                                               height = 1,
                                               delay = 0},
    ?assertEqual(Tx, new([<<"1">>], <<"proof">>, 1, 0)).
%% members/1 returns the member list passed to new/4.
members_test() ->
    Tx = new([<<"1">>], <<"proof">>, 1, 0),
    ?assertEqual([<<"1">>], members(Tx)).
%% to_json/2 produces a map containing every expected key.
to_json_test() ->
    Tx = new([<<"1">>], <<"proof">>, 1, 0),
    Json = to_json(Tx, []),
    ?assert(lists:all(fun(K) -> maps:is_key(K, Json) end,
                      [type, hash, members, proof, height, delay])).
-endif.
|
fe6bbce64cd5ec6a3846bee557bf0555ea6bc42467cf90b2fd1541de6f348eb2 | keera-studios/keera-hails | ProtectedModelInternals.hs | -- |
--
Copyright : ( C ) Keera Studios Ltd , 2013
-- License : BSD3
Maintainer :
-- | Internal access to the application's protected (thread-safe) model.
-- Re-exports the generic protected-model operations from
-- "Control.Concurrent.Model.ProtectedModel", specialised below to this
-- application's 'Model' and 'ModelEvent' types.
module Model.ProtectedModel.ProtectedModelInternals
( ProtectedModel
, GPM.onReactiveModel
, GPM.applyToReactiveModel
, GPM.onEvent
, GPM.waitFor
)
where
import Model.Model
import Model.ReactiveModel.ModelEvents
import qualified Control.Concurrent.Model.ProtectedModel as GPM
-- | The application-specific protected model: the generic protected model
-- instantiated with this application's state and event types.
type ProtectedModel = GPM.ProtectedModel Model ModelEvent
| null | https://raw.githubusercontent.com/keera-studios/keera-hails/bf069e5aafc85a1f55fa119ae45a025a2bd4a3d0/demos/keera-hails-demos-gtk/elementarygtkprogram/src/Model/ProtectedModel/ProtectedModelInternals.hs | haskell | |
License : BSD3 | Copyright : ( C ) Keera Studios Ltd , 2013
Maintainer :
module Model.ProtectedModel.ProtectedModelInternals
( ProtectedModel
, GPM.onReactiveModel
, GPM.applyToReactiveModel
, GPM.onEvent
, GPM.waitFor
)
where
import Model.Model
import Model.ReactiveModel.ModelEvents
import qualified Control.Concurrent.Model.ProtectedModel as GPM
type ProtectedModel = GPM.ProtectedModel Model ModelEvent
|
f9cba152ef874351d2da528c6aa51f4445b4f05de5cb8c334cd2f8db831707df | tari3x/csec-modex | parser.ml | type token =
| COMMA
| LPAREN
| RPAREN
| LBRACKET
| RBRACKET
| BAR
| SEMI
| COLON
| NEW
| OUT
| IN
| IDENT of (Ptree.ident)
| STRING of (Ptree.ident)
| INT of (int)
| FLOAT of (float)
| REPL
| LEQ
| IF
| THEN
| ELSE
| FIND
| ORFIND
| SUCHTHAT
| DEFINED
| EQUAL
| DIFF
| FUN
| FORALL
| PARAM
| PROBA
| TYPE
| PROCESS
| DOT
| EOF
| LET
| QUERY
| SECRET
| SECRET1
| AND
| OR
| CONST
| CHANNEL
| EQUIV
| EQUIVLEFT
| EQUIVRIGHT
| MAPSTO
| DEF
| MUL
| DIV
| ADD
| SUB
| POWER
| SET
| COLLISION
| EVENT
| IMPLIES
| TIME
| YIELD
| OTHERUSES
| MAXLENGTH
| LENGTH
| MAX
| COUNT
| NEWCHANNEL
| INJ
| DEFINE
| EXPAND
| LBRACE
| RBRACE
| PROOF
open Parsing;;
# 2 "parser.mly"
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* *
* *
* Copyright ( C ) ENS , CNRS , INRIA , 2005 - 2011 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* Bruno Blanchet *
* *
* Copyright (C) ENS, CNRS, INRIA, 2005-2011 *
* *
*************************************************************)
Copyright ENS , CNRS , INRIA
contributor : ,
This software is a computer program whose purpose is to verify
cryptographic protocols in the computational model .
This software is governed by the CeCILL - B license under French law and
abiding by the rules of distribution of free software . You can use ,
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " .
As a counterpart to the access to the source code and rights to copy ,
modify and redistribute granted by the license , users are provided only
with a limited warranty and the software 's author , the holder of the
economic rights , and the successive licensors have only limited
liability .
In this respect , the user 's attention is drawn to the risks associated
with loading , using , modifying and/or developing or reproducing the
software by the user in light of its specific status of free software ,
that may mean that it is complicated to manipulate , and that also
therefore means that it is reserved for developers and experienced
professionals having in - depth computer knowledge . Users are therefore
encouraged to load and test the software 's suitability as regards their
requirements in conditions enabling the security of their systems and/or
data to be ensured and , more generally , to use and operate it in the
same conditions as regards security .
The fact that you are presently reading this means that you have had
knowledge of the CeCILL - B license and that you accept its terms .
Copyright ENS, CNRS, INRIA
contributor: Bruno Blanchet,
This software is a computer program whose purpose is to verify
cryptographic protocols in the computational model.
This software is governed by the CeCILL-B license under French law and
abiding by the rules of distribution of free software. You can use,
modify and/ or redistribute the software under the terms of the CeCILL-B
license as circulated by CEA, CNRS and INRIA at the following URL
"".
As a counterpart to the access to the source code and rights to copy,
modify and redistribute granted by the license, users are provided only
with a limited warranty and the software's author, the holder of the
economic rights, and the successive licensors have only limited
liability.
In this respect, the user's attention is drawn to the risks associated
with loading, using, modifying and/or developing or reproducing the
software by the user in light of its specific status of free software,
that may mean that it is complicated to manipulate, and that also
therefore means that it is reserved for developers and experienced
professionals having in-depth computer knowledge. Users are therefore
encouraged to load and test the software's suitability as regards their
requirements in conditions enabling the security of their systems and/or
data to be ensured and, more generally, to use and operate it in the
same conditions as regards security.
The fact that you are presently reading this means that you have had
knowledge of the CeCILL-B license and that you accept its terms.
*)
# 49 "parser.mly"
open Parsing_helper
open Ptree
(* Raised when the input does not conform to the grammar. *)
exception Syntax
(* Counter used to hand out a fresh occurrence number to each replication. *)
let repl_counter = ref 0
(* Return the next replication occurrence number (1, 2, ...). *)
let new_repl_occ () =
incr repl_counter;
!repl_counter
(* The constant [true] as a parsed term, carrying a dummy source location. *)
let cst_true = (PIdent ("true", dummy_ext), dummy_ext)
(* Placeholder channel identifier used when no channel is specified. *)
let dummy_channel = ("@dummy_channel", dummy_ext)
# 137 "parser.ml"
let yytransl_const = [|
257 (* COMMA *);
258 (* LPAREN *);
259 (* RPAREN *);
260 (* LBRACKET *);
261 (* RBRACKET *);
BAR
263 (* SEMI *);
264 (* COLON *);
265 (* NEW *);
266 (* OUT *);
267 (* IN *);
272 (* REPL *);
LEQ
274 (* IF *);
275 (* THEN *);
276 (* ELSE *);
277 (* FIND *);
ORFIND
SUCHTHAT
280 (* DEFINED *);
EQUAL
DIFF
283 (* FUN *);
284 (* FORALL *);
PARAM
PROBA
287 (* TYPE *);
288 (* PROCESS *);
DOT
EOF
290 (* LET *);
291 (* QUERY *);
292 (* SECRET *);
SECRET1
294 (* AND *);
295 (* OR *);
CONST
297 (* CHANNEL *);
EQUIV
EQUIVRIGHT
301 (* MAPSTO *);
302 (* DEF *);
303 (* MUL *);
DIV
305 (* ADD *);
306 (* SUB *);
307 (* POWER *);
308 (* SET *);
309 (* COLLISION *);
310 (* EVENT *);
311 (* IMPLIES *);
TIME
313 (* YIELD *);
OTHERUSES
MAXLENGTH
316 (* LENGTH *);
MAX
318 (* COUNT *);
NEWCHANNEL
INJ
321 (* DEFINE *);
322 (* EXPAND *);
323 (* LBRACE *);
RBRACE
325 (* PROOF *);
0|]
let yytransl_block = [|
268 (* IDENT *);
269 (* STRING *);
270 (* INT *);
FLOAT
0|]
let yylhs = "\255\255\
\002\000\002\000\002\000\002\000\002\000\002\000\002\000\002\000\
\002\000\002\000\002\000\002\000\002\000\002\000\002\000\002\000\
\002\000\002\000\002\000\002\000\017\000\017\000\017\000\017\000\
\017\000\017\000\017\000\017\000\018\000\018\000\016\000\016\000\
\007\000\007\000\001\000\006\000\006\000\011\000\011\000\008\000\
\008\000\019\000\019\000\004\000\004\000\004\000\004\000\004\000\
\004\000\004\000\004\000\004\000\004\000\004\000\004\000\004\000\
\004\000\004\000\004\000\024\000\024\000\025\000\025\000\026\000\
\026\000\027\000\027\000\027\000\027\000\021\000\021\000\021\000\
\028\000\022\000\022\000\030\000\030\000\029\000\029\000\031\000\
\031\000\020\000\020\000\009\000\009\000\009\000\009\000\009\000\
\009\000\009\000\009\000\009\000\009\000\009\000\009\000\009\000\
\009\000\009\000\009\000\036\000\034\000\034\000\032\000\032\000\
\035\000\035\000\033\000\033\000\023\000\023\000\023\000\023\000\
\023\000\038\000\038\000\037\000\037\000\010\000\010\000\039\000\
\039\000\039\000\039\000\012\000\012\000\040\000\040\000\015\000\
\015\000\042\000\042\000\043\000\043\000\014\000\014\000\014\000\
\044\000\044\000\045\000\045\000\045\000\045\000\041\000\041\000\
\041\000\041\000\041\000\041\000\041\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\047\000\047\000\046\000\
\046\000\003\000\003\000\003\000\003\000\003\000\049\000\048\000\
\048\000\050\000\050\000\051\000\051\000\052\000\052\000\005\000\
\005\000\000\000\000\000\000\000\000\000\000\000"
let yylen = "\002\000\
\010\000\004\000\007\000\006\000\006\000\006\000\006\000\004\000\
\005\000\004\000\006\000\004\000\005\000\009\000\012\000\009\000\
\009\000\007\000\005\000\000\000\001\000\001\000\001\000\001\000\
\001\000\001\000\001\000\001\000\001\000\002\000\001\000\003\000\
\003\000\000\000\004\000\000\000\001\000\001\000\003\000\000\000\
\001\000\003\000\005\000\004\000\003\000\006\000\001\000\004\000\
\003\000\006\000\005\000\008\000\006\000\006\000\002\000\003\000\
\003\000\003\000\003\000\004\000\001\000\006\000\004\000\001\000\
\003\000\008\000\006\000\006\000\004\000\001\000\001\000\003\000\
\005\000\001\000\003\000\003\000\005\000\001\000\000\000\003\000\
\001\000\001\000\000\000\003\000\001\000\003\000\005\000\001\000\
\005\000\005\000\004\000\003\000\006\000\004\000\007\000\007\000\
\007\000\001\000\003\000\005\000\001\000\003\000\002\000\000\000\
\002\000\000\000\002\000\000\000\001\000\003\000\004\000\003\000\
\002\000\003\000\001\000\001\000\000\000\001\000\003\000\002\000\
\002\000\006\000\004\000\001\000\003\000\001\000\004\000\000\000\
\006\000\000\000\007\000\001\000\003\000\004\000\004\000\001\000\
\000\000\001\000\003\000\005\000\003\000\005\000\006\000\008\000\
\007\000\004\000\006\000\006\000\008\000\003\000\003\000\003\000\
\003\000\003\000\004\000\001\000\002\000\004\000\003\000\001\000\
\005\000\006\000\004\000\006\000\006\000\007\000\008\000\004\000\
\004\000\005\000\004\000\004\000\005\000\006\000\009\000\005\000\
\001\000\001\000\004\000\005\000\007\000\002\000\000\000\001\000\
\003\000\004\000\003\000\002\000\002\000\005\000\004\000\001\000\
\003\000\001\000\003\000\003\000\001\000\003\000\003\000\001\000\
\003\000\002\000\002\000\002\000\002\000\002\000"
let yydefred = "\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\202\000\000\000\
\203\000\000\000\000\000\000\000\000\000\000\000\204\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\206\000\000\000\000\000\000\000\000\000\000\000\
\000\000\041\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\070\000\000\000\000\000\078\000\188\000\000\000\
\000\000\000\000\000\000\000\000\189\000\000\000\000\000\082\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\055\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\120\000\121\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\138\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\028\000\021\000\022\000\023\000\027\000\025\000\
\024\000\026\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\085\000\088\000\000\000\000\000\000\000\000\000\000\000\
\098\000\000\000\000\000\000\000\000\000\187\000\000\000\000\000\
\000\000\000\000\000\000\116\000\000\000\000\000\000\000\000\000\
\000\000\049\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\057\000\000\000\000\000\
\195\000\199\000\196\000\198\000\201\000\000\000\037\000\000\000\
\000\000\039\000\000\000\010\000\000\000\000\000\000\000\000\000\
\008\000\119\000\000\000\012\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\177\000\178\000\
\000\000\000\000\000\000\000\000\000\000\000\000\125\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\
\000\000\000\000\030\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\035\000\186\000\
\072\000\000\000\000\000\000\000\000\000\000\000\193\000\000\000\
\112\000\000\000\110\000\000\000\080\000\000\000\044\000\048\000\
\000\000\033\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\009\000\013\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\146\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\157\000\000\000\000\000\000\000\000\000\000\000\
\127\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\019\000\032\000\084\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\092\000\099\000\000\000\000\000\000\000\000\000\191\000\114\000\
\111\000\190\000\000\000\000\000\000\000\075\000\000\000\000\000\
\000\000\000\000\043\000\004\000\005\000\000\000\011\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\150\000\159\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\153\000\154\000\000\000\000\000\006\000\007\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\091\000\000\000\000\000\
\000\000\000\000\000\000\065\000\000\000\077\000\000\000\000\000\
\000\000\000\000\046\000\000\000\000\000\140\000\142\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\148\000\000\000\
\147\000\000\000\158\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\163\000\172\000\000\000\000\000\000\000\
\000\000\168\000\169\000\171\000\179\000\000\000\000\000\155\000\
\000\000\129\000\000\000\000\000\003\000\000\000\018\000\089\000\
\000\000\000\000\000\000\090\000\000\000\000\000\102\000\000\000\
\000\000\060\000\000\000\000\000\000\000\000\000\000\000\000\000\
\135\000\000\000\134\000\000\000\000\000\000\000\133\000\185\000\
\000\000\000\000\170\000\000\000\000\000\176\000\182\000\161\000\
\173\000\000\000\000\000\000\000\000\000\180\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\093\000\000\000\
\000\000\000\000\000\000\000\000\000\000\149\000\000\000\164\000\
\000\000\174\000\165\000\000\000\162\000\000\000\014\000\000\000\
\016\000\017\000\000\000\097\000\096\000\000\000\095\000\000\000\
\000\000\000\000\001\000\131\000\166\000\000\000\000\000\181\000\
\000\000\000\000\063\000\000\000\000\000\167\000\000\000\000\000\
\175\000\015\000\062\000"
let yydgoto = "\006\000\
\023\000\024\000\031\000\094\000\043\000\206\000\045\001\059\000\
\170\000\060\000\207\000\067\000\126\001\046\001\072\000\155\000\
\156\000\157\000\050\000\095\000\082\000\192\000\178\000\011\001\
\237\001\012\001\083\000\193\000\085\000\086\000\096\000\089\001\
\166\001\084\001\036\002\085\001\179\000\180\000\061\000\068\000\
\069\000\227\000\123\001\132\000\133\000\127\001\203\001\087\000\
\088\000\044\000\045\000\046\000"
let yysindex = "\048\003\
\043\002\043\002\039\255\066\255\103\255\000\000\034\255\085\255\
\127\255\208\255\221\255\226\255\181\255\127\255\127\255\132\000\
\050\000\001\000\063\000\078\000\087\000\118\255\000\000\230\255\
\000\000\097\000\038\255\128\000\155\255\133\000\000\000\066\255\
\139\000\043\000\038\255\075\000\155\255\150\000\124\000\102\002\
\115\000\144\000\000\000\152\000\162\000\202\000\209\000\208\000\
\212\000\000\000\225\000\075\000\198\000\075\000\210\000\230\000\
\233\000\066\255\234\000\204\000\237\000\250\000\239\000\254\000\
\016\001\007\001\240\000\020\001\024\001\010\001\025\001\062\255\
\043\255\052\001\062\001\209\255\025\002\058\001\165\001\063\001\
\102\002\051\001\000\000\061\001\064\001\000\000\000\000\053\001\
\155\255\007\000\066\255\068\001\000\000\008\001\077\001\000\000\
\093\001\066\255\066\255\069\001\127\255\128\000\078\001\000\000\
\096\001\066\255\066\255\066\255\066\255\091\001\114\001\114\001\
\115\001\103\255\127\255\117\001\066\255\127\255\105\001\043\002\
\119\001\025\002\000\000\000\000\222\001\107\001\043\002\181\255\
\122\001\043\002\124\255\156\001\000\000\254\000\250\255\209\001\
\132\000\160\001\037\000\167\001\085\255\075\002\127\255\043\002\
\127\255\127\255\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\100\001\209\255\166\001\025\002\164\001\178\001\
\180\001\000\000\000\000\173\001\038\255\075\000\155\255\176\001\
\000\000\017\000\181\001\187\001\186\001\000\000\192\001\038\255\
\128\000\204\001\203\001\000\000\155\255\195\001\151\255\066\255\
\066\255\000\000\200\001\210\001\228\001\066\255\230\001\218\001\
\220\001\226\001\066\255\241\001\225\001\000\000\099\000\102\002\
\000\000\000\000\000\000\000\000\000\000\253\001\000\000\002\002\
\030\001\000\000\043\002\000\000\043\002\048\255\066\255\066\255\
\000\000\000\000\229\001\000\000\252\001\255\001\091\000\013\002\
\016\002\018\002\231\000\209\001\026\002\054\002\000\000\000\000\
\056\002\061\002\062\002\067\002\045\002\101\001\000\000\082\002\
\053\002\057\002\079\002\085\002\209\001\099\002\000\000\107\002\
\108\002\043\002\000\000\209\255\131\255\091\002\066\255\066\255\
\008\002\104\002\128\000\090\002\051\255\025\002\000\000\000\000\
\000\000\126\002\130\002\129\002\135\002\118\002\000\000\155\255\
\000\000\140\002\000\000\022\002\000\000\137\002\000\000\000\000\
\238\255\000\000\066\255\128\000\038\255\029\002\066\255\142\002\
\085\255\043\002\000\000\000\000\043\002\102\002\050\002\043\002\
\150\002\155\002\155\000\153\002\154\002\114\002\153\002\152\002\
\156\002\108\001\000\000\105\255\157\002\209\001\216\001\066\255\
\156\255\209\001\000\000\153\002\209\001\209\001\209\001\209\001\
\000\000\043\002\043\002\159\002\066\255\098\002\128\002\095\002\
\131\002\000\000\000\000\000\000\158\002\170\001\006\002\160\002\
\161\002\025\002\145\002\149\002\151\002\066\255\066\255\025\002\
\000\000\000\000\066\255\186\001\133\002\128\000\000\000\000\000\
\000\000\000\000\066\255\066\255\102\002\000\000\162\002\066\255\
\171\002\163\002\000\000\000\000\000\000\066\255\000\000\254\000\
\254\000\172\002\134\002\164\002\066\255\136\002\168\002\109\001\
\074\000\175\002\180\002\000\000\000\000\100\255\181\002\127\255\
\173\002\174\002\116\000\176\002\184\002\185\002\186\002\177\002\
\182\002\015\000\189\002\190\002\192\002\214\001\127\255\184\002\
\193\002\132\000\000\000\000\000\079\000\079\000\000\000\000\000\
\001\000\081\002\066\255\043\002\043\002\043\002\191\002\066\255\
\155\255\025\002\088\000\038\255\025\002\000\000\128\000\055\002\
\194\002\161\002\195\002\000\000\182\255\000\000\102\002\102\002\
\066\255\078\002\000\000\075\000\102\002\000\000\000\000\075\000\
\156\002\196\002\102\002\066\255\075\000\108\001\000\000\132\000\
\000\000\209\001\000\000\199\002\198\002\201\002\127\255\184\002\
\202\002\209\001\203\002\000\000\000\000\204\002\184\002\127\255\
\184\002\000\000\000\000\000\000\000\000\205\002\206\002\000\000\
\166\002\000\000\209\001\096\002\000\000\143\002\000\000\000\000\
\219\001\207\002\161\002\000\000\197\002\161\002\000\000\025\002\
\191\002\000\000\210\002\102\002\179\002\102\002\066\255\187\002\
\000\000\066\255\000\000\102\002\208\002\211\002\000\000\000\000\
\184\002\215\002\000\000\214\002\218\002\000\000\000\000\000\000\
\000\000\219\002\220\002\221\002\184\002\000\000\043\002\105\002\
\043\002\043\002\222\002\191\002\025\002\088\000\000\000\186\001\
\066\255\102\002\043\002\102\002\156\002\000\000\223\002\000\000\
\213\002\000\000\000\000\184\002\000\000\224\002\000\000\066\255\
\000\000\000\000\025\002\000\000\000\000\161\002\000\000\120\000\
\183\002\102\002\000\000\000\000\000\000\184\002\227\002\000\000\
\100\002\161\002\000\000\186\001\228\002\000\000\043\002\229\002\
\000\000\000\000\000\000"
(* Packed reduction-index parser table, encoded as an escaped byte string.
   NOTE(review): this file appears to be ocamlyacc output generated from
   parser.mly (see the `# NNN "parser.mly"` line directives in the semantic
   actions below) — do not hand-edit; regenerate from the grammar instead. *)
let yyrindex = "\000\000\
\209\002\213\003\000\000\000\000\000\000\000\000\000\000\226\002\
\000\000\000\000\000\000\000\000\226\002\000\000\000\000\000\000\
\000\000\182\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\212\002\000\000\000\000\000\000\231\002\
\000\000\175\000\000\000\147\255\000\000\000\000\000\000\236\003\
\011\000\000\000\000\000\012\000\000\000\237\003\000\000\000\000\
\000\000\000\000\160\000\216\002\000\000\216\002\000\000\000\000\
\000\000\000\000\000\000\000\000\217\002\000\000\000\000\235\002\
\000\000\000\000\000\000\018\255\090\255\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\231\002\000\000\
\225\002\000\000\000\000\000\000\000\000\000\000\000\000\239\003\
\237\002\229\255\000\000\000\000\000\000\168\000\000\000\000\000\
\000\000\231\002\238\002\000\000\000\000\212\002\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\239\002\000\000\000\000\000\000\000\000\009\000\
\000\000\000\000\000\000\000\000\000\000\000\000\009\000\226\002\
\000\000\009\000\000\000\000\000\000\000\235\002\116\001\000\000\
\000\000\000\000\000\000\000\000\226\002\000\000\239\002\009\000\
\239\002\239\002\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\030\255\178\002\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\147\255\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\212\002\242\002\000\000\000\000\237\002\000\000\174\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\232\002\000\000\000\000\067\000\249\000\000\000\201\000\019\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\240\002\
\000\000\000\000\009\000\000\000\009\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\230\002\000\000\
\000\000\000\000\000\000\000\000\000\000\140\000\000\000\000\000\
\048\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\009\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\212\002\000\000\022\000\000\000\000\000\000\000\
\000\000\041\255\067\255\000\000\233\002\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\212\002\000\000\000\000\231\002\000\000\
\000\000\009\000\000\000\000\000\009\000\088\255\000\000\009\000\
\245\002\248\002\000\000\230\002\072\000\000\000\234\002\000\000\
\116\001\235\002\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\141\001\000\000\000\000\000\000\000\000\
\000\000\009\000\009\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\082\000\000\000\000\000\113\000\127\000\000\000\231\002\000\000\
\000\000\000\000\238\002\000\000\060\255\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\039\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\250\002\000\000\000\000\000\000\251\002\000\000\239\002\
\000\000\000\000\000\000\000\000\252\002\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\239\002\252\002\
\000\000\000\000\000\000\000\000\116\255\201\255\000\000\000\000\
\182\001\000\000\000\000\009\000\200\002\009\000\022\000\000\000\
\000\000\000\000\030\000\000\000\000\000\000\000\212\002\028\000\
\000\000\109\000\000\000\000\000\000\000\000\000\080\001\111\001\
\000\000\159\001\000\000\216\002\145\255\000\000\000\000\185\001\
\031\255\000\000\204\255\000\000\253\002\235\002\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\252\002\
\000\000\000\000\000\000\000\000\000\000\000\000\252\002\239\002\
\252\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\176\000\000\000\000\000\226\000\000\000\000\000\
\022\000\000\000\000\000\169\255\176\255\195\000\000\000\000\000\
\000\000\000\000\000\000\224\000\000\000\000\000\000\000\000\000\
\252\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\252\002\000\000\009\000\000\000\
\009\000\009\000\034\000\022\000\000\000\030\000\000\000\000\000\
\000\000\136\001\009\000\103\001\055\001\000\000\000\000\000\000\
\000\000\000\000\000\000\252\002\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\229\000\000\000\236\002\
\000\000\224\255\000\000\000\000\000\000\252\002\000\000\000\000\
\000\000\057\001\000\000\000\000\000\000\000\000\009\000\000\000\
\000\000\000\000\000\000"
(* Packed goto-index parser table (escaped byte string).
   NOTE(review): ocamlyacc-generated data — regenerate from parser.mly
   rather than editing by hand. *)
let yygindex = "\000\000\
\000\000\254\255\000\000\253\255\143\003\129\255\225\255\023\000\
\137\255\130\003\249\255\132\255\135\255\184\255\106\002\009\003\
\000\000\106\003\241\002\183\255\231\255\243\002\233\255\025\254\
\000\000\199\254\184\003\000\000\158\255\170\002\080\003\133\254\
\122\254\103\002\000\000\000\000\085\003\000\003\000\000\000\000\
\049\255\234\254\032\000\133\003\173\255\236\254\178\254\092\003\
\000\000\000\000\071\001\000\000"
let yytablesize = 1049
(* Packed LR action/goto table consumed by the Parsing runtime, encoded
   as an escaped byte string (presumably byte-pair entries — confirm
   against the ocamlyacc runtime before relying on the layout).
   NOTE(review): generated data — do not hand-edit. *)
let yytable = "\025\000\
\040\000\052\000\214\000\194\000\102\000\092\000\062\000\063\000\
\020\000\100\000\194\000\197\000\239\000\103\000\238\000\246\000\
\007\001\248\000\249\000\051\001\119\000\104\000\121\000\081\000\
\188\000\189\000\120\001\094\000\228\001\108\000\049\000\081\000\
\130\000\106\000\172\001\224\001\029\000\145\001\253\000\079\000\
\040\002\061\000\130\000\061\000\143\000\047\000\033\000\026\000\
\130\000\034\000\124\000\130\000\087\001\006\001\125\000\035\000\
\027\000\088\001\036\000\028\000\124\000\080\000\069\000\032\000\
\130\000\215\001\045\000\032\000\142\000\064\000\033\000\037\000\
\029\000\034\000\033\000\144\000\056\002\034\000\069\000\035\000\
\037\001\086\000\036\000\035\000\130\000\061\000\036\000\183\000\
\123\000\141\000\126\000\038\000\030\000\191\000\130\000\037\000\
\048\000\029\000\122\001\037\000\194\001\039\000\197\000\198\000\
\199\000\200\000\052\001\124\001\103\000\015\002\210\000\064\000\
\108\000\209\000\041\000\038\000\151\000\212\000\151\000\038\000\
\123\000\253\001\126\000\070\001\217\000\039\000\101\000\220\000\
\002\002\039\000\004\002\221\000\126\000\076\001\003\001\039\002\
\006\001\081\001\051\000\002\001\222\000\247\000\090\001\004\001\
\037\002\122\000\061\001\062\001\063\001\064\001\014\001\061\001\
\062\001\063\001\064\001\042\000\089\000\143\001\034\000\151\000\
\083\001\081\000\242\001\244\000\151\000\151\000\090\000\144\001\
\191\001\034\000\023\002\068\000\081\000\248\001\047\000\087\000\
\107\000\122\000\067\000\091\000\020\001\255\001\030\002\032\000\
\076\000\194\000\025\001\068\000\108\000\109\000\033\000\030\001\
\048\000\034\000\067\000\147\001\148\001\149\001\150\001\035\000\
\059\000\152\000\036\000\152\000\143\000\047\002\143\000\143\000\
\035\001\147\000\036\001\038\001\039\001\105\001\041\002\037\000\
\056\000\057\000\163\001\053\000\148\000\149\000\150\000\053\002\
\170\001\107\000\066\000\115\001\100\000\109\000\118\001\109\000\
\054\000\151\000\058\000\038\000\143\000\055\000\122\001\235\001\
\122\001\152\000\066\000\146\001\152\000\039\000\143\000\074\001\
\056\000\152\000\152\000\078\001\079\001\109\000\044\002\153\000\
\196\001\100\001\225\000\103\001\154\000\077\000\106\000\107\000\
\181\000\071\000\226\000\194\000\197\000\169\001\182\000\214\001\
\208\001\171\001\058\000\108\000\109\000\217\001\006\001\101\001\
\104\000\081\000\209\001\104\000\182\001\183\001\094\000\108\001\
\108\000\094\000\109\001\191\000\106\000\111\001\051\000\106\000\
\020\000\104\000\227\001\104\000\098\000\230\001\099\000\094\000\
\241\000\094\000\242\000\108\000\142\001\106\000\104\000\106\000\
\105\000\194\000\194\000\197\000\094\000\070\000\108\000\151\001\
\152\001\154\001\106\000\045\000\083\001\045\000\045\000\045\000\
\045\000\136\000\073\000\134\000\020\000\045\000\101\000\054\000\
\003\002\221\000\168\001\136\000\086\000\045\000\045\000\136\000\
\045\000\074\000\222\000\045\000\045\000\006\001\043\001\175\001\
\176\001\008\002\075\000\045\000\178\001\086\000\044\001\086\000\
\045\000\045\000\181\001\165\001\078\000\045\000\050\000\103\000\
\014\002\187\001\086\000\108\000\136\000\136\000\108\000\199\001\
\092\001\045\000\051\002\106\000\107\000\061\001\062\001\200\001\
\103\000\101\000\103\000\105\000\101\000\064\000\108\000\052\000\
\108\000\226\001\229\001\084\000\156\000\103\000\156\000\065\000\
\093\000\108\000\101\000\066\000\240\001\038\002\097\000\220\001\
\241\001\221\001\222\001\223\001\225\001\245\001\053\000\101\000\
\081\000\104\000\038\000\038\000\038\000\110\000\051\000\038\000\
\114\001\236\001\081\000\050\002\081\000\238\001\113\000\047\000\
\113\000\047\000\087\000\047\000\047\000\202\000\203\000\156\000\
\244\001\047\000\156\000\156\000\156\000\156\000\111\000\252\001\
\038\000\047\000\047\000\087\000\047\000\087\000\112\000\047\000\
\047\000\059\000\114\000\059\000\059\000\059\000\059\000\047\000\
\087\000\113\000\115\000\059\000\047\000\047\000\073\000\116\000\
\073\000\047\000\117\000\059\000\059\000\246\001\059\000\247\001\
\145\000\118\000\145\000\145\000\107\000\047\000\120\000\100\000\
\050\001\059\000\122\000\018\002\127\000\128\000\020\002\059\000\
\126\000\123\000\065\000\059\000\124\000\107\000\066\000\107\000\
\100\000\056\000\100\000\056\000\056\000\056\000\056\000\059\000\
\145\000\129\000\107\000\056\000\031\002\100\000\033\002\034\002\
\185\000\131\000\145\000\056\000\056\000\042\002\056\000\130\000\
\043\002\134\000\135\000\058\000\137\000\058\000\058\000\058\000\
\058\000\056\000\136\000\138\000\049\002\058\000\056\000\056\000\
\106\000\107\000\139\000\056\000\140\000\058\000\058\000\051\000\
\058\000\051\000\051\000\051\000\051\000\108\000\109\000\056\000\
\160\000\051\000\160\000\058\000\058\002\145\000\106\000\107\000\
\130\000\051\000\051\000\105\000\051\000\058\000\034\001\146\000\
\173\000\171\000\130\000\108\000\109\000\174\000\130\000\051\000\
\130\000\058\000\177\000\130\000\105\000\175\000\105\000\186\000\
\054\000\051\000\054\000\054\000\054\000\054\000\176\000\190\000\
\130\000\105\000\054\000\160\000\184\000\051\000\160\000\160\000\
\160\000\160\000\054\000\054\000\187\000\054\000\195\000\144\000\
\201\000\144\000\144\000\196\000\130\000\064\000\190\001\050\000\
\054\000\050\000\050\000\050\000\050\000\130\000\130\000\121\001\
\065\000\050\000\054\000\066\000\066\000\041\000\204\000\130\000\
\208\000\050\000\050\000\130\000\050\000\219\000\054\000\144\000\
\052\000\211\000\052\000\052\000\052\000\052\000\034\000\050\000\
\060\001\144\000\052\000\061\001\062\001\063\001\064\001\213\000\
\034\000\050\000\052\000\052\000\034\000\052\000\223\000\053\000\
\216\000\053\000\053\000\053\000\053\000\050\000\032\000\250\000\
\052\000\053\000\160\001\240\000\252\000\033\000\243\000\254\000\
\034\000\053\000\052\000\255\000\053\000\000\001\035\000\128\000\
\001\001\036\000\034\000\005\001\080\000\009\001\052\000\053\000\
\008\001\128\000\106\000\107\000\034\000\010\001\037\000\128\000\
\034\000\053\000\128\000\013\001\016\001\017\001\019\001\108\000\
\109\000\128\000\228\000\022\001\023\001\053\000\229\000\128\000\
\213\001\128\001\038\000\129\001\230\000\011\002\231\000\232\000\
\130\001\131\001\132\001\133\001\039\000\034\000\034\000\134\001\
\024\001\135\001\026\001\128\000\136\001\027\001\106\000\107\000\
\137\001\028\001\031\001\106\000\107\000\128\000\106\000\107\000\
\029\001\138\001\107\000\108\000\109\000\139\001\140\001\032\001\
\108\000\109\000\033\001\108\000\109\000\040\001\161\001\041\001\
\233\000\158\000\042\001\234\000\235\000\236\000\237\000\047\001\
\159\000\160\000\161\000\162\000\215\000\163\000\141\001\164\000\
\080\001\165\000\158\000\048\001\166\000\049\001\106\000\107\000\
\098\001\159\000\160\000\161\000\162\000\053\001\163\000\104\001\
\164\000\167\000\165\000\108\000\109\000\166\000\106\000\107\000\
\001\000\002\000\003\000\004\000\005\000\106\000\107\000\054\001\
\059\001\055\001\167\000\108\000\109\000\168\000\056\001\057\001\
\169\000\232\001\108\000\109\000\058\001\007\000\008\000\009\000\
\010\000\011\000\106\000\107\000\012\000\013\000\168\000\106\000\
\107\000\169\000\014\000\015\000\016\000\066\001\065\001\108\000\
\109\000\067\001\068\001\069\001\108\000\109\000\017\000\018\000\
\019\000\239\001\077\001\106\000\107\000\071\001\106\000\107\000\
\110\001\106\000\107\000\020\000\021\000\072\001\073\001\022\000\
\108\000\109\000\086\001\108\000\109\000\245\000\108\000\109\000\
\106\000\107\000\082\001\219\001\106\000\107\000\106\000\107\000\
\009\002\091\001\092\001\093\001\055\002\108\000\109\000\094\001\
\095\001\108\000\109\000\108\000\109\000\155\001\097\001\099\001\
\061\001\062\001\063\001\064\001\032\002\106\001\112\001\061\001\
\062\001\063\001\064\001\113\001\043\001\116\001\117\001\119\001\
\156\001\157\001\125\001\158\001\225\000\153\001\006\001\164\001\
\165\001\159\001\173\001\162\001\167\001\179\001\180\001\192\001\
\184\001\186\001\185\001\189\001\177\001\188\001\193\001\195\001\
\202\001\198\001\197\001\204\001\205\001\201\001\206\001\210\001\
\211\001\207\001\212\001\216\001\233\001\088\001\007\002\234\001\
\243\001\249\001\250\001\251\001\254\001\000\002\001\002\005\002\
\006\002\012\002\010\002\016\002\020\000\022\002\021\002\013\002\
\017\002\024\002\025\002\019\002\026\002\027\002\028\002\029\002\
\046\002\045\002\048\002\052\002\035\002\054\002\057\002\059\002\
\040\000\083\000\079\000\205\000\200\000\137\000\192\000\117\000\
\020\000\036\000\083\000\071\000\115\000\031\000\042\000\139\000\
\034\000\118\000\141\000\074\000\132\000\184\000\183\000\076\000\
\205\000\218\000\218\001\034\000\075\001\251\000\172\000\174\001\
\021\001\018\001\224\000\020\000\015\001\231\001\102\001\096\001\
\000\000\107\001\034\000\000\000\000\000\000\000\000\000\034\000\
\064\000"
(* Packed consistency-check table paired with yytable above (escaped
   byte string).  NOTE(review): ocamlyacc-generated data — any manual
   change here would desynchronise it from yytable; regenerate from
   parser.mly instead. *)
let yycheck = "\002\000\
\004\000\009\000\122\000\102\000\036\000\029\000\014\000\015\000\
\000\000\035\000\000\000\000\000\137\000\037\000\136\000\143\000\
\000\000\145\000\146\000\227\000\052\000\000\000\054\000\027\000\
\098\000\099\000\049\001\000\000\163\001\000\000\008\000\035\000\
\002\001\000\000\092\001\159\001\007\001\058\001\158\000\002\001\
\016\002\001\001\012\001\003\001\002\001\012\001\009\001\009\001\
\018\001\012\001\033\001\021\001\002\001\006\001\058\000\018\001\
\018\001\007\001\021\001\021\001\043\001\024\001\003\001\002\001\
\034\001\144\001\000\000\002\001\072\000\003\001\009\001\034\001\
\034\001\012\001\009\001\033\001\052\002\012\001\019\001\018\001\
\033\001\000\000\021\001\018\001\054\001\045\001\021\001\091\000\
\001\001\028\001\001\001\054\001\054\001\101\000\064\001\034\001\
\012\001\068\001\050\001\034\001\001\001\064\001\106\000\107\000\
\108\000\109\000\228\000\003\001\000\000\233\001\118\000\045\001\
\000\000\117\000\012\001\054\001\001\001\120\000\003\001\054\001\
\033\001\200\001\033\001\245\000\127\000\064\001\000\000\130\000\
\207\001\064\001\209\001\008\001\043\001\003\001\166\000\014\002\
\006\001\001\001\012\001\165\000\017\001\144\000\006\001\167\000\
\012\002\001\001\047\001\048\001\049\001\050\001\176\000\047\001\
\048\001\049\001\050\001\053\001\002\001\002\001\012\001\044\001\
\003\001\165\000\185\001\141\000\049\001\050\001\012\001\012\001\
\120\001\023\001\249\001\003\001\176\000\194\001\000\000\000\000\
\026\001\033\001\003\001\025\001\184\000\202\001\005\002\002\001\
\067\001\028\001\190\000\019\001\038\001\039\001\009\001\195\000\
\012\001\012\001\019\001\061\001\062\001\063\001\064\001\018\001\
\000\000\001\001\021\001\003\001\001\001\028\002\003\001\004\001\
\211\000\001\001\213\000\215\000\216\000\031\001\016\002\034\001\
\036\001\037\001\082\001\012\001\012\001\013\001\014\001\046\002\
\088\001\000\000\003\001\044\001\000\000\001\001\047\001\003\001\
\012\001\025\001\054\001\054\001\033\001\012\001\190\001\058\001\
\192\001\033\001\019\001\060\001\044\001\064\001\043\001\250\000\
\000\000\049\001\050\001\255\000\000\001\025\001\021\002\047\001\
\128\001\020\001\009\001\029\001\052\001\032\001\025\001\026\001\
\002\001\009\001\017\001\001\001\001\001\087\001\008\001\143\001\
\002\001\091\001\000\000\038\001\039\001\146\001\006\001\027\001\
\003\001\029\001\012\001\006\001\112\001\113\001\003\001\034\001\
\003\001\006\001\037\001\043\001\003\001\006\001\000\000\006\001\
\032\001\020\001\162\001\022\001\002\001\165\001\004\001\020\001\
\012\001\022\001\014\001\022\001\056\001\020\001\033\001\022\001\
\000\000\047\001\048\001\048\001\033\001\012\001\033\001\066\001\
\067\001\069\001\033\001\001\001\167\001\003\001\004\001\005\001\
\006\001\002\001\012\001\002\001\068\001\011\001\004\001\000\000\
\208\001\008\001\086\001\012\001\003\001\019\001\020\001\016\001\
\022\001\012\001\017\001\025\001\026\001\006\001\004\001\099\001\
\100\001\219\001\012\001\033\001\104\001\020\001\012\001\022\001\
\038\001\039\001\110\001\020\001\012\001\043\001\000\000\003\001\
\232\001\117\001\033\001\003\001\045\001\046\001\006\001\004\001\
\001\001\055\001\003\001\025\001\026\001\047\001\048\001\012\001\
\020\001\003\001\022\001\008\001\006\001\002\001\022\001\000\000\
\038\001\161\001\164\001\012\001\001\001\033\001\003\001\012\001\
\012\001\033\001\020\001\016\001\180\001\013\002\012\001\155\001\
\184\001\156\001\157\001\158\001\160\001\189\001\000\000\033\001\
\164\001\012\001\003\001\004\001\005\001\051\001\012\001\008\001\
\014\001\173\001\003\001\035\002\005\001\177\001\001\001\001\001\
\003\001\003\001\003\001\005\001\006\001\111\000\112\000\044\001\
\188\001\011\001\047\001\048\001\049\001\050\001\047\001\199\001\
\033\001\019\001\020\001\020\001\022\001\022\001\047\001\025\001\
\026\001\001\001\001\001\003\001\004\001\005\001\006\001\033\001\
\033\001\048\001\002\001\011\001\038\001\039\001\020\001\008\001\
\022\001\043\001\007\001\019\001\020\001\190\001\022\001\192\001\
\001\001\001\001\003\001\004\001\003\001\055\001\033\001\003\001\
\002\001\033\001\025\001\239\001\033\001\001\001\242\001\039\001\
\007\001\012\001\012\001\043\001\012\001\020\001\016\001\022\001\
\020\001\001\001\022\001\003\001\004\001\005\001\006\001\055\001\
\033\001\008\001\033\001\011\001\007\002\033\001\009\002\010\002\
\001\001\012\001\043\001\019\001\020\001\017\002\022\001\033\001\
\019\002\002\001\012\001\001\001\001\001\003\001\004\001\005\001\
\006\001\033\001\043\001\004\001\032\002\011\001\038\001\039\001\
\025\001\026\001\025\001\043\001\012\001\019\001\020\001\001\001\
\022\001\003\001\004\001\005\001\006\001\038\001\039\001\055\001\
\001\001\011\001\003\001\033\001\055\002\002\001\025\001\026\001\
\002\001\019\001\020\001\003\001\022\001\043\001\033\001\002\001\
\002\001\008\001\012\001\038\001\039\001\019\001\016\001\033\001\
\018\001\055\001\022\001\021\001\020\001\017\001\022\001\003\001\
\001\001\043\001\003\001\004\001\005\001\006\001\023\001\019\001\
\034\001\033\001\011\001\044\001\025\001\055\001\047\001\048\001\
\049\001\050\001\019\001\020\001\008\001\022\001\025\001\001\001\
\014\001\003\001\004\001\012\001\054\001\002\001\002\001\001\001\
\033\001\003\001\004\001\005\001\006\001\002\001\064\001\012\001\
\012\001\011\001\043\001\016\001\016\001\012\001\012\001\012\001\
\012\001\019\001\020\001\016\001\022\001\012\001\055\001\033\001\
\001\001\033\001\003\001\004\001\005\001\006\001\002\001\033\001\
\044\001\043\001\011\001\047\001\048\001\049\001\050\001\033\001\
\012\001\043\001\019\001\020\001\016\001\022\001\003\001\001\001\
\054\001\003\001\004\001\005\001\006\001\055\001\002\001\068\001\
\033\001\011\001\001\001\012\001\007\001\009\001\008\001\012\001\
\012\001\019\001\043\001\002\001\022\001\002\001\018\001\002\001\
\012\001\021\001\002\001\012\001\024\001\003\001\055\001\033\001\
\012\001\012\001\025\001\026\001\012\001\012\001\034\001\018\001\
\016\001\043\001\021\001\012\001\001\001\003\001\012\001\038\001\
\039\001\028\001\002\001\012\001\003\001\055\001\006\001\034\001\
\003\001\002\001\054\001\004\001\012\001\003\001\014\001\015\001\
\009\001\010\001\011\001\012\001\064\001\045\001\046\001\016\001\
\005\001\018\001\005\001\054\001\021\001\020\001\025\001\026\001\
\025\001\022\001\002\001\025\001\026\001\064\001\025\001\026\001\
\023\001\034\001\026\001\038\001\039\001\038\001\039\001\003\001\
\038\001\039\001\001\001\038\001\039\001\033\001\001\001\012\001\
\056\001\002\001\012\001\059\001\060\001\061\001\062\001\003\001\
\009\001\010\001\011\001\012\001\055\001\014\001\063\001\016\001\
\017\001\018\001\002\001\012\001\021\001\012\001\025\001\026\001\
\011\001\009\001\010\001\011\001\012\001\012\001\014\001\011\001\
\016\001\034\001\018\001\038\001\039\001\021\001\025\001\026\001\
\001\000\002\000\003\000\004\000\005\000\025\001\026\001\002\001\
\012\001\002\001\034\001\038\001\039\001\054\001\002\001\002\001\
\057\001\011\001\038\001\039\001\002\001\027\001\028\001\029\001\
\030\001\031\001\025\001\026\001\034\001\035\001\054\001\025\001\
\026\001\057\001\040\001\041\001\042\001\033\001\005\001\038\001\
\039\001\033\001\012\001\007\001\038\001\039\001\052\001\053\001\
\054\001\020\001\008\001\025\001\026\001\003\001\025\001\026\001\
\055\001\025\001\026\001\065\001\066\001\003\001\003\001\069\001\
\038\001\039\001\025\001\038\001\039\001\043\001\038\001\039\001\
\025\001\026\001\019\001\043\001\025\001\026\001\025\001\026\001\
\033\001\004\001\001\001\003\001\033\001\038\001\039\001\001\001\
\019\001\038\001\039\001\038\001\039\001\044\001\003\001\007\001\
\047\001\048\001\049\001\050\001\044\001\008\001\001\001\047\001\
\048\001\049\001\050\001\001\001\004\001\004\001\045\001\008\001\
\033\001\067\001\006\001\033\001\009\001\007\001\006\001\023\001\
\020\001\012\001\038\001\012\001\022\001\003\001\012\001\001\001\
\005\001\014\001\045\001\012\001\019\001\046\001\003\001\003\001\
\001\001\012\001\014\001\003\001\003\001\014\001\014\001\003\001\
\003\001\012\001\003\001\003\001\003\001\007\001\033\001\005\001\
\005\001\003\001\005\001\003\001\003\001\003\001\003\001\003\001\
\003\001\003\001\068\001\002\001\000\000\003\001\007\001\019\001\
\038\001\003\001\005\001\033\001\003\001\003\001\003\001\003\001\
\012\001\003\001\003\001\045\001\007\001\003\001\003\001\003\001\
\007\001\003\001\023\001\000\000\000\000\003\001\000\000\003\001\
\032\001\003\001\005\001\019\001\003\001\068\001\007\001\003\001\
\033\001\033\001\003\001\020\001\003\001\003\001\003\001\023\001\
\114\000\128\000\153\001\007\001\252\000\156\000\079\000\094\001\
\185\000\181\000\134\000\068\001\177\000\167\001\028\001\016\001\
\255\255\033\001\045\001\255\255\255\255\255\255\255\255\046\001\
\045\001"
(* NUL-separated names of the grammar's constant (valueless) tokens,
   used by the Parsing runtime for error reporting.
   NOTE(review): ocamlyacc-generated — regenerate from parser.mly
   rather than editing by hand. *)
let yynames_const = "\
COMMA\000\
LPAREN\000\
RPAREN\000\
LBRACKET\000\
RBRACKET\000\
BAR\000\
SEMI\000\
COLON\000\
NEW\000\
OUT\000\
IN\000\
REPL\000\
LEQ\000\
IF\000\
THEN\000\
ELSE\000\
FIND\000\
ORFIND\000\
SUCHTHAT\000\
DEFINED\000\
EQUAL\000\
DIFF\000\
FUN\000\
FORALL\000\
PARAM\000\
PROBA\000\
TYPE\000\
PROCESS\000\
DOT\000\
EOF\000\
LET\000\
QUERY\000\
SECRET\000\
SECRET1\000\
AND\000\
OR\000\
CONST\000\
CHANNEL\000\
EQUIV\000\
EQUIVLEFT\000\
EQUIVRIGHT\000\
MAPSTO\000\
DEF\000\
MUL\000\
DIV\000\
ADD\000\
SUB\000\
POWER\000\
SET\000\
COLLISION\000\
EVENT\000\
IMPLIES\000\
TIME\000\
YIELD\000\
OTHERUSES\000\
MAXLENGTH\000\
LENGTH\000\
MAX\000\
COUNT\000\
NEWCHANNEL\000\
INJ\000\
DEFINE\000\
EXPAND\000\
LBRACE\000\
RBRACE\000\
PROOF\000\
"
(* NUL-separated names of the tokens that carry a semantic value
   (IDENT/STRING/INT/FLOAT), used by the Parsing runtime.
   NOTE(review): ocamlyacc-generated — regenerate from parser.mly
   rather than editing by hand. *)
let yynames_block = "\
IDENT\000\
STRING\000\
INT\000\
FLOAT\000\
"
let yyact = [|
(fun _ -> failwith "parser")
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 8 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 6 : 'identlist) in
let _7 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _8 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _10 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 165 "parser.mly"
( (FunDecl(_2, _4, _7, _8)) :: _10 )
# 866 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 167 "parser.mly"
( (EventDecl(_2, [])) :: _4 )
# 874 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'identlist) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 169 "parser.mly"
( (EventDecl(_2, _4)) :: _7 )
# 883 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'vartypelist) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 171 "parser.mly"
( (Statement(_2, _4)) :: _6 )
# 892 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'process) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 173 "parser.mly"
( (PDef(_2,_4)) :: _6 )
# 901 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 175 "parser.mly"
( (Setting(_2,S _4)) :: _6 )
# 910 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : int) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 177 "parser.mly"
( (Setting(_2,I _4)) :: _6 )
# 919 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'queryseq) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 179 "parser.mly"
( (Query(_2)) :: _4 )
# 927 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : 'neidentlist) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 181 "parser.mly"
( (List.map (fun x -> (ParamDecl(x, _3))) _2) @ _5 )
# 936 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 183 "parser.mly"
( (ProbabilityDecl(_2)) :: _4 )
# 944 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'neidentlist) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 185 "parser.mly"
( (List.map (fun x -> (ConstDecl(x,_4))) _2) @ _6 )
# 953 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'neidentlist) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 187 "parser.mly"
( (List.map (fun x -> (ChannelDecl(x))) _2) @ _4 )
# 961 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 189 "parser.mly"
( (TypeDecl(_2,_3)) :: _5 )
# 970 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 7 : 'eqmember) in
let _4 = (Parsing.peek_val __caml_parser_env 5 : 'probaf) in
let _6 = (Parsing.peek_val __caml_parser_env 3 : 'optpriority) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : 'eqmember) in
let _9 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 191 "parser.mly"
( (EqStatement(_2, _7, _4, _6)) :: _9 )
# 981 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 10 : 'newlist) in
let _4 = (Parsing.peek_val __caml_parser_env 8 : 'vartypelist) in
let _6 = (Parsing.peek_val __caml_parser_env 6 : Ptree.term_e) in
let _8 = (Parsing.peek_val __caml_parser_env 4 : 'probaf) in
let _10 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _12 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 193 "parser.mly"
( (Collision(_2, _4, _6, _8, _10)) :: _12 )
# 993 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 7 : 'newlist) in
let _3 = (Parsing.peek_val __caml_parser_env 6 : Ptree.term_e) in
let _5 = (Parsing.peek_val __caml_parser_env 4 : 'probaf) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _9 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 195 "parser.mly"
( (Collision(_2, [], _3, _5, _7)) :: _9 )
# 1004 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 7 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 5 : 'identlist) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : Ptree.decl list) in
let _9 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 197 "parser.mly"
( (Define(_2, _4, _7)) :: _9 )
# 1014 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'identlist) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 199 "parser.mly"
( (Expand(_2, _4)) :: _7 )
# 1023 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'proof) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 201 "parser.mly"
( (Proofinfo(_3))::_5 )
# 1031 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
Obj.repr(
# 203 "parser.mly"
( [] )
# 1037 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 208 "parser.mly"
( _1 )
# 1044 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 210 "parser.mly"
( _1 )
# 1051 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 212 "parser.mly"
( string_of_int _1, parse_extent() )
# 1058 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 214 "parser.mly"
( "*", parse_extent() )
# 1064 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 216 "parser.mly"
( ".", parse_extent() )
# 1070 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 218 "parser.mly"
( "set", parse_extent() )
# 1076 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 220 "parser.mly"
( "=", parse_extent() )
# 1082 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 222 "parser.mly"
( ",", parse_extent() )
# 1088 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'prooftoken) in
Obj.repr(
# 226 "parser.mly"
( [_1] )
# 1095 "parser.ml"
: 'proofcommand))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 1 : 'prooftoken) in
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'proofcommand) in
Obj.repr(
# 228 "parser.mly"
( _1 :: _2 )
# 1103 "parser.ml"
: 'proofcommand))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'proofcommand) in
Obj.repr(
# 232 "parser.mly"
( [_1] )
# 1110 "parser.ml"
: 'proof))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'proofcommand) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'proof) in
Obj.repr(
# 234 "parser.mly"
( _1 :: _3 )
# 1118 "parser.ml"
: 'proof))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'neidentlist) in
Obj.repr(
# 238 "parser.mly"
( _2 )
# 1125 "parser.ml"
: 'options))
; (fun __caml_parser_env ->
Obj.repr(
# 240 "parser.mly"
( [] )
# 1131 "parser.ml"
: 'options))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Ptree.decl list) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'process) in
Obj.repr(
# 244 "parser.mly"
( _1 , _3 )
# 1139 "parser.ml"
: Ptree.decl list * Ptree.process_e))
; (fun __caml_parser_env ->
Obj.repr(
# 248 "parser.mly"
( [] )
# 1145 "parser.ml"
: 'identlist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'neidentlist) in
Obj.repr(
# 250 "parser.mly"
( _1 )
# 1152 "parser.ml"
: 'identlist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 254 "parser.mly"
( [_1] )
# 1159 "parser.ml"
: 'neidentlist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'neidentlist) in
Obj.repr(
# 256 "parser.mly"
( _1 :: _3 )
# 1167 "parser.ml"
: 'neidentlist))
; (fun __caml_parser_env ->
Obj.repr(
# 260 "parser.mly"
( [] )
# 1173 "parser.ml"
: 'vartypelist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'nevartypelist) in
Obj.repr(
# 262 "parser.mly"
( _1 )
# 1180 "parser.ml"
: 'vartypelist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 266 "parser.mly"
( [(_1, _3)] )
# 1188 "parser.ml"
: 'nevartypelist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'nevartypelist) in
Obj.repr(
# 268 "parser.mly"
( (_1, _3) :: _5 )
# 1197 "parser.ml"
: 'nevartypelist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 272 "parser.mly"
( PFunApp (_1, _3), parse_extent() )
# 1205 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 274 "parser.mly"
( PInjEvent(_3, []), parse_extent() )
# 1212 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 276 "parser.mly"
( PInjEvent(_3, _5), parse_extent() )
# 1220 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 278 "parser.mly"
( PIdent (_1), parse_extent() )
# 1227 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 280 "parser.mly"
( PArray (_1, _3), parse_extent() )
# 1235 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 282 "parser.mly"
( match _2 with
      [t] -> t (* Allow parentheses for priorities of infix operators;
                  Tuples cannot have one element. *)
    | l -> PTuple(l), parse_extent() )
# 1245 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'findcond) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 287 "parser.mly"
( begin
match _2 with
([],t) -> PTestE(t, _4, _6)
| (def_list, t) ->
PFindE([([], def_list, t, _4)], _6, [])
end, parse_extent() )
# 1259 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : 'options) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'findlistterm) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 294 "parser.mly"
( PFindE(_3, _5, _2), parse_extent() )
# 1268 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : 'pattern) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : Ptree.term_e) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 296 "parser.mly"
( PLetE(_2,_4,_6,Some _8), parse_extent() )
# 1278 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'pattern) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 298 "parser.mly"
( PLetE(_2,_4,_6,None), parse_extent() )
# 1287 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 300 "parser.mly"
( PResE(_2, _4, _6), parse_extent() )
# 1296 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 302 "parser.mly"
( PEventE(_2), parse_extent() )
# 1303 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 304 "parser.mly"
( PEqual(_1, _3), parse_extent() )
# 1311 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 306 "parser.mly"
( PDiff(_1, _3), parse_extent() )
# 1319 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 308 "parser.mly"
( POr(_1, _3), parse_extent() )
# 1327 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 310 "parser.mly"
( PAnd(_1, _3), parse_extent() )
# 1335 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 314 "parser.mly"
( _1,_3 )
# 1343 "parser.ml"
: 'vref))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 316 "parser.mly"
( _1, [] )
# 1350 "parser.ml"
: 'vref))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'vreflist) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'vref) in
Obj.repr(
# 320 "parser.mly"
( None )
# 1358 "parser.ml"
: 'otherusescond))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'vref) in
Obj.repr(
# 322 "parser.mly"
( None )
# 1365 "parser.ml"
: 'otherusescond))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'vref) in
Obj.repr(
# 326 "parser.mly"
( [_1] )
# 1372 "parser.ml"
: 'vreflist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'vref) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'vreflist) in
Obj.repr(
# 328 "parser.mly"
( _1::_3 )
# 1380 "parser.ml"
: 'vreflist))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 5 : 'vreflist) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : 'otherusescond) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 332 "parser.mly"
( (_3, _8) )
# 1389 "parser.ml"
: 'findcond1))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'vreflist) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'otherusescond) in
Obj.repr(
# 334 "parser.mly"
( (_3, cst_true) )
# 1397 "parser.ml"
: 'findcond1))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'vreflist) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 336 "parser.mly"
( (_3, _6) )
# 1405 "parser.ml"
: 'findcond1))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'vreflist) in
Obj.repr(
# 338 "parser.mly"
( (_3, cst_true) )
# 1412 "parser.ml"
: 'findcond1))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'findcond1) in
Obj.repr(
# 342 "parser.mly"
( _1 )
# 1419 "parser.ml"
: 'findcond))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 344 "parser.mly"
( ([], _1) )
# 1426 "parser.ml"
: 'findcond))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'findcond1) in
Obj.repr(
# 346 "parser.mly"
( _2 )
# 1433 "parser.ml"
: 'findcond))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : 'tidentseq) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'findcond) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 350 "parser.mly"
( let (def_list, t) = _3 in
(_1, def_list, t, _5) )
# 1443 "parser.ml"
: 'findoneterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'findoneterm) in
Obj.repr(
# 355 "parser.mly"
( [_1] )
# 1450 "parser.ml"
: 'findlistterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'findoneterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'findlistterm) in
Obj.repr(
# 357 "parser.mly"
( _1 :: _3 )
# 1458 "parser.ml"
: 'findlistterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 361 "parser.mly"
( [_1,_3] )
# 1466 "parser.ml"
: 'netidentseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'netidentseq) in
Obj.repr(
# 363 "parser.mly"
( (_1,_3)::_5 )
# 1475 "parser.ml"
: 'netidentseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'netidentseq) in
Obj.repr(
# 367 "parser.mly"
( _1 )
# 1482 "parser.ml"
: 'tidentseq))
; (fun __caml_parser_env ->
Obj.repr(
# 369 "parser.mly"
( [] )
# 1488 "parser.ml"
: 'tidentseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'netermseq) in
Obj.repr(
# 373 "parser.mly"
( _1 :: _3 )
# 1496 "parser.ml"
: 'netermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 375 "parser.mly"
( [_1] )
# 1503 "parser.ml"
: 'netermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'netermseq) in
Obj.repr(
# 379 "parser.mly"
( _1 )
# 1510 "parser.ml"
: 'termseq))
; (fun __caml_parser_env ->
Obj.repr(
# 381 "parser.mly"
( [] )
# 1516 "parser.ml"
: 'termseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'process) in
Obj.repr(
# 385 "parser.mly"
( _2 )
# 1523 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 387 "parser.mly"
( PLetDef _1, parse_extent() )
# 1530 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 389 "parser.mly"
( PRepl (new_repl_occ(),None,_2,_3), parse_extent() )
# 1538 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 391 "parser.mly"
( PRepl (new_repl_occ(),Some _2,_4,_5), parse_extent() )
# 1547 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 393 "parser.mly"
( let x = _1 in
if x = 0 then PNil, parse_extent() else
input_error ("The only integer in a process is 0 for the nil process") (parse_extent()) )
# 1556 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'optprocess) in
Obj.repr(
# 397 "parser.mly"
( PRestr(_2, _4, _5), parse_extent() )
# 1565 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : 'findcond) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : 'process) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'optelse) in
Obj.repr(
# 399 "parser.mly"
( match _2 with
([], t) -> PTest(t, _4, _5), parse_extent()
| (def_list, t) ->
PFind([([], def_list, t, _4)], _5, []), parse_extent() )
# 1577 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'findlistproc) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'optelse) in
Obj.repr(
# 404 "parser.mly"
( PFind(_3,_4,_2), parse_extent() )
# 1586 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'optprocess) in
Obj.repr(
# 406 "parser.mly"
( PEvent((PFunApp(_2, []), parse_extent()), _3), parse_extent() )
# 1594 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'termseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'optprocess) in
Obj.repr(
# 408 "parser.mly"
( PEvent((PFunApp(_2, _4), parse_extent()), _6), parse_extent() )
# 1603 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 410 "parser.mly"
( PLet(_2,_4,(PYield, parse_extent()),(PYield, parse_extent())), parse_extent() )
# 1611 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : 'pattern) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : Ptree.term_e) in
let _6 = (Parsing.peek_val __caml_parser_env 1 : 'process) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : 'optelse) in
Obj.repr(
# 412 "parser.mly"
( PLet(_2,_4,_6,_7), parse_extent() )
# 1621 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 4 : Ptree.term_e) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : 'optprocess) in
Obj.repr(
# 414 "parser.mly"
( PInput(_3,_5,_7), parse_extent() )
# 1630 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 4 : Ptree.term_e) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : 'optinputprocess) in
Obj.repr(
# 416 "parser.mly"
( POutput(_3,_5,_7), parse_extent() )
# 1639 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
Obj.repr(
# 418 "parser.mly"
( PYield, parse_extent() )
# 1645 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'process) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 420 "parser.mly"
( PPar(_1,_3), parse_extent() )
# 1653 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : 'tidentseq) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'findcond) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 424 "parser.mly"
( let (def_list, t) = _3 in
(_1, def_list, t, _5) )
# 1663 "parser.ml"
: 'findoneproc))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'findoneproc) in
Obj.repr(
# 429 "parser.mly"
( [_1] )
# 1670 "parser.ml"
: 'findlistproc))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'findoneproc) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'findlistproc) in
Obj.repr(
# 431 "parser.mly"
( _1 :: _3 )
# 1678 "parser.ml"
: 'findlistproc))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 435 "parser.mly"
( _2 )
# 1685 "parser.ml"
: 'optprocess))
; (fun __caml_parser_env ->
Obj.repr(
# 437 "parser.mly"
( PYield, parse_extent() )
# 1691 "parser.ml"
: 'optprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 441 "parser.mly"
( _2 )
# 1698 "parser.ml"
: 'optinputprocess))
; (fun __caml_parser_env ->
Obj.repr(
# 443 "parser.mly"
( PNil, parse_extent() )
# 1704 "parser.ml"
: 'optinputprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 447 "parser.mly"
( _2 )
# 1711 "parser.ml"
: 'optelse))
; (fun __caml_parser_env ->
Obj.repr(
# 449 "parser.mly"
( PYield, parse_extent() )
# 1717 "parser.ml"
: 'optelse))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 453 "parser.mly"
( PPatVar(_1,None), parse_extent() )
# 1724 "parser.ml"
: 'pattern))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 455 "parser.mly"
( PPatVar(_1,Some _3), parse_extent() )
# 1732 "parser.ml"
: 'pattern))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'patternseq) in
Obj.repr(
# 457 "parser.mly"
( PPatFunApp(_1,_3), parse_extent() )
# 1740 "parser.ml"
: 'pattern))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'patternseq) in
Obj.repr(
# 459 "parser.mly"
( match _2 with
      [t] -> t (* Allow parentheses for priorities of infix operators;
                  Tuples cannot have one element. *)
    | l -> PPatTuple(_2), parse_extent() )
# 1750 "parser.ml"
: 'pattern))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 464 "parser.mly"
( PPatEqual(_2), parse_extent() )
# 1757 "parser.ml"
: 'pattern))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'nepatternseq) in
Obj.repr(
# 468 "parser.mly"
( _1 :: _3 )
# 1765 "parser.ml"
: 'nepatternseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in
Obj.repr(
# 470 "parser.mly"
( [_1] )
# 1772 "parser.ml"
: 'nepatternseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'nepatternseq) in
Obj.repr(
# 474 "parser.mly"
( _1 )
# 1779 "parser.ml"
: 'patternseq))
; (fun __caml_parser_env ->
Obj.repr(
# 476 "parser.mly"
( [] )
# 1785 "parser.ml"
: 'patternseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'query) in
Obj.repr(
# 480 "parser.mly"
( [_1] )
# 1792 "parser.ml"
: 'queryseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'query) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'queryseq) in
Obj.repr(
# 482 "parser.mly"
( _1::_3 )
# 1800 "parser.ml"
: 'queryseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 486 "parser.mly"
( PQSecret _2 )
# 1807 "parser.ml"
: 'query))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 488 "parser.mly"
( PQSecret1 _2 )
# 1814 "parser.ml"
: 'query))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 5 : 'vartypelist) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 490 "parser.mly"
( PQEvent(_1, _4, _6) )
# 1823 "parser.ml"
: 'query))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 492 "parser.mly"
( PQEvent([], _2, _4) )
# 1831 "parser.ml"
: 'query))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'funmode) in
Obj.repr(
# 496 "parser.mly"
( [_1], parse_extent() )
# 1838 "parser.ml"
: 'eqmember))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'funmode) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'eqmember) in
Obj.repr(
# 498 "parser.mly"
( _1 :: (fst _3), parse_extent() )
# 1846 "parser.ml"
: 'eqmember))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fungroup) in
Obj.repr(
# 503 "parser.mly"
( _1,None, parse_extent() )
# 1853 "parser.ml"
: 'funmode))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : 'fungroup) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
Obj.repr(
# 505 "parser.mly"
( _1,Some _3, parse_extent() )
# 1861 "parser.ml"
: 'funmode))
; (fun __caml_parser_env ->
Obj.repr(
# 509 "parser.mly"
( [] )
# 1867 "parser.ml"
: 'newlist))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'newlist) in
Obj.repr(
# 511 "parser.mly"
( (_2,_4)::_6 )
# 1876 "parser.ml"
: 'newlist))
; (fun __caml_parser_env ->
Obj.repr(
# 515 "parser.mly"
( [] )
# 1882 "parser.ml"
: 'newlistopt))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : 'newlistopt) in
Obj.repr(
# 517 "parser.mly"
( (_2,_4,_5)::_7 )
# 1892 "parser.ml"
: 'newlistopt))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fungroup) in
Obj.repr(
# 521 "parser.mly"
( [_1] )
# 1899 "parser.ml"
: 'funlist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'fungroup) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'funlist) in
Obj.repr(
# 523 "parser.mly"
( _1 :: _3 )
# 1907 "parser.ml"
: 'funlist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : 'options) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : int) in
Obj.repr(
# 527 "parser.mly"
( _3, _1 )
# 1915 "parser.ml"
: 'optpriority))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : int) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'options) in
Obj.repr(
# 529 "parser.mly"
( _2, _4 )
# 1923 "parser.ml"
: 'optpriority))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'options) in
Obj.repr(
# 531 "parser.mly"
( 0, _1 )
# 1930 "parser.ml"
: 'optpriority))
; (fun __caml_parser_env ->
Obj.repr(
# 535 "parser.mly"
( [] )
# 1936 "parser.ml"
: 'vartypeilist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'nevartypeilist) in
Obj.repr(
# 537 "parser.mly"
( _1 )
# 1943 "parser.ml"
: 'vartypeilist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 541 "parser.mly"
( [(_1, Tid _3)] )
# 1951 "parser.ml"
: 'nevartypeilist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'nevartypeilist) in
Obj.repr(
# 543 "parser.mly"
( (_1, Tid _3) :: _5 )
# 1960 "parser.ml"
: 'nevartypeilist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 545 "parser.mly"
( [(_1, TBound _3)] )
# 1968 "parser.ml"
: 'nevartypeilist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'nevartypeilist) in
Obj.repr(
# 547 "parser.mly"
( (_1, TBound _3) :: _5 )
# 1977 "parser.ml"
: 'nevartypeilist))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'vartypeilist) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'optpriority) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 551 "parser.mly"
( PFun(dummy_channel, _2, _6, _4) )
# 1986 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : 'vartypeilist) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 3 : 'optpriority) in
let _7 = (Parsing.peek_val __caml_parser_env 1 : 'newlistopt) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 553 "parser.mly"
( PReplRestr((new_repl_occ(), None, _4), _7, [PFun(dummy_channel, _2, _8, _5)]) )
# 1997 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 6 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 4 : 'vartypeilist) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'optpriority) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 555 "parser.mly"
( PFun(_1, _3, _7, _5) )
# 2007 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'newlistopt) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'fungroup) in
Obj.repr(
# 557 "parser.mly"
( PReplRestr((new_repl_occ(), None, _2), _3, [_4]) )
# 2016 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'newlistopt) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'funlist) in
Obj.repr(
# 559 "parser.mly"
( PReplRestr((new_repl_occ(), None, _2), _3, _5) )
# 2025 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'newlistopt) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'fungroup) in
Obj.repr(
# 561 "parser.mly"
( PReplRestr((new_repl_occ(), Some _2, _4), _5, [_6]) )
# 2035 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 3 : 'newlistopt) in
let _7 = (Parsing.peek_val __caml_parser_env 1 : 'funlist) in
Obj.repr(
# 563 "parser.mly"
( PReplRestr((new_repl_occ(), Some _2, _4), _5, _7) )
# 2045 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'probaf) in
Obj.repr(
# 567 "parser.mly"
( _2 )
# 2052 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'probaf) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'probaf) in
Obj.repr(
# 569 "parser.mly"
( PAdd(_1,_3), parse_extent() )
# 2060 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'probaf) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'probaf) in
Obj.repr(
# 571 "parser.mly"
( PSub(_1, _3), parse_extent() )
# 2068 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'probaf) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'probaf) in
Obj.repr(
# 573 "parser.mly"
( PProd(_1,_3), parse_extent() )
# 2076 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'probaf) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'probaf) in
Obj.repr(
# 575 "parser.mly"
( PDiv(_1,_3), parse_extent() )
# 2084 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'probaflist) in
Obj.repr(
# 577 "parser.mly"
( PMax(_3), parse_extent() )
# 2091 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 579 "parser.mly"
( (PPIdent _1), parse_extent() )
# 2098 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 581 "parser.mly"
( (PCount _2), parse_extent() )
# 2105 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'probaflist) in
Obj.repr(
# 583 "parser.mly"
( (PPFun(_1,_3)), parse_extent() )
# 2113 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
Obj.repr(
# 585 "parser.mly"
( PCard(_2), parse_extent() )
# 2120 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
Obj.repr(
# 587 "parser.mly"
( PTime, parse_extent() )
# 2126 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 589 "parser.mly"
( PActTime(PAFunApp _3, _4), parse_extent() )
# 2134 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 591 "parser.mly"
( PActTime(PAPatFunApp _4, _5), parse_extent() )
# 2142 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
Obj.repr(
# 593 "parser.mly"
( PActTime(PAReplIndex, []), parse_extent() )
# 2148 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 2 : int) in
Obj.repr(
# 595 "parser.mly"
( PActTime(PAArrayAccess _4, []), parse_extent() )
# 2155 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 597 "parser.mly"
( PActTime(PACompare _4, _5), parse_extent() )
# 2163 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'identlist) in
let _6 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 599 "parser.mly"
( PActTime(PAAppTuple _4, _6), parse_extent() )
# 2171 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _5 = (Parsing.peek_val __caml_parser_env 3 : 'identlist) in
let _7 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 601 "parser.mly"
( PActTime(PAPatTuple _5, _7), parse_extent() )
# 2179 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
Obj.repr(
# 603 "parser.mly"
( PActTime(PAAnd, []), parse_extent() )
# 2185 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
Obj.repr(
# 605 "parser.mly"
( PActTime(PAOr, []), parse_extent() )
# 2191 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
Obj.repr(
# 607 "parser.mly"
( PActTime(PANew _4, []), parse_extent() )
# 2198 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
Obj.repr(
# 609 "parser.mly"
( PActTime(PANewChannel, []), parse_extent() )
# 2204 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
Obj.repr(
# 611 "parser.mly"
( PActTime(PAIf, []), parse_extent() )
# 2210 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 1 : int) in
Obj.repr(
# 613 "parser.mly"
( PActTime(PAFind _4, []), parse_extent() )
# 2217 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 615 "parser.mly"
( PActTime(PAOut([], _4), _5), parse_extent() )
# 2225 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _5 = (Parsing.peek_val __caml_parser_env 4 : 'neidentlist) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _8 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 617 "parser.mly"
( PActTime(PAOut(_5, _7), _8), parse_extent() )
# 2234 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 1 : int) in
Obj.repr(
# 619 "parser.mly"
( PActTime(PAIn _4, []), parse_extent() )
# 2241 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 621 "parser.mly"
( let x = _1 in
if x = 0 then (PPZero,parse_extent()) else
(PCst x,parse_extent()) )
# 2250 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : float) in
Obj.repr(
# 625 "parser.mly"
( let x = _1 in
if x = 0.0 then (PPZero,parse_extent()) else
(PFloatCst x,parse_extent()) )
# 2259 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : Ptree.term_e) in
Obj.repr(
# 629 "parser.mly"
( PMaxlength(_3), parse_extent() )
# 2266 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 631 "parser.mly"
( PLength(_3, _4), parse_extent() )
# 2274 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'identlist) in
let _6 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 633 "parser.mly"
( PLengthTuple(_4, _6), parse_extent() )
# 2282 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'probaflist) in
Obj.repr(
# 637 "parser.mly"
( _2 )
# 2289 "parser.ml"
: 'probaflistopt))
; (fun __caml_parser_env ->
Obj.repr(
# 639 "parser.mly"
( [] )
# 2295 "parser.ml"
: 'probaflistopt))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'probaf) in
Obj.repr(
# 643 "parser.mly"
( [_1] )
# 2302 "parser.ml"
: 'probaflist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'probaf) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'probaflist) in
Obj.repr(
# 645 "parser.mly"
( _1 :: _3 )
# 2310 "parser.ml"
: 'probaflist))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 651 "parser.mly"
( PRestr(_2, _4, (PYield, parse_extent())), parse_extent() )
# 2318 "parser.ml"
: Ptree.process_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'findcond) in
Obj.repr(
# 653 "parser.mly"
(
let yield = (PYield, parse_extent()) in
match _2 with
([], t) -> PTest(t, yield, yield), parse_extent()
| (def_list, t) ->
PFind([([], def_list, t, yield)], yield, []), parse_extent()
)
# 2331 "parser.ml"
: Ptree.process_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'findlistins) in
Obj.repr(
# 661 "parser.mly"
( PFind(_2, (PYield, parse_extent()), []), parse_extent() )
# 2338 "parser.ml"
: Ptree.process_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 663 "parser.mly"
( PEvent((PFunApp(_2, []), parse_extent()), (PYield, parse_extent())), parse_extent() )
# 2345 "parser.ml"
: Ptree.process_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : 'pattern) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : Ptree.term_e) in
Obj.repr(
# 665 "parser.mly"
( PLet(_2,_4,(PYield, parse_extent()),(PYield, parse_extent())), parse_extent() )
# 2353 "parser.ml"
: Ptree.process_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : 'tidentseq) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'findcond) in
Obj.repr(
# 669 "parser.mly"
( let (def_list, t) = _3 in
(_1, def_list, t, (PYield, parse_extent())) )
# 2362 "parser.ml"
: 'findoneins))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'findoneins) in
Obj.repr(
# 674 "parser.mly"
( [_1] )
# 2369 "parser.ml"
: 'findlistins))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'findoneins) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'findlistins) in
Obj.repr(
# 676 "parser.mly"
( _1 :: _3 )
# 2377 "parser.ml"
: 'findlistins))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 683 "parser.mly"
( (_1, 1) )
# 2384 "parser.ml"
: 'factor))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 685 "parser.mly"
( (_1, _3) )
# 2392 "parser.ml"
: 'factor))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'factor) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'num) in
Obj.repr(
# 689 "parser.mly"
( _1 :: _3 )
# 2400 "parser.ml"
: 'num))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'factor) in
Obj.repr(
# 691 "parser.mly"
( [_1] )
# 2407 "parser.ml"
: 'num))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'num) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 695 "parser.mly"
( (_1, Some _3) )
# 2415 "parser.ml"
: 'quot))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'num) in
Obj.repr(
# 697 "parser.mly"
( (_3, None) )
# 2422 "parser.ml"
: 'quot))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'quot) in
Obj.repr(
# 701 "parser.mly"
( [_1] )
# 2429 "parser.ml"
: ((Ptree.ident * int) list * Ptree.ident option) list))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'quot) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : ((Ptree.ident * int) list * Ptree.ident option) list) in
Obj.repr(
# 703 "parser.mly"
( _1 :: _3 )
# 2437 "parser.ml"
: ((Ptree.ident * int) list * Ptree.ident option) list))
(* Entry all *)
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
(* Entry lib *)
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
(* Entry instruct *)
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
(* Entry term *)
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
(* Entry allowed_coll *)
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
|]
(* Parser driver record consumed by [Parsing.yyparse].  Bundles the
   ocamlyacc-generated LALR tables (lhs/len/defred/dgoto/sindex/rindex/
   gindex/table/check), the token-translation arrays, the semantic-action
   array [yyact], and the error hook [parse_error].  NOTE: this file is
   generated from parser.mly — do not edit by hand; regenerate instead. *)
let yytables =
  { Parsing.actions=yyact;
    Parsing.transl_const=yytransl_const;
    Parsing.transl_block=yytransl_block;
    Parsing.lhs=yylhs;
    Parsing.len=yylen;
    Parsing.defred=yydefred;
    Parsing.dgoto=yydgoto;
    Parsing.sindex=yysindex;
    Parsing.rindex=yyrindex;
    Parsing.gindex=yygindex;
    Parsing.tablesize=yytablesize;
    Parsing.table=yytable;
    Parsing.check=yycheck;
    Parsing.error_function=parse_error;
    Parsing.names_const=yynames_const;
    Parsing.names_block=yynames_block }
(* Entry point 1: parse a complete input file, yielding the declaration
   list together with the top-level process. *)
let all (tokenizer : Lexing.lexbuf -> token) (buf : Lexing.lexbuf) =
  (Parsing.yyparse yytables 1 tokenizer buf : Ptree.decl list * Ptree.process_e)
(* Entry point 2: parse a library file, yielding only declarations. *)
let lib (tokenizer : Lexing.lexbuf -> token) (buf : Lexing.lexbuf) =
  (Parsing.yyparse yytables 2 tokenizer buf : Ptree.decl list)
(* Entry point 3: parse a single process expression. *)
let instruct (tokenizer : Lexing.lexbuf -> token) (buf : Lexing.lexbuf) =
  (Parsing.yyparse yytables 3 tokenizer buf : Ptree.process_e)
(* Entry point 4: parse a single term. *)
let term (tokenizer : Lexing.lexbuf -> token) (buf : Lexing.lexbuf) =
  (Parsing.yyparse yytables 4 tokenizer buf : Ptree.term_e)
(* Entry point 5: parse an allowed-collisions specification, a list of
   (factor-list, optional type) pairs. *)
let allowed_coll (tokenizer : Lexing.lexbuf -> token) (buf : Lexing.lexbuf) =
  (Parsing.yyparse yytables 5 tokenizer buf
     : ((Ptree.ident * int) list * Ptree.ident option) list)
| null | https://raw.githubusercontent.com/tari3x/csec-modex/5ab2aa18ef308b4d18ac479e5ab14476328a6a50/src/symtrace/cryptoverif/parser.ml | ocaml | COMMA
LPAREN
RPAREN
LBRACKET
RBRACKET
SEMI
COLON
NEW
OUT
IN
REPL
IF
THEN
ELSE
FIND
DEFINED
FUN
FORALL
TYPE
PROCESS
LET
QUERY
SECRET
AND
OR
CHANNEL
MAPSTO
DEF
MUL
ADD
SUB
POWER
SET
COLLISION
EVENT
IMPLIES
YIELD
LENGTH
COUNT
DEFINE
EXPAND
LBRACE
PROOF
IDENT
STRING
INT
Entry all
Entry lib
Entry instruct
Entry term
Entry allowed_coll | type token =
| COMMA
| LPAREN
| RPAREN
| LBRACKET
| RBRACKET
| BAR
| SEMI
| COLON
| NEW
| OUT
| IN
| IDENT of (Ptree.ident)
| STRING of (Ptree.ident)
| INT of (int)
| FLOAT of (float)
| REPL
| LEQ
| IF
| THEN
| ELSE
| FIND
| ORFIND
| SUCHTHAT
| DEFINED
| EQUAL
| DIFF
| FUN
| FORALL
| PARAM
| PROBA
| TYPE
| PROCESS
| DOT
| EOF
| LET
| QUERY
| SECRET
| SECRET1
| AND
| OR
| CONST
| CHANNEL
| EQUIV
| EQUIVLEFT
| EQUIVRIGHT
| MAPSTO
| DEF
| MUL
| DIV
| ADD
| SUB
| POWER
| SET
| COLLISION
| EVENT
| IMPLIES
| TIME
| YIELD
| OTHERUSES
| MAXLENGTH
| LENGTH
| MAX
| COUNT
| NEWCHANNEL
| INJ
| DEFINE
| EXPAND
| LBRACE
| RBRACE
| PROOF
open Parsing;;
# 2 "parser.mly"
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* *
* *
* Copyright ( C ) ENS , CNRS , INRIA , 2005 - 2011 *
* *
* * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * * *
* *
* Cryptographic protocol verifier *
* *
* Bruno Blanchet *
* *
* Copyright (C) ENS, CNRS, INRIA, 2005-2011 *
* *
*************************************************************)
Copyright ENS , CNRS , INRIA
contributor : ,
This software is a computer program whose purpose is to verify
cryptographic protocols in the computational model .
This software is governed by the CeCILL - B license under French law and
abiding by the rules of distribution of free software . You can use ,
modify and/ or redistribute the software under the terms of the CeCILL - B
license as circulated by CEA , CNRS and INRIA at the following URL
" " .
As a counterpart to the access to the source code and rights to copy ,
modify and redistribute granted by the license , users are provided only
with a limited warranty and the software 's author , the holder of the
economic rights , and the successive licensors have only limited
liability .
In this respect , the user 's attention is drawn to the risks associated
with loading , using , modifying and/or developing or reproducing the
software by the user in light of its specific status of free software ,
that may mean that it is complicated to manipulate , and that also
therefore means that it is reserved for developers and experienced
professionals having in - depth computer knowledge . Users are therefore
encouraged to load and test the software 's suitability as regards their
requirements in conditions enabling the security of their systems and/or
data to be ensured and , more generally , to use and operate it in the
same conditions as regards security .
The fact that you are presently reading this means that you have had
knowledge of the CeCILL - B license and that you accept its terms .
Copyright ENS, CNRS, INRIA
contributor: Bruno Blanchet,
This software is a computer program whose purpose is to verify
cryptographic protocols in the computational model.
This software is governed by the CeCILL-B license under French law and
abiding by the rules of distribution of free software. You can use,
modify and/ or redistribute the software under the terms of the CeCILL-B
license as circulated by CEA, CNRS and INRIA at the following URL
"".
As a counterpart to the access to the source code and rights to copy,
modify and redistribute granted by the license, users are provided only
with a limited warranty and the software's author, the holder of the
economic rights, and the successive licensors have only limited
liability.
In this respect, the user's attention is drawn to the risks associated
with loading, using, modifying and/or developing or reproducing the
software by the user in light of its specific status of free software,
that may mean that it is complicated to manipulate, and that also
therefore means that it is reserved for developers and experienced
professionals having in-depth computer knowledge. Users are therefore
encouraged to load and test the software's suitability as regards their
requirements in conditions enabling the security of their systems and/or
data to be ensured and, more generally, to use and operate it in the
same conditions as regards security.
The fact that you are presently reading this means that you have had
knowledge of the CeCILL-B license and that you accept its terms.
*)
# 49 "parser.mly"
open Parsing_helper
open Ptree
exception Syntax
let repl_counter = ref 0
let new_repl_occ () =
incr repl_counter;
!repl_counter
let cst_true = (PIdent ("true", dummy_ext), dummy_ext)
let dummy_channel = ("@dummy_channel", dummy_ext)
# 137 "parser.ml"
let yytransl_const = [|
BAR
LEQ
ORFIND
SUCHTHAT
EQUAL
DIFF
PARAM
PROBA
DOT
EOF
SECRET1
CONST
EQUIV
EQUIVRIGHT
DIV
TIME
OTHERUSES
MAXLENGTH
MAX
NEWCHANNEL
INJ
RBRACE
0|]
let yytransl_block = [|
FLOAT
0|]
let yylhs = "\255\255\
\002\000\002\000\002\000\002\000\002\000\002\000\002\000\002\000\
\002\000\002\000\002\000\002\000\002\000\002\000\002\000\002\000\
\002\000\002\000\002\000\002\000\017\000\017\000\017\000\017\000\
\017\000\017\000\017\000\017\000\018\000\018\000\016\000\016\000\
\007\000\007\000\001\000\006\000\006\000\011\000\011\000\008\000\
\008\000\019\000\019\000\004\000\004\000\004\000\004\000\004\000\
\004\000\004\000\004\000\004\000\004\000\004\000\004\000\004\000\
\004\000\004\000\004\000\024\000\024\000\025\000\025\000\026\000\
\026\000\027\000\027\000\027\000\027\000\021\000\021\000\021\000\
\028\000\022\000\022\000\030\000\030\000\029\000\029\000\031\000\
\031\000\020\000\020\000\009\000\009\000\009\000\009\000\009\000\
\009\000\009\000\009\000\009\000\009\000\009\000\009\000\009\000\
\009\000\009\000\009\000\036\000\034\000\034\000\032\000\032\000\
\035\000\035\000\033\000\033\000\023\000\023\000\023\000\023\000\
\023\000\038\000\038\000\037\000\037\000\010\000\010\000\039\000\
\039\000\039\000\039\000\012\000\012\000\040\000\040\000\015\000\
\015\000\042\000\042\000\043\000\043\000\014\000\014\000\014\000\
\044\000\044\000\045\000\045\000\045\000\045\000\041\000\041\000\
\041\000\041\000\041\000\041\000\041\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\013\000\013\000\013\000\
\013\000\013\000\013\000\013\000\013\000\047\000\047\000\046\000\
\046\000\003\000\003\000\003\000\003\000\003\000\049\000\048\000\
\048\000\050\000\050\000\051\000\051\000\052\000\052\000\005\000\
\005\000\000\000\000\000\000\000\000\000\000\000"
let yylen = "\002\000\
\010\000\004\000\007\000\006\000\006\000\006\000\006\000\004\000\
\005\000\004\000\006\000\004\000\005\000\009\000\012\000\009\000\
\009\000\007\000\005\000\000\000\001\000\001\000\001\000\001\000\
\001\000\001\000\001\000\001\000\001\000\002\000\001\000\003\000\
\003\000\000\000\004\000\000\000\001\000\001\000\003\000\000\000\
\001\000\003\000\005\000\004\000\003\000\006\000\001\000\004\000\
\003\000\006\000\005\000\008\000\006\000\006\000\002\000\003\000\
\003\000\003\000\003\000\004\000\001\000\006\000\004\000\001\000\
\003\000\008\000\006\000\006\000\004\000\001\000\001\000\003\000\
\005\000\001\000\003\000\003\000\005\000\001\000\000\000\003\000\
\001\000\001\000\000\000\003\000\001\000\003\000\005\000\001\000\
\005\000\005\000\004\000\003\000\006\000\004\000\007\000\007\000\
\007\000\001\000\003\000\005\000\001\000\003\000\002\000\000\000\
\002\000\000\000\002\000\000\000\001\000\003\000\004\000\003\000\
\002\000\003\000\001\000\001\000\000\000\001\000\003\000\002\000\
\002\000\006\000\004\000\001\000\003\000\001\000\004\000\000\000\
\006\000\000\000\007\000\001\000\003\000\004\000\004\000\001\000\
\000\000\001\000\003\000\005\000\003\000\005\000\006\000\008\000\
\007\000\004\000\006\000\006\000\008\000\003\000\003\000\003\000\
\003\000\003\000\004\000\001\000\002\000\004\000\003\000\001\000\
\005\000\006\000\004\000\006\000\006\000\007\000\008\000\004\000\
\004\000\005\000\004\000\004\000\005\000\006\000\009\000\005\000\
\001\000\001\000\004\000\005\000\007\000\002\000\000\000\001\000\
\003\000\004\000\003\000\002\000\002\000\005\000\004\000\001\000\
\003\000\001\000\003\000\003\000\001\000\003\000\003\000\001\000\
\003\000\002\000\002\000\002\000\002\000\002\000"
let yydefred = "\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\202\000\000\000\
\203\000\000\000\000\000\000\000\000\000\000\000\204\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\206\000\000\000\000\000\000\000\000\000\000\000\
\000\000\041\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\070\000\000\000\000\000\078\000\188\000\000\000\
\000\000\000\000\000\000\000\000\189\000\000\000\000\000\082\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\055\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\120\000\121\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\138\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\028\000\021\000\022\000\023\000\027\000\025\000\
\024\000\026\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\085\000\088\000\000\000\000\000\000\000\000\000\000\000\
\098\000\000\000\000\000\000\000\000\000\187\000\000\000\000\000\
\000\000\000\000\000\000\116\000\000\000\000\000\000\000\000\000\
\000\000\049\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\057\000\000\000\000\000\
\195\000\199\000\196\000\198\000\201\000\000\000\037\000\000\000\
\000\000\039\000\000\000\010\000\000\000\000\000\000\000\000\000\
\008\000\119\000\000\000\012\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\177\000\178\000\
\000\000\000\000\000\000\000\000\000\000\000\000\125\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\002\000\000\000\
\000\000\000\000\030\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\035\000\186\000\
\072\000\000\000\000\000\000\000\000\000\000\000\193\000\000\000\
\112\000\000\000\110\000\000\000\080\000\000\000\044\000\048\000\
\000\000\033\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\009\000\013\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\146\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\157\000\000\000\000\000\000\000\000\000\000\000\
\127\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\019\000\032\000\084\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\092\000\099\000\000\000\000\000\000\000\000\000\191\000\114\000\
\111\000\190\000\000\000\000\000\000\000\075\000\000\000\000\000\
\000\000\000\000\043\000\004\000\005\000\000\000\011\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\150\000\159\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\153\000\154\000\000\000\000\000\006\000\007\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\091\000\000\000\000\000\
\000\000\000\000\000\000\065\000\000\000\077\000\000\000\000\000\
\000\000\000\000\046\000\000\000\000\000\140\000\142\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\148\000\000\000\
\147\000\000\000\158\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\163\000\172\000\000\000\000\000\000\000\
\000\000\168\000\169\000\171\000\179\000\000\000\000\000\155\000\
\000\000\129\000\000\000\000\000\003\000\000\000\018\000\089\000\
\000\000\000\000\000\000\090\000\000\000\000\000\102\000\000\000\
\000\000\060\000\000\000\000\000\000\000\000\000\000\000\000\000\
\135\000\000\000\134\000\000\000\000\000\000\000\133\000\185\000\
\000\000\000\000\170\000\000\000\000\000\176\000\182\000\161\000\
\173\000\000\000\000\000\000\000\000\000\180\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\093\000\000\000\
\000\000\000\000\000\000\000\000\000\000\149\000\000\000\164\000\
\000\000\174\000\165\000\000\000\162\000\000\000\014\000\000\000\
\016\000\017\000\000\000\097\000\096\000\000\000\095\000\000\000\
\000\000\000\000\001\000\131\000\166\000\000\000\000\000\181\000\
\000\000\000\000\063\000\000\000\000\000\167\000\000\000\000\000\
\175\000\015\000\062\000"
let yydgoto = "\006\000\
\023\000\024\000\031\000\094\000\043\000\206\000\045\001\059\000\
\170\000\060\000\207\000\067\000\126\001\046\001\072\000\155\000\
\156\000\157\000\050\000\095\000\082\000\192\000\178\000\011\001\
\237\001\012\001\083\000\193\000\085\000\086\000\096\000\089\001\
\166\001\084\001\036\002\085\001\179\000\180\000\061\000\068\000\
\069\000\227\000\123\001\132\000\133\000\127\001\203\001\087\000\
\088\000\044\000\045\000\046\000"
let yysindex = "\048\003\
\043\002\043\002\039\255\066\255\103\255\000\000\034\255\085\255\
\127\255\208\255\221\255\226\255\181\255\127\255\127\255\132\000\
\050\000\001\000\063\000\078\000\087\000\118\255\000\000\230\255\
\000\000\097\000\038\255\128\000\155\255\133\000\000\000\066\255\
\139\000\043\000\038\255\075\000\155\255\150\000\124\000\102\002\
\115\000\144\000\000\000\152\000\162\000\202\000\209\000\208\000\
\212\000\000\000\225\000\075\000\198\000\075\000\210\000\230\000\
\233\000\066\255\234\000\204\000\237\000\250\000\239\000\254\000\
\016\001\007\001\240\000\020\001\024\001\010\001\025\001\062\255\
\043\255\052\001\062\001\209\255\025\002\058\001\165\001\063\001\
\102\002\051\001\000\000\061\001\064\001\000\000\000\000\053\001\
\155\255\007\000\066\255\068\001\000\000\008\001\077\001\000\000\
\093\001\066\255\066\255\069\001\127\255\128\000\078\001\000\000\
\096\001\066\255\066\255\066\255\066\255\091\001\114\001\114\001\
\115\001\103\255\127\255\117\001\066\255\127\255\105\001\043\002\
\119\001\025\002\000\000\000\000\222\001\107\001\043\002\181\255\
\122\001\043\002\124\255\156\001\000\000\254\000\250\255\209\001\
\132\000\160\001\037\000\167\001\085\255\075\002\127\255\043\002\
\127\255\127\255\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\100\001\209\255\166\001\025\002\164\001\178\001\
\180\001\000\000\000\000\173\001\038\255\075\000\155\255\176\001\
\000\000\017\000\181\001\187\001\186\001\000\000\192\001\038\255\
\128\000\204\001\203\001\000\000\155\255\195\001\151\255\066\255\
\066\255\000\000\200\001\210\001\228\001\066\255\230\001\218\001\
\220\001\226\001\066\255\241\001\225\001\000\000\099\000\102\002\
\000\000\000\000\000\000\000\000\000\000\253\001\000\000\002\002\
\030\001\000\000\043\002\000\000\043\002\048\255\066\255\066\255\
\000\000\000\000\229\001\000\000\252\001\255\001\091\000\013\002\
\016\002\018\002\231\000\209\001\026\002\054\002\000\000\000\000\
\056\002\061\002\062\002\067\002\045\002\101\001\000\000\082\002\
\053\002\057\002\079\002\085\002\209\001\099\002\000\000\107\002\
\108\002\043\002\000\000\209\255\131\255\091\002\066\255\066\255\
\008\002\104\002\128\000\090\002\051\255\025\002\000\000\000\000\
\000\000\126\002\130\002\129\002\135\002\118\002\000\000\155\255\
\000\000\140\002\000\000\022\002\000\000\137\002\000\000\000\000\
\238\255\000\000\066\255\128\000\038\255\029\002\066\255\142\002\
\085\255\043\002\000\000\000\000\043\002\102\002\050\002\043\002\
\150\002\155\002\155\000\153\002\154\002\114\002\153\002\152\002\
\156\002\108\001\000\000\105\255\157\002\209\001\216\001\066\255\
\156\255\209\001\000\000\153\002\209\001\209\001\209\001\209\001\
\000\000\043\002\043\002\159\002\066\255\098\002\128\002\095\002\
\131\002\000\000\000\000\000\000\158\002\170\001\006\002\160\002\
\161\002\025\002\145\002\149\002\151\002\066\255\066\255\025\002\
\000\000\000\000\066\255\186\001\133\002\128\000\000\000\000\000\
\000\000\000\000\066\255\066\255\102\002\000\000\162\002\066\255\
\171\002\163\002\000\000\000\000\000\000\066\255\000\000\254\000\
\254\000\172\002\134\002\164\002\066\255\136\002\168\002\109\001\
\074\000\175\002\180\002\000\000\000\000\100\255\181\002\127\255\
\173\002\174\002\116\000\176\002\184\002\185\002\186\002\177\002\
\182\002\015\000\189\002\190\002\192\002\214\001\127\255\184\002\
\193\002\132\000\000\000\000\000\079\000\079\000\000\000\000\000\
\001\000\081\002\066\255\043\002\043\002\043\002\191\002\066\255\
\155\255\025\002\088\000\038\255\025\002\000\000\128\000\055\002\
\194\002\161\002\195\002\000\000\182\255\000\000\102\002\102\002\
\066\255\078\002\000\000\075\000\102\002\000\000\000\000\075\000\
\156\002\196\002\102\002\066\255\075\000\108\001\000\000\132\000\
\000\000\209\001\000\000\199\002\198\002\201\002\127\255\184\002\
\202\002\209\001\203\002\000\000\000\000\204\002\184\002\127\255\
\184\002\000\000\000\000\000\000\000\000\205\002\206\002\000\000\
\166\002\000\000\209\001\096\002\000\000\143\002\000\000\000\000\
\219\001\207\002\161\002\000\000\197\002\161\002\000\000\025\002\
\191\002\000\000\210\002\102\002\179\002\102\002\066\255\187\002\
\000\000\066\255\000\000\102\002\208\002\211\002\000\000\000\000\
\184\002\215\002\000\000\214\002\218\002\000\000\000\000\000\000\
\000\000\219\002\220\002\221\002\184\002\000\000\043\002\105\002\
\043\002\043\002\222\002\191\002\025\002\088\000\000\000\186\001\
\066\255\102\002\043\002\102\002\156\002\000\000\223\002\000\000\
\213\002\000\000\000\000\184\002\000\000\224\002\000\000\066\255\
\000\000\000\000\025\002\000\000\000\000\161\002\000\000\120\000\
\183\002\102\002\000\000\000\000\000\000\184\002\227\002\000\000\
\100\002\161\002\000\000\186\001\228\002\000\000\043\002\229\002\
\000\000\000\000\000\000"
let yyrindex = "\000\000\
\209\002\213\003\000\000\000\000\000\000\000\000\000\000\226\002\
\000\000\000\000\000\000\000\000\226\002\000\000\000\000\000\000\
\000\000\182\001\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\212\002\000\000\000\000\000\000\231\002\
\000\000\175\000\000\000\147\255\000\000\000\000\000\000\236\003\
\011\000\000\000\000\000\012\000\000\000\237\003\000\000\000\000\
\000\000\000\000\160\000\216\002\000\000\216\002\000\000\000\000\
\000\000\000\000\000\000\000\000\217\002\000\000\000\000\235\002\
\000\000\000\000\000\000\018\255\090\255\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\231\002\000\000\
\225\002\000\000\000\000\000\000\000\000\000\000\000\000\239\003\
\237\002\229\255\000\000\000\000\000\000\168\000\000\000\000\000\
\000\000\231\002\238\002\000\000\000\000\212\002\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\239\002\000\000\000\000\000\000\000\000\009\000\
\000\000\000\000\000\000\000\000\000\000\000\000\009\000\226\002\
\000\000\009\000\000\000\000\000\000\000\235\002\116\001\000\000\
\000\000\000\000\000\000\000\000\226\002\000\000\239\002\009\000\
\239\002\239\002\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\030\255\178\002\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\147\255\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\212\002\242\002\000\000\000\000\237\002\000\000\174\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\232\002\000\000\000\000\067\000\249\000\000\000\201\000\019\001\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\240\002\
\000\000\000\000\009\000\000\000\009\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\230\002\000\000\
\000\000\000\000\000\000\000\000\000\000\140\000\000\000\000\000\
\048\001\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\009\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\212\002\000\000\022\000\000\000\000\000\000\000\
\000\000\041\255\067\255\000\000\233\002\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\212\002\000\000\000\000\231\002\000\000\
\000\000\009\000\000\000\000\000\009\000\088\255\000\000\009\000\
\245\002\248\002\000\000\230\002\072\000\000\000\234\002\000\000\
\116\001\235\002\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\141\001\000\000\000\000\000\000\000\000\
\000\000\009\000\009\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\082\000\000\000\000\000\113\000\127\000\000\000\231\002\000\000\
\000\000\000\000\238\002\000\000\060\255\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\039\001\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\250\002\000\000\000\000\000\000\251\002\000\000\239\002\
\000\000\000\000\000\000\000\000\252\002\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\239\002\252\002\
\000\000\000\000\000\000\000\000\116\255\201\255\000\000\000\000\
\182\001\000\000\000\000\009\000\200\002\009\000\022\000\000\000\
\000\000\000\000\030\000\000\000\000\000\000\000\212\002\028\000\
\000\000\109\000\000\000\000\000\000\000\000\000\080\001\111\001\
\000\000\159\001\000\000\216\002\145\255\000\000\000\000\185\001\
\031\255\000\000\204\255\000\000\253\002\235\002\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\252\002\
\000\000\000\000\000\000\000\000\000\000\000\000\252\002\239\002\
\252\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\176\000\000\000\000\000\226\000\000\000\000\000\
\022\000\000\000\000\000\169\255\176\255\195\000\000\000\000\000\
\000\000\000\000\000\000\224\000\000\000\000\000\000\000\000\000\
\252\002\000\000\000\000\000\000\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\252\002\000\000\009\000\000\000\
\009\000\009\000\034\000\022\000\000\000\030\000\000\000\000\000\
\000\000\136\001\009\000\103\001\055\001\000\000\000\000\000\000\
\000\000\000\000\000\000\252\002\000\000\000\000\000\000\000\000\
\000\000\000\000\000\000\000\000\000\000\229\000\000\000\236\002\
\000\000\224\255\000\000\000\000\000\000\252\002\000\000\000\000\
\000\000\057\001\000\000\000\000\000\000\000\000\009\000\000\000\
\000\000\000\000\000\000"
let yygindex = "\000\000\
\000\000\254\255\000\000\253\255\143\003\129\255\225\255\023\000\
\137\255\130\003\249\255\132\255\135\255\184\255\106\002\009\003\
\000\000\106\003\241\002\183\255\231\255\243\002\233\255\025\254\
\000\000\199\254\184\003\000\000\158\255\170\002\080\003\133\254\
\122\254\103\002\000\000\000\000\085\003\000\003\000\000\000\000\
\049\255\234\254\032\000\133\003\173\255\236\254\178\254\092\003\
\000\000\000\000\071\001\000\000"
let yytablesize = 1049
let yytable = "\025\000\
\040\000\052\000\214\000\194\000\102\000\092\000\062\000\063\000\
\020\000\100\000\194\000\197\000\239\000\103\000\238\000\246\000\
\007\001\248\000\249\000\051\001\119\000\104\000\121\000\081\000\
\188\000\189\000\120\001\094\000\228\001\108\000\049\000\081\000\
\130\000\106\000\172\001\224\001\029\000\145\001\253\000\079\000\
\040\002\061\000\130\000\061\000\143\000\047\000\033\000\026\000\
\130\000\034\000\124\000\130\000\087\001\006\001\125\000\035\000\
\027\000\088\001\036\000\028\000\124\000\080\000\069\000\032\000\
\130\000\215\001\045\000\032\000\142\000\064\000\033\000\037\000\
\029\000\034\000\033\000\144\000\056\002\034\000\069\000\035\000\
\037\001\086\000\036\000\035\000\130\000\061\000\036\000\183\000\
\123\000\141\000\126\000\038\000\030\000\191\000\130\000\037\000\
\048\000\029\000\122\001\037\000\194\001\039\000\197\000\198\000\
\199\000\200\000\052\001\124\001\103\000\015\002\210\000\064\000\
\108\000\209\000\041\000\038\000\151\000\212\000\151\000\038\000\
\123\000\253\001\126\000\070\001\217\000\039\000\101\000\220\000\
\002\002\039\000\004\002\221\000\126\000\076\001\003\001\039\002\
\006\001\081\001\051\000\002\001\222\000\247\000\090\001\004\001\
\037\002\122\000\061\001\062\001\063\001\064\001\014\001\061\001\
\062\001\063\001\064\001\042\000\089\000\143\001\034\000\151\000\
\083\001\081\000\242\001\244\000\151\000\151\000\090\000\144\001\
\191\001\034\000\023\002\068\000\081\000\248\001\047\000\087\000\
\107\000\122\000\067\000\091\000\020\001\255\001\030\002\032\000\
\076\000\194\000\025\001\068\000\108\000\109\000\033\000\030\001\
\048\000\034\000\067\000\147\001\148\001\149\001\150\001\035\000\
\059\000\152\000\036\000\152\000\143\000\047\002\143\000\143\000\
\035\001\147\000\036\001\038\001\039\001\105\001\041\002\037\000\
\056\000\057\000\163\001\053\000\148\000\149\000\150\000\053\002\
\170\001\107\000\066\000\115\001\100\000\109\000\118\001\109\000\
\054\000\151\000\058\000\038\000\143\000\055\000\122\001\235\001\
\122\001\152\000\066\000\146\001\152\000\039\000\143\000\074\001\
\056\000\152\000\152\000\078\001\079\001\109\000\044\002\153\000\
\196\001\100\001\225\000\103\001\154\000\077\000\106\000\107\000\
\181\000\071\000\226\000\194\000\197\000\169\001\182\000\214\001\
\208\001\171\001\058\000\108\000\109\000\217\001\006\001\101\001\
\104\000\081\000\209\001\104\000\182\001\183\001\094\000\108\001\
\108\000\094\000\109\001\191\000\106\000\111\001\051\000\106\000\
\020\000\104\000\227\001\104\000\098\000\230\001\099\000\094\000\
\241\000\094\000\242\000\108\000\142\001\106\000\104\000\106\000\
\105\000\194\000\194\000\197\000\094\000\070\000\108\000\151\001\
\152\001\154\001\106\000\045\000\083\001\045\000\045\000\045\000\
\045\000\136\000\073\000\134\000\020\000\045\000\101\000\054\000\
\003\002\221\000\168\001\136\000\086\000\045\000\045\000\136\000\
\045\000\074\000\222\000\045\000\045\000\006\001\043\001\175\001\
\176\001\008\002\075\000\045\000\178\001\086\000\044\001\086\000\
\045\000\045\000\181\001\165\001\078\000\045\000\050\000\103\000\
\014\002\187\001\086\000\108\000\136\000\136\000\108\000\199\001\
\092\001\045\000\051\002\106\000\107\000\061\001\062\001\200\001\
\103\000\101\000\103\000\105\000\101\000\064\000\108\000\052\000\
\108\000\226\001\229\001\084\000\156\000\103\000\156\000\065\000\
\093\000\108\000\101\000\066\000\240\001\038\002\097\000\220\001\
\241\001\221\001\222\001\223\001\225\001\245\001\053\000\101\000\
\081\000\104\000\038\000\038\000\038\000\110\000\051\000\038\000\
\114\001\236\001\081\000\050\002\081\000\238\001\113\000\047\000\
\113\000\047\000\087\000\047\000\047\000\202\000\203\000\156\000\
\244\001\047\000\156\000\156\000\156\000\156\000\111\000\252\001\
\038\000\047\000\047\000\087\000\047\000\087\000\112\000\047\000\
\047\000\059\000\114\000\059\000\059\000\059\000\059\000\047\000\
\087\000\113\000\115\000\059\000\047\000\047\000\073\000\116\000\
\073\000\047\000\117\000\059\000\059\000\246\001\059\000\247\001\
\145\000\118\000\145\000\145\000\107\000\047\000\120\000\100\000\
\050\001\059\000\122\000\018\002\127\000\128\000\020\002\059\000\
\126\000\123\000\065\000\059\000\124\000\107\000\066\000\107\000\
\100\000\056\000\100\000\056\000\056\000\056\000\056\000\059\000\
\145\000\129\000\107\000\056\000\031\002\100\000\033\002\034\002\
\185\000\131\000\145\000\056\000\056\000\042\002\056\000\130\000\
\043\002\134\000\135\000\058\000\137\000\058\000\058\000\058\000\
\058\000\056\000\136\000\138\000\049\002\058\000\056\000\056\000\
\106\000\107\000\139\000\056\000\140\000\058\000\058\000\051\000\
\058\000\051\000\051\000\051\000\051\000\108\000\109\000\056\000\
\160\000\051\000\160\000\058\000\058\002\145\000\106\000\107\000\
\130\000\051\000\051\000\105\000\051\000\058\000\034\001\146\000\
\173\000\171\000\130\000\108\000\109\000\174\000\130\000\051\000\
\130\000\058\000\177\000\130\000\105\000\175\000\105\000\186\000\
\054\000\051\000\054\000\054\000\054\000\054\000\176\000\190\000\
\130\000\105\000\054\000\160\000\184\000\051\000\160\000\160\000\
\160\000\160\000\054\000\054\000\187\000\054\000\195\000\144\000\
\201\000\144\000\144\000\196\000\130\000\064\000\190\001\050\000\
\054\000\050\000\050\000\050\000\050\000\130\000\130\000\121\001\
\065\000\050\000\054\000\066\000\066\000\041\000\204\000\130\000\
\208\000\050\000\050\000\130\000\050\000\219\000\054\000\144\000\
\052\000\211\000\052\000\052\000\052\000\052\000\034\000\050\000\
\060\001\144\000\052\000\061\001\062\001\063\001\064\001\213\000\
\034\000\050\000\052\000\052\000\034\000\052\000\223\000\053\000\
\216\000\053\000\053\000\053\000\053\000\050\000\032\000\250\000\
\052\000\053\000\160\001\240\000\252\000\033\000\243\000\254\000\
\034\000\053\000\052\000\255\000\053\000\000\001\035\000\128\000\
\001\001\036\000\034\000\005\001\080\000\009\001\052\000\053\000\
\008\001\128\000\106\000\107\000\034\000\010\001\037\000\128\000\
\034\000\053\000\128\000\013\001\016\001\017\001\019\001\108\000\
\109\000\128\000\228\000\022\001\023\001\053\000\229\000\128\000\
\213\001\128\001\038\000\129\001\230\000\011\002\231\000\232\000\
\130\001\131\001\132\001\133\001\039\000\034\000\034\000\134\001\
\024\001\135\001\026\001\128\000\136\001\027\001\106\000\107\000\
\137\001\028\001\031\001\106\000\107\000\128\000\106\000\107\000\
\029\001\138\001\107\000\108\000\109\000\139\001\140\001\032\001\
\108\000\109\000\033\001\108\000\109\000\040\001\161\001\041\001\
\233\000\158\000\042\001\234\000\235\000\236\000\237\000\047\001\
\159\000\160\000\161\000\162\000\215\000\163\000\141\001\164\000\
\080\001\165\000\158\000\048\001\166\000\049\001\106\000\107\000\
\098\001\159\000\160\000\161\000\162\000\053\001\163\000\104\001\
\164\000\167\000\165\000\108\000\109\000\166\000\106\000\107\000\
\001\000\002\000\003\000\004\000\005\000\106\000\107\000\054\001\
\059\001\055\001\167\000\108\000\109\000\168\000\056\001\057\001\
\169\000\232\001\108\000\109\000\058\001\007\000\008\000\009\000\
\010\000\011\000\106\000\107\000\012\000\013\000\168\000\106\000\
\107\000\169\000\014\000\015\000\016\000\066\001\065\001\108\000\
\109\000\067\001\068\001\069\001\108\000\109\000\017\000\018\000\
\019\000\239\001\077\001\106\000\107\000\071\001\106\000\107\000\
\110\001\106\000\107\000\020\000\021\000\072\001\073\001\022\000\
\108\000\109\000\086\001\108\000\109\000\245\000\108\000\109\000\
\106\000\107\000\082\001\219\001\106\000\107\000\106\000\107\000\
\009\002\091\001\092\001\093\001\055\002\108\000\109\000\094\001\
\095\001\108\000\109\000\108\000\109\000\155\001\097\001\099\001\
\061\001\062\001\063\001\064\001\032\002\106\001\112\001\061\001\
\062\001\063\001\064\001\113\001\043\001\116\001\117\001\119\001\
\156\001\157\001\125\001\158\001\225\000\153\001\006\001\164\001\
\165\001\159\001\173\001\162\001\167\001\179\001\180\001\192\001\
\184\001\186\001\185\001\189\001\177\001\188\001\193\001\195\001\
\202\001\198\001\197\001\204\001\205\001\201\001\206\001\210\001\
\211\001\207\001\212\001\216\001\233\001\088\001\007\002\234\001\
\243\001\249\001\250\001\251\001\254\001\000\002\001\002\005\002\
\006\002\012\002\010\002\016\002\020\000\022\002\021\002\013\002\
\017\002\024\002\025\002\019\002\026\002\027\002\028\002\029\002\
\046\002\045\002\048\002\052\002\035\002\054\002\057\002\059\002\
\040\000\083\000\079\000\205\000\200\000\137\000\192\000\117\000\
\020\000\036\000\083\000\071\000\115\000\031\000\042\000\139\000\
\034\000\118\000\141\000\074\000\132\000\184\000\183\000\076\000\
\205\000\218\000\218\001\034\000\075\001\251\000\172\000\174\001\
\021\001\018\001\224\000\020\000\015\001\231\001\102\001\096\001\
\000\000\107\001\034\000\000\000\000\000\000\000\000\000\034\000\
\064\000"
let yycheck = "\002\000\
\004\000\009\000\122\000\102\000\036\000\029\000\014\000\015\000\
\000\000\035\000\000\000\000\000\137\000\037\000\136\000\143\000\
\000\000\145\000\146\000\227\000\052\000\000\000\054\000\027\000\
\098\000\099\000\049\001\000\000\163\001\000\000\008\000\035\000\
\002\001\000\000\092\001\159\001\007\001\058\001\158\000\002\001\
\016\002\001\001\012\001\003\001\002\001\012\001\009\001\009\001\
\018\001\012\001\033\001\021\001\002\001\006\001\058\000\018\001\
\018\001\007\001\021\001\021\001\043\001\024\001\003\001\002\001\
\034\001\144\001\000\000\002\001\072\000\003\001\009\001\034\001\
\034\001\012\001\009\001\033\001\052\002\012\001\019\001\018\001\
\033\001\000\000\021\001\018\001\054\001\045\001\021\001\091\000\
\001\001\028\001\001\001\054\001\054\001\101\000\064\001\034\001\
\012\001\068\001\050\001\034\001\001\001\064\001\106\000\107\000\
\108\000\109\000\228\000\003\001\000\000\233\001\118\000\045\001\
\000\000\117\000\012\001\054\001\001\001\120\000\003\001\054\001\
\033\001\200\001\033\001\245\000\127\000\064\001\000\000\130\000\
\207\001\064\001\209\001\008\001\043\001\003\001\166\000\014\002\
\006\001\001\001\012\001\165\000\017\001\144\000\006\001\167\000\
\012\002\001\001\047\001\048\001\049\001\050\001\176\000\047\001\
\048\001\049\001\050\001\053\001\002\001\002\001\012\001\044\001\
\003\001\165\000\185\001\141\000\049\001\050\001\012\001\012\001\
\120\001\023\001\249\001\003\001\176\000\194\001\000\000\000\000\
\026\001\033\001\003\001\025\001\184\000\202\001\005\002\002\001\
\067\001\028\001\190\000\019\001\038\001\039\001\009\001\195\000\
\012\001\012\001\019\001\061\001\062\001\063\001\064\001\018\001\
\000\000\001\001\021\001\003\001\001\001\028\002\003\001\004\001\
\211\000\001\001\213\000\215\000\216\000\031\001\016\002\034\001\
\036\001\037\001\082\001\012\001\012\001\013\001\014\001\046\002\
\088\001\000\000\003\001\044\001\000\000\001\001\047\001\003\001\
\012\001\025\001\054\001\054\001\033\001\012\001\190\001\058\001\
\192\001\033\001\019\001\060\001\044\001\064\001\043\001\250\000\
\000\000\049\001\050\001\255\000\000\001\025\001\021\002\047\001\
\128\001\020\001\009\001\029\001\052\001\032\001\025\001\026\001\
\002\001\009\001\017\001\001\001\001\001\087\001\008\001\143\001\
\002\001\091\001\000\000\038\001\039\001\146\001\006\001\027\001\
\003\001\029\001\012\001\006\001\112\001\113\001\003\001\034\001\
\003\001\006\001\037\001\043\001\003\001\040\001\000\000\006\001\
\032\001\020\001\162\001\022\001\002\001\165\001\004\001\020\001\
\012\001\022\001\014\001\022\001\056\001\020\001\033\001\022\001\
\000\000\047\001\048\001\048\001\033\001\012\001\033\001\066\001\
\067\001\069\001\033\001\001\001\167\001\003\001\004\001\005\001\
\006\001\002\001\012\001\002\001\068\001\011\001\004\001\000\000\
\208\001\008\001\086\001\012\001\003\001\019\001\020\001\016\001\
\022\001\012\001\017\001\025\001\026\001\006\001\004\001\099\001\
\100\001\219\001\012\001\033\001\104\001\020\001\012\001\022\001\
\038\001\039\001\110\001\020\001\012\001\043\001\000\000\003\001\
\232\001\117\001\033\001\003\001\045\001\046\001\006\001\004\001\
\001\001\055\001\003\001\025\001\026\001\047\001\048\001\012\001\
\020\001\003\001\022\001\008\001\006\001\002\001\022\001\000\000\
\038\001\161\001\164\001\012\001\001\001\033\001\003\001\012\001\
\012\001\033\001\020\001\016\001\180\001\013\002\012\001\155\001\
\184\001\156\001\157\001\158\001\160\001\189\001\000\000\033\001\
\164\001\012\001\003\001\004\001\005\001\051\001\012\001\008\001\
\014\001\173\001\003\001\035\002\005\001\177\001\001\001\001\001\
\003\001\003\001\003\001\005\001\006\001\111\000\112\000\044\001\
\188\001\011\001\047\001\048\001\049\001\050\001\047\001\199\001\
\033\001\019\001\020\001\020\001\022\001\022\001\047\001\025\001\
\026\001\001\001\001\001\003\001\004\001\005\001\006\001\033\001\
\033\001\048\001\002\001\011\001\038\001\039\001\020\001\008\001\
\022\001\043\001\007\001\019\001\020\001\190\001\022\001\192\001\
\001\001\001\001\003\001\004\001\003\001\055\001\033\001\003\001\
\002\001\033\001\025\001\239\001\033\001\001\001\242\001\039\001\
\007\001\012\001\012\001\043\001\012\001\020\001\016\001\022\001\
\020\001\001\001\022\001\003\001\004\001\005\001\006\001\055\001\
\033\001\008\001\033\001\011\001\007\002\033\001\009\002\010\002\
\001\001\012\001\043\001\019\001\020\001\017\002\022\001\033\001\
\019\002\002\001\012\001\001\001\001\001\003\001\004\001\005\001\
\006\001\033\001\043\001\004\001\032\002\011\001\038\001\039\001\
\025\001\026\001\025\001\043\001\012\001\019\001\020\001\001\001\
\022\001\003\001\004\001\005\001\006\001\038\001\039\001\055\001\
\001\001\011\001\003\001\033\001\055\002\002\001\025\001\026\001\
\002\001\019\001\020\001\003\001\022\001\043\001\033\001\002\001\
\002\001\008\001\012\001\038\001\039\001\019\001\016\001\033\001\
\018\001\055\001\022\001\021\001\020\001\017\001\022\001\003\001\
\001\001\043\001\003\001\004\001\005\001\006\001\023\001\019\001\
\034\001\033\001\011\001\044\001\025\001\055\001\047\001\048\001\
\049\001\050\001\019\001\020\001\008\001\022\001\025\001\001\001\
\014\001\003\001\004\001\012\001\054\001\002\001\002\001\001\001\
\033\001\003\001\004\001\005\001\006\001\002\001\064\001\012\001\
\012\001\011\001\043\001\016\001\016\001\012\001\012\001\012\001\
\012\001\019\001\020\001\016\001\022\001\012\001\055\001\033\001\
\001\001\033\001\003\001\004\001\005\001\006\001\002\001\033\001\
\044\001\043\001\011\001\047\001\048\001\049\001\050\001\033\001\
\012\001\043\001\019\001\020\001\016\001\022\001\003\001\001\001\
\054\001\003\001\004\001\005\001\006\001\055\001\002\001\068\001\
\033\001\011\001\001\001\012\001\007\001\009\001\008\001\012\001\
\012\001\019\001\043\001\002\001\022\001\002\001\018\001\002\001\
\012\001\021\001\002\001\012\001\024\001\003\001\055\001\033\001\
\012\001\012\001\025\001\026\001\012\001\012\001\034\001\018\001\
\016\001\043\001\021\001\012\001\001\001\003\001\012\001\038\001\
\039\001\028\001\002\001\012\001\003\001\055\001\006\001\034\001\
\003\001\002\001\054\001\004\001\012\001\003\001\014\001\015\001\
\009\001\010\001\011\001\012\001\064\001\045\001\046\001\016\001\
\005\001\018\001\005\001\054\001\021\001\020\001\025\001\026\001\
\025\001\022\001\002\001\025\001\026\001\064\001\025\001\026\001\
\023\001\034\001\026\001\038\001\039\001\038\001\039\001\003\001\
\038\001\039\001\001\001\038\001\039\001\033\001\001\001\012\001\
\056\001\002\001\012\001\059\001\060\001\061\001\062\001\003\001\
\009\001\010\001\011\001\012\001\055\001\014\001\063\001\016\001\
\017\001\018\001\002\001\012\001\021\001\012\001\025\001\026\001\
\011\001\009\001\010\001\011\001\012\001\012\001\014\001\011\001\
\016\001\034\001\018\001\038\001\039\001\021\001\025\001\026\001\
\001\000\002\000\003\000\004\000\005\000\025\001\026\001\002\001\
\012\001\002\001\034\001\038\001\039\001\054\001\002\001\002\001\
\057\001\011\001\038\001\039\001\002\001\027\001\028\001\029\001\
\030\001\031\001\025\001\026\001\034\001\035\001\054\001\025\001\
\026\001\057\001\040\001\041\001\042\001\033\001\005\001\038\001\
\039\001\033\001\012\001\007\001\038\001\039\001\052\001\053\001\
\054\001\020\001\008\001\025\001\026\001\003\001\025\001\026\001\
\055\001\025\001\026\001\065\001\066\001\003\001\003\001\069\001\
\038\001\039\001\025\001\038\001\039\001\043\001\038\001\039\001\
\025\001\026\001\019\001\043\001\025\001\026\001\025\001\026\001\
\033\001\004\001\001\001\003\001\033\001\038\001\039\001\001\001\
\019\001\038\001\039\001\038\001\039\001\044\001\003\001\007\001\
\047\001\048\001\049\001\050\001\044\001\008\001\001\001\047\001\
\048\001\049\001\050\001\001\001\004\001\004\001\045\001\008\001\
\033\001\067\001\006\001\033\001\009\001\007\001\006\001\023\001\
\020\001\012\001\038\001\012\001\022\001\003\001\012\001\001\001\
\005\001\014\001\045\001\012\001\019\001\046\001\003\001\003\001\
\001\001\012\001\014\001\003\001\003\001\014\001\014\001\003\001\
\003\001\012\001\003\001\003\001\003\001\007\001\033\001\005\001\
\005\001\003\001\005\001\003\001\003\001\003\001\003\001\003\001\
\003\001\003\001\068\001\002\001\000\000\003\001\007\001\019\001\
\038\001\003\001\005\001\033\001\003\001\003\001\003\001\003\001\
\012\001\003\001\003\001\045\001\007\001\003\001\003\001\003\001\
\007\001\003\001\023\001\000\000\000\000\003\001\000\000\003\001\
\032\001\003\001\005\001\019\001\003\001\068\001\007\001\003\001\
\033\001\033\001\003\001\020\001\003\001\003\001\003\001\023\001\
\114\000\128\000\153\001\007\001\252\000\156\000\079\000\094\001\
\185\000\181\000\134\000\068\001\177\000\167\001\028\001\016\001\
\255\255\033\001\045\001\255\255\255\255\255\255\255\255\046\001\
\045\001"
let yynames_const = "\
COMMA\000\
LPAREN\000\
RPAREN\000\
LBRACKET\000\
RBRACKET\000\
BAR\000\
SEMI\000\
COLON\000\
NEW\000\
OUT\000\
IN\000\
REPL\000\
LEQ\000\
IF\000\
THEN\000\
ELSE\000\
FIND\000\
ORFIND\000\
SUCHTHAT\000\
DEFINED\000\
EQUAL\000\
DIFF\000\
FUN\000\
FORALL\000\
PARAM\000\
PROBA\000\
TYPE\000\
PROCESS\000\
DOT\000\
EOF\000\
LET\000\
QUERY\000\
SECRET\000\
SECRET1\000\
AND\000\
OR\000\
CONST\000\
CHANNEL\000\
EQUIV\000\
EQUIVLEFT\000\
EQUIVRIGHT\000\
MAPSTO\000\
DEF\000\
MUL\000\
DIV\000\
ADD\000\
SUB\000\
POWER\000\
SET\000\
COLLISION\000\
EVENT\000\
IMPLIES\000\
TIME\000\
YIELD\000\
OTHERUSES\000\
MAXLENGTH\000\
LENGTH\000\
MAX\000\
COUNT\000\
NEWCHANNEL\000\
INJ\000\
DEFINE\000\
EXPAND\000\
LBRACE\000\
RBRACE\000\
PROOF\000\
"
let yynames_block = "\
IDENT\000\
STRING\000\
INT\000\
FLOAT\000\
"
let yyact = [|
(fun _ -> failwith "parser")
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 8 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 6 : 'identlist) in
let _7 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _8 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _10 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 165 "parser.mly"
( (FunDecl(_2, _4, _7, _8)) :: _10 )
# 866 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 167 "parser.mly"
( (EventDecl(_2, [])) :: _4 )
# 874 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'identlist) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 169 "parser.mly"
( (EventDecl(_2, _4)) :: _7 )
# 883 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'vartypelist) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 171 "parser.mly"
( (Statement(_2, _4)) :: _6 )
# 892 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'process) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 173 "parser.mly"
( (PDef(_2,_4)) :: _6 )
# 901 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 175 "parser.mly"
( (Setting(_2,S _4)) :: _6 )
# 910 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : int) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 177 "parser.mly"
( (Setting(_2,I _4)) :: _6 )
# 919 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'queryseq) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 179 "parser.mly"
( (Query(_2)) :: _4 )
# 927 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : 'neidentlist) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 181 "parser.mly"
( (List.map (fun x -> (ParamDecl(x, _3))) _2) @ _5 )
# 936 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 183 "parser.mly"
( (ProbabilityDecl(_2)) :: _4 )
# 944 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'neidentlist) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 185 "parser.mly"
( (List.map (fun x -> (ConstDecl(x,_4))) _2) @ _6 )
# 953 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'neidentlist) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 187 "parser.mly"
( (List.map (fun x -> (ChannelDecl(x))) _2) @ _4 )
# 961 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 189 "parser.mly"
( (TypeDecl(_2,_3)) :: _5 )
# 970 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 7 : 'eqmember) in
let _4 = (Parsing.peek_val __caml_parser_env 5 : 'probaf) in
let _6 = (Parsing.peek_val __caml_parser_env 3 : 'optpriority) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : 'eqmember) in
let _9 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 191 "parser.mly"
( (EqStatement(_2, _7, _4, _6)) :: _9 )
# 981 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 10 : 'newlist) in
let _4 = (Parsing.peek_val __caml_parser_env 8 : 'vartypelist) in
let _6 = (Parsing.peek_val __caml_parser_env 6 : Ptree.term_e) in
let _8 = (Parsing.peek_val __caml_parser_env 4 : 'probaf) in
let _10 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _12 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 193 "parser.mly"
( (Collision(_2, _4, _6, _8, _10)) :: _12 )
# 993 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 7 : 'newlist) in
let _3 = (Parsing.peek_val __caml_parser_env 6 : Ptree.term_e) in
let _5 = (Parsing.peek_val __caml_parser_env 4 : 'probaf) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _9 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 195 "parser.mly"
( (Collision(_2, [], _3, _5, _7)) :: _9 )
# 1004 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 7 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 5 : 'identlist) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : Ptree.decl list) in
let _9 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 197 "parser.mly"
( (Define(_2, _4, _7)) :: _9 )
# 1014 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'identlist) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 199 "parser.mly"
( (Expand(_2, _4)) :: _7 )
# 1023 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'proof) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Ptree.decl list) in
Obj.repr(
# 201 "parser.mly"
( (Proofinfo(_3))::_5 )
# 1031 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
Obj.repr(
# 203 "parser.mly"
( [] )
# 1037 "parser.ml"
: Ptree.decl list))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 208 "parser.mly"
( _1 )
# 1044 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 210 "parser.mly"
( _1 )
# 1051 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 212 "parser.mly"
( string_of_int _1, parse_extent() )
# 1058 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 214 "parser.mly"
( "*", parse_extent() )
# 1064 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 216 "parser.mly"
( ".", parse_extent() )
# 1070 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 218 "parser.mly"
( "set", parse_extent() )
# 1076 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 220 "parser.mly"
( "=", parse_extent() )
# 1082 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
Obj.repr(
# 222 "parser.mly"
( ",", parse_extent() )
# 1088 "parser.ml"
: 'prooftoken))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'prooftoken) in
Obj.repr(
# 226 "parser.mly"
( [_1] )
# 1095 "parser.ml"
: 'proofcommand))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 1 : 'prooftoken) in
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'proofcommand) in
Obj.repr(
# 228 "parser.mly"
( _1 :: _2 )
# 1103 "parser.ml"
: 'proofcommand))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'proofcommand) in
Obj.repr(
# 232 "parser.mly"
( [_1] )
# 1110 "parser.ml"
: 'proof))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'proofcommand) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'proof) in
Obj.repr(
# 234 "parser.mly"
( _1 :: _3 )
# 1118 "parser.ml"
: 'proof))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'neidentlist) in
Obj.repr(
# 238 "parser.mly"
( _2 )
# 1125 "parser.ml"
: 'options))
; (fun __caml_parser_env ->
Obj.repr(
# 240 "parser.mly"
( [] )
# 1131 "parser.ml"
: 'options))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Ptree.decl list) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'process) in
Obj.repr(
# 244 "parser.mly"
( _1 , _3 )
# 1139 "parser.ml"
: Ptree.decl list * Ptree.process_e))
; (fun __caml_parser_env ->
Obj.repr(
# 248 "parser.mly"
( [] )
# 1145 "parser.ml"
: 'identlist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'neidentlist) in
Obj.repr(
# 250 "parser.mly"
( _1 )
# 1152 "parser.ml"
: 'identlist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 254 "parser.mly"
( [_1] )
# 1159 "parser.ml"
: 'neidentlist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'neidentlist) in
Obj.repr(
# 256 "parser.mly"
( _1 :: _3 )
# 1167 "parser.ml"
: 'neidentlist))
; (fun __caml_parser_env ->
Obj.repr(
# 260 "parser.mly"
( [] )
# 1173 "parser.ml"
: 'vartypelist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'nevartypelist) in
Obj.repr(
# 262 "parser.mly"
( _1 )
# 1180 "parser.ml"
: 'vartypelist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 266 "parser.mly"
( [(_1, _3)] )
# 1188 "parser.ml"
: 'nevartypelist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'nevartypelist) in
Obj.repr(
# 268 "parser.mly"
( (_1, _3) :: _5 )
# 1197 "parser.ml"
: 'nevartypelist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 272 "parser.mly"
( PFunApp (_1, _3), parse_extent() )
# 1205 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 274 "parser.mly"
( PInjEvent(_3, []), parse_extent() )
# 1212 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 276 "parser.mly"
( PInjEvent(_3, _5), parse_extent() )
# 1220 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 278 "parser.mly"
( PIdent (_1), parse_extent() )
# 1227 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 280 "parser.mly"
( PArray (_1, _3), parse_extent() )
# 1235 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 282 "parser.mly"
( match _2 with
Allow parentheses for priorities of infix operators ;
Tuples can not have one element .
Tuples cannot have one element. *)
| l -> PTuple(l), parse_extent() )
# 1245 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'findcond) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 287 "parser.mly"
( begin
match _2 with
([],t) -> PTestE(t, _4, _6)
| (def_list, t) ->
PFindE([([], def_list, t, _4)], _6, [])
end, parse_extent() )
# 1259 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : 'options) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'findlistterm) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 294 "parser.mly"
( PFindE(_3, _5, _2), parse_extent() )
# 1268 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : 'pattern) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : Ptree.term_e) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 296 "parser.mly"
( PLetE(_2,_4,_6,Some _8), parse_extent() )
# 1278 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'pattern) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 298 "parser.mly"
( PLetE(_2,_4,_6,None), parse_extent() )
# 1287 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 300 "parser.mly"
( PResE(_2, _4, _6), parse_extent() )
# 1296 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 302 "parser.mly"
( PEventE(_2), parse_extent() )
# 1303 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 304 "parser.mly"
( PEqual(_1, _3), parse_extent() )
# 1311 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 306 "parser.mly"
( PDiff(_1, _3), parse_extent() )
# 1319 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 308 "parser.mly"
( POr(_1, _3), parse_extent() )
# 1327 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 310 "parser.mly"
( PAnd(_1, _3), parse_extent() )
# 1335 "parser.ml"
: Ptree.term_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'termseq) in
Obj.repr(
# 314 "parser.mly"
( _1,_3 )
# 1343 "parser.ml"
: 'vref))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 316 "parser.mly"
( _1, [] )
# 1350 "parser.ml"
: 'vref))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'vreflist) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'vref) in
Obj.repr(
# 320 "parser.mly"
( None )
# 1358 "parser.ml"
: 'otherusescond))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'vref) in
Obj.repr(
# 322 "parser.mly"
( None )
# 1365 "parser.ml"
: 'otherusescond))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'vref) in
Obj.repr(
# 326 "parser.mly"
( [_1] )
# 1372 "parser.ml"
: 'vreflist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'vref) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'vreflist) in
Obj.repr(
# 328 "parser.mly"
( _1::_3 )
# 1380 "parser.ml"
: 'vreflist))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 5 : 'vreflist) in
let _6 = (Parsing.peek_val __caml_parser_env 2 : 'otherusescond) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 332 "parser.mly"
( (_3, _8) )
# 1389 "parser.ml"
: 'findcond1))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'vreflist) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'otherusescond) in
Obj.repr(
# 334 "parser.mly"
( (_3, cst_true) )
# 1397 "parser.ml"
: 'findcond1))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'vreflist) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 336 "parser.mly"
( (_3, _6) )
# 1405 "parser.ml"
: 'findcond1))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'vreflist) in
Obj.repr(
# 338 "parser.mly"
( (_3, cst_true) )
# 1412 "parser.ml"
: 'findcond1))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'findcond1) in
Obj.repr(
# 342 "parser.mly"
( _1 )
# 1419 "parser.ml"
: 'findcond))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 344 "parser.mly"
( ([], _1) )
# 1426 "parser.ml"
: 'findcond))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'findcond1) in
Obj.repr(
# 346 "parser.mly"
( _2 )
# 1433 "parser.ml"
: 'findcond))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : 'tidentseq) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'findcond) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 350 "parser.mly"
( let (def_list, t) = _3 in
(_1, def_list, t, _5) )
# 1443 "parser.ml"
: 'findoneterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'findoneterm) in
Obj.repr(
# 355 "parser.mly"
( [_1] )
# 1450 "parser.ml"
: 'findlistterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'findoneterm) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'findlistterm) in
Obj.repr(
# 357 "parser.mly"
( _1 :: _3 )
# 1458 "parser.ml"
: 'findlistterm))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 361 "parser.mly"
( [_1,_3] )
# 1466 "parser.ml"
: 'netidentseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'netidentseq) in
Obj.repr(
# 363 "parser.mly"
( (_1,_3)::_5 )
# 1475 "parser.ml"
: 'netidentseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'netidentseq) in
Obj.repr(
# 367 "parser.mly"
( _1 )
# 1482 "parser.ml"
: 'tidentseq))
; (fun __caml_parser_env ->
Obj.repr(
# 369 "parser.mly"
( [] )
# 1488 "parser.ml"
: 'tidentseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'netermseq) in
Obj.repr(
# 373 "parser.mly"
( _1 :: _3 )
# 1496 "parser.ml"
: 'netermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 375 "parser.mly"
( [_1] )
# 1503 "parser.ml"
: 'netermseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'netermseq) in
Obj.repr(
# 379 "parser.mly"
( _1 )
# 1510 "parser.ml"
: 'termseq))
; (fun __caml_parser_env ->
Obj.repr(
# 381 "parser.mly"
( [] )
# 1516 "parser.ml"
: 'termseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'process) in
Obj.repr(
# 385 "parser.mly"
( _2 )
# 1523 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 387 "parser.mly"
( PLetDef _1, parse_extent() )
# 1530 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 389 "parser.mly"
( PRepl (new_repl_occ(),None,_2,_3), parse_extent() )
# 1538 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 391 "parser.mly"
( PRepl (new_repl_occ(),Some _2,_4,_5), parse_extent() )
# 1547 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 393 "parser.mly"
( let x = _1 in
if x = 0 then PNil, parse_extent() else
input_error ("The only integer in a process is 0 for the nil process") (parse_extent()) )
# 1556 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'optprocess) in
Obj.repr(
# 397 "parser.mly"
( PRestr(_2, _4, _5), parse_extent() )
# 1565 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : 'findcond) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : 'process) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'optelse) in
Obj.repr(
# 399 "parser.mly"
( match _2 with
([], t) -> PTest(t, _4, _5), parse_extent()
| (def_list, t) ->
PFind([([], def_list, t, _4)], _5, []), parse_extent() )
# 1577 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'findlistproc) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'optelse) in
Obj.repr(
# 404 "parser.mly"
( PFind(_3,_4,_2), parse_extent() )
# 1586 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'optprocess) in
Obj.repr(
# 406 "parser.mly"
( PEvent((PFunApp(_2, []), parse_extent()), _3), parse_extent() )
# 1594 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'termseq) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'optprocess) in
Obj.repr(
# 408 "parser.mly"
( PEvent((PFunApp(_2, _4), parse_extent()), _6), parse_extent() )
# 1603 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 410 "parser.mly"
( PLet(_2,_4,(PYield, parse_extent()),(PYield, parse_extent())), parse_extent() )
# 1611 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : 'pattern) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : Ptree.term_e) in
let _6 = (Parsing.peek_val __caml_parser_env 1 : 'process) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : 'optelse) in
Obj.repr(
# 412 "parser.mly"
( PLet(_2,_4,_6,_7), parse_extent() )
# 1621 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 4 : Ptree.term_e) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : 'optprocess) in
Obj.repr(
# 414 "parser.mly"
( PInput(_3,_5,_7), parse_extent() )
# 1630 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 4 : Ptree.term_e) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : 'optinputprocess) in
Obj.repr(
# 416 "parser.mly"
( POutput(_3,_5,_7), parse_extent() )
# 1639 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
Obj.repr(
# 418 "parser.mly"
( PYield, parse_extent() )
# 1645 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'process) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 420 "parser.mly"
( PPar(_1,_3), parse_extent() )
# 1653 "parser.ml"
: 'process))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : 'tidentseq) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : 'findcond) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 424 "parser.mly"
( let (def_list, t) = _3 in
(_1, def_list, t, _5) )
# 1663 "parser.ml"
: 'findoneproc))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'findoneproc) in
Obj.repr(
# 429 "parser.mly"
( [_1] )
# 1670 "parser.ml"
: 'findlistproc))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'findoneproc) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'findlistproc) in
Obj.repr(
# 431 "parser.mly"
( _1 :: _3 )
# 1678 "parser.ml"
: 'findlistproc))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 435 "parser.mly"
( _2 )
# 1685 "parser.ml"
: 'optprocess))
; (fun __caml_parser_env ->
Obj.repr(
# 437 "parser.mly"
( PYield, parse_extent() )
# 1691 "parser.ml"
: 'optprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 441 "parser.mly"
( _2 )
# 1698 "parser.ml"
: 'optinputprocess))
; (fun __caml_parser_env ->
Obj.repr(
# 443 "parser.mly"
( PNil, parse_extent() )
# 1704 "parser.ml"
: 'optinputprocess))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'process) in
Obj.repr(
# 447 "parser.mly"
( _2 )
# 1711 "parser.ml"
: 'optelse))
; (fun __caml_parser_env ->
Obj.repr(
# 449 "parser.mly"
( PYield, parse_extent() )
# 1717 "parser.ml"
: 'optelse))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 453 "parser.mly"
( PPatVar(_1,None), parse_extent() )
# 1724 "parser.ml"
: 'pattern))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 455 "parser.mly"
( PPatVar(_1,Some _3), parse_extent() )
# 1732 "parser.ml"
: 'pattern))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'patternseq) in
Obj.repr(
# 457 "parser.mly"
( PPatFunApp(_1,_3), parse_extent() )
# 1740 "parser.ml"
: 'pattern))
; (fun __caml_parser_env ->
    let _2 = (Parsing.peek_val __caml_parser_env 1 : 'patternseq) in
    Obj.repr(
# 459 "parser.mly"
              ( match _2 with
	    [t] -> t (* Allow parentheses for priorities of infix operators;
			Tuples cannot have one element. *)
	  | l -> PPatTuple(_2), parse_extent() )
# 1750 "parser.ml"
               : 'pattern))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 464 "parser.mly"
( PPatEqual(_2), parse_extent() )
# 1757 "parser.ml"
: 'pattern))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'pattern) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'nepatternseq) in
Obj.repr(
# 468 "parser.mly"
( _1 :: _3 )
# 1765 "parser.ml"
: 'nepatternseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'pattern) in
Obj.repr(
# 470 "parser.mly"
( [_1] )
# 1772 "parser.ml"
: 'nepatternseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'nepatternseq) in
Obj.repr(
# 474 "parser.mly"
( _1 )
# 1779 "parser.ml"
: 'patternseq))
; (fun __caml_parser_env ->
Obj.repr(
# 476 "parser.mly"
( [] )
# 1785 "parser.ml"
: 'patternseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'query) in
Obj.repr(
# 480 "parser.mly"
( [_1] )
# 1792 "parser.ml"
: 'queryseq))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'query) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'queryseq) in
Obj.repr(
# 482 "parser.mly"
( _1::_3 )
# 1800 "parser.ml"
: 'queryseq))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 486 "parser.mly"
( PQSecret _2 )
# 1807 "parser.ml"
: 'query))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 488 "parser.mly"
( PQSecret1 _2 )
# 1814 "parser.ml"
: 'query))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 5 : 'vartypelist) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 490 "parser.mly"
( PQEvent(_1, _4, _6) )
# 1823 "parser.ml"
: 'query))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Ptree.term_e) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 492 "parser.mly"
( PQEvent([], _2, _4) )
# 1831 "parser.ml"
: 'query))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'funmode) in
Obj.repr(
# 496 "parser.mly"
( [_1], parse_extent() )
# 1838 "parser.ml"
: 'eqmember))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'funmode) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'eqmember) in
Obj.repr(
# 498 "parser.mly"
( _1 :: (fst _3), parse_extent() )
# 1846 "parser.ml"
: 'eqmember))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fungroup) in
Obj.repr(
# 503 "parser.mly"
( _1,None, parse_extent() )
# 1853 "parser.ml"
: 'funmode))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : 'fungroup) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
Obj.repr(
# 505 "parser.mly"
( _1,Some _3, parse_extent() )
# 1861 "parser.ml"
: 'funmode))
; (fun __caml_parser_env ->
Obj.repr(
# 509 "parser.mly"
( [] )
# 1867 "parser.ml"
: 'newlist))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'newlist) in
Obj.repr(
# 511 "parser.mly"
( (_2,_4)::_6 )
# 1876 "parser.ml"
: 'newlist))
; (fun __caml_parser_env ->
Obj.repr(
# 515 "parser.mly"
( [] )
# 1882 "parser.ml"
: 'newlistopt))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 5 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'options) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : 'newlistopt) in
Obj.repr(
# 517 "parser.mly"
( (_2,_4,_5)::_7 )
# 1892 "parser.ml"
: 'newlistopt))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'fungroup) in
Obj.repr(
# 521 "parser.mly"
( [_1] )
# 1899 "parser.ml"
: 'funlist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'fungroup) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'funlist) in
Obj.repr(
# 523 "parser.mly"
( _1 :: _3 )
# 1907 "parser.ml"
: 'funlist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : 'options) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : int) in
Obj.repr(
# 527 "parser.mly"
( _3, _1 )
# 1915 "parser.ml"
: 'optpriority))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : int) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'options) in
Obj.repr(
# 529 "parser.mly"
( _2, _4 )
# 1923 "parser.ml"
: 'optpriority))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'options) in
Obj.repr(
# 531 "parser.mly"
( 0, _1 )
# 1930 "parser.ml"
: 'optpriority))
; (fun __caml_parser_env ->
Obj.repr(
# 535 "parser.mly"
( [] )
# 1936 "parser.ml"
: 'vartypeilist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'nevartypeilist) in
Obj.repr(
# 537 "parser.mly"
( _1 )
# 1943 "parser.ml"
: 'vartypeilist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 541 "parser.mly"
( [(_1, Tid _3)] )
# 1951 "parser.ml"
: 'nevartypeilist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'nevartypeilist) in
Obj.repr(
# 543 "parser.mly"
( (_1, Tid _3) :: _5 )
# 1960 "parser.ml"
: 'nevartypeilist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 545 "parser.mly"
( [(_1, TBound _3)] )
# 1968 "parser.ml"
: 'nevartypeilist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 0 : 'nevartypeilist) in
Obj.repr(
# 547 "parser.mly"
( (_1, TBound _3) :: _5 )
# 1977 "parser.ml"
: 'nevartypeilist))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : 'vartypeilist) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : 'optpriority) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 551 "parser.mly"
( PFun(dummy_channel, _2, _6, _4) )
# 1986 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : 'vartypeilist) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 3 : 'optpriority) in
let _7 = (Parsing.peek_val __caml_parser_env 1 : 'newlistopt) in
let _8 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 553 "parser.mly"
( PReplRestr((new_repl_occ(), None, _4), _7, [PFun(dummy_channel, _2, _8, _5)]) )
# 1997 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 6 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 4 : 'vartypeilist) in
let _5 = (Parsing.peek_val __caml_parser_env 2 : 'optpriority) in
let _7 = (Parsing.peek_val __caml_parser_env 0 : Ptree.term_e) in
Obj.repr(
# 555 "parser.mly"
( PFun(_1, _3, _7, _5) )
# 2007 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'newlistopt) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : 'fungroup) in
Obj.repr(
# 557 "parser.mly"
( PReplRestr((new_repl_occ(), None, _2), _3, [_4]) )
# 2016 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 3 : 'newlistopt) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'funlist) in
Obj.repr(
# 559 "parser.mly"
( PReplRestr((new_repl_occ(), None, _2), _3, _5) )
# 2025 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'newlistopt) in
let _6 = (Parsing.peek_val __caml_parser_env 0 : 'fungroup) in
Obj.repr(
# 561 "parser.mly"
( PReplRestr((new_repl_occ(), Some _2, _4), _5, [_6]) )
# 2035 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 6 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 4 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 3 : 'newlistopt) in
let _7 = (Parsing.peek_val __caml_parser_env 1 : 'funlist) in
Obj.repr(
# 563 "parser.mly"
( PReplRestr((new_repl_occ(), Some _2, _4), _5, _7) )
# 2045 "parser.ml"
: 'fungroup))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'probaf) in
Obj.repr(
# 567 "parser.mly"
( _2 )
# 2052 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'probaf) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'probaf) in
Obj.repr(
# 569 "parser.mly"
( PAdd(_1,_3), parse_extent() )
# 2060 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'probaf) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'probaf) in
Obj.repr(
# 571 "parser.mly"
( PSub(_1, _3), parse_extent() )
# 2068 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'probaf) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'probaf) in
Obj.repr(
# 573 "parser.mly"
( PProd(_1,_3), parse_extent() )
# 2076 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'probaf) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'probaf) in
Obj.repr(
# 575 "parser.mly"
( PDiv(_1,_3), parse_extent() )
# 2084 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'probaflist) in
Obj.repr(
# 577 "parser.mly"
( PMax(_3), parse_extent() )
# 2091 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 579 "parser.mly"
( (PPIdent _1), parse_extent() )
# 2098 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 581 "parser.mly"
( (PCount _2), parse_extent() )
# 2105 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'probaflist) in
Obj.repr(
# 583 "parser.mly"
( (PPFun(_1,_3)), parse_extent() )
# 2113 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
Obj.repr(
# 585 "parser.mly"
( PCard(_2), parse_extent() )
# 2120 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
Obj.repr(
# 587 "parser.mly"
( PTime, parse_extent() )
# 2126 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 589 "parser.mly"
( PActTime(PAFunApp _3, _4), parse_extent() )
# 2134 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 591 "parser.mly"
( PActTime(PAPatFunApp _4, _5), parse_extent() )
# 2142 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
Obj.repr(
# 593 "parser.mly"
( PActTime(PAReplIndex, []), parse_extent() )
# 2148 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 2 : int) in
Obj.repr(
# 595 "parser.mly"
( PActTime(PAArrayAccess _4, []), parse_extent() )
# 2155 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 597 "parser.mly"
( PActTime(PACompare _4, _5), parse_extent() )
# 2163 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'identlist) in
let _6 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 599 "parser.mly"
( PActTime(PAAppTuple _4, _6), parse_extent() )
# 2171 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _5 = (Parsing.peek_val __caml_parser_env 3 : 'identlist) in
let _7 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 601 "parser.mly"
( PActTime(PAPatTuple _5, _7), parse_extent() )
# 2179 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
Obj.repr(
# 603 "parser.mly"
( PActTime(PAAnd, []), parse_extent() )
# 2185 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
Obj.repr(
# 605 "parser.mly"
( PActTime(PAOr, []), parse_extent() )
# 2191 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 1 : Ptree.ident) in
Obj.repr(
# 607 "parser.mly"
( PActTime(PANew _4, []), parse_extent() )
# 2198 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
Obj.repr(
# 609 "parser.mly"
( PActTime(PANewChannel, []), parse_extent() )
# 2204 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
Obj.repr(
# 611 "parser.mly"
( PActTime(PAIf, []), parse_extent() )
# 2210 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 1 : int) in
Obj.repr(
# 613 "parser.mly"
( PActTime(PAFind _4, []), parse_extent() )
# 2217 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _5 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 615 "parser.mly"
( PActTime(PAOut([], _4), _5), parse_extent() )
# 2225 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _5 = (Parsing.peek_val __caml_parser_env 4 : 'neidentlist) in
let _7 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _8 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 617 "parser.mly"
( PActTime(PAOut(_5, _7), _8), parse_extent() )
# 2234 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 1 : int) in
Obj.repr(
# 619 "parser.mly"
( PActTime(PAIn _4, []), parse_extent() )
# 2241 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 621 "parser.mly"
( let x = _1 in
if x = 0 then (PPZero,parse_extent()) else
(PCst x,parse_extent()) )
# 2250 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : float) in
Obj.repr(
# 625 "parser.mly"
( let x = _1 in
if x = 0.0 then (PPZero,parse_extent()) else
(PFloatCst x,parse_extent()) )
# 2259 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 1 : Ptree.term_e) in
Obj.repr(
# 629 "parser.mly"
( PMaxlength(_3), parse_extent() )
# 2266 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 631 "parser.mly"
( PLength(_3, _4), parse_extent() )
# 2274 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _4 = (Parsing.peek_val __caml_parser_env 3 : 'identlist) in
let _6 = (Parsing.peek_val __caml_parser_env 1 : 'probaflistopt) in
Obj.repr(
# 633 "parser.mly"
( PLengthTuple(_4, _6), parse_extent() )
# 2282 "parser.ml"
: 'probaf))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'probaflist) in
Obj.repr(
# 637 "parser.mly"
( _2 )
# 2289 "parser.ml"
: 'probaflistopt))
; (fun __caml_parser_env ->
Obj.repr(
# 639 "parser.mly"
( [] )
# 2295 "parser.ml"
: 'probaflistopt))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'probaf) in
Obj.repr(
# 643 "parser.mly"
( [_1] )
# 2302 "parser.ml"
: 'probaflist))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'probaf) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'probaflist) in
Obj.repr(
# 645 "parser.mly"
( _1 :: _3 )
# 2310 "parser.ml"
: 'probaflist))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _4 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 651 "parser.mly"
( PRestr(_2, _4, (PYield, parse_extent())), parse_extent() )
# 2318 "parser.ml"
: Ptree.process_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 1 : 'findcond) in
Obj.repr(
# 653 "parser.mly"
(
let yield = (PYield, parse_extent()) in
match _2 with
([], t) -> PTest(t, yield, yield), parse_extent()
| (def_list, t) ->
PFind([([], def_list, t, yield)], yield, []), parse_extent()
)
# 2331 "parser.ml"
: Ptree.process_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : 'findlistins) in
Obj.repr(
# 661 "parser.mly"
( PFind(_2, (PYield, parse_extent()), []), parse_extent() )
# 2338 "parser.ml"
: Ptree.process_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 663 "parser.mly"
( PEvent((PFunApp(_2, []), parse_extent()), (PYield, parse_extent())), parse_extent() )
# 2345 "parser.ml"
: Ptree.process_e))
; (fun __caml_parser_env ->
let _2 = (Parsing.peek_val __caml_parser_env 3 : 'pattern) in
let _4 = (Parsing.peek_val __caml_parser_env 1 : Ptree.term_e) in
Obj.repr(
# 665 "parser.mly"
( PLet(_2,_4,(PYield, parse_extent()),(PYield, parse_extent())), parse_extent() )
# 2353 "parser.ml"
: Ptree.process_e))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 3 : 'tidentseq) in
let _3 = (Parsing.peek_val __caml_parser_env 1 : 'findcond) in
Obj.repr(
# 669 "parser.mly"
( let (def_list, t) = _3 in
(_1, def_list, t, (PYield, parse_extent())) )
# 2362 "parser.ml"
: 'findoneins))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'findoneins) in
Obj.repr(
# 674 "parser.mly"
( [_1] )
# 2369 "parser.ml"
: 'findlistins))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'findoneins) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'findlistins) in
Obj.repr(
# 676 "parser.mly"
( _1 :: _3 )
# 2377 "parser.ml"
: 'findlistins))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 683 "parser.mly"
( (_1, 1) )
# 2384 "parser.ml"
: 'factor))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : Ptree.ident) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : int) in
Obj.repr(
# 685 "parser.mly"
( (_1, _3) )
# 2392 "parser.ml"
: 'factor))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'factor) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'num) in
Obj.repr(
# 689 "parser.mly"
( _1 :: _3 )
# 2400 "parser.ml"
: 'num))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'factor) in
Obj.repr(
# 691 "parser.mly"
( [_1] )
# 2407 "parser.ml"
: 'num))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'num) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : Ptree.ident) in
Obj.repr(
# 695 "parser.mly"
( (_1, Some _3) )
# 2415 "parser.ml"
: 'quot))
; (fun __caml_parser_env ->
let _3 = (Parsing.peek_val __caml_parser_env 0 : 'num) in
Obj.repr(
# 697 "parser.mly"
( (_3, None) )
# 2422 "parser.ml"
: 'quot))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 0 : 'quot) in
Obj.repr(
# 701 "parser.mly"
( [_1] )
# 2429 "parser.ml"
: ((Ptree.ident * int) list * Ptree.ident option) list))
; (fun __caml_parser_env ->
let _1 = (Parsing.peek_val __caml_parser_env 2 : 'quot) in
let _3 = (Parsing.peek_val __caml_parser_env 0 : ((Ptree.ident * int) list * Ptree.ident option) list) in
Obj.repr(
# 703 "parser.mly"
( _1 :: _3 )
# 2437 "parser.ml"
: ((Ptree.ident * int) list * Ptree.ident option) list))
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
; (fun __caml_parser_env -> raise (Parsing.YYexit (Parsing.peek_val __caml_parser_env 0)))
|]
(* Generated parser tables (ocamlyacc output): bundles the semantic
   action array [yyact] and the LR automaton tables defined above into
   the record consumed by [Parsing.yyparse].  Do not edit by hand. *)
let yytables =
  { Parsing.actions=yyact;
    Parsing.transl_const=yytransl_const;
    Parsing.transl_block=yytransl_block;
    Parsing.lhs=yylhs;
    Parsing.len=yylen;
    Parsing.defred=yydefred;
    Parsing.dgoto=yydgoto;
    Parsing.sindex=yysindex;
    Parsing.rindex=yyrindex;
    Parsing.gindex=yygindex;
    Parsing.tablesize=yytablesize;
    Parsing.table=yytable;
    Parsing.check=yycheck;
    Parsing.error_function=parse_error;
    Parsing.names_const=yynames_const;
    Parsing.names_block=yynames_block }
(* Public entry points, one per %start symbol of the grammar.  Each
   invokes the generated automaton with the corresponding start-state
   index (1..5), the caller's lexer function and its lexbuf. *)
(* Parse a full input: a declaration list followed by the main process. *)
let all (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
   (Parsing.yyparse yytables 1 lexfun lexbuf : Ptree.decl list * Ptree.process_e)
(* Parse a library: declarations only, no process. *)
let lib (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
   (Parsing.yyparse yytables 2 lexfun lexbuf : Ptree.decl list)
(* Parse a single process expression. *)
let instruct (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
   (Parsing.yyparse yytables 3 lexfun lexbuf : Ptree.process_e)
(* Parse a single term. *)
let term (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
   (Parsing.yyparse yytables 4 lexfun lexbuf : Ptree.term_e)
(* Parse an "allowed collisions" specification. *)
let allowed_coll (lexfun : Lexing.lexbuf -> token) (lexbuf : Lexing.lexbuf) =
   (Parsing.yyparse yytables 5 lexfun lexbuf : ((Ptree.ident * int) list * Ptree.ident option) list)
|
84118fe2f46dcd8247a529bc2824b35fdc3fc1579786f19adb78933511c083a8 | cojna/iota | Mo.hs | {-# LANGUAGE BangPatterns #-}
{-# LANGUAGE TypeApplications #-}
{- |
= Mo's Algorithm
-}
module Algorithm.Mo where
import Control.Monad.Primitive
import Data.Bits
import qualified Data.Vector.Fusion.Stream.Monadic as MS
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Unboxed.Mutable as UM
import Data.Word
import Unsafe.Coerce
--
import Data.Vector.Sort.Radix (radixSort64)
import My.Prelude (stream, streamR)
-- | /O((N+Q)sqrt N)/
--
-- Mo's algorithm: answers offline range queries over @[l, r)@ by
-- sorting the queries block-wise ('moSort') and sliding the current
-- window, folding each index in or out with the supplied callbacks.
moAlgorithm ::
  (U.Unbox a, PrimMonad m) =>
  -- | add: fold one newly covered index into the accumulator
  (a -> Int -> m a) ->
  -- | delete: fold one no-longer-covered index out of the accumulator
  (a -> Int -> m a) ->
  -- | initial value
  a ->
  -- | block size (sqrt N)
  Int ->
  -- | query [l, r)
  U.Vector (Int, Int) ->
  m (U.Vector a)
moAlgorithm add delete acc0 blockSize lrs = do
  result <- UM.unsafeNew (U.length lrs)
  U.foldM'_
    ( \(MoState l r acc) (qi, (ql, qr)) -> do
        -- move the right edge, then the left edge, to the new window
        !addR <- MS.foldM' add acc $ stream r qr
        !deleteR <- MS.foldM' delete addR $ streamR qr r
        !addL <- MS.foldM' add deleteR $ streamR ql l
        !deleteL <- MS.foldM' delete addL $ stream l ql
        -- store the answer at the query's original index
        UM.unsafeWrite result qi deleteL
        return $! MoState ql qr deleteL
    )
    (MoState 0 0 acc0)
    (moSort blockSize lrs)
  U.unsafeFreeze result
{-# INLINE moAlgorithm #-}
-- fix: the INLINE pragma had lost its {-# ... #-} braces, which is a
-- parse error at the top level
-- | Default block size for 'moAlgorithm': the ceiling of @sqrt n@.
moBlockSize :: Int -> Int
moBlockSize n = ceiling (sqrt (fromIntegral n :: Double))
-- | Sliding-window state: current left edge, right edge, accumulator.
data MoState a = MoState !Int !Int !a deriving (Eq)
-- | Order the queries for Mo's algorithm: radix-sort on a packed
-- 64-bit key (block of l, then r — see 'moEncode'), and tag each
-- query with its original index so answers can be written back in
-- input order.
moSort :: Int -> U.Vector (Int, Int) -> U.Vector (Int, (Int, Int))
moSort blockSize lrs =
  U.map (\i -> (i, U.unsafeIndex lrs i))
    . U.map moDecode
    . radixSort64
    $ U.imap (\i (l, r) -> moEncode blockSize i l r) lrs
{-# INLINE moSort #-}
-- fix: restored the stripped {-# ... #-} braces on the INLINE pragma
-- | Pack one query into a 64-bit radix-sort key:
-- bits 40.. hold l's block index, bits 20..39 hold r (bit-flipped in
-- odd blocks so the right pointer zigzags, reducing movement), and
-- bits 0..19 hold the query index.  Assumes @qi, r < 2^20@.
moEncode :: Int -> Int -> Int -> Int -> Word64
moEncode blockSize qi l r =
  -- fromIntegral @Int @Word64 reinterprets the two's-complement bits,
  -- exactly like the unsafeCoerce it replaces, without the unsafe API
  fromIntegral $
    unsafeShiftL l' 40 .|. unsafeShiftL r' 20 .|. qi
  where
    l' = quot l blockSize
    r'
      | l' .&. 1 == 1 = 0xfffff - r
      | otherwise = r
{-# INLINE moEncode #-}
-- | Recover the query index (low 20 bits) from a packed sort key.
-- The masked value is < 2^20, so fromIntegral is lossless here and
-- replaces the original unsafeCoerce with identical results.
moDecode :: Word64 -> Int
moDecode = fromIntegral . (.&. 0xfffff)
{-# INLINE moDecode #-}
| null | https://raw.githubusercontent.com/cojna/iota/6d2ad5b71b1b50bca9136d6ed84f80a0b7713d7c/src/Algorithm/Mo.hs | haskell | # LANGUAGE BangPatterns #
|
= Mo's Algotrithm
| /O((N+Q)sqrt N)/
| add
| delete
| initial value
| block size (sqrt N)
| query [l, r) | # LANGUAGE TypeApplications #
module Algorithm.Mo where
import Control.Monad.Primitive
import Data.Bits
import qualified Data.Vector.Fusion.Stream.Monadic as MS
import qualified Data.Vector.Unboxed as U
import qualified Data.Vector.Unboxed.Mutable as UM
import Data.Word
import Unsafe.Coerce
import Data.Vector.Sort.Radix (radixSort64)
import My.Prelude (stream, streamR)
-- | /O((N+Q)sqrt N)/ — Mo's algorithm.  Answers offline range queries
-- over @[l, r)@ by sorting queries block-wise ('moSort') and sliding
-- the current window with the add/delete callbacks.
moAlgorithm ::
  (U.Unbox a, PrimMonad m) =>
  -- | add: fold one newly covered index into the accumulator
  (a -> Int -> m a) ->
  -- | delete: fold one no-longer-covered index out of the accumulator
  (a -> Int -> m a) ->
  -- | initial accumulator value
  a ->
  -- | block size (sqrt N)
  Int ->
  -- | queries as [l, r) pairs
  U.Vector (Int, Int) ->
  m (U.Vector a)
moAlgorithm add delete acc0 blockSize lrs = do
  result <- UM.unsafeNew (U.length lrs)
  U.foldM'_
    ( \(MoState l r acc) (qi, (ql, qr)) -> do
        -- move the right edge, then the left edge, to the new window
        !addR <- MS.foldM' add acc $ stream r qr
        !deleteR <- MS.foldM' delete addR $ streamR qr r
        !addL <- MS.foldM' add deleteR $ streamR ql l
        !deleteL <- MS.foldM' delete addL $ stream l ql
        -- store the answer at the query's original index
        UM.unsafeWrite result qi deleteL
        return $! MoState ql qr deleteL
    )
    (MoState 0 0 acc0)
    (moSort blockSize lrs)
  U.unsafeFreeze result
{-# INLINE moAlgorithm #-}
-- fix: the INLINE pragma had lost its {-# ... #-} braces, which is a
-- parse error at the top level
-- | Default block size for 'moAlgorithm': the ceiling of @sqrt n@.
moBlockSize :: Int -> Int
moBlockSize n = ceiling (sqrt (fromIntegral n :: Double))
-- | Sliding-window state: current left edge, right edge, accumulator.
data MoState a = MoState !Int !Int !a deriving (Eq)
-- | Order the queries for Mo's algorithm: radix-sort on a packed
-- 64-bit key (block of l, then r — see 'moEncode'), and tag each
-- query with its original index so answers can be written back in
-- input order.
moSort :: Int -> U.Vector (Int, Int) -> U.Vector (Int, (Int, Int))
moSort blockSize lrs =
  U.map (\i -> (i, U.unsafeIndex lrs i))
    . U.map moDecode
    . radixSort64
    $ U.imap (\i (l, r) -> moEncode blockSize i l r) lrs
{-# INLINE moSort #-}
-- fix: restored the stripped {-# ... #-} braces on the INLINE pragma
-- | Pack one query into a 64-bit radix-sort key:
-- bits 40.. hold l's block index, bits 20..39 hold r (bit-flipped in
-- odd blocks so the right pointer zigzags, reducing movement), and
-- bits 0..19 hold the query index.  Assumes @qi, r < 2^20@.
moEncode :: Int -> Int -> Int -> Int -> Word64
moEncode blockSize qi l r =
  -- fromIntegral @Int @Word64 reinterprets the two's-complement bits,
  -- exactly like the unsafeCoerce it replaces, without the unsafe API
  fromIntegral $
    unsafeShiftL l' 40 .|. unsafeShiftL r' 20 .|. qi
  where
    l' = quot l blockSize
    r'
      | l' .&. 1 == 1 = 0xfffff - r
      | otherwise = r
{-# INLINE moEncode #-}
-- | Recover the query index (low 20 bits) from a packed sort key.
-- The masked value is < 2^20, so fromIntegral is lossless here and
-- replaces the original unsafeCoerce with identical results.
moDecode :: Word64 -> Int
moDecode = fromIntegral . (.&. 0xfffff)
{-# INLINE moDecode #-}
|
df0e88780266cce20b3635e31518f82091c3fa11ae2ccf35f533d7f5adfa6432 | Outdooractive/elevation-profile | dem-gdal.scm | ;;;
dem ( digital elevation model ) via ( )
;;;
Copyright ( c ) 2012 < >
;;;
;;; Redistribution and use in source and binary forms, with or without
;;; modification, are permitted provided that the following conditions
;;; are met:
;;;
;;; 1. Redistributions of source code must retain the above copyright
;;; notice, this list of conditions and the following disclaimer.
;;;
;;; 2. Redistributions in binary form must reproduce the above copyright
;;; notice, this list of conditions and the following disclaimer in the
;;; documentation and/or other materials provided with the distribution.
;;;
;;; 3. Neither the name of the authors nor the names of its contributors
;;; may be used to endorse or promote products derived from this
;;; software without specific prior written permission.
;;;
;;; THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
;;; LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
;;; A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
;;; OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
;;; SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED
;;; TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
;;; PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
;;; LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
;;; NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
;;; SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
;;;
;; notes/todo:
;; - quite a hack
;; - get rid of c-wrapper / speedup
;; - leaks memory => call procedures only once if possible!
- you can use 's vrt format to merge images
;; see also:
and in general ( WMS , .... )
;;
- a more general purpose wrapper would be nice
;; (upload to gl texture ...)
;; - use GDAL_CACHEMAX?
(define-module dem-gdal
(use srfi-1)
(use gauche.collection) ;; use after srfi-1 to make find work as expected!
(use gauche.sequence)
(use srfi-13)
(use c-wrapper)
(use gauche.array)
(use gauche.uvector)
(use gauche.process)
(use runtime-compile)
(use binary.pack)
;;(use sxml.adaptor)
(use geod)
(export dem->xy->z
dem->xy-project->z
dem->xy-project->z-debug
dem-stack->xy->z
dem-stack->xy->z-debug
))
(select-module dem-gdal)
(c-load '("gdal/gdal.h" "gdal/ogr_srs_api.h")
:libs-cmd "gdal-config --libs"
:compiled-lib "gdal"
:cflags "-O2")
;; todo: hmm
(CPLSetErrorHandler 0)
;; (assert expr) — macro: raise an error naming the failed expression
;; when EXPR evaluates to false at run time.
(define-macro (assert e)
  `(when (not ,e)
     (error "assertion failed: " ,(x->string e))))
(define (gdal-open-dataset name)
(assert (string? name))
(with-output-to-port (current-error-port)
(lambda()
(gdal-init)
(let ((dataset (GDALOpen name GA_ReadOnly)))
(cond [(not (null-ptr? dataset))
(let ((driver (GDALGetDatasetDriver dataset)))
;; (print #`"Driver: ,(GDALGetDriverShortName driver)/,(GDALGetDriverLongName driver)")
;; (print #`"Size is ,(GDALGetRasterXSize dataset)x,(GDALGetRasterYSize dataset)x,(GDALGetRasterCount dataset)")
(when (not (null-ptr? (GDALGetProjectionRef dataset)))
;; (print #`"Projection is ',(GDALGetProjectionRef dataset)'")
(let ((transform (make (c-array <c-double> 6))))
(when (= (GDALGetGeoTransform dataset transform) CE_None)
;;#?=(map (cut cast <number> <>) transform)
;; (print #`"Origin = (,(ref transform 0), ,(ref transform 3))")
;; (print #`"Pixel Size = (,(ref transform 1), ,(ref transform 5))")
#t))))
dataset]
[else
(error "Unsupported format")])))))
;; Parse a user-supplied spatial-reference description (EPSG code, WKT,
;; proj4 string, ...) into a new OGR spatial reference handle.
;; Raises an error when OGR cannot interpret the input.
(define (osr-from-user-input s)
  (let ((hSRS (OSRNewSpatialReference NULL))) ;; todo: leak!
    (when (not (= (OSRSetFromUserInput hSRS s) OGRERR_NONE))
      (error "OSRSetFromUserInput failed"))
    hSRS))
;; Read the dataset's projection WKT and parse it into a fresh OGR
;; spatial reference handle; raises an error on parse failure.
(define (osr-from-dataset dataset)
  (let ((hSRS (OSRNewSpatialReference NULL)))
    (when (not (= (OSRImportFromWkt hSRS (ptr (GDALGetProjectionRef dataset))) OGRERR_NONE))
      (error "OSRImportFromWkt failed"))
    hSRS))
;; Convert a C int returned through the FFI to a Scheme boolean:
;; 0 => #f, anything else => #t.
(define (c-int->bool x)
  (not (zero? (cast <number> x))))
;; #t when the two OGR spatial reference handles describe the same CS.
(define (osr-is-same? from to)
  (c-int->bool (OSRIsSame from to)))
;; Condition raised when a coordinate transformation fails; the
;; offending position is carried in the transform-error-pos slot.
(define-condition-type <transform-error> <error>
  transform-error?
  (pos transform-error-pos))
(cond-expand
(no-runtime-compile
(define (osr-transform from to)
(if (osr-is-same? from to)
identity
(let ((ct (OCTNewCoordinateTransformation from to))
(xa (make (c-array <c-double> 1)))
(ya (make (c-array <c-double> 1)))
(za (make (c-array <c-double> 1))))
(assert (not (null-ptr? ct)))
(lambda(l)
(set! (ref xa 0) (ref l 0))
(set! (ref ya 0) (ref l 1))
(set! (ref za 0) (ref l 2 0))
(when (not (c-int->bool (OCTTransform ct 1 xa ya za)))
(error <transform-error> :pos l))
(list (ref xa 0) (ref ya 0) (ref za 0))))))
;; todo:
;; - gdal already should provide that, no?
;; - slow
(define (gdal-get-geotransform⁻¹ dataset)
(let1 A (array-inverse (array-mul (gdal-get-geotransform-matrix dataset)
(array (shape 0 3 0 3)
1.0 0.0 0.5
0.0 1.0 0.5
0.0 0.0 1.0)))
(lambda(l)
(let1 r (array-mul A (array (shape 0 3 0 1) (ref l 0) (ref l 1) 1))
(list (array-ref r 0 0) (array-ref r 1 0))))))
(define (f32vector-replace! vec from to)
(let1 s (f32vector-length vec)
(dotimes (i s)
(when (= (f32vector-ref vec i) from)
(f32vector-set! vec i to))))
vec)
(define (get-gdal-read-band-row! band nodata)
(let ((xsize (GDALGetRasterBandXSize band))
(ysize (GDALGetRasterBandYSize band)))
(lambda(scanline row . args)
(let-optionals* args ((start 0)
(end xsize))
(assert (<= start end))
(let1 count (- end start)
(assert (>= (size-of scanline) count))
(f32vector-fill! scanline +nan.0)
(cond [(and (> count 0)
(>= row 0)
(< row ysize))
(let ((rstart (max 0 start))
(rend (min end xsize)))
(let ((lfill (- rstart start))
;; (rfill (- end rend))
(rcount (- rend rstart)))
(when (and (> rcount 0)
(not (zero? (GDALRasterIO band GF_Read rstart row rcount 1
(c-ptr+ (cast (ptr <c-float>) scanline) lfill)
rcount 1 GDT_Float32 0 0))))
(error "todo"))
(assert (or (boolean? nodata) (number? nodata)))
replace with
(when nodata
(f32vector-replace! scanline nodata +nan.0))
count
(let ((s (f32vector-length scanline))
(r 0))
(dotimes (i s)
(when (nan? (f32vector-ref scanline i))
(inc! r)))
r)))]
[else
(f32vector-length scanline)]))))))
;; taken from grass (interp.c)
;; return (u * (u * (u * (c3 + -3 * c2 + 3 * c1 - c0) +
( -c3 + 4 * c2 - 5 * c1 + 2 * c0 ) + ( c2 - c0 ) ) + 2 * c1 ) / 2 ;
(define (interp-cubic u c0 c1 c2 c3)
(/ (+ (* u (+ (* u (+ (* u (+ c3 (* -3 c2) (* 3 c1) (- c0)))
(- c3)
(* 4 c2)
(* -5 c1)
(* 2 c0)))
c2
(- c0)))
(* 2 c1))
2))
;; todo: improve
(define (mod4 x m minx maxx)
(cond [(and (< x minx)
(or (>= (- maxx minx) m)
(<= (+ x m) maxx)))
(mod4 (+ x m) m minx maxx)]
[(and (> x maxx)
(or (>= (- maxx minx) m)
(>= (- x m) minx)))
(mod4 (- x m) m minx maxx)]
[else
x]))
(define wrap-long-to (cut mod4 <> 360 <> <>))
;; todo: improve / or maybe just clip?!
(define (wrap-lat x y . l)
(cond [(< y -90)
(apply wrap-lat (append (list (+ x 180) (- -180 y))
l))]
[(> y 90)
(apply wrap-lat (append (list (+ x 180) (- 180 y))
l))]
[else
(append (list x y) l)]))
(define (get-bbox-geo-wrap geobox)
(lambda(xy)
(let1 xy (apply wrap-lat xy)
(list (wrap-long-to (car xy)
(ref* geobox 0 0)
(ref* geobox 1 0))
(cadr xy)))))
(define (geo-wrap xy)
(let1 xy (apply wrap-lat xy)
note : ( fmod ( car xy ) 360 ) ca n't be expressed using wrap - long - to :(
(list (fmod (car xy) 360)
(cadr xy))))
(define (raster-pos->4x4-box raster-pos)
(let1 tl (map (lambda(x) (- (floor->exact x) 1)) raster-pos)
(list tl (map (cut + <> 4) tl))))
(define (raster-pos->2x2-box raster-pos)
;;(assert (list? raster-pos))
(let1 tl (map floor->exact raster-pos)
(list tl (map (cut + <> 2) tl))))
(define (raster-pos->1x1-box raster-pos)
;;(assert (list? raster-pos))
(let1 tl (map round->exact raster-pos)
(list tl (map (cut + <> 1) tl))))
(define (get-rasterpos projection dataset)
(let1 osr (osr-from-dataset dataset)
(let1 f (apply compose
(reverse ;; just for readability
(filter (lambda(f) (not (eq? f identity)))
(list
(if (not (string-null? projection))
(osr-transform (osr-from-user-input projection)
(OSRCloneGeogCS osr))
identity)
(if (osr-is-geographic? osr)
;; todo: at the moment we can only get the
geographic bbox if the dataset osr is
;; geographic
(get-bbox-geo-wrap (gdal-geographic-bbox dataset))
;; note: input always geographic!
geo-wrap)
(gdal-get-projection dataset)
(gdal-get-geotransform⁻¹ dataset)))))
(lambda(x y)
(f (list x y))))))
(define (get-rasterpos&bbox! projection dataset get-box width height)
(let1 rasterpos (get-rasterpos projection dataset)
(lambda(x y rp box)
(guard (e [(transform-error? e)
#f])
(let* ((rp2 (rasterpos x y))
(box2 (get-box rp2)))
(cond [(or (<= (caadr box2) 0) (>= (caar box2) width)
(<= (cadadr box2) 0) (>= (cadar box2) height))
#f]
[else
(set! (ref rp 0) (car rp2))
(set! (ref rp 1) (cadr rp2))
;; #?=rp
# ? = box2
(set! (ref box 0) (car (car box2)))
(set! (ref box 1) (cadr (car box2)))
(set! (ref box 2) (car (cadr box2)))
(set! (ref box 3) (cadr (cadr box2)))
#t]))))))
)
(else
(compile-and-load
`((inline-stub
(declcode
(.include "gauche/uvector.h")
(.include "gdal/gdal.h")
(.include "gdal/ogr_srs_api.h")
"static ScmClass *osrn_transform_class = NULL;"
"/* stolen from cwcompile output */
static void cw_unbox(void *dest, ScmObj obj, size_t size)
{
static ScmObj bufferof_proc = NULL;
ScmObj buf;
if (!bufferof_proc) {
bufferof_proc = SCM_SYMBOL_VALUE(\"c-wrapper.c-ffi\", \"buffer-of\");
}
buf = Scm_ApplyRec(bufferof_proc, SCM_LIST1(obj));
memcpy(dest, SCM_UVECTOR_ELEMENTS(buf), size);
}"
)
(define-cproc make-osrn-transform (fromp top)
(let* ((from::OGRSpatialReferenceH NULL)
(to::OGRSpatialReferenceH NULL))
(cw_unbox (& from) fromp (sizeof OGRSpatialReferenceH))
(cw_unbox (& to) top (sizeof OGRSpatialReferenceH))
(when (not from)
(Scm_Error "failed to set from"))
(when (not to)
(Scm_Error "failed to set to"))
(return (Scm_MakeForeignPointer osrn_transform_class (OCTNewCoordinateTransformation from to)))))
(define-cproc osrn-apply-transform (it x::<double> y::<double>)
(unless (SCM_XTYPEP it osrn_transform_class) (SCM_TYPE_ERROR it "<osrn:transform>"))
(let* ((t::OGRCoordinateTransformationH (SCM_FOREIGN_POINTER_REF OGRCoordinateTransformationH it))
(xr::double x)
(yr::double y)
(zr::double 0))
(when (not (OCTTransform t 1 (& xr) (& yr) (& zr)))
todo : use Scm_Raise ?
(result (SCM_LIST3 (Scm_MakeFlonum xr)
(Scm_MakeFlonum yr)
(Scm_MakeFlonum zr)))))
(define-cfn osrn-transform-cleanup (h) ::void :static
(OCTDestroyCoordinateTransformation (SCM_FOREIGN_POINTER_REF OGRCoordinateTransformationH h)))
(define-cfn osrn-transform-print (h p::ScmPort* c::ScmWriteContext*) ::void :static
(Scm_Printf p "#<osrn:transform @%p->%p>" h (SCM_FOREIGN_POINTER_REF OGRCoordinateTransformationH h)))
(define-cproc c-gdal-read-band-row!
(bandp nodata xsize::<int> ysize::<int> scanline::<f32vector> row::<int> start::<int> end::<int>)
(let* ((band::GDALRasterBandH NULL))
(cw_unbox (& band) bandp (sizeof GDALRasterBandH))
(unless (<= start end) (Scm_Error "(<= start end)")) ;; todo: c-level assert?!
(let* ((count::int (- end start)))
(unless (>= (SCM_UVECTOR_SIZE scanline) count) (Scm_Error "(>= (SCM_UVECTOR_SIZE scanline) count)"))
(Scm_F32VectorFill scanline NAN 0 (SCM_UVECTOR_SIZE scanline))
(cond [(and (> count 0)
(>= row 0)
(< row ysize))
(let* ((rstart::int (?: (< start 0) 0 start))
(rend::int (?: (< end xsize) end xsize))
(lfill::int (- rstart start))
;; (rfill (- end rend))
(rcount::int (- rend rstart)))
(when (and (> rcount 0)
(not (== (GDALRasterIO band GF_Read rstart row rcount 1
(+ (SCM_F32VECTOR_ELEMENTS scanline) lfill)
rcount 1 GDT_Float32 0 0)
0)))
(Scm_Error "todo"))
(let* ((r::int 0)
(i::int 0))
replace with
(unless (or (SCM_BOOLP nodata) (SCM_FLONUMP nodata))
(Scm_Error "(or (SCM_BOOLP nodata) (SCM_FLONUMP nodata))"))
(when (and (not (SCM_BOOLP nodata))
(SCM_FLONUMP nodata))
(for [(set! i 0) (< i (SCM_UVECTOR_SIZE scanline)) (pre++ i)]
(when (== (aref (SCM_F32VECTOR_ELEMENTS scanline) i) (SCM_FLONUM_VALUE nodata))
(set! (aref (SCM_F32VECTOR_ELEMENTS scanline) i) NAN))))
count
(for [(set! i 0) (< i (SCM_UVECTOR_SIZE scanline)) (pre++ i)]
(when (isnan (aref (SCM_F32VECTOR_ELEMENTS scanline) i))
(pre++ r)))
(result (SCM_MAKE_INT r))))]
[else
(result (SCM_MAKE_INT (SCM_UVECTOR_SIZE scanline)))]))))
(initcode (= osrn_transform_class (Scm_MakeForeignPointerClass
(Scm_CurrentModule)
"<osrn:transform>" osrn-transform-print osrn-transform-cleanup
SCM_FOREIGN_POINTER_KEEP_IDENTITY)))
))
'(make-osrn-transform osrn-apply-transform c-gdal-read-band-row!)
:libs (process-output->string "gdal-config --libs"))
(define (osr-transform from to)
(if (osr-is-same? from to)
identity
(let1 fp (make-osrn-transform from to)
(lambda(l)
(guard (e [else
;;#?=e
(error <transform-error> :pos l)])
(osrn-apply-transform fp (car l) (cadr l)))))))
(with-module gauche.array
(define (symbolic-array-mul a b) ; NxM * MxP => NxP
(let ([a-start (start-vector-of a)]
[a-end (end-vector-of a)]
[b-start (start-vector-of b)]
[b-end (end-vector-of b)])
(unless (= 2 (s32vector-length a-start) (s32vector-length b-start))
(error "array-mul matrices must be of rank 2"))
(let* ([a-start-row (s32vector-ref a-start 0)]
[a-end-row (s32vector-ref a-end 0)]
[a-start-col (s32vector-ref a-start 1)]
[a-end-col (s32vector-ref a-end 1)]
[b-start-col (s32vector-ref b-start 1)]
[b-end-col (s32vector-ref b-end 1)]
[n (- a-end-row a-start-row)]
[m (- a-end-col a-start-col)]
[p (- b-end-col b-start-col)]
[a-col-b-row-off (- a-start-col (s32vector-ref b-start 0))]
[res (make-minimal-backend-array (list a b) (shape 0 n 0 p))])
(unless (= m (- (s32vector-ref b-end 0) (s32vector-ref b-start 0)))
(errorf "dimension mismatch: can't mul shapes ~S and ~S"
(array-shape a) (array-shape b)))
(do ([i a-start-row (+ i 1)]) ; for-each row of a
[(= i a-end-row) res]
(do ([k b-start-col (+ k 1)]) ; for-each col of b
[(= k b-end-col)]
(let1 tmp (list '+)
(do ([j a-start-col (+ j 1)]) ; for-each col of a & row of b
[(= j a-end-col)]
(append! tmp (list (list '* (array-ref a i j) (array-ref b (- j a-col-b-row-off) k)))))
(array-set! res (- i a-start-row) (- k b-start-col) tmp)))))))
(export symbolic-array-mul)
)
;; todo: use macro?!
(define (compile-cise-function args body)
(let1 mod (compile-and-load
`((inline-stub
(declcode
(.include "gdal/gdal.h")
(.include "gdal/ogr_srs_api.h"))
(define-cproc foo ,args . ,body)))
`()
:libs (process-output->string "gdal-config --libs"))
(global-variable-ref mod 'foo)))
(define (gdal-get-geotransform-cise⁻¹ dataset)
(let* ((A (array-inverse (array-mul (gdal-get-geotransform-matrix dataset)
(array (shape 0 3 0 3)
1.0 0.0 0.5
0.0 1.0 0.5
0.0 0.0 1.0))))
(sr (symbolic-array-mul A (array (shape 0 3 0 1) 'x 'y 1))))
`((set! x ,(array-ref sr 0 0))
(set! y ,(array-ref sr 1 0)))))
;; todo:
;; - gdal already should provide that, no?
(define (gdal-get-geotransform⁻¹ dataset)
(let1 nf (compile-cise-function '(x::<double> y::<double>)
(append (gdal-get-geotransform-cise⁻¹ dataset)
`((return (SCM_LIST2 (Scm_MakeFlonum x) (Scm_MakeFlonum y))))))
(lambda(l)
(nf (ref l 0) (ref l 1)))))
(define (get-gdal-read-band-row! band nodata)
(let ((xsize (GDALGetRasterBandXSize band))
(ysize (GDALGetRasterBandYSize band)))
(lambda(scanline row . args)
(let-optionals* args ((start 0)
(end xsize))
(c-gdal-read-band-row! band nodata xsize ysize scanline row start end)))))
(compile-and-load
`((inline-stub
(define-cproc interp-cubic (u::<double> c0::<double> c1::<double> c2::<double> c3::<double>)
::<number> ;; :fast-flonum :constant
(result (Scm_MakeFlonum (/ (+ (* u (+ (* u (+ (* u (+ c3 (* -3 c2) (* 3 c1) (- c0)))
(- c3)
(* 4 c2)
(* -5 c1)
(* 2 c0)))
c2
(- c0)))
(* 2 c1))
2))))))
'(interp-cubic))
(define (bbox-geo-wrap-cise-2 minx maxx)
`((while 1
(cond [(< y -90)
(+= x 180)
(set! y (- -180 y))]
[(> y 90)
(+= x 180)
(set! y (- 180 y))]
[else
(break)]))
;; todo: improve
(while 1
(cond [(and (< x ,minx)
(or (>= ,(- maxx minx) 360)
(<= (+ x 360) ,maxx)))
(+= x 360)]
[(and (> x ,maxx)
(or (>= ,(- maxx minx) 360)
(>= (- x 360) ,minx)))
(-= x 360)]
[else
(break)]))))
(define (get-bbox-geo-wrap-cise geobox)
(bbox-geo-wrap-cise-2 (ref* geobox 0 0) (ref* geobox 1 0)))
(define (get-bbox-geo-wrap geobox)
(let1 f (compile-cise-function '(x::<double> y::<double>)
(append
(get-bbox-geo-wrap-cise geobox)
'((result (SCM_LIST2 (Scm_MakeFlonum x) (Scm_MakeFlonum y))))))
(lambda(xy)
(f (car xy) (cadr xy)))))
(define (geo-wrap-cise)
'((while 1
(cond [(< y -90)
(+= x 180)
(set! y (- -180 y))]
[(> y 90)
(+= x 180)
(set! y (- 180 y))]
[else
(break)]))
(set! x (fmod x 360))))
(define (geo-wrap)
(let1 f (compile-cise-function '(x::<double> y::<double>)
(append
(geo-wrap-cise)
'((result (SCM_LIST2 (Scm_MakeFlonum x) (Scm_MakeFlonum y))))))
(lambda(xy)
(f (car xy) (cadr xy)))))
;; todo: really ugly hack
(define (c-wrapper-ptr-value p)
(car (unpack ;; no pointer?!
(case (c-sizeof (ptr <c-void>))
[(8) "Q"]
[(4) "L"]
[else
(error "pointer size not supported")])
:from-string (u8vector->string (slot-ref (cast (ptr <c-void>) p) 'buffer)))))
(define (c-wrapper-ptr->cise-ptr p)
(gc)
(gc)
`(cast (void *) ,(string->symbol (format "0x~x" (c-wrapper-ptr-value p)))))
(define (osr-transform-cise fromp top . args)
(let-optionals* args ((transform-error '(Scm_Error "transform failed")))
(cond [(osr-is-same? fromp top)
identity]
[else
(assert (not (null-ptr? fromp)))
(assert (not (null-ptr? top)))
`((let* ((from::(static OGRSpatialReferenceH) NULL)
(to::(static OGRSpatialReferenceH) NULL)
(t::(static OGRCoordinateTransformationH) NULL))
(when (not t)
(set! from ,(c-wrapper-ptr->cise-ptr fromp))
(set! to ,(c-wrapper-ptr->cise-ptr top))
(set! t (OCTNewCoordinateTransformation from to))
(when (not t) (Scm_Error "failed to set up t")))
(let* ((z::double 0))
(when (not (OCTTransform t 1 (& x) (& y) (& z)))
todo : use Scm_Raise ?
)))])))
(define (gdal-get-projection-cise dataset . args)
(let-optionals* args ((transform-error '(Scm_Error "transform failed")))
(let ((hSRS (osr-from-dataset dataset)))
(if (osr-is-projected? hSRS)
(osr-transform-cise (OSRCloneGeogCS hSRS) hSRS transform-error)
identity)))) ;; (lambda(l) l))))
(define (get-rasterpos projection dataset)
(let* ((osr (osr-from-dataset dataset))
(f (compile-cise-function
'(x::<double> y::<double>)
(append (apply append (filter (lambda(f) (not (eq? f identity)))
(list
(if (not (string-null? projection))
(osr-transform-cise (osr-from-user-input projection)
(OSRCloneGeogCS osr))
identity)
(if (osr-is-geographic? osr)
;; todo: at the moment we can only get the
geographic bbox if the dataset osr is
;; geographic
(get-bbox-geo-wrap-cise (gdal-geographic-bbox dataset))
;; note: input always geographic!
(geo-wrap-cise))
(gdal-get-projection-cise dataset)
(gdal-get-geotransform-cise⁻¹ dataset))))
'((result (SCM_LIST2 (Scm_MakeFlonum x) (Scm_MakeFlonum y))))))))
(lambda(x y)
(guard (e
[else
;; todo: check it really is a transform error?!
(error <transform-error> :pos (list x y))])
(f x y)))))
(define raster-pos->4x4-box
`((set! tl_x (- (cast int (floor x)) 1))
(set! tl_y (- (cast int (floor y)) 1))
(set! br_x (+ tl_x 4))
(set! br_y (+ tl_y 4))))
(define raster-pos->2x2-box
`((set! tl_x (cast int (floor x)))
(set! tl_y (cast int (floor y)))
(set! br_x (+ tl_x 2))
(set! br_y (+ tl_y 2))))
(define raster-pos->1x1-box
;; rint to match round->exact
`((set! tl_x (cast int (rint x)))
(set! tl_y (cast int (rint y)))
(set! br_x (+ tl_x 1))
(set! br_y (+ tl_y 1))))
(define (get-rasterpos&bbox! projection dataset get-box width height)
(let* ((osr (osr-from-dataset dataset))
(f (compile-cise-function
'(x::<double> y::<double> rp::<f64vector> box::<s64vector>)
`((let* ((tl_x::int64_t)
(tl_y::int64_t)
(br_x::int64_t)
(br_y::int64_t))
. ,(append
;; `((Scm_Printf SCM_CURERR "huhu\n"))
(apply append (filter (lambda(f) (not (eq? f identity)))
(list
(if (not (string-null? projection))
(osr-transform-cise (osr-from-user-input projection)
(OSRCloneGeogCS osr)
'(return SCM_FALSE))
identity)
(if (osr-is-geographic? osr)
;; todo: at the moment we can only get the
geographic bbox if the dataset osr is
;; geographic
(get-bbox-geo-wrap-cise (gdal-geographic-bbox dataset))
;; note: input always geographic!
(geo-wrap-cise))
(gdal-get-projection-cise dataset '(return SCM_FALSE))
(gdal-get-geotransform-cise⁻¹ dataset))))
get-box
`((cond [(or (<= br_x 0) (>= tl_x ,width)
(<= br_y 0) (>= tl_y ,height))
(result SCM_FALSE)]
[else
(when (or (< (SCM_UVECTOR_SIZE rp) 2)
(< (SCM_UVECTOR_SIZE box) 4))
(Scm_Printf SCM_CURERR "abort\n")
(abort))
(set! (aref (SCM_F64VECTOR_ELEMENTS rp) 0) x)
(set! (aref (SCM_F64VECTOR_ELEMENTS rp) 1) y)
(set! (aref (SCM_S64VECTOR_ELEMENTS box) 0) tl_x)
(set! (aref (SCM_S64VECTOR_ELEMENTS box) 1) tl_y)
(set! (aref (SCM_S64VECTOR_ELEMENTS box) 2) br_x)
(set! (aref (SCM_S64VECTOR_ELEMENTS box) 3) br_y)
(result SCM_TRUE)]))))))))
(lambda(x y rp box)
(guard (e
[else
;; #?=e
(error <transform-error> :pos (list x y))])
(f x y rp box)))))
))
;; #t when OSR is a geographic (lon/lat) CS.  Also sanity-checks that
;; geographic and projected are mutually exclusive for this handle.
(define (osr-is-geographic? osr)
  (let1 r (c-int->bool (OSRIsGeographic osr))
    (assert (eq? r (not (osr-is-projected? osr))))
    r))
;; note: same as (not osr-is-geographic?)
;; #t when OSR is a projected CS.
(define (osr-is-projected? osr)
  (c-int->bool (OSRIsProjected osr)))
;; not used and not available in older versions
;; (define (osr-is-compound? osr)
;;   (c-int->bool (OSRIsCompound osr)))
;; Return a function mapping a geographic (lon lat ...) coordinate list
;; to the dataset's projected CS; identity when the dataset is already
;; geographic.
(define (gdal-get-projection dataset)
  (let ((hSRS (osr-from-dataset dataset)))
    (if (osr-is-projected? hSRS)
        (osr-transform (OSRCloneGeogCS hSRS) hSRS)
        identity))) ;; (lambda(l) l))))
;; Inverse of gdal-get-projection: map a coordinate in the dataset's
;; projected CS back to its geographic CS; identity when geographic.
(define (gdal-get-projection⁻¹ dataset)
  (let ((hSRS (osr-from-dataset dataset)))
    (if (osr-is-projected? hSRS)
        (osr-transform hSRS (OSRCloneGeogCS hSRS))
        identity))) ;; (lambda(l) l))))
;; Read the dataset's affine geotransform (6 doubles) and rearrange it
;; into a 3x3 homogeneous matrix mapping raster (col row 1) column
;; vectors to georeferenced coordinates.
(define (gdal-get-geotransform-matrix dataset)
  (let ((m (make (c-array <c-double> 6))))
    (GDALGetGeoTransform dataset (ptr m))
    (apply array (cons (shape 0 3 0 3)
                       (append (map (cut ref m <>) '(1 2 0))
                               (map (cut ref m <>) '(4 5 3))
                               '(0.0 0.0 1.0))))))
;; todo:
;; - gdal already should provide that, no?
;; - slow, but typically not called very often
;; Return a function from raster (x y) pixel coordinates to
;; georeferenced coordinates; the +0.5 column shifts sampling to the
;; pixel center.
(define (get-geotransform dataset)
  (let ((A (array-mul (gdal-get-geotransform-matrix dataset)
                      (array (shape 0 3 0 3)
                             1.0 0.0 0.5
                             0.0 1.0 0.5
                             0.0 0.0 1.0))))
    (lambda(l)
      (let1 r (array-mul A (array (shape 0 3 0 1) (ref l 0) (ref l 1) 1))
        (list (array-ref r 0 0) (array-ref r 1 0))))))
(define (gdal-open-band dataset band)
(let ((hband (GDALGetRasterBand dataset band))
;; (block-size-x (make <c-int>))
;; (block-size-y (make <c-int>))
;; (gotMin (make <c-int>))
;; (gotMax (make <c-int>))
( ( make ( c - array < c - double > 2 ) ) )
)
( GDALGetBlockSize hband ( ptr block - size - x ) ( ptr block - size - y ) )
( print # ` " Block=,(cast < number > block - size - x)x,(cast < number > block - size - y ) Type=,(GDALGetDataTypeName ( GDALGetRasterDataType hband ) ) , ColorInterp=,(GDALGetColorInterpretationName ( GDALGetRasterColorInterpretation hband ) ) " )
( set ! ( ref adfMinMax 0 ) ( ( ptr gotMin ) ) )
( set ! ( ref adfMinMax 1 ) ( GDALGetRasterMaximum hband ( ptr gotMax ) ) )
;; (when (not (and (c-int->bool gotMin) (c-int->bool gotMax)))
( hband TRUE ) )
( print # ` " Min=,(ref adfMinMax 0 ) , Max=,(ref 1 ) " )
( when ( < 0 ( GDALGetOverviewCount hband ) )
( print " Band has , ( GDALGetOverviewCount hband ) overviews . " ) )
;; (when (not (null-ptr? (GDALGetRasterColorTable hband)))
;; (print #`"Band has a color table with ,(GDALGetColorEntryCount (GDALGetRasterColorTable hband)) entries."))
hband))
;; Return the band's nodata value, or #f when none is set.
;; GDALGetRasterNoDataValue is called twice: the first call only probes
;; the success flag, the second fetches the actual value.
(define (gdal-band-nodata hband)
  (let ((gotNoData (make <c-int>)))
    (GDALGetRasterNoDataValue hband (ptr gotNoData))
    (and (c-int->bool gotNoData)
         (GDALGetRasterNoDataValue hband (ptr gotNoData)))))
;; Linear interpolation between c0 (at u=0) and c1 (at u=1).
(define (interp-linear u c0 c1)
  (+ (* u (- c1 c0)) c0))
;; Separable 2-D interpolation: interpolate each row of ROWS (a
;; sequence of f32vectors) at U with F, then interpolate the per-row
;; results at V with the same F.
(define (bi-interp u v f rows)
  (apply f
         (cons v
               (map (lambda(x)
                      (apply f
                             (cons u
                                   (f32vector->list (ref rows x)))))
                    (iota (size-of rows))))))
;; Bicubic interpolation over a 4x4 pixel neighborhood.
(define (interp-bicubic u v rows)
  (assert (= (size-of rows) 4))
  (bi-interp u v interp-cubic rows))
;; (benchmark 10000 (lambda _ (interp-bicubic 0.2 0.2 '(#f32(0 1 0 0) #f32(0 2 0 0) #f32(0 0 0 0) #f32(0 0 0 0)))))
;; Bilinear interpolation over a 2x2 pixel neighborhood.
(define (interp-bilinear u v rows)
  (assert (= (size-of rows) 2))
  (bi-interp u v interp-linear rows))
;; Fractional parts of a raster position: the within-pixel offsets,
;; each in [0,1), used as interpolation weights.
(define (raster-pos->uv x y)
  (values (- x (floor x))
          (- y (floor y))))
;; Register all GDAL drivers exactly once; returns #t on the first
;; call, #f on any subsequent call.
(define gdal-init
  (let1 called #f
    (lambda()
      (cond [(not called)
             (set! called #t)
             (GDALAllRegister)
             #t]
            [else
             #f]))))
;; Raster dimensions of DATASET as a (width height) list in pixels.
(define (gdal-raster-size dataset)
  (map x->number (list (GDALGetRasterXSize dataset) (GDALGetRasterYSize dataset))))
;; Geographic bounding box of DATASET (which must use a geographic CS)
;; as ((min-lon min-lat) (max-lon max-lat)), computed from the outer
;; corners of the corner pixels (+-1/2 around the pixel centers).
(define (gdal-geographic-bbox dataset)
  (let ((osr (osr-from-dataset dataset))
        (rsize (gdal-raster-size dataset)))
    (assert (osr-is-geographic? osr))
    (let* ((l1 (map (get-geotransform dataset)
                    (list '(-1/2 -1/2)
                          (map (cut - <> 1/2) rsize))))
           (l2 (append
                (receive lx (apply min&max (map car l1))
                  lx)
                (receive ly (apply min&max (map cadr l1))
                  ly))))
      (list (permute l2 '(0 2))
            (permute l2 '(1 3))))))
;; Map NaN (GDAL "no data" after replacement) to #f; pass any other
;; number through unchanged.
(define (nan-to-false n)
  (if (nan? n)
      #f
      n))
;; return function to get z value at position x y
;; (using coordinate system described by projection)
;; note: empty projection => input cs is _geographic cs_ of dataset
;; todo: maybe disallow empty value? or special symbols? 'geographic 'projected ?!
;;
;; keyword arguments:
;;   :next          fallback (lambda (x y) -> z) consulted where this
;;                  dataset has no valid data (default: always +nan.0)
;;   :interpolation one of 'bi-cubic (default), 'bi-linear, 'nearest
;;   :band          raster band to sample (default 1)
(define (dem->xy-project->z projection name . args)
  (let-keywords args ((next (lambda _ +nan.0))
                      (interpolation 'bi-cubic)
                      (band 1))
    (let* ((dataset (gdal-open-dataset name))
           (band (gdal-open-band dataset band)))
      (let ((width (GDALGetRasterBandXSize band))
            (height (GDALGetRasterBandYSize band))
            (osr (osr-from-dataset dataset)))
        (let1 xy->z (lambda(fi get-box box-width box-height)
                      (let ((rasterpos (get-rasterpos projection dataset)) ;; todo: get rid of rasterpos
                            (rp (make-f64vector 2))
                            (box (make-s64vector 4))
                            (rasterpos&bbox! (get-rasterpos&bbox! projection dataset get-box width height))
                            ;; todo:
                            ;; - only what I want if projection is a geographic cs?
                            ;; - slow, but typically not called very often
                            (rasterpos⁻¹ (apply compose
                                                (reverse
                                                 (filter (lambda(f) (not (eq? f identity)))
                                                         (list
                                                          (get-geotransform dataset)
                                                          (gdal-get-projection⁻¹ dataset)
                                                          (if (not (string-null? projection))
                                                              (osr-transform (OSRCloneGeogCS osr)
                                                                             (osr-from-user-input projection))
                                                              identity)
                                                          (cut subseq <> 0 2))))))
                            (read-row! (get-gdal-read-band-row! band (gdal-band-nodata band)))
                            (rows (map (lambda(y) (make-f32vector box-width)) (iota box-height)))
                            (geographic-dataset? (osr-is-geographic? osr)))
                        (let ((read-row (lambda(y xs xe)
                                          (let1 row (make-f32vector (- xe xs))
                                            (read-row! row y xs xe)
                                            row)))
                              ;; fill `rows` from the raster window in `box`;
                              ;; returns the number of nodata (nan) samples read
                              (read-box! (lambda()
                                           (let ((start (s64vector-ref box 0))
                                                 (end (s64vector-ref box 2))
                                                 (y (s64vector-ref box 1))
                                                 (r 0))
                                             (dotimes (idx box-height)
                                               (inc! r (read-row! (ref rows idx) (+ y idx) start end)))
                                             r)))
                              )
                          (let* ((read-pixel (lambda(x y)
                                               (let1 x (round->exact x)
                                                 (f32vector-ref (read-row (round->exact y) x (+ x 1)) 0))))
                                 ;; sample one pixel at a geographic position;
                                 ;; #f on transform failure or nodata
                                 (read-geo-pixel (lambda(x y)
                                                   (guard (e [(transform-error? e)
                                                              #f])
                                                     (nan-to-false (apply read-pixel (rasterpos x y)))))))
                            (lambda(x y)
                              (if (not (rasterpos&bbox! x y rp box))
                                  (next x y)
                                  (let ((nans (read-box!)))
                                    (cond [(= nans (* box-width box-height))
                                           ;; window is all nodata => delegate to fallback
                                           (next x y)]
                                          [(> nans 0)
                                           ;; try to replace
                                           ;; todo: maybe split into geographic and non-geographic case?
                                           (call/cc
                                            (lambda(break)
                                              (for-each-with-index
                                               (lambda(ry r)
                                                 (for-each-with-index
                                                  (lambda(rx v)
                                                    (when (nan? v)
                                                      (receive (cx cy)
                                                          (apply values (rasterpos⁻¹ (list (+ (s64vector-ref box 0) rx)
                                                                                           (+ (s64vector-ref box 1) ry))))
                                                        (if-let1 nv
                                                            (or (and geographic-dataset?
                                                                     (or (read-geo-pixel (+ cx 360.0) cy)
                                                                         (read-geo-pixel (- cx 360.0) cy)
                                                                         (and (or (> cy 90.0) (< cy -90.0))
                                                                              (read-geo-pixel cx cy))))
                                                                (nan-to-false (next cx cy)))
                                                            (set! (ref r rx) nv)
                                                            ;; failed to replace
                                                            (break (next x y))))))
                                                  r))
                                               rows)
                                              ;; (assert (not (any (cut find nan? <>) rows)))
                                              (receive (u v) (raster-pos->uv (f64vector-ref rp 0) (f64vector-ref rp 1))
                                                (fi u v rows))))]
                                          [else
                                           ;; (assert (zero? nans))
                                           (receive (u v) (raster-pos->uv (f64vector-ref rp 0) (f64vector-ref rp 1))
                                             (fi u v rows))]))))))))
          (case interpolation
            ((bi-cubic) (xy->z interp-bicubic raster-pos->4x4-box 4 4))
            ((bi-linear) (xy->z interp-bilinear raster-pos->2x2-box 2 2))
            ((nearest) (xy->z (lambda(u v rows) (ref* rows 0 0)) raster-pos->1x1-box 1 1))
            (else (error "Unknown interpolation:" interpolation))))))))
;; Like dem->xy-project->z, but the returned procedure also reports
;; sampling resolution and fallback depth:
;;   (lambda (x y [depth]) -> (values z res max-depth))
;; res is the geodesic (wgs84) pixel resolution of the data actually used
;; and max-depth the deepest :next fallback consulted for this sample.
(define (dem->xy-project->z-debug projection name . args)
  (let-keywords args ((next (lambda (x y depth) (values +nan.0 +nan.0 depth)))
                      (interpolation 'bi-cubic)
                      (band 1))
    (let* ((dataset (gdal-open-dataset name))
           (band (gdal-open-band dataset band)))
      (let ((width (GDALGetRasterBandXSize band))
            (height (GDALGetRasterBandYSize band))
            (osr (osr-from-dataset dataset)))
        (let1 xy->z-debug (lambda(fi get-box box-width box-height)
                            (let ((rasterpos (get-rasterpos projection dataset)) ;; todo: get rid of rasterpos
                                  (rp (make-f64vector 2))
                                  (box (make-s64vector 4))
                                  (rasterpos&bbox! (get-rasterpos&bbox! projection dataset get-box width height))
                                  ;; todo:
                                  ;; - only what I want if projection is a geographic cs?
                                  (rasterpos⁻¹ (apply compose
                                                      (reverse
                                                       (filter (lambda(f) (not (eq? f identity)))
                                                               (list
                                                                (get-geotransform dataset)
                                                                (gdal-get-projection⁻¹ dataset)
                                                                (if (not (string-null? projection))
                                                                    (osr-transform (OSRCloneGeogCS osr)
                                                                                   (osr-from-user-input projection))
                                                                    identity)
                                                                (cut subseq <> 0 2))))))
                                  (read-row! (get-gdal-read-band-row! band (gdal-band-nodata band)))
                                  (rows (map (lambda(y) (make-f32vector box-width)) (iota box-height)))
                                  (geographic-dataset? (osr-is-geographic? osr)))
                              (let ((read-row (lambda(y xs xe)
                                                (let1 row (make-f32vector (- xe xs))
                                                  (read-row! row y xs xe)
                                                  row)))
                                    ;; fill `rows` from the raster window in `box`;
                                    ;; returns the number of nodata (nan) samples read
                                    (read-box! (lambda()
                                                 (let ((start (s64vector-ref box 0))
                                                       (end (s64vector-ref box 2))
                                                       (y (s64vector-ref box 1))
                                                       (r 0))
                                                   (dotimes (idx box-height)
                                                     (inc! r (read-row! (ref rows idx) (+ y idx) start end)))
                                                   r)))
                                    )
                                (let* ((read-pixel (lambda(x y)
                                                     (let1 x (round->exact x)
                                                       (f32vector-ref (read-row (round->exact y) x (+ x 1)) 0))))
                                       ;; sample one pixel at a geographic position;
                                       ;; #f on transform failure or nodata
                                       (read-geo-pixel (lambda(x y)
                                                         (guard (e [(transform-error? e)
                                                                    #f])
                                                           (nan-to-false (apply read-pixel (rasterpos x y)))))))
                                  (lambda(x y :optional (depth 0))
                                    (if (not (rasterpos&bbox! x y rp box))
                                        (next x y (+ depth 1))
                                        (let* ((nans (read-box!))
                                               ;; pixel resolution at the sample position, via geodesic
                                               ;; distance between horizontally/vertically adjacent pixels
                                               (c (map floor (list (f64vector-ref rp 0) (f64vector-ref rp 1))))
                                               (xres (geod-distance 'wgs84 (rasterpos⁻¹ c) (rasterpos⁻¹ (map + c '(1 0)))))
                                               (yres (geod-distance 'wgs84 (rasterpos⁻¹ c) (rasterpos⁻¹ (map + c '(0 1)))))
                                               (res (max xres yres))
                                               (max-depth depth))
                                          (cond [(= nans (* box-width box-height))
                                                 ;; window is all nodata => delegate to fallback
                                                 (next x y (+ depth 1))]
                                                [(> nans 0)
                                                 ;; try to replace
                                                 ;; todo: maybe split into geographic and non-geographic case?
                                                 (call/cc
                                                  (lambda(break)
                                                    (for-each-with-index
                                                     (lambda(ry r)
                                                       (for-each-with-index
                                                        (lambda(rx v)
                                                          (when (nan? v)
                                                            (receive (cx cy)
                                                                (apply values (rasterpos⁻¹ (list (+ (s64vector-ref box 0) rx)
                                                                                                 (+ (s64vector-ref box 1) ry))))
                                                              (let1 nv (and geographic-dataset?
                                                                            (or (read-geo-pixel (+ cx 360.0) cy)
                                                                                (read-geo-pixel (- cx 360.0) cy)
                                                                                (and (or (> cy 90.0) (< cy -90.0))
                                                                                     (read-geo-pixel cx cy))))
                                                                (if nv
                                                                    (set! (ref r rx) nv)
                                                                    (let1 next-value (values->list (next cx cy (+ depth 1)))
                                                                      (cond [(nan-to-false (car next-value))
                                                                             (set! (ref r rx) (car next-value))
                                                                             (set! res (max res (cadr next-value)))
                                                                             (set! max-depth (max max-depth (caddr next-value)))]
                                                                            [else
                                                                             ;; failed to replace
                                                                             (break (next x y (+ depth 1)))])))))))
                                                        r))
                                                     rows)
                                                    ;; (assert (not (any (cut find nan? <>) rows)))
                                                    (receive (u v) (raster-pos->uv (f64vector-ref rp 0) (f64vector-ref rp 1))
                                                      (values (fi u v rows)
                                                              res
                                                              max-depth))))]
                                                [else
                                                 ;; (assert (zero? nans))
                                                 (receive (u v) (raster-pos->uv (f64vector-ref rp 0) (f64vector-ref rp 1))
                                                   (values (fi u v rows)
                                                           res
                                                           max-depth))]))))))))
          (case interpolation
            ((bi-cubic) (xy->z-debug interp-bicubic raster-pos->4x4-box 4 4))
            ((bi-linear) (xy->z-debug interp-bilinear raster-pos->2x2-box 2 2))
            ((nearest) (xy->z-debug (lambda(u v rows) (ref* rows 0 0)) raster-pos->1x1-box 1 1))
            (else (error "Unknown interpolation:" interpolation))))))))
;; return function to get z value at position x y (using coordinate system of the dataset)
;; — convenience wrapper around dem->xy-project->z with an empty projection.
(define (dem->xy->z name . args)
  (apply dem->xy-project->z "" name args))
;; Truthy iff `key` occurs in `kv-list`, even when its value is #f
;; (plain get-keyword cannot distinguish a #f value from absence).
(define (keyword-exists? key kv-list)
  (cond [(get-keyword key kv-list #f)] ;; present with a truthy value
        [else
         ;; present with a falsy value iff the default 1 does not come back
         (not (equal? 1 (get-keyword key kv-list 1)))]))
;; Combine a list of dem specs (each: (name . keyword-args)) into one
;; (lambda (x y) -> z).  Earlier entries in dem-stack take precedence;
;; later entries are chained in as :next fallbacks.
(define (dem-stack->xy->z projection dem-stack)
  (let* ((specs (reverse dem-stack))
         (chain (lambda (spec fallback)
                  ;; note: maybe we should use delete-keyword on spec instead
                  ;; of assuming let-keywords takes the last value
                  ;; even better: throw an error if there is a next keyword!
                  (when (keyword-exists? :next (cdr spec))
                    (error ":next only allowed in last element"))
                  (apply dem->xy-project->z
                         (cons projection (append spec (list :next fallback)))))))
    (fold chain
          (apply dem->xy-project->z (cons projection (car specs)))
          (cdr specs))))
;; Debug variant of dem-stack->xy->z: builds the same precedence chain
;; from the debug samplers (which also report resolution and depth).
(define (dem-stack->xy->z-debug projection dem-stack)
  (let* ((specs (reverse dem-stack))
         (chain (lambda (spec fallback)
                  ;; note: maybe we should use delete-keyword on spec instead
                  ;; of assuming let-keywords takes the last value
                  ;; even better: throw an error if there is a next keyword!
                  (when (keyword-exists? :next (cdr spec))
                    (error ":next only allowed in last element"))
                  (apply dem->xy-project->z-debug
                         (cons projection (append spec (list :next fallback)))))))
    (fold chain
          (apply dem->xy-project->z-debug (cons projection (car specs)))
          (cdr specs))))
| null | https://raw.githubusercontent.com/Outdooractive/elevation-profile/5e60f0d321af6fcc609727a96ff9a980153d6421/dem-gdal.scm | scheme |
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions
are met:
1. Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
2. Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
3. Neither the name of the authors nor the names of its contributors
may be used to endorse or promote products derived from this
software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
notes/todo:
- quite a hack
- get rid of c-wrapper / speedup
- leaks memory => call procedures only once if possible!
see also:
(upload to gl texture ...)
- use GDAL_CACHEMAX?
use after srfi-1 to make find work as expected!
(use sxml.adaptor)
todo: hmm
(print #`"Projection is ',(GDALGetProjectionRef dataset)'")
#?=(map (cut cast <number> <>) transform)
todo: leak!
todo:
- gdal already should provide that, no?
- slow
(rfill (- end rend))
taken from grass (interp.c)
return (u * (u * (u * (c3 + -3 * c2 + 3 * c1 - c0) +
todo: improve
todo: improve / or maybe just clip?!
(assert (list? raster-pos))
(assert (list? raster-pos))
just for readability
todo: at the moment we can only get the
geographic
note: input always geographic!
#?=rp
todo: c-level assert?!
(rfill (- end rend))
#?=e
NxM * MxP => NxP
for-each row of a
for-each col of b
for-each col of a & row of b
todo: use macro?!
todo:
- gdal already should provide that, no?
:fast-flonum :constant
todo: improve
todo: really ugly hack
no pointer?!
(lambda(l) l))))
todo: at the moment we can only get the
geographic
note: input always geographic!
todo: check it really is a transform error?!
rint to match round->exact
`((Scm_Printf SCM_CURERR "huhu\n"))
todo: at the moment we can only get the
geographic
note: input always geographic!
#?=e
note: same as (not osr-is-geographic?)
(define (osr-is-compound? osr)
(lambda(l) l))))
(lambda(l) l))))
todo:
- gdal already should provide that, no?
- slow, but typically not called very often
(block-size-x (make <c-int>))
(block-size-y (make <c-int>))
(gotMin (make <c-int>))
(gotMax (make <c-int>))
(when (not (and (c-int->bool gotMin) (c-int->bool gotMax)))
(when (not (null-ptr? (GDALGetRasterColorTable hband)))
(print #`"Band has a color table with ,(GDALGetColorEntryCount (GDALGetRasterColorTable hband)) entries."))
return function to get z value at position x y
(using coordinate system described by projection)
note: empty projection => input cs is _geographic cs_ of dataset
todo: maybe disallow empty value? or special symbols? 'geographic 'projected ?!
todo: get rid of rasterpos
todo:
- only what I want if projection is a geographic cs?
- slow, but typically not called very often
todo: maybe split into geographic and non-geographic case?
todo: get rid of rasterpos
todo:
- only what I want if projection is a geographic cs?
todo: maybe split into geographic and non-geographic case?
return function to get z value at position x y (using coordinate system of the dataset)
note: maybe we should use delete-keyword on n instead
of assuming let-keywords takes the last value
even better: throw an error if there is a next keyword!
there is no such thing as keyword-exists?
note: maybe we should use delete-keyword on n instead
of assuming let-keywords takes the last value
even better: throw an error if there is a next keyword!
there is no such thing as keyword-exists? | dem ( digital elevation model ) via ( )
Copyright ( c ) 2012 < >
" AS IS " AND ANY EXPRESS OR IMPLIED WARRANTIES , INCLUDING , BUT NOT
SPECIAL , EXEMPLARY , OR CONSEQUENTIAL DAMAGES ( INCLUDING , BUT NOT LIMITED
LIABILITY , WHETHER IN CONTRACT , STRICT LIABILITY , OR TORT ( INCLUDING
- you can use 's vrt format to merge images
and in general ( WMS , .... )
- a more general purpose wrapper would be nice
(define-module dem-gdal
(use srfi-1)
(use gauche.sequence)
(use srfi-13)
(use c-wrapper)
(use gauche.array)
(use gauche.uvector)
(use gauche.process)
(use runtime-compile)
(use binary.pack)
(use geod)
(export dem->xy->z
dem->xy-project->z
dem->xy-project->z-debug
dem-stack->xy->z
dem-stack->xy->z-debug
))
(select-module dem-gdal)
(c-load '("gdal/gdal.h" "gdal/ogr_srs_api.h")
:libs-cmd "gdal-config --libs"
:compiled-lib "gdal"
:cflags "-O2")
(CPLSetErrorHandler 0)
(define-macro (assert e)
`(when (not ,e)
(error "assertion failed: " ,(x->string e))))
(define (gdal-open-dataset name)
(assert (string? name))
(with-output-to-port (current-error-port)
(lambda()
(gdal-init)
(let ((dataset (GDALOpen name GA_ReadOnly)))
(cond [(not (null-ptr? dataset))
(let ((driver (GDALGetDatasetDriver dataset)))
( print # ` " Driver , ( GDALGetDriverShortName driver)/,(GDALGetDriverLongName driver ) " )
( print # ` " Size is , ( GDALGetRasterXSize dataset)x,(GDALGetRasterYSize dataset)x,(GDALGetRasterCount dataset ) " )
(when (not (null-ptr? (GDALGetProjectionRef dataset)))
(let ((transform (make (c-array <c-double> 6))))
(when (= (GDALGetGeoTransform dataset transform) CE_None)
( print # ` " Origin = , ( ref transform 0 ) , , ( ref transform 3 ) " )
( print # ` " Pixel Size = , ( ref transform 1 ) , , ( ref transform 5 ) " )
#t))))
dataset]
[else
(error "Unsupported format")])))))
(define (osr-from-user-input s)
(when (not (= (OSRSetFromUserInput hSRS s) OGRERR_NONE))
(error "OSRSetFromUserInput failed"))
hSRS))
(define (osr-from-dataset dataset)
(let ((hSRS (OSRNewSpatialReference NULL)))
(when (not (= (OSRImportFromWkt hSRS (ptr (GDALGetProjectionRef dataset))) OGRERR_NONE))
(error "OSRImportFromWkt failed"))
hSRS))
(define (c-int->bool x)
(not (zero? (cast <number> x))))
(define (osr-is-same? from to)
(c-int->bool (OSRIsSame from to)))
(define-condition-type <transform-error> <error>
transform-error?
(pos transform-error-pos))
(cond-expand
(no-runtime-compile
(define (osr-transform from to)
(if (osr-is-same? from to)
identity
(let ((ct (OCTNewCoordinateTransformation from to))
(xa (make (c-array <c-double> 1)))
(ya (make (c-array <c-double> 1)))
(za (make (c-array <c-double> 1))))
(assert (not (null-ptr? ct)))
(lambda(l)
(set! (ref xa 0) (ref l 0))
(set! (ref ya 0) (ref l 1))
(set! (ref za 0) (ref l 2 0))
(when (not (c-int->bool (OCTTransform ct 1 xa ya za)))
(error <transform-error> :pos l))
(list (ref xa 0) (ref ya 0) (ref za 0))))))
(define (gdal-get-geotransform⁻¹ dataset)
(let1 A (array-inverse (array-mul (gdal-get-geotransform-matrix dataset)
(array (shape 0 3 0 3)
1.0 0.0 0.5
0.0 1.0 0.5
0.0 0.0 1.0)))
(lambda(l)
(let1 r (array-mul A (array (shape 0 3 0 1) (ref l 0) (ref l 1) 1))
(list (array-ref r 0 0) (array-ref r 1 0))))))
(define (f32vector-replace! vec from to)
(let1 s (f32vector-length vec)
(dotimes (i s)
(when (= (f32vector-ref vec i) from)
(f32vector-set! vec i to))))
vec)
(define (get-gdal-read-band-row! band nodata)
(let ((xsize (GDALGetRasterBandXSize band))
(ysize (GDALGetRasterBandYSize band)))
(lambda(scanline row . args)
(let-optionals* args ((start 0)
(end xsize))
(assert (<= start end))
(let1 count (- end start)
(assert (>= (size-of scanline) count))
(f32vector-fill! scanline +nan.0)
(cond [(and (> count 0)
(>= row 0)
(< row ysize))
(let ((rstart (max 0 start))
(rend (min end xsize)))
(let ((lfill (- rstart start))
(rcount (- rend rstart)))
(when (and (> rcount 0)
(not (zero? (GDALRasterIO band GF_Read rstart row rcount 1
(c-ptr+ (cast (ptr <c-float>) scanline) lfill)
rcount 1 GDT_Float32 0 0))))
(error "todo"))
(assert (or (boolean? nodata) (number? nodata)))
replace with
(when nodata
(f32vector-replace! scanline nodata +nan.0))
count
(let ((s (f32vector-length scanline))
(r 0))
(dotimes (i s)
(when (nan? (f32vector-ref scanline i))
(inc! r)))
r)))]
[else
(f32vector-length scanline)]))))))
(define (interp-cubic u c0 c1 c2 c3)
(/ (+ (* u (+ (* u (+ (* u (+ c3 (* -3 c2) (* 3 c1) (- c0)))
(- c3)
(* 4 c2)
(* -5 c1)
(* 2 c0)))
c2
(- c0)))
(* 2 c1))
2))
(define (mod4 x m minx maxx)
(cond [(and (< x minx)
(or (>= (- maxx minx) m)
(<= (+ x m) maxx)))
(mod4 (+ x m) m minx maxx)]
[(and (> x maxx)
(or (>= (- maxx minx) m)
(>= (- x m) minx)))
(mod4 (- x m) m minx maxx)]
[else
x]))
(define wrap-long-to (cut mod4 <> 360 <> <>))
(define (wrap-lat x y . l)
(cond [(< y -90)
(apply wrap-lat (append (list (+ x 180) (- -180 y))
l))]
[(> y 90)
(apply wrap-lat (append (list (+ x 180) (- 180 y))
l))]
[else
(append (list x y) l)]))
(define (get-bbox-geo-wrap geobox)
(lambda(xy)
(let1 xy (apply wrap-lat xy)
(list (wrap-long-to (car xy)
(ref* geobox 0 0)
(ref* geobox 1 0))
(cadr xy)))))
(define (geo-wrap xy)
(let1 xy (apply wrap-lat xy)
note : ( fmod ( car xy ) 360 ) ca n't be expressed using wrap - long - to :(
(list (fmod (car xy) 360)
(cadr xy))))
(define (raster-pos->4x4-box raster-pos)
(let1 tl (map (lambda(x) (- (floor->exact x) 1)) raster-pos)
(list tl (map (cut + <> 4) tl))))
(define (raster-pos->2x2-box raster-pos)
(let1 tl (map floor->exact raster-pos)
(list tl (map (cut + <> 2) tl))))
(define (raster-pos->1x1-box raster-pos)
(let1 tl (map round->exact raster-pos)
(list tl (map (cut + <> 1) tl))))
(define (get-rasterpos projection dataset)
(let1 osr (osr-from-dataset dataset)
(let1 f (apply compose
(filter (lambda(f) (not (eq? f identity)))
(list
(if (not (string-null? projection))
(osr-transform (osr-from-user-input projection)
(OSRCloneGeogCS osr))
identity)
(if (osr-is-geographic? osr)
geographic bbox if the dataset osr is
(get-bbox-geo-wrap (gdal-geographic-bbox dataset))
geo-wrap)
(gdal-get-projection dataset)
(gdal-get-geotransform⁻¹ dataset)))))
(lambda(x y)
(f (list x y))))))
(define (get-rasterpos&bbox! projection dataset get-box width height)
(let1 rasterpos (get-rasterpos projection dataset)
(lambda(x y rp box)
(guard (e [(transform-error? e)
#f])
(let* ((rp2 (rasterpos x y))
(box2 (get-box rp2)))
(cond [(or (<= (caadr box2) 0) (>= (caar box2) width)
(<= (cadadr box2) 0) (>= (cadar box2) height))
#f]
[else
(set! (ref rp 0) (car rp2))
(set! (ref rp 1) (cadr rp2))
# ? = box2
(set! (ref box 0) (car (car box2)))
(set! (ref box 1) (cadr (car box2)))
(set! (ref box 2) (car (cadr box2)))
(set! (ref box 3) (cadr (cadr box2)))
#t]))))))
)
(else
(compile-and-load
`((inline-stub
(declcode
(.include "gauche/uvector.h")
(.include "gdal/gdal.h")
(.include "gdal/ogr_srs_api.h")
"static ScmClass *osrn_transform_class = NULL;"
"/* stolen from cwcompile output */
static void cw_unbox(void *dest, ScmObj obj, size_t size)
{
if (!bufferof_proc) {
}
}"
)
(define-cproc make-osrn-transform (fromp top)
(let* ((from::OGRSpatialReferenceH NULL)
(to::OGRSpatialReferenceH NULL))
(cw_unbox (& from) fromp (sizeof OGRSpatialReferenceH))
(cw_unbox (& to) top (sizeof OGRSpatialReferenceH))
(when (not from)
(Scm_Error "failed to set from"))
(when (not to)
(Scm_Error "failed to set to"))
(return (Scm_MakeForeignPointer osrn_transform_class (OCTNewCoordinateTransformation from to)))))
(define-cproc osrn-apply-transform (it x::<double> y::<double>)
(unless (SCM_XTYPEP it osrn_transform_class) (SCM_TYPE_ERROR it "<osrn:transform>"))
(let* ((t::OGRCoordinateTransformationH (SCM_FOREIGN_POINTER_REF OGRCoordinateTransformationH it))
(xr::double x)
(yr::double y)
(zr::double 0))
(when (not (OCTTransform t 1 (& xr) (& yr) (& zr)))
todo : use Scm_Raise ?
(result (SCM_LIST3 (Scm_MakeFlonum xr)
(Scm_MakeFlonum yr)
(Scm_MakeFlonum zr)))))
(define-cfn osrn-transform-cleanup (h) ::void :static
(OCTDestroyCoordinateTransformation (SCM_FOREIGN_POINTER_REF OGRCoordinateTransformationH h)))
(define-cfn osrn-transform-print (h p::ScmPort* c::ScmWriteContext*) ::void :static
(Scm_Printf p "#<osrn:transform @%p->%p>" h (SCM_FOREIGN_POINTER_REF OGRCoordinateTransformationH h)))
(define-cproc c-gdal-read-band-row!
(bandp nodata xsize::<int> ysize::<int> scanline::<f32vector> row::<int> start::<int> end::<int>)
(let* ((band::GDALRasterBandH NULL))
(cw_unbox (& band) bandp (sizeof GDALRasterBandH))
(let* ((count::int (- end start)))
(unless (>= (SCM_UVECTOR_SIZE scanline) count) (Scm_Error "(>= (SCM_UVECTOR_SIZE scanline) count)"))
(Scm_F32VectorFill scanline NAN 0 (SCM_UVECTOR_SIZE scanline))
(cond [(and (> count 0)
(>= row 0)
(< row ysize))
(let* ((rstart::int (?: (< start 0) 0 start))
(rend::int (?: (< end xsize) end xsize))
(lfill::int (- rstart start))
(rcount::int (- rend rstart)))
(when (and (> rcount 0)
(not (== (GDALRasterIO band GF_Read rstart row rcount 1
(+ (SCM_F32VECTOR_ELEMENTS scanline) lfill)
rcount 1 GDT_Float32 0 0)
0)))
(Scm_Error "todo"))
(let* ((r::int 0)
(i::int 0))
replace with
(unless (or (SCM_BOOLP nodata) (SCM_FLONUMP nodata))
(Scm_Error "(or (SCM_BOOLP nodata) (SCM_FLONUMP nodata))"))
(when (and (not (SCM_BOOLP nodata))
(SCM_FLONUMP nodata))
(for [(set! i 0) (< i (SCM_UVECTOR_SIZE scanline)) (pre++ i)]
(when (== (aref (SCM_F32VECTOR_ELEMENTS scanline) i) (SCM_FLONUM_VALUE nodata))
(set! (aref (SCM_F32VECTOR_ELEMENTS scanline) i) NAN))))
count
(for [(set! i 0) (< i (SCM_UVECTOR_SIZE scanline)) (pre++ i)]
(when (isnan (aref (SCM_F32VECTOR_ELEMENTS scanline) i))
(pre++ r)))
(result (SCM_MAKE_INT r))))]
[else
(result (SCM_MAKE_INT (SCM_UVECTOR_SIZE scanline)))]))))
(initcode (= osrn_transform_class (Scm_MakeForeignPointerClass
(Scm_CurrentModule)
"<osrn:transform>" osrn-transform-print osrn-transform-cleanup
SCM_FOREIGN_POINTER_KEEP_IDENTITY)))
))
'(make-osrn-transform osrn-apply-transform c-gdal-read-band-row!)
:libs (process-output->string "gdal-config --libs"))
(define (osr-transform from to)
(if (osr-is-same? from to)
identity
(let1 fp (make-osrn-transform from to)
(lambda(l)
(guard (e [else
(error <transform-error> :pos l)])
(osrn-apply-transform fp (car l) (cadr l)))))))
(with-module gauche.array
(let ([a-start (start-vector-of a)]
[a-end (end-vector-of a)]
[b-start (start-vector-of b)]
[b-end (end-vector-of b)])
(unless (= 2 (s32vector-length a-start) (s32vector-length b-start))
(error "array-mul matrices must be of rank 2"))
(let* ([a-start-row (s32vector-ref a-start 0)]
[a-end-row (s32vector-ref a-end 0)]
[a-start-col (s32vector-ref a-start 1)]
[a-end-col (s32vector-ref a-end 1)]
[b-start-col (s32vector-ref b-start 1)]
[b-end-col (s32vector-ref b-end 1)]
[n (- a-end-row a-start-row)]
[m (- a-end-col a-start-col)]
[p (- b-end-col b-start-col)]
[a-col-b-row-off (- a-start-col (s32vector-ref b-start 0))]
[res (make-minimal-backend-array (list a b) (shape 0 n 0 p))])
(unless (= m (- (s32vector-ref b-end 0) (s32vector-ref b-start 0)))
(errorf "dimension mismatch: can't mul shapes ~S and ~S"
(array-shape a) (array-shape b)))
[(= i a-end-row) res]
[(= k b-end-col)]
(let1 tmp (list '+)
[(= j a-end-col)]
(append! tmp (list (list '* (array-ref a i j) (array-ref b (- j a-col-b-row-off) k)))))
(array-set! res (- i a-start-row) (- k b-start-col) tmp)))))))
(export symbolic-array-mul)
)
(define (compile-cise-function args body)
(let1 mod (compile-and-load
`((inline-stub
(declcode
(.include "gdal/gdal.h")
(.include "gdal/ogr_srs_api.h"))
(define-cproc foo ,args . ,body)))
`()
:libs (process-output->string "gdal-config --libs"))
(global-variable-ref mod 'foo)))
(define (gdal-get-geotransform-cise⁻¹ dataset)
(let* ((A (array-inverse (array-mul (gdal-get-geotransform-matrix dataset)
(array (shape 0 3 0 3)
1.0 0.0 0.5
0.0 1.0 0.5
0.0 0.0 1.0))))
(sr (symbolic-array-mul A (array (shape 0 3 0 1) 'x 'y 1))))
`((set! x ,(array-ref sr 0 0))
(set! y ,(array-ref sr 1 0)))))
(define (gdal-get-geotransform⁻¹ dataset)
(let1 nf (compile-cise-function '(x::<double> y::<double>)
(append (gdal-get-geotransform-cise⁻¹ dataset)
`((return (SCM_LIST2 (Scm_MakeFlonum x) (Scm_MakeFlonum y))))))
(lambda(l)
(nf (ref l 0) (ref l 1)))))
(define (get-gdal-read-band-row! band nodata)
(let ((xsize (GDALGetRasterBandXSize band))
(ysize (GDALGetRasterBandYSize band)))
(lambda(scanline row . args)
(let-optionals* args ((start 0)
(end xsize))
(c-gdal-read-band-row! band nodata xsize ysize scanline row start end)))))
(compile-and-load
`((inline-stub
(define-cproc interp-cubic (u::<double> c0::<double> c1::<double> c2::<double> c3::<double>)
(result (Scm_MakeFlonum (/ (+ (* u (+ (* u (+ (* u (+ c3 (* -3 c2) (* 3 c1) (- c0)))
(- c3)
(* 4 c2)
(* -5 c1)
(* 2 c0)))
c2
(- c0)))
(* 2 c1))
2))))))
'(interp-cubic))
(define (bbox-geo-wrap-cise-2 minx maxx)
`((while 1
(cond [(< y -90)
(+= x 180)
(set! y (- -180 y))]
[(> y 90)
(+= x 180)
(set! y (- 180 y))]
[else
(break)]))
(while 1
(cond [(and (< x ,minx)
(or (>= ,(- maxx minx) 360)
(<= (+ x 360) ,maxx)))
(+= x 360)]
[(and (> x ,maxx)
(or (>= ,(- maxx minx) 360)
(>= (- x 360) ,minx)))
(-= x 360)]
[else
(break)]))))
(define (get-bbox-geo-wrap-cise geobox)
(bbox-geo-wrap-cise-2 (ref* geobox 0 0) (ref* geobox 1 0)))
(define (get-bbox-geo-wrap geobox)
(let1 f (compile-cise-function '(x::<double> y::<double>)
(append
(get-bbox-geo-wrap-cise geobox)
'((result (SCM_LIST2 (Scm_MakeFlonum x) (Scm_MakeFlonum y))))))
(lambda(xy)
(f (car xy) (cadr xy)))))
(define (geo-wrap-cise)
'((while 1
(cond [(< y -90)
(+= x 180)
(set! y (- -180 y))]
[(> y 90)
(+= x 180)
(set! y (- 180 y))]
[else
(break)]))
(set! x (fmod x 360))))
(define (geo-wrap)
(let1 f (compile-cise-function '(x::<double> y::<double>)
(append
(geo-wrap-cise)
'((result (SCM_LIST2 (Scm_MakeFlonum x) (Scm_MakeFlonum y))))))
(lambda(xy)
(f (car xy) (cadr xy)))))
(define (c-wrapper-ptr-value p)
(case (c-sizeof (ptr <c-void>))
[(8) "Q"]
[(4) "L"]
[else
(error "pointer size not supported")])
:from-string (u8vector->string (slot-ref (cast (ptr <c-void>) p) 'buffer)))))
(define (c-wrapper-ptr->cise-ptr p)
(gc)
(gc)
`(cast (void *) ,(string->symbol (format "0x~x" (c-wrapper-ptr-value p)))))
(define (osr-transform-cise fromp top . args)
(let-optionals* args ((transform-error '(Scm_Error "transform failed")))
(cond [(osr-is-same? fromp top)
identity]
[else
(assert (not (null-ptr? fromp)))
(assert (not (null-ptr? top)))
`((let* ((from::(static OGRSpatialReferenceH) NULL)
(to::(static OGRSpatialReferenceH) NULL)
(t::(static OGRCoordinateTransformationH) NULL))
(when (not t)
(set! from ,(c-wrapper-ptr->cise-ptr fromp))
(set! to ,(c-wrapper-ptr->cise-ptr top))
(set! t (OCTNewCoordinateTransformation from to))
(when (not t) (Scm_Error "failed to set up t")))
(let* ((z::double 0))
(when (not (OCTTransform t 1 (& x) (& y) (& z)))
todo : use Scm_Raise ?
)))])))
(define (gdal-get-projection-cise dataset . args)
(let-optionals* args ((transform-error '(Scm_Error "transform failed")))
(let ((hSRS (osr-from-dataset dataset)))
(if (osr-is-projected? hSRS)
(osr-transform-cise (OSRCloneGeogCS hSRS) hSRS transform-error)
(define (get-rasterpos projection dataset)
(let* ((osr (osr-from-dataset dataset))
(f (compile-cise-function
'(x::<double> y::<double>)
(append (apply append (filter (lambda(f) (not (eq? f identity)))
(list
(if (not (string-null? projection))
(osr-transform-cise (osr-from-user-input projection)
(OSRCloneGeogCS osr))
identity)
(if (osr-is-geographic? osr)
geographic bbox if the dataset osr is
(get-bbox-geo-wrap-cise (gdal-geographic-bbox dataset))
(geo-wrap-cise))
(gdal-get-projection-cise dataset)
(gdal-get-geotransform-cise⁻¹ dataset))))
'((result (SCM_LIST2 (Scm_MakeFlonum x) (Scm_MakeFlonum y))))))))
(lambda(x y)
(guard (e
[else
(error <transform-error> :pos (list x y))])
(f x y)))))
(define raster-pos->4x4-box
`((set! tl_x (- (cast int (floor x)) 1))
(set! tl_y (- (cast int (floor y)) 1))
(set! br_x (+ tl_x 4))
(set! br_y (+ tl_y 4))))
(define raster-pos->2x2-box
`((set! tl_x (cast int (floor x)))
(set! tl_y (cast int (floor y)))
(set! br_x (+ tl_x 2))
(set! br_y (+ tl_y 2))))
(define raster-pos->1x1-box
`((set! tl_x (cast int (rint x)))
(set! tl_y (cast int (rint y)))
(set! br_x (+ tl_x 1))
(set! br_y (+ tl_y 1))))
(define (get-rasterpos&bbox! projection dataset get-box width height)
(let* ((osr (osr-from-dataset dataset))
(f (compile-cise-function
'(x::<double> y::<double> rp::<f64vector> box::<s64vector>)
`((let* ((tl_x::int64_t)
(tl_y::int64_t)
(br_x::int64_t)
(br_y::int64_t))
. ,(append
(apply append (filter (lambda(f) (not (eq? f identity)))
(list
(if (not (string-null? projection))
(osr-transform-cise (osr-from-user-input projection)
(OSRCloneGeogCS osr)
'(return SCM_FALSE))
identity)
(if (osr-is-geographic? osr)
geographic bbox if the dataset osr is
(get-bbox-geo-wrap-cise (gdal-geographic-bbox dataset))
(geo-wrap-cise))
(gdal-get-projection-cise dataset '(return SCM_FALSE))
(gdal-get-geotransform-cise⁻¹ dataset))))
get-box
`((cond [(or (<= br_x 0) (>= tl_x ,width)
(<= br_y 0) (>= tl_y ,height))
(result SCM_FALSE)]
[else
(when (or (< (SCM_UVECTOR_SIZE rp) 2)
(< (SCM_UVECTOR_SIZE box) 4))
(Scm_Printf SCM_CURERR "abort\n")
(abort))
(set! (aref (SCM_F64VECTOR_ELEMENTS rp) 0) x)
(set! (aref (SCM_F64VECTOR_ELEMENTS rp) 1) y)
(set! (aref (SCM_S64VECTOR_ELEMENTS box) 0) tl_x)
(set! (aref (SCM_S64VECTOR_ELEMENTS box) 1) tl_y)
(set! (aref (SCM_S64VECTOR_ELEMENTS box) 2) br_x)
(set! (aref (SCM_S64VECTOR_ELEMENTS box) 3) br_y)
(result SCM_TRUE)]))))))))
(lambda(x y rp box)
(guard (e
[else
(error <transform-error> :pos (list x y))])
(f x y rp box)))))
))
(define (osr-is-geographic? osr)
(let1 r (c-int->bool (OSRIsGeographic osr))
(assert (eq? r (not (osr-is-projected? osr))))
r))
(define (osr-is-projected? osr)
(c-int->bool (OSRIsProjected osr)))
not used and not available in older versions
( c - int->bool ( osr ) ) )
(define (gdal-get-projection dataset)
(let ((hSRS (osr-from-dataset dataset)))
(if (osr-is-projected? hSRS)
(osr-transform (OSRCloneGeogCS hSRS) hSRS)
(define (gdal-get-projection⁻¹ dataset)
(let ((hSRS (osr-from-dataset dataset)))
(if (osr-is-projected? hSRS)
(osr-transform hSRS (OSRCloneGeogCS hSRS))
(define (gdal-get-geotransform-matrix dataset)
(let ((m (make (c-array <c-double> 6))))
(GDALGetGeoTransform dataset (ptr m))
(apply array (cons (shape 0 3 0 3)
(append (map (cut ref m <>) '(1 2 0))
(map (cut ref m <>) '(4 5 3))
'(0.0 0.0 1.0))))))
(define (get-geotransform dataset)
(let ((A (array-mul (gdal-get-geotransform-matrix dataset)
(array (shape 0 3 0 3)
1.0 0.0 0.5
0.0 1.0 0.5
0.0 0.0 1.0))))
(lambda(l)
(let1 r (array-mul A (array (shape 0 3 0 1) (ref l 0) (ref l 1) 1))
(list (array-ref r 0 0) (array-ref r 1 0))))))
(define (gdal-open-band dataset band)
(let ((hband (GDALGetRasterBand dataset band))
( ( make ( c - array < c - double > 2 ) ) )
)
( GDALGetBlockSize hband ( ptr block - size - x ) ( ptr block - size - y ) )
( print # ` " Block=,(cast < number > block - size - x)x,(cast < number > block - size - y ) Type=,(GDALGetDataTypeName ( GDALGetRasterDataType hband ) ) , ColorInterp=,(GDALGetColorInterpretationName ( GDALGetRasterColorInterpretation hband ) ) " )
( set ! ( ref adfMinMax 0 ) ( ( ptr gotMin ) ) )
( set ! ( ref adfMinMax 1 ) ( GDALGetRasterMaximum hband ( ptr gotMax ) ) )
( hband TRUE ) )
( print # ` " Min=,(ref adfMinMax 0 ) , Max=,(ref 1 ) " )
( when ( < 0 ( GDALGetOverviewCount hband ) )
( print " Band has , ( GDALGetOverviewCount hband ) overviews . " ) )
hband))
(define (gdal-band-nodata hband)
(let ((gotNoData (make <c-int>)))
(GDALGetRasterNoDataValue hband (ptr gotNoData))
(and (c-int->bool gotNoData)
(GDALGetRasterNoDataValue hband (ptr gotNoData)))))
(define (interp-linear u c0 c1)
(+ (* u (- c1 c0)) c0))
(define (bi-interp u v f rows)
(apply f
(cons v
(map (lambda(x)
(apply f
(cons u
(f32vector->list (ref rows x)))))
(iota (size-of rows))))))
(define (interp-bicubic u v rows)
(assert (= (size-of rows) 4))
(bi-interp u v interp-cubic rows))
( benchmark 10000 ( lambda _ ( interp - bicubic 0.2 0.2 ' ( # f32(0 1 0 0 ) # f32(0 2 0 0)#f32(0 0 0 0)#f32(0 0 0 0 ) ) ) ) )
(define (interp-bilinear u v rows)
(assert (= (size-of rows) 2))
(bi-interp u v interp-linear rows))
(define (raster-pos->uv x y)
(values (- x (floor x))
(- y (floor y))))
;; Idempotent GDAL initialisation: the first call registers all GDAL
;; drivers and returns #t; every later call is a no-op returning #f.
(define gdal-init
  (let1 initialized #f
    (lambda ()
      (if initialized
          #f
          (begin
            (set! initialized #t)
            (GDALAllRegister)
            #t)))))
;; Raster dimensions of DATASET as a (width height) list of numbers.
(define (gdal-raster-size dataset)
  (list (x->number (GDALGetRasterXSize dataset))
        (x->number (GDALGetRasterYSize dataset))))
;; Geographic bounding box of DATASET as ((min-x min-y) (max-x max-y)).
;; Asserts that the dataset's spatial reference is geographic (lat/lon).
(define (gdal-geographic-bbox dataset)
  (let ((osr (osr-from-dataset dataset))
        (rsize (gdal-raster-size dataset)))
    (assert (osr-is-geographic? osr))
    ;; l1: the two opposite raster corners mapped to geo coordinates.
    ;; The -1/2 offsets compensate for get-geotransform's half-pixel
    ;; (pixel-center) shift so the box covers whole pixels.
    (let* ((l1 (map (get-geotransform dataset)
                    (list '(-1/2 -1/2)
                          (map (cut - <> 1/2) rsize))))
           ;; l2: (min-x max-x min-y max-y)
           (l2 (append
                (receive lx (apply min&max (map car l1))
                  lx)
                (receive ly (apply min&max (map cadr l1))
                  ly))))
      ;; Select (min-x min-y) and (max-x max-y) out of l2.
      (list (permute l2 '(0 2))
            (permute l2 '(1 3))))))
;; Map NaN to #f; any other number passes through unchanged.
(define (nan-to-false n)
  (cond [(nan? n) #f]
        [else n]))
(define (dem->xy-project->z projection name . args)
(let-keywords args ((next (lambda _ +nan.0))
(interpolation 'bi-cubic)
(band 1))
(let* ((dataset (gdal-open-dataset name))
(band (gdal-open-band dataset band)))
(let ((width (GDALGetRasterBandXSize band))
(height (GDALGetRasterBandYSize band))
(osr (osr-from-dataset dataset)))
(let1 xy->z (lambda(fi get-box box-width box-height)
(rp (make-f64vector 2))
(box (make-s64vector 4))
(rasterpos&bbox! (get-rasterpos&bbox! projection dataset get-box width height))
(rasterpos⁻¹ (apply compose
(reverse
(filter (lambda(f) (not (eq? f identity)))
(list
(get-geotransform dataset)
(gdal-get-projection⁻¹ dataset)
(if (not (string-null? projection))
(osr-transform (OSRCloneGeogCS osr)
(osr-from-user-input projection))
identity)
(cut subseq <> 0 2))))))
(read-row! (get-gdal-read-band-row! band (gdal-band-nodata band)))
(rows (map (lambda(y) (make-f32vector box-width)) (iota box-height)))
(geographic-dataset? (osr-is-geographic? osr)))
(let ((read-row (lambda(y xs xe)
(let1 row (make-f32vector (- xe xs))
(read-row! row y xs xe)
row)))
(read-box! (lambda()
(let ((start (s64vector-ref box 0))
(end (s64vector-ref box 2))
(y (s64vector-ref box 1))
(r 0))
(dotimes (idx box-height)
(inc! r (read-row! (ref rows idx) (+ y idx) start end)))
r)))
)
(let* ((read-pixel (lambda(x y)
(let1 x (round->exact x)
(f32vector-ref (read-row (round->exact y) x (+ x 1)) 0))))
(read-geo-pixel (lambda(x y)
(guard (e [(transform-error? e)
#f])
(nan-to-false (apply read-pixel (rasterpos x y)))))))
(lambda(x y)
(if (not (rasterpos&bbox! x y rp box))
(next x y)
(let ((nans (read-box!)))
(cond [(= nans (* box-width box-height))
(next x y)]
[(> nans 0)
;; try to replace
(call/cc
(lambda(break)
(for-each-with-index
(lambda(ry r)
(for-each-with-index
(lambda(rx v)
(when (nan? v)
(receive (cx cy)
(apply values (rasterpos⁻¹ (list (+ (s64vector-ref box 0) rx)
(+ (s64vector-ref box 1) ry))))
(if-let1 nv
(or (and geographic-dataset?
(or (read-geo-pixel (+ cx 360.0) cy)
(read-geo-pixel (- cx 360.0) cy)
(and (or (> cy 90.0) (< cy -90.0))
(read-geo-pixel cx cy))))
(nan-to-false (next cx cy)))
(set! (ref r rx) nv)
;; failed to replace
(break (next x y))))))
r))
rows)
;; (assert (not (any (cut find nan? <>) rows)))  ; NOTE(review): reconstructed from mangled source -- verify upstream
(receive (u v) (raster-pos->uv (f64vector-ref rp 0) (f64vector-ref rp 1))
(fi u v rows))))]
[else
;; (assert (zero? nans))  ; NOTE(review): reconstructed from mangled source -- verify upstream
(receive (u v) (raster-pos->uv (f64vector-ref rp 0) (f64vector-ref rp 1))
(fi u v rows))]))))))))
(case interpolation
((bi-cubic) (xy->z interp-bicubic raster-pos->4x4-box 4 4))
((bi-linear) (xy->z interp-bilinear raster-pos->2x2-box 2 2))
((nearest) (xy->z (lambda(u v rows) (ref* rows 0 0)) raster-pos->1x1-box 1 1))
(else (error "Unknown interpolation:" interpolation))))))))
(define (dem->xy-project->z-debug projection name . args)
(let-keywords args ((next (lambda (x y depth) (values +nan.0 +nan.0 depth)))
(interpolation 'bi-cubic)
(band 1))
(let* ((dataset (gdal-open-dataset name))
(band (gdal-open-band dataset band)))
(let ((width (GDALGetRasterBandXSize band))
(height (GDALGetRasterBandYSize band))
(osr (osr-from-dataset dataset)))
(let1 xy->z-debug (lambda(fi get-box box-width box-height)
(rp (make-f64vector 2))
(box (make-s64vector 4))
(rasterpos&bbox! (get-rasterpos&bbox! projection dataset get-box width height))
(rasterpos⁻¹ (apply compose
(reverse
(filter (lambda(f) (not (eq? f identity)))
(list
(get-geotransform dataset)
(gdal-get-projection⁻¹ dataset)
(if (not (string-null? projection))
(osr-transform (OSRCloneGeogCS osr)
(osr-from-user-input projection))
identity)
(cut subseq <> 0 2))))))
(read-row! (get-gdal-read-band-row! band (gdal-band-nodata band)))
(rows (map (lambda(y) (make-f32vector box-width)) (iota box-height)))
(geographic-dataset? (osr-is-geographic? osr)))
(let ((read-row (lambda(y xs xe)
(let1 row (make-f32vector (- xe xs))
(read-row! row y xs xe)
row)))
(read-box! (lambda()
(let ((start (s64vector-ref box 0))
(end (s64vector-ref box 2))
(y (s64vector-ref box 1))
(r 0))
(dotimes (idx box-height)
(inc! r (read-row! (ref rows idx) (+ y idx) start end)))
r)))
)
(let* ((read-pixel (lambda(x y)
(let1 x (round->exact x)
(f32vector-ref (read-row (round->exact y) x (+ x 1)) 0))))
(read-geo-pixel (lambda(x y)
(guard (e [(transform-error? e)
#f])
(nan-to-false (apply read-pixel (rasterpos x y)))))))
(lambda(x y :optional (depth 0))
(if (not (rasterpos&bbox! x y rp box))
(next x y (+ depth 1))
(let* ((nans (read-box!))
(c (map floor (list (f64vector-ref rp 0) (f64vector-ref rp 1))))
(xres (geod-distance 'wgs84 (rasterpos⁻¹ c) (rasterpos⁻¹ (map + c '(1 0)))))
(yres (geod-distance 'wgs84 (rasterpos⁻¹ c) (rasterpos⁻¹ (map + c '(0 1)))))
(res (max xres yres))
(max-depth depth))
(cond [(= nans (* box-width box-height))
(next x y (+ depth 1))]
[(> nans 0)
;; try to replace
(call/cc
(lambda(break)
(for-each-with-index
(lambda(ry r)
(for-each-with-index
(lambda(rx v)
(when (nan? v)
(receive (cx cy)
(apply values (rasterpos⁻¹ (list (+ (s64vector-ref box 0) rx)
(+ (s64vector-ref box 1) ry))))
(let1 nv (and geographic-dataset?
(or (read-geo-pixel (+ cx 360.0) cy)
(read-geo-pixel (- cx 360.0) cy)
(and (or (> cy 90.0) (< cy -90.0))
(read-geo-pixel cx cy))))
(if nv
(set! (ref r rx) nv)
(let1 next-value (values->list (next cx cy (+ depth 1)))
(cond [(nan-to-false (car next-value))
(set! (ref r rx) (car next-value))
(set! res (max res (cadr next-value)))
(set! max-depth (max max-depth (caddr next-value)))]
[else
;; failed to replace
(break (next x y (+ depth 1)))])))))))
r))
rows)
;; (assert (not (any (cut find nan? <>) rows)))  ; NOTE(review): reconstructed from mangled source -- verify upstream
(receive (u v) (raster-pos->uv (f64vector-ref rp 0) (f64vector-ref rp 1))
(values (fi u v rows)
res
max-depth))))]
[else
;; (assert (zero? nans))  ; NOTE(review): reconstructed from mangled source -- verify upstream
(receive (u v) (raster-pos->uv (f64vector-ref rp 0) (f64vector-ref rp 1))
(values (fi u v rows)
res
max-depth))]))))))))
(case interpolation
((bi-cubic) (xy->z-debug interp-bicubic raster-pos->4x4-box 4 4))
((bi-linear) (xy->z-debug interp-bilinear raster-pos->2x2-box 2 2))
((nearest) (xy->z-debug (lambda(u v rows) (ref* rows 0 0)) raster-pos->1x1-box 1 1))
(else (error "Unknown interpolation:" interpolation))))))))
;; Like dem->xy-project->z but with an empty target projection, i.e.
;; x/y are taken in the dataset's own coordinate system.
(define (dem->xy->z name . args)
  (apply dem->xy-project->z "" name args))
;; Truthy iff KEY appears in KV-LIST, even when its value is #f.
;; (Returns the value itself when it is truthy, as `or' does.)
(define (keyword-exists? key kv-list)
  (let1 value (get-keyword key kv-list #f)
    (or value
        ;; Key present but bound to #f: a unique default (1) must not
        ;; survive a second lookup.
        (not (equal? 1 (get-keyword key kv-list 1))))))
;; Chain a stack of DEM sources into one lookup closure: each earlier
;; entry falls back (via :next) to the closure built from the entries
;; after it.  The :next keyword is reserved for this chaining and may
;; not appear in user-supplied entries.
(define (dem-stack->xy->z projection dem-stack)
  (let1 reversed (reverse dem-stack)
    (fold (lambda (entry fallback)
            (when (keyword-exists? :next (cdr entry))
              (error ":next only allowed in last element"))
            (apply dem->xy-project->z projection
                   (append entry (list :next fallback))))
          (apply dem->xy-project->z projection (car reversed))
          (cdr reversed))))
;; Debug variant of dem-stack->xy->z: identical chaining, but built on
;; dem->xy-project->z-debug which also reports resolution and depth.
(define (dem-stack->xy->z-debug projection dem-stack)
  (let1 reversed (reverse dem-stack)
    (fold (lambda (entry fallback)
            (when (keyword-exists? :next (cdr entry))
              (error ":next only allowed in last element"))
            (apply dem->xy-project->z-debug projection
                   (append entry (list :next fallback))))
          (apply dem->xy-project->z-debug projection (car reversed))
          (cdr reversed))))
|
4b31f5120b14ec9edbcf1ab80bf931088681ee15d815213f5319e70177996cdd | semilin/layoup | Workman.lisp |
;; Workman keyboard layout datum: three rows of ten keys each,
;; with no shift matrix and no physical-keyboard metadata.
(MAKE-LAYOUT :NAME "Workman" :MATRIX
(APPLY #'KEY-MATRIX '("qdrwbjfup;" "ashtgyneoi" "zxmcvkl,./"))
:SHIFT-MATRIX NIL :KEYBOARD NIL)
(MAKE-LAYOUT :NAME "Workman" :MATRIX
(APPLY #'KEY-MATRIX '("qdrwbjfup;" "ashtgyneoi" "zxmcvkl,./"))
:SHIFT-MATRIX NIL :KEYBOARD NIL) | |
215507d72a039ed3f76b2aaff57eb9ff7a8b43839d0e79c4920d8a1d85ff799c | runtimeverification/haskell-backend | Main.hs | module Main (main) where
import Data.Text.IO qualified as Text
import GlobalMain
import Kore.Parser (
parseKoreDefinition,
)
import Kore.Syntax.Definition (
ParsedDefinition,
)
import Kore.Unparser
import Options.Applicative
import Prelude.Kore
import Pretty (
LayoutOptions (..),
PageWidth (..),
defaultLayoutOptions,
layoutPretty,
renderIO,
)
import System.IO (
stdout,
)
-- | Command-line options of the kore-format tool.
--
-- NOTE(review): the record braces and Haddock markers were stripped by
-- the extraction; restored here to make the declaration parse again.
data KoreFormatOptions = KoreFormatOptions
    { -- | file to unparse
      fileName :: FilePath
    , -- | line width
      width :: Int
    }
-- | Parse the positional FILE argument and the optional @--width@
-- option (default 80; non-positive means unlimited).
commandLine :: Parser KoreFormatOptions
commandLine = KoreFormatOptions <$> fileArgument <*> widthOption
  where
    fileArgument =
        argument
            str
            ( metavar "FILE"
                <> help "Kore source file to parse"
            )
    widthOption =
        option
            auto
            ( metavar "WIDTH"
                <> long "width"
                <> value 80
                <> help "Line width [default: 80; unlimited if WIDTH <= 0]"
            )
-- | Program description shown by @--help@.
infoMod :: InfoMod options
infoMod =
    mconcat
        [ fullDesc
        , progDesc "Parse a Kore definition and render it in standard format"
        , header "kore-format - parse and render Kore definitions"
        ]
-- | Entry point: parse global and local options, then for each local
-- option set read the Kore definition file and pretty-print it to
-- stdout at the requested line width.
main :: IO ()
main = do
    options <-
        mainGlobal
            (ExeName "kore-format")
            Nothing -- environment variable name for extra arguments
            commandLine
            infoMod
    for_ (localOptions options) mainWorker
  where
    mainWorker
        LocalOptions
            { execOptions =
                KoreFormatOptions{fileName, width}
            } =
            do
                defn <- readKoreOrDie fileName
                let layoutOptions =
                        defaultLayoutOptions
                            { layoutPageWidth =
                                -- WIDTH <= 0 disables the line-length limit.
                                if width > 0
                                    then AvailablePerLine width 1.0
                                    else Unbounded
                            }
                renderIO stdout (layoutPretty layoutOptions $ unparse defn)
-- | Read a 'KoreDefinition' from the given file name or signal an error.
-- (The Haddock comment marker was stripped by extraction; restored so
-- the file parses again.)
readKoreOrDie :: FilePath -> IO ParsedDefinition
readKoreOrDie fileName =
    Text.readFile fileName
        >>= either error return . parseKoreDefinition fileName
| null | https://raw.githubusercontent.com/runtimeverification/haskell-backend/b06757e252ee01fdd5ab8f07de2910711997d845/kore/app/format/Main.hs | haskell | | line width
environment variable name for extra arguments | module Main (main) where
import Data.Text.IO qualified as Text
import GlobalMain
import Kore.Parser (
parseKoreDefinition,
)
import Kore.Syntax.Definition (
ParsedDefinition,
)
import Kore.Unparser
import Options.Applicative
import Prelude.Kore
import Pretty (
LayoutOptions (..),
PageWidth (..),
defaultLayoutOptions,
layoutPretty,
renderIO,
)
import System.IO (
stdout,
)
-- | Command-line options of the kore-format tool.
--
-- NOTE(review): the record braces and Haddock markers were stripped by
-- the extraction; restored here to make the declaration parse again.
data KoreFormatOptions = KoreFormatOptions
    { -- | file to unparse
      fileName :: FilePath
    , -- | line width
      width :: Int
    }
-- | Parse the positional FILE argument and the optional @--width@
-- option (default 80; non-positive means unlimited).
commandLine :: Parser KoreFormatOptions
commandLine = KoreFormatOptions <$> fileArgument <*> widthOption
  where
    fileArgument =
        argument
            str
            ( metavar "FILE"
                <> help "Kore source file to parse"
            )
    widthOption =
        option
            auto
            ( metavar "WIDTH"
                <> long "width"
                <> value 80
                <> help "Line width [default: 80; unlimited if WIDTH <= 0]"
            )
-- | Program description shown by @--help@.
infoMod :: InfoMod options
infoMod =
    mconcat
        [ fullDesc
        , progDesc "Parse a Kore definition and render it in standard format"
        , header "kore-format - parse and render Kore definitions"
        ]
-- | Entry point: parse global and local options, then for each local
-- option set read the Kore definition file and pretty-print it to
-- stdout at the requested line width.
main :: IO ()
main = do
    options <-
        mainGlobal
            (ExeName "kore-format")
            Nothing -- environment variable name for extra arguments
            commandLine
            infoMod
    for_ (localOptions options) mainWorker
  where
    mainWorker
        LocalOptions
            { execOptions =
                KoreFormatOptions{fileName, width}
            } =
            do
                defn <- readKoreOrDie fileName
                let layoutOptions =
                        defaultLayoutOptions
                            { layoutPageWidth =
                                -- WIDTH <= 0 disables the line-length limit.
                                if width > 0
                                    then AvailablePerLine width 1.0
                                    else Unbounded
                            }
                renderIO stdout (layoutPretty layoutOptions $ unparse defn)
-- | Read a 'KoreDefinition' from the given file name or signal an error.
-- (The Haddock comment marker was stripped by extraction; restored so
-- the file parses again.)
readKoreOrDie :: FilePath -> IO ParsedDefinition
readKoreOrDie fileName =
    Text.readFile fileName
        >>= either error return . parseKoreDefinition fileName
|
fc63ce544cb850bdb171f4b0cf9a960ce15ca789215938e4f066810974901fa3 | arcusfelis/xapian-erlang-bindings | xapian_server_tests.erl | %% This module is a `gen_server' that handles a single port connection.
-module(xapian_server_tests).
-include_lib("xapian/include/xapian.hrl").
-include_lib("xapian/src/xapian.hrl").
-compile([export_all]).
-import(xapian_helper, [testdb_path/1]).
%% Used for testing, then can be moved to an another file
-define(SRV, xapian_server).
%% ------------------------------------------------------------------
%% Tests
%% ------------------------------------------------------------------
-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("stdlib/include/qlc.hrl").
-record(document, {docid}).
-record(collapsed, {docid, collapse_key, collapse_count}).
%% ------------------------------------------------------------------
%% Call C++ tests
%% ------------------------------------------------------------------
%% @doc Check basic memory operations (malloc, free).
%% Runs the driver's built-in `memory' self-test against an empty server.
memory_test() ->
    {ok, Srv} = ?SRV:start_link([], []),
    ?SRV:internal_test_run(Srv, memory, []),
    ?SRV:close(Srv),
    ok.
%% @doc Send a binary through the driver's echo command and expect the
%% same bytes back, both for a tiny payload and for one larger than a
%% single buffer (1100 bytes).
echo_test() ->
    {ok, Srv} = ?SRV:start_link([], []),
    Check = fun(Payload) ->
                ?assertEqual(?SRV:internal_test_run(Srv, echo, Payload),
                             Payload)
            end,
    Check(<<0,5>>),
    Check(binary:copy(<<1>>, 1100)),
    ok.
-define(DOCUMENT_ID(X), X:32/native-unsigned-integer).
%% @doc This test checks the work of `ResultEncoder'.
%% The driver streams 1000 document ids; three runs must return
%% byte-identical replies that decode to the sequence 1..1000.
result_encoder_test() ->
    {ok, Srv} = ?SRV:start_link([], []),
    Run = fun() -> ?SRV:internal_test_run(Srv, result_encoder, [1, 1000]) end,
    Reply = Run(),
    Reply = Run(),
    Reply = Run(),
    ?SRV:close(Srv),
    ?assertEqual(lists:seq(1, 1000), [Id || <<?DOCUMENT_ID(Id)>> <= Reply]),
    ok.
%% @doc Check an exception: a C++ error inside the driver must surface
%% as an Erlang `error' carrying an #x_error{} record.
exception_test() ->
    {ok, Server} = ?SRV:start_link([], []),
    %% ?assertException(ClassPattern, TermPattern, Expression)
    ?assertException(error,
        #x_error{type = <<"MemoryAllocationDriverError">>},
        ?SRV:internal_test_run(Server, exception, [])),
    ?SRV:close(Server),
    ok.
%% ------------------------------------------------------------------
%% Call test generators
%% ------------------------------------------------------------------
%% Wrap the generator function called Name into an eunit setup fixture
%% whose produced tests are labelled with the generator's name.
wrapper(Name) ->
    Setup = fun() -> ok end,
    Instantiate = fun(_Ctx) -> [{atom_to_list(Name), ?MODULE:Name()}] end,
    [{setup, Setup, Instantiate}].
%% Collect every exported zero-arity function whose name ends in
%% "_gen" and run each of them once under an eunit fixture.
run_test_generators_once_test_() ->
    IsGenerator =
        fun({Name, 0}) -> lists:suffix("_gen", atom_to_list(Name));
           ({_, _}) -> false
        end,
    [wrapper(Name) || {Name, _} = Export <- ?MODULE:module_info(exports),
                      IsGenerator(Export)].
%% This test tries to create a document with all kinds of fields, then
%% replaces it addressed both by id and by unique term, and finally
%% deletes it by id and by term.
simple_gen() ->
    % Open test
    Path = testdb_path(simple),
    Params = [write, create, overwrite,
              #x_value_name{slot = 1, name = slot1},
              #x_prefix_name{name = author, prefix = <<$A>>}],
    Document =
        [ #x_stemmer{language = <<"english">>}
        , #x_data{value = "My test data as iolist"}
        %% It is a term without a position.
        , #x_term{value = "Simple"}
        %% Posting (a term with a position).
        , #x_term{value = "term", position=1}
        , #x_value{slot = 0, value = "Slot #0"}
        , #x_value{slot = slot1, value = "Slot #1"}
        , #x_text{value = "Paragraph 1"}
        , #x_delta{}
        , #x_text{value = <<"Paragraph 2">>}
        , #x_text{value = <<"Michael">>, prefix = author}
        ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, Document),
        ReplacedById = ?SRV:replace_or_create_document(Server, DocId, Document),
        ReplacedByTerm = ?SRV:replace_or_create_document(Server, "Simple", Document),
        ?SRV:delete_document(Server, DocId),
        ?SRV:delete_document(Server, "Simple"),
        [ ?_assert(is_integer(DocId))
        , ?_assertEqual(DocId, ReplacedById)
        , ?_assertEqual(DocId, ReplacedByTerm)
        ]
    after
        ?SRV:close(Server)
    end.
%% last_document_id/1 is `undefined' for an empty DB and equals the id
%% of the last added document afterwards.
last_document_id_gen() ->
    % Open test
    Path = testdb_path(last_docid),
    Params = [write, create, overwrite],
    Document = [],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% The DB is empty.
        NoId = ?SRV:last_document_id(Server),
        %% Add 1 document, check a last id.
        DocId = ?SRV:add_document(Server, Document),
        Last = ?SRV:last_document_id(Server),
        [ {"Db is empty.", ?_assertEqual(undefined, NoId)}
        , ?_assertEqual(DocId, Last)
        ]
    after
        ?SRV:close(Server)
    end.
%% Starting a server with a plain atom name registers it locally;
%% closing it terminates the process.
open_and_register_local_name_test() ->
    RegName = xapian_server_test_local_name,
    {ok, Pid} = ?SRV:start_link([], [{name, RegName}]),
    ?assertEqual(whereis(RegName), Pid),
    ?SRV:close(Pid),
    ?assertNot(is_process_alive(Pid)).
%% Same as open_and_register_local_name_test/0, but the name is given
%% as an explicit {local, Name} tuple.
open_and_register_local_name2_test() ->
    RegName = xapian_server_test_local_name2,
    {ok, Pid} = ?SRV:start_link([], [{name, {local, RegName}}]),
    ?assertEqual(whereis(RegName), Pid),
    ?SRV:close(Pid).
%% A {global, Name} registration must be resolvable through the global
%% name server.
open_and_register_global_name_test() ->
    RegName = xapian_server_test_global_name,
    {ok, Pid} = ?SRV:start_link([], [{name, {global, RegName}}]),
    ?assertEqual(global:whereis_name(RegName), Pid),
    ?SRV:close(Pid).
%% Exercises update_document/3 and update_or_create_document/3 for both
%% document-id and unique-term addressing, including the error paths.
%% (Several comment lines had lost their %% markers; restored.)
update_document_test() ->
    Path = testdb_path(update_document),
    Params = [write, create, overwrite],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, []),
        %% The document with DocId will be extended.
        DocId1 = ?SRV:update_document(Server, DocId, [#x_term{value = "more"}]),
        ?assertEqual(DocId, DocId1),
        %% Cannot add this term again, because the action is `add'.
        ?assertError(#x_error{type = <<"BadArgumentDriverError">>},
            ?SRV:update_document(Server, DocId,
                [#x_term{action = add, value = "more", ignore = false}])),
        %% Add an another term...
        ?SRV:update_document(Server, DocId,
            [#x_term{action = add, value = "other", ignore = false}]),
        ?assert(?SRV:is_document_exist(Server, "other")),
        %% ... and delete it.
        ?SRV:update_document(Server, DocId,
            [#x_term{action = remove, value = "other", ignore = false}]),
        ?assertNot(?SRV:is_document_exist(Server, "other")),
        %% Cannot find a document, using "bad_term" as UID.
        ?debugMsg("UPD_DOC_BAD_ID_MARK"),
        ?assertError(#x_error{type = <<"BadArgumentDriverError">>},
            ?SRV:update_document(Server, "bad_term", [])),
        %% One document with the term "more" was found.
        %% Because we use a term as a key, few documents can be matched.
        %% That is why, undefined is returned (and not a document id).
        %% ignore = true catches errors.
        ?assertEqual(undefined,
            ?SRV:update_or_create_document(Server, "more",
                [#x_term{action = add, value = "more", ignore = true}])),
        %% Cannot update the document that is not found.
        ?assertNot(?SRV:is_document_exist(Server, "fail")),
        ?assertError(#x_error{type = <<"BadArgumentDriverError">>},
            ?SRV:update_document(Server, "fail", [])),
        %% Now we can.
        ?assertNot(?SRV:is_document_exist(Server, "fail")),
        DocId2 = ?SRV:update_or_create_document(Server, "fail", []),
        %% Document was created, but it is empty.
        ?assert(?SRV:is_document_exist(Server, DocId2)),
        ?assertNot(?SRV:is_document_exist(Server, "fail")),
        %% Try the same using the document id as a key.
        DocId3 = ?SRV:update_or_create_document(Server, DocId2, []),
        ?assertEqual(DocId2, DocId3)
    after
        ?SRV:close(Server)
    end.
%% Regression test: updating a document with an #x_value{} field used
%% to crash the driver with OverflowDriverError ("Too short binary.").
%% The original crash log below arrived with its comment markers
%% stripped; it is restored as comments to keep the module compiling.
update_document_value_test() ->
    Path = testdb_path(update_document_value),
    Params = [write, create, overwrite],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, []),
        %% The document with DocId will be extended.
        %% <<"Slot #15">> = <<83,108,111,116,32,35,49,53>>
        %% byte_size(<<"Slot #15">>) = 8
        %%
        %% Whole command:
        %% =ERROR REPORT==== 16-Sep-2015::03:15:10 ===
        %% [update_document:24] OverflowDriverError: "Too short binary."
        %% Data: <<27,15,0,0,0,0,8,0,0,0,83,108,111,116,32,35,49,53,1,0,1,1,0,0,0>>
        %% C++ position: c_src/common/param_decoder.cpp:29
        %%
        %% Test DB: add_document_value
        %% *failed*
        %% in function xapian_server:client_error_handler/1 (src/xapian_server.erl, line 1285)
        %% in call from xapian_server_tests:update_document_value_test/0 (test/xapian_server_tests.erl, line 241)
        %% **error:{x_error,<<"OverflowDriverError">>,<<"Too short binary.">>,update_document,
        %% <<"c_src/common/param_decoder.cpp">>,29}
        %%
        %% <<27,        % SET_VALUE
        %%   15,0,0,0,  % slot
        %%   0,         % xapian_const:value_type_id(string)
        %%   8,0,0,0,83,108,111,116,32,35,49,53, % string
        %%   1,         % ignore
        %%   0,         % stop applyDocument
        %%   1,1,0,0,0>>
        Doc = [#x_value{slot = 15, value = <<"Slot #15">>}],
        DocId1 = ?SRV:update_document(Server, DocId, Doc),
        ?assertEqual(DocId, DocId1)
    after
        ?SRV:close(Server)
    end.
%% Adding a document whose only field is a value in slot 15 must
%% succeed (companion of update_document_value_test/0).
add_document_value_test() ->
    Path = testdb_path(add_document_value),
    {ok, Server} = ?SRV:start_link(Path, [write, create, overwrite]),
    try
        ?SRV:add_document(Server,
                          [#x_value{slot = 15, value = <<"Slot #15">>}])
    after
        ?SRV:close(Server)
    end.
%% REP_CRT_DOC_MARK
%% replace_or_create_document/3 replaces the matched document (keeping
%% its id) or creates a fresh one; when the term matches several
%% documents, all but the replaced one are deleted.
%% (Several comment lines had lost their %% markers; restored.)
replace_or_create_document_test() ->
    Path = testdb_path(replace_or_create_document),
    Params = [write, create, overwrite],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% Try update using non-existed DocId.
        ?assertNot(?SRV:is_document_exist(Server, 1)),
        DocId = ?SRV:replace_or_create_document(Server, "bad_term", []),
        ?assertEqual(DocId, 1),
        ?assert(?SRV:is_document_exist(Server, 1)),
        ?SRV:delete_document(Server, DocId),
        ?assertNot(?SRV:is_document_exist(Server, 1)),
        %% If there is no document, then the new one will be created.
        DocId0 = ?SRV:replace_or_create_document(Server, "bad_term", []),
        %% Even when the first document is deleted, the new document will have
        %% another document id.
        ?assertEqual(DocId0, 2),
        ?assertNot(?SRV:is_document_exist(Server, "bad_term")),
        ?assert(?SRV:is_document_exist(Server, DocId0)),
        %% Create a new document.
        DocId1 = ?SRV:add_document(Server, [#x_term{value = "good_term"}]),
        ?assert(?SRV:is_document_exist(Server, "good_term")),
        %% Replace the whole document with the new one.
        DocId2 = ?SRV:replace_or_create_document(Server, "good_term",
            [#x_term{value = "nice_term"}]),
        %% It returns a document id of replaced document (but it can be more
        %% then once).
        ?assertEqual(DocId1, DocId2),
        %% The old document was deleted,
        ?assertNot(?SRV:is_document_exist(Server, "good_term")),
        %% the new document was created.
        ?assert(?SRV:is_document_exist(Server, "nice_term")),
        %% Test few documents with the same term.
        %%
        %% Add another document with the same term.
        DocId3 = ?SRV:add_document(Server, [#x_term{value = "nice_term"}]),
        %% Only one document will left after replace_or_create_document.
        %% DocId2 and DocId3 are still here.
        ?assert(?SRV:is_document_exist(Server, DocId2)),
        ?assert(?SRV:is_document_exist(Server, DocId3)),
        DocId4 = ?SRV:replace_or_create_document(Server, "nice_term",
            [#x_term{value = "mass_term"}]),
        %% Only document with DocId2 is here, other document with the same term
        %% was deleted.
        ?assertEqual(DocId4, DocId2),
        Ids = all_record_ids(Server, "mass_term"),
        ?assertEqual(Ids, [DocId4]),
        ?assertNot(?SRV:is_document_exist(Server, DocId3))
    after
        ?SRV:close(Server)
    end.
%% delete_document/2 returns false when nothing matched, true when a
%% document was removed; removed documents must no longer exist.
delete_document_gen() ->
    Path = testdb_path(delete_document),
    {ok, Server} = ?SRV:start_link(Path, [write, create, overwrite]),
    try
        %% Deleting from an empty DB matches nothing.
        DeletedByTerm0 = ?SRV:delete_document(Server, "test"),
        DeletedById0 = ?SRV:delete_document(Server, 1),
        WithTermId = ?SRV:add_document(Server, [#x_term{value = "term"}]),
        EmptyId = ?SRV:add_document(Server, []),
        DeletedByTerm1 = ?SRV:delete_document(Server, "term"),
        DeletedById1 = ?SRV:delete_document(Server, EmptyId),
        StillThere1 = ?SRV:is_document_exist(Server, WithTermId),
        StillThere2 = ?SRV:is_document_exist(Server, EmptyId),
        [ ?_assertNot(DeletedByTerm0)
        , ?_assertNot(DeletedById0)
        , ?_assert(DeletedByTerm1)
        , ?_assert(DeletedById1)
        , ?_assertNot(StillThere1)
        , ?_assertNot(StillThere2)
        ]
    after
        ?SRV:close(Server)
    end.
%% REP_DOC_MARK
%% replace_document/3 only replaces: with an unmatched term it is a
%% no-op returning `undefined'; with a matching term it swaps the
%% document in place and deletes any other documents with that term.
%% (Several comment lines had lost their %% markers; restored.)
replace_document_test() ->
    Path = testdb_path(replace_document),
    Params = [write, create, overwrite],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% Try update using non-existed DocId.
        ?assertNot(?SRV:is_document_exist(Server, 1)),
        DocId = ?SRV:replace_document(Server, "bad_term", []),
        %% Nothing was updated.
        ?assertEqual(DocId, undefined),
        ?assertNot(?SRV:is_document_exist(Server, 1)),
        %% If there is no document, then there is no an error.
        DocId0 = ?SRV:replace_document(Server, "bad_term", []),
        ?assertEqual(DocId0, undefined),
        %% Nothing was created.
        ?assertNot(?SRV:is_document_exist(Server, "bad_term")),
        %% Create a new document.
        DocId1 = ?SRV:add_document(Server, [#x_term{value = "good_term"}]),
        ?assert(?SRV:is_document_exist(Server, "good_term")),
        ?assert(?SRV:is_document_exist(Server, DocId1)),
        ?assertEqual(DocId1, 1),
        %% Replace the whole document with the new one.
        DocId2 = ?SRV:replace_document(Server, "good_term",
            [#x_term{value = "nice_term"}]),
        %% It returns a document id of replaced document (but it can be more
        %% then once).
        ?assertEqual(DocId1, DocId2),
        %% The old document was deleted,
        ?assertNot(?SRV:is_document_exist(Server, "good_term")),
        %% the new document was created.
        ?assert(?SRV:is_document_exist(Server, "nice_term")),
        %% Test few documents with the same term.
        %%
        %% Add another document with the same term.
        DocId3 = ?SRV:add_document(Server, [#x_term{value = "nice_term"}]),
        %% Only one document will left after replace_document.
        %% DocId2 and DocId3 are still here.
        ?assert(?SRV:is_document_exist(Server, DocId2)),
        ?assert(?SRV:is_document_exist(Server, DocId3)),
        DocId4 = ?SRV:replace_document(Server, "nice_term",
            [#x_term{value = "mass_term"}]),
        %% Only document with DocId2 is here, other document with the same term
        %% was deleted.
        ?assertEqual(DocId4, DocId2),
        Ids = all_record_ids(Server, "mass_term"),
        ?assertEqual(Ids, [DocId4]),
        ?assertNot(?SRV:is_document_exist(Server, DocId3))
    after
        ?SRV:close(Server)
    end.
%% REP_DOC_MARK
%% Replacing a document addressed by its id keeps the id and swaps the
%% terms.
replace_document_by_id_test() ->
    Path = testdb_path(replace_document_by_id),
    {ok, Server} = ?SRV:start_link(Path, [write, create, overwrite]),
    try
        OldId = ?SRV:add_document(Server, [#x_term{value = "new"}]),
        NewId = ?SRV:replace_document(Server, OldId,
                                      [#x_term{value = "other"}]),
        ?assertEqual(OldId, NewId),
        ?assertEqual(all_record_ids(Server, "other"), [NewId])
    after
        ?SRV:close(Server)
    end.
%% is_document_exist/2 must answer false for both term and id lookups
%% before a matching document is added, and true for both afterwards.
is_document_exists_gen() ->
    Path = testdb_path(is_document_exists),
    {ok, Server} = ?SRV:start_link(Path, [write, create, overwrite]),
    try
        ExistedTerm = ?SRV:is_document_exist(Server, "monad"),
        ExistedId = ?SRV:is_document_exist(Server, 1),
        ?SRV:add_document(Server, [#x_term{value = "monad"}]),
        ExistsTerm = ?SRV:is_document_exist(Server, "monad"),
        ExistsId = ?SRV:is_document_exist(Server, 1),
        [ ?_assertNot(ExistedTerm)
        , ?_assertNot(ExistedId)
        , ?_assert(ExistsTerm)
        , ?_assert(ExistsId)
        ]
    after
        ?SRV:close(Server)
    end.
%% A term may carry several frequency (WDF) updates, both relative
%% {cur, N} and absolute {abs, N}; adding such a document must succeed.
frequency_test() ->
    Path = testdb_path(frequency),
    Fields =
        [ #x_term{value = "term", frequency = {cur, 1}}
        , #x_term{value = "term", frequency = {abs, 5}}
        , #x_term{value = "term", frequency = {cur, -1}}
        ],
    {ok, Server} = ?SRV:start_link(Path, [write, create, overwrite]),
    try
        ?SRV:add_document(Server, Fields)
    after
        ?SRV:close(Server)
    end.
%% The add/update/set term actions are all accepted inside a single
%% document description.
term_actions_test() ->
    Path = testdb_path(actions),
    Fields =
        [ #x_term{action = add, value = "term"}
        , #x_term{action = update, value = "term"}
        , #x_term{action = set, value = "term"}
        ],
    {ok, Server} = ?SRV:start_link(Path, [write, create, overwrite]),
    try
        ?SRV:add_document(Server, Fields)
    after
        ?SRV:close(Server)
    end.
%% Projection records used with xapian_term_record:record/2: the field
%% names select which term properties the driver has to return for each
%% term of a document.
-record(term, {value, wdf}).
-record(term_value, {value}).
-record(term_ext, {value, positions, position_count, freq, wdf}).
-record(term_pos, {value, positions, position_count}).
-record(short_term, {wdf}).
-record(term_freq, {value, freq}).
%% Enumerate the terms of one document through a QLC table and check
%% values, WDF, and lookup ordering.  (A comment line had lost its
%% %% marker; restored.)
term_qlc_gen() ->
    Path = testdb_path(term_qlc),
    Params = [write, create, overwrite],
    %% Create a document with terms
    TermNames =
        [erlang:list_to_binary(erlang:integer_to_list(X))
            || X <- lists:seq(1, 100)],
    Fields = [#x_term{value = Term} || Term <- TermNames],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, Fields),
        Meta = xapian_term_record:record(term, record_info(fields, term)),
        Table = xapian_term_qlc:document_term_table(Server, DocId, Meta),
        Records = qlc:e(Table),
        Values = [Value || #term{value = Value} <- Records],
        Not1Wdf = [X || X = #term{wdf = Wdf} <- Records, Wdf =/= 1],
        %% Lookup order test.
        %% It is an important test.
        %% Actually, it tests the fact, that skip_to("") move an TermIterator
        %% in the beginning of the document.
        OrderTestQuery = qlc:q([Value || #term{value = Value} <- Table,
            Value =:= "2" orelse Value =:= "1" orelse Value =:= "3"]),
        OrderTestValues = qlc:e(OrderTestQuery),
        [ ?_assertEqual(Values, lists:sort(TermNames))
        , ?_assertEqual(Not1Wdf, [])
        , ?_assertEqual(OrderTestValues, [<<"1">>, <<"2">>, <<"3">>])
        ]
    after
        ?SRV:close(Server)
    end.
%% A materialised QLC result is not invalidated by a later document
%% update; re-running the query would see the new term list.
%% (Two comment lines had lost their %% markers; restored.)
term_qlc_invalidation_gen() ->
    Path = testdb_path(term_qlc_inv),
    Params = [write, create, overwrite],
    %% Create a document with terms
    TermNames =
        ["cat", "dog"],
    Fields = [#x_term{value = Term} || Term <- TermNames],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, Fields),
        Meta = xapian_term_record:record(term, record_info(fields, term)),
        Table = xapian_term_qlc:document_term_table(Server, DocId, Meta),
        Records1 = qlc:e(Table),
        %% Delete the term
        ?SRV:update_document(Server, DocId,
            [#x_term{value = "dog", action = remove}]),
        %% The term list was changed.
        %% The records must be the same. - no
        %% Can it be runned twice? - yes. But values will be new.
        % Records2 = qlc:e(Table),
        [ ?_assertEqual(Records1, [#term{value = <<"cat">>, wdf = 1},
                                   #term{value = <<"dog">>, wdf = 1}])
        %% we lost one dog :(
        % , ?_assertEqual(Records1, Records2)
        ]
    after
        ?SRV:close(Server)
    end.
%% Fold the WDF of all 100 terms of one document through a QLC query;
%% every term occurs once, so the sum must be 100.
short_term_qlc_gen() ->
    Path = testdb_path(short_term_qlc),
    {ok, Server} = ?SRV:start_link(Path, [write, create, overwrite]),
    try
        Fields = [#x_term{value = integer_to_binary(N)}
                  || N <- lists:seq(1, 100)],
        DocId = ?SRV:add_document(Server, Fields),
        Meta = xapian_term_record:record(short_term,
                                        record_info(fields, short_term)),
        Table = xapian_term_qlc:document_term_table(Server, DocId, Meta),
        Query = qlc:q([Wdf || #short_term{wdf = Wdf} <- Table]),
        WdfSum = qlc:fold(fun erlang:'+'/2, 0, Query),
        [ ?_assertEqual(WdfSum, 100) ]
    after
        ?SRV:close(Server)
    end.
%% Read extended term records (positions, counts, wdf) and check that
%% two QLC cursors over the same shared table advance independently.
%% (A comment line had lost its %% marker; restored.)
term_ext_qlc_gen() ->
    Path = testdb_path(term_ext_qlc),
    Params = [write, create, overwrite],
    %% Create a document with terms
    TermNames =
        [erlang:list_to_binary(erlang:integer_to_list(X))
            || X <- lists:seq(1, 100)],
    Fields = [#x_term{value = Term} || Term <- TermNames],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, Fields),
        Meta = xapian_term_record:record(term_ext,
            record_info(fields, term_ext)),
        Table = xapian_term_qlc:document_term_table(Server, DocId, Meta),
        Records = qlc:e(Table),
        Not0Pos =
            [X || X = #term_ext{position_count = Count} <- Records, Count =/= 0],
        NotEmptyPos =
            [X || X = #term_ext{positions = Poss} <- Records, Poss =/= []],
        %% Shared table: changes in 1 query handler don't modify the second one.
        %%
        %% SUDDENLY! PAIN!
        %% In the next string the error can occur.
        %%
        QH1 = qlc:q([V || #term_ext{value=V} <- Table]),
        QH2 = qlc:q([V || #term_ext{value=V} <- Table]),
        C1 = qlc:cursor(QH1),
        C2 = qlc:cursor(QH2),
        C1E1 = qlc:next_answers(C1, 1),
        C2E1 = qlc:next_answers(C2, 1),
        C1E2 = qlc:next_answers(C1, 1),
        C1E3 = qlc:next_answers(C1, 1),
        C2E2 = qlc:next_answers(C2, 1),
        C2E3 = qlc:next_answers(C2, 1),
        C2E4 = qlc:next_answers(C2, 1),
        C1E4 = qlc:next_answers(C1, 1),
        C2E5 = qlc:next_answers(C2, 25),
        C1E5 = qlc:next_answers(C1, 25),
        C1E6 = qlc:next_answers(C1, 25),
        C2E6 = qlc:next_answers(C2, 25),
        [ ?_assertEqual(Not0Pos, [])
        , ?_assertEqual(NotEmptyPos, [])
        , {"Shared term QLC table.",
            [ ?_assertEqual(C1E1, C2E1)
            , ?_assertEqual(C1E2, C2E2)
            , ?_assertEqual(C1E3, C2E3)
            , ?_assertEqual(C1E4, C2E4)
            , ?_assertEqual(C1E5, C2E5)
            , ?_assertEqual(C1E6, C2E6)
            ]}
        ]
    after
        ?SRV:close(Server)
    end.
%% Binary decimal representations of the integers From..To, inclusive.
term_numbers(From, To) ->
    [integer_to_binary(N) || N <- lists:seq(From, To)].
%% Wrap each term name into an #x_term{} document field.
terms(TermNames) ->
    [#x_term{value = Name} || Name <- TermNames].
%% Natural join of term tables: three documents share overlapping term
%% ranges; joining neighbouring tables must yield exactly the shared
%% ranges.  (A comment line had lost its %% marker; restored.)
term_qlc_join_gen() ->
    Path = testdb_path(term_qlc_join),
    Params = [write, create, overwrite],
    %% Create a document with terms
    TermNames0to99 = term_numbers(0, 99),
    TermNames100to199 = term_numbers(100, 199),
    TermNames200to299 = term_numbers(200, 299),
    TermNames300to399 = term_numbers(300, 399),
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        Doc1Terms = TermNames0to99 ++ TermNames100to199,
        Doc2Terms = TermNames100to199 ++ TermNames200to299,
        Doc3Terms = TermNames200to299 ++ TermNames300to399,
        Doc1Id = ?SRV:add_document(Server, terms(Doc1Terms)),
        Doc2Id = ?SRV:add_document(Server, terms(Doc2Terms)),
        Doc3Id = ?SRV:add_document(Server, terms(Doc3Terms)),
        Meta = xapian_term_record:record(term, record_info(fields, term)),
        Table1 = xapian_term_qlc:document_term_table(Server, Doc1Id, Meta),
        Table2 = xapian_term_qlc:document_term_table(Server, Doc2Id, Meta),
        Table3 = xapian_term_qlc:document_term_table(Server, Doc3Id, Meta),
        %% Search terms from 2 documents with the same names.
        %% It is a natural join.
        Q12 = qlc:q([Value1 || #term{value = Value1} <- Table1,
                               #term{value = Value2} <- Table2,
                               Value1 =:= Value2]),
        Q23 = qlc:q([Value1 || #term{value = Value1} <- Table2,
                               #term{value = Value2} <- Table3,
                               Value1 =:= Value2]),
        Q1223 = qlc:append(Q12, Q23),
        QE12 = qlc:e(Q12),
        QE23 = qlc:e(Q23),
        QE1223 = qlc:e(Q1223),
        {"Natural join of the terms from two document.",
            [ ?_assertEqual(QE12, TermNames100to199)
            , ?_assertEqual(QE23, TermNames200to299)
            , ?_assertEqual(QE1223, TermNames100to199 ++ TermNames200to299)
            ]}
    after
        ?SRV:close(Server)
    end.
%% @doc Read term positions back through a QLC document-term table.
%% The assertions show that Xapian keeps one record per distinct term
%% value (the two "term3" fields are merged into one record) and that
%% positions come back sorted ("term2" is inserted as [3,2,1] but read
%% back as [1,2,3]).
term_pos_qlc_gen() ->
    Path = testdb_path(term_pos_qlc),
    Params = [write, create, overwrite],
    Fields =
    [ #x_term{value = "term1", position = [1,2,3]}
    , #x_term{value = "term2", position = [3,2,1]}
    , #x_term{value = "term3", position = [1]}
    , #x_term{value = "term3", position = [2,3]}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, Fields),
        Meta = xapian_term_record:record(term_pos,
            record_info(fields, term_pos)),
        Table = xapian_term_qlc:document_term_table(Server, DocId, Meta),
        %% One query per term value, plus one full scan.
        Term1Records =
            qlc:e(qlc:q([X || X = #term_pos{value = <<"term1">>} <- Table])),
        Term2Records =
            qlc:e(qlc:q([X || X = #term_pos{value = <<"term2">>} <- Table])),
        Term3Records =
            qlc:e(qlc:q([X || X = #term_pos{value = <<"term3">>} <- Table])),
        AllRecords =
            qlc:e(qlc:q([X || X <- Table])),
        %% Expected records: every term ends up with the sorted
        %% position list [1,2,3] and position_count 3.
        Term1 = #term_pos{
            value = <<"term1">>, position_count = 3, positions = [1,2,3]},
        Term2 = #term_pos{
            value = <<"term2">>, position_count = 3, positions = [1,2,3]},
        Term3 = #term_pos{
            value = <<"term3">>, position_count = 3, positions = [1,2,3]},
        [ ?_assertEqual([Term1], Term1Records)
        , ?_assertEqual([Term2], Term2Records)
        , ?_assertEqual([Term3], Term3Records)
        , ?_assertEqual(erlang:length(AllRecords), 3)
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc Smoke test for `#x_text.features': `default', `spelling' and
%% `{except, ...}' (with either a single flag or a list) must all be
%% accepted.  Only checks that add_document/2 does not crash; returns
%% an empty test list.
term_generator_features_gen() ->
    Path = testdb_path(tg_features),
    Params = [write, create, overwrite],
    Document =
    [ #x_text{value = "The quick brown fox jumps over the lazy dog.",
              features = [default, {except, spelling},
                          spelling, {except, [spelling]}]}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        ?SRV:add_document(Server, Document),
        []
    after
        ?SRV:close(Server)
    end.
%% @doc Check `#x_text.position': indexing starts at the given base
%% position (5), so the first word lands at position 6 and the second
%% "the" at position 12.
%% (Fix: the position-numbering comment at L12096 had lost its `%%'
%% prefix, which broke the record literal.)
text_position_gen() ->
    Path = testdb_path(text_pos),
    Params = [write, create, overwrite],
    Document =
    [ #x_text{value = "The quick brown fox jumps over the lazy dog.",
              %% Positions:    6     7     8   9     10   11  12   13  14
              position = 5}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% Test a term generator
        DocId = ?SRV:add_document(Server, Document),
        Meta = xapian_term_record:record(term_pos,
            record_info(fields, term_pos)),
        Table = xapian_term_qlc:document_term_table(Server, DocId, Meta),
        Term1Records =
            qlc:e(qlc:q([X || X = #term_pos{value = <<"the">>} <- Table])),
        Term1 = #term_pos{
            value = <<"the">>, position_count = 2, positions = [6, 12]},
        [ ?_assertEqual([Term1], Term1Records)
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc Test a ValueCountMatchSpy: collect per-value frequencies for
%% slot 1 (`color') during a match, then query them through the plain
%% (value-ordered) and "top" (frequency-ordered) QLC tables, and read
%% the spy's metadata with match_spy_info/3.
%% (Fix: the comments at L12121 and L12145 had lost their `%%'
%% prefixes, which is a syntax error in Erlang.)
value_count_match_spy_gen() ->
    Path = testdb_path(value_count_mspy),
    Params = [write, create, overwrite,
              #x_value_name{slot = 1, name = color}],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% There are 2 "green" documents.
        Colors = ["Red", "Blue", "green", "white", "black", "green"],
        [add_color_document(Server, Color) || Color <- Colors],
        %% Call with a slot name
        SpySlot1 = xapian_match_spy:value_count(Server, color),
        %% Call with a slot number
        xapian_match_spy:value_count(Server, 1),
        Query = "",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        MSetParams = #x_match_set{
            enquire = EnquireResourceId,
            spies = [SpySlot1]},
        % MSetResourceId =
        ?SRV:match_set(Server, MSetParams),
        Meta = xapian_term_record:record(term_freq,
            record_info(fields, term_freq)),
        %% These elements are sorted by value.
        Table = xapian_term_qlc:value_count_match_spy_table(
            Server, SpySlot1, Meta),
        %% These elements are sorted by freq.
        TopTable = xapian_term_qlc:top_value_count_match_spy_table(
            Server, SpySlot1, 100, Meta),
        Values = qlc:e(qlc:q([Value || #term_freq{value = Value} <- Table])),
        %% "Red" was converted to <<"Red">> because of lookup function call.
        %% Erlang did not match it, but Xapian did.
        RedValues = qlc:e(qlc:q([Value
            || #term_freq{value = Value} <- Table, Value =:= "Red"])),
        OrderValues = qlc:e(qlc:q([Value || #term_freq{value = Value} <- Table,
            Value =:= "white" orelse Value =:= "black"])),
        TopAlphOrderValues =
            qlc:e(qlc:q([Value || #term_freq{value = Value} <- TopTable,
                Value =:= "white" orelse Value =:= "black"])),
        TopFreqOrderValues =
            qlc:e(qlc:q([Value || #term_freq{value = Value} <- TopTable,
                Value =:= "white" orelse Value =:= "green"])),
        %% match_spy_info
        SpySlot1Slot = ?SRV:match_spy_info(Server, SpySlot1, value_slot),
        SpySlot1DocCount = ?SRV:match_spy_info(Server, SpySlot1, document_count),
        SpySlot1Info = ?SRV:match_spy_info(Server, SpySlot1,
            [value_slot, document_count]),
        [ ?_assertEqual(Values,
            [<<"Blue">>, <<"Red">>, <<"black">>, <<"green">>, <<"white">>])
        , ?_assertEqual(RedValues, [<<"Red">>])
        , {"Check order",
            [ ?_assertEqual(OrderValues, [<<"black">>, <<"white">>])
            , ?_assertEqual(TopAlphOrderValues, [<<"black">>, <<"white">>])
            , ?_assertEqual(TopFreqOrderValues, [<<"green">>, <<"white">>])
            ]}
        , ?_assertEqual(RedValues, [<<"Red">>])
        , {"xapian_server:match_spy_info/3",
            [ ?_assertEqual(SpySlot1Info, [{value_slot, SpySlot1Slot}
                ,{document_count, SpySlot1DocCount}])
            ]}
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc Add a one-field document storing `Color' in the `color' slot.
add_color_document(Server, Color) ->
    ?SRV:add_document(Server, [#x_value{slot = color, value = Color}]).
%% @doc ValueCountMatchSpy over a slot declared with `type = float'.
%% Integer inputs (10, 100, 200, 20) must come back as floats, sorted
%% numerically, and both QLC lookups and self-joins must work on them.
float_value_count_match_spy_gen() ->
    Path = testdb_path(value_count_mspy),
    Params = [write, create, overwrite,
              #x_value_name{slot = 1, name = page_count, type = float}],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        Doc1 = [ #x_value{slot = page_count, value = 10} ],
        Doc2 = [ #x_value{slot = page_count, value = 100} ],
        Doc3 = [ #x_value{slot = page_count, value = 200} ],
        Doc4 = [ #x_value{slot = page_count, value = 20} ],
        Docs = [ Doc1, Doc2, Doc3, Doc4 ],
        % DocIds =
        [ ?SRV:add_document(Server, Doc) || Doc <- Docs ],
        %% Call with a slot name
        SpySlot1Res = xapian_match_spy:value_count(Server, page_count),
        Query = "",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        MSetParams = #x_match_set{
            enquire = EnquireResourceId,
            spies = [SpySlot1Res]},
        %% Collect statistic
        % MSetResourceId =
        ?SRV:match_set(Server, MSetParams),
        Meta = xapian_term_record:record(term_freq,
            record_info(fields, term_freq)),
        %% Has it the same type?
        Slot1 = xapian_server:match_spy_info(Server, SpySlot1Res, value_slot),
        ?assertEqual(Slot1, 1),
        ValueType = xapian_server:slot_to_type(Server, Slot1),
        ?assertEqual(ValueType, float),
        %% These elements sorted by value.
        Table = xapian_term_qlc:value_count_match_spy_table(
            Server, SpySlot1Res, Meta),
        Values =
            qlc:e(qlc:q([Value || #term_freq{value = Value} <- Table])),
        %% Lookup with an integer key must still match the float value.
        FilteredValues =
            qlc:e(qlc:q([Value || #term_freq{value = Value} <- Table, Value =:= 10])),
        %% Self-join of the spy table on the value field.
        JoinValues =
            qlc:e(qlc:q([V1 || #term_freq{value = V1} <- Table,
                #term_freq{value = V2} <- Table, V1 =:= V2])),
        [ {"Float values inside MatchSpy.",
            ?_assertEqual(Values, [10.0, 20.0, 100.0, 200.0])}
        , {"Join float values.",
            ?_assertEqual(JoinValues, [10.0, 20.0, 100.0, 200.0])}
        , {"Lookup float values.",
            ?_assertEqual(FilteredValues, [10.0])}
        ]
    after
        ?SRV:close(Server)
    end.
%% Terms can be deleted, added or replaced using `#x_term{}'.
%%
%% Walks one term ("term") through every `#x_term.action' variant
%% (add / update / set / remove), with and without `ignore = false',
%% and checks both the resulting WDF and the errors raised for
%% invalid transitions.
term_advanced_actions_gen() ->
    Path = testdb_path(adv_actions),
    Params = [write, create, overwrite],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, []),
        %% U(Doc): apply a document update to the single test document.
        U = fun(Doc) ->
            ?SRV:update_document(Server, DocId, Doc)
            end,
        Meta = xapian_term_record:record(term, record_info(fields, term)),
        %% FindTermFn(Value): re-read the document and return the
        %% matching #term{} records (empty list when absent).
        FindTermFn =
        fun(Value) ->
            DocRes = xapian_server:document(Server, DocId),
            Table = xapian_term_qlc:document_term_table(
                Server, DocRes, Meta, [ignore_empty]),
            ?SRV:release_resource(Server, DocRes),
            qlc:e(qlc:q([X || X = #term{value = V} <- Table, V =:= Value]))
            end,
        %% FF(): look up "term"; UU(Field): apply one action, then look up.
        FF = fun() -> FindTermFn("term") end,
        UU = fun(Field) -> U([Field]), FF() end,
        Term = #x_term{value = "term"},
        TermAdd = Term#x_term{action = add},
        TermAddNotIgnore = Term#x_term{action = add, ignore = false},
        TermUpdate = Term#x_term{action = update},
        TermUpdateNotIgnore = TermUpdate#x_term{ignore = false},
        TermSet = Term#x_term{action = set},
        TermDec = TermSet#x_term{frequency = -1},
        TermSetAbs = TermSet#x_term{frequency = {abs, 10}},
        TermRemoveIgnore = Term#x_term{action = remove, frequency = 0},
        TermRemove = TermRemoveIgnore#x_term{ignore = false},
        TermRemove2 = TermRemove#x_term{frequency = 123},
        Terms1 = FF(),
        Terms2 = UU(TermAddNotIgnore),
        %% Error will be thrown. Value was not changed.
        ?assertError(#x_error{type = <<"BadArgumentDriverError">>},
            UU(TermAddNotIgnore)),
        Terms3 = FF(),
        %% Error will be ignored. Value was not changed.
        Terms4 = UU(TermAdd),
        %% Start changing of WDF
        Terms5 = UU(TermUpdate),
        Terms6 = UU(TermSet),
        Terms7 = UU(TermDec),
        Terms8 = UU(TermSetAbs),
        %% Cannot remove term, because WDF is not matched.
        ?assertError(#x_error{type = <<"BadArgumentDriverError">>},
            UU(TermRemove2)),
        Terms9 = FF(),
        %% Delete the term
        Terms10 = UU(TermRemove),
        %% Cannot delete the term twice
        ?assertError(#x_error{type = <<"InvalidArgumentError">>},
            UU(TermRemove)),
        Terms11 = FF(),
        %% Cannot update a non-existing term
        ?assertError(#x_error{type = <<"BadArgumentDriverError">>},
            UU(TermUpdateNotIgnore)),
        Terms12 = FF(),
        %% It will be ignored.
        Terms13 = UU(TermUpdate),
        %% Expected snapshots of the term at each WDF level.
        NormTerm1 = #term{value = <<"term">>, wdf = 1},
        NormTerm2 = #term{value = <<"term">>, wdf = 2},
        NormTerm3 = #term{value = <<"term">>, wdf = 3},
        NormTerm4 = #term{value = <<"term">>, wdf = 10},
        [ ?_assertEqual(Terms1, [])
        , ?_assertEqual(Terms2, [NormTerm1])
        , ?_assertEqual(Terms3, [NormTerm1])
        , ?_assertEqual(Terms4, [NormTerm1])
        , ?_assertEqual(Terms5, [NormTerm2])
        , ?_assertEqual(Terms6, [NormTerm3])
        , ?_assertEqual(Terms7, [NormTerm2])
        , ?_assertEqual(Terms8, [NormTerm4])
        , ?_assertEqual(Terms9, [NormTerm4])
        , ?_assertEqual(Terms10, [])
        , ?_assertEqual(Terms11, [])
        , ?_assertEqual(Terms12, [])
        , ?_assertEqual(Terms13, [])
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc Check that a `#x_term_generator{}' with a simple stopper
%% filters stop words: with the stopper, "my" (stemmed as <<"Zmy">>)
%% must not be indexed; without it, it must be.
term_generator_gen() ->
    Path = testdb_path(term_generator),
    Params = [write, create, overwrite,
              #x_stemmer{language = <<"english">>}],
    Meta = xapian_term_record:record(term, record_info(fields, term)),
    Stopper = xapian_resource:simple_stopper(["my", "as", "the", "a", "an"]),
    %% Document1 uses the default generator; Document2 installs a
    %% generator with the stopper before the text.
    Document1 =
    [ #x_text{value = "My text is inside the #x_text record."}
    ],
    Document2 =
    [ #x_term_generator{stopper = Stopper}
    , #x_text{value = "My text is inside the #x_text record."}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    ExtractDocTerms = fun(Doc) ->
        TermTable = xapian_term_qlc:document_term_table(Server, Doc, Meta),
        Values = qlc:q([Val || #term{value = Val} <- TermTable]),
        qlc:e(Values)
        end,
    try
        DocId1 = ?SRV:add_document(Server, Document1),
        DocId2 = ?SRV:add_document(Server, Document2),
        Terms2 = ExtractDocTerms(DocId2),
        Terms1 = ExtractDocTerms(DocId1),
        %% Test, that the stemmed forms of the words was filtered by Stopper.
        [ {"Does the stopper actually work?"
            ,[?_assert(lists:member(<<"Zmy">>, Terms1))
             ,?_assertNot(lists:member(<<"Zmy">>, Terms2))
            ]}
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc Compare the `default' and `standard' named term generators:
%% the default one stems ("cats" also indexed as <<"Zcat">>), the
%% standard one does not.
%% (Fix: the L12351 comment had lost its `%%' prefix, which is a
%% syntax error in Erlang.)
standard_term_generator_gen() ->
    Path = testdb_path(std_term_generator),
    Params = [write, create, overwrite,
              #x_stemmer{language = <<"english">>}],
    Meta = xapian_term_record:record(term, record_info(fields, term)),
    %% The default generator uses the "english" stemmer.
    Document1 =
    [ #x_term_generator{name = default}
    , #x_text{value = "cats"}
    ],
    %% The standard generator is without any stemmer.
    Document2 =
    [ #x_term_generator{name = standard}
    , #x_text{value = "cats"}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    ExtractDocTerms = fun(Doc) ->
        TermTable = xapian_term_qlc:document_term_table(Server, Doc, Meta),
        Values = qlc:q([Val || #term{value = Val} <- TermTable]),
        qlc:e(Values)
        end,
    try
        DocId1 = ?SRV:add_document(Server, Document1),
        DocId2 = ?SRV:add_document(Server, Document2),
        Terms2 = ExtractDocTerms(DocId2),
        Terms1 = ExtractDocTerms(DocId1),
        [ {"Is #x_term_generator.name respected?"
            ,[?_assertEqual(Terms1, [<<"Zcat">>,<<"cats">>])
             ,?_assertEqual(Terms2, [<<"cats">>])
            ]}
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc Smoke test: a term generator created as a server-side resource
%% (xapian_server:term_generator/2) can be passed to add_document/2
%% via `#x_term_generator{name = Resource}'.  Returns no tests.
%% (Fix: the L12384 comment had lost its `%%' prefix, which is a
%% syntax error in Erlang.)
term_generator_from_resource_gen() ->
    Path = testdb_path(res_term_generator),
    Params = [write, create, overwrite,
              #x_stemmer{language = <<"english">>}],
    %% The default generator uses the "english" stemmer.
    Document1 =
    [ #x_term_generator{name = default}
    , #x_text{value = "cats"}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        TGRes = xapian_server:term_generator(Server, #x_term_generator{}),
        TGRec = #x_term_generator{name = TGRes},
        ?SRV:add_document(Server, [TGRec|Document1]),
        []
    after
        ?SRV:close(Server)
    end.
%% @doc A database created with write access can be reopened read-only.
reopen_test() ->
    Path = testdb_path(reopen),
    {ok, Writer} = ?SRV:start_link(Path, [write, create, overwrite]),
    ?SRV:close(Writer),
    {ok, Reader} = ?SRV:start_link(Path, []),
    ?SRV:close(Reader).
-record(stemmer_test_record, {docid, data}).
%% @doc A per-document `#x_stemmer{language = none}' must override the
%% database default ("english"): "cats" is indexed unstemmed, with no
%% <<"Zcat">> form.
%% (Fix: the L12416 comment had lost its `%%' prefix, which is a
%% syntax error in Erlang.)
cancel_default_stemmer_gen() ->
    Path = testdb_path(cancel_stemmer),
    Params = [write, create, overwrite,
              #x_stemmer{language = <<"english">>}],
    Document =
    [ #x_stemmer{language = none}
    , #x_text{value = "cats"}],
    Meta = xapian_term_record:record(term, record_info(fields, term)),
    {ok, Server} = ?SRV:start_link(Path, Params),
    %% The default generator uses the "english" stemmer.
    ExtractDocTerms = fun(Doc) ->
        TermTable = xapian_term_qlc:document_term_table(Server, Doc, Meta),
        Values = qlc:q([Val || #term{value = Val} <- TermTable]),
        qlc:e(Values)
        end,
    try
        DocId = ?SRV:add_document(Server, Document),
        Terms = ExtractDocTerms(DocId),
        [ {"Is #x_stemmer.language = none respected?"
            ,?_assertEqual(Terms, [<<"cats">>])}
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc Query-string parsing against a stemmed database: boolean
%% prefixes (`author:'), AND/NEAR operators, wildcards, a custom
%% parser (Danish stemmer, no stemming strategy, no wildcard
%% expansion, default op AND), and the `{except, lovehate}' feature
%% flag for the "-term" syntax.
%% (Fix: the sample-term comment at L12464 had lost its `%%' prefix,
%% which is a syntax error in Erlang; restored below.)
stemmer_gen() ->
    % Open test with the default stemmer
    Path = testdb_path(stemmer),
    Params = [write, create, overwrite,
              #x_stemmer{language = <<"english">>},
              #x_prefix_name{name = author, prefix = <<$A>>, is_boolean=true}],
    Document =
    [ #x_data{value = "My test data as iolist (NOT INDEXED)"}
    , #x_text{value = "Return a list of available languages."}
    , #x_text{value = "And filter it."}
    , #x_delta{position=300}
    , #x_text{value = "And other string is here."}
    , #x_text{value = <<"Michael">>, prefix = author}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% Test a term generator
        DocId = ?SRV:add_document(Server, Document),
        ?assert(is_integer(DocId)),
        %% Test a query parser
        Offset = 0,
        PageSize = 10,
        Meta = xapian_record:record(stemmer_test_record,
            record_info(fields, stemmer_test_record)),
        Q0 = #x_query_string{parser=standard, value="return"},
        Q1 = #x_query_string{value="return AND list"},
        Q2 = #x_query_string{value="author:michael"},
        Q3 = #x_query_string{value="author:olly list"},
        Q4 = #x_query_string{value="author:Michael"},
        Q5 = #x_query_string{value="retur*", features=[default, wildcard]},
        Q6 = #x_query_string{value="other AND Return"},
        Q7 = #x_query_string{value="list NEAR here"},
        Q8 = #x_query_string{value="list NEAR filter"},
        %% {x_query_string,{x_query_parser, default,{x_stemmer, ...}, []},
        %% "trinitrotoluol",<<>>,undefined}
        Q9Stem = #x_stemmer{language=da},
        Q9Parser = #x_query_parser{stemmer=Q9Stem,
            stemming_strategy=none,
            max_wildcard_expansion=0,
            default_op='AND'},
        Q9 = #x_query_string{value="return", parser=Q9Parser},
        %% `exclude' is used for unsetting flags.
        %% Test x_query_parser.features.
        Q10 = #x_query_string{value="test -return"},
        Q11 = #x_query_string{value="test -return",
            features=[default, {except, lovehate}]},
        Q12 = #x_query_string{value="test -return",
            features=[default, {except, [lovehate]}]},
        F = fun(Query) ->
            RecList = ?SRV:query_page(Server, Offset, PageSize, Query, Meta),
            io:format(user, "~n~p~n", [RecList]),
            RecList
            end,
        Qs =
        [Q0, Q1, Q2, Q3, Q4, Q5, Q6, Q7, Q8, Q9, Q10, Q11, Q12],
        [R0, R1, R2, R3, R4, R5, R6, R7, R8, R9, R10, R11, R12] =
        lists:map(F, Qs),
        [ ?_assert(is_list(R0) andalso length(R0) =:= 1)
        , ?_assertEqual(R1, R0)
        , ?_assertEqual(R2, R0)
        , ?_assertEqual(R3, [])
        , ?_assertEqual(R4, [])
        , ?_assertEqual(R5, R0)
        , ?_assertEqual(R6, R0)
        , ?_assertEqual(R7, [])
        , ?_assertEqual(R8, R0)
        , ?_assertEqual(R9, R0)
        , ?_assertEqual(R10, [])
        , ?_assertEqual(R11, R0)
        , ?_assertEqual(R12, R0)
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc Smoke test for `#x_query_parser{}' variants: the default
%% parser, a parser with `default_op = 'AND'', the named `standard'
%% parser (as an atom and as a record), and a parser with a number
%% value-range processor on the `num' float slot ("1..2mm").
%% Only checks that query_page/5 does not crash.
query_parser_test() ->
    Path = testdb_path(parser),
    Params = [write, create, overwrite,
              #x_value_name{slot = 0, name = num, type = float}],
    Document =
    [ #x_data{value = "My test data as iolist (NOT INDEXED)"}
    , #x_text{value = "The quick brown fox jumps over the lazy dog."}
    , #x_value{slot = num, value = 1}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% Test a term generator
        DocId = ?SRV:add_document(Server, Document),
        ?assert(is_integer(DocId)),
        %% Test a query parser
        Offset = 0,
        PageSize = 10,
        Meta = xapian_record:record(document, record_info(fields, document)),
        F = fun(Query) ->
            RecList = ?SRV:query_page(Server, Offset, PageSize, Query, Meta),
            io:format(user, "~n~p~n", [RecList]),
            RecList
            end,
        P1 = #x_query_parser{},
        P2 = #x_query_parser{default_op='AND'},
        P4 = #x_query_parser{name=standard},
        NVRP = xapian_resource:number_value_range_processor(num, "mm", suffix),
        P5 = #x_query_parser{value_range_processors = [NVRP]},
        Q1 = #x_query_string{parser=P1, value="dog"},
        Q2 = #x_query_string{parser=P2, value="dog fox"},
        %% Empty parsers
        Q3 = #x_query_string{parser=standard, value="dog"},
        Q4 = #x_query_string{parser=P4, value="dog"},
        Q5 = #x_query_string{parser=P5, value="1..2mm"},
        F(Q1),
        F(Q2),
        F(Q3),
        F(Q4),
        F(Q5)
    after
        ?SRV:close(Server)
    end.
%% @doc Test xapian_server:parse_string/3 with a compiled query
%% parser: request the corrected query string (`same' when nothing
%% was corrected), a query resource, and both at once, then run the
%% compiled query.
%% (Fix: the comments at L12559 and L12563 had lost their `%%'
%% prefixes, which is a syntax error in Erlang.)
parse_string_gen() ->
    Path = testdb_path(parse_string),
    Params = [write, create, overwrite],
    Document =
    [ #x_text{value = "The quick brown fox jumps over the lazy dog."}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% Test a term generator
        DocId = ?SRV:add_document(Server, Document),
        ?assert(is_integer(DocId)),
        P1 = #x_query_parser{},
        %% CP is a compiled query parser (as a resource).
        CP1 = xapian_server:query_parser(Server, P1),
        S1 = #x_query_string{parser=CP1, value="dog"},
        CS1 = xapian_server:parse_string(Server, S1, corrected_query_string),
        %% CQ is a compiled query (as a resource).
        CQ1 = xapian_server:parse_string(Server, S1, query_resource),
        Fs1 = xapian_server:parse_string(Server, S1, [corrected_query_string,
            query_resource]),
        Ids1 = all_record_ids(Server, CQ1),
        [ ?_assertEqual(CS1, same) %% same means the same.
        , ?_assertMatch([{corrected_query_string, same}
            ,{query_resource, _}], Fs1)
        , ?_assertEqual(Ids1, [DocId])
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc Spelling correction through parse_string/3: index a document
%% with the `spelling' feature, then parse "bown" with the
%% `spelling_correction' flag and expect <<"brown">> back.  Also
%% checks the spelling QLC table contains "brown".
%% (Fix: the L12596 comment had lost its `%%' prefix, which is a
%% syntax error in Erlang.)
parse_string_spelling_correction_gen() ->
    Path = testdb_path(ps_spell),
    Params = [write, create, overwrite],
    Document =
    [ #x_text{value = "The quick brown fox jumps over the lazy dog.",
              features = [spelling]}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% Test a term generator
        ?SRV:add_document(Server, Document),
        Meta = xapian_term_record:record(term_freq,
            record_info(fields, term_freq)),
        Table = xapian_term_qlc:spelling_table(Server, Meta),
        BrownQuery = qlc:q([Value || #term_freq{value = Value} <- Table,
            Value =:= <<"brown">>]),
        BrownRecords = qlc:e(BrownQuery),
        Records = qlc:e(Table),
        io:format(user, "~n~p~n", [Records]),
        P1 = #x_query_parser{},
        %% CP is a compiled query parser (as a resource).
        CP1 = xapian_server:query_parser(Server, P1),
        S1 = #x_query_string{parser=CP1, value="bown",
            features = [default, spelling_correction]},
        CS1 = xapian_server:parse_string(Server, S1, corrected_query_string),
        [ ?_assertEqual(CS1, <<"brown">>)
        , ?_assertMatch([_], BrownRecords)
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc Test add_spelling/2: seed the spelling dictionary from a text,
%% then apply per-word frequency edits — plain increments, `{cur, N}'
%% relative updates, `{abs, N}' absolute values, negative decrements
%% (dropping a word when its frequency reaches zero) and explicit
%% `remove' — and verify the before/after table contents plus a
%% get_spelling_suggestion/2 lookup.
add_spelling_gen() ->
    Path = testdb_path(add_spelling),
    Params = [write, create, overwrite],
    Document =
    [ #x_text{value = "The quick brown fox jumps over the lazy dog."}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% Test a term generator
        ?SRV:add_spelling(Server, Document),
        Meta = xapian_term_record:record(term_freq,
            record_info(fields, term_freq)),
        Table1 = xapian_term_qlc:spelling_table(Server, Meta),
        Records1 = qlc:e(Table1),
        %% Frequency edits applied on top of the seeded dictionary.
        Spelling = [#x_term{value = "cat", frequency = 1}
            ,#x_term{value = "dog", frequency = 1}
            ,#x_term{value = "fox", frequency = {cur, -1}}
            ,#x_term{value = "the", frequency = {cur, -1}}
            ,#x_term{value = "lazy", frequency = {cur, 5}}
            ,#x_term{value = "over", frequency = {abs, 5}}
            ,#x_term{value = "quick", frequency = -5}
            ,#x_term{value = "jumps", action = remove}
            ],
        ?SRV:add_spelling(Server, Spelling),
        Corrected = ?SRV:get_spelling_suggestion(Server, <<"lazzy">>),
        Table2 = xapian_term_qlc:spelling_table(Server, Meta),
        Records2 = qlc:e(Table2),
        io:format(user, "~n Before: ~p\tAfter: ~p~n", [Records1, Records2]),
        [?_assertEqual(Records1, [#term_freq{value = <<"brown">>, freq = 1}
            ,#term_freq{value = <<"dog">>, freq = 1}
            ,#term_freq{value = <<"fox">>, freq = 1}
            ,#term_freq{value = <<"jumps">>, freq = 1}
            ,#term_freq{value = <<"lazy">>, freq = 1}
            ,#term_freq{value = <<"over">>, freq = 1}
            ,#term_freq{value = <<"quick">>, freq = 1}
            ,#term_freq{value = <<"the">>, freq = 2}
            ])
        ,?_assertEqual(Records2, [#term_freq{value = <<"brown">>, freq = 1}
            ,#term_freq{value = <<"cat">>, freq = 1}
            ,#term_freq{value = <<"dog">>, freq = 2}
            ,#term_freq{value = <<"lazy">>, freq = 6}
            ,#term_freq{value = <<"over">>, freq = 5}
            ,#term_freq{value = <<"the">>, freq = 1}
            ]),
        {"get_spelling_suggestion test",
            ?_assertEqual(Corrected, <<"lazy">>)}
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc Test the synonym API: add_synonym/3, the synonym key and value
%% QLC tables, remove_synonym/3 and clear_synonyms/2 (which makes the
%% table constructor raise `EmptySetDriverError' and is idempotent).
%% (Fix: L12678 is a commented-out alternative that had lost its `%%'
%% prefix — as code it would be a duplicate `Table3' binding with
%% garbled tokens.  The table is deliberately reused: `Table3 = Table2'.)
synonym_gen() ->
    Path = testdb_path(synonym),
    Params = [write, create, overwrite],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        Synonyms = ["trial", "examination", "exam", "proof",
            "evaluation", "assay", "check"],
        ExpectedRecords2 =
            [#term_value{value = list_to_binary(X)}
                || X <- lists:sort(Synonyms)],
        ExpectedRecords3 =
            ExpectedRecords2 -- [#term_value{value = <<"check">>}],
        [?SRV:add_synonym(Server, "test", Synonym)
            || Synonym <- Synonyms],
        Meta = xapian_term_record:record(term_value,
            record_info(fields, term_value)),
        Table1 = xapian_term_qlc:synonym_key_table(Server, "", Meta),
        Records1 = qlc:e(Table1),
        Table2 = xapian_term_qlc:synonym_table(Server, "test", Meta),
        Records2 = qlc:e(Table2),
        ?SRV:remove_synonym(Server, "test", "check"),
        %% Table3 = xapian_term_qlc:synonym_table(Server, "test", Meta),
        Table3 = Table2,
        Records3 = qlc:e(Table3),
        io:format(user, "~n~p~n", [Records2]),
        %% Test clear_synonyms.
        ?SRV:clear_synonyms(Server, "test"),
        %% There are no synonyms in the DB.
        ?assertError(#x_error{type = <<"EmptySetDriverError">>},
            xapian_term_qlc:synonym_table(Server, "test", Meta)),
        %% Try run it again.
        ?SRV:clear_synonyms(Server, "test"),
        [?_assertEqual(Records1, [#term_value{value = <<"test">>}])
        ,?_assertEqual(Records2, ExpectedRecords2)
        ,?_assertEqual(Records3, ExpectedRecords3)
        ]
    after
        ?SRV:close(Server)
    end.
%% ------------------------------------------------------------------
%% Transaction tests
%% ------------------------------------------------------------------
%% @doc Multi-server transactions: a good function commits; a crashing
%% function rolls back consistently; killing one participant server
%% leaves the transaction uncommitted and inconsistent, whether the
%% transaction process dies immediately or lingers until the monitor
%% kills it.
%% (Fix: the comments at L12715, L12721, L12723 and L12732 had lost
%% their `%%' prefixes, which is a syntax error in Erlang.)
transaction_gen() ->
    % Open test
    Path1 = testdb_path(transaction1),
    Path2 = testdb_path(transaction2),
    Params = [write, create, overwrite],
    {ok, Server1} = ?SRV:start_link(Path1, Params),
    {ok, Server2} = ?SRV:start_link(Path2, Params),
    Fun = fun([_S1, _S2]) ->
        test_result
        end,
    BadFun = fun([_S1, _S2]) ->
        erlang:exit(badcat)
        end,
    %% Check fallback
    BadFun2 = fun([S1, _S2]) ->
        %% Try to kill S1.
        %% Server1 will be killed because of supervision.
        erlang:exit(S1, hello)
        end,
    %% Check fallback when the transaction process is still alive
    BadFun3 = fun([S1, _S2]) ->
        erlang:exit(S1, hello),
        %% Sleep for 1 second.
        %% Because this process is active, then the monitor process will
        %% kill it, because one of the servers is dead.
        timer:sleep(1000)
        end,
    Result1 = ?SRV:transaction([Server1, Server2], Fun, infinity),
    Result2 = ?SRV:transaction([Server1, Server2], BadFun),
    erlang:unlink(Server1),
    %% Wait for DB closing.
    timer:sleep(1000),
    Result3 = ?SRV:transaction([Server1, Server2], BadFun2),
    %% Server1 was killed. Server2 will replace it.
    {ok, Server3} = ?SRV:start_link(Path1, Params),
    erlang:unlink(Server2),
    timer:sleep(1000),
    Result4 = ?SRV:transaction([Server2, Server3], BadFun3),
    %% Server3 is still alive.
    ?SRV:close(Server3),
    #x_transaction_result{
        is_committed=Committed1,
        is_consistent=Consistent1
    } = Result1,
    #x_transaction_result{
        is_committed=Committed2,
        is_consistent=Consistent2
    } = Result2,
    #x_transaction_result{
        is_committed=Committed3,
        is_consistent=Consistent3
    } = Result3,
    #x_transaction_result{
        is_committed=Committed4,
        is_consistent=Consistent4
    } = Result4,
    {"Check transactions' results for good and bad functions.",
        [ ?_assertEqual(Committed1, true)
        , ?_assertEqual(Consistent1, true)
        , ?_assertEqual(Committed2, false)
        , ?_assertEqual(Consistent2, true)
        , ?_assertEqual(Committed3, false)
        , ?_assertEqual(Consistent3, false)
        , ?_assertEqual(Committed4, false)
        , ?_assertEqual(Consistent4, false)
        , ?_assertEqual(erlang:is_process_alive(Server1), false)
        , ?_assertEqual(erlang:is_process_alive(Server2), false)
        , ?_assertEqual(erlang:is_process_alive(Server3), false)
        ]}.
%% @doc A transaction function that sleeps forever must be killed by
%% the 100 ms timeout: not committed, but the databases stay
%% consistent.
transaction_timeout_gen() ->
    % Open test
    Path1 = testdb_path(tt1),
    Path2 = testdb_path(tt2),
    Params = [write, create, overwrite],
    {ok, Server1} = ?SRV:start_link(Path1, Params),
    {ok, Server2} = ?SRV:start_link(Path2, Params),
    Fun = fun([_S1, _S2]) ->
        timer:sleep(infinity)
        end,
    Result1 = ?SRV:transaction([Server1, Server2], Fun, 100),
    ?SRV:close(Server1),
    ?SRV:close(Server2),
    #x_transaction_result{
        is_committed=Committed1,
        is_consistent=Consistent1
    } = Result1,
    {"The transaction is killed by timeout.",
        [ ?_assertEqual(Committed1, false)
        , ?_assertEqual(Consistent1, true)
        ]}.
%% @doc A transaction that includes a read-only server must fail with
%% reason `readonly_db' regardless of the servers' order, leaving the
%% databases consistent.
transaction_readonly_error_gen() ->
    % Open test
    Path1 = testdb_path(transaction1),
    Path2 = testdb_path(transaction4),
    Params1 = [],
    Params2 = [write, create, overwrite],
    {ok, Server1} = ?SRV:start_link(Path1, Params1),
    {ok, Server2} = ?SRV:start_link(Path2, Params2),
    Fun = fun([_S1, _S2]) ->
        test_result
        end,
    %% Same function, both orders of the read-only/writable pair.
    Result1 = ?SRV:transaction([Server1, Server2], Fun, infinity),
    Result2 = ?SRV:transaction([Server2, Server1], Fun, infinity),
    ?SRV:close(Server1),
    ?SRV:close(Server2),
    #x_transaction_result{
        is_committed=Committed1,
        is_consistent=Consistent1,
        reason=Reason1
    } = Result1,
    #x_transaction_result{
        is_committed=Committed2,
        is_consistent=Consistent2,
        reason=Reason2
    } = Result2,
    {"Cannot start transaction for readonly server.",
        [ {"read_only @ write",
            [ ?_assertEqual(Committed1, false)
            , ?_assertEqual(Consistent1, true)
            , ?_assertEqual(Reason1, readonly_db)
            ]}
        , {"write @ read_only",
            [ ?_assertEqual(Committed2, false)
            , ?_assertEqual(Consistent2, true)
            , ?_assertEqual(Reason2, readonly_db)
            ]}
        ]}.
%% ------------------------------------------------------------------
%% Extracting information
%% ------------------------------------------------------------------
%% The record will contain information about a document.
%% `slot1' is a value.
%% docid and data are special fields.
%% Result records for the read_document / document_info tests below.
-record(rec_test, {docid, slot1, data}).
-record(rec_test2, {docid, slot1, slot2, data}).
%% Only the document data — no id, no values.
-record(short_rec_test, {data}).
%% @doc Store a document and read it back with read_document/3:
%% `docid' is filled with the document id, `slot1' with the stored
%% value (as a binary) and `data' with the #x_data payload.
read_document_test() ->
    % Open test
    Path = testdb_path(read_document),
    Params = [write, create, overwrite,
              #x_value_name{slot = 1, name = slot1}],
    Stem = xapian_resource:simple_stemmer(<<"english">>),
    Document =
    [ #x_term_generator{stemmer = Stem}
    , #x_data{value = "My test data as iolist"}
    , #x_value{slot = slot1, value = "Slot #0"}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, Document),
        Meta = xapian_record:record(rec_test, record_info(fields, rec_test)),
        Rec = ?SRV:read_document(Server, DocId, Meta),
        ?assertEqual(Rec#rec_test.docid, 1),
        ?assertEqual(Rec#rec_test.slot1, <<"Slot #0">>),
        ?assertEqual(Rec#rec_test.data, <<"My test data as iolist">>)
    after
        ?SRV:close(Server)
    end.
%% @doc document_info/3 extracts fields from a document description
%% WITHOUT storing it: `slot1' and `data' are filled as for
%% read_document/3, but `docid' stays `undefined'.
document_info_test() ->
    % Open test
    Path = testdb_path(read_document),
    Params = [write, create, overwrite,
              #x_value_name{slot = 1, name = slot1}],
    Document =
    [ #x_stemmer{language = <<"english">>}
    , #x_data{value = "My test data as iolist"}
    , #x_value{slot = slot1, value = "Slot #0"}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        Meta = xapian_record:record(rec_test, record_info(fields, rec_test)),
        Rec = ?SRV:document_info(Server, Document, Meta),
        ?assertEqual(Rec#rec_test.docid, undefined),
        ?assertEqual(Rec#rec_test.slot1, <<"Slot #0">>),
        ?assertEqual(Rec#rec_test.data, <<"My test data as iolist">>)
    after
        ?SRV:close(Server)
    end.
%% @doc Float-typed value slots: integers written into a `float' slot are
%% read back as floats; unset slots read back as `undefined'; value-range
%% and value-comparison queries match on the numeric value.
read_float_value_gen() ->
    % Open test
    Path = testdb_path(read_float),
    Params = [write, create, overwrite
             , #x_value_name{slot = 1, name = slot1, type = float}
             , #x_value_name{slot = 2, name = slot2, type = string}
             ],
    Document1 =
        [ #x_data{value = "My test data as iolist"}
        , #x_value{slot = slot1, value = 7}
        ],
    Document2 =
        [ #x_data{value = "My test data as iolist"}
        , #x_value{slot = slot1, value = 66}
        , #x_value{slot = slot2, value = "tentacle"}
        ],
    Document3 =
        [ #x_data{value = "My test data as iolist"}
        ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId1 = ?SRV:add_document(Server, Document1),
        DocId2 = ?SRV:add_document(Server, Document2),
        DocId3 = ?SRV:add_document(Server, Document3),
        Meta = xapian_record:record(rec_test2, record_info(fields, rec_test2)),
        Rec1 = ?SRV:read_document(Server, DocId1, Meta),
        Rec2 = ?SRV:read_document(Server, DocId2, Meta),
        Rec3 = ?SRV:read_document(Server, DocId3, Meta),
        %% #document{} is the simple container.
        Meta2 = xapian_record:record(document, record_info(fields, document)),
        Offset = 0,
        PageSize = 10,
        Query68 = #x_query_value_range{slot=slot1, from=6, to=8},
        Query8 = #x_query_value{op=lower, slot=slot1, value=8},
        Query7 = #x_query_value_range{slot=slot1, from=7, to=7},
        RecList68 = ?SRV:query_page(Server, Offset, PageSize, Query68, Meta2),
        RecList8 = ?SRV:query_page(Server, Offset, PageSize, Query8, Meta2),
        RecList7 = ?SRV:query_page(Server, Offset, PageSize, Query7, Meta2),
        %% Only document 1 (slot1 = 7.0) falls into all three queries.
        [ ?_assertEqual(Rec1#rec_test2.docid, 1)
        , ?_assertEqual(Rec1#rec_test2.slot1, 7.0)
        , ?_assertEqual(Rec1#rec_test2.slot2, undefined)
        , ?_assertEqual(Rec1#rec_test2.data, <<"My test data as iolist">>)
        , ?_assertEqual(Rec2#rec_test2.docid, 2)
        , ?_assertEqual(Rec2#rec_test2.slot1, 66.0)
        , ?_assertEqual(Rec2#rec_test2.slot2, <<"tentacle">>)
        , ?_assertEqual(Rec2#rec_test2.data, <<"My test data as iolist">>)
        , ?_assertEqual(Rec3#rec_test2.docid, 3)
        , ?_assertEqual(Rec3#rec_test2.slot1, undefined)
        , ?_assertEqual(Rec3#rec_test2.slot2, undefined)
        , ?_assertEqual(Rec3#rec_test2.data, <<"My test data as iolist">>)
        , ?_assertEqual(RecList68, [#document{docid=1}])
        , ?_assertEqual(RecList7, [#document{docid=1}])
        , ?_assertEqual(RecList8, [#document{docid=1}])
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc A `string'-typed slot rejects non-UTF8 binaries (server error),
%% while a `bytes'-typed slot stores them verbatim.
append_bytes_value_gen() ->
    % Open test
    Path = testdb_path(bytes),
    Params = [write, create, overwrite
             , #x_value_name{slot = 1, name = slot1, type = string}
             , #x_value_name{slot = 2, name = slot2, type = bytes}
             ],
    Document1 =
        [ #x_data{value = "My test data as iolist"}
        , #x_value{slot = slot1, value = <<128>>}
        ],
    Document2 =
        [ #x_data{value = "My test data as iolist"}
        , #x_value{slot = slot2, value = <<128>>}
        ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% <<128>> is not valid UTF-8, so the string slot rejects it.
        ?assertError(#x_server_error{reason={not_unicode,{error,<<>>,<<128>>}}},
                     ?SRV:add_document(Server, Document1)),
        DocId1 = ?SRV:add_document(Server, Document2),
        Meta = xapian_record:record(rec_test2, record_info(fields, rec_test2)),
        Rec1 = ?SRV:read_document(Server, DocId1, Meta),
        [ ?_assertEqual(Rec1#rec_test2.docid, 1)
        , ?_assertEqual(Rec1#rec_test2.slot1, undefined)
        , ?_assertEqual(Rec1#rec_test2.slot2, <<128>>)
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc `percent_cutoff' drops matches below the given relevance percent:
%% with cutoff 0 all three documents match, with cutoff 50 only the two
%% containing both query terms' best scores remain.
cutoff_gen() ->
    % Open test
    Path = testdb_path(cutoff),
    Params = [write, create, overwrite
             , #x_value_name{slot = 0, name = slot0}
             ],
    Document1 =
        [ #x_text{value = "cat dog penguin"}
        ],
    Document2 =
        [ #x_text{value = "cat dog"}
        ],
    Document3 =
        [ #x_text{value = "cat"}
        ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId1 = ?SRV:add_document(Server, Document1),
        DocId2 = ?SRV:add_document(Server, Document2),
        DocId3 = ?SRV:add_document(Server, Document3),
        Query = #x_query_string{value = "cat dog"},
        Enquire1 = #x_enquire{percent_cutoff = 0, value=Query},
        AllIds1 = all_record_ids(Server, Enquire1),
        Enquire2 = #x_enquire{percent_cutoff = 50, value=Query},
        AllIds2 = all_record_ids(Server, Enquire2),
        %% Show weights.
        Meta = xapian_record:record(doc_weight,
                                    [docid, percent, weight]),
        MSetResourceId = ?SRV:match_set(Server, Enquire1),
        Table = xapian_mset_qlc:table(Server, MSetResourceId, Meta),
        io:format(user, "~p~n", [ qlc:e(Table) ]),
        [?_assertEqual(AllIds1, [DocId2, DocId1, DocId3])
        ,?_assertEqual(AllIds2, [DocId2, DocId1])
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc With the boolean weighting scheme all matches weigh the same, so
%% `docid_order' fully determines the result order (desc vs asc).
docid_order_gen() ->
    % Open test
    Path = testdb_path(docid_order),
    Params = [write, create, overwrite
             , #x_value_name{slot = 0, name = slot0}
             ],
    Document1 =
        [ #x_text{value = "cat dog penguin"}
        ],
    Document2 =
        [ #x_text{value = "cat dog"}
        ],
    Document3 =
        [ #x_text{value = "cat"}
        ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId1 = ?SRV:add_document(Server, Document1),
        DocId2 = ?SRV:add_document(Server, Document2),
        DocId3 = ?SRV:add_document(Server, Document3),
        Query = #x_query_string{value = "cat dog"},
        Enquire1 = #x_enquire{weighting_scheme = xapian_resource:bool_weight(),
                              value=Query,
                              docid_order = desc},
        AllIds1 = all_record_ids(Server, Enquire1),
        Enquire2 = Enquire1#x_enquire{docid_order = asc},
        AllIds2 = all_record_ids(Server, Enquire2),
        %% Show weights.
        Meta = xapian_record:record(doc_weight,
                                    [docid, percent, weight]),
        MSetResourceId = ?SRV:match_set(Server, Enquire1),
        Table = xapian_mset_qlc:table(Server, MSetResourceId, Meta),
        io:format(user, "~p~n", [ qlc:e(Table) ]),
        [{"docic=desc weighting_scheme=bool"
         ,?_assertEqual(AllIds1, [DocId3, DocId2, DocId1])}
        ,{"docic=asc weighting_scheme=bool"
         ,?_assertEqual(AllIds2, [DocId1, DocId2, DocId3])}
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc `collapse_key' groups matches by a value slot: with the default
%% collapse_max (1) only one document per key survives (collapse_count
%% reports how many were hidden); with collapse_max=2 both documents per
%% key remain. Also checks uncollapsed mset_info counters.
collapse_key_gen() ->
    % Open test
    Path = testdb_path(collapse_key),
    Params = [write, create, overwrite
             , #x_value_name{slot = 0, name = slot0}
             ],
    Document1 =
        [ #x_value{slot = slot0, value = "a"}
        ],
    Document2 =
        [ #x_value{slot = slot0, value = "a"}
        ],
    Document3 =
        [ #x_value{slot = slot0, value = "b"}
        ],
    Document4 =
        [ #x_value{slot = slot0, value = "b"}
        ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId1 = ?SRV:add_document(Server, Document1),
        DocId2 = ?SRV:add_document(Server, Document2),
        DocId3 = ?SRV:add_document(Server, Document3),
        DocId4 = ?SRV:add_document(Server, Document4),
        %% The empty query matches every document.
        Query = "",
        Enquire1 = #x_enquire{collapse_key = slot0, value=Query},
        Enquire2 = #x_enquire{collapse_key = slot0, collapse_max=2, value=Query},
        Records1 = collapsed_records(Server, Enquire1),
        Records2 = collapsed_records(Server, Enquire2),
        MSet = #x_match_set{enquire = Enquire1},
        MSetResourceId = ?SRV:match_set(Server, MSet),
        MatchesCounts = xapian_server:mset_info(Server, MSetResourceId,
                                                [uncollapsed_matches_lower_bound
                                                ,uncollapsed_matches_estimated
                                                ,uncollapsed_matches_upper_bound
                                                ]),
        [{uncollapsed_matches_lower_bound, LowerBound}
        ,{uncollapsed_matches_estimated, Estimated}
        ,{uncollapsed_matches_upper_bound, UpperBound}] = MatchesCounts,
        [?_assertEqual(Records1,
            [#collapsed{docid = DocId1, collapse_key = <<"a">>, collapse_count = 1}
            ,#collapsed{docid = DocId3, collapse_key = <<"b">>, collapse_count = 1}
            ])
        ,?_assertEqual(Records2,
            [#collapsed{docid = DocId1, collapse_key = <<"a">>, collapse_count = 0}
            ,#collapsed{docid = DocId2, collapse_key = <<"a">>, collapse_count = 0}
            ,#collapsed{docid = DocId3, collapse_key = <<"b">>, collapse_count = 0}
            ,#collapsed{docid = DocId4, collapse_key = <<"b">>, collapse_count = 0}
            ])
        ,{"Uncollapsed matches counts."
         ,[?_assert(LowerBound =< Estimated), ?_assert(Estimated =< UpperBound)]}
        ]
    after
        ?SRV:close(Server)
    end.
%% @doc Run `Enquire' through a match set and return every match as a
%% #collapsed{} record.
collapsed_records(Server, Enquire) ->
    MSetRes = ?SRV:match_set(Server, #x_match_set{enquire = Enquire}),
    RecMeta = xapian_record:record(collapsed, record_info(fields, collapsed)),
    qlc:e(xapian_mset_qlc:table(Server, MSetRes, RecMeta)).
%% @doc A record with a single `data' field is enough to read a document back.
short_record_test() ->
    DbPath = testdb_path(short_rec_test),
    {ok, S} = ?SRV:start_link(DbPath, [write, create, overwrite]),
    Id = ?SRV:add_document(S, [#x_data{value = "ok"}]),
    RecMeta = xapian_record:record(short_rec_test,
                                   record_info(fields, short_rec_test)),
    Rec = ?SRV:read_document(S, Id, RecMeta),
    ?assertEqual(Rec#short_rec_test.data, <<"ok">>),
    ?SRV:close(S).
%% @doc Check an exception.
%% @doc Reading a non-existent docid raises a `DocNotFoundError' exception.
read_bad_docid_test() ->
    % Open test
    Path = testdb_path(read_document),
    Params = [#x_value_name{slot = 1, name = slot1}],
    {ok, Server} = ?SRV:start_link(Path, Params),
    Meta = xapian_record:record(rec_test, record_info(fields, rec_test)),
    %% DocId 2 was never added to this database.
    DocId = 2,
    %% ?assertException(ClassPattern, TermPattern, Expr)
    ?assertException(error,
                     #x_error{type = <<"DocNotFoundError">>},
                     ?SRV:read_document(Server, DocId, Meta)),
    ?SRV:close(Server).
%% ------------------------------------------------------------------
%% Books (query testing)
%% ------------------------------------------------------------------
%% See Driver::selectEncoderAndRetrieveDocument.
%% book fields are in Document.
%% book_ext fields are both in Document and in Iterator.
%% book_iter fields are only in Iterator.
%% These records are used for tests.
%% They describe values of documents.
%% Document-only fields.
-record(book, {docid, author, title, data}).
%% Document fields plus the per-match iterator fields (rank, weight, percent).
-record(book_ext, {docid, author, title, data, rank, weight, percent}).
%% Iterator-only fields (plus docid).
-record(book_iter, {docid, rank, weight, percent}).
%% These cases will be run sequentially.
%% `Server' will be passed as a parameter.
%% `Server' will be opened just once for all cases.
%% @doc EUnit generator: runs all case funs against one shared server.
cases_gen() ->
    Cases =
    [ fun single_term_query_page_case/1
    , fun value_range_query_page_case/1
    , fun query_value_equal_case/1
    , fun double_terms_or_query_page_case/1
    , fun special_fields_query_page_case/1
    , fun document_case/1
    , fun enquire_case/1
    , fun enquire_sort_order_case/1
    , fun enquire_key_maker_case/1
    , fun resource_cleanup_on_process_down_case/1
    , fun enquire_to_mset_case/1
    , fun qlc_mset_case/1
    , fun qlc_mset_doc_case/1
    , fun qlc_mset_iter_case/1
    , fun create_user_resource_case/1
    , fun release_resource_case/1
    , fun release_table_case/1
    , fun release_table2_case/1
    %% Advanced enquires
    , fun advanced_enquire_case/1
    , fun advanced_enquire_weight_case/1
    %% Info
    , fun match_set_info_case/1
    , fun database_info_case/1
    ],
    Server = query_page_setup(),
    %% One setup for each test
    {setup,
     fun() -> Server end,
     fun query_page_clean/1,
     [Case(Server) || Case <- Cases]}.
%% @doc Create the shared test DB with two book documents (docids 1 and 2)
%% and named value slots for `author' and `title'.
query_page_setup() ->
    % Open test
    Path = testdb_path(query_page),
    ValueNames = [ #x_value_name{slot = 1, name = author}
                 , #x_value_name{slot = 2, name = title}],
    Params = [write, create, overwrite] ++ ValueNames,
    {ok, Server} = ?SRV:start_link(Path, Params),
    Base = [#x_stemmer{language = <<"english">>}],
    Document1 = Base ++
        [ #x_data{value = "Non-indexed data here"}
        , #x_text{value = "erlang/OTP"}
        , #x_text{value = "concurrency"}
        , #x_term{value = "telecom"}
        , #x_value{slot = title, value = "Software for a Concurrent World"}
        , #x_value{slot = author, value = "Joe Armstrong"}
        ],
    Document2 = Base ++
        [ #x_stemmer{language = <<"english">>}
        , #x_text{value = "C++"}
        , #x_term{value = "game"}
        , #x_value{slot = title, value = "Code Complete: "
                   "A Practical Handbook of Software Construction"}
        , #x_value{slot = author, value = "Steve McConnell"}
        ],
    %% Put the documents into the database
    [1, 2] =
    [ ?SRV:add_document(Server, Document) || Document <- [Document1, Document2] ],
    Server.

%% @doc Teardown for cases_gen/0: close the shared server.
query_page_clean(Server) ->
    ?SRV:close(Server).
%% @doc Query a page of #book{} records by a single term.
single_term_query_page_case(Server) ->
    Case = fun() ->
        Offset = 0,
        PageSize = 10,
        Query = "erlang",
        Meta = xapian_record:record(book, record_info(fields, book)),
        RecList = ?SRV:query_page(Server, Offset, PageSize, Query, Meta),
        io:format(user, "~n~p~n", [RecList])
    end,
    {"erlang", Case}.

%% @doc Query by a value range on the `author' slot (degenerate range:
%% from and to are the same author).
value_range_query_page_case(Server) ->
    Case = fun() ->
        Offset = 0,
        PageSize = 10,
        Query = #x_query_value_range{slot=author,
                                     from="Joe Armstrong",
                                     to="Joe Armstrong"},
        Meta = xapian_record:record(book, record_info(fields, book)),
        RecList = ?SRV:query_page(Server, Offset, PageSize, Query, Meta),
        io:format(user, "~n~p~n", [RecList])
    end,
    {"Joe Armstrong - Joe Armstrong", Case}.

%% @doc Query by exact value equality on the `author' slot.
query_value_equal_case(Server) ->
    Case = fun() ->
        Offset = 0,
        PageSize = 10,
        Query = #x_query_value{op=equal,
                               slot=author,
                               value="Joe Armstrong"},
        Meta = xapian_record:record(book, record_info(fields, book)),
        RecList = ?SRV:query_page(Server, Offset, PageSize, Query, Meta),
        io:format(user, "~n~p~n", [RecList])
    end,
    {"Joe Armstrong - Joe Armstrong", Case}.

%% @doc Combine two terms (binary and string forms) with the OR operator.
double_terms_or_query_page_case(Server) ->
    Case = fun() ->
        Offset = 0,
        PageSize = 10,
        Query = #x_query{op='OR', value=[<<"erlang">>, "c++"]},
        Meta = xapian_record:record(book, record_info(fields, book)),
        RecList = ?SRV:query_page(Server, Offset, PageSize, Query, Meta),
        io:format(user, "~n~p~n", [RecList])
    end,
    {"erlang OR c++", Case}.
%% You can get dynamically calculated fields (rank, weight, percent).
special_fields_query_page_case(Server) ->
    Case = fun() ->
        Offset = 0,
        PageSize = 10,
        Query = "erlang",
        Meta = xapian_record:record(book_ext, record_info(fields, book_ext)),
        RecList = ?SRV:query_page(Server, Offset, PageSize, Query, Meta),
        io:format(user, "~n~p~n", [RecList])
    end,
    {"erlang (with rank, weight, percent)", Case}.

%% @doc A document resource can be obtained either by a term or by a docid;
%% both handles must describe the same document (same term list).
document_case(Server) ->
    DocRes1 = xapian_server:document(Server, "telecom"),
    DocRes2 = xapian_server:document(Server, 1),
    %% An empty list is neither a term nor a docid.
    ?assertError(badarg, xapian_server:document(Server, [])),
    Meta = xapian_term_record:record(term, record_info(fields, term)),
    AllDocumentTermsFn =
        fun(DocRes) ->
            Table = xapian_term_qlc:document_term_table(
                Server, DocRes, Meta, [ignore_empty]),
            ?SRV:release_resource(Server, DocRes),
            qlc:e(Table)
        end,
    Doc1Terms = AllDocumentTermsFn(DocRes1),
    Doc2Terms = AllDocumentTermsFn(DocRes2),
    [ {"Get a document resource by a term or by an id.",
       [?_assertEqual(Doc1Terms, Doc2Terms)]}
    ].
%% Xapian uses `Xapian::Enquire' class as a hub for making queries.
%% Enquire object can be handled as a resource.
enquire_case(Server) ->
    Case = fun() ->
        Query = "erlang",
        ResourceId = ?SRV:enquire(Server, Query),
        io:format(user, "~n~p~n", [ResourceId]),
        ?SRV:release_resource(Server, ResourceId)
    end,
    {"Simple enquire resource", Case}.
%% @doc Sorting by a value slot, reversed sorting, the default relevance
%% order, and the invalid combination (reversed relevance => badarg).
enquire_sort_order_case(Server) ->
    Case = fun() ->
        %% Sort by value in the 'title' slot
        Order = #x_sort_order{type=value, value=title},
        %% telecom OR game
        Query = #x_query{op = 'OR', value = ["telecom", "game"]},
        EnquireDescriptor = #x_enquire{order=Order, value=Query},
        AllIds = all_record_ids(Server, EnquireDescriptor),
        %% Were two documents selected?
        ?assertMatch([_, _], AllIds),
        %% Check documents order
        %% Code = 2, Software = 1
        ?assertMatch([2, 1], AllIds),
        %% The same case, but it is sorted in the reversed order.
        RevOrder1 = #x_sort_order{type=value, value=title, is_reversed = true},
        RevEnquireDescriptor1 = #x_enquire{order=RevOrder1, value=Query},
        RevAllIds1 = all_record_ids(Server, RevEnquireDescriptor1),
        %% Test the default case.
        RevOrder2 = #x_sort_order{type=relevance, is_reversed = false},
        RevEnquireDescriptor2 = #x_enquire{order=RevOrder2, value=Query},
        RevAllIds2 = all_record_ids(Server, RevEnquireDescriptor2),
        %% Sorting by relevance in the reversed order is meaningless.
        RevOrder3 = #x_sort_order{type=relevance, is_reversed = true},
        RevEnquireDescriptor3 = #x_enquire{order=RevOrder3, value=Query},
        ?assertError(#x_server_error{reason=badarg},
                     all_record_ids(Server, RevEnquireDescriptor3)),
        ?assertEqual(RevAllIds1, lists:reverse(AllIds)),
        ?assertEqual(RevAllIds2, AllIds)
    end,
    {"Enquire with sorting", Case}.
%% TODO: more strict testing
%% @doc Sort by a composite key built from the author and title slots.
enquire_key_maker_case(Server) ->
    Case = fun() ->
        KeyMakerCon = xapian_resource:multi_value_key_maker([author, title]),
        Order = #x_sort_order{type=key, value=KeyMakerCon},
        %% telecom OR game
        Query = #x_query{op = 'OR', value = ["telecom", "game"]},
        EnquireDescriptor = #x_enquire{order=Order, value=Query},
        AllIds = all_record_ids(Server, EnquireDescriptor),
        %% Check documents order
        %% Code = 2, Software = 1
        ?assertMatch([1, 2], AllIds)
    end,
    {"Enquire with sorting", Case}.
%% If the client (the process that created a resource) is dead, then its
%% resources will be released automatically by the server.
resource_cleanup_on_process_down_case(Server) ->
    Case = fun() ->
        Home = self(),
        Ref = make_ref(),
        %% Create the resource inside a short-lived linked process and
        %% hand its id back to this process.
        spawn_link(fun() ->
            Query = "erlang",
            ResourceId = ?SRV:enquire(Server, Query),
            Home ! {resource_id, Ref, ResourceId}
        end),
        ResourceId =
            receive
                {resource_id, Ref, ResourceIdI} ->
                    ResourceIdI
            end,
        %% The owner process has exited, so the resource is already gone.
        ?assertError(elem_not_found, ?SRV:release_resource(Server, ResourceId))
    end,
    %% Fixed typo in the case description ("garbidge" -> "garbage").
    {"Check garbage collection for resources", Case}.
%% @doc An enquire resource can be converted into a match-set resource.
enquire_to_mset_case(Server) ->
    Case = fun() ->
        Query = "erlang",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        MSetResourceId = ?SRV:match_set(Server, EnquireResourceId),
        io:format(user, "~n ~p ~p~n", [EnquireResourceId, MSetResourceId]),
        ?SRV:release_resource(Server, EnquireResourceId),
        ?SRV:release_resource(Server, MSetResourceId)
    end,
    %% Fixed typo in the case description ("conversation" -> "conversion").
    {"Check conversion", Case}.
%% @doc Access a match set through the qlc interface using #book_ext{}.
qlc_mset_case(Server) ->
    Case = fun() ->
        %% Query is a query to make for retrieving documents from Xapian.
        %% Each document object will be mapped into a document record.
        %% A document record is a normal erlang record,
        %% it has structure, described by the user,
        %% using the `xapian_record:record' call.
        Query = "erlang",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        MSetResourceId = ?SRV:match_set(Server, EnquireResourceId),
        %% Meta is a record, which contains some information about
        %% structure of a document record.
        %% The definition of Meta is encapsulated inside `xapian_record' module.
        Meta = xapian_record:record(book_ext, record_info(fields, book_ext)),
        %% Create QlcTable from MSet.
        %% After creation of QlcTable, MSet can be removed.
        Table = xapian_mset_qlc:table(Server, MSetResourceId, Meta),
        ?SRV:release_resource(Server, MSetResourceId),
        %% QueryAll is a list of all matched records.
        QueryAll = Table,
        %% Check `lookup' function. This function is used by `qlc' module.
        %% It will be called to find a record by an index.
        QueryFilter = qlc:q(
            [X || X=#book_ext{docid=DocId} <- Table, DocId =:= 1]),
        Queries = [QueryAll, QueryFilter],
        %% For each query...
        [begin
            %% ... evaluate (execute) ...
            Records = qlc:e(Q),
            %% ... and print out.
            io:format(user, "~n ~p~n", [Records])
         end || Q <- Queries
        ],
        %% This case will cause an error, because DocId > 0.
        QueryBadFilter = qlc:q(
            [X || X=#book_ext{docid=DocId} <- Table, DocId =:= 0]),
        ?assertError(bad_docid, qlc:e(QueryBadFilter))
    end,
    {"Check internal_qlc_init", Case}.
%% @doc Like qlc_mset_case/1, but with the document-only #book{} record
%% and multi-id lookup filters.
qlc_mset_doc_case(Server) ->
    Case = fun() ->
        %% Query is a query to make for retrieving documents from Xapian.
        %% Each document object will be mapped into a document record.
        %% A document record is a normal erlang record,
        %% it has structure, described by the user,
        %% using the `xapian_record:record' call.
        Query = "erlang",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        MSetResourceId = ?SRV:match_set(Server, EnquireResourceId),
        %% Meta is a record, which contains some information about
        %% structure of a document record.
        %% The definition of Meta is encapsulated inside `xapian_record' module.
        Meta = xapian_record:record(book, record_info(fields, book)),
        %% Create QlcTable from MSet.
        %% After creation of QlcTable, MSet can be removed.
        Table = xapian_mset_qlc:table(Server, MSetResourceId, Meta),
        ?SRV:release_resource(Server, MSetResourceId),
        %% QueryAll is a list of all matched records.
        QueryAll = Table,
        %% Check `lookup' function. This function is used by `qlc' module.
        %% It will be called to find a record by an index.
        QueryFilter = qlc:q(
            [X || X=#book{docid=DocId} <- Table, DocId =:= 1]),
        QueryFilter2 = qlc:q(
            [X || X=#book{docid=DocId} <- Table, DocId =:= 1 orelse DocId =:= 2]),
        QueryFilter3 = qlc:q(
            [X || X=#book{docid=DocId} <- Table, DocId =:= 2 orelse DocId =:= 1]),
        Queries = [QueryAll, QueryFilter, QueryFilter2, QueryFilter3],
        %% For each query...
        [begin
            %% ... evaluate (execute) ...
            Records = qlc:e(Q),
            %% ... and print out.
            io:format(user, "~n ~p~n", [Records])
         end || Q <- Queries
        ],
        %% This case will cause an error, because DocId > 0.
        QueryBadFilter = qlc:q(
            [X || X=#book{docid=DocId} <- Table, DocId =:= 0]),
        ?assertError(bad_docid, qlc:e(QueryBadFilter))
    end,
    {"Check an iterator source.", Case}.
%% @doc Like qlc_mset_case/1, but with the iterator-only #book_iter{} record.
qlc_mset_iter_case(Server) ->
    Case = fun() ->
        %% Query is a query to make for retrieving documents from Xapian.
        %% Each document object will be mapped into a document record.
        %% A document record is a normal erlang record,
        %% it has structure, described by the user,
        %% using the `xapian_record:record' call.
        Query = "erlang",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        MSetResourceId = ?SRV:match_set(Server, EnquireResourceId),
        %% Meta is a record, which contains some information about
        %% structure of a document record.
        %% The definition of Meta is encapsulated inside `xapian_record' module.
        Meta = xapian_record:record(book_iter, record_info(fields, book_iter)),
        %% Create QlcTable from MSet.
        %% After creation of QlcTable, MSet can be removed.
        Table = xapian_mset_qlc:table(Server, MSetResourceId, Meta),
        ?SRV:release_resource(Server, MSetResourceId),
        %% QueryAll is a list of all matched records.
        QueryAll = Table,
        %% Check `lookup' function. This function is used by `qlc' module.
        %% It will be called to find a record by an index.
        QueryFilter = qlc:q(
            [X || X=#book_iter{docid=DocId} <- Table, DocId =:= 1]),
        Queries = [QueryAll, QueryFilter],
        %% For each query...
        [begin
            %% ... evaluate (execute) ...
            Records = qlc:e(Q),
            %% ... and print out.
            io:format(user, "~n ~p~n", [Records])
         end || Q <- Queries
        ],
        %% This case will cause an error, because DocId > 0.
        QueryBadFilter = qlc:q(
            [X || X=#book_iter{docid=DocId} <- Table, DocId =:= 0]),
        ?assertError(bad_docid, qlc:e(QueryBadFilter))
    end,
    {"Check an iterator source.", Case}.
create_user_resource_case(Server) ->
    Case = fun() ->
        %% A user-defined resource is an object, which is created on the C++ side.
        %% Erlang references are used for returning it back to the user.
        %% A reference can be used only with this Server.
        ResourceId = ?SRV:internal_create_resource(Server, bool_weight),
        io:format(user, "User-defined resource ~p~n", [ResourceId])
    end,
    {"Check creation of user-defined resources", Case}.
%% Additional parameters can be passed to `Xapian::Enquire'.
%% We use `#x_enquire' record for this.
advanced_enquire_case(Server) ->
    Case = fun() ->
        Query = #x_enquire{
            value = "Erlang"
        },
        EnquireResourceId = ?SRV:enquire(Server, Query),
        ?assert(is_reference(EnquireResourceId)),
        ?SRV:release_resource(Server, EnquireResourceId)
    end,
    {"Check #x_enquire{}", Case}.
%% We can pass another `Xapian::Weight' object, stored as an user resource.
%% We create a new `Xapian::BoolWeight' object as a resource and pass it back
%% as an additional parameter.
advanced_enquire_weight_case(Server) ->
    Case = fun() ->
        Query = #x_enquire{
            value = "Erlang",
            weighting_scheme = xapian_resource:bool_weight()
        },
        EnquireResourceId = ?SRV:enquire(Server, Query),
        ?assert(is_reference(EnquireResourceId)),
        ?SRV:release_resource(Server, EnquireResourceId)
    end,
    {"Check #x_enquire{weight=Xapian::BoolWeight}", Case}.
%% @doc mset_info/2,3 with: a list of atom properties, a single atom,
%% the full property list, and pair (keyed) properties.
match_set_info_case(Server) ->
    Case = fun() ->
        Query = "erlang",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        ?assert(is_reference(EnquireResourceId)),
        MSetResourceId = ?SRV:match_set(Server, EnquireResourceId),
        try
            Info =
                ?SRV:mset_info(Server, MSetResourceId, [matches_lower_bound, size]),
            ?assertEqual(1, ?SRV:mset_info(Server, MSetResourceId, size)),
            %% All atom props
            PropKeys = xapian_mset_info:properties(),
            AllItems1 = ?SRV:mset_info(Server, MSetResourceId, PropKeys),
            AllItems2 = ?SRV:mset_info(Server, MSetResourceId),
            %% The arity-2 form must return the full property set.
            ?assertEqual(AllItems1, AllItems2),
            io:format(user, "~nMSet Info: ~p~n", [Info]),
            %% All pair props
            [Pair1Key, Pair2Key] =
            PairProps = [{term_weight, "erlang"}, {term_freq, "erlang"}],
            PairPropResult = ?SRV:mset_info(Server, MSetResourceId, PairProps),
            ?assertMatch([{Pair1Key, _0dot4}, {Pair2Key, 1}],
                         PairPropResult)
        after
            ?SRV:release_resource(Server, EnquireResourceId),
            ?SRV:release_resource(Server, MSetResourceId)
        end
    end,
    {"Check mset_info function.", Case}.
%% @doc Releasing the same resource twice raises `elem_not_found'.
release_resource_case(Server) ->
    Case = fun() ->
        EnquireResourceId = ?SRV:enquire(Server, "erlang"),
        ?SRV:release_resource(Server, EnquireResourceId),
        %% Try call it twice
        ?assertError(elem_not_found,
                     ?SRV:release_resource(Server, EnquireResourceId))
    end,
    {"Check xapian_server:release_resource", Case}.
%% @doc Releasing a qlc table invalidates its underlying reference too.
release_table_case(Server) ->
    Case = fun() ->
        %% Create a Qlc Table for query "erlang".
        Table = mset_table(Server, "erlang", document),
        Ref = ?SRV:qlc_table_to_reference(Server, Table),
        ?SRV:release_table(Server, Table),
        %% Try call it twice
        ?assertError(elem_not_found,
                     ?SRV:release_resource(Server, Ref))
    end,
    {"Try delete the reference after deleting the table.", Case}.

%% @doc The reverse direction: releasing the reference invalidates the table.
release_table2_case(Server) ->
    Case = fun() ->
        %% Create a Qlc Table for query "erlang".
        Table = mset_table(Server, "erlang", document),
        Ref = ?SRV:qlc_table_to_reference(Server, Table),
        ?SRV:release_resource(Server, Ref),
        %% Try call it twice
        ?assertError(elem_not_found,
                     ?SRV:release_table(Server, Table))
    end,
    {"Try delete the table after deleting the reference.", Case}.
%% @doc database_info/1,2 with atom properties, the full property list,
%% and keyed (pair) properties; missing terms/documents yield `undefined'.
database_info_case(Server) ->
    Case = fun() ->
        Info =
            ?SRV:database_info(Server, [document_count]),
        io:format(user, "~nDB Info: ~p~n", [Info]),
        %% Atoms
        AllItems1 = ?SRV:database_info(Server, xapian_db_info:properties()),
        AllItems2 = ?SRV:database_info(Server),
        ?assertEqual(AllItems1, AllItems2),
        %% Pairs
        ?assertEqual(?SRV:database_info(Server, [{term_exists, <<"erlang">>}]),
                     [{{term_exists, <<"erlang">>}, true}]),
        ?assertEqual(?SRV:database_info(Server, [{term_exists, <<"prolog">>}]),
                     [{{term_exists, <<"prolog">>}, false}]),
        %% A single pair (not in a list) returns the bare value.
        ?assert(?SRV:database_info(Server, {term_exists, <<"erlang">>})),
        ?assertNot(?SRV:database_info(Server, {term_exists, <<"prolog">>})),
        ?assertEqual(1, ?SRV:database_info(Server,
                                           {term_freq, <<"erlang">>})),
        ?assertEqual(undefined, ?SRV:database_info(Server,
                                                   {term_freq, <<"prolog">>})),
        ?assertEqual(undefined, ?SRV:database_info(Server,
                                                   {collection_freq, <<"prolog">>})),
        ?assert(is_integer(?SRV:database_info(Server,
                                              {document_length, 1}))),
        ?assertEqual(undefined, ?SRV:database_info(Server,
                                                   {document_length, 1000})),
        ?assertEqual(2, ?SRV:database_info(Server,
                                           {value_freq, 1})),
        %% Slot can be addressed by number or by its registered name.
        ?assertEqual(2, ?SRV:database_info(Server,
                                           {value_freq, author})),
        ?assertEqual(<<"Joe Armstrong">>,
                     ?SRV:database_info(Server,
                                        {value_lower_bound, author})),
        ?assertEqual(<<"Steve McConnell">>,
                     ?SRV:database_info(Server,
                                        {value_upper_bound, author})),
        ?assertEqual(1,
                     ?SRV:database_info(Server,
                                        {wdf_upper_bound, "erlang"})),
        ?assertEqual(undefined,
                     ?SRV:database_info(Server,
                                        {wdf_upper_bound, "php"}))
    end,
    {"Check database_info function.", Case}.
%% @doc Write a metadata key and read it back; a missing key yields <<"">>.
metadata_gen() ->
    {ok, S} = ?SRV:start_link(testdb_path(metadata),
                              [write, create, overwrite]),
    ?SRV:set_metadata(S, "key", "value"),
    Known = ?SRV:database_info(S, {metadata, "key"}),
    Missing = ?SRV:database_info(S, {metadata, "bad_key"}),
    ?SRV:close(S),
    [ ?_assertEqual(Known, <<"value">>)
    , ?_assertEqual(Missing, <<"">>)
    ].
%%
%% @doc A query with a scaled-weight sub-query still matches the document.
extra_weight_gen() ->
    Path = testdb_path(extra_weight),
    Params = [write, create, overwrite],
    {ok, Server} = ?SRV:start_link(Path, Params),
    Terms = ["Sxapian", "weight"],
    Document = [#x_term{value = X} || X <- Terms],
    DocId = ?SRV:add_document(Server, Document),
    Query = extra_weight_query(2.5, "Sxapian", "weight"),
    Ids = all_record_ids(Server, Query),
    [?_assertEqual(Ids, [DocId])].
%% @doc Fill a DB with 1000 identical documents and walk the whole result
%% set one answer at a time through a qlc cursor.
large_db_and_qlc_test() ->
    Path = testdb_path(large_db_and_qlc),
    Params = [write, create, overwrite],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        Terms = ["xapian", "erlang"],
        Document = [#x_term{value = X} || X <- Terms],
        ExpectedDocIds = lists:seq(1, 1000),
        DocIds = [begin
            ?SRV:add_document(Server, Document)
        end || _ <- ExpectedDocIds],
        ?assertEqual(DocIds, ExpectedDocIds),
        Query = "erlang",
        Cursor = all_record_cursor(Server, Query),
        try
            cursor_walk(1, 1001, Cursor)
        after
            qlc:delete_cursor(Cursor)
        end
    after
        ?SRV:close(Server)
    end.
%% @doc Split a 1000-document result set into two qlc queries (ids =< 500
%% and > 500), append them, and walk the combined cursor in order.
large_db_and_qlc_mset_with_joins_test() ->
    Path = testdb_path(large_db_and_qlc_joins),
    Params = [write, create, overwrite],
    {ok, Server} = ?SRV:start_link(Path, Params),
    ExpectedDocIds = lists:seq(1, 1000),
    DocIds = [begin
        Document = [ #x_term{value = integer_to_list(Id)} ],
        ?SRV:add_document(Server, Document)
    end || Id <- ExpectedDocIds],
    ?assertEqual(DocIds, ExpectedDocIds),
    %% The empty query matches everything.
    Query = "",
    Table = mset_table(Server, Query, document),
    QH1 = qlc:q([Id || #document{docid=Id} <- Table, Id =< 500]),
    QH2 = qlc:q([Id || #document{docid=Id} <- Table, Id > 500]),
    QH3 = qlc:append(QH1, QH2),
    Cursor = qlc:cursor(QH3),
    try
        cursor_walk(1, 1001, Cursor)
    after
        qlc:delete_cursor(Cursor)
    end.
%% Stops when Id =:= Max.
%% @doc Pull answers off `Cursor' one at a time, asserting that they are
%% the consecutive integers Id, Id+1, ..., Max-1 and that the cursor is
%% exhausted afterwards.
cursor_walk(Max, Max, Cursor) ->
    %% Reached the end: the cursor must yield nothing more.
    ?assertEqual(qlc:next_answers(Cursor, 1), []),
    [];
cursor_walk(Id, Max, Cursor) ->
    ?assertEqual(qlc:next_answers(Cursor, 1), [Id]),
    cursor_walk(Id + 1, Max, Cursor).
%% @doc Build a query combining `Title' (with its weight scaled by
%% `Factor') and `Body'. `Title' and `Body' are queries.
extra_weight_query(Factor, Title, Body) ->
    #x_query{value = [#x_query_scale_weight{factor = Factor, value = Title},
                      Body]}.
%% -------------------------------------------------------------------
%% Multi-DB support
%% -------------------------------------------------------------------
%% Describes a document inside a merged (multi) database:
%% `docid' is the id within its source DB, `multi_docid' is the id in
%% the merged view, `db_name'/`db_number' identify the source DB.
-record(mdocument, {docid, db_name, multi_docid, db_number}).
%% @doc Run `Query' and expose its match set as a QLC table.
%% The third argument is a record name (`document' / `mdocument') or an
%% already-built record meta description.
mset_table(Server, Query, document) ->
    mset_table(Server, Query,
               xapian_record:record(document, record_info(fields, document)));
mset_table(Server, Query, mdocument) ->
    mset_table(Server, Query,
               xapian_record:record(mdocument, record_info(fields, mdocument)));
mset_table(Server, Query, Meta) ->
    Enquire = ?SRV:enquire(Server, Query),
    MSet = ?SRV:match_set(Server, Enquire),
    Table = xapian_mset_qlc:table(Server, MSet, Meta),
    %% The table keeps its own references to the resources, so the
    %% local handles can be released immediately.
    ?SRV:release_resource(Server, Enquire),
    ?SRV:release_resource(Server, MSet),
    Table.
%% @doc Return the docids of all documents matching `Query' as a list.
all_record_ids(Server, Query) ->
    Table = mset_table(Server, Query, document),
    qlc:e(qlc:q([DocId || #document{docid = DocId} <- Table])).
%% @doc Return a QLC cursor over the docids of documents matching `Query'.
all_record_cursor(Server, Query) ->
    QH = qlc:q([DocId || #document{docid = DocId}
                             <- mset_table(Server, Query, document)]),
    qlc:cursor(QH).
%% @doc Fetch every matching document as an `#mdocument{}' record.
all_multidb_records(Server, Query) ->
    qlc:e(mset_table(Server, Query, mdocument)).
%% @doc Select the `#mdocument{}' records whose per-DB docid equals `Id'.
record_by_id(Server, Query, Id) ->
    Table = mset_table(Server, Query, mdocument),
    QH = qlc:q([Rec || Rec = #mdocument{docid = DocId} <- Table,
                       DocId =:= Id]),
    qlc:e(QH).
%% @doc Select the `#mdocument{}' records whose merged-DB docid equals `Id'.
multidb_record_by_id(Server, Query, Id) ->
    Table = mset_table(Server, Query, mdocument),
    QH = qlc:q([Rec || Rec = #mdocument{multi_docid = DocId} <- Table,
                       DocId =:= Id]),
    qlc:e(QH).
%% Simple usage of a merged DB.
%% @doc Create two DBs with one document each, open them together as a
%% merged DB and check ids, DB names and record lookups.
multi_db_gen() ->
    Path1 = #x_database{name=multi1, path=testdb_path(multi1)},
    Path2 = #x_database{name=multi2, path=testdb_path(multi2)},
    Params = [write, create, overwrite],
    Document = [#x_term{value = "test"}],
    {ok, Server1} = ?SRV:start_link(Path1, Params),
    {ok, Server2} = ?SRV:start_link(Path2, Params),
    DocId1 = ?SRV:add_document(Server1, Document),
    DocId2 = ?SRV:add_document(Server2, Document),
    ?SRV:close(Server1),
    ?SRV:close(Server2),
    %% Merged server
    {ok, Server} = ?SRV:start_link([Path1, Path2], []),
    try
        Query = "test",
        Ids = all_record_ids(Server, Query),
        Records = all_multidb_records(Server, Query),
        DbNames = elements(#mdocument.db_name, Records),
        MultiIds = elements(#mdocument.multi_docid, Records),
        DbNums = elements(#mdocument.db_number, Records),
        LookupRecords1 = record_by_id(Server, Query, 1),
        LookupRecords2 = record_by_id(Server, Query, 5),
        LookupRecords3 = multidb_record_by_id(Server, Query, 1),
        LookupRecords4 = multidb_record_by_id(Server, Query, 2),
        LookupRecords5 = multidb_record_by_id(Server, Query, 5),
        %% All values above are bound eagerly, so closing the server in
        %% `after' cannot affect the lazy `?_assertEqual' checks.
        [?_assertEqual([DocId1, DocId2], [1,1])
        ,?_assertEqual(Ids, [1,1])
        ,?_assertEqual(DbNames, [multi1, multi2])
        ,?_assertEqual(MultiIds, [1,2])
        ,?_assertEqual(DbNums, [1,2])
        ,{"Document is not found by id.",
            [?_assertEqual(LookupRecords5, [])
            ,?_assertEqual(LookupRecords2, [])]}
        ,?_assertEqual(length(LookupRecords1), 2)
        ,?_assertEqual(length(LookupRecords3), 1)
        ,?_assertEqual(length(LookupRecords4), 1)
        ]
    after
        %% The original version never closed the merged server (leak).
        ?SRV:close(Server)
    end.
%% @doc Check `xapian_server:multi_docid/3': it maps a per-DB docid and
%% a DB name to the document id in the merged (multi) DB.
multi_docid_gen() ->
    Path1 = #x_database{name=multi_docid1, path=testdb_path(multi1)},
    Path2 = #x_database{name=multi_docid2, path=testdb_path(multi2)},
    Params = [write, create, overwrite],
    {ok, Server1} = ?SRV:start_link(Path1, Params),
    {ok, Server2} = ?SRV:start_link(Path2, Params),
    ?SRV:close(Server1),
    ?SRV:close(Server2),
    %% Merged server
    {ok, Server} = ?SRV:start_link([Path1, Path2], []),
    %% xapian_server:multi_docid
    try
        %% Compute the ids eagerly; the original code called the server
        %% inside the lazy `?_assertEqual' closures, which would break
        %% if the server were closed before the checks ran.
        MultiDocId1 = ?SRV:multi_docid(Server, 1, multi_docid1),
        MultiDocId2 = ?SRV:multi_docid(Server, 1, multi_docid2),
        MultiDocId3 = ?SRV:multi_docid(Server, 2, multi_docid1),
        MultiDocId4 = ?SRV:multi_docid(Server, 2, multi_docid2),
        [ ?_assertEqual(1, MultiDocId1)
        , ?_assertEqual(2, MultiDocId2)
        , ?_assertEqual(3, MultiDocId3)
        , ?_assertEqual(4, MultiDocId4)
        ]
    after
        %% The original version never closed the merged server (leak).
        ?SRV:close(Server)
    end.
%% @doc Extract the `Pos'-th element from every tuple (record) in `Records'.
elements(Pos, Records) ->
    lists:map(fun(Rec) -> erlang:element(Pos, Rec) end, Records).
%% @doc Smoke test for the remote (TCP) backend: start an external
%% Xapian TCP server on port 6666 and connect to it as a writable DB.
remote_db_test() ->
    Params = [writable, link, {port, 6666}],
    DBList = [testdb_path(tcp_remote)],
    xapian_utility:tcp_server(DBList, Params),
    %% Give the external TCP server time to start listening.
    %% NOTE(review): a fixed sleep is racy on slow machines; polling
    %% the port until it accepts connections would be more reliable.
    timer:sleep(1000),
    DBConfig = #x_tcp_database{port = 6666, host = "127.0.0.1"},
    {ok, Server} = ?SRV:start_link(DBConfig, [write]),
    ?SRV:close(Server).
%% @doc Check the value-slot metadata kept in the server state:
%% name <-> slot-number and slot -> type conversions, both through the
%% `xapian_common' helpers and through the `?SRV' API calls.
get_state_fields_gen() ->
    %% Here check, that value names are stored in the orddict correctly.
    %% Don't change the order of `#x_value_name' here!
    Params =
        [ #x_value_name{slot = 2, name = slot2, type = string}
        , #x_value_name{slot = 1, name = slot1, type = float}
        ],
    {ok, Server} = ?SRV:start_link([], Params),
    try
        %% The whole name->slot mapping, fetched once.
        N2S = ?SRV:name_to_slot(Server),
        Num1_1 = xapian_common:slot_id(slot1, N2S),
        Num1_2 = xapian_common:slot_id(1, N2S),
        Num2_1 = xapian_common:slot_id(slot2, N2S),
        Num2_2 = xapian_common:slot_id(2, N2S),
        %% Passing a slot number through slot_id/2 returns it unchanged.
        Slot2SlotTests = [{"Slot number is the same.",
            [ ?_assertEqual(Num1_2, 1)
            , ?_assertEqual(Num2_2, 2)]}],
        Name2SlotTests = [{"Name to number conversation.",
            [ ?_assertEqual(Num1_1, 1)
            , ?_assertEqual(Num2_1, 2)]}],
        %% The whole slot->type mapping.
        S2T = ?SRV:slot_to_type(Server),
        Type1 = xapian_common:slot_type(1, S2T),
        Type2 = xapian_common:slot_type(2, S2T),
        SlotTypeTests2 = [{"Name or number to type conversation.",
            [ ?_assertEqual(Type1, float)
            , ?_assertEqual(Type2, string)
            ]}],
        %% The same conversions through per-item server API calls.
        Slot1_1 = ?SRV:name_to_slot(Server, slot1),
        Slot1_2 = ?SRV:name_to_slot(Server, 1),
        Slot2_1 = ?SRV:name_to_slot(Server, slot2),
        Slot2_2 = ?SRV:name_to_slot(Server, 2),
        Slot2SlotTests2 = [{"Slot number is the same.",
            [ ?_assertEqual(Slot1_2, 1)
            , ?_assertEqual(Slot2_2, 2)]}],
        Name2SlotTests2 = [{"Name to number conversation.",
            [ ?_assertEqual(Slot1_1, 1)
            , ?_assertEqual(Slot2_1, 2)]}],
        SlotType1_1 = ?SRV:slot_to_type(Server, slot1),
        SlotType1_2 = ?SRV:slot_to_type(Server, 1),
        SlotType2_1 = ?SRV:slot_to_type(Server, slot2),
        SlotType2_2 = ?SRV:slot_to_type(Server, 2),
        SlotTypeTests = [{"Name or number to type conversation.",
            [ ?_assertEqual(SlotType1_1, float)
            , ?_assertEqual(SlotType1_2, float)
            , ?_assertEqual(SlotType2_1, string)
            , ?_assertEqual(SlotType2_2, string)
            ]}],
        Slot2SlotTests ++ Name2SlotTests ++
        Slot2SlotTests2 ++ Name2SlotTests2 ++
        SlotTypeTests ++ SlotTypeTests2
    after
        ?SRV:close(Server)
    end.
| null | https://raw.githubusercontent.com/arcusfelis/xapian-erlang-bindings/29871b3e64d658e74701c6ba68bf59e1a9b168f1/test/xapian_server_tests.erl | erlang | This module is a `gen_server' that handles a single port connection.
Used for testing, then can be moved to an another file
------------------------------------------------------------------
Tests
------------------------------------------------------------------
------------------------------------------------------------------
Call C++ tests
------------------------------------------------------------------
@doc Check basic memory operations (malloc, free).
@doc This test checks the work of `ResultEncoder'.
@doc Check an exception.
------------------------------------------------------------------
Call test generators
------------------------------------------------------------------
This test tries to create a document with all kinds of fields.
Open test
It is a term without a position.
Posting (a term with a position).
Open test
The DB is empty.
Register the empty server under the local name
Register the empty server under the local name
Register the empty server under the global name
Cannot add this term again, because the action is `add'.
Add an another term...
... and delete it.
Because we use a term as a key, few documents can be matched.
That is why, undefined is returned (and not a document id).
ignore = true catches errors.
Cannot update the document that is not found.
Now we can.
Document was created, but it us empty.
Try the same using the document id as a key.
Whole command:
[update_document:24] OverflowDriverError: "Too short binary."
C++ position: c_src/common/param_decoder.cpp:29
Test DB: add_document_value
*failed*
**error:{x_error,<<"OverflowDriverError">>,<<"Too short binary.">>,update_document,
<<"c_src/common/param_decoder.cpp">>,29}
SET_VALUE
slot
0, % xapian_const:value_type_id(string)
8,0,0,0,83,108,111,116,32,35,49,53, % string
ignore
0, % stop applyDocument
1,1,0,0,0>>
REP_CRT_DOC_MARK
If there is no document, then the new one will be created.
another document id.
Create a new document.
Replace the whole document with the new one.
It returns a document id of replaced document (but it can be more
then once).
The old document was deleted,
the new document was created.
Test few documents with the same term.
Add another document with the same term.
Only one document will left after replace_or_create_document.
was deleted.
Documents are not exist.
REP_DOC_MARK
Nothing was updated.
If there is no document, then there is no an error.
Nothing was created.
Create a new document.
Replace the whole document with the new one.
It returns a document id of replaced document (but it can be more
then once).
The old document was deleted,
the new document was created.
Test few documents with the same term.
Add another document with the same term.
Only one document will left after replace_document.
was deleted.
REP_DOC_MARK
Create a new document.
Create a document with terms
Lookup order test.
It is an important test.
in the beginning of the document.
Create a document with terms
Delete the term
The term list was changed.
Can it be runned twice? - yes. But values will be new.
Records2 = qlc:e(Table),
, ?_assertEqual(Records1, Records2)
Create a document with terms
Create a document with terms
SUDDENLY! PAIN!
In the next string the error can occur.
Create a document with terms
It is a natural join.
Test a term generator
Call with a slot name
Call with a slot number
MSetResourceId =
These elements are sorted by value.
These elements are sorted by freq.
"Red" was converted to <<"Red">> because of lookup function call.
match_spy_info
DocIds =
Call with a slot name
Collect statistic
MSetResourceId =
Has it the same type?
These elements sorted by value.
Terms can be deleted, added or replaced using `#x_term{}'.
Error will be thrown. Value was not changed.
Error will be ignored. Value was not changed.
Start changing of WDF
Cannot remove term, because WDF is not matched.
Delete the term
Cannot delete the term twoce
Cannot update a non-existing term
It will be ignored.
Test, that the stemmed forms of the words was filtered by Stopper.
The standard generator is without any stemmer.
Open test
Open test with the default stemmer
Test a term generator
Test a query parser
"trinitrotoluol",<<>>,undefined}
`exclude' is used for unsetting flags.
Test x_query_parser.features.
Test a term generator
Test a query parser
Empty parsers
Test a term generator
same means the same.
Test a term generator
Test a term generator
Test clear_synonyms.
There are no synonyms in the DB.
Try run it again.
------------------------------------------------------------------
------------------------------------------------------------------
Open test
Check fallback
Try to kill S1.
Check fallback when the transaction process is still alive
Because this process is active, then the monitor process will
Wait for DB closing.
Server3 is still alive.
Open test
Open test
------------------------------------------------------------------
Extracting information
------------------------------------------------------------------
The record will contain information about a document.
docid and data are special fields.
Open test
Open test
Open test
#document{} is the simple container.
Open test
Open test
Show weights.
Open test
Show weights.
Open test
@doc Check an exception.
Open test
------------------------------------------------------------------
Books (query testing)
------------------------------------------------------------------
See Driver::selectEncoderAndRetrieveDocument.
book fields are in Document.
These records are used for tests.
They describe values of documents.
These cases will be runned sequencly.
`Server' will be passed as a parameter.
`Server' will be opened just once for all cases.
Info
Open test
Put the documents into the database
You can get dynamicly calculated fields.
Enquire object can be handled as a resource.
Sort by value in the 'title' slot
telecom OR game
Check documents order
The same case, but it is sorted in the reversed order.
Test the default case.
Sorting by relevance in the reversed order is meaningless.
TODO: more strict testing
telecom OR game
Check documents order
If the client is dead, then its resources will be released.
Each document object will be mapped into a document record.
A document record is a normal erlang record,
it has structure, described by the user,
using the `xapian_record:record' call.
structure of a document record.
Check `lookup' function. This function is used by `qlc' module.
It will be called to find a record by an index.
For each query...
... evaluate (execute) ...
... and print out.
Each document object will be mapped into a document record.
A document record is a normal erlang record,
it has structure, described by the user,
using the `xapian_record:record' call.
structure of a document record.
Check `lookup' function. This function is used by `qlc' module.
It will be called to find a record by an index.
For each query...
... evaluate (execute) ...
... and print out.
Each document object will be mapped into a document record.
A document record is a normal erlang record,
it has structure, described by the user,
using the `xapian_record:record' call.
structure of a document record.
Check `lookup' function. This function is used by `qlc' module.
It will be called to find a record by an index.
For each query...
... evaluate (execute) ...
... and print out.
A reference can be used only with this Server.
Additional parameters can be passed to `Xapian::Enquire'.
We use `#x_enquire' record for this.
as an additional parameter.
All atom props
All pair props
Try call it twice
Try call it twice
Try call it twice
Atoms
Pairs
`Title' and `Body' are queries.
-------------------------------------------------------------------
Multi-DB support
-------------------------------------------------------------------
Table has a pointer on resources.
Simple usage of a merged DB.
Merged server
Merged server
xapian_server:multi_docid
Here check, that value names are stored in the orddict correctly.
Don't change the order of `#x_value_name' here! | -module(xapian_server_tests).
-include_lib("xapian/include/xapian.hrl").
-include_lib("xapian/src/xapian.hrl").
-compile([export_all]).
-import(xapian_helper, [testdb_path/1]).
-define(SRV, xapian_server).
-include_lib("proper/include/proper.hrl").
-include_lib("eunit/include/eunit.hrl").
-include_lib("stdlib/include/qlc.hrl").
-record(document, {docid}).
-record(collapsed, {docid, collapse_key, collapse_count}).
memory_test() ->
{ok, Server} = ?SRV:start_link([], []),
?SRV:internal_test_run(Server, memory, []),
?SRV:close(Server),
ok.
echo_test() ->
{ok, Server} = ?SRV:start_link([], []),
?assertEqual(?SRV:internal_test_run(Server, echo, <<0,5>>), <<0,5>>),
Bin = list_to_binary(lists:duplicate(1100, 1)),
?assertEqual(?SRV:internal_test_run(Server, echo, Bin), Bin),
ok.
-define(DOCUMENT_ID(X), X:32/native-unsigned-integer).
result_encoder_test() ->
{ok, Server} = ?SRV:start_link([], []),
Reply = ?SRV:internal_test_run(Server, result_encoder, [1, 1000]),
Reply = ?SRV:internal_test_run(Server, result_encoder, [1, 1000]),
Reply = ?SRV:internal_test_run(Server, result_encoder, [1, 1000]),
?SRV:close(Server),
?assertEqual(lists:seq(1, 1000), [ Id || <<?DOCUMENT_ID(Id)>> <= Reply ]),
ok.
exception_test() ->
{ok, Server} = ?SRV:start_link([], []),
? assertException(ClassPattern , TermPattern , )
?assertException(error,
#x_error{type = <<"MemoryAllocationDriverError">>},
?SRV:internal_test_run(Server, exception, [])),
?SRV:close(Server),
ok.
wrapper(Name) ->
[{setup,
fun() -> ok end,
fun(_) -> [{atom_to_list(Name), ?MODULE:Name()}] end}].
run_test_generators_once_test_() ->
AllFuns = ?MODULE:module_info(exports),
[wrapper(Name) || {Name, Arity} <- AllFuns,
Arity =:= 0,
lists:suffix("_gen", atom_to_list(Name))].
simple_gen() ->
Path = testdb_path(simple),
Params = [write, create, overwrite,
#x_value_name{slot = 1, name = slot1},
#x_prefix_name{name = author, prefix = <<$A>>}],
Document =
[ #x_stemmer{language = <<"english">>}
, #x_data{value = "My test data as iolist"}
, #x_term{value = "Simple"}
, #x_term{value = "term", position=1}
, #x_value{slot = 0, value = "Slot #0"}
, #x_value{slot = slot1, value = "Slot #1"}
, #x_text{value = "Paragraph 1"}
, #x_delta{}
, #x_text{value = <<"Paragraph 2">>}
, #x_text{value = <<"Michael">>, prefix = author}
],
{ok, Server} = ?SRV:start_link(Path, Params),
try
DocId = ?SRV:add_document(Server, Document),
DocIdReplaced1 = ?SRV:replace_or_create_document(Server, DocId, Document),
DocIdReplaced2 = ?SRV:replace_or_create_document(Server, "Simple", Document),
?SRV:delete_document(Server, DocId),
?SRV:delete_document(Server, "Simple"),
[ ?_assert(is_integer(DocId))
, ?_assertEqual(DocId, DocIdReplaced1)
, ?_assertEqual(DocId, DocIdReplaced2)
]
after
?SRV:close(Server)
end.
last_document_id_gen() ->
Path = testdb_path(last_docid),
Params = [write, create, overwrite],
Document = [],
{ok, Server} = ?SRV:start_link(Path, Params),
try
NoId = ?SRV:last_document_id(Server),
Add 1 document , check a last i d.
DocId = ?SRV:add_document(Server, Document),
Last = ?SRV:last_document_id(Server),
[ {"Db is empty.", ?_assertEqual(undefined, NoId)}
, ?_assertEqual(DocId, Last)
]
after
?SRV:close(Server)
end.
open_and_register_local_name_test() ->
Name = xapian_server_test_local_name,
{ok, Server} = ?SRV:start_link([], [{name, Name}]),
?assertEqual(whereis(Name), Server),
?SRV:close(Server),
?assertNot(is_process_alive(Server)).
open_and_register_local_name2_test() ->
Name = xapian_server_test_local_name2,
{ok, Server} = ?SRV:start_link([], [{name, {local, Name}}]),
?assertEqual(whereis(Name), Server),
?SRV:close(Server).
open_and_register_global_name_test() ->
Name = xapian_server_test_global_name,
{ok, Server} = ?SRV:start_link([], [{name, {global, Name}}]),
?assertEqual(global:whereis_name(Name), Server),
?SRV:close(Server).
update_document_test() ->
Path = testdb_path(update_document),
Params = [write, create, overwrite],
{ok, Server} = ?SRV:start_link(Path, Params),
try
DocId = ?SRV:add_document(Server, []),
The document with DocId will be extended .
DocId1 = ?SRV:update_document(Server, DocId, [#x_term{value = "more"}]),
?assertEqual(DocId, DocId1),
?assertError(#x_error{type = <<"BadArgumentDriverError">>},
?SRV:update_document(Server, DocId,
[#x_term{action = add, value = "more", ignore = false}])),
?SRV:update_document(Server, DocId,
[#x_term{action = add, value = "other", ignore = false}]),
?assert(?SRV:is_document_exist(Server, "other")),
?SRV:update_document(Server, DocId,
[#x_term{action = remove, value = "other", ignore = false}]),
?assertNot(?SRV:is_document_exist(Server, "other")),
Can not find a document , using " bad_term " as UID .
?debugMsg("UPD_DOC_BAD_ID_MARK"),
?assertError(#x_error{type = <<"BadArgumentDriverError">>},
?SRV:update_document(Server, "bad_term", [])),
One document with the term " more " was found .
?assertEqual(undefined,
?SRV:update_or_create_document(Server, "more",
[#x_term{action = add, value = "more", ignore = true}])),
?assertNot(?SRV:is_document_exist(Server, "fail")),
?assertError(#x_error{type = <<"BadArgumentDriverError">>},
?SRV:update_document(Server, "fail", [])),
?assertNot(?SRV:is_document_exist(Server, "fail")),
DocId2 = ?SRV:update_or_create_document(Server, "fail", []),
?assert(?SRV:is_document_exist(Server, DocId2)),
?assertNot(?SRV:is_document_exist(Server, "fail")),
DocId3 = ?SRV:update_or_create_document(Server, DocId2, []),
?assertEqual(DocId2, DocId3)
after
?SRV:close(Server)
end.
update_document_value_test() ->
Path = testdb_path(update_document_value),
Params = [write, create, overwrite],
{ok, Server} = ?SRV:start_link(Path, Params),
try
DocId = ?SRV:add_document(Server, []),
The document with DocId will be extended .
< < " Slot # 15 " > > = < < 83,108,111,116,32,35,49,53 > >
byte_size(<<"Slot # 15 " > > ) = 8
= ERROR REPORT==== 16 - Sep-2015::03:15:10 = = =
Data : < < 27,15,0,0,0,0,8,0,0,0,83,108,111,116,32,35,49,53,1,0,1,1,0,0,0 > >
in function xapian_server : client_error_handler/1 ( src / xapian_server.erl , line 1285 )
in call from xapian_server_tests : update_document_value_test/0 ( test / xapian_server_tests.erl , line 241 )
Doc = [#x_value{slot = 15, value = <<"Slot #15">>}],
DocId1 = ?SRV:update_document(Server, DocId, Doc),
?assertEqual(DocId, DocId1)
after
?SRV:close(Server)
end.
add_document_value_test() ->
Path = testdb_path(add_document_value),
Params = [write, create, overwrite],
{ok, Server} = ?SRV:start_link(Path, Params),
try
Doc = [#x_value{slot = 15, value = <<"Slot #15">>}],
DocId = ?SRV:add_document(Server, Doc)
after
?SRV:close(Server)
end.
replace_or_create_document_test() ->
Path = testdb_path(replace_or_create_document),
Params = [write, create, overwrite],
{ok, Server} = ?SRV:start_link(Path, Params),
try
Try update using non - existed DocId .
?assertNot(?SRV:is_document_exist(Server, 1)),
DocId = ?SRV:replace_or_create_document(Server, "bad_term", []),
?assertEqual(DocId, 1),
?assert(?SRV:is_document_exist(Server, 1)),
?SRV:delete_document(Server, DocId),
?assertNot(?SRV:is_document_exist(Server, 1)),
DocId0 = ?SRV:replace_or_create_document(Server, "bad_term", []),
Even when the first document is deleted , the new document will have
?assertEqual(DocId0, 2),
?assertNot(?SRV:is_document_exist(Server, "bad_term")),
?assert(?SRV:is_document_exist(Server, DocId0)),
DocId1 = ?SRV:add_document(Server, [#x_term{value = "good_term"}]),
?assert(?SRV:is_document_exist(Server, "good_term")),
DocId2 = ?SRV:replace_or_create_document(Server, "good_term",
[#x_term{value = "nice_term"}]),
?assertEqual(DocId1, DocId2),
?assertNot(?SRV:is_document_exist(Server, "good_term")),
?assert(?SRV:is_document_exist(Server, "nice_term")),
DocId3 = ?SRV:add_document(Server, [#x_term{value = "nice_term"}]),
and DocId3 are still here .
?assert(?SRV:is_document_exist(Server, DocId2)),
?assert(?SRV:is_document_exist(Server, DocId3)),
DocId4 = ?SRV:replace_or_create_document(Server, "nice_term",
[#x_term{value = "mass_term"}]),
Only document with is here , other document with the same term
?assertEqual(DocId4, DocId2),
Ids = all_record_ids(Server, "mass_term"),
?assertEqual(Ids, [DocId4]),
?assertNot(?SRV:is_document_exist(Server, DocId3))
after
?SRV:close(Server)
end.
delete_document_gen() ->
Path = testdb_path(delete_document),
Params = [write, create, overwrite],
{ok, Server} = ?SRV:start_link(Path, Params),
try
Exists1 = ?SRV:delete_document(Server, "test"),
Exists2 = ?SRV:delete_document(Server, 1),
DocId1 = ?SRV:add_document(Server, [#x_term{value = "term"}]),
DocId2 = ?SRV:add_document(Server, []),
Exists3 = ?SRV:delete_document(Server, "term"),
Exists4 = ?SRV:delete_document(Server, DocId2),
Exists5 = ?SRV:is_document_exist(Server, DocId1),
Exists6 = ?SRV:is_document_exist(Server, DocId2),
[ ?_assertNot(Exists1)
, ?_assertNot(Exists2)
, ?_assert(Exists3)
, ?_assert(Exists4)
, ?_assertNot(Exists5)
, ?_assertNot(Exists6)
]
after
?SRV:close(Server)
end.
replace_document_test() ->
Path = testdb_path(replace_document),
Params = [write, create, overwrite],
{ok, Server} = ?SRV:start_link(Path, Params),
try
Try update using non - existed DocId .
?assertNot(?SRV:is_document_exist(Server, 1)),
DocId = ?SRV:replace_document(Server, "bad_term", []),
?assertEqual(DocId, undefined),
?assertNot(?SRV:is_document_exist(Server, 1)),
DocId0 = ?SRV:replace_document(Server, "bad_term", []),
?assertEqual(DocId0, undefined),
?assertNot(?SRV:is_document_exist(Server, "bad_term")),
DocId1 = ?SRV:add_document(Server, [#x_term{value = "good_term"}]),
?assert(?SRV:is_document_exist(Server, "good_term")),
?assert(?SRV:is_document_exist(Server, DocId1)),
?assertEqual(DocId1, 1),
DocId2 = ?SRV:replace_document(Server, "good_term",
[#x_term{value = "nice_term"}]),
?assertEqual(DocId1, DocId2),
?assertNot(?SRV:is_document_exist(Server, "good_term")),
?assert(?SRV:is_document_exist(Server, "nice_term")),
DocId3 = ?SRV:add_document(Server, [#x_term{value = "nice_term"}]),
and DocId3 are still here .
?assert(?SRV:is_document_exist(Server, DocId2)),
?assert(?SRV:is_document_exist(Server, DocId3)),
DocId4 = ?SRV:replace_document(Server, "nice_term",
[#x_term{value = "mass_term"}]),
Only document with is here , other document with the same term
?assertEqual(DocId4, DocId2),
Ids = all_record_ids(Server, "mass_term"),
?assertEqual(Ids, [DocId4]),
?assertNot(?SRV:is_document_exist(Server, DocId3))
after
?SRV:close(Server)
end.
replace_document_by_id_test() ->
Path = testdb_path(replace_document_by_id),
Params = [write, create, overwrite],
{ok, Server} = ?SRV:start_link(Path, Params),
try
DocId1 = ?SRV:add_document(Server, [#x_term{value = "new"}]),
DocId2 = ?SRV:replace_document(Server, DocId1,
[#x_term{value = "other"}]),
?assertEqual(DocId1, DocId2),
Ids = all_record_ids(Server, "other"),
?assertEqual(Ids, [DocId2])
after
?SRV:close(Server)
end.
is_document_exists_gen() ->
Path = testdb_path(is_document_exists),
Params = [write, create, overwrite],
Doc =
[ #x_term{value = "monad"}
],
{ok, Server} = ?SRV:start_link(Path, Params),
try
BeforeAddTerm = ?SRV:is_document_exist(Server, "monad"),
BeforeAddId = ?SRV:is_document_exist(Server, 1),
?SRV:add_document(Server, Doc),
AfterAddTerm = ?SRV:is_document_exist(Server, "monad"),
AfterAddId = ?SRV:is_document_exist(Server, 1),
[ ?_assertNot(BeforeAddTerm)
, ?_assertNot(BeforeAddId)
, ?_assert(AfterAddTerm)
, ?_assert(AfterAddId)
]
after
?SRV:close(Server)
end.
frequency_test() ->
Path = testdb_path(frequency),
Params = [write, create, overwrite],
Doc =
[ #x_term{value = "term", frequency = {cur, 1}}
, #x_term{value = "term", frequency = {abs, 5}}
, #x_term{value = "term", frequency = {cur, -1}}
],
{ok, Server} = ?SRV:start_link(Path, Params),
try
?SRV:add_document(Server, Doc)
after
?SRV:close(Server)
end.
term_actions_test() ->
Path = testdb_path(actions),
Params = [write, create, overwrite],
Doc =
[ #x_term{action = add, value = "term"}
, #x_term{action = update, value = "term"}
, #x_term{action = set, value = "term"}
],
{ok, Server} = ?SRV:start_link(Path, Params),
try
?SRV:add_document(Server, Doc)
after
?SRV:close(Server)
end.
-record(term, {value, wdf}).
-record(term_value, {value}).
-record(term_ext, {value, positions, position_count, freq, wdf}).
-record(term_pos, {value, positions, position_count}).
-record(short_term, {wdf}).
-record(term_freq, {value, freq}).
term_qlc_gen() ->
Path = testdb_path(term_qlc),
Params = [write, create, overwrite],
TermNames =
[erlang:list_to_binary(erlang:integer_to_list(X))
|| X <- lists:seq(1, 100)],
Fields = [#x_term{value = Term} || Term <- TermNames],
{ok, Server} = ?SRV:start_link(Path, Params),
try
DocId = ?SRV:add_document(Server, Fields),
Meta = xapian_term_record:record(term, record_info(fields, term)),
Table = xapian_term_qlc:document_term_table(Server, DocId, Meta),
Records = qlc:e(Table),
Values = [Value || #term{value = Value} <- Records],
Not1Wdf = [X || X = #term{wdf = Wdf} <- Records, Wdf =/= 1],
Actually , it tests the fact , that skip_to ( " " ) move an TermIterator
OrderTestQuery = qlc:q([Value || #term{value = Value} <- Table,
Value =:= "2" orelse Value =:= "1" orelse Value =:= "3"]),
OrderTestValues = qlc:e(OrderTestQuery),
[ ?_assertEqual(Values, lists:sort(TermNames))
, ?_assertEqual(Not1Wdf, [])
, ?_assertEqual(OrderTestValues, [<<"1">>, <<"2">>, <<"3">>])
]
after
?SRV:close(Server)
end.
term_qlc_invalidation_gen() ->
Path = testdb_path(term_qlc_inv),
Params = [write, create, overwrite],
TermNames =
["cat", "dog"],
Fields = [#x_term{value = Term} || Term <- TermNames],
{ok, Server} = ?SRV:start_link(Path, Params),
try
DocId = ?SRV:add_document(Server, Fields),
Meta = xapian_term_record:record(term, record_info(fields, term)),
Table = xapian_term_qlc:document_term_table(Server, DocId, Meta),
Records1 = qlc:e(Table),
?SRV:update_document(Server, DocId,
[#x_term{value = "dog", action = remove}]),
The must be the same . - no
[ ?_assertEqual(Records1, [#term{value = <<"cat">>, wdf = 1},
#term{value = <<"dog">>, wdf = 1}])
we lost one dog :(
]
after
?SRV:close(Server)
end.
short_term_qlc_gen() ->
Path = testdb_path(short_term_qlc),
Params = [write, create, overwrite],
TermNames =
[erlang:list_to_binary(erlang:integer_to_list(X))
|| X <- lists:seq(1, 100)],
Fields = [#x_term{value = Term} || Term <- TermNames],
{ok, Server} = ?SRV:start_link(Path, Params),
try
DocId = ?SRV:add_document(Server, Fields),
Meta = xapian_term_record:record(short_term,
record_info(fields, short_term)),
Table = xapian_term_qlc:document_term_table(Server, DocId, Meta),
Q = qlc:q([Wdf || #short_term{wdf = Wdf} <- Table]),
WdfSum = qlc:fold(fun erlang:'+'/2, 0, Q),
[ ?_assertEqual(WdfSum, 100) ]
after
?SRV:close(Server)
end.
term_ext_qlc_gen() ->
Path = testdb_path(term_ext_qlc),
Params = [write, create, overwrite],
TermNames =
[erlang:list_to_binary(erlang:integer_to_list(X))
|| X <- lists:seq(1, 100)],
Fields = [#x_term{value = Term} || Term <- TermNames],
{ok, Server} = ?SRV:start_link(Path, Params),
try
DocId = ?SRV:add_document(Server, Fields),
Meta = xapian_term_record:record(term_ext,
record_info(fields, term_ext)),
Table = xapian_term_qlc:document_term_table(Server, DocId, Meta),
Records = qlc:e(Table),
Not0Pos =
[X || X = #term_ext{position_count = Count} <- Records, Count =/= 0],
NotEmptyPos =
[X || X = #term_ext{positions = Poss} <- Records, Poss =/= []],
Shared table : changes in 1 query handler do n't modify the second one .
QH1 = qlc:q([V || #term_ext{value=V} <- Table]),
QH2 = qlc:q([V || #term_ext{value=V} <- Table]),
C1 = qlc:cursor(QH1),
C2 = qlc:cursor(QH2),
C1E1 = qlc:next_answers(C1, 1),
C2E1 = qlc:next_answers(C2, 1),
C1E2 = qlc:next_answers(C1, 1),
C1E3 = qlc:next_answers(C1, 1),
C2E2 = qlc:next_answers(C2, 1),
C2E3 = qlc:next_answers(C2, 1),
C2E4 = qlc:next_answers(C2, 1),
C1E4 = qlc:next_answers(C1, 1),
C2E5 = qlc:next_answers(C2, 25),
C1E5 = qlc:next_answers(C1, 25),
C1E6 = qlc:next_answers(C1, 25),
C2E6 = qlc:next_answers(C2, 25),
[ ?_assertEqual(Not0Pos, [])
, ?_assertEqual(NotEmptyPos, [])
, {"Shared term QLC table.",
[ ?_assertEqual(C1E1, C2E1)
, ?_assertEqual(C1E2, C2E2)
, ?_assertEqual(C1E3, C2E3)
, ?_assertEqual(C1E4, C2E4)
, ?_assertEqual(C1E5, C2E5)
, ?_assertEqual(C1E6, C2E6)
]}
]
after
?SRV:close(Server)
end.
term_numbers(From, To) ->
[erlang:list_to_binary(erlang:integer_to_list(X))
|| X <- lists:seq(From, To)].
terms(TermNames) ->
[#x_term{value = Term} || Term <- TermNames].
%% Natural join (via qlc) of the term tables of two documents returns
%% exactly the term names the documents share.
%% Fix: the inline comment on the former L14569 had lost its `%%' marker,
%% leaving bare prose inside the function body; restored as a comment.
term_qlc_join_gen() ->
    Path = testdb_path(term_qlc_join),
    Params = [write, create, overwrite],
    TermNames0to99 = term_numbers(0, 99),
    TermNames100to199 = term_numbers(100, 199),
    TermNames200to299 = term_numbers(200, 299),
    TermNames300to399 = term_numbers(300, 399),
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% Neighbouring documents share exactly one hundred terms.
        Doc1Terms = TermNames0to99 ++ TermNames100to199,
        Doc2Terms = TermNames100to199 ++ TermNames200to299,
        Doc3Terms = TermNames200to299 ++ TermNames300to399,
        Doc1Id = ?SRV:add_document(Server, terms(Doc1Terms)),
        Doc2Id = ?SRV:add_document(Server, terms(Doc2Terms)),
        Doc3Id = ?SRV:add_document(Server, terms(Doc3Terms)),
        Meta = xapian_term_record:record(term, record_info(fields, term)),
        Table1 = xapian_term_qlc:document_term_table(Server, Doc1Id, Meta),
        Table2 = xapian_term_qlc:document_term_table(Server, Doc2Id, Meta),
        Table3 = xapian_term_qlc:document_term_table(Server, Doc3Id, Meta),
        %% Search terms from 2 documents with the same names.
        Q12 = qlc:q([Value1 || #term{value = Value1} <- Table1,
                               #term{value = Value2} <- Table2,
                               Value1 =:= Value2]),
        Q23 = qlc:q([Value1 || #term{value = Value1} <- Table2,
                               #term{value = Value2} <- Table3,
                               Value1 =:= Value2]),
        Q1223 = qlc:append(Q12, Q23),
        QE12 = qlc:e(Q12),
        QE23 = qlc:e(Q23),
        QE1223 = qlc:e(Q1223),
        {"Natural join of the terms from two document.",
         [ ?_assertEqual(QE12, TermNames100to199)
         , ?_assertEqual(QE23, TermNames200to299)
         , ?_assertEqual(QE1223, TermNames100to199 ++ TermNames200to299)
         ]}
    after
        ?SRV:close(Server)
    end.
%% Term positions supplied via #x_term{position = ...} for a document
%% can be read back through a document_term_table. The expected records
%% below show that all three terms come back with positions [1,2,3],
%% i.e. lists are stored sorted and multiple entries for the same term
%% ("term3": [1] plus [2,3]) are merged.
term_pos_qlc_gen() ->
    Path = testdb_path(term_pos_qlc),
    Params = [write, create, overwrite],
    Fields =
    [ #x_term{value = "term1", position = [1,2,3]}
    , #x_term{value = "term2", position = [3,2,1]}
    , #x_term{value = "term3", position = [1]}
    , #x_term{value = "term3", position = [2,3]}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, Fields),
        Meta = xapian_term_record:record(term_pos,
            record_info(fields, term_pos)),
        Table = xapian_term_qlc:document_term_table(Server, DocId, Meta),
        %% Select the full #term_pos{} record for each term value.
        Term1Records =
            qlc:e(qlc:q([X || X = #term_pos{value = <<"term1">>} <- Table])),
        Term2Records =
            qlc:e(qlc:q([X || X = #term_pos{value = <<"term2">>} <- Table])),
        Term3Records =
            qlc:e(qlc:q([X || X = #term_pos{value = <<"term3">>} <- Table])),
        AllRecords =
            qlc:e(qlc:q([X || X <- Table])),
        %% Expected rows: every term ends up with sorted positions [1,2,3].
        Term1 = #term_pos{
            value = <<"term1">>, position_count = 3, positions = [1,2,3]},
        Term2 = #term_pos{
            value = <<"term2">>, position_count = 3, positions = [1,2,3]},
        Term3 = #term_pos{
            value = <<"term3">>, position_count = 3, positions = [1,2,3]},
        [ ?_assertEqual([Term1], Term1Records)
        , ?_assertEqual([Term2], Term2Records)
        , ?_assertEqual([Term3], Term3Records)
        , ?_assertEqual(erlang:length(AllRecords), 3)
        ]
    after
        ?SRV:close(Server)
    end.
%% Smoke test: an #x_text features list may both include and exclude
%% `spelling' (in `{except, ...}' forms) without add_document crashing.
%% No assertions are returned; success is simply not raising.
term_generator_features_gen() ->
    Path = testdb_path(tg_features),
    Params = [write, create, overwrite],
    Document =
    [ #x_text{value = "The quick brown fox jumps over the lazy dog.",
              features = [default, {except, spelling},
                          spelling, {except, [spelling]}]}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        ?SRV:add_document(Server, Document),
        []
    after
        ?SRV:close(Server)
    end.
%% #x_text.position shifts the base position of the generated terms:
%% with position = 5, the first word lands at 6 and the second
%% occurrence of "the" at 12 (checked below).
%% Fix: the position table on the former L14645 had lost its `%%'
%% marker, leaving bare prose inside the record; restored as a comment.
text_position_gen() ->
    Path = testdb_path(text_pos),
    Params = [write, create, overwrite],
    Document =
    [ #x_text{value = "The quick brown fox jumps over the lazy dog.",
              %% Positions:  6     7     8    9    10    11   12  13   14
              position = 5}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, Document),
        Meta = xapian_term_record:record(term_pos,
            record_info(fields, term_pos)),
        Table = xapian_term_qlc:document_term_table(Server, DocId, Meta),
        Term1Records =
            qlc:e(qlc:q([X || X = #term_pos{value = <<"the">>} <- Table])),
        Term1 = #term_pos{
            value = <<"the">>, position_count = 2, positions = [6, 12]},
        [ ?_assertEqual([Term1], Term1Records)
        ]
    after
        ?SRV:close(Server)
    end.
%% A value-count match spy over the `color' slot counts distinct slot
%% values over the matched documents. The plain spy table lists values
%% alphabetically; the "top" table orders by frequency (checked below
%% via "green", which occurs twice, sorting before "white").
%% Fix: two inline comments (former L14669, L14687) had lost their `%%'
%% markers, leaving bare prose in the body; restored as comments.
value_count_match_spy_gen() ->
    Path = testdb_path(value_count_mspy),
    Params = [write, create, overwrite,
              #x_value_name{slot = 1, name = color}],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% There are 2 "green" documents.
        Colors = ["Red", "Blue", "green", "white", "black", "green"],
        [add_color_document(Server, Color) || Color <- Colors],
        SpySlot1 = xapian_match_spy:value_count(Server, color),
        %% value_count also accepts the raw slot number (result unused).
        xapian_match_spy:value_count(Server, 1),
        Query = "",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        MSetParams = #x_match_set{
            enquire = EnquireResourceId,
            spies = [SpySlot1]},
        ?SRV:match_set(Server, MSetParams),
        Meta = xapian_term_record:record(term_freq,
            record_info(fields, term_freq)),
        Table = xapian_term_qlc:value_count_match_spy_table(
            Server, SpySlot1, Meta),
        TopTable = xapian_term_qlc:top_value_count_match_spy_table(
            Server, SpySlot1, 100, Meta),
        Values = qlc:e(qlc:q([Value || #term_freq{value = Value} <- Table])),
        %% The filter compares a binary against a plain list:
        %% Erlang did not match it, but Xapian did.
        RedValues = qlc:e(qlc:q([Value
            || #term_freq{value = Value} <- Table, Value =:= "Red"])),
        OrderValues = qlc:e(qlc:q([Value || #term_freq{value = Value} <- Table,
            Value =:= "white" orelse Value =:= "black"])),
        TopAlphOrderValues =
            qlc:e(qlc:q([Value || #term_freq{value = Value} <- TopTable,
                Value =:= "white" orelse Value =:= "black"])),
        TopFreqOrderValues =
            qlc:e(qlc:q([Value || #term_freq{value = Value} <- TopTable,
                Value =:= "white" orelse Value =:= "green"])),
        SpySlot1Slot = ?SRV:match_spy_info(Server, SpySlot1, value_slot),
        SpySlot1DocCount = ?SRV:match_spy_info(Server, SpySlot1, document_count),
        SpySlot1Info = ?SRV:match_spy_info(Server, SpySlot1,
            [value_slot, document_count]),
        [ ?_assertEqual(Values,
              [<<"Blue">>, <<"Red">>, <<"black">>, <<"green">>, <<"white">>])
        , ?_assertEqual(RedValues, [<<"Red">>])
        , {"Check order",
           [ ?_assertEqual(OrderValues, [<<"black">>, <<"white">>])
           , ?_assertEqual(TopAlphOrderValues, [<<"black">>, <<"white">>])
           , ?_assertEqual(TopFreqOrderValues, [<<"green">>, <<"white">>])
           ]}
        , ?_assertEqual(RedValues, [<<"Red">>])
        , {"xapian_server:match_spy_info/3",
           [ ?_assertEqual(SpySlot1Info, [{value_slot, SpySlot1Slot}
                                         ,{document_count, SpySlot1DocCount}])
           ]}
        ]
    after
        ?SRV:close(Server)
    end.
%% Store a document whose only content is a colour in the `color' value slot.
add_color_document(Server, Color) ->
    Fields = [#x_value{slot = color, value = Color}],
    ?SRV:add_document(Server, Fields).
%% Values written into a `float'-typed slot come back from a value-count
%% match spy as floats, in numeric order, and can be filtered and
%% self-joined with qlc.
float_value_count_match_spy_gen() ->
    Path = testdb_path(value_count_mspy),
    Params = [write, create, overwrite,
              #x_value_name{slot = 1, name = page_count, type = float}],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        Doc1 = [ #x_value{slot = page_count, value = 10} ],
        Doc2 = [ #x_value{slot = page_count, value = 100} ],
        Doc3 = [ #x_value{slot = page_count, value = 200} ],
        Doc4 = [ #x_value{slot = page_count, value = 20} ],
        Docs = [ Doc1, Doc2, Doc3, Doc4 ],
        [ ?SRV:add_document(Server, Doc) || Doc <- Docs ],
        SpySlot1Res = xapian_match_spy:value_count(Server, page_count),
        %% Empty query string; the asserts below show all 4 docs match.
        Query = "",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        MSetParams = #x_match_set{
            enquire = EnquireResourceId,
            spies = [SpySlot1Res]},
        ?SRV:match_set(Server, MSetParams),
        Meta = xapian_term_record:record(term_freq,
            record_info(fields, term_freq)),
        %% The named slot `page_count' resolves to slot number 1.
        Slot1 = xapian_server:match_spy_info(Server, SpySlot1Res, value_slot),
        ?assertEqual(Slot1, 1),
        ValueType = xapian_server:slot_to_type(Server, Slot1),
        ?assertEqual(ValueType, float),
        Table = xapian_term_qlc:value_count_match_spy_table(
            Server, SpySlot1Res, Meta),
        Values =
            qlc:e(qlc:q([Value || #term_freq{value = Value} <- Table])),
        %% Lookup by exact (integer-written, float-read) value.
        FilteredValues =
            qlc:e(qlc:q([Value || #term_freq{value = Value} <- Table, Value =:= 10])),
        %% Self-join of the spy table on the value.
        JoinValues =
            qlc:e(qlc:q([V1 || #term_freq{value = V1} <- Table,
                               #term_freq{value = V2} <- Table, V1 =:= V2])),
        [ {"Float values inside MatchSpy.",
           ?_assertEqual(Values, [10.0, 20.0, 100.0, 200.0])}
        , {"Join float values.",
           ?_assertEqual(JoinValues, [10.0, 20.0, 100.0, 200.0])}
        , {"Lookup float values.",
           ?_assertEqual(FilteredValues, [10.0])}
        ]
    after
        ?SRV:close(Server)
    end.
%% Exercises #x_term actions (add / update / set / remove) together with
%% the `ignore' flag and relative ({cur, N}) / absolute ({abs, N})
%% frequency updates against a single document, checking the stored wdf
%% after every step. Each `TermsN' binding snapshots the term state.
term_advanced_actions_gen() ->
    Path = testdb_path(adv_actions),
    Params = [write, create, overwrite],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, []),
        %% U applies a partial update to the same document.
        U = fun(Doc) ->
            ?SRV:update_document(Server, DocId, Doc)
            end,
        Meta = xapian_term_record:record(term, record_info(fields, term)),
        %% Return the #term{} records whose value equals Value (possibly []).
        FindTermFn =
            fun(Value) ->
                DocRes = xapian_server:document(Server, DocId),
                Table = xapian_term_qlc:document_term_table(
                    Server, DocRes, Meta, [ignore_empty]),
                ?SRV:release_resource(Server, DocRes),
                qlc:e(qlc:q([X || X = #term{value = V} <- Table, V =:= Value]))
            end,
        FF = fun() -> FindTermFn("term") end,
        %% Apply one field update, then look the term up again.
        UU = fun(Field) -> U([Field]), FF() end,
        Term = #x_term{value = "term"},
        TermAdd = Term#x_term{action = add},
        TermAddNotIgnore = Term#x_term{action = add, ignore = false},
        TermUpdate = Term#x_term{action = update},
        TermUpdateNotIgnore = TermUpdate#x_term{ignore = false},
        TermSet = Term#x_term{action = set},
        TermDec = TermSet#x_term{frequency = -1},
        TermSetAbs = TermSet#x_term{frequency = {abs, 10}},
        TermRemoveIgnore = Term#x_term{action = remove, frequency = 0},
        TermRemove = TermRemoveIgnore#x_term{ignore = false},
        TermRemove2 = TermRemove#x_term{frequency = 123},
        Terms1 = FF(),
        Terms2 = UU(TermAddNotIgnore),
        %% Re-adding an existing term with ignore = false raises.
        ?assertError(#x_error{type = <<"BadArgumentDriverError">>},
            UU(TermAddNotIgnore)),
        Terms3 = FF(),
        Terms4 = UU(TermAdd),
        Terms5 = UU(TermUpdate),
        Terms6 = UU(TermSet),
        Terms7 = UU(TermDec),
        Terms8 = UU(TermSetAbs),
        %% Remove with frequency = 123 raises — presumably because the
        %% frequency does not match the stored wdf (10); confirm.
        ?assertError(#x_error{type = <<"BadArgumentDriverError">>},
            UU(TermRemove2)),
        Terms9 = FF(),
        Terms10 = UU(TermRemove),
        %% Removing a term that is already gone raises.
        ?assertError(#x_error{type = <<"InvalidArgumentError">>},
            UU(TermRemove)),
        Terms11 = FF(),
        %% Updating a missing term with ignore = false raises.
        ?assertError(#x_error{type = <<"BadArgumentDriverError">>},
            UU(TermUpdateNotIgnore)),
        Terms12 = FF(),
        Terms13 = UU(TermUpdate),
        %% Expected rows: wdf values after the corresponding steps.
        NormTerm1 = #term{value = <<"term">>, wdf = 1},
        NormTerm2 = #term{value = <<"term">>, wdf = 2},
        NormTerm3 = #term{value = <<"term">>, wdf = 3},
        NormTerm4 = #term{value = <<"term">>, wdf = 10},
        [ ?_assertEqual(Terms1, [])
        , ?_assertEqual(Terms2, [NormTerm1])
        , ?_assertEqual(Terms3, [NormTerm1])
        , ?_assertEqual(Terms4, [NormTerm1])
        , ?_assertEqual(Terms5, [NormTerm2])
        , ?_assertEqual(Terms6, [NormTerm3])
        , ?_assertEqual(Terms7, [NormTerm2])
        , ?_assertEqual(Terms8, [NormTerm4])
        , ?_assertEqual(Terms9, [NormTerm4])
        , ?_assertEqual(Terms10, [])
        , ?_assertEqual(Terms11, [])
        , ?_assertEqual(Terms12, [])
        , ?_assertEqual(Terms13, [])
        ]
    after
        ?SRV:close(Server)
    end.
%% A custom #x_term_generator with a simple_stopper keeps stop-words out
%% of the index: without the stopper the stemmed term <<"Zmy">> appears,
%% with it the term is absent.
term_generator_gen() ->
    Path = testdb_path(term_generator),
    Params = [write, create, overwrite,
              #x_stemmer{language = <<"english">>}],
    Meta = xapian_term_record:record(term, record_info(fields, term)),
    Stopper = xapian_resource:simple_stopper(["my", "as", "the", "a", "an"]),
    Document1 =
    [ #x_text{value = "My text is inside the #x_text record."}
    ],
    Document2 =
    [ #x_term_generator{stopper = Stopper}
    , #x_text{value = "My text is inside the #x_text record."}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    %% Return the list of term values stored for a document.
    ExtractDocTerms = fun(Doc) ->
        TermTable = xapian_term_qlc:document_term_table(Server, Doc, Meta),
        Values = qlc:q([Val || #term{value = Val} <- TermTable]),
        qlc:e(Values)
        end,
    try
        DocId1 = ?SRV:add_document(Server, Document1),
        DocId2 = ?SRV:add_document(Server, Document2),
        Terms2 = ExtractDocTerms(DocId2),
        Terms1 = ExtractDocTerms(DocId1),
        [ {"Does the stopper actually work?"
          ,[?_assert(lists:member(<<"Zmy">>, Terms1))
           ,?_assertNot(lists:member(<<"Zmy">>, Terms2))
           ]}
        ]
    after
        ?SRV:close(Server)
    end.
%% #x_term_generator{name = standard} bypasses the database's default
%% generator: "cats" is indexed unstemmed only, whereas the default
%% generator also emits the stemmed <<"Zcat">>.
%% Fix: the comment on the former L14876 had lost its `%%' marker,
%% leaving bare prose in the body; restored as a comment.
standard_term_generator_gen() ->
    Path = testdb_path(std_term_generator),
    Params = [write, create, overwrite,
              #x_stemmer{language = <<"english">>}],
    Meta = xapian_term_record:record(term, record_info(fields, term)),
    %% The default generator uses the "english" stemmer.
    Document1 =
    [ #x_term_generator{name = default}
    , #x_text{value = "cats"}
    ],
    Document2 =
    [ #x_term_generator{name = standard}
    , #x_text{value = "cats"}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    ExtractDocTerms = fun(Doc) ->
        TermTable = xapian_term_qlc:document_term_table(Server, Doc, Meta),
        Values = qlc:q([Val || #term{value = Val} <- TermTable]),
        qlc:e(Values)
        end,
    try
        DocId1 = ?SRV:add_document(Server, Document1),
        DocId2 = ?SRV:add_document(Server, Document2),
        Terms2 = ExtractDocTerms(DocId2),
        Terms1 = ExtractDocTerms(DocId1),
        [ {"Is #x_term_generator.name respected?"
          ,[?_assertEqual(Terms1, [<<"Zcat">>,<<"cats">>])
           ,?_assertEqual(Terms2, [<<"cats">>])
           ]}
        ]
    after
        ?SRV:close(Server)
    end.
%% A term generator can be pre-compiled as a server resource and passed
%% to add_document through #x_term_generator{name = Resource}. Smoke
%% test only: success is not crashing.
%% Fix: the comment on the former L14908 had lost its `%%' marker,
%% leaving bare prose in the body; restored as a comment.
term_generator_from_resource_gen() ->
    Path = testdb_path(res_term_generator),
    Params = [write, create, overwrite,
              #x_stemmer{language = <<"english">>}],
    %% The default generator uses the "english" stemmer.
    Document1 =
    [ #x_term_generator{name = default}
    , #x_text{value = "cats"}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        TGRes = xapian_server:term_generator(Server, #x_term_generator{}),
        TGRec = #x_term_generator{name = TGRes},
        ?SRV:add_document(Server, [TGRec|Document1]),
        []
    after
        ?SRV:close(Server)
    end.
%% A database created in write mode can be reopened read-only afterwards.
reopen_test() ->
    DbPath = testdb_path(reopen),
    WriteParams = [write, create, overwrite],
    {ok, Writer} = ?SRV:start_link(DbPath, WriteParams),
    ?SRV:close(Writer),
    {ok, Reader} = ?SRV:start_link(DbPath, []),
    ?SRV:close(Reader).
%% Row shape for stemmer_gen/0 query pages: doc id plus data payload.
-record(stemmer_test_record, {docid, data}).
%% #x_stemmer{language = none} inside a document cancels the database
%% default ("english") stemmer, so "cats" is indexed unstemmed only.
%% Fix: the comment on the former L14939 had lost its `%%' marker,
%% leaving bare prose in the body; restored as a comment.
cancel_default_stemmer_gen() ->
    Path = testdb_path(cancel_stemmer),
    Params = [write, create, overwrite,
              #x_stemmer{language = <<"english">>}],
    Document =
    [ #x_stemmer{language = none}
    , #x_text{value = "cats"}],
    Meta = xapian_term_record:record(term, record_info(fields, term)),
    {ok, Server} = ?SRV:start_link(Path, Params),
    %% The default generator uses the "english" stemmer.
    ExtractDocTerms = fun(Doc) ->
        TermTable = xapian_term_qlc:document_term_table(Server, Doc, Meta),
        Values = qlc:q([Val || #term{value = Val} <- TermTable]),
        qlc:e(Values)
        end,
    try
        DocId = ?SRV:add_document(Server, Document),
        Terms = ExtractDocTerms(DocId),
        [ {"Is #x_stemmer.language = none respected?"
          ,?_assertEqual(Terms, [<<"cats">>])}
        ]
    after
        ?SRV:close(Server)
    end.
%% End-to-end query-string parsing against one indexed document:
%% stemming, boolean `author:' prefixes, wildcards, NEAR, love/hate
%% ("-term") and a hand-built per-query parser (Q9).
%% Fix: the garbled record sketch on the former L14984 had lost its
%% `%%' marker; restored as a comment.
stemmer_gen() ->
    Path = testdb_path(stemmer),
    Params = [write, create, overwrite,
              #x_stemmer{language = <<"english">>},
              #x_prefix_name{name = author, prefix = <<$A>>, is_boolean=true}],
    Document =
    [ #x_data{value = "My test data as iolist (NOT INDEXED)"}
    , #x_text{value = "Return a list of available languages."}
    , #x_text{value = "And filter it."}
    , #x_delta{position=300}
    , #x_text{value = "And other string is here."}
    , #x_text{value = <<"Michael">>, prefix = author}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, Document),
        ?assert(is_integer(DocId)),
        Offset = 0,
        PageSize = 10,
        Meta = xapian_record:record(stemmer_test_record,
            record_info(fields, stemmer_test_record)),
        Q0 = #x_query_string{parser=standard, value="return"},
        Q1 = #x_query_string{value="return AND list"},
        Q2 = #x_query_string{value="author:michael"},
        Q3 = #x_query_string{value="author:olly list"},
        Q4 = #x_query_string{value="author:Michael"},
        Q5 = #x_query_string{value="retur*", features=[default, wildcard]},
        Q6 = #x_query_string{value="other AND Return"},
        Q7 = #x_query_string{value="list NEAR here"},
        Q8 = #x_query_string{value="list NEAR filter"},
        %% Q9 wires the record chain by hand, roughly:
        %% #x_query_string{parser = #x_query_parser{stemmer = #x_stemmer{...}}}
        Q9Stem = #x_stemmer{language=da},
        Q9Parser = #x_query_parser{stemmer=Q9Stem,
                                   stemming_strategy=none,
                                   max_wildcard_expansion=0,
                                   default_op='AND'},
        Q9 = #x_query_string{value="return", parser=Q9Parser},
        Q10 = #x_query_string{value="test -return"},
        Q11 = #x_query_string{value="test -return",
                              features=[default, {except, lovehate}]},
        Q12 = #x_query_string{value="test -return",
                              features=[default, {except, [lovehate]}]},
        %% Run a query page and print it for eyeballing.
        F = fun(Query) ->
            RecList = ?SRV:query_page(Server, Offset, PageSize, Query, Meta),
            io:format(user, "~n~p~n", [RecList]),
            RecList
            end,
        Qs =
        [Q0, Q1, Q2, Q3, Q4, Q5, Q6, Q7, Q8, Q9, Q10, Q11, Q12],
        [R0, R1, R2, R3, R4, R5, R6, R7, R8, R9, R10, R11, R12] =
            lists:map(F, Qs),
        [ ?_assert(is_list(R0) andalso length(R0) =:= 1)
        , ?_assertEqual(R1, R0)
        , ?_assertEqual(R2, R0)
        , ?_assertEqual(R3, [])
        , ?_assertEqual(R4, [])
        , ?_assertEqual(R5, R0)
        , ?_assertEqual(R6, R0)
        , ?_assertEqual(R7, [])
        , ?_assertEqual(R8, R0)
        , ?_assertEqual(R9, R0)
        , ?_assertEqual(R10, [])
        , ?_assertEqual(R11, R0)
        , ?_assertEqual(R12, R0)
        ]
    after
        ?SRV:close(Server)
    end.
%% Smoke test for several #x_query_parser configurations, including a
%% number_value_range_processor bound to the float `num' slot
%% (queried as "1..2mm"). Results are only printed, not asserted.
query_parser_test() ->
    Path = testdb_path(parser),
    Params = [write, create, overwrite,
              #x_value_name{slot = 0, name = num, type = float}],
    Document =
    [ #x_data{value = "My test data as iolist (NOT INDEXED)"}
    , #x_text{value = "The quick brown fox jumps over the lazy dog."}
    , #x_value{slot = num, value = 1}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, Document),
        ?assert(is_integer(DocId)),
        Offset = 0,
        PageSize = 10,
        Meta = xapian_record:record(document, record_info(fields, document)),
        %% Run a query page and print it for eyeballing.
        F = fun(Query) ->
            RecList = ?SRV:query_page(Server, Offset, PageSize, Query, Meta),
            io:format(user, "~n~p~n", [RecList]),
            RecList
            end,
        P1 = #x_query_parser{},
        P2 = #x_query_parser{default_op='AND'},
        P4 = #x_query_parser{name=standard},
        %% "mm" acts as a suffix marker for the numeric range query below.
        NVRP = xapian_resource:number_value_range_processor(num, "mm", suffix),
        P5 = #x_query_parser{value_range_processors = [NVRP]},
        Q1 = #x_query_string{parser=P1, value="dog"},
        Q2 = #x_query_string{parser=P2, value="dog fox"},
        Q3 = #x_query_string{parser=standard, value="dog"},
        Q4 = #x_query_string{parser=P4, value="dog"},
        Q5 = #x_query_string{parser=P5, value="1..2mm"},
        F(Q1),
        F(Q2),
        F(Q3),
        F(Q4),
        F(Q5)
    after
        ?SRV:close(Server)
    end.
%% parse_string/3 returns the requested piece(s) of information about a
%% parsed query string: the corrected query string, a compiled query
%% resource, or a list of both.
%% Fixes: two comments (former L15072, L15076) had lost their `%%'
%% markers; additionally the opening element of the result list was
%% missing (the list started with a bare `,'). The opener is
%% reconstructed from the Fs1 pattern, which expects
%% {corrected_query_string, same}.
parse_string_gen() ->
    Path = testdb_path(parse_string),
    Params = [write, create, overwrite],
    Document =
    [ #x_text{value = "The quick brown fox jumps over the lazy dog."}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, Document),
        ?assert(is_integer(DocId)),
        P1 = #x_query_parser{},
        %% CP is a compiled query parser (as a resource).
        CP1 = xapian_server:query_parser(Server, P1),
        S1 = #x_query_string{parser=CP1, value="dog"},
        CS1 = xapian_server:parse_string(Server, S1, corrected_query_string),
        %% CQ is a compiled query (as a resource).
        CQ1 = xapian_server:parse_string(Server, S1, query_resource),
        Fs1 = xapian_server:parse_string(Server, S1, [corrected_query_string,
                                                      query_resource]),
        Ids1 = all_record_ids(Server, CQ1),
        %% NOTE(review): first assertion reconstructed — `same' matches the
        %% Fs1 pattern below; confirm against upstream history.
        [ ?_assertEqual(CS1, same)
        , ?_assertMatch([{corrected_query_string, same}
                        ,{query_resource, _}], Fs1)
        , ?_assertEqual(Ids1, [DocId])
        ]
    after
        ?SRV:close(Server)
    end.
%% With the `spelling' feature enabled at index time and
%% `spelling_correction' at parse time, parse_string/3 corrects the
%% misspelt "bown" to "brown".
%% Fix: the comment on the former L15107 had lost its `%%' marker,
%% leaving bare prose in the body; restored as a comment.
parse_string_spelling_correction_gen() ->
    Path = testdb_path(ps_spell),
    Params = [write, create, overwrite],
    Document =
    [ #x_text{value = "The quick brown fox jumps over the lazy dog.",
              features = [spelling]}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        ?SRV:add_document(Server, Document),
        Meta = xapian_term_record:record(term_freq,
            record_info(fields, term_freq)),
        Table = xapian_term_qlc:spelling_table(Server, Meta),
        BrownQuery = qlc:q([Value || #term_freq{value = Value} <- Table,
                                     Value =:= <<"brown">>]),
        BrownRecords = qlc:e(BrownQuery),
        Records = qlc:e(Table),
        io:format(user, "~n~p~n", [Records]),
        P1 = #x_query_parser{},
        %% CP is a compiled query parser (as a resource).
        CP1 = xapian_server:query_parser(Server, P1),
        S1 = #x_query_string{parser=CP1, value="bown",
                             features = [default, spelling_correction]},
        CS1 = xapian_server:parse_string(Server, S1, corrected_query_string),
        [ ?_assertEqual(CS1, <<"brown">>)
        , ?_assertMatch([_], BrownRecords)
        ]
    after
        ?SRV:close(Server)
    end.
%% add_spelling/2 accepts both #x_text (words extracted from text) and
%% explicit #x_term entries; frequencies can be incremented by a plain
%% number, adjusted relatively ({cur, N}), set absolutely ({abs, N}),
%% or the entry removed. The before/after tables are asserted exactly.
add_spelling_gen() ->
    Path = testdb_path(add_spelling),
    Params = [write, create, overwrite],
    Document =
    [ #x_text{value = "The quick brown fox jumps over the lazy dog."}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        ?SRV:add_spelling(Server, Document),
        Meta = xapian_term_record:record(term_freq,
            record_info(fields, term_freq)),
        Table1 = xapian_term_qlc:spelling_table(Server, Meta),
        Records1 = qlc:e(Table1),
        Spelling = [#x_term{value = "cat", frequency = 1}
                   ,#x_term{value = "dog", frequency = 1}
                   ,#x_term{value = "fox", frequency = {cur, -1}}
                   ,#x_term{value = "the", frequency = {cur, -1}}
                   ,#x_term{value = "lazy", frequency = {cur, 5}}
                   ,#x_term{value = "over", frequency = {abs, 5}}
                   ,#x_term{value = "quick", frequency = -5}
                   ,#x_term{value = "jumps", action = remove}
                   ],
        ?SRV:add_spelling(Server, Spelling),
        Corrected = ?SRV:get_spelling_suggestion(Server, <<"lazzy">>),
        Table2 = xapian_term_qlc:spelling_table(Server, Meta),
        Records2 = qlc:e(Table2),
        io:format(user, "~n Before: ~p\tAfter: ~p~n", [Records1, Records2]),
        %% After the update: "fox" and "quick" drop to/below zero and
        %% disappear, "jumps" is removed, "the" goes 2 -> 1, "lazy"
        %% 1 -> 6, "over" is forced to 5 (see expected rows below).
        [?_assertEqual(Records1, [#term_freq{value = <<"brown">>, freq = 1}
                                 ,#term_freq{value = <<"dog">>, freq = 1}
                                 ,#term_freq{value = <<"fox">>, freq = 1}
                                 ,#term_freq{value = <<"jumps">>, freq = 1}
                                 ,#term_freq{value = <<"lazy">>, freq = 1}
                                 ,#term_freq{value = <<"over">>, freq = 1}
                                 ,#term_freq{value = <<"quick">>, freq = 1}
                                 ,#term_freq{value = <<"the">>, freq = 2}
                                 ])
        ,?_assertEqual(Records2, [#term_freq{value = <<"brown">>, freq = 1}
                                 ,#term_freq{value = <<"cat">>, freq = 1}
                                 ,#term_freq{value = <<"dog">>, freq = 2}
                                 ,#term_freq{value = <<"lazy">>, freq = 6}
                                 ,#term_freq{value = <<"over">>, freq = 5}
                                 ,#term_freq{value = <<"the">>, freq = 1}
                                 ]),
        {"get_spelling_suggestion test",
         ?_assertEqual(Corrected, <<"lazy">>)}
        ]
    after
        ?SRV:close(Server)
    end.
%% Synonym API: add_synonym / remove_synonym / clear_synonyms, plus the
%% qlc tables over synonym keys and per-key synonym values.
%% Fix: the commented-out line on the former L15188 had lost its `%%'
%% marker and its tokens were space-mangled; restored as a comment.
synonym_gen() ->
    Path = testdb_path(synonym),
    Params = [write, create, overwrite],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        Synonyms = ["trial", "examination", "exam", "proof",
                    "evaluation", "assay", "check"],
        ExpectedRecords2 =
            [#term_value{value = list_to_binary(X)}
             || X <- lists:sort(Synonyms)],
        ExpectedRecords3 =
            ExpectedRecords2 -- [#term_value{value = <<"check">>}],
        [?SRV:add_synonym(Server, "test", Synonym)
         || Synonym <- Synonyms],
        Meta = xapian_term_record:record(term_value,
            record_info(fields, term_value)),
        Table1 = xapian_term_qlc:synonym_key_table(Server, "", Meta),
        Records1 = qlc:e(Table1),
        Table2 = xapian_term_qlc:synonym_table(Server, "test", Meta),
        Records2 = qlc:e(Table2),
        ?SRV:remove_synonym(Server, "test", "check"),
        %% The same table resource can be re-evaluated after the removal:
        %% Table3 = xapian_term_qlc:synonym_table(Server, "test", Meta),
        Table3 = Table2,
        Records3 = qlc:e(Table3),
        io:format(user, "~n~p~n", [Records2]),
        ?SRV:clear_synonyms(Server, "test"),
        %% Creating a table over an empty synonym set raises.
        ?assertError(#x_error{type = <<"EmptySetDriverError">>},
            xapian_term_qlc:synonym_table(Server, "test", Meta)),
        %% Clearing an already-empty set is fine.
        ?SRV:clear_synonyms(Server, "test"),
        [?_assertEqual(Records1, [#term_value{value = <<"test">>}])
        ,?_assertEqual(Records2, ExpectedRecords2)
        ,?_assertEqual(Records3, ExpectedRecords3)
        ]
    after
        ?SRV:close(Server)
    end.
%% Transaction tests
%% Multi-server transactions: a fun that returns commits on all servers
%% (consistent); a crashing fun rolls back (consistent, not committed);
%% killing one participating server mid-transaction leaves the result
%% inconsistent and eventually brings every server down.
%% Fix: four inline comments (former L15217, L15222-L15223, L15231) had
%% lost their `%%' markers, leaving bare prose; restored as comments.
transaction_gen() ->
    Path1 = testdb_path(transaction1),
    Path2 = testdb_path(transaction2),
    Params = [write, create, overwrite],
    {ok, Server1} = ?SRV:start_link(Path1, Params),
    {ok, Server2} = ?SRV:start_link(Path2, Params),
    Fun = fun([_S1, _S2]) ->
        test_result
        end,
    BadFun = fun([_S1, _S2]) ->
        erlang:exit(badcat)
        end,
    BadFun2 = fun([S1, _S2]) ->
        %% Server1 will be killed because of supervision.
        erlang:exit(S1, hello)
        end,
    BadFun3 = fun([S1, _S2]) ->
        erlang:exit(S1, hello),
        %% Sleep for 1 second. The transaction process will
        %% kill it, because one of the servers is dead.
        timer:sleep(1000)
        end,
    Result1 = ?SRV:transaction([Server1, Server2], Fun, infinity),
    Result2 = ?SRV:transaction([Server1, Server2], BadFun),
    erlang:unlink(Server1),
    timer:sleep(1000),
    Result3 = ?SRV:transaction([Server1, Server2], BadFun2),
    %% Server1 was killed. Server3 will replace it.
    {ok, Server3} = ?SRV:start_link(Path1, Params),
    erlang:unlink(Server2),
    timer:sleep(1000),
    Result4 = ?SRV:transaction([Server2, Server3], BadFun3),
    ?SRV:close(Server3),
    #x_transaction_result{
        is_committed=Committed1,
        is_consistent=Consistent1
    } = Result1,
    #x_transaction_result{
        is_committed=Committed2,
        is_consistent=Consistent2
    } = Result2,
    #x_transaction_result{
        is_committed=Committed3,
        is_consistent=Consistent3
    } = Result3,
    #x_transaction_result{
        is_committed=Committed4,
        is_consistent=Consistent4
    } = Result4,
    {"Check transactions' results for good and bad functions.",
     [ ?_assertEqual(Committed1, true)
     , ?_assertEqual(Consistent1, true)
     , ?_assertEqual(Committed2, false)
     , ?_assertEqual(Consistent2, true)
     , ?_assertEqual(Committed3, false)
     , ?_assertEqual(Consistent3, false)
     , ?_assertEqual(Committed4, false)
     , ?_assertEqual(Consistent4, false)
     , ?_assertEqual(erlang:is_process_alive(Server1), false)
     , ?_assertEqual(erlang:is_process_alive(Server2), false)
     , ?_assertEqual(erlang:is_process_alive(Server3), false)
     ]}.
%% A transaction fun that never returns is aborted by the timeout and
%% rolled back: not committed, but both databases stay consistent.
transaction_timeout_gen() ->
    Path1 = testdb_path(tt1),
    Path2 = testdb_path(tt2),
    Params = [write, create, overwrite],
    {ok, Server1} = ?SRV:start_link(Path1, Params),
    {ok, Server2} = ?SRV:start_link(Path2, Params),
    Fun = fun([_S1, _S2]) ->
        timer:sleep(infinity)
        end,
    %% 100 ms timeout.
    Result1 = ?SRV:transaction([Server1, Server2], Fun, 100),
    ?SRV:close(Server1),
    ?SRV:close(Server2),
    #x_transaction_result{
        is_committed=Committed1,
        is_consistent=Consistent1
    } = Result1,
    {"The transaction is killed by timeout.",
     [ ?_assertEqual(Committed1, false)
     , ?_assertEqual(Consistent1, true)
     ]}.
%% A transaction requires every participating server to be writable:
%% with one read-only member it fails with reason `readonly_db',
%% regardless of the order of the servers in the list.
transaction_readonly_error_gen() ->
    Path1 = testdb_path(transaction1),
    Path2 = testdb_path(transaction4),
    %% Params1 = [] opens Server1 read-only; Server2 is writable.
    Params1 = [],
    Params2 = [write, create, overwrite],
    {ok, Server1} = ?SRV:start_link(Path1, Params1),
    {ok, Server2} = ?SRV:start_link(Path2, Params2),
    Fun = fun([_S1, _S2]) ->
        test_result
        end,
    Result1 = ?SRV:transaction([Server1, Server2], Fun, infinity),
    Result2 = ?SRV:transaction([Server2, Server1], Fun, infinity),
    ?SRV:close(Server1),
    ?SRV:close(Server2),
    #x_transaction_result{
        is_committed=Committed1,
        is_consistent=Consistent1,
        reason=Reason1
    } = Result1,
    #x_transaction_result{
        is_committed=Committed2,
        is_consistent=Consistent2,
        reason=Reason2
    } = Result2,
    {"Cannot start transaction for readonly server.",
     [ {"read_only @ write",
        [ ?_assertEqual(Committed1, false)
        , ?_assertEqual(Consistent1, true)
        , ?_assertEqual(Reason1, readonly_db)
        ]}
     , {"write @ read_only",
        [ ?_assertEqual(Committed2, false)
        , ?_assertEqual(Consistent2, true)
        , ?_assertEqual(Reason2, readonly_db)
        ]}
     ]}.
%% slot1 is a value (stored in a value slot).
%% Row shapes for the document-reading tests below:
%% docid plus one or two value slots plus the data payload.
-record(rec_test, {docid, slot1, data}).
-record(rec_test2, {docid, slot1, slot2, data}).
%% Only the data payload.
-record(short_rec_test, {data}).
%% read_document/3 maps docid, the named value slot and the data
%% payload onto the record described by the meta (#rec_test{} here).
read_document_test() ->
    Path = testdb_path(read_document),
    Params = [write, create, overwrite,
              #x_value_name{slot = 1, name = slot1}],
    Stem = xapian_resource:simple_stemmer(<<"english">>),
    Document =
    [ #x_term_generator{stemmer = Stem}
    , #x_data{value = "My test data as iolist"}
    , #x_value{slot = slot1, value = "Slot #0"}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId = ?SRV:add_document(Server, Document),
        Meta = xapian_record:record(rec_test, record_info(fields, rec_test)),
        Rec = ?SRV:read_document(Server, DocId, Meta),
        %% String values come back as binaries.
        ?assertEqual(Rec#rec_test.docid, 1),
        ?assertEqual(Rec#rec_test.slot1, <<"Slot #0">>),
        ?assertEqual(Rec#rec_test.data, <<"My test data as iolist">>)
    after
        ?SRV:close(Server)
    end.
%% document_info/3 extracts record fields from an in-memory document
%% description without storing it, so docid stays `undefined'.
document_info_test() ->
    Path = testdb_path(read_document),
    Params = [write, create, overwrite,
              #x_value_name{slot = 1, name = slot1}],
    Document =
    [ #x_stemmer{language = <<"english">>}
    , #x_data{value = "My test data as iolist"}
    , #x_value{slot = slot1, value = "Slot #0"}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        Meta = xapian_record:record(rec_test, record_info(fields, rec_test)),
        Rec = ?SRV:document_info(Server, Document, Meta),
        %% Never added to the database => no docid.
        ?assertEqual(Rec#rec_test.docid, undefined),
        ?assertEqual(Rec#rec_test.slot1, <<"Slot #0">>),
        ?assertEqual(Rec#rec_test.data, <<"My test data as iolist">>)
    after
        ?SRV:close(Server)
    end.
%% Slots declared with type = float are written as numbers and read
%% back as floats (7 -> 7.0); unset slots read back as `undefined'.
%% Value-range and value-comparison queries work on the float slot.
read_float_value_gen() ->
    Path = testdb_path(read_float),
    Params = [write, create, overwrite
             , #x_value_name{slot = 1, name = slot1, type = float}
             , #x_value_name{slot = 2, name = slot2, type = string}
             ],
    Document1 =
    [ #x_data{value = "My test data as iolist"}
    , #x_value{slot = slot1, value = 7}
    ],
    Document2 =
    [ #x_data{value = "My test data as iolist"}
    , #x_value{slot = slot1, value = 66}
    , #x_value{slot = slot2, value = "tentacle"}
    ],
    Document3 =
    [ #x_data{value = "My test data as iolist"}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId1 = ?SRV:add_document(Server, Document1),
        DocId2 = ?SRV:add_document(Server, Document2),
        DocId3 = ?SRV:add_document(Server, Document3),
        Meta = xapian_record:record(rec_test2, record_info(fields, rec_test2)),
        Rec1 = ?SRV:read_document(Server, DocId1, Meta),
        Rec2 = ?SRV:read_document(Server, DocId2, Meta),
        Rec3 = ?SRV:read_document(Server, DocId3, Meta),
        Meta2 = xapian_record:record(document, record_info(fields, document)),
        Offset = 0,
        PageSize = 10,
        %% Per the asserts below, only document 1 (slot1 = 7) matches
        %% each of these three queries.
        Query68 = #x_query_value_range{slot=slot1, from=6, to=8},
        Query8 = #x_query_value{op=lower, slot=slot1, value=8},
        Query7 = #x_query_value_range{slot=slot1, from=7, to=7},
        RecList68 = ?SRV:query_page(Server, Offset, PageSize, Query68, Meta2),
        RecList8 = ?SRV:query_page(Server, Offset, PageSize, Query8, Meta2),
        RecList7 = ?SRV:query_page(Server, Offset, PageSize, Query7, Meta2),
        [ ?_assertEqual(Rec1#rec_test2.docid, 1)
        , ?_assertEqual(Rec1#rec_test2.slot1, 7.0)
        , ?_assertEqual(Rec1#rec_test2.slot2, undefined)
        , ?_assertEqual(Rec1#rec_test2.data, <<"My test data as iolist">>)
        , ?_assertEqual(Rec2#rec_test2.docid, 2)
        , ?_assertEqual(Rec2#rec_test2.slot1, 66.0)
        , ?_assertEqual(Rec2#rec_test2.slot2, <<"tentacle">>)
        , ?_assertEqual(Rec2#rec_test2.data, <<"My test data as iolist">>)
        , ?_assertEqual(Rec3#rec_test2.docid, 3)
        , ?_assertEqual(Rec3#rec_test2.slot1, undefined)
        , ?_assertEqual(Rec3#rec_test2.slot2, undefined)
        , ?_assertEqual(Rec3#rec_test2.data, <<"My test data as iolist">>)
        , ?_assertEqual(RecList68, [#document{docid=1}])
        , ?_assertEqual(RecList7, [#document{docid=1}])
        , ?_assertEqual(RecList8, [#document{docid=1}])
        ]
    after
        ?SRV:close(Server)
    end.
%% A `string'-typed slot must hold valid UTF-8: storing <<128>> there
%% fails with a not_unicode error, while a `bytes'-typed slot stores
%% the same binary verbatim.
append_bytes_value_gen() ->
    Path = testdb_path(bytes),
    Params = [write, create, overwrite
             , #x_value_name{slot = 1, name = slot1, type = string}
             , #x_value_name{slot = 2, name = slot2, type = bytes}
             ],
    Document1 =
    [ #x_data{value = "My test data as iolist"}
    , #x_value{slot = slot1, value = <<128>>}
    ],
    Document2 =
    [ #x_data{value = "My test data as iolist"}
    , #x_value{slot = slot2, value = <<128>>}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        %% <<128>> alone is not valid UTF-8.
        ?assertError(#x_server_error{reason={not_unicode,{error,<<>>,<<128>>}}},
            ?SRV:add_document(Server, Document1)),
        DocId1 = ?SRV:add_document(Server, Document2),
        Meta = xapian_record:record(rec_test2, record_info(fields, rec_test2)),
        Rec1 = ?SRV:read_document(Server, DocId1, Meta),
        %% docid = 1: the failed add did not consume a document id.
        [ ?_assertEqual(Rec1#rec_test2.docid, 1)
        , ?_assertEqual(Rec1#rec_test2.slot1, undefined)
        , ?_assertEqual(Rec1#rec_test2.slot2, <<128>>)
        ]
    after
        ?SRV:close(Server)
    end.
%% #x_enquire.percent_cutoff drops matches whose relevance percent is
%% below the threshold: with cutoff 0 all three documents match the
%% query "cat dog"; with cutoff 50 only the two best remain.
cutoff_gen() ->
    Path = testdb_path(cutoff),
    Params = [write, create, overwrite
             , #x_value_name{slot = 0, name = slot0}
             ],
    Document1 =
    [ #x_text{value = "cat dog penguin"}
    ],
    Document2 =
    [ #x_text{value = "cat dog"}
    ],
    Document3 =
    [ #x_text{value = "cat"}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId1 = ?SRV:add_document(Server, Document1),
        DocId2 = ?SRV:add_document(Server, Document2),
        DocId3 = ?SRV:add_document(Server, Document3),
        Query = #x_query_string{value = "cat dog"},
        %% Cutoff 0 keeps every match.
        Enquire1 = #x_enquire{percent_cutoff = 0, value=Query},
        AllIds1 = all_record_ids(Server, Enquire1),
        Enquire2 = #x_enquire{percent_cutoff = 50, value=Query},
        AllIds2 = all_record_ids(Server, Enquire2),
        Meta = xapian_record:record(doc_weight,
            [docid, percent, weight]),
        MSetResourceId = ?SRV:match_set(Server, Enquire1),
        Table = xapian_mset_qlc:table(Server, MSetResourceId, Meta),
        %% Print docid/percent/weight rows for eyeballing.
        io:format(user, "~p~n", [ qlc:e(Table) ]),
        [?_assertEqual(AllIds1, [DocId2, DocId1, DocId3])
        ,?_assertEqual(AllIds2, [DocId2, DocId1])
        ]
    after
        ?SRV:close(Server)
    end.
%% With bool_weight every match scores equally, so #x_enquire.docid_order
%% decides the result order: `desc' yields descending document ids,
%% `asc' ascending.
docid_order_gen() ->
    Path = testdb_path(docid_order),
    Params = [write, create, overwrite
             , #x_value_name{slot = 0, name = slot0}
             ],
    Document1 =
    [ #x_text{value = "cat dog penguin"}
    ],
    Document2 =
    [ #x_text{value = "cat dog"}
    ],
    Document3 =
    [ #x_text{value = "cat"}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId1 = ?SRV:add_document(Server, Document1),
        DocId2 = ?SRV:add_document(Server, Document2),
        DocId3 = ?SRV:add_document(Server, Document3),
        Query = #x_query_string{value = "cat dog"},
        Enquire1 = #x_enquire{weighting_scheme = xapian_resource:bool_weight(),
                              value=Query,
                              docid_order = desc},
        AllIds1 = all_record_ids(Server, Enquire1),
        Enquire2 = Enquire1#x_enquire{docid_order = asc},
        AllIds2 = all_record_ids(Server, Enquire2),
        Meta = xapian_record:record(doc_weight,
            [docid, percent, weight]),
        MSetResourceId = ?SRV:match_set(Server, Enquire1),
        Table = xapian_mset_qlc:table(Server, MSetResourceId, Meta),
        %% Print docid/percent/weight rows for eyeballing.
        io:format(user, "~p~n", [ qlc:e(Table) ]),
        [{"docic=desc weighting_scheme=bool"
         ,?_assertEqual(AllIds1, [DocId3, DocId2, DocId1])}
        ,{"docic=asc weighting_scheme=bool"
         ,?_assertEqual(AllIds2, [DocId1, DocId2, DocId3])}
        ]
    after
        ?SRV:close(Server)
    end.
%% collapse_key merges matches sharing a slot value: by default one
%% representative survives per key (collapse_count reports how many
%% were hidden); collapse_max = 2 keeps up to two per key. mset_info
%% additionally reports uncollapsed match-count bounds.
collapse_key_gen() ->
    Path = testdb_path(collapse_key),
    Params = [write, create, overwrite
             , #x_value_name{slot = 0, name = slot0}
             ],
    %% Two documents with key "a", two with key "b".
    Document1 =
    [ #x_value{slot = slot0, value = "a"}
    ],
    Document2 =
    [ #x_value{slot = slot0, value = "a"}
    ],
    Document3 =
    [ #x_value{slot = slot0, value = "b"}
    ],
    Document4 =
    [ #x_value{slot = slot0, value = "b"}
    ],
    {ok, Server} = ?SRV:start_link(Path, Params),
    try
        DocId1 = ?SRV:add_document(Server, Document1),
        DocId2 = ?SRV:add_document(Server, Document2),
        DocId3 = ?SRV:add_document(Server, Document3),
        DocId4 = ?SRV:add_document(Server, Document4),
        Query = "",
        Enquire1 = #x_enquire{collapse_key = slot0, value=Query},
        Enquire2 = #x_enquire{collapse_key = slot0, collapse_max=2, value=Query},
        Records1 = collapsed_records(Server, Enquire1),
        Records2 = collapsed_records(Server, Enquire2),
        MSet = #x_match_set{enquire = Enquire1},
        MSetResourceId = ?SRV:match_set(Server, MSet),
        MatchesCounts = xapian_server:mset_info(Server, MSetResourceId,
            [uncollapsed_matches_lower_bound
            ,uncollapsed_matches_estimated
            ,uncollapsed_matches_upper_bound
            ]),
        [{uncollapsed_matches_lower_bound, LowerBound}
        ,{uncollapsed_matches_estimated, Estimated}
        ,{uncollapsed_matches_upper_bound, UpperBound}] = MatchesCounts,
        [?_assertEqual(Records1,
            [#collapsed{docid = DocId1, collapse_key = <<"a">>, collapse_count = 1}
            ,#collapsed{docid = DocId3, collapse_key = <<"b">>, collapse_count = 1}
            ])
        ,?_assertEqual(Records2,
            [#collapsed{docid = DocId1, collapse_key = <<"a">>, collapse_count = 0}
            ,#collapsed{docid = DocId2, collapse_key = <<"a">>, collapse_count = 0}
            ,#collapsed{docid = DocId3, collapse_key = <<"b">>, collapse_count = 0}
            ,#collapsed{docid = DocId4, collapse_key = <<"b">>, collapse_count = 0}
            ])
        ,{"Uncollapsed matches counts."
         ,[?_assert(LowerBound =< Estimated), ?_assert(Estimated =< UpperBound)]}
        ]
    after
        ?SRV:close(Server)
    end.
%% Run the enquire, materialise its match set and return all
%% #collapsed{} rows.
collapsed_records(Server, Enquire) ->
    MSetRes = ?SRV:match_set(Server, #x_match_set{enquire = Enquire}),
    RowMeta = xapian_record:record(collapsed, record_info(fields, collapsed)),
    qlc:e(xapian_mset_qlc:table(Server, MSetRes, RowMeta)).
%% A record with a single `data' field can be read back from a document.
short_record_test() ->
    {ok, Server} = ?SRV:start_link(testdb_path(short_rec_test),
                                   [write, create, overwrite]),
    DocId = ?SRV:add_document(Server, [#x_data{value = "ok"}]),
    RowMeta = xapian_record:record(short_rec_test,
                                   record_info(fields, short_rec_test)),
    Row = ?SRV:read_document(Server, DocId, RowMeta),
    ?assertEqual(Row#short_rec_test.data, <<"ok">>),
    ?SRV:close(Server).
%% Reading a document id that does not exist raises DocNotFoundError.
%% Opens the database at the same path read_document_test/0 used,
%% read-only (no write/create flags).
%% Fix: the macro-signature reminder on the former L15599 had lost its
%% `%%' marker; restored as a comment.
read_bad_docid_test() ->
    Path = testdb_path(read_document),
    Params = [#x_value_name{slot = 1, name = slot1}],
    {ok, Server} = ?SRV:start_link(Path, Params),
    Meta = xapian_record:record(rec_test, record_info(fields, rec_test)),
    DocId = 2,
    %% ?assertException(ClassPattern, TermPattern, Expression)
    ?assertException(error,
        #x_error{type = <<"DocNotFoundError">>},
        ?SRV:read_document(Server, DocId, Meta)),
    ?SRV:close(Server).
%% book_ext fields are both in Document and in Iterator.
%% book_iter fields are in Iterator only.
%% Fields retrieved from a matched document.
-record(book, {docid, author, title, data}).
%% As #book{}, plus per-match fields (rank, weight, percent).
-record(book_ext, {docid, author, title, data, rank, weight, percent}).
%% Per-match (iterator) fields only.
-record(book_iter, {docid, rank, weight, percent}).
%% EUnit generator: run every query-page related case against one shared
%% server. Defect fixed: two comment lines had lost their `%%' markers.
cases_gen() ->
    Cases =
    [ fun single_term_query_page_case/1
    , fun value_range_query_page_case/1
    , fun query_value_equal_case/1
    , fun double_terms_or_query_page_case/1
    , fun special_fields_query_page_case/1
    , fun document_case/1
    , fun enquire_case/1
    , fun enquire_sort_order_case/1
    , fun enquire_key_maker_case/1
    , fun resource_cleanup_on_process_down_case/1
    , fun enquire_to_mset_case/1
    , fun qlc_mset_case/1
    , fun qlc_mset_doc_case/1
    , fun qlc_mset_iter_case/1
    , fun create_user_resource_case/1
    , fun release_resource_case/1
    , fun release_table_case/1
    , fun release_table2_case/1
    %% Advanced enquires
    , fun advanced_enquire_case/1
    , fun advanced_enquire_weight_case/1
    , fun match_set_info_case/1
    , fun database_info_case/1
    ],
    Server = query_page_setup(),
    %% One setup for each test
    {setup,
     fun() -> Server end,
     fun query_page_clean/1,
     [Case(Server) || Case <- Cases]}.
%% Create a fresh test database with two indexed "book" documents;
%% value slot 1 holds the author and slot 2 the title.
query_page_setup() ->
    Path = testdb_path(query_page),
    ValueNames = [ #x_value_name{slot = 1, name = author}
                 , #x_value_name{slot = 2, name = title}],
    Params = [write, create, overwrite] ++ ValueNames,
    {ok, Server} = ?SRV:start_link(Path, Params),
    %% Both documents share an English stemmer.
    Base = [#x_stemmer{language = <<"english">>}],
    Document1 = Base ++
        [ #x_data{value = "Non-indexed data here"}
        , #x_text{value = "erlang/OTP"}
        , #x_text{value = "concurrency"}
        , #x_term{value = "telecom"}
        , #x_value{slot = title, value = "Software for a Concurrent World"}
        , #x_value{slot = author, value = "Joe Armstrong"}
        ],
    Document2 = Base ++
        [ #x_stemmer{language = <<"english">>}
        , #x_text{value = "C++"}
        , #x_term{value = "game"}
        , #x_value{slot = title, value = "Code Complete: "
                   "A Practical Handbook of Software Construction"}
        , #x_value{slot = author, value = "Steve McConnell"}
        ],
    %% Ids are assigned sequentially starting from 1.
    [1, 2] =
        [ ?SRV:add_document(Server, Document) || Document <- [Document1, Document2] ],
    Server.
%% Teardown for cases_gen/0: stop the shared server.
query_page_clean(Server) ->
    ?SRV:close(Server).
%% Select a page of #book{} records matching the free-text query "erlang".
single_term_query_page_case(Server) ->
    Case = fun() ->
        Meta = xapian_record:record(book, record_info(fields, book)),
        Page = ?SRV:query_page(Server, _Offset = 0, _PageSize = 10,
                               "erlang", Meta),
        io:format(user, "~n~p~n", [Page])
        end,
    {"erlang", Case}.
%% Select books whose author value lies in an inclusive range.
value_range_query_page_case(Server) ->
    Case = fun() ->
        Query = #x_query_value_range{slot=author,
                                     from="Joe Armstrong",
                                     to="Joe Armstrong"},
        Meta = xapian_record:record(book, record_info(fields, book)),
        Page = ?SRV:query_page(Server, _Offset = 0, _PageSize = 10,
                               Query, Meta),
        io:format(user, "~n~p~n", [Page])
        end,
    {"Joe Armstrong - Joe Armstrong", Case}.
%% Select books whose author value is exactly equal to a constant.
query_value_equal_case(Server) ->
    Case = fun() ->
        Query = #x_query_value{op=equal,
                               slot=author,
                               value="Joe Armstrong"},
        Meta = xapian_record:record(book, record_info(fields, book)),
        Page = ?SRV:query_page(Server, _Offset = 0, _PageSize = 10,
                               Query, Meta),
        io:format(user, "~n~p~n", [Page])
        end,
    {"Joe Armstrong - Joe Armstrong", Case}.
%% OR-combine two term queries; terms may be binaries or strings.
double_terms_or_query_page_case(Server) ->
    Case = fun() ->
        Query = #x_query{op='OR', value=[<<"erlang">>, "c++"]},
        Meta = xapian_record:record(book, record_info(fields, book)),
        Page = ?SRV:query_page(Server, _Offset = 0, _PageSize = 10,
                               Query, Meta),
        io:format(user, "~n~p~n", [Page])
        end,
    {"erlang OR c++", Case}.
%% book_ext adds per-match fields (rank, weight, percent) to each row.
special_fields_query_page_case(Server) ->
    Case = fun() ->
        Meta = xapian_record:record(book_ext, record_info(fields, book_ext)),
        Page = ?SRV:query_page(Server, _Offset = 0, _PageSize = 10,
                               "erlang", Meta),
        io:format(user, "~n~p~n", [Page])
        end,
    {"erlang (with rank, weight, percent)", Case}.
%% A document resource can be obtained either by a term it contains or
%% by its document id; both handles must expose the same term list.
document_case(Server) ->
    DocRes1 = xapian_server:document(Server, "telecom"),
    DocRes2 = xapian_server:document(Server, 1),
    %% An empty list is neither a term nor an id.
    ?assertError(badarg, xapian_server:document(Server, [])),
    Meta = xapian_term_record:record(term, record_info(fields, term)),
    AllDocumentTermsFn =
        fun(DocRes) ->
            Table = xapian_term_qlc:document_term_table(
                Server, DocRes, Meta, [ignore_empty]),
            %% NOTE(review): the resource is released before qlc:e/1 runs;
            %% presumably the table keeps its own handle -- confirm.
            ?SRV:release_resource(Server, DocRes),
            qlc:e(Table)
        end,
    Doc1Terms = AllDocumentTermsFn(DocRes1),
    Doc2Terms = AllDocumentTermsFn(DocRes2),
    [ {"Get a document resource by a term or by an id.",
       [?_assertEqual(Doc1Terms, Doc2Terms)]}
    ].
%% Xapian uses the `Xapian::Enquire' class as a hub for making queries.
%% enquire/2 returns a reference to a server-side enquire object.
enquire_case(Server) ->
    Case = fun() ->
        ResourceId = ?SRV:enquire(Server, "erlang"),
        io:format(user, "~n~p~n", [ResourceId]),
        ?SRV:release_resource(Server, ResourceId)
        end,
    {"Simple enquire resource", Case}.
%% Sorting by a value slot (optionally reversed) and by relevance.
%% Defect fixed: two comment lines had lost their `%%' markers.
enquire_sort_order_case(Server) ->
    Case = fun() ->
        Order = #x_sort_order{type=value, value=title},
        Query = #x_query{op = 'OR', value = ["telecom", "game"]},
        EnquireDescriptor = #x_enquire{order=Order, value=Query},
        AllIds = all_record_ids(Server, EnquireDescriptor),
        %% Were two documents selected?
        ?assertMatch([_, _], AllIds),
        %% Code = 2, Software = 1
        ?assertMatch([2, 1], AllIds),
        RevOrder1 = #x_sort_order{type=value, value=title, is_reversed = true},
        RevEnquireDescriptor1 = #x_enquire{order=RevOrder1, value=Query},
        RevAllIds1 = all_record_ids(Server, RevEnquireDescriptor1),
        RevOrder2 = #x_sort_order{type=relevance, is_reversed = false},
        RevEnquireDescriptor2 = #x_enquire{order=RevOrder2, value=Query},
        RevAllIds2 = all_record_ids(Server, RevEnquireDescriptor2),
        %% Relevance order cannot be reversed: expect badarg.
        RevOrder3 = #x_sort_order{type=relevance, is_reversed = true},
        RevEnquireDescriptor3 = #x_enquire{order=RevOrder3, value=Query},
        ?assertError(#x_server_error{reason=badarg},
            all_record_ids(Server, RevEnquireDescriptor3)),
        ?assertEqual(RevAllIds1, lists:reverse(AllIds)),
        ?assertEqual(RevAllIds2, AllIds)
        end,
    {"Enquire with sorting", Case}.
%% Sort by a composite key built from the author and title value slots.
%% Defect fixed: a comment line had lost its `%%' marker.
enquire_key_maker_case(Server) ->
    Case = fun() ->
        KeyMakerCon = xapian_resource:multi_value_key_maker([author, title]),
        Order = #x_sort_order{type=key, value=KeyMakerCon},
        Query = #x_query{op = 'OR', value = ["telecom", "game"]},
        EnquireDescriptor = #x_enquire{order=Order, value=Query},
        AllIds = all_record_ids(Server, EnquireDescriptor),
        %% Code = 2, Software = 1
        ?assertMatch([1, 2], AllIds)
        end,
    {"Enquire with sorting", Case}.
%% When the process that created a resource dies, the server cleans the
%% resource up; a later manual release then fails with `elem_not_found'.
%% Defect fixed: "garbidge" typo in the test title.
resource_cleanup_on_process_down_case(Server) ->
    Case = fun() ->
        Home = self(),
        Ref = make_ref(),
        %% Create the enquire resource inside a short-lived process.
        spawn_link(fun() ->
            ResourceId = ?SRV:enquire(Server, "erlang"),
            Home ! {resource_id, Ref, ResourceId}
            end),
        ResourceId =
            receive
                {resource_id, Ref, ResourceIdI} -> ResourceIdI
            end,
        %% The owner is dead, so the resource is already gone.
        ?assertError(elem_not_found, ?SRV:release_resource(Server, ResourceId))
        end,
    {"Check garbage collection for resources", Case}.
%% An enquire resource can be converted into a match-set resource.
%% Defect fixed: "conversation" typo in the test title.
enquire_to_mset_case(Server) ->
    Case = fun() ->
        Query = "erlang",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        MSetResourceId = ?SRV:match_set(Server, EnquireResourceId),
        io:format(user, "~n ~p ~p~n", [EnquireResourceId, MSetResourceId]),
        ?SRV:release_resource(Server, EnquireResourceId),
        ?SRV:release_resource(Server, MSetResourceId)
        end,
    {"Check conversion", Case}.
%% Wrap a match set into a QLC table and query/filter it.
%% Defect fixed: several comment lines had lost their `%%' markers.
qlc_mset_case(Server) ->
    Case = fun() ->
        %% Query is a query to make for retrieving documents from Xapian.
        Query = "erlang",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        MSetResourceId = ?SRV:match_set(Server, EnquireResourceId),
        %% Meta is a record, which contains some information about the
        %% fields to retrieve. The definition of Meta is encapsulated
        %% inside the `xapian_record' module.
        Meta = xapian_record:record(book_ext, record_info(fields, book_ext)),
        %% Create QlcTable from MSet.
        %% After creation of QlcTable, MSet can be removed.
        Table = xapian_mset_qlc:table(Server, MSetResourceId, Meta),
        ?SRV:release_resource(Server, MSetResourceId),
        %% QueryAll is a list of all matched records.
        QueryAll = Table,
        QueryFilter = qlc:q(
            [X || X=#book_ext{docid=DocId} <- Table, DocId =:= 1]),
        Queries = [QueryAll, QueryFilter],
        [begin
            Records = qlc:e(Q),
            io:format(user, "~n ~p~n", [Records])
         end || Q <- Queries
        ],
        %% This case will cause an error, because DocId > 0.
        QueryBadFilter = qlc:q(
            [X || X=#book_ext{docid=DocId} <- Table, DocId =:= 0]),
        ?assertError(bad_docid, qlc:e(QueryBadFilter))
        end,
    {"Check internal_qlc_init", Case}.
%% As qlc_mset_case/1, but with the plain #book{} record and several
%% docid filters. Defect fixed: stripped `%%' comment markers restored.
qlc_mset_doc_case(Server) ->
    Case = fun() ->
        %% Query is a query to make for retrieving documents from Xapian.
        Query = "erlang",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        MSetResourceId = ?SRV:match_set(Server, EnquireResourceId),
        %% Meta is a record, which contains some information about the
        %% fields to retrieve. The definition of Meta is encapsulated
        %% inside the `xapian_record' module.
        Meta = xapian_record:record(book, record_info(fields, book)),
        %% Create QlcTable from MSet.
        %% After creation of QlcTable, MSet can be removed.
        Table = xapian_mset_qlc:table(Server, MSetResourceId, Meta),
        ?SRV:release_resource(Server, MSetResourceId),
        %% QueryAll is a list of all matched records.
        QueryAll = Table,
        QueryFilter = qlc:q(
            [X || X=#book{docid=DocId} <- Table, DocId =:= 1]),
        QueryFilter2 = qlc:q(
            [X || X=#book{docid=DocId} <- Table, DocId =:= 1 orelse DocId =:= 2]),
        QueryFilter3 = qlc:q(
            [X || X=#book{docid=DocId} <- Table, DocId =:= 2 orelse DocId =:= 1]),
        Queries = [QueryAll, QueryFilter, QueryFilter2, QueryFilter3],
        [begin
            Records = qlc:e(Q),
            io:format(user, "~n ~p~n", [Records])
         end || Q <- Queries
        ],
        %% This case will cause an error, because DocId > 0.
        QueryBadFilter = qlc:q(
            [X || X=#book{docid=DocId} <- Table, DocId =:= 0]),
        ?assertError(bad_docid, qlc:e(QueryBadFilter))
        end,
    {"Check an iterator source.", Case}.
%% As qlc_mset_case/1, but retrieving iterator-only fields (#book_iter{}).
%% Defect fixed: stripped `%%' comment markers restored.
qlc_mset_iter_case(Server) ->
    Case = fun() ->
        %% Query is a query to make for retrieving documents from Xapian.
        Query = "erlang",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        MSetResourceId = ?SRV:match_set(Server, EnquireResourceId),
        %% Meta is a record, which contains some information about the
        %% fields to retrieve. The definition of Meta is encapsulated
        %% inside the `xapian_record' module.
        Meta = xapian_record:record(book_iter, record_info(fields, book_iter)),
        %% Create QlcTable from MSet.
        %% After creation of QlcTable, MSet can be removed.
        Table = xapian_mset_qlc:table(Server, MSetResourceId, Meta),
        ?SRV:release_resource(Server, MSetResourceId),
        %% QueryAll is a list of all matched records.
        QueryAll = Table,
        QueryFilter = qlc:q(
            [X || X=#book_iter{docid=DocId} <- Table, DocId =:= 1]),
        Queries = [QueryAll, QueryFilter],
        [begin
            Records = qlc:e(Q),
            io:format(user, "~n ~p~n", [Records])
         end || Q <- Queries
        ],
        %% This case will cause an error, because DocId > 0.
        QueryBadFilter = qlc:q(
            [X || X=#book_iter{docid=DocId} <- Table, DocId =:= 0]),
        ?assertError(bad_docid, qlc:e(QueryBadFilter))
        end,
    {"Check an iterator source.", Case}.
%% Defect fixed: two comment lines had lost their `%%' markers.
create_user_resource_case(Server) ->
    Case = fun() ->
        %% A user-defined resource is an object, which is created on the
        %% C++ side. We use Erlang references for returning it back to
        %% the user.
        ResourceId = ?SRV:internal_create_resource(Server, bool_weight),
        io:format(user, "User-defined resource ~p~n", [ResourceId])
        end,
    {"Check creation of user-defined resources", Case}.
%% enquire/2 also accepts an #x_enquire{} descriptor.
advanced_enquire_case(Server) ->
    Case = fun() ->
        Enquire = #x_enquire{value = "Erlang"},
        Res = ?SRV:enquire(Server, Enquire),
        ?assert(is_reference(Res)),
        ?SRV:release_resource(Server, Res)
        end,
    {"Check #x_enquire{}", Case}.
%% We can pass another `Xapian::Weight' object, stored as a user resource.
%% Here we create a new `Xapian::BoolWeight' resource and pass it back as
%% the weighting scheme.
%% An #x_enquire{} descriptor can carry a custom weighting scheme.
advanced_enquire_weight_case(Server) ->
    Case = fun() ->
        Enquire = #x_enquire{
            value = "Erlang",
            weighting_scheme = xapian_resource:bool_weight()
        },
        Res = ?SRV:enquire(Server, Enquire),
        ?assert(is_reference(Res)),
        ?SRV:release_resource(Server, Res)
        end,
    {"Check #x_enquire{weight=Xapian::BoolWeight}", Case}.
%% mset_info accepts a single key, a list of keys and pair keys such as
%% {term_weight, Term}.
match_set_info_case(Server) ->
    Case = fun() ->
        Query = "erlang",
        EnquireResourceId = ?SRV:enquire(Server, Query),
        ?assert(is_reference(EnquireResourceId)),
        MSetResourceId = ?SRV:match_set(Server, EnquireResourceId),
        try
            Info =
                ?SRV:mset_info(Server, MSetResourceId, [matches_lower_bound, size]),
            %% Exactly one document in the shared DB matches "erlang".
            ?assertEqual(1, ?SRV:mset_info(Server, MSetResourceId, size)),
            PropKeys = xapian_mset_info:properties(),
            %% Passing all known keys equals passing no key list at all.
            AllItems1 = ?SRV:mset_info(Server, MSetResourceId, PropKeys),
            AllItems2 = ?SRV:mset_info(Server, MSetResourceId),
            ?assertEqual(AllItems1, AllItems2),
            io:format(user, "~nMSet Info: ~p~n", [Info]),
            [Pair1Key, Pair2Key] =
            PairProps = [{term_weight, "erlang"}, {term_freq, "erlang"}],
            PairPropResult = ?SRV:mset_info(Server, MSetResourceId, PairProps),
            %% `_0dot4' matches any weight; the frequency must be exactly 1.
            ?assertMatch([{Pair1Key, _0dot4}, {Pair2Key, 1}],
                PairPropResult)
        after
            ?SRV:release_resource(Server, EnquireResourceId),
            ?SRV:release_resource(Server, MSetResourceId)
        end
        end,
    {"Check mset_info function.", Case}.
%% Releasing an already-released resource is an error.
release_resource_case(Server) ->
    Case = fun() ->
        Res = ?SRV:enquire(Server, "erlang"),
        ?SRV:release_resource(Server, Res),
        ?assertError(elem_not_found,
            ?SRV:release_resource(Server, Res))
        end,
    {"Check xapian_server:release_resource", Case}.
%% Deleting a QLC table also invalidates its underlying reference.
%% Defect fixed: a comment line had lost its `%%' marker.
release_table_case(Server) ->
    Case = fun() ->
        %% Create a Qlc Table for query "erlang".
        Table = mset_table(Server, "erlang", document),
        Ref = ?SRV:qlc_table_to_reference(Server, Table),
        ?SRV:release_table(Server, Table),
        ?assertError(elem_not_found,
            ?SRV:release_resource(Server, Ref))
        end,
    {"Try delete the reference after deleting the table.", Case}.
%% Deleting the reference also invalidates the QLC table (the converse
%% of release_table_case/1). Defect fixed: stripped `%%' marker restored.
release_table2_case(Server) ->
    Case = fun() ->
        %% Create a Qlc Table for query "erlang".
        Table = mset_table(Server, "erlang", document),
        Ref = ?SRV:qlc_table_to_reference(Server, Table),
        ?SRV:release_resource(Server, Ref),
        ?assertError(elem_not_found,
            ?SRV:release_table(Server, Table))
        end,
    {"Try delete the table after deleting the reference.", Case}.
%% Exercise database_info with list, single-key and pair-key forms.
database_info_case(Server) ->
    Case = fun() ->
        Info =
            ?SRV:database_info(Server, [document_count]),
        io:format(user, "~nDB Info: ~p~n", [Info]),
        %% Asking for every known property equals asking with no key list.
        AllItems1 = ?SRV:database_info(Server, xapian_db_info:properties()),
        AllItems2 = ?SRV:database_info(Server),
        ?assertEqual(AllItems1, AllItems2),
        %% The list form returns {Key, Value} pairs...
        ?assertEqual(?SRV:database_info(Server, [{term_exists, <<"erlang">>}]),
            [{{term_exists, <<"erlang">>}, true}]),
        ?assertEqual(?SRV:database_info(Server, [{term_exists, <<"prolog">>}]),
            [{{term_exists, <<"prolog">>}, false}]),
        %% ...while the single-key form returns the bare value.
        ?assert(?SRV:database_info(Server, {term_exists, <<"erlang">>})),
        ?assertNot(?SRV:database_info(Server, {term_exists, <<"prolog">>})),
        ?assertEqual(1, ?SRV:database_info(Server,
            {term_freq, <<"erlang">>})),
        %% Unknown terms and ids yield `undefined', not an error.
        ?assertEqual(undefined, ?SRV:database_info(Server,
            {term_freq, <<"prolog">>})),
        ?assertEqual(undefined, ?SRV:database_info(Server,
            {collection_freq, <<"prolog">>})),
        ?assert(is_integer(?SRV:database_info(Server,
            {document_length, 1}))),
        ?assertEqual(undefined, ?SRV:database_info(Server,
            {document_length, 1000})),
        %% Value slots can be addressed by number or by name.
        ?assertEqual(2, ?SRV:database_info(Server,
            {value_freq, 1})),
        ?assertEqual(2, ?SRV:database_info(Server,
            {value_freq, author})),
        ?assertEqual(<<"Joe Armstrong">>,
            ?SRV:database_info(Server,
                {value_lower_bound, author})),
        ?assertEqual(<<"Steve McConnell">>,
            ?SRV:database_info(Server,
                {value_upper_bound, author})),
        ?assertEqual(1,
            ?SRV:database_info(Server,
                {wdf_upper_bound, "erlang"})),
        ?assertEqual(undefined,
            ?SRV:database_info(Server,
                {wdf_upper_bound, "php"}))
        end,
    {"Check database_info function.", Case}.
%% set_metadata/3 stores a value retrievable via {metadata, Key};
%% unknown keys read back as an empty binary.
metadata_gen() ->
    {ok, Server} = ?SRV:start_link(testdb_path(metadata),
                                   [write, create, overwrite]),
    ?SRV:set_metadata(Server, "key", "value"),
    Known = ?SRV:database_info(Server, {metadata, "key"}),
    Unknown = ?SRV:database_info(Server, {metadata, "bad_key"}),
    ?SRV:close(Server),
    [?_assertEqual(Known, <<"value">>)
    ,?_assertEqual(Unknown, <<"">>)
    ].
%% A scale-weighted query still matches the stored document.
extra_weight_gen() ->
    {ok, Server} = ?SRV:start_link(testdb_path(extra_weight),
                                   [write, create, overwrite]),
    Document = [#x_term{value = X} || X <- ["Sxapian", "weight"]],
    DocId = ?SRV:add_document(Server, Document),
    Query = extra_weight_query(2.5, "Sxapian", "weight"),
    Ids = all_record_ids(Server, Query),
    [?_assertEqual(Ids, [DocId])].
%% Insert 1000 identical documents and walk all of them, one answer at a
%% time, through a QLC cursor.
large_db_and_qlc_test() ->
    {ok, Server} = ?SRV:start_link(testdb_path(large_db_and_qlc),
                                   [write, create, overwrite]),
    try
        Document = [#x_term{value = X} || X <- ["xapian", "erlang"]],
        ExpectedDocIds = lists:seq(1, 1000),
        DocIds = [?SRV:add_document(Server, Document)
                  || _ <- ExpectedDocIds],
        ?assertEqual(DocIds, ExpectedDocIds),
        Cursor = all_record_cursor(Server, "erlang"),
        try
            cursor_walk(1, 1001, Cursor)
        after
            qlc:delete_cursor(Cursor)
        end
    after
        ?SRV:close(Server)
    end.
%% qlc:append of two filtered handles over the same match-set table must
%% yield the full 1..1000 id sequence.
large_db_and_qlc_mset_with_joins_test() ->
    Path = testdb_path(large_db_and_qlc_joins),
    Params = [write, create, overwrite],
    {ok, Server} = ?SRV:start_link(Path, Params),
    ExpectedDocIds = lists:seq(1, 1000),
    DocIds = [begin
        Document = [ #x_term{value = integer_to_list(Id)} ],
        ?SRV:add_document(Server, Document)
        end || Id <- ExpectedDocIds],
    ?assertEqual(DocIds, ExpectedDocIds),
    %% NOTE(review): an empty query string appears to match every
    %% document (the walk below expects all 1000 ids) -- confirm.
    Query = "",
    Table = mset_table(Server, Query, document),
    QH1 = qlc:q([Id || #document{docid=Id} <- Table, Id =< 500]),
    QH2 = qlc:q([Id || #document{docid=Id} <- Table, Id > 500]),
    QH3 = qlc:append(QH1, QH2),
    Cursor = qlc:cursor(QH3),
    %% NOTE(review): Server is never closed here, unlike
    %% large_db_and_qlc_test/0.
    try
        cursor_walk(1, 1001, Cursor)
    after
        qlc:delete_cursor(Cursor)
    end.
%% Id =:= Max: the cursor must be exhausted.
%% Walk a cursor one answer at a time, asserting the answers are exactly
%% Id, Id+1, ..., Max-1 and that the cursor is then exhausted.
cursor_walk(Id, Id, Cursor) ->
    %% First clause: Id reached Max, so no more answers are expected.
    Result = qlc:next_answers(Cursor, 1),
    ?assertEqual(Result, []),
    [];
cursor_walk(Id, Max, Cursor) ->
    Result = qlc:next_answers(Cursor, 1),
    ?assertEqual(Result, [Id]),
    cursor_walk(Id+1, Max, Cursor).
%% Build a query in which Title's weight is multiplied by Factor.
extra_weight_query(Factor, Title, Body) ->
    Weighted = #x_query_scale_weight{factor = Factor, value = Title},
    #x_query{value = [Weighted, Body]}.
-record(mdocument, {docid, db_name, multi_docid, db_number}).
%% Build a QLC table of matched documents for Query. The third argument
%% is either a record name (document | mdocument) or a prebuilt meta.
mset_table(Server, Query, document) ->
    mset_table(Server, Query,
               xapian_record:record(document, record_info(fields, document)));
mset_table(Server, Query, mdocument) ->
    mset_table(Server, Query,
               xapian_record:record(mdocument, record_info(fields, mdocument)));
mset_table(Server, Query, Meta) ->
    EnquireRes = ?SRV:enquire(Server, Query),
    MSetRes = ?SRV:match_set(Server, EnquireRes),
    Table = xapian_mset_qlc:table(Server, MSetRes, Meta),
    ?SRV:release_resource(Server, EnquireRes),
    ?SRV:release_resource(Server, MSetRes),
    Table.
%% Run Query and return just the document ids, in match order.
all_record_ids(Server, Query) ->
    Table = mset_table(Server, Query, document),
    qlc:e(qlc:q([Id || #document{docid=Id} <- Table])).
%% Like all_record_ids/2, but return a QLC cursor instead of a list.
all_record_cursor(Server, Query) ->
    QH = qlc:q([Id || #document{docid=Id}
                          <- mset_table(Server, Query, document)]),
    qlc:cursor(QH).
%% All matches as #mdocument{} records.
all_multidb_records(Server, Query) ->
    qlc:e(mset_table(Server, Query, mdocument)).
%% Matches whose per-database docid equals Id (can be ambiguous across
%% the member databases).
record_by_id(Server, Query, Id) ->
    Table = mset_table(Server, Query, mdocument),
    QH = qlc:q([X || X=#mdocument{docid=DocId} <- Table, DocId =:= Id]),
    qlc:e(QH).
%% Matches whose globally unique multi-db docid equals Id.
multidb_record_by_id(Server, Query, Id) ->
    Table = mset_table(Server, Query, mdocument),
    QH = qlc:q([X || X=#mdocument{multi_docid=DocId} <- Table, DocId =:= Id]),
    qlc:e(QH).
%% Two single databases are populated separately and then opened together
%% as one multi-database; documents are addressed by per-db id
%% (ambiguous) or by the unique multi-db id.
multi_db_gen() ->
    Path1 = #x_database{name=multi1, path=testdb_path(multi1)},
    Path2 = #x_database{name=multi2, path=testdb_path(multi2)},
    Params = [write, create, overwrite],
    Document = [#x_term{value = "test"}],
    {ok, Server1} = ?SRV:start_link(Path1, Params),
    {ok, Server2} = ?SRV:start_link(Path2, Params),
    DocId1 = ?SRV:add_document(Server1, Document),
    DocId2 = ?SRV:add_document(Server2, Document),
    ?SRV:close(Server1),
    ?SRV:close(Server2),
    %% Reopen both databases together, read-only.
    {ok, Server} = ?SRV:start_link([Path1, Path2], []),
    Query = "test",
    Ids = all_record_ids(Server, Query),
    Records = all_multidb_records(Server, Query),
    DbNames = elements(#mdocument.db_name, Records),
    MultiIds = elements(#mdocument.multi_docid, Records),
    DbNums = elements(#mdocument.db_number, Records),
    %% docid 1 exists in both member databases; multi_docid is unique.
    LookupRecords1 = record_by_id(Server, Query, 1),
    LookupRecords2 = record_by_id(Server, Query, 5),
    LookupRecords3 = multidb_record_by_id(Server, Query, 1),
    LookupRecords4 = multidb_record_by_id(Server, Query, 2),
    LookupRecords5 = multidb_record_by_id(Server, Query, 5),
    [?_assertEqual([DocId1, DocId2], [1,1])
    ,?_assertEqual(Ids, [1,1])
    ,?_assertEqual(DbNames, [multi1, multi2])
    ,?_assertEqual(MultiIds, [1,2])
    ,?_assertEqual(DbNums, [1,2])
    ,{"Document is not found by id.",
      [?_assertEqual(LookupRecords5, [])
      ,?_assertEqual(LookupRecords2, [])]}
    ,?_assertEqual(length(LookupRecords1), 2)
    ,?_assertEqual(length(LookupRecords3), 1)
    ,?_assertEqual(length(LookupRecords4), 1)
    ].
%% multi_docid/3 maps a per-database docid plus a database name onto the
%% unique multi-database id (the assertions pin the interleaving).
multi_docid_gen() ->
    Path1 = #x_database{name=multi_docid1, path=testdb_path(multi1)},
    Path2 = #x_database{name=multi_docid2, path=testdb_path(multi2)},
    %% Create both (empty) databases, then reopen them together.
    [begin
         {ok, S} = ?SRV:start_link(P, [write, create, overwrite]),
         ?SRV:close(S)
     end || P <- [Path1, Path2]],
    {ok, Server} = ?SRV:start_link([Path1, Path2], []),
    [ ?_assertEqual(Expected, ?SRV:multi_docid(Server, DocId, DbName))
      || {Expected, DocId, DbName} <-
             [ {1, 1, multi_docid1}
             , {2, 1, multi_docid2}
             , {3, 2, multi_docid1}
             , {4, 2, multi_docid2} ]].
%% Project field Pos out of every record (1-based, as erlang:element/2).
elements(Pos, Records) ->
    lists:map(fun(Rec) -> erlang:element(Pos, Rec) end, Records).
%% Smoke test for a remote (TCP) database: start a xapian TCP server on
%% port 6666, then connect to it as a writable database.
remote_db_test() ->
    Params = [writable, link, {port, 6666}],
    DBList = [testdb_path(tcp_remote)],
    xapian_utility:tcp_server(DBList, Params),
    %% Give the external server time to start listening.
    %% NOTE(review): a fixed sleep is racy; consider polling the port.
    timer:sleep(1000),
    DBConfig = #x_tcp_database{port = 6666, host = "127.0.0.1"},
    {ok, Server} = ?SRV:start_link(DBConfig, [write]),
    ?SRV:close(Server).
%% name_to_slot / slot_to_type conversions, both through the whole cached
%% state (arity-1 calls plus xapian_common helpers) and through direct
%% per-key lookups (arity-2 calls).
%% NOTE(review): "conversation" in the test titles is likely a typo for
%% "conversion" (titles left unchanged here).
get_state_fields_gen() ->
    Params =
    [ #x_value_name{slot = 2, name = slot2, type = string}
    , #x_value_name{slot = 1, name = slot1, type = float}
    ],
    {ok, Server} = ?SRV:start_link([], Params),
    try
        N2S = ?SRV:name_to_slot(Server),
        Num1_1 = xapian_common:slot_id(slot1, N2S),
        Num1_2 = xapian_common:slot_id(1, N2S),
        Num2_1 = xapian_common:slot_id(slot2, N2S),
        Num2_2 = xapian_common:slot_id(2, N2S),
        %% Numbers pass through unchanged; names map to their slot.
        Slot2SlotTests = [{"Slot number is the same.",
            [ ?_assertEqual(Num1_2, 1)
            , ?_assertEqual(Num2_2, 2)]}],
        Name2SlotTests = [{"Name to number conversation.",
            [ ?_assertEqual(Num1_1, 1)
            , ?_assertEqual(Num2_1, 2)]}],
        S2T = ?SRV:slot_to_type(Server),
        Type1 = xapian_common:slot_type(1, S2T),
        Type2 = xapian_common:slot_type(2, S2T),
        SlotTypeTests2 = [{"Name or number to type conversation.",
            [ ?_assertEqual(Type1, float)
            , ?_assertEqual(Type2, string)
            ]}],
        %% Same conversions via the per-key server calls.
        Slot1_1 = ?SRV:name_to_slot(Server, slot1),
        Slot1_2 = ?SRV:name_to_slot(Server, 1),
        Slot2_1 = ?SRV:name_to_slot(Server, slot2),
        Slot2_2 = ?SRV:name_to_slot(Server, 2),
        Slot2SlotTests2 = [{"Slot number is the same.",
            [ ?_assertEqual(Slot1_2, 1)
            , ?_assertEqual(Slot2_2, 2)]}],
        Name2SlotTests2 = [{"Name to number conversation.",
            [ ?_assertEqual(Slot1_1, 1)
            , ?_assertEqual(Slot2_1, 2)]}],
        SlotType1_1 = ?SRV:slot_to_type(Server, slot1),
        SlotType1_2 = ?SRV:slot_to_type(Server, 1),
        SlotType2_1 = ?SRV:slot_to_type(Server, slot2),
        SlotType2_2 = ?SRV:slot_to_type(Server, 2),
        SlotTypeTests = [{"Name or number to type conversation.",
            [ ?_assertEqual(SlotType1_1, float)
            , ?_assertEqual(SlotType1_2, float)
            , ?_assertEqual(SlotType2_1, string)
            , ?_assertEqual(SlotType2_2, string)
            ]}],
        Slot2SlotTests ++ Name2SlotTests ++
        Slot2SlotTests2 ++ Name2SlotTests2 ++
        SlotTypeTests ++ SlotTypeTests2
    after
        ?SRV:close(Server)
    end.
|
991346f0859d397544aeace886cbae5fd2e3e8c1a70bf115d0776e5c52506fa9 | nikita-volkov/rebase | Function.hs | module Rebase.Data.Function
(
module Data.Function
)
where
import Data.Function
| null | https://raw.githubusercontent.com/nikita-volkov/rebase/7c77a0443e80bdffd4488a4239628177cac0761b/library/Rebase/Data/Function.hs | haskell | module Rebase.Data.Function
(
module Data.Function
)
where
import Data.Function
| |
3d705ad79f2aa3848303601cfdb4d28a47d13e6d9db460561ca1051c9d265d69 | ocaml/odoc | link.ml | module Url = Odoc_document.Url
(* Translation from Url.Path *)
module Path = struct
let for_printing url = List.map snd @@ Url.Path.to_list url
let segment_to_string (kind, name) =
match kind with
| `Module | `Page -> name
| _ -> Format.asprintf "%a-%s" Url.Path.pp_kind kind name
let is_leaf_page url = url.Url.Path.kind = `LeafPage
let get_dir_and_file is_flat url =
let l = Url.Path.to_list url in
let is_dir =
if is_flat then function `Page -> `Always | _ -> `Never
else function `LeafPage -> `Never | `File -> `Never | _ -> `Always
in
let dir, file = Url.Path.split ~is_dir l in
let dir = List.map segment_to_string dir in
let file =
match file with
| [] -> "index.html"
| [ (`LeafPage, name) ] -> name ^ ".html"
| [ (`File, name) ] -> name
| xs ->
assert is_flat;
String.concat "-" (List.map segment_to_string xs) ^ ".html"
in
(dir, file)
let for_linking ~is_flat url =
let dir, file = get_dir_and_file is_flat url in
dir @ [ file ]
let as_filename ~is_flat (url : Url.Path.t) =
Fpath.(v @@ String.concat Fpath.dir_sep @@ for_linking ~is_flat url)
end
(* How links are resolved: relative to the current page, or against a
   fixed base URI. *)
type resolve = Current of Url.Path.t | Base of string

(* Drop the longest common prefix of two lists, returning both tails. *)
let rec drop_shared_prefix l1 l2 =
  match (l1, l2) with
  | x :: xs, y :: ys when x = y -> drop_shared_prefix xs ys
  | _ -> (l1, l2)
(** Compute the hyperlink for an anchor, either absolute (against a base
    URI) or relative to the current page. Defect fixed: the [(* *)]
    delimiters of the xref_base_uri comment were stripped, leaving a
    bare prose line that would not compile. *)
let href ~config ~resolve t =
  let { Url.Anchor.page; anchor; _ } = t in
  let target_loc = Path.for_linking ~is_flat:(Config.flat config) page in
  (* If xref_base_uri is defined, do not perform relative URI
     resolution. *)
  match resolve with
  | Base xref_base_uri -> (
      let page = xref_base_uri ^ String.concat "/" target_loc in
      match anchor with "" -> page | anchor -> page ^ "#" ^ anchor)
  | Current path -> (
      let current_loc = Path.for_linking ~is_flat:(Config.flat config) path in
      let current_from_common_ancestor, target_from_common_ancestor =
        drop_shared_prefix current_loc target_loc
      in
      let relative_target =
        match current_from_common_ancestor with
        | [] ->
            (* We're already on the right page *)
            (* If we're already on the right page, the target from our common
               ancestor can't be anything other than the empty list *)
            assert (target_from_common_ancestor = []);
            []
        | [ _ ] ->
            (* We're already in the right dir *)
            target_from_common_ancestor
        | l ->
            (* We need to go up some dirs *)
            List.map (fun _ -> "..") (List.tl l) @ target_from_common_ancestor
      in
      let remove_index_html l =
        match List.rev l with
        | "index.html" :: rest -> List.rev ("" :: rest)
        | _ -> l
      in
      let relative_target =
        if Config.semantic_uris config then remove_index_html relative_target
        else relative_target
      in
      match (relative_target, anchor) with
      | [], "" -> "#"
      | page, "" -> String.concat "/" page
      | page, anchor -> String.concat "/" page ^ "#" ^ anchor)
| null | https://raw.githubusercontent.com/ocaml/odoc/08ea62de6eb64d3d8931cdabd20b354e18e8f10e/src/html/link.ml | ocaml | Translation from Url.Path
We're already on the right page
If we're already on the right page, the target from our common
ancestor can't be anything other than the empty list
We're already in the right dir
We need to go up some dirs | module Url = Odoc_document.Url
module Path = struct
let for_printing url = List.map snd @@ Url.Path.to_list url
let segment_to_string (kind, name) =
match kind with
| `Module | `Page -> name
| _ -> Format.asprintf "%a-%s" Url.Path.pp_kind kind name
let is_leaf_page url = url.Url.Path.kind = `LeafPage
let get_dir_and_file is_flat url =
let l = Url.Path.to_list url in
let is_dir =
if is_flat then function `Page -> `Always | _ -> `Never
else function `LeafPage -> `Never | `File -> `Never | _ -> `Always
in
let dir, file = Url.Path.split ~is_dir l in
let dir = List.map segment_to_string dir in
let file =
match file with
| [] -> "index.html"
| [ (`LeafPage, name) ] -> name ^ ".html"
| [ (`File, name) ] -> name
| xs ->
assert is_flat;
String.concat "-" (List.map segment_to_string xs) ^ ".html"
in
(dir, file)
let for_linking ~is_flat url =
let dir, file = get_dir_and_file is_flat url in
dir @ [ file ]
let as_filename ~is_flat (url : Url.Path.t) =
Fpath.(v @@ String.concat Fpath.dir_sep @@ for_linking ~is_flat url)
end
(* Link resolution mode: relative to the current page or a base URI. *)
type resolve = Current of Url.Path.t | Base of string

(* Drop the longest common prefix of two lists, returning both tails. *)
let rec drop_shared_prefix l1 l2 =
  match (l1, l2) with
  | x :: xs, y :: ys when x = y -> drop_shared_prefix xs ys
  | _ -> (l1, l2)
(** Compute the hyperlink for an anchor, either absolute (against a base
    URI) or relative to the current page. Defect fixed: a stripped
    comment line ("If xref_base_uri is defined ...") was restored to a
    real [(* *)] comment. *)
let href ~config ~resolve t =
  let { Url.Anchor.page; anchor; _ } = t in
  let target_loc = Path.for_linking ~is_flat:(Config.flat config) page in
  (* If xref_base_uri is defined, do not perform relative URI
     resolution. *)
  match resolve with
  | Base xref_base_uri -> (
      let page = xref_base_uri ^ String.concat "/" target_loc in
      match anchor with "" -> page | anchor -> page ^ "#" ^ anchor)
  | Current path -> (
      let current_loc = Path.for_linking ~is_flat:(Config.flat config) path in
      let current_from_common_ancestor, target_from_common_ancestor =
        drop_shared_prefix current_loc target_loc
      in
      let relative_target =
        match current_from_common_ancestor with
        | [] ->
            (* Already on the right page: the remaining target must also
               be empty. *)
            assert (target_from_common_ancestor = []);
            []
        | [ _ ] ->
            (* Already in the right directory. *)
            target_from_common_ancestor
        | l ->
            (* Go up one ".." per extra directory level. *)
            List.map (fun _ -> "..") (List.tl l) @ target_from_common_ancestor
      in
      let remove_index_html l =
        match List.rev l with
        | "index.html" :: rest -> List.rev ("" :: rest)
        | _ -> l
      in
      let relative_target =
        if Config.semantic_uris config then remove_index_html relative_target
        else relative_target
      in
      match (relative_target, anchor) with
      | [], "" -> "#"
      | page, "" -> String.concat "/" page
      | page, anchor -> String.concat "/" page ^ "#" ^ anchor)
72ebedca59617d3bb8ce8e27666400806ff6d029a4fa0c9cce28f11c3d96795c | racehub/stripe-clj | charge_test.clj | (ns stripe.charge-test
(:use clojure.test
stripe.charge)
(:require [stripe.balance :as b]
[stripe.test :as t]
[stripe.test-data :as td]))
;; End-to-end charge flow: create, retrieve (with and without expansion),
;; partially refund, then fully refund a charge against a temp customer.
(deftest charge-test
  "Test for charging a customer."
  (t/with-customer [created td/customer-data]
    (let [id (:id created)
          charge (create-charge
                  {:amount 2500
                   :customer id})]
      (is (true? (:paid charge))
          "Charge is fully paid.")
      (is (= 2500 (:amount charge))
          "And equal to the supplied amount.")
      (is (= 2500 (amount-available charge))
          "No refunds recorded at first.")
      (is (= id (:customer charge))
          "The customer on the req is the supplied customer.")
      (is (= charge (retrieve-charge (:id charge)))
          "The retrieve API works.")
      ;; :expand inlines the referenced balance transaction object.
      (let [fetched (retrieve-charge (:id charge) {:expand :balance_transaction})
            balance-tx (:balance_transaction fetched)]
        (is (= balance-tx (b/get-balance-tx (:id balance-tx)))
            "Balance transaction expansion works."))
      ;; Partial refund first, then a full refund of the remainder.
      (let [refunded (refund-charge {:id (:id charge)
                                     :amount 100})
            fully-refunded (refund-charge {:id (:id charge)})]
        (is (= 2400 (amount-available refunded))
            "Now the funds are dwindling.")
        (is (zero? (amount-available fully-refunded))
            "Refunding without an amount fully refunds the charge.")))))
| null | https://raw.githubusercontent.com/racehub/stripe-clj/f8199bceacbec9d15ac58f32508aa1e78aa1b491/test/clj/stripe/charge_test.clj | clojure | (ns stripe.charge-test
(:use clojure.test
stripe.charge)
(:require [stripe.balance :as b]
[stripe.test :as t]
[stripe.test-data :as td]))
;; End-to-end charge flow: create, retrieve (with and without expansion),
;; partially refund, then fully refund a charge against a temp customer.
(deftest charge-test
  "Test for charging a customer."
  (t/with-customer [created td/customer-data]
    (let [id (:id created)
          charge (create-charge
                  {:amount 2500
                   :customer id})]
      (is (true? (:paid charge))
          "Charge is fully paid.")
      (is (= 2500 (:amount charge))
          "And equal to the supplied amount.")
      (is (= 2500 (amount-available charge))
          "No refunds recorded at first.")
      (is (= id (:customer charge))
          "The customer on the req is the supplied customer.")
      (is (= charge (retrieve-charge (:id charge)))
          "The retrieve API works.")
      ;; :expand inlines the referenced balance transaction object.
      (let [fetched (retrieve-charge (:id charge) {:expand :balance_transaction})
            balance-tx (:balance_transaction fetched)]
        (is (= balance-tx (b/get-balance-tx (:id balance-tx)))
            "Balance transaction expansion works."))
      ;; Partial refund first, then a full refund of the remainder.
      (let [refunded (refund-charge {:id (:id charge)
                                     :amount 100})
            fully-refunded (refund-charge {:id (:id charge)})]
        (is (= 2400 (amount-available refunded))
            "Now the funds are dwindling.")
        (is (zero? (amount-available fully-refunded))
            "Refunding without an amount fully refunds the charge.")))))
| |
ac5a9d17c0a737cb16867dfe67b859efdc13155372bf2e3cf11eb03db00ac40e | hozumi/datomic-session | datomic_session.clj | (ns datomic-session
(:require [clojure.data :as data]
[datomic.api :as d]
[ring.middleware.session.store :as rs]))
(defn key->eid
  "Entity id whose key-attr value equals key, or nil if absent."
  [db key-attr key]
  (-> (d/q '[:find ?eid
             :in $ ?key-attr ?key
             :where [?eid ?key-attr ?key]]
           db key-attr key)
      ffirst))
(defn str->uuid
  "Parse s as a UUID; nil for nil or malformed input."
  [s]
  (when s
    (try
      (java.util.UUID/fromString s)
      (catch java.lang.IllegalArgumentException _e nil))))
(defn diff-tx-data
  "Transaction data turning entity eid's attributes from old-m into
  new-m: one :db/retract per removed key, plus a single assertion map
  when anything was added or changed."
  [eid old-m new-m]
  (let [[removed added] (data/diff old-m new-m)
        retracts (for [[k v] removed
                       :when (not (get added k))]
                   [:db/retract eid k v])]
    (if (seq added)
      (conj retracts (assoc added :db/id eid))
      retracts)))
;; Ring SessionStore backed by Datomic. Each session is one entity keyed
;; by `key-attr` (a UUID). When `auto-key-change?` is truthy, every write
;; rotates the session key.
(deftype DatomicStore [conn key-attr partition auto-key-change?]
  rs/SessionStore
  (read-session [_ key]
    (let [uuid-key (str->uuid key)]
      ;; Unknown or invalid keys read as an empty session map.
      (into {} (when uuid-key
                 (let [db (d/db conn)]
                   (d/entity db (key->eid db key-attr uuid-key)))))))
  (write-session [_ key data]
    (let [uuid-key (str->uuid key)
          db (when uuid-key (d/db conn))
          eid (when uuid-key (key->eid db key-attr uuid-key))
          ;; A new key is generated for fresh sessions, and on every
          ;; write when auto-key-change? is enabled.
          key-change? (or (not eid) auto-key-change?)
          uuid-key (if key-change?
                     (java.util.UUID/randomUUID) uuid-key)]
      (if eid
        (let [old-data (into {} (d/entity db eid))
              tx-data (diff-tx-data eid old-data (assoc data key-attr uuid-key))]
          ;; Skip the transaction entirely when nothing changed.
          (when (seq tx-data)
            @(d/transact conn tx-data)))
        @(d/transact conn
                     [(assoc data
                        :db/id (d/tempid partition)
                        key-attr uuid-key)]))
      ;; Ring expects the (possibly new) session key back as a string.
      (str uuid-key)))
  (delete-session [_ key]
    (when-let [uuid-key (str->uuid key)]
      (when-let [eid (key->eid (d/db conn) key-attr uuid-key)]
        @(d/transact conn [[:db.fn/retractEntity eid]])))
    nil))
(defn datomic-store [{:keys [conn key-attr partition auto-key-change?]
:or {key-attr :session/key partition :db.part/user}}]
(DatomicStore. conn key-attr partition auto-key-change?))
| null | https://raw.githubusercontent.com/hozumi/datomic-session/3daf5b5fcd60dd547a660e16d674d245890dc74f/src/datomic_session.clj | clojure | (ns datomic-session
(:require [clojure.data :as data]
[datomic.api :as d]
[ring.middleware.session.store :as rs]))
(defn key->eid [db key-attr key]
(ffirst
(d/q '[:find ?eid
:in $ ?key-attr ?key
:where
[?eid ?key-attr ?key]]
db key-attr key)))
(defn str->uuid [s]
(when s
(try (java.util.UUID/fromString s)
(catch java.lang.IllegalArgumentException e nil))))
(defn diff-tx-data [eid old-m new-m]
(let [[old-only new-only] (data/diff old-m new-m)
retracts (->> old-only
(remove (fn [[k]] (get new-only k)))
(map (fn [[k v]] [:db/retract eid k v])))]
(if (seq new-only)
(conj retracts (assoc new-only :db/id eid))
retracts)))
(deftype DatomicStore [conn key-attr partition auto-key-change?]
rs/SessionStore
(read-session [_ key]
(let [uuid-key (str->uuid key)]
(into {} (when uuid-key
(let [db (d/db conn)]
(d/entity db (key->eid db key-attr uuid-key)))))))
(write-session [_ key data]
(let [uuid-key (str->uuid key)
db (when uuid-key (d/db conn))
eid (when uuid-key (key->eid db key-attr uuid-key))
key-change? (or (not eid) auto-key-change?)
uuid-key (if key-change?
(java.util.UUID/randomUUID) uuid-key)]
(if eid
(let [old-data (into {} (d/entity db eid))
tx-data (diff-tx-data eid old-data (assoc data key-attr uuid-key))]
(when (seq tx-data)
@(d/transact conn tx-data)))
@(d/transact conn
[(assoc data
:db/id (d/tempid partition)
key-attr uuid-key)]))
(str uuid-key)))
(delete-session [_ key]
(when-let [uuid-key (str->uuid key)]
(when-let [eid (key->eid (d/db conn) key-attr uuid-key)]
@(d/transact conn [[:db.fn/retractEntity eid]])))
nil))
(defn datomic-store [{:keys [conn key-attr partition auto-key-change?]
:or {key-attr :session/key partition :db.part/user}}]
(DatomicStore. conn key-attr partition auto-key-change?))
| |
aa7981ce221d7ff4cee2ed1c624ad3444f164ed83711906e137b401aeb5c01ce | phoe-trash/gateway | already-logged-in.lisp | ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;;;; GATEWAY
" phoe " Herda 2016
already-logged-in.lisp
(in-package #:gateway)
#|
Error ALREADY-LOGGED-IN
Should be signaled when the user tries to perform a LOGIN command while
already being logged in.
Arguments:
* AUTH: the NAMED object representing whomever the user is already logged
in as.
|#
(define-gateway-error already-logged-in
((auth :reader already-logged-in-auth
:initarg :auth
:initform (error "Must provide previous auth.")))
(owner connection condition)
(((name (name (already-logged-in-auth condition))))
("Player is already logged in as ~S." name)
(declare (ignore owner))
(data-send connection `(:error :type :already-logged-in :name ,name))))
| null | https://raw.githubusercontent.com/phoe-trash/gateway/a8d579ccbafcaee8678caf59d365ec2eab0b1a7e/_old/errors/already-logged-in.lisp | lisp |
GATEWAY
Error ALREADY-LOGGED-IN
Should be signaled when the user tries to perform a LOGIN command while
already being logged in.
Arguments:
* AUTH: the NAMED object representing whomever the user is already logged
in as.
| " phoe " Herda 2016
already-logged-in.lisp
(in-package #:gateway)
(define-gateway-error already-logged-in
((auth :reader already-logged-in-auth
:initarg :auth
:initform (error "Must provide previous auth.")))
(owner connection condition)
(((name (name (already-logged-in-auth condition))))
("Player is already logged in as ~S." name)
(declare (ignore owner))
(data-send connection `(:error :type :already-logged-in :name ,name))))
|
850f49dfddde7ac04bf1789f6185688bf13e230203d230000cdfc668cf51c594 | stepcut/plugins | Plugin.hs | module Plugin where
resource = 1 :: Int
| null | https://raw.githubusercontent.com/stepcut/plugins/52c660b5bc71182627d14c1d333d0234050cac01/testsuite/pdynload/typealias/Plugin.hs | haskell | module Plugin where
resource = 1 :: Int
| |
1cb09b6e4691d66e470271508ca1b8b870fd27a3dbda806b06796d1d3f52cd2c | scalaris-team/scalaris | api_tx_concurrent_SUITE.erl | 2008 - 2014 Zuse Institute Berlin
%
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
% you may not use this file except in compliance with the License.
% You may obtain a copy of the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS ,
% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
% See the License for the specific language governing permissions and
% limitations under the License.
@author < >
%% @doc Unit tests for src/api_tx
%% @end
%% @version $Id$
-module(api_tx_concurrent_SUITE).
-author('').
-vsn('$Id$').
-compile(export_all).
-include("unittest.hrl").
all() ->
[increment_test_2, increment_test_4, increment_test_8].
suite() -> [ {timetrap, {seconds, 620}} ].
init_per_suite(Config) ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_ring(4, [{config, [{log_path, PrivDir}]}]),
Config.
end_per_suite(_Config) ->
ok.
inc(Key) ->
{TLog1, [ReadResult]} = api_tx:req_list([{read, Key}]),
case ReadResult of
{ok, Value} ->
{_TLog, [{ok}, CommitResult]} =
api_tx:req_list(TLog1, [{write, Key, Value + 1}, {commit}]),
CommitResult;
Fail -> Fail
end.
process(Parent, Key, Count) ->
process_iter(Parent, Key, Count, 0).
process_iter(Parent, _Key, 0, AbortCount) ->
Parent ! {done, AbortCount};
process_iter(Parent, Key, Count, AbortCount) ->
Result = inc(Key),
case Result of
{ok} -> process_iter(Parent, Key, Count - 1, AbortCount);
{fail, abort, _} -> process_iter(Parent, Key, Count, AbortCount + 1)
end.
increment_test_8(_Config) -> increment_test_n(_Config, 8).
increment_test_4(_Config) -> increment_test_n(_Config, 4).
increment_test_2(_Config) -> increment_test_n(_Config, 2).
increment_test_n(_Config, N) ->
Key = "i",
?equals(api_tx:write("i", 0), {ok}),
Self = self(),
Count = 200 div N,
_ = [ spawn(api_tx_concurrent_SUITE, process, [Self, Key, Count])
|| _ <- lists:seq(1, N) ],
Aborts = wait_for_done(N),
ct:pal("aborts: ~w~n", [Aborts]),
{ok, Total} = api_tx:read(Key),
?equals(N*Count, Total),
ok.
wait_for_done(0) ->
[];
wait_for_done(Count) ->
receive
{done, Aborts} ->
[Aborts |wait_for_done(Count - 1)]
end.
| null | https://raw.githubusercontent.com/scalaris-team/scalaris/feb894d54e642bb3530e709e730156b0ecc1635f/test/api_tx_concurrent_SUITE.erl | erlang |
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
@doc Unit tests for src/api_tx
@end
@version $Id$ | 2008 - 2014 Zuse Institute Berlin
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
@author < >
-module(api_tx_concurrent_SUITE).
-author('').
-vsn('$Id$').
-compile(export_all).
-include("unittest.hrl").
all() ->
[increment_test_2, increment_test_4, increment_test_8].
suite() -> [ {timetrap, {seconds, 620}} ].
init_per_suite(Config) ->
{priv_dir, PrivDir} = lists:keyfind(priv_dir, 1, Config),
unittest_helper:make_ring(4, [{config, [{log_path, PrivDir}]}]),
Config.
end_per_suite(_Config) ->
ok.
inc(Key) ->
{TLog1, [ReadResult]} = api_tx:req_list([{read, Key}]),
case ReadResult of
{ok, Value} ->
{_TLog, [{ok}, CommitResult]} =
api_tx:req_list(TLog1, [{write, Key, Value + 1}, {commit}]),
CommitResult;
Fail -> Fail
end.
process(Parent, Key, Count) ->
process_iter(Parent, Key, Count, 0).
process_iter(Parent, _Key, 0, AbortCount) ->
Parent ! {done, AbortCount};
process_iter(Parent, Key, Count, AbortCount) ->
Result = inc(Key),
case Result of
{ok} -> process_iter(Parent, Key, Count - 1, AbortCount);
{fail, abort, _} -> process_iter(Parent, Key, Count, AbortCount + 1)
end.
increment_test_8(_Config) -> increment_test_n(_Config, 8).
increment_test_4(_Config) -> increment_test_n(_Config, 4).
increment_test_2(_Config) -> increment_test_n(_Config, 2).
increment_test_n(_Config, N) ->
Key = "i",
?equals(api_tx:write("i", 0), {ok}),
Self = self(),
Count = 200 div N,
_ = [ spawn(api_tx_concurrent_SUITE, process, [Self, Key, Count])
|| _ <- lists:seq(1, N) ],
Aborts = wait_for_done(N),
ct:pal("aborts: ~w~n", [Aborts]),
{ok, Total} = api_tx:read(Key),
?equals(N*Count, Total),
ok.
wait_for_done(0) ->
[];
wait_for_done(Count) ->
receive
{done, Aborts} ->
[Aborts |wait_for_done(Count - 1)]
end.
|
045560ae913d7f5771db9b51159f68f0dd0a721d85511a60112e29c83c602a33 | upgradingdave/cljs | dev.cljs | (ns up.img.exif.dev
(:require
[devcards.core :as dc]
[reagent.core :as r]
[up.img.exif.core :as exif]
[up.img.core :as img])
(:require-macros
[devcards.core :as dc :refer [defcard deftest defcard-doc]]
[cljs.test :refer [is testing]]))
(def data (r/atom {}))
(defcard
"### Exif"
(dc/reagent
(fn [data _] [exif/exif-editor data]))
data
{:inspect-data true})
;;TODO: in progress
(defn new-canvas []
(doto (js/document.createElement "canvas")))
(defn fix-orientation
"Given image and exif orientation, ensure the photo is displayed
rightside up"
[img exif-orientation]
(let [width (.-width img)
height (.-height img)
canvas (js/document.getElementById "canvas")
ctx (.getContext canvas "2d")]
(js/console.log "width")
(js/console.log width)
(js/console.log "height")
(js/console.log height)
(case exif-orientation
1 (.transform ctx 1 0 0 1 0 0)
2 (.transform ctx -1 0 0 1 width 0)
3 (.transform ctx -1 0 0 -1 width height)
4 (.transform ctx 1 0 0 -1 0 height)
5 (.transform ctx 0 1 1 0 0 0)
6 (.transform ctx 0 1 -1 0 height 0)
7 (.transform ctx 0 -1 -1 0 height width)
8 (.transform ctx 0 -1 1 0 0 width))
(.drawImage ctx img 0 0 width height 0 0 width height)))
(defn get-orientation [img data]
(js/EXIF.getData
img
(fn []
(this-as
this
(let [exifdata (js->clj (.-exifdata this))
orientation (get exifdata "Orientation")]
(swap! data assoc-in [:orientation] orientation)
(fix-orientation this orientation))))))
(deftest file-api-supported
(testing "sanity"
(is (= true true))))
| null | https://raw.githubusercontent.com/upgradingdave/cljs/1026b6db905214586fb7e04800df078da19b37cc/src/cljs/up/img/exif/dev.cljs | clojure | TODO: in progress | (ns up.img.exif.dev
(:require
[devcards.core :as dc]
[reagent.core :as r]
[up.img.exif.core :as exif]
[up.img.core :as img])
(:require-macros
[devcards.core :as dc :refer [defcard deftest defcard-doc]]
[cljs.test :refer [is testing]]))
(def data (r/atom {}))
(defcard
"### Exif"
(dc/reagent
(fn [data _] [exif/exif-editor data]))
data
{:inspect-data true})
(defn new-canvas []
(doto (js/document.createElement "canvas")))
(defn fix-orientation
"Given image and exif orientation, ensure the photo is displayed
rightside up"
[img exif-orientation]
(let [width (.-width img)
height (.-height img)
canvas (js/document.getElementById "canvas")
ctx (.getContext canvas "2d")]
(js/console.log "width")
(js/console.log width)
(js/console.log "height")
(js/console.log height)
(case exif-orientation
1 (.transform ctx 1 0 0 1 0 0)
2 (.transform ctx -1 0 0 1 width 0)
3 (.transform ctx -1 0 0 -1 width height)
4 (.transform ctx 1 0 0 -1 0 height)
5 (.transform ctx 0 1 1 0 0 0)
6 (.transform ctx 0 1 -1 0 height 0)
7 (.transform ctx 0 -1 -1 0 height width)
8 (.transform ctx 0 -1 1 0 0 width))
(.drawImage ctx img 0 0 width height 0 0 width height)))
(defn get-orientation [img data]
(js/EXIF.getData
img
(fn []
(this-as
this
(let [exifdata (js->clj (.-exifdata this))
orientation (get exifdata "Orientation")]
(swap! data assoc-in [:orientation] orientation)
(fix-orientation this orientation))))))
(deftest file-api-supported
(testing "sanity"
(is (= true true))))
|
92d25c2c399451232ff2c8d7878bb5b7998eb0dded9b1414d6b0fba7c68230ba | johnlawrenceaspden/hobby-code | clojure-is-fast-version-1.4.clj | ;; Is Clojure Still Fast ?
;; Once upon a time I wrote a blog post saying that clojure was fast. It still is, and optimizing
;; it is now much easier than it used to be, but it doesn't seem to be *quite* as fast as it once
;; was.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; I optimized a program to solve a differential equation in a
;; simple-minded way:
;; The equation was: dy/dt = f(t,y) where f(t, y) = t-y and y=0 when t=0
;; ( the exact solution is: y=e^(-t)+t-1 )
Here 's a program to solve it using Euler 's method
(defn f [t y] (- t y))
(defn solveit [t0 y0 h its]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (f t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))
;; For every go round the loop we have to:
;; compare its with 0,
;; branch depending on the result,
;; add t0 to h,
call f with t0 and y0 ,
;; multiply h and the result,
add that to y0 ,
;; jump.
;; So if this was an assembly language program that worked the way
you 'd expect , each loop would take 7 cycles .
According to /proc / cpuinfo , my netbook runs at 1.662 GHz .
(def *cpuspeed* 1.662)
;; We care about how many cycles each iteration of the solver takes:
(defmacro cyclesperit [expr its]
`(let [start# (. System (nanoTime))
ret# ( ~@expr (/ 1.0 ~its) ~its )
finish# (. System (nanoTime))]
(int (/ (* *cpuspeed* (- finish# start#)) ~its))))
;; With the program as written, this estimate turns out to have been a little optimistic.
;; The figures in the original post were on a desktop machine that I no longer have, which was more
;; powerful than my current netbook.
To two significant figures , the results of the timing expression
(cyclesperit (solveit 0.0 1.0) 1000000)
;; are:
On my old desktop with clojure 1.2 : 2300 cycles
On my netbook with clojure 1.2 : 2800 cycles
On my netbook with clojure 1.3 : 2500 cycles
On my netbook with clojure 1.4 : 2400 cycles
;; So it looks like my netbook is not only slower in clock speed than my desktop was, but also in
terms of cycles / iteration . That 's not surprising as the netbook has an Atom processor , optimized
;; for low power rather than for speed.
;; But it also looks as though clojure has been speeding up slightly, which has almost made up for that.
I 'm also assuming that the JVM itself has n't changed much since the original blog post . The
netbook timings were all done today on the same JVM , but the desktop timings are from a while
;; back, so that might account for some differences.
So we 're looking at a slowdown of about 300 times over what we could probably
;; achieve coding in assembler or in C with a good optimizing compiler (and of
course I 'm assuming that floating point operations take one cycle each )
;; This is about the sort of speed that you'd expect from a dynamic language
;; without any optimization or type hinting.
;; In the original blog post I went through a number of faster versions:
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; In solveit-2 I explicitly typed the loop variables, inlined the function call, and made an
;; internal target for recur. That speeded things up considerably:
(defn solveit-2 [t0 y0 h its]
(loop [t0 (double t0), y0 (double y0), h (double h), its (int its)]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its])))
;; Let's time that and see how it goes:
(cyclesperit (solveit-2 0.0 1.0) 10000000)
On my old desktop with clojure 1.2 : 490 cycles
On my netbook with clojure 1.2 : 600 cycles
On my netbook with clojure 1.3 : 44 cycles
On my netbook with clojure 1.4 : 44 cycles
;; Wow!
Again we see that my netbook is around 20 % slower , but the performance change between clojure 1.2
and clojure 1.3 was incredible .
There 's now about a factor of 6 between the clojure version and the imaginary assembler program
;; on the imaginary cpu in my head. And all I've done is to declare the types and inline the functions.
This is actually so impressive that I want to examine the three things in detail , to see how the
three things combine to cause the speedup . As I remember from clojure 1.2 , you needed all three
changes to see any great difference . In 1.4 you can type hint separately from making an internal loop target .
All timings with clojure 1.4.0 .
(defn f [t y] (- t y))
(defn solveit [t0 y0 h its]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (f t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))
;; type hints:
(defn ^double tf [^double t ^double y] (- t y))
(defn solveit-1-1 [^double t0 ^double y0 ^double h ^long its]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (tf t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))
289
;; Inline f
(defn solveit-1-2 [ ^double t0 ^double y0 ^double h ^long its]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))
44
Internal loop target
(defn solveit-1-3 [ ^double t0 ^double y0 ^double h ^long its]
(loop [t0 t0, y0 y0, h h, its its]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its])))
44
;; Original version
(defn solveit-2 [t0 y0 h its]
(loop [t0 (double t0), y0 (double y0), h (double h), its (int its)]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its])))
;; Let's time that and see how it goes:
44
This is pretty awesome . Type hints alone are giving us a factor of eight speedup , and inlining
the function then gives us another factor of 6 . The internal loop target , which was originally a
;; bit of a hack to allow clojure to deduce types doesn't make any difference to the hinted version,
;; but in fact that still works, and gives the compiler enough information to get the same speed.
;; So far I really could not be more impressed.
;; In the original post, I then used a profiler to find out where the loop was running slow, and
;; did some strange things to make it fast:
(defn solveit-3 [t0 y0 h its]
(loop [t0 (double t0), y0 (double y0), h (double h), its (int its)]
(if (> its (int 0))
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its])))
(cyclesperit (solveit-3 0.0 1.0) 10000000)
On my old desktop with clojure 1.2 : 70 cycles
On my netbook with clojure 1.2 : 90 cycles
On my netbook with clojure 1.3 : 100 cycles
On my netbook with clojure 1.4 : 100 cycles
;; Originally, that (int 0) instead of 0 in the comparison made all the difference, causing a huge
;; and unexpected speedup.
The odd thing here is that the same program actually runs slightly slower in clojure 1.3/1.4 than
it does in clojure 1.2 . Not much , and they 're all considerably slower than clojure 1.4 's best
;; shot so far.
;; What I think is happening here is that that int cast is taking up unnecessary time in the later clojures,
whereas in 1.2 it was the final piece of the puzzle as far as the compiler was concerned .
;; To test this:
(defn solveit-3-a [ t0 y0 h its]
(loop [t0 (double t0), y0 (double y0), h (double h), its (long its)]
(if (> its (long 0 ))
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its])))
44
solveit-3 - a runs at the same 44 cycles / iteration as we 've been seeing before . I get the
;; impression that modern clojure prefers longs to ints.
;; The final craziness was to take the (int 0) out of the loop entirely:
(defn solveit-4 [t0 y0 h its]
(let [zero (int 0)]
(loop [t0 (double t0) y0 (double y0) h (double h) its (int its)]
(if (> its zero)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))))
(cyclesperit (solveit-4 0.0 1.0) 100000000)
;; And this is where it gets weird:
On my old desktop with clojure 1.2 : 23 cycles
On my netbook with clojure 1.2 : 32 cycles
On my netbook with clojure 1.3 : 45 cycles
On my netbook with clojure 1.4 : 45 cycles
Clojure 1.3/1.4 is now running at its usual ( fast ) speed , despite the ints rather than longs in solveit-4
But Clojure 1.2 on this program is able to run about 33 % faster than the modern versions .
;; So it actually looks as though whatever awesomeness has caused the phenomenal speedup between
clojure 1.2 and 1.3 has actually slightly slowed down the optimized version .
;; I'm imagining that this is something to do with using longs rather than ints.
But I have n't done any kind of close profiling to see if I can make 1.4 run any faster than 44 cycles / loop .
Someone in a comment on my earlier post said that they were seeing 8 cycles / second on their
Macintosh , so it might just be that there 's something weird about the Atom processor .
At any rate , it looks as though optimizing numeric code in Clojure is now dead easy . You just
;; tell it the types, inline function calls, and that's as good as it gets.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
;; For those in real need of cycles and willing to take risks to save them, there's:
(set! *unchecked-math* true)
Which I do n't think has an equivalent in 1.2 , which cuts a few cycles off :
(defn solveit-4 [t0 y0 h its]
(let [zero (long 0)]
(loop [t0 (double t0) y0 (double y0) h (double h) its (long its)]
(if (> its zero)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))))
37
;; I do wonder what is going on here. I'm not sure what checking is here to be turned off.
;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;
Incidentally , the following two java programs also produce the numbers that are seen above :
public class {
;; public static void main (String[] args){
double cpuspeed = 1.662 ;
int its = 10000000 ;
;; double t=0;
;; double y=0;
;; double h=1.0/its;
;; Long start = System.nanoTime();
;; for(int i = 0; i < its; i++){
;; y = y+h*(t-y);
;; t = t+h;
;; }
;; Long finish = System.nanoTime();
;; System.out.println("y=" + y + " t=" +t);
;; System.out.println("cycles/iteration: " + ((int) ((cpuspeed * (finish - start)) / its)));
;; }
;; }
;; y=0.3678794227282174 t=0.99999999975017
cycles / iteration : 32
public class {
;; public static void main (String[] args){
double cpuspeed = 1.662 ;
long its = 10000000 ;
;; double t=0;
;; double y=0;
;; double h=1.0/its;
;; long start = System.nanoTime();
;; for(long i = 0; i < its; i++){
;; y = y+h*(t-y);
;; t = t+h;
;; }
;; long finish = System.nanoTime();
;; System.out.println("y=" + y + " t=" +t);
;; System.out.println("cycles/iteration: " + ((int) ((cpuspeed * (finish - start)) / its)));
;; }
;; }
;; y=0.3678794227282174 t=0.99999999975017
cycles / iteration : 37
| null | https://raw.githubusercontent.com/johnlawrenceaspden/hobby-code/48e2a89d28557994c72299962cd8e3ace6a75b2d/clojure-is-fast-version-1.4.clj | clojure | Is Clojure Still Fast ?
Once upon a time I wrote a blog post saying that clojure was fast. It still is, and optimizing
it is now much easier than it used to be, but it doesn't seem to be *quite* as fast as it once
was.
I optimized a program to solve a differential equation in a
simple-minded way:
The equation was: dy/dt = f(t,y) where f(t, y) = t-y and y=0 when t=0
( the exact solution is: y=e^(-t)+t-1 )
For every go round the loop we have to:
compare its with 0,
branch depending on the result,
add t0 to h,
multiply h and the result,
jump.
So if this was an assembly language program that worked the way
We care about how many cycles each iteration of the solver takes:
With the program as written, this estimate turns out to have been a little optimistic.
The figures in the original post were on a desktop machine that I no longer have, which was more
powerful than my current netbook.
are:
So it looks like my netbook is not only slower in clock speed than my desktop was, but also in
for low power rather than for speed.
But it also looks as though clojure has been speeding up slightly, which has almost made up for that.
back, so that might account for some differences.
achieve coding in assembler or in C with a good optimizing compiler (and of
This is about the sort of speed that you'd expect from a dynamic language
without any optimization or type hinting.
In the original blog post I went through a number of faster versions:
In solveit-2 I explicitly typed the loop variables, inlined the function call, and made an
internal target for recur. That speeded things up considerably:
Let's time that and see how it goes:
Wow!
on the imaginary cpu in my head. And all I've done is to declare the types and inline the functions.
type hints:
Inline f
Original version
Let's time that and see how it goes:
bit of a hack to allow clojure to deduce types doesn't make any difference to the hinted version,
but in fact that still works, and gives the compiler enough information to get the same speed.
So far I really could not be more impressed.
In the original post, I then used a profiler to find out where the loop was running slow, and
did some strange things to make it fast:
Originally, that (int 0) instead of 0 in the comparison made all the difference, causing a huge
and unexpected speedup.
shot so far.
What I think is happening here is that that int cast is taking up unnecessary time in the later clojures,
To test this:
impression that modern clojure prefers longs to ints.
The final craziness was to take the (int 0) out of the loop entirely:
And this is where it gets weird:
So it actually looks as though whatever awesomeness has caused the phenomenal speedup between
I'm imagining that this is something to do with using longs rather than ints.
tell it the types, inline function calls, and that's as good as it gets.
For those in real need of cycles and willing to take risks to save them, there's:
I do wonder what is going on here. I'm not sure what checking is here to be turned off.
public static void main (String[] args){
double t=0;
double y=0;
double h=1.0/its;
Long start = System.nanoTime();
for(int i = 0; i < its; i++){
y = y+h*(t-y);
t = t+h;
}
Long finish = System.nanoTime();
System.out.println("y=" + y + " t=" +t);
System.out.println("cycles/iteration: " + ((int) ((cpuspeed * (finish - start)) / its)));
}
}
y=0.3678794227282174 t=0.99999999975017
public static void main (String[] args){
double t=0;
double y=0;
double h=1.0/its;
long start = System.nanoTime();
for(long i = 0; i < its; i++){
y = y+h*(t-y);
t = t+h;
}
long finish = System.nanoTime();
System.out.println("y=" + y + " t=" +t);
System.out.println("cycles/iteration: " + ((int) ((cpuspeed * (finish - start)) / its)));
}
}
y=0.3678794227282174 t=0.99999999975017 |
Here 's a program to solve it using Euler 's method
(defn f [t y] (- t y))
(defn solveit [t0 y0 h its]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (f t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))
call f with t0 and y0 ,
add that to y0 ,
you 'd expect , each loop would take 7 cycles .
According to /proc / cpuinfo , my netbook runs at 1.662 GHz .
(def *cpuspeed* 1.662)
(defmacro cyclesperit [expr its]
`(let [start# (. System (nanoTime))
ret# ( ~@expr (/ 1.0 ~its) ~its )
finish# (. System (nanoTime))]
(int (/ (* *cpuspeed* (- finish# start#)) ~its))))
To two significant figures , the results of the timing expression
(cyclesperit (solveit 0.0 1.0) 1000000)
On my old desktop with clojure 1.2 : 2300 cycles
On my netbook with clojure 1.2 : 2800 cycles
On my netbook with clojure 1.3 : 2500 cycles
On my netbook with clojure 1.4 : 2400 cycles
terms of cycles / iteration . That 's not surprising as the netbook has an Atom processor , optimized
I 'm also assuming that the JVM itself has n't changed much since the original blog post . The
netbook timings were all done today on the same JVM , but the desktop timings are from a while
So we 're looking at a slowdown of about 300 times over what we could probably
course I 'm assuming that floating point operations take one cycle each )
(defn solveit-2 [t0 y0 h its]
(loop [t0 (double t0), y0 (double y0), h (double h), its (int its)]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its])))
(cyclesperit (solveit-2 0.0 1.0) 10000000)
On my old desktop with clojure 1.2 : 490 cycles
On my netbook with clojure 1.2 : 600 cycles
On my netbook with clojure 1.3 : 44 cycles
On my netbook with clojure 1.4 : 44 cycles
Again we see that my netbook is around 20 % slower , but the performance change between clojure 1.2
and clojure 1.3 was incredible .
There 's now about a factor of 6 between the clojure version and the imaginary assembler program
This is actually so impressive that I want to examine the three things in detail , to see how the
three things combine to cause the speedup . As I remember from clojure 1.2 , you needed all three
changes to see any great difference . In 1.4 you can type hint separately from making an internal loop target .
All timings with clojure 1.4.0 .
(defn f [t y] (- t y))
(defn solveit [t0 y0 h its]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (f t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))
(defn ^double tf [^double t ^double y] (- t y))
(defn solveit-1-1 [^double t0 ^double y0 ^double h ^long its]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (tf t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))
289
(defn solveit-1-2 [ ^double t0 ^double y0 ^double h ^long its]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))
44
Internal loop target
(defn solveit-1-3 [ ^double t0 ^double y0 ^double h ^long its]
(loop [t0 t0, y0 y0, h h, its its]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its])))
44
(defn solveit-2 [t0 y0 h its]
(loop [t0 (double t0), y0 (double y0), h (double h), its (int its)]
(if (> its 0)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its])))
44
This is pretty awesome . Type hints alone are giving us a factor of eight speedup , and inlining
the function then gives us another factor of 6 . The internal loop target , which was originally a
(defn solveit-3 [t0 y0 h its]
(loop [t0 (double t0), y0 (double y0), h (double h), its (int its)]
(if (> its (int 0))
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its])))
(cyclesperit (solveit-3 0.0 1.0) 10000000)
On my old desktop with clojure 1.2 : 70 cycles
On my netbook with clojure 1.2 : 90 cycles
On my netbook with clojure 1.3 : 100 cycles
On my netbook with clojure 1.4 : 100 cycles
The odd thing here is that the same program actually runs slightly slower in clojure 1.3/1.4 than
it does in clojure 1.2 . Not much , and they 're all considerably slower than clojure 1.4 's best
whereas in 1.2 it was the final piece of the puzzle as far as the compiler was concerned .
(defn solveit-3-a [ t0 y0 h its]
(loop [t0 (double t0), y0 (double y0), h (double h), its (long its)]
(if (> its (long 0 ))
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its])))
44
solveit-3 - a runs at the same 44 cycles / iteration as we 've been seeing before . I get the
(defn solveit-4 [t0 y0 h its]
(let [zero (int 0)]
(loop [t0 (double t0) y0 (double y0) h (double h) its (int its)]
(if (> its zero)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))))
(cyclesperit (solveit-4 0.0 1.0) 100000000)
On my old desktop with clojure 1.2 : 23 cycles
On my netbook with clojure 1.2 : 32 cycles
On my netbook with clojure 1.3 : 45 cycles
On my netbook with clojure 1.4 : 45 cycles
Clojure 1.3/1.4 is now running at its usual ( fast ) speed , despite the ints rather than longs in solveit-4
But Clojure 1.2 on this program is able to run about 33 % faster than the modern versions .
clojure 1.2 and 1.3 has actually slightly slowed down the optimized version .
But I have n't done any kind of close profiling to see if I can make 1.4 run any faster than 44 cycles / loop .
Someone in a comment on my earlier post said that they were seeing 8 cycles / second on their
Macintosh , so it might just be that there 's something weird about the Atom processor .
At any rate , it looks as though optimizing numeric code in Clojure is now dead easy . You just
(set! *unchecked-math* true)
Which I do n't think has an equivalent in 1.2 , which cuts a few cycles off :
(defn solveit-4 [t0 y0 h its]
(let [zero (long 0)]
(loop [t0 (double t0) y0 (double y0) h (double h) its (long its)]
(if (> its zero)
(let [t1 (+ t0 h)
y1 (+ y0 (* h (- t0 y0)))]
(recur t1 y1 h (dec its)))
[t0 y0 h its]))))
37
Incidentally , the following two java programs also produce the numbers that are seen above :
public class {
cycles / iteration : 32
public class {
cycles / iteration : 37
|
20c1c69e3a490feffc4a1c04722ca71d1d71898699fb16027b27dcc08c752a1c | iu-parfunc/haskell_dsl_tour | Helpers.hs | # LANGUAGE TypeSynonymInstances , FlexibleInstances , FlexibleContexts , DeriveDataTypeable #
module FrameworkHs.Helpers
(
-- * Types for compiler configuration and construction
P423Config (.. )
, PassM, getConfig, runPassM, orPassM
, P423Exception ( AssemblyFailedException
, ParseErrorException
, ASTParseException
, NoValidTestsException
, NoInvalidTestsException
, PassFailureException
, WrapperFailureException
)
, shortExcDescrip
, passFailure, passFailureM
-- , parseFailure
, parseFailureM
, P423Pass ( P423Pass
, pass
, passName
, wrapperName
, trace
)
, Option (..)
-- * Helpers for representations
, fixnumBits
, shiftFixnum
, maskFixnum
, maskVector
, maskPair
, maskProcedure
, maskBoolean
, tagFixnum
, tagPair
, tagProcedure
, tagVector
, tagBoolean
, tagNonfixnum
, repTrue
, repFalse
, repNil
, repVoid
, dispCar
, dispCdr
, dispVectorData
, dispVectorLength
, dispProcedureData
, dispProcedureCode
, sizePair
-- * An alternative `Show` class for printing to X86 assembly code:
, X86Print, format
, OpCode
-- * Emitting text to a handle
, GenM, Gen, gen, genLn, genJustLn
, hPutGenM, runGenM, showGen
, emitOp1, emitOp2, emitOp3
, emitLabelLabel
, emitLabel
, emitJumpLabel, emitJump
, emitEntry, emitExit
-- * Shorthands for common emissions:
, pushq, popq
, movq, leaq
-- * Pretty printing:
, PP(..), ppSexp, pppSexp
-- * Parsing
, parseListWithFinal
, parseUVar
, parseFVar
, parseLabel
, parseRelop
, parseBinop
, parseReg
, parseInt32
, parseInt64
, parseValPrim, parseEffectPrim, parsePredPrim
-- * Misc numeric and string helpers
, isInt32
, isInt64
, isUInt6
, isFixnum
, wordShift
, ash
, chomp
) where
import Prelude hiding (LT, EQ, GT)
import Blaze.ByteString.Builder as BBB
import Blaze.ByteString.Builder.Char8 (fromChar, fromString, fromShow)
import Data.List (intersperse)
import Data.Set (size, fromList)
import Data.Char (isDigit, isSpace, isAlpha)
import Data.Int
import Data.Bits
import Data.ByteString (ByteString, hPut)
-- import Data.ByteString (ByteString, hPut)
import Data.ByteString.Char8 (unpack)
import Control.Monad (unless, mapM_)
import Control.Monad.Reader
import Control.Monad.Writer
import Control.Monad.Error
import qualified Data.Set as S
import Text.Parsec.Error (ParseError)
import qualified Text.PrettyPrint.HughesPJ as P
import Control.Exception
import Data.Typeable
import System.IO
import FrameworkHs.Prims
import FrameworkHs.SExpReader.LispData
-- | Compiler-wide configuration: the register conventions the generated
-- code must follow, plus a flag controlling the test wrappers.
data P423Config =
  P423Config
    { framePointerRegister      :: Reg    -- ^ Register holding the frame pointer
    , allocationPointerRegister :: Reg    -- ^ Register holding the allocation pointer
    , returnAddressRegister     :: Reg    -- ^ Register holding the return address
    , returnValueRegister       :: Reg    -- ^ Register holding the return value
    , parameterRegisters        :: [Reg]  -- ^ Registers used to pass parameters
    , runWrappers               :: Bool   -- ^ presumably: run the interpreting wrappers after each pass -- confirm
    }
-- | A monad for implementing passes. It provides access to the global
-- configuration (via 'ReaderT'), and also handles errors (via 'Either').
type PassM = ReaderT P423Config (Either String)

-- | Fetch the global configuration from the surrounding monad.
getConfig :: (MonadReader P423Config m) => m P423Config
getConfig = ask
-- | A compiler pass bundled with the metadata the test driver needs.
data P423Pass a b =
  P423Pass
    { pass        :: P423Config -> a -> b  -- ^ The implementation of the pass
    , passName    :: String                -- ^ The canonical name of the pass
    , wrapperName :: String                -- ^ The name of the "wrapper" for
                                           --   interpreting the pass's output
    , trace       :: Bool                  -- ^ Debug this pass?
    }
-- | This runs a PassM computation with a given configuration. Any uncaught
-- failures become true errors.
-- | Run a 'PassM' computation under the given configuration; any uncaught
-- failure is promoted to a true 'error'.
runPassM :: P423Config -> PassM a -> a
runPassM conf m = either error id (runReaderT m conf)
-- | Backtracking. If the first action throws an exception, try the second.
-- | Backtracking combinator: run the first action, and if it fails fall
-- back to the second.
orPassM :: PassM a -> PassM a -> PassM a
orPassM first fallback = do
  cfg <- getConfig
  either (const fallback) return (runReaderT first cfg)
-- | Throw an error inside a compiler pass, tagged with the caller's name.
passFailureM :: String -> String -> PassM a
passFailureM who msg = lift (Left (who ++ ": " ++ msg))
-- passFailureM = return . Left . PassFailureException ""
-- | Throw a 'PassFailureException' from pure code (non-monadic version).
passFailure :: String -> String -> a
passFailure who = throw . PassFailureException who
-- | Optional information: either take the 'Default' or supply an 'Option'.
data Option a = Default | Option a
-- | Split a string at the first occurrence of the separator character,
-- dropping the separator itself.  If the separator is absent, the whole
-- string is the prefix and the suffix is empty.
split :: Char -> String -> (String, String)
split sep str =
  case break (== sep) str of
    (prefix, [])       -> (prefix, [])
    (prefix, _ : rest) -> (prefix, rest)
------------------------------------------------------------
-- Exceptions ----------------------------------------------
-- | Everything that can go wrong while compiling and testing a pass.
data P423Exception = AssemblyFailedException String        -- ^ Assembler rejected the generated code
                   | ASTParseException String              -- ^ Could not parse an AST
                   | ParseErrorException ParseError        -- ^ S-expression reader failure
                   | NoValidTestsException                 -- ^ No valid test inputs found
                   | NoInvalidTestsException               -- ^ No invalid test inputs found
                   | PassFailureException String String    -- ^ Pass name and error message
                   | WrapperFailureException String String -- ^ Wrapper name and error message
  deriving (Typeable)

instance Exception P423Exception
-- | Human-readable rendering, reusing 'shortExcDescrip' for the prefix.
instance Show P423Exception where
  show e@(AssemblyFailedException e')   = shortExcDescrip e ++ ": " ++ show e'
  show e@(ParseErrorException e')       = shortExcDescrip e ++ ": " ++ show e'
  show e@(ASTParseException s)          = shortExcDescrip e ++ ": " ++ s
  show e@(NoValidTestsException)        = shortExcDescrip e
  show e@(NoInvalidTestsException)      = shortExcDescrip e
  -- show e@(PassFailureException p e') = shortExcDescrip e ++ ": " ++ e'
  -- (shortExcDescrip already includes the pass name and message)
  show e@(PassFailureException p e')    = shortExcDescrip e
  show e@(WrapperFailureException w e') = shortExcDescrip e ++ ": " ++ e'
-- | One-line description of an exception, used as the 'Show' prefix.
shortExcDescrip :: P423Exception -> String
shortExcDescrip (AssemblyFailedException _)   = "Assembly failure"
shortExcDescrip (ParseErrorException _)       = "SExp parse failure in tests"
shortExcDescrip (ASTParseException _)         = "AST parse failure"
shortExcDescrip NoValidTestsException         = "Couldn't find valid tests"
shortExcDescrip NoInvalidTestsException       = "Couldn't find invalid tests"
shortExcDescrip (PassFailureException p e)    = "Pass failure, " ++ p ++ ": " ++ e
shortExcDescrip (WrapperFailureException w _) = "Wrapper failure (" ++ w ++ ")"
------------------------------------------------------------
-- Emitting ------------------------------------------------
-- | Implementation type for a code generator: a writer of assembly text
-- ('Builder') layered over 'PassM'.
type GenM = WriterT Builder PassM

-- | A code generator with only an output and no result
type Gen = GenM ()
-- | Append a formatted value to the generator's output.
gen :: (X86Print a) => a -> Gen
gen = tell . format
-- | Append the given output followed by a newline.
genLn :: (X86Print a) => a -> Gen
genLn x = gen x >> genJustLn

-- | Append just a newline.
genJustLn :: Gen
genJustLn = tell (fromChar '\n')
-- | Run a generator action and write its output to a handle.  A generation
-- failure is rethrown as an IO 'userError'.
hPutGenM :: P423Config -> GenM a -> Handle -> IO ()
hPutGenM c g h =
  either bad good (runReaderT (runWriterT g) c)
  where
    bad msg     = throwIO (userError ("Error during code generation: " ++ msg))
    good (_, b) = hPut h (BBB.toByteString b)
-- | Run a generator action, returning its result together with the
-- rendered output bytes.
runGenM :: P423Config -> GenM a -> Either String (a, ByteString)
runGenM c g = fmap (fmap BBB.toByteString) (runReaderT (runWriterT g) c)
-- | Given a P423Config, show the output of a Gen.
-- NOTE(review): the outer 'show' is applied to a 'String', so the result is
-- quoted and escaped a second time; looks deliberate for debugging output,
-- but confirm before relying on the exact format.
showGen :: P423Config -> Gen -> String
showGen c g = show $ case runGenM c g of
  Left s -> "Error: " ++ s
  Right ((), b) -> show b
-- | Types that can be rendered as x86-64 assembly operands
-- (AT&T-style prefixes: @$@ for immediates, @%@ for registers).
class X86Print a where
  format :: a -> Builder

-- Debug-only Show for Builder: renders the accumulated bytes.
instance Show Builder where
  show = show . toByteString

instance X86Print Builder where
  format = id

instance X86Print ByteString where
  format = pp

instance X86Print String where
  format = pp

-- Integers are immediate operands: $n
instance X86Print Integer where
  format i = fromString "$" `mappend` pp i

-- Registers: %rax etc.
instance X86Print Reg where
  format r = fromString "%" `mappend` pp r

-- Labels are RIP-relative: Ln(%rip).  Note the label's name part is
-- dropped; only the numeric index is used.
instance X86Print Label where
  format (L name ind) = mconcat [fromString "L", pp ind, fromString "(%rip)"]

-- Displacement operands: off(%reg)
instance X86Print Disp where
  format (D reg off) = mconcat [pp off, fromString "(%", pp reg, fromString ")"]

-- Indexed operands: (%base, %index)
instance X86Print Ind where
  format (I bReg iReg) = mconcat [fromString "(%", pp bReg, fromString ", %", pp iReg, fromString ")"]
-- | Assembly opcode mnemonics are plain strings.
type OpCode = String

-- | Emit an opcode that takes no operands.
emitOp1 :: OpCode -> Gen
emitOp1 op = gen " " >> genLn op
-- | Emit an opcode with one argument
-- | Emit an opcode followed by a single operand.
emitOp2 :: (X86Print a) => OpCode -> a -> Gen
emitOp2 op operand = do
  mapM_ gen [" ", op, " "]
  genLn operand
-- | Emit an opcode with two arguments
-- | Emit an opcode followed by two comma-separated operands.
emitOp3 :: (X86Print a, X86Print b) => OpCode -> a -> b -> Gen
emitOp3 op src dst = do
  mapM_ gen [" ", op, " "]
  gen src
  gen ", "
  genLn dst
-- | Emit a label definition line ("Ln:") from the `Label' type.
-- Only the numeric index is used; the name part is dropped.
emitLabelLabel :: Label -> Gen
emitLabelLabel (L _name ind) = gen "L" >> gen (pp ind) >> genLn ":"
-- | Emit a label definition line from any printable value.
emitLabel :: (X86Print a) => a -> Gen
emitLabel lbl = gen lbl >> genLn ":"
-- | Emit an opcode whose operand is a direct label ("Ln").
emitJumpLabel :: OpCode -> Label -> Gen
emitJumpLabel op (L _name ind) = emitOp2 op (fromString "L" `mappend` pp ind)

-- | Emit an indirect jump opcode (operand prefixed with "*").
emitJump :: (X86Print a) => OpCode -> a -> Gen
emitJump op target = emitOp2 op (fromString "*" `mappend` format target)
--emitOp1 :: Handle -> OpCode -> IO ()
--emitOp1 h op = hPutStrLn h (" " ++ op)
--
--emitOp2 :: (X86Print a) => Handle -> OpCode -> a -> IO ()
--emitOp2 h op a = hPutStrLn h (" " ++ op ++ " " ++ format a)
--
--emitOp3 :: (X86Print a, X86Print b) => Handle -> OpCode -> a -> b -> IO ()
--emitOp3 h op a b = hPutStrLn h (" " ++ op ++ " " ++ format a ++ ", " ++ format b)
--
--emitLabel :: (X86Print a) => Handle -> a -> IO ()
--emitLabel h a = hPutStrLn h (format a ++ ":")
--
--emitJumpLabel :: Handle -> OpCode -> Label -> IO ()
--emitJumpLabel h op (L name ind) = emitOp2 h op ("L" ++ pp ind)
--
--emitJump :: (X86Print a) => Handle -> OpCode -> a -> IO ()
--emitJump = emitOp2
-- | Emit the pushq opcode
pushq :: (X86Print a) => a -> Gen
pushq = emitOp2 "pushq"

-- | Emit the popq opcode
popq :: (X86Print a) => a -> Gen
popq = emitOp2 "popq"

-- | Emit the movq opcode
movq :: (X86Print a, X86Print b) => a -> b -> Gen
movq = emitOp3 "movq"

-- | Emit the leaq opcode
leaq :: (X86Print a, X86Print b) => a -> b -> Gen
leaq = emitOp3 "leaq"
--pushq, popq :: (X86Print a) => Handle -> a -> IO ()
--movq, leaq :: (X86Print a, X86Print b) => Handle -> a -> b -> IO ()
--pushq h = emitOp2 h "pushq"
--popq h = emitOp2 h "popq"
--movq h = emitOp3 h "movq"
--leaq h = emitOp3 h "leaq"
-- | Emit the boilerplate code for entering the scheme runtime
-- | Entry boilerplate: export and define @_scheme_entry@, save the
-- callee-saved registers, load the frame and allocation pointers from the
-- C calling convention's first two argument registers, and stash the exit
-- address in the configured return-address register.
emitEntry :: Gen
emitEntry = do
  c <- getConfig
  emitOp2 ".globl" "_scheme_entry"
  emitLabel "_scheme_entry"
  mapM_ pushq [RBX, RBP, R12, R13, R14, R15]
  movq RDI (framePointerRegister c)
  movq RSI (allocationPointerRegister c)
  leaq "_scheme_exit(%rip)" (returnAddressRegister c)
--emitEntry :: P423Config -> Handle -> IO ()
--emitEntry c h =
-- do emitOp2 h ".globl" "_scheme_entry"
-- emitLabel h "_scheme_entry"
-- pushq h RBX
-- pushq h RBP
-- pushq h R12
-- pushq h R13
-- pushq h R14
-- pushq h R15
--     movq h RDI (framePointerRegister c)
--     movq h RSI (allocationPointerRegister c)
-- leaq h "_scheme_exit(%rip)" (returnAddressRegister c)
-- | Exit boilerplate: define @_scheme_exit@, move the return value into
-- RAX if it is not already there, restore the callee-saved registers in
-- reverse order, and return to C.
emitExit :: Gen
emitExit = do
  emitLabel "_scheme_exit"
  c <- getConfig
  unless (returnValueRegister c == RAX) $
    movq (returnValueRegister c) RAX
  mapM_ popq [R15, R14, R13, R12, RBP, RBX]
  emitOp1 "ret"
--emitExit :: P423Config -> Handle -> IO ()
--emitExit c h =
-- do emitLabel h "_scheme_exit"
-- unless (returnValueRegister c == RAX)
--       (movq h (returnValueRegister c) RAX)
--     popq h R15
--     popq h R14
--     popq h R13
--     popq h R12
--     popq h RBP
--     popq h RBX
--     emitOp1 h "ret"
------------------------------------------------------------
-- Pretty Printing -----------------------------------------
-- | Printable AST fragments.
class PP a where
  -- | Print to a Scheme SExp representation.
  pp :: a -> Builder
  -- | Pretty print the same Scheme SExp representation:
  ppp :: a -> P.Doc
  ppp = P.text . unpack . BBB.toByteString . pp
-- | Build a list SExp
-- | Render a list SExp: parenthesize the space-separated elements.
ppSexp :: [Builder] -> Builder
ppSexp items = open `mappend` body `mappend` close
  where
    open  = fromString "("
    close = fromString ")"
    body  = mconcat (intersperse (fromString " ") items)
-- | Build a multi-line pretty-printed SExp
-- pppSexp :: [P.Doc] -> P.Doc
-- pppSexp ls = P.parens$ P.sep ls
-- Getting a hang for keywords is a bit hard:
-- | Pretty-print a list SExp.  If the head is a known Scheme keyword it is
-- kept on the same line as its first argument; otherwise all elements are
-- simply separated.
pppSexp :: [P.Doc] -> P.Doc
-- pppSexp []    = P.parens P.empty
-- pppSexp [a,b] = P.parens$ P.sep [a,b]
pppSexp (h1:h2:ls) | isSchemeKwd (P.render h1) = P.parens$ P.sep$ (h1 P.<+> h2):ls
--                 | otherwise =
pppSexp ls = P.parens$ P.sep ls
-- | Is this symbol one of the Scheme keywords that 'pppSexp' keeps on the
-- same line as its first argument?
isSchemeKwd :: String -> Bool
-- isSchemeKwd = all (\c -> isAlpha c || c == '-')
isSchemeKwd = flip elem ["locals","letrec","lambda","register-conflict"]
instance PP Builder where
  pp = id

instance PP ByteString where
  pp = fromByteString

instance PP String where
  pp  = fromString
  ppp = P.text

instance PP Bool where
  pp = fromString . show

instance PP Integer where
  pp  = fromShow
  ppp = P.text . show

-- Unique variables print as name.index
instance PP UVar where
  pp (UV name ind) = mconcat [fromString name, fromString ".", fromShow ind]

-- Frame variables print as fvN
instance PP FVar where
  pp (FV ind) = mconcat [fromString "fv", fromShow ind]

-- Labels print as name$index
instance PP Label where
  pp (L name ind) = mconcat [fromString name, fromChar '$', fromShow ind]

-- Displacement operands print as (disp reg off)
instance PP Disp where
  pp (D r i) = ppSexp [fromString "disp", (pp r), (pp i)]

-- Index operands print as (index base index)
instance PP Ind where
  pp (I r1 r2) = ppSexp [fromString "index", (pp r1), (pp r2)]
-- Relational operators print as their Scheme symbols.
instance PP Relop where
  pp r = case r of
    LT  -> fromString "<"
    LTE -> fromString "<="
    EQ  -> fromString "="
    GTE -> fromString ">="
    GT  -> fromString ">"

-- Binary operators print as their Scheme symbols.
instance PP Binop where
  pp b = case b of
    MUL    -> fromString "*"
    ADD    -> fromString "+"
    SUB    -> fromString "-"
    LOGAND -> fromString "logand"
    LOGOR  -> fromString "logor"
    SRA    -> fromString "sra"

-- Registers print as their lowercase x86-64 names.
instance PP Reg where
  pp r = case r of
    RAX -> fromString "rax"
    RCX -> fromString "rcx"
    RDX -> fromString "rdx"
    RBX -> fromString "rbx"
    RBP -> fromString "rbp"
    RSI -> fromString "rsi"
    RDI -> fromString "rdi"
    R8  -> fromString "r8"
    R9  -> fromString "r9"
    R10 -> fromString "r10"
    R11 -> fromString "r11"
    R12 -> fromString "r12"
    R13 -> fromString "r13"
    R14 -> fromString "r14"
    R15 -> fromString "r15"
-- Effect primitives render as their Scheme names.
instance PP EffectPrim where
  pp b = case b of
    SetCar       -> fromString "set-car!"
    SetCdr       -> fromString "set-cdr!"
    VectorSet    -> fromString "vector-set!"
    ProcedureSet -> fromString "procedure-set!"

-- Predicate primitives render as their Scheme names.
instance PP PredPrim where
  pp p = fromString $ case p of
    Lt -> "<" ; Lte -> "<=" ; Eq -> "=" ; Gte -> ">=" ; Gt -> ">"
    BooleanP -> "boolean?" ; EqP -> "eq?" ; FixnumP -> "fixnum?"
    NullP -> "null?" ; PairP -> "pair?" ; VectorP -> "vector?"
    ProcedureP -> "procedure?"

-- Value primitives render as their Scheme names.
instance PP ValPrim where
  pp p = fromString$ case p of
    Times -> "*" ; Plus -> "+" ; Minus -> "-"; Car -> "car" ; Cdr -> "cdr" ; Cons -> "cons"
    MakeVector -> "make-vector" ; VectorLength -> "vector-length" ; VectorRef -> "vector-ref"
    Void -> "void"
    MakeProcedure -> "make-procedure" ; ProcedureCode -> "procedure-code" ; ProcedureRef -> "procedure-ref"

-- Immediate constants render as Scheme literals.
instance PP Immediate where
  pp p = fromString$ case p of
    Fixnum i -> show i
    NullList -> "()"
    HashT    -> "#t"
    HashF    -> "#f"
-- Scheme datum printing: proper lists render as (a b c), improper pairs as
-- (a . d), vectors as #(...).
instance PP Datum where
  pp p = case p of
    PairDatum car cdr ->
      case gatherPairs cdr of
        -- the cdr chain ends in '(): print as a proper list
        Just ls -> parens (pp car `mappend` (mconcat (map ((spc `mappend`) . pp) ls)))
        -- otherwise fall back to dotted-pair notation
        Nothing -> parens (pp car `mappend` (fromString " . ") `mappend` pp cdr)
    VectorDatum ls -> fromString "#" `mappend`
                      parens (mconcat (intersperse spc (map pp ls)))
    ImmediateDatum i -> pp i
   where
    spc = fromString " "
    parens bld = fromString "(" `mappend` bld `mappend` fromString ")"
    -- Collect the spine of a proper list; Nothing if the list is improper.
    gatherPairs (ImmediateDatum NullList) = Just []
    gatherPairs (PairDatum x y) =
      case gatherPairs y of
        Nothing -> Nothing
        Just ls -> Just (x:ls)
    gatherPairs _ = Nothing
------------------------------------------------------------
-- Parsing -------------------------------------------------
-- | Throwing an error inside the "parser".
parseFailureM :: String -> PassM a
parseFailureM = lift . Left
-- parseFailureM = return . Left . ParseErrorException
-- | Throwing an error inside the parser, non-monadic version.
-- parseFailure :: String -> a
-- parseFailure = throw . ParseErrorException
-- | Parse a decimal index suffix.  A lone \"0\" is accepted, any other
-- leading zero is rejected, and so is any non-digit character.
-- NOTE(review): the empty string passes the digit check and would make
-- 'read' fail at runtime; callers appear to guard against it -- confirm.
parseSuffix :: String -> PassM Integer
parseSuffix i@('0':rest)
  | null rest = return 0
  | otherwise = parseFailureM ("parseSuffix: Leading zero in index: " ++ i)
parseSuffix i
  | all isDigit i = return (read i)
  | otherwise     = parseFailureM ("parseSuffix: Not a number: " ++ i)
-- | Parse a non-empty list whose last element uses a different parser than
-- the preceding ones; returns the initial elements and the final one.
parseListWithFinal :: (LispVal -> PassM a) -> (LispVal -> PassM b) ->
                      [LispVal] -> PassM ([a],b)
parseListWithFinal fa fb [] = parseFailureM ("parseListWithFinal: List must have at least one element")
parseListWithFinal fa fb [b] =
  do b <- fb b
     return ([],b)
parseListWithFinal fa fb (a:asb) =
  do a <- fa a
     (as,b) <- parseListWithFinal fa fb asb
     return (a:as,b)
-- | Parse a unique variable of the form @name.index@.
parseUVar :: LispVal -> PassM UVar
parseUVar (Symbol s) = case (split '.' s) of
  (_,"")     -> parseFailureM ("parseUVar: No index: " ++ s)
  (name,ind) -> do ind <- parseSuffix ind; return (UV name ind)
parseUVar e = parseFailureM ("parseUVar: Not a symbol: " ++ show e)
-- | Parse a frame variable of the form @fvN@.
parseFVar :: LispVal -> PassM FVar
parseFVar (Symbol s) = case s of
  ('f':'v':ind) -> do ind <- parseSuffix ind; return (FV ind)
  _             -> parseFailureM ("parseFVar: Not a framevar: " ++ s)
parseFVar e = parseFailureM ("parseFVar: Not a symbol: " ++ show e)
-- | Parse a label of the form @name$index@.
parseLabel :: LispVal -> PassM Label
parseLabel (Symbol s) = case (split '$' s) of
  (_,"")     -> parseFailureM ("parseLabel: No index: " ++ s)
  (name,ind) -> do ind <- parseSuffix ind; return (L name ind)
parseLabel e = parseFailureM ("parseLabel: Not a symbol: " ++ show e)
-- parseLabel :: LispVal -> Exc Label
-- parseLabel (Symbol s) = case (split '$' s) of
-- (_,"") -> failure ("No index: " ++ s)
--   (name,ind) -> do ind <- parseSuffix ind; return (L name ind)
-- parseLabel e = failure ("Not a symbol: " ++ show e)
-- | Parse a relational-operator symbol (inverse of the 'PP' instance).
parseRelop :: LispVal -> PassM Relop
parseRelop (Symbol s) = case s of
  "<"  -> return LT
  "<=" -> return LTE
  "="  -> return EQ
  ">=" -> return GTE
  ">"  -> return GT
  e    -> parseFailureM ("parseRelop: Not a relop: " ++ e)
parseRelop e = parseFailureM ("parseRelop: Not a symbol: " ++ show e)
-- | Parse a binary-operator symbol (inverse of the 'PP' instance).
parseBinop :: LispVal -> PassM Binop
parseBinop (Symbol s) = case s of
  "logand" -> return LOGAND
  "logor"  -> return LOGOR
  "sra"    -> return SRA
  "*"      -> return MUL
  "+"      -> return ADD
  "-"      -> return SUB
  e        -> parseFailureM ("parseBinop: Not a binop: " ++ e)
parseBinop e = parseFailureM ("parseBinop: Not a symbol: " ++ show e)
-- | Parse a lowercase x86-64 register name (inverse of the 'PP' instance).
parseReg :: LispVal -> PassM Reg
parseReg (Symbol s) = case s of
  "rax" -> return RAX
  "rcx" -> return RCX
  "rdx" -> return RDX
  "rbp" -> return RBP
  "rbx" -> return RBX
  "rsi" -> return RSI
  "rdi" -> return RDI
  "r8"  -> return R8
  "r9"  -> return R9
  "r10" -> return R10
  "r11" -> return R11
  "r12" -> return R12
  "r13" -> return R13
  "r14" -> return R14
  "r15" -> return R15
  e     -> parseFailureM ("parseReg: Not a register: " ++ e)
parseReg e = parseFailureM ("parseReg: Not a symbol: " ++ show e)
-- | Parse an integer literal that must fit in a signed 32-bit word.
parseInt32 :: LispVal -> PassM Integer
parseInt32 (IntNumber i) = if isInt32 n
                           then return n
                           else parseFailureM ("parseInt32: Out of range: " ++ show i)
  where n = fromIntegral i
parseInt32 e = parseFailureM ("parseInt32: Not an int: " ++ show e)
-- | Parse an integer literal that must fit in a signed 64-bit word.
parseInt64 :: LispVal -> PassM Integer
parseInt64 (IntNumber i) = if isInt64 n
                           then return (fromIntegral n)
                           else parseFailureM ("parseInt64: Out of range: " ++ show i)
  where n = fromIntegral i
parseInt64 e = parseFailureM ("parseInt64: Not an int: " ++ show e)
-- TODO: Could use a single association list to go both directions:
-- | Parse a value-primitive symbol.  Inverse of the 'PP' instance for
-- 'ValPrim'.
parseValPrim :: LispVal -> PassM ValPrim
parseValPrim (Symbol s) = case s of
  "*"              -> return Times
  "+"              -> return Plus
  "-"              -> return Minus
  "car"            -> return Car
  "cdr"            -> return Cdr
  "cons"           -> return Cons
  "make-vector"    -> return MakeVector
  "vector-length"  -> return VectorLength
  "vector-ref"     -> return VectorRef
  "void"           -> return Void
  -- These are printed by the PP instance, so accept them here as well:
  "make-procedure" -> return MakeProcedure
  "procedure-code" -> return ProcedureCode
  "procedure-ref"  -> return ProcedureRef
  e -> parseFailureM ("parseValPrim: Not a value primitive: " ++ e)
parseValPrim e = parseFailureM ("parseValPrim: Not a symbol: " ++ show e)
-- | Parse a predicate-primitive symbol (inverse of the 'PP' instance).
parsePredPrim :: LispVal -> PassM PredPrim
parsePredPrim (Symbol s) = case s of
  "<"          -> return Lt
  "<="         -> return Lte
  "="          -> return Eq
  ">="         -> return Gte
  ">"          -> return Gt
  "boolean?"   -> return BooleanP
  "eq?"        -> return EqP
  "fixnum?"    -> return FixnumP
  "null?"      -> return NullP
  "pair?"      -> return PairP
  "vector?"    -> return VectorP
  "procedure?" -> return ProcedureP
  e            -> parseFailureM ("parsePredPrim: Not a pred primitive: " ++ e)
parsePredPrim e = parseFailureM ("parsePredPrim: Not a symbol: " ++ show e)
-- | Parse an effect-primitive symbol.  Inverse of the 'PP' instance for
-- 'EffectPrim'.
parseEffectPrim :: LispVal -> PassM EffectPrim
parseEffectPrim (Symbol s) = case s of
  "set-car!"       -> return SetCar
  "set-cdr!"       -> return SetCdr
  "vector-set!"    -> return VectorSet
  -- Printed by the PP instance, so accept it here as well:
  "procedure-set!" -> return ProcedureSet
  e -> parseFailureM ("parseEffectPrim: Not an effect primitive: " ++ e)
parseEffectPrim e = parseFailureM ("parseEffectPrim: Not a symbol: " ++ show e)
------------------------------------------------------------
-- Parse Helpers -------------------------------------------
-- | Is @i@ representable as a two's-complement signed integer of @r@ bits?
inBitRange :: (Integral a) => Integer -> a -> Bool
inBitRange r i = lo <= n && n <= hi
  where
    n  = toInteger i
    lo = negate (2 ^ (r - 1))
    hi = 2 ^ (r - 1) - 1
-- | Does the value fit in a signed 32-bit word?
isInt32 :: Integral a => a -> Bool
isInt32 = inBitRange 32

-- | Does the value fit in a signed 64-bit word?
isInt64 :: Integral a => a -> Bool
isInt64 = inBitRange 64
-- | Does the value fit in the bit range of a boxed fixnum ('fixnumBits')?
isFixnum :: Integral a => a -> Bool
isFixnum = inBitRange fixnumBits
-- | Is the value an unsigned 6-bit integer (0..63)?
isUInt6 :: Integer -> Bool
isUInt6 n = not (n < 0 || n > 63)
-- | Terms that carry a numeric index suffix, used to check uniqueness.
class SuffixTerm a where
  extractSuffix :: a -> Integer
  -- | Are all suffixes in the list distinct?
  uniqueSuffixes :: [a] -> Bool
  uniqueSuffixes as = isSet $ map extractSuffix as
-- | Does the list contain no duplicates?
isSet :: Ord a => [a] -> Bool
isSet xs = S.size (S.fromList xs) == length xs
-- Suffix extraction for the three indexed term kinds.
instance SuffixTerm UVar where
  extractSuffix (UV name ind) = ind

instance SuffixTerm FVar where
  extractSuffix (FV ind) = ind

instance SuffixTerm Label where
  extractSuffix (L name ind) = ind
-- | Shift converting a word index into a byte offset (2^3 = 8 bytes).
wordShift :: Integer
wordShift = 3

-- | Arithmetic shift left: @ash n m@ multiplies @m@ by @2^n@.
ash :: Integer -> Integer -> Integer
ash n m = m * 2 ^ n
-- | Remove trailing whitespace from a string.  (Despite the original
-- comment, leading whitespace is kept -- the double reverse only strips
-- the end, matching the conventional meaning of "chomp".)
chomp :: String -> String
chomp = reverse . dropWhile isSpace . reverse
-- | Bit range of a valid boxed signed immediate integer:
-- a 64-bit word minus the tag shift (i.e. 61 bits).
fixnumBits :: Integer
fixnumBits = 64 - (fromIntegral shiftFixnum)
-- | Mask selecting the three low tag bits of a fixnum (0b111).
maskFixnum :: Int64
maskFixnum = 7

-- | Mask selecting the three low tag bits of a pair pointer (0b111).
maskPair :: Int64
maskPair = 7

-- | Mask selecting the three low tag bits of a vector pointer (0b111).
maskVector :: Int64
maskVector = 7

-- | Mask selecting the three low tag bits of a procedure pointer (0b111).
maskProcedure :: Int64
maskProcedure = 7

-- | Mask for booleans (0b11110111): every bit except the one that
-- distinguishes #t from #f.
maskBoolean :: Int64
maskBoolean = 247
-- | Left-shift for integer immediates: three low bits are reserved for
-- the type tag.
shiftFixnum :: Int
shiftFixnum = 3
-- | Tag for fixnum values
tagFixnum :: Integer
tagFixnum = 0x0

-- | Tag for pair values
tagPair :: Integer
tagPair = 0x1

-- | Tag for procedure values
tagProcedure :: Integer
tagProcedure = 0x2

-- | Tag for vector values
tagVector :: Integer
tagVector = 0x3

-- | Tag for boolean values
tagBoolean :: Integer
tagBoolean = 0x6

-- | Shared tag for the non-fixnum immediates (#t, #f, '(), void)
tagNonfixnum :: Integer
tagNonfixnum = 0x6
-- Machine representations of the immediate constants: the payload is
-- shifted past the tag bits, then the shared non-fixnum tag is added.
repFalse :: Integer
repFalse = shiftL 0x0 shiftFixnum + tagNonfixnum

repTrue :: Integer
repTrue = shiftL 0x1 shiftFixnum + tagNonfixnum

repNil :: Integer
repNil = shiftL 0x2 shiftFixnum + tagNonfixnum

repVoid :: Integer
repVoid = shiftL 0x3 shiftFixnum + tagNonfixnum
-- Byte offsets of heap-object fields (8-byte words).
dispCar :: Integer
dispCar = 0

dispCdr :: Integer
dispCdr = 8

-- | Size of a pair in bytes (two words).
sizePair :: Integer
sizePair = 2 * dispCdr

dispVectorLength :: Integer
dispVectorLength = 0

dispVectorData :: Integer
dispVectorData = 8

dispProcedureCode :: Integer
dispProcedureCode = 0

dispProcedureData :: Integer
dispProcedureData = 8
| null | https://raw.githubusercontent.com/iu-parfunc/haskell_dsl_tour/f75a7e492a1e5d219a77fb128f70441d54a706eb/middle_end/nanopass/exercise/FrameworkHs/Helpers.hs | haskell | * Types for compiler configuration and construction
, parseFailure
* Helpers for representations
* An alternative `Show` class for printing to X86 assembly code:
* Emitting text to a handle
* Shorthands for common emissions:
* Pretty printing:
* Parsing
* Misc numeric and string helpers
| A monad for implementing passes. It provides access to the global
configuration, and also handles errors.
| Getting the configuration
| A compiler pass with metadata
^ The implementation of the pass
^ The canonical name of the pass
^ The name of the "wrapper" for
interpreting the pass's output
^ Debug this pass?
| Throwing an error inside a compiler pass.
passFailureM = return . Left . PassFailureException ""
| Throwing an error, non-monadic version.
| Optional information
----------------------------------------------------------
Exceptions ----------------------------------------------
----------------------------------------------------------
Emitting ------------------------------------------------
| Implementation type for a code generator
| A code generator with only an output and no result
| Add to the output of a generator
| Add a newline along with the given output
| Put the output of running a generator action to a handle
| Get the result and output of a generator action
| Given a P423Config, show the output of a Gen
| Emit an opcode with no arguments
| Emit a label from a the `Label' type
| Emit a label from a literal
| Emit an opcode with a label as its operand
| Emit a jump opcode
emitOp1 :: Handle -> OpCode -> IO ()
emitOp1 h op = hPutStrLn h (" " ++ op)
emitOp2 :: (X86Print a) => Handle -> OpCode -> a -> IO ()
emitOp2 h op a = hPutStrLn h (" " ++ op ++ " " ++ format a)
emitOp3 :: (X86Print a, X86Print b) => Handle -> OpCode -> a -> b -> IO ()
emitOp3 h op a b = hPutStrLn h (" " ++ op ++ " " ++ format a ++ ", " ++ format b)
emitLabel :: (X86Print a) => Handle -> a -> IO ()
emitLabel h a = hPutStrLn h (format a ++ ":")
emitJumpLabel :: Handle -> OpCode -> Label -> IO ()
emitJump :: (X86Print a) => Handle -> OpCode -> a -> IO ()
emitJump = emitOp2
| Emit the pushq opcode
| Emit the popq opcode
| Emit the leaq opcode
leaq h = emitOp3 h "leaq"
| Emit the boilderplate code for entering the scheme runtime
emitEntry :: P423Config -> Handle -> IO ()
do emitOp2 h ".globl" "_scheme_entry"
emitLabel h "_scheme_entry"
pushq h RBX
pushq h RBP
pushq h R12
pushq h R13
pushq h R14
pushq h R15
leaq h "_scheme_exit(%rip)" (returnAddressRegister c)
| Emit the boilerplate code for exiting the scheme runtime
emitExit :: P423Config -> Handle -> IO ()
emitExit c h =
do emitLabel h "_scheme_exit"
unless (returnValueRegister c == RAX)
popq h R13
popq h RBP
----------------------------------------------------------
Pretty Printing -----------------------------------------
pppSexp :: [P.Doc] -> P.Doc
pppSexp ls = P.parens$ P.sep ls
Getting a hang for keywords is a bit hard:
pppSexp [] = P.parens P.empty
pppSexp [a,b] = P.parens$ P.sep [a,b]
| otherwise =
----------------------------------------------------------
Parsing -------------------------------------------------
| Throwing an error inside the "parser".
| Throwing an error inside the parser, non-monadic version.
parseFailure :: String -> a
parseFailure = throw . ParseErrorException
| Parse a number
parseLabel :: LispVal -> Exc Label
parseLabel (Symbol s) = case (split '$' s) of
(_,"") -> failure ("No index: " ++ s)
parseLabel e = failure ("Not a symbol: " ++ show e)
TODO: Could use a single association list to go both directions:
----------------------------------------------------------
Parse Helpers -------------------------------------------
| Remove whitespace from both ends of a string.
| Bit range of a valid boxed signed immediate integer
| Left-shift for integer immediates
| Tag for fixnum values
| Tag for pair values
| Tag for procedure values | # LANGUAGE TypeSynonymInstances , FlexibleInstances , FlexibleContexts , DeriveDataTypeable #
module FrameworkHs.Helpers
(
P423Config (.. )
, PassM, getConfig, runPassM, orPassM
, P423Exception ( AssemblyFailedException
, ParseErrorException
, ASTParseException
, NoValidTestsException
, NoInvalidTestsException
, PassFailureException
, WrapperFailureException
)
, shortExcDescrip
, passFailure, passFailureM
, parseFailureM
, P423Pass ( P423Pass
, pass
, passName
, wrapperName
, trace
)
, Option (..)
, fixnumBits
, shiftFixnum
, maskFixnum
, maskVector
, maskPair
, maskProcedure
, maskBoolean
, tagFixnum
, tagPair
, tagProcedure
, tagVector
, tagBoolean
, tagNonfixnum
, repTrue
, repFalse
, repNil
, repVoid
, dispCar
, dispCdr
, dispVectorData
, dispVectorLength
, dispProcedureData
, dispProcedureCode
, sizePair
, X86Print, format
, OpCode
, GenM, Gen, gen, genLn, genJustLn
, hPutGenM, runGenM, showGen
, emitOp1, emitOp2, emitOp3
, emitLabelLabel
, emitLabel
, emitJumpLabel, emitJump
, emitEntry, emitExit
, pushq, popq
, movq, leaq
, PP(..), ppSexp, pppSexp
, parseListWithFinal
, parseUVar
, parseFVar
, parseLabel
, parseRelop
, parseBinop
, parseReg
, parseInt32
, parseInt64
, parseValPrim, parseEffectPrim, parsePredPrim
, isInt32
, isInt64
, isUInt6
, isFixnum
, wordShift
, ash
, chomp
) where
import Prelude hiding (LT, EQ, GT)
import Blaze.ByteString.Builder as BBB
import Blaze.ByteString.Builder.Char8 (fromChar, fromString, fromShow)
import Data.List (intersperse)
import Data.Set (size, fromList)
import Data.Char (isDigit, isSpace, isAlpha)
import Data.Int
import Data.Bits
import Data.ByteString (ByteString, hPut)
import Data . ByteString ( ByteString , hPut )
import Data.ByteString.Char8 (unpack)
import Control.Monad (unless, mapM_)
import Control.Monad.Reader
import Control.Monad.Writer
import Control.Monad.Error
import qualified Data.Set as S
import Text.Parsec.Error (ParseError)
import qualified Text.PrettyPrint.HughesPJ as P
import Control.Exception
import Data.Typeable
import System.IO
import FrameworkHs.Prims
import FrameworkHs.SExpReader.LispData
data P423Config =
P423Config
{ framePointerRegister :: Reg
, allocationPointerRegister :: Reg
, returnAddressRegister :: Reg
, returnValueRegister :: Reg
, parameterRegisters :: [Reg]
, runWrappers :: Bool
}
type PassM = ReaderT P423Config (Either String)
getConfig :: (MonadReader P423Config m) => m P423Config
getConfig = ask
data P423Pass a b =
P423Pass
}
| This runs a PassM computation with a given configuration . Any uncaught failures
becoming true errors .
runPassM :: P423Config -> PassM a -> a
runPassM conf m =
case runReaderT m conf of
Left str -> error str
Right x -> x
| Backtracking . If the first action throws an exception , try the second .
orPassM :: PassM a -> PassM a -> PassM a
orPassM m1 m2 = do
cfg <- getConfig
case runReaderT m1 cfg of
Left _ -> m2
Right x -> return x
passFailureM :: String -> String -> PassM a
passFailureM who e = lift $ Left (who ++ ": " ++ e)
passFailure :: String -> String -> a
passFailure who e = throw $ PassFailureException who e
data Option a = Default | Option a
split :: Char -> String -> (String,String)
split s [] = ([],[])
split s (c:cs)
| (c == s) = ([],cs)
| otherwise = (c:before,cs')
where (before,cs') = split s cs
data P423Exception = AssemblyFailedException String
| ASTParseException String
| ParseErrorException ParseError
| NoValidTestsException
| NoInvalidTestsException
| PassFailureException String String
| WrapperFailureException String String
deriving (Typeable)
instance Exception P423Exception
instance Show P423Exception where
show e@(AssemblyFailedException e') = shortExcDescrip e ++ ": " ++ show e'
show e@(ParseErrorException e') = shortExcDescrip e ++ ": " ++ show e'
show e@(ASTParseException s) = shortExcDescrip e ++ ": " ++ s
show e@(NoValidTestsException) = shortExcDescrip e
show e@(NoInvalidTestsException) = shortExcDescrip e
show e@(PassFailureException p e ' ) = shortExcDescrip e + + " : " + + e '
show e@(PassFailureException p e') = shortExcDescrip e
show e@(WrapperFailureException w e') = shortExcDescrip e ++ ": " ++ e'
shortExcDescrip :: P423Exception -> String
shortExcDescrip e = case e of
(AssemblyFailedException e) -> "Assembly failure"
(ParseErrorException pe) -> "SExp parse failure in tests"
(ASTParseException s) -> "AST parse failure"
(NoValidTestsException) -> "Couldn't find valid tests"
(NoInvalidTestsException) -> "Couldn't find invalid tests"
(PassFailureException p e) -> "Pass failure, " ++ p ++ ": " ++ e
(WrapperFailureException w e) -> "Wrapper failure (" ++ w ++ ")"
type GenM = WriterT Builder PassM
type Gen = GenM ()
gen :: (X86Print a) => a -> Gen
gen a = tell $ format a
genLn :: (X86Print a) => a -> Gen
genLn a = do
gen a
genJustLn
genJustLn :: Gen
genJustLn = tell $ fromChar '\n'
hPutGenM :: P423Config -> GenM a -> Handle -> IO ()
hPutGenM c g h = case runReaderT (runWriterT g) c of
Left s -> throwIO (userError $ "Error during code generation: " ++ s)
Right (_, b) -> hPut h $ BBB.toByteString b
runGenM :: P423Config -> GenM a -> Either String (a, ByteString)
runGenM c g = case runReaderT (runWriterT g) c of
Left s -> Left s
Right (x, bu) -> Right (x, BBB.toByteString bu)
showGen :: P423Config -> Gen -> String
showGen c g = show $ case runGenM c g of
Left s -> "Error: " ++ s
Right ((), b) -> show b
-- | Things that can be printed as x86-64 assembly operands (AT&T syntax).
class X86Print a where
  format :: a -> Builder

-- NOTE(review): orphan-style Show instance for Builder, used for debugging.
instance Show Builder where
  show = show . toByteString

instance X86Print Builder where
  format = id

instance X86Print ByteString where
  format = pp

instance X86Print String where
  format = pp

-- | Integer literals print as immediates: "$n".
instance X86Print Integer where
  format i = fromString "$" `mappend` pp i

-- | Registers print with a "%" prefix.
instance X86Print Reg where
  format r = fromString "%" `mappend` pp r

-- | Labels print as RIP-relative references: "L<ind>(%rip)".
instance X86Print Label where
  format (L name ind) = mconcat [fromString "L", pp ind, fromString "(%rip)"]

-- | Displacement operands: "<off>(%<reg>)".
instance X86Print Disp where
  format (D reg off) = mconcat [pp off, fromString "(%", pp reg, fromString ")"]

-- | Indexed operands: "(%<base>, %<index>)".
instance X86Print Ind where
  format (I bReg iReg) = mconcat [fromString "(%", pp bReg, fromString ", %", pp iReg, fromString ")"]
-- | Assembly opcodes are plain mnemonic strings.
type OpCode = String

-- | Emit an opcode with no arguments.
emitOp1 :: OpCode -> Gen
emitOp1 op = do gen " "
                genLn op

-- | Emit an opcode with one argument
emitOp2 :: (X86Print a) => OpCode -> a -> Gen
emitOp2 op a = do
  gen " "
  gen op
  gen " "
  genLn a

-- | Emit an opcode with two arguments
emitOp3 :: (X86Print a, X86Print b) => OpCode -> a -> b -> Gen
emitOp3 op a b = do
  gen " "
  gen op
  gen " "
  gen a
  gen ", "
  genLn b
-- | Emit a numbered local label definition: "L<ind>:".
emitLabelLabel :: Label -> Gen
emitLabelLabel (L _name ind) = do
  gen "L"
  gen $ pp ind
  genLn ":"

-- | Emit "<a>:" for any printable label-like operand.
emitLabel :: (X86Print a) => a -> Gen
emitLabel a = do
  gen a
  genLn ":"

-- | Emit a jump to a local numbered label.
emitJumpLabel :: OpCode -> Label -> Gen
emitJumpLabel op (L _name ind) = emitOp2 op (fromString "L" `mappend` pp ind)

-- | Emit an indirect jump through any printable operand.
emitJump :: (X86Print a) => OpCode -> a -> Gen
emitJump op a = emitOp2 op (fromString "*" `mappend` format a)

-- Old Handle-based version, kept for reference:
-- emitJumpLabel h op (L name ind) = emitOp2 h op ("L" ++ pp ind)
-- | Push a printable operand.
pushq :: (X86Print a) => a -> Gen
pushq = emitOp2 "pushq"

-- | Pop into a printable operand.
popq :: (X86Print a) => a -> Gen
popq = emitOp2 "popq"

-- | Emit the movq opcode
movq :: (X86Print a, X86Print b) => a -> b -> Gen
movq = emitOp3 "movq"

-- | Emit the leaq opcode.
leaq :: (X86Print a, X86Print b) => a -> b -> Gen
leaq = emitOp3 "leaq"

-- Old Handle-based API, kept for reference:
-- pushq, popq :: (X86Print a) => Handle -> a -> IO ()
-- movq, leaq :: (X86Print a, X86Print b) => Handle -> a -> b -> IO ()
-- pushq h = emitOp2 h "pushq"
-- popq h = emitOp2 h "popq"
-- movq h = emitOp3 h "movq"
-- | Emit the runtime entry point "_scheme_entry": saves the callee-saved
-- registers, moves the frame/allocation pointers handed over by the caller
-- (arriving in RDI and RSI) into the configured registers, and loads the
-- address of "_scheme_exit" as the return address.
emitEntry :: Gen
emitEntry = do
  c <- getConfig
  emitOp2 ".globl" "_scheme_entry"
  emitLabel "_scheme_entry"
  pushq RBX
  pushq RBP
  pushq R12
  pushq R13
  pushq R14
  pushq R15
  movq RDI (framePointerRegister c)
  movq RSI (allocationPointerRegister c)
  leaq "_scheme_exit(%rip)" (returnAddressRegister c)

-- Old Handle-based version, kept for reference:
-- emitEntry c h =
--   movq h RDI (framePointerRegister c)
--   movq h RSI (allocationPointerRegister c)
-- | Emit the runtime exit point "_scheme_exit": moves the return value into
-- RAX (unless it is already there), restores the callee-saved registers
-- pushed by 'emitEntry' (in reverse order), and returns.
emitExit :: Gen
emitExit = do
  emitLabel "_scheme_exit"
  c <- getConfig
  unless (returnValueRegister c == RAX)
         (movq (returnValueRegister c) RAX)
  popq R15
  popq R14
  popq R13
  popq R12
  popq RBP
  popq RBX
  emitOp1 "ret"

-- Old Handle-based version, kept for reference:
--   (movq h (returnValueRegister c) RAX)
--   popq h R15
--   popq h R14
--   popq h R12
--   popq h RBX
--   emitOp1 h "ret"
class PP a where
  -- | Print to a Scheme SExp representation.
  pp :: a -> Builder
  -- | Pretty print the same Scheme SExp representation:
  ppp :: a -> P.Doc
  ppp = P.text . unpack . BBB.toByteString . pp

-- | Build a list SExp
ppSexp :: [Builder] -> Builder
ppSexp ls = fromString "(" `mappend` mconcat (intersperse (fromString " ") ls) `mappend` fromString ")"

-- | Build a multi-line pretty-printed SExp.  When the head is a known
-- Scheme keyword, keep it on the same line as the second element.
pppSexp :: [P.Doc] -> P.Doc
pppSexp (h1:h2:ls) | isSchemeKwd (P.render h1) = P.parens$ P.sep$ (h1 P.<+> h2):ls
pppSexp ls = P.parens$ P.sep ls
-- | Is this symbol one of the Scheme keywords that get special
-- pretty-printing treatment in 'pppSexp'?
isSchemeKwd :: String -> Bool
-- isSchemeKwd = all (\c -> isAlpha c || c == '-')   -- old version, too permissive
isSchemeKwd = flip elem ["locals","letrec","lambda","register-conflict"]
-- Trivial/primitive PP instances.
instance PP Builder where
  pp = id
instance PP ByteString where
  pp = fromByteString
instance PP String where
  pp = fromString
  ppp = P.text
instance PP Bool where
  pp = fromString . show
instance PP Integer where
  pp = fromShow
  ppp = P.text . show

-- | Unique variables print as "name.index".
instance PP UVar where
  pp (UV name ind) = mconcat [fromString name, fromString ".", fromShow ind]

-- | Frame variables print as "fv<index>".
instance PP FVar where
  pp (FV ind) = mconcat [fromString "fv", fromShow ind]

-- | Labels print as "name$index".
instance PP Label where
  pp (L name ind) = mconcat [fromString name, fromChar '$', fromShow ind]

-- | Displacement operands print as "(disp reg off)".
instance PP Disp where
  pp (D r i) = ppSexp [fromString "disp", (pp r), (pp i)]

-- | Index operands print as "(index r1 r2)".
instance PP Ind where
  pp (I r1 r2) = ppSexp [fromString "index", (pp r1), (pp r2)]
-- | Relational operators print as their Scheme symbols.
instance PP Relop where
  pp r = case r of
    LT -> fromString "<"
    LTE -> fromString "<="
    EQ -> fromString "="
    GTE -> fromString ">="
    GT -> fromString ">"

-- | Binary operators print as their Scheme symbols.
instance PP Binop where
  pp b = case b of
    MUL -> fromString "*"
    ADD -> fromString "+"
    SUB -> fromString "-"
    LOGAND -> fromString "logand"
    LOGOR -> fromString "logor"
    SRA -> fromString "sra"

-- | Registers print as their lower-case x86-64 names.
instance PP Reg where
  pp r = case r of
    RAX -> fromString "rax"
    RCX -> fromString "rcx"
    RDX -> fromString "rdx"
    RBX -> fromString "rbx"
    RBP -> fromString "rbp"
    RSI -> fromString "rsi"
    RDI -> fromString "rdi"
    R8 -> fromString "r8"
    R9 -> fromString "r9"
    R10 -> fromString "r10"
    R11 -> fromString "r11"
    R12 -> fromString "r12"
    R13 -> fromString "r13"
    R14 -> fromString "r14"
    R15 -> fromString "r15"
-- | Effect primitives (mutators) print as their Scheme names.
instance PP EffectPrim where
  pp b = case b of
    SetCar -> fromString "set-car!"
    SetCdr -> fromString "set-cdr!"
    VectorSet -> fromString "vector-set!"
    ProcedureSet -> fromString "procedure-set!"

-- | Predicate primitives print as their Scheme names.
instance PP PredPrim where
  pp p = fromString $ case p of
    Lt -> "<" ; Lte -> "<=" ; Eq -> "=" ; Gte -> ">=" ; Gt -> ">"
    BooleanP -> "boolean?" ; EqP -> "eq?" ; FixnumP -> "fixnum?"
    NullP -> "null?" ; PairP -> "pair?" ; VectorP -> "vector?"
    ProcedureP -> "procedure?"

-- | Value primitives print as their Scheme names.
instance PP ValPrim where
  pp p = fromString$ case p of
    Times -> "*" ; Plus -> "+" ; Minus -> "-"; Car -> "car" ; Cdr -> "cdr" ; Cons -> "cons"
    MakeVector -> "make-vector" ; VectorLength -> "vector-length" ; VectorRef -> "vector-ref"
    Void -> "void"
    MakeProcedure -> "make-procedure" ; ProcedureCode -> "procedure-code" ; ProcedureRef -> "procedure-ref"

-- | Immediate constants print as Scheme literals.
instance PP Immediate where
  pp p = fromString$ case p of
    Fixnum i -> show i
    NullList -> "()"
    HashT -> "#t"
    HashF -> "#f"
-- | Datums print like Scheme literals: nil-terminated pair chains as proper
-- lists "(a b c)", other pairs as dotted pairs "(a . b)", vectors as "#(...)".
instance PP Datum where
  pp p = case p of
    PairDatum car cdr ->
      case gatherPairs cdr of
        -- cdr is a nil-terminated chain: print as a proper list
        Just ls -> parens (pp car `mappend` (mconcat (map ((spc `mappend`) . pp) ls)))
        -- otherwise print as a dotted pair
        Nothing -> parens (pp car `mappend` (fromString " . ") `mappend` pp cdr)
    VectorDatum ls -> fromString "#" `mappend`
                      parens (mconcat (intersperse spc (map pp ls)))
    ImmediateDatum i -> pp i
   where
    spc = fromString " "
    parens bld = fromString "(" `mappend` bld `mappend` fromString ")"
    -- Just the chain's elements when it ends in '(); Nothing otherwise.
    gatherPairs (ImmediateDatum NullList) = Just []
    gatherPairs (PairDatum x y) =
      case gatherPairs y of
        Nothing -> Nothing
        Just ls -> Just (x:ls)
    gatherPairs _ = Nothing
-- | Abort the current pass with an error message.  (PassM's error channel
-- carries a String, as seen in 'hPutGenM'/'runGenM'.)
parseFailureM :: String -> PassM a
parseFailureM = lift . Left
-- parseFailureM = return . Left . ParseErrorException
-- (old alternative, kept as a comment; it does not match the signature)
-- | Parse the numeric suffix of a variable/label name.  A leading zero is
-- only allowed when the suffix is exactly "0".
parseSuffix :: String -> PassM Integer
parseSuffix i@('0':rest) =
  if (null rest)
  then return 0
  else parseFailureM ("parseSuffix: Leading zero in index: " ++ i)
parseSuffix i =
  if (and $ map isDigit i)
  then return $ read i
  else parseFailureM ("parseSuffix: Not a number: " ++ i)

-- | Parse a non-empty list whose last element is handled by a different
-- parser (fb) than all preceding ones (fa).
parseListWithFinal :: (LispVal -> PassM a) -> (LispVal -> PassM b) ->
                      [LispVal] -> PassM ([a],b)
parseListWithFinal fa fb [] = parseFailureM ("parseListWithFinal: List must have at least one element")
parseListWithFinal fa fb [b] =
  do b <- fb b
     return ([],b)
parseListWithFinal fa fb (a:asb) =
  do a <- fa a
     (as,b) <- parseListWithFinal fa fb asb
     return (a:as,b)
-- | Parse a unique variable of the form "name.index".
parseUVar :: LispVal -> PassM UVar
parseUVar (Symbol s) = case (split '.' s) of
  (_,"") -> parseFailureM ("parseUVar: No index: " ++ s)
  (name,ind) -> do ind <- parseSuffix ind; return (UV name ind)
parseUVar e = parseFailureM ("parseUVar: Not a symbol: " ++ show e)

-- | Parse a frame variable of the form "fv<index>".
parseFVar :: LispVal -> PassM FVar
parseFVar (Symbol s) = case s of
  ('f':'v':ind) -> do ind <- parseSuffix ind; return (FV ind)
  _ -> parseFailureM ("parseFVar: Not a framevar: " ++ s)
parseFVar e = parseFailureM ("parseFVar: Not a symbol: " ++ show e)

-- | Parse a label of the form "name$index".
parseLabel :: LispVal -> PassM Label
parseLabel (Symbol s) = case (split '$' s) of
  (_,"") -> parseFailureM ("parseLabel: No index: " ++ s)
  (name,ind) -> do ind <- parseSuffix ind; return (L name ind)
parseLabel e = parseFailureM ("parseLabel: Not a symbol: " ++ show e)
-- (name,ind) -> do ind <- parseSuffix ind; return (L name ind)
-- (stale duplicate of the success case, kept as a comment)
-- | Parse a relational-operator symbol.
parseRelop :: LispVal -> PassM Relop
parseRelop (Symbol s) = case s of
  "<" -> return LT
  "<=" -> return LTE
  "=" -> return EQ
  ">=" -> return GTE
  ">" -> return GT
  e -> parseFailureM ("parseRelop: Not a relop: " ++ e)
parseRelop e = parseFailureM ("parseRelop: Not a symbol: " ++ show e)

-- | Parse a binary-operator symbol.
parseBinop :: LispVal -> PassM Binop
parseBinop (Symbol s) = case s of
  "logand" -> return LOGAND
  "logor" -> return LOGOR
  "sra" -> return SRA
  "*" -> return MUL
  "+" -> return ADD
  "-" -> return SUB
  e -> parseFailureM ("parseBinop: Not a binop: " ++ e)
parseBinop e = parseFailureM ("parseBinop: Not a symbol: " ++ show e)
-- | Parse an x86-64 register name (mirrors the PP Reg instance).
parseReg :: LispVal -> PassM Reg
parseReg (Symbol s) = case s of
  "rax" -> return RAX
  "rcx" -> return RCX
  "rdx" -> return RDX
  "rbp" -> return RBP
  "rbx" -> return RBX
  "rsi" -> return RSI
  "rdi" -> return RDI
  "r8" -> return R8
  "r9" -> return R9
  "r10" -> return R10
  "r11" -> return R11
  "r12" -> return R12
  "r13" -> return R13
  "r14" -> return R14
  "r15" -> return R15
  e -> parseFailureM ("parseReg: Not a register: " ++ e)
parseReg e = parseFailureM ("parseReg: Not a symbol: " ++ show e)
-- | Parse an integer, requiring it to fit in a signed 32-bit range.
parseInt32 :: LispVal -> PassM Integer
parseInt32 (IntNumber i) = if isInt32 n
                           then return n
                           else parseFailureM ("parseInt32: Out of range: " ++ show i)
  where n = fromIntegral i
parseInt32 e = parseFailureM ("parseInt32: Not an int: " ++ show e)

-- | Parse an integer, requiring it to fit in a signed 64-bit range.
parseInt64 :: LispVal -> PassM Integer
parseInt64 (IntNumber i) = if isInt64 n
                           -- n is already an Integer here; the previous
                           -- (fromIntegral n) was a redundant identity.
                           then return n
                           else parseFailureM ("parseInt64: Out of range: " ++ show i)
  where n = fromIntegral i
parseInt64 e = parseFailureM ("parseInt64: Not an int: " ++ show e)
-- | Parse a value-primitive symbol.
-- NOTE(review): the PP ValPrim instance also prints MakeProcedure,
-- ProcedureCode and ProcedureRef, which are not accepted here -- confirm
-- whether that is intentional (they may only occur in later pass languages).
parseValPrim :: LispVal -> PassM ValPrim
parseValPrim (Symbol s) = case s of
  "*" -> return Times
  "+" -> return Plus
  "-" -> return Minus
  "car" -> return Car
  "cdr" -> return Cdr
  "cons" -> return Cons
  "make-vector" -> return MakeVector
  "vector-length" -> return VectorLength
  "vector-ref" -> return VectorRef
  "void" -> return Void
  e -> parseFailureM ("parseValPrim: Not a value primitive: " ++ e)
parseValPrim e = parseFailureM ("parseValPrim: Not a symbol: " ++ show e)

-- | Parse a predicate-primitive symbol.
parsePredPrim :: LispVal -> PassM PredPrim
parsePredPrim (Symbol s) = case s of
  "<" -> return Lt
  "<=" -> return Lte
  "=" -> return Eq
  ">=" -> return Gte
  ">" -> return Gt
  "boolean?" -> return BooleanP
  "eq?" -> return EqP
  "fixnum?" -> return FixnumP
  "null?" -> return NullP
  "pair?" -> return PairP
  "vector?" -> return VectorP
  "procedure?" -> return ProcedureP
  e -> parseFailureM ("parsePredPrim: Not a pred primitive: " ++ e)
parsePredPrim e = parseFailureM ("parsePredPrim: Not a symbol: " ++ show e)

-- | Parse an effect-primitive symbol.
-- NOTE(review): "procedure-set!" (ProcedureSet) is printable by PP but not
-- parseable here -- confirm whether that is intentional.
parseEffectPrim :: LispVal -> PassM EffectPrim
parseEffectPrim (Symbol s) = case s of
  "set-car!" -> return SetCar
  "set-cdr!" -> return SetCdr
  "vector-set!"-> return VectorSet
  e -> parseFailureM ("parseEffectPrim: Not an effect primitive: " ++ e)
parseEffectPrim e = parseFailureM ("parseEffectPrim: Not a symbol: " ++ show e)
-- | Does the value fit in a two's-complement signed field of r bits?
inBitRange :: (Integral a) => Integer -> a -> Bool
inBitRange r i = lo <= n && n <= hi
  where
    n  = fromIntegral i
    lo = negate (2 ^ (r - 1))
    hi = 2 ^ (r - 1) - 1

-- | Signed 32-bit range check.
isInt32 = inBitRange 32

-- | Signed 64-bit range check.
isInt64 = inBitRange 64
-- | Does the value fit in a fixnum (signed, 'fixnumBits' wide)?
isFixnum :: Integral a => a -> Bool
isFixnum = inBitRange fixnumBits

-- | Unsigned 6-bit range check: 0..63.
isUInt6 :: Integer -> Bool
isUInt6 i = (0 <= i) && (i <= 63)
-- | Terms that carry a numeric suffix (unique variables, labels, ...).
class SuffixTerm a where
  extractSuffix :: a -> Integer
  -- | True when no two terms in the list share a suffix.
  uniqueSuffixes :: [a] -> Bool
  uniqueSuffixes = isSet . map extractSuffix

-- | True when the list contains no duplicate elements.
isSet :: Ord a => [a] -> Bool
isSet xs = S.size (S.fromList xs) == length xs
-- Suffix extraction for each suffixed term type.
instance SuffixTerm UVar where
  extractSuffix (UV name ind) = ind
instance SuffixTerm FVar where
  extractSuffix (FV ind) = ind
instance SuffixTerm Label where
  extractSuffix (L name ind) = ind
-- | Shift corresponding to the 8-byte machine word (2^3 = 8).
wordShift :: Integer
wordShift = 3

-- | Arithmetic shift left: multiply by 2^n.
ash :: Integer -> Integer -> Integer
ash n i = i * 2 ^ n

-- | Drop trailing whitespace from a string.
chomp :: String -> String
chomp s = reverse (dropWhile isSpace (reverse s))
-- | Number of payload bits in a fixnum (64 minus the tag shift).
fixnumBits :: Integer
fixnumBits = 64 - (fromIntegral shiftFixnum)

-- Low-bit tag masks.  The definitions below were damaged in the source;
-- values are reconstructed from the binary patterns that survived.
maskFixnum :: Int64
maskFixnum = 7        -- 0b111
maskPair :: Int64
maskPair = 7          -- 0b111
maskVector :: Int64
maskVector = 7        -- 0b111
maskProcedure :: Int64
maskProcedure = 7     -- 0b111
maskBoolean :: Int64
maskBoolean = 247     -- 0b11110111

-- | How far payloads are shifted left to make room for the 3-bit tag.
shiftFixnum :: Int
shiftFixnum = 3

-- Low-bit type tags.
tagFixnum :: Integer
tagFixnum = 0x0
tagPair :: Integer
tagPair = 0x1
tagProcedure :: Integer
tagProcedure = 0x2
tagVector :: Integer
tagVector = 0x3
tagBoolean :: Integer
tagBoolean = 0x6
tagNonfixnum :: Integer
tagNonfixnum = 0x6

-- Immediate encodings: payload shifted past the tag, tagged non-fixnum.
repFalse :: Integer
repFalse = shiftL 0x0 shiftFixnum + tagNonfixnum
repTrue :: Integer
repTrue = shiftL 0x1 shiftFixnum + tagNonfixnum
repNil :: Integer
repNil = shiftL 0x2 shiftFixnum + tagNonfixnum
repVoid :: Integer
repVoid = shiftL 0x3 shiftFixnum + tagNonfixnum

-- Field offsets (bytes) within heap objects.
dispCar :: Integer
dispCar = 0
dispCdr :: Integer
dispCdr = 8
sizePair :: Integer
sizePair = 2 * dispCdr
dispVectorLength :: Integer
dispVectorLength = 0
dispVectorData :: Integer
dispVectorData = 8
dispProcedureCode :: Integer
dispProcedureCode = 0
dispProcedureData :: Integer
dispProcedureData = 8
|
d2400aa9bbda7053adad6153ab43b1082eaf4c5ef6a1bb75b078e6fa7735d57a | emqx/emqx-sn | emqx_sn_registry.erl | %%--------------------------------------------------------------------
%% Copyright (c) 2020 EMQ Technologies Co., Ltd. All Rights Reserved.
%%
%% Licensed under the Apache License, Version 2.0 (the "License");
%% you may not use this file except in compliance with the License.
%% You may obtain a copy of the License at
%%
%%     http://www.apache.org/licenses/LICENSE-2.0
%%
%% Unless required by applicable law or agreed to in writing, software
%% distributed under the License is distributed on an "AS IS" BASIS,
%% WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
%% See the License for the specific language governing permissions and
%% limitations under the License.
%%--------------------------------------------------------------------
-module(emqx_sn_registry).
-behaviour(gen_server).
-include("emqx_sn.hrl").
-define(LOG(Level, Format, Args),
emqx_logger:Level("MQTT-SN(registry): " ++ Format, Args)).
-export([ start_link/2
, stop/1
]).
-export([ register_topic/3
, unregister_topic/2
]).
-export([ lookup_topic/3
, lookup_topic_id/3
]).
%% gen_server callbacks
-export([ init/1
, handle_call/3
, handle_cast/2
, handle_info/2
, terminate/2
, code_change/3
]).
-define(TAB, ?MODULE).
-record(state, {tab, max_predef_topic_id = 0}).
-type(registry() :: {ets:tab(), pid()}).
%%-----------------------------------------------------------------------------
%% @doc Start the registry server (not registered under a name; the caller
%% keeps the pid).  PredefTopics is a list of {TopicId, TopicName} pairs.
-spec(start_link(atom(), list()) -> {ok, pid()} | ignore | {error, Reason :: term()}).
start_link(Tab, PredefTopics) ->
    gen_server:start_link(?MODULE, [Tab, PredefTopics], []).

%% @doc Stop the registry server, waiting for it to terminate.
-spec(stop(registry()) -> ok).
stop({_Tab, Pid}) ->
    gen_server:stop(Pid, normal, infinity).
%% @doc Register TopicName for ClientId, returning the topic id the gateway
%% will use; wildcard topic names cannot be registered.
-spec(register_topic(registry(), binary(), binary()) -> integer() | {error, term()}).
register_topic({_, Pid}, ClientId, TopicName) when is_binary(TopicName) ->
    case emqx_topic:wildcard(TopicName) of
        false ->
            gen_server:call(Pid, {register, ClientId, TopicName});
        %% TopicId: in case of "accepted" the value that will be used as topic
        %% id by the gateway when sending PUBLISH messages to the client (not
        %% relevant in case of subscriptions to a short topic name or to a topic
        %% name which contains wildcard characters)
        true -> {error, wildcard_topic}
    end.
%% @doc Resolve a topic id back to its topic name; predefined topics take
%% precedence over per-client registrations.
-spec(lookup_topic(registry(), binary(), pos_integer()) -> undefined | binary()).
lookup_topic({Tab, _Pid}, ClientId, TopicId) when is_integer(TopicId) ->
    case lookup_element(Tab, {predef, TopicId}, 2) of
        undefined ->
            lookup_element(Tab, {ClientId, TopicId}, 2);
        Topic -> Topic
    end.

%% @doc Resolve a topic name to its id; a predefined topic is returned as
%% {predef, TopicId}.
-spec(lookup_topic_id(registry(), binary(), binary())
      -> undefined
       | pos_integer()
       | {predef, integer()}).
lookup_topic_id({Tab, _Pid}, ClientId, TopicName) when is_binary(TopicName) ->
    case lookup_element(Tab, {predef, TopicName}, 2) of
        undefined ->
            lookup_element(Tab, {ClientId, TopicName}, 2);
        TopicId ->
            {predef, TopicId}
    end.
%% @private Read one element from ETS, mapping a missing key to 'undefined'.
lookup_element(Tab, Key, Pos) ->
    try ets:lookup_element(Tab, Key, Pos) catch error:badarg -> undefined end.
%% @doc Drop every registration belonging to ClientId.
-spec(unregister_topic(registry(), binary()) -> ok).
unregister_topic({_Tab, Pid}, ClientId) ->
    gen_server:call(Pid, {unregister, ClientId}).
%%-----------------------------------------------------------------------------
%% Table layout:
%%   {predef, TopicId}     -> TopicName
%%   {predef, TopicName}   -> TopicId
%%   {ClientId, TopicId}   -> TopicName
%%   {ClientId, TopicName} -> TopicId
init([Tab, PredefTopics]) ->
    _ = ets:new(Tab, [set, public, named_table, {read_concurrency, true}]),
    MaxPredefId = lists:foldl(
                    fun({TopicId, TopicName}, AccId) ->
                        _ = ets:insert(Tab, {{predef, TopicId}, TopicName}),
                        _ = ets:insert(Tab, {{predef, TopicName}, TopicId}),
                        if TopicId > AccId -> TopicId; true -> AccId end
                    end, 0, PredefTopics),
    {ok, #state{tab = Tab, max_predef_topic_id = MaxPredefId}}.
%% Register: reuse a predefined or already-registered id; otherwise allocate
%% the next per-client id (ids must stay below 16#FFFF).
handle_call({register, ClientId, TopicName}, _From,
            State = #state{tab = Tab, max_predef_topic_id = PredefId}) ->
    case lookup_topic_id({Tab, self()}, ClientId, TopicName) of
        {predef, PredefTopicId} when is_integer(PredefTopicId) ->
            {reply, PredefTopicId, State};
        TopicId when is_integer(TopicId) ->
            {reply, TopicId, State};
        undefined ->
            case next_topic_id(Tab, PredefId, ClientId) of
                TopicId when TopicId >= 16#FFFF ->
                    {reply, {error, too_large}, State};
                TopicId ->
                    _ = ets:insert(Tab, {{ClientId, next_topic_id}, TopicId + 1}),
                    _ = ets:insert(Tab, {{ClientId, TopicName}, TopicId}),
                    _ = ets:insert(Tab, {{ClientId, TopicId}, TopicName}),
                    {reply, TopicId, State}
            end
    end;
%% Unregister: delete every row keyed by this client.
handle_call({unregister, ClientId}, _From, State = #state{tab = Tab}) ->
    ets:match_delete(Tab, {{ClientId, '_'}, '_'}),
    {reply, ok, State};
handle_call(Req, _From, State) ->
    ?LOG(error, "Unexpected request: ~p", [Req]),
    {reply, ignored, State}.
%% Unexpected casts/infos are logged and otherwise ignored.
handle_cast(Msg, State) ->
    ?LOG(error, "Unexpected msg: ~p", [Msg]),
    {noreply, State}.
handle_info(Info, State) ->
    ?LOG(error, "Unexpected info: ~p", [Info]),
    {noreply, State}.
terminate(_Reason, _State) ->
    ok.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%%-----------------------------------------------------------------------------
%% Next topic id to hand out for this client; starts just above the highest
%% predefined topic id.
next_topic_id(Tab, PredefId, ClientId) ->
    case ets:lookup(Tab, {ClientId, next_topic_id}) of
        [{_, Id}] -> Id;
        [] -> PredefId + 1
    end.
| null | https://raw.githubusercontent.com/emqx/emqx-sn/8f94f68f3740c328bd905e5d4581d58e9013fbba/src/emqx_sn_registry.erl | erlang | --------------------------------------------------------------------
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
--------------------------------------------------------------------
gen_server callbacks
-----------------------------------------------------------------------------
id by the gateway when sending PUBLISH messages to the client (not
relevant in case of subscriptions to a short topic name or to a topic
name which contains wildcard characters)
-----------------------------------------------------------------------------
----------------------------------------------------------------------------- | Copyright ( c ) 2020 EMQ Technologies Co. , Ltd. All Rights Reserved .
Licensed under the Apache License , Version 2.0 ( the " License " ) ;
distributed under the License is distributed on an " AS IS " BASIS ,
-module(emqx_sn_registry).
-behaviour(gen_server).
-include("emqx_sn.hrl").
-define(LOG(Level, Format, Args),
emqx_logger:Level("MQTT-SN(registry): " ++ Format, Args)).
-export([ start_link/2
, stop/1
]).
-export([ register_topic/3
, unregister_topic/2
]).
-export([ lookup_topic/3
, lookup_topic_id/3
]).
-export([ init/1
, handle_call/3
, handle_cast/2
, handle_info/2
, terminate/2
, code_change/3
]).
-define(TAB, ?MODULE).
-record(state, {tab, max_predef_topic_id = 0}).
-type(registry() :: {ets:tab(), pid()}).
-spec(start_link(atom(), list()) -> {ok, pid()} | ignore | {error, Reason :: term()}).
start_link(Tab, PredefTopics) ->
gen_server:start_link(?MODULE, [Tab, PredefTopics], []).
-spec(stop(registry()) -> ok).
stop({_Tab, Pid}) ->
gen_server:stop(Pid, normal, infinity).
-spec(register_topic(registry(), binary(), binary()) -> integer() | {error, term()}).
register_topic({_, Pid}, ClientId, TopicName) when is_binary(TopicName) ->
case emqx_topic:wildcard(TopicName) of
false ->
gen_server:call(Pid, {register, ClientId, TopicName});
: in case of “ accepted ” the value that will be used as topic
true -> {error, wildcard_topic}
end.
-spec(lookup_topic(registry(), binary(), pos_integer()) -> undefined | binary()).
lookup_topic({Tab, _Pid}, ClientId, TopicId) when is_integer(TopicId) ->
case lookup_element(Tab, {predef, TopicId}, 2) of
undefined ->
lookup_element(Tab, {ClientId, TopicId}, 2);
Topic -> Topic
end.
-spec(lookup_topic_id(registry(), binary(), binary())
-> undefined
| pos_integer()
| {predef, integer()}).
lookup_topic_id({Tab, _Pid}, ClientId, TopicName) when is_binary(TopicName) ->
case lookup_element(Tab, {predef, TopicName}, 2) of
undefined ->
lookup_element(Tab, {ClientId, TopicName}, 2);
TopicId ->
{predef, TopicId}
end.
@private
lookup_element(Tab, Key, Pos) ->
try ets:lookup_element(Tab, Key, Pos) catch error:badarg -> undefined end.
-spec(unregister_topic(registry(), binary()) -> ok).
unregister_topic({_Tab, Pid}, ClientId) ->
gen_server:call(Pid, {unregister, ClientId}).
init([Tab, PredefTopics]) ->
{ predef , } - > TopicName
{ predef , TopicName } - > TopicId
{ ClientId , - > TopicName
{ ClientId , TopicName } - > TopicId
_ = ets:new(Tab, [set, public, named_table, {read_concurrency, true}]),
MaxPredefId = lists:foldl(
fun({TopicId, TopicName}, AccId) ->
_ = ets:insert(Tab, {{predef, TopicId}, TopicName}),
_ = ets:insert(Tab, {{predef, TopicName}, TopicId}),
if TopicId > AccId -> TopicId; true -> AccId end
end, 0, PredefTopics),
{ok, #state{tab = Tab, max_predef_topic_id = MaxPredefId}}.
handle_call({register, ClientId, TopicName}, _From,
State = #state{tab = Tab, max_predef_topic_id = PredefId}) ->
case lookup_topic_id({Tab, self()}, ClientId, TopicName) of
{predef, PredefTopicId} when is_integer(PredefTopicId) ->
{reply, PredefTopicId, State};
TopicId when is_integer(TopicId) ->
{reply, TopicId, State};
undefined ->
case next_topic_id(Tab, PredefId, ClientId) of
TopicId when TopicId >= 16#FFFF ->
{reply, {error, too_large}, State};
TopicId ->
_ = ets:insert(Tab, {{ClientId, next_topic_id}, TopicId + 1}),
_ = ets:insert(Tab, {{ClientId, TopicName}, TopicId}),
_ = ets:insert(Tab, {{ClientId, TopicId}, TopicName}),
{reply, TopicId, State}
end
end;
handle_call({unregister, ClientId}, _From, State = #state{tab = Tab}) ->
ets:match_delete(Tab, {{ClientId, '_'}, '_'}),
{reply, ok, State};
handle_call(Req, _From, State) ->
?LOG(error, "Unexpected request: ~p", [Req]),
{reply, ignored, State}.
handle_cast(Msg, State) ->
?LOG(error, "Unexpected msg: ~p", [Msg]),
{noreply, State}.
handle_info(Info, State) ->
?LOG(error, "Unexpected info: ~p", [Info]),
{noreply, State}.
terminate(_Reason, _State) ->
ok.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
next_topic_id(Tab, PredefId, ClientId) ->
case ets:lookup(Tab, {ClientId, next_topic_id}) of
[{_, Id}] -> Id;
[] -> PredefId + 1
end.
|
6077ca56f367e9f4b99511713fdb976419f8d3979208c18dc409fba0d0ee6672 | johnlawrenceaspden/hobby-code | infix.clj | (def AND #(and %1 %2))
(def rank (zipmap [- + * / AND =] (iterate inc 1)))
(defn infix* [[a b & [c d e & more]]]
(cond
(vector? a) (infix* (list* (infix* a) b c d e more))
(vector? c) (infix* (list* a b (infix* c) d e more))
(ifn? b) (if (and d (< (rank b 0) (rank d 0)))
(infix* (list a b (infix* (list* c d e more))))
(infix* (list* (b a c) d e more)))
:else a))
(defn infix [& args]
(infix* args))
(infix 21 / [ 1 + 2 * 3 ])
(defn all-eq? [a b c]
(infix
a = b AND b = c AND a = c ))
(all-eq? 1 2 3)
(print (all-eq? 3 3 3))
(= 3 3 4) | null | https://raw.githubusercontent.com/johnlawrenceaspden/hobby-code/48e2a89d28557994c72299962cd8e3ace6a75b2d/infix.clj | clojure | (def AND #(and %1 %2))
(def rank (zipmap [- + * / AND =] (iterate inc 1)))
(defn infix* [[a b & [c d e & more]]]
(cond
(vector? a) (infix* (list* (infix* a) b c d e more))
(vector? c) (infix* (list* a b (infix* c) d e more))
(ifn? b) (if (and d (< (rank b 0) (rank d 0)))
(infix* (list a b (infix* (list* c d e more))))
(infix* (list* (b a c) d e more)))
:else a))
(defn infix [& args]
(infix* args))
(infix 21 / [ 1 + 2 * 3 ])
(defn all-eq? [a b c]
(infix
a = b AND b = c AND a = c ))
(all-eq? 1 2 3)
(print (all-eq? 3 3 3))
(= 3 3 4) | |
07e1133939b84e035dc6368821aef7d820c9ea513e5aae2c044e5ed1029afda4 | scalaris-team/scalaris | yaws_soap12_lib.erl | %%%-------------------------------------------------------------------
Created : 29 Nov 2006 by < >
Author : ( ) .
Desc . : Common SOAP code .
%%%-------------------------------------------------------------------
modified ( WdJ , May 2007 ): deal with imports in the WSDL .
modified ( WdJ , August 2007 ): the WSDL can contain more than 1 schema
copied from yaws_soap_lib ( , February 2012 ):
%% to be used for soap12 calls
-module(yaws_soap12_lib).
-export([initModel/1, initModel/2,
initModelFile/1,
config_file_xsd/0,
call/3, call/4, call/5, call/6, call/8,
call_attach/4, call_attach/5, call_attach/8,
write_hrl/2, write_hrl/3,
findHeader/2,
parseMessage/2,
makeFault/2,
is_wsdl/1, wsdl_model/1, wsdl_op_service/1,
wsdl_op_port/1, wsdl_op_operation/1,
wsdl_op_binding/1, wsdl_op_address/1,
wsdl_op_action/1, wsdl_operations/1,
get_operation/2
]).
%%% For testing...
-export([qtest/0]).
-include("../include/yaws_soap.hrl").
-include("../include/soap-envelope.hrl").
-include("../include/wsdl11soap12.hrl").
-define(HTTP_REQ_TIMEOUT, 20000).
%%-define(dbg(X,Y),
%% error_logger:info_msg("*dbg ~p(~p): " X,
%% [?MODULE, ?LINE | Y])).
-define(dbg(X,Y), true).
-record(yaws_soap_config, {atts, xsd_path, user_module, wsdl_file, add_files}).
-record(xsd_file, {atts, name, prefix, import_specs}).
-record(import_specs, {atts, namespace, prefix, location}).
-record(namespace_spec, {namespace, prefix}).
-record(namespace_registry, {specs = [], counter = 0}).
-define(DefaultPrefix, "p").
-define(CustomPrefix, "cp").
%%%
%%% Writes the header file (record definitions) for a WSDL file
%%%
write_hrl(WsdlURL, Output) when is_list(WsdlURL) ->
write_hrl(initModel(WsdlURL), Output);
write_hrl(#wsdl{model = Model}, Output) when is_list(Output) ->
erlsom:write_hrl(Model, Output).
write_hrl(WsdlURL, Output, PrefixOrOptions)
when is_list(WsdlURL),is_list(PrefixOrOptions) ->
write_hrl(initModel(WsdlURL, PrefixOrOptions), Output).
%%% For testing only...
qtest() ->
call("",
"GetWeatherByPlaceName",
["Boston"]).
%%% --------------------------------------------------------------------
%%% Access functions
%%% --------------------------------------------------------------------
is_wsdl(Wsdl) when is_record(Wsdl,wsdl) -> true;
is_wsdl(_) -> false.
wsdl_operations(#wsdl{operations = Ops}) -> Ops.
wsdl_model(#wsdl{model = Model}) -> Model.
wsdl_op_service(#operation{service = Service}) -> Service.
wsdl_op_port(#operation{port = Port}) -> Port.
wsdl_op_operation(#operation{operation = Op}) -> Op.
wsdl_op_binding(#operation{binding = Binding}) -> Binding.
wsdl_op_address(#operation{address = Address}) -> Address.
wsdl_op_action(#operation{action = Action}) -> Action.
%%% --------------------------------------------------------------------
%%% For Quick deployment
%%% --------------------------------------------------------------------
call(WsdlURL, Operation, ListOfData) when is_list(WsdlURL) ->
Wsdl = initModel(WsdlURL, ?DefaultPrefix),
call(Wsdl, Operation, ListOfData);
call(Wsdl, Operation, ListOfData) when is_record(Wsdl, wsdl) ->
case get_operation(Wsdl#wsdl.operations, Operation) of
{ok, Op} ->
Msg = mk_msg(?DefaultPrefix, Operation, ListOfData),
call(Wsdl, Operation, Op#operation.port,
Op#operation.service, [], Msg);
Else ->
Else
end.
%%% --------------------------------------------------------------------
%%% Takes http headers
%%% --------------------------------------------------------------------
call(WsdlURL, Operation, ListOfData, http_headers, HttpHeaders)
when is_list(WsdlURL) ->
Wsdl = initModel(WsdlURL, ?DefaultPrefix),
call(Wsdl, Operation, ListOfData, http_headers, HttpHeaders);
call(Wsdl, Operation, ListOfData, http_headers, HttpHeaders)
when is_record(Wsdl, wsdl) ->
case get_operation(Wsdl#wsdl.operations, Operation) of
{ok, Op} ->
Msg = mk_msg(?DefaultPrefix, Operation, ListOfData),
call(Wsdl, Operation, Op#operation.port,
Op#operation.service, [], Msg, http_headers, HttpHeaders);
Else ->
Else
end;
%%% --------------------------------------------------------------------
%%% With additional specified prefix
%%% --------------------------------------------------------------------
call(WsdlURL, Operation, ListOfData, prefix, Prefix) when is_list(WsdlURL) ->
Wsdl = initModel(WsdlURL, Prefix),
call(Wsdl, Operation, ListOfData, prefix, Prefix );
call(Wsdl, Operation, ListOfData, prefix, Prefix) when is_record(Wsdl, wsdl) ->
case get_operation(Wsdl#wsdl.operations, Operation) of
{ok, Op} ->
Msg = mk_msg(Prefix, Operation, ListOfData),
call(Wsdl, Operation, Op#operation.port,
Op#operation.service, [], Msg);
Else ->
Else
end.
%%% --------------------------------------------------------------------
Takes the actual records for the Header and Body message .
%%% --------------------------------------------------------------------
call(WsdlURL, Operation, Header, Msg) when is_list(WsdlURL) ->
Wsdl = initModel(WsdlURL, ?DefaultPrefix),
call(Wsdl, Operation, Header, Msg);
call(Wsdl, Operation, Header, Msg) when is_record(Wsdl, wsdl) ->
case get_operation(Wsdl#wsdl.operations, Operation) of
{ok, Op} ->
call(Wsdl, Operation, Op#operation.port, Op#operation.service,
Header, Msg);
Else ->
Else
end.
mk_msg(_Prefix, _Operation, ListOfData) ->
ListOfData. % rest of record data
get_operation([#operation{operation = X} = Op|_], X) ->
{ok, Op};
get_operation([_|T], Op) ->
get_operation(T, Op);
get_operation([], _Op) ->
{error, "operation not found"}.
%%% --------------------------------------------------------------------
%%% Make a SOAP request (no attachments)
%%% --------------------------------------------------------------------
call(Wsdl, Operation, Port, Service, Headers, Message) ->
call_attach(Wsdl, Operation, Port, Service, Headers, Message, [], []).
%%% --------------------------------------------------------------------
%%% Make a SOAP request (with http artifacts)
%%% --------------------------------------------------------------------
call(Wsdl, Operation, Port, Service, Headers, Message,
http_headers, HttpHeaders) ->
call_attach(Wsdl, Operation, Port, Service, Headers,
Message, [], HttpHeaders);
call(Wsdl, Operation, Port, Service, Headers, Message,
http_details, HttpDetails) ->
call_attach(Wsdl, Operation, Port, Service, Headers,
Message, [], http_details, HttpDetails).
%%% --------------------------------------------------------------------
%%% For Quick deployment (with attachments)
%%% --------------------------------------------------------------------
call_attach(WsdlURL, Operation, ListOfData, Attachments)
when is_list(WsdlURL) ->
Wsdl = initModel(WsdlURL, ?DefaultPrefix),
call_attach(Wsdl, Operation, ListOfData, Attachments);
call_attach(Wsdl, Operation, ListOfData, Attachments)
when is_record(Wsdl, wsdl) ->
case get_operation(Wsdl#wsdl.operations, Operation) of
{ok, Op} ->
Msg = mk_msg(?DefaultPrefix, Operation, ListOfData),
call_attach(Wsdl, Operation, Op#operation.port,
Op#operation.service, [], Msg, Attachments, []);
Else ->
Else
end.
%%% --------------------------------------------------------------------
Takes the actual records for the Header and Body message
%%% (with attachments)
%%% --------------------------------------------------------------------
call_attach(WsdlURL, Operation, Header, Msg, Attachments)
when is_list(WsdlURL) ->
Wsdl = initModel(WsdlURL, ?DefaultPrefix),
call_attach(Wsdl, Operation, Header, Msg, Attachments);
call_attach(Wsdl, Operation, Header, Msg, Attachments)
when is_record(Wsdl, wsdl) ->
case get_operation(Wsdl#wsdl.operations, Operation) of
{ok, Op} ->
call_attach(Wsdl, Operation, Op#operation.port,
Op#operation.service,
Header, Msg, Attachments, []);
Else ->
Else
end.
%%% --------------------------------------------------------------------
%%% Make a SOAP request (with attachments)
%%% --------------------------------------------------------------------
%% call_attach/8: compatibility wrapper — wraps the plain HTTP header list
%% into the HttpDetails property list used by call_attach/9.
call_attach(Wsdl, Operation, Port, Service, Headers,
            Message, Attachments, HttpHeaders) ->
    call_attach(Wsdl, Operation, Port, Service, Headers,
                Message, Attachments, http_details, [{headers, HttpHeaders}]).

%% call_attach/9: the workhorse.  Finds the operation, wraps Message in a
%% SOAP envelope, encodes it with erlsom, performs the HTTP request and
%% decodes the response.  HttpDetails is a property list that may contain
%% {headers, [...]} and {client_options, [...]}.
%% Returns {ok, Header, Body} | {error, Reason}.
call_attach(#wsdl{operations = Operations, model = Model},
            Operation, Port, Service, Headers, Message,
            Attachments, http_details, HttpDetails) ->
    HttpHeaders = findListValue(headers, HttpDetails),
    HttpClientOptions = findListValue(client_options, HttpDetails),
    %% find the operation
    case findOperation(Operation, Port, Service, Operations) of
        #operation{address = URL, action=Action, operation = Operation} ->
            %% Add the Soap envelope
            Envelope = mk_envelope(Message, Headers),
            %% Encode the message
            case erlsom:write(Envelope, Model) of
                {ok, XmlMessage} ->
                    {ContentType, Request} =
                        make_request_body(XmlMessage, Attachments, Action),
                    ?dbg("+++ Request = ~p~n", [Request]),
                    HttpRes = http_request(URL, Action, Request,
                                           HttpClientOptions, HttpHeaders,
                                           ContentType),
                    ?dbg("+++ HttpRes = ~p~n", [HttpRes]),
                    case HttpRes of
                        {ok, _Code, _ReturnHeaders, Body} ->
                            %% any status code is decoded (SOAP faults
                            %% typically arrive with HTTP 500)
                            parseMessage(Body, Model);
                        Error ->
                            %% in case of HTTP error: return
                            %% {error, description}
                            Error
                    end;
                {error, EncodingError} ->
                    {error, {encoding_error, EncodingError}}
            end;
        false ->
            {error, {unknown_operation, Operation}}
    end.
%% Look up Key in a property list of {Key, List} pairs.
%% Returns the associated list, or [] when the key is absent.
findListValue(Key, KeyVals) ->
    case lists:keyfind(Key, 1, KeyVals) of
        false ->
            [];
        {Key, List} ->
            List
    end.
%%%
%%% returns {ok, Header, Body} | {error, Error}
%%%
%% Decode a SOAP envelope (XML string) against the compiled model.
%% Returns {ok, Header, Body} (Header = undefined when absent) or
%% {error, {decoding, Reason}}.
parseMessage(Message, #wsdl{model = Model}) ->
    %% convenience clause: accept the whole #wsdl{} record
    parseMessage(Message, Model);
%%
parseMessage(Message, Model) ->
    Parsed = erlsom:scan(Message, Model),
    case Parsed of
        {ok, #'soap:Envelope'{'Body' = #'soap:Body'{choice = Body},
                              'Header' = undefined}, _} ->
            {ok, undefined, Body};
        {ok, #'soap:Envelope'{'Body' = #'soap:Body'{choice = Body},
                              'Header' = #'soap:Header'{choice = Header}}, _} ->
            {ok, Header, Body};
        {error, ErrorMessage} ->
            {error, {decoding, ErrorMessage}}
    end.
%% Linear search for the #operation{} matching Operation/Port/Service.
%% Returns the matching record, or false when none matches.
findOperation(_Operation, _Port, _Service, []) ->
    false;
findOperation(Operation, Port, Service, [Candidate | Rest]) ->
    case Candidate of
        #operation{operation = Operation, port = Port, service = Service} ->
            Candidate;
        #operation{} ->
            findOperation(Operation, Port, Service, Rest)
    end.
%% Build a 'soap:Envelope' record from Messages and Headers.  Single
%% records are first normalised to one-element lists; the 'Header'
%% element is omitted entirely when there are no headers.
mk_envelope(M, H) when is_tuple(M) -> mk_envelope([M], H);
mk_envelope(M, H) when is_tuple(H) -> mk_envelope(M, [H]);
%%
mk_envelope(Messages, []) when is_list(Messages) ->
    #'soap:Envelope'{'Body' = #'soap:Body'{choice = Messages}};
mk_envelope(Messages, Headers) when is_list(Messages),is_list(Headers) ->
    #'soap:Envelope'{'Body' = #'soap:Body'{choice = Messages},
                     'Header' = #'soap:Header'{choice = Headers}}.
%%% --------------------------------------------------------------------
%%% Parse a WSDL file and return a 'Model'
%%% --------------------------------------------------------------------
%% Parse a WSDL file and return the compiled #wsdl{} model.
initModel(WsdlFile) ->
    initModel(WsdlFile, ?DefaultPrefix).

%% PrefixOrOptions can be a property list that contains the options
%% for Erlsom, or a string.  If it is a string, this is used as the
%% Erlsom 'prefix' option (and the other options are left unspecified).
%% (Fix: the two explanatory lines above had lost their comment markers,
%% which broke compilation.)
initModel(WsdlFile, PrefixOrOptions) ->
    Options = case is_string(PrefixOrOptions) of
                  no ->
                      %% It is an option list
                      %% Add the default prefix at the end - it will only
                      %% be used if no other prefix is specified
                      PrefixOrOptions ++ [{prefix, ?DefaultPrefix}];
                  _ ->
                      %% just the prefix
                      [{prefix, PrefixOrOptions}]
              end,
    PrivDir = priv_dir(),
    initModel2(WsdlFile, Options, PrivDir, undefined, undefined).
%% Build the #wsdl{} model from an XML configuration file (schema given
%% by config_file_xsd/0) instead of from direct arguments.
initModelFile(ConfigFile) ->
    {ok, ConfigSchema} = erlsom:compile_xsd(config_file_xsd()),
    %% read (parse) the config file
    {ok, Config, _} = erlsom:scan_file(ConfigFile, ConfigSchema),
    #yaws_soap_config{xsd_path = XsdPath,
                      wsdl_file = Wsdl,
                      add_files = AddFiles} = Config,
    #xsd_file{name = WsdlFile, prefix = Prefix, import_specs = Import} = Wsdl,
    initModel2(WsdlFile, [{prefix, Prefix}], XsdPath, Import, AddFiles).

%% Directory holding the bundled wsdl/soap-envelope XSD files.
priv_dir() ->
    yaws:get_priv_dir().
%% Compile the WSDL/SOAP meta-schemas, parse the WSDL (plus its imports)
%% and assemble the final #wsdl{operations, model} record.
%% (Fix: several comment lines in this function had lost their %% markers,
%% which broke compilation; they are restored below.)
initModel2(WsdlFile, ErlsomOptions, Path, Import, AddFiles) ->
    WsdlName = filename:join([Path, "wsdl.xsd"]),
    %% NOTE(review): the namespace in this include tuple looks truncated
    %% ("/") — likely the wsdl namespace URI; confirm against upstream.
    IncludeWsdl = {"/", "wsdl", WsdlName},
    {ok, WsdlModel} = erlsom:compile_xsd_file(
                        filename:join([Path, "wsdl11soap12.xsd"]),
                        [{prefix, "soap"},
                         {include_files, [IncludeWsdl]}]),
    %% uncomment to generate the wsdl11soap12.hrl file
    %% erlsom:write_hrl(WsdlModel, "/home/kalski/test/wsdl11soap12.hrl"),
    %% add the xsd model (since xsd is also used in the wsdl)
    WsdlModel2 = erlsom:add_xsd_model(WsdlModel),
    Options = ErlsomOptions ++ makeOptions(Import),
    %% parse Wsdl
    {Model, Operations} = parseWsdls([WsdlFile], WsdlModel2,
                                     Options, {undefined, []}),
    %% TODO: add files as required
    %% now compile the soap-envelope schema, and add the Model
    {ok, EnvelopeModel} =
        erlsom:compile_xsd_file(
          filename:join([Path, "soap-envelope.xsd"]),
          [{prefix, "soap"},
           {include_files, [{"", undefined,
                             filename:join([Path, "xml.xsd"])}]}]),
    SoapModel = erlsom:add_model(EnvelopeModel, Model),
    %% uncomment to generate the soap-envelope.hrl file
    %% erlsom:write_hrl(EnvelopeModel, "/home/kalski/test/soap-envelope.hrl"),
    SoapModel2 = addModels(AddFiles, SoapModel),
    #wsdl{operations = Operations, model = SoapModel2}.
%%% --------------------------------------------------------------------
%%% Parse a list of WSDLs, following imports (recursively)
%%% Returns {Model, Operations}
%%% --------------------------------------------------------------------
%% Entry point: start the recursive WSDL parse with a fresh (empty)
%% namespace registry.
parseWsdls(WsdlFiles, WsdlModel, Options, Acc) ->
    parseWsdls(WsdlFiles, WsdlModel, Options, Acc, #namespace_registry{}).
%% Recursively parse each WSDL file: scan it, register its target
%% namespace, compile its embedded schemas and imported schema files,
%% and accumulate {Model, Operations}.
%% (Fix: several comment lines in this function had lost their %% markers,
%% which broke compilation; they are restored below.)
parseWsdls([], _WsdlModel, _Options, Acc, _NSRegistry) ->
    Acc;
parseWsdls([WsdlFile | Tail], WsdlModel, Options,
           {AccModel, AccOperations}, NSRegistry) ->
    WsdlFileNoSpaces = rmsp(WsdlFile),
    {ok, WsdlFileContent} = get_url_file(WsdlFileNoSpaces),
    {ok, ParsedWsdl, _} = erlsom:scan(WsdlFileContent, WsdlModel),
    WsdlTargetNameSpace = getTargetNamespaceFromWsdl(ParsedWsdl),
    {Prefix, PrefixlessOptions} = remove_prefix_option(Options),
    TNSEnrichedNSRegistry = extend_namespace_registry(WsdlTargetNameSpace,
                                                      Prefix, NSRegistry),
    %% get the xsd elements from this model, and hand them over to
    %% erlsom_compile.
    Xsds = getXsdsFromWsdl(ParsedWsdl),
    %% Now we need to build a list [{Namespace, Prefix, Xsd}, ...] for
    %% all the Xsds in the WSDL.
    %% This list is used when a schema includes one of the other schemas.
    %% The AXIS java2wsdl tool generates wsdls that depend on this feature.
    {ImportsEnrichedNSRegistry, ImportList} = makeImportList(
                                                Xsds,
                                                TNSEnrichedNSRegistry, []),
    Model2 = addSchemas(Xsds, AccModel, PrefixlessOptions, ImportList),
    Ports = getPorts(ParsedWsdl),
    Operations = getOperations(ParsedWsdl, Ports),
    Imports = getImports(filename:dirname(WsdlFileNoSpaces), ParsedWsdl),
    %% use Options rather than PrefixlessOptions because imports come in
    %% the wsdl targetNamespace
    Model3 = addSchemaFiles(Imports, Model2, Options, []),
    Acc2 = {Model3, Operations ++ AccOperations},
    %% process imports (recursively, so that imports in the imported files
    %% are processed as well).
    %% For the moment, the namespace is ignored on operations etc.
    %% This makes it a bit easier to deal with imported wsdl's.
    %% TODO uncomment if imports can be WSDL
    %% Acc3 = parseWsdls(Imports, WsdlModel, Options, Acc2,
    %%                   ImportsEnrichedNSRegistry),
    parseWsdls(Tail, WsdlModel, PrefixlessOptions, Acc2,
               ImportsEnrichedNSRegistry).
%% Split the erlsom 'prefix' option out of an option list.
%% Returns {Prefix, RemainingOptions}; Prefix is 'undefined' when
%% no {prefix, _} entry is present.
remove_prefix_option(Opts) ->
    case lists:keytake(prefix, 1, Opts) of
        false ->
            {undefined, Opts};
        {value, {prefix, ThePrefix}, Remaining} ->
            {ThePrefix, Remaining}
    end.
%empty registry, initializing
%% Register a target namespace in the registry, assigning a prefix.
%% Clause 1: empty registry, no caller prefix -> generate a unique prefix.
extend_namespace_registry(WsdlTargetNameSpace, undefined,
                          #namespace_registry{specs = []} = NSRegistry) ->
    {NewCounter, NewPrefix} = create_unique_prefix(NSRegistry),
    NSRegistry#namespace_registry{
      specs = [#namespace_spec{namespace = WsdlTargetNameSpace,
                               prefix = NewPrefix}], counter = NewCounter};
%% Clause 2: empty registry with a caller-supplied prefix -> use it as-is.
extend_namespace_registry(WsdlTargetNameSpace, Prefix,
                          #namespace_registry{specs = []} = NSRegistry) ->
    NSRegistry#namespace_registry{
      specs = [#namespace_spec{namespace = WsdlTargetNameSpace,
                               prefix = Prefix}]};
%% Clause 3: non-empty registry -> reuse an existing entry for this
%% namespace, otherwise prepend a new spec with a generated prefix.
extend_namespace_registry(WsdlTargetNameSpace, _Prefix,
                          #namespace_registry{specs = Specs} = NSRegistry) ->
    case lists:keyfind(WsdlTargetNameSpace, #namespace_spec.namespace, Specs) of
        #namespace_spec{} ->
            NSRegistry;
        false ->
            {NewCounter, NewPrefix} = create_unique_prefix(NSRegistry),
            NSRegistry#namespace_registry{
              specs = [#namespace_spec{namespace = WsdlTargetNameSpace,
                                       prefix = NewPrefix}|Specs],
              counter = NewCounter}
    end.
%% Generate a prefix ("cp" ++ Counter) not yet used in the registry.
%% Returns {NewCounter, NewPrefix}; retries with a bumped counter on a
%% collision.
create_unique_prefix(#namespace_registry{specs = Specs, counter = Counter} =
                         NSRegistry) ->
    NewCounter = Counter+1,
    NewPrefix = ?CustomPrefix ++ integer_to_list(NewCounter),
    case lists:keyfind(NewPrefix, #namespace_spec.prefix, Specs) of
        #namespace_spec{} ->
            %% prefix already taken - try the next counter value
            create_unique_prefix(NSRegistry#namespace_registry{
                                   counter = Counter+1});
        false ->
            {NewCounter, NewPrefix}
    end.
%%% --------------------------------------------------------------------
build a list : [ { Namespace , Xsd } , ... ] for all the Xsds in the WSDL .
%%% This list is used when a schema inlcudes one of the other schemas.
The AXIS java2wsdl tool generates wsdls that depend on this feature .
%% Build [{Namespace, Prefix, Xsd}, ...] for the schemas embedded in the
%% WSDL, registering each schema's target namespace along the way.
%% Returns {UpdatedRegistry, ImportList}.
makeImportList([], NSRegistry, Acc) ->
    {NSRegistry, Acc};
makeImportList([ Xsd | Tail], NSRegistry, Acc) ->
    XsdNS = erlsom_lib:getTargetNamespaceFromXsd(Xsd),
    NewNSRegistry = extend_namespace_registry(XsdNS, undefined, NSRegistry),
    %% the namespace is guaranteed to be present after the call above
    #namespace_spec{prefix = Prefix} =
        lists:keyfind(XsdNS, #namespace_spec.namespace,
                      NewNSRegistry#namespace_registry.specs),
    makeImportList(Tail, NewNSRegistry, [{XsdNS, Prefix, Xsd} | Acc]).
%% Extract the targetNamespace attribute from a parsed WSDL definitions
%% record.
getTargetNamespaceFromWsdl(#'wsdl:tDefinitions'{targetNamespace = TNS}) ->
    TNS.
%%% --------------------------------------------------------------------
%%% compile each of the schemas, and add it to the model.
%%% Returns Model
%%% (TODO: using the same prefix for all XSDs makes no sense)
%%% --------------------------------------------------------------------
%% Compile each embedded schema and merge it into the accumulated model.
%% AccModel may start as 'undefined' (no model yet).  Returns the model.
addSchemas([], AccModel, _PrefixlessOptions, _ImportList) ->
    AccModel;
addSchemas([Xsd| Tail], AccModel, PrefixlessOptions, ImportList) ->
    Model2 = case Xsd of
                 undefined ->
                     %% nothing to compile for this entry
                     AccModel;
                 _ ->
                     %% look up the prefix assigned to this schema's
                     %% target namespace in the import list
                     {_, Prefix, _} =
                         lists:keyfind(
                           erlsom_lib:getTargetNamespaceFromXsd(Xsd),
                           1, ImportList),
                     NewOptions = [{prefix, Prefix}|PrefixlessOptions],
                     {ok, Model} =
                         erlsom_compile:compile_parsed_xsd(
                           Xsd,
                           [{include_files, ImportList} |NewOptions]),
                     case AccModel of
                         undefined -> Model;
                         _ -> erlsom:add_model(AccModel, Model)
                     end
             end,
    addSchemas(Tail, Model2, PrefixlessOptions, ImportList).
%%% --------------------------------------------------------------------
%%% compile each of the schema files, and add it to the model.
%%% Returns Model
%%% (TODO: using the same prefix for all XSD files makes no sense)
%%% --------------------------------------------------------------------
%% Compile each imported schema file and merge it into the accumulated
%% model.  AccModel may be 'undefined' (no model yet).  Returns the model.
addSchemaFiles([], AccModel, _Options, _ImportList) ->
    AccModel;
addSchemaFiles([Xsd| Tail], AccModel, Options, ImportList) ->
    {ok, Model} =
        erlsom:compile_xsd_file(get_file_with_path(Xsd),
                                [{include_files, ImportList} |Options]),
    Model2 = case AccModel of
                 undefined -> Model;
                 _ -> erlsom:add_model(AccModel, Model)
             end,
    addSchemaFiles(Tail, Model2, Options, ImportList).
%%% --------------------------------------------------------------------
%%% Get a file from an URL spec.
%%% --------------------------------------------------------------------
%% Fetch the contents of a WSDL/XSD addressed by URL or plain filename.
%% Returns {ok, String} | {error, Reason}.
%% Generalized: https:// URLs are now handled too (previously they fell
%% through to the plain-filename clause and crashed in file:read_file/1);
%% httpc serves both schemes.
get_url_file("http://"++_ = URL) ->
    get_http_body(URL);
get_url_file("https://"++_ = URL) ->
    get_http_body(URL);
get_url_file("file://"++Fname) ->
    {ok, Bin} = file:read_file(Fname),
    {ok, binary_to_list(Bin)};
%% added this, since this is what is used in many WSDLs (i.e.: just a filename).
get_url_file(Fname) ->
    {ok, Bin} = file:read_file(Fname),
    {ok, binary_to_list(Bin)}.

%% Shared HTTP(S) fetch: {ok, Body} on 200, {error, Description} otherwise.
get_http_body(URL) ->
    case httpc:request(URL) of
        {ok,{{_HTTP,200,_OK}, _Headers, Body}} ->
            {ok, Body};
        {ok,{{_HTTP,RC,Emsg}, _Headers, _Body}} ->
            error_logger:error_msg("~p: http-request got: ~p~n",
                                   [?MODULE, {RC, Emsg}]),
            {error, "failed to retrieve: "++URL};
        {error, Reason} ->
            error_logger:error_msg("~p: http-request failed: ~p~n",
                                   [?MODULE, Reason]),
            {error, "failed to retrieve: "++URL}
    end.
%%% --------------------------------------------------------------------
%%% Make a HTTP Request
%%% --------------------------------------------------------------------
%% Dispatch the HTTP POST either to ibrowse (when available on the code
%% path) or to the OTP httpc client.
%% (Fix: the fall-back comment below had lost its %% marker, which broke
%% compilation.)
http_request(URL, Action, Request, Options, Headers, ContentType) ->
    case code:ensure_loaded(ibrowse) of
        {module, ibrowse} ->
            %% If ibrowse exist in the path then let's use it...
            ibrowse_request(URL, Action, Request, Options,
                            Headers, ContentType);
        _ ->
            %% ... otherwise, let's use the OTP http client.
            inets_request(URL, Action, Request, Options,
                          Headers, ContentType)
    end.
%% Perform the SOAP POST via the OTP httpc client.
%% Returns {ok, StatusCode, Headers, Body} or httpc's error term.
inets_request(URL, Action, Request, Options, Headers, ContentType) ->
    %% add the SOAPAction header unless no action was given
    case Action of
        undefined ->
            NHeaders = Headers;
        _ ->
            NHeaders = [{"SOAPAction", Action} | Headers]
    end,
    %% NOTE(review): falls back to a hard-coded Host "localhost:8800"
    %% when the caller supplies none — looks suspicious; confirm intent.
    NewHeaders = case proplists:get_value("Host", NHeaders) of
                     undefined ->
                         [{"Host", "localhost:8800"}|NHeaders];
                     _ ->
                         NHeaders
                 end,
    NewOptions = [{cookies, enabled}|Options],
    %% note: set_options/1 changes httpc state globally, not per request
    httpc:set_options(NewOptions),
    case httpc:request(post,
                       {URL,NewHeaders,
                        ContentType,
                        Request},
                       [{timeout,?HTTP_REQ_TIMEOUT}],
                       [{sync, true}, {full_result, true},
                        {body_format, string}]) of
        {ok,{{_HTTP,200,_OK},ResponseHeaders,ResponseBody}} ->
            {ok, 200, ResponseHeaders, ResponseBody};
        {ok,{{_HTTP,500,_Descr},ResponseHeaders,ResponseBody}} ->
            %% 500 carries the SOAP fault body - pass it up for decoding
            {ok, 500, ResponseHeaders, ResponseBody};
        {ok,{{_HTTP,ErrorCode,_Descr},ResponseHeaders,ResponseBody}} ->
            {ok, ErrorCode, ResponseHeaders, ResponseBody};
        Other ->
            Other
    end.
%% Perform the SOAP POST via the ibrowse client.
%% An {ibrowse_timeout, Millis} entry in Options selects send_req/6.
%% Returns {ok, StatusCode, Headers, Body} | {error, Reason}.
ibrowse_request(URL, Action, Request, Options, Headers, ContentType) ->
    case start_ibrowse() of
        ok ->
            %% ibrowse does not add Content-Type itself; prepend it, plus
            %% SOAPAction when an action was given
            NewHeaders = [{"Content-Type", ContentType} |
                          case Action of
                              undefined ->
                                  Headers;
                              _ ->
                                  [{"SOAPAction", Action} | Headers]
                          end],
            IbrowseF = case lists:keyfind(ibrowse_timeout, 1, Options) of
                           {_, Timeout} ->
                               fun() ->
                                       ibrowse:send_req(URL, NewHeaders, post,
                                                        Request, Options, Timeout)
                               end;
                           false ->
                               fun() ->
                                       ibrowse:send_req(URL, NewHeaders, post,
                                                        Request, Options)
                               end
                       end,
            case IbrowseF() of
                {ok, Status, ResponseHeaders, ResponseBody} ->
                    %% ibrowse returns the status code as a string
                    {ok, list_to_integer(Status), ResponseHeaders,
                     ResponseBody};
                {error, Reason} ->
                    {error, Reason}
            end;
        error ->
            {error, "could not start ibrowse"}
    end.
%% Ensure the ibrowse client is running.
%% Returns ok when it is started (or was already running), error otherwise.
start_ibrowse() ->
    StartResult = ibrowse:start(),
    case StartResult of
        {ok, _Pid} ->
            ok;
        {error, {already_started, _Pid}} ->
            ok;
        _Other ->
            error
    end.
rmsp(Str) -> string:strip(Str, left).
%% Build {ContentType, Body} for the HTTP request.  Without attachments a
%% plain SOAP 1.2 body is produced; with attachments the payload is
%% DIME-encoded.
make_request_body(Content, [], Operation) ->
    {"application/soap+xml;charset=UTF-8;action=\"" ++ Operation ++ "\"",
     "<?xml version=\"1.0\" encoding=\"utf-8\"?>"++ Content};
make_request_body(Content, AttachedFiles, _Operation) ->
    {"application/dime",
     yaws_dime:encode("<?xml version=\"1.0\" encoding=\"utf-8\"?>" ++ Content,
                      AttachedFiles)}.
%% Render a minimal SOAP fault envelope from a code and string.  The
%% catch-all falls back to a generic "Server error" fault when the
%% arguments are not strings (++ raises badarg on a non-list left
%% operand).
%% NOTE(review): the SOAP-ENV xmlns value ("/") looks truncated — it is
%% presumably the SOAP envelope namespace URI; confirm against upstream.
makeFault(FaultCode, FaultString) ->
    try
        "<SOAP-ENV:Envelope xmlns:SOAP-ENV=\"/\">"
            "<SOAP-ENV:Body>"
            "<SOAP-ENV:Fault>"
            "<faultcode>SOAP-ENV:" ++ FaultCode ++ "</faultcode>" ++
            "<faultstring>" ++ FaultString ++ "</faultstring>" ++
            "</SOAP-ENV:Fault>"
            "</SOAP-ENV:Body>"
            "</SOAP-ENV:Envelope>"
    catch
        _:_ ->
            "<SOAP-ENV:Envelope xmlns:SOAP-ENV=\"/\">"
                "<SOAP-ENV:Body>"
                "<SOAP-ENV:Fault>"
                "<faultcode>SOAP-ENV:Server</faultcode>"
                "<faultstring>Server error</faultstring>"
                "</SOAP-ENV:Fault>"
                "</SOAP-ENV:Body>"
                "</SOAP-ENV:Envelope>"
    end.
%% record http_header is not defined??
%% Case-insensitive lookup of an HTTP header value in yaws's parsed
%% "other" header tuples ({http_header, _, Name, _, Value}).
%% Returns the value, or 'undefined'.
findHeader(Label, Headers) ->
    findHeader0(yaws:to_lower(Label), Headers).

findHeader0(_Label, []) ->
    undefined;
findHeader0(Label, [{_,_,Hdr,_,Val}|T]) ->
    case {Label, yaws:to_lower(Hdr)} of
        {X,X} -> Val;          %% same lower-cased name: found
        _ -> findHeader0(Label, T)
    end;
%% Headers may be 'undefined' instead of a list
findHeader0(_Label, undefined) ->
    undefined.
%% Convert a list of #import_specs{} records (or 'undefined') into
%% erlsom include tuples {Namespace, Prefix, Location}.
%% (Fix: the record-reminder comment below had lost its %% marker, which
%% broke compilation.)
makeOptions(undefined) ->
    [];
makeOptions(Import) ->
    lists:map(fun makeOption/1, Import).

%% -record(import_specs, {atts, namespace, prefix, location}).
makeOption(#import_specs{namespace = Ns, prefix = Pf, location = Lc}) ->
    {Ns, Pf, Lc}.
%% Fold any additional #xsd_file{} entries into the model ('undefined'
%% means no extra files).
addModels(undefined, Model) ->
    Model;
addModels(Import, Model) ->
    lists:foldl(fun addModel/2, Model, Import).

%% -record(xsd_file, {atts, name, prefix, import_specs}).
addModel(undefined, Acc) ->
    Acc;
addModel(#xsd_file{name = XsdFile, prefix = Prefix, import_specs = Import},
         Acc) ->
    Options = makeOptions(Import),
    {ok, Model2} = erlsom:add_xsd_file(XsdFile, [{prefix, Prefix}|Options],Acc),
    Model2.
%% returns [#port{}]
%% -record(port, {service, port, binding, address}).
%% Collect all SOAP ports from the WSDL's service definitions.
%% Returns a list of #port{service, port, binding, address}.
getPorts(ParsedWsdl) ->
    Services = getTopLevelElements(ParsedWsdl, 'wsdl:tService'),
    getPortsFromServices(Services, []).

getPortsFromServices([], Acc) ->
    Acc;
getPortsFromServices([Service|Tail], Acc) ->
    getPortsFromServices(Tail, getPortsFromService(Service) ++ Acc).

getPortsFromService(#'wsdl:tService'{name = Name, port = Ports}) ->
    getPortsInfo(Ports, Name, []).

getPortsInfo([], _Name, Acc) ->
    Acc;
%% a port with a soap:address child becomes a #port{} record
getPortsInfo([#'wsdl:tPort'{name = Name,
                            binding = Binding,
                            choice =
                                [#'soap:tAddress'{location = URL}]} | Tail],
             ServiceName, Acc) ->
    getPortsInfo(Tail, ServiceName, [#port{service = ServiceName,
                                           port = Name,
                                           binding = Binding,
                                           address = URL}|Acc]);
%% non-soap bindings are ignored.
getPortsInfo([#'wsdl:tPort'{} | Tail], ServiceName, Acc) ->
    getPortsInfo(Tail, ServiceName, Acc).
%% Select all top-level WSDL elements whose record tag matches Type
%% (e.g. 'wsdl:tService', 'wsdl:tBinding').
getTopLevelElements(#'wsdl:tDefinitions'{choice1 = TLElements}, Type) ->
    getTopLevelElements(TLElements, Type, []).

getTopLevelElements([], _Type, Acc) ->
    Acc;
getTopLevelElements([#'wsdl:anyTopLevelOptionalElement'{choice = Tuple}| Tail],
                    Type, Acc) ->
    %% the record tag (first tuple element) identifies the element kind
    case element(1, Tuple) of
        Type -> getTopLevelElements(Tail, Type, [Tuple|Acc]);
        _ -> getTopLevelElements(Tail, Type, Acc)
    end.
%% Map a URL spec to a local path suitable for erlsom:compile_xsd_file/2:
%% http URLs have no local path (undefined), file URLs are stripped of
%% the scheme, anything else is assumed to already be a path.
get_file_with_path("http://" ++ _) ->
    undefined;
get_file_with_path("file://" ++ FName) ->
    FName;
get_file_with_path(PlainPath) ->
    PlainPath.
%% Resolve the locations of all wsdl:import elements relative to the
%% directory of the importing WSDL (URL-joined for http/file schemes,
%% filename:join/2 for plain paths).
getImports(WsdlDirname, Definitions) ->
    Imports = getTopLevelElements(Definitions, 'wsdl:tImport'),
    lists:map(fun(Import) ->
                      case WsdlDirname of
                          "http://" ++ _AbsDirname ->
                              WsdlDirname ++ "/" ++ Import#'wsdl:tImport'.location;
                          "file://" ++ _AbsDirname ->
                              WsdlDirname ++ "/" ++ Import#'wsdl:tImport'.location;
                          Fname ->
                              filename:join(Fname, Import#'wsdl:tImport'.location)
                      end
              end, Imports).
%% returns [#operation{}]
%% Derive the flat list of #operation{} records by combining the WSDL's
%% bindings with the previously collected ports.
%% (Fix: two comment lines inside this family had lost their %% markers,
%% which broke compilation; they are restored below.)
getOperations(ParsedWsdl, Ports) ->
    Bindings = getTopLevelElements(ParsedWsdl, 'wsdl:tBinding'),
    getOperationsFromBindings(Bindings, Ports, []).

getOperationsFromBindings([], _Ports, Acc) ->
    Acc;
getOperationsFromBindings([Binding|Tail], Ports, Acc) ->
    getOperationsFromBindings(Tail, Ports,
                              getOperationsFromBinding(Binding, Ports) ++ Acc).

getOperationsFromBinding(#'wsdl:tBinding'{name = BindingName,
                                          type = BindingType,
                                          choice = _Choice,
                                          operation = Operations}, Ports) ->
    %% TODO: get soap info from Choice
    getOperationsFromOperations(Operations, BindingName, BindingType,
                                Operations, Ports, []).

%% Combine one binding operation with every port that uses this binding.
getOperationsFromOperation(BindingName, BindingType, Ports, Name,
                           Action, Operations, Tail, Acc) ->
    %% lookup Binding in Ports, and create a combined result
    Ports2 = searchPorts(BindingName, Ports),
    %% for each port, make an operation record
    CombinedPorts = combinePorts(Ports2, Name, BindingName, Action),
    getOperationsFromOperations(
      Tail, BindingName, BindingType,
      Operations, Ports, CombinedPorts ++ Acc).

getOperationsFromOperations([], _BindingName, _BindingType,
                            _Operations, _Ports, Acc) ->
    Acc;
getOperationsFromOperations([#'wsdl:tBindingOperation'{name = Name,
                                                       choice = Choice} | Tail],
                            BindingName, BindingType, Operations, Ports, Acc) ->
    %% get the SOAP action from Choice (undefined when not a soap:operation)
    case Choice of
        [#'soap:tOperation'{soapAction = Action}] ->
            getOperationsFromOperation(BindingName, BindingType, Ports,
                                       Name, Action, Operations, Tail, Acc);
        _ ->
            getOperationsFromOperation(BindingName, BindingType, Ports,
                                       Name, undefined, Operations, Tail, Acc)
    end.
%% Produce one #operation{} per matching #port{} for a binding operation.
combinePorts(Ports, Name, BindingName, Action) ->
    combinePorts(Ports, Name, BindingName, Action, []).

combinePorts([], _Name, _BindingName, _Action, Acc) ->
    Acc;
combinePorts([#port{service = Service,
                    port = PortName,
                    address = Address} | Tail],
             Name, BindingName, Action, Acc) ->
    combinePorts(Tail, Name, BindingName, Action,
                 [#operation{service = Service,
                             port = PortName, operation = Name,
                             binding = BindingName,
                             address = Address, action = Action} | Acc]).
%% Select all ports whose binding (compared by local name, i.e. ignoring
%% the namespace prefix) equals BindingName.
searchPorts(BindingName, Ports) ->
    searchPorts(BindingName, Ports, []).

searchPorts(_BindingName, [], Acc) ->
    Acc;
searchPorts(BindingName, [Port | Tail], Acc) ->
    PortBinding = erlsom_lib:localName(Port#port.binding),
    case PortBinding of
        BindingName ->
            searchPorts(BindingName, Tail, [Port | Acc]);
        _ ->
            searchPorts(BindingName, Tail, Acc)
    end.
%% copied from yaws/json.erl
%% Classify a term as a character list: 'yes' (latin-1), 'unicode'
%% (contains codepoints above 255), or 'no'.
%% NOTE(review): a negative integer falls into the =< 65000 clause and is
%% classified as unicode — looks like an accepted quirk of the original
%% (copied from yaws/json.erl); confirm before relying on it.
is_string([]) -> yes;
is_string(List) -> is_string(List, non_unicode).

is_string([C|Rest], non_unicode)
  when C >= 0, C =< 255 -> is_string(Rest, non_unicode);
is_string([C|Rest], _) when C =< 65000 -> is_string(Rest, unicode);
is_string([], non_unicode) -> yes;
is_string([], unicode) -> unicode;
is_string(_, _) -> no.
%% Extract the list of embedded schemas from the WSDL's wsdl:types
%% element; [] when there is no types element.
getXsdsFromWsdl(Definitions) ->
    case getTopLevelElements(Definitions, 'wsdl:tTypes') of
        [#'wsdl:tTypes'{choice = Xsds}] -> Xsds;
        [] -> []
    end.
%% Inline XSD for the configuration file read by initModelFile/1.
%% NOTE(review): the xmlns:xs value is empty — presumably the XML Schema
%% namespace URI was lost; confirm against the upstream source.
config_file_xsd() ->
    "<xs:schema xmlns:xs=\"\">"
        "  <xs:element name=\"yaws_soap_config\">"
        "    <xs:complexType>"
        "      <xs:sequence>"
        "        <xs:element name=\"xsd_path\" type=\"xs:string\" minOccurs=\"0\"/>"
        "        <xs:element name=\"user_module\" type=\"xs:string\"/>"
        "        <xs:element name=\"wsdl_file\" type=\"xsd_file\"/>"
        "        <xs:element name=\"add_file\" type=\"xsd_file\" minOccurs=\"0\" maxOccurs=\"unbounded\"/>"
        "      </xs:sequence>"
        "    </xs:complexType>"
        "  </xs:element>"
        "  <xs:complexType name=\"xsd_file\">"
        "    <xs:sequence>"
        "      <xs:element name=\"import_specs\" type=\"import_specs\" minOccurs=\"0\" maxOccurs=\"unbounded\"/>"
        "    </xs:sequence>"
        "    <xs:attribute name=\"name\" type=\"string\" use=\"required\"/>"
        "    <xs:attribute name=\"prefix\" type=\"string\"/>"
        "  </xs:complexType>"
        "  <xs:complexType name=\"import_specs\">"
        "    <xs:attribute name=\"namespace\" type=\"string\" use=\"required\"/>"
        "    <xs:attribute name=\"prefix\" type=\"string\"/>"
        "    <xs:attribute name=\"location\" type=\"string\"/>"
        "  </xs:complexType>"
        "</xs:schema>".
| null | https://raw.githubusercontent.com/scalaris-team/scalaris/feb894d54e642bb3530e709e730156b0ecc1635f/contrib/yaws/src/yaws_soap12_lib.erl | erlang | -------------------------------------------------------------------
-------------------------------------------------------------------
to be used for soap12 calls
For testing...
-define(dbg(X,Y),
error_logger:info_msg("*dbg ~p(~p): " X,
[?MODULE, ?LINE | Y])).
Writes the header file (record definitions) for a WSDL file
For testing only...
--------------------------------------------------------------------
Access functions
--------------------------------------------------------------------
--------------------------------------------------------------------
For Quick deployment
--------------------------------------------------------------------
--------------------------------------------------------------------
Takes http headers
--------------------------------------------------------------------
--------------------------------------------------------------------
With additional specified prefix
--------------------------------------------------------------------
--------------------------------------------------------------------
--------------------------------------------------------------------
rest of record data
--------------------------------------------------------------------
Make a SOAP request (no attachments)
--------------------------------------------------------------------
--------------------------------------------------------------------
Make a SOAP request (with http artifacts)
--------------------------------------------------------------------
--------------------------------------------------------------------
For Quick deployment (with attachments)
--------------------------------------------------------------------
--------------------------------------------------------------------
(with attachments)
--------------------------------------------------------------------
--------------------------------------------------------------------
Make a SOAP request (with attachments)
--------------------------------------------------------------------
find the operation
Add the Soap envelope
Encode the message
in case of HTTP error: return
{error, description}
returns {ok, Header, Body} | {error, Error}
--------------------------------------------------------------------
Parse a WSDL file and return a 'Model'
--------------------------------------------------------------------
PrefixOrOptions can be a property list that contains the options
It is an option list
Add the default prefix at the end - it will only be used
if no other prefix is specified
just the prefix
read (parse) the config file
uncomment to generate the wsdl11soap12.hrl file
parse Wsdl
TODO: add files as required
--------------------------------------------------------------------
Returns {Model, Operations}
--------------------------------------------------------------------
process imports (recursively, so that imports in the imported files are
processed as well).
For the moment, the namespace is ignored on operations etc.
ImportsEnrichedNSRegistry),
empty registry, initializing
--------------------------------------------------------------------
This list is used when a schema inlcudes one of the other schemas.
--------------------------------------------------------------------
compile each of the schemas, and add it to the model.
Returns Model
--------------------------------------------------------------------
--------------------------------------------------------------------
compile each of the schema files, and add it to the model.
Returns Model
--------------------------------------------------------------------
--------------------------------------------------------------------
Get a file from an URL spec.
--------------------------------------------------------------------
added this, since this is what is used in many WSDLs (i.e.: just a filename).
--------------------------------------------------------------------
Make a HTTP Request
--------------------------------------------------------------------
If ibrowse exist in the path then let's use it...
record http_header is not defined??
-record(xsd_file, {atts, name, prefix, import_specs}).
returns [#port{}]
-record(port, {service, port, binding, address}).
non-soap bindings are ignored.
returns [#operation{}]
lookup Binding in Ports, and create a combined result
for each port, make an operation record
copied from yaws/json.erl | Created : 29 Nov 2006 by < >
Author : ( ) .
Desc . : Common SOAP code .
modified ( WdJ , May 2007 ): deal with imports in the WSDL .
modified ( WdJ , August 2007 ): the WSDL can contain more than 1 schema
copied from yaws_soap_lib ( , February 2012 ):
-module(yaws_soap12_lib).
-export([initModel/1, initModel/2,
initModelFile/1,
config_file_xsd/0,
call/3, call/4, call/5, call/6, call/8,
call_attach/4, call_attach/5, call_attach/8,
write_hrl/2, write_hrl/3,
findHeader/2,
parseMessage/2,
makeFault/2,
is_wsdl/1, wsdl_model/1, wsdl_op_service/1,
wsdl_op_port/1, wsdl_op_operation/1,
wsdl_op_binding/1, wsdl_op_address/1,
wsdl_op_action/1, wsdl_operations/1,
get_operation/2
]).
-export([qtest/0]).
-include("../include/yaws_soap.hrl").
-include("../include/soap-envelope.hrl").
-include("../include/wsdl11soap12.hrl").
-define(HTTP_REQ_TIMEOUT, 20000).
-define(dbg(X,Y), true).
-record(yaws_soap_config, {atts, xsd_path, user_module, wsdl_file, add_files}).
-record(xsd_file, {atts, name, prefix, import_specs}).
-record(import_specs, {atts, namespace, prefix, location}).
-record(namespace_spec, {namespace, prefix}).
-record(namespace_registry, {specs = [], counter = 0}).
-define(DefaultPrefix, "p").
-define(CustomPrefix, "cp").
write_hrl(WsdlURL, Output) when is_list(WsdlURL) ->
write_hrl(initModel(WsdlURL), Output);
write_hrl(#wsdl{model = Model}, Output) when is_list(Output) ->
erlsom:write_hrl(Model, Output).
write_hrl(WsdlURL, Output, PrefixOrOptions)
when is_list(WsdlURL),is_list(PrefixOrOptions) ->
write_hrl(initModel(WsdlURL, PrefixOrOptions), Output).
qtest() ->
call("",
"GetWeatherByPlaceName",
["Boston"]).
is_wsdl(Wsdl) when is_record(Wsdl,wsdl) -> true;
is_wsdl(_) -> false.
wsdl_operations(#wsdl{operations = Ops}) -> Ops.
wsdl_model(#wsdl{model = Model}) -> Model.
wsdl_op_service(#operation{service = Service}) -> Service.
wsdl_op_port(#operation{port = Port}) -> Port.
wsdl_op_operation(#operation{operation = Op}) -> Op.
wsdl_op_binding(#operation{binding = Binding}) -> Binding.
wsdl_op_address(#operation{address = Address}) -> Address.
wsdl_op_action(#operation{action = Action}) -> Action.
call(WsdlURL, Operation, ListOfData) when is_list(WsdlURL) ->
Wsdl = initModel(WsdlURL, ?DefaultPrefix),
call(Wsdl, Operation, ListOfData);
call(Wsdl, Operation, ListOfData) when is_record(Wsdl, wsdl) ->
case get_operation(Wsdl#wsdl.operations, Operation) of
{ok, Op} ->
Msg = mk_msg(?DefaultPrefix, Operation, ListOfData),
call(Wsdl, Operation, Op#operation.port,
Op#operation.service, [], Msg);
Else ->
Else
end.
call(WsdlURL, Operation, ListOfData, http_headers, HttpHeaders)
when is_list(WsdlURL) ->
Wsdl = initModel(WsdlURL, ?DefaultPrefix),
call(Wsdl, Operation, ListOfData, http_headers, HttpHeaders);
call(Wsdl, Operation, ListOfData, http_headers, HttpHeaders)
when is_record(Wsdl, wsdl) ->
case get_operation(Wsdl#wsdl.operations, Operation) of
{ok, Op} ->
Msg = mk_msg(?DefaultPrefix, Operation, ListOfData),
call(Wsdl, Operation, Op#operation.port,
Op#operation.service, [], Msg, http_headers, HttpHeaders);
Else ->
Else
end;
call(WsdlURL, Operation, ListOfData, prefix, Prefix) when is_list(WsdlURL) ->
Wsdl = initModel(WsdlURL, Prefix),
call(Wsdl, Operation, ListOfData, prefix, Prefix );
call(Wsdl, Operation, ListOfData, prefix, Prefix) when is_record(Wsdl, wsdl) ->
case get_operation(Wsdl#wsdl.operations, Operation) of
{ok, Op} ->
Msg = mk_msg(Prefix, Operation, ListOfData),
call(Wsdl, Operation, Op#operation.port,
Op#operation.service, [], Msg);
Else ->
Else
end.
Takes the actual records for the Header and Body message .
call(WsdlURL, Operation, Header, Msg) when is_list(WsdlURL) ->
Wsdl = initModel(WsdlURL, ?DefaultPrefix),
call(Wsdl, Operation, Header, Msg);
call(Wsdl, Operation, Header, Msg) when is_record(Wsdl, wsdl) ->
case get_operation(Wsdl#wsdl.operations, Operation) of
{ok, Op} ->
call(Wsdl, Operation, Op#operation.port, Op#operation.service,
Header, Msg);
Else ->
Else
end.
mk_msg(_Prefix, _Operation, ListOfData) ->
get_operation([#operation{operation = X} = Op|_], X) ->
{ok, Op};
get_operation([_|T], Op) ->
get_operation(T, Op);
get_operation([], _Op) ->
{error, "operation not found"}.
call(Wsdl, Operation, Port, Service, Headers, Message) ->
call_attach(Wsdl, Operation, Port, Service, Headers, Message, [], []).
call(Wsdl, Operation, Port, Service, Headers, Message,
http_headers, HttpHeaders) ->
call_attach(Wsdl, Operation, Port, Service, Headers,
Message, [], HttpHeaders);
call(Wsdl, Operation, Port, Service, Headers, Message,
http_details, HttpDetails) ->
call_attach(Wsdl, Operation, Port, Service, Headers,
Message, [], http_details, HttpDetails).
call_attach(WsdlURL, Operation, ListOfData, Attachments)
when is_list(WsdlURL) ->
Wsdl = initModel(WsdlURL, ?DefaultPrefix),
call_attach(Wsdl, Operation, ListOfData, Attachments);
call_attach(Wsdl, Operation, ListOfData, Attachments)
when is_record(Wsdl, wsdl) ->
case get_operation(Wsdl#wsdl.operations, Operation) of
{ok, Op} ->
Msg = mk_msg(?DefaultPrefix, Operation, ListOfData),
call_attach(Wsdl, Operation, Op#operation.port,
Op#operation.service, [], Msg, Attachments, []);
Else ->
Else
end.
Takes the actual records for the Header and Body message
call_attach(WsdlURL, Operation, Header, Msg, Attachments)
when is_list(WsdlURL) ->
Wsdl = initModel(WsdlURL, ?DefaultPrefix),
call_attach(Wsdl, Operation, Header, Msg, Attachments);
call_attach(Wsdl, Operation, Header, Msg, Attachments)
when is_record(Wsdl, wsdl) ->
case get_operation(Wsdl#wsdl.operations, Operation) of
{ok, Op} ->
call_attach(Wsdl, Operation, Op#operation.port,
Op#operation.service,
Header, Msg, Attachments, []);
Else ->
Else
end.
call_attach(Wsdl, Operation, Port, Service, Headers,
Message, Attachments, HttpHeaders) ->
call_attach(Wsdl, Operation, Port, Service, Headers,
Message, Attachments, http_details, [{headers, HttpHeaders}]).
call_attach(#wsdl{operations = Operations, model = Model},
Operation, Port, Service, Headers, Message,
Attachments, http_details, HttpDetails) ->
HttpHeaders = findListValue(headers, HttpDetails),
HttpClientOptions = findListValue(client_options, HttpDetails),
case findOperation(Operation, Port, Service, Operations) of
#operation{address = URL, action=Action, operation = Operation} ->
Envelope = mk_envelope(Message, Headers),
case erlsom:write(Envelope, Model) of
{ok, XmlMessage} ->
{ContentType, Request} =
make_request_body(XmlMessage, Attachments, Action),
?dbg("+++ Request = ~p~n", [Request]),
HttpRes = http_request(URL, Action, Request,
HttpClientOptions, HttpHeaders,
ContentType),
?dbg("+++ HttpRes = ~p~n", [HttpRes]),
case HttpRes of
{ok, _Code, _ReturnHeaders, Body} ->
parseMessage(Body, Model);
Error ->
Error
end;
{error, EncodingError} ->
{error, {encoding_error, EncodingError}}
end;
false ->
{error, {unknown_operation, Operation}}
end.
findListValue(Key, KeyVals) ->
case lists:keyfind(Key, 1, KeyVals) of
{Key, List} ->
List;
false ->
[]
end.
parseMessage(Message, #wsdl{model = Model}) ->
parseMessage(Message, Model);
parseMessage(Message, Model) ->
Parsed = erlsom:scan(Message, Model),
case Parsed of
{ok, #'soap:Envelope'{'Body' = #'soap:Body'{choice = Body},
'Header' = undefined}, _} ->
{ok, undefined, Body};
{ok, #'soap:Envelope'{'Body' = #'soap:Body'{choice = Body},
'Header' = #'soap:Header'{choice = Header}}, _} ->
{ok, Header, Body};
{error, ErrorMessage} ->
{error, {decoding, ErrorMessage}}
end.
findOperation(_Operation, _Port, _Service, []) ->
false;
findOperation(Operation, Port, Service,
[Op = #operation{operation = Operation,
port = Port, service = Service} | _]) ->
Op;
findOperation(Operation, Port, Service, [#operation{} | Tail]) ->
findOperation(Operation, Port, Service, Tail).
mk_envelope(M, H) when is_tuple(M) -> mk_envelope([M], H);
mk_envelope(M, H) when is_tuple(H) -> mk_envelope(M, [H]);
mk_envelope(Messages, []) when is_list(Messages) ->
#'soap:Envelope'{'Body' = #'soap:Body'{choice = Messages}};
mk_envelope(Messages, Headers) when is_list(Messages),is_list(Headers) ->
#'soap:Envelope'{'Body' = #'soap:Body'{choice = Messages},
'Header' = #'soap:Header'{choice = Headers}}.
initModel(WsdlFile) ->
initModel(WsdlFile, ?DefaultPrefix).
for Erlsom , or a String . If it is a string , this is used as the
Erlsom ' prefix ' option ( and the other options are left unspecified ) .
initModel(WsdlFile, PrefixOrOptions) ->
Options = case is_string(PrefixOrOptions) of
no ->
PrefixOrOptions ++ [{prefix, ?DefaultPrefix}];
_ ->
[{prefix, PrefixOrOptions}]
end,
PrivDir = priv_dir(),
initModel2(WsdlFile, Options, PrivDir, undefined, undefined).
initModelFile(ConfigFile) ->
{ok, ConfigSchema} = erlsom:compile_xsd(config_file_xsd()),
{ok, Config, _} = erlsom:scan_file(ConfigFile, ConfigSchema),
#yaws_soap_config{xsd_path = XsdPath,
wsdl_file = Wsdl,
add_files = AddFiles} = Config,
#xsd_file{name = WsdlFile, prefix = Prefix, import_specs = Import} = Wsdl,
initModel2(WsdlFile, [{prefix, Prefix}], XsdPath, Import, AddFiles).
priv_dir() ->
yaws:get_priv_dir().
initModel2(WsdlFile, ErlsomOptions, Path, Import, AddFiles) ->
WsdlName = filename:join([Path, "wsdl.xsd"]),
IncludeWsdl = {"/", "wsdl", WsdlName},
{ok, WsdlModel} = erlsom:compile_xsd_file(
filename:join([Path, "wsdl11soap12.xsd"]),
[{prefix, "soap"},
{include_files, [IncludeWsdl]}]),
erlsom : write_hrl(WsdlModel , " /home / kalski / test / wsdl11soap12.hrl " ) ,
add the xsd model ( since xsd is also used in the wsdl )
WsdlModel2 = erlsom:add_xsd_model(WsdlModel),
Options = ErlsomOptions ++ makeOptions(Import),
{Model, Operations} = parseWsdls([WsdlFile], WsdlModel2,
Options, {undefined, []}),
now compile , and add Model
{ok, EnvelopeModel} =
erlsom:compile_xsd_file(
filename:join([Path, "soap-envelope.xsd"]),
[{prefix, "soap"},
{include_files, [{"", undefined,
filename:join([Path, "xml.xsd"])}]}]),
SoapModel = erlsom:add_model(EnvelopeModel, Model),
uncomment to generate the soap-envelope.hrl file
erlsom : write_hrl(EnvelopeModel , " /home / kalski / test / soap - envelope.hrl " ) ,
SoapModel2 = addModels(AddFiles, SoapModel),
#wsdl{operations = Operations, model = SoapModel2}.
Parse a list of WSDLs and import ( recursively )
parseWsdls(WsdlFiles, WsdlModel, Options, Acc) ->
parseWsdls(WsdlFiles, WsdlModel, Options, Acc, #namespace_registry{}).
parseWsdls([], _WsdlModel, _Options, Acc, _NSRegistry) ->
Acc;
parseWsdls([WsdlFile | Tail], WsdlModel, Options,
{AccModel, AccOperations}, NSRegistry) ->
WsdlFileNoSpaces = rmsp(WsdlFile),
{ok, WsdlFileContent} = get_url_file(WsdlFileNoSpaces),
{ok, ParsedWsdl, _} = erlsom:scan(WsdlFileContent, WsdlModel),
WsdlTargetNameSpace = getTargetNamespaceFromWsdl(ParsedWsdl),
{Prefix, PrefixlessOptions} = remove_prefix_option(Options),
TNSEnrichedNSRegistry = extend_namespace_registry(WsdlTargetNameSpace,
Prefix, NSRegistry),
get the xsd elements from this model , and hand it over to erlsom_compile .
Xsds = getXsdsFromWsdl(ParsedWsdl),
Now we need to build a list : [ { Namespace , Xsd , Prefix } , ... ] for
all the Xsds in the WSDL .
This list is used when a schema includes one of the other schemas .
The AXIS java2wsdl tool generates wsdls that depend on this feature .
{ImportsEnrichedNSRegistry, ImportList} = makeImportList(
Xsds,
TNSEnrichedNSRegistry, []),
Model2 = addSchemas(Xsds, AccModel, PrefixlessOptions, ImportList),
Ports = getPorts(ParsedWsdl),
Operations = getOperations(ParsedWsdl, Ports),
Imports = getImports(filename:dirname(WsdlFileNoSpaces), ParsedWsdl),
use Options rather than PrefixlessOptions because imports come in
the wsdl targetNamespace
Model3 = addSchemaFiles(Imports, Model2, Options, []),
Acc2 = {Model3, Operations ++ AccOperations},
this makes it a bit easier to deal with imported 's .
TODO uncomment if imports can be WSDL
= parseWsdls(Imports , WsdlModel , Options , Acc2 ,
parseWsdls(Tail, WsdlModel, PrefixlessOptions, Acc2,
ImportsEnrichedNSRegistry).
remove_prefix_option(Options) ->
case lists:keytake(prefix, 1, Options) of
{value, {prefix, Prefix}, NewOptions} ->
{Prefix, NewOptions};
false ->
{undefined, Options}
end.
extend_namespace_registry(WsdlTargetNameSpace, undefined,
#namespace_registry{specs = []} = NSRegistry) ->
{NewCounter, NewPrefix} = create_unique_prefix(NSRegistry),
NSRegistry#namespace_registry{
specs = [#namespace_spec{namespace = WsdlTargetNameSpace,
prefix = NewPrefix}], counter = NewCounter};
extend_namespace_registry(WsdlTargetNameSpace, Prefix,
#namespace_registry{specs = []} = NSRegistry) ->
NSRegistry#namespace_registry{
specs = [#namespace_spec{namespace = WsdlTargetNameSpace,
prefix = Prefix}]};
extend_namespace_registry(WsdlTargetNameSpace, _Prefix,
#namespace_registry{specs = Specs} = NSRegistry) ->
case lists:keyfind(WsdlTargetNameSpace, #namespace_spec.namespace, Specs) of
#namespace_spec{} ->
NSRegistry;
false ->
{NewCounter, NewPrefix} = create_unique_prefix(NSRegistry),
NSRegistry#namespace_registry{
specs = [#namespace_spec{namespace = WsdlTargetNameSpace,
prefix = NewPrefix}|Specs],
counter = NewCounter}
end.
create_unique_prefix(#namespace_registry{specs = Specs, counter = Counter} =
NSRegistry) ->
NewCounter = Counter+1,
NewPrefix = ?CustomPrefix ++ integer_to_list(NewCounter),
case lists:keyfind(NewPrefix, #namespace_spec.prefix, Specs) of
#namespace_spec{} ->
create_unique_prefix(NSRegistry#namespace_registry{
counter = Counter+1});
false ->
{NewCounter, NewPrefix}
end.
build a list : [ { Namespace , Xsd } , ... ] for all the Xsds in the WSDL .
The AXIS java2wsdl tool generates wsdls that depend on this feature .
makeImportList([], NSRegistry, Acc) ->
{NSRegistry, Acc};
makeImportList([ Xsd | Tail], NSRegistry, Acc) ->
XsdNS = erlsom_lib:getTargetNamespaceFromXsd(Xsd),
NewNSRegistry = extend_namespace_registry(XsdNS, undefined, NSRegistry),
#namespace_spec{prefix = Prefix} =
lists:keyfind(XsdNS, #namespace_spec.namespace,
NewNSRegistry#namespace_registry.specs),
makeImportList(Tail, NewNSRegistry, [{XsdNS, Prefix, Xsd} | Acc]).
getTargetNamespaceFromWsdl(#'wsdl:tDefinitions'{targetNamespace = TNS}) ->
TNS.
( TODO : using the same prefix for all XSDS makes no sense )
addSchemas([], AccModel, _PrefixlessOptions, _ImportList) ->
AccModel;
addSchemas([Xsd| Tail], AccModel, PrefixlessOptions, ImportList) ->
Model2 = case Xsd of
undefined ->
AccModel;
_ ->
{_, Prefix, _} =
lists:keyfind(
erlsom_lib:getTargetNamespaceFromXsd(Xsd),
1, ImportList),
NewOptions = [{prefix, Prefix}|PrefixlessOptions],
{ok, Model} =
erlsom_compile:compile_parsed_xsd(
Xsd,
[{include_files, ImportList} |NewOptions]),
case AccModel of
undefined -> Model;
_ -> erlsom:add_model(AccModel, Model)
end
end,
addSchemas(Tail, Model2, PrefixlessOptions, ImportList).
( TODO : using the same prefix for all XSD files makes no sense )
addSchemaFiles([], AccModel, _Options, _ImportList) ->
AccModel;
addSchemaFiles([Xsd| Tail], AccModel, Options, ImportList) ->
{ok, Model} =
erlsom:compile_xsd_file(get_file_with_path(Xsd),
[{include_files, ImportList} |Options]),
Model2 = case AccModel of
undefined -> Model;
_ -> erlsom:add_model(AccModel, Model)
end,
addSchemaFiles(Tail, Model2, Options, ImportList).
get_url_file("http://"++_ = URL) ->
case httpc:request(URL) of
{ok,{{_HTTP,200,_OK}, _Headers, Body}} ->
{ok, Body};
{ok,{{_HTTP,RC,Emsg}, _Headers, _Body}} ->
error_logger:error_msg("~p: http-request got: ~p~n",
[?MODULE, {RC, Emsg}]),
{error, "failed to retrieve: "++URL};
{error, Reason} ->
error_logger:error_msg("~p: http-request failed: ~p~n",
[?MODULE, Reason]),
{error, "failed to retrieve: "++URL}
end;
get_url_file("file://"++Fname) ->
{ok, Bin} = file:read_file(Fname),
{ok, binary_to_list(Bin)};
get_url_file(Fname) ->
{ok, Bin} = file:read_file(Fname),
{ok, binary_to_list(Bin)}.
http_request(URL, Action, Request, Options, Headers, ContentType) ->
case code:ensure_loaded(ibrowse) of
{module, ibrowse} ->
ibrowse_request(URL, Action, Request, Options,
Headers, ContentType);
_ ->
... otherwise , let 's use the OTP http client .
inets_request(URL, Action, Request, Options,
Headers, ContentType)
end.
inets_request(URL, Action, Request, Options, Headers, ContentType) ->
case Action of
undefined ->
NHeaders = Headers;
_ ->
NHeaders = [{"SOAPAction", Action} | Headers]
end,
NewHeaders = case proplists:get_value("Host", NHeaders) of
undefined ->
[{"Host", "localhost:8800"}|NHeaders];
_ ->
NHeaders
end,
NewOptions = [{cookies, enabled}|Options],
httpc:set_options(NewOptions),
case httpc:request(post,
{URL,NewHeaders,
ContentType,
Request},
[{timeout,?HTTP_REQ_TIMEOUT}],
[{sync, true}, {full_result, true},
{body_format, string}]) of
{ok,{{_HTTP,200,_OK},ResponseHeaders,ResponseBody}} ->
{ok, 200, ResponseHeaders, ResponseBody};
{ok,{{_HTTP,500,_Descr},ResponseHeaders,ResponseBody}} ->
{ok, 500, ResponseHeaders, ResponseBody};
{ok,{{_HTTP,ErrorCode,_Descr},ResponseHeaders,ResponseBody}} ->
{ok, ErrorCode, ResponseHeaders, ResponseBody};
Other ->
Other
end.
ibrowse_request(URL, Action, Request, Options, Headers, ContentType) ->
case start_ibrowse() of
ok ->
NewHeaders = [{"Content-Type", ContentType} |
case Action of
undefined ->
Headers;
_ ->
[{"SOAPAction", Action} | Headers]
end],
IbrowseF = case lists:keyfind(ibrowse_timeout, 1, Options) of
{_, Timeout} ->
fun() ->
ibrowse:send_req(URL, NewHeaders, post,
Request, Options, Timeout)
end;
false ->
fun() ->
ibrowse:send_req(URL, NewHeaders, post,
Request, Options)
end
end,
case IbrowseF() of
{ok, Status, ResponseHeaders, ResponseBody} ->
{ok, list_to_integer(Status), ResponseHeaders,
ResponseBody};
{error, Reason} ->
{error, Reason}
end;
error ->
{error, "could not start ibrowse"}
end.
start_ibrowse() ->
case ibrowse:start() of
{ok, _} -> ok;
{error, {already_started, _}} -> ok;
_ -> error
end.
rmsp(Str) -> string:strip(Str, left).
make_request_body(Content, [], Operation) ->
{"application/soap+xml;charset=UTF-8;action=\"" ++ Operation ++ "\"",
"<?xml version=\"1.0\" encoding=\"utf-8\"?>"++ Content};
make_request_body(Content, AttachedFiles, _Operation) ->
{"application/dime",
yaws_dime:encode("<?xml version=\"1.0\" encoding=\"utf-8\"?>" ++ Content,
AttachedFiles)}.
makeFault(FaultCode, FaultString) ->
try
"<SOAP-ENV:Envelope xmlns:SOAP-ENV=\"/\">"
"<SOAP-ENV:Body>"
"<SOAP-ENV:Fault>"
"<faultcode>SOAP-ENV:" ++ FaultCode ++ "</faultcode>" ++
"<faultstring>" ++ FaultString ++ "</faultstring>" ++
"</SOAP-ENV:Fault>"
"</SOAP-ENV:Body>"
"</SOAP-ENV:Envelope>"
catch
_:_ ->
"<SOAP-ENV:Envelope xmlns:SOAP-ENV=\"/\">"
"<SOAP-ENV:Body>"
"<SOAP-ENV:Fault>"
"<faultcode>SOAP-ENV:Server</faultcode>"
"<faultstring>Server error</faultstring>"
"</SOAP-ENV:Fault>"
"</SOAP-ENV:Body>"
"</SOAP-ENV:Envelope>"
end.
findHeader(Label, Headers) ->
findHeader0(yaws:to_lower(Label), Headers).
findHeader0(_Label, []) ->
undefined;
findHeader0(Label, [{_,_,Hdr,_,Val}|T]) ->
case {Label, yaws:to_lower(Hdr)} of
{X,X} -> Val;
_ -> findHeader0(Label, T)
end;
findHeader0(_Label, undefined) ->
undefined.
makeOptions(undefined) ->
[];
makeOptions(Import) ->
lists:map(fun makeOption/1, Import).
-record(import_specs , , namespace , prefix , location } ) .
makeOption(#import_specs{namespace = Ns, prefix = Pf, location = Lc}) ->
{Ns, Pf, Lc}.
addModels(undefined, Model) ->
Model;
addModels(Import, Model) ->
lists:foldl(fun addModel/2, Model, Import).
addModel(undefined, Acc) ->
Acc;
addModel(#xsd_file{name = XsdFile, prefix = Prefix, import_specs = Import},
Acc) ->
Options = makeOptions(Import),
{ok, Model2} = erlsom:add_xsd_file(XsdFile, [{prefix, Prefix}|Options],Acc),
Model2.
getPorts(ParsedWsdl) ->
Services = getTopLevelElements(ParsedWsdl, 'wsdl:tService'),
getPortsFromServices(Services, []).
getPortsFromServices([], Acc) ->
Acc;
getPortsFromServices([Service|Tail], Acc) ->
getPortsFromServices(Tail, getPortsFromService(Service) ++ Acc).
getPortsFromService(#'wsdl:tService'{name = Name, port = Ports}) ->
getPortsInfo(Ports, Name, []).
getPortsInfo([], _Name, Acc) ->
Acc;
getPortsInfo([#'wsdl:tPort'{name = Name,
binding = Binding,
choice =
[#'soap:tAddress'{location = URL}]} | Tail],
ServiceName, Acc) ->
getPortsInfo(Tail, ServiceName, [#port{service = ServiceName,
port = Name,
binding = Binding,
address = URL}|Acc]);
getPortsInfo([#'wsdl:tPort'{} | Tail], ServiceName, Acc) ->
getPortsInfo(Tail, ServiceName, Acc).
getTopLevelElements(#'wsdl:tDefinitions'{choice1 = TLElements}, Type) ->
getTopLevelElements(TLElements, Type, []).
getTopLevelElements([], _Type, Acc) ->
Acc;
getTopLevelElements([#'wsdl:anyTopLevelOptionalElement'{choice = Tuple}| Tail],
Type, Acc) ->
case element(1, Tuple) of
Type -> getTopLevelElements(Tail, Type, [Tuple|Acc]);
_ -> getTopLevelElements(Tail, Type, Acc)
end.
get_file_with_path(Url) ->
case Url of
"http://" ++ _ ->
undefined;
"file://" ++ FName ->
FName;
_ ->
Url
end.
getImports(WsdlDirname, Definitions) ->
Imports = getTopLevelElements(Definitions, 'wsdl:tImport'),
lists:map(fun(Import) ->
case WsdlDirname of
"http://" ++ _AbsDirname ->
WsdlDirname ++ "/" ++ Import#'wsdl:tImport'.location;
"file://" ++ _AbsDirname ->
WsdlDirname ++ "/" ++ Import#'wsdl:tImport'.location;
Fname ->
filename:join(Fname, Import#'wsdl:tImport'.location)
end
end, Imports).
getOperations(ParsedWsdl, Ports) ->
Bindings = getTopLevelElements(ParsedWsdl, 'wsdl:tBinding'),
getOperationsFromBindings(Bindings, Ports, []).
getOperationsFromBindings([], _Ports, Acc) ->
Acc;
getOperationsFromBindings([Binding|Tail], Ports, Acc) ->
getOperationsFromBindings(Tail, Ports,
getOperationsFromBinding(Binding, Ports) ++ Acc).
getOperationsFromBinding(#'wsdl:tBinding'{name = BindingName,
type = BindingType,
choice = _Choice,
operation = Operations}, Ports) ->
TODO : get soap info from Choice
getOperationsFromOperations(Operations, BindingName, BindingType,
Operations, Ports, []).
getOperationsFromOperation(BindingName, BindingType, Ports, Name,
Action, Operations, Tail, Acc) ->
Ports2 = searchPorts(BindingName, Ports),
CombinedPorts = combinePorts(Ports2, Name, BindingName, Action),
getOperationsFromOperations(
Tail, BindingName, BindingType,
Operations, Ports, CombinedPorts ++ Acc).
getOperationsFromOperations([], _BindingName, _BindingType,
_Operations, _Ports, Acc) ->
Acc;
getOperationsFromOperations([#'wsdl:tBindingOperation'{name = Name,
choice = Choice} | Tail],
BindingName, BindingType, Operations, Ports, Acc) ->
get SOAP action from Choice ,
case Choice of
[#'soap:tOperation'{soapAction = Action}] ->
getOperationsFromOperation(BindingName, BindingType, Ports,
Name, Action, Operations, Tail, Acc);
_ ->
getOperationsFromOperation(BindingName, BindingType, Ports,
Name, undefined, Operations, Tail, Acc)
end.
combinePorts(Ports, Name, BindingName, Action) ->
combinePorts(Ports, Name, BindingName, Action, []).
combinePorts([], _Name, _BindingName, _Action, Acc) ->
Acc;
combinePorts([#port{service = Service,
port = PortName,
address = Address} | Tail],
Name, BindingName, Action, Acc) ->
combinePorts(Tail, Name, BindingName, Action,
[#operation{service = Service,
port = PortName, operation = Name,
binding = BindingName,
address = Address, action = Action} | Acc]).
searchPorts(BindingName, Ports) ->
searchPorts(BindingName, Ports, []).
searchPorts(_BindingName, [], Acc) ->
Acc;
searchPorts(BindingName, [Port | Tail], Acc) ->
PortBinding = erlsom_lib:localName(Port#port.binding),
case PortBinding of
BindingName ->
searchPorts(BindingName, Tail, [Port | Acc]);
_ ->
searchPorts(BindingName, Tail, Acc)
end.
is_string([]) -> yes;
is_string(List) -> is_string(List, non_unicode).
is_string([C|Rest], non_unicode)
when C >= 0, C =< 255 -> is_string(Rest, non_unicode);
is_string([C|Rest], _) when C =< 65000 -> is_string(Rest, unicode);
is_string([], non_unicode) -> yes;
is_string([], unicode) -> unicode;
is_string(_, _) -> no.
getXsdsFromWsdl(Definitions) ->
case getTopLevelElements(Definitions, 'wsdl:tTypes') of
[#'wsdl:tTypes'{choice = Xsds}] -> Xsds;
[] -> []
end.
config_file_xsd() ->
"<xs:schema xmlns:xs=\"\">"
" <xs:element name=\"yaws_soap_config\">"
" <xs:complexType>"
" <xs:sequence>"
" <xs:element name=\"xsd_path\" type=\"xs:string\" minOccurs=\"0\"/>"
" <xs:element name=\"user_module\" type=\"xs:string\"/>"
" <xs:element name=\"wsdl_file\" type=\"xsd_file\"/>"
" <xs:element name=\"add_file\" type=\"xsd_file\" minOccurs=\"0\" maxOccurs=\"unbounded\"/>"
" </xs:sequence>"
" </xs:complexType>"
" </xs:element>"
" <xs:complexType name=\"xsd_file\">"
" <xs:sequence>"
" <xs:element name=\"import_specs\" type=\"import_specs\" minOccurs=\"0\" maxOccurs=\"unbounded\"/>"
" </xs:sequence>"
" <xs:attribute name=\"name\" type=\"string\" use=\"required\"/>"
" <xs:attribute name=\"prefix\" type=\"string\"/>"
" </xs:complexType>"
" <xs:complexType name=\"import_specs\">"
" <xs:attribute name=\"namespace\" type=\"string\" use=\"required\"/>"
" <xs:attribute name=\"prefix\" type=\"string\"/>"
" <xs:attribute name=\"location\" type=\"string\"/>"
" </xs:complexType>"
"</xs:schema>".
|
b576ee9e16d35768da50f570f9ae1fd7c37e8e35f136487e70855667064a57ea | helvm/helma | FileExtra.hs | module HelVM.HelMA.Automata.FALSE.FileExtra where
import HelVM.HelMA.Automata.FileExtra
import HelVM.HelMA.Automaton.API.IOTypes
readFFile :: FilePath -> IO Source
readFFile = readSourceFile . buildAbsoluteFFileName
buildAbsoluteFFileName :: FilePath -> FilePath
buildAbsoluteFFileName = buildAbsoluteLangFileName lang
buildAbsoluteFIlFileName :: FilePath -> FilePath
buildAbsoluteFIlFileName = buildAbsoluteIlFileName lang
buildAbsoluteFOutFileName :: FilePath -> FilePath
buildAbsoluteFOutFileName = buildAbsoluteOutFileName lang
buildAbsoluteFLogFileName :: FilePath -> FilePath
buildAbsoluteFLogFileName = buildAbsoluteLogFileName lang
lang :: FilePath
lang = "f"
| null | https://raw.githubusercontent.com/helvm/helma/bd325a18d0a67c277ca38b608fd395ecb79a1dab/hs/test/HelVM/HelMA/Automata/FALSE/FileExtra.hs | haskell | module HelVM.HelMA.Automata.FALSE.FileExtra where
import HelVM.HelMA.Automata.FileExtra
import HelVM.HelMA.Automaton.API.IOTypes
readFFile :: FilePath -> IO Source
readFFile = readSourceFile . buildAbsoluteFFileName
buildAbsoluteFFileName :: FilePath -> FilePath
buildAbsoluteFFileName = buildAbsoluteLangFileName lang
buildAbsoluteFIlFileName :: FilePath -> FilePath
buildAbsoluteFIlFileName = buildAbsoluteIlFileName lang
buildAbsoluteFOutFileName :: FilePath -> FilePath
buildAbsoluteFOutFileName = buildAbsoluteOutFileName lang
buildAbsoluteFLogFileName :: FilePath -> FilePath
buildAbsoluteFLogFileName = buildAbsoluteLogFileName lang
lang :: FilePath
lang = "f"
| |
559d760407faa02599bfe69e4b919d981618e9231379e6309521005b099e9ae3 | mzp/bs-lwt | lwt_simple_top.ml | Lightweight thread library for OCaml
*
* Module Lwt_simple_top
* Copyright ( C ) 2009
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation , with linking exceptions ;
* either version 2.1 of the License , or ( at your option ) any later
* version . See COPYING file for details .
*
* This program is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA
* 02111 - 1307 , USA .
*
* Module Lwt_simple_top
* Copyright (C) 2009 Jérémie Dimino
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, with linking exceptions;
* either version 2.1 of the License, or (at your option) any later
* version. See COPYING file for details.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
* 02111-1307, USA.
*)
(* Integration with the toplevel for people who do not use the
enhanced toplevel (the utop project). This module is deprecated. *)
[@@@ocaml.deprecated
" Use utop. See
"]
open Lwt.Infix
let read_input_non_interactive prompt buffer len =
let rec loop i =
if i = len then
Lwt.return (i, false)
else
Lwt_io.read_char_opt Lwt_io.stdin >>= function
| Some c ->
Bytes.set buffer i c;
if c = '\n' then
Lwt.return (i + 1, false)
else
loop (i + 1)
| None ->
Lwt.return (i, true)
in
Lwt_main.run (Lwt_io.write Lwt_io.stdout prompt >>= fun () -> loop 0)
let () =
Toploop.read_interactive_input := read_input_non_interactive
| null | https://raw.githubusercontent.com/mzp/bs-lwt/f37a3c47d038f4efcd65912c41fab95d1e6633ce/lwt/src/simple_top/lwt_simple_top.ml | ocaml | Integration with the toplevel for people who do not use the
enhanced toplevel (the utop project). This module is deprecated. | Lightweight thread library for OCaml
*
* Module Lwt_simple_top
* Copyright ( C ) 2009
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation , with linking exceptions ;
* either version 2.1 of the License , or ( at your option ) any later
* version . See COPYING file for details .
*
* This program is distributed in the hope that it will be useful , but
* WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA
* 02111 - 1307 , USA .
*
* Module Lwt_simple_top
* Copyright (C) 2009 Jérémie Dimino
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License as
* published by the Free Software Foundation, with linking exceptions;
* either version 2.1 of the License, or (at your option) any later
* version. See COPYING file for details.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA
* 02111-1307, USA.
*)
[@@@ocaml.deprecated
" Use utop. See
"]
open Lwt.Infix
let read_input_non_interactive prompt buffer len =
let rec loop i =
if i = len then
Lwt.return (i, false)
else
Lwt_io.read_char_opt Lwt_io.stdin >>= function
| Some c ->
Bytes.set buffer i c;
if c = '\n' then
Lwt.return (i + 1, false)
else
loop (i + 1)
| None ->
Lwt.return (i, true)
in
Lwt_main.run (Lwt_io.write Lwt_io.stdout prompt >>= fun () -> loop 0)
(* Install the non-interactive reader as the OCaml toplevel's input
   function, replacing the default interactive prompt loop. *)
let () =
  Toploop.read_interactive_input := read_input_non_interactive
|
c012381b1d13de828c309a8b062f3b2448959cf82e8f65c442501c19fc8489b7 | nasa/Common-Metadata-Repository | granule.clj | (ns cmr.umm-spec.umm-g.granule
"Contains functions for parsing UMM-G JSON into umm-lib granule model
and generating UMM-G JSON from umm-lib granule model."
(:require
[cmr.umm-spec.umm-g.additional-attribute :as aa]
[cmr.umm-spec.umm-g.data-granule :as data-granule]
[cmr.umm-spec.umm-g.measured-parameters :as measured-parameters]
[cmr.umm-spec.umm-g.orbit-calculated-spatial-domain :as ocsd]
[cmr.umm-spec.umm-g.platform :as platform]
[cmr.umm-spec.umm-g.projects :as projects]
[cmr.umm-spec.umm-g.related-url :as related-url]
[cmr.umm-spec.umm-g.spatial :as spatial]
[cmr.umm-spec.umm-g.tiling-system :as tiling-system]
[cmr.umm.umm-collection :as umm-c]
[cmr.umm.umm-granule :as g])
(:import cmr.umm.umm_granule.UmmGranule))
(def ^:private umm-g-metadata-specification
  "Defines the current UMM-G MetadataSpecification"
  ;; NOTE(review): :URL appears to have been stripped to an empty string in
  ;; this copy — confirm against the canonical schema URL.  The :Version
  ;; pin must track the UMM-G JSON schema version this namespace emits.
  {:URL ""
   :Name "UMM-G"
   :Version "1.6.4"})
(defn- get-date-by-type
  "Returns the :Date of the first provider-date entry whose :Type equals
  date-type (skipping matching entries whose :Date is nil); nil if none."
  [provider-dates date-type]
  (some (fn [{:keys [Type Date]}]
          (when (= date-type Type)
            Date))
        provider-dates))
(defn- umm-g->DataProviderTimestamps
  "Returns a umm-lib DataProviderTimestamps record built from the
  :ProviderDates of parsed UMM-G JSON."
  [umm-g]
  (let [provider-dates (:ProviderDates umm-g)
        ;; umm-lib Granule model does not have create-time, so ignore it for now.
        ; create-time (get-date-by-type provider-dates "Create")
        insert-time (get-date-by-type provider-dates "Insert")
        update-time (get-date-by-type provider-dates "Update")
        delete-time (get-date-by-type provider-dates "Delete")]
    (g/map->DataProviderTimestamps
      {;;:create-time create-time
       :insert-time insert-time
       :update-time update-time
       :delete-time delete-time})))
(defn- umm-g->CollectionRef
  "Builds a umm-lib CollectionRef record from the :CollectionReference
  element of parsed UMM-G JSON; absent keys yield nil fields."
  [umm-g-json]
  (let [{:keys [EntryTitle ShortName Version]} (:CollectionReference umm-g-json)]
    (g/map->CollectionRef {:entry-title EntryTitle
                           :short-name ShortName
                           :version-id Version})))
(defn umm-g->Temporal
  "Returns a umm-lib GranuleTemporal record from the :TemporalExtent of
  parsed UMM-G JSON, or nil when no temporal extent is present.  A granule
  carries either a range or a single date-time; both slots are populated
  from whichever keys exist."
  [umm-g-json]
  (when-let [temporal (:TemporalExtent umm-g-json)]
    (let [range-date-time (when-let [range-date-time (:RangeDateTime temporal)]
                            (umm-c/map->RangeDateTime
                              {:beginning-date-time (:BeginningDateTime range-date-time)
                               :ending-date-time (:EndingDateTime range-date-time)}))
          single-date-time (:SingleDateTime temporal)]
      (g/map->GranuleTemporal
        {:range-date-time range-date-time
         :single-date-time single-date-time}))))
(defn umm-g->PGEVersionClass
  "Builds a umm-lib PGEVersionClass record from a UMM-G JSON
  PGEVersionClass element; returns nil when the element is absent."
  [pge-version-class]
  (when pge-version-class
    (g/map->PGEVersionClass {:pge-name (:PGEName pge-version-class)
                             :pge-version (:PGEVersion pge-version-class)})))
(defn PGEVersionClass->umm-g-pge-version-class
  "Converts a umm-lib PGEVersionClass record into its UMM-G JSON map form;
  returns nil when given nil."
  [pge-version-class]
  (when pge-version-class
    {:PGEName (:pge-name pge-version-class)
     :PGEVersion (:pge-version pge-version-class)}))
(defn umm-g->Granule
  "Returns a umm-lib Granule record from parsed UMM-G JSON.  Each field is
  delegated to the matching per-section converter namespace; converters
  tolerate absent sections and yield nil fields."
  [umm-g-json]
  (let [coll-ref (umm-g->CollectionRef umm-g-json)]
    (g/map->UmmGranule
     {:granule-ur (:GranuleUR umm-g-json)
      :data-provider-timestamps (umm-g->DataProviderTimestamps umm-g-json)
      :collection-ref coll-ref
      :data-granule (data-granule/umm-g-data-granule->DataGranule (:DataGranule umm-g-json))
      :pge-version-class (umm-g->PGEVersionClass (:PGEVersionClass umm-g-json))
      :temporal (umm-g->Temporal umm-g-json)
      :orbit-calculated-spatial-domains (ocsd/umm-g-orbit-calculated-spatial-domains->OrbitCalculatedSpatialDomains
                                         (:OrbitCalculatedSpatialDomains umm-g-json))
      :platform-refs (platform/umm-g-platforms->PlatformRefs (:Platforms umm-g-json))
      :project-refs (projects/umm-g-projects->ProjectRefs (:Projects umm-g-json))
      :access-value (get-in umm-g-json [:AccessConstraints :Value])
      :cloud-cover (:CloudCover umm-g-json)
      :two-d-coordinate-system (tiling-system/umm-g-tiling-identification-system->TwoDCoordinateSystem
                                (:TilingIdentificationSystem umm-g-json))
      ;; spatial converter receives the whole document, not a sub-key
      :spatial-coverage (spatial/umm-g-spatial-extent->SpatialCoverage umm-g-json)
      :related-urls (related-url/umm-g-related-urls->RelatedURLs (:RelatedUrls umm-g-json))
      :measured-parameters (measured-parameters/umm-g-measured-parameters->MeasuredParameters
                            (:MeasuredParameters umm-g-json))
      :product-specific-attributes (aa/umm-g-additional-attributes->ProductSpecificAttributeRefs
                                    (:AdditionalAttributes umm-g-json))})))
(defn Granule->umm-g
  "Returns UMM-G JSON from a umm-lib Granule"
  [granule]
  ;; NOTE(review): `orbit` and `entry-id` are destructured below but never
  ;; referenced in the emitted map — confirm whether they should be dropped
  ;; or were meant to be serialized.
  (let [{:keys [granule-ur data-granule pge-version-class access-value temporal orbit-calculated-spatial-domains
                platform-refs project-refs cloud-cover related-urls product-specific-attributes
                spatial-coverage orbit two-d-coordinate-system measured-parameters
                collection-ref data-provider-timestamps]} granule
        {:keys [entry-title short-name version-id entry-id]} collection-ref
        {:keys [insert-time update-time delete-time]} data-provider-timestamps
        ;; each provider timestamp becomes a {:Date :Type} entry, or nil
        ;; when absent (nils are removed when :ProviderDates is assembled)
        insert-time (when insert-time
                      {:Date (str insert-time)
                       :Type "Insert"})
        update-time (when update-time
                      {:Date (str update-time)
                       :Type "Update"})
        delete-time (when delete-time
                      {:Date (str delete-time)
                       :Type "Delete"})]
    {:GranuleUR granule-ur
     :ProviderDates (vec (remove nil? [insert-time update-time delete-time]))
     ;; a collection is referenced either by entry-title alone, or by the
     ;; short-name/version pair — never both
     :CollectionReference (if (some? entry-title)
                            {:EntryTitle entry-title}
                            {:ShortName short-name
                             :Version version-id})
     ;; single date-time wins over a range when both are present
     :TemporalExtent (if-let [single-date-time (:single-date-time temporal)]
                       {:SingleDateTime (str single-date-time)}
                       (when-let [range-date-time (:range-date-time temporal)]
                         {:RangeDateTime
                           {:BeginningDateTime (str (:beginning-date-time range-date-time))
                            :EndingDateTime (when-let [ending-date-time (:ending-date-time range-date-time)]
                                              (str ending-date-time))}}))
     :SpatialExtent (spatial/SpatialCoverage->umm-g-spatial-extent spatial-coverage)
     :OrbitCalculatedSpatialDomains (ocsd/OrbitCalculatedSpatialDomains->umm-g-orbit-calculated-spatial-domains
                                     orbit-calculated-spatial-domains)
     :Platforms (platform/PlatformRefs->umm-g-platforms platform-refs)
     :CloudCover cloud-cover
     :AccessConstraints (when access-value {:Value access-value})
     :Projects (projects/ProjectRefs->umm-g-projects project-refs)
     :DataGranule (data-granule/DataGranule->umm-g-data-granule data-granule)
     :PGEVersionClass (PGEVersionClass->umm-g-pge-version-class pge-version-class)
     :TilingIdentificationSystem (tiling-system/TwoDCoordinateSystem->umm-g-tiling-identification-system
                                  two-d-coordinate-system)
     :AdditionalAttributes (aa/ProductSpecificAttributeRefs->umm-g-additional-attributes
                            product-specific-attributes)
     :MeasuredParameters (measured-parameters/MeasuredParameters->umm-g-measured-parameters
                          measured-parameters)
     :RelatedUrls (related-url/RelatedURLs->umm-g-related-urls related-urls)
     :MetadataSpecification umm-g-metadata-specification}))
| null | https://raw.githubusercontent.com/nasa/Common-Metadata-Repository/aa5fbda43920dd9884c72282b5d69fd49a1d2bae/umm-spec-lib/src/cmr/umm_spec/umm_g/granule.clj | clojure | umm-lib Granule model does not have create-time, so ignore it for now.
create-time (get-date-by-type provider-dates "Create")
:create-time create-time | (ns cmr.umm-spec.umm-g.granule
"Contains functions for parsing UMM-G JSON into umm-lib granule model
and generating UMM-G JSON from umm-lib granule model."
(:require
[cmr.umm-spec.umm-g.additional-attribute :as aa]
[cmr.umm-spec.umm-g.data-granule :as data-granule]
[cmr.umm-spec.umm-g.measured-parameters :as measured-parameters]
[cmr.umm-spec.umm-g.orbit-calculated-spatial-domain :as ocsd]
[cmr.umm-spec.umm-g.platform :as platform]
[cmr.umm-spec.umm-g.projects :as projects]
[cmr.umm-spec.umm-g.related-url :as related-url]
[cmr.umm-spec.umm-g.spatial :as spatial]
[cmr.umm-spec.umm-g.tiling-system :as tiling-system]
[cmr.umm.umm-collection :as umm-c]
[cmr.umm.umm-granule :as g])
(:import cmr.umm.umm_granule.UmmGranule))
(def ^:private umm-g-metadata-specification
"Defines the current UMM-G MetadataSpecification"
{:URL ""
:Name "UMM-G"
:Version "1.6.4"})
(defn- get-date-by-type
"Returns the date of the given type from the given provider dates"
[provider-dates date-type]
(some #(when (= date-type (:Type %)) (:Date %)) provider-dates))
(defn- umm-g->DataProviderTimestamps
"Returns a UMM DataProviderTimestamps from a parsed XML structure"
[umm-g]
(let [provider-dates (:ProviderDates umm-g)
insert-time (get-date-by-type provider-dates "Insert")
update-time (get-date-by-type provider-dates "Update")
delete-time (get-date-by-type provider-dates "Delete")]
(g/map->DataProviderTimestamps
:insert-time insert-time
:update-time update-time
:delete-time delete-time})))
(defn- umm-g->CollectionRef
"Returns a UMM ref element from a parsed UMM-G JSON"
[umm-g-json]
(let [collection-ref (:CollectionReference umm-g-json)]
(g/map->CollectionRef {:entry-title (:EntryTitle collection-ref)
:short-name (:ShortName collection-ref)
:version-id (:Version collection-ref)})))
(defn umm-g->Temporal
"Returns a UMM Temporal from a parsed UMM-G JSON"
[umm-g-json]
(when-let [temporal (:TemporalExtent umm-g-json)]
(let [range-date-time (when-let [range-date-time (:RangeDateTime temporal)]
(umm-c/map->RangeDateTime
{:beginning-date-time (:BeginningDateTime range-date-time)
:ending-date-time (:EndingDateTime range-date-time)}))
single-date-time (:SingleDateTime temporal)]
(g/map->GranuleTemporal
{:range-date-time range-date-time
:single-date-time single-date-time}))))
(defn umm-g->PGEVersionClass
"Returns a UMM PGEVersionClass from a parsed UMM-G JSON's PGEVersionClass"
[pge-version-class]
(when-let [{:keys [PGEName PGEVersion]} pge-version-class]
(g/map->PGEVersionClass {:pge-name PGEName
:pge-version PGEVersion})))
(defn PGEVersionClass->umm-g-pge-version-class
[pge-version-class]
(when-let [{:keys [pge-name pge-version]} pge-version-class]
{:PGEName pge-name
:PGEVersion pge-version}))
(defn umm-g->Granule
"Returns a UMM Granule from a parsed UMM-G JSON"
[umm-g-json]
(let [coll-ref (umm-g->CollectionRef umm-g-json)]
(g/map->UmmGranule
{:granule-ur (:GranuleUR umm-g-json)
:data-provider-timestamps (umm-g->DataProviderTimestamps umm-g-json)
:collection-ref coll-ref
:data-granule (data-granule/umm-g-data-granule->DataGranule (:DataGranule umm-g-json))
:pge-version-class (umm-g->PGEVersionClass (:PGEVersionClass umm-g-json))
:temporal (umm-g->Temporal umm-g-json)
:orbit-calculated-spatial-domains (ocsd/umm-g-orbit-calculated-spatial-domains->OrbitCalculatedSpatialDomains
(:OrbitCalculatedSpatialDomains umm-g-json))
:platform-refs (platform/umm-g-platforms->PlatformRefs (:Platforms umm-g-json))
:project-refs (projects/umm-g-projects->ProjectRefs (:Projects umm-g-json))
:access-value (get-in umm-g-json [:AccessConstraints :Value])
:cloud-cover (:CloudCover umm-g-json)
:two-d-coordinate-system (tiling-system/umm-g-tiling-identification-system->TwoDCoordinateSystem
(:TilingIdentificationSystem umm-g-json))
:spatial-coverage (spatial/umm-g-spatial-extent->SpatialCoverage umm-g-json)
:related-urls (related-url/umm-g-related-urls->RelatedURLs (:RelatedUrls umm-g-json))
:measured-parameters (measured-parameters/umm-g-measured-parameters->MeasuredParameters
(:MeasuredParameters umm-g-json))
:product-specific-attributes (aa/umm-g-additional-attributes->ProductSpecificAttributeRefs
(:AdditionalAttributes umm-g-json))})))
(defn Granule->umm-g
"Returns UMM-G JSON from a umm-lib Granule"
[granule]
(let [{:keys [granule-ur data-granule pge-version-class access-value temporal orbit-calculated-spatial-domains
platform-refs project-refs cloud-cover related-urls product-specific-attributes
spatial-coverage orbit two-d-coordinate-system measured-parameters
collection-ref data-provider-timestamps]} granule
{:keys [entry-title short-name version-id entry-id]} collection-ref
{:keys [insert-time update-time delete-time]} data-provider-timestamps
insert-time (when insert-time
{:Date (str insert-time)
:Type "Insert"})
update-time (when update-time
{:Date (str update-time)
:Type "Update"})
delete-time (when delete-time
{:Date (str delete-time)
:Type "Delete"})]
{:GranuleUR granule-ur
:ProviderDates (vec (remove nil? [insert-time update-time delete-time]))
:CollectionReference (if (some? entry-title)
{:EntryTitle entry-title}
{:ShortName short-name
:Version version-id})
:TemporalExtent (if-let [single-date-time (:single-date-time temporal)]
{:SingleDateTime (str single-date-time)}
(when-let [range-date-time (:range-date-time temporal)]
{:RangeDateTime
{:BeginningDateTime (str (:beginning-date-time range-date-time))
:EndingDateTime (when-let [ending-date-time (:ending-date-time range-date-time)]
(str ending-date-time))}}))
:SpatialExtent (spatial/SpatialCoverage->umm-g-spatial-extent spatial-coverage)
:OrbitCalculatedSpatialDomains (ocsd/OrbitCalculatedSpatialDomains->umm-g-orbit-calculated-spatial-domains
orbit-calculated-spatial-domains)
:Platforms (platform/PlatformRefs->umm-g-platforms platform-refs)
:CloudCover cloud-cover
:AccessConstraints (when access-value {:Value access-value})
:Projects (projects/ProjectRefs->umm-g-projects project-refs)
:DataGranule (data-granule/DataGranule->umm-g-data-granule data-granule)
:PGEVersionClass (PGEVersionClass->umm-g-pge-version-class pge-version-class)
:TilingIdentificationSystem (tiling-system/TwoDCoordinateSystem->umm-g-tiling-identification-system
two-d-coordinate-system)
:AdditionalAttributes (aa/ProductSpecificAttributeRefs->umm-g-additional-attributes
product-specific-attributes)
:MeasuredParameters (measured-parameters/MeasuredParameters->umm-g-measured-parameters
measured-parameters)
:RelatedUrls (related-url/RelatedURLs->umm-g-related-urls related-urls)
:MetadataSpecification umm-g-metadata-specification}))
|
dc19d789329de2f3c05b35accbde8468999603f8c9a8b2c0bd0e7176a744e317 | Quid2/flat | AsSize.hs | # LANGUAGE InstanceSigs #
# LANGUAGE NoMonomorphismRestriction #
{-# LANGUAGE ScopedTypeVariables #-}
|
Wrapper type to decode a value to its size in bits .
See also " Flat . AsBin " .
In 0.5.X this type was called @SizeOf@.
@since 0.6
Wrapper type to decode a value to its size in bits.
See also "Flat.AsBin".
In 0.5.X this type was called @SizeOf@.
@since 0.6
-}
module Flat.AsSize(AsSize(..)) where
import Flat.Class (Flat (..))
import Flat.Decoder.Prim (sizeOf)
import Flat.Decoder.Types (Get)
import Flat.Types (NumBits)
-- $setup
-- >>> :set -XScopedTypeVariables
-- >>> import Flat.Instances.Base
-- >>> import Flat.Instances.Text
-- >>> import Flat.Decoder.Types
-- >>> import Flat.Types
-- >>> import Flat.Run
-- >>> import Data.Word
-- >>> import qualified Data.Text as T
|
Useful to skip unnecessary values and to check encoding sizes .
Examples :
Ignore the second and fourth component of a tuple :
> > > let v = flat ( ' a',"abc",'z',True ) in unflat v : : Decoded ( , , , )
Right ( ' a',AsSize 28,'z',AsSize 1 )
Notice the variable size encoding of Words :
> > > unflat ( flat ( 1::Word16,1::Word64 ) ) : : Decoded ( AsSize Word16,AsSize Word64 )
Right ( AsSize 8,AsSize 8)
Text :
> > > unflat ( flat ( T.pack " " , T.pack " a",T.pack " 主",UTF8Text $ T.pack " 主",UTF16Text $ T.pack " 主",UTF16Text $ T.pack " a " ) ) : : Decoded ( , , , , , UTF16Text )
Right ( AsSize 16,AsSize 32,AsSize 48,AsSize 48,AsSize 40,AsSize 40 )
Various encodings :
> > > unflat ( flat ( False,[T.pack " " , T.pack " a",T.pack " 主"],'a ' ) ) : : Decoded ( , [ T.Text],AsSize )
Right ( AsSize 1,AsSize 96,AsSize 8)
Useful to skip unnecessary values and to check encoding sizes.
Examples:
Ignore the second and fourth component of a tuple:
>>> let v = flat ('a',"abc",'z',True) in unflat v :: Decoded (Char,AsSize String,Char,AsSize Bool)
Right ('a',AsSize 28,'z',AsSize 1)
Notice the variable size encoding of Words:
>>> unflat (flat (1::Word16,1::Word64)) :: Decoded (AsSize Word16,AsSize Word64)
Right (AsSize 8,AsSize 8)
Text:
>>> unflat (flat (T.pack "",T.pack "a",T.pack "主",UTF8Text $ T.pack "主",UTF16Text $ T.pack "主",UTF16Text $ T.pack "a")) :: Decoded (AsSize T.Text,AsSize T.Text,AsSize T.Text,AsSize UTF8Text,AsSize UTF16Text,AsSize UTF16Text)
Right (AsSize 16,AsSize 32,AsSize 48,AsSize 48,AsSize 40,AsSize 40)
Various encodings:
>>> unflat (flat (False,[T.pack "",T.pack "a",T.pack "主"],'a')) :: Decoded (AsSize Bool,AsSize [T.Text],AsSize Char)
Right (AsSize 1,AsSize 96,AsSize 8)
-}
-- | Phantom-typed wrapper: decoding an @AsSize a@ consumes a flat-encoded
-- @a@ from the input and records only its encoded size in bits.
newtype AsSize a = AsSize NumBits deriving (Eq,Ord,Show)
-- 'AsSize' values are only ever decoded (to measure an already-encoded
-- field), never sized or encoded themselves, so 'size' and 'encode' are
-- deliberate 'error' stubs.
instance Flat a => Flat (AsSize a) where
    size :: Flat a => AsSize a -> NumBits -> NumBits
    size = error "unused"
    encode = error "unused"
    -- measure the bits consumed by decoding an @a@, discarding its value
    decode :: Flat a => Get (AsSize a)
    decode = AsSize <$> sizeOf (decode :: Get a)
| null | https://raw.githubusercontent.com/Quid2/flat/6a07e4004bb6d415710a98cebdac4aadc1bf59d5/src/Flat/AsSize.hs | haskell | # LANGUAGE ScopedTypeVariables #
$setup
>>> :set -XScopedTypeVariables
>>> import Flat.Instances.Base
>>> import Flat.Instances.Text
>>> import Flat.Decoder.Types
>>> import Flat.Types
>>> import Flat.Run
>>> import Data.Word
>>> import qualified Data.Text as T | # LANGUAGE InstanceSigs #
# LANGUAGE NoMonomorphismRestriction #
|
Wrapper type to decode a value to its size in bits .
See also " Flat . AsBin " .
In 0.5.X this type was called @SizeOf@.
@since 0.6
Wrapper type to decode a value to its size in bits.
See also "Flat.AsBin".
In 0.5.X this type was called @SizeOf@.
@since 0.6
-}
module Flat.AsSize(AsSize(..)) where
import Flat.Class (Flat (..))
import Flat.Decoder.Prim (sizeOf)
import Flat.Decoder.Types (Get)
import Flat.Types (NumBits)
|
Useful to skip unnecessary values and to check encoding sizes .
Examples :
Ignore the second and fourth component of a tuple :
> > > let v = flat ( ' a',"abc",'z',True ) in unflat v : : Decoded ( , , , )
Right ( ' a',AsSize 28,'z',AsSize 1 )
Notice the variable size encoding of Words :
> > > unflat ( flat ( 1::Word16,1::Word64 ) ) : : Decoded ( AsSize Word16,AsSize Word64 )
Right ( AsSize 8,AsSize 8)
Text :
> > > unflat ( flat ( T.pack " " , T.pack " a",T.pack " 主",UTF8Text $ T.pack " 主",UTF16Text $ T.pack " 主",UTF16Text $ T.pack " a " ) ) : : Decoded ( , , , , , UTF16Text )
Right ( AsSize 16,AsSize 32,AsSize 48,AsSize 48,AsSize 40,AsSize 40 )
Various encodings :
> > > unflat ( flat ( False,[T.pack " " , T.pack " a",T.pack " 主"],'a ' ) ) : : Decoded ( , [ T.Text],AsSize )
Right ( AsSize 1,AsSize 96,AsSize 8)
Useful to skip unnecessary values and to check encoding sizes.
Examples:
Ignore the second and fourth component of a tuple:
>>> let v = flat ('a',"abc",'z',True) in unflat v :: Decoded (Char,AsSize String,Char,AsSize Bool)
Right ('a',AsSize 28,'z',AsSize 1)
Notice the variable size encoding of Words:
>>> unflat (flat (1::Word16,1::Word64)) :: Decoded (AsSize Word16,AsSize Word64)
Right (AsSize 8,AsSize 8)
Text:
>>> unflat (flat (T.pack "",T.pack "a",T.pack "主",UTF8Text $ T.pack "主",UTF16Text $ T.pack "主",UTF16Text $ T.pack "a")) :: Decoded (AsSize T.Text,AsSize T.Text,AsSize T.Text,AsSize UTF8Text,AsSize UTF16Text,AsSize UTF16Text)
Right (AsSize 16,AsSize 32,AsSize 48,AsSize 48,AsSize 40,AsSize 40)
Various encodings:
>>> unflat (flat (False,[T.pack "",T.pack "a",T.pack "主"],'a')) :: Decoded (AsSize Bool,AsSize [T.Text],AsSize Char)
Right (AsSize 1,AsSize 96,AsSize 8)
-}
newtype AsSize a = AsSize NumBits deriving (Eq,Ord,Show)
instance Flat a => Flat (AsSize a) where
size :: Flat a => AsSize a -> NumBits -> NumBits
size = error "unused"
encode = error "unused"
decode :: Flat a => Get (AsSize a)
decode = AsSize <$> sizeOf (decode :: Get a)
|
f407c9725a9fe0b49acdbeeaf3dab4e86519c1786ac80c54a1f5f2f339a0eff0 | huiqing/percept2 | percept2_sampling.erl | Copyright ( c ) 2012 ,
%% All rights reserved.
%%
%% Redistribution and use in source and binary forms, with or without
%% modification, are permitted provided that the following conditions are met:
%% %% Redistributions of source code must retain the above copyright
%% notice, this list of conditions and the following disclaimer.
%% %% Redistributions in binary form must reproduce the above copyright
%% notice, this list of conditions and the following disclaimer in the
%% documentation and/or other materials provided with the distribution.
%% %% Neither the name of the copyright holders nor the
%% names of its contributors may be used to endorse or promote products
%% derived from this software without specific prior written permission.
%%
%% THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ''AS IS''
%% AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
%% ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS
BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
%% CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
%% SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
%% BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
%% WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
%% OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
%% ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@author < >
%%
%%@doc
%% This module provides a collection of functions for reporting information
%% regarding memory usage, garbage collection, scheduler utilization, and
%% message/run queue length, etc. This is done by sampling-based profiling, i.e.
the profiler probes the running Erlang system at regular intervals . Sampling
%% profiling is typically less numerically accurate and specific, but has less
%% impact on the system. Data collected by the profiler are stored in files,
and the Gnuplot tool can be used for graph visualisation of the data .
%%
The following Erlang functions are used for the purpose of data collection
%% <a href="#statistics-1">erlang:statistics/1</a>,
< a href=" / doc / man / erlang.html#memory-1">erlang : > ,
%% <a href="#system_info-1">erlang:system_info/1</a>
%% and <a href="#process_info-2">erlang:process_info/1</a>.
-module(percept2_sampling).
-export([start/3, start/4, start/5, stop/0]).
-export([init/5]).
%%@hidden
-type sample_item()::
'run_queue'|'run_queues'|'scheduler_utilisation'|
'process_count'| 'schedulers_online'|'mem_info'|
{'message_queue_len', pid()|regname()}|'all'.
the ' all ' options covers all the the options apart from ' message_queue_len ' .
-type entry_mfa() :: {atom(), atom(),list()}.
-type regname() :: atom().
-type milliseconds()::non_neg_integer().
-type seconds()::non_neg_integer().
-record(run_queue_info,
{timestamp::float(),
run_queue=0::non_neg_integer()
}).
-record(run_queues_info,
{timestamp::float(),
run_queues::non_neg_integer()
}).
-record(scheduler_utilisation_info,
{timestamp::float(),
scheduler_utilisation::[{integer(), number(), number()}]
}).
-record(process_count_info, {
timestamp::float(),
process_count::non_neg_integer()}).
-record(schedulers_online_info, {
timestamp::float(),
schedulers_online::non_neg_integer()}).
-record(mem_info, {
timestamp ::float(),
total ::float(),
processes ::float(),
ets ::float(),
atom ::float(),
code ::float(),
binary ::float()
}).
-record(message_queue_len_info, {
timestamp ::float(),
message_queue_len ::non_neg_integer()}).
-compile(export_all).
-define(INTERVAL, 10). % in milliseconds
-define(seconds(EndTs,StartTs),
timer:now_diff(EndTs, StartTs)/1000000).
-define(debug , 9 ) .
%%-define(debug, 0).
-ifdef(debug).
dbg(Level, F, A) when Level >= ?debug ->
io:format(F, A),
ok;
dbg(_, _, _) ->
ok.
-define(dbg(Level, F, A), dbg((Level), (F), (A))).
-else.
-define(dbg(Level, F, A), ok).
-endif.
%%@hidden
%% Returns the metrics covered by the 'all' sampling option.  Note that
%% 'message_queue_len' is deliberately excluded: it needs a process
%% argument, so it must be requested explicitly.
-spec(sample_items()->[atom()]).
sample_items()->
    ['run_queue',
     'run_queues',
     'scheduler_utilisation',
     'process_count',
     'schedulers_online',
     'mem_info'
    ].
%%@hidden
%% Validates the user-supplied item list, expanding 'all' and removing
%% duplicates; raises an error on an unrecognised item.
-spec(check_sample_items([sample_item()]) -> [sample_item()]).
check_sample_items(Items) ->
    check_sample_items_1(Items, []).
%% Worker for check_sample_items/1: accumulates validated items.
%% {message_queue_len, Proc} is accepted as-is; 'all' expands to
%% sample_items(); any other atom must be a member of sample_items().
%% The final result is sorted with duplicates removed (lists:usort).
check_sample_items_1([{'message_queue_len', Proc}|Items], Acc)->
    check_sample_items_1(Items, [{'message_queue_len', Proc}|Acc]);
check_sample_items_1(['all'|Items], Acc) ->
    check_sample_items_1(Items, sample_items()++Acc);
check_sample_items_1([Item|Items], Acc) ->
    case lists:member(Item, sample_items()) of
        true ->
            check_sample_items_1(Items, [Item|Acc]);
        false ->
            error(lists:flatten(io_lib:format("Invalid option:~p", [Item])))
    end;
check_sample_items_1([], Acc) ->
    lists:usort(Acc).
%% Ensures Dir names an existing directory.  Returns ok on success and
%% raises an error with a descriptive flattened message otherwise.
check_out_dir(Dir) ->
    case filelib:is_dir(Dir) of
        true ->
            ok;
        false ->
            Msg = io_lib:format("Invalid directory:~p", [Dir]),
            error(lists:flatten(Msg))
    end.
%%@doc Start the profiler and collects information about the system.
%%
%% The type of information collected is specified by `Items':
%%<ul>
` run_queue ' : returns the sum length of all run queues , that is , the total number of processes that are ready to run .
%%</ul>
%%<ul>
%% `run_queues': returns the length of each run queue, that is, the number of processes that are ready to run in each run queue.
%%</ul>
%%<ul>
%% `scheduler_utilisation': returns the scheduler-utilisation rate per scheduler.
%%</ul>
%%<ul>
%% `schedulers_online': returns the amount of schedulers online.
%%</ul>
%%<ul>
%% `process_count': returns the number of processes currently existing at the local node as an integer.
%%</ul>
%%<ul>
` mem_info ' : returns information about memory dynamically allocated by the Erlang emulator . Information
%% about the following memory types is collected:
processes , ets , atom , code and binary . See < a href=" / doc / man / erlang.html#memory-1">erlang : > .
%%</ul>
%%<ul>
%% `message_queue_len': returns the number of messages currently in the message queue of the process.
%%</ul>
%%<ul>
` all ' : this option covers all the above options apart from ` message_queue_len ' .
%%</ul>
%%If an entry function is specified, this function profiles the system
%% for the whole duration until the entry function returns; otherwise it profiles
%% the system for the time period specified. The system is probed at the default
time interval , which is 10 milliseconds . It is also possible to stop the sampling
%% manually using <a href="percept2_sampling.html#stop-0">stop/0</a>,
%%
%% `OutDir' tells the tool where to put the data files generated. A data file is generated
%% for each type of information in `Items'. For an item `A', the name of the data file would be
%% `sample_A.dat'.
%%
Sampling data is formatted in a way so that the graph plotting tool ` Gnuplot '
%% can be used for visualisation. A pre-defined plotting script is available for
%% each type of information collected, and these scripts are in the `percept2/gplt' directory.
If you are familiar with Gnuplot , you could generate the diagrams in Gnuplot command - line .
Alternately , you could visualise the sampling data through Percept2 , which uses Gnuplot to
%% generate the graphs behind the scene. (It is likely that we will get rid of the dependence to
Gnuplot in the future ) .
%%
%% To visualise the sampling data, one could select the `Visualise sampling data' from the Percept2 main menu,
%% and this should lead to a page as shown in the screenshot next.
%%
< img src="percept2_sample.png " alt="Visualise sampling data " width="850 " height="500 " > < /img >
%%
%% In this page, select the type of data you would like to see, enter the data file name, and the
path leading to this file , then click on the ` Generate Graph ' button . This should leads to a page showing
%% the graph. The screenshot next shows an example output.
%%
< "
%% alt="the front page of Percept2" width="850" height="500"> </img>
%%
%% Starts sampling with the default probe interval and a pass-all filter.
%% FIX: the spec previously declared the integer Time as milliseconds(),
%% but start/5 multiplies it by 1000 before erlang:start_timer/3, i.e. it
%% is interpreted as seconds — consistent with the start/4 and start/5
%% specs.  The spec now says seconds().
-spec(start(Items :: [sample_item()],
            EntryOrTime :: entry_mfa() | seconds(),
            OutDir :: file:filename()) ->
             ok).
start(Items, Time, OutDir) when is_integer(Time) ->
    %% timed profiling for Time seconds
    start(Items, Time, ?INTERVAL,
          fun(_) -> true end, OutDir);
start(Items, Entry={_Mod, _Fun, _Args}, OutDir) ->
    %% profile for the duration of the entry function
    start(Items, Entry, ?INTERVAL, fun(_) -> true end, OutDir).
%%@doc Start the profiler and collects information about the system.
%%
Different from < a href="percept2_sampling.html#start-3">start/3</a > ,
%% this function allows the user to specify the time interval.
%% As start/3, but lets the caller choose the probe interval (in
%% milliseconds).  The filter function defaults to pass-all.
-spec(start(Items :: [any()], EntryOrTime :: entry_mfa() | seconds(),
            TimeInterval :: milliseconds(), OutDir :: file:filename()) ->
             ok). %%[sample_items()],
start(Items, Time, TimeInterval, OutDir) when is_integer(Time) ->
    start(Items, Time, TimeInterval, fun(_) -> true end, OutDir);
start(Items, Entry={_Mod, _Fun, _Args}, TimeInterval, OutDir) ->
    start(Items, Entry, TimeInterval, fun(_) -> true end, OutDir).
%%@doc Start the profiler and collects information about the system.
%%
%% Apart from allowing the user to specify the time interval, this
%% function also allows the user to supply a filter function, so that
%% only those data that satisfy certain condition are logged.
See < a href="percept2_sampling.html#start-3">start/3</a > .
%% Full-control entry point: explicit interval, filter function and
%% output directory.
%%
%% FIX: in the entry-MFA variant the sampler process was only stopped
%% after erlang:apply/3 returned normally; if the entry function threw,
%% the spawned (and registered) 'percept2_sampling' process leaked and
%% blocked subsequent runs.  The apply is now wrapped in try...after so
%% the sampler is always stopped; the exception still propagates and the
%% normal return value remains ok, as before.
%%
%% NOTE(review): the timed variant returns the sampler Pid (or an
%% {'EXIT',_} tuple on failure) although the spec says ok — pre-existing
%% behaviour, kept for compatibility.  erlang:get_stacktrace/0 is
%% deprecated from OTP 21; kept here because the module predates the
%% Class:Reason:Stack try syntax.
-spec(start(Items :: [any()], EntryOrTime :: entry_mfa() | seconds(),
            TimeInterval :: milliseconds(), fun((_) -> boolean()),
            OutDir :: file:filename()) ->
             ok).
start(Items, _Entry={Mod, Fun, Args}, TimeInterval, FilterFun, OutDir) ->
    ok=check_out_dir(OutDir),
    Items1=check_sample_items(Items),
    Pid = start_sampling(Items1, TimeInterval, FilterFun, OutDir),
    try
        erlang:apply(Mod, Fun, Args)
    after
        stop(Pid)
    end,
    ok;
start(Items, Time, TimeInterval, FilterFun, OutDir)
  when is_integer(Time)->
    ok=check_out_dir(OutDir),
    Items1=check_sample_items(Items),
    try
        Pid=start_sampling(Items1, TimeInterval, FilterFun, OutDir),
        %% a timer message (handled in sampling_loop/5) ends the run
        erlang:start_timer(Time*1000, Pid, stop),
        Pid
    catch
        throw:Term -> Term;
        exit:Reason -> {'EXIT',Reason};
        error:Reason -> {'EXIT',{Reason,erlang:get_stacktrace()}}
    end.
%%%----------------------------%%%
Internal functions % % %
%%%----------------------------%%%
%% Enables scheduler wall-time accounting when scheduler utilisation was
%% requested (required before erlang:statistics(scheduler_wall_time)
%% yields data), then spawns the linked sampling-loop process.
start_sampling(Items, TimeInterval, FilterFun, OutDir) ->
    case lists:member('scheduler_utilisation', Items) of
        true ->
            erlang:system_flag(scheduler_wall_time, true);
        _ -> ok
    end,
    spawn_link(?MODULE, init, [erlang:timestamp(), Items, TimeInterval, FilterFun, OutDir]).
%%@doc Stop the sampling.
%% Stops the registered sampler by name; returns {error, not_started}
%% when no sampler is running.  The stop message makes the loop flush
%% its collected data to disk before exiting.
-spec (stop() ->{error, not_started}|ok).
stop() ->
    case whereis(percept2_sampling) of
        undefined ->
            {error, not_started};
        Pid ->
            Pid ! stop,
            ok
    end.
%% Stops a specific sampler process by pid (used internally when the
%% caller holds the pid returned by start_sampling/4).
stop(Pid) ->
    Pid!stop,
    ok.
%%@private
%%@private
%% Entry point of the spawned sampler process: registers itself (so
%% stop/0 can find it), creates one ets table per item, then enters the
%% sampling loop.
init(StartTs, Items, Interval, FilterFun, OutDir) ->
    register(percept2_sampling, self()),
    create_ets_tables(Items),
    sampling_loop(StartTs, Interval, Items, FilterFun, OutDir).
%% Main loop: every Interval ms takes one sample of each item; a 'stop'
%% message (from stop/0/1) or the timer message set up in start/5 ends
%% the loop after flushing the data files.
%% NOTE(review): FilterFun is threaded through but never applied in the
%% code visible here — confirm whether filtering was ever wired up.
sampling_loop(StartTs, Interval, Items, FilterFun, OutDir) ->
    receive
        stop ->
            write_data(Items, OutDir);
        {timeout, _TimerRef, stop} ->
            write_data(Items, OutDir),
            io:format("Done.\n")
    after Interval->
            do_sampling(Items,StartTs),
            sampling_loop(StartTs, Interval, Items, FilterFun, OutDir)
    end.
%% Takes one sample of every requested item.  Each list element — whether
%% a plain atom or a {message_queue_len, Proc} tuple — is handed to
%% do_sample/2 unchanged, so a single traversal suffices.
do_sampling(Items, StartTs) ->
    lists:foreach(fun(Item) -> do_sample(Item, StartTs) end, Items).
%% Derives the ets table name for a sample item, e.g. run_queue ->
%% run_queue_tab.
mk_ets_tab_name(Item) ->
    list_to_atom(lists:concat([Item, "_tab"])).
%% Derives the output data-file name for a sample item, e.g. mem_info ->
%% "sample_mem_info.dat".
mk_file_name(Item) ->
    lists:concat(["sample_", Item, ".dat"]).
%% Creates one named, ordered ets table per sampled item; records are
%% keyed on their second element (the timestamp field of the *_info
%% records).  A {message_queue_len, Proc} tuple names its table after the
%% item atom alone, exactly like a plain atom item.
create_ets_tables([]) ->
    ok;
create_ets_tables([Head|Rest]) ->
    Item = case Head of
               {I, _Args} -> I;
               I -> I
           end,
    ets:new(mk_ets_tab_name(Item),
            [named_table, ordered_set, protected, {keypos, 2}]),
    create_ets_tables(Rest).
%% Takes one sample of the given metric and inserts a timestamped record
%% into that metric's ets table.  Timestamps are seconds since StartTs.
%%
%% FIX: the {message_queue_len, Pid} clause previously pattern-matched
%% the result of erlang:process_info/2 directly; process_info/2 returns
%% 'undefined' once the target process has died, which crashed the whole
%% sampler with a badmatch.  That case is now handled by skipping the
%% sample.
do_sample(mem_info, StartTs) ->
    [{total, Total}, {processes, Processes}, {ets, ETS},
     {atom, Atom}, {code, Code}, {binary, Binary}] =
        erlang:memory([total, processes, ets, atom, code, binary]),
    Info=#mem_info{timestamp=?seconds(erlang:timestamp(), StartTs),
                   total=to_megabytes(Total),
                   processes=to_megabytes(Processes),
                   ets=to_megabytes(ETS),
                   atom=to_megabytes(Atom),
                   code=to_megabytes(Code),
                   binary=to_megabytes(Binary)},
    ?dbg(0, "MemInfo:\n~p\n", [Info]),
    ets:insert(mk_ets_tab_name(mem_info), Info);
do_sample(run_queue, StartTs) ->
    %% total number of processes ready to run, summed over all queues
    RunQueue= erlang:statistics(run_queue),
    Info=#run_queue_info{timestamp=?seconds(erlang:timestamp(), StartTs),
                         run_queue = RunQueue},
    ?dbg(0, "RunQueue:\n~p\n", [Info]),
    ets:insert(mk_ets_tab_name(run_queue), Info);
do_sample(run_queues,StartTs) ->
    %% per-scheduler run-queue lengths
    RunQueues= erlang:statistics(run_queues),
    Info=#run_queues_info{timestamp=?seconds(erlang:timestamp(), StartTs),
                          run_queues = RunQueues},
    ?dbg(0, "RunQueues:\n~p\n", [Info]),
    ets:insert(mk_ets_tab_name(run_queues), Info);
do_sample(scheduler_utilisation,StartTs) ->
    %% requires scheduler_wall_time accounting, enabled in start_sampling/4
    SchedulerWallTime=erlang:statistics(scheduler_wall_time),
    Info=#scheduler_utilisation_info{
      timestamp=?seconds(erlang:timestamp(), StartTs),
      scheduler_utilisation = lists:usort(SchedulerWallTime)},
    ?dbg(0, "Scheduler walltime:\n~p\n", [Info]),
    ets:insert(mk_ets_tab_name(scheduler_utilisation), Info);
do_sample(schedulers_online,StartTs)->
    SchedulersOnline = erlang:system_info(schedulers_online),
    Info=#schedulers_online_info{timestamp=?seconds(erlang:timestamp(), StartTs),
                                 schedulers_online = SchedulersOnline},
    ?dbg(0, "Schedulers online:\n~p\n", [Info]),
    ets:insert(mk_ets_tab_name(schedulers_online), Info);
do_sample(process_count, StartTs) ->
    ProcessCount = erlang:system_info(process_count),
    Info=#process_count_info{timestamp=?seconds(erlang:timestamp(), StartTs),
                             process_count = ProcessCount},
    ?dbg(0, "Process count:\n~p\n", [Info]),
    ets:insert(mk_ets_tab_name(process_count), Info);
do_sample({message_queue_len, RegName}, StartTs) when is_atom(RegName) ->
    %% resolve a registered name; a currently-unregistered name is skipped
    case whereis(RegName) of
        undefined ->ok;
        Pid ->
            do_sample({message_queue_len,Pid},StartTs)
    end;
do_sample({message_queue_len,Pid},StartTs) ->
    case erlang:process_info(Pid, [message_queue_len]) of
        [{message_queue_len, MsgQueueLen}] ->
            Info = #message_queue_len_info{
                      timestamp=?seconds(erlang:timestamp(), StartTs),
                      message_queue_len = MsgQueueLen},
            ?dbg(0, "Message queue length:\n~p\n", [Info]),
            ets:insert(mk_ets_tab_name(message_queue_len), Info);
        undefined ->
            %% target process has terminated; skip this sample
            ok
    end.
%% Dump one item's collected samples to OutDir/sample_<Item>.dat (in the
%% gnuplot text format produced by read_data_from_tab/1), then drop the
%% backing ETS table.  Crashes (badmatch) if the file cannot be opened
%% or written, which takes the sampling process down with it.
do_write_sample_info(Item, OutDir) ->
    OutFile = filename:join(OutDir, mk_file_name(Item)),
    {ok, FD} = file:open(OutFile, [write]),
    Tab = mk_ets_tab_name(Item),
    String=read_data_from_tab(Item),
    ok=file:write(FD, String),
    true = ets:delete(Tab),
    ok = file:close(FD).
%% Render the sampled data for one item as a gnuplot-friendly string:
%% a "#item" header line followed by one whitespace-separated row per
%% sample, in ascending timestamp order (ets:foldr over an ordered_set
%% with prepending yields ascending output).  The funs destructure the
%% stored records as plain tuples: element 1 is the record tag,
%% element 2 the timestamp.
read_data_from_tab(mem_info) ->
    Tab = mk_ets_tab_name(mem_info),
    lists:flatten(
      ["#mem_info\n"
       | ets:foldr(
           fun(_Data={_, Secs, Total, Procs, ETS, Atom, Code, Binary}, Acc) ->
                   [io_lib:format("~p ~p ~p ~p ~p ~p ~p \n",
                                  [Secs, Total, Procs, ETS, Atom, Code, Binary])
                    | Acc]
           end, [], Tab)]);
read_data_from_tab(run_queue) ->
    Tab = mk_ets_tab_name(run_queue),
    lists:flatten(
      ["#run_queue\n"
       | ets:foldr(
           fun(_Data={_, Secs, RunQueue}, Acc) ->
                   [io_lib:format("~p ~p \n", [Secs, RunQueue]) | Acc]
           end, [], Tab)]);
read_data_from_tab(run_queues) ->
    Tab = mk_ets_tab_name(run_queues),
    lists:flatten(
      ["#run_queues\n"
       | ets:foldr(
           fun(_Data={_, Secs, RunQueues}, Acc) ->
                   %% Cumulative per-queue sums; the foldl leaves the
                   %% resulting list in reverse queue order -- presumably
                   %% for stacked plotting.  TODO(review): confirm the
                   %% intended column ordering.
                   {_, RunQueues1} =
                       lists:foldl(
                         fun(Len, {Sum, RQAcc}) ->
                                 {Len+Sum, [Len+Sum|RQAcc]}
                         end, {0, []}, tuple_to_list(RunQueues)),
                   Str = lists:flatten([" "++integer_to_list(Len)++" "
                                        || Len <- RunQueues1]),
                   [io_lib:format("~p ~s \n", [Secs, Str]) | Acc]
           end, [], Tab)]);
read_data_from_tab(scheduler_utilisation) ->
    Tab = mk_ets_tab_name(scheduler_utilisation),
    %% Utilisation is computed from the delta between consecutive
    %% scheduler_wall_time snapshots; the first snapshot visited only
    %% seeds SchedulerWallTime0 and produces no output row.
    %% BUGFIX: the seed accumulator used to contain the
    %% "#scheduler_utilisation\n" header as well, so the header appeared
    %% twice in the output (once prepended below and once trailing the
    %% data).  Seed with an empty list instead.
    {_, Acc1} =
        ets:foldr(
          fun(_Data={_, Secs, SchedulerWallTime1}, {SchedulerWallTime0, Acc}) ->
                  case SchedulerWallTime0 of
                      none ->
                          {SchedulerWallTime1, Acc};
                      _ ->
                          SchedUtilisation =
                              [(A1 - A0)/(T1 - T0)
                               || {{I, A0, T0}, {I, A1, T1}}
                                      <- lists:zip(SchedulerWallTime0,
                                                   SchedulerWallTime1)],
                          {_, SchedUtilisation1} =
                              lists:foldl(
                                fun(Util, {Sum, UtilAcc}) ->
                                        {Util+Sum, [Util+Sum|UtilAcc]}
                                end, {0, []}, SchedUtilisation),
                          Str = [io_lib:format(" ~p", [Val])
                                 || Val <- SchedUtilisation1],
                          {SchedulerWallTime1,
                           [io_lib:format("~p ", [Secs]), Str++" \n" | Acc]}
                  end
          end, {none, []}, Tab),
    lists:flatten(["#scheduler_utilisation\n" | Acc1]);
read_data_from_tab(process_count) ->
    Tab = mk_ets_tab_name(process_count),
    lists:flatten(
      ["#process_count\n"
       | ets:foldr(
           fun(_Data={_, Secs, ProcsCount}, Acc) ->
                   [io_lib:format("~p ~p \n", [Secs, ProcsCount]) | Acc]
           end, [], Tab)]);
read_data_from_tab(schedulers_online) ->
    Tab = mk_ets_tab_name(schedulers_online),
    lists:flatten(
      ["#schedulers_online\n"
       | ets:foldr(
           fun(_Data={_, Secs, ProcsCount}, Acc) ->
                   [io_lib:format("~p ~p \n", [Secs, ProcsCount]) | Acc]
           end, [], Tab)]);
read_data_from_tab(message_queue_len) ->
    Tab = mk_ets_tab_name(message_queue_len),
    lists:flatten(
      ["#message_queue_len\n"
       | ets:foldr(
           fun(_Data={_, Secs, MsgQueueLen}, Acc) ->
                   [io_lib:format("~p ~p \n", [Secs, MsgQueueLen]) | Acc]
           end, [], Tab)]).
%% Flush every sampled item's data to its file in OutDir.  Elements may
%% be bare item atoms or {Item, Args} tuples; only the item name is used.
write_data(Items, OutDir) ->
    lists:foreach(
      fun({Item, _Args}) -> do_write_sample_info(Item, OutDir);
         (Item)          -> do_write_sample_info(Item, OutDir)
      end, Items).
%% Convert a byte count to SI megabytes (10^6 bytes), as a float.
to_megabytes(Bytes) ->
    Bytes / (1000 * 1000).
%% Example commands
%% percept2_sampling:start(['all'], {Mod, Fun, [["c:/cygwin/home/hl/test"], 5, 40, 2, 4, 0.8,
%%                                              ["c:/cygwin/home/hl/test"], 8]}, "../profile_data").
%% percept2_sampling:start(['all', {'message_queue_len', 'percept2_db'}],
%%                         {percept2, analyze, [["sim_code.dat"]]}, ".").
| null | https://raw.githubusercontent.com/huiqing/percept2/fa796a730d6727210a71f185e6a39a960c2dcb90/src/percept2_sampling.erl | erlang | All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
%% Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
%% Redistributions in binary form must reproduce the above copyright
notice, this list of conditions and the following disclaimer in the
documentation and/or other materials provided with the distribution.
%% Neither the name of the copyright holders nor the
names of its contributors may be used to endorse or promote products
derived from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS ''AS IS''
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDERS AND CONTRIBUTORS
CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR
BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR
OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF
ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@doc
This module provides a collection of functions for reporting information
regarding memory usage, garbage collection, scheduler utilization, and
message/run queue length, etc. This is done by sampling-based profiling, i.e.
profiling is typically less numerically accurate and specific, but has less
impact on the system. Data collected by the profiler are stored in files,
<a href="#statistics-1">erlang:statistics/1</a>,
<a href="#system_info-1">erlang:system_info/1</a>
and <a href="#process_info-2">erlang:process_info/1</a>.
@hidden
in milliseconds
-define(debug, 0).
@hidden
@hidden
@doc Start the profiler and collects information about the system.
The type of information collected is specified by `Items':
<ul>
</ul>
<ul>
`run_queues': returns the length of each run queue, that is, the number of processes that are ready to run in each run queue.
</ul>
<ul>
`scheduler_utilisation': returns the scheduler-utilisation rate per scheduler.
</ul>
<ul>
`schedulers_online': returns the amount of schedulers online.
</ul>
<ul>
`process_count': returns the number of processes currently existing at the local node as an integer.
</ul>
<ul>
about the following memory types is collected:
</ul>
<ul>
`message_queue_len': returns the number of messages currently in the message queue of the process.
</ul>
<ul>
</ul>
If an entry function is specified, this function profiles the system
for the whole duration until the entry function returns; otherwise it profiles
the system for the time period specified. The system is probed at the default
manually using <a href="percept2_sampling.html#stop-0">stop/0</a>,
`OutDir' tells the tool where to put the data files generated. A data file is generated
for each type of information in `Items'. For an item `A', the name of the data file would be
`sample_A.dat'.
can be used for visualisation. A pre-defined plotting script is available for
each type of information collected, and these scripts are in the `percept2/gplt' directory.
generate the graphs behind the scene. (It is likely that we will get rid of the dependence to
To visualise the sampling data, one could select the `Visualise sampling data' from the Percept2 main menu,
and this should lead to a page as shown in the screenshot next.
In this page, select the type of data you would like to see, enter the data file name, and the
the graph. The screenshot next shows an example output.
alt="the front page of Percept2" width="850" height="500"> </img>
[sample_items()],
@doc Start the profiler and collects information about the system.
this function allows the user to specify the time interval.
[sample_items()],
@doc Start the profiler and collects information about the system.
Apart from allowing the user to specify the time interval, this
function also allows the user to supply a filter function, so that
only those data that satisfy certain condition are logged.
[sample_items()],
----------------------------%%%
% %
----------------------------%%%
@doc Stop the sampling.
@private
Example commands
["c:/cygwin/home/hl/test"],8]},"../profile_data"). | Copyright ( c ) 2012 ,
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
BE LIABLE FOR ANY DIRECT , INDIRECT , INCIDENTAL , SPECIAL , EXEMPLARY , OR
@author < >
the profiler probes the running Erlang system at regular intervals . Sampling
and the Gnuplot tool can be used for graph visualisation of the data .
The following Erlang functions are used for the purpose of data collection
< a href=" / doc / man / erlang.html#memory-1">erlang : > ,
-module(percept2_sampling).
-export([start/3, start/4, start/5, stop/0]).
-export([init/5]).
-type sample_item()::
'run_queue'|'run_queues'|'scheduler_utilisation'|
'process_count'| 'schedulers_online'|'mem_info'|
{'message_queue_len', pid()|regname()}|'all'.
the ' all ' options covers all the the options apart from ' message_queue_len ' .
-type entry_mfa() :: {atom(), atom(),list()}.
-type regname() :: atom().
-type milliseconds()::non_neg_integer().
-type seconds()::non_neg_integer().
-record(run_queue_info,
{timestamp::float(),
run_queue=0::non_neg_integer()
}).
-record(run_queues_info,
{timestamp::float(),
run_queues::non_neg_integer()
}).
-record(scheduler_utilisation_info,
{timestamp::float(),
scheduler_utilisation::[{integer(), number(), number()}]
}).
-record(process_count_info, {
timestamp::float(),
process_count::non_neg_integer()}).
-record(schedulers_online_info, {
timestamp::float(),
schedulers_online::non_neg_integer()}).
-record(mem_info, {
timestamp ::float(),
total ::float(),
processes ::float(),
ets ::float(),
atom ::float(),
code ::float(),
binary ::float()
}).
-record(message_queue_len_info, {
timestamp ::float(),
message_queue_len ::non_neg_integer()}).
-compile(export_all).
-define(seconds(EndTs,StartTs),
timer:now_diff(EndTs, StartTs)/1000000).
-define(debug , 9 ) .
-ifdef(debug).
dbg(Level, F, A) when Level >= ?debug ->
io:format(F, A),
ok;
dbg(_, _, _) ->
ok.
-define(dbg(Level, F, A), dbg((Level), (F), (A))).
-else.
-define(dbg(Level, F, A), ok).
-endif.
-spec(sample_items()->[atom()]).
%% The concrete set of items covered by the 'all' option
%% (everything except {message_queue_len, _}).
sample_items() ->
    [run_queue, run_queues, scheduler_utilisation,
     process_count, schedulers_online, mem_info].
-spec(check_sample_items([sample_item()]) -> [sample_item()]).
check_sample_items(Items) ->
check_sample_items_1(Items, []).
check_sample_items_1([{'message_queue_len', Proc}|Items], Acc)->
check_sample_items_1(Items, [{'message_queue_len', Proc}|Acc]);
check_sample_items_1(['all'|Items], Acc) ->
check_sample_items_1(Items, sample_items()++Acc);
check_sample_items_1([Item|Items], Acc) ->
case lists:member(Item, sample_items()) of
true ->
check_sample_items_1(Items, [Item|Acc]);
false ->
error(lists:flatten(io_lib:format("Invalid option:~p", [Item])))
end;
check_sample_items_1([], Acc) ->
lists:usort(Acc).
%% Validate that Dir names an existing directory; raises an error with a
%% flattened "Invalid directory:..." message otherwise.
check_out_dir(Dir) ->
    case filelib:is_dir(Dir) of
        true ->
            ok;
        false ->
            Msg = io_lib:format("Invalid directory:~p", [Dir]),
            error(lists:flatten(Msg))
    end.
` run_queue ' : returns the sum length of all run queues , that is , the total number of processes that are ready to run .
` mem_info ' : returns information about memory dynamically allocated by the Erlang emulator . Information
processes , ets , atom , code and binary . See < a href=" / doc / man / erlang.html#memory-1">erlang : > .
` all ' : this option covers all the above options apart from ` message_queue_len ' .
time interval , which is 10 milliseconds . It is also possible to stop the sampling
Sampling data is formatted in a way so that the graph plotting tool ` Gnuplot '
If you are familiar with Gnuplot , you could generate the diagrams in Gnuplot command - line .
Alternately , you could visualise the sampling data through Percept2 , which uses Gnuplot to
Gnuplot in the future ) .
< img src="percept2_sample.png " alt="Visualise sampling data " width="850 " height="500 " > < /img >
path leading to this file , then click on the ` Generate Graph ' button . This should leads to a page showing
< "
-spec(start(Items :: [sample_item()],
EntryOrTime :: entry_mfa() | milliseconds(),
OutDir :: file:filename()) ->
start(Items, Time, OutDir) when is_integer(Time) ->
start(Items, Time, ?INTERVAL,
fun(_) -> true end, OutDir);
start(Items, Entry={_Mod, _Fun, _Args}, OutDir) ->
start(Items, Entry, ?INTERVAL, fun(_) -> true end, OutDir).
Different from < a href="percept2_sampling.html#start-3">start/3</a > ,
-spec(start(Items :: [any()], EntryOrTime :: entry_mfa() | seconds(),
TimeInterval :: milliseconds(), OutDir :: file:filename()) ->
start(Items, Time, TimeInterval, OutDir) when is_integer(Time) ->
start(Items, Time, TimeInterval, fun(_) -> true end, OutDir);
start(Items, Entry={_Mod, _Fun, _Args}, TimeInterval, OutDir) ->
start(Items, Entry, TimeInterval, fun(_) -> true end, OutDir).
See < a href="percept2_sampling.html#start-3">start/3</a > .
-spec(start(Items :: [any()], EntryOrTime :: entry_mfa() | seconds(),
TimeInterval :: milliseconds(), fun((_) -> boolean()),
OutDir :: file:filename()) ->
%% Start sampling, either for the duration of an entry-function call
%% (MFA form: sample while apply(Mod, Fun, Args) runs, then stop) or for
%% a fixed number of seconds (integer form: a timer message stops the
%% sampler asynchronously and the sampler pid is returned).
start(Items, _Entry={Mod, Fun, Args}, TimeInterval, FilterFun, OutDir) ->
    ok=check_out_dir(OutDir),
    Items1=check_sample_items(Items),
    Pid = start_sampling(Items1, TimeInterval, FilterFun, OutDir),
    erlang:apply(Mod, Fun, Args),
    stop(Pid);
start(Items, Time, TimeInterval, FilterFun, OutDir)
  when is_integer(Time)->
    ok=check_out_dir(OutDir),
    Items1=check_sample_items(Items),
    try
        Pid=start_sampling(Items1, TimeInterval, FilterFun, OutDir),
        erlang:start_timer(Time*1000, Pid, stop),
        Pid
    catch
        throw:Term -> Term;
        exit:Reason -> {'EXIT',Reason};
        %% BUGFIX: erlang:get_stacktrace/0 was deprecated in OTP 21 and
        %% removed in OTP 24; capture the stacktrace in the catch-clause
        %% pattern instead (same {'EXIT', {Reason, Stacktrace}} result).
        error:Reason:Stacktrace -> {'EXIT',{Reason,Stacktrace}}
    end.
start_sampling(Items, TimeInterval, FilterFun, OutDir) ->
case lists:member('scheduler_utilisation', Items) of
true ->
erlang:system_flag(scheduler_wall_time, true);
_ -> ok
end,
spawn_link(?MODULE, init, [erlang:timestamp(), Items, TimeInterval, FilterFun, OutDir]).
-spec (stop() ->{error, not_started}|ok).
%% Stop the registered sampler, if one is running; data is written out
%% by the sampler itself when it receives the 'stop' message.
stop() ->
    case whereis(percept2_sampling) of
        Pid when is_pid(Pid) ->
            Pid ! stop,
            ok;
        undefined ->
            {error, not_started}
    end.
stop(Pid) ->
Pid!stop,
ok.
init(StartTs, Items, Interval, FilterFun, OutDir) ->
register(percept2_sampling, self()),
create_ets_tables(Items),
sampling_loop(StartTs, Interval, Items, FilterFun, OutDir).
sampling_loop(StartTs, Interval, Items, FilterFun, OutDir) ->
receive
stop ->
write_data(Items, OutDir);
{timeout, _TimerRef, stop} ->
write_data(Items, OutDir),
io:format("Done.\n")
after Interval->
do_sampling(Items,StartTs),
sampling_loop(StartTs, Interval, Items, FilterFun, OutDir)
end.
do_sampling([{Item, Args}|Items],StartTs) ->
do_sample({Item, Args},StartTs),
do_sampling(Items,StartTs);
do_sampling([Item|Items],StartTs) ->
do_sample(Item, StartTs),
do_sampling(Items,StartTs);
do_sampling([],_) -> ok.
mk_ets_tab_name(Item)->
list_to_atom(atom_to_list(Item)++"_tab").
mk_file_name(Item) ->
"sample_"++atom_to_list(Item)++".dat".
create_ets_tables([{Item, _}|Items]) ->
TabName = mk_ets_tab_name(Item),
ets:new(TabName, [named_table, ordered_set, protected, {keypos, 2}]),
create_ets_tables(Items);
create_ets_tables([Item|Items]) ->
TabName = mk_ets_tab_name(Item),
ets:new(TabName, [named_table, ordered_set, protected, {keypos, 2}]),
create_ets_tables(Items);
create_ets_tables([]) ->
ok.
do_sample(mem_info, StartTs) ->
[{total, Total}, {processes, Processes}, {ets, ETS},
{atom, Atom}, {code, Code}, {binary, Binary}] =
erlang:memory([total, processes, ets, atom, code, binary]),
Info=#mem_info{timestamp=?seconds(erlang:timestamp(), StartTs),
total=to_megabytes(Total),
processes=to_megabytes(Processes),
ets=to_megabytes(ETS),
atom=to_megabytes(Atom),
code=to_megabytes(Code),
binary=to_megabytes(Binary)},
?dbg(0, "MemInfo:\n~p\n", [Info]),
ets:insert(mk_ets_tab_name(mem_info), Info);
do_sample(run_queue, StartTs) ->
RunQueue= erlang:statistics(run_queue),
Info=#run_queue_info{timestamp=?seconds(erlang:timestamp(), StartTs),
run_queue = RunQueue},
?dbg(0, "RunQueue:\n~p\n", [Info]),
ets:insert(mk_ets_tab_name(run_queue), Info);
do_sample(run_queues,StartTs) ->
RunQueues= erlang:statistics(run_queues),
Info=#run_queues_info{timestamp=?seconds(erlang:timestamp(), StartTs),
run_queues = RunQueues},
?dbg(0, "RunQueues:\n~p\n", [Info]),
ets:insert(mk_ets_tab_name(run_queues), Info);
do_sample(scheduler_utilisation,StartTs) ->
SchedulerWallTime=erlang:statistics(scheduler_wall_time),
Info=#scheduler_utilisation_info{
timestamp=?seconds(erlang:timestamp(), StartTs),
scheduler_utilisation = lists:usort(SchedulerWallTime)},
?dbg(0, "Scheduler walltime:\n~p\n", [Info]),
ets:insert(mk_ets_tab_name(scheduler_utilisation), Info);
do_sample(schedulers_online,StartTs)->
SchedulersOnline = erlang:system_info(schedulers_online),
Info=#schedulers_online_info{timestamp=?seconds(erlang:timestamp(), StartTs),
schedulers_online = SchedulersOnline},
?dbg(0, "Schedulers online:\n~p\n", [Info]),
ets:insert(mk_ets_tab_name(schedulers_online), Info);
do_sample(process_count, StartTs) ->
ProcessCount = erlang:system_info(process_count),
Info=#process_count_info{timestamp=?seconds(erlang:timestamp(), StartTs),
process_count = ProcessCount},
?dbg(0, "Process count:\n~p\n", [Info]),
ets:insert(mk_ets_tab_name(process_count), Info);
do_sample({message_queue_len, RegName}, StartTs) when is_atom(RegName) ->
case whereis(RegName) of
undefined ->ok;
Pid ->
do_sample({message_queue_len,Pid},StartTs)
end;
do_sample({message_queue_len,Pid},StartTs) ->
[{message_queue_len, MsgQueueLen}] = erlang:process_info(Pid, [message_queue_len]),
Info = #message_queue_len_info{timestamp=?seconds(erlang:timestamp(), StartTs),
message_queue_len = MsgQueueLen
},
?dbg(0, "Message queue length:\n~p\n", [Info]),
ets:insert(mk_ets_tab_name(message_queue_len), Info).
do_write_sample_info(Item, OutDir) ->
OutFile = filename:join(OutDir, mk_file_name(Item)),
{ok, FD} = file:open(OutFile, [write]),
Tab = mk_ets_tab_name(Item),
String=read_data_from_tab(Item),
ok=file:write(FD, String),
true = ets:delete(Tab),
ok = file:close(FD).
%% Render the sampled data for one item as a gnuplot-friendly string:
%% a "#item" header line followed by one whitespace-separated row per
%% sample, in ascending timestamp order (ets:foldr over an ordered_set
%% with prepending yields ascending output).  The funs destructure the
%% stored records as plain tuples: element 1 is the record tag,
%% element 2 the timestamp.
read_data_from_tab(mem_info) ->
    Tab = mk_ets_tab_name(mem_info),
    lists:flatten(
      ["#mem_info\n"
       | ets:foldr(
           fun(_Data={_, Secs, Total, Procs, ETS, Atom, Code, Binary}, Acc) ->
                   [io_lib:format("~p ~p ~p ~p ~p ~p ~p \n",
                                  [Secs, Total, Procs, ETS, Atom, Code, Binary])
                    | Acc]
           end, [], Tab)]);
read_data_from_tab(run_queue) ->
    Tab = mk_ets_tab_name(run_queue),
    lists:flatten(
      ["#run_queue\n"
       | ets:foldr(
           fun(_Data={_, Secs, RunQueue}, Acc) ->
                   [io_lib:format("~p ~p \n", [Secs, RunQueue]) | Acc]
           end, [], Tab)]);
read_data_from_tab(run_queues) ->
    Tab = mk_ets_tab_name(run_queues),
    lists:flatten(
      ["#run_queues\n"
       | ets:foldr(
           fun(_Data={_, Secs, RunQueues}, Acc) ->
                   %% Cumulative per-queue sums; the foldl leaves the
                   %% resulting list in reverse queue order -- presumably
                   %% for stacked plotting.  TODO(review): confirm the
                   %% intended column ordering.
                   {_, RunQueues1} =
                       lists:foldl(
                         fun(Len, {Sum, RQAcc}) ->
                                 {Len+Sum, [Len+Sum|RQAcc]}
                         end, {0, []}, tuple_to_list(RunQueues)),
                   Str = lists:flatten([" "++integer_to_list(Len)++" "
                                        || Len <- RunQueues1]),
                   [io_lib:format("~p ~s \n", [Secs, Str]) | Acc]
           end, [], Tab)]);
read_data_from_tab(scheduler_utilisation) ->
    Tab = mk_ets_tab_name(scheduler_utilisation),
    %% Utilisation is computed from the delta between consecutive
    %% scheduler_wall_time snapshots; the first snapshot visited only
    %% seeds SchedulerWallTime0 and produces no output row.
    %% BUGFIX: the seed accumulator used to contain the
    %% "#scheduler_utilisation\n" header as well, so the header appeared
    %% twice in the output (once prepended below and once trailing the
    %% data).  Seed with an empty list instead.
    {_, Acc1} =
        ets:foldr(
          fun(_Data={_, Secs, SchedulerWallTime1}, {SchedulerWallTime0, Acc}) ->
                  case SchedulerWallTime0 of
                      none ->
                          {SchedulerWallTime1, Acc};
                      _ ->
                          SchedUtilisation =
                              [(A1 - A0)/(T1 - T0)
                               || {{I, A0, T0}, {I, A1, T1}}
                                      <- lists:zip(SchedulerWallTime0,
                                                   SchedulerWallTime1)],
                          {_, SchedUtilisation1} =
                              lists:foldl(
                                fun(Util, {Sum, UtilAcc}) ->
                                        {Util+Sum, [Util+Sum|UtilAcc]}
                                end, {0, []}, SchedUtilisation),
                          Str = [io_lib:format(" ~p", [Val])
                                 || Val <- SchedUtilisation1],
                          {SchedulerWallTime1,
                           [io_lib:format("~p ", [Secs]), Str++" \n" | Acc]}
                  end
          end, {none, []}, Tab),
    lists:flatten(["#scheduler_utilisation\n" | Acc1]);
read_data_from_tab(process_count) ->
    Tab = mk_ets_tab_name(process_count),
    lists:flatten(
      ["#process_count\n"
       | ets:foldr(
           fun(_Data={_, Secs, ProcsCount}, Acc) ->
                   [io_lib:format("~p ~p \n", [Secs, ProcsCount]) | Acc]
           end, [], Tab)]);
read_data_from_tab(schedulers_online) ->
    Tab = mk_ets_tab_name(schedulers_online),
    lists:flatten(
      ["#schedulers_online\n"
       | ets:foldr(
           fun(_Data={_, Secs, ProcsCount}, Acc) ->
                   [io_lib:format("~p ~p \n", [Secs, ProcsCount]) | Acc]
           end, [], Tab)]);
read_data_from_tab(message_queue_len) ->
    Tab = mk_ets_tab_name(message_queue_len),
    lists:flatten(
      ["#message_queue_len\n"
       | ets:foldr(
           fun(_Data={_, Secs, MsgQueueLen}, Acc) ->
                   [io_lib:format("~p ~p \n", [Secs, MsgQueueLen]) | Acc]
           end, [], Tab)]).
write_data([{Item, _Args}|Items], OutDir) ->
do_write_sample_info(Item, OutDir),
write_data(Items,OutDir);
write_data([Item|Items], OutDir) ->
do_write_sample_info(Item, OutDir),
write_data(Items, OutDir);
write_data([], _) ->
ok.
to_megabytes(Bytes) ->
Bytes/1000000.
percept2_sampling : sample ( [ ' all'[["c:/cygwin / home / hl / test " ] , 5 , 40 , 2 , 4 , 0.8 ,
percept2_sampling : , { ' message_queue_len ' , ' percept2_db ' } ] , { percept2 , analyze , [ [ " sim_code.dat " ] ] } , " . " ) .
|
6972b2aa811b67494be367683dff79902534b4684c257fa3c02a57a596009443 | apache/couchdb-couch-index | couch_index_updater.erl | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
% use this file except in compliance with the License. You may obtain a copy of
% the License at
%
% -2.0
%
% Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
% WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
% License for the specific language governing permissions and limitations under
% the License.
-module(couch_index_updater).
-behaviour(gen_server).
%% API
-export([start_link/2, run/2, is_running/1, update/2, restart/2]).
%% for upgrades
-export([update/3]).
%% gen_server callbacks
-export([init/1, terminate/2, code_change/3]).
-export([handle_call/3, handle_cast/2, handle_info/2]).
-include_lib("couch/include/couch_db.hrl").
-record(st, {
idx,
mod,
pid=nil
}).
%% Start an updater gen_server linked to the calling index process.
%% Index is the couch_index server pid; Module the index implementation.
start_link(Index, Module) ->
    gen_server:start_link(?MODULE, {Index, Module}, []).
%% Ask the updater to bring the index up to date from IdxState.  Always
%% returns ok; it is a no-op if an update worker is already running.
run(Pid, IdxState) ->
    gen_server:call(Pid, {update, IdxState}).
%% true iff an update worker is currently in flight.
is_running(Pid) ->
    gen_server:call(Pid, is_running).
%% Upgrade shim: older code spawns update/2; delegate to update/3 with
%% no index pid.
update(Mod, State) ->
    update(nil, Mod, State).
%% Abort any in-flight update worker and start over from IdxState.
restart(Pid, IdxState) ->
    gen_server:call(Pid, {restart, IdxState}).
%% gen_server init: trap exits so the linked update worker's termination
%% arrives as an {'EXIT', ...} message in handle_info/2 (the worker
%% reports its result through its exit reason).
init({Index, Module}) ->
    process_flag(trap_exit, true),
    {ok, #st{idx=Index, mod=Module}}.
%% Synchronously kill any in-flight update worker on shutdown.
%% NOTE(review): State#st.pid may be the initial 'nil'; assumes
%% couch_util:shutdown_sync/1 tolerates a non-pid -- confirm.
terminate(_Reason, State) ->
    couch_util:shutdown_sync(State#st.pid),
    ok.
%% An update is already in progress; the request is a no-op.
handle_call({update, _IdxState}, _From, #st{pid=Pid}=State) when is_pid(Pid) ->
    {reply, ok, State};
%% No worker running: spawn a linked worker executing update/3.
handle_call({update, IdxState}, _From, #st{idx=Idx, mod=Mod}=State) ->
    Args = [Mod:get(db_name, IdxState), Mod:get(idx_name, IdxState)],
    couch_log:info("Starting index update for db: ~s idx: ~s", Args),
    Pid = spawn_link(?MODULE, update, [Idx, Mod, IdxState]),
    {reply, ok, State#st{pid=Pid}};
%% Kill any in-flight worker, then start fresh from IdxState.
handle_call({restart, IdxState}, _From, #st{idx=Idx, mod=Mod}=State) ->
    Args = [Mod:get(db_name, IdxState), Mod:get(idx_name, IdxState)],
    couch_log:info("Restarting index update for db: ~s idx: ~s", Args),
    case is_pid(State#st.pid) of
        true -> couch_util:shutdown_sync(State#st.pid);
        _ -> ok
    end,
    Pid = spawn_link(?MODULE, update, [Idx, State#st.mod, IdxState]),
    {reply, ok, State#st{pid=Pid}};
%% A worker pid in state means an update is running.
handle_call(is_running, _From, #st{pid=Pid}=State) when is_pid(Pid) ->
    {reply, true, State};
handle_call(is_running, _From, State) ->
    {reply, false, State}.
%% No casts are part of this server's protocol; any cast is fatal.
handle_cast(_Mesg, State) ->
    {stop, unknown_cast, State}.
%% The update worker communicates its outcome via its exit reason.
%% Successful completion: forward the new index state to the index
%% server and clear the worker pid.
handle_info({'EXIT', _, {updated, Pid, IdxState}}, #st{pid=Pid}=State) ->
    Mod = State#st.mod,
    Args = [Mod:get(db_name, IdxState), Mod:get(idx_name, IdxState)],
    couch_log:info("Index update finished for db: ~s idx: ~s", Args),
    ok = gen_server:cast(State#st.idx, {updated, IdxState}),
    {noreply, State#st{pid=undefined}};
%% The worker found the index must be rebuilt: reset it via the index
%% server, then start a fresh worker from the reset state.
handle_info({'EXIT', _, {reset, Pid}}, #st{idx=Idx, pid=Pid}=State) ->
    {ok, NewIdxState} = gen_server:call(State#st.idx, reset),
    Pid2 = spawn_link(?MODULE, update, [Idx, State#st.mod, NewIdxState]),
    {noreply, State#st{pid=Pid2}};
%% Worker exited normally without a result tuple: just clear the pid.
handle_info({'EXIT', Pid, normal}, #st{pid=Pid}=State) ->
    {noreply, State#st{pid=undefined}};
%% Unwrap values thrown (nocatch) inside the worker and re-dispatch.
handle_info({'EXIT', Pid, {{nocatch, Error}, _Trace}}, State) ->
    handle_info({'EXIT', Pid, Error}, State);
%% Any other worker crash is reported to the index server as an error.
handle_info({'EXIT', Pid, Error}, #st{pid=Pid}=State) ->
    ok = gen_server:cast(State#st.idx, {update_error, Error}),
    {noreply, State#st{pid=undefined}};
%% Our owning index process died: shut down with it.
handle_info({'EXIT', Pid, _Reason}, #st{idx=Pid}=State) ->
    {stop, normal, State};
%% Normal exits from other linked processes are ignored.
handle_info({'EXIT', _Pid, normal}, State) ->
    {noreply, State};
handle_info(_Mesg, State) ->
    {stop, unknown_info, State}.
%% No state migration needed across code upgrades.
code_change(_OldVsn, State, _Extra) ->
    {ok, State}.
%% Worker body spawned by the gen_server: scan the database's changes
%% since the index's current update_seq, feed each document through
%% Mod:process_doc/3, and report the finished index state via the exit
%% reason {updated, self(), FinalIdxState} (consumed by handle_info/2).
%% Exits with {reset, self()} when the index is too far behind the
%% database's purge history to be patched up.
update(Idx, Mod, IdxState) ->
    DbName = Mod:get(db_name, IdxState),
    CurrSeq = Mod:get(update_seq, IdxState),
    UpdateOpts = Mod:get(update_options, IdxState),
    %% committed_only: stop at the last committed seq instead of the
    %% db's in-memory update seq.  include_design: index _design docs
    %% too.  local_seq: make the doc's sequence available to the view.
    CommittedOnly = lists:member(committed_only, UpdateOpts),
    IncludeDesign = lists:member(include_design, UpdateOpts),
    DocOpts = case lists:member(local_seq, UpdateOpts) of
        true -> [conflicts, deleted_conflicts, local_seq];
        _ -> [conflicts, deleted_conflicts]
    end,
    couch_util:with_db(DbName, fun(Db) ->
        DbUpdateSeq = couch_db:get_update_seq(Db),
        DbCommittedSeq = couch_db:get_committed_update_seq(Db),
        %% Apply any pending purge before scanning; 'reset' aborts the
        %% worker and lets the gen_server rebuild from scratch.
        PurgedIdxState = case purge_index(Db, Mod, IdxState) of
            {ok, IdxState0} -> IdxState0;
            reset -> exit({reset, self()})
        end,
        NumChanges = couch_db:count_changes_since(Db, CurrSeq),
        %% The enumerator may hand us either doc-info record shape.
        GetSeq = fun
            (#full_doc_info{update_seq=Seq}) -> Seq;
            (#doc_info{high_seq=Seq}) -> Seq
        end,
        GetInfo = fun
            (#full_doc_info{id=Id, update_seq=Seq, deleted=Del}=FDI) ->
                {Id, Seq, Del, couch_doc:to_doc_info(FDI)};
            (#doc_info{id=Id, high_seq=Seq, revs=[RI|_]}=DI) ->
                {Id, Seq, RI#rev_info.deleted, DI}
        end,
        %% Returns {Doc, Seq}: Doc is nil for skipped design docs and a
        %% deleted stub #doc{} for deletions; otherwise the opened doc.
        LoadDoc = fun(DI) ->
            {DocId, Seq, Deleted, DocInfo} = GetInfo(DI),
            case {IncludeDesign, DocId} of
                {false, <<"_design/", _/binary>>} ->
                    {nil, Seq};
                _ when Deleted ->
                    {#doc{id=DocId, deleted=true}, Seq};
                _ ->
                    {ok, Doc} = couch_db:open_doc_int(Db, DocInfo, DocOpts),
                    {Doc, Seq}
            end
        end,
        %% Fold fun for enum_docs_since; the boolean in the accumulator
        %% records whether we ran to the end (true) or stopped early at
        %% the committed-seq boundary (false).
        Proc = fun(DocInfo, _, {IdxStateAcc, _}) ->
            case CommittedOnly and (GetSeq(DocInfo) > DbCommittedSeq) of
                true ->
                    {stop, {IdxStateAcc, false}};
                false ->
                    {Doc, Seq} = LoadDoc(DocInfo),
                    {ok, NewSt} = Mod:process_doc(Doc, Seq, IdxStateAcc),
                    %% keep per-doc garbage from piling up across a
                    %% potentially very long scan
                    garbage_collect(),
                    {ok, {NewSt, true}}
            end
        end,
        {ok, InitIdxState} = Mod:start_update(Idx, PurgedIdxState, NumChanges),
        Acc0 = {InitIdxState, true},
        {ok, _, Acc} = couch_db:enum_docs_since(Db, CurrSeq, Proc, Acc0, []),
        {ProcIdxSt, SendLast} = Acc,
        % If we didn't bail due to hitting the last committed seq we need
        % to send our last update_seq through.
        {ok, LastIdxSt} = case SendLast of
            true ->
                Mod:process_doc(nil, DbUpdateSeq, ProcIdxSt);
            _ ->
                {ok, ProcIdxSt}
        end,
        {ok, FinalIdxState} = Mod:finish_update(LastIdxSt),
        exit({updated, self(), FinalIdxState})
    end).
%% Bring the index's purge state in line with the database's.
%% Equal purge seqs: nothing to do.  Exactly one behind: replay the
%% database's last purge into the index.  Further behind: the purge
%% history is no longer available, so signal 'reset' (full rebuild).
purge_index(Db, Mod, IdxState) ->
    DbPurgeSeq = couch_db:get_purge_seq(Db),
    IdxPurgeSeq = Mod:get(purge_seq, IdxState),
    if
        DbPurgeSeq == IdxPurgeSeq ->
            {ok, IdxState};
        DbPurgeSeq == IdxPurgeSeq + 1 ->
            {ok, PurgedIdRevs} = couch_db:get_last_purged(Db),
            Mod:purge(Db, DbPurgeSeq, PurgedIdRevs, IdxState);
        true ->
            reset
    end.
| null | https://raw.githubusercontent.com/apache/couchdb-couch-index/f0a6854e578469612937a766632fdcdc52ee9c65/src/couch_index_updater.erl | erlang | use this file except in compliance with the License. You may obtain a copy of
the License at
-2.0
Unless required by applicable law or agreed to in writing, software
WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
License for the specific language governing permissions and limitations under
the License.
API
for upgrades
gen_server callbacks
If we didn't bail due to hitting the last committed seq we need
to send our last update_seq through. | Licensed under the Apache License , Version 2.0 ( the " License " ) ; you may not
distributed under the License is distributed on an " AS IS " BASIS , WITHOUT
-module(couch_index_updater).
-behaviour(gen_server).
-export([start_link/2, run/2, is_running/1, update/2, restart/2]).
-export([update/3]).
-export([init/1, terminate/2, code_change/3]).
-export([handle_call/3, handle_cast/2, handle_info/2]).
-include_lib("couch/include/couch_db.hrl").
-record(st, {
idx,
mod,
pid=nil
}).
start_link(Index, Module) ->
gen_server:start_link(?MODULE, {Index, Module}, []).
run(Pid, IdxState) ->
gen_server:call(Pid, {update, IdxState}).
is_running(Pid) ->
gen_server:call(Pid, is_running).
update(Mod, State) ->
update(nil, Mod, State).
restart(Pid, IdxState) ->
gen_server:call(Pid, {restart, IdxState}).
init({Index, Module}) ->
process_flag(trap_exit, true),
{ok, #st{idx=Index, mod=Module}}.
terminate(_Reason, State) ->
couch_util:shutdown_sync(State#st.pid),
ok.
handle_call({update, _IdxState}, _From, #st{pid=Pid}=State) when is_pid(Pid) ->
{reply, ok, State};
handle_call({update, IdxState}, _From, #st{idx=Idx, mod=Mod}=State) ->
Args = [Mod:get(db_name, IdxState), Mod:get(idx_name, IdxState)],
couch_log:info("Starting index update for db: ~s idx: ~s", Args),
Pid = spawn_link(?MODULE, update, [Idx, Mod, IdxState]),
{reply, ok, State#st{pid=Pid}};
handle_call({restart, IdxState}, _From, #st{idx=Idx, mod=Mod}=State) ->
Args = [Mod:get(db_name, IdxState), Mod:get(idx_name, IdxState)],
couch_log:info("Restarting index update for db: ~s idx: ~s", Args),
case is_pid(State#st.pid) of
true -> couch_util:shutdown_sync(State#st.pid);
_ -> ok
end,
Pid = spawn_link(?MODULE, update, [Idx, State#st.mod, IdxState]),
{reply, ok, State#st{pid=Pid}};
handle_call(is_running, _From, #st{pid=Pid}=State) when is_pid(Pid) ->
{reply, true, State};
handle_call(is_running, _From, State) ->
{reply, false, State}.
handle_cast(_Mesg, State) ->
{stop, unknown_cast, State}.
handle_info({'EXIT', _, {updated, Pid, IdxState}}, #st{pid=Pid}=State) ->
Mod = State#st.mod,
Args = [Mod:get(db_name, IdxState), Mod:get(idx_name, IdxState)],
couch_log:info("Index update finished for db: ~s idx: ~s", Args),
ok = gen_server:cast(State#st.idx, {updated, IdxState}),
{noreply, State#st{pid=undefined}};
handle_info({'EXIT', _, {reset, Pid}}, #st{idx=Idx, pid=Pid}=State) ->
{ok, NewIdxState} = gen_server:call(State#st.idx, reset),
Pid2 = spawn_link(?MODULE, update, [Idx, State#st.mod, NewIdxState]),
{noreply, State#st{pid=Pid2}};
handle_info({'EXIT', Pid, normal}, #st{pid=Pid}=State) ->
{noreply, State#st{pid=undefined}};
handle_info({'EXIT', Pid, {{nocatch, Error}, _Trace}}, State) ->
handle_info({'EXIT', Pid, Error}, State);
handle_info({'EXIT', Pid, Error}, #st{pid=Pid}=State) ->
ok = gen_server:cast(State#st.idx, {update_error, Error}),
{noreply, State#st{pid=undefined}};
handle_info({'EXIT', Pid, _Reason}, #st{idx=Pid}=State) ->
{stop, normal, State};
handle_info({'EXIT', _Pid, normal}, State) ->
{noreply, State};
handle_info(_Mesg, State) ->
{stop, unknown_info, State}.
code_change(_OldVsn, State, _Extra) ->
{ok, State}.
update(Idx, Mod, IdxState) ->
DbName = Mod:get(db_name, IdxState),
CurrSeq = Mod:get(update_seq, IdxState),
UpdateOpts = Mod:get(update_options, IdxState),
CommittedOnly = lists:member(committed_only, UpdateOpts),
IncludeDesign = lists:member(include_design, UpdateOpts),
DocOpts = case lists:member(local_seq, UpdateOpts) of
true -> [conflicts, deleted_conflicts, local_seq];
_ -> [conflicts, deleted_conflicts]
end,
couch_util:with_db(DbName, fun(Db) ->
DbUpdateSeq = couch_db:get_update_seq(Db),
DbCommittedSeq = couch_db:get_committed_update_seq(Db),
PurgedIdxState = case purge_index(Db, Mod, IdxState) of
{ok, IdxState0} -> IdxState0;
reset -> exit({reset, self()})
end,
NumChanges = couch_db:count_changes_since(Db, CurrSeq),
GetSeq = fun
(#full_doc_info{update_seq=Seq}) -> Seq;
(#doc_info{high_seq=Seq}) -> Seq
end,
GetInfo = fun
(#full_doc_info{id=Id, update_seq=Seq, deleted=Del}=FDI) ->
{Id, Seq, Del, couch_doc:to_doc_info(FDI)};
(#doc_info{id=Id, high_seq=Seq, revs=[RI|_]}=DI) ->
{Id, Seq, RI#rev_info.deleted, DI}
end,
LoadDoc = fun(DI) ->
{DocId, Seq, Deleted, DocInfo} = GetInfo(DI),
case {IncludeDesign, DocId} of
{false, <<"_design/", _/binary>>} ->
{nil, Seq};
_ when Deleted ->
{#doc{id=DocId, deleted=true}, Seq};
_ ->
{ok, Doc} = couch_db:open_doc_int(Db, DocInfo, DocOpts),
{Doc, Seq}
end
end,
Proc = fun(DocInfo, _, {IdxStateAcc, _}) ->
case CommittedOnly and (GetSeq(DocInfo) > DbCommittedSeq) of
true ->
{stop, {IdxStateAcc, false}};
false ->
{Doc, Seq} = LoadDoc(DocInfo),
{ok, NewSt} = Mod:process_doc(Doc, Seq, IdxStateAcc),
garbage_collect(),
{ok, {NewSt, true}}
end
end,
{ok, InitIdxState} = Mod:start_update(Idx, PurgedIdxState, NumChanges),
Acc0 = {InitIdxState, true},
{ok, _, Acc} = couch_db:enum_docs_since(Db, CurrSeq, Proc, Acc0, []),
{ProcIdxSt, SendLast} = Acc,
{ok, LastIdxSt} = case SendLast of
true ->
Mod:process_doc(nil, DbUpdateSeq, ProcIdxSt);
_ ->
{ok, ProcIdxSt}
end,
{ok, FinalIdxState} = Mod:finish_update(LastIdxSt),
exit({updated, self(), FinalIdxState})
end).
purge_index(Db, Mod, IdxState) ->
DbPurgeSeq = couch_db:get_purge_seq(Db),
IdxPurgeSeq = Mod:get(purge_seq, IdxState),
if
DbPurgeSeq == IdxPurgeSeq ->
{ok, IdxState};
DbPurgeSeq == IdxPurgeSeq + 1 ->
{ok, PurgedIdRevs} = couch_db:get_last_purged(Db),
Mod:purge(Db, DbPurgeSeq, PurgedIdRevs, IdxState);
true ->
reset
end.
|
e5a4ce8028d6a2bc4abb020b1f7b2efcf8d9158b78d09e1ec11072bddbf64eec | hyperfiddle/electric | match1.clj | (ns dustin.match2)
(def *type-registry (atom {}))
(defmacro typedef [T]
(swap! *type-registry assoc T {})
`(def T ^{:type T} {}))
(typedef A)
(declare match | >> _)
(let [instance ^{:type `A} {::a 42}]
(match instance
| (A 42) = nil
| (::B (::A 42)) = nil
| (::B (::A _)) = nil
| _ = nil))
(defn sign [x]
(match x
:| (> x 0) := 1
:| (= x 0) := 0
:| (< x 0) := -1
))
| null | https://raw.githubusercontent.com/hyperfiddle/electric/1c6c3891cbf13123fef8d33e6555d300f0dac134/scratch/dustin/y2020/match1.clj | clojure | (ns dustin.match2)
(def *type-registry (atom {}))
(defmacro typedef [T]
(swap! *type-registry assoc T {})
`(def T ^{:type T} {}))
(typedef A)
(declare match | >> _)
(let [instance ^{:type `A} {::a 42}]
(match instance
| (A 42) = nil
| (::B (::A 42)) = nil
| (::B (::A _)) = nil
| _ = nil))
(defn sign [x]
(match x
:| (> x 0) := 1
:| (= x 0) := 0
:| (< x 0) := -1
))
| |
49ff7caf45a7d919195f98837148e636ad9dad2b29feadf981e7ee7368946635 | plumatic/hiphip | double.clj | (ns hiphip.double
"Utilities for double arrays"
(:refer-clojure :exclude [amap areduce alength aget aset aclone])
(:require [hiphip.impl.core :as impl])
(:import hiphip.double_.Helpers))
(def +type+ 'double)
(load-string (impl/slurp-from-classpath "hiphip/type_impl.clj"))
| null | https://raw.githubusercontent.com/plumatic/hiphip/d839359cc1e4c453cd6ffe0b5857de550b3d7489/src/hiphip/double.clj | clojure | (ns hiphip.double
"Utilities for double arrays"
(:refer-clojure :exclude [amap areduce alength aget aset aclone])
(:require [hiphip.impl.core :as impl])
(:import hiphip.double_.Helpers))
(def +type+ 'double)
(load-string (impl/slurp-from-classpath "hiphip/type_impl.clj"))
| |
ca630052107859079e09512a578e65c5a756b3d6afa27997f4b09f9ed31b28b1 | bsaleil/lc | array1.scm | ARRAY1 -- One of the and benchmarks .
(define (create-x n)
(define result (make-vector n))
(do ((i 0 (+ i 1)))
((>= i n) result)
(vector-set! result i i)))
(define (create-y x)
(let* ((n (vector-length x))
(result (make-vector n)))
(do ((i (- n 1) (- i 1)))
((< i 0) result)
(vector-set! result i (vector-ref x i)))))
(define (my-try n)
(vector-length (create-y (create-x n))))
(define (go n)
(let loop ((repeat 100)
(result '()))
(if (> repeat 0)
(loop (- repeat 1) (my-try n))
result)))
(println (go 5))
5
| null | https://raw.githubusercontent.com/bsaleil/lc/ee7867fd2bdbbe88924300e10b14ea717ee6434b/unit-tests/benchmarks/array1.scm | scheme | ARRAY1 -- One of the and benchmarks .
(define (create-x n)
(define result (make-vector n))
(do ((i 0 (+ i 1)))
((>= i n) result)
(vector-set! result i i)))
(define (create-y x)
(let* ((n (vector-length x))
(result (make-vector n)))
(do ((i (- n 1) (- i 1)))
((< i 0) result)
(vector-set! result i (vector-ref x i)))))
(define (my-try n)
(vector-length (create-y (create-x n))))
(define (go n)
(let loop ((repeat 100)
(result '()))
(if (> repeat 0)
(loop (- repeat 1) (my-try n))
result)))
(println (go 5))
5
| |
ebe29c9f8d2027a65a16bdbe88bfdab0ee1fba7815e132bafdd3936b22a2b239 | KestrelInstitute/Specware | tokenizer.lisp | -*- Mode : LISP ; Package : Parser ; Base : 10 ; Syntax : Common - Lisp -*-
(in-package :Parser4)
;;; ========================================================================
(defun ctp-arg-test (arg value example)
(when (null value)
(warn "create-tokenizer-parameters missing keyword arg ~S, e.g. ~A"
arg
example)))
(defun create-tokenizer-parameters (&key
;;
name
;;
size-of-character-set
;;
word-symbol-start-chars
word-symbol-continue-chars
;;
non-word-symbol-start-chars
non-word-symbol-continue-chars
;;
number-start-chars
number-continue-chars
;;
digits-may-start-symbols?
;;
string-quote-char
string-escape-char
;;
whitespace-chars
;;
separator-chars
;;
comment-to-eol-chars
;;
extended-comment-delimiters
pragma-delimiters
;;
ad-hoc-keywords
ad-hoc-symbols
ad-hoc-numbers
;;
case-sensitive?
;;
)
(ctp-arg-test :word-symbol-start-chars word-symbol-start-chars "the alphabet")
(ctp-arg-test :word-symbol-continue-chars word-symbol-continue-chars "the alphabet, digits, and underbar")
(ctp-arg-test :non-word-symbol-start-chars non-word-symbol-start-chars "some chars like !@$^&*~+-=|<>?/.")
(ctp-arg-test :non-word-symbol-continue-chars non-word-symbol-continue-chars "some chars like !@$^&*~+-=|<>?/.")
(ctp-arg-test :number-start-chars number-start-chars "the digits, plus, minus, and maybe dot and/or slash")
(ctp-arg-test :number-continue-chars number-continue-chars "the digits, and maybe dot and/or slash")
(ctp-arg-test :comment-to-eol-chars comment-to-eol-chars "semi-colon (#\;) or percent (#\%) ")
(let ((whitespace-table (make-array size-of-character-set :initial-element 0))
(word-symbol-table (make-array size-of-character-set :initial-element 0))
(non-word-symbol-table (make-array size-of-character-set :initial-element 0))
(number-table (make-array size-of-character-set :initial-element 0))
(string-table (make-array size-of-character-set :initial-element 0))
(comment-table (make-array size-of-character-set :initial-element 0))
(ad-hoc-table (make-array size-of-character-set :initial-element 0))
(separator-tokens (make-array size-of-character-set :initial-element 0))
(cp-descriptors '())
)
Note : in the following , we consistently assign the problematic codes first , so that legal codes can override them
;; in cases where a character has both an illegal and a legal code for some context.
;;
;; whitespace-table is used when scanning whitespace...
;;
;; codes that are illegal after whitespace is started:
(assign-tokenizer-codes whitespace-table word-symbol-continue-chars +word-symbol-continue-code+)
(assign-tokenizer-codes whitespace-table non-word-symbol-continue-chars +non-word-symbol-continue-code+)
(assign-tokenizer-codes whitespace-table number-continue-chars +number-continue-code+)
;; codes that are legal after whitespace is started:
(assign-tokenizer-code whitespace-table #\# +char-literal-start-code+) ; first, so it can be overridden
(assign-tokenizer-code whitespace-table #\_ +wildcard-code+) ; first, so it can be overridden
(assign-tokenizer-codes whitespace-table word-symbol-start-chars +word-symbol-start-code+)
(assign-tokenizer-codes whitespace-table non-word-symbol-start-chars +non-word-symbol-start-code+)
(assign-tokenizer-codes whitespace-table number-start-chars +number-start-code+)
(assign-tokenizer-code whitespace-table string-quote-char +string-quote-code+)
(assign-tokenizer-codes whitespace-table comment-to-eol-chars +comment-to-eol-code+)
(assign-tokenizer-codes whitespace-table whitespace-chars +whitespace-code+)
(assign-tokenizer-codes whitespace-table separator-chars +separator-code+)
;;
;; word-symbol-table
;;
;; codes that are illegal after a word symbol is started:
(assign-tokenizer-codes word-symbol-table word-symbol-start-chars +word-symbol-start-code+)
(assign-tokenizer-codes word-symbol-table number-continue-chars +number-continue-code+)
(assign-tokenizer-codes word-symbol-table non-word-symbol-continue-chars +non-word-symbol-continue-code+)
;; codes that are legal after a word symbol is started:
(assign-tokenizer-code word-symbol-table #\# +char-literal-start-code+) ; first, so it can be overridden
(assign-tokenizer-code word-symbol-table #\_ +syllable-separator-code+) ; first, so it can be overridden
(assign-tokenizer-codes word-symbol-table non-word-symbol-start-chars +non-word-symbol-start-code+)
(assign-tokenizer-codes word-symbol-table number-start-chars +number-start-code+) ; probably overridden by +word-symbol-continue-code+
(assign-tokenizer-code word-symbol-table string-quote-char +string-quote-code+)
(assign-tokenizer-codes word-symbol-table comment-to-eol-chars +comment-to-eol-code+)
(assign-tokenizer-codes word-symbol-table whitespace-chars +whitespace-code+)
(assign-tokenizer-codes word-symbol-table word-symbol-continue-chars +word-symbol-continue-code+)
(assign-tokenizer-codes word-symbol-table separator-chars +separator-code+)
;;
;; non-word-symbol-table
;;
;; codes that are illegal after a non-word symbol is started:
(assign-tokenizer-codes non-word-symbol-table non-word-symbol-start-chars +non-word-symbol-start-code+)
(assign-tokenizer-codes non-word-symbol-table number-continue-chars +number-continue-code+)
(assign-tokenizer-codes non-word-symbol-table word-symbol-continue-chars +word-symbol-continue-code+)
;; codes that are legal after a non-word symbol is started:
(assign-tokenizer-code non-word-symbol-table #\# +char-literal-start-code+) ; first, so it can be overridden
(assign-tokenizer-code non-word-symbol-table #\_ +syllable-separator-code+) ; first, so it can be overridden
(assign-tokenizer-codes non-word-symbol-table word-symbol-start-chars +word-symbol-start-code+)
(assign-tokenizer-codes non-word-symbol-table number-start-chars +number-start-code+) ; proably survive as final code
(assign-tokenizer-code non-word-symbol-table string-quote-char +string-quote-code+)
(assign-tokenizer-codes non-word-symbol-table comment-to-eol-chars +comment-to-eol-code+)
(assign-tokenizer-codes non-word-symbol-table whitespace-chars +whitespace-code+)
(assign-tokenizer-codes non-word-symbol-table non-word-symbol-continue-chars +non-word-symbol-continue-code+)
(assign-tokenizer-codes non-word-symbol-table separator-chars +separator-code+)
;;
;; number-table is used when scanning numbers...
;;
;; codes that are illegal after a number is started:
(assign-tokenizer-codes number-table number-start-chars +number-start-code+)
(assign-tokenizer-codes number-table word-symbol-continue-chars +word-symbol-continue-code+)
(assign-tokenizer-codes number-table non-word-symbol-continue-chars +non-word-symbol-continue-code+)
(assign-tokenizer-code number-table #\_ +syllable-separator-code+) ; first, so it can be overridden
;; codes that are illegal after a number is started, but might become legal:
(assign-tokenizer-codes number-table word-symbol-start-chars +word-symbol-start-code+)
(assign-tokenizer-codes number-table non-word-symbol-start-chars +non-word-symbol-start-code+)
;; codes that are legal after a number is started:
(assign-tokenizer-code number-table string-quote-char +string-quote-code+)
(assign-tokenizer-codes number-table comment-to-eol-chars +comment-to-eol-code+)
(assign-tokenizer-codes number-table whitespace-chars +whitespace-code+)
(assign-tokenizer-codes number-table number-continue-chars +number-continue-code+)
(assign-tokenizer-codes number-table separator-chars +separator-code+)
;;
;; string-table is used when scanning strings
;;
(assign-tokenizer-code string-table string-quote-char +string-quote-code+)
(assign-tokenizer-code string-table string-escape-char +string-escape-code+)
;;
;;
(dolist (quad extended-comment-delimiters)
(let* ((prefix (first quad))
(postfix (second quad))
(recursive? (third quad))
(eof-ok? (fourth quad))
(pragma? nil))
(unless (and (stringp prefix)
(> (length prefix) 0)
(stringp postfix)
(> (length postfix) 0)
(member recursive? '(t nil))
(member eof-ok? '(t nil)))
(break "Bad description of extended comment delimiters. Want (prefix postfix recursive? eof-ok?) : ~S"
quad))
(push (make-cp-descriptor :prefix prefix
:postfix postfix
:recursive? recursive?
:eof-ok? eof-ok?
:pragma? pragma?)
cp-descriptors)
(setf (svref comment-table (char-code (schar prefix 0)))
+maybe-open-comment-or-pragma-code+)))
(dolist (quad pragma-delimiters)
(let* ((prefix (first quad))
(postfix (second quad))
(recursive? (third quad))
(eof-ok? (fourth quad))
(pragma? t))
(unless (and (stringp prefix)
(> (length prefix) 0)
(stringp postfix)
(> (length postfix) 0)
(member recursive? '(t nil))
(member eof-ok? '(t nil)))
(break "Bad description of pragma delimiters. Want (prefix postfix recursive? eof-ok?) : ~S"
quad))
(push (make-cp-descriptor :prefix prefix
:postfix postfix
:recursive? recursive?
:eof-ok? eof-ok?
:pragma? pragma?)
cp-descriptors)
(setf (svref comment-table (char-code (schar prefix 0)))
+maybe-open-comment-or-pragma-code+)))
;; move longest prefixes to front of list, so that something
;; such as "//@" would be recognized before "//", etc.
(setq cp-descriptors
(sort cp-descriptors
#'(lambda (x y)
(> (length (cp-descriptor-prefix x))
(length (cp-descriptor-prefix y))))))
;;
(dolist (char separator-chars)
(setf (svref separator-tokens (char-code char)) (string char)))
(dolist (string ad-hoc-keywords)
(setf (svref ad-hoc-table (char-code (schar string 0)))
+maybe-start-of-ad-hoc-token+))
(dolist (string ad-hoc-symbols)
(setf (svref ad-hoc-table (char-code (schar string 0)))
+maybe-start-of-ad-hoc-token+))
(dolist (string ad-hoc-numbers)
(setf (svref ad-hoc-table (char-code (schar string 0)))
+maybe-start-of-ad-hoc-token+))
(let ((ht-ad-hoc-types (make-hash-table
:test (if case-sensitive?
#+allegro 'string= #-allegro 'equal
#+allegro 'string-equal #-allegro 'equal
))))
(dolist (keyword-string ad-hoc-keywords)
(setf (gethash keyword-string ht-ad-hoc-types) :AD-HOC-KEYWORD-ONLY))
(dolist (symbol-string ad-hoc-symbols)
(let ((old-value (gethash symbol-string ht-ad-hoc-types)))
(setf (gethash symbol-string ht-ad-hoc-types)
(if (null old-value)
:AD-HOC-SYMBOL-ONLY
:AD-HOC-KEYWORD-AND-SYMBOL-ONLY))))
(dolist (number-string ad-hoc-numbers)
(let ((old-value (gethash number-string ht-ad-hoc-types)))
(setf (gethash number-string ht-ad-hoc-types)
(ecase old-value
((nil) :AD-HOC-NUMBER-ONLY)
(:KEYWORD :AD-HOC-KEYWORD-AND-NUMBER-ONLY)
(:SYMBOL :AD-HOC-SYMBOL-AND-NUMBER-ONLY)
(:KEYWORD-AND-SYMBOL :AD-HOC-KEYWORD-AND-SYMBOL-AND-NUMBER-ONLY)))))
;;
(when-debugging
(when *verbose?*
(let ((alist `((,+number-start-code+ . +number-start-code+)
(,+number-continue-code+ . +number-continue-code+)
(,+word-symbol-start-code+ . +word-symbol-start-code+)
(,+word-symbol-continue-code+ . +word-symbol-continue-code+)
(,+non-word-symbol-start-code+ . +non-word-symbol-start-code+)
(,+non-word-symbol-continue-code+ . +non-word-symbol-continue-code+)
(,+separator-code+ . +separator-code+)
(,+string-quote-code+ . +string-quote-code+)
(,+string-escape-code+ . +string-escape-code+)
(,+comment-to-eol-code+ . +comment-to-eol-code+)
(,+whitespace-code+ . +whitespace-code+)
(,+char-literal-start-code+ . +char-literal-start-code+)
(,+syllable-separator-code+ . +syllable-separator-code+)
(,+wildcard-code+ . +wildcard-code+)
(0 . "...")
)))
(comment "============================================================================")
(terpri)
(dotimes (i size-of-character-set)
(let ((n (svref whitespace-table i)))
(comment "At whitespace ~3D (~12S) => ~A"
i (code-char i) (cdr (assoc n alist)))))
(terpri)
(dotimes (i size-of-character-set)
(let ((n (svref word-symbol-table i)))
(comment"At word symbol ~3D (~12S) => ~A"
i (code-char i) (cdr (assoc n alist)))))
(terpri)
(dotimes (i size-of-character-set)
(let ((n (svref non-word-symbol-table i)))
(comment"At non-word symbol ~3D (~12S) => ~A"
i (code-char i) (cdr (assoc n alist)))))
(terpri)
(dotimes (i size-of-character-set)
(let ((n (svref number-table i)))
(comment "At number ~3D (~12S) => ~A"
i (code-char i) (cdr (assoc n alist)))))
(terpri)
(dotimes (i size-of-character-set)
(let ((n (svref string-table i)))
(comment "At string ~3D (~12S) => ~A"
i (code-char i) (cdr (assoc n alist)))))
(terpri)
(dotimes (i size-of-character-set)
(when (= (svref comment-table i) +maybe-open-comment-or-pragma-code+)
(comment "The character ~D (~S) may start an extended comment or a pragma"
i (code-char i))))
(terpri)
(dolist (x ad-hoc-keywords) (comment "Ad-hoc-keyword : ~S" x))
(dolist (x ad-hoc-symbols) (comment "Ad-hoc-symbol : ~S" x))
(dolist (x ad-hoc-numbers) (comment "Ad-hoc-number : ~S" x))
(terpri)
(maphash #'(lambda (key value) (comment "ad-hoc-type for ~S = ~S" key value))
ht-ad-hoc-types)
(terpri)
(comment "============================================================================"))))
(let ((ad-hoc-strings
;; sort the strings in descending length so that "__" will be seen before "_", "??" before "?" etc.
(sort (append ad-hoc-keywords
ad-hoc-symbols
ad-hoc-numbers)
#'(lambda (x y)
(> (length x) (length y))))))
(make-tokenizer-parameters :name name
:whitespace-table whitespace-table
:word-symbol-table word-symbol-table
:non-word-symbol-table non-word-symbol-table
:number-table number-table
:string-table string-table
:digits-may-start-symbols? digits-may-start-symbols?
:comment-table comment-table
:separator-tokens separator-tokens
:cp-descriptors cp-descriptors
:ad-hoc-types-ht ht-ad-hoc-types
:ad-hoc-table ad-hoc-table
:ad-hoc-strings ad-hoc-strings
))
)))
(defun assign-tokenizer-codes (table chars code)
(setq chars (coerce chars 'list))
(dotimes (i (length chars))
(setf (svref table (char-code (nth i chars))) code)))
(defun assign-tokenizer-code (table char code)
(unless (null char)
(setf (svref table (char-code char)) code)))
;;; ========================================================================
(defun tokenize-file (session file tokenizer)
(incf-timing-data 'start-tokenize-file)
(let ((all-tokens
;; the tokenizer will call extract-tokens-from-file, using language-specific parameters
(funcall tokenizer file))
(comment-tokens '())
(non-comment-tokens '())
(comment-eof-error? nil))
;; each token looks like: (:kind <semantics> (start-byte start-line start-column) (end-byte end-line end-column))
(incf-timing-data 'tokenize-file)
(dolist (token all-tokens)
(cond ((member (first token) '(:COMMENT-TO-EOL :EXTENDED-COMMENT))
(push token comment-tokens))
(t
(when (eq (first token) :EXTENDED-COMMENT-ERROR)
(setq comment-eof-error? t))
(push token non-comment-tokens))))
(setq non-comment-tokens (nreverse non-comment-tokens))
(setq comment-tokens (nreverse comment-tokens))
(incf-timing-data 'tokenize-file)
(let ((result
(install-tokens session non-comment-tokens comment-tokens)))
(incf-timing-data 'install-tokens)
(values result (length all-tokens) comment-eof-error?))))
;;; ========================================================================
(defun extract-tokens-from-file (file tokenizer-parameters)
(let ((whitespace-table (tokenizer-parameters-whitespace-table tokenizer-parameters))
(word-symbol-table (tokenizer-parameters-word-symbol-table tokenizer-parameters))
(non-word-symbol-table (tokenizer-parameters-non-word-symbol-table tokenizer-parameters))
(number-table (tokenizer-parameters-number-table tokenizer-parameters))
(string-table (tokenizer-parameters-string-table tokenizer-parameters))
(comment-table (tokenizer-parameters-comment-table tokenizer-parameters))
(separator-tokens (tokenizer-parameters-separator-tokens tokenizer-parameters))
(cp-descriptors (tokenizer-parameters-cp-descriptors tokenizer-parameters))
(digits-may-start-symbols? (tokenizer-parameters-digits-may-start-symbols? tokenizer-parameters))
(ht-ad-hoc-types (tokenizer-parameters-ad-hoc-types-ht tokenizer-parameters))
(ad-hoc-table (tokenizer-parameters-ad-hoc-table tokenizer-parameters))
(ad-hoc-strings (tokenizer-parameters-ad-hoc-strings tokenizer-parameters)))
(let ((tokens nil))
;; each token looks like: (:kind <semantics> (start-byte start-line start-column) (end-byte end-line end-column))
(with-open-file (stream file :element-type 'unsigned-byte :direction :input) ; TODO: this will change when we support unicode
(let ((ps-stream (make-pseudo-stream :unread-chars nil :stream stream))
The upper - left corner of the file is considered 1:0:1 ( line 1 , column 0 , byte 1 )
so the character one to the left of that is 1:-1:0 ( line 1 , column -1 , byte 0 ) .
So we are at 1:-1 before we read the first character .
(pre-line 1) (pre-column -1) (pre-byte 0))
(loop do
(multiple-value-bind (type value
first-byte first-line first-column
last-byte last-line last-column)
(extract-token-from-pseudo-stream ps-stream
pre-byte pre-line pre-column
whitespace-table
word-symbol-table
non-word-symbol-table
number-table
string-table
digits-may-start-symbols?
comment-table
separator-tokens
cp-descriptors
ad-hoc-table
ad-hoc-strings)
(cond ((eq type :EOF)
(return nil))
(t
(push (list (or (and (or (eq type :AD-HOC)
(eq type :SYMBOL))
(gethash value ht-ad-hoc-types))
type)
value
(list first-byte first-line first-column)
(list last-byte last-line last-column))
tokens)))
(setq pre-byte last-byte
pre-line last-line
pre-column last-column)))))
(nreverse tokens))))
;;; ========================================================================
(defun extract-token-from-pseudo-stream (ps-stream
pre-byte pre-line pre-column
whitespace-table
word-symbol-table
non-word-symbol-table
number-table
string-table
digits-may-start-symbols?
comment-table
separator-tokens
cp-descriptors
ad-hoc-table
ad-hoc-strings)
;; each token looks like: (:kind <semantics> (start-byte start-line start-column) (end-byte end-line end-column))
(when digits-may-start-symbols?
(error "The option digits-may-start-symbols? is currently diabled."))
(let* ((current-byte pre-byte)
(current-line pre-line)
(current-column pre-column)
(first-byte )
(first-line )
(first-column )
(last-byte )
(last-line )
(last-column )
(char )
(char-code )
(token-chars nil)
(cp-descriptor nil)
(hex-char-1 )
(hex-char-code-1 )
(hex-char-2 )
(hex-char-code-2 )
(*extended-comment-state* (make-extended-comment-state)))
(declare (special *extended-comment-state*))
(macrolet ((local-warn (prefix line column byte msg &rest args)
`(warn "At line ~3D:~2D ~?"
;; ,prefix
,line ,column ; ,byte
,msg (list ,@args)))
(warn-here (msg &rest args)
`(local-warn "At" current-line current-column current-byte
,msg ,@args))
(local-read-char (char-var char-code-var eof-action newline-action open-extended-comment-action open-pragma-action)
`(progn
(setq ,char-var (ps-read-char ps-stream))
(incf current-byte)
(if (eq ,char-var +tokenizer-eof+)
,eof-action
(progn
(setq ,char-code-var (char-code ,char-var))
(cond ((eq ,char-var #\newline)
;; we proceed to line+1 : -1, so that the next character read
;; (which will be the leftmost on the line) will be at line+1 : 0
;; current-byte was incremented above, so we don't need to touch that here
(incf current-line)
(setq current-column -1)
,newline-action)
(t
(incf current-column)))
;; extended-comments and pragmas are similar,
;; but pragmas will be recognized in fewer places (following whitespace)
;; give pragmas precedence, as their openings may be encoded as something
;; like //@ when comments are //
,@(if (and (null open-extended-comment-action) (null open-pragma-action))
()
`((when (and (eq (svref comment-table ,char-code-var)
+maybe-open-comment-or-pragma-code+)
(not (null (setq cp-descriptor
(applicable-cp-descriptor
,char-var
ps-stream
cp-descriptors)))))
(if (cp-descriptor-pragma? cp-descriptor)
,open-pragma-action
,open-extended-comment-action))))
))))
(local-unread-char (char-var)
`(progn
(ps-unread-char ,char-var ps-stream)
;; ?? If we do this repeatedly, unreading newlines, can we end up at a column left of -1 ??
;; If that happens, we could decrement the line, but then what should the column be??
(decf current-byte)
(decf current-column)
))
(set-first-positions ()
inclusive -- first character of token
`(setq first-byte current-byte
first-line current-line
first-column current-column))
(set-last-positions ()
;; inclusive -- last character of token
`(setq last-byte current-byte
last-line current-line
last-column current-column))
(return-values-using-prior-last (type value)
`(return-from extract-token-from-pseudo-stream
(values ,type ,value
first-byte first-line first-column
last-byte last-line last-column)))
(return-values (type value)
`(progn
(set-last-positions)
(return-values-using-prior-last ,type ,value)))
(termination-warning (char-var char-code-var kind-of-token misc-chars kind-of-char)
`(local-warn "After"
last-line (1+ last-column) (1+ last-byte)
"Terminating ~A \"~A~A\" with ~S (hex code ~2,'0X)~A."
,kind-of-token
,misc-chars
(coerce (reverse token-chars) 'string)
,char-var ,char-code-var
,kind-of-char))
(look-for-ad-hoc-tokens (char-var char-code-var)
`(unless (eq (svref ad-hoc-table ,char-code-var) 0)
(dolist (ad-hoc-string ad-hoc-strings)
(debugging-comment "Looking for ad-hoc-string ~S starting with ~S" ad-hoc-string ,char-var)
(when (eq (schar ad-hoc-string 0) ,char-var)
(let ((found-ad-hoc-string?
(dotimes (i (1- (length ad-hoc-string)) t)
(let ((local-char (ps-read-char ps-stream)))
(debugging-comment "Looking for ad-hoc-string ~S, now at ~S" ad-hoc-string local-char)
(when (eq ,char-var +tokenizer-eof+)
(debugging-comment "Saw EOF")
from dotimes
;; Note: ad-hoc tokens take
;; precedence over open extended
;; comments, so we won't look here
;; to see if a comment is
;; starting.
(let ((current-string-index (+ i 1)))
(cond ((eq local-char (schar ad-hoc-string current-string-index))
(debugging-comment " extending match."))
(t
(debugging-comment " match to ~S failed." ad-hoc-string)
;; put back the char that doesn't match
(ps-unread-char local-char ps-stream)
put back all but the first char
(dotimes (j i)
(ps-unread-char (schar ad-hoc-string (- current-string-index 1 j))
ps-stream))
(return nil))))))))
(debugging-comment "Found? ~S" found-ad-hoc-string?)
(when found-ad-hoc-string?
;; If an ad-hoc-token is found, make sure it is not the start of a longer token
(let ((next-char (ps-read-char ps-stream)))
(unless (eq next-char +tokenizer-eof+)
(let* ((this-char-dispatch-code (svref word-symbol-table ,char-code-var))
(next-char-code (char-code next-char))
(next-char-dispatch-code (svref word-symbol-table next-char-code)))
in all cases ( except eof , of course ) , put back the next char
(ps-unread-char next-char ps-stream)
;; then see if ad-hoc string should go back...
(when (or (and (or (eq this-char-dispatch-code #.+word-symbol-start-code+)
(eq this-char-dispatch-code #.+word-symbol-continue-code+))
(or (eq next-char-dispatch-code #.+word-symbol-start-code+)
(eq next-char-dispatch-code #.+word-symbol-continue-code+)
(eq next-char-dispatch-code #.+syllable-separator-code+)))
(and (or (eq this-char-dispatch-code #.+non-word-symbol-start-code+)
(eq this-char-dispatch-code #.+non-word-symbol-continue-code+))
(or (eq next-char-dispatch-code #.+non-word-symbol-start-code+)
(eq next-char-dispatch-code #.+non-word-symbol-continue-code+)
(eq next-char-dispatch-code #.+syllable-separator-code+))))
put back all but the first char of the ad - hoc - string
(let ((n (1- (length ad-hoc-string))))
(dotimes (i n)
(ps-unread-char (schar ad-hoc-string (- n i))
ps-stream)))
(return nil)))))
(debugging-comment "Found match to ~S." ad-hoc-string)
;; char-var was seen via local-read-char, so the current position is already
;; set to point at it
(set-first-positions)
(dotimes (i (1- (length ad-hoc-string)))
(let ((temp-char (schar ad-hoc-string (+ i 1))))
(incf current-byte)
(cond ((eq temp-char #\newline)
;; we proceed to line+1 : -1, so that the next character read
;; (which will be the leftmost on the line) will be at line+1 : 0
;; current-byte was incremented above, so we don't need to touch that here
(incf current-line)
(setq current-column -1))
(t
(incf current-column)))))
(return-values :AD-HOC ad-hoc-string)))))))
)
(tagbody
(go start-scan-for-new-token)
;;
;; ======================================================================
WHITESPACE
;; ======================================================================
;;
unrecognized-char-while-scanning-whitespace
(warn-here "Unrecognized ~6S (hex code ~2,'0X) while scanning whitespace -- treated as whitespace"
char char-code)
;;
continue-whitespace
start-scan-for-new-token
;;
(local-read-char char char-code
(return-values :EOF nil)
()
(go start-extended-comment)
(go start-pragma))
ignore-erroneous-pragma
(look-for-ad-hoc-tokens char char-code)
;;
(case (svref whitespace-table char-code)
;; majority
(#.+whitespace-code+ (go continue-whitespace))
;; normal termination
(#.+word-symbol-start-code+ (go start-word-symbol))
(#.+non-word-symbol-start-code+ (go start-non-word-symbol))
(#.+wildcard-code+ (go start-wildcard))
(#.+number-start-code+ (go start-number))
(#.+string-quote-code+ (go start-string))
(#.+separator-code+ (go start-separator))
(#.+comment-to-eol-code+ (go start-comment-to-eol))
(#.+char-literal-start-code+ (go start-char-literal))
;; peculiar termination
(#.+word-symbol-continue-code+ (go weird-middle-of-word-symbol-after-whitespace))
(#.+non-word-symbol-continue-code+ (go weird-middle-of-non-word-symbol-after-whitespace))
(#.+number-continue-code+ (go weird-middle-of-number-after-whitespace))
(otherwise (go unrecognized-char-while-scanning-whitespace)))
;;
;; ========================================
;;
weird-middle-of-word-symbol-after-whitespace
;;
(set-first-positions)
(warn-here "Ignoring illegal start for word symbol: ~S" char)
(return-values :ERROR (format nil "~A" char))
;;
;; ========================================
;;
weird-middle-of-non-word-symbol-after-whitespace
;;
(set-first-positions)
(warn-here "Ignoring illegal start for non-word symbol: ~S" char)
(return-values :ERROR (format nil "~A" char))
;;
;; ========================================
;;
weird-middle-of-number-after-whitespace
;;
(set-first-positions)
(warn-here "Ignoring illegal start for number: ~S" char)
(return-values :ERROR (format nil "~A" char))
;;
;; ======================================================================
;; COMMENT TO END OF LINE
;; ======================================================================
;;
start-comment-to-eol
(set-first-positions)
continue-comment-to-eol
;;
(push char token-chars)
(local-read-char char char-code
(return-values :COMMENT-TO-EOL
(coerce (nreverse token-chars) 'string))
(return-values :COMMENT-TO-EOL
(coerce (nreverse token-chars) 'string))
()
())
(go continue-comment-to-eol)
;;
;; ======================================================================
;; SEPARATOR
;; ======================================================================
;;
start-separator
;;
(set-first-positions)
;;
(return-values :SYMBOL (svref separator-tokens char-code))
;;
;; ======================================================================
;; WILDCARD (single underbar), but also __, ___, etc.
;; ======================================================================
;;
start-wildcard
;;
(set-first-positions)
;;
;; extend-wildcard
;;
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-word-symbol-with-eof)
()
(go terminate-word-symbol-with-extended-comment)
())
(case (svref whitespace-table char-code)
(#.+wildcard-code+
          ;; (go extend-wildcard) ; disabled per Lambert's request
(warn-here "Wildcards are a single underbar -- double underbar is not recognized.")
(return-values :ERROR "__")))
(local-unread-char char)
(return-values-using-prior-last :SYMBOL (coerce (nreverse token-chars) 'string))
;; ======================================================================
;; WORD-SYMBOL
;; ======================================================================
;;
start-word-symbol
;;
(set-first-positions)
;;
extend-word-symbol
;;
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-word-symbol-with-eof)
()
(go terminate-word-symbol-with-extended-comment)
())
;;
;; look for ad hoc symbols that happen to start with word symbol char
;;
(case (svref word-symbol-table char-code)
;; majority
(#.+word-symbol-continue-code+ (go extend-word-symbol))
(#.+syllable-separator-code+ (go extend-symbol-with-new-syllable))
;; normal termination
(#.+whitespace-code+ (go terminate-word-symbol-with-whitespace))
;; less likely
(#.+non-word-symbol-start-code+ (go terminate-word-symbol-with-start-non-word-symbol))
(#.+separator-code+ (go terminate-word-symbol-with-start-separator))
(#.+comment-to-eol-code+ (go terminate-word-symbol-with-start-comment-to-eol))
;; unlikely
(#.+word-symbol-start-code+ (go terminate-word-symbol-with-start-word-symbol))
(#.+number-start-code+ (go terminate-word-symbol-with-start-number))
(#.+string-quote-code+ (go terminate-word-symbol-with-start-string))
(#.+char-literal-start-code+ (go terminate-word-symbol-with-start-char-literal))
;; weird
(#.+non-word-symbol-continue-code+ (go terminate-word-symbol-with-continue-non-word-symbol))
(#.+number-continue-code+ (go terminate-word-symbol-with-continue-number))
(otherwise (go unrecognized-char-while-scanning-word-symbol)))
terminate-word-symbol-with-start-non-word-symbol
(go terminate-word-symbol)
unrecognized-char-while-scanning-word-symbol
(termination-warning char char-code "word symbol" "" ", which is unrecognized")
(go terminate-word-symbol)
;;
terminate-word-symbol-with-continue-number ; weird
(termination-warning char char-code "word symbol" "" ", which can continue but not start a number")
(return-values-using-prior-last :SYMBOL (coerce (nreverse token-chars) 'string))
;;
terminate-word-symbol-with-continue-non-word-symbol ; weird
(termination-warning char char-code "word symbol" "" ", which can continue but not start a non-word symbol")
(go terminate-word-symbol)
;;
terminate-word-symbol-with-start-word-symbol
(termination-warning char char-code "word symbol" "" ", which can start a word symbol but not continue one")
(go terminate-word-symbol)
;;
terminate-word-symbol-with-start-number
;;(termination-warning char char-code "word symbol" "" "is a beginning of a number")
(go terminate-word-symbol)
;;
terminate-word-symbol-with-start-separator
terminate-word-symbol-with-start-string
terminate-word-symbol-with-start-char-literal
terminate-word-symbol-with-start-comment-to-eol
terminate-word-symbol-with-whitespace
terminate-word-symbol-with-extended-comment
terminate-word-symbol
;;
;; Last-byte, last-line, last-column all refer to the last character of the symbol we've been scanning.
      ;; Char is the first character past that position.
;; We put char back into the stream, and tell our caller the last-xxx values.
;; Those become the initial values in the next call here, and they are
;; incremented when the char we're pushing here is then popped.
(local-unread-char char)
;;
terminate-word-symbol-with-eof
;;
(return-values-using-prior-last :SYMBOL (coerce (nreverse token-chars) 'string))
;;
;; ======================================================================
;; NON-WORD-SYMBOL
;; ======================================================================
;;
start-non-word-symbol
;;
(set-first-positions)
;;
extend-non-word-symbol
;;
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-non-word-symbol-with-eof)
()
(go terminate-non-word-symbol-with-extended-comment)
())
;;
(case (svref non-word-symbol-table char-code)
;; majority
(#.+non-word-symbol-continue-code+ (go extend-non-word-symbol))
(#.+syllable-separator-code+ (go extend-symbol-with-new-syllable))
;; non-word termination
(#.+whitespace-code+ (go terminate-non-word-symbol-with-whitespace))
;; less likely
(#.+word-symbol-start-code+ (go terminate-non-word-symbol-with-start-word-symbol))
(#.+separator-code+ (go terminate-non-word-symbol-with-start-separator))
(#.+comment-to-eol-code+ (go terminate-non-word-symbol-with-start-comment-to-eol))
;; unlikely
(#.+non-word-symbol-start-code+ (go terminate-non-word-symbol-with-start-non-word-symbol))
(#.+number-start-code+ (go terminate-non-word-symbol-with-start-number))
(#.+string-quote-code+ (go terminate-non-word-symbol-with-start-string))
(#.+char-literal-start-code+ (go terminate-non-word-symbol-with-start-char-literal))
;; weird
(#.+word-symbol-continue-code+ (go terminate-non-word-symbol-with-continue-word-symbol))
(#.+number-continue-code+ (go terminate-non-word-symbol-with-continue-number))
(otherwise (go unrecognized-char-while-scanning-non-word-symbol)))
unrecognized-char-while-scanning-non-word-symbol
(termination-warning char char-code "non-word symbol" "" ", which is unrecognized")
(go terminate-non-word-symbol)
;;
terminate-non-word-symbol-with-continue-number ; weird
(termination-warning char char-code "non-word symbol" "" ", which can continue but not start a number")
(return-values-using-prior-last :SYMBOL (coerce (nreverse token-chars) 'string))
;;
terminate-non-word-symbol-with-continue-word-symbol
;; with forms such as "::?", where the question mark is dubious, print a warning
(termination-warning char char-code "non-word symbol" "" ", which can continue but not start a word symbol")
(go terminate-non-word-symbol)
;;
terminate-non-word-symbol-with-start-non-word-symbol
(termination-warning char char-code "non-word symbol" "" ", which can start a non-word symbol but not continue one")
(go terminate-non-word-symbol)
;;
terminate-non-word-symbol-with-start-number
;;(termination-warning char char-code "non-word symbol" "" ", which is beginning of a number.")
(go terminate-non-word-symbol)
;;
terminate-non-word-symbol-with-start-word-symbol
terminate-non-word-symbol-with-start-separator
terminate-non-word-symbol-with-start-string
terminate-non-word-symbol-with-start-char-literal
terminate-non-word-symbol-with-start-comment-to-eol
terminate-non-word-symbol-with-whitespace
terminate-non-word-symbol-with-extended-comment
terminate-non-word-symbol
;;
;; Last-byte, last-line, last-column all refer to the last character of the symbol we've been scanning.
      ;; Char is the first character past that position.
;; We put char back into the stream, and tell our caller the last-xxx values.
;; Those become the initial values in the next call here, and they are
;; incremented when the char we're pushing here is then popped.
(local-unread-char char)
;;
terminate-non-word-symbol-with-eof
;;
(return-values-using-prior-last :SYMBOL (coerce (nreverse token-chars) 'string))
;; ======================================================================
      ;; SYLLABLE
;; ======================================================================
;;
extend-symbol-with-new-syllable
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-word-symbol-with-eof)
()
(go terminate-word-symbol-with-extended-comment)
())
;;
(case (svref word-symbol-table char-code)
;; normal continutation
(#.+word-symbol-start-code+ (go extend-word-symbol))
(#.+word-symbol-continue-code+ (go extend-word-symbol))
(#.+non-word-symbol-start-code+ (go extend-non-word-symbol))
(#.+non-word-symbol-continue-code+ (go extend-non-word-symbol))
        ;; TODO: We wish to disallow multiple consecutive underbars, but for the moment C code generation and Snark use "__" in names
(#.+syllable-separator-code+ (go extend-symbol-with-new-syllable))
(#.+wildcard-code+ (go extend-symbol-with-new-syllable)) ; shouldn't happen here, but just in case
;;
(otherwise (go terminate-symbol-but-preserve-wildcard)))
terminate-symbol-but-preserve-wildcard
;;
;; We want patterns such as "_+_" or "_::_" to tokenize as
;; ("_" "+" "_") or ("_" "::" "_"), respectively.
;;
      ;; Assume the first underbar is already handled.
;;
;; Put the terminating char (which could be almost anything) and the underbar back
;; for for future processing, so the next pass will see the underbar followed by
;; the terminating char.
;;
(local-unread-char char)
(local-unread-char #\_)
;;
;; cdr in following removes trailing underbar from middle token being returned.
;;
(return-values :SYMBOL (coerce (nreverse (cdr token-chars)) 'string))
;;
;; ======================================================================
;; CHARACTER
;; ======================================================================
;;
      ;; Note: #\abcde => two tokens: (:CHARACTER #\a) (:SYMBOL "bcde")
;;
start-char-literal
(set-first-positions)
(set-last-positions)
(local-read-char char char-code
(termination-warning char char-code "partial character literal" "#" ", which is eof")
(termination-warning char char-code "partial character literal" "#" "")
(termination-warning char char-code "partial character literal" "#" ", which starts an extended comment")
(termination-warning char char-code "partial character literal" "#" ", which starts a pragma")
)
(set-last-positions)
(cond ((eq char #\\)
(local-read-char char char-code
(termination-warning char char-code "partial non-word character literal" "#\\" ", which is eof")
(termination-warning char char-code "partial non-word character literal" "#\\" "")
(termination-warning char char-code "partial non-word character literal" "#\\" ", which starts an extended comment")
(termination-warning char char-code "partial non-word character literal" "#\\" ", which starts a pragma")
)
(case char
#-gcl (#\a (return-values :CHARACTER #-mcl #\bel #+mcl #\bell ))
(#\b (return-values :CHARACTER #\backspace ))
(#\t (return-values :CHARACTER #\tab ))
(#\n (return-values :CHARACTER #\newline ))
#-(or mcl gcl) (#\v (return-values :CHARACTER #\vt ))
(#\f (return-values :CHARACTER #\page ))
(#\r (return-values :CHARACTER #\return ))
(#\s (return-values :CHARACTER #\space ))
(#\\ (return-values :CHARACTER #\\ ))
(#\" (return-values :CHARACTER #\" ))
(#\# (return-values :CHARACTER #\# ))
(#\x (progn
(set-last-positions)
(local-read-char
hex-char-1 hex-char-code-1
(termination-warning hex-char-1 hex-char-code-1 "partial hex character literal" "#\\x" ", which is eof")
(termination-warning hex-char-1 hex-char-code-1 "partial hex character literal" "#\\x" "")
(termination-warning hex-char-1 hex-char-code-1 "partial hex character literal" "#\\x" ", which starts an extended comment")
(termination-warning hex-char-1 hex-char-code-1 "partial hex character literal" "#\\x" ", which starts a pragma")
)
(set-last-positions)
(local-read-char
hex-char-2 hex-char-code-2
(termination-warning hex-char-2 hex-char-code-2 "partial hex character literal" (format nil "#\\x~A" hex-char-1) ", which is eof")
(termination-warning hex-char-2 hex-char-code-2 "partial hex character literal" (format nil "#\\x~A" hex-char-1) "")
(termination-warning hex-char-2 hex-char-code-2 "partial hex character literal" (format nil "#\\x~A" hex-char-1) ", which starts an extended comment")
(termination-warning hex-char-2 hex-char-code-2 "partial hex character literal" (format nil "#\\x~A" hex-char-1) ", which starts a pragma")
)
(let ((high-nibble (convert-hex-char-to-number hex-char-1))
(low-nibble (convert-hex-char-to-number hex-char-2)))
(when (or (null high-nibble) (null low-nibble))
(let ((token (format nil "#x\\~A~A" hex-char-1 hex-char-2)))
(warn-here "Unrecognized character literal, chars after \"#\\x\" are ~S ~S, with hex codes ~2,'0X ~2,'0X"
;; token
hex-char-1
hex-char-2
hex-char-code-1
hex-char-code-2)
(return-values :ERROR token)))
(return-values :CHARACTER (code-char (+ (ash high-nibble 4) low-nibble))))))
(otherwise
(let ((token (format nil "#\\~A" char)))
(warn-here "Unrecognized character literal, char after \"#\\\" is ~S with hex code ~2,'0X"
;; token
char
char-code)
(return-values :ERROR token)))))
(t
(return-values-using-prior-last :CHARACTER char)))
;; ======================================================================
;; NUMBER
;; ======================================================================
;;
start-number
;;
(set-first-positions)
(when (eq char #\0)
;; special cases for hex, octal, binary
(set-last-positions)
(local-read-char char char-code
(go terminate-zero-with-eof)
()
(go terminate-number-with-extended-comment)
())
(case char
((#\X #\x)
(local-read-char char char-code
(go terminate-hex-with-eof)
()
(go terminate-hex-with-extended-comment)
())
          ;; at this point, have seen #\0 #\x char
(loop while (member char '(#\0 #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8 #\9
#\a #\b #\c #\d #\e #\f
#\A #\B #\C #\D #\E #\F)
:test 'eq)
do
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-hex-with-eof)
()
(go terminate-hex-with-extended-comment)
()))
          ;; at this point, have seen #\0 #\X token-chars non-hex-char,
;; where all token-chars are hex
(if (null token-chars)
(go terminate-hex-prematurely)
(go terminate-hex-cleanly)))
((#\O #\o)
(local-read-char char char-code
(go terminate-octal-with-eof)
()
(go terminate-octal-with-extended-comment)
())
          ;; at this point, have seen #\0 #\O char
(loop while (member char '(#\0 #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8) :test 'eq) do
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-octal-with-eof)
()
(go terminate-octal-with-extended-comment)
()))
          ;; at this point, have seen #\0 #\O token-chars non-octal-char,
;; where all token-chars are octal
(if (null token-chars)
(go terminate-octal-prematurely)
(go terminate-octal-cleanly)))
((#\B #\b)
(local-read-char char char-code
(go terminate-binary-with-eof)
()
(go terminate-binary-with-extended-comment)
())
          ;; at this point, have seen #\0 #\B char
(loop while (or (eq char '#\0) (eq char #\1)) do
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-binary-with-eof)
()
(go terminate-binary-with-extended-comment)
()))
          ;; at this point, have seen #\0 #\B token-chars non-binary-char,
;; where all token-chars are octal
(if (null token-chars)
(go terminate-binary-prematurely)
(go terminate-binary-cleanly)))
(t
;; else fall through to ordinary number
;; at this point, have seen #\0 char, where char is not among "XxOoBb"
(push #\0 token-chars)
(go extend-number-after-initial-zero))))
;;
extend-number
;;
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-number-with-eof)
()
(go terminate-number-with-extended-comment)
())
;;
extend-number-after-initial-zero
(case (svref number-table char-code)
;; majority
(#.+number-continue-code+ (go extend-number))
;; normal termination
(#.+whitespace-code+ (go terminate-number-with-whitespace))
;; e.g. 123ABC
(#.+word-symbol-start-code+ (go terminate-number-with-start-word-symbol))
(#.+word-symbol-continue-code+ (go terminate-number-with-continue-word-symbol))
;; e.g. 123ABC
(#.+non-word-symbol-start-code+ (go terminate-number-with-start-non-word-symbol))
(#.+non-word-symbol-continue-code+ (go terminate-number-with-continue-non-word-symbol))
;; less likely
(#.+separator-code+ (go terminate-number-with-start-separator))
(#.+comment-to-eol-code+ (go terminate-number-with-start-comment-to-eol))
;; unlikely
(#.+number-start-code+ (go terminate-number-with-start-number))
(#.+string-quote-code+ (go terminate-number-with-start-string))
(#.+char-literal-start-code+ (go terminate-number-with-start-char-literal))
;;
(otherwise (go unrecognized-char-while-scanning-number)))
;;
unrecognized-char-while-scanning-number
(termination-warning char char-code "number" "" ", which is unrecognized")
(go terminate-number-unexpectedly)
;;
terminate-number-with-start-word-symbol
;; (termination-warning char char-code "number" "" ", which starts a word symbol") ; sigh -- this would be triggered by "import Foo#A3B", so suppress it
(go terminate-number-unexpectedly)
;;
terminate-number-with-continue-word-symbol
;;(termination-warning char char-code "number" "" ", which continues but does not start a word symbol")
(go terminate-number-unexpectedly)
;;
terminate-number-with-continue-non-word-symbol
;;(termination-warning char char-code "number" "" ", which continues but does not start a non-word symbol")
(go terminate-number-unexpectedly)
;;
terminate-number-with-start-number
(termination-warning char char-code "number" "" ", which starts a new number")
(go terminate-number-unexpectedly)
terminate-number-unexpectedly
terminate-number-with-start-non-word-symbol;; e.g. +, -, =, etc.
terminate-number-with-start-separator
terminate-number-with-start-string
terminate-number-with-start-char-literal
terminate-number-with-start-comment-to-eol
terminate-number-with-whitespace
terminate-number-with-extended-comment
;;
;; Last-byte, last-line, last-column all refer to the last character of the number we've been scanning.
      ;; Char is the first character past that position.
;; We put char back into the stream, and tell our caller the last-xxx values.
;; Those become the initial values in the next call here, and they are
;; incremented when the char we're pushing here is then popped.
(local-unread-char char)
;;
terminate-number-with-eof
;;
(return-values-using-prior-last :NUMBER (parse-integer (coerce (nreverse token-chars) 'string)))
;;
terminate-zero-with-eof
(return-values-using-prior-last :NUMBER 0)
;;
;;
terminate-hex-prematurely
(termination-warning char char-code "hex number" "" ", but there are no hex digits")
terminate-hex-with-extended-comment
(termination-warning char char-code "hex number" "" ", which is not a hex char or expected whitespace or punctuation")
terminate-hex-cleanly
(local-unread-char char)
terminate-hex-with-eof
(return-values-using-prior-last :NUMBER (parse-integer (coerce (nreverse token-chars) 'string) :radix 16))
;;
;;
terminate-octal-prematurely
(termination-warning char char-code "octal number" "" ", but there are no octal digits")
terminate-octal-with-extended-comment
(termination-warning char char-code "octal number" "" ", which is not an octal char or expected whitespace or punctuation")
terminate-octal-cleanly
(local-unread-char char)
terminate-octal-with-eof
(return-values-using-prior-last :NUMBER (parse-integer (coerce (nreverse token-chars) 'string) :radix 8))
;;
;;
terminate-binary-prematurely
(termination-warning char char-code "binary number" "" ", but there are no binary digits")
terminate-binary-with-extended-comment
(termination-warning char char-code "binary number" "" ", which is not a binary char or expected whitespace or punctuation")
terminate-binary-cleanly
(local-unread-char char)
terminate-binary-with-eof
(return-values-using-prior-last :NUMBER (parse-integer (coerce (nreverse token-chars) 'string) :radix 2))
;;
;; ======================================================================
;; STRING
;; ======================================================================
;;
escape-next-char-in-string
;;
(local-read-char char char-code
(let ((token (format nil "~A\\" (coerce (nreverse token-chars) 'string))))
(warn-here "EOF immediately after escape character in string ~S" token)
(return-values :ERROR token))
()
()
())
(case char
#-gcl (#\a (push #-mcl #\bel #+mcl #\bell token-chars))
(#\b (push #\backspace token-chars))
(#\t (push #\tab token-chars))
(#\n (push #\newline token-chars))
#-(or mcl gcl) (#\v (push #\vt token-chars))
(#\f (push #\page token-chars))
(#\r (push #\return token-chars))
(#\s (push #\space token-chars))
(#\\ (push #\\ token-chars))
(#\" (push #\" token-chars))
(#\# (push #\# token-chars)) ; Allow \# within string for # to stop emacs from getting confused
(#\x (progn
(set-last-positions)
(local-read-char
hex-char-1 hex-char-code-1
(termination-warning hex-char-1 hex-char-code-1 "partial hex character lliteral" "#\\x" ", which is eof")
(termination-warning hex-char-1 hex-char-code-1 "partial hex character lliteral" "#\\x" "")
(termination-warning hex-char-1 hex-char-code-1 "partial hex character lliteral" "#\\x" ", which starts an extended comment")
(termination-warning hex-char-1 hex-char-code-1 "partial hex character lliteral" "#\\x" ", which starts a pragma")
)
(set-last-positions)
(local-read-char
hex-char-2 hex-char-code-2
(termination-warning hex-char-2 hex-char-code-2 "partial hex character lliteral" (format nil "#\\x~A" hex-char-1) ", which is eof")
(termination-warning hex-char-2 hex-char-code-2 "partial hex character lliteral" (format nil "#\\x~A" hex-char-1) "")
(termination-warning hex-char-2 hex-char-code-2 "partial hex character lliteral" (format nil "#\\x~A" hex-char-1) ", which starts an extended comment")
(termination-warning hex-char-2 hex-char-code-2 "partial hex character lliteral" (format nil "#\\x~A" hex-char-1) ", which starts a pragma")
)
(let ((high-nibble (convert-hex-char-to-number hex-char-1))
(low-nibble (convert-hex-char-to-number hex-char-2)))
(when (or (null high-nibble) (null low-nibble))
(let ((token (format nil "#\\x~A~A" hex-char-1 hex-char-2)))
(warn-here "Unrecognized character literal, chars after \"#\\x\" are ~S ~S, with hex codes ~2,'0X ~2,'0X"
;; token
hex-char-1
hex-char-2
hex-char-code-1
hex-char-code-2)
(return-values :ERROR token)))
(push (code-char (+ (ash high-nibble 4) low-nibble)) token-chars))))
(otherwise
(let ((token (format nil "#\\~A" char)))
(warn-here "Unrecognized character literal, char after \"#\\\" is ~S, with hex code ~2,'0X"
;; token
char
char-code)
(return-values :ERROR token))))
(go extend-string)
;;
start-string
;;
(set-first-positions)
;;
extend-string
;;
(local-read-char char char-code
(let ((token (coerce (nreverse token-chars) 'string)))
(warn-here "EOF inside string starting at line ~S, column ~S" first-line first-column)
(return-values :ERROR token))
()
()
())
(case (svref string-table char-code)
(#.+string-quote-code+ (go close-string))
(#.+string-escape-code+ (go escape-next-char-in-string))
(otherwise (push char token-chars) (go extend-string)))
;;
close-string
;;
(return-values :STRING (coerce (nreverse token-chars) 'string))
;;
;; ======================================================================
;; EXTENDED COMMENT
;; ======================================================================
;;
start-extended-comment
;;
(set-first-positions)
;;
(multiple-value-bind (error? comment-chars last-byte last-line last-column)
(skip-extended-comment char ps-stream cp-descriptor cp-descriptors
                                ;; Note: Pragma bodies are treated as ordinary text,
;; not as recursively nested structures.
;; This can cause a minor problem in the unusual
;; situation where the body of a pragma contains
                                ;; an unmatched (open or close) extended comment
;; delimiter. In those rare cases, the user will
;; need to manually adjust the body of their pragma
;; if they wish to block-comment around it.
comment-table
first-byte
first-line
first-column)
(return-values-using-prior-last (if error? :EXTENDED-COMMENT-ERROR :EXTENDED-COMMENT)
(coerce (nreverse comment-chars) 'string)))
;;
;; ======================================================================
      ;; PRAGMA
;; ======================================================================
;;
start-pragma
;;
(set-first-positions)
;;
(multiple-value-bind (error? pragma-chars last-byte last-line last-column)
;; scan-pragma calls skip-extended-comment with the recursive? and eof-ok?
;; flags set to false
(scan-pragma char ps-stream cp-descriptor
first-byte
first-line
first-column)
(cond (error?
(dolist (char pragma-chars)
(ps-unread-char char ps-stream))
(setq current-byte first-byte
current-line first-line
current-column first-column)
(local-read-char char char-code
(return-values :EOF nil)
()
()
())
(go ignore-erroneous-pragma))
(t
(let* ((prefix (cp-descriptor-prefix cp-descriptor))
(postfix (cp-descriptor-postfix cp-descriptor))
(start (length prefix))
(end (- (length pragma-chars) (length postfix)))
(body-chars (subseq (nreverse pragma-chars) start end))
(body (coerce body-chars 'string)))
(return-values-using-prior-last :PRAGMA (list prefix body postfix))))))
;;
;; ========================================
))))
(defun convert-hex-char-to-number (x)
  "Return the numeric value (0-15) of the hexadecimal digit character X,
or NIL if X is not a character or not a hex digit.
Both lowercase a-f and uppercase A-F are accepted."
  ;; DIGIT-CHAR-P with radix 16 maps 0-9/a-f/A-F to 0-15 and returns NIL
  ;; otherwise, exactly like the hand-rolled CASE this replaces.
  ;; The CHARACTERP guard preserves the original's NIL result for
  ;; non-character input (e.g. an eof marker) instead of signaling an error.
  (and (characterp x)
       (digit-char-p x 16)))
(defun applicable-cp-descriptor (first-char ps-stream cp-descriptors)
  "Return the first descriptor in CP-DESCRIPTORS whose prefix matches the
upcoming characters of PS-STREAM (beginning with FIRST-CHAR), or NIL when
none matches.  The stream is left unchanged by the prefix probes."
  (find-if #'(lambda (descriptor)
               (pseudo-stream-has-prefix? first-char
                                          ps-stream
                                          (cp-descriptor-prefix descriptor)))
           cp-descriptors))
(defun pseudo-stream-has-prefix? (first-char ps-stream prefix)
  "Test whether FIRST-CHAR followed by the next characters of PS-STREAM
spells out PREFIX.  FIRST-CHAR has already been consumed by the caller;
every character this function reads is unread again before returning, so
the stream is left exactly as it was found."
  (and (eq (schar prefix 0) first-char)
       (let* ((lookahead-chars nil)
              (result
               (dotimes (i (1- (length prefix))
                           ;; if all chars match, the result is t
                           t)
                 (let ((char (ps-read-char ps-stream)))
                   (cond ((eq char +tokenizer-eof+)
                          ;; if eof intervenes, the result is nil
                          (return nil))
                         ((eq char (schar prefix (1+ i)))
                          (push char lookahead-chars))
                         (t
                          ;; if some char is a mismatch, the result is nil
                          (ps-unread-char char ps-stream)
                          (return nil)))))))
         ;; back out so stream is in original state
         ;; (LOOKAHEAD-CHARS holds the consumed chars most-recent-first,
         ;; which is exactly the order they must be unread in)
         (dolist (char lookahead-chars)
           (ps-unread-char char ps-stream))
         result)))
;; Scratch state threaded through SKIP-EXTENDED-COMMENT and
;; AUX-SKIP-EXTENDED-COMMENT while scanning an extended comment or pragma.
(defstruct extended-comment-state
  error?          ; T when eof was hit and the descriptor does not allow eof
  cp-descriptors  ; descriptors to try when a nested comment/pragma may open
  comment-table   ; dispatch table used to spot possible nested openers
  byte            ; byte position of the most recently consumed character
  line            ; line counter, incremented at each newline
  column          ; column counter, reset to -1 just after a newline
  chars)          ; the scanned characters, accumulated in reverse order
;; Single reusable scratch instance: SKIP-EXTENDED-COMMENT resets every
;; slot before each scan, so one shared state object suffices.
(defvar *extended-comment-state* (make-extended-comment-state))
(defun scan-pragma (first-char ps-stream cp-descriptor first-byte first-line first-column)
  ;; scan similarly to an extended comment, but not recursive:
  ;; passing an empty descriptor list and a dummy comment table means the
  ;; scanner can never recognize a nested opener, so the pragma body is
  ;; scanned as flat text up to its postfix.
  (skip-extended-comment first-char ps-stream cp-descriptor '()
                         #() ; comment table will be ignored
                         first-byte first-line first-column))
(defun skip-extended-comment (first-char ps-stream cp-descriptor cp-descriptors
                              comment-table
                              first-byte first-line first-column)
  "Scan an extended comment (or pragma) whose first character FIRST-CHAR has
already been consumed.  Returns (values error? chars last-byte last-line
last-column), where CHARS is the comment's characters in reverse order."
  (let ((state *extended-comment-state*))
    ;; reset the shared scratch state for this scan
    (setf (extended-comment-state-error?         state) nil
          (extended-comment-state-cp-descriptors state) cp-descriptors
          (extended-comment-state-comment-table  state) comment-table
          (extended-comment-state-byte           state) first-byte
          (extended-comment-state-line           state) first-line
          (extended-comment-state-column         state) first-column
          (extended-comment-state-chars          state) (list first-char))
    (aux-skip-extended-comment ps-stream cp-descriptor state)
    ;; unpack the results accumulated by the scanner
    (values (extended-comment-state-error? state)
            (extended-comment-state-chars  state)
            (extended-comment-state-byte   state)
            (extended-comment-state-line   state)
            (extended-comment-state-column state))))
(defun aux-skip-extended-comment (ps-stream cp-descriptor ec-state)
  ;; Core scanner for extended comments and pragmas.  Consumes characters
  ;; from PS-STREAM until the descriptor's postfix is found (returns NIL)
  ;; or eof is hit (returns non-NIL; sets the state's ERROR? slot when the
  ;; descriptor says eof is not acceptable).  Every consumed character is
  ;; pushed onto the state's CHARS slot, and the byte/line/column counters
  ;; are advanced.  The caller has already consumed the first prefix char.
  (let* ((prefix (cp-descriptor-prefix cp-descriptor))
         (postfix (cp-descriptor-postfix cp-descriptor))
         (recursive? (cp-descriptor-recursive? cp-descriptor))
         (eof-ok? (cp-descriptor-eof-ok? cp-descriptor))
         (open-size (1- (length prefix)))    ; prefix chars still unread
         (close-size (1- (length postfix)))  ; postfix chars past the first
         (close-char-0 (schar postfix 0))    ; quick test for possible postfix
         (comment-table (extended-comment-state-comment-table ec-state)))
    ;; skip past prefix
    (dotimes (i open-size)
      (push (ps-read-char ps-stream)
            (extended-comment-state-chars ec-state)))
    (incf (extended-comment-state-byte ec-state) open-size)
    (incf (extended-comment-state-column ec-state) open-size)
    ;; scan for postfix or recursive prefix
    (do ((char (ps-read-char ps-stream) (ps-read-char ps-stream)))
        ((eq char +tokenizer-eof+)
         ;; eof before the closing delimiter: an error unless the
         ;; descriptor explicitly allows eof termination
         (cond (eof-ok?
                nil)
               (t
                (setf (extended-comment-state-error? ec-state) t)
                t)))
      (push char (extended-comment-state-chars ec-state))
      (incf (extended-comment-state-byte ec-state))
      (cond ((eq char #\newline)
             ;; next char read will be at column 0 of the new line
             (incf (extended-comment-state-line ec-state))
             (setf (extended-comment-state-column ec-state) -1))
            (t
             (incf (extended-comment-state-column ec-state))))
      (cond ((and (eq char close-char-0)
                  (pseudo-stream-has-prefix? char ps-stream postfix))
             ;; skip past postfix (CHAR was its first character, already
             ;; consumed above; consume the remaining CLOSE-SIZE chars)
             (dotimes (i close-size)
               (push (ps-read-char ps-stream)
                     (extended-comment-state-chars ec-state)))
             (incf (extended-comment-state-byte ec-state) close-size)
             (incf (extended-comment-state-column ec-state) close-size)
             (return-from aux-skip-extended-comment nil))
            ((and recursive?
                  (eq (svref comment-table (char-code char))
                      +maybe-open-comment-or-pragma-code+))
             ;; recur if both outer and inner extended comments are recursive
             (let ((new-cp-descriptor (applicable-cp-descriptor
                                       char ps-stream
                                       (extended-comment-state-cp-descriptors ec-state))))
               (when (not (null new-cp-descriptor))
                 (let ((inner-is-recursive? (cp-descriptor-recursive? new-cp-descriptor)))
                   (when inner-is-recursive?
                     (aux-skip-extended-comment ps-stream new-cp-descriptor ec-state))))))))))
;;; ========================================================================
(defun install-tokens (session tokens comments)
  "Install TOKENS (the non-comment tokens) and COMMENTS into SESSION.
Each token looks like:
  (:kind <semantics> (start-byte start-line start-column)
                     (end-byte end-line end-column))
Builds a vector of parser locations, one more than the number of tokens,
creating one +token-rule+ parser node per token between consecutive
locations, and attaching to each location the comments whose start byte
precedes the corresponding token.  Any comments left over are attached to
the final (EOF) location.  Returns the locations vector."
  (when (null +token-rule+)
    ;; BUG FIX: the old message was the unhelpful "???".
    (break "install-tokens: +token-rule+ is null -- tokenizer rules not initialized?"))
  (let ((locations (make-array (1+ (length tokens)) :initial-element nil))
        (pre-index 0)
        (pre-location (make-parser-location
                       :index 0
                       :post-nodes nil))
        (last-node nil))
    (setf (svref locations pre-index) pre-location)
    (setf (parse-session-locations session) locations)
    (setf (parse-session-comments session) comments)
    (dolist (token tokens)
      (let* ((token-start-byte (first (third token)))
             (pre-comments '())
             (post-index (1+ pre-index))
             (node (create-parser-node :rule +token-rule+
                                       :semantics token
                                       :pre-index pre-index
                                       :post-index-ptr (list post-index)
                                       :parents nil
                                       :children nil)))
        (setq last-node node)
        (push node (parser-location-post-nodes pre-location))
        (when-debugging (when *verbose?* (show-node node "Created ")))
        (setf (parser-location-position pre-location) (third token))
        ;; (third comment) is the pre-position of a comment: (byte line column).
        ;; BUG FIX: this line had lost its ";;" markers and would have
        ;; evaluated as unbound symbols at runtime.
        (loop while (and (not (null comments))
                         (< (first (third (first comments)))
                            token-start-byte))
          do (push (pop comments) pre-comments))
        (setf (parser-location-pre-comments pre-location) pre-comments)
        (when-debugging
         (when *verbose?*
           (unless (null pre-comments)
             ;; BUG FIX: "Pre-Comemnts" typo in the debug message.
             (comment "Pre-Comments for ~6D: ~S" pre-index pre-comments))))
        (setq pre-index post-index)
        (setq pre-location (make-parser-location
                            :index post-index
                            :post-nodes nil))
        (setf (svref locations pre-index) pre-location)))
    (debugging-comment "Pre-Comments for ~6D (eof): ~S" pre-index comments)
    (let ((eof-location pre-location))
      (setf (parser-location-pre-comments eof-location) comments)
      (if (null last-node)
          (debugging-comment "No tokens")
          (let* ((last-token (if (null comments)
                                 (parser-node-semantics last-node)
                                 (first (last comments))))
                 ;; each token: (:kind <semantics> (start...) (end-byte end-line end-column))
                 (last-pos (fourth last-token)))
            (debugging-comment "Last token: ~S" last-token)
            (setf (parser-location-position eof-location) last-pos))))
    locations))
| null | https://raw.githubusercontent.com/KestrelInstitute/Specware/2be6411c55f26432bf5c9e2f7778128898220c24/Library/Algorithms/Parsing/Chart/Handwritten/Lisp/tokenizer.lisp | lisp | Package : Parser ; Base : 10 ; Syntax : Common - Lisp -*-
========================================================================
in cases where a character has both an illegal and a legal code for some context.
whitespace-table is used when scanning whitespace...
codes that are illegal after whitespace is started:
codes that are legal after whitespace is started:
first, so it can be overridden
first, so it can be overridden
word-symbol-table
codes that are illegal after a word symbol is started:
codes that are legal after a word symbol is started:
first, so it can be overridden
first, so it can be overridden
probably overridden by +word-symbol-continue-code+
non-word-symbol-table
codes that are illegal after a non-word symbol is started:
codes that are legal after a non-word symbol is started:
first, so it can be overridden
first, so it can be overridden
proably survive as final code
number-table is used when scanning numbers...
codes that are illegal after a number is started:
first, so it can be overridden
codes that are illegal after a number is started, but might become legal:
codes that are legal after a number is started:
string-table is used when scanning strings
move longest prefixes to front of list, so that something
such as "//@" would be recognized before "//", etc.
sort the strings in descending length so that "__" will be seen before "_", "??" before "?" etc.
========================================================================
the tokenizer will call extract-tokens-from-file, using language-specific parameters
each token looks like: (:kind <semantics> (start-byte start-line start-column) (end-byte end-line end-column))
========================================================================
each token looks like: (:kind <semantics> (start-byte start-line start-column) (end-byte end-line end-column))
TODO: this will change when we support unicode
========================================================================
each token looks like: (:kind <semantics> (start-byte start-line start-column) (end-byte end-line end-column))
,prefix
,byte
we proceed to line+1 : -1, so that the next character read
(which will be the leftmost on the line) will be at line+1 : 0
current-byte was incremented above, so we don't need to touch that here
extended-comments and pragmas are similar,
but pragmas will be recognized in fewer places (following whitespace)
give pragmas precedence, as their openings may be encoded as something
like //@ when comments are //
?? If we do this repeatedly, unreading newlines, can we end up at a column left of -1 ??
If that happens, we could decrement the line, but then what should the column be??
inclusive -- last character of token
Note: ad-hoc tokens take
precedence over open extended
comments, so we won't look here
to see if a comment is
starting.
put back the char that doesn't match
If an ad-hoc-token is found, make sure it is not the start of a longer token
then see if ad-hoc string should go back...
char-var was seen via local-read-char, so the current position is already
set to point at it
we proceed to line+1 : -1, so that the next character read
(which will be the leftmost on the line) will be at line+1 : 0
current-byte was incremented above, so we don't need to touch that here
======================================================================
======================================================================
majority
normal termination
peculiar termination
========================================
========================================
========================================
======================================================================
COMMENT TO END OF LINE
======================================================================
======================================================================
SEPARATOR
======================================================================
======================================================================
WILDCARD (single underbar), but also __, ___, etc.
======================================================================
extend-wildcard
disabled per Lambert 's request
======================================================================
WORD-SYMBOL
======================================================================
look for ad hoc symbols that happen to start with word symbol char
majority
normal termination
less likely
unlikely
weird
weird
weird
(termination-warning char char-code "word symbol" "" "is a beginning of a number")
Last-byte, last-line, last-column all refer to the last character of the symbol we've been scanning.
We put char back into the stream, and tell our caller the last-xxx values.
Those become the initial values in the next call here, and they are
incremented when the char we're pushing here is then popped.
======================================================================
NON-WORD-SYMBOL
======================================================================
majority
non-word termination
less likely
unlikely
weird
weird
with forms such as "::?", where the question mark is dubious, print a warning
(termination-warning char char-code "non-word symbol" "" ", which is beginning of a number.")
Last-byte, last-line, last-column all refer to the last character of the symbol we've been scanning.
We put char back into the stream, and tell our caller the last-xxx values.
Those become the initial values in the next call here, and they are
incremented when the char we're pushing here is then popped.
======================================================================
======================================================================
normal continutation
shouldn't happen here, but just in case
We want patterns such as "_+_" or "_::_" to tokenize as
("_" "+" "_") or ("_" "::" "_"), respectively.
Put the terminating char (which could be almost anything) and the underbar back
for for future processing, so the next pass will see the underbar followed by
the terminating char.
cdr in following removes trailing underbar from middle token being returned.
======================================================================
CHARACTER
======================================================================
token
token
======================================================================
NUMBER
======================================================================
special cases for hex, octal, binary
where all token-chars are hex
where all token-chars are octal
where all token-chars are octal
else fall through to ordinary number
at this point, have seen #\0 char, where char is not among "XxOoBb"
majority
normal termination
e.g. 123ABC
e.g. 123ABC
less likely
unlikely
(termination-warning char char-code "number" "" ", which starts a word symbol") ; sigh -- this would be triggered by "import Foo#A3B", so suppress it
(termination-warning char char-code "number" "" ", which continues but does not start a word symbol")
(termination-warning char char-code "number" "" ", which continues but does not start a non-word symbol")
e.g. +, -, =, etc.
Last-byte, last-line, last-column all refer to the last character of the number we've been scanning.
We put char back into the stream, and tell our caller the last-xxx values.
Those become the initial values in the next call here, and they are
incremented when the char we're pushing here is then popped.
======================================================================
STRING
======================================================================
Allow \# within string for # to stop emacs from getting confused
token
token
======================================================================
EXTENDED COMMENT
======================================================================
not as recursively nested structures.
This can cause a minor problem in the unusual
situation where the body of a pragma contains
delimiter. In those rare cases, the user will
need to manually adjust the body of their pragma
if they wish to block-comment around it.
======================================================================
======================================================================
scan-pragma calls skip-extended-comment with the recursive? and eof-ok?
flags set to false
========================================
if all chars match, the result is t
if some char is a mismatch, the result is nil
back out so stream is in original state
scan similarly to an extended comment, but not recursive
comment table will be ignored
skip past prefix
scan for postfix or recursive prefix
skip past postfix
recur if both outer and inner extended comments are recursive
========================================================================
each token looks like: (:kind <semantics> (start-byte start-line start-column) (end-byte end-line end-column))
(format t "install-tokens ~%")
(describe node)
each token looks like: (:kind <semantics> (start-byte start-line start-column) (end-byte end-line end-column)) |
(in-package :Parser4)
(defun ctp-arg-test (arg value example)
  "Warn when a create-tokenizer-parameters keyword argument ARG was omitted
\(VALUE is nil), suggesting EXAMPLE as a typical value.  Returns nil."
  (unless value
    (warn "create-tokenizer-parameters missing keyword arg ~S, e.g. ~A"
          arg
          example)))
(defun create-tokenizer-parameters (&key
                                    name
                                    size-of-character-set
                                    word-symbol-start-chars
                                    word-symbol-continue-chars
                                    non-word-symbol-start-chars
                                    non-word-symbol-continue-chars
                                    number-start-chars
                                    number-continue-chars
                                    digits-may-start-symbols?
                                    string-quote-char
                                    string-escape-char
                                    whitespace-chars
                                    separator-chars
                                    comment-to-eol-chars
                                    extended-comment-delimiters
                                    pragma-delimiters
                                    ad-hoc-keywords
                                    ad-hoc-symbols
                                    ad-hoc-numbers
                                    case-sensitive?
                                    )
  "Build a TOKENIZER-PARAMETERS structure for a language: per-character
dispatch tables (indexed by char-code, of SIZE-OF-CHARACTER-SET entries)
for whitespace / word-symbol / non-word-symbol / number / string scanning,
descriptors for extended comments and pragmas, separator tokens, and the
ad-hoc keyword/symbol/number tables."
  ;; Warn (but proceed) about commonly-required arguments that were omitted.
  (ctp-arg-test :word-symbol-start-chars word-symbol-start-chars "the alphabet")
  (ctp-arg-test :word-symbol-continue-chars word-symbol-continue-chars "the alphabet, digits, and underbar")
  (ctp-arg-test :non-word-symbol-start-chars non-word-symbol-start-chars "some chars like !@$^&*~+-=|<>?/.")
  (ctp-arg-test :non-word-symbol-continue-chars non-word-symbol-continue-chars "some chars like !@$^&*~+-=|<>?/.")
  (ctp-arg-test :number-start-chars number-start-chars "the digits, plus, minus, and maybe dot and/or slash")
  (ctp-arg-test :number-continue-chars number-continue-chars "the digits, and maybe dot and/or slash")
  (ctp-arg-test :comment-to-eol-chars comment-to-eol-chars "semi-colon (#\;) or percent (#\%) ")
  (let ((whitespace-table (make-array size-of-character-set :initial-element 0))
        (word-symbol-table (make-array size-of-character-set :initial-element 0))
        (non-word-symbol-table (make-array size-of-character-set :initial-element 0))
        (number-table (make-array size-of-character-set :initial-element 0))
        (string-table (make-array size-of-character-set :initial-element 0))
        (comment-table (make-array size-of-character-set :initial-element 0))
        (ad-hoc-table (make-array size-of-character-set :initial-element 0))
        (separator-tokens (make-array size-of-character-set :initial-element 0))
        (cp-descriptors '())
        )
    ;; Note: in each table below, we consistently assign the problematic
    ;; (illegal-in-context) codes first, so that legal codes can override
    ;; them when a character has both an illegal and a legal code for some
    ;; context.  (This line had lost its comment markers in the recovered
    ;; source and would not have compiled.)
    ;; whitespace-table: used while scanning whitespace.
    (assign-tokenizer-codes whitespace-table word-symbol-continue-chars +word-symbol-continue-code+)
    (assign-tokenizer-codes whitespace-table non-word-symbol-continue-chars +non-word-symbol-continue-code+)
    (assign-tokenizer-codes whitespace-table number-continue-chars +number-continue-code+)
    (assign-tokenizer-codes whitespace-table word-symbol-start-chars +word-symbol-start-code+)
    (assign-tokenizer-codes whitespace-table non-word-symbol-start-chars +non-word-symbol-start-code+)
    (assign-tokenizer-codes whitespace-table number-start-chars +number-start-code+)
    (assign-tokenizer-code  whitespace-table string-quote-char +string-quote-code+)
    (assign-tokenizer-codes whitespace-table comment-to-eol-chars +comment-to-eol-code+)
    (assign-tokenizer-codes whitespace-table whitespace-chars +whitespace-code+)
    (assign-tokenizer-codes whitespace-table separator-chars +separator-code+)
    ;; word-symbol-table: used while scanning a word symbol.
    (assign-tokenizer-codes word-symbol-table word-symbol-start-chars +word-symbol-start-code+)
    (assign-tokenizer-codes word-symbol-table number-continue-chars +number-continue-code+)
    (assign-tokenizer-codes word-symbol-table non-word-symbol-continue-chars +non-word-symbol-continue-code+)
    (assign-tokenizer-codes word-symbol-table non-word-symbol-start-chars +non-word-symbol-start-code+)
    (assign-tokenizer-code  word-symbol-table string-quote-char +string-quote-code+)
    (assign-tokenizer-codes word-symbol-table comment-to-eol-chars +comment-to-eol-code+)
    (assign-tokenizer-codes word-symbol-table whitespace-chars +whitespace-code+)
    (assign-tokenizer-codes word-symbol-table word-symbol-continue-chars +word-symbol-continue-code+)
    (assign-tokenizer-codes word-symbol-table separator-chars +separator-code+)
    ;; non-word-symbol-table: used while scanning a non-word symbol.
    (assign-tokenizer-codes non-word-symbol-table non-word-symbol-start-chars +non-word-symbol-start-code+)
    (assign-tokenizer-codes non-word-symbol-table number-continue-chars +number-continue-code+)
    (assign-tokenizer-codes non-word-symbol-table word-symbol-continue-chars +word-symbol-continue-code+)
    (assign-tokenizer-codes non-word-symbol-table word-symbol-start-chars +word-symbol-start-code+)
    (assign-tokenizer-code  non-word-symbol-table string-quote-char +string-quote-code+)
    (assign-tokenizer-codes non-word-symbol-table comment-to-eol-chars +comment-to-eol-code+)
    (assign-tokenizer-codes non-word-symbol-table whitespace-chars +whitespace-code+)
    (assign-tokenizer-codes non-word-symbol-table non-word-symbol-continue-chars +non-word-symbol-continue-code+)
    (assign-tokenizer-codes non-word-symbol-table separator-chars +separator-code+)
    ;; number-table: used while scanning a number.
    (assign-tokenizer-codes number-table number-start-chars +number-start-code+)
    (assign-tokenizer-codes number-table word-symbol-continue-chars +word-symbol-continue-code+)
    (assign-tokenizer-codes number-table non-word-symbol-continue-chars +non-word-symbol-continue-code+)
    (assign-tokenizer-codes number-table word-symbol-start-chars +word-symbol-start-code+)
    (assign-tokenizer-codes number-table non-word-symbol-start-chars +non-word-symbol-start-code+)
    (assign-tokenizer-code  number-table string-quote-char +string-quote-code+)
    (assign-tokenizer-codes number-table comment-to-eol-chars +comment-to-eol-code+)
    (assign-tokenizer-codes number-table whitespace-chars +whitespace-code+)
    (assign-tokenizer-codes number-table number-continue-chars +number-continue-code+)
    (assign-tokenizer-codes number-table separator-chars +separator-code+)
    ;; string-table: used while scanning a string literal.
    (assign-tokenizer-code string-table string-quote-char +string-quote-code+)
    (assign-tokenizer-code string-table string-escape-char +string-escape-code+)
    ;; Extended comments: each quad is (prefix postfix recursive? eof-ok?).
    (dolist (quad extended-comment-delimiters)
      (let* ((prefix (first quad))
             (postfix (second quad))
             (recursive? (third quad))
             (eof-ok? (fourth quad))
             (pragma? nil))
        (unless (and (stringp prefix)
                     (> (length prefix) 0)
                     (stringp postfix)
                     (> (length postfix) 0)
                     (member recursive? '(t nil))
                     (member eof-ok? '(t nil)))
          (break "Bad description of extended comment delimiters. Want (prefix postfix recursive? eof-ok?) : ~S"
                 quad))
        (push (make-cp-descriptor :prefix prefix
                                  :postfix postfix
                                  :recursive? recursive?
                                  :eof-ok? eof-ok?
                                  :pragma? pragma?)
              cp-descriptors)
        (setf (svref comment-table (char-code (schar prefix 0)))
              +maybe-open-comment-or-pragma-code+)))
    ;; Pragmas: same shape as extended comments, but flagged pragma?.
    (dolist (quad pragma-delimiters)
      (let* ((prefix (first quad))
             (postfix (second quad))
             (recursive? (third quad))
             (eof-ok? (fourth quad))
             (pragma? t))
        (unless (and (stringp prefix)
                     (> (length prefix) 0)
                     (stringp postfix)
                     (> (length postfix) 0)
                     (member recursive? '(t nil))
                     (member eof-ok? '(t nil)))
          (break "Bad description of pragma delimiters. Want (prefix postfix recursive? eof-ok?) : ~S"
                 quad))
        (push (make-cp-descriptor :prefix prefix
                                  :postfix postfix
                                  :recursive? recursive?
                                  :eof-ok? eof-ok?
                                  :pragma? pragma?)
              cp-descriptors)
        (setf (svref comment-table (char-code (schar prefix 0)))
              +maybe-open-comment-or-pragma-code+)))
    ;; Longest prefixes first, so e.g. "//@" is recognized before "//".
    (setq cp-descriptors
          (sort cp-descriptors
                #'(lambda (x y)
                    (> (length (cp-descriptor-prefix x))
                       (length (cp-descriptor-prefix y))))))
    (dolist (char separator-chars)
      (setf (svref separator-tokens (char-code char)) (string char)))
    ;; Mark characters that may begin an ad-hoc token of any flavor.
    (dolist (string ad-hoc-keywords)
      (setf (svref ad-hoc-table (char-code (schar string 0)))
            +maybe-start-of-ad-hoc-token+))
    (dolist (string ad-hoc-symbols)
      (setf (svref ad-hoc-table (char-code (schar string 0)))
            +maybe-start-of-ad-hoc-token+))
    (dolist (string ad-hoc-numbers)
      (setf (svref ad-hoc-table (char-code (schar string 0)))
            +maybe-start-of-ad-hoc-token+))
    ;; Classify each ad-hoc string by which categories it belongs to.
    ;; NOTE: the string= / string-equal hash-table tests are an Allegro
    ;; extension; other Lisps fall back to (case-sensitive) EQUAL.
    (let ((ht-ad-hoc-types (make-hash-table
                            :test (if case-sensitive?
                                      #+allegro 'string= #-allegro 'equal
                                      #+allegro 'string-equal #-allegro 'equal
                                      ))))
      (dolist (keyword-string ad-hoc-keywords)
        (setf (gethash keyword-string ht-ad-hoc-types) :AD-HOC-KEYWORD-ONLY))
      (dolist (symbol-string ad-hoc-symbols)
        (let ((old-value (gethash symbol-string ht-ad-hoc-types)))
          (setf (gethash symbol-string ht-ad-hoc-types)
                (if (null old-value)
                    :AD-HOC-SYMBOL-ONLY
                    :AD-HOC-KEYWORD-AND-SYMBOL-ONLY))))
      (dolist (number-string ad-hoc-numbers)
        (let ((old-value (gethash number-string ht-ad-hoc-types)))
          (setf (gethash number-string ht-ad-hoc-types)
                ;; BUG FIX: the previous ecase keys (:KEYWORD, :SYMBOL,
                ;; :KEYWORD-AND-SYMBOL) could never match the values actually
                ;; stored above, so any ad-hoc number that was also an ad-hoc
                ;; keyword or symbol signalled an ECASE error.
                (ecase old-value
                  ((nil) :AD-HOC-NUMBER-ONLY)
                  (:AD-HOC-KEYWORD-ONLY :AD-HOC-KEYWORD-AND-NUMBER-ONLY)
                  (:AD-HOC-SYMBOL-ONLY :AD-HOC-SYMBOL-AND-NUMBER-ONLY)
                  (:AD-HOC-KEYWORD-AND-SYMBOL-ONLY :AD-HOC-KEYWORD-AND-SYMBOL-AND-NUMBER-ONLY)))))
      ;; Verbose dump of every table, for debugging table construction.
      (when-debugging
       (when *verbose?*
         (let ((alist `((,+number-start-code+ . +number-start-code+)
                        (,+number-continue-code+ . +number-continue-code+)
                        (,+word-symbol-start-code+ . +word-symbol-start-code+)
                        (,+word-symbol-continue-code+ . +word-symbol-continue-code+)
                        (,+non-word-symbol-start-code+ . +non-word-symbol-start-code+)
                        (,+non-word-symbol-continue-code+ . +non-word-symbol-continue-code+)
                        (,+separator-code+ . +separator-code+)
                        (,+string-quote-code+ . +string-quote-code+)
                        (,+string-escape-code+ . +string-escape-code+)
                        (,+comment-to-eol-code+ . +comment-to-eol-code+)
                        (,+whitespace-code+ . +whitespace-code+)
                        (,+char-literal-start-code+ . +char-literal-start-code+)
                        (,+syllable-separator-code+ . +syllable-separator-code+)
                        (,+wildcard-code+ . +wildcard-code+)
                        (0 . "...")
                        )))
           (comment "============================================================================")
           (terpri)
           (dotimes (i size-of-character-set)
             (let ((n (svref whitespace-table i)))
               (comment "At whitespace ~3D (~12S) => ~A"
                        i (code-char i) (cdr (assoc n alist)))))
           (terpri)
           (dotimes (i size-of-character-set)
             (let ((n (svref word-symbol-table i)))
               (comment "At word symbol ~3D (~12S) => ~A"
                        i (code-char i) (cdr (assoc n alist)))))
           (terpri)
           (dotimes (i size-of-character-set)
             (let ((n (svref non-word-symbol-table i)))
               (comment "At non-word symbol ~3D (~12S) => ~A"
                        i (code-char i) (cdr (assoc n alist)))))
           (terpri)
           (dotimes (i size-of-character-set)
             (let ((n (svref number-table i)))
               (comment "At number ~3D (~12S) => ~A"
                        i (code-char i) (cdr (assoc n alist)))))
           (terpri)
           (dotimes (i size-of-character-set)
             (let ((n (svref string-table i)))
               (comment "At string ~3D (~12S) => ~A"
                        i (code-char i) (cdr (assoc n alist)))))
           (terpri)
           (dotimes (i size-of-character-set)
             (when (= (svref comment-table i) +maybe-open-comment-or-pragma-code+)
               (comment "The character ~D (~S) may start an extended comment or a pragma"
                        i (code-char i))))
           (terpri)
           (dolist (x ad-hoc-keywords) (comment "Ad-hoc-keyword : ~S" x))
           (dolist (x ad-hoc-symbols)  (comment "Ad-hoc-symbol  : ~S" x))
           (dolist (x ad-hoc-numbers)  (comment "Ad-hoc-number  : ~S" x))
           (terpri)
           (maphash #'(lambda (key value) (comment "ad-hoc-type for ~S = ~S" key value))
                    ht-ad-hoc-types)
           (terpri)
           (comment "============================================================================"))))
      ;; Sort ad-hoc strings in descending length so that "__" is seen
      ;; before "_", "??" before "?", etc.
      (let ((ad-hoc-strings
             (sort (append ad-hoc-keywords
                           ad-hoc-symbols
                           ad-hoc-numbers)
                   #'(lambda (x y)
                       (> (length x) (length y))))))
        (make-tokenizer-parameters :name name
                                   :whitespace-table whitespace-table
                                   :word-symbol-table word-symbol-table
                                   :non-word-symbol-table non-word-symbol-table
                                   :number-table number-table
                                   :string-table string-table
                                   :digits-may-start-symbols? digits-may-start-symbols?
                                   :comment-table comment-table
                                   :separator-tokens separator-tokens
                                   :cp-descriptors cp-descriptors
                                   :ad-hoc-types-ht ht-ad-hoc-types
                                   :ad-hoc-table ad-hoc-table
                                   :ad-hoc-strings ad-hoc-strings
                                   ))
      )))
(defun assign-tokenizer-codes (table chars code)
  "Store CODE into TABLE (a simple vector indexed by char-code) for every
character in CHARS, which may be a string or a list of characters."
  ;; dolist over the coerced list replaces the original
  ;; (dotimes ... (nth i chars)) walk, which was O(n^2) in (length chars).
  (dolist (char (coerce chars 'list))
    (setf (svref table (char-code char)) code)))
(defun assign-tokenizer-code (table char code)
  "Store CODE into TABLE (a simple vector indexed by char-code) at the slot
for CHAR.  A nil CHAR (e.g. an optional delimiter that was not supplied)
is silently ignored."
  (when char
    (setf (svref table (char-code char)) code)))
(defun tokenize-file (session file tokenizer)
  "Run TOKENIZER on FILE, split the result into comment and non-comment
tokens, and install both into SESSION via install-tokens.
Returns three values: the result of install-tokens, the total number of
tokens produced, and a flag that is true when an :EXTENDED-COMMENT-ERROR
token was seen (an extended comment hit end-of-file)."
  (incf-timing-data 'start-tokenize-file)
  (let ((tokens (funcall tokenizer file))
        (comments '())
        (code-tokens '())
        (eof-error? nil))
    (incf-timing-data 'tokenize-file)
    ;; Partition, preserving original order via the trailing nreverses.
    (dolist (token tokens)
      (if (member (first token) '(:COMMENT-TO-EOL :EXTENDED-COMMENT))
          (push token comments)
          (progn
            (when (eq (first token) :EXTENDED-COMMENT-ERROR)
              (setq eof-error? t))
            (push token code-tokens))))
    (setq code-tokens (nreverse code-tokens))
    (setq comments (nreverse comments))
    (incf-timing-data 'tokenize-file)
    (let ((result (install-tokens session code-tokens comments)))
      (incf-timing-data 'install-tokens)
      (values result (length tokens) eof-error?))))
(defun extract-tokens-from-file (file tokenizer-parameters)
  "Open FILE and return its tokens as a list, in file order.
Each token looks like:
  (:kind <semantics> (start-byte start-line start-column)
                     (end-byte end-line end-column))
An :AD-HOC or :SYMBOL token whose text is registered in the ad-hoc types
hash table gets that more specific kind instead of the generic one."
  (let ((whitespace-table (tokenizer-parameters-whitespace-table tokenizer-parameters))
        (word-symbol-table (tokenizer-parameters-word-symbol-table tokenizer-parameters))
        (non-word-symbol-table (tokenizer-parameters-non-word-symbol-table tokenizer-parameters))
        (number-table (tokenizer-parameters-number-table tokenizer-parameters))
        (string-table (tokenizer-parameters-string-table tokenizer-parameters))
        (comment-table (tokenizer-parameters-comment-table tokenizer-parameters))
        (separator-tokens (tokenizer-parameters-separator-tokens tokenizer-parameters))
        (cp-descriptors (tokenizer-parameters-cp-descriptors tokenizer-parameters))
        (digits-may-start-symbols? (tokenizer-parameters-digits-may-start-symbols? tokenizer-parameters))
        (ht-ad-hoc-types (tokenizer-parameters-ad-hoc-types-ht tokenizer-parameters))
        (ad-hoc-table (tokenizer-parameters-ad-hoc-table tokenizer-parameters))
        (ad-hoc-strings (tokenizer-parameters-ad-hoc-strings tokenizer-parameters)))
    (let ((tokens nil))
      ;; BUG FIX / NOTE(review): the recovered source referenced a free
      ;; variable STREAM with no binding in scope; a WITH-OPEN-FILE wrapper
      ;; (consistent with the original paren structure) has been restored
      ;; here.  Confirm the open options against the upstream source.
      (with-open-file (stream file :direction :input)
        ;; The upper-left corner of the file is considered 1:0:1 (line 1,
        ;; column 0, byte 1), so the character one to the left of that is
        ;; 1:-1:0 (line 1, column -1, byte 0).  So we are at 1:-1 before we
        ;; read the first character.  (These three lines had lost their
        ;; comment markers in the recovered source.)
        (let ((ps-stream (make-pseudo-stream :unread-chars nil :stream stream))
              (pre-line 1) (pre-column -1) (pre-byte 0))
          (loop do
            (multiple-value-bind (type value
                                  first-byte first-line first-column
                                  last-byte last-line last-column)
                (extract-token-from-pseudo-stream ps-stream
                                                  pre-byte pre-line pre-column
                                                  whitespace-table
                                                  word-symbol-table
                                                  non-word-symbol-table
                                                  number-table
                                                  string-table
                                                  digits-may-start-symbols?
                                                  comment-table
                                                  separator-tokens
                                                  cp-descriptors
                                                  ad-hoc-table
                                                  ad-hoc-strings)
              (cond ((eq type :EOF)
                     (return nil))
                    (t
                     ;; Prefer the registered ad-hoc type, if any, over the
                     ;; generic :AD-HOC / :SYMBOL kind.
                     (push (list (or (and (or (eq type :AD-HOC)
                                              (eq type :SYMBOL))
                                          (gethash value ht-ad-hoc-types))
                                     type)
                                 value
                                 (list first-byte first-line first-column)
                                 (list last-byte last-line last-column))
                           tokens)))
              ;; This token's end position is the next token's pre-position.
              (setq pre-byte last-byte
                    pre-line last-line
                    pre-column last-column)))))
      (nreverse tokens))))
(defun extract-token-from-pseudo-stream (ps-stream
pre-byte pre-line pre-column
whitespace-table
word-symbol-table
non-word-symbol-table
number-table
string-table
digits-may-start-symbols?
comment-table
separator-tokens
cp-descriptors
ad-hoc-table
ad-hoc-strings)
(when digits-may-start-symbols?
(error "The option digits-may-start-symbols? is currently diabled."))
(let* ((current-byte pre-byte)
(current-line pre-line)
(current-column pre-column)
(first-byte )
(first-line )
(first-column )
(last-byte )
(last-line )
(last-column )
(char )
(char-code )
(token-chars nil)
(cp-descriptor nil)
(hex-char-1 )
(hex-char-code-1 )
(hex-char-2 )
(hex-char-code-2 )
(*extended-comment-state* (make-extended-comment-state)))
(declare (special *extended-comment-state*))
(macrolet ((local-warn (prefix line column byte msg &rest args)
`(warn "At line ~3D:~2D ~?"
,msg (list ,@args)))
(warn-here (msg &rest args)
`(local-warn "At" current-line current-column current-byte
,msg ,@args))
(local-read-char (char-var char-code-var eof-action newline-action open-extended-comment-action open-pragma-action)
`(progn
(setq ,char-var (ps-read-char ps-stream))
(incf current-byte)
(if (eq ,char-var +tokenizer-eof+)
,eof-action
(progn
(setq ,char-code-var (char-code ,char-var))
(cond ((eq ,char-var #\newline)
(incf current-line)
(setq current-column -1)
,newline-action)
(t
(incf current-column)))
,@(if (and (null open-extended-comment-action) (null open-pragma-action))
()
`((when (and (eq (svref comment-table ,char-code-var)
+maybe-open-comment-or-pragma-code+)
(not (null (setq cp-descriptor
(applicable-cp-descriptor
,char-var
ps-stream
cp-descriptors)))))
(if (cp-descriptor-pragma? cp-descriptor)
,open-pragma-action
,open-extended-comment-action))))
))))
(local-unread-char (char-var)
`(progn
(ps-unread-char ,char-var ps-stream)
(decf current-byte)
(decf current-column)
))
(set-first-positions ()
inclusive -- first character of token
`(setq first-byte current-byte
first-line current-line
first-column current-column))
(set-last-positions ()
`(setq last-byte current-byte
last-line current-line
last-column current-column))
(return-values-using-prior-last (type value)
`(return-from extract-token-from-pseudo-stream
(values ,type ,value
first-byte first-line first-column
last-byte last-line last-column)))
(return-values (type value)
`(progn
(set-last-positions)
(return-values-using-prior-last ,type ,value)))
(termination-warning (char-var char-code-var kind-of-token misc-chars kind-of-char)
`(local-warn "After"
last-line (1+ last-column) (1+ last-byte)
"Terminating ~A \"~A~A\" with ~S (hex code ~2,'0X)~A."
,kind-of-token
,misc-chars
(coerce (reverse token-chars) 'string)
,char-var ,char-code-var
,kind-of-char))
(look-for-ad-hoc-tokens (char-var char-code-var)
`(unless (eq (svref ad-hoc-table ,char-code-var) 0)
(dolist (ad-hoc-string ad-hoc-strings)
(debugging-comment "Looking for ad-hoc-string ~S starting with ~S" ad-hoc-string ,char-var)
(when (eq (schar ad-hoc-string 0) ,char-var)
(let ((found-ad-hoc-string?
(dotimes (i (1- (length ad-hoc-string)) t)
(let ((local-char (ps-read-char ps-stream)))
(debugging-comment "Looking for ad-hoc-string ~S, now at ~S" ad-hoc-string local-char)
(when (eq ,char-var +tokenizer-eof+)
(debugging-comment "Saw EOF")
from dotimes
(let ((current-string-index (+ i 1)))
(cond ((eq local-char (schar ad-hoc-string current-string-index))
(debugging-comment " extending match."))
(t
(debugging-comment " match to ~S failed." ad-hoc-string)
(ps-unread-char local-char ps-stream)
put back all but the first char
(dotimes (j i)
(ps-unread-char (schar ad-hoc-string (- current-string-index 1 j))
ps-stream))
(return nil))))))))
(debugging-comment "Found? ~S" found-ad-hoc-string?)
(when found-ad-hoc-string?
(let ((next-char (ps-read-char ps-stream)))
(unless (eq next-char +tokenizer-eof+)
(let* ((this-char-dispatch-code (svref word-symbol-table ,char-code-var))
(next-char-code (char-code next-char))
(next-char-dispatch-code (svref word-symbol-table next-char-code)))
in all cases ( except eof , of course ) , put back the next char
(ps-unread-char next-char ps-stream)
(when (or (and (or (eq this-char-dispatch-code #.+word-symbol-start-code+)
(eq this-char-dispatch-code #.+word-symbol-continue-code+))
(or (eq next-char-dispatch-code #.+word-symbol-start-code+)
(eq next-char-dispatch-code #.+word-symbol-continue-code+)
(eq next-char-dispatch-code #.+syllable-separator-code+)))
(and (or (eq this-char-dispatch-code #.+non-word-symbol-start-code+)
(eq this-char-dispatch-code #.+non-word-symbol-continue-code+))
(or (eq next-char-dispatch-code #.+non-word-symbol-start-code+)
(eq next-char-dispatch-code #.+non-word-symbol-continue-code+)
(eq next-char-dispatch-code #.+syllable-separator-code+))))
put back all but the first char of the ad - hoc - string
(let ((n (1- (length ad-hoc-string))))
(dotimes (i n)
(ps-unread-char (schar ad-hoc-string (- n i))
ps-stream)))
(return nil)))))
(debugging-comment "Found match to ~S." ad-hoc-string)
(set-first-positions)
(dotimes (i (1- (length ad-hoc-string)))
(let ((temp-char (schar ad-hoc-string (+ i 1))))
(incf current-byte)
(cond ((eq temp-char #\newline)
(incf current-line)
(setq current-column -1))
(t
(incf current-column)))))
(return-values :AD-HOC ad-hoc-string)))))))
)
(tagbody
(go start-scan-for-new-token)
WHITESPACE
unrecognized-char-while-scanning-whitespace
(warn-here "Unrecognized ~6S (hex code ~2,'0X) while scanning whitespace -- treated as whitespace"
char char-code)
continue-whitespace
start-scan-for-new-token
(local-read-char char char-code
(return-values :EOF nil)
()
(go start-extended-comment)
(go start-pragma))
ignore-erroneous-pragma
(look-for-ad-hoc-tokens char char-code)
(case (svref whitespace-table char-code)
(#.+whitespace-code+ (go continue-whitespace))
(#.+word-symbol-start-code+ (go start-word-symbol))
(#.+non-word-symbol-start-code+ (go start-non-word-symbol))
(#.+wildcard-code+ (go start-wildcard))
(#.+number-start-code+ (go start-number))
(#.+string-quote-code+ (go start-string))
(#.+separator-code+ (go start-separator))
(#.+comment-to-eol-code+ (go start-comment-to-eol))
(#.+char-literal-start-code+ (go start-char-literal))
(#.+word-symbol-continue-code+ (go weird-middle-of-word-symbol-after-whitespace))
(#.+non-word-symbol-continue-code+ (go weird-middle-of-non-word-symbol-after-whitespace))
(#.+number-continue-code+ (go weird-middle-of-number-after-whitespace))
(otherwise (go unrecognized-char-while-scanning-whitespace)))
weird-middle-of-word-symbol-after-whitespace
(set-first-positions)
(warn-here "Ignoring illegal start for word symbol: ~S" char)
(return-values :ERROR (format nil "~A" char))
weird-middle-of-non-word-symbol-after-whitespace
(set-first-positions)
(warn-here "Ignoring illegal start for non-word symbol: ~S" char)
(return-values :ERROR (format nil "~A" char))
weird-middle-of-number-after-whitespace
(set-first-positions)
(warn-here "Ignoring illegal start for number: ~S" char)
(return-values :ERROR (format nil "~A" char))
start-comment-to-eol
(set-first-positions)
continue-comment-to-eol
(push char token-chars)
(local-read-char char char-code
(return-values :COMMENT-TO-EOL
(coerce (nreverse token-chars) 'string))
(return-values :COMMENT-TO-EOL
(coerce (nreverse token-chars) 'string))
()
())
(go continue-comment-to-eol)
start-separator
(set-first-positions)
(return-values :SYMBOL (svref separator-tokens char-code))
start-wildcard
(set-first-positions)
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-word-symbol-with-eof)
()
(go terminate-word-symbol-with-extended-comment)
())
(case (svref whitespace-table char-code)
(#.+wildcard-code+
(warn-here "Wildcards are a single underbar -- double underbar is not recognized.")
(return-values :ERROR "__")))
(local-unread-char char)
(return-values-using-prior-last :SYMBOL (coerce (nreverse token-chars) 'string))
start-word-symbol
(set-first-positions)
extend-word-symbol
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-word-symbol-with-eof)
()
(go terminate-word-symbol-with-extended-comment)
())
(case (svref word-symbol-table char-code)
(#.+word-symbol-continue-code+ (go extend-word-symbol))
(#.+syllable-separator-code+ (go extend-symbol-with-new-syllable))
(#.+whitespace-code+ (go terminate-word-symbol-with-whitespace))
(#.+non-word-symbol-start-code+ (go terminate-word-symbol-with-start-non-word-symbol))
(#.+separator-code+ (go terminate-word-symbol-with-start-separator))
(#.+comment-to-eol-code+ (go terminate-word-symbol-with-start-comment-to-eol))
(#.+word-symbol-start-code+ (go terminate-word-symbol-with-start-word-symbol))
(#.+number-start-code+ (go terminate-word-symbol-with-start-number))
(#.+string-quote-code+ (go terminate-word-symbol-with-start-string))
(#.+char-literal-start-code+ (go terminate-word-symbol-with-start-char-literal))
(#.+non-word-symbol-continue-code+ (go terminate-word-symbol-with-continue-non-word-symbol))
(#.+number-continue-code+ (go terminate-word-symbol-with-continue-number))
(otherwise (go unrecognized-char-while-scanning-word-symbol)))
terminate-word-symbol-with-start-non-word-symbol
(go terminate-word-symbol)
unrecognized-char-while-scanning-word-symbol
(termination-warning char char-code "word symbol" "" ", which is unrecognized")
(go terminate-word-symbol)
(termination-warning char char-code "word symbol" "" ", which can continue but not start a number")
(return-values-using-prior-last :SYMBOL (coerce (nreverse token-chars) 'string))
(termination-warning char char-code "word symbol" "" ", which can continue but not start a non-word symbol")
(go terminate-word-symbol)
terminate-word-symbol-with-start-word-symbol
(termination-warning char char-code "word symbol" "" ", which can start a word symbol but not continue one")
(go terminate-word-symbol)
terminate-word-symbol-with-start-number
(go terminate-word-symbol)
terminate-word-symbol-with-start-separator
terminate-word-symbol-with-start-string
terminate-word-symbol-with-start-char-literal
terminate-word-symbol-with-start-comment-to-eol
terminate-word-symbol-with-whitespace
terminate-word-symbol-with-extended-comment
terminate-word-symbol
is the first character past that position .
(local-unread-char char)
terminate-word-symbol-with-eof
(return-values-using-prior-last :SYMBOL (coerce (nreverse token-chars) 'string))
start-non-word-symbol
(set-first-positions)
extend-non-word-symbol
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-non-word-symbol-with-eof)
()
(go terminate-non-word-symbol-with-extended-comment)
())
(case (svref non-word-symbol-table char-code)
(#.+non-word-symbol-continue-code+ (go extend-non-word-symbol))
(#.+syllable-separator-code+ (go extend-symbol-with-new-syllable))
(#.+whitespace-code+ (go terminate-non-word-symbol-with-whitespace))
(#.+word-symbol-start-code+ (go terminate-non-word-symbol-with-start-word-symbol))
(#.+separator-code+ (go terminate-non-word-symbol-with-start-separator))
(#.+comment-to-eol-code+ (go terminate-non-word-symbol-with-start-comment-to-eol))
(#.+non-word-symbol-start-code+ (go terminate-non-word-symbol-with-start-non-word-symbol))
(#.+number-start-code+ (go terminate-non-word-symbol-with-start-number))
(#.+string-quote-code+ (go terminate-non-word-symbol-with-start-string))
(#.+char-literal-start-code+ (go terminate-non-word-symbol-with-start-char-literal))
(#.+word-symbol-continue-code+ (go terminate-non-word-symbol-with-continue-word-symbol))
(#.+number-continue-code+ (go terminate-non-word-symbol-with-continue-number))
(otherwise (go unrecognized-char-while-scanning-non-word-symbol)))
unrecognized-char-while-scanning-non-word-symbol
(termination-warning char char-code "non-word symbol" "" ", which is unrecognized")
(go terminate-non-word-symbol)
(termination-warning char char-code "non-word symbol" "" ", which can continue but not start a number")
(return-values-using-prior-last :SYMBOL (coerce (nreverse token-chars) 'string))
terminate-non-word-symbol-with-continue-word-symbol
(termination-warning char char-code "non-word symbol" "" ", which can continue but not start a word symbol")
(go terminate-non-word-symbol)
terminate-non-word-symbol-with-start-non-word-symbol
(termination-warning char char-code "non-word symbol" "" ", which can start a non-word symbol but not continue one")
(go terminate-non-word-symbol)
terminate-non-word-symbol-with-start-number
(go terminate-non-word-symbol)
terminate-non-word-symbol-with-start-word-symbol
terminate-non-word-symbol-with-start-separator
terminate-non-word-symbol-with-start-string
terminate-non-word-symbol-with-start-char-literal
terminate-non-word-symbol-with-start-comment-to-eol
terminate-non-word-symbol-with-whitespace
terminate-non-word-symbol-with-extended-comment
terminate-non-word-symbol
is the first character past that position .
(local-unread-char char)
terminate-non-word-symbol-with-eof
(return-values-using-prior-last :SYMBOL (coerce (nreverse token-chars) 'string))
SYLLABLE
extend-symbol-with-new-syllable
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-word-symbol-with-eof)
()
(go terminate-word-symbol-with-extended-comment)
())
(case (svref word-symbol-table char-code)
(#.+word-symbol-start-code+ (go extend-word-symbol))
(#.+word-symbol-continue-code+ (go extend-word-symbol))
(#.+non-word-symbol-start-code+ (go extend-non-word-symbol))
(#.+non-word-symbol-continue-code+ (go extend-non-word-symbol))
TODO : We wish to disallow multiple consecutive underbars , but for the moment C code generation and Snark use " _ _ " in names
(#.+syllable-separator-code+ (go extend-symbol-with-new-syllable))
(otherwise (go terminate-symbol-but-preserve-wildcard)))
terminate-symbol-but-preserve-wildcard
Assume the first underbar is already handled .
(local-unread-char char)
(local-unread-char #\_)
(return-values :SYMBOL (coerce (nreverse (cdr token-chars)) 'string))
Note : # \abcde = > two tokens : ( : CHARACTER # \a ) (: SYMBOL " bcde " )
start-char-literal
(set-first-positions)
(set-last-positions)
(local-read-char char char-code
(termination-warning char char-code "partial character literal" "#" ", which is eof")
(termination-warning char char-code "partial character literal" "#" "")
(termination-warning char char-code "partial character literal" "#" ", which starts an extended comment")
(termination-warning char char-code "partial character literal" "#" ", which starts a pragma")
)
(set-last-positions)
(cond ((eq char #\\)
(local-read-char char char-code
(termination-warning char char-code "partial non-word character literal" "#\\" ", which is eof")
(termination-warning char char-code "partial non-word character literal" "#\\" "")
(termination-warning char char-code "partial non-word character literal" "#\\" ", which starts an extended comment")
(termination-warning char char-code "partial non-word character literal" "#\\" ", which starts a pragma")
)
(case char
#-gcl (#\a (return-values :CHARACTER #-mcl #\bel #+mcl #\bell ))
(#\b (return-values :CHARACTER #\backspace ))
(#\t (return-values :CHARACTER #\tab ))
(#\n (return-values :CHARACTER #\newline ))
#-(or mcl gcl) (#\v (return-values :CHARACTER #\vt ))
(#\f (return-values :CHARACTER #\page ))
(#\r (return-values :CHARACTER #\return ))
(#\s (return-values :CHARACTER #\space ))
(#\\ (return-values :CHARACTER #\\ ))
(#\" (return-values :CHARACTER #\" ))
(#\# (return-values :CHARACTER #\# ))
(#\x (progn
(set-last-positions)
(local-read-char
hex-char-1 hex-char-code-1
(termination-warning hex-char-1 hex-char-code-1 "partial hex character literal" "#\\x" ", which is eof")
(termination-warning hex-char-1 hex-char-code-1 "partial hex character literal" "#\\x" "")
(termination-warning hex-char-1 hex-char-code-1 "partial hex character literal" "#\\x" ", which starts an extended comment")
(termination-warning hex-char-1 hex-char-code-1 "partial hex character literal" "#\\x" ", which starts a pragma")
)
(set-last-positions)
(local-read-char
hex-char-2 hex-char-code-2
(termination-warning hex-char-2 hex-char-code-2 "partial hex character literal" (format nil "#\\x~A" hex-char-1) ", which is eof")
(termination-warning hex-char-2 hex-char-code-2 "partial hex character literal" (format nil "#\\x~A" hex-char-1) "")
(termination-warning hex-char-2 hex-char-code-2 "partial hex character literal" (format nil "#\\x~A" hex-char-1) ", which starts an extended comment")
(termination-warning hex-char-2 hex-char-code-2 "partial hex character literal" (format nil "#\\x~A" hex-char-1) ", which starts a pragma")
)
(let ((high-nibble (convert-hex-char-to-number hex-char-1))
(low-nibble (convert-hex-char-to-number hex-char-2)))
(when (or (null high-nibble) (null low-nibble))
(let ((token (format nil "#x\\~A~A" hex-char-1 hex-char-2)))
(warn-here "Unrecognized character literal, chars after \"#\\x\" are ~S ~S, with hex codes ~2,'0X ~2,'0X"
hex-char-1
hex-char-2
hex-char-code-1
hex-char-code-2)
(return-values :ERROR token)))
(return-values :CHARACTER (code-char (+ (ash high-nibble 4) low-nibble))))))
(otherwise
(let ((token (format nil "#\\~A" char)))
(warn-here "Unrecognized character literal, char after \"#\\\" is ~S with hex code ~2,'0X"
char
char-code)
(return-values :ERROR token)))))
(t
(return-values-using-prior-last :CHARACTER char)))
start-number
(set-first-positions)
(when (eq char #\0)
(set-last-positions)
(local-read-char char char-code
(go terminate-zero-with-eof)
()
(go terminate-number-with-extended-comment)
())
(case char
((#\X #\x)
(local-read-char char char-code
(go terminate-hex-with-eof)
()
(go terminate-hex-with-extended-comment)
())
at this point , have seen # \0 # \x char
(loop while (member char '(#\0 #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8 #\9
#\a #\b #\c #\d #\e #\f
#\A #\B #\C #\D #\E #\F)
:test 'eq)
do
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-hex-with-eof)
()
(go terminate-hex-with-extended-comment)
()))
at this point , have seen # \0 # \X token - chars non - hex - char ,
(if (null token-chars)
(go terminate-hex-prematurely)
(go terminate-hex-cleanly)))
((#\O #\o)
(local-read-char char char-code
(go terminate-octal-with-eof)
()
(go terminate-octal-with-extended-comment)
())
at this point , have seen # \0 # \O char
(loop while (member char '(#\0 #\1 #\2 #\3 #\4 #\5 #\6 #\7 #\8) :test 'eq) do
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-octal-with-eof)
()
(go terminate-octal-with-extended-comment)
()))
at this point , have seen # \0 # \O token - chars non - octal - char ,
(if (null token-chars)
(go terminate-octal-prematurely)
(go terminate-octal-cleanly)))
((#\B #\b)
(local-read-char char char-code
(go terminate-binary-with-eof)
()
(go terminate-binary-with-extended-comment)
())
at this point , have seen # \0 # \B char
(loop while (or (eq char '#\0) (eq char #\1)) do
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-binary-with-eof)
()
(go terminate-binary-with-extended-comment)
()))
at this point , have seen # \0 # \B token - chars non - octal - char ,
(if (null token-chars)
(go terminate-binary-prematurely)
(go terminate-binary-cleanly)))
(t
(push #\0 token-chars)
(go extend-number-after-initial-zero))))
extend-number
(push char token-chars)
(set-last-positions)
(local-read-char char char-code
(go terminate-number-with-eof)
()
(go terminate-number-with-extended-comment)
())
extend-number-after-initial-zero
(case (svref number-table char-code)
(#.+number-continue-code+ (go extend-number))
(#.+whitespace-code+ (go terminate-number-with-whitespace))
(#.+word-symbol-start-code+ (go terminate-number-with-start-word-symbol))
(#.+word-symbol-continue-code+ (go terminate-number-with-continue-word-symbol))
(#.+non-word-symbol-start-code+ (go terminate-number-with-start-non-word-symbol))
(#.+non-word-symbol-continue-code+ (go terminate-number-with-continue-non-word-symbol))
(#.+separator-code+ (go terminate-number-with-start-separator))
(#.+comment-to-eol-code+ (go terminate-number-with-start-comment-to-eol))
(#.+number-start-code+ (go terminate-number-with-start-number))
(#.+string-quote-code+ (go terminate-number-with-start-string))
(#.+char-literal-start-code+ (go terminate-number-with-start-char-literal))
(otherwise (go unrecognized-char-while-scanning-number)))
unrecognized-char-while-scanning-number
(termination-warning char char-code "number" "" ", which is unrecognized")
(go terminate-number-unexpectedly)
terminate-number-with-start-word-symbol
(go terminate-number-unexpectedly)
terminate-number-with-continue-word-symbol
(go terminate-number-unexpectedly)
terminate-number-with-continue-non-word-symbol
(go terminate-number-unexpectedly)
terminate-number-with-start-number
(termination-warning char char-code "number" "" ", which starts a new number")
(go terminate-number-unexpectedly)
terminate-number-unexpectedly
terminate-number-with-start-separator
terminate-number-with-start-string
terminate-number-with-start-char-literal
terminate-number-with-start-comment-to-eol
terminate-number-with-whitespace
terminate-number-with-extended-comment
is the first character past that position .
(local-unread-char char)
terminate-number-with-eof
(return-values-using-prior-last :NUMBER (parse-integer (coerce (nreverse token-chars) 'string)))
terminate-zero-with-eof
(return-values-using-prior-last :NUMBER 0)
terminate-hex-prematurely
(termination-warning char char-code "hex number" "" ", but there are no hex digits")
terminate-hex-with-extended-comment
(termination-warning char char-code "hex number" "" ", which is not a hex char or expected whitespace or punctuation")
terminate-hex-cleanly
(local-unread-char char)
terminate-hex-with-eof
(return-values-using-prior-last :NUMBER (parse-integer (coerce (nreverse token-chars) 'string) :radix 16))
terminate-octal-prematurely
(termination-warning char char-code "octal number" "" ", but there are no octal digits")
terminate-octal-with-extended-comment
(termination-warning char char-code "octal number" "" ", which is not an octal char or expected whitespace or punctuation")
terminate-octal-cleanly
(local-unread-char char)
terminate-octal-with-eof
(return-values-using-prior-last :NUMBER (parse-integer (coerce (nreverse token-chars) 'string) :radix 8))
terminate-binary-prematurely
(termination-warning char char-code "binary number" "" ", but there are no binary digits")
terminate-binary-with-extended-comment
(termination-warning char char-code "binary number" "" ", which is not a binary char or expected whitespace or punctuation")
terminate-binary-cleanly
(local-unread-char char)
terminate-binary-with-eof
(return-values-using-prior-last :NUMBER (parse-integer (coerce (nreverse token-chars) 'string) :radix 2))
escape-next-char-in-string
(local-read-char char char-code
(let ((token (format nil "~A\\" (coerce (nreverse token-chars) 'string))))
(warn-here "EOF immediately after escape character in string ~S" token)
(return-values :ERROR token))
()
()
())
(case char
#-gcl (#\a (push #-mcl #\bel #+mcl #\bell token-chars))
(#\b (push #\backspace token-chars))
(#\t (push #\tab token-chars))
(#\n (push #\newline token-chars))
#-(or mcl gcl) (#\v (push #\vt token-chars))
(#\f (push #\page token-chars))
(#\r (push #\return token-chars))
(#\s (push #\space token-chars))
(#\\ (push #\\ token-chars))
(#\" (push #\" token-chars))
(#\x (progn
(set-last-positions)
(local-read-char
hex-char-1 hex-char-code-1
(termination-warning hex-char-1 hex-char-code-1 "partial hex character lliteral" "#\\x" ", which is eof")
(termination-warning hex-char-1 hex-char-code-1 "partial hex character lliteral" "#\\x" "")
(termination-warning hex-char-1 hex-char-code-1 "partial hex character lliteral" "#\\x" ", which starts an extended comment")
(termination-warning hex-char-1 hex-char-code-1 "partial hex character lliteral" "#\\x" ", which starts a pragma")
)
(set-last-positions)
(local-read-char
hex-char-2 hex-char-code-2
(termination-warning hex-char-2 hex-char-code-2 "partial hex character lliteral" (format nil "#\\x~A" hex-char-1) ", which is eof")
(termination-warning hex-char-2 hex-char-code-2 "partial hex character lliteral" (format nil "#\\x~A" hex-char-1) "")
(termination-warning hex-char-2 hex-char-code-2 "partial hex character lliteral" (format nil "#\\x~A" hex-char-1) ", which starts an extended comment")
(termination-warning hex-char-2 hex-char-code-2 "partial hex character lliteral" (format nil "#\\x~A" hex-char-1) ", which starts a pragma")
)
(let ((high-nibble (convert-hex-char-to-number hex-char-1))
(low-nibble (convert-hex-char-to-number hex-char-2)))
(when (or (null high-nibble) (null low-nibble))
(let ((token (format nil "#\\x~A~A" hex-char-1 hex-char-2)))
(warn-here "Unrecognized character literal, chars after \"#\\x\" are ~S ~S, with hex codes ~2,'0X ~2,'0X"
hex-char-1
hex-char-2
hex-char-code-1
hex-char-code-2)
(return-values :ERROR token)))
(push (code-char (+ (ash high-nibble 4) low-nibble)) token-chars))))
(otherwise
(let ((token (format nil "#\\~A" char)))
(warn-here "Unrecognized character literal, char after \"#\\\" is ~S, with hex code ~2,'0X"
char
char-code)
(return-values :ERROR token))))
(go extend-string)
start-string
(set-first-positions)
extend-string
(local-read-char char char-code
(let ((token (coerce (nreverse token-chars) 'string)))
(warn-here "EOF inside string starting at line ~S, column ~S" first-line first-column)
(return-values :ERROR token))
()
()
())
(case (svref string-table char-code)
(#.+string-quote-code+ (go close-string))
(#.+string-escape-code+ (go escape-next-char-in-string))
(otherwise (push char token-chars) (go extend-string)))
close-string
(return-values :STRING (coerce (nreverse token-chars) 'string))
start-extended-comment
(set-first-positions)
(multiple-value-bind (error? comment-chars last-byte last-line last-column)
(skip-extended-comment char ps-stream cp-descriptor cp-descriptors
Note : Pragma bodies are treated as ordinary text ,
an unmatched ( open or close ) extended comemnt
comment-table
first-byte
first-line
first-column)
(return-values-using-prior-last (if error? :EXTENDED-COMMENT-ERROR :EXTENDED-COMMENT)
(coerce (nreverse comment-chars) 'string)))
PRAGMA
start-pragma
(set-first-positions)
(multiple-value-bind (error? pragma-chars last-byte last-line last-column)
(scan-pragma char ps-stream cp-descriptor
first-byte
first-line
first-column)
(cond (error?
(dolist (char pragma-chars)
(ps-unread-char char ps-stream))
(setq current-byte first-byte
current-line first-line
current-column first-column)
(local-read-char char char-code
(return-values :EOF nil)
()
()
())
(go ignore-erroneous-pragma))
(t
(let* ((prefix (cp-descriptor-prefix cp-descriptor))
(postfix (cp-descriptor-postfix cp-descriptor))
(start (length prefix))
(end (- (length pragma-chars) (length postfix)))
(body-chars (subseq (nreverse pragma-chars) start end))
(body (coerce body-chars 'string)))
(return-values-using-prior-last :PRAGMA (list prefix body postfix))))))
))))
(defun convert-hex-char-to-number (x)
  "Return the numeric value (0-15) of the hexadecimal digit character X,
accepting both lowercase and uppercase letters, or NIL when X is not a
character or not a hex digit."
  ;; DIGIT-CHAR-P with radix 16 implements exactly the 0-9/a-f/A-F table,
  ;; returning the digit's weight or NIL.  Guard with CHARACTERP so that
  ;; non-character arguments (e.g. an eof marker) also yield NIL, matching
  ;; the behavior of the original CASE dispatch.
  (and (characterp x)
       (digit-char-p x 16)))
(defun applicable-cp-descriptor (first-char ps-stream cp-descriptors)
  "Return the first descriptor in CP-DESCRIPTORS whose prefix matches
FIRST-CHAR followed by the upcoming characters of PS-STREAM, or NIL when
none matches.  The stream position is unchanged (the prefix test only
peeks ahead)."
  (find-if #'(lambda (cp-descriptor)
               (pseudo-stream-has-prefix? first-char ps-stream
                                          (cp-descriptor-prefix cp-descriptor)))
           cp-descriptors))
;; FIX: the in-body comment below had lost its ";;" marker (it appeared as a
;; bare text line), which made this definition unreadable by the Lisp reader.
(defun pseudo-stream-has-prefix? (first-char ps-stream prefix)
  "Return true when FIRST-CHAR followed by the next characters on PS-STREAM
spells PREFIX.  Pure lookahead: every character read here is unread again
before returning, so the stream position is unchanged whether or not the
prefix matches."
  (and (eq (schar prefix 0) first-char)
       (let* ((lookahead-chars nil)
              (result
               (dotimes (i (1- (length prefix))
                         t)
                 (let ((char (ps-read-char ps-stream)))
                   (cond ((eq char +tokenizer-eof+)
                          ;; if eof intervenes, the result is nil
                          (return nil))
                         ((eq char (schar prefix (1+ i)))
                          (push char lookahead-chars))
                         (t
                          ;; Mismatch: put the offending char back; the
                          ;; matched lookahead is restored below.
                          (ps-unread-char char ps-stream)
                          (return nil)))))))
         ;; Restore the stream: LOOKAHEAD-CHARS holds the matched characters
         ;; most-recent-first, which is the correct order for unreading.
         (dolist (char lookahead-chars)
           (ps-unread-char char ps-stream))
         result)))
;; Scratch state threaded through SKIP-EXTENDED-COMMENT and
;; AUX-SKIP-EXTENDED-COMMENT while scanning one extended comment or pragma.
(defstruct extended-comment-state
  error?          ; T when the scan hit eof before the closing delimiter
  cp-descriptors  ; descriptors consulted for nested comment/pragma openers
  comment-table   ; dispatch table: which chars may open a nested comment/pragma
  byte            ; current byte position in the input
  line            ; current line number
  column          ; current column number
  chars)          ; characters consumed so far, most recent first (callers nreverse)
;; A single shared scratch instance, reset at the start of each scan by
;; SKIP-EXTENDED-COMMENT; nested scans deliberately reuse it, so this
;; machinery is not reentrant across concurrent tokenizations.
(defvar *extended-comment-state* (make-extended-comment-state))
(defun scan-pragma (first-char ps-stream cp-descriptor first-byte first-line first-column)
  "Scan a pragma whose opener begins with FIRST-CHAR, using CP-DESCRIPTOR's
prefix/postfix delimiters.  Pragma bodies are treated as ordinary text, so
no nested comment/pragma descriptors apply: the descriptor list is empty.
Returns the same five values as SKIP-EXTENDED-COMMENT."
  ;; BUG FIX: SKIP-EXTENDED-COMMENT takes eight arguments -- the fifth is the
  ;; comment dispatch table -- but the original call passed only seven,
  ;; signalling a wrong-argument-count error on every pragma.  With an empty
  ;; descriptor list the table is never consulted (it is only read for
  ;; recursive nested openers), so NIL is a safe placeholder.
  (skip-extended-comment first-char ps-stream cp-descriptor '() nil
                         first-byte first-line first-column))
(defun skip-extended-comment (first-char ps-stream cp-descriptor cp-descriptors
                              comment-table
                              first-byte first-line first-column)
  "Consume an extended comment opened by FIRST-CHAR, delimited as described
by CP-DESCRIPTOR, tracking byte/line/column from the FIRST-* positions.
Returns five values: the error flag, the comment's characters (most recent
first; callers NREVERSE), and the final byte, line, and column."
  (let ((state *extended-comment-state*))
    ;; Reset the shared scratch instance for this scan in one SETF form.
    (setf (extended-comment-state-error? state) nil
          (extended-comment-state-cp-descriptors state) cp-descriptors
          (extended-comment-state-comment-table state) comment-table
          (extended-comment-state-byte state) first-byte
          (extended-comment-state-line state) first-line
          (extended-comment-state-column state) first-column
          (extended-comment-state-chars state) (list first-char))
    (aux-skip-extended-comment ps-stream cp-descriptor state)
    (values (extended-comment-state-error? state)
            (extended-comment-state-chars state)
            (extended-comment-state-byte state)
            (extended-comment-state-line state)
            (extended-comment-state-column state))))
;; Core scanner for extended comments and pragmas.  On entry PS-STREAM is
;; positioned just after the first character of the opening delimiter;
;; EC-STATE accumulates the consumed characters and byte/line/column.
;; Returns NIL on a clean close (or on eof when the descriptor allows it);
;; on a premature eof it sets the state's error flag and returns T.
(defun aux-skip-extended-comment (ps-stream cp-descriptor ec-state)
  (let* ((prefix (cp-descriptor-prefix cp-descriptor))
         (postfix (cp-descriptor-postfix cp-descriptor))
         (recursive? (cp-descriptor-recursive? cp-descriptor))
         (eof-ok? (cp-descriptor-eof-ok? cp-descriptor))
         ;; The first char of the prefix was already consumed by the caller.
         (open-size (1- (length prefix)))
         (close-size (1- (length postfix)))
         (close-char-0 (schar postfix 0))
         (comment-table (extended-comment-state-comment-table ec-state)))
    ;; Consume the remainder of the opening delimiter.
    (dotimes (i open-size)
      (push (ps-read-char ps-stream)
            (extended-comment-state-chars ec-state)))
    (incf (extended-comment-state-byte ec-state) open-size)
    (incf (extended-comment-state-column ec-state) open-size)
    (do ((char (ps-read-char ps-stream) (ps-read-char ps-stream)))
        ((eq char +tokenizer-eof+)
         ;; Ran off the end of the stream before seeing the closer.
         (cond (eof-ok?
                nil)
               (t
                (setf (extended-comment-state-error? ec-state) t)
                t)))
      (push char (extended-comment-state-chars ec-state))
      (incf (extended-comment-state-byte ec-state))
      ;; Track line/column; column is set to -1 so the INCF for the next
      ;; character brings it to 0 at the start of the new line.
      (cond ((eq char #\newline)
             (incf (extended-comment-state-line ec-state))
             (setf (extended-comment-state-column ec-state) -1))
            (t
             (incf (extended-comment-state-column ec-state))))
      (cond ((and (eq char close-char-0)
                  (pseudo-stream-has-prefix? char ps-stream postfix))
             ;; Found the closing delimiter; consume its remaining chars
             ;; (PSEUDO-STREAM-HAS-PREFIX? only peeked at them).
             (dotimes (i close-size)
               (push (ps-read-char ps-stream)
                     (extended-comment-state-chars ec-state)))
             (incf (extended-comment-state-byte ec-state) close-size)
             (incf (extended-comment-state-column ec-state) close-size)
             (return-from aux-skip-extended-comment nil))
            ((and recursive?
                  (eq (svref comment-table (char-code char))
                      +maybe-open-comment-or-pragma-code+))
             ;; Possible nested opener: look up a matching descriptor and,
             ;; if it is itself recursive, scan the nested comment with the
             ;; same shared state so its characters are accumulated too.
             (let ((new-cp-descriptor (applicable-cp-descriptor
                                       char ps-stream
                                       (extended-comment-state-cp-descriptors ec-state))))
               (when (not (null new-cp-descriptor))
                 (let ((inner-is-recursive? (cp-descriptor-recursive? new-cp-descriptor)))
                   (when inner-is-recursive?
                     (aux-skip-extended-comment ps-stream new-cp-descriptor ec-state))))))))))
;; FIXES: (1) the "(third comment) is pre-position..." comment had lost its
;; ";;" marker and appeared as a bare text line, breaking the definition;
;; (2) typo "Pre-Comemnts" in a debug format string.
(defun install-tokens (session tokens comments)
  "Build SESSION's parser-location array from TOKENS, creating one token
node per token and attaching each comment in COMMENTS (assumed ordered by
position) to the location immediately preceding it.  Any comments left
after the last token are attached to the final (eof) location.  Returns
the vector of parser locations."
  ;; +token-rule+ is initialized elsewhere; NULL here means the parser
  ;; tables were never set up, so drop into the debugger.
  (when (null +token-rule+) (break "???"))
  (let ((locations (make-array (1+ (length tokens)) :initial-element nil))
        (pre-index 0)
        (pre-location (make-parser-location
                       :index 0
                       :post-nodes nil))
        (last-node nil))
    (setf (svref locations pre-index) pre-location)
    (setf (parse-session-locations session) locations)
    (setf (parse-session-comments session) comments)
    (dolist (token tokens)
      (let* ((token-start-byte (first (third token)))
             (pre-comments '())
             (post-index (1+ pre-index))
             (node (create-parser-node :rule +token-rule+
                                       :semantics token
                                       :pre-index pre-index
                                       :post-index-ptr (list post-index)
                                       :parents nil
                                       :children nil)))
        (setq last-node node)
        (push node (parser-location-post-nodes pre-location))
        (when-debugging (when *verbose?* (show-node node "Created ")))
        (setf (parser-location-position pre-location) (third token))
        ;; (third comment) is the pre-position of a comment: (byte line column).
        ;; Move every comment that starts before this token onto the current
        ;; location's pre-comment list.
        (loop while (and (not (null comments))
                         (< (first (third (first comments)))
                            token-start-byte))
          do (push (pop comments) pre-comments))
        (setf (parser-location-pre-comments pre-location) pre-comments)
        (when-debugging
         (when *verbose?*
           (unless (null pre-comments)
             ;; Typo fix: was "Pre-Comemnts".
             (comment "Pre-Comments for ~6D: ~S" pre-index pre-comments))))
        ;; Advance to the next location slot.
        (setq pre-index post-index)
        (setq pre-location (make-parser-location
                            :index post-index
                            :post-nodes nil))
        (setf (svref locations pre-index) pre-location)))
    (debugging-comment "Pre-Comments for ~6D (eof): ~S" pre-index comments)
    ;; The trailing location absorbs all remaining comments; its position is
    ;; taken from the last token or the last trailing comment.
    (let ((eof-location pre-location))
      (setf (parser-location-pre-comments eof-location) comments)
      (if (null last-node)
          (debugging-comment "No tokens")
          (let* ((last-token (if (null comments)
                                 (parser-node-semantics last-node)
                                 (first (last comments))))
                 (last-pos (fourth last-token)))
            (debugging-comment "Last token: ~S" last-token)
            (setf (parser-location-position eof-location) last-pos))))
    locations))
|
c6de675f0db01cae13f01757931e022bb696f5fd4ba5399f229d8428f8054ccf | schemeorg-community/index.scheme.org | srfi.219.scm | (((name . "define")
(signature
syntax-rules
()
((_ variable expression))
((_ (variable parameter1 ...) body))
((_ (variable parameter1 ... . parameter) body))
((_ ((variable inner-param1 ...) outter-param1 ...) body))
((_ ((variable inner-param1 ...) outter-param1 ... . outter-rest) body))
((_ ((variable inner-param1 ... . inner-rest) outter-param1 ...) body))
((_
((variable inner-param1 ... . inner-rest) outter-param1 ... . outter-rest)
body)))))
| null | https://raw.githubusercontent.com/schemeorg-community/index.scheme.org/32e1afcfe423a158ac8ce014f5c0b8399d12a1ea/types/srfi.219.scm | scheme | (((name . "define")
(signature
syntax-rules
()
((_ variable expression))
((_ (variable parameter1 ...) body))
((_ (variable parameter1 ... . parameter) body))
((_ ((variable inner-param1 ...) outter-param1 ...) body))
((_ ((variable inner-param1 ...) outter-param1 ... . outter-rest) body))
((_ ((variable inner-param1 ... . inner-rest) outter-param1 ...) body))
((_
((variable inner-param1 ... . inner-rest) outter-param1 ... . outter-rest)
body)))))
| |
2b561826f60fdbce57922f1aba579ac907c4c96cc4aeb49e7ddf84e00b444a33 | facebook/infer | IStd.ml |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
(* Bring Jane Street's Core into scope as the default replacement for the
   OCaml standard library in every module that opens IStd. *)
include Core
(* The shadowing declarations below are intentionally unused in this file. *)
[@@@warning "-unused-value-declaration"]
(* easier to write Unix than Core_unix *)
module Unix = Core_unix
(* we don't care about the _unix distinction *)
module Filename = struct
  include Filename
  include Filename_unix
end
(* we don't care about the _unix distinction *)
module Sys = struct
  include Sys
  include Sys_unix
end
(* Compare police: generic compare mostly disabled. *)
let compare = No_polymorphic_compare.compare
let equal = No_polymorphic_compare.equal
let ( = ) = No_polymorphic_compare.( = )
(* Shadow the stdlib failure functions with versions whose return type is an
   unusable polymorphic variant, so any call site fails to type-check and is
   steered toward Logging.die; the bodies are unreachable by design. *)
let failwith _ : [`use_Logging_die_instead] = assert false
let failwithf _ : [`use_Logging_die_instead] = assert false
let invalid_arg _ : [`use_Logging_die_instead] = assert false
let invalid_argf _ : [`use_Logging_die_instead] = assert false
(* Likewise shadow [exit] with a plain variant value: referencing it never
   terminates the program, it just produces a type error at the call site. *)
let exit = `In_general_prefer_using_Logging_exit_over_Pervasives_exit
(* Re-enable the warning for the rest of the compilation unit. *)
[@@@warning "+unused-value-declaration"]
(* Wrap ANSITerminal behind its own signature so that styled output is only
   emitted when the relevant channel is actually a terminal; otherwise fall
   back to the plain Stdlib printers, keeping escape codes out of piped or
   redirected output.  The isatty checks run once, at module initialization. *)
module ANSITerminal : module type of ANSITerminal = struct
  include ANSITerminal
  (* more careful about when the channel is connected to a tty *)
  let print_string = if Unix.(isatty stdout) then print_string else fun _ -> Stdlib.print_string
  let prerr_string = if Unix.(isatty stderr) then prerr_string else fun _ -> Stdlib.prerr_string
  (* printf/eprintf format into a string, then reuse the tty-aware printers. *)
  let printf styles fmt = Format.ksprintf (fun s -> print_string styles s) fmt
  let eprintf styles fmt = Format.ksprintf (fun s -> prerr_string styles s) fmt
  (* NOTE(review): styling for [sprintf] is gated on stderr being a tty even
     though the resulting string could be sent anywhere -- confirm intended. *)
  let sprintf = if Unix.(isatty stderr) then sprintf else fun _ -> Printf.sprintf
end
| null | https://raw.githubusercontent.com/facebook/infer/b1024aade8b9c7d7ba88ca381a5b8610534ddf02/infer/src/istd/IStd.ml | ocaml | easier to write Unix than Core_unix
we don't care about the _unix distinction
we don't care about the _unix distinction
Compare police: generic compare mostly disabled.
more careful about when the channel is connected to a tty |
* Copyright ( c ) Facebook , Inc. and its affiliates .
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree .
* Copyright (c) Facebook, Inc. and its affiliates.
*
* This source code is licensed under the MIT license found in the
* LICENSE file in the root directory of this source tree.
*)
include Core
[@@@warning "-unused-value-declaration"]
module Unix = Core_unix
module Filename = struct
include Filename
include Filename_unix
end
module Sys = struct
include Sys
include Sys_unix
end
let compare = No_polymorphic_compare.compare
let equal = No_polymorphic_compare.equal
let ( = ) = No_polymorphic_compare.( = )
let failwith _ : [`use_Logging_die_instead] = assert false
let failwithf _ : [`use_Logging_die_instead] = assert false
let invalid_arg _ : [`use_Logging_die_instead] = assert false
let invalid_argf _ : [`use_Logging_die_instead] = assert false
let exit = `In_general_prefer_using_Logging_exit_over_Pervasives_exit
[@@@warning "+unused-value-declaration"]
module ANSITerminal : module type of ANSITerminal = struct
  include ANSITerminal

  (* only emit ANSI styling when the target channel is attached to a tty *)
  let print_string = if Unix.(isatty stdout) then print_string else fun _ -> Stdlib.print_string

  let prerr_string = if Unix.(isatty stderr) then prerr_string else fun _ -> Stdlib.prerr_string

  let printf styles fmt = Format.ksprintf (fun s -> print_string styles s) fmt

  let eprintf styles fmt = Format.ksprintf (fun s -> prerr_string styles s) fmt

  (* NOTE(review): gated on stderr being a tty -- verify against call sites *)
  let sprintf = if Unix.(isatty stderr) then sprintf else fun _ -> Printf.sprintf
end
|
805087aa3180fdeecaf0f8ee3c38d24da43ce1d23d5afb19631faadd7d59f6df | tsloughter/kakapo | kakapo_sup.erl | -module(kakapo_sup).
-behaviour(supervisor).
%% API
-export([start_link/0]).
%% Supervisor callbacks
-export([init/1]).
%% ===================================================================
%% API functions
%% ===================================================================
%% @doc Start the supervisor, registered locally under the module name.
start_link() ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).
%% ===================================================================
%% Supervisor callbacks
%% ===================================================================
%% @doc Supervisor init callback: builds the cowboy routing table and a
%% single ranch listener child on the port taken from the PORT env var.
init(_Args) ->
    RestartStrategy = one_for_one,
    MaxRestarts = 1000,
    MaxSecondsBetweenRestarts = 3600,
    SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
    %% Route table shape: {HostMatch, list({PathMatch, Handler, Opts})};
    %% every host/path is dispatched to kakapo_route_handler.
    Dispatch = cowboy_router:compile([
        {'_', [{'_', kakapo_route_handler, []}]}
    ]),
    %% Crashes init (and thus startup) if PORT is unset or non-numeric.
    ListenPort = list_to_integer(os:getenv("PORT")),
    ChildSpecs = [ranch:child_spec(kakapo_mesh_cowboy, 100,
                                   ranch_tcp, [{port, ListenPort}],
                                   cowboy_protocol, [{env, [{dispatch, Dispatch}]}])],
    {ok, {SupFlags, ChildSpecs}}.
| null | https://raw.githubusercontent.com/tsloughter/kakapo/7f2062029a2a26825055ef19ebe8d043d300df6b/apps/kakapo/src/kakapo_sup.erl | erlang | API
Supervisor callbacks
===================================================================
API functions
===================================================================
===================================================================
Supervisor callbacks
=================================================================== | -module(kakapo_sup).
-behaviour(supervisor).
-export([start_link/0]).
-export([init/1]).
%% @doc Start the supervisor, registered locally under the module name.
start_link() ->
    supervisor:start_link({local, ?MODULE}, ?MODULE, []).
%% @doc Supervisor init callback: compiles the cowboy route table (all
%% hosts/paths go to kakapo_route_handler) and returns one ranch listener
%% child listening on the port from the PORT env var.
init(_Args) ->
    RestartStrategy = one_for_one,
    MaxRestarts = 1000,
    MaxSecondsBetweenRestarts = 3600,
    SupFlags = {RestartStrategy, MaxRestarts, MaxSecondsBetweenRestarts},
    Dispatch = cowboy_router:compile([
        {'_', [{'_', kakapo_route_handler, []}]}
    ]),
    %% crashes init if PORT is unset or non-numeric
    ListenPort = list_to_integer(os:getenv("PORT")),
    ChildSpecs = [ranch:child_spec(kakapo_mesh_cowboy, 100,
                   ranch_tcp, [{port, ListenPort}],
                   cowboy_protocol, [{env, [{dispatch, Dispatch}]}])],
    {ok, {SupFlags, ChildSpecs}}.
|
c2af1f2559d4e25ea9d0135a171c8eff13d77bf4477b7ebf860c17372f440ebd | Beluga-lang/Beluga | substitution.ml | * Substitutions
@author
@author Brigitte Pientka
*)
open Support
open Support.Equality
open Syntax.Int.LF
module LF = struct
exception NotComposable of string
(**************************)
(* Explicit Substitutions *)
(**************************)
i d = ^0
*
* Invariant :
*
* cPsi |- i d : cPsi i d is patsub
*
* Note : we do not take into account weakening here .
*
* Invariant:
*
* cPsi |- id : cPsi id is patsub
*
* Note: we do not take into account weakening here.
*)
let id = Shift 0
shift = ^1
*
* Invariant :
*
* cPsi , x : tA |- ^ : cPsi ^ is patsub
*
* Invariant:
*
* cPsi, x:tA |- ^ : cPsi ^ is patsub
*)
let shift = Shift 1
invShift = ^-1 = _ .^0
*
* Invariant :
*
* Psi |- ^-1 : Psi , A ^-1 is patsub
*
* Invariant:
*
* Psi |- ^-1 : Psi, A ^-1 is patsub
*)
let invShift = Dot (Undef, id)
let rec shiftComp n s2 =
match (n, s2) with
| (0, s) -> s
| (n, EmptySub) -> raise (NotComposable (Format.asprintf "Shift %d, EmptySub" n))
| (n, Undefs) -> Undefs
| (n, SVar (s, k, r)) -> SVar (s, (k + n), r)
| (n, MSVar (k, ((s, t), r))) -> MSVar (k + n, ((s, t), r))
| (n, FSVar (k, (s, tau))) -> FSVar (k + n, (s, tau))
| (n, Shift m) -> Shift (n + m)
| (n, Dot (_, s)) -> shiftComp (n - 1) s
comp s1 s2 = s '
*
* Invariant :
*
* If Psi ' |- s1 : Psi
* and Psi '' |- s2 : Psi '
* then s ' o s2
* and Psi '' |- s1 o s2 : Psi
*
* If s1 , s2 patsub
* then s ' patsub
*
* Invariant:
*
* If Psi' |- s1 : Psi
* and Psi'' |- s2 : Psi'
* then s' = s1 o s2
* and Psi'' |- s1 o s2 : Psi
*
* If s1, s2 patsub
* then s' patsub
*)
let rec comp s1 s2 =
match (s1, s2) with
| (EmptySub, s2) -> EmptySub
| (Undefs, s2) -> Undefs
| (s, Shift 0) -> s (* Optimization *)
| (Shift n, s2) -> shiftComp n s2
| (SVar (s, n, tau), s2) ->
SVar (s, n, comp tau s2)
| (MSVar (n, ((s, theta), tau)), s2) ->
MSVar (n, ((s, theta), comp tau s2))
| (FSVar (n, (s, tau)), s2) ->
FSVar (n, (s, comp tau s2))
| (Dot (ft, s), s') ->
] , Shift k ) = s[tau ]
* where s : : Psi[Phi ] and k
*
* ] , Shift k ) = Dot ( Id(1 ) , ... Dot ( Id(k0 ) , s[tau ] ) )
* where s : : Psi[Phi ] and k '
* k = k ' + k0
* where s :: Psi[Phi] and |Psi| = k
*
* comp(s[tau], Shift k) = Dot (Id(1), ... Dot (Id(k0), s[tau]))
* where s :: Psi[Phi] and |Psi| = k'
* k = k' + k0
*)
let h = frontSub ft s' in
Dot (h, comp s s')
bvarSub n s = Ft '
*
* Invariant :
*
* If Psi |- s < = Psi ' Psi ' |- n < = A
* then Ft ' = Ftn if s = Ft1 .. Ftn .. ^k
* or Ft ' = ^(n + k ) if s = Ft1 .. and m < n
* and Psi |- Ft ' < = [ s]A
*
* Invariant:
*
* If Psi |- s <= Psi' Psi' |- n <= A
* then Ft' = Ftn if s = Ft1 .. Ftn .. ^k
* or Ft' = ^(n + k) if s = Ft1 .. Ftm ^k and m < n
* and Psi |- Ft' <= [s]A
*)
and bvarSub n s =
match (n, s) with
| (_, Undefs) -> Undef
| (1, Dot (ft, _s)) -> ft
| (n, Dot (_ft, s)) -> bvarSub (n - 1) s
| (n, Shift k) -> Head (BVar (n + k))
| ( n , MSVar ( s , ( _ cshift , k ) , ( mt , sigma ) ) ) - >
( * Should be fixed ; we really need phat of n to avoid printing
Free BVar ( n + k ) ...
( Head ( HClo ( phat . BVar n + k , s , sigma ) )
-bp
(* Should be fixed; we really need phat of n to avoid printing
Free BVar (n + k) ...
(Head (HClo (phat. BVar n + k, s, sigma ))
-bp *)
Head (HMClo (n + k, s, (mt, sigma)))
Can this happen ?
*)
| (n, SVar (s, k, sigma)) ->
Should be fixed ; we really need phat of n to avoid printing
Free BVar ( n + k ) ... -bp
Free BVar (n + k) ... -bp *)
Head (HClo (n + k, s, sigma))
| (n, MSVar (k, ((s, t), sigma))) ->
Head (HMClo (n + k, ((s, t), sigma)))
( print_string " [ bvarSub ] n , MSVar - not implemented " ;
raise ( NotComposable " grr " ) )
raise (NotComposable "grr"))
*)
frontSub Ft s = Ft '
*
* Invariant :
*
* If Psi |- s : Psi ' Psi ' |- Ft : A
* then Ft ' = Ft [ s ]
* and Psi |- Ft ' : [ s]A
*
* Invariant:
*
* If Psi |- s : Psi' Psi' |- Ft : A
* then Ft' = Ft [s]
* and Psi |- Ft' : [s]A
*)
and frontSub ft s =
match ft with
| Head (HClo (n, s', sigma)) -> Head (HClo (n, s', comp sigma s))
| Head (HMClo (n, ((s', theta), sigma))) -> Head (HMClo (n, ((s', theta), comp sigma s)))
| Head (BVar n) -> bvarSub n s
| Head (FVar _) -> ft
| Head (MVar (u, s')) -> Head (MVar (u, comp s' s))
| Head (PVar (u, s')) -> Head (PVar (u, comp s' s))
| Head (Proj (BVar n, k)) ->
begin match bvarSub n s with
| Head (BVar x) ->
Head (Proj (BVar x, k))
| Head (PVar _ as h) ->
Head (Proj (h, k))
| Obj (Tuple (_, tuple)) ->
let rec nth s =
function
| (Last u, 1) -> u
| (Cons (u, _), 1) -> u
| (Cons (u, tuple), n) -> nth (Dot (Obj u, s)) (tuple, n - 1)
in
(* Obj (Clo (nth s (tuple, k))) *)
Obj (nth s (tuple, k))
| Obj (Lam _ ) -> failwith "Found Lam - should be tuple"
| Obj (Clo (Tuple (_, tuple), s')) ->
let rec nth s =
function
| (Last u, 1) -> u
| (Cons (u, _), 1) -> u
| (Cons (u, tuple), n) -> nth (Dot (Obj u, s)) (tuple, n - 1)
in
(* Obj (Clo (nth s (tuple, k))) *)
Obj (Clo (nth s (tuple, k), s'))
| Obj (Clo ((Root (_, (PVar _ ), Nil, _)), _ )) -> failwith "Found Clo - PVar "
| Obj (Clo ((Root (_, (BVar _ ), Nil, _)), _ )) -> failwith "Found Clo - BVar "
| Obj (Clo ((Root (_ , h, Nil, _ )), _ )) -> failwith "Found Clo with root that is not a var - should not happen"
| Obj (Clo ((Root (_ , h, _, _ )), _ )) -> failwith "Found Clo with root that has a non-empty spine - should not happen"
| Obj (Clo (_ , _ ) ) -> failwith ("BVar n = " ^ string_of_int n ^ " stands for Clo which is not a tuple – cannot take proj " ^ string_of_int k )
| Obj (Root (_, (PVar _ as h), Nil, _)) -> Head (Proj (h, k))
| Obj (Root (_, (BVar _ as h), Nil, _)) -> Head (Proj (h, k))
| Obj (LFHole (_, _ , _ )) -> failwith "Found Obj which LFHole – cannot take a proj."
| Obj (Root (_,Proj (h, _ ), _, _ )) -> failwith "Found Obj which is Proj ? – but we cannot take the projection of a projection; incompatible with taking a proj."
| Obj (Root ( _, _, _ , _ )) -> failwith "Root Obj incompatible with projections"
| Head (HClo (_, _, _) as h) -> Head (Proj (h, k))
| Head (HMClo (_, _) as h) -> Head (Proj (h, k))
| Head (Proj (h, _ )) -> failwith "Found head that is a Proj?? - nested Proj not allowed"
| Head (MPVar _ ) -> failwith "Found head that is MPVar"
| Head (FPVar _ ) -> failwith "Found head that is FPVar"
| Head (MVar _ ) -> failwith "Found head that is MVar"
| Head (AnnH _ ) -> failwith "Found head that is AnnH"
| Head (MMVar _ ) -> failwith "Found head that is MMVar"
| Head _ -> failwith "Found head that is not a BVar or PVar"
end
| Head (Proj (h, k)) ->
begin match frontSub (Head h) s with
| Head h' ->
Head (Proj (h', k))
| Obj (Tuple (_, tuple)) ->
let rec nth s =
function
| (Last u, 1) -> (u, s)
| (Cons (u, _), 1) -> (u, s)
| (Cons (u, tuple), n) -> nth (Dot (Obj u, s)) (tuple, n - 1)
in
(* Obj (Clo (nth s (tuple, k))) *)
Obj (Pair.fst (nth s (tuple, k)))
end
| Head (AnnH (h, a)) ->
let Head h' = frontSub (Head h) s in
Head (AnnH (h', a))
| Head (Const c) -> Head (Const c)
| Obj ( Root ( _ , h , ) ) - > frontSub ( Head h ) s
| Obj u -> Obj (Clo (u, s))
| Undef -> Undef
| Head (MMVar (n, s')) -> Head (MMVar (n, comp s' s))
| Head (FPVar (_n, _s' )) -> ft
| Head ( HMClo ( n , s ' , ( theta , sigma ) ) ) - >
* Head ( HMClo ( n , s ' , ( mt , comp sigma s ) ) ? ?
* Head (HMClo (n, s', (mt, comp sigma s)) ?? *)
dot1 ( s ) = s '
*
* Invariant :
*
* If Psi |- s : Psi '
* then s ' = 1 . ( s o ^ )
* and for all A s.t . Psi ' |- A : type
* Psi , [ s]A |- s ' : Psi ' , A
*
* If s patsub then s ' patsub
*
* Invariant:
*
* If Psi |- s : Psi'
* then s' = 1. (s o ^)
* and for all A s.t. Psi' |- A : type
* Psi, [s]A |- s' : Psi', A
*
* If s patsub then s' patsub
*)
first line is an optimization
roughly 15 % on standard suite for Twelf 1.1
Sat Feb 14 10:16:16 1998 -fp
and dot1 s =
match s with
| Shift 0 -> s
| s -> Dot (Head (BVar 1), comp s shift)
decSub ( x : tA ) s = ( x : tA[s ] )
*
* Invariant :
*
* If D ; Psi |- s < = Psi ' D ; Psi ' |- A < = type
* then D ; Psi |- [ s]A ] < = type
*
* Invariant:
*
* If D ; Psi |- s <= Psi' D ; Psi' |- A <= type
* then D ; Psi |- [s]A] <= type
*)
First line is an optimization suggested by cs
(* Dec[id] = Dec *)
Sat Feb 14 18:37:44 1998 -fp
(* seems to have no statistically significant effect *)
undo for now Sat Feb 14 20:22:29 1998 -fp
fun decSub ( D , Shift ( 0 ) ) = D
| decSub ( Dec ( x , A ) , s ) = Dec ( x , Clo ( A , s ) )
fun decSub (D, Shift (0)) = D
| decSub (Dec (x, A), s) = Dec (x, Clo (A, s))
*)
let decSub (TypDecl (x, tA)) s = TypDecl (x, TClo (tA, s))
invDot1 ( s ) = s '
* invDot1 ( 1 . s ' o ^ ) = s '
*
* Invariant :
*
* s = 1 . s ' o ^
* If Psi ' |- s ' : Psi
* ( so Psi',A[s ] |- s : Psi , A )
* invDot1 (1. s' o ^) = s'
*
* Invariant:
*
* s = 1 . s' o ^
* If Psi' |- s' : Psi
* (so Psi',A[s] |- s : Psi,A)
*)
let invDot1 s = comp ( comp shift s ) invShift
(***************************)
(* Inverting Substitutions *)
(***************************)
invert s = s '
*
* Invariant :
*
* If D ; Psi |- s < = Psi ' ( and s patsub )
* then D ; Psi ' |- s ' < = Psi
* = i d ( comp s ' s = i d )
*
* Invariant:
*
* If D ; Psi |- s <= Psi' (and s patsub)
* then D ; Psi' |- s' <= Psi
* s.t. s o s' = id (comp s' s = id)
*)
let invert s =
let rec lookup n s p =
match s with
| EmptySub -> None
| Undefs -> None
| Shift _ -> None
| Dot (Undef, s') -> lookup (n + 1) s' p
| Dot (Head (BVar k), s') ->
if k = p
then Some (Head (BVar n))
else lookup (n + 1) s' p
in
let rec invert'' p si =
match p with
| 0 -> si
| p ->
let front =
match lookup 1 s p with
| Some h -> h
| None -> Undef
in
invert'' (p - 1) (Dot (front, si))
in
let rec invert' n s maxoffset =
match s with
| EmptySub ->
invert'' maxoffset Undefs
| Undefs ->
invert'' maxoffset Undefs
| Shift p ->
invert'' p (Shift n)
| Dot (Head (BVar k), s') ->
invert' (n + 1) s' (max k maxoffset)
| Dot (_, s') -> (* Is this really necessary? -ac *)
invert' (n + 1) s' maxoffset
in
invert' 0 s 0
strengthen s Psi = Psi '
*
* If D ; Psi '' |- s : Psi ( * and s is a pattern sub
*
* If D ; Psi'' |- s : Psi (* and s is a pattern sub *)
* then D ; Psi' |- s : Psi and Psi' subcontext of Psi
*)
let rec strengthen s cPsi =
match (s, cPsi) with
0
Null
| (Shift _, CtxVar psi) ->
CtxVar psi
k = 1
let t' = comp t invShift in
Psi |- x : A dec where = x : A
* Psi ' |- t ' : Psi
* Psi ' |- x:[t']A dec
* Psi' |- t' : Psi
* Psi' |- x:[t']A dec
*)
DDec (strengthen t' cPsi, decSub decl t')
| (Dot (Undef, t), DDec (cPsi, _)) ->
strengthen t cPsi
| (Shift n, cPsi) ->
strengthen (Dot (Head (BVar (n + 1)), Shift (n + 1))) cPsi
isId : sub - > bool
*
* Invariant :
*
* Given Psi |- s : Psi ' , s weakensub
* isId s returns true iff s = i d and Psi ' = Psi .
*
* Invariant:
*
* Given Psi |- s: Psi', s weakensub
* isId s returns true iff s = id and Psi' = Psi.
*)
let isId s =
let rec isId' s k' =
match s with
| Shift k -> k = k'
| Dot (Head (BVar n), s') -> n = (k' + 1) && isId' s' (k' + 1)
| _ -> false
in
isId' s 0
cloInv ( U , w ) = U[w^-1 ]
*
* Invariant :
*
* If Psi |- M < = A
* Psi |- w < = Psi ' w pattern subst
* [ w^-1]M defined ( without pruning or constraints )
*
* then Psi ' |- [ w^-1]M : [ w^-1]A
*
* Effects : None
*
* Invariant:
*
* If Psi |- M <= A
* Psi |- w <= Psi' w pattern subst
* [w^-1]M defined (without pruning or constraints)
*
* then Psi' |- [w^-1]M : [w^-1]A
*
* Effects: None
*)
let cloInv ( tM , w ) = Clo ( tM , invert w )
(* compInv s w = t
*
* Invariant:
*
* If D ; Psi |- s <= Psi1
* D ; Psi |- w <= Psi'
* then t = s o (w^-1)
* and D ; Psi' |- t <= Psi1
*)
(* let compInv s w = comp s (invert w) *)
isMId t = B
*
* Invariant :
*
* If |- t : ' , t weaken_msub
* then B holds
* iff t = i d , ' = cD
*
* Invariant:
*
* If cD |- t: cD', t weaken_msub
* then B holds
* iff t = id, cD' = cD
*)
let isMId t =
let rec isId' s k' =
match s with
| MShift k -> k = k'
| MDot (MV n, s') -> n = k' && isId' s' (k' + 1)
| _ -> false
in
isId' t 0
applyMSub n t = MFt '
Invariant :
If D |- t < = D ' n - th element in D ' = A[Psi ]
then Ft ' = if t = Ft_1 .. Ft_n .. ^0
and D ; [ |t|]Psi |- Ft ' < = [ |t|]A
Invariant:
If D |- t <= D' n-th element in D' = A[Psi]
then Ft' = Ft_n if t = Ft_1 .. Ft_n .. ^0
and D ; [|t|]Psi |- Ft' <= [|t|]A
*)
let rec applyMSub n t =
match (n, t) with
| (1, MDot (ft, _t)) -> ft
| (n, MDot (_ft, t)) -> applyMSub (n - 1) t
| (n, MShift k) -> MV (k + n)
identity :
*
* identity cPsi = id(cPsi ) ,
* e.g.
* identity ( psi , x : A , y : B ) = Dot ( Head ( BVar 1 , Dot ( Head ( BVar 2 , Shift ( NoCtxShift , 2 ) ) ) ) )
*
* identity cPsi = id(cPsi),
* e.g.
* identity (psi, x:A, y:B) = Dot (Head (BVar 1, Dot (Head (BVar 2, Shift (NoCtxShift, 2)))))
*)
let identity cPsi =
let rec inner n =
function
| Null -> Shift n
| CtxVar _ -> Shift n
| DDec (cPsi, _) ->
let n' = n + 1 in
Dot (Head (BVar n'), inner n' cPsi)
in
inner 0 cPsi
justCtxVar :
*
* justCtxVar cPsi = id[\psi ] where is cPsi 's context variable
* e.g.
* justCtxVar ( psi , x : A , y : B ) = Shift ( NoCtxShift , 2 )
*
* justCtxVar cPsi = id[\psi] where \psi is cPsi's context variable
* e.g.
* justCtxVar (psi, x:A, y:B) = Shift (NoCtxShift, 2)
*)
let justCtxVar cPsi =
let rec inner n =
function
| Null -> Shift n
| CtxVar _ -> Shift n
| DDec (cPsi, _) -> let n = n + 1 in inner n cPsi
in
inner 0 cPsi
end
| null | https://raw.githubusercontent.com/Beluga-lang/Beluga/2b78691e1f3d850a6488ce9cdbba95e8dfbab739/src/core/substitution.ml | ocaml | ************************
Explicit Substitutions
************************
Optimization
Should be fixed; we really need phat of n to avoid printing
Free BVar (n + k) ...
(Head (HClo (phat. BVar n + k, s, sigma ))
-bp
Obj (Clo (nth s (tuple, k)))
Obj (Clo (nth s (tuple, k)))
Obj (Clo (nth s (tuple, k)))
Dec[id] = Dec
seems to have no statistically significant effect
*************************
Inverting Substitutions
*************************
Is this really necessary? -ac
and s is a pattern sub
compInv s w = t
*
* Invariant:
*
* If D ; Psi |- s <= Psi1
* D ; Psi |- w <= Psi'
* then t = s o (w^-1)
* and D ; Psi' |- t <= Psi1
let compInv s w = comp s (invert w) | * Substitutions
@author
@author Brigitte Pientka
*)
open Support
open Support.Equality
open Syntax.Int.LF
module LF = struct
exception NotComposable of string
i d = ^0
*
* Invariant :
*
* cPsi |- i d : cPsi i d is patsub
*
* Note : we do not take into account weakening here .
*
* Invariant:
*
* cPsi |- id : cPsi id is patsub
*
* Note: we do not take into account weakening here.
*)
let id = Shift 0
shift = ^1
*
* Invariant :
*
* cPsi , x : tA |- ^ : cPsi ^ is patsub
*
* Invariant:
*
* cPsi, x:tA |- ^ : cPsi ^ is patsub
*)
let shift = Shift 1
invShift = ^-1 = _ .^0
*
* Invariant :
*
* Psi |- ^-1 : Psi , A ^-1 is patsub
*
* Invariant:
*
* Psi |- ^-1 : Psi, A ^-1 is patsub
*)
let invShift = Dot (Undef, id)
(* shiftComp n s2 = ^n o s2
   Compose a pure shift with s2 by dropping the first n fronts of s2,
   merging shifts, or pushing the offset under substitution closures. *)
let rec shiftComp n s2 =
  match (n, s2) with
  | (0, s) -> s
  | (n, EmptySub) -> raise (NotComposable (Format.asprintf "Shift %d, EmptySub" n))
  | (n, Undefs) -> Undefs
  | (n, SVar (s, k, r)) -> SVar (s, (k + n), r)
  | (n, MSVar (k, ((s, t), r))) -> MSVar (k + n, ((s, t), r))
  | (n, FSVar (k, (s, tau))) -> FSVar (k + n, (s, tau))
  | (n, Shift m) -> Shift (n + m)
  | (n, Dot (_, s)) -> shiftComp (n - 1) s
(* comp s1 s2 = s'
 *
 * Invariant:
 *
 * If   Psi'  |- s1 : Psi
 * and  Psi'' |- s2 : Psi'
 * then s' = s1 o s2
 * and  Psi'' |- s1 o s2 : Psi
 *
 * If s1, s2 patsub
 * then s' patsub
 *)
(* Compose substitutions: comp s1 s2 = s1 o s2. *)
let rec comp s1 s2 =
  match (s1, s2) with
  | (EmptySub, _) -> EmptySub
  | (Undefs, _) -> Undefs
  | (s, Shift 0) -> s (* optimization: s o id = s *)
  | (Shift n, s2) -> shiftComp n s2
  | (SVar (s, n, tau), s2) ->
      SVar (s, n, comp tau s2)
  | (MSVar (n, ((s, theta), tau)), s2) ->
      MSVar (n, ((s, theta), comp tau s2))
  | (FSVar (n, (s, tau)), s2) ->
      FSVar (n, (s, comp tau s2))
  | (Dot (ft, s), s') ->
      (* (Ft . s) o s' = Ft[s'] . (s o s')
       * comp(s[tau], Shift k) = s[tau o ^k], handled via frontSub/shiftComp. *)
      let h = frontSub ft s' in
      Dot (h, comp s s')
bvarSub n s = Ft '
*
* Invariant :
*
* If Psi |- s < = Psi ' Psi ' |- n < = A
* then Ft ' = Ftn if s = Ft1 .. Ftn .. ^k
* or Ft ' = ^(n + k ) if s = Ft1 .. and m < n
* and Psi |- Ft ' < = [ s]A
*
* Invariant:
*
* If Psi |- s <= Psi' Psi' |- n <= A
* then Ft' = Ftn if s = Ft1 .. Ftn .. ^k
* or Ft' = ^(n + k) if s = Ft1 .. Ftm ^k and m < n
* and Psi |- Ft' <= [s]A
*)
and bvarSub n s =
match (n, s) with
| (_, Undefs) -> Undef
| (1, Dot (ft, _s)) -> ft
| (n, Dot (_ft, s)) -> bvarSub (n - 1) s
| (n, Shift k) -> Head (BVar (n + k))
| ( n , MSVar ( s , ( _ cshift , k ) , ( mt , sigma ) ) ) - >
( * Should be fixed ; we really need phat of n to avoid printing
Free BVar ( n + k ) ...
( Head ( HClo ( phat . BVar n + k , s , sigma ) )
-bp
Head (HMClo (n + k, s, (mt, sigma)))
Can this happen ?
*)
| (n, SVar (s, k, sigma)) ->
Should be fixed ; we really need phat of n to avoid printing
Free BVar ( n + k ) ... -bp
Free BVar (n + k) ... -bp *)
Head (HClo (n + k, s, sigma))
| (n, MSVar (k, ((s, t), sigma))) ->
Head (HMClo (n + k, ((s, t), sigma)))
( print_string " [ bvarSub ] n , MSVar - not implemented " ;
raise ( NotComposable " grr " ) )
raise (NotComposable "grr"))
*)
frontSub Ft s = Ft '
*
* Invariant :
*
* If Psi |- s : Psi ' Psi ' |- Ft : A
* then Ft ' = Ft [ s ]
* and Psi |- Ft ' : [ s]A
*
* Invariant:
*
* If Psi |- s : Psi' Psi' |- Ft : A
* then Ft' = Ft [s]
* and Psi |- Ft' : [s]A
*)
and frontSub ft s =
match ft with
| Head (HClo (n, s', sigma)) -> Head (HClo (n, s', comp sigma s))
| Head (HMClo (n, ((s', theta), sigma))) -> Head (HMClo (n, ((s', theta), comp sigma s)))
| Head (BVar n) -> bvarSub n s
| Head (FVar _) -> ft
| Head (MVar (u, s')) -> Head (MVar (u, comp s' s))
| Head (PVar (u, s')) -> Head (PVar (u, comp s' s))
| Head (Proj (BVar n, k)) ->
begin match bvarSub n s with
| Head (BVar x) ->
Head (Proj (BVar x, k))
| Head (PVar _ as h) ->
Head (Proj (h, k))
| Obj (Tuple (_, tuple)) ->
let rec nth s =
function
| (Last u, 1) -> u
| (Cons (u, _), 1) -> u
| (Cons (u, tuple), n) -> nth (Dot (Obj u, s)) (tuple, n - 1)
in
Obj (nth s (tuple, k))
| Obj (Lam _ ) -> failwith "Found Lam - should be tuple"
| Obj (Clo (Tuple (_, tuple), s')) ->
let rec nth s =
function
| (Last u, 1) -> u
| (Cons (u, _), 1) -> u
| (Cons (u, tuple), n) -> nth (Dot (Obj u, s)) (tuple, n - 1)
in
Obj (Clo (nth s (tuple, k), s'))
| Obj (Clo ((Root (_, (PVar _ ), Nil, _)), _ )) -> failwith "Found Clo - PVar "
| Obj (Clo ((Root (_, (BVar _ ), Nil, _)), _ )) -> failwith "Found Clo - BVar "
| Obj (Clo ((Root (_ , h, Nil, _ )), _ )) -> failwith "Found Clo with root that is not a var - should not happen"
| Obj (Clo ((Root (_ , h, _, _ )), _ )) -> failwith "Found Clo with root that has a non-empty spine - should not happen"
| Obj (Clo (_ , _ ) ) -> failwith ("BVar n = " ^ string_of_int n ^ " stands for Clo which is not a tuple – cannot take proj " ^ string_of_int k )
| Obj (Root (_, (PVar _ as h), Nil, _)) -> Head (Proj (h, k))
| Obj (Root (_, (BVar _ as h), Nil, _)) -> Head (Proj (h, k))
| Obj (LFHole (_, _ , _ )) -> failwith "Found Obj which LFHole – cannot take a proj."
| Obj (Root (_,Proj (h, _ ), _, _ )) -> failwith "Found Obj which is Proj ? – but we cannot take the projection of a projection; incompatible with taking a proj."
| Obj (Root ( _, _, _ , _ )) -> failwith "Root Obj incompatible with projections"
| Head (HClo (_, _, _) as h) -> Head (Proj (h, k))
| Head (HMClo (_, _) as h) -> Head (Proj (h, k))
| Head (Proj (h, _ )) -> failwith "Found head that is a Proj?? - nested Proj not allowed"
| Head (MPVar _ ) -> failwith "Found head that is MPVar"
| Head (FPVar _ ) -> failwith "Found head that is FPVar"
| Head (MVar _ ) -> failwith "Found head that is MVar"
| Head (AnnH _ ) -> failwith "Found head that is AnnH"
| Head (MMVar _ ) -> failwith "Found head that is MMVar"
| Head _ -> failwith "Found head that is not a BVar or PVar"
end
| Head (Proj (h, k)) ->
begin match frontSub (Head h) s with
| Head h' ->
Head (Proj (h', k))
| Obj (Tuple (_, tuple)) ->
let rec nth s =
function
| (Last u, 1) -> (u, s)
| (Cons (u, _), 1) -> (u, s)
| (Cons (u, tuple), n) -> nth (Dot (Obj u, s)) (tuple, n - 1)
in
Obj (Pair.fst (nth s (tuple, k)))
end
| Head (AnnH (h, a)) ->
let Head h' = frontSub (Head h) s in
Head (AnnH (h', a))
| Head (Const c) -> Head (Const c)
| Obj ( Root ( _ , h , ) ) - > frontSub ( Head h ) s
| Obj u -> Obj (Clo (u, s))
| Undef -> Undef
| Head (MMVar (n, s')) -> Head (MMVar (n, comp s' s))
| Head (FPVar (_n, _s' )) -> ft
| Head ( HMClo ( n , s ' , ( theta , sigma ) ) ) - >
* Head ( HMClo ( n , s ' , ( mt , comp sigma s ) ) ? ?
* Head (HMClo (n, s', (mt, comp sigma s)) ?? *)
dot1 ( s ) = s '
*
* Invariant :
*
* If Psi |- s : Psi '
* then s ' = 1 . ( s o ^ )
* and for all A s.t . Psi ' |- A : type
* Psi , [ s]A |- s ' : Psi ' , A
*
* If s patsub then s ' patsub
*
* Invariant:
*
* If Psi |- s : Psi'
* then s' = 1. (s o ^)
* and for all A s.t. Psi' |- A : type
* Psi, [s]A |- s' : Psi', A
*
* If s patsub then s' patsub
*)
first line is an optimization
roughly 15 % on standard suite for Twelf 1.1
Sat Feb 14 10:16:16 1998 -fp
(* dot1 s = 1 . (s o ^): lift s under one more binder.
   The [Shift 0] case is an optimization: dot1 id = id. *)
and dot1 s =
  match s with
  | Shift 0 -> s
  | s -> Dot (Head (BVar 1), comp s shift)
decSub ( x : tA ) s = ( x : tA[s ] )
*
* Invariant :
*
* If D ; Psi |- s < = Psi ' D ; Psi ' |- A < = type
* then D ; Psi |- [ s]A ] < = type
*
* Invariant:
*
* If D ; Psi |- s <= Psi' D ; Psi' |- A <= type
* then D ; Psi |- [s]A] <= type
*)
First line is an optimization suggested by cs
Sat Feb 14 18:37:44 1998 -fp
undo for now Sat Feb 14 20:22:29 1998 -fp
fun decSub ( D , Shift ( 0 ) ) = D
| decSub ( Dec ( x , A ) , s ) = Dec ( x , Clo ( A , s ) )
fun decSub (D, Shift (0)) = D
| decSub (Dec (x, A), s) = Dec (x, Clo (A, s))
*)
(* decSub (x:tA) s = (x : tA[s]): push a substitution into a typing
   declaration by closing the declared type over s. *)
let decSub (TypDecl (x, tA)) s = TypDecl (x, TClo (tA, s))
invDot1 ( s ) = s '
* invDot1 ( 1 . s ' o ^ ) = s '
*
* Invariant :
*
* s = 1 . s ' o ^
* If Psi ' |- s ' : Psi
* ( so Psi',A[s ] |- s : Psi , A )
* invDot1 (1. s' o ^) = s'
*
* Invariant:
*
* s = 1 . s' o ^
* If Psi' |- s' : Psi
* (so Psi',A[s] |- s : Psi,A)
*)
let invDot1 s = comp ( comp shift s ) invShift
invert s = s '
*
* Invariant :
*
* If D ; Psi |- s < = Psi ' ( and s patsub )
* then D ; Psi ' |- s ' < = Psi
* = i d ( comp s ' s = i d )
*
* Invariant:
*
* If D ; Psi |- s <= Psi' (and s patsub)
* then D ; Psi' |- s' <= Psi
* s.t. s o s' = id (comp s' s = id)
*)
(* invert s = s'
 *
 * If cD ; cPsi |- s <= cPsi' (and s is a pattern substitution),
 * then cD ; cPsi' |- s' <= cPsi with s o s' = id. *)
let invert s =
  (* lookup n s p: front (as BVar n) under which s maps variable p, if any *)
  let rec lookup n s p =
    match s with
    | EmptySub -> None
    | Undefs -> None
    | Shift _ -> None
    | Dot (Undef, s') -> lookup (n + 1) s' p
    | Dot (Head (BVar k), s') ->
        if k = p
        then Some (Head (BVar n))
        else lookup (n + 1) s' p
  in
  (* invert'' p si: prepend inverse fronts for variables p .. 1 onto si *)
  let rec invert'' p si =
    match p with
    | 0 -> si
    | p ->
        let front =
          match lookup 1 s p with
          | Some h -> h
          | None -> Undef
        in
        invert'' (p - 1) (Dot (front, si))
  in
  (* invert' n s maxoffset: walk s tracking its length n and the largest
     bound variable it mentions *)
  let rec invert' n s maxoffset =
    match s with
    | EmptySub ->
        invert'' maxoffset Undefs
    | Undefs ->
        invert'' maxoffset Undefs
    | Shift p ->
        invert'' p (Shift n)
    | Dot (Head (BVar k), s') ->
        invert' (n + 1) s' (max k maxoffset)
    | Dot (_, s') ->
        (* non-variable fronts contribute no invertible entry *)
        invert' (n + 1) s' maxoffset
  in
  invert' 0 s 0
strengthen s Psi = Psi '
*
* If D ; Psi '' |- s : Psi ( * and s is a pattern sub
*
* then D ; Psi' |- s : Psi and Psi' subcontext of Psi
*)
let rec strengthen s cPsi =
match (s, cPsi) with
0
Null
| (Shift _, CtxVar psi) ->
CtxVar psi
k = 1
let t' = comp t invShift in
Psi |- x : A dec where = x : A
* Psi ' |- t ' : Psi
* Psi ' |- x:[t']A dec
* Psi' |- t' : Psi
* Psi' |- x:[t']A dec
*)
DDec (strengthen t' cPsi, decSub decl t')
| (Dot (Undef, t), DDec (cPsi, _)) ->
strengthen t cPsi
| (Shift n, cPsi) ->
strengthen (Dot (Head (BVar (n + 1)), Shift (n + 1))) cPsi
isId : sub - > bool
*
* Invariant :
*
* Given Psi |- s : Psi ' , s weakensub
* isId s returns true iff s = i d and Psi ' = Psi .
*
* Invariant:
*
* Given Psi |- s: Psi', s weakensub
* isId s returns true iff s = id and Psi' = Psi.
*)
(* isId s: true iff s is a weakening substitution equal to the identity,
   i.e. each front i is BVar i and any terminal shift matches the depth. *)
let isId s =
  let rec check depth = function
    | Shift k -> k = depth
    | Dot (Head (BVar n), rest) -> n = depth + 1 && check (depth + 1) rest
    | _ -> false
  in
  check 0 s
cloInv ( U , w ) = U[w^-1 ]
*
* Invariant :
*
* If Psi |- M < = A
* Psi |- w < = Psi ' w pattern subst
* [ w^-1]M defined ( without pruning or constraints )
*
* then Psi ' |- [ w^-1]M : [ w^-1]A
*
* Effects : None
*
* Invariant:
*
* If Psi |- M <= A
* Psi |- w <= Psi' w pattern subst
* [w^-1]M defined (without pruning or constraints)
*
* then Psi' |- [w^-1]M : [w^-1]A
*
* Effects: None
*)
let cloInv ( tM , w ) = Clo ( tM , invert w )
isMId t = B
*
* Invariant :
*
* If |- t : ' , t weaken_msub
* then B holds
* iff t = i d , ' = cD
*
* Invariant:
*
* If cD |- t: cD', t weaken_msub
* then B holds
* iff t = id, cD' = cD
*)
(* isMId t: true iff the meta-substitution t is the identity, i.e. front i
   is MV i and any terminal MShift matches the number of fronts seen. *)
let isMId t =
  let rec check depth = function
    | MShift k -> k = depth
    | MDot (MV n, rest) -> n = depth && check (depth + 1) rest
    | _ -> false
  in
  check 0 t
applyMSub n t = MFt '
Invariant :
If D |- t < = D ' n - th element in D ' = A[Psi ]
then Ft ' = if t = Ft_1 .. Ft_n .. ^0
and D ; [ |t|]Psi |- Ft ' < = [ |t|]A
Invariant:
If D |- t <= D' n-th element in D' = A[Psi]
then Ft' = Ft_n if t = Ft_1 .. Ft_n .. ^0
and D ; [|t|]Psi |- Ft' <= [|t|]A
*)
(* applyMSub n t = MFt'
 *
 * Look up the n-th front of the meta-substitution t; under a terminal
 * MShift k, variable n maps to MV (k + n). *)
let rec applyMSub n t =
  match (n, t) with
  | (1, MDot (ft, _t)) -> ft
  | (n, MDot (_ft, t)) -> applyMSub (n - 1) t
  | (n, MShift k) -> MV (k + n)
identity :
*
* identity cPsi = id(cPsi ) ,
* e.g.
* identity ( psi , x : A , y : B ) = Dot ( Head ( BVar 1 , Dot ( Head ( BVar 2 , Shift ( NoCtxShift , 2 ) ) ) ) )
*
* identity cPsi = id(cPsi),
* e.g.
* identity (psi, x:A, y:B) = Dot (Head (BVar 1, Dot (Head (BVar 2, Shift (NoCtxShift, 2)))))
*)
(* identity cPsi = id(cPsi): an explicit identity substitution listing each
   declaration of cPsi, e.g. identity (psi, x:A, y:B) =
   Dot (Head (BVar 1), Dot (Head (BVar 2), Shift 2)). *)
let identity cPsi =
  let rec build depth = function
    | Null | CtxVar _ -> Shift depth
    | DDec (ctx, _) ->
        let depth' = depth + 1 in
        Dot (Head (BVar depth'), build depth' ctx)
  in
  build 0 cPsi
justCtxVar :
*
* justCtxVar cPsi = id[\psi ] where is cPsi 's context variable
* e.g.
* justCtxVar ( psi , x : A , y : B ) = Shift ( NoCtxShift , 2 )
*
* justCtxVar cPsi = id[\psi] where \psi is cPsi's context variable
* e.g.
* justCtxVar (psi, x:A, y:B) = Shift (NoCtxShift, 2)
*)
(* justCtxVar cPsi: a bare shift past all the declarations of cPsi,
   e.g. justCtxVar (psi, x:A, y:B) = Shift 2. *)
let justCtxVar cPsi =
  let rec depth acc = function
    | Null | CtxVar _ -> acc
    | DDec (ctx, _) -> depth (acc + 1) ctx
  in
  Shift (depth 0 cPsi)
|
8d72a631d311aec92752c080bd3073e4c0a59cadfcda6f082b881734132e3001 | heyarne/airsonic-ui | subs.cljs | (ns bulma.dropdown.subs
(:require [re-frame.core :as rf]))
;; NOTE: This is almost the same as bulma.modal.subs
;; Maybe we can provide some abstraction that covers both, but maybe we shouldn't
(defn visible-dropdown
  "Gives us the ID of the currently visible dropdown"
  [db _]
  (-> db :bulma :visible-dropdown))
(rf/reg-sub ::visible-dropdown visible-dropdown)
(defn visible?
  "Predicate to check the visibility of a single modal"
  [current-id [_ dropdown-id]]
  (= current-id dropdown-id))
(rf/reg-sub
::visible?
:<- [::visible-dropdown]
visible?)
| null | https://raw.githubusercontent.com/heyarne/airsonic-ui/7adb03d6e2ba0ff764796a57b7e87f62b242c9b7/src/cljs/bulma/dropdown/subs.cljs | clojure | NOTE: This is almost the same as bulma.modal.subs
Maybe we can provide some abstraction that covers both, but maybe we shouldn't | (ns bulma.dropdown.subs
(:require [re-frame.core :as rf]))
(defn visible-dropdown
"Gives us the ID of the currently visible dropdown"
[db _]
(get-in db [:bulma :visible-dropdown]))
(rf/reg-sub ::visible-dropdown visible-dropdown)
(defn visible?
"Predicate to check the visibility of a single modal"
[visible-dropdown [_ dropdown-id]]
(= visible-dropdown dropdown-id))
(rf/reg-sub
::visible?
:<- [::visible-dropdown]
visible?)
|
ee5f62ac6dd5429815a4e2f2591c071ad3b82cfd99b0b2664bc2f9e53f233814 | orestis/reseda | nasa_apod.cljs | (ns reseda.demo.nasa-apod
(:require [reseda.demo.util :refer [$] :as util]
[reseda.state :as rs]
[reseda.react :as rr]
[reseda.react.experimental :as rre]
[cljs-bean.core :refer [bean]]
["react" :as react]))
(def api-key "HquDsZLQArdVX1iaFoZGnWMD1AvoOkUEhlTtboCe" #_"DEMO_KEY")
(defn date->query
  "Formats a JS Date as the YYYY-MM-DD string the APOD API expects,
  zero-padding month and day."
  [date]
  (let [pad2  (fn [n] (if (< n 10) (str "0" n) (str n)))
        year  (.getFullYear date)
        month (inc (.getMonth date))
        day   (.getDate date)]
    (str year "-" (pad2 month) "-" (pad2 day))))
(defn query-url
  "Builds the NASA APOD endpoint URL for the given JS Date."
  [date]
  ;; The base URL was lost (leaving a bare \"=\"), which produced a malformed
  ;; request; restore the documented api.nasa.gov APOD endpoint.
  (str "https://api.nasa.gov/planetary/apod?api_key="
       api-key
       "&date=" (date->query date)))
(def day-in-millis (* 24 60 60 1000))
(defn change-date
  "Returns a new JS Date shifted by `amount` whole days from `d`."
  [d amount]
  (js/Date. (+ (.getTime d) (* amount day-in-millis))))
(defn fetch-apod
  "Kicks off an async fetch of the APOD entry for `date` (a JS Date) and
  returns it wrapped in a suspending value (rr/suspending-value); plain
  image entries also get a :suspense-url preloading image."
  [date]
  (-> date
      (query-url)
      (util/make-request)
      ;; response body is a JSON string; parse and keywordize it
      (.then (fn [text]
               (-> text (js/JSON.parse) (js->clj :keywordize-keys true))))
      ;; only \"image\" media with a URL gets a suspending preloader;
      ;; everything else (videos, errors) passes through unchanged
      (.then (fn [apod]
               (if (and (:url apod) (= "image" (:media_type apod)))
                 (assoc apod :suspense-url (rr/suspending-image (:url apod)))
                 apod)))
      (rr/suspending-value)))
(def now (js/Date.))
(defonce app-state
(atom {:date now
:apod (fetch-apod now)}))
(defonce app-store (rs/new-store app-state))
(defonce ms-store (rre/wrap-store app-store))
(def the-store ms-store)
(def useStore rre/useStore)
(defn date-button-clicked
  "Moves the picked date by `direction` days and kicks off the matching
  APOD fetch, but never past today."
  [current-date direction]
  (let [target (change-date current-date direction)]
    (when (<= target (js/Date.))
      (swap! app-state
             assoc
             :date target
             :apod (fetch-apod target)))))
(defn DatePicker
  "Date navigation row: previous/next-day buttons around the current date.
  Date changes run inside a React transition so the previous picture stays
  on screen while the next one loads; the label dims while pending."
  []
  (let [current-date (useStore the-store :date)
        ;; NOTE(review): unstable_useTransition taking a :timeoutMs option is
        ;; the old experimental API -- confirm against the pinned React build.
        [startTransition isPending] (react/unstable_useTransition #js {:timeoutMs 1500})]
    ($ "div" #js {:style #js {:display "flex"
                              :justifyContent "space-between"
                              :alignItems "center"}}
       ($ "button" #js {:onClick (fn []
                                   (startTransition #(date-button-clicked current-date -1)))}
          "Previous Day")
       ($ "strong" #js {:style (when isPending
                                 #js {:opacity "50%"})}
          (str (date->query current-date)))
       ($ "button" #js {:onClick (fn []
                                   (startTransition #(date-button-clicked current-date +1)))}
          "Next Day"))))
;; Render the APOD media.  Images deref :suspense-url (suspending until
;; the bitmap has loaded); videos embed an iframe; anything else falls
;; through to a diagnostic <pre>.
(defn ApodMedia [props]
  (let [{:keys [suspense-url url media_type]} (bean props)]
    (case media_type
      "image" ($ "img" #js {:style #js {:width "100%"}
                            :src @suspense-url})
      "video" ($ "iframe" #js {:src url
                               :type "text/html"
                               :width "640px"
                               :height "360px"})
      ($ "pre" nil "Unknown media type: " media_type url)
      )))
;; Present one APOD entry: title, the media (via ApodMedia), caption
;; with date and copyright, and the explanation paragraph.
(defn ApodComponent [props]
  (let [apod (:apod (bean props))]
    ($ "article" #js {:style #js {:width "100%"}}
       ($ "h4" nil (:title apod))
       ($ "section" nil
          ($ "figure" nil
             ($ ApodMedia #js {:url (:url apod)
                               :suspense-url (:suspense-url apod)
                               :media_type (:media_type apod)})
             ($ "figcaption" nil
                (:date apod) " "
                "Copyright: " (:copyright apod)))
          ($ "p" nil (:explanation apod))))))
(defn ApodLoader
  "Deref the suspending APOD value from the store (this may suspend the
  React tree) and hand the realized map to ApodComponent."
  []
  ($ ApodComponent #js {:apod @(useStore the-store :apod)}))
;; Page section: heading, date picker, and the APOD body wrapped in a
;; Suspense boundary whose fallback shows while the fetch/image suspends.
;; NOTE(review): ($ "hr") passes neither props nor children -- assumes
;; util/$ tolerates the single-argument form; verify against reseda.demo.util.
(defn NasaApodDemo []
  ($ "section" nil
     ($ "h2" nil "Astronomy Picture of the day")
     ($ "div" #js {}
        ($ DatePicker)
        ($ "hr")
        ($ react/Suspense #js {:fallback ($ "div" nil "Loading apod...")}
           ($ ApodLoader)))))
(comment
(:apod @app-state)
(swap! app-state assoc :date (js/Date.) :apod (fetch-apod now))
(js/console.log @(:suspense-url @(:apod @app-state)))
) | null | https://raw.githubusercontent.com/orestis/reseda/f14bf45e889859a0730a29e78a0d6c8dbec5c07e/src/reseda/demo/nasa_apod.cljs | clojure | (ns reseda.demo.nasa-apod
(:require [reseda.demo.util :refer [$] :as util]
[reseda.state :as rs]
[reseda.react :as rr]
[reseda.react.experimental :as rre]
[cljs-bean.core :refer [bean]]
["react" :as react]))
(def api-key "HquDsZLQArdVX1iaFoZGnWMD1AvoOkUEhlTtboCe" #_"DEMO_KEY")
(defn date->query [date]
(let [d (.getDate date)
m (-> (.getMonth date)
inc)
y (.getFullYear date)]
(str y "-" (when (< m 10) "0") m "-" (when (< d 10) "0") d)))
(defn query-url [date]
(str "="
api-key
"&date=" (date->query date)))
(def day-in-millis (* 24 60 60 1000))
(defn change-date [d amount]
(-> (.getTime d)
(+ (* amount day-in-millis))
(js/Date.)))
(defn fetch-apod [date]
(-> date
(query-url)
(util/make-request)
(.then (fn [text]
(-> text (js/JSON.parse) (js->clj :keywordize-keys true))))
(.then (fn [apod]
(if (and (:url apod) (= "image" (:media_type apod)))
(assoc apod :suspense-url (rr/suspending-image (:url apod)))
apod)))
(rr/suspending-value)))
(def now (js/Date.))
(defonce app-state
(atom {:date now
:apod (fetch-apod now)}))
(defonce app-store (rs/new-store app-state))
(defonce ms-store (rre/wrap-store app-store))
(def the-store ms-store)
(def useStore rre/useStore)
(defn date-button-clicked [current-date direction]
(let [new-date (change-date current-date direction)]
(when (<= new-date (js/Date.))
(swap! app-state assoc
:date new-date
:apod (fetch-apod new-date)))))
(defn DatePicker []
(let [current-date (useStore the-store :date)
[startTransition isPending] (react/unstable_useTransition #js {:timeoutMs 1500})]
($ "div" #js {:style #js {:display "flex"
:justifyContent "space-between"
:alignItems "center"}}
($ "button" #js {:onClick (fn []
(startTransition #(date-button-clicked current-date -1)))}
"Previous Day")
($ "strong" #js {:style (when isPending
#js {:opacity "50%"})}
(str (date->query current-date)))
($ "button" #js {:onClick (fn []
(startTransition #(date-button-clicked current-date +1)))}
"Next Day"))))
(defn ApodMedia [props]
(let [{:keys [suspense-url url media_type]} (bean props)]
(case media_type
"image" ($ "img" #js {:style #js {:width "100%"}
:src @suspense-url})
"video" ($ "iframe" #js {:src url
:type "text/html"
:width "640px"
:height "360px"})
($ "pre" nil "Unknown media type: " media_type url)
)))
(defn ApodComponent [props]
(let [apod (:apod (bean props))]
($ "article" #js {:style #js {:width "100%"}}
($ "h4" nil (:title apod))
($ "section" nil
($ "figure" nil
($ ApodMedia #js {:url (:url apod)
:suspense-url (:suspense-url apod)
:media_type (:media_type apod)})
($ "figcaption" nil
(:date apod) " "
"Copyright: " (:copyright apod)))
($ "p" nil (:explanation apod))))))
(defn ApodLoader []
(let [apod (useStore the-store :apod)]
($ ApodComponent #js {:apod @apod})))
(defn NasaApodDemo []
($ "section" nil
($ "h2" nil "Astronomy Picture of the day")
($ "div" #js {}
($ DatePicker)
($ "hr")
($ react/Suspense #js {:fallback ($ "div" nil "Loading apod...")}
($ ApodLoader)))))
(comment
(:apod @app-state)
(swap! app-state assoc :date (js/Date.) :apod (fetch-apod now))
(js/console.log @(:suspense-url @(:apod @app-state)))
) | |
edc2715096f266ab7cacac56e88cb323b5091774fe0885a62aee59737e8a4e36 | bcc32/projecteuler-ocaml | import.ml | open! Core
include Cmdliner
include Euler_solution_helpers
(* Build an error from a sexp, converting the [Error.t] payload into the
   [`Msg _] polymorphic variant used by Cmdliner-style results. *)
let error_s sexp =
  Result.map_error (Or_error.error_s sexp)
    ~f:(fun err -> `Msg (Error.to_string_hum err))
;;
| null | https://raw.githubusercontent.com/bcc32/projecteuler-ocaml/712f85902c70adc1ec13dcbbee456c8bfa8450b2/bin/import.ml | ocaml | open! Core
include Cmdliner
include Euler_solution_helpers
let error_s sexp =
Or_error.error_s sexp |> Result.map_error ~f:(fun e -> `Msg (Error.to_string_hum e))
;;
| |
d09fdd3c65da274a25efadf2d7999343d81ce27a09ef37195e5fc4ed8b70ba48 | elaforge/karya | Sel.hs | Copyright 2015
-- This program is distributed under the terms of the GNU General Public
-- License 3.0, see COPYING or -3.0.txt
-- | The selection type.
module Ui.Sel where
import qualified Prelude
import Prelude hiding (min, max)
import qualified Data.Tuple as Tuple
import qualified Util.Num as Num
import qualified Ui.Types as Types
import Global
import Types
-- | Index into the selection list.
type Num = Int

data Selection = Selection {
    -- | The position the selection was established at.  Since a selection can
    -- logically go off the edge of a block, this is not necessarily a valid
    -- TrackNum!
    start_track :: !TrackNum
    , start_pos :: !TrackTime
    -- | The position the selection is now at.  The tracks are an inclusive
    -- range, the pos are half-open.  This is because these pairs are meant to
    -- be symmetrical, but the c++ layer only supports half-open pos ranges.
    -- I don't think there's much I can do about this.
    --
    -- Unlike 'start_track', this should be a valid TrackNum, because cmds want
    -- to use it as the focused track.
    , cur_track :: !TrackNum
    , cur_pos :: !TrackTime
    , orientation :: !Orientation
    } deriving (Eq, Ord, Show, Read)
-- | Rendered as @Selection<o><start pair>--<cur pair>@, where @<o>@
-- encodes the 'Orientation': @_@ none, @+@ positive, @-@ negative.
instance Pretty Selection where
    pretty (Selection strack spos ctrack cpos orientation) =
        "Selection" <> o <> pretty (strack, spos) <> "--"
            <> pretty (ctrack, cpos)
        where
        o = case orientation of
            None -> "_"
            Positive -> "+"
            Negative -> "-"
-- | Direction a selection faces.  'None' is used for display selections,
-- which don't need arrows on them.
data Orientation = None | Positive | Negative
    deriving (Eq, Ord, Enum, Bounded, Show, Read)
-- | A point is a selection with no duration: both endpoints coincide.
point :: TrackNum -> TrackTime -> Orientation -> Selection
point tracknum pos orient = Selection
    { start_track = tracknum
    , start_pos = pos
    , cur_track = tracknum
    , cur_pos = pos
    , orientation = orient
    }
-- | True when the selection has zero duration.
is_point :: Selection -> Bool
is_point sel = cur_pos sel == start_pos sel
-- | Apply a function to both track endpoints of the selection.
modify_tracks :: (TrackNum -> TrackNum) -> Selection -> Selection
modify_tracks f sel =
    sel { start_track = f (start_track sel), cur_track = f (cur_track sel) }
-- | Widen the selection by @n@ tracks, moving whichever track endpoint
-- is rightmost (the start endpoint when the two coincide).
expand_tracks :: TrackNum -> Selection -> Selection
expand_tracks n sel
    | cur > start = sel { cur_track = cur + n }
    | otherwise = sel { start_track = start + n }
    where
    start = start_track sel
    cur = cur_track sel
-- | Start and end tracks, from small to large.
track_range :: Selection -> (TrackNum, TrackNum)
track_range sel = if t0 <= t1 then (t0, t1) else (t1, t0)
    where
    t0 = start_track sel
    t1 = cur_track sel
-- | TrackNums covered by the selection.  Since Selections may have out of
-- range tracks, I need the number of tracks to generate a list of valid
-- TrackNums.
tracknums :: TrackNum -> Selection -> [TrackNum]
tracknums tracks sel
    | tracks <= 0 = []
    | otherwise = [Num.clamp 0 (tracks-1) start .. Num.clamp 0 (tracks-1) end]
    where (start, end) = track_range sel
-- | The earlier of the selection's two positions.
min :: Selection -> TrackTime
min sel = Prelude.min (cur_pos sel) (start_pos sel)

-- | The later of the selection's two positions.
max :: Selection -> TrackTime
max sel = Prelude.max (cur_pos sel) (start_pos sel)

-- | Start and end points, from small to large.
range :: Selection -> (TrackTime, TrackTime)
range sel = (min sel, max sel)
-- | Translate the selection's orientation to an event orientation;
-- 'None' defaults to positive.
event_orientation :: Selection -> Types.Orientation
event_orientation sel
    | orientation sel == Negative = Types.Negative
    | otherwise = Types.Positive
-- | Absolute distance between the selection's two positions.
duration :: Selection -> TrackTime
duration sel = abs (cur_pos sel - start_pos sel)
-- | Set the selection's duration, measured from whichever endpoint is
-- earlier.  Negative durations are clamped to 0, collapsing to a point.
set_duration :: TrackTime -> Selection -> Selection
set_duration dur sel
    | cur > start = sel { cur_pos = start + Prelude.max 0 dur }
    | otherwise = sel { start_pos = cur + Prelude.max 0 dur }
    where
    start = start_pos sel
    cur = cur_pos sel
-- | Extend the current track and pos, but keep the start track and pos the
-- same.
merge :: Selection -> Selection -> Selection
merge sel1 sel2 = Selection
    { start_track = start_track sel1
    , start_pos = start_pos sel1
    , cur_track = cur_track sel2
    , cur_pos = cur_pos sel2
    , orientation = orientation sel2
    }
-- | Make a selection that covers both the given selections.  It tries to set
-- start and cur values based on the direction of the merge, assuming you are
-- starting with the first selection and adding the second.
union :: Selection -> Selection -> Selection
union sel1 sel2 = Selection strack spos ctrack cpos (orientation sel2)
    where
    -- cur gets the far edge when sel2 lies at-or-after sel1, otherwise
    -- the bounds are swapped so cur points back toward sel2.
    (strack, ctrack) =
        if cur_track sel2 >= cur_track sel1 then se else Tuple.swap se
        where
        se = (Prelude.min s1 s2, Prelude.max e1 e2)
        (s1, e1) = track_range sel1
        (s2, e2) = track_range sel2
    (spos, cpos) = if cur_pos sel2 >= cur_pos sel1 then se else Tuple.swap se
        where
        se = (Prelude.min s1 s2, Prelude.max e1 e2)
        (s1, e1) = range sel1
        (s2, e2) = range sel2
-- | Shift both endpoints of the selection by a time offset.
move :: TrackTime -> Selection -> Selection
move t sel = sel
    { start_pos = start_pos sel + t
    , cur_pos = cur_pos sel + t
    }
| null | https://raw.githubusercontent.com/elaforge/karya/471a2131f5a68b3b10b1a138e6f9ed1282980a18/Ui/Sel.hs | haskell | This program is distributed under the terms of the GNU General Public
License 3.0, see COPYING or -3.0.txt
| The selection type.
| Index into the the selection list.
| The position the selection was established at. Since a selection can
logically go off the edge of a block, this is not necessarily a valid
TrackNum!
| The position the selection is now at. The tracks are an inclusive
I don't think there's much I can do about this.
to use it as the focused track.
| None is used for display selections, which don't need arrows on them.
| A point is a selection with no duration.
| Start and end tracks, from small to large.
range tracks, I need the number of tracks to generate a list of valid
TrackNums.
| Start and end points, from small to large.
| Extend the current track and pos, but keep the start track and pos the
same.
| Make a selection that covers both the given selections. It tries to set
start and cur values based on the direction of the merge, assuming you are | Copyright 2015
module Ui.Sel where
import qualified Prelude
import Prelude hiding (min, max)
import qualified Data.Tuple as Tuple
import qualified Util.Num as Num
import qualified Ui.Types as Types
import Global
import Types
type Num = Int
data Selection = Selection {
start_track :: !TrackNum
, start_pos :: !TrackTime
range , the pos are half - open . This is because these pairs are meant to
be symmetrical , but the c++ layer only supports half - open pos ranges .
Unlike ' start_track ' , this should be a valid , because cmds want
, cur_track :: !TrackNum
, cur_pos :: !TrackTime
, orientation :: !Orientation
} deriving (Eq, Ord, Show, Read)
instance Pretty Selection where
pretty (Selection strack spos ctrack cpos orientation) =
"Selection" <> o <> pretty (strack, spos) <> "--"
<> pretty (ctrack, cpos)
where
o = case orientation of
None -> "_"
Positive -> "+"
Negative -> "-"
data Orientation = None | Positive | Negative
deriving (Eq, Ord, Enum, Bounded, Show, Read)
point :: TrackNum -> TrackTime -> Orientation -> Selection
point tracknum pos orientation = Selection
{ start_track = tracknum, start_pos = pos
, cur_track = tracknum, cur_pos = pos
, orientation = orientation
}
is_point :: Selection -> Bool
is_point sel = start_pos sel == cur_pos sel
modify_tracks :: (TrackNum -> TrackNum) -> Selection -> Selection
modify_tracks f sel = sel
{ start_track = f (start_track sel)
, cur_track = f (cur_track sel)
}
expand_tracks :: TrackNum -> Selection -> Selection
expand_tracks n sel
| cur > start = sel { cur_track = cur + n }
| otherwise = sel { start_track = start + n }
where
start = start_track sel
cur = cur_track sel
track_range :: Selection -> (TrackNum, TrackNum)
track_range sel = (Prelude.min track0 track1, Prelude.max track0 track1)
where (track0, track1) = (start_track sel, cur_track sel)
| TrackNums covered by the selection . Since Selections may have out of
tracknums :: TrackNum -> Selection -> [TrackNum]
tracknums tracks sel
| tracks <= 0 = []
| otherwise = [Num.clamp 0 (tracks-1) start .. Num.clamp 0 (tracks-1) end]
where (start, end) = track_range sel
min :: Selection -> TrackTime
min sel = Prelude.min (start_pos sel) (cur_pos sel)
max :: Selection -> TrackTime
max sel = Prelude.max (start_pos sel) (cur_pos sel)
range :: Selection -> (TrackTime, TrackTime)
range sel = (min sel, max sel)
event_orientation :: Selection -> Types.Orientation
event_orientation sel = case orientation sel of
Negative -> Types.Negative
_ -> Types.Positive
duration :: Selection -> TrackTime
duration sel = abs (start_pos sel - cur_pos sel)
set_duration :: TrackTime -> Selection -> Selection
set_duration dur sel
| cur > start = sel { cur_pos = start + Prelude.max 0 dur }
| otherwise = sel { start_pos = cur + Prelude.max 0 dur }
where
start = start_pos sel
cur = cur_pos sel
merge :: Selection -> Selection -> Selection
merge (Selection strack spos _ _ _) (Selection _ _ ctrack cpos orient) =
Selection strack spos ctrack cpos orient
starting with the first selection and adding the second .
union :: Selection -> Selection -> Selection
union sel1 sel2 = Selection strack spos ctrack cpos (orientation sel2)
where
(strack, ctrack) =
if cur_track sel2 >= cur_track sel1 then se else Tuple.swap se
where
se = (Prelude.min s1 s2, Prelude.max e1 e2)
(s1, e1) = track_range sel1
(s2, e2) = track_range sel2
(spos, cpos) = if cur_pos sel2 >= cur_pos sel1 then se else Tuple.swap se
where
se = (Prelude.min s1 s2, Prelude.max e1 e2)
(s1, e1) = range sel1
(s2, e2) = range sel2
move :: TrackTime -> Selection -> Selection
move t sel = sel { start_pos = start_pos sel + t, cur_pos = cur_pos sel + t }
|
609a4dd718303ca43c1b0ef8680ccc12392284bc229ae421bf98bdd36761dad4 | gndl/graffophone | graffophone.ml |
* Copyright ( C ) 2015
*
* All rights reserved . This file is distributed under the terms of the
* GNU General Public License version 3.0 .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2015 Gaëtan Dubreil
*
* All rights reserved.This file is distributed under the terms of the
* GNU General Public License version 3.0.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
open Usual
(* Entry point: enable backtraces, bring up the audio device and the
   plugin set, wire the session controller to the graph and application
   views, then hand control to the GTK main loop.  Any uncaught
   exception is traced (message + backtrace) instead of aborting
   silently. *)
let () =
  Printexc.record_backtrace true;
  try
    Device.initialize();
    PluginsManager.loadPlugins ();
    let ssnCtrl = new SessionControler.c in
    let graphView = new GraphView.c ssnCtrl#graph in
    let appView = new ApplicationView.c ssnCtrl graphView in
    ssnCtrl#init();
    appView#init();
    GtkThread.main ();
    Device.terminate();
  with exc -> (
    traceMagenta(Printexc.to_string exc);
    traceYellow(Printexc.get_backtrace())
  )
| null | https://raw.githubusercontent.com/gndl/graffophone/71a12fcf8e799bb8ebfc37141b300ecbc9475c43/src/graffophone.ml | ocaml |
* Copyright ( C ) 2015
*
* All rights reserved . This file is distributed under the terms of the
* GNU General Public License version 3.0 .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place - Suite 330 , Boston , MA 02111 - 1307 , USA .
* Copyright (C) 2015 Gaëtan Dubreil
*
* All rights reserved.This file is distributed under the terms of the
* GNU General Public License version 3.0.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
*)
open Usual
let () =
Printexc.record_backtrace true;
try
Device.initialize();
PluginsManager.loadPlugins ();
let ssnCtrl = new SessionControler.c in
let graphView = new GraphView.c ssnCtrl#graph in
let appView = new ApplicationView.c ssnCtrl graphView in
ssnCtrl#init();
appView#init();
GtkThread.main ();
Device.terminate();
with exc -> (
traceMagenta(Printexc.to_string exc);
traceYellow(Printexc.get_backtrace())
)
| |
165d125a87a57029dab57b15efec95ad3f79da6ae9675e5353daa7f851f470a6 | jamesmacaulay/cljs-promises | async.cljs | (ns cljs-promises.async
(:require [cljs.core.async :as async]
[cljs.core.async.impl.protocols :as impl]
[cljs.core.async.impl.dispatch :as dispatch]
[cljs-promises.core]))
(defn extend-promises-as-channels!
  "If you want, you can globally extend Promise to act as a one-way channel which
  can only be taken from, and which starts producing a never-ending stream of constants
  once the promise resolves. `value-transform` and `error-transform` are functions
  which are applied to the value or error when the Promise resolves or rejects.
  Both `value-transform` and `error-transform` default to identity. Takes an optional
  `promise-constructor` to extend, defaulting to `js/Promise`."
  ([]
   (extend-promises-as-channels! identity))
  ([value-transform]
   (extend-promises-as-channels! value-transform identity))
  ([value-transform error-transform]
   (extend-promises-as-channels! value-transform error-transform js/Promise))
  ([value-transform error-transform promise-constructor]
   ;; ReadPort makes promises directly takable in go blocks; Channel's
   ;; close! is a no-op because a promise cannot be closed.
   (extend-type promise-constructor
     impl/ReadPort
     (take! [promise handler]
       (.then promise
              ;; `cljs.core.async.impl.dispatch/run` runs the handler in the next
              ;; run of the event loop, making it possible here for errors avoid
              ;; promises' error capturing. Otherwise, errors from the promise can
              ;; never escape `go` blocks.
              (fn [val]
                (dispatch/run #((impl/commit handler) (value-transform val))))
              (fn [err]
                (dispatch/run #((impl/commit handler) (error-transform err)))))
       nil) ;; return nil to say "we're waiting on the value"
     impl/Channel
     (close! [_]))))
(defn extend-promises-as-pair-channels!
  "Globally extends Promises with `extend-promises-as-channels!` such that the
  values taken from them are vector pairs of [value nil] in the case of fulfillment,
  or [nil error] in the case of rejection. Takes an optional `promise-constructor`
  to extend, defaulting to `js/Promise`."
  ([] (extend-promises-as-pair-channels! js/Promise))
  ([promise-constructor]
   (extend-promises-as-channels! (fn [v] [v nil])
                                 (fn [e] [nil e])
                                 promise-constructor)))
(defn consume-pair
  "Unpack a [value error] pair: throw the error when one is present,
  otherwise return the value (which may itself be nil).
  See also `cljs-promises.async/<?`."
  [pair]
  (let [err (second pair)]
    (when err (throw err))
    (first pair)))
(defn value-port
  "Wraps a promise and returns a ReadPort (a read-only channel-like). When the
  promise fulfills with a value, that value is sent constantly on the channel
  (the value might be nil). When the promise rejects with an error, nil is sent
  constantly."
  [promise]
  (reify
    impl/ReadPort
    (take! [_ handler]
      (.then promise
             (fn [val]
               (dispatch/run #((impl/commit handler) val)))
             (fn [_]
               (dispatch/run #((impl/commit handler) nil))))
      ;; nil return means "no value yet"; the handler is committed on a
      ;; later event-loop turn, once the promise settles.
      nil)))
(defn error-port
  "The reverse of `value-port`, passing along errors when `promise` rejects and sending
  nils when `promise` fulfills with a value."
  [promise]
  (reify
    impl/ReadPort
    (take! [_ handler]
      (.then promise
             (fn [_]
               (dispatch/run #((impl/commit handler) nil)))
             (fn [err]
               (dispatch/run #((impl/commit handler) err))))
      ;; nil return means "no value yet"; the handler is committed on a
      ;; later event-loop turn, once the promise settles.
      nil)))
(defn pair-port
  "Returns a ReadPort which sends [value nil] when `promise` fulfills, and [nil error]
  when `promise` rejects.  Pairs compose with `consume-pair` to rethrow."
  [promise]
  (reify
    impl/ReadPort
    (take! [_ handler]
      (.then promise
             (fn [val]
               (dispatch/run #((impl/commit handler) [val nil])))
             (fn [err]
               (dispatch/run #((impl/commit handler) [nil err]))))
      ;; nil return means "no value yet"; the handler is committed on a
      ;; later event-loop turn, once the promise settles.
      nil)))
(defn take-as-promise!
  "Waits for the next value from `ch` and returns a promise of that value."
  [ch]
  ;; The promise only ever resolves: a channel take cannot fail, so the
  ;; reject callback is intentionally ignored.
  (cljs-promises.core/promise (fn [resolve _]
                                (async/take! ch resolve))))
| null | https://raw.githubusercontent.com/jamesmacaulay/cljs-promises/9919912301eff6a0d5dd6af42be231c60eeaef01/src/cljs_promises/async.cljs | clojure | `cljs.core.async.impl.dispatch/run` runs the handler in the next
run of the event loop, making it possible here for errors avoid
promises' error capturing. Otherwise, errors from the promise can
never escape `go` blocks.
return nil to say "we're waiting on the value" | (ns cljs-promises.async
(:require [cljs.core.async :as async]
[cljs.core.async.impl.protocols :as impl]
[cljs.core.async.impl.dispatch :as dispatch]
[cljs-promises.core]))
(defn extend-promises-as-channels!
"If you want, you can globally extend Promise to act as a one-way channel which
can only be taken from, and which starts producing a never-ending stream of constants
once the promise resolves. `value-transform` and `error-transform` are functions
which are applied to the value or error when the Promise resolves or rejects.
Both `value-transform` and `error-transform` default to identity. Takes an optional
`promise-constructor` to extend, defaulting to `js/Promise`."
([]
(extend-promises-as-channels! identity))
([value-transform]
(extend-promises-as-channels! value-transform identity))
([value-transform error-transform]
(extend-promises-as-channels! value-transform error-transform js/Promise))
([value-transform error-transform promise-constructor]
(extend-type promise-constructor
impl/ReadPort
(take! [promise handler]
(.then promise
(fn [val]
(dispatch/run #((impl/commit handler) (value-transform val))))
(fn [err]
(dispatch/run #((impl/commit handler) (error-transform err)))))
impl/Channel
(close! [_]))))
(defn extend-promises-as-pair-channels!
"Globally extends Promises with `extend-promises-as-channels!` such that the
values taken from them are vector pairs of [value nil] in the case of fulfillment,
or [nil error] in the case of rejection. Takes an optional `promise-constructor`
to extend, defaulting to `js/Promise`."
([]
(extend-promises-as-pair-channels! js/Promise))
([promise-constructor]
(extend-promises-as-channels! (fn [val] [val nil])
(fn [err] [nil err])
promise-constructor)))
(defn consume-pair
"When passed a [value nil] pair, returns value. When passed a [nil error] pair,
throws error. See also `cljs-promises.async/<?`."
[[val err]]
(if err
(throw err)
val))
(defn value-port
"Wraps a promise and returns a ReadPort (a read-only channel-like). When the
promise fulfills with a value, that value is sent constantly on the channel
(the value might be nil). When the promise rejects with an error, nil is sent
constantly."
[promise]
(reify
impl/ReadPort
(take! [_ handler]
(.then promise
(fn [val]
(dispatch/run #((impl/commit handler) val)))
(fn [_]
(dispatch/run #((impl/commit handler) nil))))
nil)))
(defn error-port
"The reverse of `value-port`, passing along errors when `promise` rejects and sending
nils when `promise` fulfills with a value."
[promise]
(reify
impl/ReadPort
(take! [_ handler]
(.then promise
(fn [_]
(dispatch/run #((impl/commit handler) nil)))
(fn [err]
(dispatch/run #((impl/commit handler) err))))
nil)))
(defn pair-port
"Returns a ReadPort which sends [value nil] when `promise` fulfills, and [nil error]
when `promise` rejects."
[promise]
(reify
impl/ReadPort
(take! [_ handler]
(.then promise
(fn [val]
(dispatch/run #((impl/commit handler) [val nil])))
(fn [err]
(dispatch/run #((impl/commit handler) [nil err]))))
nil)))
(defn take-as-promise!
"Waits for the next value from `ch` and returns a promise of that value."
[ch]
(cljs-promises.core/promise (fn [resolve _]
(async/take! ch resolve))))
|
e38a1c45b31aa113b39886531c944b5779b7d14de39d06a5275f6c6c1afc73e6 | libguestfs/virt-v2v | output_vdsm.ml | virt - v2v
* Copyright ( C ) 2009 - 2021 Red Hat Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License along
* with this program ; if not , write to the Free Software Foundation , Inc. ,
* 51 Franklin Street , Fifth Floor , Boston , USA .
* Copyright (C) 2009-2021 Red Hat Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*)
open Printf
open Unix
open Std_utils
open Tools_utils
open Common_gettext.Gettext
open Types
open Utils
open Output
module VDSM = struct
  (* Parsed -o vdsm options, in the order produced by [parse_options]:
     output allocation, format, VM name, storage (-os) path, per-disk
     image UUIDs, per-disk volume UUIDs, VM UUID, OVF output directory,
     qcow2 compat level, OVF flavour. *)
  type poptions = Types.output_allocation * string * string * string *
                  string list * string list * string * string *
                  string * Create_ovf.ovf_flavour

  (* State carried from [setup] to [finalize]: data-domain mountpoint,
     data-domain UUID, disk sizes. *)
  type t = string * string * int64 list
let to_string options = "-o vdsm"
  (* Print the -oo help text for this output mode; the supported OVF
     flavour names are interpolated from Create_ovf. *)
  let query_output_options () =
    let ovf_flavours_str = String.concat "|" Create_ovf.ovf_flavours in
    printf (f_"Output options (-oo) which can be used with -o vdsm:
   -oo vdsm-compat=0.10|1.1     Write qcow2 with compat=0.10|1.1
                                    (default: 0.10)
   -oo vdsm-vm-uuid=UUID        VM UUID (required)
   -oo vdsm-ovf-output=DIR      OVF metadata directory (required)
   -oo vdsm-ovf-flavour=%s
                                Set the type of generated OVF (default: rhvexp)
For each disk you must supply one of each of these options:
   -oo vdsm-image-uuid=UUID     Image directory UUID
   -oo vdsm-vol-uuid=UUID       Disk volume UUID
") ovf_flavours_str
let parse_options options source =
if options.output_password <> None then
error_option_cannot_be_used_in_output_mode "vdsm" "-op";
let vm_uuid = ref None in
let ovf_output = ref None in (* default "." *)
let compat = ref "0.10" in
let ovf_flavour = ref Create_ovf.RHVExportStorageDomain in
let image_uuids = ref [] in
let vol_uuids = ref [] in
List.iter (
function
| "vdsm-compat", "0.10" -> compat := "0.10"
| "vdsm-compat", "1.1" -> compat := "1.1"
| "vdsm-compat", v ->
error (f_"-o vdsm: unknown vdsm-compat level ‘%s’") v
| "vdsm-vm-uuid", v ->
if !vm_uuid <> None then
error (f_"-o vdsm: -oo vdsm-vm-uuid set more than once");
vm_uuid := Some v;
| "vdsm-ovf-output", v ->
if !ovf_output <> None then
error (f_"-o vdsm: -oo vdsm-ovf-output set more than once");
ovf_output := Some v;
| "vdsm-ovf-flavour", v ->
ovf_flavour := Create_ovf.ovf_flavour_of_string v
| "vdsm-image-uuid", v ->
List.push_front v image_uuids
| "vdsm-vol-uuid", v ->
List.push_front v vol_uuids
| k, _ ->
error (f_"-o vdsm: unknown output option ‘-oo %s’") k
) options.output_options;
let compat = !compat in
let image_uuids = List.rev !image_uuids in
let vol_uuids = List.rev !vol_uuids in
if image_uuids = [] || vol_uuids = [] then
error (f_"-o vdsm: either -oo vdsm-vol-uuid or \
-oo vdsm-vm-uuid was not specified");
let vm_uuid =
match !vm_uuid with
| None ->
error (f_"-o vdsm: -oo vdsm-image-uuid was not specified")
| Some uuid -> uuid in
let ovf_output = Option.value ~default:"." !ovf_output in
let ovf_flavour = !ovf_flavour in
(* -os must be set, but at this point we cannot check it. *)
let output_storage =
match options.output_storage with
| None -> error (f_"-o vdsm: -os option was not specified")
| Some d when not (is_directory d) ->
error (f_"-os %s: output directory does not exist \
or is not a directory") d
| Some d -> d in
let output_name = Option.value ~default:source.s_name options.output_name in
(options.output_alloc, options.output_format,
output_name, output_storage,
image_uuids, vol_uuids, vm_uuid, ovf_output,
compat, ovf_flavour)
  (* Create the per-disk output files, their VDSM .meta sidecars and the
     NBD servers that will receive the converted data.  Returns the
     (mountpoint, data-domain uuid, disk sizes) triple used by
     [finalize].  (Review note: several comment delimiters below were
     stripped during extraction and have been restored; the code is
     unchanged.) *)
  let setup dir options source =
    error_if_disk_count_gt dir 23;
    let disks = get_disks dir in
    let output_alloc, output_format,
        output_name, output_storage,
        image_uuids, vol_uuids, vm_uuid, ovf_output,
        compat, ovf_flavour = options in

    if List.length image_uuids <> List.length disks ||
       List.length vol_uuids <> List.length disks then
      error (f_"the number of ‘-oo vdsm-image-uuid’ and ‘-oo vdsm-vol-uuid’ \
                parameters passed on the command line has to match the \
                number of guest disk images (for this guest: %d)")
        (List.length disks);

    let dd_mp, dd_uuid =
      let fields =
        String.nsplit "/" output_storage in (* ... "data-center" "UUID" *)
      let fields = List.rev fields in (* "UUID" "data-center" ... *)
      let fields = List.dropwhile ((=) "") fields in
      match fields with
      | uuid :: rest when String.length uuid = 36 ->
        let mp = String.concat "/" (List.rev rest) in
        mp, uuid
      | _ ->
        error (f_"vdsm: invalid -os parameter \
                  does not contain a valid UUID: %s")
          output_storage in

    debug "VDSM: DD mountpoint: %s\nVDSM: DD UUID: %s" dd_mp dd_uuid;

    (* Note that VDSM has to create all these directories. *)
    let images_dir = dd_mp // dd_uuid // "images" in
    List.iter (
      fun image_uuid ->
        let d = images_dir // image_uuid in
        if not (is_directory d) then
          error (f_"image directory (%s) does not exist or is not a directory")
            d
    ) image_uuids;

    (* Note that VDSM has to create this directory too. *)
    if not (is_directory ovf_output) then
      error (f_"OVF (metadata) directory (%s) does not exist or \
                is not a directory")
        ovf_output;

    debug "VDSM: OVF (metadata) directory: %s" ovf_output;

    (* The final directory structure should look like this:
     *   /<MP>/<ESD_UUID>/images/
     *      <IMAGE_UUID_1>/<VOL_UUID_1>        # first disk
     *      <IMAGE_UUID_1>/<VOL_UUID_1>.meta   # first disk
     *      <IMAGE_UUID_2>/<VOL_UUID_2>        # second disk
     *      <IMAGE_UUID_2>/<VOL_UUID_2>.meta   # second disk
     *      <IMAGE_UUID_3>/<VOL_UUID_3>        # etc
     *      <IMAGE_UUID_3>/<VOL_UUID_3>.meta   #
     *)

    (* Create the target filenames. *)
    let filenames =
      List.map (
        fun (image_uuid, vol_uuid) ->
          let filename = images_dir // image_uuid // vol_uuid in
          debug "VDSM: disk: %s" filename;
          filename
      ) (List.combine image_uuids vol_uuids) in

    (* Generate the .meta files associated with each volume. *)
    let sizes = List.map snd disks in
    let metas =
      Create_ovf.create_meta_files output_alloc output_format
        dd_uuid image_uuids sizes in
    List.iter (
      fun (filename, meta) ->
        let meta_filename = filename ^ ".meta" in
        with_open_out meta_filename (fun chan -> output_string chan meta)
    ) (List.combine filenames metas);

    (* Set up the NBD servers. *)
    List.iter (
      fun ((i, size), filename) ->
        let socket = sprintf "%s/out%d" dir i in
        On_exit.unlink socket;

        (* Create the actual output disk. *)
        output_to_local_file output_alloc output_format filename size socket
    ) (List.combine disks filenames);

    (* Save parameters since we need them during finalization. *)
    let t = dd_mp, dd_uuid, sizes in
    t
let finalize dir options t source inspect target_meta =
let output_alloc, output_format,
output_name, output_storage,
image_uuids, vol_uuids, vm_uuid, ovf_output,
compat, ovf_flavour = options in
let dd_mp, dd_uuid, sizes = t in
(* Create the metadata. *)
let ovf = Create_ovf.create_ovf source inspect target_meta sizes
output_alloc output_format output_name dd_uuid
image_uuids
vol_uuids
dir
vm_uuid
ovf_flavour in
(* Write it to the metadata file. *)
let file = ovf_output // vm_uuid ^ ".ovf" in
with_open_out file (fun chan -> DOM.doc_to_chan chan ovf)
let request_size = None
end
| null | https://raw.githubusercontent.com/libguestfs/virt-v2v/8ad152afc4dced17e26b40d3fe8f585da99c8816/output/output_vdsm.ml | ocaml | default "."
-os must be set, but at this point we cannot check it.
... "data-center" "UUID"
"UUID" "data-center" ...
Create the target filenames.
Create the actual output disk.
Save parameters since we need them during finalization.
Create the metadata.
Write it to the metadata file. | virt - v2v
* Copyright ( C ) 2009 - 2021 Red Hat Inc.
*
* This program is free software ; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation ; either version 2 of the License , or
* ( at your option ) any later version .
*
* This program is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the
* GNU General Public License for more details .
*
* You should have received a copy of the GNU General Public License along
* with this program ; if not , write to the Free Software Foundation , Inc. ,
* 51 Franklin Street , Fifth Floor , Boston , USA .
* Copyright (C) 2009-2021 Red Hat Inc.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*)
open Printf
open Unix
open Std_utils
open Tools_utils
open Common_gettext.Gettext
open Types
open Utils
open Output
module VDSM = struct
type poptions = Types.output_allocation * string * string * string *
string list * string list * string * string *
string * Create_ovf.ovf_flavour
type t = string * string * int64 list
let to_string options = "-o vdsm"
let query_output_options () =
let ovf_flavours_str = String.concat "|" Create_ovf.ovf_flavours in
printf (f_"Output options (-oo) which can be used with -o vdsm:
-oo vdsm-compat=0.10|1.1 Write qcow2 with compat=0.10|1.1
(default: 0.10)
-oo vdsm-vm-uuid=UUID VM UUID (required)
-oo vdsm-ovf-output=DIR OVF metadata directory (required)
-oo vdsm-ovf-flavour=%s
Set the type of generated OVF (default: rhvexp)
For each disk you must supply one of each of these options:
-oo vdsm-image-uuid=UUID Image directory UUID
-oo vdsm-vol-uuid=UUID Disk volume UUID
") ovf_flavours_str
let parse_options options source =
if options.output_password <> None then
error_option_cannot_be_used_in_output_mode "vdsm" "-op";
let vm_uuid = ref None in
let compat = ref "0.10" in
let ovf_flavour = ref Create_ovf.RHVExportStorageDomain in
let image_uuids = ref [] in
let vol_uuids = ref [] in
List.iter (
function
| "vdsm-compat", "0.10" -> compat := "0.10"
| "vdsm-compat", "1.1" -> compat := "1.1"
| "vdsm-compat", v ->
error (f_"-o vdsm: unknown vdsm-compat level ‘%s’") v
| "vdsm-vm-uuid", v ->
if !vm_uuid <> None then
error (f_"-o vdsm: -oo vdsm-vm-uuid set more than once");
vm_uuid := Some v;
| "vdsm-ovf-output", v ->
if !ovf_output <> None then
error (f_"-o vdsm: -oo vdsm-ovf-output set more than once");
ovf_output := Some v;
| "vdsm-ovf-flavour", v ->
ovf_flavour := Create_ovf.ovf_flavour_of_string v
| "vdsm-image-uuid", v ->
List.push_front v image_uuids
| "vdsm-vol-uuid", v ->
List.push_front v vol_uuids
| k, _ ->
error (f_"-o vdsm: unknown output option ‘-oo %s’") k
) options.output_options;
let compat = !compat in
let image_uuids = List.rev !image_uuids in
let vol_uuids = List.rev !vol_uuids in
if image_uuids = [] || vol_uuids = [] then
error (f_"-o vdsm: either -oo vdsm-vol-uuid or \
-oo vdsm-vm-uuid was not specified");
let vm_uuid =
match !vm_uuid with
| None ->
error (f_"-o vdsm: -oo vdsm-image-uuid was not specified")
| Some uuid -> uuid in
let ovf_output = Option.value ~default:"." !ovf_output in
let ovf_flavour = !ovf_flavour in
let output_storage =
match options.output_storage with
| None -> error (f_"-o vdsm: -os option was not specified")
| Some d when not (is_directory d) ->
error (f_"-os %s: output directory does not exist \
or is not a directory") d
| Some d -> d in
let output_name = Option.value ~default:source.s_name options.output_name in
(options.output_alloc, options.output_format,
output_name, output_storage,
image_uuids, vol_uuids, vm_uuid, ovf_output,
compat, ovf_flavour)
let setup dir options source =
error_if_disk_count_gt dir 23;
let disks = get_disks dir in
let output_alloc, output_format,
output_name, output_storage,
image_uuids, vol_uuids, vm_uuid, ovf_output,
compat, ovf_flavour = options in
if List.length image_uuids <> List.length disks ||
List.length vol_uuids <> List.length disks then
error (f_"the number of ‘-oo vdsm-image-uuid’ and ‘-oo vdsm-vol-uuid’ \
parameters passed on the command line has to match the \
number of guest disk images (for this guest: %d)")
(List.length disks);
let dd_mp, dd_uuid =
let fields =
let fields = List.dropwhile ((=) "") fields in
match fields with
| uuid :: rest when String.length uuid = 36 ->
let mp = String.concat "/" (List.rev rest) in
mp, uuid
| _ ->
error (f_"vdsm: invalid -os parameter \
does not contain a valid UUID: %s")
output_storage in
debug "VDSM: DD mountpoint: %s\nVDSM: DD UUID: %s" dd_mp dd_uuid;
Note that VDSM has to create all these directories .
let images_dir = dd_mp // dd_uuid // "images" in
List.iter (
fun image_uuid ->
let d = images_dir // image_uuid in
if not (is_directory d) then
error (f_"image directory (%s) does not exist or is not a directory")
d
) image_uuids;
Note that VDSM has to create this directory too .
if not (is_directory ovf_output) then
error (f_"OVF (metadata) directory (%s) does not exist or \
is not a directory")
ovf_output;
debug "VDSM: OVF (metadata) directory: %s" ovf_output;
The final directory structure should look like this :
* /<MP>/<ESD_UUID>/images/
* < IMAGE_UUID_1>/<VOL_UUID_1 > # first disk
* < IMAGE_UUID_1>/<VOL_UUID_1>.meta # first disk
* < IMAGE_UUID_2>/<VOL_UUID_2 > # second disk
* < IMAGE_UUID_2>/<VOL_UUID_2>.meta # second disk
* < IMAGE_UUID_3>/<VOL_UUID_3 > # etc
* < IMAGE_UUID_3>/<VOL_UUID_3>.meta #
* /<MP>/<ESD_UUID>/images/
* <IMAGE_UUID_1>/<VOL_UUID_1> # first disk
* <IMAGE_UUID_1>/<VOL_UUID_1>.meta # first disk
* <IMAGE_UUID_2>/<VOL_UUID_2> # second disk
* <IMAGE_UUID_2>/<VOL_UUID_2>.meta # second disk
* <IMAGE_UUID_3>/<VOL_UUID_3> # etc
* <IMAGE_UUID_3>/<VOL_UUID_3>.meta #
*)
let filenames =
List.map (
fun (image_uuid, vol_uuid) ->
let filename = images_dir // image_uuid // vol_uuid in
debug "VDSM: disk: %s" filename;
filename
) (List.combine image_uuids vol_uuids) in
Generate the .meta files associated with each volume .
let sizes = List.map snd disks in
let metas =
Create_ovf.create_meta_files output_alloc output_format
dd_uuid image_uuids sizes in
List.iter (
fun (filename, meta) ->
let meta_filename = filename ^ ".meta" in
with_open_out meta_filename (fun chan -> output_string chan meta)
) (List.combine filenames metas);
Set up the NBD servers .
List.iter (
fun ((i, size), filename) ->
let socket = sprintf "%s/out%d" dir i in
On_exit.unlink socket;
output_to_local_file output_alloc output_format filename size socket
) (List.combine disks filenames);
let t = dd_mp, dd_uuid, sizes in
t
let finalize dir options t source inspect target_meta =
let output_alloc, output_format,
output_name, output_storage,
image_uuids, vol_uuids, vm_uuid, ovf_output,
compat, ovf_flavour = options in
let dd_mp, dd_uuid, sizes = t in
let ovf = Create_ovf.create_ovf source inspect target_meta sizes
output_alloc output_format output_name dd_uuid
image_uuids
vol_uuids
dir
vm_uuid
ovf_flavour in
let file = ovf_output // vm_uuid ^ ".ovf" in
with_open_out file (fun chan -> DOM.doc_to_chan chan ovf)
let request_size = None
end
|
c382914de7195f006fe8bc71facdef4b75494aee9c2d783a838ee27c73be72e1 | brownplt/LambdaS5 | ljs_pretty_value.ml | open Prelude
open Ljs_values
open Ljs_pretty
open Format
open FormatExt
let pretty_var_loc loc = text ("#" ^ Store.print_loc loc)
let pretty_obj_loc loc = text ("@" ^ Store.print_loc loc)
let pretty_env env =
let pretty_bind (var, loc) = horz [text var; text "="; pretty_var_loc loc] in
braces (vert (map pretty_bind (IdMap.bindings env)))
let pretty_value value = match value with
| ObjLoc loc -> pretty_obj_loc loc
| Closure (env, args, body) ->
vert [text "let";
pretty_env env;
horz [text "in func";
parens (squish (intersperse (text ",") (map text args)))];
braces (exp body)]
| primitive -> text (Ljs_values.pretty_value primitive)
let rec pretty_value_store v store = match v with
| ObjLoc loc -> pretty_obj store (get_obj store loc)
| _ -> pretty_value v
and pretty_obj store (avs, props) =
let proplist = IdMap.fold (fun k v l -> (k, v)::l) props [] in
match proplist with
| [] -> braces (pretty_attrsv avs store)
| _ ->
braces (vert [pretty_attrsv avs store;
vert (vert_intersperse (text ",")
(map (fun p -> pretty_prop p store) proplist))])
and pretty_attrsv ({ proto = p; code = c; extensible = b; klass = k; primval = pv } : attrsv) store =
let proto = [horz [text "#proto:"; pretty_value p]] in
let primval = match pv with None -> []
| Some v -> [horz [text "#prim:"; pretty_value v]] in
let code = match c with None -> []
| Some v -> [horz [text "#code:"; pretty_value v]] in
brackets (horzOrVert (map (fun x -> squish [x; (text ",")])
(primval@
proto@
code@
[horz [text "#class:"; text ("\"" ^ k ^ "\"")];
horz [text "#extensible:"; text (string_of_bool b)]])))
and pretty_prop (f, prop) store = match prop with
| Data ({value=v; writable=w}, enum, config) ->
horz [text ("'" ^ f ^ "'"); text ":";
braces (horzOrVert [horz [text "#value";
pretty_value v;
text ","];
horz [text "#writable"; text (string_of_bool w); text ","];
horz [text "#configurable"; text (string_of_bool config)]])]
| Accessor ({getter=g; setter=s}, enum, config) ->
horz [text ("'" ^ f ^ "'"); text ":"; braces (vert [horz [text "#getter";
pretty_value g; text ","];
horz[text "#setter";
pretty_value s]])]
let string_of_value v store =
FormatExt.to_string (fun v -> pretty_value_store v store) v
let string_of_obj obj store =
FormatExt.to_string (fun obj -> pretty_obj store obj) obj
let string_of_env env =
FormatExt.to_string pretty_env env
(* Stores can be very large. This function avoids mapping over them,
which tends to overflow the stack. *)
let print_store store = match store with
| (obj_store, value_store) ->
let pretty_bind printer pretty_loc (loc, value) =
horzOrVert [horz [pretty_loc loc; text "="]; printer value] in
let print_binding pretty_loc printer binding =
print_endline
(FormatExt.to_string (pretty_bind printer pretty_loc) binding) in
let print_bindings pretty_loc printer store =
List.iter (print_binding pretty_loc printer) (Store.bindings store) in
print_bindings pretty_obj_loc (pretty_obj store) obj_store;
print_bindings pretty_var_loc pretty_value value_store
let print_values store =
let pretty_binding (loc, value) =
horzOrVert [horz [pretty_var_loc loc; text "="]; pretty_value value] in
let print_binding binding =
print_endline (FormatExt.to_string pretty_binding binding) in
List.iter print_binding (Store.bindings (snd store))
let print_objects store =
let pretty_binding (loc, value) =
horzOrVert [horz [pretty_obj_loc loc; text "="]; pretty_obj store value] in
let print_binding binding =
print_endline (FormatExt.to_string pretty_binding binding) in
List.iter print_binding (Store.bindings (fst store))
| null | https://raw.githubusercontent.com/brownplt/LambdaS5/f0bf5c7baf1daa4ead4e398ba7d430bedb7de9cf/src/ljs/ljs_pretty_value.ml | ocaml | Stores can be very large. This function avoids mapping over them,
which tends to overflow the stack. | open Prelude
open Ljs_values
open Ljs_pretty
open Format
open FormatExt
let pretty_var_loc loc = text ("#" ^ Store.print_loc loc)
let pretty_obj_loc loc = text ("@" ^ Store.print_loc loc)
let pretty_env env =
let pretty_bind (var, loc) = horz [text var; text "="; pretty_var_loc loc] in
braces (vert (map pretty_bind (IdMap.bindings env)))
let pretty_value value = match value with
| ObjLoc loc -> pretty_obj_loc loc
| Closure (env, args, body) ->
vert [text "let";
pretty_env env;
horz [text "in func";
parens (squish (intersperse (text ",") (map text args)))];
braces (exp body)]
| primitive -> text (Ljs_values.pretty_value primitive)
let rec pretty_value_store v store = match v with
| ObjLoc loc -> pretty_obj store (get_obj store loc)
| _ -> pretty_value v
and pretty_obj store (avs, props) =
let proplist = IdMap.fold (fun k v l -> (k, v)::l) props [] in
match proplist with
| [] -> braces (pretty_attrsv avs store)
| _ ->
braces (vert [pretty_attrsv avs store;
vert (vert_intersperse (text ",")
(map (fun p -> pretty_prop p store) proplist))])
and pretty_attrsv ({ proto = p; code = c; extensible = b; klass = k; primval = pv } : attrsv) store =
let proto = [horz [text "#proto:"; pretty_value p]] in
let primval = match pv with None -> []
| Some v -> [horz [text "#prim:"; pretty_value v]] in
let code = match c with None -> []
| Some v -> [horz [text "#code:"; pretty_value v]] in
brackets (horzOrVert (map (fun x -> squish [x; (text ",")])
(primval@
proto@
code@
[horz [text "#class:"; text ("\"" ^ k ^ "\"")];
horz [text "#extensible:"; text (string_of_bool b)]])))
and pretty_prop (f, prop) store = match prop with
| Data ({value=v; writable=w}, enum, config) ->
horz [text ("'" ^ f ^ "'"); text ":";
braces (horzOrVert [horz [text "#value";
pretty_value v;
text ","];
horz [text "#writable"; text (string_of_bool w); text ","];
horz [text "#configurable"; text (string_of_bool config)]])]
| Accessor ({getter=g; setter=s}, enum, config) ->
horz [text ("'" ^ f ^ "'"); text ":"; braces (vert [horz [text "#getter";
pretty_value g; text ","];
horz[text "#setter";
pretty_value s]])]
let string_of_value v store =
FormatExt.to_string (fun v -> pretty_value_store v store) v
let string_of_obj obj store =
FormatExt.to_string (fun obj -> pretty_obj store obj) obj
let string_of_env env =
FormatExt.to_string pretty_env env
let print_store store = match store with
| (obj_store, value_store) ->
let pretty_bind printer pretty_loc (loc, value) =
horzOrVert [horz [pretty_loc loc; text "="]; printer value] in
let print_binding pretty_loc printer binding =
print_endline
(FormatExt.to_string (pretty_bind printer pretty_loc) binding) in
let print_bindings pretty_loc printer store =
List.iter (print_binding pretty_loc printer) (Store.bindings store) in
print_bindings pretty_obj_loc (pretty_obj store) obj_store;
print_bindings pretty_var_loc pretty_value value_store
let print_values store =
let pretty_binding (loc, value) =
horzOrVert [horz [pretty_var_loc loc; text "="]; pretty_value value] in
let print_binding binding =
print_endline (FormatExt.to_string pretty_binding binding) in
List.iter print_binding (Store.bindings (snd store))
let print_objects store =
let pretty_binding (loc, value) =
horzOrVert [horz [pretty_obj_loc loc; text "="]; pretty_obj store value] in
let print_binding binding =
print_endline (FormatExt.to_string pretty_binding binding) in
List.iter print_binding (Store.bindings (fst store))
|
0af076ebc0fda32b1b1365232f477529d7755dd93527b19c3efbe378904dd666 | seven1240/idp_proxy | idp_proxy.erl | @author author < >
YYYY author .
%% @doc TEMPLATE.
-module(idp_proxy).
-author('author <>').
-export([start/0, stop/0]).
ensure_started(App) ->
case application:start(App) of
ok ->
ok;
{error, {already_started, App}} ->
ok
end.
@spec start ( ) - > ok
%% @doc Start the idp_proxy server.
start() ->
idp_proxy_deps:ensure(),
ensure_started(crypto),
application:start(idp_proxy).
stop ( ) - > ok
%% @doc Stop the idp_proxy server.
stop() ->
Res = application:stop(idp_proxy),
application:stop(crypto),
Res.
| null | https://raw.githubusercontent.com/seven1240/idp_proxy/bdae5f9b4f6844cb49df108a4cab7658df8e46e8/src/idp_proxy.erl | erlang | @doc TEMPLATE.
@doc Start the idp_proxy server.
@doc Stop the idp_proxy server. | @author author < >
YYYY author .
-module(idp_proxy).
-author('author <>').
-export([start/0, stop/0]).
ensure_started(App) ->
case application:start(App) of
ok ->
ok;
{error, {already_started, App}} ->
ok
end.
@spec start ( ) - > ok
start() ->
idp_proxy_deps:ensure(),
ensure_started(crypto),
application:start(idp_proxy).
stop ( ) - > ok
stop() ->
Res = application:stop(idp_proxy),
application:stop(crypto),
Res.
|
abf08bab96e62157e2e458f58e408b8456902fac9506db5acbf22bef9ffda303 | dym/movitz | named-integers.lisp | ;;;;------------------------------------------------------------------
;;;;
Copyright ( C ) 2001 - 2004 ,
Department of Computer Science , University of Tromso , Norway .
;;;;
;;;; For distribution policy, see the accompanying file COPYING.
;;;;
;;;; Filename: named-integers.lisp
;;;; Description:
Author : < >
Created at : Fri Jan 4 16:13:46 2002
;;;;
$ I d : named - integers.lisp , v 1.6 2004/12/10 12:47:22 ffjeld Exp $
;;;;
;;;;------------------------------------------------------------------
(require :lib/package)
(require :lib/malloc-init)
(provide :lib/named-integers)
(in-package muerte.lib)
(eval-when (:compile-toplevel :load-toplevel)
(defun name->integer (map name)
(if (integerp name)
name
(or (ecase (car map)
(:enum (position name (cdr map)))
(:assoc (cdr (assoc name (cdr map))))
(:rassoc (car (rassoc name (cdr map)))))
(error "No integer named ~S in ~S." name map))))
(defun names->integer (map &rest names)
(declare (dynamic-extent names))
(loop for name in names
sum (name->integer map name))))
(defmacro with-named-integers-syntax (name-maps &body body)
`(macrolet
,(mapcar (lambda (name-map)
(destructuring-bind (name map)
name-map
`(,name (&rest names)
(apply 'muerte.lib:names->integer ,map names))))
name-maps)
,@body))
(define-compile-time-variable *name-to-integer-tables*
(make-hash-table :test 'eq))
(define-compile-time-variable *integer-to-name-tables*
(make-hash-table :test 'eql))
(defmacro define-named-integer (type-name (&key only-constants (prefix-constants t) export-constants)
&rest integer-names)
(loop
with name-to-int-variable =
(intern (format nil "*~A-~A*" type-name 'name-to-integer))
with int-to-name-variable =
(intern (format nil "*~A-~A*" type-name 'integer-to-name))
for (integer name) in integer-names
as constant-name = (intern (if prefix-constants
(format nil "+~A-~A+"
(symbol-name type-name)
(symbol-name name))
(format nil "+~A+" (symbol-name name))))
collect
`(defconstant ,constant-name ,integer) into constant-declarations
when export-constants
collect constant-name into constant-exports
unless only-constants
collect integer into integer-list
and collect name into name-list
finally
(return
`(progn
,@(unless only-constants
`((define-compile-time-variable ,name-to-int-variable (make-hash-table :test 'eq))
(define-compile-time-variable ,int-to-name-variable (make-hash-table :test 'eql))
(eval-when (:compile-toplevel)
(setf (gethash ',type-name *name-to-integer-tables*) ,name-to-int-variable
(gethash ',type-name *integer-to-name-tables*) ,int-to-name-variable)
(mapcar (lambda (i n)
(setf (gethash i ,int-to-name-variable) n)
(setf (gethash n ,name-to-int-variable) i))
',integer-list
',name-list))))
(eval-when (:compile-toplevel)
, @constant - declarations
(export ',constant-exports))
,@constant-declarations
',type-name))))
(defmacro named-integer-case (type keyform &rest cases)
(let ((table (gethash type *name-to-integer-tables*)))
(assert table (type) "No such named-integer type: ~S." type)
(flet ((map-name (name)
(or (and (integerp name) name)
(gethash name table)
name)))
(list* 'case keyform
(loop for (keys . forms) in cases
if (atom keys)
collect (cons (map-name keys) forms)
else collect (cons (mapcar #'map-name keys) forms))))))
(defun integer-name (type integer &optional (errorp t) (default integer))
(let ((table (gethash type *integer-to-name-tables*)))
(assert table (type)
"No such named-integer type: ~S." type)
(or (gethash integer table)
(if errorp
(error "Integer ~S has no name in type ~S." integer type)
default))))
(defun named-integer (type name &optional (errorp t) (default name))
(let ((table (gethash type *name-to-integer-tables*)))
(assert table (type)
"No such named-integer type: ~S." type)
(or (gethash name table)
(if errorp
(error "~S is not defined in named-integer type ~S." name type)
default))))
| null | https://raw.githubusercontent.com/dym/movitz/56176e1ebe3eabc15c768df92eca7df3c197cb3d/losp/lib/named-integers.lisp | lisp | ------------------------------------------------------------------
For distribution policy, see the accompanying file COPYING.
Filename: named-integers.lisp
Description:
------------------------------------------------------------------ | Copyright ( C ) 2001 - 2004 ,
Department of Computer Science , University of Tromso , Norway .
Author : < >
Created at : Fri Jan 4 16:13:46 2002
$ I d : named - integers.lisp , v 1.6 2004/12/10 12:47:22 ffjeld Exp $
(require :lib/package)
(require :lib/malloc-init)
(provide :lib/named-integers)
(in-package muerte.lib)
(eval-when (:compile-toplevel :load-toplevel)
(defun name->integer (map name)
(if (integerp name)
name
(or (ecase (car map)
(:enum (position name (cdr map)))
(:assoc (cdr (assoc name (cdr map))))
(:rassoc (car (rassoc name (cdr map)))))
(error "No integer named ~S in ~S." name map))))
(defun names->integer (map &rest names)
(declare (dynamic-extent names))
(loop for name in names
sum (name->integer map name))))
(defmacro with-named-integers-syntax (name-maps &body body)
`(macrolet
,(mapcar (lambda (name-map)
(destructuring-bind (name map)
name-map
`(,name (&rest names)
(apply 'muerte.lib:names->integer ,map names))))
name-maps)
,@body))
(define-compile-time-variable *name-to-integer-tables*
(make-hash-table :test 'eq))
(define-compile-time-variable *integer-to-name-tables*
(make-hash-table :test 'eql))
(defmacro define-named-integer (type-name (&key only-constants (prefix-constants t) export-constants)
&rest integer-names)
(loop
with name-to-int-variable =
(intern (format nil "*~A-~A*" type-name 'name-to-integer))
with int-to-name-variable =
(intern (format nil "*~A-~A*" type-name 'integer-to-name))
for (integer name) in integer-names
as constant-name = (intern (if prefix-constants
(format nil "+~A-~A+"
(symbol-name type-name)
(symbol-name name))
(format nil "+~A+" (symbol-name name))))
collect
`(defconstant ,constant-name ,integer) into constant-declarations
when export-constants
collect constant-name into constant-exports
unless only-constants
collect integer into integer-list
and collect name into name-list
finally
(return
`(progn
,@(unless only-constants
`((define-compile-time-variable ,name-to-int-variable (make-hash-table :test 'eq))
(define-compile-time-variable ,int-to-name-variable (make-hash-table :test 'eql))
(eval-when (:compile-toplevel)
(setf (gethash ',type-name *name-to-integer-tables*) ,name-to-int-variable
(gethash ',type-name *integer-to-name-tables*) ,int-to-name-variable)
(mapcar (lambda (i n)
(setf (gethash i ,int-to-name-variable) n)
(setf (gethash n ,name-to-int-variable) i))
',integer-list
',name-list))))
(eval-when (:compile-toplevel)
, @constant - declarations
(export ',constant-exports))
,@constant-declarations
',type-name))))
(defmacro named-integer-case (type keyform &rest cases)
(let ((table (gethash type *name-to-integer-tables*)))
(assert table (type) "No such named-integer type: ~S." type)
(flet ((map-name (name)
(or (and (integerp name) name)
(gethash name table)
name)))
(list* 'case keyform
(loop for (keys . forms) in cases
if (atom keys)
collect (cons (map-name keys) forms)
else collect (cons (mapcar #'map-name keys) forms))))))
(defun integer-name (type integer &optional (errorp t) (default integer))
(let ((table (gethash type *integer-to-name-tables*)))
(assert table (type)
"No such named-integer type: ~S." type)
(or (gethash integer table)
(if errorp
(error "Integer ~S has no name in type ~S." integer type)
default))))
(defun named-integer (type name &optional (errorp t) (default name))
(let ((table (gethash type *name-to-integer-tables*)))
(assert table (type)
"No such named-integer type: ~S." type)
(or (gethash name table)
(if errorp
(error "~S is not defined in named-integer type ~S." name type)
default))))
|
6f63cfd84db1ee9fc14b2e8c5e2db0100edbb58b33d381cf0010ba527ad44079 | qfpl/reflex-workshop | Lists.hs | |
Copyright : ( c ) 2018 , Commonwealth Scientific and Industrial Research Organisation
License : :
Stability : experimental
Portability : non - portable
Copyright : (c) 2018, Commonwealth Scientific and Industrial Research Organisation
License : BSD3
Maintainer :
Stability : experimental
Portability : non-portable
-}
{-# LANGUAGE OverloadedStrings #-}
{-# LANGUAGE GADTs #-}
module Workshop.Collections.Lists (
listsSection
) where
import qualified Data.Map as Map
import Reflex.Dom.Core
import Types.Section
import Types.RouteFragment
import Workshop.Collections.Lists.Displaying
import Workshop.Collections.Lists.Adding
import Workshop.Collections.Lists.Removing
import Workshop.Collections.Lists.Model
listsSection :: MonadWidget t m => Section m
listsSection =
Section
"Lists"
(Page "lists")
"pages/collections/lists.html"
mempty
mempty
(Map.fromList [ ("displaying", exDisplaying)
, ("adding", exAdding)
, ("removing", exRemoving)
, ("model", exModel)
])
| null | https://raw.githubusercontent.com/qfpl/reflex-workshop/244ef13fb4b2e884f455eccc50072e98d1668c9e/src/Workshop/Collections/Lists.hs | haskell | # LANGUAGE OverloadedStrings #
# LANGUAGE GADTs # | |
Copyright : ( c ) 2018 , Commonwealth Scientific and Industrial Research Organisation
License : :
Stability : experimental
Portability : non - portable
Copyright : (c) 2018, Commonwealth Scientific and Industrial Research Organisation
License : BSD3
Maintainer :
Stability : experimental
Portability : non-portable
-}
module Workshop.Collections.Lists (
listsSection
) where
import qualified Data.Map as Map
import Reflex.Dom.Core
import Types.Section
import Types.RouteFragment
import Workshop.Collections.Lists.Displaying
import Workshop.Collections.Lists.Adding
import Workshop.Collections.Lists.Removing
import Workshop.Collections.Lists.Model
listsSection :: MonadWidget t m => Section m
listsSection =
Section
"Lists"
(Page "lists")
"pages/collections/lists.html"
mempty
mempty
(Map.fromList [ ("displaying", exDisplaying)
, ("adding", exAdding)
, ("removing", exRemoving)
, ("model", exModel)
])
|
0b6cb99e9645570b66d140a0f5dd706d8d963bcf641705e9b69182b531464699 | EMSL-NMR-EPR/Haskell-MFAPipe-Executable | Class.hs | -----------------------------------------------------------------------------
-- |
Module : Science . Chemistry . EMU.HasSize . Class
Copyright : 2016 - 17 Pacific Northwest National Laboratory
-- License : ECL-2.0 (see the LICENSE file in the distribution)
--
-- Maintainer :
-- Stability : experimental
-- Portability : portable
--
-- This module exports classes for calculating the size of Elementary
Metabolite Units ( EMU ) types .
-----------------------------------------------------------------------------
module Science.Chemistry.EMU.HasSize.Class
( HasSize(..)
, partitionBySizeWith , partitionBySize
) where
import Data.IntMap.Strict (IntMap)
import qualified Data.IntMap.Strict
-- | The 'HasSize' class is used for types that have a size.
class HasSize a where
# MINIMAL size #
| @size is the size of @x@.
size :: a -> Int
partitionBySizeWith :: (Foldable f, HasSize b) => (a -> b) -> f a -> IntMap [a]
partitionBySizeWith f = foldr (\x -> Data.IntMap.Strict.alter (Just . (:) x . maybe [] id) (size (f x))) Data.IntMap.Strict.empty
partitionBySize :: (Foldable f, HasSize a) => f a -> IntMap [a]
partitionBySize = partitionBySizeWith id
| null | https://raw.githubusercontent.com/EMSL-NMR-EPR/Haskell-MFAPipe-Executable/8a7fd13202d3b6b7380af52d86e851e995a9b53e/MFAPipe/src/Science/Chemistry/EMU/HasSize/Class.hs | haskell | ---------------------------------------------------------------------------
|
License : ECL-2.0 (see the LICENSE file in the distribution)
Maintainer :
Stability : experimental
Portability : portable
This module exports classes for calculating the size of Elementary
---------------------------------------------------------------------------
| The 'HasSize' class is used for types that have a size. | Module : Science . Chemistry . EMU.HasSize . Class
Copyright : 2016 - 17 Pacific Northwest National Laboratory
Metabolite Units ( EMU ) types .
module Science.Chemistry.EMU.HasSize.Class
( HasSize(..)
, partitionBySizeWith , partitionBySize
) where
import Data.IntMap.Strict (IntMap)
import qualified Data.IntMap.Strict
class HasSize a where
# MINIMAL size #
| @size is the size of @x@.
size :: a -> Int
partitionBySizeWith :: (Foldable f, HasSize b) => (a -> b) -> f a -> IntMap [a]
partitionBySizeWith f = foldr (\x -> Data.IntMap.Strict.alter (Just . (:) x . maybe [] id) (size (f x))) Data.IntMap.Strict.empty
partitionBySize :: (Foldable f, HasSize a) => f a -> IntMap [a]
partitionBySize = partitionBySizeWith id
|
11ce2f8c30bde6ab326a525696bdf5fc016327230346be4213a9470a1aef7118 | tfausak/advent-of-code | 1.hs | -- stack --resolver lts-12.0 script
import Control.Arrow ((&&&))
import qualified Data.Map as Map
main
= print
. uncurry (*)
. (count 2 &&& count 3)
. map (Map.elems . foldr (\ x -> Map.insertWith (+) x 1) Map.empty)
. lines
=<< getContents
count n = length . filter (elem n)
| null | https://raw.githubusercontent.com/tfausak/advent-of-code/26f0d9726b019ff7b97fa7e0f2f995269b399578/2018/2/1.hs | haskell | stack --resolver lts-12.0 script | import Control.Arrow ((&&&))
import qualified Data.Map as Map
main
= print
. uncurry (*)
. (count 2 &&& count 3)
. map (Map.elems . foldr (\ x -> Map.insertWith (+) x 1) Map.empty)
. lines
=<< getContents
count n = length . filter (elem n)
|
7df1c58bd4c788e1bfe5e19f5c1912a8fd6d81965643f9fed0c45a3195254f6a | pesterhazy/cljs-spa-example | test_runner.cljs | (ns cljs-spa.test-runner
(:require [clojure.test :refer-macros [deftest testing is run-tests]]
[goog.object :as gobj]
[figwheel.main.testing :refer-macros [run-tests-async]]
[cljs-spa.core-test] ;; for side-effects
[cljs-test-display.core :as td]))
(defn extra-main []
(js/console.warn "extra-main")
(run-tests (td/init! "app-tests") 'cljs-spa.core-test))
(defn -main [& args] (js/console.warn "-main") (run-tests-async 3000))
Only run this at NS init time when the user
;; is visiting the extra main page
(when (= "/figwheel-extra-main/tests"
(gobj/getValueByKeys goog/global "location" "pathname"))
(extra-main))
| null | https://raw.githubusercontent.com/pesterhazy/cljs-spa-example/ef6e6042d0d0759f8bdfc6be4921b72170fd2352/tests/cljs_spa/test_runner.cljs | clojure | for side-effects
is visiting the extra main page | (ns cljs-spa.test-runner
(:require [clojure.test :refer-macros [deftest testing is run-tests]]
[goog.object :as gobj]
[figwheel.main.testing :refer-macros [run-tests-async]]
[cljs-test-display.core :as td]))
(defn extra-main []
(js/console.warn "extra-main")
(run-tests (td/init! "app-tests") 'cljs-spa.core-test))
(defn -main [& args] (js/console.warn "-main") (run-tests-async 3000))
Only run this at NS init time when the user
(when (= "/figwheel-extra-main/tests"
(gobj/getValueByKeys goog/global "location" "pathname"))
(extra-main))
|
e46fcdedf0433f12f9c3e85075c2dbf2b014afc903fa2f459be09fc2a7122d1c | camfort/reprinter | Reprinter.hs | {-# LANGUAGE RankNTypes #-}
{-# LANGUAGE DeriveDataTypeable #-}
module Text.Reprinter
( module Data.Functor.Identity
, module Data.Generics
, module Data.Generics.Zipper
, Span
, Position
, initPosition
, initCol
, initLine
, mkCol
, mkLine
, advanceCol
, advanceLine
, RefactorType(..)
, Refactorable(..)
, Reprinting
, catchAll
, genReprinting
, reprint
, reprintSort
) where
-- Import solely for re-exporting for library clients
import Data.Functor.Identity
import Data.Generics
import Text.Reprinter.StringLike
import Control.Monad (forM)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.State.Lazy
import Data.Data
import Data.Generics.Zipper
import Data.List (sortOn)
import Data.Monoid ((<>), mempty)
-- | A line within the source text
newtype Line = Line Int deriving (Data, Eq, Ord, Show)
| Lines start at 1
initLine :: Line
initLine = Line 1
| Smart constructor for a Line , checks that line > = 1
mkLine :: Int -> Either String Line
mkLine l
| l < 1 = Left $ "mkLine: called with: " <> show l <> ". Minimum is 1."
| otherwise = Right (Line l)
-- | A column within the source text
newtype Col = Col Int deriving (Data, Eq, Ord, Show)
| Columns start at 1
initCol :: Col
initCol = Col 1
| Smart constructor for a Col , checks that column > = 1
mkCol :: Int -> Either String Col
mkCol l
| l < 1 = Left $ "mkCol: called with: " <> show l <> ". Minimum is 1."
| otherwise = Right (Col l)
-- | A position in a text (imagine a cursor)
type Position = (Line,Col)
-- | The initial position
initPosition :: Position
initPosition = (initLine,initCol)
-- | Given a position, go down a line, going back to the initial column
advanceLine :: Position -> Position
advanceLine (Line x, _) = (Line (x+1), initCol)
| Given a position , advance by one column
advanceCol :: Position -> Position
advanceCol (ln, Col x) = (ln, Col (x+1))
| Two positions give the lower and upper bounds of a source span
type Span = (Position, Position)
-- | Type of a reprinting function
--
@i@ is the input type ( something with a ' [ Char]'-like interface )
type Reprinting i m = forall node . (Typeable node) => node -> m (Maybe (RefactorType, i, Span))
-- | Specify a refactoring type
data RefactorType = Before | After | Replace
deriving Show -- for debugging
-- | The reprint algorithm takes a refactoring (parameteric in
-- | some monad m) and turns an arbitrary pretty-printable type 'ast'
| into a monadic ' StringLike i ' transformer .
reprint :: (Monad m, Data ast, StringLike i) => Reprinting i m -> ast -> i -> m i
reprint reprinting ast input
-- If the input is empty return empty
| slNull input = return mempty
-- Otherwise proceed with the algorithm
| otherwise = do
-- Initial state comprises start cursor and input source
let state_0 = (initPosition, input)
-- Enter the top-node of a zipper for `ast'
let comp = enter reprinting (toZipper ast)
(out, (_, remaining)) <- runStateT comp state_0
-- Add to the output source the remaining input source
return (out <> remaining)
| Take a refactoring and a zipper producing a stateful ' StringLike i '
-- | transformer with Position state.
enter :: (Monad m, StringLike i) => Reprinting i m -> Zipper ast -> StateT (Position, i) m i
enter reprinting zipper = do
Step 1 : Apply a refactoring
refactoringInfo <- lift (query reprinting zipper)
Step 2 : Deal with refactored code or go to children
output <- case refactoringInfo of
-- No refactoring; go to children
Nothing -> go down'
-- A refactoring was applied
Just r -> splice r
Step 3 : Enter the right sibling of the current context
outputSib <- go right
-- Finally append output of current context/children
-- and right sibling
return (output <> outputSib)
where
go direction =
case direction zipper of
Go to next node if there is one
Just zipper -> enter reprinting zipper
-- Otherwise return the empty string
Nothing -> return mempty
-- | The reprint algorithm takes a refactoring (parameteric in
-- | some monad m) and turns an arbitrary pretty-printable type 'ast'
| into a monadic ' StringLike i ' transformer .
reprintSort :: (Monad m, Data ast, StringLike i) => Reprinting i m -> ast -> i -> m i
reprintSort reprinting ast input
-- If the input is empty return empty
| slNull input = return mempty
-- Otherwise proceed with the algorithm
| otherwise = do
-- Initial state comprises start cursor and input source
let state_0 = (initPosition, input)
-- Enter the top-node of a zipper for `ast'
let comp = enter' reprinting (toZipper ast)
(out, (_, remaining)) <- runStateT comp state_0
-- Add to the output source the remaining input source
return (out <> remaining)
-- | Take a refactoring and a zipper to produce a list of refactorings
enter' :: (Monad m, StringLike i) => Reprinting i m -> Zipper ast
-> StateT (Position, i) m i
enter' reprinting zipper = do
Step 1 : Get refactorings via AST zipper traversal
rs <- lift $ getRefactorings reprinting zipper []
Step 2 : Do the splicing on the sorted refactorings
srcs <- mapM splice (sortBySpan . reverse $ rs)
return $ mconcat srcs
where
sortBySpan = sortOn (\(_,_,sp) -> sp)
getRefactorings :: (Monad m, StringLike i) => Reprinting i m -> Zipper ast -> [(RefactorType, i, Span)]
-> m [(RefactorType, i, Span)]
getRefactorings reprinting zipper acc = do
Step 1 : Apply a refactoring
refactoringInfo <- query reprinting zipper
Step 2 : Deal with refactored code or go to children
acc <- case refactoringInfo of
-- No refactoring; go to children
Nothing -> go down' acc
-- A refactoring was applied, add it to the accumulator
Just r -> return (r : acc)
Step 3 : Enter the left sibling of the current focus
acc <- go right acc
-- Finally return the accumulated refactorings
return acc
where
go direction acc =
case direction zipper of
Go to next node if there is one
Just zipper -> getRefactorings reprinting zipper acc
-- Otherwise return the empty string
Nothing -> return acc
splice :: (Monad m, StringLike i) => (RefactorType, i, Span) -> StateT (Position, i) m i
splice (typ, output, (lb, ub)) = do
(cursor, inp) <- get
case typ of
Replace -> do
-- Get soure up to start of refactored node
let (pre, inp') = splitBySpan (cursor, lb) inp
-- Remove source covered by refactoring
let (_, inp'') = splitBySpan (lb, ub) inp'
put (ub, inp'')
return (pre <> output)
After -> do
-- Get source up to end of the refactored node
let (pre, inp') = splitBySpan (cursor, ub) inp
put (ub, inp')
return (pre <> output)
Before -> do
-- Get source up to start of refactored node
let (pre, inp') = splitBySpan (cursor, lb) inp
-- Discard portion consumed by the refactoring
let (post, inp'') = splitBySpan (lb, ub) inp'
put (ub, inp'')
return (pre <> output <> post)
-- | Given a lower-bound and upper-bound pair of Positions, split the
| incoming ' StringLike i ' based on the distance between the Position pairs .
splitBySpan :: StringLike i => Span -> i -> (i, i)
splitBySpan (lower, upper) =
subtext mempty lower
where
subtext acc cursor input
| cursor < lower =
case slUncons input of
Nothing -> done
Just ('\n', input') -> subtext acc (advanceLine cursor) input'
Just (_, input') -> subtext acc (advanceCol cursor) input'
| cursor < upper =
case slUncons input of
Nothing -> done
Just ('\n', input') -> subtext (slCons '\n' acc) (advanceLine cursor) input'
Just (x, input') -> subtext (slCons x acc) (advanceCol cursor) input'
| otherwise = done
where done = (slReverse acc, input)
-- | Infrastructure for building the reprinter "plugins"
class Refactorable t where
isRefactored :: t -> Maybe RefactorType
getSpan :: t -> Span
-- | Essentially wraps the refactorable interface
genReprinting :: (Monad m, Refactorable t, Typeable t, StringLike i)
=> (t -> m i) -> t -> m (Maybe (RefactorType, i, Span))
genReprinting f z = case isRefactored z of
Nothing -> return Nothing
Just refactorType -> do
output <- f z
return $ Just (refactorType, output, getSpan z)
-- | Catch all generic query
catchAll :: Monad m => a -> m (Maybe b)
catchAll _ = return Nothing
| null | https://raw.githubusercontent.com/camfort/reprinter/bade15439ffe4308fe5f3b631cb7aafe31a2f5ad/src/Text/Reprinter.hs | haskell | # LANGUAGE RankNTypes #
# LANGUAGE DeriveDataTypeable #
Import solely for re-exporting for library clients
| A line within the source text
| A column within the source text
| A position in a text (imagine a cursor)
| The initial position
| Given a position, go down a line, going back to the initial column
| Type of a reprinting function
| Specify a refactoring type
for debugging
| The reprint algorithm takes a refactoring (parameteric in
| some monad m) and turns an arbitrary pretty-printable type 'ast'
If the input is empty return empty
Otherwise proceed with the algorithm
Initial state comprises start cursor and input source
Enter the top-node of a zipper for `ast'
Add to the output source the remaining input source
| transformer with Position state.
No refactoring; go to children
A refactoring was applied
Finally append output of current context/children
and right sibling
Otherwise return the empty string
| The reprint algorithm takes a refactoring (parameteric in
| some monad m) and turns an arbitrary pretty-printable type 'ast'
If the input is empty return empty
Otherwise proceed with the algorithm
Initial state comprises start cursor and input source
Enter the top-node of a zipper for `ast'
Add to the output source the remaining input source
| Take a refactoring and a zipper to produce a list of refactorings
No refactoring; go to children
A refactoring was applied, add it to the accumulator
Finally return the accumulated refactorings
Otherwise return the empty string
Get soure up to start of refactored node
Remove source covered by refactoring
Get source up to end of the refactored node
Get source up to start of refactored node
Discard portion consumed by the refactoring
| Given a lower-bound and upper-bound pair of Positions, split the
| Infrastructure for building the reprinter "plugins"
| Essentially wraps the refactorable interface
| Catch all generic query |
module Text.Reprinter
( module Data.Functor.Identity
, module Data.Generics
, module Data.Generics.Zipper
, Span
, Position
, initPosition
, initCol
, initLine
, mkCol
, mkLine
, advanceCol
, advanceLine
, RefactorType(..)
, Refactorable(..)
, Reprinting
, catchAll
, genReprinting
, reprint
, reprintSort
) where
import Data.Functor.Identity
import Data.Generics
import Text.Reprinter.StringLike
import Control.Monad (forM)
import Control.Monad.Trans.Class (lift)
import Control.Monad.Trans.State.Lazy
import Data.Data
import Data.Generics.Zipper
import Data.List (sortOn)
import Data.Monoid ((<>), mempty)
newtype Line = Line Int deriving (Data, Eq, Ord, Show)
| Lines start at 1
initLine :: Line
initLine = Line 1
| Smart constructor for a Line , checks that line > = 1
mkLine :: Int -> Either String Line
mkLine l
| l < 1 = Left $ "mkLine: called with: " <> show l <> ". Minimum is 1."
| otherwise = Right (Line l)
newtype Col = Col Int deriving (Data, Eq, Ord, Show)
| Columns start at 1
initCol :: Col
initCol = Col 1
| Smart constructor for a Col , checks that column > = 1
mkCol :: Int -> Either String Col
mkCol l
| l < 1 = Left $ "mkCol: called with: " <> show l <> ". Minimum is 1."
| otherwise = Right (Col l)
type Position = (Line,Col)
initPosition :: Position
initPosition = (initLine,initCol)
advanceLine :: Position -> Position
advanceLine (Line x, _) = (Line (x+1), initCol)
| Given a position , advance by one column
advanceCol :: Position -> Position
advanceCol (ln, Col x) = (ln, Col (x+1))
| Two positions give the lower and upper bounds of a source span
type Span = (Position, Position)
@i@ is the input type ( something with a ' [ Char]'-like interface )
type Reprinting i m = forall node . (Typeable node) => node -> m (Maybe (RefactorType, i, Span))
data RefactorType = Before | After | Replace
| into a monadic ' StringLike i ' transformer .
reprint :: (Monad m, Data ast, StringLike i) => Reprinting i m -> ast -> i -> m i
reprint reprinting ast input
| slNull input = return mempty
| otherwise = do
let state_0 = (initPosition, input)
let comp = enter reprinting (toZipper ast)
(out, (_, remaining)) <- runStateT comp state_0
return (out <> remaining)
| Take a refactoring and a zipper producing a stateful ' StringLike i '
enter :: (Monad m, StringLike i) => Reprinting i m -> Zipper ast -> StateT (Position, i) m i
enter reprinting zipper = do
Step 1 : Apply a refactoring
refactoringInfo <- lift (query reprinting zipper)
Step 2 : Deal with refactored code or go to children
output <- case refactoringInfo of
Nothing -> go down'
Just r -> splice r
Step 3 : Enter the right sibling of the current context
outputSib <- go right
return (output <> outputSib)
where
go direction =
case direction zipper of
Go to next node if there is one
Just zipper -> enter reprinting zipper
Nothing -> return mempty
| into a monadic ' StringLike i ' transformer .
reprintSort :: (Monad m, Data ast, StringLike i) => Reprinting i m -> ast -> i -> m i
reprintSort reprinting ast input
| slNull input = return mempty
| otherwise = do
let state_0 = (initPosition, input)
let comp = enter' reprinting (toZipper ast)
(out, (_, remaining)) <- runStateT comp state_0
return (out <> remaining)
enter' :: (Monad m, StringLike i) => Reprinting i m -> Zipper ast
-> StateT (Position, i) m i
enter' reprinting zipper = do
Step 1 : Get refactorings via AST zipper traversal
rs <- lift $ getRefactorings reprinting zipper []
Step 2 : Do the splicing on the sorted refactorings
srcs <- mapM splice (sortBySpan . reverse $ rs)
return $ mconcat srcs
where
sortBySpan = sortOn (\(_,_,sp) -> sp)
getRefactorings :: (Monad m, StringLike i) => Reprinting i m -> Zipper ast -> [(RefactorType, i, Span)]
-> m [(RefactorType, i, Span)]
getRefactorings reprinting zipper acc = do
Step 1 : Apply a refactoring
refactoringInfo <- query reprinting zipper
Step 2 : Deal with refactored code or go to children
acc <- case refactoringInfo of
Nothing -> go down' acc
Just r -> return (r : acc)
Step 3 : Enter the left sibling of the current focus
acc <- go right acc
return acc
where
go direction acc =
case direction zipper of
Go to next node if there is one
Just zipper -> getRefactorings reprinting zipper acc
Nothing -> return acc
splice :: (Monad m, StringLike i) => (RefactorType, i, Span) -> StateT (Position, i) m i
splice (typ, output, (lb, ub)) = do
(cursor, inp) <- get
case typ of
Replace -> do
let (pre, inp') = splitBySpan (cursor, lb) inp
let (_, inp'') = splitBySpan (lb, ub) inp'
put (ub, inp'')
return (pre <> output)
After -> do
let (pre, inp') = splitBySpan (cursor, ub) inp
put (ub, inp')
return (pre <> output)
Before -> do
let (pre, inp') = splitBySpan (cursor, lb) inp
let (post, inp'') = splitBySpan (lb, ub) inp'
put (ub, inp'')
return (pre <> output <> post)
| incoming ' StringLike i ' based on the distance between the Position pairs .
splitBySpan :: StringLike i => Span -> i -> (i, i)
splitBySpan (lower, upper) =
subtext mempty lower
where
subtext acc cursor input
| cursor < lower =
case slUncons input of
Nothing -> done
Just ('\n', input') -> subtext acc (advanceLine cursor) input'
Just (_, input') -> subtext acc (advanceCol cursor) input'
| cursor < upper =
case slUncons input of
Nothing -> done
Just ('\n', input') -> subtext (slCons '\n' acc) (advanceLine cursor) input'
Just (x, input') -> subtext (slCons x acc) (advanceCol cursor) input'
| otherwise = done
where done = (slReverse acc, input)
class Refactorable t where
isRefactored :: t -> Maybe RefactorType
getSpan :: t -> Span
genReprinting :: (Monad m, Refactorable t, Typeable t, StringLike i)
=> (t -> m i) -> t -> m (Maybe (RefactorType, i, Span))
genReprinting f z = case isRefactored z of
Nothing -> return Nothing
Just refactorType -> do
output <- f z
return $ Just (refactorType, output, getSpan z)
catchAll :: Monad m => a -> m (Maybe b)
catchAll _ = return Nothing
|
f45c26a5c12846412c0467fa12a63f2340533152fad41998b59f727cd571028b | V-Wong/COMP3141 | map.hs | import Prelude hiding (map)
map :: (a -> b) -> [a] -> [b]
map f [] = []
map f (x:xs) = f x : map f xs | null | https://raw.githubusercontent.com/V-Wong/COMP3141/f5a92e5833d2bb0a08e68cb2d025e38c8c875563/Random%20Code/map.hs | haskell | import Prelude hiding (map)
map :: (a -> b) -> [a] -> [b]
map f [] = []
map f (x:xs) = f x : map f xs | |
123f077e9b703b5afd3c7c0fd76c7e81ba417668b7906dc02af1ac389febae86 | ocamllabs/ocaml-effects | ccomp.mli | (***********************************************************************)
(* *)
(* OCaml *)
(* *)
, projet Cristal , INRIA Rocquencourt
(* *)
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
(* *)
(***********************************************************************)
(* Compiling C files and building C libraries *)
val command: string -> int
val run_command: string -> unit
val compile_file: output_name:string option -> string -> int
val create_archive: string -> string list -> int
val expand_libname: string -> string
val quote_files: string list -> string
val quote_optfile: string option -> string
make_link_options : string list - > string
type link_mode =
| Exe
| Dll
| MainDll
| Partial
val call_linker: link_mode -> string -> string list -> string -> bool
| null | https://raw.githubusercontent.com/ocamllabs/ocaml-effects/36008b741adc201bf9b547545344507da603ae31/utils/ccomp.mli | ocaml | *********************************************************************
OCaml
*********************************************************************
Compiling C files and building C libraries | , projet Cristal , INRIA Rocquencourt
Copyright 1996 Institut National de Recherche en Informatique et
en Automatique . All rights reserved . This file is distributed
under the terms of the Q Public License version 1.0 .
val command: string -> int
val run_command: string -> unit
val compile_file: output_name:string option -> string -> int
val create_archive: string -> string list -> int
val expand_libname: string -> string
val quote_files: string list -> string
val quote_optfile: string option -> string
make_link_options : string list - > string
type link_mode =
| Exe
| Dll
| MainDll
| Partial
val call_linker: link_mode -> string -> string list -> string -> bool
|
5c45cccf9b104de1a857dc25897d76fe67e140bb013b022c3a093b2994b5933d | lasp-lang/partisan | partisan_peer_socket.erl | %% -------------------------------------------------------------------
%%
Copyright ( c ) 2017 . All Rights Reserved .
%%
This file is provided to you under the Apache License ,
%% Version 2.0 (the "License"); you may not use this file
except in compliance with the License . You may obtain
%% a copy of the License at
%%
%% -2.0
%%
%% Unless required by applicable law or agreed to in writing,
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
%% KIND, either express or implied. See the License for the
%% specific language governing permissions and limitations
%% under the License.
%%
%% -------------------------------------------------------------------
%% -----------------------------------------------------------------------------
%% @doc Wrapper that allows transparent usage of plain TCP or TLS socket
%% for peer connections.
%%
%% This module also implements the monotonic channel functionality.
%% @end
%% -----------------------------------------------------------------------------
-module(partisan_peer_socket).
this macro only exists in OTP-21 and above , where ssl_accept/2 is deprecated
-ifdef(OTP_RELEASE).
-define(ssl_accept(TCPSocket, TLSOpts), ssl:handshake(TCPSocket, TLSOpts)).
-else.
-define(ssl_accept(TCPSocket, TLSOpts), ssl:ssl_accept(TCPSocket, TLSOpts)).
-endif.
%% Wrapper around a plain TCP or TLS peer socket. The `transport' module
%% is used to transceive and close (gen_tcp | ssl) while the `control'
%% module is used to set socket options (inet | ssl). `monotonic' marks
%% connections whose sends may be skipped when the owning process has a
%% message-queue backlog (see send/2).
-record(connection, {
    %% The underlying socket handle.
    socket :: gen_tcp:socket() | ssl:sslsocket(),
    %% Module for send/recv/close: gen_tcp for plain TCP, ssl for TLS.
    transport :: gen_tcp | ssl,
    %% Module for setopts: inet for plain TCP, ssl for TLS.
    control :: inet | ssl,
    %% When true, send/2 applies the monotonic-channel send policy.
    monotonic = false :: boolean()
}).
-type reason() :: closed | inet:posix().
-type options() :: [gen_tcp:option()] | map().
-type connection() :: #connection{}.
-export_type([connection/0]).
-export([accept/1]).
-export([close/1]).
-export([connect/3]).
-export([connect/4]).
-export([connect/5]).
-export([recv/2]).
-export([recv/3]).
-export([send/2]).
-export([setopts/2]).
-export([socket/1]).
%% =============================================================================
%% API
%% =============================================================================
%% -----------------------------------------------------------------------------
%% @doc Wraps a TCP socket with the appropriate information for
%% transceiving on and controlling the socket later. If TLS/SSL is
%% enabled, this performs the socket upgrade/negotiation before
%% returning the wrapped socket.
%% @end
%% -----------------------------------------------------------------------------
-spec accept(gen_tcp:socket()) -> connection().

accept(TCPSocket) ->
    case tls_enabled() of
        true ->
            ServerOpts = partisan_config:get(tls_server_options),
            %% Per the OTP ssl documentation, the accepted socket must
            %% be in {active, false} mode before the TLS handshake is
            %% started, otherwise the upgrade can fail depending on
            %% timing.
            inet:setopts(TCPSocket, [{active, false}]),
            {ok, TLSSocket} = ?ssl_accept(TCPSocket, ServerOpts),
            %% Restore the {active, once} mode callers expect.
            ssl:setopts(TLSSocket, [{active, once}]),
            #connection{
                socket = TLSSocket,
                transport = ssl,
                control = ssl
            };
        _ ->
            #connection{
                socket = TCPSocket,
                transport = gen_tcp,
                control = inet
            }
    end.
%% -----------------------------------------------------------------------------
%% @doc
%% @see gen_tcp:send/2
%% @see ssl:send/2
%% @end
%% -----------------------------------------------------------------------------
-spec send(connection(), iodata()) -> ok | {error, reason()}.

%% Send Data over the connection. On a monotonic channel the send may be
%% skipped when the calling process is backlogged; see
%% monotonic_should_send/2 for the policy.
send(#connection{monotonic = false, socket = Socket, transport = Transport},
     Data) ->
    send(Transport, Socket, Data);

send(#connection{monotonic = true, socket = Socket, transport = Transport},
     Data) ->
    %% Current backlog of the sending process.
    {message_queue_len, QueueLen} = process_info(self(), message_queue_len),
    %% Last transmission time is tracked in the process dictionary.
    LastSent = get(last_transmission_time),
    case monotonic_should_send(QueueLen, LastSent) of
        true ->
            %% Record this transmission before sending.
            put(last_transmission_time, monotonic_now()),
            send(Transport, Socket, Data);
        false ->
            ok
    end.
%% -----------------------------------------------------------------------------
%% @doc
%% @see gen_tcp:recv/2
%% @see ssl:recv/2
%% @end
%% -----------------------------------------------------------------------------
-spec recv(connection(), integer()) -> {ok, iodata()} | {error, reason()}.

%% Convenience wrapper around recv/3 that waits indefinitely.
recv(Conn, Length) ->
    recv(Conn, Length, infinity).
%% -----------------------------------------------------------------------------
%% @doc
%% @see gen_tcp:recv/3
%% @see ssl:recv/3
%% @end
%% -----------------------------------------------------------------------------
-spec recv(connection(), integer(), timeout()) ->
    {ok, iodata()} | {error, reason()}.

%% Receive Len bytes from the connection, delegating to the transport
%% module (gen_tcp or ssl) selected when the connection was established.
recv(#connection{socket = Sock, transport = Mod}, Len, Timeout) ->
    Mod:recv(Sock, Len, Timeout).
%% -----------------------------------------------------------------------------
%% @doc
%% @see inet:setopts/2
%% @see ssl:setopts/2
%% @end
%% -----------------------------------------------------------------------------
-spec setopts(connection(), options()) -> ok | {error, inet:posix()}.

%% Apply socket options via the control module (inet or ssl). A map of
%% options is first normalised to a proplist.
setopts(#connection{} = Conn, Opts) when is_map(Opts) ->
    setopts(Conn, maps:to_list(Opts));

setopts(#connection{socket = Sock, control = Ctl}, Opts) ->
    Ctl:setopts(Sock, Opts).
%% -----------------------------------------------------------------------------
%% @doc
%% @see gen_tcp:close/1
%% @see ssl:close/1
%% @end
%% -----------------------------------------------------------------------------
-spec close(connection()) -> ok.

%% Close the underlying socket using the connection's transport module.
close(#connection{socket = Sock, transport = Mod}) ->
    Mod:close(Sock).
%% -----------------------------------------------------------------------------
%% @doc
%% @see gen_tcp:connect/3
%% @see ssl:connect/3
%% @end
%% -----------------------------------------------------------------------------
-spec connect(
    inet:socket_address() | inet:hostname(), inet:port_number(), options()) ->
    {ok, connection()} | {error, inet:posix()}.

%% Convenience wrapper around connect/4 with an infinite timeout.
connect(Address, Port, Options) ->
    connect(Address, Port, Options, infinity).
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
-spec connect(
    inet:socket_address() | inet:hostname(),
    inet:port_number(),
    options(),
    timeout()) ->
    {ok, connection()} | {error, inet:posix()}.

%% Convenience wrapper around connect/5 with empty partisan options
%% (i.e. a non-monotonic channel).
connect(Address, Port, Options, Timeout) ->
    connect(Address, Port, Options, Timeout, #{}).
%% -----------------------------------------------------------------------------
%% @doc
%% @end
%% -----------------------------------------------------------------------------
-spec connect(
    inet:socket_address() | inet:hostname(),
    inet:port_number(),
    options(),
    timeout(),
    map() | list()) -> {ok, connection()} | {error, inet:posix()}.

%% Establish an outbound peer connection, upgrading to TLS when the
%% `tls' configuration flag is set. PartisanOptions may carry the
%% `monotonic' flag consumed by do_connect/7.
connect(Address, Port, Options, Timeout, PartisanOptions)
when is_list(PartisanOptions) ->
    connect(Address, Port, Options, Timeout, maps:from_list(PartisanOptions));

connect(Address, Port, Options0, Timeout, PartisanOptions)
when is_map(PartisanOptions) ->
    BaseOptions = connection_options(Options0),
    %% Select the final option list plus the transport/control modules in
    %% one place so both the TLS and plain-TCP paths share a single
    %% do_connect call.
    {ConnectOptions, Transport, Control} =
        case tls_enabled() of
            true ->
                TLSOptions = partisan_config:get(tls_client_options),
                {BaseOptions ++ TLSOptions, ssl, ssl};
            _ ->
                {BaseOptions, gen_tcp, inet}
        end,
    do_connect(
        Address,
        Port,
        ConnectOptions,
        Timeout,
        Transport,
        Control,
        PartisanOptions
    ).
%% -----------------------------------------------------------------------------
%% @doc Returns the wrapped socket from within the connection.
%% @end
%% -----------------------------------------------------------------------------
-spec socket(connection()) -> gen_tcp:socket() | ssl:sslsocket().

%% Extract the raw transport socket wrapped by the connection.
socket(Conn) ->
    Conn#connection.socket.
%% =============================================================================
%% PRIVATE
%% =============================================================================
%% @private
%% Establish the transport-level connection and, on success, wrap the
%% resulting socket in a #connection{} record. The `monotonic' flag is
%% taken from Opts (defaulting to false).
do_connect(Address, Port, ConnectOpts, Timeout, Transport, Control, Opts) ->
    Monotonic = maps:get(monotonic, Opts, false),
    case Transport:connect(Address, Port, ConnectOpts, Timeout) of
        {ok, Socket} ->
            {ok, #connection{
                socket = Socket,
                transport = Transport,
                control = Control,
                monotonic = Monotonic
            }};
        Error ->
            Error
    end.
%% @private
%% Normalise user-supplied socket options to a proplist and always
%% append {nodelay, true}.
connection_options(Options) when is_map(Options) ->
    connection_options(maps:to_list(Options));
connection_options(Options) when is_list(Options) ->
    lists:append(Options, [{nodelay, true}]).
%% @private
%% Whether TLS is enabled for peer connections (the `tls' config flag).
tls_enabled() ->
    partisan_config:get(tls).
%% @private
%% Current Erlang monotonic time in milliseconds.
monotonic_now() ->
    erlang:monotonic_time(millisecond).
%% @private
%% Transmit Data on Socket using the given transport module
%% (gen_tcp | ssl).
send(Transport, Socket, Data) ->
    %% Transmit the data on the socket.
    Transport:send(Socket, Data).
%% Determine if we should transmit:
%%
%% If there's another message in the queue, we can skip
%% sending this message. However, if the arrival rate of
%% messages is too high, we risk starvation where
%% we may never send. Therefore, we must force a transmission
%% after a given period with no transmissions.
%%
%% @private
%% Decide whether a monotonic channel should transmit now. With an empty
%% message queue we always send. With a backlog we skip the send, except
%% that a transmission is forced once the configured send window has
%% elapsed since the last one, to avoid starvation under sustained load.
monotonic_should_send(MessageQueueLen, _LastTransmissionTime)
        when MessageQueueLen =< 0 ->
    %% No messages in queue; transmit.
    true;
monotonic_should_send(_MessageQueueLen, LastTransmissionTime) ->
    %% Messages in queue; only send if the send window has elapsed.
    Elapsed = abs(monotonic_now() - LastTransmissionTime),
    Elapsed > partisan_config:get(send_window, 1000).
| null | https://raw.githubusercontent.com/lasp-lang/partisan/968f72de16ebe20f18cc8b287497b6dd3789f9ab/src/partisan_peer_socket.erl | erlang | -------------------------------------------------------------------
Version 2.0 (the "License"); you may not use this file
a copy of the License at
-2.0
Unless required by applicable law or agreed to in writing,
KIND, either express or implied. See the License for the
specific language governing permissions and limitations
under the License.
-------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc Wrapper that allows transparent usage of plain TCP or TLS socket
for peer connections.
This module also implements the monotonic channel functionality.
@end
-----------------------------------------------------------------------------
=============================================================================
API
=============================================================================
-----------------------------------------------------------------------------
@doc Wraps a TCP socket with the appropriate information for
transceiving on and controlling the socket later. If TLS/SSL is
enabled, this performs the socket upgrade/negotiation before
returning the wrapped socket.
@end
-----------------------------------------------------------------------------
as per #ssl_accept-1
The listen socket is to be in mode {active, false} before
telling the client that the server is ready to upgrade by
calling this function, else the upgrade succeeds or does not
succeed depending on timing.
restore the expected active once setting
-----------------------------------------------------------------------------
@doc
@see gen_tcp:send/2
@see ssl:send/2
@end
-----------------------------------------------------------------------------
Get the current process message queue length.
Get last transmission time from process dictionary
Test for whether we should send or not.
Update last transmission time on process dictionary
-----------------------------------------------------------------------------
@doc
@see gen_tcp:recv/2
@see ssl:recv/2
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@see gen_tcp:recv/3
@see ssl:recv/3
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@see inet:setopts/2
@see ssl:setopts/2
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@see gen_tcp:close/1
@see ssl:close/1
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@see gen_tcp:connect/3
@see ssl:connect/3
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc
@end
-----------------------------------------------------------------------------
-----------------------------------------------------------------------------
@doc Returns the wrapped socket from within the connection.
@end
-----------------------------------------------------------------------------
=============================================================================
PRIVATE
=============================================================================
Transmit the data on the socket.
Determine if we should transmit:
If there's another message in the queue, we can skip
sending this message. However, if the arrival rate of
messages is too high, we risk starvation where
we may never send. Therefore, we must force a transmission
after a given period with no transmissions.
Messages in queue; conditional send.
No messages in queue; transmit. | Copyright ( c ) 2017 . All Rights Reserved .
This file is provided to you under the Apache License ,
except in compliance with the License . You may obtain
software distributed under the License is distributed on an
" AS IS " BASIS , WITHOUT WARRANTIES OR CONDITIONS OF ANY
-module(partisan_peer_socket).
%% This macro only exists in OTP-21 and above, where ssl_accept/2 is deprecated.
-ifdef(OTP_RELEASE).
-define(ssl_accept(TCPSocket, TLSOpts), ssl:handshake(TCPSocket, TLSOpts)).
-else.
-define(ssl_accept(TCPSocket, TLSOpts), ssl:ssl_accept(TCPSocket, TLSOpts)).
-endif.
%% A wrapped peer connection: the raw socket plus the modules used to
%% drive it (gen_tcp/ssl for data transfer, inet/ssl for option
%% control), and whether the channel sends "monotonically" (newer
%% messages may supersede older, unsent ones; see send/2).
-record(connection, {
    socket :: gen_tcp:socket() | ssl:sslsocket(),
    transport :: gen_tcp | ssl,
    control :: inet | ssl,
    monotonic = false :: boolean()
}).
%% Error reason produced by the underlying transport.
-type reason() :: closed | inet:posix().
%% Socket options, given either as a proplist or as a map.
-type options() :: [gen_tcp:option()] | map().
-type connection() :: #connection{}.
-export([accept/1]).
-export([close/1]).
-export([connect/3]).
-export([connect/4]).
-export([connect/5]).
-export([recv/2]).
-export([recv/3]).
-export([send/2]).
-export([setopts/2]).
-export([socket/1]).
%% @doc Wrap a freshly accepted TCP socket with the information needed
%% for transceiving on and controlling it later. When TLS is enabled the
%% server-side handshake is performed here, before the wrapped socket is
%% returned.
-spec accept(gen_tcp:socket()) -> connection().
accept(TCPSocket) ->
    case tls_enabled() of
        true ->
            tls_accept(TCPSocket);
        _ ->
            #connection{socket = TCPSocket, transport = gen_tcp, control = inet}
    end.

%% Server-side TLS upgrade (as per ?ssl_accept/2). The socket must be
%% passive ({active, false}) while the handshake runs, else the upgrade
%% succeeds or fails depending on timing; the expected {active, once}
%% setting is restored afterwards.
tls_accept(TCPSocket) ->
    TLSOpts = partisan_config:get(tls_server_options),
    inet:setopts(TCPSocket, [{active, false}]),
    {ok, TLSSocket} = ?ssl_accept(TCPSocket, TLSOpts),
    ssl:setopts(TLSSocket, [{active, once}]),
    #connection{socket = TLSSocket, transport = ssl, control = ssl}.
%% @doc Send [Data] on the connection.
%% Plain connections delegate directly to gen_tcp:send/2 / ssl:send/2.
%% Monotonic connections may silently drop [Data] (returning ok): when
%% other messages are queued behind this one, monotonic_should_send/2
%% decides whether to transmit or skip.
-spec send(connection(), iodata()) -> ok | {error, reason()}.
send(#connection{monotonic = false} = Conn, Data) ->
    Socket = Conn#connection.socket,
    Transport = Conn#connection.transport,
    send(Transport, Socket, Data);
send(#connection{monotonic = true} = Conn, Data) ->
    Socket = Conn#connection.socket,
    Transport = Conn#connection.transport,
    %% Current length of this process' message queue.
    {message_queue_len, MQLen} = process_info(self(), message_queue_len),
    %% Last transmission time is kept in the process dictionary
    %% (undefined before the first transmission).
    Time = get(last_transmission_time),
    case monotonic_should_send(MQLen, Time) of
        false ->
            %% Skip: a newer message is queued and the send window has
            %% not yet elapsed.
            ok;
        true ->
            %% Record the transmission time, then actually send.
            put(last_transmission_time, monotonic_now()),
            send(Transport, Socket, Data)
    end.
%% @doc Receive [Length] bytes from the connection, waiting indefinitely.
%% @see gen_tcp:recv/2
%% @see ssl:recv/2
-spec recv(connection(), integer()) -> {ok, iodata()} | {error, reason()}.
recv(Conn, Length) ->
    recv(Conn, Length, infinity).

%% @doc Receive [Length] bytes from the connection, giving up after
%% [Timeout].
%% @see gen_tcp:recv/3
%% @see ssl:recv/3
-spec recv(connection(), integer(), timeout()) ->
    {ok, iodata()} | {error, reason()}.
recv(#connection{} = Conn, Length, Timeout) ->
    Transport = Conn#connection.transport,
    Transport:recv(Conn#connection.socket, Length, Timeout).
%% @doc Set options on the wrapped socket, via inet:setopts/2 or
%% ssl:setopts/2 depending on the transport. Map arguments are first
%% converted to a proplist.
-spec setopts(connection(), options()) -> ok | {error, inet:posix()}.
setopts(#connection{} = Conn, Options) when is_map(Options) ->
    setopts(Conn, maps:to_list(Options));
setopts(#connection{} = Conn, Options) ->
    Control = Conn#connection.control,
    Control:setopts(Conn#connection.socket, Options).
%% @doc Close the underlying socket.
%% @see gen_tcp:close/1
%% @see ssl:close/1
-spec close(connection()) -> ok.
close(#connection{socket = Socket, transport = Transport}) ->
    Transport:close(Socket).
%% @doc Connect to a peer with an infinite timeout and no
%% partisan-specific options.
-spec connect(
    inet:socket_address() | inet:hostname(), inet:port_number(), options()) ->
    {ok, connection()} | {error, inet:posix()}.
connect(Address, Port, Options) ->
    connect(Address, Port, Options, infinity).

%% @doc Connect to a peer with an explicit timeout.
-spec connect(
    inet:socket_address() | inet:hostname(),
    inet:port_number(),
    options(),
    timeout()) ->
    {ok, connection()} | {error, inet:posix()}.
connect(Address, Port, Options, Timeout) ->
    connect(Address, Port, Options, Timeout, #{}).

%% @doc Connect to a peer. TLS is used when enabled in the partisan
%% configuration; [PartisanOptions] (map or proplist) may request a
%% monotonic channel via the `monotonic' key.
-spec connect(
    inet:socket_address() | inet:hostname(),
    inet:port_number(),
    options(),
    timeout(),
    map() | list()) -> {ok, connection()} | {error, inet:posix()}.
connect(Address, Port, Options, Timeout, PartisanOptions)
    when is_list(PartisanOptions) ->
    connect(Address, Port, Options, Timeout, maps:from_list(PartisanOptions));
connect(Address, Port, Options0, Timeout, PartisanOptions)
    when is_map(PartisanOptions) ->
    BaseOpts = connection_options(Options0),
    %% Decide transport/control modules and the final connect options up
    %% front, so the socket is opened in a single place below.
    {ConnectOpts, Transport, Control} =
        case tls_enabled() of
            true ->
                TLSOptions = partisan_config:get(tls_client_options),
                {BaseOpts ++ TLSOptions, ssl, ssl};
            _ ->
                {BaseOpts, gen_tcp, inet}
        end,
    do_connect(
        Address, Port, ConnectOpts, Timeout, Transport, Control,
        PartisanOptions).
%% @doc Return the wrapped socket from within the connection.
-spec socket(connection()) -> gen_tcp:socket() | ssl:sslsocket().
socket(Conn) ->
    Conn#connection.socket.
%% @private
%% Open a socket via Transport:connect/4 and, on success, wrap it in a
%% #connection{} record. A monotonic channel is only created when the
%% `monotonic' key is explicitly set in [Opts].
do_connect(Address, Port, ConnectOpts, Timeout, Transport, Control, Opts) ->
    Monotonic = maps:get(monotonic, Opts, false),
    Wrap = fun(Socket) ->
        #connection{
            socket = Socket,
            transport = Transport,
            control = Control,
            monotonic = Monotonic
        }
    end,
    case Transport:connect(Address, Port, ConnectOpts, Timeout) of
        {ok, Socket} -> {ok, Wrap(Socket)};
        Error -> Error
    end.
%% @private
%% Normalise user-supplied options to a proplist and append
%% {nodelay, true} so small messages are sent without buffering delay
%% (see inet:setopts/2 `nodelay').
connection_options(Options) when is_list(Options) ->
    Options ++ [{nodelay, true}];
connection_options(Options) when is_map(Options) ->
    connection_options(maps:to_list(Options)).
%% @private
%% Whether TLS is enabled in the partisan configuration.
tls_enabled() ->
    partisan_config:get(tls).
%% @private
%% Current Erlang monotonic time, in milliseconds.
monotonic_now() ->
    erlang:monotonic_time(millisecond).
%% @private
%% Transmit the data on the socket via the transport module
%% (gen_tcp:send/2 or ssl:send/2).
send(Transport, Socket, Data) ->
    Transport:send(Socket, Data).
%% @private
%% Decide whether a monotonic channel should transmit now. If other
%% messages are queued behind this one we normally skip it (a newer
%% message will carry fresher state), but to avoid starvation under a
%% sustained high arrival rate we force a transmission once more than
%% the configured send window (default 1000 ms) has elapsed since the
%% last one.
%%
%% Fix: before the first transmission the process dictionary holds no
%% last-transmission time, so LastTransmissionTime is `undefined' and
%% the subtraction below crashed with badarith whenever the queue was
%% non-empty. Treat "never transmitted" as "send now".
monotonic_should_send(_MessageQueueLen, undefined) ->
    %% Nothing has been sent on this channel yet.
    true;
monotonic_should_send(MessageQueueLen, LastTransmissionTime) ->
    case MessageQueueLen > 0 of
        true ->
            %% Messages in queue; only send if the window elapsed.
            NowTime = monotonic_now(),
            Diff = abs(NowTime - LastTransmissionTime),
            SendWindow = partisan_config:get(send_window, 1000),
            Diff > SendWindow;
        false ->
            %% No messages in queue; transmit.
            true
    end.
|
fe24f63988139e0e216e260c3cb9d649aca08fcc21553735600a19d3393f402d | Decentralized-Pictures/T4L3NT | op.mli | (*****************************************************************************)
(* *)
(* Open Source License *)
Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
(* *)
(* Permission is hereby granted, free of charge, to any person obtaining a *)
(* copy of this software and associated documentation files (the "Software"),*)
to deal in the Software without restriction , including without limitation
(* the rights to use, copy, modify, merge, publish, distribute, sublicense, *)
and/or sell copies of the Software , and to permit persons to whom the
(* Software is furnished to do so, subject to the following conditions: *)
(* *)
(* The above copyright notice and this permission notice shall be included *)
(* in all copies or substantial portions of the Software. *)
(* *)
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
(* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, *)
(* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL *)
(* THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER*)
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
(* FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER *)
(* DEALINGS IN THE SOFTWARE. *)
(* *)
(*****************************************************************************)
open Protocol
open Alpha_context
(** Craft an endorsement operation for [endorsed_block]. The optional
    arguments override values otherwise derived from the block/context;
    [signing_context] selects the context used for signing. *)
val endorsement :
  ?delegate:public_key_hash * Slot.t list ->
  ?slot:Slot.t ->
  ?level:Raw_level.t ->
  ?round:Round.t ->
  ?block_payload_hash:Block_payload_hash.t ->
  endorsed_block:Block.t ->
  Context.t ->
  ?signing_context:Context.t ->
  unit ->
  Kind.endorsement Operation.t tzresult Lwt.t

(** Same as {!endorsement}, but crafting a preendorsement operation. *)
val preendorsement :
  ?delegate:public_key_hash * Slot.t list ->
  ?slot:Slot.t ->
  ?level:Raw_level.t ->
  ?round:Round.t ->
  ?block_payload_hash:Block_payload_hash.t ->
  endorsed_block:Block.t ->
  Context.t ->
  ?signing_context:Context.t ->
  unit ->
  Kind.preendorsement Operation.t tzresult Lwt.t

(** Craft an endorsement for [endorsed_block] with a bad signature.
    NOTE(review): behaviour inferred from the name — confirm against the
    implementation. *)
val miss_signed_endorsement :
  ?level:Raw_level.t ->
  endorsed_block:Block.t ->
  Context.t ->
  Kind.endorsement Operation.t tzresult Lwt.t
(** Craft a transaction operation transferring the given [Tez.t] amount
    between the two contracts. Fees, gas/storage limits, parameters and
    entrypoint may be overridden. *)
val transaction :
  ?counter:Z.t ->
  ?fee:Tez.tez ->
  ?gas_limit:Gas.Arith.integral ->
  ?storage_limit:Z.t ->
  ?parameters:Script.lazy_expr ->
  ?entrypoint:string ->
  Context.t ->
  Contract.t ->
  Contract.t ->
  Tez.t ->
  Operation.packed tzresult Lwt.t

(** Craft a delegation operation for the given contract, towards the
    given (optional) delegate key. *)
val delegation :
  ?fee:Tez.tez ->
  Context.t ->
  Contract.t ->
  public_key_hash option ->
  Operation.packed tzresult Lwt.t

(** Craft a set-deposits-limit operation for the given contract, with
    the given (optional) limit. *)
val set_deposits_limit :
  ?fee:Tez.tez ->
  Context.t ->
  Contract.t ->
  Tez.tez option ->
  Operation.packed tzresult Lwt.t

(** Craft a public-key revelation operation. *)
val revelation :
  ?fee:Tez.tez -> Context.t -> public_key -> Operation.packed tzresult Lwt.t

(** Craft a failing_noop operation carrying an arbitrary string. *)
val failing_noop :
  Context.t -> public_key_hash -> string -> Operation.packed tzresult Lwt.t
(** [contract_origination ctxt source] Create a new contract origination
operation, sign it with [source] and returns it alongside the contract
address. The contract address is using the initial origination nonce with the
hash of the operation. If this operation is combine with [combine_operations]
then the contract address is false as the nonce is not based on the correct
operation hash. *)
val contract_origination :
  ?counter:Z.t ->
  ?delegate:public_key_hash ->
  script:Script.t ->
  ?preorigination:Contract.contract option ->
  ?public_key:public_key ->
  ?credit:Tez.tez ->
  ?fee:Tez.tez ->
  ?gas_limit:Gas.Arith.integral ->
  ?storage_limit:Z.t ->
  Context.t ->
  Contract.contract ->
  (Operation.packed * Contract.contract) tzresult Lwt.t

(** Recompute the address of the contract originated by the given packed
    operation (derived from the operation hash and the initial
    origination nonce, as described for {!contract_origination}). *)
val originated_contract : Operation.packed -> Contract.contract
val register_global_constant :
?counter:Z.t ->
?public_key:Signature.public_key ->
?fee:Tez.tez ->
?gas_limit:Alpha_context.Gas.Arith.integral ->
?storage_limit:Z.t ->
Context.t ->
(* Account doing the registration *)
source:Contract.t ->
  (* Micheline value to be registered *)
value:Protocol.Alpha_context.Script.lazy_expr ->
(Protocol.operation, tztrace) result Lwt.t
(** Craft a double-endorsement evidence operation from two conflicting
    endorsements. *)
val double_endorsement :
  Context.t ->
  Kind.endorsement Operation.t ->
  Kind.endorsement Operation.t ->
  Operation.packed

(** Craft a double-preendorsement evidence operation from two
    conflicting preendorsements. *)
val double_preendorsement :
  Context.t ->
  Kind.preendorsement Operation.t ->
  Kind.preendorsement Operation.t ->
  Operation.packed

(** Craft a double-baking evidence operation from two conflicting block
    headers. *)
val double_baking :
  Context.t ->
  Block_header.block_header ->
  Block_header.block_header ->
  Operation.packed

(** Craft an activation operation from a blinded public key hash and its
    activation code. *)
val activation :
  Context.t ->
  Signature.Public_key_hash.t ->
  Blinded_public_key_hash.activation_code ->
  Operation.packed tzresult Lwt.t

(** Batch several operations from the same [source] into one packed
    operation. [spurious_operation], when provided, is added to the
    batch (presumably to build deliberately invalid batches for tests —
    confirm against the implementation). *)
val combine_operations :
  ?public_key:public_key ->
  ?counter:counter ->
  ?spurious_operation:packed_operation ->
  source:Contract.t ->
  Context.t ->
  packed_operation list ->
  packed_operation tzresult Lwt.t
(** Reveals a seed_nonce that was previously committed at a certain level *)
val seed_nonce_revelation :
  Context.t -> Raw_level.t -> Nonce.t -> Operation.packed

(** Propose a list of protocol hashes during the approval voting *)
val proposals :
  Context.t ->
  Contract.t ->
  Protocol_hash.t list ->
  Operation.packed tzresult Lwt.t

(** Cast a vote yay, nay or pass *)
val ballot :
  Context.t ->
  Contract.t ->
  Protocol_hash.t ->
  Vote.ballot ->
  Operation.packed tzresult Lwt.t

(** A placeholder script (and its cost) for tests that need an
    originated contract but do not care about its behaviour.
    NOTE(review): purpose inferred from the names — confirm. *)
val dummy_script : Script.t

val dummy_script_cost : Tez.t
(** [tx_rollup_origination ctxt source] Originate a new tx rollup operation,
    sign it with [source] and returns it alongside the tx rollup address. The
    tx_rollup address is using the initial origination nonce with the hash of the
    operation. If this operation is combined with [combine_operations] then the
    tx rollup address is false as the nonce is not based on the correct operation
    hash. *)
(** Originate a new transaction rollup, signed by the given contract;
    returns the packed operation together with the resulting
    [Tx_rollup.t] address. *)
val tx_rollup_origination :
  ?counter:Z.t ->
  ?fee:Tez.tez ->
  ?gas_limit:Gas.Arith.integral ->
  ?storage_limit:Z.t ->
  Context.t ->
  Contract.t ->
  (Operation.packed * Tx_rollup.t) tzresult Lwt.t
| null | https://raw.githubusercontent.com/Decentralized-Pictures/T4L3NT/6d4d3edb2d73575384282ad5a633518cba3d29e3/src/proto_alpha/lib_protocol/test/helpers/op.mli | ocaml | ***************************************************************************
Open Source License
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and associated documentation files (the "Software"),
the rights to use, copy, modify, merge, publish, distribute, sublicense,
Software is furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
DEALINGS IN THE SOFTWARE.
***************************************************************************
* [contract_origination ctxt source] Create a new contract origination
operation, sign it with [source] and returns it alongside the contract
address. The contract address is using the initial origination nonce with the
hash of the operation. If this operation is combine with [combine_operations]
then the contract address is false as the nonce is not based on the correct
operation hash.
Account doing the registration
* Reveals a seed_nonce that was previously committed at a certain level
* Propose a list of protocol hashes during the approval voting
* Cast a vote yay, nay or pass | Copyright ( c ) 2018 Dynamic Ledger Solutions , Inc. < >
to deal in the Software without restriction , including without limitation
and/or sell copies of the Software , and to permit persons to whom the
THE SOFTWARE IS PROVIDED " AS IS " , WITHOUT WARRANTY OF ANY KIND , EXPRESS OR
LIABILITY , WHETHER IN AN ACTION OF CONTRACT , TORT OR OTHERWISE , ARISING
open Protocol
open Alpha_context
val endorsement :
?delegate:public_key_hash * Slot.t list ->
?slot:Slot.t ->
?level:Raw_level.t ->
?round:Round.t ->
?block_payload_hash:Block_payload_hash.t ->
endorsed_block:Block.t ->
Context.t ->
?signing_context:Context.t ->
unit ->
Kind.endorsement Operation.t tzresult Lwt.t
val preendorsement :
?delegate:public_key_hash * Slot.t list ->
?slot:Slot.t ->
?level:Raw_level.t ->
?round:Round.t ->
?block_payload_hash:Block_payload_hash.t ->
endorsed_block:Block.t ->
Context.t ->
?signing_context:Context.t ->
unit ->
Kind.preendorsement Operation.t tzresult Lwt.t
val miss_signed_endorsement :
?level:Raw_level.t ->
endorsed_block:Block.t ->
Context.t ->
Kind.endorsement Operation.t tzresult Lwt.t
val transaction :
?counter:Z.t ->
?fee:Tez.tez ->
?gas_limit:Gas.Arith.integral ->
?storage_limit:Z.t ->
?parameters:Script.lazy_expr ->
?entrypoint:string ->
Context.t ->
Contract.t ->
Contract.t ->
Tez.t ->
Operation.packed tzresult Lwt.t
val delegation :
?fee:Tez.tez ->
Context.t ->
Contract.t ->
public_key_hash option ->
Operation.packed tzresult Lwt.t
val set_deposits_limit :
?fee:Tez.tez ->
Context.t ->
Contract.t ->
Tez.tez option ->
Operation.packed tzresult Lwt.t
val revelation :
?fee:Tez.tez -> Context.t -> public_key -> Operation.packed tzresult Lwt.t
val failing_noop :
Context.t -> public_key_hash -> string -> Operation.packed tzresult Lwt.t
val contract_origination :
?counter:Z.t ->
?delegate:public_key_hash ->
script:Script.t ->
?preorigination:Contract.contract option ->
?public_key:public_key ->
?credit:Tez.tez ->
?fee:Tez.tez ->
?gas_limit:Gas.Arith.integral ->
?storage_limit:Z.t ->
Context.t ->
Contract.contract ->
(Operation.packed * Contract.contract) tzresult Lwt.t
val originated_contract : Operation.packed -> Contract.contract
val register_global_constant :
?counter:Z.t ->
?public_key:Signature.public_key ->
?fee:Tez.tez ->
?gas_limit:Alpha_context.Gas.Arith.integral ->
?storage_limit:Z.t ->
Context.t ->
source:Contract.t ->
Micheline value to be registered
value:Protocol.Alpha_context.Script.lazy_expr ->
(Protocol.operation, tztrace) result Lwt.t
val double_endorsement :
Context.t ->
Kind.endorsement Operation.t ->
Kind.endorsement Operation.t ->
Operation.packed
val double_preendorsement :
Context.t ->
Kind.preendorsement Operation.t ->
Kind.preendorsement Operation.t ->
Operation.packed
val double_baking :
Context.t ->
Block_header.block_header ->
Block_header.block_header ->
Operation.packed
val activation :
Context.t ->
Signature.Public_key_hash.t ->
Blinded_public_key_hash.activation_code ->
Operation.packed tzresult Lwt.t
val combine_operations :
?public_key:public_key ->
?counter:counter ->
?spurious_operation:packed_operation ->
source:Contract.t ->
Context.t ->
packed_operation list ->
packed_operation tzresult Lwt.t
val seed_nonce_revelation :
Context.t -> Raw_level.t -> Nonce.t -> Operation.packed
val proposals :
Context.t ->
Contract.t ->
Protocol_hash.t list ->
Operation.packed tzresult Lwt.t
val ballot :
Context.t ->
Contract.t ->
Protocol_hash.t ->
Vote.ballot ->
Operation.packed tzresult Lwt.t
val dummy_script : Script.t
val dummy_script_cost : Tez.t
* [ source ] Originate a new tx rollup operation ,
sign it with [ source ] and returns it alongside the tx rollup address . The
tx_rollup address is using the initial origination nonce with the hash of the
operation . If this operation is combined with [ combine_operations ] then the
tx rollup address is false as the nonce is not based on the correct operation
hash .
sign it with [source] and returns it alongside the tx rollup address. The
tx_rollup address is using the initial origination nonce with the hash of the
operation. If this operation is combined with [combine_operations] then the
tx rollup address is false as the nonce is not based on the correct operation
hash. *)
val tx_rollup_origination :
?counter:Z.t ->
?fee:Tez.tez ->
?gas_limit:Gas.Arith.integral ->
?storage_limit:Z.t ->
Context.t ->
Contract.t ->
(Operation.packed * Tx_rollup.t) tzresult Lwt.t
|
45f7f32e4d9646bb0f5198007ceea1d17290f2d9ea3c89bf8b63d232f9150362 | swtwsk/vinci-lang | AST.hs | # LANGUAGE PatternSynonyms #
# LANGUAGE FlexibleInstances #
module Core.AST where
import Control.Monad.Identity (Identity(Identity))
import Data.List (intercalate)
import Core.Ops (BinOp(..), UnOp(..))
import Core.Types
type VarName = String
-- | A named variable together with its type, wrapped in a functor @f@
-- (instantiated with 'Maybe' or 'Identity' elsewhere in this module).
data VarId f = VarId { _varName :: VarName
                     , _varType :: f Type }
-- | Bidirectional pattern for a 'VarId' whose type is definitely known
-- (wrapped in 'Identity').
pattern Var' :: VarName -> Type -> VarId Identity
pattern Var' { varN, varT } = VarId { _varName = varN, _varType = Identity varT }
-- | A top-level binding: either a function definition ('Prog') or a
-- constant bound to an expression.
data Binding f = ProgBinding (Prog f)
               | ConstBinding (VarId f) (Expr f)
               deriving (Eq, Ord)
-- | A function definition: name, formal parameters, and body.
data Prog f = Prog (VarId f) [VarId f] (Expr f)
    deriving (Eq, Ord)
-- | Core expression language, parameterised by the functor @f@ that
-- wraps the types stored in variable occurrences and binders.
data Expr f = Var (VarId f)                    -- ^ variable occurrence
            | Lit Lit                          -- ^ literal constant
            | App (Expr f) (Expr f)            -- ^ function application
            | If (Expr f) (Expr f) (Expr f)    -- ^ conditional
            | Cons String [Expr f]             -- ^ struct construction
            | FieldGet String (Expr f)         -- ^ struct field access
            | TupleCons [Expr f]               -- ^ tuple construction
            | TupleProj Int (Expr f)           -- ^ tuple projection
            | Let (VarId f) (Expr f) (Expr f)  -- ^ local value binding
            | LetFun (Prog f) (Expr f)         -- ^ local function binding
            | BinOp BinOp (Expr f) (Expr f)    -- ^ binary operator
            | UnOp UnOp (Expr f)               -- ^ unary operator
            deriving (Eq, Ord)
-- | Literal constants: floats, booleans and integers.
data Lit = LFloat Double
         | LBool Bool
         | LInt Int
         deriving (Eq, Ord)
-- | Smart constructor for a 'VarId' with a known ('Identity'-wrapped)
-- type.
varId :: VarName -> Type -> VarId Identity
varId name ty = VarId { _varName = name, _varType = Identity ty }
-- | The name under which a 'Prog' is bound.
progId :: Prog f -> VarName
progId (Prog (VarId name _) _ _) = name
-- | Result type after applying @argCount@ arguments to a value of the
-- given type; 'Nothing' when the count is negative or the type has too
-- few arrows.
resType :: Int -> Type -> Maybe Type
resType argCount t
    | argCount == 0 = Just t
    | argCount < 0 = Nothing
    | otherwise = case t of
        TFun _ t2 -> resType (argCount - 1) t2
        _ -> Nothing
-- VARID EQ
-- | Functors whose wrapped values can be compared for equality; meant
-- for comparing the type annotations of two 'VarId's.
class EquableFunctor f where
    eqF :: (Eq a) => f a -> f a -> Bool
instance EquableFunctor Maybe where
    eqF (Just x) (Just y) = x == y
    eqF Nothing Nothing = True
    eqF _ _ = False
instance EquableFunctor Identity where
    eqF (Identity x) (Identity y) = x == y
-- | NOTE: 'VarId' equality (and ordering below) is by name only — the
-- wrapped type is deliberately ignored (see the commented-out 'eqF').
instance (EquableFunctor f) => Eq (VarId f) where
    (VarId name1 _type1) == (VarId name2 _type2) =
        name1 == name2 -- && eqF type1 type2
instance (EquableFunctor f) => Ord (VarId f) where
    (VarId name1 _type1) <= (VarId name2 _type2) = name1 <= name2
-- SHOWS
-- | Functors whose wrapped values can be rendered. 'Nothing' renders as
-- the empty string, so variables without a type print without an
-- annotation (see the 'Show' instance for 'VarId' below).
class ShowableFunctor f where
    showF :: (Show a) => f a -> String
instance ShowableFunctor Maybe where
    showF (Just x) = show x
    showF Nothing = ""
instance ShowableFunctor Identity where
    showF (Identity x) = show x
-- | Renders @name@ when no type is available, @(name : type)@ otherwise.
instance (ShowableFunctor f) => Show (VarId f) where
    show (VarId vName vType) =
        let sType = showF vType in
        if null sType then vName else "(" ++ vName ++ " : " ++ sType ++ ")"
instance (ShowableFunctor f) => Show (Binding f) where
    show (ProgBinding prog) = show prog
    show (ConstBinding var c) = show var ++ " = " ++ show c
-- | Rendered as @fn name arg1 arg2 ... = body@.
instance (ShowableFunctor f) => Show (Prog f) where
    show (Prog progName args expr) =
        "fn " ++ show progName ++ " " ++
        unwords (show <$> args) ++ " = " ++ show expr
-- | Pretty-printer for expressions. The 'App' clauses special-case bare
-- variables and literals so simple applications print without redundant
-- parentheses.
instance (ShowableFunctor f) => Show (Expr f) where
    show (Var n) = show n
    show (Lit l) = show l
    show (App v1@(Var _) v2@(Var _)) = show v1 ++ " " ++ show v2
    show (App v1@(Var _) l2@(Lit _)) = show v1 ++ " " ++ show l2
    show (App v1@(Var _) e2) = show v1 ++ " (" ++ show e2 ++ ")"
    show (App e1 v2@(Var _)) = "(" ++ show e1 ++ ") " ++ show v2
    show (App e1 l2@(Lit _)) = "(" ++ show e1 ++ ") " ++ show l2
    show (App e1 e2) = "(" ++ show e1 ++ ")(" ++ show e2 ++ ")"
    show (If cond e1 e2) = "if " ++ show cond ++ " then " ++ show e1 ++
        " else " ++ show e2
    show (Cons structName exprs) =
        structName ++ " { " ++ intercalate ", " (show <$> exprs) ++ " }"
    show (FieldGet field expr) = "(" ++ show expr ++ ")." ++ field
    show (TupleCons exprs) = "(" ++ intercalate ", " (show <$> exprs) ++ ")"
    -- Tuple projection uses the mathematical π notation.
    show (TupleProj i e) = "π" ++ show i ++ " " ++ show e
    show (Let n e1 e2) = "let " ++ show n ++ " = " ++ show e1 ++ " in " ++ show e2
    show (LetFun prog e2) =
        "let " ++ show prog ++ " in " ++ show e2
    show (BinOp op e1 e2) = show e1 ++ " " ++ show op ++ " " ++ show e2
    show (UnOp op e) = show op ++ " " ++ show e
-- | Literals render as their underlying value.
instance Show Lit where
    show (LFloat f) = show f
    show (LBool b) = show b
    show (LInt i) = show i
| null | https://raw.githubusercontent.com/swtwsk/vinci-lang/9c7e01953e0b1cf135af7188e0c71fe6195bdfa1/src/Core/AST.hs | haskell | && eqF type1 type2
SHOWS | # LANGUAGE PatternSynonyms #
# LANGUAGE FlexibleInstances #
module Core.AST where
import Control.Monad.Identity (Identity(Identity))
import Data.List (intercalate)
import Core.Ops (BinOp(..), UnOp(..))
import Core.Types
type VarName = String
data VarId f = VarId { _varName :: VarName
, _varType :: f Type }
pattern Var' :: VarName -> Type -> VarId Identity
pattern Var' { varN, varT } = VarId { _varName = varN, _varType = Identity varT }
data Binding f = ProgBinding (Prog f)
| ConstBinding (VarId f) (Expr f)
deriving (Eq, Ord)
data Prog f = Prog (VarId f) [VarId f] (Expr f)
deriving (Eq, Ord)
data Expr f = Var (VarId f)
| Lit Lit
| App (Expr f) (Expr f)
| If (Expr f) (Expr f) (Expr f)
| Cons String [Expr f]
| FieldGet String (Expr f)
| TupleCons [Expr f]
| TupleProj Int (Expr f)
| Let (VarId f) (Expr f) (Expr f)
| LetFun (Prog f) (Expr f)
| BinOp BinOp (Expr f) (Expr f)
| UnOp UnOp (Expr f)
deriving (Eq, Ord)
data Lit = LFloat Double
| LBool Bool
| LInt Int
deriving (Eq, Ord)
varId :: VarName -> Type -> VarId Identity
varId n t = VarId n (Identity t)
progId :: Prog f -> VarName
progId (Prog fId _ _) = _varName fId
resType :: Int -> Type -> Maybe Type
resType argCount t@(TFun _ t2)
| argCount == 0 = pure t
| argCount > 0 = resType (argCount - 1) t2
| otherwise = Nothing
resType argCount t
| argCount == 0 = pure t
| otherwise = Nothing
VARID EQ
class EquableFunctor f where
eqF :: (Eq a) => f a -> f a -> Bool
instance EquableFunctor Maybe where
eqF (Just x) (Just y) = x == y
eqF Nothing Nothing = True
eqF _ _ = False
instance EquableFunctor Identity where
eqF (Identity x) (Identity y) = x == y
instance (EquableFunctor f) => Eq (VarId f) where
(VarId name1 _type1) == (VarId name2 _type2) =
instance (EquableFunctor f) => Ord (VarId f) where
(VarId name1 _type1) <= (VarId name2 _type2) = name1 <= name2
class ShowableFunctor f where
showF :: (Show a) => f a -> String
instance ShowableFunctor Maybe where
showF (Just x) = show x
showF Nothing = ""
instance ShowableFunctor Identity where
showF (Identity x) = show x
-- | Render a variable as @name@ when no type annotation is available,
-- otherwise as @(name : type)@.
instance (ShowableFunctor f) => Show (VarId f) where
  show (VarId vName vType)
    | null sType = vName
    | otherwise  = "(" ++ vName ++ " : " ++ sType ++ ")"
    where
      sType = showF vType
-- | Function bindings print as the function itself; constants print
-- as @name = value@.
instance (ShowableFunctor f) => Show (Binding f) where
  show (ProgBinding prog) = show prog
  show (ConstBinding var c) = show var ++ " = " ++ show c
-- | Render as @fn name arg1 .. argN = body@.
instance (ShowableFunctor f) => Show (Prog f) where
  show (Prog fnName fnArgs fnBody) =
    "fn " ++ show fnName ++ " " ++ unwords (map show fnArgs)
          ++ " = " ++ show fnBody
-- | Pretty-printing for expressions.  The 'App' equations are ordered
-- from most specific to least specific so that atomic operands
-- (variables and literals) print without parentheses; clause order
-- matters here.
instance (ShowableFunctor f) => Show (Expr f) where
  show (Var n) = show n
  show (Lit l) = show l
  -- Application: parenthesize only non-atomic sub-expressions.
  show (App v1@(Var _) v2@(Var _)) = show v1 ++ " " ++ show v2
  show (App v1@(Var _) l2@(Lit _)) = show v1 ++ " " ++ show l2
  show (App v1@(Var _) e2) = show v1 ++ " (" ++ show e2 ++ ")"
  show (App e1 v2@(Var _)) = "(" ++ show e1 ++ ") " ++ show v2
  show (App e1 l2@(Lit _)) = "(" ++ show e1 ++ ") " ++ show l2
  show (App e1 e2) = "(" ++ show e1 ++ ")(" ++ show e2 ++ ")"
  show (If cond e1 e2) = "if " ++ show cond ++ " then " ++ show e1 ++
                         " else " ++ show e2
  show (Cons structName exprs) =
    structName ++ " { " ++ intercalate ", " (show <$> exprs) ++ " }"
  show (FieldGet field expr) = "(" ++ show expr ++ ")." ++ field
  show (TupleCons exprs) = "(" ++ intercalate ", " (show <$> exprs) ++ ")"
  -- Tuple projection rendered with the conventional pi symbol.
  show (TupleProj i e) = "π" ++ show i ++ " " ++ show e
  show (Let n e1 e2) = "let " ++ show n ++ " = " ++ show e1 ++ " in " ++ show e2
  show (LetFun prog e2) =
    "let " ++ show prog ++ " in " ++ show e2
  show (BinOp op e1 e2) = show e1 ++ " " ++ show op ++ " " ++ show e2
  show (UnOp op e) = show op ++ " " ++ show e
-- | Literals render via the underlying value's 'Show' instance.
instance Show Lit where
  show (LFloat f) = show f
  show (LBool b)  = show b
  show (LInt i)   = show i
|
b01c5fc44554073e481966a0c0ddac0591d72256fe891652152a481f858db6a2 | donut-party/system | multiple_http_servers.clj | (ns donut.examples.multiple-http-servers
(:require
[donut.system :as ds]
[ring.adapter.jetty :as rj]))
;; Reusable donut.system component wrapping an embedded Jetty server.
;; :start builds the server from the locally-referenced handler/options;
;; :stop calls Jetty's .stop on the running instance.
;; :join? false keeps run-jetty from blocking the starting thread.
(def HTTPServer
  #::ds{:start (fn [{:keys [::ds/config]}]
                 (let [{:keys [handler options]} config]
                   (rj/run-jetty handler options)))
        :stop (fn [{:keys [::ds/instance]}]
                (.stop instance))
        :config {:handler (ds/local-ref [:handler])
                 :options {:port (ds/local-ref [:port])
                           :join? false}}})
;; Example system running two independent Jetty servers; each component
;; group supplies its own handler and port to the shared HTTPServer
;; component via local refs.
;; Fix: the response header name was "ContentType", which is not a valid
;; HTTP header; it must be "Content-Type".
(def system
  {::ds/defs
   {:http-1 {:server HTTPServer
             :handler (fn [_req]
                        {:status 200
                         :headers {"Content-Type" "text/html"}
                         :body "http server 1"})
             :port 8080}
    :http-2 {:server HTTPServer
             :handler (fn [_req]
                        {:status 200
                         :headers {"Content-Type" "text/html"}
                         :body "http server 2"})
             :port 9090}}})
| null | https://raw.githubusercontent.com/donut-party/system/ce1ca6b7f8342e478d0a436014cbb4fd140908fe/dev/donut/examples/multiple_http_servers.clj | clojure | (ns donut.examples.multiple-http-servers
(:require
[donut.system :as ds]
[ring.adapter.jetty :as rj]))
;; Reusable donut.system component wrapping an embedded Jetty server.
;; :start builds the server from the locally-referenced handler/options;
;; :stop calls Jetty's .stop on the running instance.
;; :join? false keeps run-jetty from blocking the starting thread.
(def HTTPServer
  #::ds{:start (fn [{:keys [::ds/config]}]
                 (let [{:keys [handler options]} config]
                   (rj/run-jetty handler options)))
        :stop (fn [{:keys [::ds/instance]}]
                (.stop instance))
        :config {:handler (ds/local-ref [:handler])
                 :options {:port (ds/local-ref [:port])
                           :join? false}}})
;; Example system running two independent Jetty servers; each component
;; group supplies its own handler and port to the shared HTTPServer
;; component via local refs.
;; Fix: the response header name was "ContentType", which is not a valid
;; HTTP header; it must be "Content-Type".
(def system
  {::ds/defs
   {:http-1 {:server HTTPServer
             :handler (fn [_req]
                        {:status 200
                         :headers {"Content-Type" "text/html"}
                         :body "http server 1"})
             :port 8080}
    :http-2 {:server HTTPServer
             :handler (fn [_req]
                        {:status 200
                         :headers {"Content-Type" "text/html"}
                         :body "http server 2"})
             :port 9090}}})
| |
88dbd0db429c8e74a5e54654269e11d4dede022120ddbe7e0aef69dc2cc4f977 | logicmoo/wam_common_lisp | translate.lsp | -*- Mode : Lisp ; Package : XLIB ; Syntax : COMMON - LISP ; ; Lowercase : YES -*-
( c ) Copyright Enhancements by , 1994 .
;;;
TEXAS INSTRUMENTS INCORPORATED
;;; P.O. BOX 2909
AUSTIN , TEXAS 78769
;;;
Copyright ( C ) 1987 Texas Instruments Incorporated .
;;;
;;; Permission is granted to any individual or institution to use, copy, modify,
;;; and distribute this software, provided that this complete copyright and
;;; permission notice is maintained, intact, in all copies and supporting
;;; documentation.
;;;
Texas Instruments Incorporated provides this software " as is " without
;;; express or implied warranty.
;;;
(in-package :xlib)
(defvar *keysym-sets* nil) ;; Alist of (name first-keysym last-keysym)
(defun define-keysym-set (set first-keysym last-keysym)
  ;; Define all keysyms from first-keysym up to and including
  ;; last-keysym to be in SET (returned from the keysym-set function).
  ;; Signals an error if the keysym range overlaps an existing set.
  ;; Any previous registration of SET itself is replaced.
  (declare (type keyword set)
	   (type keysym first-keysym last-keysym))
  ;; Normalize so first-keysym <= last-keysym.
  (when (> first-keysym last-keysym)
    (rotatef first-keysym last-keysym))
  (setq *keysym-sets* (delete set *keysym-sets* :key #'car))
  (dolist (set *keysym-sets*)
    (let ((first (second set))
	  (last (third set)))
      (when (or (<= first first-keysym last)
		(<= first last-keysym last))
	(error "Keysym range overlaps existing set ~s" set))))
  (push (list set first-keysym last-keysym) *keysym-sets*)
  set)
(defun keysym-set (keysym)
  ;; Return the character code set name of keysym (the keyword
  ;; registered via define-keysym-set), or NIL when the keysym falls
  ;; in no registered range.
  (declare (type keysym keysym)
	   (values keyword))
  (dolist (set *keysym-sets*)
    (let ((first (second set))
	  (last (third set)))
      (when (<= first keysym last)
	(return (first set))))))
(eval-when (compile eval load) ;; Required for Symbolics...

(defmacro keysym (keysym &rest bytes)
  ;; Build a keysym.
  ;; If KEYSYM is an integer, it is used as the most significant bits of
  ;; the keysym, and BYTES are used to specify low order bytes. The last
  ;; parameter is always byte4 of the keysym.  If KEYSYM is not an
  ;; integer, the keysym associated with KEYSYM is returned.
  ;;
  ;; This is a macro and not a function macro to promote compile-time
  ;; lookup. All arguments are evaluated.
  (declare (type t keysym)
	   (type list bytes)
	   (values keysym))
  (typecase keysym
    ((integer 0 *)
     (dolist (b bytes keysym) (setq keysym (+ (ash keysym 8) b))))
    (otherwise
     (or (car (character->keysyms keysym))
	 (error "~s Isn't the name of a keysym" keysym)))))

) ;; end eval-when
;; Maps keysym -> list of keysym-mappings (see accessor macros below);
;; holds the global (display-independent) keysym translations.
;; NOTE(review): (keysym->character-map-test) is defined elsewhere in
;; this system; presumably it selects an implementation-appropriate
;; hash-table test -- confirm.
(defvar *keysym->character-map*
	(make-hash-table :test (keysym->character-map-test) :size 400))
;; Keysym-mappings are a list of the form (object translate lowercase modifiers mask)
;; With the following accessor macros. Everything after OBJECT is optional.
;; The accessors are macros so they expand to direct list accesses.

(defmacro keysym-mapping-object (keysym-mapping)
  ;; Parameter to translate
  `(first ,keysym-mapping))

(defmacro keysym-mapping-translate (keysym-mapping)
  ;; Function to be called with parameters (display state OBJECT)
  ;; when translating KEYSYM and modifiers and mask are satisfied.
  `(second ,keysym-mapping))
(defmacro keysym-mapping-lowercase (keysym-mapping)
  ;; LOWERCASE is used for uppercase alphabetic keysyms.  The value
;; is the associated lowercase keysym.
`(third ,keysym-mapping))
(defmacro keysym-mapping-modifiers (keysym-mapping)
  ;; MODIFIERS is either a modifier-mask or list containing intermixed
  ;; keysyms and state-mask-keys specifying when to use this
  ;; keysym-translation.
  `(fourth ,keysym-mapping))

(defmacro keysym-mapping-mask (keysym-mapping)
  ;; MASK is either a modifier-mask or list containing intermixed
  ;; keysyms and state-mask-keys specifying which modifiers to look at
  ;; (i.e. modifiers not specified are don't-cares)
  `(fifth ,keysym-mapping))
;; By default every modifier bit except :lock is significant during
;; keysym translation.
(defvar *default-keysym-translate-mask*
	(the (or (member :modifiers) mask16 list) ; (list (or keysym state-mask-key))
	     (logand #xff (lognot (make-state-mask :lock))))
  "Default keysym state mask to use during keysym-translation.")
(defun define-keysym (object keysym &key lowercase translate modifiers mask display)
;; Define the translation from keysym/modifiers to a (usually
  ;; character) object.  Any previous keysym definition with
;; KEYSYM and MODIFIERS is deleted before adding the new definition.
;;
;; MODIFIERS is either a modifier-mask or list containing intermixed
;; keysyms and state-mask-keys specifying when to use this
  ;; keysym-translation.  The default is NIL.
;;
;; MASK is either a modifier-mask or list containing intermixed
;; keysyms and state-mask-keys specifying which modifiers to look at
;; (i.e. modifiers not specified are don't-cares).
;; If mask is :MODIFIERS then the mask is the same as the modifiers
;; (i.e. modifiers not specified by modifiers are don't cares)
;; The default mask is *default-keysym-translate-mask*
;;
;; If DISPLAY is specified, the translation will be local to DISPLAY,
;; otherwise it will be the default translation for all displays.
;;
  ;; LOWERCASE is used for uppercase alphabetic keysyms.  The value
;; is the associated lowercase keysym. This information is used
;; by the keysym-both-case-p predicate (for caps-lock computations)
;; and by the keysym-downcase function.
;;
;; TRANSLATE will be called with parameters (display state OBJECT)
;; when translating KEYSYM and modifiers and mask are satisfied.
  ;; [e.g. (zerop (logand (logand state (or mask *default-keysym-translate-mask*))
;; (or modifiers 0)))
;; when mask and modifiers aren't lists of keysyms]
;; The default is #'default-keysym-translate
;;
(declare (type (or base-char t) object)
(type keysym keysym)
(type (or null mask16 list) ;; (list (or keysym state-mask-key))
modifiers)
(type (or null (member :modifiers) mask16 list) ;; (list (or keysym state-mask-key))
mask)
(type (or null display) display)
(type (or null keysym) lowercase)
(type (function (display card16 t) t) translate))
(flet ((merge-keysym-mappings (new old)
;; Merge new keysym-mapping with list of old mappings.
	 ;; Ensure that the mapping with no modifiers or mask comes first.
(let* ((key (keysym-mapping-modifiers new))
(merge (delete key old :key #'cadddr :test #'equal)))
(if key
(nconc merge (list new))
(cons new merge))))
(mask-check (mask)
(unless (or (numberp mask)
(dolist (element mask t)
(unless (or (find element *state-mask-vector*)
(gethash element *keysym->character-map*))
(return nil))))
(x-type-error mask '(or mask16 (list (or modifier-key modifier-keysym)))))))
(let ((entry
;; Create with a single LIST call, to ensure cdr-coding
(cond
(mask
(unless (eq mask :modifiers)
(mask-check mask))
(when (or (null modifiers) (and (numberp modifiers) (zerop modifiers)))
(error "Mask with no modifiers"))
(list object translate lowercase modifiers mask))
(modifiers (mask-check modifiers)
(list object translate lowercase modifiers))
(lowercase (list object translate lowercase))
(translate (list object translate))
(t (list object)))))
(if display
(let ((previous (assoc keysym (display-keysym-translation display))))
(if previous
(setf (cdr previous) (merge-keysym-mappings entry (cdr previous)))
(push (list keysym entry) (display-keysym-translation display))))
(setf (gethash keysym *keysym->character-map*)
(merge-keysym-mappings entry (gethash keysym *keysym->character-map*)))))
object))
(defun undefine-keysym (object keysym &key display modifiers &allow-other-keys)
  ;; Undefine the keysym-translation translating KEYSYM to OBJECT with MODIFIERS.
;; If DISPLAY is non-nil, undefine the translation for DISPLAY if it exists.
(declare (type (or base-char t) object)
(type keysym keysym)
(type (or null mask16 list) ;; (list (or keysym state-mask-key))
modifiers)
(type (or null display) display))
(flet ((match (key entry)
(let ((object (car key))
(modifiers (cdr key)))
(or (eql object (keysym-mapping-object entry))
(equal modifiers (keysym-mapping-modifiers entry))))))
(let* (entry
(previous (if display
(cdr (setq entry (assoc keysym (display-keysym-translation display))))
(gethash keysym *keysym->character-map*)))
(key (cons object modifiers)))
(when (and previous (find key previous :test #'match))
(setq previous (delete key previous :test #'match))
(if display
(setf (cdr entry) previous)
(setf (gethash keysym *keysym->character-map*) previous))))))
(defun keysym-downcase (keysym)
  ;; If keysym has a lower-case equivalent, return it, otherwise return keysym.
  ;; The lowercase equivalent is the LOWERCASE entry of the keysym's
  ;; first registered mapping (see define-keysym).
  (declare (type keysym keysym))
  (declare (values keysym))
  (let ((translations (gethash keysym *keysym->character-map*)))
    (or (and translations (keysym-mapping-lowercase (first translations))) keysym)))
(defun keysym-uppercase-alphabetic-p (keysym)
;; Returns T if keysym is uppercase-alphabetic.
  ;; I.e. if it has a lowercase equivalent.
(declare (type keysym keysym))
(declare (values (or null keysym)))
(let ((translations (gethash keysym *keysym->character-map*)))
(and translations
(keysym-mapping-lowercase (first translations)))))
(defun character->keysyms (character &optional display)
;; Given a character, return a list of all matching keysyms.
;; If DISPLAY is given, translations specific to DISPLAY are used,
;; otherwise only global translations are used.
;; Implementation dependent function.
  ;; May be slow [i.e. do a linear search over all known keysyms]
(declare (type t character)
(type (or null display) display)
(values (list keysym)))
(let ((result nil))
(when display
(dolist (mapping (display-keysym-translation display))
(when (eql character (second mapping))
(push (first mapping) result))))
(maphash #'(lambda (keysym mappings)
(dolist (mapping mappings)
(when (eql (keysym-mapping-object mapping) character)
(pushnew keysym result))))
*keysym->character-map*)
result))
;; Standard X11 modifier-key keysyms (byte1 = 255, i.e. the #xFFxx
;; "keyboard" keysym page), made available at compile time.
(eval-when (compile eval load) ;; Required for Symbolics...

(defconstant character-set-switch-keysym (keysym 255 126))
(defconstant left-shift-keysym (keysym 255 225))
(defconstant right-shift-keysym (keysym 255 226))
(defconstant left-control-keysym (keysym 255 227))
(defconstant right-control-keysym (keysym 255 228))
(defconstant caps-lock-keysym (keysym 255 229))
(defconstant shift-lock-keysym (keysym 255 230))
(defconstant left-meta-keysym (keysym 255 231))
(defconstant right-meta-keysym (keysym 255 232))
(defconstant left-alt-keysym (keysym 255 233))
(defconstant right-alt-keysym (keysym 255 234))
(defconstant left-super-keysym (keysym 255 235))
(defconstant right-super-keysym (keysym 255 236))
(defconstant left-hyper-keysym (keysym 255 237))
(defconstant right-hyper-keysym (keysym 255 238))

) ;; end eval-when
;;-----------------------------------------------------------------------------
;;; Keysym mapping functions
(defun display-keyboard-mapping (display)
  ;; Return DISPLAY's keycode->keysym table, fetching it from the
  ;; server and caching it on the display object on first use.
  (declare (type display display))
  (declare (values (simple-array keysym (display-max-keycode keysyms-per-keycode))))
  (or (display-keysym-mapping display)
      (setf (display-keysym-mapping display) (keyboard-mapping display))))
(defun keycode->keysym (display keycode keysym-index)
(declare (type display display)
(type card8 keycode)
(type card8 keysym-index)
(values keysym))
(let* ((mapping (display-keyboard-mapping display))
(keysym (aref mapping keycode keysym-index)))
(declare (type (simple-array keysym (* *)) mapping)
(type keysym keysym))
;; The keysym-mapping is brain dammaged.
;; Mappings for both-case alphabetic characters have the
    ;; entry for keysym-index zero set to the uppercase keysym
;; (this is normally where the lowercase keysym goes), and the
    ;; entry for keysym-index one is zero.
(cond ((zerop keysym-index) ; Lowercase alphabetic keysyms
(keysym-downcase keysym))
((and (zerop keysym) (plusp keysym-index)) ; Get the uppercase keysym
(aref mapping keycode 0))
(t keysym))))
(defun keysym->character (display keysym &optional (state 0))
  ;; Find the character associated with a keysym.
  ;; STATE can be used to set character attributes.
  ;; Implementation dependent function.
  ;; Display-local translations take precedence over the global ones in
  ;; *keysym->character-map*; within each list the first mapping whose
  ;; modifiers/mask match STATE wins.
  (declare (type display display)
	   (type keysym keysym)
	   (type card16 state))
  (declare (values (or null character)))
  (let* ((display-mappings (cdr (assoc keysym (display-keysym-translation display))))
	 (mapping (or ;; Find the matching display mapping
		    (dolist (mapping display-mappings)
		      (when (mapping-matches-p display state mapping)
			(return mapping)))
		    ;; Find the matching static mapping
		    (dolist (mapping (gethash keysym *keysym->character-map*))
		      (when (mapping-matches-p display state mapping)
			(return mapping))))))
    (when mapping
      (funcall (or (keysym-mapping-translate mapping) 'default-keysym-translate)
	       display state (keysym-mapping-object mapping)))))
(defun mapping-matches-p (display state mapping)
  ;; Returns T when the modifiers and mask in MAPPING satisfies STATE for DISPLAY
  ;; Fix: the extracted source had lost the "(declare (type list
  ;; display-mapping)" opener inside the flet, leaving only its trailing
  ;; comment text and unbalancing the form; the declaration is restored.
  (declare (type display display)
	   (type mask16 state)
	   (type list mapping))
  (declare (values boolean))
  (flet
    ((modifiers->mask (display-mapping modifiers errorp &aux (mask 0))
       ;; Convert MODIFIERS, which is a modifier mask, or a list of state-mask-keys into a mask.
       ;; If ERRORP is non-nil, return NIL when an unknown modifier is specified,
       ;; otherwise ignore unknown modifiers.
       (declare (type list display-mapping) ; Alist of (keysym . mask)
		(type (or mask16 list) modifiers)
		(type mask16 mask))
       (declare (values (or null mask16)))
       (if (numberp modifiers)
	   modifiers
	 (dolist (modifier modifiers mask)
	   (declare (type symbol modifier))
	   (let ((bit (position modifier (the simple-vector *state-mask-vector*) :test #'eq)))
	     (setq mask
		   (logior mask
			   (if bit
			       (ash 1 bit)
			     (or (cdr (assoc modifier display-mapping))
				 ;; bad modifier
				 (if errorp
				     (return-from modifiers->mask nil)
				   0))))))))))
    (let* ((display-mapping (get-display-modifier-mapping display))
	   (mapping-modifiers (keysym-mapping-modifiers mapping))
	   (modifiers (or (modifiers->mask display-mapping (or mapping-modifiers 0) t)
			  (return-from mapping-matches-p nil)))
	   (mapping-mask (or (keysym-mapping-mask mapping) ; If no mask, use the default.
			     (if mapping-modifiers ; If no modifiers, match anything.
				 *default-keysym-translate-mask*
			       0)))
	   (mask (if (eq mapping-mask :modifiers)
		     modifiers
		   (modifiers->mask display-mapping mapping-mask nil))))
      (declare (type mask16 modifiers mask))
      (= #-ecl (logand state mask)
	 #+ecl (the mask16 (logand state mask)) modifiers))))
(defun default-keysym-index (display keycode state)
;; Returns a keysym-index for use with keycode->character
(declare (values card8))
(macrolet ((keystate-p (state keyword)
`(the boolean
(logbitp ,(position keyword *state-mask-vector*)
,state))))
(let* ((mapping (display-keyboard-mapping display))
(keysyms-per-keycode (array-dimension mapping 1))
(symbolp (and (> keysyms-per-keycode 2)
(state-keysymp display state character-set-switch-keysym)))
(result (if symbolp 2 0)))
(declare (type (simple-array keysym (* *)) mapping)
(type boolean symbolp)
(type card8 keysyms-per-keycode result))
(when (and (< result keysyms-per-keycode)
(keysym-shift-p display state (keysym-uppercase-alphabetic-p
(aref mapping keycode 0))))
(incf result))
result)))
(defun keysym-shift-p (display state uppercase-alphabetic-p &key
shift-lock-xors
(control-modifiers
'#.(list left-meta-keysym left-super-keysym left-hyper-keysym)))
(declare (type display display)
(type card16 state)
(type boolean uppercase-alphabetic-p)
(type boolean shift-lock-xors));;; If T, both SHIFT-LOCK and SHIFT is the same
;;; as neither if the character is alphabetic.
(declare (values boolean))
(macrolet ((keystate-p (state keyword)
`(the boolean
(logbitp ,(position keyword *state-mask-vector*)
,state))))
(let* ((controlp (or (keystate-p state :control)
(dolist (modifier control-modifiers)
(when (state-keysymp display state modifier)
(return t)))))
(shiftp (keystate-p state :shift))
(lockp (keystate-p state :lock))
(alphap (or uppercase-alphabetic-p
(not (state-keysymp display #.(make-state-mask :lock)
caps-lock-keysym)))))
(declare (type boolean controlp shiftp lockp alphap))
;; Control keys aren't affected by lock
(unless controlp
;; Not a control character - check state of lock modifier
(when (and lockp
alphap
(or (not shiftp) shift-lock-xors)) ; Lock doesn't unshift unless shift-lock-xors
(setq shiftp (not shiftp))))
shiftp)))
;;; default-keysym-index implements the following tables:
;;;
;;; control shift caps-lock character  character
;;;    0      0       0       #\a         #\8
;;;    0      0       1       #\A         #\8
;;;    0      1       0       #\A         #\*
;;;    0      1       1       #\A         #\*
;;;    1      0       0       #\control-A #\control-8
;;;    1      0       1       #\control-A #\control-8
;;;    1      1       0       #\control-shift-a #\control-*
;;;    1      1       1       #\control-shift-a #\control-*
;;;
;;; control shift shift-lock character  character
;;;    0      0       0       #\a         #\8
;;;    0      0       1       #\A         #\*
;;;    0      1       0       #\A         #\*
;;;    0      1       1       #\A         #\8
;;;    1      0       0       #\control-A #\control-8
;;;    1      0       1       #\control-A #\control-*
;;;    1      1       0       #\control-shift-a #\control-*
;;;    1      1       1       #\control-shift-a #\control-8
(defun keycode->character (display keycode state &key keysym-index
(keysym-index-function #'default-keysym-index))
;; keysym-index defaults to the result of keysym-index-function which
;; is called with the following parameters:
  ;; (char0 state caps-lock-p)
  ;; where char0 is the "character" object associated with keysym-index 0 and
;; caps-lock-p is non-nil when the keysym associated with the lock
;; modifier is for caps-lock.
;; STATE can also used for setting character attributes.
;; Implementation dependent function.
(declare (type display display)
(type card8 keycode)
(type card16 state)
(type (or null card8) keysym-index)
(type (or null (function (base-char card16 boolean card8) card8))
keysym-index-function))
(declare (values (or null character)))
(let* ((index (or keysym-index
(funcall keysym-index-function display keycode state)))
(keysym (if index (keycode->keysym display keycode index) 0)))
(declare (type (or null card8) index)
(type keysym keysym))
(when (plusp keysym)
(keysym->character display keysym state))))
(defun get-display-modifier-mapping (display)
  ;; Return (caching it on DISPLAY) an alist of (keysym . state-mask)
  ;; covering every modifier key reported by the server.
  (labels ((keysym-replace (display modifiers mask &aux result)
	     ;; Pair the first keysym of each modifier keycode with MASK.
	     (dolist (modifier modifiers result)
	       (push (cons (keycode->keysym display modifier 0) mask) result))))
    (or (display-modifier-mapping display)
	(multiple-value-bind (shift lock control mod1 mod2 mod3 mod4 mod5)
	    (modifier-mapping display)
	  (setf (display-modifier-mapping display)
		(nconc (keysym-replace display shift #.(make-state-mask :shift))
		       (keysym-replace display lock #.(make-state-mask :lock))
		       (keysym-replace display control #.(make-state-mask :control))
		       (keysym-replace display mod1 #.(make-state-mask :mod-1))
		       (keysym-replace display mod2 #.(make-state-mask :mod-2))
		       (keysym-replace display mod3 #.(make-state-mask :mod-3))
		       (keysym-replace display mod4 #.(make-state-mask :mod-4))
		       (keysym-replace display mod5 #.(make-state-mask :mod-5))))))))
(defun state-keysymp (display state keysym)
  ;; Returns T when a modifier key associated with KEYSYM is on in STATE
  (declare (type display display)
	   (type card16 state)
	   (type keysym keysym))
  (declare (values boolean))
  (let* ((mapping (get-display-modifier-mapping display))
	 (mask (assoc keysym mapping))) ; entry is (keysym . modifier-bit-mask)
    (and mask (plusp #-ecl (logand state (cdr mask))
		     #+ecl (the card16 (logand state (cdr mask)))))))
(defun mapping-notify (display request start count)
;; Called on a mapping-notify event to update
;; the keyboard-mapping cache in DISPLAY
(declare (type display display)
(type (member :modifier :keyboard :pointer) request)
(type card8 start count)
(ignore count start))
  ;; Invalidate the keyboard mapping to force the next key translation to get it
(case request
(:modifier
(setf (display-modifier-mapping display) nil))
(:keyboard
(setf (display-keysym-mapping display) nil))))
(defun keysym-in-map-p (display keysym keymap)
;; Returns T if keysym is found in keymap
(declare (type display display)
(type keysym keysym)
(type (bit-vector 256) keymap))
(declare (values boolean))
;; The keysym may appear in the keymap more than once,
;; So we have to search the entire keysym map.
(do* ((min (display-min-keycode display))
(max (display-max-keycode display))
(map (display-keyboard-mapping display))
(jmax (min 2 (array-dimension map 1)))
(i min (1+ i)))
((> i max))
(declare (type card8 min max jmax)
(type (simple-array keysym (* *)) map))
(when (and (plusp (aref keymap i))
(dotimes (j jmax)
(when (= keysym (aref map i j)) (return t))))
(return t))))
(defun character-in-map-p (display character keymap)
;; Implementation dependent function.
;; Returns T if character is found in keymap
(declare (type display display)
(type character character)
(type (bit-vector 256) keymap))
(declare (values boolean))
  ;; Check all one bits in keymap
(do* ((min (display-min-keycode display))
(max (display-max-keycode display))
(jmax (array-dimension (display-keyboard-mapping display) 1))
(i min (1+ i)))
((> i max))
(declare (type card8 min max jmax))
(when (and (plusp (aref keymap i))
;; Match when character is in mapping for this keycode
(dotimes (j jmax)
(when (eql character (keycode->character display i 0 :keysym-index j))
(return t))))
(return t))))
(defun keysym->keycodes (display keysym)
;; Return keycodes for keysym, as multiple values
(declare (type display display)
(type keysym keysym))
(declare (values (or null keycode) (or null keycode) (or null keycode)))
;; The keysym may appear in the keymap more than once,
;; So we have to search the entire keysym map.
(do* ((min (display-min-keycode display))
(max (display-max-keycode display))
(map (display-keyboard-mapping display))
(jmax (min 2 (array-dimension map 1)))
(i min (1+ i))
(result nil))
((> i max) (values-list result))
(declare (type card8 min max jmax)
(type (simple-array keysym (* *)) map))
(dotimes (j jmax)
(when (= keysym (aref map i j))
(push i result)))))
| null | https://raw.githubusercontent.com/logicmoo/wam_common_lisp/4396d9e26b050f68182d65c9a2d5a939557616dd/prolog/wam_cl/src/clx/translate.lsp | lisp | Package : XLIB ; Syntax : COMMON - LISP ; ; Lowercase : YES -*-
P.O. BOX 2909
Permission is granted to any individual or institution to use, copy, modify,
and distribute this software, provided that this complete copyright and
permission notice is maintained, intact, in all copies and supporting
documentation.
express or implied warranty.
Define all keysyms from first-keysym up to and including
last-keysym to be in SET (returned from the keysym-set function).
Signals an error if the keysym range overlaps an existing set.
Return the character code set name of keysym
Build a keysym.
If KEYSYM is an integer, it is used as the most significant bits of
the keysym, and BYTES are used to specify low order bytes. The last
integer, the keysym associated with KEYSYM is returned.
This is a macro and not a function macro to promote compile-time
lookup. All arguments are evaluated.
Keysym-mappings are a list of the form (object translate lowercase modifiers mask)
With the following accessor macros. Everything after OBJECT is optional.
Parameter to translate
Function to be called with parameters (display state OBJECT)
when translating KEYSYM and modifiers and mask are satisfied.
is the associated lowercase keysym.
MODIFIERS is either a modifier-mask or list containing intermixed
keysyms and state-mask-keys specifying when to use this
keysym-translation.
MASK is either a modifier-mask or list containing intermixed
keysyms and state-mask-keys specifying which modifiers to look at
(i.e. modifiers not specified are don't-cares)
(list (or keysym state-mask-key))
Define the translation from keysym/modifiers to a (usually
KEYSYM and MODIFIERS is deleted before adding the new definition.
MODIFIERS is either a modifier-mask or list containing intermixed
keysyms and state-mask-keys specifying when to use this
MASK is either a modifier-mask or list containing intermixed
keysyms and state-mask-keys specifying which modifiers to look at
(i.e. modifiers not specified are don't-cares).
If mask is :MODIFIERS then the mask is the same as the modifiers
(i.e. modifiers not specified by modifiers are don't cares)
The default mask is *default-keysym-translate-mask*
If DISPLAY is specified, the translation will be local to DISPLAY,
otherwise it will be the default translation for all displays.
is the associated lowercase keysym. This information is used
by the keysym-both-case-p predicate (for caps-lock computations)
and by the keysym-downcase function.
TRANSLATE will be called with parameters (display state OBJECT)
when translating KEYSYM and modifiers and mask are satisfied.
(or modifiers 0)))
when mask and modifiers aren't lists of keysyms]
The default is #'default-keysym-translate
(list (or keysym state-mask-key))
(list (or keysym state-mask-key))
Merge new keysym-mapping with list of old mappings.
Create with a single LIST call, to ensure cdr-coding
If DISPLAY is non-nil, undefine the translation for DISPLAY if it exists.
(list (or keysym state-mask-key))
If keysym has a lower-case equivalent, return it, otherwise return keysym.
Returns T if keysym is uppercase-alphabetic.
Given a character, return a list of all matching keysyms.
If DISPLAY is given, translations specific to DISPLAY are used,
otherwise only global translations are used.
Implementation dependent function.
Required for Symbolics...
end eval-when
-----------------------------------------------------------------------------
The keysym-mapping is brain dammaged.
Mappings for both-case alphabetic characters have the
(this is normally where the lowercase keysym goes), and the
Lowercase alphabetic keysyms
Get the uppercase keysym
Find the character associated with a keysym.
STATE can be used to set character attributes.
Implementation dependent function.
Find the matching display mapping
Find the matching static mapping
Returns T when the modifiers and mask in MAPPING satisfies STATE for DISPLAY
Convert MODIFIERS, which is a modifier mask, or a list of state-mask-keys into a mask.
otherwise ignore unknown modifiers.
bad modifier
If no mask, use the default.
If no modifiers, match anything.
Returns a keysym-index for use with keycode->character
If T, both SHIFT-LOCK and SHIFT is the same
as neither if the character is alphabetic.
Control keys aren't affected by lock
Not a control character - check state of lock modifier
Lock doesn't unshift unless shift-lock-xors
default-keysym-index implements the following tables:
control shift caps-lock character character
1 0 0 #\control-A #\control-8
1 0 1 #\control-A #\control-8
1 1 0 #\control-shift-a #\control-*
1 1 1 #\control-shift-a #\control-*
control shift shift-lock character character
1 0 0 #\control-A #\control-8
1 0 1 #\control-A #\control-*
1 1 0 #\control-shift-a #\control-*
1 1 1 #\control-shift-a #\control-8
keysym-index defaults to the result of keysym-index-function which
is called with the following parameters:
caps-lock-p is non-nil when the keysym associated with the lock
modifier is for caps-lock.
STATE can also used for setting character attributes.
Implementation dependent function.
Returns T when a modifier key associated with KEYSYM is on in STATE
Called on a mapping-notify event to update
the keyboard-mapping cache in DISPLAY
Returns T if keysym is found in keymap
The keysym may appear in the keymap more than once,
So we have to search the entire keysym map.
Implementation dependent function.
Returns T if character is found in keymap
Match when character is in mapping for this keycode
Return keycodes for keysym, as multiple values
The keysym may appear in the keymap more than once,
So we have to search the entire keysym map. |
( c ) Copyright Enhancements by , 1994 .
TEXAS INSTRUMENTS INCORPORATED
AUSTIN , TEXAS 78769
Copyright ( C ) 1987 Texas Instruments Incorporated .
Texas Instruments Incorporated provides this software " as is " without
(in-package :xlib)
Alist of ( name first - keysym last - keysym )
(defun define-keysym-set (set first-keysym last-keysym)
(declare (type keyword set)
(type keysym first-keysym last-keysym))
(when (> first-keysym last-keysym)
(rotatef first-keysym last-keysym))
(setq *keysym-sets* (delete set *keysym-sets* :key #'car))
(dolist (set *keysym-sets*)
(let ((first (second set))
(last (third set)))
(when (or (<= first first-keysym last)
(<= first last-keysym last))
(error "Keysym range overlaps existing set ~s" set))))
(push (list set first-keysym last-keysym) *keysym-sets*)
set)
(defun keysym-set (keysym)
(declare (type keysym keysym)
(values keyword))
(dolist (set *keysym-sets*)
(let ((first (second set))
(last (third set)))
(when (<= first keysym last)
(return (first set))))))
Required for ...
(defmacro keysym (keysym &rest bytes)
parameter is always byte4 of the keysym . If KEYSYM is not an
(declare (type t keysym)
(type list bytes)
(values keysym))
(typecase keysym
((integer 0 *)
(dolist (b bytes keysym) (setq keysym (+ (ash keysym 8) b))))
(otherwise
(or (car (character->keysyms keysym))
(error "~s Isn't the name of a keysym" keysym)))))
)
(defvar *keysym->character-map*
(make-hash-table :test (keysym->character-map-test) :size 400))
;; A keysym-mapping is the list (object translate lowercase modifiers
;; mask); the accessor macros below name its positions.
(defmacro keysym-mapping-object (keysym-mapping)
  ;; The (usually character) object the keysym translates to.
  `(first ,keysym-mapping))
(defmacro keysym-mapping-translate (keysym-mapping)
  ;; Optional translate function, called during keysym-translation.
  `(second ,keysym-mapping))
(defmacro keysym-mapping-lowercase (keysym-mapping)
  ;; LOWERCASE is used for uppercase alphabetic keysyms; the value is the
  ;; corresponding lowercase keysym.  (This sentence was a stripped
  ;; comment left as bare text inside the macro body, which broke it.)
  `(third ,keysym-mapping))
(defmacro keysym-mapping-modifiers (keysym-mapping)
  ;; Modifier specification under which this mapping applies.
  `(fourth ,keysym-mapping))
(defmacro keysym-mapping-mask (keysym-mapping)
  ;; State mask used when comparing modifiers.
  `(fifth ,keysym-mapping))
;; Extraction detached this defvar's documentation string, leaving it as
;; a stray top-level string with an unbalanced close paren; reattached.
(defvar *default-keysym-translate-mask*
        (logand #xff (lognot (make-state-mask :lock)))
  "Default keysym state mask to use during keysym-translation.")
(defun define-keysym (object keysym &key lowercase translate modifiers mask display)
  ;; Define the translation from KEYSYM (under MODIFIERS/MASK) to the
  ;; (usually character) object.  Any previous keysym definition with
  ;; the same modifiers is replaced.  TRANSLATE is called during
  ;; keysym-translation; the default is NIL.
  ;; LOWERCASE is used for uppercase alphabetic keysyms: its value is the
  ;; corresponding lowercase keysym.  MASK limits which state bits take
  ;; part in matching
  ;; [e.g. (zerop (logand state (or mask *default-keysym-translate-mask*)))].
  ;; (The sentences above were stripped comments left as bare text inside
  ;; the function body, which broke it; they are comments again.)
  (declare (type (or base-char t) object)
           (type keysym keysym)
           ;; NOTE(review): the type declarations for MODIFIERS and MASK
           ;; lost their opening lines in extraction; only the dangling
           ;; fragments "modifiers)" / "mask)" remained and have been
           ;; removed.  Restore the full declarations from upstream CLX.
           (type (or null display) display)
           (type (or null keysym) lowercase)
           (type (function (display card16 t) t) translate))
  (flet ((merge-keysym-mappings (new old)
           ;; Ensure that the mapping with no modifiers or mask comes first.
           (let* ((key (keysym-mapping-modifiers new))
                  (merge (delete key old :key #'cadddr :test #'equal)))
             (if key
                 (nconc merge (list new))
                 (cons new merge))))
         (mask-check (mask)
           ;; MASK must be a mask16 or a list of known modifier
           ;; keys/keysyms; otherwise signal an x-type-error.
           (unless (or (numberp mask)
                       (dolist (element mask t)
                         (unless (or (find element *state-mask-vector*)
                                     (gethash element *keysym->character-map*))
                           (return nil))))
             (x-type-error mask '(or mask16 (list (or modifier-key modifier-keysym)))))))
    (let ((entry
            ;; Keep the stored mapping list as short as the options allow.
            (cond
              (mask
               (unless (eq mask :modifiers)
                 (mask-check mask))
               (when (or (null modifiers) (and (numberp modifiers) (zerop modifiers)))
                 (error "Mask with no modifiers"))
               (list object translate lowercase modifiers mask))
              (modifiers (mask-check modifiers)
                         (list object translate lowercase modifiers))
              (lowercase (list object translate lowercase))
              (translate (list object translate))
              (t (list object)))))
      (if display
          (let ((previous (assoc keysym (display-keysym-translation display))))
            (if previous
                (setf (cdr previous) (merge-keysym-mappings entry (cdr previous)))
                (push (list keysym entry) (display-keysym-translation display))))
          (setf (gethash keysym *keysym->character-map*)
                (merge-keysym-mappings entry (gethash keysym *keysym->character-map*)))))
    object))
(defun undefine-keysym (object keysym &key display modifiers &allow-other-keys)
  ;; Undefine the keysym-translation translating KEYSYM to OBJECT with MODIFIERS.
  (declare (type (or base-char t) object)
           (type keysym keysym)
           ;; NOTE(review): the type declaration for MODIFIERS lost its
           ;; opening line in extraction (only the fragment "modifiers)"
           ;; remained and has been removed); restore from upstream CLX.
           (type (or null display) display))
  (flet ((match (key entry)
           ;; KEY is (object . modifiers); an entry matches when its
           ;; object or its modifier spec agrees.
           (let ((object (car key))
                 (modifiers (cdr key)))
             (or (eql object (keysym-mapping-object entry))
                 (equal modifiers (keysym-mapping-modifiers entry))))))
    (let* (entry
           (previous (if display
                         (cdr (setq entry (assoc keysym (display-keysym-translation display))))
                         (gethash keysym *keysym->character-map*)))
           (key (cons object modifiers)))
      (when (and previous (find key previous :test #'match))
        (setq previous (delete key previous :test #'match))
        (if display
            (setf (cdr entry) previous)
            (setf (gethash keysym *keysym->character-map*) previous))))))
(defun keysym-downcase (keysym)
  ;; Return the lowercase keysym recorded for KEYSYM, or KEYSYM itself
  ;; when no lowercase mapping exists.
  (declare (type keysym keysym))
  (declare (values keysym))
  (let ((mapping (first (gethash keysym *keysym->character-map*))))
    (if mapping
        (or (keysym-mapping-lowercase mapping) keysym)
        keysym)))
(defun keysym-uppercase-alphabetic-p (keysym)
  ;; Non-NIL when KEYSYM is an uppercase alphabetic keysym,
  ;; i.e. if it has a lowercase equivalent (the returned value).
  ;; (The "I.E." sentence was a stripped comment left as bare text in the
  ;; body, which broke it; it is a comment again.)
  (declare (type keysym keysym))
  (declare (values (or null keysym)))
  (let ((translations (gethash keysym *keysym->character-map*)))
    (and translations
         (keysym-mapping-lowercase (first translations)))))
(defun character->keysyms (character &optional display)
  ;; Return a list of all keysyms that map to CHARACTER, consulting
  ;; DISPLAY-local mappings first, then the global table.
  ;; May be slow [i.e. do a linear search over all known keysyms].
  ;; (The sentence above was a stripped comment left as bare text in the
  ;; body, which broke it; it is a comment again.)
  (declare (type t character)
           (type (or null display) display)
           (values (list keysym)))
  (let ((result nil))
    (when display
      (dolist (mapping (display-keysym-translation display))
        (when (eql character (second mapping))
          (push (first mapping) result))))
    (maphash #'(lambda (keysym mappings)
                 (dolist (mapping mappings)
                   (when (eql (keysym-mapping-object mapping) character)
                     (pushnew keysym result))))
             *keysym->character-map*)
    result))
;; Well-known switch and modifier keysyms: byte4 = 255 with the standard
;; X11 byte3 code for each key.
(defconstant character-set-switch-keysym (keysym 255 126))
(defconstant left-shift-keysym (keysym 255 225))
(defconstant right-shift-keysym (keysym 255 226))
(defconstant left-control-keysym (keysym 255 227))
(defconstant right-control-keysym (keysym 255 228))
(defconstant caps-lock-keysym (keysym 255 229))
(defconstant shift-lock-keysym (keysym 255 230))
(defconstant left-meta-keysym (keysym 255 231))
(defconstant right-meta-keysym (keysym 255 232))
(defconstant left-alt-keysym (keysym 255 233))
(defconstant right-alt-keysym (keysym 255 234))
(defconstant left-super-keysym (keysym 255 235))
(defconstant right-super-keysym (keysym 255 236))
(defconstant left-hyper-keysym (keysym 255 237))
(defconstant right-hyper-keysym (keysym 255 238))
;;; Keysym mapping functions
(defun display-keyboard-mapping (display)
  ;; Return the keysym array cached on DISPLAY, fetching it with
  ;; KEYBOARD-MAPPING and caching it on first use.
  (declare (type display display))
  (declare (values (simple-array keysym (display-max-keycode keysyms-per-keycode))))
  (let ((cached (display-keysym-mapping display)))
    (or cached
        (setf (display-keysym-mapping display) (keyboard-mapping display)))))
(defun keycode->keysym (display keycode keysym-index)
  ;; Return the keysym stored for KEYCODE at KEYSYM-INDEX, compensating
  ;; for the traditional server encoding of both-case alphabetic keysyms.
  (declare (type display display)
           (type card8 keycode)
           (type card8 keysym-index)
           (values keysym))
  (let* ((mapping (display-keyboard-mapping display))
         (keysym (aref mapping keycode keysym-index)))
    (declare (type (simple-array keysym (* *)) mapping)
             (type keysym keysym))
    ;; Mappings for both-case alphabetic keysyms traditionally have the
    ;; entry for keysym-index zero set to the uppercase keysym, and the
    ;; entry for keysym-index one is zero.
    ;; NOTE(review): the COND test forms below were lost in extraction;
    ;; they are reconstructed from the recovered comment above -- verify
    ;; against upstream CLX translate.lisp before relying on them.
    (cond ((and (zerop keysym-index)
                (keysym-uppercase-alphabetic-p keysym))
           (keysym-downcase keysym))
          ((and (= keysym-index 1)
                (zerop keysym))
           (aref mapping keycode 0))
          (t keysym))))
(defun keysym->character (display keysym &optional (state 0))
  ;; Return the character object for KEYSYM under modifier STATE, or NIL.
  ;; Display-local mappings take priority over the global map.
  (declare (type display display)
           (type keysym keysym)
           (type card16 state))
  (declare (values (or null character)))
  (let* ((display-mappings (cdr (assoc keysym (display-keysym-translation display))))
         ;; NOTE(review): the "(mapping (or" binding line was dropped in
         ;; extraction; the six trailing parens after the second DOLIST
         ;; confirm this exact structure.
         (mapping (or
                   ;; First display-local mapping matching STATE ...
                   (dolist (mapping display-mappings)
                     (when (mapping-matches-p display state mapping)
                       (return mapping)))
                   ;; ... else the first matching global mapping.
                   (dolist (mapping (gethash keysym *keysym->character-map*))
                     (when (mapping-matches-p display state mapping)
                       (return mapping))))))
    (when mapping
      (funcall (or (keysym-mapping-translate mapping) 'default-keysym-translate)
               display state (keysym-mapping-object mapping)))))
;; Predicate: does the keysym-mapping MAPPING apply under modifier STATE?
;; NOTE(review): this definition was damaged in extraction -- the opening
;; of MODIFIERS->MASK's DECLARE and the MAPPING-MASK binding lines are
;; missing, leaving the form unbalanced.  Restore from upstream CLX
;; before compiling; code below is preserved byte-for-byte.
(defun mapping-matches-p (display state mapping)
  (declare (type display display)
           (type mask16 state)
           (type list mapping))
  (declare (values boolean))
  (flet
    ((modifiers->mask (display-mapping modifiers errorp &aux (mask 0))
       If ERRORP is non - nil , return NIL when an unknown modifier is specified ,
       Alist of ( keysym . mask )
         (type (or mask16 list) modifiers)
         (type mask16 mask))
       (declare (values (or null mask16)))
       (if (numberp modifiers)
           modifiers
           (dolist (modifier modifiers mask)
             (declare (type symbol modifier))
             (let ((bit (position modifier (the simple-vector *state-mask-vector*) :test #'eq)))
               (setq mask
                     (logior mask
                             (if bit
                                 (ash 1 bit)
                                 (or (cdr (assoc modifier display-mapping))
                                     (if errorp
                                         (return-from modifiers->mask nil)
                                         0))))))))))
    (let* ((display-mapping (get-display-modifier-mapping display))
           (mapping-modifiers (keysym-mapping-modifiers mapping))
           (modifiers (or (modifiers->mask display-mapping (or mapping-modifiers 0) t)
                          (return-from mapping-matches-p nil)))
           ;; NOTE(review): the MAPPING-MASK binding belongs here; the two
           ;; lines below are its orphaned remainder.
           *default-keysym-translate-mask*
           0)))
           (mask (if (eq mapping-mask :modifiers)
                     modifiers
                     (modifiers->mask display-mapping mapping-mask nil))))
      (declare (type mask16 modifiers mask))
      (= #-ecl (logand state mask)
         #+ecl (the mask16 (logand state mask)) modifiers))))
(defun default-keysym-index (display keycode state)
  ;; Return the keysym-index (column) to use when translating KEYCODE
  ;; under STATE: start at column 2 when the character-set-switch keysym
  ;; is held, and add one when KEYSYM-SHIFT-P says the shifted column
  ;; applies to the keycode's column-0 keysym.
  ;; NOTE(review): the KEYSTATE-P macrolet is unused in this trimmed
  ;; body; confirm against upstream CLX whether lines were dropped.
  (declare (values card8))
  (macrolet ((keystate-p (state keyword)
               `(the boolean
                     (logbitp ,(position keyword *state-mask-vector*)
                              ,state))))
    (let* ((mapping (display-keyboard-mapping display))
           (keysyms-per-keycode (array-dimension mapping 1))
           (symbolp (and (> keysyms-per-keycode 2)
                         (state-keysymp display state character-set-switch-keysym)))
           (result (if symbolp 2 0)))
      (declare (type (simple-array keysym (* *)) mapping)
               (type boolean symbolp)
               (type card8 keysyms-per-keycode result))
      (when (and (< result keysyms-per-keycode)
                 (keysym-shift-p display state (keysym-uppercase-alphabetic-p
                                                (aref mapping keycode 0))))
        (incf result))
      result)))
;; Decide whether the "shifted" keysym column should be used for STATE.
;; NOTE(review): damaged in extraction -- the first DECLARE below is
;; never closed (the declarations for SHIFT-LOCK-XORS and
;; CONTROL-MODIFIERS were dropped), and the shift-lock condition in the
;; final UNLESS lost lines (SHIFT-LOCK-XORS is never consulted and the
;; WHEN body is fused into its test).  Restore from upstream CLX before
;; compiling; code below is preserved byte-for-byte.
(defun keysym-shift-p (display state uppercase-alphabetic-p &key
                       shift-lock-xors
                       (control-modifiers
                         '#.(list left-meta-keysym left-super-keysym left-hyper-keysym)))
  (declare (type display display)
           (type card16 state)
           (type boolean uppercase-alphabetic-p)
  (declare (values boolean))
  (macrolet ((keystate-p (state keyword)
               `(the boolean
                     (logbitp ,(position keyword *state-mask-vector*)
                              ,state))))
    (let* ((controlp (or (keystate-p state :control)
                         (dolist (modifier control-modifiers)
                           (when (state-keysymp display state modifier)
                             (return t)))))
           (shiftp (keystate-p state :shift))
           (lockp (keystate-p state :lock))
           (alphap (or uppercase-alphabetic-p
                       (not (state-keysymp display #.(make-state-mask :lock)
                                           caps-lock-keysym)))))
      (declare (type boolean controlp shiftp lockp alphap))
      (unless controlp
        (when (and lockp
                   alphap
          (setq shiftp (not shiftp))))
      shiftp)))
;; Shift/lock truth tables (recovered comment fragments; columns appear
;; to be control/shift/lock -> resulting character case -- verify
;; against upstream CLX):
;;   0 0 0  #\a          0 0 0  #\a
;;   0 0 1  #\A          0 0 1  #\A
;;   0 1 0  #\A          0 1 0  #\A
;;   0 1 1  #\A          0 1 1  #\a   (shift-lock-xors variant)
(defun keycode->character (display keycode state &key keysym-index
                           (keysym-index-function #'default-keysym-index))
  ;; Translate KEYCODE under modifier STATE to a character, or NIL.
  ;; Recovered comment fragment about KEYSYM-INDEX-FUNCTION:
  ;;   "(char0 state caps-lock-p) ... where char0 is the \"character\"
  ;;   object associated with keysym-index 0 and ..."
  ;; NOTE(review): the call below actually passes (display keycode
  ;; state); reconcile the documented signature with upstream CLX.
  ;; (The fragments were bare text inside the body, which broke it.)
  (declare (type display display)
           (type card8 keycode)
           (type card16 state)
           (type (or null card8) keysym-index)
           (type (or null (function (base-char card16 boolean card8) card8))
                 keysym-index-function))
  (declare (values (or null character)))
  (let* ((index (or keysym-index
                    (funcall keysym-index-function display keycode state)))
         (keysym (if index (keycode->keysym display keycode index) 0)))
    (declare (type (or null card8) index)
             (type keysym keysym))
    (when (plusp keysym)
      (keysym->character display keysym state))))
(defun get-display-modifier-mapping (display)
  ;; Return an alist of (modifier-keysym . state-mask) for DISPLAY,
  ;; computed from the server's modifier mapping and cached on the
  ;; display (the cache is cleared by MAPPING-NOTIFY).
  (labels ((keysym-replace (display modifiers mask &aux result)
             ;; Map each modifier keycode to (keysym . mask) using the
             ;; keycode's column-0 keysym.
             (dolist (modifier modifiers result)
               (push (cons (keycode->keysym display modifier 0) mask) result))))
    (or (display-modifier-mapping display)
        (multiple-value-bind (shift lock control mod1 mod2 mod3 mod4 mod5)
            (modifier-mapping display)
          (setf (display-modifier-mapping display)
                (nconc (keysym-replace display shift #.(make-state-mask :shift))
                       (keysym-replace display lock #.(make-state-mask :lock))
                       (keysym-replace display control #.(make-state-mask :control))
                       (keysym-replace display mod1 #.(make-state-mask :mod-1))
                       (keysym-replace display mod2 #.(make-state-mask :mod-2))
                       (keysym-replace display mod3 #.(make-state-mask :mod-3))
                       (keysym-replace display mod4 #.(make-state-mask :mod-4))
                       (keysym-replace display mod5 #.(make-state-mask :mod-5))))))))
(defun state-keysymp (display state keysym)
  ;; True when KEYSYM is a modifier keysym whose state-mask bit is set
  ;; in STATE.
  (declare (type display display)
           (type card16 state)
           (type keysym keysym))
  (declare (values boolean))
  (let ((entry (assoc keysym (get-display-modifier-mapping display))))
    (and entry
         (plusp #-ecl (logand state (cdr entry))
                #+ecl (the card16 (logand state (cdr entry)))))))
(defun mapping-notify (display request start count)
  ;; Invalidate the keyboard mapping to force the next key translation
  ;; to get it fresh from the server.  (This sentence was a stripped
  ;; comment left as bare text inside the body, which broke it.)
  (declare (type display display)
           (type (member :modifier :keyboard :pointer) request)
           (type card8 start count)
           (ignore count start))
  (case request
    (:modifier
     (setf (display-modifier-mapping display) nil))
    (:keyboard
     (setf (display-keysym-mapping display) nil))))
(defun keysym-in-map-p (display keysym keymap)
  ;; True when some keycode whose bit is on in KEYMAP has KEYSYM in one
  ;; of its first two mapping columns.
  (declare (type display display)
           (type keysym keysym)
           (type (bit-vector 256) keymap))
  (declare (values boolean))
  (do* ((first (display-min-keycode display))
        (last (display-max-keycode display))
        (mapping (display-keyboard-mapping display))
        (columns (min 2 (array-dimension mapping 1)))
        (keycode first (1+ keycode)))
       ((> keycode last))
    (declare (type card8 first last columns)
             (type (simple-array keysym (* *)) mapping))
    (when (plusp (aref keymap keycode))
      (dotimes (column columns)
        (when (= keysym (aref mapping keycode column))
          (return-from keysym-in-map-p t))))))
(defun character-in-map-p (display character keymap)
  ;; True when some keycode whose bit is on in KEYMAP translates to
  ;; CHARACTER in any keysym column.
  (declare (type display display)
           (type character character)
           (type (bit-vector 256) keymap))
  (declare (values boolean))
  ;; Check all one bits in keymap.  (This sentence was a stripped comment
  ;; left as bare text inside the body, which broke it.)
  (do* ((min (display-min-keycode display))
        (max (display-max-keycode display))
        (jmax (array-dimension (display-keyboard-mapping display) 1))
        (i min (1+ i)))
       ((> i max))
    (declare (type card8 min max jmax))
    (when (and (plusp (aref keymap i))
               (dotimes (j jmax)
                 (when (eql character (keycode->character display i 0 :keysym-index j))
                   (return t))))
      (return t))))
(defun keysym->keycodes (display keysym)
  ;; Return, as multiple values, every keycode whose first two mapping
  ;; columns contain KEYSYM.
  (declare (type display display)
           (type keysym keysym))
  (declare (values (or null keycode) (or null keycode) (or null keycode)))
  (do* ((first (display-min-keycode display))
        (last (display-max-keycode display))
        (mapping (display-keyboard-mapping display))
        (columns (min 2 (array-dimension mapping 1)))
        (keycode first (1+ keycode))
        (matches nil))
       ((> keycode last) (values-list matches))
    (declare (type card8 first last columns)
             (type (simple-array keysym (* *)) mapping))
    (dotimes (column columns)
      (when (= keysym (aref mapping keycode column))
        (push keycode matches)))))
|
-- Source: lindenbaum/hinterface, test/Foreign/Erlang/ControlMessageSpec.hs
{-# LANGUAGE ScopedTypeVariables #-}
module Foreign.Erlang.ControlMessageSpec (spec) where
import Data.Binary (decode, encode)
import qualified Data.ByteString.Lazy as LBS
import Foreign.Erlang.ControlMessage
import Foreign.Erlang.Term
import Test.Hspec
import Test.QuickCheck
-- | Wire-format tests for the distribution-protocol control messages: a
-- QuickCheck encode/decode round trip plus golden byte strings.  Each
-- golden packet is a 4-byte big-endian length prefix, the pass-through
-- tag 112, then the external-term-format encoding (version tag 131) of
-- the control tuple; SEND and REG_SEND carry a second encoded term.
spec :: Spec
spec = describe "ControlMessage" $ do
    let -- Four big-endian bytes per word; every value used here is < 256.
        be32 n = [0, 0, 0, n]
        -- NEW_PID_EXT (88): ATOM_UTF8_EXT (118) node name, then id,
        -- serial and creation words.
        fromPid = [88, 118, 0, 4, 102, 114, 111, 109] ++ concatMap be32 [1, 2, 3]
        toPid = [88, 118, 0, 2, 116, 111] ++ concatMap be32 [4, 5, 6]
        -- Version tag 131 followed by ATOM_EXT (100) "hello".
        helloTerm = [131, 100, 0, 5, 104, 101, 108, 108, 111]
    it "decode . encode = id" $
      property $
        \(a :: ControlMessage) -> (decode . encode) a `shouldBe` a
    it "TICK encodes as expected" $
      -- A tick is an empty packet: just the zero length prefix.
      LBS.unpack (encode TICK) `shouldBe` [0, 0, 0, 0]
    it "LINK encodes as expected" $
      LBS.unpack (encode (LINK (pid "from" 1 2 3) (pid "to" 4 5 6)))
        `shouldBe` ([0, 0, 0, 44, 112, 131, 104, 3, 97, 1] ++ fromPid ++ toPid)
    it "SEND encodes as expected" $
      LBS.unpack (encode (SEND (pid "to" 4 5 6) (atom "hello")))
        `shouldBe` ([0, 0, 0, 36, 112, 131, 104, 3, 97, 2, 100, 0, 0]
                      ++ toPid ++ helloTerm)
    it "EXIT encodes as expected" $
      LBS.unpack (encode (EXIT (pid "from" 1 2 3) (pid "to" 4 5 6) (Atom SmallAtomUtf8 "normal")))
        `shouldBe` ([0, 0, 0, 52, 112, 131, 104, 4, 97, 3]
                      ++ fromPid ++ toPid
                      -- SMALL_ATOM_UTF8_EXT (119) "normal"
                      ++ [119, 6, 110, 111, 114, 109, 97, 108])
    it "UNLINK encodes as expected" $
      LBS.unpack (encode (UNLINK (pid "from" 1 2 3) (pid "to" 4 5 6)))
        `shouldBe` ([0, 0, 0, 44, 112, 131, 104, 3, 97, 4] ++ fromPid ++ toPid)
    it "NODE_LINK encodes as expected" $
      LBS.unpack (encode NODE_LINK) `shouldBe` [0, 0, 0, 6, 112, 131, 104, 1, 97, 5]
    it "REG_SEND encodes as expected" $
      LBS.unpack (encode (REG_SEND (pid "from" 1 2 3) "to" (atom "hello")))
        `shouldBe` ([0, 0, 0, 43, 112, 131, 104, 4, 97, 6]
                      ++ fromPid
                      -- empty ATOM_EXT (unused field), then ATOM_UTF8_EXT "to"
                      ++ [100, 0, 0, 118, 0, 2, 116, 111]
                      ++ helloTerm)
    it "GROUP_LEADER encodes as expected" $
      LBS.unpack (encode (GROUP_LEADER (pid "from" 1 2 3) (pid "to" 4 5 6)))
        `shouldBe` ([0, 0, 0, 44, 112, 131, 104, 3, 97, 7] ++ fromPid ++ toPid)
    it "EXIT2 encodes as expected" $
      LBS.unpack (encode (EXIT2 (pid "from" 1 2 3) (pid "to" 4 5 6) (atom "normal")))
        `shouldBe` ([0, 0, 0, 53, 112, 131, 104, 4, 97, 8]
                      ++ fromPid ++ toPid
                      -- ATOM_EXT (100) "normal"
                      ++ [100, 0, 6, 110, 111, 114, 109, 97, 108])
| null | https://raw.githubusercontent.com/lindenbaum/hinterface/e49810b5984486cebad6fa06dba0f2c7f2bfd2fa/test/Foreign/Erlang/ControlMessageSpec.hs | haskell | # LANGUAGE ScopedTypeVariables #
module Foreign.Erlang.ControlMessageSpec (spec) where
import Data.Binary (decode, encode)
import qualified Data.ByteString.Lazy as LBS
import Foreign.Erlang.ControlMessage
import Foreign.Erlang.Term
import Test.Hspec
import Test.QuickCheck
spec :: Spec
spec = describe "ControlMessage" $ do
it "decode . encode = id" $
property $
\(a :: ControlMessage) -> (decode . encode) a `shouldBe` a
it "TICK encodes as expected" $
LBS.unpack (encode TICK) `shouldBe` [0, 0, 0, 0]
it "LINK encodes as expected" $
LBS.unpack (encode (LINK (pid "from" 1 2 3) (pid "to" 4 5 6)))
`shouldBe` [ 0,
0,
0,
44,
112,
131,
104,
3,
97,
1,
88,
118,
0,
4,
102,
114,
111,
109,
0,
0,
0,
1,
0,
0,
0,
2,
0,
0,
0,
3,
88,
118,
0,
2,
116,
111,
0,
0,
0,
4,
0,
0,
0,
5,
0,
0,
0,
6
]
it "SEND encodes as expected" $
LBS.unpack (encode (SEND (pid "to" 4 5 6) (atom "hello")))
`shouldBe` [ 0,
0,
0,
36,
112,
131,
104,
3,
97,
2,
100,
0,
0,
88,
118,
0,
2,
116,
111,
0,
0,
0,
4,
0,
0,
0,
5,
0,
0,
0,
6,
131,
100,
0,
5,
104,
101,
108,
108,
111
]
it "EXIT encodes as expected" $
LBS.unpack (encode (EXIT (pid "from" 1 2 3) (pid "to" 4 5 6) (Atom SmallAtomUtf8 "normal")))
`shouldBe` [ 0,
0,
0,
52,
112,
131,
104,
4,
97,
3,
88,
118,
0,
4,
102,
114,
111,
109,
0,
0,
0,
1,
0,
0,
0,
2,
0,
0,
0,
3,
88,
118,
0,
2,
116,
111,
0,
0,
0,
4,
0,
0,
0,
5,
0,
0,
0,
6,
119,
6,
110,
111,
114,
109,
97,
108
]
it "UNLINK encodes as expected" $
LBS.unpack (encode (UNLINK (pid "from" 1 2 3) (pid "to" 4 5 6)))
`shouldBe` [ 0,
0,
0,
44,
112,
131,
104,
3,
97,
4,
88,
118,
0,
4,
102,
114,
111,
109,
0,
0,
0,
1,
0,
0,
0,
2,
0,
0,
0,
3,
88,
118,
0,
2,
116,
111,
0,
0,
0,
4,
0,
0,
0,
5,
0,
0,
0,
6
]
it "NODE_LINK encodes as expected" $
LBS.unpack (encode NODE_LINK) `shouldBe` [0, 0, 0, 6, 112, 131, 104, 1, 97, 5]
it "REG_SEND encodes as expected" $
LBS.unpack (encode (REG_SEND (pid "from" 1 2 3) "to" (atom "hello")))
`shouldBe` [ 0,
0,
0,
43,
112,
131,
104,
4,
97,
6,
88,
118,
0,
4,
102,
114,
111,
109,
0,
0,
0,
1,
0,
0,
0,
2,
0,
0,
0,
3,
100,
0,
0,
118,
0,
2,
116,
111,
131,
100,
0,
5,
104,
101,
108,
108,
111
]
it "GROUP_LEADER encodes as expected" $
LBS.unpack (encode (GROUP_LEADER (pid "from" 1 2 3) (pid "to" 4 5 6)))
`shouldBe` [ 0,
0,
0,
44,
112,
131,
104,
3,
97,
7,
88,
118,
0,
4,
102,
114,
111,
109,
0,
0,
0,
1,
0,
0,
0,
2,
0,
0,
0,
3,
88,
118,
0,
2,
116,
111,
0,
0,
0,
4,
0,
0,
0,
5,
0,
0,
0,
6
]
it "EXIT2 encodes as expected" $
LBS.unpack (encode (EXIT2 (pid "from" 1 2 3) (pid "to" 4 5 6) (atom "normal")))
`shouldBe` [ 0,
0,
0,
53,
112,
131,
104,
4,
97,
8,
88,
118,
0,
4,
102,
114,
111,
109,
0,
0,
0,
1,
0,
0,
0,
2,
0,
0,
0,
3,
88,
118,
0,
2,
116,
111,
0,
0,
0,
4,
0,
0,
0,
5,
0,
0,
0,
6,
100,
0,
6,
110,
111,
114,
109,
97,
108
]
| |
9b74751e908276d1dd02633bf647303b8c03c16bacbf49bbf1449bc068d7d727 | pat227/ocaml-pgsql-model | types_we_emit.ml | module Types_we_emit = struct
type t =
| Bignum
| CoreInt32
| CoreInt64
| Float
| Date
| Time
(*===TODO===for the really paranoid; introduce a type that extends string and is length aware,
and never permits truncation when storing to the db, although would have to handle runtime exceptions*)
| String
| Bool
[@@deriving show]
(*Return a string we can use in writing a module that is a type.*)
let to_string ~t ~is_nullable =
if is_nullable then
match t with
| Bignum -> "Bignum_extended.t option"
| CoreInt64 -> "CoreInt64_extended.t option"
| CoreInt32 -> "CoreInt32_extended.t option"
| Float -> "Core.Float.t option"
| Date -> "Date_extended.t option"
| Time -> "Date_time_extended.t option"
| String -> "string option"
| Bool -> "bool option"
else
match t with
| Bignum -> "Bignum_extended.t"
| CoreInt64 -> "CoreInt64_extended.t"
| CoreInt32 -> "CoreInt32_extended.t"
| Float -> "Core.Float.t"
| Date -> "Date_extended.t"
| Time -> "Date_time_extended.t"
| String -> "string"
| Bool -> "bool";;
(**
is_optional - is the field, of whatever type, optional in the type t of the module and nullable in the db?
t - the type of the field *)
let converter_of_string_of_type ~is_optional ~t ~fieldname =
let open Core in
match is_optional, t with
false, String ->
String.concat ["Utilities.extract_field_as_string_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, String ->
String.concat ["Utilities.extract_optional_field ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, Bool ->
String.concat ["Utilities.parse_bool_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, Bool ->
String.concat ["Utilities.parse_optional_bool_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, CoreInt32 ->
String.concat ["Utilities.parse_int32_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, CoreInt32 ->
String.concat ["Utilities.parse_optional_int32_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, CoreInt64 ->
String.concat ["Utilities.parse_int64_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, CoreInt64 ->
String.concat ["Utilities.parse_optional_int64_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, Bignum -> String.concat ["Utilities.parse_optional_bignum_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, Bignum -> String.concat ["Utilities.parse_bignum_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, Float -> String.concat ["Utilities.parse_float_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, Float -> String.concat ["Utilities.parse_optional_float_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, Date -> String.concat ["Utilities.parse_date_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, Date -> String.concat ["Utilities.parse_optional_date_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, Time -> String.concat ["Utilities.parse_datetime_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, Time -> String.concat ["Utilities.parse_optional_datetime_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
(** is_optional - is the field, of whatever type, optional in the type t of the module and nullable in the db?
t - the type of the field *)
let converter_to_string_of_type ~is_optional ~t =
let open Core in
match is_optional, t with
false, String ->
String.concat ["(conv (fun x -> \"'\" ^ (Mysql.real_escape conn x) ^ \"'\"))"]
| true, String ->
String.concat ["(conv (fun x -> Utilities.serialize_optional_field ~field:x ~conn))"]
| false, Bool ->
String.concat ["(conv (fun x -> if x then \"TRUE\" else \"FALSE\"))"]
| true, Bool ->
String.concat ["(conv (fun x -> Utilities.serialize_optional_bool_field ~field:x))"]
| false, CoreInt32 ->
String.concat ["(conv (fun x -> Core.Int32.to_string x))"]
| true, CoreInt32 ->
String.concat ["(conv (fun x -> match x with None -> \"NULL\" | Some i -> (Core.Int32.to_string i)))"]
| false, CoreInt64 ->
String.concat ["(conv (fun x -> Core.Int64.to_string x))"]
| true, CoreInt64 ->
String.concat ["(conv (fun x -> match x with None -> \"NULL\" | Some i -> (Core.Int64.to_string i)))"]
| false, Bignum -> "(conv (fun x -> Bignum_extended.to_string_hum x))"
| true, Bignum -> "(conv (fun x -> match x with None -> \"NULL\" | Some i -> (Bignum_extended.to_string_hum i)))"
| false, Float -> "(conv (fun x -> Float.to_string_round_trippable x))"
| true, Float -> "(conv (fun x -> match x with None -> \"NULL\" | Some i -> (Float.to_string_round_trippable i)))"
| false, Date -> "(conv (fun x -> (\"'\" ^ (Date_extended.to_string x) ^ \"'\")))"
| true, Date -> "(conv (fun x -> Utilities.serialize_optional_date_field ~field:x))"
| false, Time -> "(conv (fun x -> (\"'\" ^ (Date_time_extended.to_string x) ^ \"'\")))"
| true, Time -> "(conv (fun x -> Utilities.serialize_optional_date_time_field ~field:x))"
end
| null | https://raw.githubusercontent.com/pat227/ocaml-pgsql-model/17f4ad13dd26fbc228867841e61d153bac890e18/src/lib/types_we_emit.ml | ocaml | ===TODO===for the really paranoid; introduce a type that extends string and is length aware,
and never permits truncation when storing to the db, although would have to handle runtime exceptions
Return a string we can use in writing a module that is a type.
*
is_optional - is the field, of whatever type, optional in the type t of the module and nullable in the db?
t - the type of the field
* is_optional - is the field, of whatever type, optional in the type t of the module and nullable in the db?
t - the type of the field | module Types_we_emit = struct
type t =
| Bignum
| CoreInt32
| CoreInt64
| Float
| Date
| Time
| String
| Bool
[@@deriving show]
let to_string ~t ~is_nullable =
if is_nullable then
match t with
| Bignum -> "Bignum_extended.t option"
| CoreInt64 -> "CoreInt64_extended.t option"
| CoreInt32 -> "CoreInt32_extended.t option"
| Float -> "Core.Float.t option"
| Date -> "Date_extended.t option"
| Time -> "Date_time_extended.t option"
| String -> "string option"
| Bool -> "bool option"
else
match t with
| Bignum -> "Bignum_extended.t"
| CoreInt64 -> "CoreInt64_extended.t"
| CoreInt32 -> "CoreInt32_extended.t"
| Float -> "Core.Float.t"
| Date -> "Date_extended.t"
| Time -> "Date_time_extended.t"
| String -> "string"
| Bool -> "bool";;
let converter_of_string_of_type ~is_optional ~t ~fieldname =
let open Core in
match is_optional, t with
false, String ->
String.concat ["Utilities.extract_field_as_string_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, String ->
String.concat ["Utilities.extract_optional_field ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, Bool ->
String.concat ["Utilities.parse_bool_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, Bool ->
String.concat ["Utilities.parse_optional_bool_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, CoreInt32 ->
String.concat ["Utilities.parse_int32_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, CoreInt32 ->
String.concat ["Utilities.parse_optional_int32_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, CoreInt64 ->
String.concat ["Utilities.parse_int64_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, CoreInt64 ->
String.concat ["Utilities.parse_optional_int64_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, Bignum -> String.concat ["Utilities.parse_optional_bignum_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, Bignum -> String.concat ["Utilities.parse_bignum_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, Float -> String.concat ["Utilities.parse_float_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, Float -> String.concat ["Utilities.parse_optional_float_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, Date -> String.concat ["Utilities.parse_date_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, Date -> String.concat ["Utilities.parse_optional_date_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| false, Time -> String.concat ["Utilities.parse_datetime_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
| true, Time -> String.concat ["Utilities.parse_optional_datetime_field_exn ~fieldname:\"";fieldname;"\" ~qresult ~tuple:tuple_number"]
let converter_to_string_of_type ~is_optional ~t =
let open Core in
match is_optional, t with
false, String ->
String.concat ["(conv (fun x -> \"'\" ^ (Mysql.real_escape conn x) ^ \"'\"))"]
| true, String ->
String.concat ["(conv (fun x -> Utilities.serialize_optional_field ~field:x ~conn))"]
| false, Bool ->
String.concat ["(conv (fun x -> if x then \"TRUE\" else \"FALSE\"))"]
| true, Bool ->
String.concat ["(conv (fun x -> Utilities.serialize_optional_bool_field ~field:x))"]
| false, CoreInt32 ->
String.concat ["(conv (fun x -> Core.Int32.to_string x))"]
| true, CoreInt32 ->
String.concat ["(conv (fun x -> match x with None -> \"NULL\" | Some i -> (Core.Int32.to_string i)))"]
| false, CoreInt64 ->
String.concat ["(conv (fun x -> Core.Int64.to_string x))"]
| true, CoreInt64 ->
String.concat ["(conv (fun x -> match x with None -> \"NULL\" | Some i -> (Core.Int64.to_string i)))"]
| false, Bignum -> "(conv (fun x -> Bignum_extended.to_string_hum x))"
| true, Bignum -> "(conv (fun x -> match x with None -> \"NULL\" | Some i -> (Bignum_extended.to_string_hum i)))"
| false, Float -> "(conv (fun x -> Float.to_string_round_trippable x))"
| true, Float -> "(conv (fun x -> match x with None -> \"NULL\" | Some i -> (Float.to_string_round_trippable i)))"
| false, Date -> "(conv (fun x -> (\"'\" ^ (Date_extended.to_string x) ^ \"'\")))"
| true, Date -> "(conv (fun x -> Utilities.serialize_optional_date_field ~field:x))"
| false, Time -> "(conv (fun x -> (\"'\" ^ (Date_time_extended.to_string x) ^ \"'\")))"
| true, Time -> "(conv (fun x -> Utilities.serialize_optional_date_time_field ~field:x))"
end
|
2ce85c9bd1b49d4b8129a9149bf5d612ba1441fc56ffb3ca024db946b0bfa6d2 | beastaugh/hatt | Propositional.hs | -- | The "Data.Logic.Propositional" module provides a set of functions for
-- parsing, manipulating and generating truth tables for expressions in
-- classical propositional logic.
--
The core of the API is the ' ' data type , which has constructors for all
-- the usual expression forms: variables, standing for atomic propositions;
-- negation, the only unary connective; and the binary connectives of
-- conjunction, disjunction, material implication and logical equivalence.
module Data.Logic.Propositional
( Expr (..)
, Var (..)
, Mapping
, equivalent
, equivalents
, implies
, interpret
, assignments
, values
, variables
, isContingent
, isContradiction
, isTautology
, parseExpr
, show
, showAscii
, truthTable
, truthTableP
) where
import Data.Logic.Propositional.Core
import Data.Logic.Propositional.Parser
import Data.Logic.Propositional.Tables (truthTable, truthTableP)
| null | https://raw.githubusercontent.com/beastaugh/hatt/c727cba2a444a7026c5172e719e2fccaf5805bdc/src/Data/Logic/Propositional.hs | haskell | | The "Data.Logic.Propositional" module provides a set of functions for
parsing, manipulating and generating truth tables for expressions in
classical propositional logic.
the usual expression forms: variables, standing for atomic propositions;
negation, the only unary connective; and the binary connectives of
conjunction, disjunction, material implication and logical equivalence. | The core of the API is the ' ' data type , which has constructors for all
module Data.Logic.Propositional
( Expr (..)
, Var (..)
, Mapping
, equivalent
, equivalents
, implies
, interpret
, assignments
, values
, variables
, isContingent
, isContradiction
, isTautology
, parseExpr
, show
, showAscii
, truthTable
, truthTableP
) where
import Data.Logic.Propositional.Core
import Data.Logic.Propositional.Parser
import Data.Logic.Propositional.Tables (truthTable, truthTableP)
|
29bb2f3f03645d3436ab61294f88faf607f598e5464db887a847f5f83bd67f88 | RJ/mochiweb-websockets | mochiweb_socket_server.erl | @author < >
2007 Mochi Media , Inc.
@doc MochiWeb socket server .
-module(mochiweb_socket_server).
-author('').
-behaviour(gen_server).
-include("internal.hrl").
-export([start/1, stop/1]).
-export([init/1, handle_call/3, handle_cast/2, terminate/2, code_change/3,
handle_info/2]).
-export([get/2, set/3]).
-record(mochiweb_socket_server,
{port,
loop,
name=undefined,
%% NOTE: This is currently ignored.
max=2048,
ip=any,
listen=null,
nodelay=false,
backlog=128,
active_sockets=0,
acceptor_pool_size=16,
ssl=false,
ssl_opts=[{ssl_imp, new}],
acceptor_pool=sets:new(),
profile_fun=undefined}).
-define(is_old_state(State), not is_record(State, mochiweb_socket_server)).
start(State=#mochiweb_socket_server{}) ->
start_server(State);
start(Options) ->
start(parse_options(Options)).
get(Name, Property) ->
gen_server:call(Name, {get, Property}).
set(Name, profile_fun, Fun) ->
gen_server:cast(Name, {set, profile_fun, Fun});
set(Name, Property, _Value) ->
error_logger:info_msg("?MODULE:set for ~p with ~p not implemented~n",
[Name, Property]).
stop(Name) when is_atom(Name) ->
gen_server:cast(Name, stop);
stop(Pid) when is_pid(Pid) ->
gen_server:cast(Pid, stop);
stop({local, Name}) ->
stop(Name);
stop({global, Name}) ->
stop(Name);
stop(Options) ->
State = parse_options(Options),
stop(State#mochiweb_socket_server.name).
%% Internal API
parse_options(Options) ->
parse_options(Options, #mochiweb_socket_server{}).
parse_options([], State) ->
State;
parse_options([{name, L} | Rest], State) when is_list(L) ->
Name = {local, list_to_atom(L)},
parse_options(Rest, State#mochiweb_socket_server{name=Name});
parse_options([{name, A} | Rest], State) when A =:= undefined ->
parse_options(Rest, State#mochiweb_socket_server{name=A});
parse_options([{name, A} | Rest], State) when is_atom(A) ->
Name = {local, A},
parse_options(Rest, State#mochiweb_socket_server{name=Name});
parse_options([{name, Name} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{name=Name});
parse_options([{port, L} | Rest], State) when is_list(L) ->
Port = list_to_integer(L),
parse_options(Rest, State#mochiweb_socket_server{port=Port});
parse_options([{port, Port} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{port=Port});
parse_options([{ip, Ip} | Rest], State) ->
ParsedIp = case Ip of
any ->
any;
Ip when is_tuple(Ip) ->
Ip;
Ip when is_list(Ip) ->
{ok, IpTuple} = inet_parse:address(Ip),
IpTuple
end,
parse_options(Rest, State#mochiweb_socket_server{ip=ParsedIp});
parse_options([{loop, Loop} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{loop=Loop});
parse_options([{backlog, Backlog} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{backlog=Backlog});
parse_options([{nodelay, NoDelay} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{nodelay=NoDelay});
parse_options([{acceptor_pool_size, Max} | Rest], State) ->
MaxInt = ensure_int(Max),
parse_options(Rest,
State#mochiweb_socket_server{acceptor_pool_size=MaxInt});
parse_options([{max, Max} | Rest], State) ->
error_logger:info_report([{warning, "TODO: max is currently unsupported"},
{max, Max}]),
MaxInt = ensure_int(Max),
parse_options(Rest, State#mochiweb_socket_server{max=MaxInt});
parse_options([{ssl, Ssl} | Rest], State) when is_boolean(Ssl) ->
parse_options(Rest, State#mochiweb_socket_server{ssl=Ssl});
parse_options([{ssl_opts, SslOpts} | Rest], State) when is_list(SslOpts) ->
SslOpts1 = [{ssl_imp, new} | proplists:delete(ssl_imp, SslOpts)],
parse_options(Rest, State#mochiweb_socket_server{ssl_opts=SslOpts1});
parse_options([{profile_fun, ProfileFun} | Rest], State) when is_function(ProfileFun) ->
parse_options(Rest, State#mochiweb_socket_server{profile_fun=ProfileFun}).
start_server(State=#mochiweb_socket_server{ssl=Ssl, name=Name}) ->
case Ssl of
true ->
application:start(crypto),
application:start(public_key),
application:start(ssl);
false ->
void
end,
case Name of
undefined ->
gen_server:start_link(?MODULE, State, []);
_ ->
gen_server:start_link(Name, ?MODULE, State, [])
end.
ensure_int(N) when is_integer(N) ->
N;
ensure_int(S) when is_list(S) ->
list_to_integer(S).
ipv6_supported() ->
case (catch inet:getaddr("localhost", inet6)) of
{ok, _Addr} ->
true;
{error, _} ->
false
end.
init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog, nodelay=NoDelay}) ->
process_flag(trap_exit, true),
BaseOpts = [binary,
{reuseaddr, true},
{packet, 0},
{backlog, Backlog},
{recbuf, ?RECBUF_SIZE},
{active, false},
{nodelay, NoDelay}],
Opts = case Ip of
any ->
case ipv6_supported() of % IPv4, and IPv6 if supported
true -> [inet, inet6 | BaseOpts];
_ -> BaseOpts
end;
IPv4
[inet, {ip, Ip} | BaseOpts];
{_, _, _, _, _, _, _, _} -> % IPv6
[inet6, {ip, Ip} | BaseOpts]
end,
case listen(Port, Opts, State) of
{stop, eacces} ->
case Port < 1024 of
true ->
case catch fdsrv:start() of
{ok, _} ->
case fdsrv:bind_socket(tcp, Port) of
{ok, Fd} ->
listen(Port, [{fd, Fd} | Opts], State);
_ ->
{stop, fdsrv_bind_failed}
end;
_ ->
{stop, fdsrv_start_failed}
end;
false ->
{stop, eacces}
end;
Other ->
Other
end.
new_acceptor_pool(Listen,
State=#mochiweb_socket_server{acceptor_pool=Pool,
acceptor_pool_size=Size,
loop=Loop}) ->
F = fun (_, S) ->
Pid = mochiweb_acceptor:start_link(self(), Listen, Loop),
sets:add_element(Pid, S)
end,
Pool1 = lists:foldl(F, Pool, lists:seq(1, Size)),
State#mochiweb_socket_server{acceptor_pool=Pool1}.
listen(Port, Opts, State=#mochiweb_socket_server{ssl=Ssl, ssl_opts=SslOpts}) ->
case mochiweb_socket:listen(Ssl, Port, Opts, SslOpts) of
{ok, Listen} ->
{ok, ListenPort} = mochiweb_socket:port(Listen),
{ok, new_acceptor_pool(
Listen,
State#mochiweb_socket_server{listen=Listen,
port=ListenPort})};
{error, Reason} ->
{stop, Reason}
end.
do_get(port, #mochiweb_socket_server{port=Port}) ->
Port;
do_get(active_sockets, #mochiweb_socket_server{active_sockets=ActiveSockets}) ->
ActiveSockets.
state_to_proplist(#mochiweb_socket_server{name=Name,
port=Port,
active_sockets=ActiveSockets}) ->
[{name, Name}, {port, Port}, {active_sockets, ActiveSockets}].
upgrade_state(State = #mochiweb_socket_server{}) ->
State;
upgrade_state({mochiweb_socket_server, Port, Loop, Name,
Max, IP, Listen, NoDelay, Backlog, ActiveSockets,
AcceptorPoolSize, SSL, SSL_opts,
AcceptorPool}) ->
#mochiweb_socket_server{port=Port, loop=Loop, name=Name, max=Max, ip=IP,
listen=Listen, nodelay=NoDelay, backlog=Backlog,
active_sockets=ActiveSockets,
acceptor_pool_size=AcceptorPoolSize,
ssl=SSL,
ssl_opts=SSL_opts,
acceptor_pool=AcceptorPool}.
handle_call(Req, From, State) when ?is_old_state(State) ->
handle_call(Req, From, upgrade_state(State));
handle_call({get, Property}, _From, State) ->
Res = do_get(Property, State),
{reply, Res, State};
handle_call(_Message, _From, State) ->
Res = error,
{reply, Res, State}.
handle_cast(Req, State) when ?is_old_state(State) ->
handle_cast(Req, upgrade_state(State));
handle_cast({accepted, Pid, Timing},
State=#mochiweb_socket_server{active_sockets=ActiveSockets}) ->
State1 = State#mochiweb_socket_server{active_sockets=1 + ActiveSockets},
case State#mochiweb_socket_server.profile_fun of
undefined ->
undefined;
F when is_function(F) ->
catch F([{timing, Timing} | state_to_proplist(State1)])
end,
{noreply, recycle_acceptor(Pid, State1)};
handle_cast({set, profile_fun, ProfileFun}, State) ->
State1 = case ProfileFun of
ProfileFun when is_function(ProfileFun); ProfileFun =:= undefined ->
State#mochiweb_socket_server{profile_fun=ProfileFun};
_ ->
State
end,
{noreply, State1};
handle_cast(stop, State) ->
{stop, normal, State}.
terminate(Reason, State) when ?is_old_state(State) ->
terminate(Reason, upgrade_state(State));
terminate(_Reason, #mochiweb_socket_server{listen=Listen, port=Port}) ->
mochiweb_socket:close(Listen),
case Port < 1024 of
true ->
catch fdsrv:stop(),
ok;
false ->
ok
end.
code_change(_OldVsn, State, _Extra) ->
State.
recycle_acceptor(Pid, State=#mochiweb_socket_server{
acceptor_pool=Pool,
listen=Listen,
loop=Loop,
active_sockets=ActiveSockets}) ->
case sets:is_element(Pid, Pool) of
true ->
Acceptor = mochiweb_acceptor:start_link(self(), Listen, Loop),
Pool1 = sets:add_element(Acceptor, sets:del_element(Pid, Pool)),
State#mochiweb_socket_server{acceptor_pool=Pool1};
false ->
State#mochiweb_socket_server{active_sockets=ActiveSockets - 1}
end.
handle_info(Msg, State) when ?is_old_state(State) ->
handle_info(Msg, upgrade_state(State));
handle_info({'EXIT', Pid, normal}, State) ->
{noreply, recycle_acceptor(Pid, State)};
handle_info({'EXIT', Pid, Reason},
State=#mochiweb_socket_server{acceptor_pool=Pool}) ->
case sets:is_element(Pid, Pool) of
true ->
%% If there was an unexpected error accepting, log and sleep.
error_logger:error_report({?MODULE, ?LINE,
{acceptor_error, Reason}}),
timer:sleep(100);
false ->
ok
end,
{noreply, recycle_acceptor(Pid, State)};
% this is what release_handler needs to get a list of modules,
% since our supervisor modules list is set to 'dynamic'
see sasl-2.1.9.2 / src / release_handler_1.erl get_dynamic_mods
handle_info({From, Tag, get_modules}, State = #mochiweb_socket_server{name={local,Mod}}) ->
From ! {element(2,Tag), [Mod]},
{noreply, State};
% If for some reason we can't get the module name, send empty list to avoid release_handler timeout:
handle_info({From, Tag, get_modules}, State) ->
error_logger:info_msg("mochiweb_socket_server replying to dynamic modules request as '[]'~n",[]),
From ! {element(2,Tag), []},
{noreply, State};
handle_info(Info, State) ->
error_logger:info_report([{'INFO', Info}, {'State', State}]),
{noreply, State}.
%%
%% Tests
%%
-include_lib("eunit/include/eunit.hrl").
-ifdef(TEST).
upgrade_state_test() ->
OldState = {mochiweb_socket_server,
port, loop, name,
max, ip, listen,
nodelay, backlog,
active_sockets,
acceptor_pool_size,
ssl, ssl_opts, acceptor_pool},
State = upgrade_state(OldState),
CmpState = #mochiweb_socket_server{port=port, loop=loop,
name=name, max=max, ip=ip,
listen=listen, nodelay=nodelay,
backlog=backlog,
active_sockets=active_sockets,
acceptor_pool_size=acceptor_pool_size,
ssl=ssl, ssl_opts=ssl_opts,
acceptor_pool=acceptor_pool,
profile_fun=undefined},
?assertEqual(CmpState, State).
-endif.
| null | https://raw.githubusercontent.com/RJ/mochiweb-websockets/aded564a1a647157258d54f346bbb5231b07550e/src/mochiweb_socket_server.erl | erlang | NOTE: This is currently ignored.
Internal API
IPv4, and IPv6 if supported
IPv6
If there was an unexpected error accepting, log and sleep.
this is what release_handler needs to get a list of modules,
since our supervisor modules list is set to 'dynamic'
If for some reason we can't get the module name, send empty list to avoid release_handler timeout:
Tests
| @author < >
2007 Mochi Media , Inc.
@doc MochiWeb socket server .
-module(mochiweb_socket_server).
-author('').
-behaviour(gen_server).
-include("internal.hrl").
-export([start/1, stop/1]).
-export([init/1, handle_call/3, handle_cast/2, terminate/2, code_change/3,
handle_info/2]).
-export([get/2, set/3]).
-record(mochiweb_socket_server,
{port,
loop,
name=undefined,
max=2048,
ip=any,
listen=null,
nodelay=false,
backlog=128,
active_sockets=0,
acceptor_pool_size=16,
ssl=false,
ssl_opts=[{ssl_imp, new}],
acceptor_pool=sets:new(),
profile_fun=undefined}).
-define(is_old_state(State), not is_record(State, mochiweb_socket_server)).
start(State=#mochiweb_socket_server{}) ->
start_server(State);
start(Options) ->
start(parse_options(Options)).
get(Name, Property) ->
gen_server:call(Name, {get, Property}).
set(Name, profile_fun, Fun) ->
gen_server:cast(Name, {set, profile_fun, Fun});
set(Name, Property, _Value) ->
error_logger:info_msg("?MODULE:set for ~p with ~p not implemented~n",
[Name, Property]).
stop(Name) when is_atom(Name) ->
gen_server:cast(Name, stop);
stop(Pid) when is_pid(Pid) ->
gen_server:cast(Pid, stop);
stop({local, Name}) ->
stop(Name);
stop({global, Name}) ->
stop(Name);
stop(Options) ->
State = parse_options(Options),
stop(State#mochiweb_socket_server.name).
parse_options(Options) ->
parse_options(Options, #mochiweb_socket_server{}).
parse_options([], State) ->
State;
parse_options([{name, L} | Rest], State) when is_list(L) ->
Name = {local, list_to_atom(L)},
parse_options(Rest, State#mochiweb_socket_server{name=Name});
parse_options([{name, A} | Rest], State) when A =:= undefined ->
parse_options(Rest, State#mochiweb_socket_server{name=A});
parse_options([{name, A} | Rest], State) when is_atom(A) ->
Name = {local, A},
parse_options(Rest, State#mochiweb_socket_server{name=Name});
parse_options([{name, Name} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{name=Name});
parse_options([{port, L} | Rest], State) when is_list(L) ->
Port = list_to_integer(L),
parse_options(Rest, State#mochiweb_socket_server{port=Port});
parse_options([{port, Port} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{port=Port});
parse_options([{ip, Ip} | Rest], State) ->
ParsedIp = case Ip of
any ->
any;
Ip when is_tuple(Ip) ->
Ip;
Ip when is_list(Ip) ->
{ok, IpTuple} = inet_parse:address(Ip),
IpTuple
end,
parse_options(Rest, State#mochiweb_socket_server{ip=ParsedIp});
parse_options([{loop, Loop} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{loop=Loop});
parse_options([{backlog, Backlog} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{backlog=Backlog});
parse_options([{nodelay, NoDelay} | Rest], State) ->
parse_options(Rest, State#mochiweb_socket_server{nodelay=NoDelay});
parse_options([{acceptor_pool_size, Max} | Rest], State) ->
MaxInt = ensure_int(Max),
parse_options(Rest,
State#mochiweb_socket_server{acceptor_pool_size=MaxInt});
parse_options([{max, Max} | Rest], State) ->
error_logger:info_report([{warning, "TODO: max is currently unsupported"},
{max, Max}]),
MaxInt = ensure_int(Max),
parse_options(Rest, State#mochiweb_socket_server{max=MaxInt});
parse_options([{ssl, Ssl} | Rest], State) when is_boolean(Ssl) ->
parse_options(Rest, State#mochiweb_socket_server{ssl=Ssl});
parse_options([{ssl_opts, SslOpts} | Rest], State) when is_list(SslOpts) ->
SslOpts1 = [{ssl_imp, new} | proplists:delete(ssl_imp, SslOpts)],
parse_options(Rest, State#mochiweb_socket_server{ssl_opts=SslOpts1});
parse_options([{profile_fun, ProfileFun} | Rest], State) when is_function(ProfileFun) ->
parse_options(Rest, State#mochiweb_socket_server{profile_fun=ProfileFun}).
start_server(State=#mochiweb_socket_server{ssl=Ssl, name=Name}) ->
case Ssl of
true ->
application:start(crypto),
application:start(public_key),
application:start(ssl);
false ->
void
end,
case Name of
undefined ->
gen_server:start_link(?MODULE, State, []);
_ ->
gen_server:start_link(Name, ?MODULE, State, [])
end.
ensure_int(N) when is_integer(N) ->
N;
ensure_int(S) when is_list(S) ->
list_to_integer(S).
ipv6_supported() ->
case (catch inet:getaddr("localhost", inet6)) of
{ok, _Addr} ->
true;
{error, _} ->
false
end.
init(State=#mochiweb_socket_server{ip=Ip, port=Port, backlog=Backlog, nodelay=NoDelay}) ->
process_flag(trap_exit, true),
BaseOpts = [binary,
{reuseaddr, true},
{packet, 0},
{backlog, Backlog},
{recbuf, ?RECBUF_SIZE},
{active, false},
{nodelay, NoDelay}],
Opts = case Ip of
any ->
true -> [inet, inet6 | BaseOpts];
_ -> BaseOpts
end;
IPv4
[inet, {ip, Ip} | BaseOpts];
[inet6, {ip, Ip} | BaseOpts]
end,
case listen(Port, Opts, State) of
{stop, eacces} ->
case Port < 1024 of
true ->
case catch fdsrv:start() of
{ok, _} ->
case fdsrv:bind_socket(tcp, Port) of
{ok, Fd} ->
listen(Port, [{fd, Fd} | Opts], State);
_ ->
{stop, fdsrv_bind_failed}
end;
_ ->
{stop, fdsrv_start_failed}
end;
false ->
{stop, eacces}
end;
Other ->
Other
end.
new_acceptor_pool(Listen,
State=#mochiweb_socket_server{acceptor_pool=Pool,
acceptor_pool_size=Size,
loop=Loop}) ->
F = fun (_, S) ->
Pid = mochiweb_acceptor:start_link(self(), Listen, Loop),
sets:add_element(Pid, S)
end,
Pool1 = lists:foldl(F, Pool, lists:seq(1, Size)),
State#mochiweb_socket_server{acceptor_pool=Pool1}.
listen(Port, Opts, State=#mochiweb_socket_server{ssl=Ssl, ssl_opts=SslOpts}) ->
case mochiweb_socket:listen(Ssl, Port, Opts, SslOpts) of
{ok, Listen} ->
{ok, ListenPort} = mochiweb_socket:port(Listen),
{ok, new_acceptor_pool(
Listen,
State#mochiweb_socket_server{listen=Listen,
port=ListenPort})};
{error, Reason} ->
{stop, Reason}
end.
do_get(port, #mochiweb_socket_server{port=Port}) ->
Port;
do_get(active_sockets, #mochiweb_socket_server{active_sockets=ActiveSockets}) ->
ActiveSockets.
state_to_proplist(#mochiweb_socket_server{name=Name,
port=Port,
active_sockets=ActiveSockets}) ->
[{name, Name}, {port, Port}, {active_sockets, ActiveSockets}].
upgrade_state(State = #mochiweb_socket_server{}) ->
State;
upgrade_state({mochiweb_socket_server, Port, Loop, Name,
Max, IP, Listen, NoDelay, Backlog, ActiveSockets,
AcceptorPoolSize, SSL, SSL_opts,
AcceptorPool}) ->
#mochiweb_socket_server{port=Port, loop=Loop, name=Name, max=Max, ip=IP,
listen=Listen, nodelay=NoDelay, backlog=Backlog,
active_sockets=ActiveSockets,
acceptor_pool_size=AcceptorPoolSize,
ssl=SSL,
ssl_opts=SSL_opts,
acceptor_pool=AcceptorPool}.
handle_call(Req, From, State) when ?is_old_state(State) ->
handle_call(Req, From, upgrade_state(State));
handle_call({get, Property}, _From, State) ->
Res = do_get(Property, State),
{reply, Res, State};
handle_call(_Message, _From, State) ->
Res = error,
{reply, Res, State}.
handle_cast(Req, State) when ?is_old_state(State) ->
handle_cast(Req, upgrade_state(State));
handle_cast({accepted, Pid, Timing},
State=#mochiweb_socket_server{active_sockets=ActiveSockets}) ->
State1 = State#mochiweb_socket_server{active_sockets=1 + ActiveSockets},
case State#mochiweb_socket_server.profile_fun of
undefined ->
undefined;
F when is_function(F) ->
catch F([{timing, Timing} | state_to_proplist(State1)])
end,
{noreply, recycle_acceptor(Pid, State1)};
handle_cast({set, profile_fun, ProfileFun}, State) ->
State1 = case ProfileFun of
ProfileFun when is_function(ProfileFun); ProfileFun =:= undefined ->
State#mochiweb_socket_server{profile_fun=ProfileFun};
_ ->
State
end,
{noreply, State1};
handle_cast(stop, State) ->
{stop, normal, State}.
terminate(Reason, State) when ?is_old_state(State) ->
terminate(Reason, upgrade_state(State));
terminate(_Reason, #mochiweb_socket_server{listen=Listen, port=Port}) ->
mochiweb_socket:close(Listen),
case Port < 1024 of
true ->
catch fdsrv:stop(),
ok;
false ->
ok
end.
code_change(_OldVsn, State, _Extra) ->
State.
recycle_acceptor(Pid, State=#mochiweb_socket_server{
acceptor_pool=Pool,
listen=Listen,
loop=Loop,
active_sockets=ActiveSockets}) ->
case sets:is_element(Pid, Pool) of
true ->
Acceptor = mochiweb_acceptor:start_link(self(), Listen, Loop),
Pool1 = sets:add_element(Acceptor, sets:del_element(Pid, Pool)),
State#mochiweb_socket_server{acceptor_pool=Pool1};
false ->
State#mochiweb_socket_server{active_sockets=ActiveSockets - 1}
end.
handle_info(Msg, State) when ?is_old_state(State) ->
handle_info(Msg, upgrade_state(State));
handle_info({'EXIT', Pid, normal}, State) ->
{noreply, recycle_acceptor(Pid, State)};
handle_info({'EXIT', Pid, Reason},
State=#mochiweb_socket_server{acceptor_pool=Pool}) ->
case sets:is_element(Pid, Pool) of
true ->
error_logger:error_report({?MODULE, ?LINE,
{acceptor_error, Reason}}),
timer:sleep(100);
false ->
ok
end,
{noreply, recycle_acceptor(Pid, State)};
see sasl-2.1.9.2 / src / release_handler_1.erl get_dynamic_mods
handle_info({From, Tag, get_modules}, State = #mochiweb_socket_server{name={local,Mod}}) ->
From ! {element(2,Tag), [Mod]},
{noreply, State};
handle_info({From, Tag, get_modules}, State) ->
error_logger:info_msg("mochiweb_socket_server replying to dynamic modules request as '[]'~n",[]),
From ! {element(2,Tag), []},
{noreply, State};
handle_info(Info, State) ->
error_logger:info_report([{'INFO', Info}, {'State', State}]),
{noreply, State}.
-include_lib("eunit/include/eunit.hrl").
-ifdef(TEST).
upgrade_state_test() ->
OldState = {mochiweb_socket_server,
port, loop, name,
max, ip, listen,
nodelay, backlog,
active_sockets,
acceptor_pool_size,
ssl, ssl_opts, acceptor_pool},
State = upgrade_state(OldState),
CmpState = #mochiweb_socket_server{port=port, loop=loop,
name=name, max=max, ip=ip,
listen=listen, nodelay=nodelay,
backlog=backlog,
active_sockets=active_sockets,
acceptor_pool_size=acceptor_pool_size,
ssl=ssl, ssl_opts=ssl_opts,
acceptor_pool=acceptor_pool,
profile_fun=undefined},
?assertEqual(CmpState, State).
-endif.
|
dcf7b2e49e9751fd254d10ec513bf2d57bcebe96ddbb67e61d09f7446bb1215b | Mallku2/lua-redex-model | expsTests.rkt | #lang racket
; Black-box testing for expression that don't interact with some store
(require redex
"../grammar.rkt"
"../Relations/exps.rkt")
(define (exps-test-suite)
; Operator ()
(test-->> exps
(term (\( (< 1 2 3 >) \)))
1)
(test-->> exps
(term (\( (< >) \)))
(term nil))
(test-->> exps
(term (\( 1 \)))
(term 1))
; Arithmetic Operations
(test-->> exps
(term (1 + 1))
(term 2.0))
(test-->> exps
(term (1 - 1))
(term 0.0))
(test-->> exps
(term (1 * 1))
(term 1.0))
(test-->> exps
(term (1 / 1))
(term 1.0))
(test-->> exps
(term (1 ^ 1))
(term 1.0))
(test-->> exps
(term (1 % 1))
(term 0.0))
; Equality comparison
(test-->> exps
(term (1 == 1))
(term true))
(test-->> exps
(term ("a" == "a"))
(term true))
; Number order comparison
(test-->> exps
(term (1 < 2))
(term true))
(test-->> exps
(term (2 > 1))
(term true))
(test-->> exps
(term (2 < 1))
(term false))
(test-->> exps
(term (2 <= 1))
(term false))
(test-->> exps
(term (1 >= 2))
(term false))
(test-->> exps
(term (2 >= 2))
(term true))
(test-->> exps
(term (1 <= 2))
(term true))
; String order comparison
(test-->> exps
(term ("a" < "a"))
(term false))
(test-->> exps
(term ("a" > "a"))
(term false))
(test-->> exps
(term ("a" < "b"))
(term true))
(test-->> exps
(term ("a" <= "a"))
(term true))
(test-->> exps
(term ("a" <= "b"))
(term true))
(test-->> exps
(term ("b" >= "a"))
(term true))
; String concatenation
(test-->> exps
(term ("a" .. "b"))
(term "ab"))
(test-->> exps
(term ("" .. "b"))
(term "b"))
(test-->> exps
(term ("1" .. 2.0))
(term "12"))
(test-->> exps
(term (1 .. "2.0"))
(term "12.0"))
; String length
(test-->> exps
(term (\# "a"))
(term 1))
; Logical conectives
(test-->> exps
(term (1 and (X ())))
(term (\( (X ()) \))))
(test-->> exps
(term (nil and 2))
(term nil))
(test-->> exps
(term (true and (ref 2)))
(term (\( (ref 2) \))))
(test-->> exps
(term (false and 2))
(term false))
(test-->> exps
(term (1 or 2))
(term 1))
(test-->> exps
(term (false or 2))
(term 2))
(test-->> exps
(term (nil or 2))
(term 2))
(test-->> exps
(term (not 1))
(term false))
(test-->> exps
(term (not nil))
(term true))
(test-->> exps
(term (not false))
(term true))
; Coercion
(test-->> exps
(term ("0x2.0p0" + 1.0))
(term ((2.0 * (2 ^ 0)) + 1.0)))
(test-->> exps
(term (" 0x2.0p0 " + 1.0))
(term ((2.0 * (2 ^ 0)) + 1.0)))
(test-->> exps
(term (1 + "0x1.0p0"))
(term (1 + (1.0 * (2 ^ 0)))))
(test-->> exps
(term ("0x1.0p0" - 1))
(term ((1.0 * (2 ^ 0)) - 1)))
(test-->> exps
(term (1 - "0x1.0p0"))
(term (1 - (1.0 * (2 ^ 0)))))
(test-->> exps
(term ("0x1.0p0" * 1))
(term ((1.0 * (2 ^ 0)) * 1)))
(test-->> exps
(term (1 * "0x1.0p0"))
(term (1 * (1.0 * (2 ^ 0)))))
(test-->> exps
(term ("0x1.0p0" / 1))
(term ((1.0 * (2 ^ 0)) / 1)))
(test-->> exps
(term (1.0 / "0x1.0p0"))
(term (1.0 / (1.0 * (2 ^ 0)))))
(test-->> exps
(term ("0x1.0p0" ^ 1.0))
(term ((1.0 * (2 ^ 0)) ^ 1.0)))
(test-->> exps
(term (1.0 ^ "0x1.0p0"))
(term (1.0 ^ (1.0 * (2 ^ 0)))))
(test-->> exps
(term ("0x1.0p0" % 1.0))
(term ((1.0 * (2 ^ 0)) % 1.0)))
(test-->> exps
(term (1.0 % "0x1.0p0"))
(term (1.0 % (1.0 * (2 ^ 0)))))
(test-->> exps
(term (- "0x1.0p0"))
(term (- (1.0 * (2 ^ 0)))))
; Abnormal expressions
(test-->> exps
(term ("a" + 1))
(term (("a" + 1)ArithWrongOps)))
(test-->> exps
(term (1 + "a"))
(term ((1 + "a")ArithWrongOps)))
(test-->> exps
(term ("0xq" + 1))
(term (("0xq" + 1)ArithWrongOps)))
(test-->> exps
(term (1 + "0xq"))
(term ((1 + "0xq")ArithWrongOps)))
(test-->> exps
(term (1 + "0x1.q"))
(term ((1 + "0x1.q")ArithWrongOps)))
(test-->> exps
(term (1 + "0x1.1pq"))
(term ((1 + "0x1.1pq")ArithWrongOps)))
(test-->> exps
(term ("a" - 1))
(term (("a" - 1)ArithWrongOps)))
(test-->> exps
(term (1 - "a"))
(term ((1 - "a")ArithWrongOps)))
(test-->> exps
(term ("0xq" - 1))
(term (("0xq" - 1)ArithWrongOps)))
(test-->> exps
(term (1 - "0xq"))
(term ((1 - "0xq")ArithWrongOps)))
(test-->> exps
(term (1 - "0x1.q"))
(term ((1 - "0x1.q")ArithWrongOps)))
(test-->> exps
(term (1 - "0x1.1pq"))
(term ((1 - "0x1.1pq")ArithWrongOps)))
(test-->> exps
(term ("a" * 1))
(term (("a" * 1)ArithWrongOps)))
(test-->> exps
(term (1 * "a"))
(term ((1 * "a")ArithWrongOps)))
(test-->> exps
(term ("0xq" * 1))
(term (("0xq" * 1)ArithWrongOps)))
(test-->> exps
(term (1 * "0xq"))
(term ((1 * "0xq")ArithWrongOps)))
(test-->> exps
(term (1 * "0x1.q"))
(term ((1 * "0x1.q")ArithWrongOps)))
(test-->> exps
(term (1 * "0x1.1pq"))
(term ((1 * "0x1.1pq")ArithWrongOps)))
(test-->> exps
(term ("a" ^ 1))
(term (("a" ^ 1)ArithWrongOps)))
(test-->> exps
(term (1 ^ "a"))
(term ((1 ^ "a")ArithWrongOps)))
(test-->> exps
(term ("0xq" ^ 1))
(term (("0xq" ^ 1)ArithWrongOps)))
(test-->> exps
(term (1 ^ "0xq"))
(term ((1 ^ "0xq")ArithWrongOps)))
(test-->> exps
(term (1 ^ "0x1.q"))
(term ((1 ^ "0x1.q")ArithWrongOps)))
(test-->> exps
(term (1 ^ "0x1.1pq"))
(term ((1 ^ "0x1.1pq")ArithWrongOps)))
(test-->> exps
(term ("a" % 1))
(term (("a" % 1)ArithWrongOps)))
(test-->> exps
(term (1 % "a"))
(term ((1 % "a")ArithWrongOps)))
(test-->> exps
(term ("0xq" % 1))
(term (("0xq" % 1)ArithWrongOps)))
(test-->> exps
(term (1 % "0xq"))
(term ((1 % "0xq")ArithWrongOps)))
(test-->> exps
(term (1 % "0x1.q"))
(term ((1 % "0x1.q")ArithWrongOps)))
(test-->> exps
(term (1 % "0x1.1pq"))
(term ((1 % "0x1.1pq")ArithWrongOps)))
(test-->> exps
(term (- "a"))
(term ((- "a")NegWrongOp)))
(test-->> exps
(term (- "0xq"))
(term ((- "0xq")NegWrongOp)))
(test-->> exps
(term (- "0x1.q"))
(term ((- "0x1.q")NegWrongOp)))
(test-->> exps
(term (- "0x1.1pq"))
(term ((- "0x1.1pq")NegWrongOp)))
(test-->> exps
(term ("a" .. (objr 1)))
(term (("a" .. (objr 1))StrConcatWrongOps)))
(test-->> exps
(term (\# (objr 1)))
(term ((\# (objr 1))StrLenWrongOp)))
(test-->> exps
(term ("a" == "b"))
(term (("a" == "b")EqFail)))
(test-->> exps
(term (true == 1))
(term ((true == 1)EqFail)))
(test-->> exps
(term ((objr 1) < (objr 1)))
(term (((objr 1) < (objr 1))OrdCompWrongOps)))
(test-->> exps
(term ((objr 1) <= (objr 1)))
(term (((objr 1) <= (objr 1))OrdCompWrongOps)))
(test-results))
(provide exps-test-suite)
| null | https://raw.githubusercontent.com/Mallku2/lua-redex-model/13a1b8cacbdc72a1b5cb1a1f140f21cc974d71c3/Tests/expsTests.rkt | racket | Black-box testing for expression that don't interact with some store
Operator ()
Arithmetic Operations
Equality comparison
Number order comparison
String order comparison
String concatenation
String length
Logical conectives
Coercion
Abnormal expressions | #lang racket
(require redex
"../grammar.rkt"
"../Relations/exps.rkt")
(define (exps-test-suite)
(test-->> exps
(term (\( (< 1 2 3 >) \)))
1)
(test-->> exps
(term (\( (< >) \)))
(term nil))
(test-->> exps
(term (\( 1 \)))
(term 1))
(test-->> exps
(term (1 + 1))
(term 2.0))
(test-->> exps
(term (1 - 1))
(term 0.0))
(test-->> exps
(term (1 * 1))
(term 1.0))
(test-->> exps
(term (1 / 1))
(term 1.0))
(test-->> exps
(term (1 ^ 1))
(term 1.0))
(test-->> exps
(term (1 % 1))
(term 0.0))
(test-->> exps
(term (1 == 1))
(term true))
(test-->> exps
(term ("a" == "a"))
(term true))
(test-->> exps
(term (1 < 2))
(term true))
(test-->> exps
(term (2 > 1))
(term true))
(test-->> exps
(term (2 < 1))
(term false))
(test-->> exps
(term (2 <= 1))
(term false))
(test-->> exps
(term (1 >= 2))
(term false))
(test-->> exps
(term (2 >= 2))
(term true))
(test-->> exps
(term (1 <= 2))
(term true))
(test-->> exps
(term ("a" < "a"))
(term false))
(test-->> exps
(term ("a" > "a"))
(term false))
(test-->> exps
(term ("a" < "b"))
(term true))
(test-->> exps
(term ("a" <= "a"))
(term true))
(test-->> exps
(term ("a" <= "b"))
(term true))
(test-->> exps
(term ("b" >= "a"))
(term true))
(test-->> exps
(term ("a" .. "b"))
(term "ab"))
(test-->> exps
(term ("" .. "b"))
(term "b"))
(test-->> exps
(term ("1" .. 2.0))
(term "12"))
(test-->> exps
(term (1 .. "2.0"))
(term "12.0"))
(test-->> exps
(term (\# "a"))
(term 1))
(test-->> exps
(term (1 and (X ())))
(term (\( (X ()) \))))
(test-->> exps
(term (nil and 2))
(term nil))
(test-->> exps
(term (true and (ref 2)))
(term (\( (ref 2) \))))
(test-->> exps
(term (false and 2))
(term false))
(test-->> exps
(term (1 or 2))
(term 1))
(test-->> exps
(term (false or 2))
(term 2))
(test-->> exps
(term (nil or 2))
(term 2))
(test-->> exps
(term (not 1))
(term false))
(test-->> exps
(term (not nil))
(term true))
(test-->> exps
(term (not false))
(term true))
(test-->> exps
(term ("0x2.0p0" + 1.0))
(term ((2.0 * (2 ^ 0)) + 1.0)))
(test-->> exps
(term (" 0x2.0p0 " + 1.0))
(term ((2.0 * (2 ^ 0)) + 1.0)))
(test-->> exps
(term (1 + "0x1.0p0"))
(term (1 + (1.0 * (2 ^ 0)))))
(test-->> exps
(term ("0x1.0p0" - 1))
(term ((1.0 * (2 ^ 0)) - 1)))
(test-->> exps
(term (1 - "0x1.0p0"))
(term (1 - (1.0 * (2 ^ 0)))))
(test-->> exps
(term ("0x1.0p0" * 1))
(term ((1.0 * (2 ^ 0)) * 1)))
(test-->> exps
(term (1 * "0x1.0p0"))
(term (1 * (1.0 * (2 ^ 0)))))
(test-->> exps
(term ("0x1.0p0" / 1))
(term ((1.0 * (2 ^ 0)) / 1)))
(test-->> exps
(term (1.0 / "0x1.0p0"))
(term (1.0 / (1.0 * (2 ^ 0)))))
(test-->> exps
(term ("0x1.0p0" ^ 1.0))
(term ((1.0 * (2 ^ 0)) ^ 1.0)))
(test-->> exps
(term (1.0 ^ "0x1.0p0"))
(term (1.0 ^ (1.0 * (2 ^ 0)))))
(test-->> exps
(term ("0x1.0p0" % 1.0))
(term ((1.0 * (2 ^ 0)) % 1.0)))
(test-->> exps
(term (1.0 % "0x1.0p0"))
(term (1.0 % (1.0 * (2 ^ 0)))))
(test-->> exps
(term (- "0x1.0p0"))
(term (- (1.0 * (2 ^ 0)))))
(test-->> exps
(term ("a" + 1))
(term (("a" + 1)ArithWrongOps)))
(test-->> exps
(term (1 + "a"))
(term ((1 + "a")ArithWrongOps)))
(test-->> exps
(term ("0xq" + 1))
(term (("0xq" + 1)ArithWrongOps)))
(test-->> exps
(term (1 + "0xq"))
(term ((1 + "0xq")ArithWrongOps)))
(test-->> exps
(term (1 + "0x1.q"))
(term ((1 + "0x1.q")ArithWrongOps)))
(test-->> exps
(term (1 + "0x1.1pq"))
(term ((1 + "0x1.1pq")ArithWrongOps)))
(test-->> exps
(term ("a" - 1))
(term (("a" - 1)ArithWrongOps)))
(test-->> exps
(term (1 - "a"))
(term ((1 - "a")ArithWrongOps)))
(test-->> exps
(term ("0xq" - 1))
(term (("0xq" - 1)ArithWrongOps)))
(test-->> exps
(term (1 - "0xq"))
(term ((1 - "0xq")ArithWrongOps)))
(test-->> exps
(term (1 - "0x1.q"))
(term ((1 - "0x1.q")ArithWrongOps)))
(test-->> exps
(term (1 - "0x1.1pq"))
(term ((1 - "0x1.1pq")ArithWrongOps)))
(test-->> exps
(term ("a" * 1))
(term (("a" * 1)ArithWrongOps)))
(test-->> exps
(term (1 * "a"))
(term ((1 * "a")ArithWrongOps)))
(test-->> exps
(term ("0xq" * 1))
(term (("0xq" * 1)ArithWrongOps)))
(test-->> exps
(term (1 * "0xq"))
(term ((1 * "0xq")ArithWrongOps)))
(test-->> exps
(term (1 * "0x1.q"))
(term ((1 * "0x1.q")ArithWrongOps)))
(test-->> exps
(term (1 * "0x1.1pq"))
(term ((1 * "0x1.1pq")ArithWrongOps)))
(test-->> exps
(term ("a" ^ 1))
(term (("a" ^ 1)ArithWrongOps)))
(test-->> exps
(term (1 ^ "a"))
(term ((1 ^ "a")ArithWrongOps)))
(test-->> exps
(term ("0xq" ^ 1))
(term (("0xq" ^ 1)ArithWrongOps)))
(test-->> exps
(term (1 ^ "0xq"))
(term ((1 ^ "0xq")ArithWrongOps)))
(test-->> exps
(term (1 ^ "0x1.q"))
(term ((1 ^ "0x1.q")ArithWrongOps)))
(test-->> exps
(term (1 ^ "0x1.1pq"))
(term ((1 ^ "0x1.1pq")ArithWrongOps)))
(test-->> exps
(term ("a" % 1))
(term (("a" % 1)ArithWrongOps)))
(test-->> exps
(term (1 % "a"))
(term ((1 % "a")ArithWrongOps)))
(test-->> exps
(term ("0xq" % 1))
(term (("0xq" % 1)ArithWrongOps)))
(test-->> exps
(term (1 % "0xq"))
(term ((1 % "0xq")ArithWrongOps)))
(test-->> exps
(term (1 % "0x1.q"))
(term ((1 % "0x1.q")ArithWrongOps)))
(test-->> exps
(term (1 % "0x1.1pq"))
(term ((1 % "0x1.1pq")ArithWrongOps)))
(test-->> exps
(term (- "a"))
(term ((- "a")NegWrongOp)))
(test-->> exps
(term (- "0xq"))
(term ((- "0xq")NegWrongOp)))
(test-->> exps
(term (- "0x1.q"))
(term ((- "0x1.q")NegWrongOp)))
(test-->> exps
(term (- "0x1.1pq"))
(term ((- "0x1.1pq")NegWrongOp)))
(test-->> exps
(term ("a" .. (objr 1)))
(term (("a" .. (objr 1))StrConcatWrongOps)))
(test-->> exps
(term (\# (objr 1)))
(term ((\# (objr 1))StrLenWrongOp)))
(test-->> exps
(term ("a" == "b"))
(term (("a" == "b")EqFail)))
(test-->> exps
(term (true == 1))
(term ((true == 1)EqFail)))
(test-->> exps
(term ((objr 1) < (objr 1)))
(term (((objr 1) < (objr 1))OrdCompWrongOps)))
(test-->> exps
(term ((objr 1) <= (objr 1)))
(term (((objr 1) <= (objr 1))OrdCompWrongOps)))
(test-results))
(provide exps-test-suite)
|
a3264cac07e18c13bb8211d8990dd3324d95b47778fca55f8cd249465aea0867 | drewolson/aoc-hs | Day11Spec.hs | module Aoc.Year2021.Day11Spec
( spec,
)
where
import Aoc.Year2021.Day11 qualified as Day11
import Test.Hspec (Spec, describe, it, shouldBe)
import Text.RawString.QQ (r)
-- | Ten-by-ten octopus energy grid: the worked sample from the Advent of
-- Code 2021 day 11 problem statement (raw-string quasiquote keeps the rows
-- verbatim, including the trailing newline before @|]@).
input :: String
input =
  [r|5483143223
2745854711
5264556173
6141336146
6357385478
4167524645
2176841721
6882881134
4846848554
5283751526
|]
-- | Hspec tree exercising both puzzle parts against the shared sample 'input':
-- the expected answers (1656 flashes, step 195) come from the AoC 2021 day 11
-- problem statement.
spec :: Spec
spec = do
  describe "part1" $
    it "day 11 part 1 works" $
      Day11.part1 input `shouldBe` 1656
  describe "part2" $
    it "day 11 part 2 works" $
      Day11.part2 input `shouldBe` 195
| null | https://raw.githubusercontent.com/drewolson/aoc-hs/b0f06843e1f7d8c2af1da582e59a72d8f2bb73bf/aoc-year2021/test/Aoc/Year2021/Day11Spec.hs | haskell | module Aoc.Year2021.Day11Spec
( spec,
)
where
import Aoc.Year2021.Day11 qualified as Day11
import Test.Hspec (Spec, describe, it, shouldBe)
import Text.RawString.QQ (r)
input :: String
input =
[r|5483143223
2745854711
5264556173
6141336146
6357385478
4167524645
2176841721
6882881134
4846848554
5283751526
|]
spec :: Spec
spec = do
describe "part1" do
it "day 11 part 1 works" do
let result = Day11.part1 input
result `shouldBe` 1656
describe "part2" do
it "day 11 part 2 works" do
let result = Day11.part2 input
result `shouldBe` 195
| |
6ff2a4a72b159dcb37dd3cb86a4bcb3615f7410aacbf0506d2fc9e3ce9edb95b | bobzhang/fan | gadt_32.ml | (** FAILED *)
(* GADT indexed by its payload type: constructor [A] produces an [int u] and
   [B] a [bool u], so matching on a [t u] refines what [t] is. *)
type _ u =
  |A : int -> int u
  |B : bool -> bool u
| null | https://raw.githubusercontent.com/bobzhang/fan/7ed527d96c5a006da43d3813f32ad8a5baa31b7f/src/todoml/tests/gadt_32.ml | ocaml | * FAILED | type _ u =
|A : int -> int u
|B : bool -> bool u
|
5326f8ed8b61f013cbc16cec1979966ec8e80476a1b80a0e12a943c884c6ca6e | scheme/rx | interfaces.scm | (define-interface utilities-interface
(export mapv mapv! vector-every? copy-vector
initialize-vector vector-append
vfold vfold-right
check-arg
deprecated-proc
real->exact-integer))
(define-interface let-opt-expanders-interface
(export expand-let-optionals
expand-let-optionals*))
(define-interface let-opt-interface
(export (let-optionals :syntax)
(let-optionals* :syntax)
(:optional :syntax)))
(define-interface basic-re-interface
(export (re-dsm? (proc (:value) :boolean))
((re-dsm make-re-dsm) (proc (:value :exact-integer :exact-integer) :value))
(re-dsm:body (proc (:value) :value))
(re-dsm:pre-dsm (proc (:value) :exact-integer))
(re-dsm:tsm (proc (:value) :exact-integer))
(re-dsm:posix (proc (:value) :value))
(set-re-dsm:posix (proc (:value :value) :unspecific))
(re-dsm:post-dsm (proc (:value) :exact-integer))
open-dsm
(re-seq? (proc (:value) :boolean))
(really-make-re-seq (proc (:value :exact-integer :value) :value))
(make-re-seq/tsm (proc (:value :exact-integer) :value))
((re-seq make-re-seq) (proc (:value) :value))
(re-seq:elts (proc (:value) :value))
(re-seq:tsm (proc (:value) :exact-integer))
(re-seq:posix (proc (:value) :value))
(set-re-seq:posix (proc (:value :value) :unspecific))
(re-choice? (proc (:value) :boolean))
(really-make-re-choice (proc (:value :exact-integer :value) :value))
(make-re-choice/tsm (proc (:value :exact-integer) :value))
((make-re-choice re-choice) (proc (:value) :value))
(re-choice:elts (proc (:value) :value))
(re-choice:tsm (proc (:value) :exact-integer))
(re-choice:posix (proc (:value) :value))
(set-re-choice:posix (proc (:value :value) :unspecific))
(re-repeat? (proc (:value) :boolean))
(really-make-re-repeat (proc (:exact-integer
:value :value
:exact-integer :value)
:value))
(make-re-repeat/tsm (proc (:exact-integer :value :value :exact-integer )
:value))
((re-repeat make-re-repeat)
(proc (:exact-integer :value :value) :value))
((re-repeat:from re-repeat:tsm)
(proc (:value) :exact-integer))
(re-repeat:to (proc (:value) :value))
((re-repeat:body re-repeat:posix)
(proc (:value) :value))
(set-re-repeat:posix (proc (:value :value) :unspecific))
(re-submatch? (proc (:value) :boolean))
(really-make-re-submatch (proc (:value :exact-integer :exact-integer :value)
:value))
(make-re-submatch/tsm (proc (:value :exact-integer :exact-integer) :value))
((make-re-submatch re-submatch)
(proc (:value &opt :exact-integer :exact-integer) :value))
(re-submatch:body (proc (:value) :value))
((re-submatch:pre-dsm re-submatch:tsm re-submatch:post-dsm)
(proc (:value) :exact-integer))
(re-submatch:posix (proc (:value) :value))
(set-re-submatch:posix (proc (:value :value) :unspecific))
(re-string? (proc (:value) :boolean))
((make-re-string re-string) (proc (:string) :value))
(re-string:chars (proc (:value) :string))
(set-re-string:chars (proc (:value :string) :unspecific))
(re-string:posix (proc (:value) :value))
(set-re-string:posix (proc (:value :value) :unspecific))
re-trivial
(re-trivial? (proc (:value) :boolean))
(re-char-set? (proc (:value) :boolean))
((make-re-char-set re-char-set) (proc (:value) :value))
(re-char-set:cset (proc (:value) :value))
(set-re-char-set:cset (proc (:value :value) :unspecific))
(re-char-set:posix (proc (:value) :value))
(set-re-char-set:posix (proc (:value :value) :unspecific))
re-empty
(re-empty? (proc (:value) :boolean))
re-bos re-eos
re-bol re-eol
((re-bos? re-eos? re-bol? re-eol? re-any?)
(proc (:value) :boolean))
re-any
re-nonl
(regexp? (proc (:value) :boolean))
(re-tsm (proc (:value) :exact-integer))
These guys can be in code produced by RX expander .
(flush-submatches (proc (:value) :value))
(uncase (proc (:value) :value))
(uncase-char-set (proc (:value) :value))
(uncase-string (proc (:string) :value))))
(define-interface re-internals-interface
These are constructors for the Scheme unparser
(export
(make-re-string/posix (proc (:string :string :vector) :value))
((make-re-seq/posix make-re-choice/posix)
(proc (:value :exact-integer :string :vector) :value))
(make-re-char-set/posix (proc (:value :string :vector) :value))
(make-re-repeat/posix (proc (:exact-integer :value :value :exact-integer :string :vector)
:value))
(make-re-dsm/posix (proc (:value :exact-integer :exact-integer :string :vector)
:value))
(make-re-submatch/posix (proc (:value :exact-integer :exact-integer :string :vector) :value))))
(define-interface re-match-internals-interface
(export (regexp-match:string (proc (:value) :string))
(regexp-match:submatches (proc (:value) :vector))))
(define-interface posix-re-interface
(export (regexp->posix-string (proc (:value) :string)) ; posixstr.scm
(posix-string->regexp (proc (:string) :value)))) ; spencer
(define-interface re-subst-interface
(export
(regexp-substitute (proc (:value :value &rest :value) :value))
(regexp-substitute/global (proc (:value :value :string &rest :value) :value))))
(define-interface re-folders-interface
(export
(regexp-fold (proc (:value (proc (:exact-integer :value :value) :value)
:value
:string
&opt (proc (:exact-integer :value) :value)
:exact-integer)
:value))
(regexp-fold-right (proc (:value (proc (:value :exact-integer :value) :value)
:value
:string
&opt (proc (:exact-integer :value) :value)
:exact-integer)
:value))
(regexp-for-each (proc (:value (proc (:value) :unspecific)
:string &opt :exact-integer)
:unspecific))))
(define-interface re-level-0-interface
(compound-interface posix-re-interface
basic-re-interface
(export (regexp-match? (proc (:value) :boolean))
(match:start (proc (:value &opt :exact-integer) :value))
(match:end (proc (:value &opt :exact-integer) :value))
(match:substring (proc (:value &opt :exact-integer) :value))
(regexp-search (proc (:value :string &opt :exact-integer)
:value))
(regexp-search? (proc (:value :string &opt :exact-integer)
:boolean))
(sre->regexp (proc (:value) :value))
(regexp->sre (proc (:value) :value))
(simplify-regexp (proc (:value) :value)))))
(define-interface rx-lib-interface
(compound-interface (export coerce-dynamic-regexp
coerce-dynamic-charset
spec->char-set
flush-submatches
uncase
uncase-char-set
uncase-string)
re-internals-interface))
(define-interface rx-syntax-interface (export (rx :syntax)))
(define-interface sre-syntax-tools-interface
(export (if-sre-form :syntax)
sre-form?
parse-sre parse-sres
regexp->scheme
static-regexp?))
(define-interface re-match-syntax-interface
(export (let-match :syntax)
(if-match :syntax)
(match-cond :syntax)))
(define-interface re-exports-interface
(compound-interface re-level-0-interface
rx-syntax-interface
re-subst-interface
re-match-syntax-interface
re-folders-interface))
| null | https://raw.githubusercontent.com/scheme/rx/dd9037f6f9ea01019390614f6b126b7dd293798d/interfaces.scm | scheme | posixstr.scm
spencer | (define-interface utilities-interface
(export mapv mapv! vector-every? copy-vector
initialize-vector vector-append
vfold vfold-right
check-arg
deprecated-proc
real->exact-integer))
(define-interface let-opt-expanders-interface
(export expand-let-optionals
expand-let-optionals*))
(define-interface let-opt-interface
(export (let-optionals :syntax)
(let-optionals* :syntax)
(:optional :syntax)))
(define-interface basic-re-interface
(export (re-dsm? (proc (:value) :boolean))
((re-dsm make-re-dsm) (proc (:value :exact-integer :exact-integer) :value))
(re-dsm:body (proc (:value) :value))
(re-dsm:pre-dsm (proc (:value) :exact-integer))
(re-dsm:tsm (proc (:value) :exact-integer))
(re-dsm:posix (proc (:value) :value))
(set-re-dsm:posix (proc (:value :value) :unspecific))
(re-dsm:post-dsm (proc (:value) :exact-integer))
open-dsm
(re-seq? (proc (:value) :boolean))
(really-make-re-seq (proc (:value :exact-integer :value) :value))
(make-re-seq/tsm (proc (:value :exact-integer) :value))
((re-seq make-re-seq) (proc (:value) :value))
(re-seq:elts (proc (:value) :value))
(re-seq:tsm (proc (:value) :exact-integer))
(re-seq:posix (proc (:value) :value))
(set-re-seq:posix (proc (:value :value) :unspecific))
(re-choice? (proc (:value) :boolean))
(really-make-re-choice (proc (:value :exact-integer :value) :value))
(make-re-choice/tsm (proc (:value :exact-integer) :value))
((make-re-choice re-choice) (proc (:value) :value))
(re-choice:elts (proc (:value) :value))
(re-choice:tsm (proc (:value) :exact-integer))
(re-choice:posix (proc (:value) :value))
(set-re-choice:posix (proc (:value :value) :unspecific))
(re-repeat? (proc (:value) :boolean))
(really-make-re-repeat (proc (:exact-integer
:value :value
:exact-integer :value)
:value))
(make-re-repeat/tsm (proc (:exact-integer :value :value :exact-integer )
:value))
((re-repeat make-re-repeat)
(proc (:exact-integer :value :value) :value))
((re-repeat:from re-repeat:tsm)
(proc (:value) :exact-integer))
(re-repeat:to (proc (:value) :value))
((re-repeat:body re-repeat:posix)
(proc (:value) :value))
(set-re-repeat:posix (proc (:value :value) :unspecific))
(re-submatch? (proc (:value) :boolean))
(really-make-re-submatch (proc (:value :exact-integer :exact-integer :value)
:value))
(make-re-submatch/tsm (proc (:value :exact-integer :exact-integer) :value))
((make-re-submatch re-submatch)
(proc (:value &opt :exact-integer :exact-integer) :value))
(re-submatch:body (proc (:value) :value))
((re-submatch:pre-dsm re-submatch:tsm re-submatch:post-dsm)
(proc (:value) :exact-integer))
(re-submatch:posix (proc (:value) :value))
(set-re-submatch:posix (proc (:value :value) :unspecific))
(re-string? (proc (:value) :boolean))
((make-re-string re-string) (proc (:string) :value))
(re-string:chars (proc (:value) :string))
(set-re-string:chars (proc (:value :string) :unspecific))
(re-string:posix (proc (:value) :value))
(set-re-string:posix (proc (:value :value) :unspecific))
re-trivial
(re-trivial? (proc (:value) :boolean))
(re-char-set? (proc (:value) :boolean))
((make-re-char-set re-char-set) (proc (:value) :value))
(re-char-set:cset (proc (:value) :value))
(set-re-char-set:cset (proc (:value :value) :unspecific))
(re-char-set:posix (proc (:value) :value))
(set-re-char-set:posix (proc (:value :value) :unspecific))
re-empty
(re-empty? (proc (:value) :boolean))
re-bos re-eos
re-bol re-eol
((re-bos? re-eos? re-bol? re-eol? re-any?)
(proc (:value) :boolean))
re-any
re-nonl
(regexp? (proc (:value) :boolean))
(re-tsm (proc (:value) :exact-integer))
These guys can be in code produced by RX expander .
(flush-submatches (proc (:value) :value))
(uncase (proc (:value) :value))
(uncase-char-set (proc (:value) :value))
(uncase-string (proc (:string) :value))))
(define-interface re-internals-interface
These are constructors for the Scheme unparser
(export
(make-re-string/posix (proc (:string :string :vector) :value))
((make-re-seq/posix make-re-choice/posix)
(proc (:value :exact-integer :string :vector) :value))
(make-re-char-set/posix (proc (:value :string :vector) :value))
(make-re-repeat/posix (proc (:exact-integer :value :value :exact-integer :string :vector)
:value))
(make-re-dsm/posix (proc (:value :exact-integer :exact-integer :string :vector)
:value))
(make-re-submatch/posix (proc (:value :exact-integer :exact-integer :string :vector) :value))))
(define-interface re-match-internals-interface
(export (regexp-match:string (proc (:value) :string))
(regexp-match:submatches (proc (:value) :vector))))
(define-interface posix-re-interface
(define-interface re-subst-interface
(export
(regexp-substitute (proc (:value :value &rest :value) :value))
(regexp-substitute/global (proc (:value :value :string &rest :value) :value))))
(define-interface re-folders-interface
(export
(regexp-fold (proc (:value (proc (:exact-integer :value :value) :value)
:value
:string
&opt (proc (:exact-integer :value) :value)
:exact-integer)
:value))
(regexp-fold-right (proc (:value (proc (:value :exact-integer :value) :value)
:value
:string
&opt (proc (:exact-integer :value) :value)
:exact-integer)
:value))
(regexp-for-each (proc (:value (proc (:value) :unspecific)
:string &opt :exact-integer)
:unspecific))))
(define-interface re-level-0-interface
(compound-interface posix-re-interface
basic-re-interface
(export (regexp-match? (proc (:value) :boolean))
(match:start (proc (:value &opt :exact-integer) :value))
(match:end (proc (:value &opt :exact-integer) :value))
(match:substring (proc (:value &opt :exact-integer) :value))
(regexp-search (proc (:value :string &opt :exact-integer)
:value))
(regexp-search? (proc (:value :string &opt :exact-integer)
:boolean))
(sre->regexp (proc (:value) :value))
(regexp->sre (proc (:value) :value))
(simplify-regexp (proc (:value) :value)))))
(define-interface rx-lib-interface
(compound-interface (export coerce-dynamic-regexp
coerce-dynamic-charset
spec->char-set
flush-submatches
uncase
uncase-char-set
uncase-string)
re-internals-interface))
(define-interface rx-syntax-interface (export (rx :syntax)))
(define-interface sre-syntax-tools-interface
(export (if-sre-form :syntax)
sre-form?
parse-sre parse-sres
regexp->scheme
static-regexp?))
(define-interface re-match-syntax-interface
(export (let-match :syntax)
(if-match :syntax)
(match-cond :syntax)))
(define-interface re-exports-interface
(compound-interface re-level-0-interface
rx-syntax-interface
re-subst-interface
re-match-syntax-interface
re-folders-interface))
|
bdbdbb9ddeb03fba60325c6223d46b724b7f91b5a33d741936324ac83ef6e5b7 | thizanne/cormoran | top.ml | open Batteries
module ProgramState : Domain.ProgramState = struct
type t = Top
let bottom = Top
let is_bottom Top = false
let equal Top Top = true
let top _ = Top
let transfer _ _ Top = Top
let meet_cond _ Top = Top
let join Top Top = Top
let widening Top Top = Top
let print output Top = Unit.print output ()
end
module ThreadAnalysis : Modular.ThreadAnalysis = struct
module StateAbstraction = struct
type t = Top
let is_bottom Top = false
let equal Top Top = true
let join Top Top = Top
let print output Top = Unit.print output ()
let bottom _ _ _ = Top
let top _ _ _ = Top
let meet_cond _ Top = Top
let are_consistent _ = true
let widening Top Top = Top
let meet_label _ _ Top = Top
end
module Interferences = struct
type t = Top
let bottom _ _ = Top
let equal Top Top = true
let join Top Top = Top
let widening Top Top = Top
let print output Top = Unit.print output ()
end
module Application = struct
type state = StateAbstraction.t
type interference = Interferences.t
let apply _ StateAbstraction.Top Interferences.Top =
StateAbstraction.Top, Interferences.Top
let generate _ _ _ StateAbstraction.Top =
StateAbstraction.Top, Interferences.Top
end
end
| null | https://raw.githubusercontent.com/thizanne/cormoran/46d13330ebd1c8224a0603fd473d8e6bed48bf53/src/domains/top.ml | ocaml | open Batteries
module ProgramState : Domain.ProgramState = struct
type t = Top
let bottom = Top
let is_bottom Top = false
let equal Top Top = true
let top _ = Top
let transfer _ _ Top = Top
let meet_cond _ Top = Top
let join Top Top = Top
let widening Top Top = Top
let print output Top = Unit.print output ()
end
module ThreadAnalysis : Modular.ThreadAnalysis = struct
module StateAbstraction = struct
type t = Top
let is_bottom Top = false
let equal Top Top = true
let join Top Top = Top
let print output Top = Unit.print output ()
let bottom _ _ _ = Top
let top _ _ _ = Top
let meet_cond _ Top = Top
let are_consistent _ = true
let widening Top Top = Top
let meet_label _ _ Top = Top
end
module Interferences = struct
type t = Top
let bottom _ _ = Top
let equal Top Top = true
let join Top Top = Top
let widening Top Top = Top
let print output Top = Unit.print output ()
end
module Application = struct
type state = StateAbstraction.t
type interference = Interferences.t
let apply _ StateAbstraction.Top Interferences.Top =
StateAbstraction.Top, Interferences.Top
let generate _ _ _ StateAbstraction.Top =
StateAbstraction.Top, Interferences.Top
end
end
| |
9bab760914f89683125a594536e95a47a1a592a74df84aa8974f478af2c55d9c | skanev/playground | 14-tests.scm | (require rackunit rackunit/text-ui)
(load-relative "../../support/eopl.scm")
(load-relative "../14.scm")
(define eopl-2.14-tests
(test-suite
"Tests for EOPL exercise 2.14"
(check-exn exn? (lambda () (apply-env (empty-env) 'a)))
(check-exn exn? (lambda () (apply-env (extend-env 'a 1 (empty-env)) 'b)))
(check-equal? (apply-env (extend-env 'a 1 (empty-env)) 'a)
1)
(check-equal? (apply-env (extend-env 'a 1 (extend-env 'b 2 (empty-env))) 'b)
2)
(check-equal? (apply-env (extend-env 'a 3
(extend-env 'b 2
(extend-env 'a 1
(empty-env))))
'a)
3)
(check-false (empty-env? (extend-env 'a 1 (empty-env))))
(check-true (empty-env? (empty-env)))
(check-false (has-binding? (empty-env) 'a))
(check-false (has-binding? (extend-env 'a 1 (empty-env)) 'b))
(check-true (has-binding? (extend-env 'a 1 (empty-env)) 'a))
(check-true (has-binding? (extend-env 'a 1 (extend-env 'b 2 (empty-env))) 'b))
))
(exit (run-tests eopl-2.14-tests))
| null | https://raw.githubusercontent.com/skanev/playground/d88e53a7f277b35041c2f709771a0b96f993b310/scheme/eopl/02/tests/14-tests.scm | scheme | (require rackunit rackunit/text-ui)
(load-relative "../../support/eopl.scm")
(load-relative "../14.scm")
(define eopl-2.14-tests
(test-suite
"Tests for EOPL exercise 2.14"
(check-exn exn? (lambda () (apply-env (empty-env) 'a)))
(check-exn exn? (lambda () (apply-env (extend-env 'a 1 (empty-env)) 'b)))
(check-equal? (apply-env (extend-env 'a 1 (empty-env)) 'a)
1)
(check-equal? (apply-env (extend-env 'a 1 (extend-env 'b 2 (empty-env))) 'b)
2)
(check-equal? (apply-env (extend-env 'a 3
(extend-env 'b 2
(extend-env 'a 1
(empty-env))))
'a)
3)
(check-false (empty-env? (extend-env 'a 1 (empty-env))))
(check-true (empty-env? (empty-env)))
(check-false (has-binding? (empty-env) 'a))
(check-false (has-binding? (extend-env 'a 1 (empty-env)) 'b))
(check-true (has-binding? (extend-env 'a 1 (empty-env)) 'a))
(check-true (has-binding? (extend-env 'a 1 (extend-env 'b 2 (empty-env))) 'b))
))
(exit (run-tests eopl-2.14-tests))
| |
efa19daac4fbe4b344be3494a2714426d0b9e5500e8ea14d0f91c36abb06be71 | metabase/metabase | chain_filter_test.clj | (ns metabase.models.params.chain-filter-test
(:require
[cheshire.core :as json]
[clojure.test :refer :all]
[metabase.models :refer [Field FieldValues]]
[metabase.models.field-values :as field-values]
[metabase.models.params.chain-filter :as chain-filter]
[metabase.models.params.field-values :as params.field-values]
[metabase.test :as mt]
[metabase.util :as u]
[toucan.db :as db]))
;; Test-only sugar over `chain-filter/chain-filter`: lets a test write bare
;; `table.field` symbols instead of numeric Field IDs.  Each symbol is turned
;; into a `%table.field` form and resolved to a real Field ID by `mt/$ids` at
;; macroexpansion time; `options` are passed straight through.
(defmacro ^:private chain-filter [field field->value & options]
  `(chain-filter/chain-filter
    (mt/$ids nil ~(symbol (str \% (name field))))
    (mt/$ids nil ~(into {} (for [[k v] field->value]
                             [(symbol (str \% k)) v])))
    ~@options))
;; Same sugar as `chain-filter` above, but for `chain-filter/chain-filter-search`:
;; resolves the `table.field` symbols to Field IDs via `mt/$ids` and threads the
;; text `query` (and any extra options) through to the search function.
(defmacro ^:private chain-filter-search [field field->value query & options]
  `(chain-filter/chain-filter-search
    (mt/$ids nil ~(symbol (str \% (name field))))
    (mt/$ids nil ~(into {} (for [[k v] field->value]
                             [(symbol (str \% k)) v])))
    ~query
    ~@options))
(defn take-n-values
  "Truncate the :values entry of a chain-filter result map to its first `n`
  elements, leaving every other key (e.g. :has_more_values) untouched.

  (take-n-values 1 {:values [1 2 3] :has_more_values false})
  ;; => {:values [1] :has_more_values false}"
  [n result]
  (let [shorten (partial take n)]
    (update result :values shorten)))
(testing "Show me expensive restaurants"
(is (= {:values ["Dal Rae Restaurant"
"Lawry's The Prime Rib"
"Pacific Dining Car - Santa Monica"
"Sushi Nakazawa"
"Sushi Yasuda"
"Tanoshi Sushi & Sake Bar"]
:has_more_values false}
(chain-filter venues.name {venues.price 4}))))
(testing "Show me categories that have expensive restaurants"
(is (= {:values ["Japanese" "Steakhouse"]
:has_more_values false}
(chain-filter categories.name {venues.price 4})))
(testing "Should work with string versions of param values"
(is (= {:values ["Japanese" "Steakhouse"]
:has_more_values false}
(chain-filter categories.name {venues.price "4"})))))
(testing "Show me categories starting with s (case-insensitive) that have expensive restaurants"
(is (= {:values ["Steakhouse"]
:has_more_values false}
(chain-filter categories.name {venues.price 4, categories.name [:starts-with "s" {:case-sensitive false}]}))))
(testing "Show me cheap Thai restaurants"
(is (= {:values ["Kinaree Thai Bistro" "Krua Siri"]
:has_more_values false}
(chain-filter venues.name {venues.price 1, categories.name "Thai"}))))
(testing "Show me the categories that have cheap restaurants"
(is (= {:values ["Asian" "BBQ" "Bakery" "Bar" "Burger" "Caribbean"
"Deli" "Karaoke" "Mexican" "Pizza" "Southern" "Thai"]
:has_more_values false}
(chain-filter categories.name {venues.price 1}))))
(testing "Show me cheap restaurants with the word 'taco' in their name (case-insensitive)"
(is (= {:values ["Tacos Villa Corona" "Tito's Tacos"]
:has_more_values false}
(chain-filter venues.name {venues.price 1, venues.name [:contains "tAcO" {:case-sensitive false}]}))))
(testing "Show me the first 3 expensive restaurants"
(is (= {:values ["Dal Rae Restaurant" "Lawry's The Prime Rib" "Pacific Dining Car - Santa Monica"]
:has_more_values true}
(chain-filter venues.name {venues.price 4} :limit 3))))
(testing "Oh yeah, we actually support arbitrary MBQL filter clauses. Neat!"
(is (= {:values ["Festa" "Fred 62"]
:has_more_values false}
(chain-filter venues.name {venues.price [:between 2 3]
venues.name [:starts-with "f" {:case-sensitive false}]})))))
(deftest multiple-values-test
(testing "Chain filtering should support multiple values for a single parameter (as a vector or set of values)"
(testing "Show me restaurants with price = 1 or 2 with the word 'BBQ' in their name (case-sensitive)"
(is (= {:values ["Baby Blues BBQ" "Beachwood BBQ & Brewing" "Bludso's BBQ"]
:has_more_values false}
(chain-filter venues.name {venues.price #{1 2}, venues.name [:contains "BBQ"]}))))
(testing "Show me the possible values of price for Bakery *or* BBQ restaurants"
(is (= {:values [1 2 3]
:has_more_values false}
(chain-filter venues.price {categories.name ["Bakery" "BBQ"]}))))))
(deftest auto-parse-string-params-test
(testing "Parameters that come in as strings (i.e., all of them that come in via the API) should work as intended"
(is (= {:values ["Baby Blues BBQ" "Beachwood BBQ & Brewing" "Bludso's BBQ"]
:has_more_values false}
(chain-filter venues.name {venues.price ["1" "2"], venues.name [:contains "BBQ"]})))))
(deftest unrelated-params-test
(testing "Parameters that are completely unrelated (don't apply to this Table) should just get ignored entirely"
;; there is no way to join from venues -> users so users.id should get ignored
(binding [chain-filter/*enable-reverse-joins* false]
(is (= {:values [1 2 3]
:has_more_values false}
(chain-filter venues.price {categories.name ["Bakery" "BBQ"]
users.id [1 2 3]}))))))
(def ^:private megagraph
"A large graph that is hugely interconnected. All nodes can get to 50 and 50 has an edge to :end. But the fastest
route is [[:start 50] [50 :end]] and we should quickly identify this last route. Basically handy to demonstrate that
we are doing breadth first search rather than depth first search. Depth first would identify 1 -> 2 -> 3 ... 49 ->
50 -> end"
(let [big 50]
(merge-with merge
(reduce (fn [m [x y]] (assoc-in m [x y] [[x y]]))
{}
(for [x (range (inc big))
y (range (inc big))
:when (not= x y)]
[x y]))
{:start (reduce (fn [m x] (assoc m x [[:start x]]))
{}
(range (inc big)))}
{big {:end [[big :end]]}})))
(def ^:private megagraph-single-path
"Similar to the megagraph above, this graph only has a single path through a hugely interconnected graph. A naive
graph traversal will run out of memory or take quite a long time to find the traversal:
[[:start 90] [90 200] [200 :end]]
There is only one path to end (from 200) and only one path to 200 from 90. If you take out the seen nodes this path
will not be found as the traversal advances through all of the 50 paths from start, all of the 50 paths from 1, all
of the 50 paths from 2, ..."
(merge-with merge
every node is linked to every other node ( 1 ... 199 )
(reduce (fn [m [x y]] (assoc-in m [x y] [[x y]]))
{}
(for [x (range 200)
y (range 200)
:when (not= x y)]
[x y]))
{:start (reduce (fn [m x] (assoc m x [[:start x]]))
{}
(range 200))}
only 90 reaches 200 and only 200 ( big ) reaches the end
{90 {200 [[90 200]]}
200 {:end [[200 :end]]}}))
(deftest traverse-graph-test
(testing "If no need to join, returns immediately"
(is (nil? (#'chain-filter/traverse-graph {} :start :start 5))))
(testing "Finds a simple hop"
(let [graph {:start {:end [:start->end]}}]
(is (= [:start->end]
(#'chain-filter/traverse-graph graph :start :end 5))))
(testing "Finds over a few hops"
(let [graph {:start {:a [:start->a]}
:a {:b [:a->b]}
:b {:c [:b->c]}
:c {:end [:c->end]}}]
(is (= [:start->a :a->b :b->c :c->end]
(#'chain-filter/traverse-graph graph :start :end 5)))
(testing "But will not exceed the max depth"
(is (nil? (#'chain-filter/traverse-graph graph :start :end 2))))))
(testing "Can find a path in a dense and large graph"
(is (= [[:start 50] [50 :end]]
(#'chain-filter/traverse-graph megagraph :start :end 5)))
(is (= [[:start 90] [90 200] [200 :end]]
(#'chain-filter/traverse-graph megagraph-single-path :start :end 5))))
(testing "Returns nil if there is no path"
(let [graph {:start {1 [[:start 1]]}
1 {2 [[1 2]]}
no way to get to 3
3 {4 [[3 4]]}
4 {:end [[4 :end]]}}]
(is (nil? (#'chain-filter/traverse-graph graph :start :end 5)))))
(testing "Not fooled by loops"
(let [graph {:start {:a [:start->a]}
:a {:b [:a->b]
:a [:b->a]}
:b {:c [:b->c]
:a [:c->a]
:b [:c->b]}
:c {:end [:c->end]}}]
(is (= [:start->a :a->b :b->c :c->end]
(#'chain-filter/traverse-graph graph :start :end 5)))
(testing "But will not exceed the max depth"
(is (nil? (#'chain-filter/traverse-graph graph :start :end 2))))))))
(deftest find-joins-test
(mt/dataset airports
(mt/$ids nil
(testing "airport -> municipality"
(is (= [{:lhs {:table $$airport, :field %airport.municipality_id}
:rhs {:table $$municipality, :field %municipality.id}}]
(#'chain-filter/find-joins (mt/id) $$airport $$municipality))))
(testing "airport [-> municipality -> region] -> country"
(is (= [{:lhs {:table $$airport, :field %airport.municipality_id}
:rhs {:table $$municipality, :field %municipality.id}}
{:lhs {:table $$municipality, :field %municipality.region_id}
:rhs {:table $$region, :field %region.id}}
{:lhs {:table $$region, :field %region.country_id}
:rhs {:table $$country, :field %country.id}}]
(#'chain-filter/find-joins (mt/id) $$airport $$country))))
(testing "[backwards]"
(testing "municipality -> airport"
(is (= [{:lhs {:table $$municipality, :field %municipality.id}
:rhs {:table $$airport, :field %airport.municipality_id}}]
(#'chain-filter/find-joins (mt/id) $$municipality $$airport))))
(testing "country [-> region -> municipality] -> airport"
(is (= [{:lhs {:table $$country, :field %country.id}
:rhs {:table $$region, :field %region.country_id}}
{:lhs {:table $$region, :field %region.id}
:rhs {:table $$municipality, :field %municipality.region_id}}
{:lhs {:table $$municipality, :field %municipality.id}
:rhs {:table $$airport, :field %airport.municipality_id}}]
(#'chain-filter/find-joins (mt/id) $$country $$airport))))))))
(deftest find-all-joins-test
(testing "With reverse joins disabled"
(binding [chain-filter/*enable-reverse-joins* false]
(mt/$ids nil
(is (= [{:lhs {:table $$venues, :field %venues.category_id}, :rhs {:table $$categories, :field %categories.id}}]
(#'chain-filter/find-all-joins $$venues #{%categories.name %users.id}))))))
(mt/dataset airports
(mt/$ids nil
(testing "airport [-> municipality] -> region"
(testing "even though we're joining against the same Table multiple times, duplicate joins should be removed"
(is (= [{:lhs {:table $$airport, :field %airport.municipality_id}
:rhs {:table $$municipality, :field %municipality.id}}
{:lhs {:table $$municipality, :field %municipality.region_id}
:rhs {:table $$region, :field %region.id}}]
(#'chain-filter/find-all-joins $$airport #{%region.name %municipality.name %region.id}))))))))
(deftest multi-hop-test
(mt/dataset airports
(testing "Should be able to filter against other tables with that require multiple joins\n"
(testing "single direct join: Airport -> Municipality"
(is (= {:values ["San Francisco International Airport"]
:has_more_values false}
(chain-filter airport.name {municipality.name ["San Francisco"]}))))
(testing "2 joins required: Airport -> Municipality -> Region"
(is (= {:values ["Beale Air Force Base"
"Edwards Air Force Base"
"John Wayne Airport-Orange County Airport"]
:has_more_values false}
(take-n-values 3 (chain-filter airport.name {region.name ["California"]})))))
(testing "3 joins required: Airport -> Municipality -> Region -> Country"
(is (= {:values ["Abraham Lincoln Capital Airport"
"Albuquerque International Sunport"
"Altus Air Force Base"]
:has_more_values false}
(take-n-values 3 (chain-filter airport.name {country.name ["United States"]})))))
(testing "4 joins required: Airport -> Municipality -> Region -> Country -> Continent"
(is (= {:values ["Afonso Pena Airport"
"Alejandro Velasco Astete International Airport"
"Carrasco International /General C L Berisso Airport"]
:has_more_values false}
(take-n-values 3 (chain-filter airport.name {continent.name ["South America"]})))))
(testing "[backwards]"
(testing "single direct join: Municipality -> Airport"
(is (= {:values ["San Francisco"]
:has_more_values false}
(chain-filter municipality.name {airport.name ["San Francisco International Airport"]}))))
(testing "2 joins required: Region -> Municipality -> Airport"
(is (= {:values ["California"]
:has_more_values false}
(chain-filter region.name {airport.name ["San Francisco International Airport"]}))))
(testing "3 joins required: Country -> Region -> Municipality -> Airport"
(is (= {:values ["United States"]
:has_more_values false}
(chain-filter country.name {airport.name ["San Francisco International Airport"]}))))
(testing "4 joins required: Continent -> Region -> Municipality -> Airport"
(is (= {:values ["North America"]
:has_more_values false}
(chain-filter continent.name {airport.name ["San Francisco International Airport"]}))))))))
(deftest filterable-field-ids-test
  ;; `filterable-field-ids` should return the subset of the candidate filtering Field IDs that can
  ;; actually be used to constrain the target Field (i.e. some join path connects their tables).
  (mt/$ids
    (testing (format "venues.price = %d categories.name = %d users.id = %d\n" %venues.price %categories.name %users.id)
      (is (= #{%categories.name %users.id}
             (chain-filter/filterable-field-ids %venues.price #{%categories.name %users.id})))
      (testing "reverse joins disabled: should exclude users.id"
        ;; reaching users from venues requires a reverse join, so with reverse joins disabled
        ;; users.id should no longer be considered filterable.
        (binding [chain-filter/*enable-reverse-joins* false]
          (is (= #{%categories.name}
                 (chain-filter/filterable-field-ids %venues.price #{%categories.name %users.id})))))
      (testing "return nil if filtering-field-ids is empty"
        (is (= nil
               (chain-filter/filterable-field-ids %venues.price #{})))))))
(deftest chain-filter-search-test
  ;; `chain-filter-search` = `chain-filter` + a case-insensitive substring match on the query.
  (testing "Show me categories containing 'eak' (case-insensitive) that have expensive restaurants"
    (is (= {:values ["Steakhouse"]
            :has_more_values false}
           (chain-filter-search categories.name {venues.price 4} "eak"))))
  (testing "Show me cheap restaurants including with 'taco' (case-insensitive)"
    (is (= {:values ["Tacos Villa Corona" "Tito's Tacos"]
            :has_more_values false}
           (chain-filter-search venues.name {venues.price 1} "tAcO"))))
  (testing "search for something crazy = should return empty results"
    (is (= {:values []
            :has_more_values false}
           (chain-filter-search categories.name {venues.price 4} "zzzzz"))))
  ;; error cases: bad Field ID and searching a non-Text Field should throw.
  (testing "Field that doesn't exist should throw a 404"
    (is (thrown-with-msg?
         clojure.lang.ExceptionInfo
         #"Field [\d,]+ does not exist"
         (chain-filter/chain-filter-search Integer/MAX_VALUE nil "s"))))
  (testing "Field that isn't type/Text should throw a 400"
    (is (thrown-with-msg?
         clojure.lang.ExceptionInfo
         #"Cannot search against non-Text Field"
         (chain-filter/chain-filter-search (mt/$ids %venues.price) nil "s")))))
;;; --------------------------------------------------- Remapping ----------------------------------------------------
(defn do-with-human-readable-values-remapping
  "Impl for [[with-human-readable-values-remapping]]: invokes `f` with venues.category_id
  remapped to the human-readable values of categories.name."
  [f]
  (mt/with-column-remappings [venues.category_id (values-of categories.name)]
    (f)))
(defmacro with-human-readable-values-remapping
  "Run `body` with venues.category_id remapped to human-readable values from categories.name."
  {:style/indent 0}
  [& body]
  `(do-with-human-readable-values-remapping (fn [] ~@body)))
(deftest human-readable-values-remapped-chain-filter-test
  ;; With human-readable-value remapping, chain-filter should return [raw-value remapped-value]
  ;; pairs instead of bare values.
  (with-human-readable-values-remapping
    (testing "Show me category IDs for categories"
      ;; there are no restaurants with category 1
      (is (= {:values [[2 "American"]
                       [3 "Artisan"]
                       [4 "Asian"]]
              :has_more_values false}
             (take-n-values 3 (chain-filter venues.category_id nil)))))
    (testing "Show me category IDs for categories that have expensive restaurants"
      (is (= {:values [[40 "Japanese"]
                       [67 "Steakhouse"]]
              :has_more_values false}
             (take-n-values 3 (chain-filter venues.category_id {venues.price 4})))))
    (testing "Show me the category 40 (constraints do not support remapping)"
      (is (= {:values [[40 "Japanese"]]
              :has_more_values false}
             (take-n-values 3 (chain-filter venues.category_id {venues.category_id 40})))))))
(deftest human-readable-values-remapped-chain-filter-search-test
  ;; With human-readable-value remapping, searching should match against the *remapped* (display)
  ;; values and return [raw-value remapped-value] pairs.
  (with-human-readable-values-remapping
    (testing "Show me category IDs [whose name] contains 'bar'"
      (testing "\nconstraints = {}"
        (is (= {:values [[7 "Bar"]
                         [74 "Wine Bar"]]
                :has_more_values false}
               (chain-filter-search venues.category_id {} "bar")))))
    ;; nil constraints should behave the same as an empty constraints map.
    (testing "\nconstraints = nil"
      (is (= {:values [[7 "Bar"]
                       [74 "Wine Bar"]]
              :has_more_values false}
             (chain-filter-search venues.category_id nil "bar"))))
    (testing "Show me category IDs [whose name] contains 'house' that have expensive restaurants"
      (is (= {:values [[67 "Steakhouse"]]
              :has_more_values false}
             (chain-filter-search venues.category_id {venues.price 4} "house"))))
    (testing "search for something crazy: should return empty results"
      (is (= {:values []
              :has_more_values false}
             (chain-filter-search venues.category_id {venues.price 4} "zzzzz"))))))
(deftest field-to-field-remapped-field-id-test
  ;; in the test dataset venues.id has a field-to-field remapping to venues.name;
  ;; `remapped-field-id` should resolve it.
  (is (= (mt/id :venues :name)
         (#'chain-filter/remapped-field-id (mt/id :venues :id)))))
(deftest field-to-field-remapped-chain-filter-test
  (testing "Field-to-field remapping: venues.category_id -> categories.name\n"
    ;; values come back as [raw-id remapped-display-value] pairs, sorted by display value.
    (testing "Show me venue IDs (names)"
      (is (= {:values [[29 "20th Century Cafe"]
                       [8 "25°"]
                       [93 "33 Taps"]]
              :has_more_values false}
             (take-n-values 3 (chain-filter venues.id nil)))))
    (testing "Show me expensive venue IDs (names)"
      (is (= {:values [[55 "Dal Rae Restaurant"]
                       [61 "Lawry's The Prime Rib"]
                       [16 "Pacific Dining Car - Santa Monica"]]
              :has_more_values false}
             (take-n-values 3 (chain-filter venues.id {venues.price 4})))))))
(deftest field-to-field-remapped-chain-filter-search-test
  (testing "Field-to-field remapping: venues.category_id -> categories.name\n"
    ;; search should match against the remapped display values, not the raw IDs.
    (testing "Show me venue IDs that [have a remapped name that] contains 'sushi'"
      (is (= {:values [[76 "Beyond Sushi"]
                       [80 "Blue Ribbon Sushi"]
                       [77 "Sushi Nakazawa"]]
              :has_more_values false}
             (take-n-values 3 (chain-filter-search venues.id nil "sushi")))))
    (testing "Show me venue IDs that [have a remapped name that] contain 'sushi' that are expensive"
      (is (= {:values [[77 "Sushi Nakazawa"]
                       [79 "Sushi Yasuda"]
                       [81 "Tanoshi Sushi & Sake Bar"]]
              :has_more_values false}
             (chain-filter-search venues.id {venues.price 4} "sushi"))))
    (testing "search for something crazy = should return empty results"
      (is (= {:values []
              :has_more_values false}
             (chain-filter-search venues.id {venues.price 4} "zzzzz"))))))
(defmacro with-fk-field-to-field-remapping
  "Run `body` with venues.category_id remapped (via its FK) to categories.name."
  {:style/indent 0}
  [& body]
  `(mt/with-column-remappings [~'venues.category_id ~'categories.name]
     ~@body))
(deftest fk-field-to-field-remapped-field-id-test
  ;; with the FK remapping in place, venues.category_id should resolve to categories.name.
  (with-fk-field-to-field-remapping
    (is (= (mt/id :categories :name)
           (#'chain-filter/remapped-field-id (mt/id :venues :category_id))))))
(deftest fk-field-to-field-remapped-chain-filter-test
  ;; Same expectations as the human-readable-values remapping test, but the remapping goes
  ;; through the FK (venues.category_id -> categories.name).
  (with-fk-field-to-field-remapping
    (testing "Show me category IDs for categories"
      ;; there are no restaurants with category 1
      (is (= {:values [[2 "American"]
                       [3 "Artisan"]
                       [4 "Asian"]]
              :has_more_values false}
             (take-n-values 3 (chain-filter venues.category_id nil)))))
    (testing "Show me category IDs for categories that have expensive restaurants"
      (is (= {:values [[40 "Japanese"]
                       [67 "Steakhouse"]]
              :has_more_values false}
             (chain-filter venues.category_id {venues.price 4}))))
    (testing "Show me the category 40 (constraints do not support remapping)"
      (is (= {:values [[40 "Japanese"]]
              :has_more_values false}
             (chain-filter venues.category_id {venues.category_id 40}))))))
(deftest fk-field-to-field-remapped-chain-filter-search-test
  ;; search should match against the FK-remapped display values (categories.name).
  (with-fk-field-to-field-remapping
    (testing "Show me categories containing 'ar'"
      (testing "\nconstraints = {}"
        (is (= {:values [[3 "Artisan"]
                         [7 "Bar"]
                         [14 "Caribbean"]]
                :has_more_values false}
               (take-n-values 3 (chain-filter-search venues.category_id {} "ar")))))
      ;; nil constraints should behave the same as an empty constraints map.
      (testing "\nconstraints = nil"
        (is (= {:values [[3 "Artisan"]
                         [7 "Bar"]
                         [14 "Caribbean"]]
                :has_more_values false}
               (take-n-values 3 (chain-filter-search venues.category_id nil "ar"))))))
    (testing "Show me categories containing 'house' that have expensive restaurants"
      (is (= {:values [[67 "Steakhouse"]]
              :has_more_values false}
             (chain-filter-search venues.category_id {venues.price 4} "house"))))
    (testing "search for something crazy = should return empty results"
      (is (= {:values []
              :has_more_values false}
             (chain-filter-search venues.category_id {venues.price 4} "zzzzz"))))))
(deftest use-cached-field-values-test
  (testing "chain-filter should use cached FieldValues if applicable (#13832)"
    (let [field-id (mt/id :categories :name)]
      (mt/with-model-cleanup [FieldValues]
        (testing "should create a full FieldValues when constraints is `nil`"
          ;; warm up the cache
          (chain-filter categories.name nil)
          ;; redef the FieldValues-creating fn to blow up: the second call must hit the cache.
          (with-redefs [params.field-values/create-advanced-field-values! (fn [& _args]
                                                                            (assert false "Should not be called"))]
            (is (= {:values ["African" "American" "Artisan"]
                    :has_more_values false}
                   (take-n-values 3 (chain-filter categories.name nil))))
            (is (= 1 (db/count FieldValues :field_id field-id :type :full)))))
        (testing "should create a linked-filter FieldValues when have constraints"
          ;; make sure we have a clean start
          (field-values/clear-advanced-field-values-for-field! field-id)
          ;; warm up the cache
          (chain-filter categories.name {venues.price 4})
          (with-redefs [params.field-values/create-advanced-field-values! (fn [& _args]
                                                                            (assert false "Should not be called"))]
            (is (= {:values ["Japanese" "Steakhouse"]
                    :has_more_values false}
                   (chain-filter categories.name {venues.price 4})))
            (is (= 1 (db/count FieldValues :field_id field-id :type :linked-filter)))))
        (testing "should do in-memory search with the cached FieldValues when search without constraints"
          ;; replace the cached values so a search that uses the cache returns "Good"
          (mt/with-temp-vals-in-db FieldValues (db/select-one-id FieldValues :field_id field-id :type "full") {:values ["Good" "Bad"]}
            (is (= {:values ["Good"]
                    :has_more_values false}
                   (chain-filter-search categories.name nil "ood")))))
        (testing "search with constraints"
          ;; make sure we have a clean start
          (field-values/clear-advanced-field-values-for-field! field-id)
          (testing "should create a linked-filter FieldValues"
            ;; warm up the cache
            (chain-filter categories.name {venues.price 4})
            (is (= 1 (db/count FieldValues :field_id field-id :type "linked-filter"))))
          (testing "should search for the values of linked-filter FieldValues"
            (db/update-where! FieldValues {:field_id field-id
                                           :type "linked-filter"}
              :values (json/generate-string ["Good" "Bad"])
              ;; HACK: currently this is hardcoded to true for linked-filter
              ;; in [[params.field-values/fetch-advanced-field-values]]
              ;; we want this to false to test this case
              :has_more_values false)
            (is (= {:values ["Good"]
                    :has_more_values false}
                   (chain-filter-search categories.name {venues.price 4} "o")))
            (testing "Shouldn't use cached FieldValues if has_more_values=true"
              (db/update-where! FieldValues {:field_id field-id
                                             :type "linked-filter"}
                :has_more_values true)
              (is (= {:values ["Steakhouse"]
                      :has_more_values false}
                     (chain-filter-search categories.name {venues.price 4} "o"))))))))))
(deftest time-interval-test
  (testing "chain-filter should accept time interval strings like `past32weeks` for temporal Fields"
    (mt/$ids
      ;; `past32weeks` should compile to a `:time-interval` clause excluding the current week
      (is (= [:time-interval $checkins.date -32 :week {:include-current false}]
             (#'chain-filter/filter-clause $$checkins %checkins.date "past32weeks"))))))
;; single-table dataset whose rows include a nil and an empty string, for testing
;; chain-filter behavior on Fields with missing values (#17659).
(mt/defdataset nil-values-dataset
  [["tbl"
    [{:field-name "mytype", :base-type :type/Text}
     {:field-name "myfield", :base-type :type/Text}]
    [["value" "value"]
     ["null" nil]
     ["empty" ""]]]])
(deftest nil-values-test
  (testing "Chain filter fns should work for fields that have nil or empty values (#17659)"
    (mt/dataset nil-values-dataset
      (mt/$ids tbl
        ;; run the same assertions both without FieldValues (direct DB query) and with cached
        ;; FieldValues for myfield, so both code paths are covered.
        (letfn [(thunk []
                  (doseq [[field expected-values] {:mytype {:values ["empty" "null" "value"]
                                                            :has_more_values false}
                                                   :myfield {:values [nil "" "value"]
                                                             :has_more_values false}}]
                    (testing "chain-filter"
                      ;; sorting can differ a bit based on whether we use FieldValues or not... not sure why this is
                      ;; the case, but that's not important for this test anyway. Just sort everything
                      (is (= expected-values
                             (update (chain-filter/chain-filter (mt/id :tbl field) {}) :values sort))))
                    (testing "chain-filter-search"
                      (is (= {:values ["value"]
                              :has_more_values false}
                             (chain-filter/chain-filter-search (mt/id :tbl field) {} "val"))))))]
          (testing "no FieldValues"
            (thunk))
          (testing "with FieldValues for myfield"
            (mt/with-temp FieldValues [_ {:field_id %myfield, :values ["value" nil ""]}]
              (mt/with-temp-vals-in-db Field %myfield {:has_field_values "auto-list"}
                (testing "Sanity check: make sure we will actually use the cached FieldValues"
                  (is (field-values/field-should-have-field-values? %myfield))
                  (is (#'chain-filter/use-cached-field-values? %myfield)))
                (thunk)))))))))
(defn- do-with-clean-field-values-for-field
  "Impl for [[with-clean-field-values-for-field]]. Deletes all FieldValues for
  `field-or-field-id`, runs `thunk`, then restores the saved FieldValues and the Field's
  original `has_field_values` setting."
  [field-or-field-id thunk]
  (mt/with-model-cleanup [FieldValues]
    (let [field-id (u/the-id field-or-field-id)
          ;; snapshot current state so it can be restored in the `finally` below
          has_field_values (db/select-one-field :has_field_values Field :id field-id)
          fvs (db/select FieldValues :field_id field-id)]
      ;; switch to "list" to prevent [[field-values/create-or-update-full-field-values!]]
      ;; from changing this to `nil` if the field is `auto-list` and exceeds thresholds
      (db/update! Field field-id :has_field_values "list")
      (db/delete! FieldValues :field_id field-id)
      (try
        (thunk)
        (finally
          (db/update! Field field-id :has_field_values has_field_values)
          (db/insert-many! FieldValues fvs))))))
(defmacro ^:private with-clean-field-values-for-field
  "Run `body` with all FieldValues for `field-or-field-id` deleted.
  Restores the deleted FieldValues when we're done."
  {:style/indent 1}
  [field-or-field-id & body]
  `(do-with-clean-field-values-for-field ~field-or-field-id (fn [] ~@body)))
(deftest chain-filter-has-more-values-test
  (testing "the `has_more_values` property should be correct\n"
    (testing "for cached fields"
      (testing "without constraints"
        (with-clean-field-values-for-field (mt/id :categories :name)
          (testing "`false` for field has values less than [[field-values/*total-max-length*]] threshold"
            (is (= false
                   (:has_more_values (chain-filter categories.name {})))))
          (testing "`true` if the limit option is less than the count of values of fieldvalues"
            (is (= true
                   (:has_more_values (chain-filter categories.name {} :limit 1)))))
          (testing "`false` if the limit option is greater than the count of values of fieldvalues"
            (is (= false
                   (:has_more_values (chain-filter categories.name {} :limit Integer/MAX_VALUE))))))
        (testing "`true` if the values of a field exceeds our [[field-values/*total-max-length*]] limit"
          (with-clean-field-values-for-field (mt/id :categories :name)
            ;; artificially shrink the max total length so the field's values exceed it
            (binding [field-values/*total-max-length* 10]
              (is (= true
                     (:has_more_values (chain-filter categories.name {}))))))))
      (testing "with constraints"
        (with-clean-field-values-for-field (mt/id :categories :name)
          (testing "`false` for field has values less than [[field-values/*total-max-length*]] threshold"
            (is (= false
                   (:has_more_values (chain-filter categories.name {venues.price 4})))))
          (testing "`true` if the limit option is less than the count of values of fieldvalues"
            (is (= true
                   (:has_more_values (chain-filter categories.name {venues.price 4} :limit 1)))))
          (testing "`false` if the limit option is greater than the count of values of fieldvalues"
            (is (= false
                   (:has_more_values (chain-filter categories.name {venues.price 4} :limit Integer/MAX_VALUE))))))
        (with-clean-field-values-for-field (mt/id :categories :name)
          (testing "`true` if the values of a field exceeds our [[field-values/*total-max-length*]] limit"
            (binding [field-values/*total-max-length* 10]
              (is (= true
                     (:has_more_values (chain-filter categories.name {venues.price 4})))))))))
    (testing "for non-cached fields"
      (testing "with constraints"
        (with-clean-field-values-for-field (mt/id :venues :latitude)
          (testing "`false` if we don't specify limit"
            (is (= false
                   (:has_more_values (chain-filter venues.latitude {venues.price 4})))))
          (testing "`true` if the limit is less than the number of values the field has"
            (is (= true
                   (:has_more_values (chain-filter venues.latitude {venues.price 4} :limit 1))))))))))
| null | https://raw.githubusercontent.com/metabase/metabase/1f809593c2298ccf9c4070df3fa39d718eddb5d6/test/metabase/models/params/chain_filter_test.clj | clojure | there is no way to join from venues -> users so users.id should get ignored
--------------------------------------------------- Remapping ----------------------------------------------------
warm up the cache
make sure we have a clean start
warm up the cache
make sure we have a clean start
warm up the cache
in [[params.field-values/fetch-advanced-field-values]]
we want this to false to test this case
the case, but that's not important for this test anyway. Just sort everything
switch to "list" to prevent [[field-values/create-or-update-full-field-values!]]
from changing this to `nil` if the field is `auto-list` and exceeds threshholds | (ns metabase.models.params.chain-filter-test
(:require
[cheshire.core :as json]
[clojure.test :refer :all]
[metabase.models :refer [Field FieldValues]]
[metabase.models.field-values :as field-values]
[metabase.models.params.chain-filter :as chain-filter]
[metabase.models.params.field-values :as params.field-values]
[metabase.test :as mt]
[metabase.util :as u]
[toucan.db :as db]))
(defmacro ^:private chain-filter [field field->value & options]
`(chain-filter/chain-filter
(mt/$ids nil ~(symbol (str \% (name field))))
(mt/$ids nil ~(into {} (for [[k v] field->value]
[(symbol (str \% k)) v])))
~@options))
(defmacro ^:private chain-filter-search [field field->value query & options]
`(chain-filter/chain-filter-search
(mt/$ids nil ~(symbol (str \% (name field))))
(mt/$ids nil ~(into {} (for [[k v] field->value]
[(symbol (str \% k)) v])))
~query
~@options))
(defn take-n-values
"Call `take` on the result of chain-filter function.
(take-n-values 1 {:values [1 2 3]
:has_more_values false})
-> {:values [1]
:has_more_values false}"
[n result]
(update result :values #(take n %)))
(deftest chain-filter-test
(testing "Show me expensive restaurants"
(is (= {:values ["Dal Rae Restaurant"
"Lawry's The Prime Rib"
"Pacific Dining Car - Santa Monica"
"Sushi Nakazawa"
"Sushi Yasuda"
"Tanoshi Sushi & Sake Bar"]
:has_more_values false}
(chain-filter venues.name {venues.price 4}))))
(testing "Show me categories that have expensive restaurants"
(is (= {:values ["Japanese" "Steakhouse"]
:has_more_values false}
(chain-filter categories.name {venues.price 4})))
(testing "Should work with string versions of param values"
(is (= {:values ["Japanese" "Steakhouse"]
:has_more_values false}
(chain-filter categories.name {venues.price "4"})))))
(testing "Show me categories starting with s (case-insensitive) that have expensive restaurants"
(is (= {:values ["Steakhouse"]
:has_more_values false}
(chain-filter categories.name {venues.price 4, categories.name [:starts-with "s" {:case-sensitive false}]}))))
(testing "Show me cheap Thai restaurants"
(is (= {:values ["Kinaree Thai Bistro" "Krua Siri"]
:has_more_values false}
(chain-filter venues.name {venues.price 1, categories.name "Thai"}))))
(testing "Show me the categories that have cheap restaurants"
(is (= {:values ["Asian" "BBQ" "Bakery" "Bar" "Burger" "Caribbean"
"Deli" "Karaoke" "Mexican" "Pizza" "Southern" "Thai"]
:has_more_values false}
(chain-filter categories.name {venues.price 1}))))
(testing "Show me cheap restaurants with the word 'taco' in their name (case-insensitive)"
(is (= {:values ["Tacos Villa Corona" "Tito's Tacos"]
:has_more_values false}
(chain-filter venues.name {venues.price 1, venues.name [:contains "tAcO" {:case-sensitive false}]}))))
(testing "Show me the first 3 expensive restaurants"
(is (= {:values ["Dal Rae Restaurant" "Lawry's The Prime Rib" "Pacific Dining Car - Santa Monica"]
:has_more_values true}
(chain-filter venues.name {venues.price 4} :limit 3))))
(testing "Oh yeah, we actually support arbitrary MBQL filter clauses. Neat!"
(is (= {:values ["Festa" "Fred 62"]
:has_more_values false}
(chain-filter venues.name {venues.price [:between 2 3]
venues.name [:starts-with "f" {:case-sensitive false}]})))))
(deftest multiple-values-test
(testing "Chain filtering should support multiple values for a single parameter (as a vector or set of values)"
(testing "Show me restaurants with price = 1 or 2 with the word 'BBQ' in their name (case-sensitive)"
(is (= {:values ["Baby Blues BBQ" "Beachwood BBQ & Brewing" "Bludso's BBQ"]
:has_more_values false}
(chain-filter venues.name {venues.price #{1 2}, venues.name [:contains "BBQ"]}))))
(testing "Show me the possible values of price for Bakery *or* BBQ restaurants"
(is (= {:values [1 2 3]
:has_more_values false}
(chain-filter venues.price {categories.name ["Bakery" "BBQ"]}))))))
(deftest auto-parse-string-params-test
(testing "Parameters that come in as strings (i.e., all of them that come in via the API) should work as intended"
(is (= {:values ["Baby Blues BBQ" "Beachwood BBQ & Brewing" "Bludso's BBQ"]
:has_more_values false}
(chain-filter venues.name {venues.price ["1" "2"], venues.name [:contains "BBQ"]})))))
(deftest unrelated-params-test
(testing "Parameters that are completely unrelated (don't apply to this Table) should just get ignored entirely"
(binding [chain-filter/*enable-reverse-joins* false]
(is (= {:values [1 2 3]
:has_more_values false}
(chain-filter venues.price {categories.name ["Bakery" "BBQ"]
users.id [1 2 3]}))))))
(def ^:private megagraph
"A large graph that is hugely interconnected. All nodes can get to 50 and 50 has an edge to :end. But the fastest
route is [[:start 50] [50 :end]] and we should quickly identify this last route. Basically handy to demonstrate that
we are doing breadth first search rather than depth first search. Depth first would identify 1 -> 2 -> 3 ... 49 ->
50 -> end"
(let [big 50]
(merge-with merge
(reduce (fn [m [x y]] (assoc-in m [x y] [[x y]]))
{}
(for [x (range (inc big))
y (range (inc big))
:when (not= x y)]
[x y]))
{:start (reduce (fn [m x] (assoc m x [[:start x]]))
{}
(range (inc big)))}
{big {:end [[big :end]]}})))
(def ^:private megagraph-single-path
"Similar to the megagraph above, this graph only has a single path through a hugely interconnected graph. A naive
graph traversal will run out of memory or take quite a long time to find the traversal:
[[:start 90] [90 200] [200 :end]]
There is only one path to end (from 200) and only one path to 200 from 90. If you take out the seen nodes this path
will not be found as the traversal advances through all of the 50 paths from start, all of the 50 paths from 1, all
of the 50 paths from 2, ..."
(merge-with merge
every node is linked to every other node ( 1 ... 199 )
(reduce (fn [m [x y]] (assoc-in m [x y] [[x y]]))
{}
(for [x (range 200)
y (range 200)
:when (not= x y)]
[x y]))
{:start (reduce (fn [m x] (assoc m x [[:start x]]))
{}
(range 200))}
only 90 reaches 200 and only 200 ( big ) reaches the end
{90 {200 [[90 200]]}
200 {:end [[200 :end]]}}))
(deftest traverse-graph-test
(testing "If no need to join, returns immediately"
(is (nil? (#'chain-filter/traverse-graph {} :start :start 5))))
(testing "Finds a simple hop"
(let [graph {:start {:end [:start->end]}}]
(is (= [:start->end]
(#'chain-filter/traverse-graph graph :start :end 5))))
(testing "Finds over a few hops"
(let [graph {:start {:a [:start->a]}
:a {:b [:a->b]}
:b {:c [:b->c]}
:c {:end [:c->end]}}]
(is (= [:start->a :a->b :b->c :c->end]
(#'chain-filter/traverse-graph graph :start :end 5)))
(testing "But will not exceed the max depth"
(is (nil? (#'chain-filter/traverse-graph graph :start :end 2))))))
(testing "Can find a path in a dense and large graph"
(is (= [[:start 50] [50 :end]]
(#'chain-filter/traverse-graph megagraph :start :end 5)))
(is (= [[:start 90] [90 200] [200 :end]]
(#'chain-filter/traverse-graph megagraph-single-path :start :end 5))))
(testing "Returns nil if there is no path"
(let [graph {:start {1 [[:start 1]]}
1 {2 [[1 2]]}
no way to get to 3
3 {4 [[3 4]]}
4 {:end [[4 :end]]}}]
(is (nil? (#'chain-filter/traverse-graph graph :start :end 5)))))
(testing "Not fooled by loops"
(let [graph {:start {:a [:start->a]}
:a {:b [:a->b]
:a [:b->a]}
:b {:c [:b->c]
:a [:c->a]
:b [:c->b]}
:c {:end [:c->end]}}]
(is (= [:start->a :a->b :b->c :c->end]
(#'chain-filter/traverse-graph graph :start :end 5)))
(testing "But will not exceed the max depth"
(is (nil? (#'chain-filter/traverse-graph graph :start :end 2))))))))
(deftest find-joins-test
(mt/dataset airports
(mt/$ids nil
(testing "airport -> municipality"
(is (= [{:lhs {:table $$airport, :field %airport.municipality_id}
:rhs {:table $$municipality, :field %municipality.id}}]
(#'chain-filter/find-joins (mt/id) $$airport $$municipality))))
(testing "airport [-> municipality -> region] -> country"
(is (= [{:lhs {:table $$airport, :field %airport.municipality_id}
:rhs {:table $$municipality, :field %municipality.id}}
{:lhs {:table $$municipality, :field %municipality.region_id}
:rhs {:table $$region, :field %region.id}}
{:lhs {:table $$region, :field %region.country_id}
:rhs {:table $$country, :field %country.id}}]
(#'chain-filter/find-joins (mt/id) $$airport $$country))))
(testing "[backwards]"
(testing "municipality -> airport"
(is (= [{:lhs {:table $$municipality, :field %municipality.id}
:rhs {:table $$airport, :field %airport.municipality_id}}]
(#'chain-filter/find-joins (mt/id) $$municipality $$airport))))
(testing "country [-> region -> municipality] -> airport"
(is (= [{:lhs {:table $$country, :field %country.id}
:rhs {:table $$region, :field %region.country_id}}
{:lhs {:table $$region, :field %region.id}
:rhs {:table $$municipality, :field %municipality.region_id}}
{:lhs {:table $$municipality, :field %municipality.id}
:rhs {:table $$airport, :field %airport.municipality_id}}]
(#'chain-filter/find-joins (mt/id) $$country $$airport))))))))
(deftest find-all-joins-test
(testing "With reverse joins disabled"
(binding [chain-filter/*enable-reverse-joins* false]
(mt/$ids nil
(is (= [{:lhs {:table $$venues, :field %venues.category_id}, :rhs {:table $$categories, :field %categories.id}}]
(#'chain-filter/find-all-joins $$venues #{%categories.name %users.id}))))))
(mt/dataset airports
(mt/$ids nil
(testing "airport [-> municipality] -> region"
(testing "even though we're joining against the same Table multiple times, duplicate joins should be removed"
(is (= [{:lhs {:table $$airport, :field %airport.municipality_id}
:rhs {:table $$municipality, :field %municipality.id}}
{:lhs {:table $$municipality, :field %municipality.region_id}
:rhs {:table $$region, :field %region.id}}]
(#'chain-filter/find-all-joins $$airport #{%region.name %municipality.name %region.id}))))))))
(deftest multi-hop-test
(mt/dataset airports
(testing "Should be able to filter against other tables with that require multiple joins\n"
(testing "single direct join: Airport -> Municipality"
(is (= {:values ["San Francisco International Airport"]
:has_more_values false}
(chain-filter airport.name {municipality.name ["San Francisco"]}))))
(testing "2 joins required: Airport -> Municipality -> Region"
(is (= {:values ["Beale Air Force Base"
"Edwards Air Force Base"
"John Wayne Airport-Orange County Airport"]
:has_more_values false}
(take-n-values 3 (chain-filter airport.name {region.name ["California"]})))))
(testing "3 joins required: Airport -> Municipality -> Region -> Country"
(is (= {:values ["Abraham Lincoln Capital Airport"
"Albuquerque International Sunport"
"Altus Air Force Base"]
:has_more_values false}
(take-n-values 3 (chain-filter airport.name {country.name ["United States"]})))))
(testing "4 joins required: Airport -> Municipality -> Region -> Country -> Continent"
(is (= {:values ["Afonso Pena Airport"
"Alejandro Velasco Astete International Airport"
"Carrasco International /General C L Berisso Airport"]
:has_more_values false}
(take-n-values 3 (chain-filter airport.name {continent.name ["South America"]})))))
(testing "[backwards]"
(testing "single direct join: Municipality -> Airport"
(is (= {:values ["San Francisco"]
:has_more_values false}
(chain-filter municipality.name {airport.name ["San Francisco International Airport"]}))))
(testing "2 joins required: Region -> Municipality -> Airport"
(is (= {:values ["California"]
:has_more_values false}
(chain-filter region.name {airport.name ["San Francisco International Airport"]}))))
(testing "3 joins required: Country -> Region -> Municipality -> Airport"
(is (= {:values ["United States"]
:has_more_values false}
(chain-filter country.name {airport.name ["San Francisco International Airport"]}))))
(testing "4 joins required: Continent -> Region -> Municipality -> Airport"
(is (= {:values ["North America"]
:has_more_values false}
(chain-filter continent.name {airport.name ["San Francisco International Airport"]}))))))))
(deftest filterable-field-ids-test
(mt/$ids
(testing (format "venues.price = %d categories.name = %d users.id = %d\n" %venues.price %categories.name %users.id)
(is (= #{%categories.name %users.id}
(chain-filter/filterable-field-ids %venues.price #{%categories.name %users.id})))
(testing "reverse joins disabled: should exclude users.id"
(binding [chain-filter/*enable-reverse-joins* false]
(is (= #{%categories.name}
(chain-filter/filterable-field-ids %venues.price #{%categories.name %users.id})))))
(testing "return nil if filtering-field-ids is empty"
(is (= nil
(chain-filter/filterable-field-ids %venues.price #{})))))))
(deftest chain-filter-search-test
(testing "Show me categories containing 'eak' (case-insensitive) that have expensive restaurants"
(is (= {:values ["Steakhouse"]
:has_more_values false}
(chain-filter-search categories.name {venues.price 4} "eak"))))
(testing "Show me cheap restaurants including with 'taco' (case-insensitive)"
(is (= {:values ["Tacos Villa Corona" "Tito's Tacos"]
:has_more_values false}
(chain-filter-search venues.name {venues.price 1} "tAcO"))))
(testing "search for something crazy = should return empty results"
(is (= {:values []
:has_more_values false}
(chain-filter-search categories.name {venues.price 4} "zzzzz"))))
(testing "Field that doesn't exist should throw a 404"
(is (thrown-with-msg?
clojure.lang.ExceptionInfo
#"Field [\d,]+ does not exist"
(chain-filter/chain-filter-search Integer/MAX_VALUE nil "s"))))
(testing "Field that isn't type/Text should throw a 400"
(is (thrown-with-msg?
clojure.lang.ExceptionInfo
#"Cannot search against non-Text Field"
(chain-filter/chain-filter-search (mt/$ids %venues.price) nil "s")))))
(defn do-with-human-readable-values-remapping [thunk]
(mt/with-column-remappings [venues.category_id (values-of categories.name)]
(thunk)))
(defmacro with-human-readable-values-remapping {:style/indent 0} [& body]
`(do-with-human-readable-values-remapping (fn [] ~@body)))
(deftest human-readable-values-remapped-chain-filter-test
(with-human-readable-values-remapping
(testing "Show me category IDs for categories"
there are no restaurants with category 1
(is (= {:values [[2 "American"]
[3 "Artisan"]
[4 "Asian"]]
:has_more_values false}
(take-n-values 3 (chain-filter venues.category_id nil)))))
(testing "Show me category IDs for categories that have expensive restaurants"
(is (= {:values [[40 "Japanese"]
[67 "Steakhouse"]]
:has_more_values false}
(take-n-values 3 (chain-filter venues.category_id {venues.price 4})))))
(testing "Show me the category 40 (constraints do not support remapping)"
(is (= {:values [[40 "Japanese"]]
:has_more_values false}
(take-n-values 3 (chain-filter venues.category_id {venues.category_id 40})))))))
(deftest human-readable-values-remapped-chain-filter-search-test
(with-human-readable-values-remapping
(testing "Show me category IDs [whose name] contains 'bar'"
(testing "\nconstraints = {}"
(is (= {:values [[7 "Bar"]
[74 "Wine Bar"]]
:has_more_values false}
(chain-filter-search venues.category_id {} "bar")))))
(testing "\nconstraints = nil"
(is (= {:values [[7 "Bar"]
[74 "Wine Bar"]]
:has_more_values false}
(chain-filter-search venues.category_id nil "bar"))))
(testing "Show me category IDs [whose name] contains 'house' that have expensive restaurants"
(is (= {:values [[67 "Steakhouse"]]
:has_more_values false}
(chain-filter-search venues.category_id {venues.price 4} "house"))))
(testing "search for something crazy: should return empty results"
(is (= {:values []
:has_more_values false}
(chain-filter-search venues.category_id {venues.price 4} "zzzzz"))))))
(deftest field-to-field-remapped-field-id-test
(is (= (mt/id :venues :name)
(#'chain-filter/remapped-field-id (mt/id :venues :id)))))
(deftest field-to-field-remapped-chain-filter-test
(testing "Field-to-field remapping: venues.category_id -> categories.name\n"
(testing "Show me venue IDs (names)"
(is (= {:values [[29 "20th Century Cafe"]
[8 "25°"]
[93 "33 Taps"]]
:has_more_values false}
(take-n-values 3 (chain-filter venues.id nil)))))
(testing "Show me expensive venue IDs (names)"
(is (= {:values [[55 "Dal Rae Restaurant"]
[61 "Lawry's The Prime Rib"]
[16 "Pacific Dining Car - Santa Monica"]]
:has_more_values false}
(take-n-values 3 (chain-filter venues.id {venues.price 4})))))))
(deftest field-to-field-remapped-chain-filter-search-test
(testing "Field-to-field remapping: venues.category_id -> categories.name\n"
(testing "Show me venue IDs that [have a remapped name that] contains 'sushi'"
(is (= {:values [[76 "Beyond Sushi"]
[80 "Blue Ribbon Sushi"]
[77 "Sushi Nakazawa"]]
:has_more_values false}
(take-n-values 3 (chain-filter-search venues.id nil "sushi")))))
(testing "Show me venue IDs that [have a remapped name that] contain 'sushi' that are expensive"
(is (= {:values [[77 "Sushi Nakazawa"]
[79 "Sushi Yasuda"]
[81 "Tanoshi Sushi & Sake Bar"]]
:has_more_values false}
(chain-filter-search venues.id {venues.price 4} "sushi"))))
(testing "search for something crazy = should return empty results"
(is (= {:values []
:has_more_values false}
(chain-filter-search venues.id {venues.price 4} "zzzzz"))))))
(defmacro with-fk-field-to-field-remapping {:style/indent 0} [& body]
`(mt/with-column-remappings [~'venues.category_id ~'categories.name]
~@body))
(deftest fk-field-to-field-remapped-field-id-test
(with-fk-field-to-field-remapping
(is (= (mt/id :categories :name)
(#'chain-filter/remapped-field-id (mt/id :venues :category_id))))))
(deftest fk-field-to-field-remapped-chain-filter-test
(with-fk-field-to-field-remapping
(testing "Show me category IDs for categories"
there are no restaurants with category 1
(is (= {:values [[2 "American"]
[3 "Artisan"]
[4 "Asian"]]
:has_more_values false}
(take-n-values 3 (chain-filter venues.category_id nil)))))
(testing "Show me category IDs for categories that have expensive restaurants"
(is (= {:values [[40 "Japanese"]
[67 "Steakhouse"]]
:has_more_values false}
(chain-filter venues.category_id {venues.price 4}))))
(testing "Show me the category 40 (constraints do not support remapping)"
(is (= {:values [[40 "Japanese"]]
:has_more_values false}
(chain-filter venues.category_id {venues.category_id 40}))))))
(deftest fk-field-to-field-remapped-chain-filter-search-test
(with-fk-field-to-field-remapping
(testing "Show me categories containing 'ar'"
(testing "\nconstraints = {}"
(is (= {:values [[3 "Artisan"]
[7 "Bar"]
[14 "Caribbean"]]
:has_more_values false}
(take-n-values 3 (chain-filter-search venues.category_id {} "ar")))))
(testing "\nconstraints = nil"
(is (= {:values [[3 "Artisan"]
[7 "Bar"]
[14 "Caribbean"]]
:has_more_values false}
(take-n-values 3 (chain-filter-search venues.category_id nil "ar"))))))
(testing "Show me categories containing 'house' that have expensive restaurants"
(is (= {:values [[67 "Steakhouse"]]
:has_more_values false}
(chain-filter-search venues.category_id {venues.price 4} "house"))))
(testing "search for something crazy = should return empty results"
(is (= {:values []
:has_more_values false}
(chain-filter-search venues.category_id {venues.price 4} "zzzzz"))))))
(deftest use-cached-field-values-test
(testing "chain-filter should use cached FieldValues if applicable (#13832)"
(let [field-id (mt/id :categories :name)]
(mt/with-model-cleanup [FieldValues]
(testing "should created a full FieldValues when constraints is `nil`"
(chain-filter categories.name nil)
(with-redefs [params.field-values/create-advanced-field-values! (fn [& _args]
(assert false "Should not be called"))]
(is (= {:values ["African" "American" "Artisan"]
:has_more_values false}
(take-n-values 3 (chain-filter categories.name nil))))
(is (= 1 (db/count FieldValues :field_id field-id :type :full)))))
(testing "should create a linked-filter FieldValues when have constraints"
(field-values/clear-advanced-field-values-for-field! field-id)
(chain-filter categories.name {venues.price 4})
(with-redefs [params.field-values/create-advanced-field-values! (fn [& _args]
(assert false "Should not be called"))]
(is (= {:values ["Japanese" "Steakhouse"]
:has_more_values false}
(chain-filter categories.name {venues.price 4})))
(is (= 1 (db/count FieldValues :field_id field-id :type :linked-filter)))))
(testing "should do in-memory search with the cached FieldValues when search without constraints"
(mt/with-temp-vals-in-db FieldValues (db/select-one-id FieldValues :field_id field-id :type "full") {:values ["Good" "Bad"]}
(is (= {:values ["Good"]
:has_more_values false}
(chain-filter-search categories.name nil "ood")))))
(testing "search with constraitns"
(field-values/clear-advanced-field-values-for-field! field-id)
(testing "should create a linked-filter FieldValues"
(chain-filter categories.name {venues.price 4})
(is (= 1 (db/count FieldValues :field_id field-id :type "linked-filter"))))
(testing "should search for the values of linked-filter FieldValues"
(db/update-where! FieldValues {:field_id field-id
:type "linked-filter"}
:values (json/generate-string ["Good" "Bad"])
HACK : currently this is hardcoded to true for linked - filter
:has_more_values false)
(is (= {:values ["Good"]
:has_more_values false}
(chain-filter-search categories.name {venues.price 4} "o")))
(testing "Shouldn't use cached FieldValues if has_more_values=true"
(db/update-where! FieldValues {:field_id field-id
:type "linked-filter"}
:has_more_values true)
(is (= {:values ["Steakhouse"]
:has_more_values false}
(chain-filter-search categories.name {venues.price 4} "o"))))))))))
(deftest time-interval-test
(testing "chain-filter should accept time interval strings like `past32weeks` for temporal Fields"
(mt/$ids
(is (= [:time-interval $checkins.date -32 :week {:include-current false}]
(#'chain-filter/filter-clause $$checkins %checkins.date "past32weeks"))))))
(mt/defdataset nil-values-dataset
[["tbl"
[{:field-name "mytype", :base-type :type/Text}
{:field-name "myfield", :base-type :type/Text}]
[["value" "value"]
["null" nil]
["empty" ""]]]])
(deftest nil-values-test
(testing "Chain filter fns should work for fields that have nil or empty values (#17659)"
(mt/dataset nil-values-dataset
(mt/$ids tbl
(letfn [(thunk []
(doseq [[field expected-values] {:mytype {:values ["empty" "null" "value"]
:has_more_values false}
:myfield {:values [nil "" "value"]
:has_more_values false}}]
(testing "chain-filter"
sorting can differ a bit based on whether we use FieldValues or not ... not sure why this is
(is (= expected-values
(update (chain-filter/chain-filter (mt/id :tbl field) {}) :values sort))))
(testing "chain-filter-search"
(is (= {:values ["value"]
:has_more_values false}
(chain-filter/chain-filter-search (mt/id :tbl field) {} "val"))))))]
(testing "no FieldValues"
(thunk))
(testing "with FieldValues for myfield"
(mt/with-temp FieldValues [_ {:field_id %myfield, :values ["value" nil ""]}]
(mt/with-temp-vals-in-db Field %myfield {:has_field_values "auto-list"}
(testing "Sanity check: make sure we will actually use the cached FieldValues"
(is (field-values/field-should-have-field-values? %myfield))
(is (#'chain-filter/use-cached-field-values? %myfield)))
(thunk)))))))))
(defn- do-with-clean-field-values-for-field
[field-or-field-id thunk]
(mt/with-model-cleanup [FieldValues]
(let [field-id (u/the-id field-or-field-id)
has_field_values (db/select-one-field :has_field_values Field :id field-id)
fvs (db/select FieldValues :field_id field-id)]
(db/update! Field field-id :has_field_values "list")
(db/delete! FieldValues :field_id field-id)
(try
(thunk)
(finally
(db/update! Field field-id :has_field_values has_field_values)
(db/insert-many! FieldValues fvs))))))
(defmacro ^:private with-clean-field-values-for-field
"Run `body` with all FieldValues for `field-id` deleted.
Restores the deleted FieldValues when we're done."
{:style/indent 1}
[field-or-field-id & body]
`(do-with-clean-field-values-for-field ~field-or-field-id (fn [] ~@body)))
(deftest chain-filter-has-more-values-test
(testing "the `has_more_values` property should be correct\n"
(testing "for cached fields"
(testing "without contraints"
(with-clean-field-values-for-field (mt/id :categories :name)
(testing "`false` for field has values less than [[field-values/*total-max-length*]] threshold"
(is (= false
(:has_more_values (chain-filter categories.name {})))))
(testing "`true` if the limit option is less than the count of values of fieldvalues"
(is (= true
(:has_more_values (chain-filter categories.name {} :limit 1)))))
(testing "`false` if the limit option is greater the count of values of fieldvalues"
(is (= false
(:has_more_values (chain-filter categories.name {} :limit Integer/MAX_VALUE))))))
(testing "`true` if the values of a field exceeds our [[field-values/*total-max-length*]] limit"
(with-clean-field-values-for-field (mt/id :categories :name)
(binding [field-values/*total-max-length* 10]
(is (= true
(:has_more_values (chain-filter categories.name {}))))))))
(testing "with contraints"
(with-clean-field-values-for-field (mt/id :categories :name)
(testing "`false` for field has values less than [[field-values/*total-max-length*]] threshold"
(is (= false
(:has_more_values (chain-filter categories.name {venues.price 4})))))
(testing "`true` if the limit option is less than the count of values of fieldvalues"
(is (= true
(:has_more_values (chain-filter categories.name {venues.price 4} :limit 1)))))
(testing "`false` if the limit option is greater the count of values of fieldvalues"
(is (= false
(:has_more_values (chain-filter categories.name {venues.price 4} :limit Integer/MAX_VALUE))))))
(with-clean-field-values-for-field (mt/id :categories :name)
(testing "`true` if the values of a field exceeds our [[field-values/*total-max-length*]] limit"
(binding [field-values/*total-max-length* 10]
(is (= true
(:has_more_values (chain-filter categories.name {venues.price 4})))))))))
(testing "for non-cached fields"
(testing "with contraints"
(with-clean-field-values-for-field (mt/id :venues :latitude)
(testing "`false` if we don't specify limit"
(is (= false
(:has_more_values (chain-filter venues.latitude {venues.price 4})))))
(testing "`true` if the limit is less than the number of values the field has"
(is (= true
(:has_more_values (chain-filter venues.latitude {venues.price 4} :limit 1))))))))))
|
ab9cc9ab9e9a754531512b1cce78372d008a3f97ad2d079e47a51ed68e0c4f78 | polysemy-research/polysemy-zoo | Cont.hs | # LANGUAGE AllowAmbiguousTypes , Trustworthy #
module Polysemy.Cont
(-- * Effect
Cont(..)
-- * Actions
, jump
, subst
, callCC
-- * Interpretations
, runContPure
, runContM
, contToFinal
-- * Experimental Interpretations
, runContViaFresh
-- * Unsafe Interpretations
, runContUnsafe
-- * Prompt types
, Ref(..)
, ExitRef(..)
, ViaFreshRef
) where
import Data.Void
import Polysemy
import Polysemy.Final
import Polysemy.Cont.Internal
import Polysemy.Error
import Polysemy.Fresh
import Control.Monad.Cont (MonadCont(), ContT(..), runContT)
import qualified Control.Monad.Cont as C (callCC)
-----------------------------------------------------------------------------
-- | Call with current continuation.
-- Executing the provided continuation will abort execution.
--
-- Using the provided continuation
-- will rollback all local effectful state back to the point where
-- 'callCC' was invoked.
--
-- Higher-order effects do not interact with the continuation in any meaningful
way ; i.e. ' Polysemy.Reader.local ' or ' ' does not affect
-- it, and 'Polysemy.Error.catch' will fail to catch any of its exceptions.
-- The only exception to this is if you interpret such effects /and/ 'Cont'
-- in terms of the final monad, and the final monad can perform such interactions
-- in a meaningful manner.
callCC :: forall ref r a
. Member (Cont ref) r
=> ((forall b. a -> Sem r b) -> Sem r a)
-> Sem r a
callCC cc = subst @ref (\ref -> cc (jump ref)) pure
# INLINE callCC #
-----------------------------------------------------------------------------
-- | Runs a 'Cont' effect by providing 'pure' as the final continuation.
--
-- This is a safe variant of 'runContUnsafe', as this may only be used
-- as the final interpreter before 'run'.
runContPure :: Sem '[Cont (Ref (Sem '[]) a)] a -> Sem '[] a
runContPure = runContUnsafe
# INLINE runContPure #
-----------------------------------------------------------------------------
-- | Runs a 'Cont' effect by providing 'pure' as the final continuation.
--
-- This is a safe variant of 'runContUnsafe', as this may only be used
-- as the final interpreter before 'runM'.
runContM :: Sem '[Cont (Ref (Sem '[Embed m]) a), Embed m] a -> Sem '[Embed m] a
runContM = runContUnsafe
# INLINE runContM #
-----------------------------------------------------------------------------
| Runs a ' Cont ' effect in terms of a final ' MonadCont '
--
-- /Beware/: Effects that aren't interpreted in terms of the final monad
-- will have local state semantics in regards to 'Cont' effects
-- interpreted this way. See 'Final'.
contToFinal :: (Member (Final m) r, MonadCont m)
=> Sem (Cont (ExitRef m) ': r) a
-> Sem r a
contToFinal = interpretFinal $ \case
Jump ref a -> pure $ enterExit ref a
Subst main cb -> do
main' <- bindS main
cb' <- bindS cb
s <- getInitialStateS
pure $ C.callCC $ \exit ->
main' (ExitRef (\a -> cb' (a <$ s) >>= vacuous . exit) <$ s)
# INLINE contToFinal #
-----------------------------------------------------------------------------
-- | A highly experimental 'Cont' interpreter that functions
-- through a combination of 'Error' and 'Fresh'. This may be used safely
-- anywhere in the effect stack.
--
-- 'runContViaFresh' is still under development.
-- You're encouraged to experiment with it, but don't rely on it.
For best results , use ' runContViaFresh ' as the first interpreter you run ,
-- such that all other effects are global in respect to it.
--
-- This interpreter may return 'Nothing' if the control flow becomes
-- split into separate, inconsistent parts,
-- such that backtracking fails when trying to invoke continuations.
-- For example, if you reify a continuation inside an
-- 'async':ed thread, and then have that thread return the reified
-- continuation back to the main thread through an 'await', then
-- 'runContViaFresh' will return 'Nothing' upon executing the continuation
-- in the main thread.
runContViaFresh :: forall uniq r a
. (Member (Fresh uniq) r, Eq uniq)
=> Sem (Cont (ViaFreshRef uniq) ': r) a
-> Sem r (Maybe a)
runContViaFresh =
let
hush (Right a) = Just a
hush _ = Nothing
in
fmap hush
. runError
. (`runContT` pure)
. runContViaFreshInC
# INLINE runContViaFresh #
-----------------------------------------------------------------------------
-- | Runs a 'Cont' effect by providing 'pure' as the final continuation.
--
-- __Beware__: This interpreter will invalidate all higher-order effects of any
interpreter run after it ; i.e. ' Polysemy.Reader.local ' and
' ' will be no - ops , ' Polysemy.Error.catch ' will fail
to catch exceptions , and ' Polysemy.Writer.listen ' will always return ' ' .
--
-- __You should therefore use 'runContUnsafe' only /after/ running all__
-- __interpreters for your higher-order effects.__
--
-- Note that 'Final' is a higher-order effect, and thus 'runContUnsafe' can't
safely be used together with ' ' .
runContUnsafe :: Sem (Cont (Ref (Sem r) a) ': r) a -> Sem r a
runContUnsafe = runContWithCUnsafe pure
# INLINE runContUnsafe #
| null | https://raw.githubusercontent.com/polysemy-research/polysemy-zoo/eb0ce40e4d3b9757ede851a3450c05cc42949b49/src/Polysemy/Cont.hs | haskell | * Effect
* Actions
* Interpretations
* Experimental Interpretations
* Unsafe Interpretations
* Prompt types
---------------------------------------------------------------------------
| Call with current continuation.
Executing the provided continuation will abort execution.
Using the provided continuation
will rollback all local effectful state back to the point where
'callCC' was invoked.
Higher-order effects do not interact with the continuation in any meaningful
it, and 'Polysemy.Error.catch' will fail to catch any of its exceptions.
The only exception to this is if you interpret such effects /and/ 'Cont'
in terms of the final monad, and the final monad can perform such interactions
in a meaningful manner.
---------------------------------------------------------------------------
| Runs a 'Cont' effect by providing 'pure' as the final continuation.
This is a safe variant of 'runContUnsafe', as this may only be used
as the final interpreter before 'run'.
---------------------------------------------------------------------------
| Runs a 'Cont' effect by providing 'pure' as the final continuation.
This is a safe variant of 'runContUnsafe', as this may only be used
as the final interpreter before 'runM'.
---------------------------------------------------------------------------
/Beware/: Effects that aren't interpreted in terms of the final monad
will have local state semantics in regards to 'Cont' effects
interpreted this way. See 'Final'.
---------------------------------------------------------------------------
| A highly experimental 'Cont' interpreter that functions
through a combination of 'Error' and 'Fresh'. This may be used safely
anywhere in the effect stack.
'runContViaFresh' is still under development.
You're encouraged to experiment with it, but don't rely on it.
such that all other effects are global in respect to it.
This interpreter may return 'Nothing' if the control flow becomes
split into separate, inconsistent parts,
such that backtracking fails when trying to invoke continuations.
For example, if you reify a continuation inside an
'async':ed thread, and then have that thread return the reified
continuation back to the main thread through an 'await', then
'runContViaFresh' will return 'Nothing' upon executing the continuation
in the main thread.
---------------------------------------------------------------------------
| Runs a 'Cont' effect by providing 'pure' as the final continuation.
__Beware__: This interpreter will invalidate all higher-order effects of any
__You should therefore use 'runContUnsafe' only /after/ running all__
__interpreters for your higher-order effects.__
Note that 'Final' is a higher-order effect, and thus 'runContUnsafe' can't | # LANGUAGE AllowAmbiguousTypes , Trustworthy #
module Polysemy.Cont
Cont(..)
, jump
, subst
, callCC
, runContPure
, runContM
, contToFinal
, runContViaFresh
, runContUnsafe
, Ref(..)
, ExitRef(..)
, ViaFreshRef
) where
import Data.Void
import Polysemy
import Polysemy.Final
import Polysemy.Cont.Internal
import Polysemy.Error
import Polysemy.Fresh
import Control.Monad.Cont (MonadCont(), ContT(..), runContT)
import qualified Control.Monad.Cont as C (callCC)
way ; i.e. ' Polysemy.Reader.local ' or ' ' does not affect
callCC :: forall ref r a
. Member (Cont ref) r
=> ((forall b. a -> Sem r b) -> Sem r a)
-> Sem r a
callCC cc = subst @ref (\ref -> cc (jump ref)) pure
# INLINE callCC #
runContPure :: Sem '[Cont (Ref (Sem '[]) a)] a -> Sem '[] a
runContPure = runContUnsafe
# INLINE runContPure #
runContM :: Sem '[Cont (Ref (Sem '[Embed m]) a), Embed m] a -> Sem '[Embed m] a
runContM = runContUnsafe
# INLINE runContM #
| Runs a ' Cont ' effect in terms of a final ' MonadCont '
contToFinal :: (Member (Final m) r, MonadCont m)
=> Sem (Cont (ExitRef m) ': r) a
-> Sem r a
contToFinal = interpretFinal $ \case
Jump ref a -> pure $ enterExit ref a
Subst main cb -> do
main' <- bindS main
cb' <- bindS cb
s <- getInitialStateS
pure $ C.callCC $ \exit ->
main' (ExitRef (\a -> cb' (a <$ s) >>= vacuous . exit) <$ s)
# INLINE contToFinal #
For best results , use ' runContViaFresh ' as the first interpreter you run ,
runContViaFresh :: forall uniq r a
. (Member (Fresh uniq) r, Eq uniq)
=> Sem (Cont (ViaFreshRef uniq) ': r) a
-> Sem r (Maybe a)
runContViaFresh =
let
hush (Right a) = Just a
hush _ = Nothing
in
fmap hush
. runError
. (`runContT` pure)
. runContViaFreshInC
# INLINE runContViaFresh #
interpreter run after it ; i.e. ' Polysemy.Reader.local ' and
' ' will be no - ops , ' Polysemy.Error.catch ' will fail
to catch exceptions , and ' Polysemy.Writer.listen ' will always return ' ' .
safely be used together with ' ' .
runContUnsafe :: Sem (Cont (Ref (Sem r) a) ': r) a -> Sem r a
runContUnsafe = runContWithCUnsafe pure
# INLINE runContUnsafe #
|
9b1ba63ccd52a1a925a9c41155dd8a11c861c4a7190614c0b3f7d28a3f2bc877 | locusmath/locus | object.clj | (ns locus.set.copresheaf.dependency.core.object
(:require [locus.set.logic.core.set :refer :all]
[locus.set.logic.limit.product :refer :all]
[locus.set.logic.sequence.object :refer :all]
[locus.con.core.setpart :refer :all]
[locus.set.logic.structure.protocols :refer :all]
[locus.set.mapping.general.core.object :refer :all]
[locus.set.mapping.general.core.util :refer :all]
[locus.order.general.core.object :refer :all]
[locus.order.general.core.util :refer :all]
[locus.order.general.skeletal.object :refer :all]
[locus.order.general.discrete.object :refer :all]
[locus.set.quiver.diset.core.object :refer :all]
[locus.set.quiver.relation.binary.product :refer :all]
[locus.set.quiver.relation.binary.br :refer :all]
[locus.set.quiver.relation.binary.sr :refer :all]
[locus.set.copresheaf.bijection.core.object :refer :all]
[locus.set.copresheaf.bijection.core.morphism :refer :all]
[locus.set.copresheaf.structure.core.protocols :refer :all]
[locus.set.quiver.structure.core.protocols :refer :all]
[locus.set.tree.triangle.core.object :refer :all]
[locus.set.copresheaf.incidence.core.object :refer :all]
[locus.set.tree.cospan.core.object :refer :all]
[locus.set.quiver.diset.core.morphism :refer :all]
[locus.set.copresheaf.dependency.dibijection.object :refer :all]
[locus.set.quiver.unary.core.morphism :refer :all]
[locus.set.copresheaf.dependency.nset.object :refer :all]
[locus.set.copresheaf.dependency.nfunction.object :refer :all]
[locus.set.copresheaf.dependency.nbijection.object :refer :all]
[locus.set.tree.chain.core.object :refer :all]
[locus.set.tree.triangle.core.morphism :refer :all]
[locus.set.copresheaf.incidence.core.morphism :refer :all]
[locus.set.tree.cospan.core.morphism :refer :all]
[locus.set.copresheaf.cube.core.object :refer :all]
[locus.set.tree.chain.core.morphism :refer :all]
[locus.set.copresheaf.indexed.family.object :refer :all]
[locus.set.tree.multicospan.core.object :refer :all]
[locus.set.copresheaf.multispan.core.object :refer :all])
(:import (locus.set.copresheaf.bijection.core.object Bijection)
(locus.set.quiver.diset.core.morphism Difunction)
(locus.set.tree.triangle.core.object SetTriangle)
(locus.set.copresheaf.incidence.core.object Span)
(locus.set.tree.cospan.core.object Cospan)
(locus.set.copresheaf.dependency.dibijection.object Dibijection)
(locus.set.quiver.unary.core.morphism Diamond)
(locus.set.copresheaf.bijection.core.morphism Gem)
(locus.set.copresheaf.dependency.nset.object NSet)
(locus.set.copresheaf.dependency.nfunction.object NFunction)
(locus.set.copresheaf.dependency.nbijection.object NBijection)
(locus.set.tree.chain.core.object SetChain)
(locus.set.tree.triangle.core.morphism TriangleMorphism)
(locus.set.copresheaf.incidence.core.morphism MorphismOfSpans)
(locus.set.tree.cospan.core.morphism MorphismOfCospans)
(locus.set.copresheaf.cube.core.object Cube)
(locus.set.tree.chain.core.morphism ChainMorphism)
(locus.set.mapping.general.core.object SetFunction)
(locus.set.copresheaf.indexed.family.object IndexedFamily)
(locus.order.general.discrete.object DiscretePoset)
(locus.set.tree.multicospan.core.object Multicospan)
(locus.set.copresheaf.multispan.core.object Multispan)))
Copresheaves over preorders Sets^P
; These are generalisations of functional dependencies of relations.
(deftype Dependency [order object-function morphism-function]
StructuredDifunction
(first-function [this]
morphism-function)
(second-function [this]
object-function))
Get the sets and functions associated with dependency copresheaves
(defmethod get-set Dependency
[^Dependency dependency, x]
(object-apply dependency x))
(defmethod get-function Dependency
[^Dependency dependency, x]
(morphism-apply dependency x))
Index preorders for copresheaves over preorders
(defmethod index :default
[obj] nil)
(defmethod index :locus.set.logic.core.set/universal
[coll]
(relational-preposet
(weak-order [#{0}])))
(defmethod index :locus.set.logic.structure.protocols/set-function
[func]
(relational-poset
(weak-order [#{0} #{1}])))
(defmethod index :locus.set.copresheaf.structure.core.protocols/diset
[diset]
(relational-preposet
(weak-order [#{0 1}])))
(defmethod index :locus.set.copresheaf.structure.core.protocols/bijection
[bijection]
(relational-preposet
(total-preorder [#{0 1}])))
(defmethod index SetTriangle
[triangle]
(relational-poset
(total-order 0 1 2)))
(defmethod index Span
[span]
(relational-poset
(weak-order [#{0} #{1 2}])))
(defmethod index Multispan
[multispan]
(let [n (multispan-type multispan)]
(relational-poset
(if (zero? n)
(weak-order [#{0}])
(weak-order
[#{0}
(set (range 1 (inc n)))])))))
(defmethod index Cospan
[cospan]
(relational-poset
(weak-order
['#{(0) (1)}
'#{()}])))
(defmethod index Multicospan
[multicospan]
(let [n (multicospan-type multicospan)]
(relational-poset
(if (zero? n)
(weak-order [#{'()}])
(weak-order
[(set
(map
(fn [i]
(list i))
(range n)))
'#{()}])))))
(defmethod index Difunction
[difunction]
(product
(to-poset (total-order 0 1))
(to-poset (weak-order [#{0 1}]))))
(defmethod index Dibijection
[dibijection]
(product
(to-preposet (total-preorder [#{0 1}]))
(to-preposet (weak-order [#{0 1}]))))
(defmethod index Diamond
[diamond]
(product
(to-poset (total-order 0 1))
(to-poset (total-order 0 1))))
(defmethod index Gem
[gem]
(product
(to-poset (total-order 0 1))
(to-preposet (total-preorder [#{0 1}]))))
(defmethod index MorphismOfCospans
[morphism]
(product
(to-poset (total-order 0 1))
(to-poset (weak-order [#{0 1} #{2}]))))
(defmethod index Cube
[cube]
(product
(to-poset (total-order 0 1))
(product
(to-poset (total-order 0 1))
(to-poset (total-order 0 1)))))
(defmethod index MorphismOfSpans
[morphism]
(product
(to-poset (total-order 0 1))
(to-poset (weak-order [#{0} #{1 2}]))))
(defmethod index TriangleMorphism
[morphism]
(product
(to-poset (total-order 0 1))
(to-poset (total-order 0 1 2))))
(defmethod index NSet
[nset]
(nth-antichain (nset-type nset)))
(defmethod index NFunction
[nfunction]
(product
(to-poset (total-order 0 1))
(nth-antichain (nfunction-type nfunction))))
(defmethod index NBijection
[nbijection]
(product
(to-preposet (total-preorder [#{0 1}]))
(nth-antichain (nbijection-type nbijection))))
(defmethod index SetChain
[^SetChain chain]
(nth-chain (inc (count (composition-sequence chain)))))
(defmethod index ChainMorphism
[morphism]
(product
(to-poset (total-order 0 1))
(index (source-object morphism))))
(defmethod index Dependency
[^Dependency dependency] (.-order dependency))
(defmethod index IndexedFamily
[^IndexedFamily family]
(DiscretePoset. (index-set family)))
Convert presheaves over preorders into a common format
(defmulti to-dependency type)
(defmethod to-dependency Dependency
[dependency] dependency)
(defmethod to-dependency :default
[dependency]
(let [dep (index dependency)]
(if (nil? dep)
(throw (new IllegalArgumentException))
(->Dependency dep (partial get-set dependency) (partial get-function dependency)))))
; The topos of lower bijective triangles consists of presheaves over the total
preorder [ 2,1 ] . Its elements are like special cases of triangle copresheaves over
the total order [ 1,1,1 ] except their lower half is invertible .
(def two-one-preorder
(relational-preposet (total-preorder [#{0 1} #{2}])))
(defn lower-bijective-triangle
[func bijection]
(let [s0 (inputs bijection)
s1 (outputs bijection)
s2 (outputs func)]
(->Dependency
two-one-preorder
(fn [obj]
(case obj
0 s0
1 s1
2 s2))
(fn [a b]
(case [a b]
[0 0] (identity-function s0)
[1 1] (identity-function s1)
[2 2] (identity-function s2)
[0 1] (underlying-function bijection)
[1 0] (underlying-function (inv bijection))
[0 2] (compose func (underlying-function bijection))
[1 2] func)))))
(defn relational-lower-bijective-triangle
[rel]
(lower-bijective-triangle
(relation-transition-map rel 1 2)
(make-bijection-by-function-pair
(relation-transition-map rel 0 1)
(relation-transition-map rel 1 0))))
; The topos of upper bijective triangles consists of presheaves over the total
preorder [ 1,2 ] . Its elements are like triangle copresheaves over the total order
[ 1,1,1 ] except their upper half is invertible .
(def one-two-preorder
(relational-preposet (total-preorder [#{0} #{1 2}])))
(defn upper-bijective-triangle
[bijection func]
(let [s0 (inputs func)
s1 (outputs func)
s2 (outputs bijection)]
(->Dependency
two-one-preorder
(fn [obj]
(case obj
0 s0
1 s1
2 s2))
(fn [[a b]]
(case [a b]
[0 0] (identity-function s0)
[1 1] (identity-function s1)
[2 2] (identity-function s2)
[1 2] (underlying-function bijection)
[2 1] (underlying-function (inv bijection))
[0 1] func
[0 2] (compose (underlying-function bijection) func))))))
(defn relational-upper-bijective-triangle
[rel]
(upper-bijective-triangle
(make-bijection-by-function-pair
(relation-transition-map rel 1 2)
(relation-transition-map rel 2 1))
(relation-transition-map rel 0 1)))
; The topos of trijections is a natural generalisation of the topos of bijections to
a morphism on three objects . Like the topos of bijections , it is boolean and all of
; its algebraic properties coincide with the topos of sets.
(def k3-preorder
(relational-preposet
(total-preorder [#{0 1 2}])))
(defn trijection
[f g]
(let [s0 (inputs g)
s1 (outputs g)
s2 (outputs f)]
(->Dependency
k3-preorder
(fn [obj]
(case obj
0 s0
1 s1
2 s2))
(fn [[a b]]
(case [a b]
[0 0] (identity-function s0)
[0 1] (underlying-function g)
[0 2] (underlying-function (compose f g))
[1 0] (underlying-function (inv g))
[1 1] (identity-function s1)
[1 2] (underlying-function f)
[2 0] (underlying-function (inv (compose f g)))
[2 1] (underlying-function (inv f))
[2 2] (identity-function s2))))))
(defn relational-trijection
[rel]
(trijection
(make-bijection-by-function-pair
(relation-transition-map rel 1 2)
(relation-transition-map rel 2 1))
(make-bijection-by-function-pair
(relation-transition-map rel 0 1)
(relation-transition-map rel 1 0))))
The topos of set function pairs is the topos of copresheaves over the index category
; T_2 + 1. So it is another example of a presheaf topos of a preorder.
(def t2-plus-one-order
(relational-poset '#{(0 0) (1 1) (2 2) (1 2)}))
(defn set-function-pair
[coll func]
(let [s0 coll
s1 (inputs func)
s2 (outputs func)]
(Dependency.
t2-plus-one-order
(fn [obj]
(case obj
0 s0
1 s1
2 s2))
(fn [[a b]]
(case [a b]
[0 0] (identity-function s0)
[1 1] (identity-function s1)
[2 2] (identity-function s2)
[1 2] func)))))
The topos of set and bijection pairs is the topos of copresheaves over the index category
; K_2 + 1. So it is another example of a presheaf topos of a preorder.
(def k2-plus-one-preorder
(relational-preposet '#{(0 0) (1 1) (2 2) (1 2) (2 1)}))
(defn set-bijection-pair
[coll bijection]
(let [s0 coll
s1 (inputs bijection)
s2 (outputs bijection)]
(Dependency.
k2-plus-one-preorder
(fn [obj]
(case obj
0 s0
1 s1
2 s2))
(fn [[a b]]
(case [a b]
[0 0] (identity-function s0)
[1 1] (identity-function s1)
[2 2] (identity-function s2)
[1 2] (underlying-function bijection)
[2 1] (underlying-function (inv bijection)))))))
; The topos of triples of sets Sets^3 is the next level generalisation of the topos of pairs
of sets Sets^2 to three objects instead of two . It is again a boolean topos , as it
; shares that property with the topos Sets^2 but unlike Sets it is not bivalent.
(def e3-order
(relational-poset (coreflexive-relation #{0 1 2})))
(defn triset
[a b c]
(->Dependency
e3-order
(fn [n]
(case n
0 a
1 b
2 c))
(fn [[i j]]
(case [i j]
[0 0] (identity-function a)
[1 1] (identity-function b)
[2 2] (identity-function c)))))
Copresheaves for generalized incidence structures
(defn nth-span-order
[n]
(relational-poset
(weak-order
[#{0} (set (range 1 (inc n)))])))
(defn nspan
[& funcs]
(let [n (count funcs)]
(letfn [(nth-set [i]
(if (zero? i)
(inputs (first funcs))
(outputs (nth funcs (dec i)))))]
(Dependency.
(nth-span-order n)
nth-set
(fn [[i j]]
(if (= i j)
(identity-function (nth-set i))
(nth funcs (dec j))))))))
Copresheaves for generalized cospans
(defn nth-cospan-order
[n]
(relational-poset
(weak-order
[(set (range 1 (inc n))) #{0}])))
(defn ncospan
[& funcs]
(let [n (count funcs)]
(letfn [(nth-set [i]
(if (zero? i)
(outputs (first funcs))
(inputs (nth funcs (dec i)))))]
(Dependency.
(nth-cospan-order n)
nth-set
(fn [[i j]]
(if (= i j)
(identity-function (nth-set i))
(nth funcs (dec i))))))))
; Multi incidence relations defined by indexed membership relations.
(defn multi-incidence-order
[n]
(relational-poset
(weak-order
[(set (range n))
(set (range n (+ n 2)))])))
(def two-two-order
(multi-incidence-order 2))
(defn nth-member-flags
[vertices edges n]
(seqable-binary-relation
edges
vertices
(fn [[edge vertex]]
(contains? (nth edge n) vertex))))
(defn multi-incidence-structure
([vertices edges]
(multi-incidence-structure vertices edges (count (first edges))))
([vertices edges arity]
(let [edge-index (+ arity 1)
vertex-index (+ arity 2)]
(letfn [(get-component-set [n]
(cond
(< n arity) (nth-member-flags vertices edges n)
(= n edge-index) edges
(= n vertex-index) vertices))
(get-component-function [[i j]]
(cond
(= i j) (identity-function (get-component-set i))
(= j edge-index) (->SetFunction
(nth-member-flags vertices edges i)
edges
first)
(= j vertex-index) (->SetFunction
(nth-member-flags vertices edges i)
vertices
second)))]
(->Dependency
(multi-incidence-order arity)
get-component-set
get-component-function)))))
(defn crown
[vertices edges] (multi-incidence-structure vertices edges 2))
Presheaves over the weak order [ 1,1,2 ]
(def lower-common-tree
(relational-poset
(weak-order [#{0} #{1} #{2 3}])))
(defn combine-prefunction-equal-triangles
[triangle1 triangle2]
(letfn [(get-component-set [n]
(case n
0 (triangle-source triangle1)
1 (triangle-middle triangle1)
2 (triangle-target triangle1)
3 (triangle-target triangle2)))
(get-component-function [[i j]]
(if (= i j)
(identity-function (get-component-set i))
(case [i j]
[0 1] (prefunction triangle1)
[0 2] (compfunction triangle1)
[0 3] (compfunction triangle2)
[1 2] (postfunction triangle1)
[1 3] (postfunction triangle2))))]
(->Dependency
lower-common-tree
get-component-set
get-component-function)))
Presheaves over the weak order [ 2 , 1 , 1 ]
(def upper-common-tree
(relational-poset
(weak-order [#{0 1} #{2} #{3}])))
(defn combine-postfunction-equal-triangles
[triangle1 triangle2]
(letfn [(get-component-set [n]
(case n
0 (triangle-source triangle1)
1 (triangle-source triangle2)
2 (triangle-middle triangle1)
3 (triangle-target triangle1)))
(get-component-function [[i j]]
(if (= i j)
(identity-function (get-component-set i))
(case [i j]
[0 2] (prefunction triangle1)
[0 3] (compfunction triangle1)
[1 2] (prefunction triangle2)
[1 3] (compfunction triangle2)
[2 3] (postfunction triangle1))))]
(->Dependency
upper-common-tree
get-component-set
get-component-function)))
Presheaves over the disjoint union of total orders T_3 + T_3
(def ditriangle-order
(relational-poset
'#{(0 0) (1 1) (2 2) (0 1) (0 2) (1 2) (3 3) (4 4) (5 5) (3 5) (3 4) (4 5)}))
(defn ditriangle
[triangle1 triangle2]
(Dependency.
ditriangle-order
(fn [obj]
(case obj
0 (triangle-source triangle1)
1 (triangle-middle triangle1)
2 (triangle-target triangle1)
3 (triangle-source triangle2)
4 (triangle-middle triangle2)
5 (triangle-target triangle2)))
(fn [[a b]]
(case [a b]
[0 0] (identity-function (triangle-source triangle1))
[1 1] (identity-function (triangle-middle triangle1))
[2 2] (identity-function (triangle-target triangle1))
[3 3] (identity-function (triangle-source triangle2))
[4 4] (identity-function (triangle-middle triangle2))
[5 5] (identity-function (triangle-target triangle2))
[0 1] (prefunction triangle1)
[0 2] (compfunction triangle1)
[1 2] (postfunction triangle1)
[3 4] (prefunction triangle2)
[3 5] (compfunction triangle2)
[4 5] (postfunction triangle2)))))
(defn combine-composable-difunctions
[f g]
(ditriangle
(SetTriangle. (first-function f) (first-function g))
(SetTriangle. (second-function f) (second-function g))))
; Create a multijection copresheaf over a complete thin groupoid
(defn multijection
[& args]
(let [bijections (reverse args)
n (count bijections)]
(letfn [(nth-set [i]
(if (zero? i)
(inputs (first bijections))
(outputs (nth bijections (dec i)))))
(nth-function [[i j]]
(cond
(= i j) (identity-function (nth-set i))
(< i j) (apply
compose
(map
(fn [k]
(underlying-function (nth bijections k)))
(range i j)))
(< j i) (apply
compose
(map
(fn [k]
(underlying-function (inv (nth bijections k))))
(reverse (range j i))))))]
(->Dependency
(nth-complete-preorder (inc n))
nth-set
nth-function))))
; Generalized tuples of sets and functions
(defn height-two-multichain-order
[n k]
(union
(weak-order [(set (range n))])
(set
(mapcat
(fn [i]
(let [start-index (+ n (* 2 i))]
#{(list start-index start-index)
(list start-index (inc start-index))
(list (inc start-index) (inc start-index))}))
(range k)))))
(defn set-and-function-system
[sets functions]
(letfn [(get-component-set [n]
(if (< n (count sets))
(nth sets n)
(let [adjusted-index (- n (count sets))]
(if (even? adjusted-index)
(inputs (nth functions (/ adjusted-index 2)))
(outputs (nth functions (/ (dec adjusted-index) 2)))))))
(get-component-function [[i j]]
(if (= i j)
(identity-function (get-component-set i))
(let [adjusted-index (/ (- i (count sets)) 2)]
(nth functions adjusted-index))))]
(->Dependency
(height-two-multichain-order (count sets) (count functions))
get-component-set
get-component-function)))
; Create dependency by morphism
(defn create-dependency-by-morphism
[^Dependency source, ^Dependency target, ^clojure.lang.IFn func]
(->Dependency
(product
(relational-preposet '#{(0 0) (1 1) (0 1)})
(.-order source))
(fn [[i v]]
(case i
0 (object-apply source v)
1 (object-apply target v)))
(fn [[[a b] [c d]]]
(let [first-arrow [a c]
second-arrow [b d]]
(case first-arrow
[0 0] (morphism-apply source second-arrow)
[1 1] (morphism-apply target second-arrow)
[0 1] (compose (morphism-apply target second-arrow) (func b)))))))
; Create a dependency functor from a relational copresheaf
; The underlying preorder of the ordering relation provided must be a preorder
on the range of the first n elements , which is used as the indices to the
; tuples of the relation.
(defn relational-functional-dependencies
[order rel]
(->Dependency
order
(fn [i]
(set
(map
(fn [tuple]
(nth tuple i))
rel)))
(fn [[i j]]
(relation-transition-map rel i j))))
; We need some way of dealing with functional dependencies
; This is only rudimentary at this stage of support and so it
; is due for a major refactoring involving the core system.
; and our basic notion of querying.
(defn induced-map
[rel source target]
(let [rval (apply
merge
(map
(fn [i]
{(restrict-list i source) (restrict-list i target)})
rel))]
(if (nil? rval)
{}
rval)))
(defn induced-fn
[rel source target]
(fn [source-elements]
(first
(for [i rel
:when (= (restrict-list i source) source-elements)]
(restrict-list i target)))))
(defn induced-function
[rel source target]
(SetFunction.
(project-relation rel source)
(project-relation rel target)
(induced-fn rel source target)))
(defn setwise-relational-functional-dependencies
[dep rel]
(Dependency.
dep
(fn [nums]
(project-relation rel nums))
(fn [[source target]]
(induced-function rel source target))))
; Every single preorder P is naturally associated to a trivial copresheaf in Sets^P
; which takes each object and associates it to a singleton set, and then each
; map of singletons is the unique trivial function between them.
(defn trivial-dependency
[preorder]
(Dependency.
preorder
(fn [obj]
#{obj})
(fn [[a b]]
(pair-function a b))))
; The copresheaf that takes a family of sets and that maps each ordered pair in its
; partial order expressed as a thin category to an inclusion function.
(defn inclusion-dependency
[family]
(Dependency.
(->Poset
family
(family-inclusion-ordering family))
identity
(fn [[a b]]
(inclusion-function a b))))
; Get the information necessary to turn a dependency copresheaf into a visualisation
(defn object-indexed-family
[copresheaf]
(let [cat (.-order copresheaf)]
(into
{}
(map
(fn [object]
[object (object-apply copresheaf object)])
(objects cat)))))
(defn generating-concrete-morphism-triples
[copresheaf]
(let [cat (.-order copresheaf)]
(map
(fn [[a b]]
(list a b (morphism-apply copresheaf [a b])))
(covering-relation (underlying-relation cat)))))
Visualisation of dependency copresheaves
(defmethod visualize Dependency
[^Dependency dependency]
(let [[p v] (generate-copresheaf-data
(object-indexed-family dependency)
(generating-concrete-morphism-triples dependency))]
(visualize-clustered-digraph* "BT" p v)))
| null | https://raw.githubusercontent.com/locusmath/locus/2eb08b9fc0aaa76b181ec625a7bc898f31e436e8/src/clojure/locus/set/copresheaf/dependency/core/object.clj | clojure | These are generalisations of functional dependencies of relations.
The topos of lower bijective triangles consists of presheaves over the total
The topos of upper bijective triangles consists of presheaves over the total
The topos of trijections is a natural generalisation of the topos of bijections to
its algebraic properties coincide with the topos of sets.
T_2 + 1. So it is another example of a presheaf topos of a preorder.
K_2 + 1. So it is another example of a presheaf topos of a preorder.
The topos of triples of sets Sets^3 is the next level generalisation of the topos of pairs
shares that property with the topos Sets^2 but unlike Sets it is not bivalent.
Multi incidence relations defined by indexed membership relations.
Create a multijection copresheaf over a complete thin groupoid
Generalized tuples of sets and functions
Create dependency by morphism
Create a dependency functor from a relational copresheaf
The underlying preorder of the ordering relation provided must be a preorder
tuples of the relation.
We need some way of dealing with functional dependencies
This is only rudimentary at this stage of support and so it
is due for a major refactoring involving the core system.
and our basic notion of querying.
Every single preorder P is naturally associated to a trivial copresheaf in Sets^P
which takes each object and associates it to a singleton set, and then each
map of singletons is the unique trivial function between them.
The copresheaf that takes a family of sets and that maps each ordered pair in its
partial order expressed as a thin category to an inclusion function.
Get the information necessary to turn a dependency copresheaf into a visualisation | (ns locus.set.copresheaf.dependency.core.object
(:require [locus.set.logic.core.set :refer :all]
[locus.set.logic.limit.product :refer :all]
[locus.set.logic.sequence.object :refer :all]
[locus.con.core.setpart :refer :all]
[locus.set.logic.structure.protocols :refer :all]
[locus.set.mapping.general.core.object :refer :all]
[locus.set.mapping.general.core.util :refer :all]
[locus.order.general.core.object :refer :all]
[locus.order.general.core.util :refer :all]
[locus.order.general.skeletal.object :refer :all]
[locus.order.general.discrete.object :refer :all]
[locus.set.quiver.diset.core.object :refer :all]
[locus.set.quiver.relation.binary.product :refer :all]
[locus.set.quiver.relation.binary.br :refer :all]
[locus.set.quiver.relation.binary.sr :refer :all]
[locus.set.copresheaf.bijection.core.object :refer :all]
[locus.set.copresheaf.bijection.core.morphism :refer :all]
[locus.set.copresheaf.structure.core.protocols :refer :all]
[locus.set.quiver.structure.core.protocols :refer :all]
[locus.set.tree.triangle.core.object :refer :all]
[locus.set.copresheaf.incidence.core.object :refer :all]
[locus.set.tree.cospan.core.object :refer :all]
[locus.set.quiver.diset.core.morphism :refer :all]
[locus.set.copresheaf.dependency.dibijection.object :refer :all]
[locus.set.quiver.unary.core.morphism :refer :all]
[locus.set.copresheaf.dependency.nset.object :refer :all]
[locus.set.copresheaf.dependency.nfunction.object :refer :all]
[locus.set.copresheaf.dependency.nbijection.object :refer :all]
[locus.set.tree.chain.core.object :refer :all]
[locus.set.tree.triangle.core.morphism :refer :all]
[locus.set.copresheaf.incidence.core.morphism :refer :all]
[locus.set.tree.cospan.core.morphism :refer :all]
[locus.set.copresheaf.cube.core.object :refer :all]
[locus.set.tree.chain.core.morphism :refer :all]
[locus.set.copresheaf.indexed.family.object :refer :all]
[locus.set.tree.multicospan.core.object :refer :all]
[locus.set.copresheaf.multispan.core.object :refer :all])
(:import (locus.set.copresheaf.bijection.core.object Bijection)
(locus.set.quiver.diset.core.morphism Difunction)
(locus.set.tree.triangle.core.object SetTriangle)
(locus.set.copresheaf.incidence.core.object Span)
(locus.set.tree.cospan.core.object Cospan)
(locus.set.copresheaf.dependency.dibijection.object Dibijection)
(locus.set.quiver.unary.core.morphism Diamond)
(locus.set.copresheaf.bijection.core.morphism Gem)
(locus.set.copresheaf.dependency.nset.object NSet)
(locus.set.copresheaf.dependency.nfunction.object NFunction)
(locus.set.copresheaf.dependency.nbijection.object NBijection)
(locus.set.tree.chain.core.object SetChain)
(locus.set.tree.triangle.core.morphism TriangleMorphism)
(locus.set.copresheaf.incidence.core.morphism MorphismOfSpans)
(locus.set.tree.cospan.core.morphism MorphismOfCospans)
(locus.set.copresheaf.cube.core.object Cube)
(locus.set.tree.chain.core.morphism ChainMorphism)
(locus.set.mapping.general.core.object SetFunction)
(locus.set.copresheaf.indexed.family.object IndexedFamily)
(locus.order.general.discrete.object DiscretePoset)
(locus.set.tree.multicospan.core.object Multicospan)
(locus.set.copresheaf.multispan.core.object Multispan)))
Copresheaves over preorders Sets^P
(deftype Dependency [order object-function morphism-function]
StructuredDifunction
(first-function [this]
morphism-function)
(second-function [this]
object-function))
Get the sets and functions associated with dependency copresheaves
(defmethod get-set Dependency
[^Dependency dependency, x]
(object-apply dependency x))
(defmethod get-function Dependency
[^Dependency dependency, x]
(morphism-apply dependency x))
Index preorders for copresheaves over preorders
(defmethod index :default
[obj] nil)
(defmethod index :locus.set.logic.core.set/universal
[coll]
(relational-preposet
(weak-order [#{0}])))
(defmethod index :locus.set.logic.structure.protocols/set-function
[func]
(relational-poset
(weak-order [#{0} #{1}])))
(defmethod index :locus.set.copresheaf.structure.core.protocols/diset
[diset]
(relational-preposet
(weak-order [#{0 1}])))
(defmethod index :locus.set.copresheaf.structure.core.protocols/bijection
[bijection]
(relational-preposet
(total-preorder [#{0 1}])))
(defmethod index SetTriangle
[triangle]
(relational-poset
(total-order 0 1 2)))
(defmethod index Span
[span]
(relational-poset
(weak-order [#{0} #{1 2}])))
(defmethod index Multispan
[multispan]
(let [n (multispan-type multispan)]
(relational-poset
(if (zero? n)
(weak-order [#{0}])
(weak-order
[#{0}
(set (range 1 (inc n)))])))))
(defmethod index Cospan
[cospan]
(relational-poset
(weak-order
['#{(0) (1)}
'#{()}])))
(defmethod index Multicospan
[multicospan]
(let [n (multicospan-type multicospan)]
(relational-poset
(if (zero? n)
(weak-order [#{'()}])
(weak-order
[(set
(map
(fn [i]
(list i))
(range n)))
'#{()}])))))
(defmethod index Difunction
[difunction]
(product
(to-poset (total-order 0 1))
(to-poset (weak-order [#{0 1}]))))
(defmethod index Dibijection
[dibijection]
(product
(to-preposet (total-preorder [#{0 1}]))
(to-preposet (weak-order [#{0 1}]))))
(defmethod index Diamond
[diamond]
(product
(to-poset (total-order 0 1))
(to-poset (total-order 0 1))))
(defmethod index Gem
[gem]
(product
(to-poset (total-order 0 1))
(to-preposet (total-preorder [#{0 1}]))))
(defmethod index MorphismOfCospans
[morphism]
(product
(to-poset (total-order 0 1))
(to-poset (weak-order [#{0 1} #{2}]))))
(defmethod index Cube
[cube]
(product
(to-poset (total-order 0 1))
(product
(to-poset (total-order 0 1))
(to-poset (total-order 0 1)))))
(defmethod index MorphismOfSpans
[morphism]
(product
(to-poset (total-order 0 1))
(to-poset (weak-order [#{0} #{1 2}]))))
(defmethod index TriangleMorphism
[morphism]
(product
(to-poset (total-order 0 1))
(to-poset (total-order 0 1 2))))
(defmethod index NSet
[nset]
(nth-antichain (nset-type nset)))
(defmethod index NFunction
[nfunction]
(product
(to-poset (total-order 0 1))
(nth-antichain (nfunction-type nfunction))))
(defmethod index NBijection
[nbijection]
(product
(to-preposet (total-preorder [#{0 1}]))
(nth-antichain (nbijection-type nbijection))))
(defmethod index SetChain
[^SetChain chain]
(nth-chain (inc (count (composition-sequence chain)))))
(defmethod index ChainMorphism
[morphism]
(product
(to-poset (total-order 0 1))
(index (source-object morphism))))
(defmethod index Dependency
[^Dependency dependency] (.-order dependency))
(defmethod index IndexedFamily
[^IndexedFamily family]
(DiscretePoset. (index-set family)))
Convert presheaves over preorders into a common format
(defmulti to-dependency type)
(defmethod to-dependency Dependency
[dependency] dependency)
(defmethod to-dependency :default
[dependency]
(let [dep (index dependency)]
(if (nil? dep)
(throw (new IllegalArgumentException))
(->Dependency dep (partial get-set dependency) (partial get-function dependency)))))
preorder [ 2,1 ] . Its elements are like special cases of triangle copresheaves over
the total order [ 1,1,1 ] except their lower half is invertible .
(def two-one-preorder
(relational-preposet (total-preorder [#{0 1} #{2}])))
(defn lower-bijective-triangle
[func bijection]
(let [s0 (inputs bijection)
s1 (outputs bijection)
s2 (outputs func)]
(->Dependency
two-one-preorder
(fn [obj]
(case obj
0 s0
1 s1
2 s2))
(fn [a b]
(case [a b]
[0 0] (identity-function s0)
[1 1] (identity-function s1)
[2 2] (identity-function s2)
[0 1] (underlying-function bijection)
[1 0] (underlying-function (inv bijection))
[0 2] (compose func (underlying-function bijection))
[1 2] func)))))
(defn relational-lower-bijective-triangle
[rel]
(lower-bijective-triangle
(relation-transition-map rel 1 2)
(make-bijection-by-function-pair
(relation-transition-map rel 0 1)
(relation-transition-map rel 1 0))))
preorder [ 1,2 ] . Its elements are like triangle copresheaves over the total order
[ 1,1,1 ] except their upper half is invertible .
(def one-two-preorder
(relational-preposet (total-preorder [#{0} #{1 2}])))
(defn upper-bijective-triangle
[bijection func]
(let [s0 (inputs func)
s1 (outputs func)
s2 (outputs bijection)]
(->Dependency
two-one-preorder
(fn [obj]
(case obj
0 s0
1 s1
2 s2))
(fn [[a b]]
(case [a b]
[0 0] (identity-function s0)
[1 1] (identity-function s1)
[2 2] (identity-function s2)
[1 2] (underlying-function bijection)
[2 1] (underlying-function (inv bijection))
[0 1] func
[0 2] (compose (underlying-function bijection) func))))))
(defn relational-upper-bijective-triangle
[rel]
(upper-bijective-triangle
(make-bijection-by-function-pair
(relation-transition-map rel 1 2)
(relation-transition-map rel 2 1))
(relation-transition-map rel 0 1)))
a morphism on three objects . Like the topos of bijections , it is boolean and all of
(def k3-preorder
(relational-preposet
(total-preorder [#{0 1 2}])))
(defn trijection
[f g]
(let [s0 (inputs g)
s1 (outputs g)
s2 (outputs f)]
(->Dependency
k3-preorder
(fn [obj]
(case obj
0 s0
1 s1
2 s2))
(fn [[a b]]
(case [a b]
[0 0] (identity-function s0)
[0 1] (underlying-function g)
[0 2] (underlying-function (compose f g))
[1 0] (underlying-function (inv g))
[1 1] (identity-function s1)
[1 2] (underlying-function f)
[2 0] (underlying-function (inv (compose f g)))
[2 1] (underlying-function (inv f))
[2 2] (identity-function s2))))))
(defn relational-trijection
[rel]
(trijection
(make-bijection-by-function-pair
(relation-transition-map rel 1 2)
(relation-transition-map rel 2 1))
(make-bijection-by-function-pair
(relation-transition-map rel 0 1)
(relation-transition-map rel 1 0))))
The topos of set function pairs is the topos of copresheaves over the index category
(def t2-plus-one-order
(relational-poset '#{(0 0) (1 1) (2 2) (1 2)}))
(defn set-function-pair
[coll func]
(let [s0 coll
s1 (inputs func)
s2 (outputs func)]
(Dependency.
t2-plus-one-order
(fn [obj]
(case obj
0 s0
1 s1
2 s2))
(fn [[a b]]
(case [a b]
[0 0] (identity-function s0)
[1 1] (identity-function s1)
[2 2] (identity-function s2)
[1 2] func)))))
The topos of set and bijection pairs is the topos of copresheaves over the index category
(def k2-plus-one-preorder
(relational-preposet '#{(0 0) (1 1) (2 2) (1 2) (2 1)}))
(defn set-bijection-pair
[coll bijection]
(let [s0 coll
s1 (inputs bijection)
s2 (outputs bijection)]
(Dependency.
k2-plus-one-preorder
(fn [obj]
(case obj
0 s0
1 s1
2 s2))
(fn [[a b]]
(case [a b]
[0 0] (identity-function s0)
[1 1] (identity-function s1)
[2 2] (identity-function s2)
[1 2] (underlying-function bijection)
[2 1] (underlying-function (inv bijection)))))))
of sets Sets^2 to three objects instead of two . It is again a boolean topos , as it
(def e3-order
(relational-poset (coreflexive-relation #{0 1 2})))
(defn triset
[a b c]
(->Dependency
e3-order
(fn [n]
(case n
0 a
1 b
2 c))
(fn [[i j]]
(case [i j]
[0 0] (identity-function a)
[1 1] (identity-function b)
[2 2] (identity-function c)))))
Copresheaves for generalized incidence structures
(defn nth-span-order
[n]
(relational-poset
(weak-order
[#{0} (set (range 1 (inc n)))])))
(defn nspan
[& funcs]
(let [n (count funcs)]
(letfn [(nth-set [i]
(if (zero? i)
(inputs (first funcs))
(outputs (nth funcs (dec i)))))]
(Dependency.
(nth-span-order n)
nth-set
(fn [[i j]]
(if (= i j)
(identity-function (nth-set i))
(nth funcs (dec j))))))))
Copresheaves for generalized cospans
(defn nth-cospan-order
[n]
(relational-poset
(weak-order
[(set (range 1 (inc n))) #{0}])))
(defn ncospan
[& funcs]
(let [n (count funcs)]
(letfn [(nth-set [i]
(if (zero? i)
(outputs (first funcs))
(inputs (nth funcs (dec i)))))]
(Dependency.
(nth-cospan-order n)
nth-set
(fn [[i j]]
(if (= i j)
(identity-function (nth-set i))
(nth funcs (dec i))))))))
(defn multi-incidence-order
[n]
(relational-poset
(weak-order
[(set (range n))
(set (range n (+ n 2)))])))
(def two-two-order
(multi-incidence-order 2))
(defn nth-member-flags
[vertices edges n]
(seqable-binary-relation
edges
vertices
(fn [[edge vertex]]
(contains? (nth edge n) vertex))))
(defn multi-incidence-structure
([vertices edges]
(multi-incidence-structure vertices edges (count (first edges))))
([vertices edges arity]
(let [edge-index (+ arity 1)
vertex-index (+ arity 2)]
(letfn [(get-component-set [n]
(cond
(< n arity) (nth-member-flags vertices edges n)
(= n edge-index) edges
(= n vertex-index) vertices))
(get-component-function [[i j]]
(cond
(= i j) (identity-function (get-component-set i))
(= j edge-index) (->SetFunction
(nth-member-flags vertices edges i)
edges
first)
(= j vertex-index) (->SetFunction
(nth-member-flags vertices edges i)
vertices
second)))]
(->Dependency
(multi-incidence-order arity)
get-component-set
get-component-function)))))
(defn crown
[vertices edges] (multi-incidence-structure vertices edges 2))
Presheaves over the weak order [ 1,1,2 ]
(def lower-common-tree
(relational-poset
(weak-order [#{0} #{1} #{2 3}])))
(defn combine-prefunction-equal-triangles
[triangle1 triangle2]
(letfn [(get-component-set [n]
(case n
0 (triangle-source triangle1)
1 (triangle-middle triangle1)
2 (triangle-target triangle1)
3 (triangle-target triangle2)))
(get-component-function [[i j]]
(if (= i j)
(identity-function (get-component-set i))
(case [i j]
[0 1] (prefunction triangle1)
[0 2] (compfunction triangle1)
[0 3] (compfunction triangle2)
[1 2] (postfunction triangle1)
[1 3] (postfunction triangle2))))]
(->Dependency
lower-common-tree
get-component-set
get-component-function)))
Presheaves over the weak order [ 2 , 1 , 1 ]
(def upper-common-tree
(relational-poset
(weak-order [#{0 1} #{2} #{3}])))
(defn combine-postfunction-equal-triangles
[triangle1 triangle2]
(letfn [(get-component-set [n]
(case n
0 (triangle-source triangle1)
1 (triangle-source triangle2)
2 (triangle-middle triangle1)
3 (triangle-target triangle1)))
(get-component-function [[i j]]
(if (= i j)
(identity-function (get-component-set i))
(case [i j]
[0 2] (prefunction triangle1)
[0 3] (compfunction triangle1)
[1 2] (prefunction triangle2)
[1 3] (compfunction triangle2)
[2 3] (postfunction triangle1))))]
(->Dependency
upper-common-tree
get-component-set
get-component-function)))
Presheaves over the disjoint union of total orders T_3 + T_3
(def ditriangle-order
(relational-poset
'#{(0 0) (1 1) (2 2) (0 1) (0 2) (1 2) (3 3) (4 4) (5 5) (3 5) (3 4) (4 5)}))
(defn ditriangle
[triangle1 triangle2]
(Dependency.
ditriangle-order
(fn [obj]
(case obj
0 (triangle-source triangle1)
1 (triangle-middle triangle1)
2 (triangle-target triangle1)
3 (triangle-source triangle2)
4 (triangle-middle triangle2)
5 (triangle-target triangle2)))
(fn [[a b]]
(case [a b]
[0 0] (identity-function (triangle-source triangle1))
[1 1] (identity-function (triangle-middle triangle1))
[2 2] (identity-function (triangle-target triangle1))
[3 3] (identity-function (triangle-source triangle2))
[4 4] (identity-function (triangle-middle triangle2))
[5 5] (identity-function (triangle-target triangle2))
[0 1] (prefunction triangle1)
[0 2] (compfunction triangle1)
[1 2] (postfunction triangle1)
[3 4] (prefunction triangle2)
[3 5] (compfunction triangle2)
[4 5] (postfunction triangle2)))))
(defn combine-composable-difunctions
[f g]
(ditriangle
(SetTriangle. (first-function f) (first-function g))
(SetTriangle. (second-function f) (second-function g))))
(defn multijection
[& args]
(let [bijections (reverse args)
n (count bijections)]
(letfn [(nth-set [i]
(if (zero? i)
(inputs (first bijections))
(outputs (nth bijections (dec i)))))
(nth-function [[i j]]
(cond
(= i j) (identity-function (nth-set i))
(< i j) (apply
compose
(map
(fn [k]
(underlying-function (nth bijections k)))
(range i j)))
(< j i) (apply
compose
(map
(fn [k]
(underlying-function (inv (nth bijections k))))
(reverse (range j i))))))]
(->Dependency
(nth-complete-preorder (inc n))
nth-set
nth-function))))
(defn height-two-multichain-order
[n k]
(union
(weak-order [(set (range n))])
(set
(mapcat
(fn [i]
(let [start-index (+ n (* 2 i))]
#{(list start-index start-index)
(list start-index (inc start-index))
(list (inc start-index) (inc start-index))}))
(range k)))))
(defn set-and-function-system
[sets functions]
(letfn [(get-component-set [n]
(if (< n (count sets))
(nth sets n)
(let [adjusted-index (- n (count sets))]
(if (even? adjusted-index)
(inputs (nth functions (/ adjusted-index 2)))
(outputs (nth functions (/ (dec adjusted-index) 2)))))))
(get-component-function [[i j]]
(if (= i j)
(identity-function (get-component-set i))
(let [adjusted-index (/ (- i (count sets)) 2)]
(nth functions adjusted-index))))]
(->Dependency
(height-two-multichain-order (count sets) (count functions))
get-component-set
get-component-function)))
(defn create-dependency-by-morphism
[^Dependency source, ^Dependency target, ^clojure.lang.IFn func]
(->Dependency
(product
(relational-preposet '#{(0 0) (1 1) (0 1)})
(.-order source))
(fn [[i v]]
(case i
0 (object-apply source v)
1 (object-apply target v)))
(fn [[[a b] [c d]]]
(let [first-arrow [a c]
second-arrow [b d]]
(case first-arrow
[0 0] (morphism-apply source second-arrow)
[1 1] (morphism-apply target second-arrow)
[0 1] (compose (morphism-apply target second-arrow) (func b)))))))
on the range of the first n elements , which is used as the indices to the
(defn relational-functional-dependencies
[order rel]
(->Dependency
order
(fn [i]
(set
(map
(fn [tuple]
(nth tuple i))
rel)))
(fn [[i j]]
(relation-transition-map rel i j))))
(defn induced-map
[rel source target]
(let [rval (apply
merge
(map
(fn [i]
{(restrict-list i source) (restrict-list i target)})
rel))]
(if (nil? rval)
{}
rval)))
(defn induced-fn
[rel source target]
(fn [source-elements]
(first
(for [i rel
:when (= (restrict-list i source) source-elements)]
(restrict-list i target)))))
(defn induced-function
[rel source target]
(SetFunction.
(project-relation rel source)
(project-relation rel target)
(induced-fn rel source target)))
(defn setwise-relational-functional-dependencies
[dep rel]
(Dependency.
dep
(fn [nums]
(project-relation rel nums))
(fn [[source target]]
(induced-function rel source target))))
(defn trivial-dependency
[preorder]
(Dependency.
preorder
(fn [obj]
#{obj})
(fn [[a b]]
(pair-function a b))))
(defn inclusion-dependency
[family]
(Dependency.
(->Poset
family
(family-inclusion-ordering family))
identity
(fn [[a b]]
(inclusion-function a b))))
(defn object-indexed-family
[copresheaf]
(let [cat (.-order copresheaf)]
(into
{}
(map
(fn [object]
[object (object-apply copresheaf object)])
(objects cat)))))
(defn generating-concrete-morphism-triples
[copresheaf]
(let [cat (.-order copresheaf)]
(map
(fn [[a b]]
(list a b (morphism-apply copresheaf [a b])))
(covering-relation (underlying-relation cat)))))
Visualisation of dependency copresheaves
(defmethod visualize Dependency
[^Dependency dependency]
(let [[p v] (generate-copresheaf-data
(object-indexed-family dependency)
(generating-concrete-morphism-triples dependency))]
(visualize-clustered-digraph* "BT" p v)))
|
587261f7e7d747289a8520c492c4bdf5aa7975ab9f44c9f3376494428da07d37 | argp/bap | batLazyList.ml |
* LazyListLabels - lazily - computed lists
* Copyright ( C ) 2008
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version ,
* with the special exception on linking described in file LICENSE .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* LazyListLabels - lazily-computed lists
* Copyright (C) 2008 David Teller
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
* { 6 Exceptions }
exception No_more_elements
exception Empty_list
exception Invalid_index of int
exception Different_list_size of string
* { 6 Types }
type 'a node_t =
| Nil
| Cons of 'a * 'a t
and 'a t =
('a node_t) Lazy.t
type 'a enumerable = 'a t
type 'a mappable = 'a t
* { 6 Access }
let nil = Lazy.lazy_from_val Nil
let next l = Lazy.force l
let cons h t = Lazy.lazy_from_val (Cons(h, t))
let ( ^:^ ) = cons
let get l = match next l with
| Nil -> None
| Cons (x, rest) -> Some (x, rest)
let peek l = match next l with
| Nil -> None
| Cons (x, _) -> Some x
*
{ 6 Constructors }
{6 Constructors}
*)
let from_while f =
let rec aux () = lazy (
match f () with
| None -> Nil
| Some x -> Cons (x, aux ()) ) in aux ()
let from f =
let f' () =
try Some (f ())
with No_more_elements -> None
in from_while f'
let seq data next cond =
let rec aux data =
if cond data then Cons (data, lazy (aux (next data)))
else Nil
in lazy (aux data)
let unfold (data:'b) (next: 'b -> ('a * 'b) option) =
let rec aux data = match next data with
| Some(a,b) -> Cons(a, lazy (aux b))
| None -> Nil
in lazy (aux data)
let from_loop (data:'b) (next:'b -> ('a * 'b)) : 'a t=
let f' data =
try Some (next data)
with No_more_elements -> None
in unfold data f'
let init n f =
let rec aux i =
if i < n then lazy (Cons (f i, aux ( i + 1 ) ) )
else nil
in if n < 0 then raise (Invalid_argument "LazyList.init")
else aux 0
let make n x =
let rec aux i =
if i < n then lazy (Cons (x, aux ( i + 1 ) ) )
else nil
in if n < 0 then raise (Invalid_argument "LazyList.make")
else aux 0
*
{ 6 Iterators }
{6 Iterators}
*)
let iter f l =
let rec aux l = match next l with
| Cons (x, t) -> (f x; aux t)
| Nil -> ()
in aux l
let iteri f l =
let rec aux i l = match next l with
| Cons (x, t) -> (f i x; aux (i + 1) t)
| Nil -> ()
in aux 0 l
let map f l =
let rec aux rest = match next rest with
| Cons (x, (t : 'a t)) -> Cons (f x, lazy (aux t))
| Nil -> Nil
in lazy (aux l)
let mapi f l =
let rec aux rest i =
match next rest with
| Cons (x, (t : 'a t)) -> Cons (f i x, lazy (aux t ( i + 1 ) ))
| Nil -> Nil
in lazy (aux l 0)
let fold_left f init l =
let rec aux acc rest =
match next rest with
| Cons (x, t) -> aux (f acc x) t
| Nil -> acc
in aux init l
let fold_right f init l =
let rec aux rest = match next rest with
| Cons (x, t) -> f x (aux t)
| Nil -> init
in aux l
let lazy_fold_right f l init =
let rec aux rest = lazy begin
match next rest with
| Cons (x, t) -> f x (aux t)
| Nil -> Lazy.force init
end in
aux l
(** {6 Finding}*)
let may_find p l =
let rec aux l =
match next l with
| Nil -> None
| Cons (x, t) -> if p x then Some x else aux t
in aux l
let may_rfind p l =
let rec aux l acc =
match next l with
| Nil -> acc
| Cons (x, t) -> aux t (if p x then Some x else acc)
in aux l None
let may_findi p l =
let rec aux l i =
match next l with
| Nil -> None
| Cons (x, _) when p i x -> Some (i, x)
| Cons (_, t) -> aux t (i+1)
in aux l 0
let may_rfindi p l =
let rec aux l acc i =
match next l with
| Nil -> acc
| Cons (x, t) -> aux t (if p i x then Some (i, x) else acc) (i+1)
in aux l None 0
let find_exn p e l =
BatOption.get_exn (may_find p l) e
let rfind_exn p e l =
BatOption.get_exn (may_rfind p l) e
let find p l = find_exn p Not_found l
let rfind p l = rfind_exn p Not_found l
let findi p l =
BatOption.get_exn (may_findi p l) Not_found
let rfindi p l =
BatOption.get_exn (may_rfindi p l) Not_found
let index_of e l =
match may_findi (fun _ x -> e = x) l with
| None -> None
| Some (i, _) -> Some i
let rindex_of e l =
match may_rfindi (fun _ x -> e = x) l with
| None -> None
| Some (i, _) -> Some i
let index_ofq e l =
match may_findi (fun _ x -> e == x) l with
| None -> None
| Some (i, _) -> Some i
let rindex_ofq e l =
match may_rfindi (fun _ x -> e == x) l with
| None -> None
| Some (i, _) -> Some i
* { 6 Common functions }
let length l = fold_left (fun n _ -> n + 1) 0 l
let is_empty l = match next l with
| Nil -> true
| Cons _ -> false
let would_at_fail n =
let rec aux l i = match next l with
| Nil -> true
| Cons (_, _) when i = 0 -> false
| Cons (_, t) -> aux t (i - 1)
in aux n
let hd list =
match next list with
| Cons (x, _) -> x
| Nil -> raise Empty_list
let first = hd
let last l =
let rec aux acc l = match next l with
| Nil -> acc
| Cons(x, t) -> aux (Some x) t
in match aux None l with
| None -> raise Empty_list
| Some x -> x
let tl list =
match next list with
| Cons (_, t) -> t
| Nil -> raise Empty_list
let at list n =
let rec aux list i =
match ((next list), i) with
| (Cons (x, _), 0) -> x
| (Cons (_, t), _) -> aux t (i - 1)
| (Nil, _) -> raise (Invalid_index n)
in if n < 0 then raise (Invalid_index n) else aux list n
let nth = at
let rev list = fold_left (fun acc x -> Lazy.lazy_from_val (Cons (x, acc))) nil list
(**Revert a list, convert it to a lazy list.
Used as an optimisation.*)
let rev_of_list (list:'a list) = List.fold_left (fun acc x -> Lazy.lazy_from_val (Cons (x, acc))) nil list
let eager_append (l1 : 'a t) (l2 : 'a t) =
let rec aux list =
match next list with
| Cons (x, t) -> cons x (aux t)
| Nil -> l2
in aux l1
let rev_append (l1 : 'a t) (l2 : 'a t) =
let rec aux list acc =
match next list with
| Cons (x, t) -> aux t (Lazy.lazy_from_val (Cons (x, acc)))
| Nil -> acc
in aux l1 l2
(**Revert a list, convert it to a lazy list and append it.
Used as an optimisation.*)
let rev_append_of_list (l1 : 'a list) (l2 : 'a t) : 'a t =
let rec aux list acc = match list with
| [] -> acc
| h::t -> aux t (cons h acc)
in aux l1 l2
let append (l1 : 'a t) (l2 : 'a t) =
let rec aux list = match next list with
| Cons (x, (t : 'a t)) -> Cons (x, lazy (aux t))
| _ -> Lazy.force l2
in lazy (aux l1)
$ T append
to_list ( append ( of_list [ 1;2 ] ) ( of_list [ 3;4 ] ) ) = [ 1;2;3;4 ]
ignore ( append ( lazy ( failwith " lazy cell " ) ) nil ) ; true
hd ( append ( cons ( ) nil ) ( lazy ( failwith " lazy cell " ) ) ) ; true
to_list (append (of_list [1;2]) (of_list [3;4])) = [1;2;3;4]
ignore (append (lazy (failwith "lazy cell")) nil); true
hd (append (cons () nil) (lazy (failwith "lazy cell"))); true
*)
let ( ^@^ ) = append
let flatten (lol : ('a t) list) =
ListLabels.fold_left ~init: nil ~f: append lol
let concat lol =
lazy_fold_right (fun li rest -> Lazy.force (append li rest)) lol nil
$ T concat
to_list ( concat ( of_list ( List.map of_list [ [ 1;2 ] ; [ 3 ] ; [ 4;5 ] ; [ ] ; [ 6 ] ; [ ] ; [ ] ] ) ) ) = [ 1;2;3;4;5;6 ]
ignore ( concat ( lazy ( Cons ( ( let ( ) = failwith " foo " in nil ) , nil ) ) ) ) ; true
to_list (concat (of_list (List.map of_list [[1;2]; [3]; [4;5]; []; [6]; []; []]))) = [1;2;3;4;5;6]
ignore (concat (lazy (Cons ((let () = failwith "foo" in nil), nil)))); true
*)
*
{ 6 Conversions }
{6 Conversions}
*)
(**
Eager conversion to list.
*)
let to_list l = fold_right (fun x acc -> x :: acc) [] l
(**
Lazy conversion to stream.
*)
let to_stream l =
let rec aux rest =
match next rest with
| Cons (x, t) -> Stream.icons x (Stream.slazy (fun _ -> aux t))
| Nil -> Stream.sempty
in aux l
(**
Eager conversion to array.
*)
let to_array l = Array.of_list (to_list l)
let enum l =
let rec aux l =
let reference = ref l in
BatEnum.make ~next:(fun () -> match next !reference with
| Cons(x,t) -> reference := t; x
| Nil -> raise BatEnum.No_more_elements )
~count:(fun () -> length !reference)
~clone:(fun () -> aux !reference)
in aux l
*
Lazy conversion from lists
Albeit slower than eager conversion , this is the default mechanism for converting from regular
lists to lazy lists . This for two reasons :
* if you 're using lazy lists , total speed probably is n't as much an issue as start - up speed
* this will let you convert regular infinite lists to lazy lists .
Lazy conversion from lists
Albeit slower than eager conversion, this is the default mechanism for converting from regular
lists to lazy lists. This for two reasons :
* if you're using lazy lists, total speed probably isn't as much an issue as start-up speed
* this will let you convert regular infinite lists to lazy lists.
*)
let of_list l =
let rec aux = function
| [] -> nil
| h :: t -> lazy (Cons (h, aux t))
in aux l
(**
Lazy conversion from stream.
*)
let of_stream s =
let rec aux s =
let (__strm : _ Stream.t) = s
in
match Stream.peek __strm with
| Some h -> (Stream.junk __strm; lazy (Cons (h, aux s)))
| None -> nil
in aux s
(**
Eager conversion from lists
*)
let eager_of_list l =
ListLabels.fold_right ~init: nil ~f: (fun x acc -> Lazy.lazy_from_val (Cons (x, acc))) l
(**
Eager conversion from array
*)
let of_array l =
ArrayLabels.fold_right ~init: nil ~f: (fun x acc -> Lazy.lazy_from_val (Cons (x, acc))) l
(**
Lazy conversion from enum
*)
let of_enum e =
let rec aux () =
lazy (match BatEnum.get e with
| Some x -> Cons (x, aux () )
| None -> Nil )
in
aux ()
*
{ 6 Predicates }
{6 Predicates}
*)
let filter f l =
Compute the next accepted predicate without thunkification
| Cons (x, l) when not (f x) -> next_true l
| l -> l
in
let rec aux l = lazy(match next_true l with
| Cons (x, l) -> Cons (x, aux l)
| Nil -> Nil)
in aux l
let filter_map f l =
Compute the next accepted predicate without thunkification
| Cons (x, l) ->
begin
match f x with
| Some v -> Some (v, l)
| None -> next_true l
end
| Nil -> None
in
let rec aux l = lazy(match next_true l with
| Some (x, l) -> Cons (x, aux l)
| None -> Nil)
in aux l
(*let filter f l =
let rec aux rest =
match next rest with
| Cons (x, t) when f x -> Cons (x, lazy (aux t))
| Cons (_, t) -> aux t
| Nil -> Nil
in lazy (aux l)*)
let exists f l =
let rec aux rest = match next rest with
| Cons (x, _) when f x -> true
| Cons (_, t) -> aux t
| Nil -> false
in aux l
$ T exists
exists ( fun x - > x = 3 ) ( append ( of_list [ 0;1;2 ] ) ( map ( fun ( ) - > 3 ) eternity ) )
not ( exists ( fun x - > x < 0 ) ( init 100 ( fun i - > i ) ) )
exists (fun x -> x = 3) (append (of_list [0;1;2]) (map (fun () -> 3) eternity))
not (exists (fun x -> x < 0) (init 100 (fun i -> i)))
*)
let for_all f l =
let rec aux rest = match next rest with
| Cons (x, t) when f x -> aux t
| Cons _ -> false
| Nil -> true
in aux l
$ T for_all
not ( for_all ( fun x - > x < > 3 ) ( append ( of_list [ 0;1;2 ] ) ( map ( fun ( ) - > 3 ) eternity ) ) )
for_all ( fun x - > x > = 0 ) ( init 100 ( fun i - > i ) )
not (for_all (fun x -> x <> 3) (append (of_list [0;1;2]) (map (fun () -> 3) eternity)))
for_all (fun x -> x >= 0) (init 100 (fun i -> i))
*)
let range a b =
let rec increasing lo hi =
if lo > hi then nil else lazy (Cons (lo, increasing (lo + 1) hi))
in
(* and decreasing lo hi = if lo > hi then
nil
else
lazy (Cons hi (decreasing lo (hi - 1)))*)
if b >= a then increasing a b else (*decreasing b a*) nil
let split_at n l =
let rec aux acc l i =
if i = 0 then (rev_of_list acc, l)
else match next l with
| Nil -> raise (Invalid_index n)
| Cons(h, t) -> aux (h::acc) t (i - 1)
in aux [] l n
let split_nth = split_at
let mem e = exists (( = ) e)
let memq e = exists (( == ) e )
let assoc e l = snd (find (fun (a,_) -> a = e) l)
let assq e l = snd (find (fun (a,_) -> a == e) l)
let mem_assoc e l = BatOption.is_some (may_find (fun (a, _) -> a = e) l)
let mem_assq e l = BatOption.is_some (may_find (fun (a, _) -> a == e) l)
(* let rec aux rest = match next rest with
| Cons (h, t) ->
(match f h with
| None -> lazy (aux t)
| Some x -> cons x (lazy (aux t)))
| Nil -> Nil
in lazy (aux l)*)
let unique ?(cmp = compare) l =
let set = ref (BatMap.PMap.create cmp) in
let should_keep x =
if BatMap.PMap.mem x !set then false
else ( set := BatMap.PMap.add x true !set; true )
in
(* use a stateful filter to remove duplicate elements *)
filter should_keep l
let unique_eq ?(eq = (=)) l =
Compute the next accepted predicate without thunkification
| Cons (x, l) when exists (eq x) l -> next_true l
| l -> l
in
let rec aux l = lazy(match next_true l with
| Cons (x, l) -> Cons (x, aux l)
| Nil -> Nil)
in aux l
let remove_if p l =
let rec aux acc l = match next l with
| Nil -> rev_of_list acc
| Cons(h,t) when p h -> rev_append_of_list acc t
| Cons(h,t) -> aux (h::acc) t
in aux [] l
let remove_all_such p l =
filter_map (fun y -> if p y then None else Some y) l
let remove x l =
remove_if ( ( = ) x ) l
let remove_all x l =
remove_all_such ( ( = ) x ) l
(** An infinite list of nothing *)
let rec eternity = lazy (Cons ((), eternity))
let take n l = fst (split_at n l)
let drop n l =
let rec aux l i =
if i = 0 then l
else match next l with
| Nil -> raise (Invalid_index n)
| Cons(_, t) -> aux t (i - 1)
in aux l n
let drop_while p =
let rec aux l = match next l with
| Nil -> nil
| Cons(h,t) when p h -> aux t
| Cons(_,_) -> l
in aux
(* TODO: make lazy *)
let take_while p =
let rec aux acc l = match next l with
| Cons(h,t) when p h -> aux (h::acc) t
| Cons _ | Nil -> rev_of_list acc
in aux []
let sort ?(cmp=Pervasives.compare) l = of_list (List.sort cmp (to_list l))
let stable_sort cmp l = of_list (List.stable_sort cmp (to_list l))
let map2 f l1 l2 =
let rec aux l1 l2 =
match (next l1, next l2) with
| (Cons (h1, t1), Cons(h2, t2)) -> lazy (Cons (f h1 h2, aux t1 t2))
| (Nil, Nil) -> nil
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.map2")
in aux l1 l2
let iter2 f l1 l2 =
let rec aux l1 l2 =
match (next l1, next l2) with
| (Cons (h1, t1), Cons(h2, t2)) -> f h1 h2; aux t1 t2
| (Nil, Nil) -> ()
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.iter2")
in aux l1 l2
let fold_left2 f acc l1 l2 =
let rec aux acc l1 l2 =
match (next l1, next l2) with
| (Cons (h1, t1), Cons(h2, t2)) -> aux (f acc h1 h2) t1 t2
| (Nil, Nil) -> acc
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.fold_left2")
in aux acc l1 l2
let fold_right2 f l1 l2 acc =
let rec aux l1 l2 =
match (next l1, next l2) with
| (Cons (h1, t1), Cons(h2, t2)) -> f h1 h2 (aux t1 t2)
| (Nil, Nil) -> acc
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.fold_right2")
in aux l1 l2
let for_all2 p l1 l2 =
let rec aux l1 l2 =
match (next l1, next l2) with
| (Cons (h1, t1), Cons(h2, t2)) -> p h1 h2 && (aux t1 t2)
| (Nil, Nil) -> true
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.for_all2")
in aux l1 l2
let exists2 p l1 l2 =
let rec aux l1 l2 =
match (next l1, next l2) with
| (Cons (h1, t1), Cons(h2, t2)) -> p h1 h2 || (aux t1 t2)
| (Nil, Nil) -> false
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.exists2")
in aux l1 l2
let combine l1 l2 =
let rec aux l1 l2 = match (next l1, next l2) with
| (Cons(h1, t1), Cons(h2, t2)) -> lazy (Cons ((h1, h2), ( aux t1 t2 )))
| (Nil, Nil ) -> nil
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.combine")
in aux l1 l2
let uncombine l =
let (l1, l2) = BatEnum.uncombine (enum l) in
(of_enum l1, of_enum l2)
let uncombine l =
let rec aux l = match next l with
| Cons ( ( h1 , h2 ) , t ) - > lazy ( let ( t1 , t2 ) = aux t in
Cons ( h1 , t1 ) , Cons(h2 , t2 ) )
| Nil - > lazy ( Nil , Nil )
in aux l
let rec aux l = match next l with
| Cons ((h1, h2), t) -> lazy (let (t1, t2) = aux t in
Cons (h1, t1), Cons(h2, t2))
| Nil -> lazy (Nil, Nil)
in aux l*)
(*let uncombine l =
unfold l (fun l -> match peek l with
| None -> None
| Cons (h1, h2), t*)
let print ?(first="[^") ?(last="^]") ?(sep="; ") print_a out t =
BatEnum.print ~first ~last ~sep print_a out (enum t)
module Infix = struct
let ( ^:^ ), ( ^@^ ) = ( ^:^ ), ( ^@^ )
end
module Exceptionless = struct
(** Exceptionless counterparts for error-raising operations*)
let find = may_find
let rfind = may_rfind
let findi = may_findi
let rfindi = may_rfindi
let at list n =
let rec aux list i =
match (next list, i) with
| (Cons (x, _), 0) -> `Ok x
| (Cons (_, t), _) -> aux t (i - 1)
| (Nil, _) -> `Invalid_index n
in if n < 0 then `Invalid_index n else aux list n
let assoc a (l:'a t) =
try Some (assoc a l)
with Not_found -> None
let assq a l =
try Some (assq a l)
with Not_found -> None
let split_at n l =
try `Ok (split_at n l)
with Not_found -> `Invalid_index n
end
module Labels = struct
let iter ~f x = iter f x
let iter2 ~f x = iter2 f x
let iteri ~f x = iteri f x
let map ~f x = map f x
let map2 ~f x = map2 f x
let mapi ~f x = mapi f x
let filter ~f = filter f
let exists ~f = exists f
let exists2 ~f = exists2 f
let for_all ~f = for_all f
let for_all2 ~f = for_all2 f
let filter_map ~f = filter_map f
let find ~f = find f
let findi ~f = findi f
let rfind ~f = rfind f
let rfindi ~f = rfindi f
let find_exn ~f = find_exn f
let rfind_exn ~f = rfind_exn f
let remove_if ~f = remove_if f
let remove_all_such ~f= remove_all_such f
let take_while ~f= take_while f
let drop_while ~f= drop_while f
let fold_left ~f ~init = fold_left f init
let fold_right ~f ~init = fold_right f init
let fold_left2 ~f ~init = fold_left2 f init
let fold_right2 ~f l1 l2 ~init = fold_right2 f l1 l2 init
module Exceptionless = struct
let find ~f = Exceptionless.find f
let rfind ~f = Exceptionless.rfind f
let findi ~f = Exceptionless.findi f
let rfindi ~f = Exceptionless.rfindi f
let assq = Exceptionless.assq
let assoc = Exceptionless.assoc
let at = Exceptionless.at
let split_at = Exceptionless.split_at
end
end
| null | https://raw.githubusercontent.com/argp/bap/2f60a35e822200a1ec50eea3a947a322b45da363/batteries/src/batLazyList.ml | ocaml | * {6 Finding}
*Revert a list, convert it to a lazy list.
Used as an optimisation.
*Revert a list, convert it to a lazy list and append it.
Used as an optimisation.
*
Eager conversion to list.
*
Lazy conversion to stream.
*
Eager conversion to array.
*
Lazy conversion from stream.
*
Eager conversion from lists
*
Eager conversion from array
*
Lazy conversion from enum
let filter f l =
let rec aux rest =
match next rest with
| Cons (x, t) when f x -> Cons (x, lazy (aux t))
| Cons (_, t) -> aux t
| Nil -> Nil
in lazy (aux l)
and decreasing lo hi = if lo > hi then
nil
else
lazy (Cons hi (decreasing lo (hi - 1)))
decreasing b a
let rec aux rest = match next rest with
| Cons (h, t) ->
(match f h with
| None -> lazy (aux t)
| Some x -> cons x (lazy (aux t)))
| Nil -> Nil
in lazy (aux l)
use a stateful filter to remove duplicate elements
* An infinite list of nothing
TODO: make lazy
let uncombine l =
unfold l (fun l -> match peek l with
| None -> None
| Cons (h1, h2), t
* Exceptionless counterparts for error-raising operations |
* LazyListLabels - lazily - computed lists
* Copyright ( C ) 2008
*
* This library is free software ; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation ; either
* version 2.1 of the License , or ( at your option ) any later version ,
* with the special exception on linking described in file LICENSE .
*
* This library is distributed in the hope that it will be useful ,
* but WITHOUT ANY WARRANTY ; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE . See the GNU
* Lesser General Public License for more details .
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library ; if not , write to the Free Software
* Foundation , Inc. , 59 Temple Place , Suite 330 , Boston , MA 02111 - 1307 USA
* LazyListLabels - lazily-computed lists
* Copyright (C) 2008 David Teller
*
* This library is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 2.1 of the License, or (at your option) any later version,
* with the special exception on linking described in file LICENSE.
*
* This library is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this library; if not, write to the Free Software
* Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
*)
* { 6 Exceptions }
exception No_more_elements
exception Empty_list
exception Invalid_index of int
exception Different_list_size of string
* { 6 Types }
type 'a node_t =
| Nil
| Cons of 'a * 'a t
and 'a t =
('a node_t) Lazy.t
type 'a enumerable = 'a t
type 'a mappable = 'a t
* { 6 Access }
let nil = Lazy.lazy_from_val Nil
let next l = Lazy.force l
let cons h t = Lazy.lazy_from_val (Cons(h, t))
let ( ^:^ ) = cons
let get l = match next l with
| Nil -> None
| Cons (x, rest) -> Some (x, rest)
let peek l = match next l with
| Nil -> None
| Cons (x, _) -> Some x
*
{ 6 Constructors }
{6 Constructors}
*)
let from_while f =
let rec aux () = lazy (
match f () with
| None -> Nil
| Some x -> Cons (x, aux ()) ) in aux ()
let from f =
let f' () =
try Some (f ())
with No_more_elements -> None
in from_while f'
let seq data next cond =
let rec aux data =
if cond data then Cons (data, lazy (aux (next data)))
else Nil
in lazy (aux data)
let unfold (data:'b) (next: 'b -> ('a * 'b) option) =
let rec aux data = match next data with
| Some(a,b) -> Cons(a, lazy (aux b))
| None -> Nil
in lazy (aux data)
let from_loop (data:'b) (next:'b -> ('a * 'b)) : 'a t=
let f' data =
try Some (next data)
with No_more_elements -> None
in unfold data f'
let init n f =
let rec aux i =
if i < n then lazy (Cons (f i, aux ( i + 1 ) ) )
else nil
in if n < 0 then raise (Invalid_argument "LazyList.init")
else aux 0
let make n x =
let rec aux i =
if i < n then lazy (Cons (x, aux ( i + 1 ) ) )
else nil
in if n < 0 then raise (Invalid_argument "LazyList.make")
else aux 0
*
{ 6 Iterators }
{6 Iterators}
*)
let iter f l =
let rec aux l = match next l with
| Cons (x, t) -> (f x; aux t)
| Nil -> ()
in aux l
let iteri f l =
let rec aux i l = match next l with
| Cons (x, t) -> (f i x; aux (i + 1) t)
| Nil -> ()
in aux 0 l
let map f l =
let rec aux rest = match next rest with
| Cons (x, (t : 'a t)) -> Cons (f x, lazy (aux t))
| Nil -> Nil
in lazy (aux l)
let mapi f l =
let rec aux rest i =
match next rest with
| Cons (x, (t : 'a t)) -> Cons (f i x, lazy (aux t ( i + 1 ) ))
| Nil -> Nil
in lazy (aux l 0)
let fold_left f init l =
let rec aux acc rest =
match next rest with
| Cons (x, t) -> aux (f acc x) t
| Nil -> acc
in aux init l
let fold_right f init l =
let rec aux rest = match next rest with
| Cons (x, t) -> f x (aux t)
| Nil -> init
in aux l
let lazy_fold_right f l init =
let rec aux rest = lazy begin
match next rest with
| Cons (x, t) -> f x (aux t)
| Nil -> Lazy.force init
end in
aux l
let may_find p l =
let rec aux l =
match next l with
| Nil -> None
| Cons (x, t) -> if p x then Some x else aux t
in aux l
let may_rfind p l =
let rec aux l acc =
match next l with
| Nil -> acc
| Cons (x, t) -> aux t (if p x then Some x else acc)
in aux l None
let may_findi p l =
let rec aux l i =
match next l with
| Nil -> None
| Cons (x, _) when p i x -> Some (i, x)
| Cons (_, t) -> aux t (i+1)
in aux l 0
let may_rfindi p l =
let rec aux l acc i =
match next l with
| Nil -> acc
| Cons (x, t) -> aux t (if p i x then Some (i, x) else acc) (i+1)
in aux l None 0
let find_exn p e l =
BatOption.get_exn (may_find p l) e
let rfind_exn p e l =
BatOption.get_exn (may_rfind p l) e
let find p l = find_exn p Not_found l
let rfind p l = rfind_exn p Not_found l
let findi p l =
BatOption.get_exn (may_findi p l) Not_found
let rfindi p l =
BatOption.get_exn (may_rfindi p l) Not_found
let index_of e l =
match may_findi (fun _ x -> e = x) l with
| None -> None
| Some (i, _) -> Some i
let rindex_of e l =
match may_rfindi (fun _ x -> e = x) l with
| None -> None
| Some (i, _) -> Some i
let index_ofq e l =
match may_findi (fun _ x -> e == x) l with
| None -> None
| Some (i, _) -> Some i
let rindex_ofq e l =
match may_rfindi (fun _ x -> e == x) l with
| None -> None
| Some (i, _) -> Some i
* { 6 Common functions }
let length l = fold_left (fun n _ -> n + 1) 0 l
let is_empty l = match next l with
| Nil -> true
| Cons _ -> false
let would_at_fail n =
let rec aux l i = match next l with
| Nil -> true
| Cons (_, _) when i = 0 -> false
| Cons (_, t) -> aux t (i - 1)
in aux n
let hd list =
match next list with
| Cons (x, _) -> x
| Nil -> raise Empty_list
let first = hd
let last l =
let rec aux acc l = match next l with
| Nil -> acc
| Cons(x, t) -> aux (Some x) t
in match aux None l with
| None -> raise Empty_list
| Some x -> x
let tl list =
match next list with
| Cons (_, t) -> t
| Nil -> raise Empty_list
let at list n =
let rec aux list i =
match ((next list), i) with
| (Cons (x, _), 0) -> x
| (Cons (_, t), _) -> aux t (i - 1)
| (Nil, _) -> raise (Invalid_index n)
in if n < 0 then raise (Invalid_index n) else aux list n
let nth = at
let rev list = fold_left (fun acc x -> Lazy.lazy_from_val (Cons (x, acc))) nil list
let rev_of_list (list:'a list) = List.fold_left (fun acc x -> Lazy.lazy_from_val (Cons (x, acc))) nil list
let eager_append (l1 : 'a t) (l2 : 'a t) =
let rec aux list =
match next list with
| Cons (x, t) -> cons x (aux t)
| Nil -> l2
in aux l1
let rev_append (l1 : 'a t) (l2 : 'a t) =
let rec aux list acc =
match next list with
| Cons (x, t) -> aux t (Lazy.lazy_from_val (Cons (x, acc)))
| Nil -> acc
in aux l1 l2
let rev_append_of_list (l1 : 'a list) (l2 : 'a t) : 'a t =
let rec aux list acc = match list with
| [] -> acc
| h::t -> aux t (cons h acc)
in aux l1 l2
let append (l1 : 'a t) (l2 : 'a t) =
let rec aux list = match next list with
| Cons (x, (t : 'a t)) -> Cons (x, lazy (aux t))
| _ -> Lazy.force l2
in lazy (aux l1)
$ T append
to_list ( append ( of_list [ 1;2 ] ) ( of_list [ 3;4 ] ) ) = [ 1;2;3;4 ]
ignore ( append ( lazy ( failwith " lazy cell " ) ) nil ) ; true
hd ( append ( cons ( ) nil ) ( lazy ( failwith " lazy cell " ) ) ) ; true
to_list (append (of_list [1;2]) (of_list [3;4])) = [1;2;3;4]
ignore (append (lazy (failwith "lazy cell")) nil); true
hd (append (cons () nil) (lazy (failwith "lazy cell"))); true
*)
let ( ^@^ ) = append
let flatten (lol : ('a t) list) =
ListLabels.fold_left ~init: nil ~f: append lol
let concat lol =
lazy_fold_right (fun li rest -> Lazy.force (append li rest)) lol nil
$ T concat
to_list ( concat ( of_list ( List.map of_list [ [ 1;2 ] ; [ 3 ] ; [ 4;5 ] ; [ ] ; [ 6 ] ; [ ] ; [ ] ] ) ) ) = [ 1;2;3;4;5;6 ]
ignore ( concat ( lazy ( Cons ( ( let ( ) = failwith " foo " in nil ) , nil ) ) ) ) ; true
to_list (concat (of_list (List.map of_list [[1;2]; [3]; [4;5]; []; [6]; []; []]))) = [1;2;3;4;5;6]
ignore (concat (lazy (Cons ((let () = failwith "foo" in nil), nil)))); true
*)
*
{ 6 Conversions }
{6 Conversions}
*)
let to_list l = fold_right (fun x acc -> x :: acc) [] l
let to_stream l =
let rec aux rest =
match next rest with
| Cons (x, t) -> Stream.icons x (Stream.slazy (fun _ -> aux t))
| Nil -> Stream.sempty
in aux l
let to_array l = Array.of_list (to_list l)
let enum l =
let rec aux l =
let reference = ref l in
BatEnum.make ~next:(fun () -> match next !reference with
| Cons(x,t) -> reference := t; x
| Nil -> raise BatEnum.No_more_elements )
~count:(fun () -> length !reference)
~clone:(fun () -> aux !reference)
in aux l
*
Lazy conversion from lists
Albeit slower than eager conversion , this is the default mechanism for converting from regular
lists to lazy lists . This for two reasons :
* if you 're using lazy lists , total speed probably is n't as much an issue as start - up speed
* this will let you convert regular infinite lists to lazy lists .
Lazy conversion from lists
Albeit slower than eager conversion, this is the default mechanism for converting from regular
lists to lazy lists. This for two reasons :
* if you're using lazy lists, total speed probably isn't as much an issue as start-up speed
* this will let you convert regular infinite lists to lazy lists.
*)
let of_list l =
let rec aux = function
| [] -> nil
| h :: t -> lazy (Cons (h, aux t))
in aux l
let of_stream s =
let rec aux s =
let (__strm : _ Stream.t) = s
in
match Stream.peek __strm with
| Some h -> (Stream.junk __strm; lazy (Cons (h, aux s)))
| None -> nil
in aux s
let eager_of_list l =
ListLabels.fold_right ~init: nil ~f: (fun x acc -> Lazy.lazy_from_val (Cons (x, acc))) l
let of_array l =
ArrayLabels.fold_right ~init: nil ~f: (fun x acc -> Lazy.lazy_from_val (Cons (x, acc))) l
let of_enum e =
let rec aux () =
lazy (match BatEnum.get e with
| Some x -> Cons (x, aux () )
| None -> Nil )
in
aux ()
*
{ 6 Predicates }
{6 Predicates}
*)
let filter f l =
Compute the next accepted predicate without thunkification
| Cons (x, l) when not (f x) -> next_true l
| l -> l
in
let rec aux l = lazy(match next_true l with
| Cons (x, l) -> Cons (x, aux l)
| Nil -> Nil)
in aux l
let filter_map f l =
Compute the next accepted predicate without thunkification
| Cons (x, l) ->
begin
match f x with
| Some v -> Some (v, l)
| None -> next_true l
end
| Nil -> None
in
let rec aux l = lazy(match next_true l with
| Some (x, l) -> Cons (x, aux l)
| None -> Nil)
in aux l
let exists f l =
let rec aux rest = match next rest with
| Cons (x, _) when f x -> true
| Cons (_, t) -> aux t
| Nil -> false
in aux l
$ T exists
exists ( fun x - > x = 3 ) ( append ( of_list [ 0;1;2 ] ) ( map ( fun ( ) - > 3 ) eternity ) )
not ( exists ( fun x - > x < 0 ) ( init 100 ( fun i - > i ) ) )
exists (fun x -> x = 3) (append (of_list [0;1;2]) (map (fun () -> 3) eternity))
not (exists (fun x -> x < 0) (init 100 (fun i -> i)))
*)
let for_all f l =
let rec aux rest = match next rest with
| Cons (x, t) when f x -> aux t
| Cons _ -> false
| Nil -> true
in aux l
$ T for_all
not ( for_all ( fun x - > x < > 3 ) ( append ( of_list [ 0;1;2 ] ) ( map ( fun ( ) - > 3 ) eternity ) ) )
for_all ( fun x - > x > = 0 ) ( init 100 ( fun i - > i ) )
not (for_all (fun x -> x <> 3) (append (of_list [0;1;2]) (map (fun () -> 3) eternity)))
for_all (fun x -> x >= 0) (init 100 (fun i -> i))
*)
let range a b =
let rec increasing lo hi =
if lo > hi then nil else lazy (Cons (lo, increasing (lo + 1) hi))
in
let split_at n l =
let rec aux acc l i =
if i = 0 then (rev_of_list acc, l)
else match next l with
| Nil -> raise (Invalid_index n)
| Cons(h, t) -> aux (h::acc) t (i - 1)
in aux [] l n
let split_nth = split_at
let mem e = exists (( = ) e)
let memq e = exists (( == ) e )
let assoc e l = snd (find (fun (a,_) -> a = e) l)
let assq e l = snd (find (fun (a,_) -> a == e) l)
let mem_assoc e l = BatOption.is_some (may_find (fun (a, _) -> a = e) l)
let mem_assq e l = BatOption.is_some (may_find (fun (a, _) -> a == e) l)
let unique ?(cmp = compare) l =
let set = ref (BatMap.PMap.create cmp) in
let should_keep x =
if BatMap.PMap.mem x !set then false
else ( set := BatMap.PMap.add x true !set; true )
in
filter should_keep l
let unique_eq ?(eq = (=)) l =
Compute the next accepted predicate without thunkification
| Cons (x, l) when exists (eq x) l -> next_true l
| l -> l
in
let rec aux l = lazy(match next_true l with
| Cons (x, l) -> Cons (x, aux l)
| Nil -> Nil)
in aux l
let remove_if p l =
let rec aux acc l = match next l with
| Nil -> rev_of_list acc
| Cons(h,t) when p h -> rev_append_of_list acc t
| Cons(h,t) -> aux (h::acc) t
in aux [] l
let remove_all_such p l =
filter_map (fun y -> if p y then None else Some y) l
let remove x l =
remove_if ( ( = ) x ) l
let remove_all x l =
remove_all_such ( ( = ) x ) l
let rec eternity = lazy (Cons ((), eternity))
let take n l = fst (split_at n l)
let drop n l =
let rec aux l i =
if i = 0 then l
else match next l with
| Nil -> raise (Invalid_index n)
| Cons(_, t) -> aux t (i - 1)
in aux l n
let drop_while p =
let rec aux l = match next l with
| Nil -> nil
| Cons(h,t) when p h -> aux t
| Cons(_,_) -> l
in aux
let take_while p =
let rec aux acc l = match next l with
| Cons(h,t) when p h -> aux (h::acc) t
| Cons _ | Nil -> rev_of_list acc
in aux []
let sort ?(cmp=Pervasives.compare) l = of_list (List.sort cmp (to_list l))
let stable_sort cmp l = of_list (List.stable_sort cmp (to_list l))
let map2 f l1 l2 =
let rec aux l1 l2 =
match (next l1, next l2) with
| (Cons (h1, t1), Cons(h2, t2)) -> lazy (Cons (f h1 h2, aux t1 t2))
| (Nil, Nil) -> nil
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.map2")
in aux l1 l2
let iter2 f l1 l2 =
let rec aux l1 l2 =
match (next l1, next l2) with
| (Cons (h1, t1), Cons(h2, t2)) -> f h1 h2; aux t1 t2
| (Nil, Nil) -> ()
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.iter2")
in aux l1 l2
let fold_left2 f acc l1 l2 =
let rec aux acc l1 l2 =
match (next l1, next l2) with
| (Cons (h1, t1), Cons(h2, t2)) -> aux (f acc h1 h2) t1 t2
| (Nil, Nil) -> acc
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.fold_left2")
in aux acc l1 l2
let fold_right2 f l1 l2 acc =
let rec aux l1 l2 =
match (next l1, next l2) with
| (Cons (h1, t1), Cons(h2, t2)) -> f h1 h2 (aux t1 t2)
| (Nil, Nil) -> acc
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.fold_right2")
in aux l1 l2
let for_all2 p l1 l2 =
let rec aux l1 l2 =
match (next l1, next l2) with
| (Cons (h1, t1), Cons(h2, t2)) -> p h1 h2 && (aux t1 t2)
| (Nil, Nil) -> true
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.for_all2")
in aux l1 l2
let exists2 p l1 l2 =
let rec aux l1 l2 =
match (next l1, next l2) with
| (Cons (h1, t1), Cons(h2, t2)) -> p h1 h2 || (aux t1 t2)
| (Nil, Nil) -> false
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.exists2")
in aux l1 l2
let combine l1 l2 =
let rec aux l1 l2 = match (next l1, next l2) with
| (Cons(h1, t1), Cons(h2, t2)) -> lazy (Cons ((h1, h2), ( aux t1 t2 )))
| (Nil, Nil ) -> nil
| (Cons _, Nil) | (Nil, Cons _) -> raise (Different_list_size "LazyList.combine")
in aux l1 l2
let uncombine l =
let (l1, l2) = BatEnum.uncombine (enum l) in
(of_enum l1, of_enum l2)
let uncombine l =
let rec aux l = match next l with
| Cons ( ( h1 , h2 ) , t ) - > lazy ( let ( t1 , t2 ) = aux t in
Cons ( h1 , t1 ) , Cons(h2 , t2 ) )
| Nil - > lazy ( Nil , Nil )
in aux l
let rec aux l = match next l with
| Cons ((h1, h2), t) -> lazy (let (t1, t2) = aux t in
Cons (h1, t1), Cons(h2, t2))
| Nil -> lazy (Nil, Nil)
in aux l*)
(* Pretty-print a lazy list via [BatEnum.print]; [first], [last] and
   [sep] control the opening bracket, closing bracket and separator. *)
let print ?(first="[^") ?(last="^]") ?(sep="; ") elt out xs =
  BatEnum.print ~first ~last ~sep elt out (enum xs)
(* Operator aliases, meant for [open BatLazyList.Infix]. *)
module Infix = struct
  let ( ^:^ ) = ( ^:^ )
  let ( ^@^ ) = ( ^@^ )
end
(* Variants of the lookup operations that encode failure in the result
   type ([option] or a polymorphic variant) instead of raising. *)
module Exceptionless = struct
  let find = may_find
  let rfind = may_rfind
  let findi = may_findi
  let rfindi = may_rfindi

  (* [at list n] is [`Ok x] for the [n]-th element, or
     [`Invalid_index n] when [n] is negative or past the end. *)
  let at list n =
    let rec nth l i =
      match next l, i with
      | Cons (x, _), 0 -> `Ok x
      | Cons (_, rest), _ -> nth rest (i - 1)
      | Nil, _ -> `Invalid_index n
    in
    if n < 0 then `Invalid_index n else nth list n

  let assoc a (l : 'a t) =
    try Some (assoc a l) with Not_found -> None

  let assq a l =
    try Some (assq a l) with Not_found -> None

  let split_at n l =
    try `Ok (split_at n l) with Not_found -> `Invalid_index n
end
(* Labelled versions of the LazyList operations, following the
   [ListLabels] convention of the standard library: the callback is
   passed with the [~f] label, fold seeds with [~init].
   NOTE: the wrappers are deliberately eta-expanded ([~f x = iter f x]);
   the point-free form [let iter ~f = iter f] would be a partial
   application and therefore weakly polymorphic under OCaml's value
   restriction. *)
module Labels = struct
let iter ~f x = iter f x
let iter2 ~f x = iter2 f x
let iteri ~f x = iteri f x
let map ~f x = map f x
let map2 ~f x = map2 f x
let mapi ~f x = mapi f x
let filter ~f = filter f
let exists ~f = exists f
let exists2 ~f = exists2 f
let for_all ~f = for_all f
let for_all2 ~f = for_all2 f
let filter_map ~f = filter_map f
let find ~f = find f
let findi ~f = findi f
let rfind ~f = rfind f
let rfindi ~f = rfindi f
let find_exn ~f = find_exn f
let rfind_exn ~f = rfind_exn f
let remove_if ~f = remove_if f
let remove_all_such ~f= remove_all_such f
let take_while ~f= take_while f
let drop_while ~f= drop_while f
let fold_left ~f ~init = fold_left f init
let fold_right ~f ~init = fold_right f init
let fold_left2 ~f ~init = fold_left2 f init
let fold_right2 ~f l1 l2 ~init = fold_right2 f l1 l2 init
(* Labelled front-end to the non-raising lookups above. *)
module Exceptionless = struct
let find ~f = Exceptionless.find f
let rfind ~f = Exceptionless.rfind f
let findi ~f = Exceptionless.findi f
let rfindi ~f = Exceptionless.rfindi f
let assq = Exceptionless.assq
let assoc = Exceptionless.assoc
let at = Exceptionless.at
let split_at = Exceptionless.split_at
end
end
|
267843f9acc65df5c569370654799c084bf9100ebb2b839fb8017f3f4e3f9807 | artyom-poptsov/guile-png | image-processing.scm | ;;; image-processing.scm -- Image processing procedures.
Copyright ( C ) 2022 < >
;;
;; This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
;; (at your option) any later version.
;;
;; The program is distributed in the hope that it will be useful,
;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;; GNU General Public License for more details.
;;
You should have received a copy of the GNU General Public License
;; along with the program. If not, see </>.
;;; Commentary:
;; This module contains image processing procedures.
;;; Code:
(define-module (png image-processing)
#:use-module (oop goops)
#:use-module (rnrs bytevectors)
#:use-module (png image)
#:use-module (png graphics pixel)
#:export (png-image-filter-invert-colors
png-image-filter-solarize))
(define-method (png-image-filter-invert-colors (image <png-image>))
  "Return a copy of IMAGE with its colors inverted: the first three
bytes of every pixel (presumably red, green and blue -- confirm against
the pixel layout) are each replaced by 255 minus their value.  IMAGE
itself is not modified."
  (let* ((image-clone (png-image-clone image))
         (pixel-count (png-image-pixels image)))
    (let loop ((index 0))
      (if (= index pixel-count)
          image-clone
          (begin
            ;; NOTE(review): the bytevector from png-image-pixel-ref is
            ;; mutated in place before being written into the clone;
            ;; assumes pixel-ref hands back a fresh copy -- confirm.
            (let* ((pixel (png-image-pixel-ref image index))
                   (red (bytevector-u8-ref pixel 0))
                   (green (bytevector-u8-ref pixel 1))
                   (blue (bytevector-u8-ref pixel 2)))
              (bytevector-u8-set! pixel 0 (- 255 red))
              (bytevector-u8-set! pixel 1 (- 255 green))
              (bytevector-u8-set! pixel 2 (- 255 blue))
              (png-image-pixel-set! image-clone index pixel)
              (loop (+ index 1))))))))
(define-method (png-image-filter-solarize (image <png-image>)
                                          (threshold <number>))
  "Return a solarized copy of IMAGE: each of the first three channel
bytes of a pixel whose value is below THRESHOLD is inverted
(v -> 255 - v); values at or above THRESHOLD are kept unchanged.
IMAGE itself is not modified."
  (let ((image-clone (png-image-clone image))
        (pixel-count (png-image-pixels image)))
    (let loop ((index 0))
      (if (= index pixel-count)
          image-clone
          (let* ((pixel (png-image-pixel-ref image index))
                 (red (bytevector-u8-ref pixel 0))
                 (green (bytevector-u8-ref pixel 1))
                 (blue (bytevector-u8-ref pixel 2)))
            ;; Invert only the channels below the threshold.
            (when (< red threshold)
              (bytevector-u8-set! pixel 0 (- 255 red)))
            (when (< green threshold)
              (bytevector-u8-set! pixel 1 (- 255 green)))
            (when (< blue threshold)
              (bytevector-u8-set! pixel 2 (- 255 blue)))
            (png-image-pixel-set! image-clone index pixel)
            (loop (+ index 1)))))))
;;; image-processing.scm ends here.
| null | https://raw.githubusercontent.com/artyom-poptsov/guile-png/03386be98972850be833662a5ce31c3ce581e82a/modules/png/image-processing.scm | scheme | image-processing.scm -- Image processing procedures.
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
The program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with the program. If not, see </>.
Commentary:
This module contains image processing procedures.
Code:
image-processing.scm ends here. |
Copyright ( C ) 2022 < >
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
(define-module (png image-processing)
#:use-module (oop goops)
#:use-module (rnrs bytevectors)
#:use-module (png image)
#:use-module (png graphics pixel)
#:export (png-image-filter-invert-colors
png-image-filter-solarize))
(define-method (png-image-filter-invert-colors (image <png-image>))
  "Return a new image whose color channels are the inverse of IMAGE's."
  (let ((result (png-image-clone image))
        (total  (png-image-pixels image)))
    (let walk ((i 0))
      (if (< i total)
          (let* ((px (png-image-pixel-ref image i))
                 (r  (bytevector-u8-ref px 0))
                 (g  (bytevector-u8-ref px 1))
                 (b  (bytevector-u8-ref px 2)))
            ;; Invert the first three channel bytes, store into the clone.
            (bytevector-u8-set! px 0 (- 255 r))
            (bytevector-u8-set! px 1 (- 255 g))
            (bytevector-u8-set! px 2 (- 255 b))
            (png-image-pixel-set! result i px)
            (walk (+ i 1)))
          result))))
(define-method (png-image-filter-solarize (image <png-image>)
                                          (threshold <number>))
  ;; Solarize: invert only those channel bytes that fall below THRESHOLD.
  (let ((result (png-image-clone image))
        (total  (png-image-pixels image)))
    (let walk ((i 0))
      (if (< i total)
          (let* ((px (png-image-pixel-ref image i))
                 (r  (bytevector-u8-ref px 0))
                 (g  (bytevector-u8-ref px 1))
                 (b  (bytevector-u8-ref px 2)))
            (when (< r threshold)
              (bytevector-u8-set! px 0 (- 255 r)))
            (when (< g threshold)
              (bytevector-u8-set! px 1 (- 255 g)))
            (when (< b threshold)
              (bytevector-u8-set! px 2 (- 255 b)))
            (png-image-pixel-set! result i px)
            (walk (+ i 1)))
          result))))
|
bf701c89f5a1cf963562a9c0f1ee8e58c7070201145019255de3f0e7993be7fd | clojure-interop/aws-api | AbstractAWSResourceGroupsAsync.clj | (ns com.amazonaws.services.resourcegroups.AbstractAWSResourceGroupsAsync
"Abstract implementation of AWSResourceGroupsAsync. Convenient method forms pass through to the corresponding
overload that takes a request object and an AsyncHandler, which throws an
UnsupportedOperationException."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.resourcegroups AbstractAWSResourceGroupsAsync]))
(defn create-group-async
  "Invokes createGroupAsync on the client; returns a Future of CreateGroupResult. The 3-arity variant also registers an AsyncHandler for completion callbacks."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.CreateGroupRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.createGroupAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.CreateGroupRequest request]
   (.createGroupAsync this request)))
(defn list-group-resources-async
  "Invokes listGroupResourcesAsync on the client; returns a Future of ListGroupResourcesResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.ListGroupResourcesRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.listGroupResourcesAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.ListGroupResourcesRequest request]
   (.listGroupResourcesAsync this request)))
(defn search-resources-async
  "Invokes searchResourcesAsync on the client; returns a Future of SearchResourcesResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.SearchResourcesRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.searchResourcesAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.SearchResourcesRequest request]
   (.searchResourcesAsync this request)))
(defn get-group-query-async
  "Invokes getGroupQueryAsync on the client; returns a Future of GetGroupQueryResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.GetGroupQueryRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.getGroupQueryAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.GetGroupQueryRequest request]
   (.getGroupQueryAsync this request)))
(defn update-group-async
  "Invokes updateGroupAsync on the client; returns a Future of UpdateGroupResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.UpdateGroupRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.updateGroupAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.UpdateGroupRequest request]
   (.updateGroupAsync this request)))
(defn get-group-async
  "Invokes getGroupAsync on the client; returns a Future of GetGroupResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.GetGroupRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.getGroupAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.GetGroupRequest request]
   (.getGroupAsync this request)))
(defn list-groups-async
  "Invokes listGroupsAsync on the client; returns a Future of ListGroupsResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.ListGroupsRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.listGroupsAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.ListGroupsRequest request]
   (.listGroupsAsync this request)))
(defn delete-group-async
  "Invokes deleteGroupAsync on the client; returns a Future of DeleteGroupResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.DeleteGroupRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.deleteGroupAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.DeleteGroupRequest request]
   (.deleteGroupAsync this request)))
(defn tag-async
  "Invokes tagAsync on the client; returns a Future of TagResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.TagRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.tagAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.TagRequest request]
   (.tagAsync this request)))
(defn update-group-query-async
  "Invokes updateGroupQueryAsync on the client; returns a Future of UpdateGroupQueryResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.UpdateGroupQueryRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.updateGroupQueryAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.UpdateGroupQueryRequest request]
   (.updateGroupQueryAsync this request)))
(defn untag-async
  "Invokes untagAsync on the client; returns a Future of UntagResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.UntagRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.untagAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.UntagRequest request]
   (.untagAsync this request)))
(defn get-tags-async
  "Invokes getTagsAsync on the client; returns a Future of GetTagsResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.GetTagsRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.getTagsAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.GetTagsRequest request]
   (.getTagsAsync this request)))
| null | https://raw.githubusercontent.com/clojure-interop/aws-api/59249b43d3bfaff0a79f5f4f8b7bc22518a3bf14/com.amazonaws.services.resourcegroups/src/com/amazonaws/services/resourcegroups/AbstractAWSResourceGroupsAsync.clj | clojure | (ns com.amazonaws.services.resourcegroups.AbstractAWSResourceGroupsAsync
"Abstract implementation of AWSResourceGroupsAsync. Convenient method forms pass through to the corresponding
overload that takes a request object and an AsyncHandler, which throws an
UnsupportedOperationException."
(:refer-clojure :only [require comment defn ->])
(:import [com.amazonaws.services.resourcegroups AbstractAWSResourceGroupsAsync]))
(defn create-group-async
  "Invokes createGroupAsync on the client; returns a Future of CreateGroupResult. The 3-arity variant also registers an AsyncHandler for completion callbacks."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.CreateGroupRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.createGroupAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.CreateGroupRequest request]
   (.createGroupAsync this request)))
(defn list-group-resources-async
  "Invokes listGroupResourcesAsync on the client; returns a Future of ListGroupResourcesResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.ListGroupResourcesRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.listGroupResourcesAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.ListGroupResourcesRequest request]
   (.listGroupResourcesAsync this request)))
(defn search-resources-async
  "Invokes searchResourcesAsync on the client; returns a Future of SearchResourcesResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.SearchResourcesRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.searchResourcesAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.SearchResourcesRequest request]
   (.searchResourcesAsync this request)))
(defn get-group-query-async
  "Invokes getGroupQueryAsync on the client; returns a Future of GetGroupQueryResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.GetGroupQueryRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.getGroupQueryAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.GetGroupQueryRequest request]
   (.getGroupQueryAsync this request)))
(defn update-group-async
  "Invokes updateGroupAsync on the client; returns a Future of UpdateGroupResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.UpdateGroupRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.updateGroupAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.UpdateGroupRequest request]
   (.updateGroupAsync this request)))
(defn get-group-async
  "Invokes getGroupAsync on the client; returns a Future of GetGroupResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.GetGroupRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.getGroupAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.GetGroupRequest request]
   (.getGroupAsync this request)))
(defn list-groups-async
  "Invokes listGroupsAsync on the client; returns a Future of ListGroupsResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.ListGroupsRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.listGroupsAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.ListGroupsRequest request]
   (.listGroupsAsync this request)))
(defn delete-group-async
  "Invokes deleteGroupAsync on the client; returns a Future of DeleteGroupResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.DeleteGroupRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.deleteGroupAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.DeleteGroupRequest request]
   (.deleteGroupAsync this request)))
(defn tag-async
  "Invokes tagAsync on the client; returns a Future of TagResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.TagRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.tagAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.TagRequest request]
   (.tagAsync this request)))
(defn update-group-query-async
  "Invokes updateGroupQueryAsync on the client; returns a Future of UpdateGroupQueryResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.UpdateGroupQueryRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.updateGroupQueryAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.UpdateGroupQueryRequest request]
   (.updateGroupQueryAsync this request)))
(defn untag-async
  "Invokes untagAsync on the client; returns a Future of UntagResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.UntagRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.untagAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.UntagRequest request]
   (.untagAsync this request)))
(defn get-tags-async
  "Invokes getTagsAsync on the client; returns a Future of GetTagsResult. The 3-arity variant also registers an AsyncHandler."
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.GetTagsRequest request ^com.amazonaws.handlers.AsyncHandler async-handler]
   (.getTagsAsync this request async-handler))
  (^java.util.concurrent.Future [^AbstractAWSResourceGroupsAsync this ^com.amazonaws.services.resourcegroups.model.GetTagsRequest request]
   (.getTagsAsync this request)))
| |
9b1eeed8176c0ffb942c7bd7a1993ee32cb1dbacf1681d22a6b8a71a6fc6c113 | robertzk/xgboost.hs | Foreign.hs | # LANGUAGE ForeignFunctionInterface #
# LANGUAGE FlexibleInstances #
{-# LANGUAGE EmptyDataDecls #-}
{-# CFILES xgboost_wrapper.cpp #-}
However , we use the C interface to Xgboost .
module Xgboost.Foreign (
-- Error handling
xgboostGetLastError,
-- Data I/O
xgboostMatrixCreateFromCSR,
xgboostMatrixCreateFromFile,
xgboostMatrixCreateFromMat,
xgboostMatrixSliceDMatrix,
xgboostMatrixFree,
xgboostMatrixSaveBinary,
-- Mutators for matrix meta-data
xgboostMatrixSetFloatInfo,
xgboostMatrixSetUIntInfo,
xgboostMatrixSetGroup,
-- Accessors for matrix meta-data
xgboostMatrixGetFloatInfo,
xgboostMatrixGetUIntInfo,
xgboostMatrixNumRow,
xgboostMatrixNumCol,
-- Booster object construction and manipulation
xgboostBoosterCreate,
xgboostBoosterFree,
xgboostBoosterSetParam,
-- Booster training methods
xgboostBoosterUpdateOneIter,
xgboostBoosterBoostOneIter,
xgboostBoosterEvalOneIter,
-- Booster predict methods
xgboostBoosterPredict,
-- Booster object I/O
xgboostBoosterLoadModel,
xgboostBoosterSaveModel,
xgboostBoosterLoadModelFromBuffer,
xgboostBoosterGetModelRaw,
xgboostBoosterDumpModel,
xgboostBoosterDumpModelWithFeatures,
) where
import qualified Foreign
import Foreign.C
import Foreign.Ptr
-- | Opaque handle to an xgboost DMatrix (@void*@ on the C side).
type DMatrixHandle = Ptr ()
-- | Opaque handle to an xgboost Booster (@void*@ on the C side).
type BoosterHandle = Ptr ()
-- | Pointer to a C array of floats owned by the caller.
type FloatArray = Ptr CFloat
-- | Pointer to an array of C strings holding a textual model dump.
type ModelDump = Ptr CString
-- For the prelude of the extracted C documentation, see
xgboost / wrapper / xgboost_wrapper.h
-- (relative to this root of this package)
{-
/*!
 * \brief get string message of the last error
 *
 * all function in this file will return 0 when success
 * and -1 when an error occured,
 * XGBGetLastError can be called to retrieve the error
 *
 * this function is threadsafe and can be called by different thread
 * \return const char* error inforomation
 */
XGB_DLL const char *XGBGetLastError();
-}
-- NOTE(review): the C prototype above takes no arguments and returns
-- the error string, so the Haskell type below (@CString -> IO CInt@)
-- does not match it; it should presumably be @IO CString@.  Changing
-- it would alter the exported interface and break existing callers,
-- so it is only flagged here -- confirm against xgboost_wrapper.h.
foreign import ccall unsafe "xgboost_wrapper.h XGBGetLastError"
  xgboostGetLastError :: CString -> IO CInt
{-
/*!
 * \brief create a matrix from CSR format
 * \param indptr pointer to row headers
 * \param indices column indices
 * \param data the element values
 * \param nindptr number of row offsets (rows + 1)
 * \param nelem number of nonzero elements in the matrix
 * \param out created dmatrix
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGDMatrixCreateFromCSR(const bst_ulong *indptr,
                                   const unsigned *indices,
                                   const float *data,
                                   bst_ulong nindptr,
                                   bst_ulong nelem,
                                   DMatrixHandle *out);
-}
-- NOTE(review): the comment originally attached here duplicated the
-- XGDMatrixCreateFromFile documentation; replaced with the CSR
-- prototype this import actually binds (matches the Haskell type).
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixCreateFromCSR"
  xgboostMatrixCreateFromCSR :: (Ptr CULong) -> (Ptr CUInt) -> FloatArray -> CULong -> CULong -> (Ptr DMatrixHandle) -> IO CInt
{-
/*!
 * \brief load a data matrix
 * \param fname the name of the file
 * \param silent whether print messages during loading
 * \param out a loaded data matrix
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGDMatrixCreateFromFile(const char *fname,
                                    int silent,
                                    DMatrixHandle *out);
-}
-- NOTE(review): the C prototype above takes (const char*, int,
-- DMatrixHandle*), but the Haskell type below declares five arguments
-- starting with a float array -- they do not match, and calls through
-- this binding are therefore suspect.  Changing the type would break
-- the exported interface, so it is only flagged here -- confirm.
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixCreateFromFile"
  xgboostMatrixCreateFromFile :: FloatArray -> CInt -> CULong -> CFloat -> (Ptr DMatrixHandle) -> IO CInt
{-
/*!
 * \brief create matrix content from dense matrix
 * \param data pointer to the data space
 * \param nrow number of rows
 * \param ncol number columns
 * \param missing which value to represent missing value
 * \param out created dmatrix
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGDMatrixCreateFromMat(const float *data,
                                   bst_ulong nrow,
                                   bst_ulong ncol,
                                   float missing,
                                   DMatrixHandle *out);
-}
-- | Binding for @XGDMatrixCreateFromMat@: build a 'DMatrixHandle' from a
-- dense float array of @nrow * ncol@ entries; the 'CFloat' marks the
-- missing-value sentinel.  Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixCreateFromMat"
xgboostMatrixCreateFromMat :: FloatArray -> CULong -> CULong -> CFloat -> (Ptr DMatrixHandle) -> IO CInt
{-
/*!
 * \brief create a new dmatrix from sliced content of existing matrix
 * \param handle instance of data matrix to be sliced
 * \param idxset index set
 * \param len length of index set
 * \param out a sliced new matrix
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGDMatrixSliceDMatrix(DMatrixHandle handle,
                                  const int *idxset,
                                  bst_ulong len,
                                  DMatrixHandle *out);
-}
-- | Binding for @XGDMatrixSliceDMatrix@: create a new matrix from the
-- rows of an existing one selected by an index array of the given
-- length.  Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixSliceDMatrix"
xgboostMatrixSliceDMatrix :: DMatrixHandle -> (Ptr CInt) -> CULong -> (Ptr DMatrixHandle) -> IO CInt
{-
/*!
* \brief free space in data matrix
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixFree(void *handle);
-}
-- | Binding for @XGDMatrixFree@: release the native storage behind a
-- 'DMatrixHandle'.  Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixFree"
xgboostMatrixFree :: DMatrixHandle -> IO CInt
{-
/*!
* \brief load a data matrix into binary file
* \param handle a instance of data matrix
* \param fname file name
* \param silent print statistics when saving
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixSaveBinary(DMatrixHandle handle,
const char *fname, int silent);
-}
-- | Binding for @XGDMatrixSaveBinary@: write a matrix to the named
-- binary file; the 'CInt' is the silent flag (suppress statistics when
-- saving).  Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixSaveBinary"
xgboostMatrixSaveBinary :: DMatrixHandle -> CString -> CInt -> IO CInt
{-
/*!
 * \brief set float vector to a content in info
 * \param handle a instance of data matrix
 * \param field field name, can be label, weight
 * \param array pointer to float vector
 * \param len length of array
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGDMatrixSetFloatInfo(DMatrixHandle handle,
                                  const char *field,
                                  const float *array,
                                  bst_ulong len);
-}
-- | Binding for @XGDMatrixSetFloatInfo@: set a named float meta-info
-- field (e.g. label, weight) from an array of the given length.
-- Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixSetFloatInfo"
xgboostMatrixSetFloatInfo :: DMatrixHandle -> CString -> FloatArray -> CULong -> IO CInt
{-
/*!
 * \brief set uint32 vector to a content in info
 * \param handle a instance of data matrix
 * \param field field name
 * \param array pointer to float vector
 * \param len length of array
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGDMatrixSetUIntInfo(DMatrixHandle handle,
                                 const char *field,
                                 const unsigned *array,
                                 bst_ulong len);
-}
-- | Binding for @XGDMatrixSetUIntInfo@: set a named uint32 meta-info
-- field from an array of the given length.  Returns 0 on success, -1 on
-- failure.
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixSetUIntInfo"
xgboostMatrixSetUIntInfo :: DMatrixHandle -> CString -> (Ptr CUInt) -> CULong -> IO CInt
{-
/*!
 * \brief set label of the training matrix
 * \param handle a instance of data matrix
 * \param group pointer to group size
 * \param len length of array
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGDMatrixSetGroup(DMatrixHandle handle,
                              const unsigned *group,
                              bst_ulong len);
-}
-- | Binding for @XGDMatrixSetGroup@: set the group-size array (used for
-- ranking tasks) of the given length.  Returns 0 on success, -1 on
-- failure.
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixSetGroup"
xgboostMatrixSetGroup :: DMatrixHandle -> (Ptr CUInt) -> CULong -> IO CInt
{-
* \brief get float info vector from matrix
* \param handle a instance of data matrix
* \param field field name
* \param out_len used to set result length
* \param out_dptr pointer to the result
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixGetFloatInfo(const DMatrixHandle handle,
const char *field,
bst_ulong* out_len,
const float **out_dptr);
-}
-- | Binding for @XGDMatrixGetFloatInfo@: read a named float meta-info
-- field; the result length is written through the 'Ptr CULong' and the
-- result array pointer through the final out-parameter.
--
-- The C prototype (quoted above) is
-- @int XGDMatrixGetFloatInfo(handle, field, bst_ulong *out_len,
-- const float **out_dptr)@.  The previous Haskell type dropped the
-- @out_dptr@ argument and pretended the status int was the data
-- pointer, which does not match the ABI.
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixGetFloatInfo"
  xgboostMatrixGetFloatInfo :: DMatrixHandle -> CString -> (Ptr CULong) -> (Ptr FloatArray) -> IO CInt
{-
/*!
 * \brief get uint32 info vector from matrix
 * \param handle a instance of data matrix
 * \param field field name
 * \param out_ptr pointer to the result
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGDMatrixGetUIntInfo(const DMatrixHandle handle,
                                 const char *field,
                                 bst_ulong* out_len,
                                 const unsigned **out_dptr);
-}
-- | Binding for @XGDMatrixGetUIntInfo@: read a named uint32 meta-info
-- field; length goes through the 'Ptr CULong', the array pointer
-- through the final out-parameter.
--
-- The C prototype (quoted above) returns an @int@ status and takes a
-- @const unsigned **out_dptr@ out-parameter; the previous Haskell type
-- omitted @out_dptr@ and returned the pointer instead of the status,
-- which does not match the ABI.
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixGetUIntInfo"
  xgboostMatrixGetUIntInfo :: DMatrixHandle -> CString -> (Ptr CULong) -> (Ptr (Ptr CUInt)) -> IO CInt
{-
/*!
* \brief get number of rows
* \param handle the handle to the DMatrix
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixNumRow(DMatrixHandle handle,
bst_ulong *out);
-}
-- | Binding for @XGDMatrixNumRow@: write the matrix's row count through
-- the out-pointer.  Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixNumRow"
xgboostMatrixNumRow :: DMatrixHandle -> (Ptr CULong) -> IO CInt
{-
/*!
* \brief get number of columns
* \param handle the handle to the DMatrix
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixNumCol(DMatrixHandle handle,
bst_ulong *out);
-}
-- | Binding for @XGDMatrixNumCol@: write the matrix's column count
-- through the out-pointer.  Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixNumCol"
xgboostMatrixNumCol :: DMatrixHandle -> (Ptr CULong) -> IO CInt
{-
/*!
 * \brief create xgboost learner
 * \param dmats matrices that are set to be cached
 * \param len length of dmats
 * \param out handle to the result booster
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGBoosterCreate(void* dmats[],
                            bst_ulong len,
                            BoosterHandle *out);
-}
-- | Binding for @XGBoosterCreate@: create a learner caching the given
-- array of matrices (pointer plus length), writing the new handle
-- through the out-pointer.  Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterCreate"
xgboostBoosterCreate :: (Ptr DMatrixHandle) -> CULong -> (Ptr BoosterHandle) -> IO CInt
{-
/*!
* \brief free obj in handle
* \param handle handle to be freed
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterFree(BoosterHandle handle);
-}
-- | Binding for @XGBoosterFree@: release the native booster object.
-- Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterFree"
xgboostBoosterFree :: BoosterHandle -> IO CInt
{-
/*!
* \brief set parameters
* \param handle handle
* \param name parameter name
* \param val value of parameter
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterSetParam(BoosterHandle handle,
const char *name,
const char *value);
-}
-- | Binding for @XGBoosterSetParam@: set a booster parameter by name
-- and value (both C strings).  Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterSetParam"
xgboostBoosterSetParam :: BoosterHandle -> CString -> CString -> IO CInt
{-
/*!
 * \brief update the model in one round using dtrain
 * \param handle handle
 * \param iter current iteration rounds
 * \param dtrain training data
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGBoosterUpdateOneIter(BoosterHandle handle,
                                   int iter,
                                   DMatrixHandle dtrain);
-}
-- | Binding for @XGBoosterUpdateOneIter@: run one boosting round
-- (iteration number, training matrix).  Returns 0 on success, -1 on
-- failure.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterUpdateOneIter"
xgboostBoosterUpdateOneIter :: BoosterHandle -> CInt -> DMatrixHandle -> IO CInt
{-
/*!
 * \brief update the model, by directly specify gradient and second order gradient,
 *        this can be used to replace UpdateOneIter, to support customized loss function
 * \param handle handle
 * \param dtrain training data
 * \param grad gradient statistics
 * \param hess second order gradient statistics
 * \param len length of grad/hess array
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGBoosterBoostOneIter(BoosterHandle handle,
                                  DMatrixHandle dtrain,
                                  float *grad,
                                  float *hess,
                                  bst_ulong len);
-}
-- | Binding for @XGBoosterBoostOneIter@: one boosting round driven by
-- caller-supplied gradient and hessian arrays (both of the given
-- length), supporting custom loss functions.  Returns 0 on success, -1
-- on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterBoostOneIter"
xgboostBoosterBoostOneIter :: BoosterHandle -> DMatrixHandle -> FloatArray -> FloatArray -> CULong -> IO CInt
{-
/*!
 * \brief get evaluation statistics for xgboost
 * \param handle handle
 * \param iter current iteration rounds
 * \param dmats pointers to data to be evaluated
 * \param evnames pointers to names of each data
 * \param len length of dmats
 * \param out_result the string containing evaluation statistics
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGBoosterEvalOneIter(BoosterHandle handle,
                                 int iter,
                                 DMatrixHandle dmats[],
                                 const char *evnames[],
                                 bst_ulong len,
                                 const char **out_result);
-}
-- | Binding for @XGBoosterEvalOneIter@: evaluate the model on an array
-- of matrices (with matching evaluation names), writing the statistics
-- string through the final out-parameter.
--
-- The C prototype (quoted above) takes @DMatrixHandle dmats[]@, i.e. a
-- pointer to an array of handles.  The previous type used a bare
-- 'DMatrixHandle' — only coincidentally ABI-compatible (both are
-- pointers) and type-incorrect for callers marshalling an array.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterEvalOneIter"
  xgboostBoosterEvalOneIter :: BoosterHandle -> CInt -> (Ptr DMatrixHandle) -> (Ptr CString) -> CULong -> (Ptr CString) -> IO CInt
{-
/*!
 * \brief make prediction based on dmat
 * \param handle handle
 * \param dmat data matrix
 * \param option_mask bit-mask of options taken in prediction, possible values
 *          0:normal prediction
 *          1:output margin instead of transformed value
 *          2:output leaf index of trees instead of leaf value, note leaf index is unique per tree
 * \param ntree_limit limit number of trees used for prediction, this is only valid for boosted trees
 *    when the parameter is set to 0, we will use all the trees
 * \param out_len used to store length of returning result
 * \param out_result used to set a pointer to array
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGBoosterPredict(BoosterHandle handle,
                             DMatrixHandle dmat,
                             int option_mask,
                             unsigned ntree_limit,
                             bst_ulong *out_len,
                             const float **out_result);
-}
-- | Binding for @XGBoosterPredict@: run prediction on a matrix with the
-- given option mask and tree limit; the result length is written
-- through the 'Ptr CULong' and the result array pointer through the
-- final out-parameter.
--
-- The C prototype (quoted above) has six parameters including
-- @bst_ulong *out_len@; the previous Haskell type omitted @out_len@
-- entirely, so the call did not match the ABI.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterPredict"
  xgboostBoosterPredict :: BoosterHandle -> DMatrixHandle -> CInt -> CUInt -> (Ptr CULong) -> (Ptr FloatArray) -> IO CInt
{-
/*!
* \brief load model from existing file
* \param handle handle
* \param fname file name
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterLoadModel(BoosterHandle handle,
const char *fname);
-}
-- | Binding for @XGBoosterLoadModel@: load a model from the named file.
-- Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterLoadModel"
xgboostBoosterLoadModel :: BoosterHandle -> CString -> IO CInt
{-
/*!
* \brief save model into existing file
* \param handle handle
* \param fname file name
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterSaveModel(BoosterHandle handle,
const char *fname);
-}
-- | Binding for @XGBoosterSaveModel@: save the model to the named file.
-- Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterSaveModel"
xgboostBoosterSaveModel :: BoosterHandle -> CString -> IO CInt
{-
/*!
 * \brief load model from in memory buffer
 * \param handle handle
 * \param buf pointer to the buffer
 * \param len the length of the buffer
 * \return 0 when success, -1 when failure happens
 */
XGB_DLL int XGBoosterLoadModelFromBuffer(BoosterHandle handle,
                                         const void *buf,
                                         bst_ulong len);
-}
-- | Binding for @XGBoosterLoadModelFromBuffer@: load a model from an
-- in-memory buffer of the given length.  Returns 0 on success, -1 on
-- failure.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterLoadModelFromBuffer"
xgboostBoosterLoadModelFromBuffer :: BoosterHandle -> Ptr () -> CULong -> IO CInt
{-
/*!
* \brief save model into binary raw bytes, return header of the array
* user must copy the result out, before next xgboost call
* \param handle handle
* \param out_len the argument to hold the output length
* \param out_dptr the argument to hold the output data pointer
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterGetModelRaw(BoosterHandle handle,
bst_ulong *out_len,
const char **out_dptr);
-}
-- | Binding for @XGBoosterGetModelRaw@: serialise the model to raw
-- bytes owned by the library; length goes through the first
-- out-pointer, the data pointer through the second.  The caller must
-- copy the result before the next xgboost call.
--
-- The C prototype (quoted above) declares @bst_ulong *out_len@; the
-- previous Haskell type passed a bare 'CULong' by value, which does not
-- match the ABI.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterGetModelRaw"
  xgboostBoosterGetModelRaw :: BoosterHandle -> (Ptr CULong) -> (Ptr CString) -> IO CInt
{-
/*!
* \brief dump model, return array of strings representing model dump
* \param handle handle
* \param fmap name to fmap can be empty string
* \param with_stats whether to dump with statistics
* \param out_len length of output array
* \param out_dump_array pointer to hold representing dump of each model
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterDumpModel(BoosterHandle handle,
const char *fmap,
int with_stats,
bst_ulong *out_len,
const char ***out_dump_array);
-}
-- | Binding for @XGBoosterDumpModel@: dump the model as an array of
-- strings (feature-map path, with-stats flag); the array length and the
-- string-array pointer are written through the two out-parameters.
-- Returns 0 on success, -1 on failure.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterDumpModel"
xgboostBoosterDumpModel :: BoosterHandle -> CString -> CInt -> (Ptr CULong) -> (Ptr ModelDump) -> IO CInt
{-
/*!
* \brief dump model, return array of strings representing model dump
* \param handle handle
* \param fnum number of features
* \param fnum names of features
* \param fnum types of features
* \param with_stats whether to dump with statistics
* \param out_len length of output array
* \param out_dump_array pointer to hold representing dump of each model
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterDumpModelWithFeatures(BoosterHandle handle,
int fnum,
const char **fname,
const char **ftype,
int with_stats,
bst_ulong *len,
const char ***out_models);
-}
-- | Binding for @XGBoosterDumpModelWithFeatures@: like
-- 'xgboostBoosterDumpModel' but with explicit feature count, names and
-- types instead of a feature-map file.  Returns 0 on success, -1 on
-- failure.
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterDumpModelWithFeatures"
xgboostBoosterDumpModelWithFeatures :: BoosterHandle -> CInt -> (Ptr CString) -> (Ptr CString) -> CInt -> (Ptr CULong) -> (Ptr ModelDump) -> IO CInt
| null | https://raw.githubusercontent.com/robertzk/xgboost.hs/b0b8890eba63a4dd25de3413b12a727f5e2fedac/src/Xgboost/Foreign.hs | haskell | # LANGUAGE EmptyDataDecls #
# CFILES xgboost_wrapper.cpp #
Error handling
Data I/O
Mutators for matrix meta-data
Accessors for matrix meta-data
Booster object construction and manipulation
Booster training methods
Booster predict methods
Booster object I/O
For the prelude of the extracted C documentation, see
(relative to this root of this package)
/*!
* \brief load a data matrix
* \param fname the name of the file
* \param silent whether print messages during loading
* \param out a loaded data matrix
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixCreateFromFile(const char *fname,
int silent,
DMatrixHandle *out);
/*!
* \brief load a data matrix
* \param fname the name of the file
* \param silent whether print messages during loading
* \param out a loaded data matrix
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixCreateFromFile(const char *fname,
int silent,
DMatrixHandle *out);
/*!
* \brief free space in data matrix
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixFree(void *handle);
/*!
* \brief load a data matrix into binary file
* \param handle a instance of data matrix
* \param fname file name
* \param silent print statistics when saving
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixSaveBinary(DMatrixHandle handle,
const char *fname, int silent);
* \brief get float info vector from matrix
* \param handle a instance of data matrix
* \param field field name
* \param out_len used to set result length
* \param out_dptr pointer to the result
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixGetFloatInfo(const DMatrixHandle handle,
const char *field,
bst_ulong* out_len,
const float **out_dptr);
/*!
* \brief get number of rows
* \param handle the handle to the DMatrix
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixNumRow(DMatrixHandle handle,
bst_ulong *out);
/*!
* \brief get number of columns
* \param handle the handle to the DMatrix
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixNumCol(DMatrixHandle handle,
bst_ulong *out);
/*!
* \brief free obj in handle
* \param handle handle to be freed
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterFree(BoosterHandle handle);
/*!
* \brief set parameters
* \param handle handle
* \param name parameter name
* \param val value of parameter
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterSetParam(BoosterHandle handle,
const char *name,
const char *value);
/*!
* \brief load model from existing file
* \param handle handle
* \param fname file name
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterLoadModel(BoosterHandle handle,
const char *fname);
/*!
* \brief save model into existing file
* \param handle handle
* \param fname file name
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterSaveModel(BoosterHandle handle,
const char *fname);
/*!
* \brief save model into binary raw bytes, return header of the array
* user must copy the result out, before next xgboost call
* \param handle handle
* \param out_len the argument to hold the output length
* \param out_dptr the argument to hold the output data pointer
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterGetModelRaw(BoosterHandle handle,
bst_ulong *out_len,
const char **out_dptr);
/*!
* \brief dump model, return array of strings representing model dump
* \param handle handle
* \param fmap name to fmap can be empty string
* \param with_stats whether to dump with statistics
* \param out_len length of output array
* \param out_dump_array pointer to hold representing dump of each model
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterDumpModel(BoosterHandle handle,
const char *fmap,
int with_stats,
bst_ulong *out_len,
const char ***out_dump_array);
/*!
* \brief dump model, return array of strings representing model dump
* \param handle handle
* \param fnum number of features
* \param fnum names of features
* \param fnum types of features
* \param with_stats whether to dump with statistics
* \param out_len length of output array
* \param out_dump_array pointer to hold representing dump of each model
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterDumpModelWithFeatures(BoosterHandle handle,
int fnum,
const char **fname,
const char **ftype,
int with_stats,
bst_ulong *len,
const char ***out_models);
| # LANGUAGE ForeignFunctionInterface #
# LANGUAGE FlexibleInstances #
However , we use the C interface to Xgboost .
module Xgboost.Foreign (
xgboostGetLastError,
xgboostMatrixCreateFromCSR,
xgboostMatrixCreateFromFile,
xgboostMatrixCreateFromMat,
xgboostMatrixSliceDMatrix,
xgboostMatrixFree,
xgboostMatrixSaveBinary,
xgboostMatrixSetFloatInfo,
xgboostMatrixSetUIntInfo,
xgboostMatrixSetGroup,
xgboostMatrixGetFloatInfo,
xgboostMatrixGetUIntInfo,
xgboostMatrixNumRow,
xgboostMatrixNumCol,
xgboostBoosterCreate,
xgboostBoosterFree,
xgboostBoosterSetParam,
xgboostBoosterUpdateOneIter,
xgboostBoosterBoostOneIter,
xgboostBoosterEvalOneIter,
xgboostBoosterPredict,
xgboostBoosterLoadModel,
xgboostBoosterSaveModel,
xgboostBoosterLoadModelFromBuffer,
xgboostBoosterGetModelRaw,
xgboostBoosterDumpModel,
xgboostBoosterDumpModelWithFeatures,
) where
import qualified Foreign
import Foreign.C
import Foreign.Ptr
type DMatrixHandle = Ptr ()
type BoosterHandle = Ptr ()
type FloatArray = Ptr CFloat
type ModelDump = Ptr CString
xgboost / wrapper / xgboost_wrapper.h
/ * !
* \brief get string message of the last error
*
* all function in this file will return 0 when success
* and -1 when an error occured ,
* XGBGetLastError can be called to retrieve the error
*
* this function is threadsafe and can be called by different thread
* \return const char * error inforomation
* /
XGB_DLL const char * XGBGetLastError ( ) ;
/*!
* \brief get string message of the last error
*
* all function in this file will return 0 when success
* and -1 when an error occured,
* XGBGetLastError can be called to retrieve the error
*
* this function is threadsafe and can be called by different thread
* \return const char* error inforomation
*/
XGB_DLL const char *XGBGetLastError();
-}
foreign import ccall unsafe "xgboost_wrapper.h XGBGetLastError"
xgboostGetLastError :: CString -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixCreateFromCSR"
xgboostMatrixCreateFromCSR :: (Ptr CULong) -> (Ptr CUInt) -> FloatArray -> CULong -> CULong -> (Ptr DMatrixHandle) -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixCreateFromFile"
xgboostMatrixCreateFromFile :: FloatArray -> CInt -> CULong -> CFloat -> (Ptr DMatrixHandle) -> IO CInt
/ * !
* \brief create matrix content from dense matrix
* \param data pointer to the data space
* \param nrow number of rows
* \param ncol number columns
* \param missing which value to represent missing value
* \param out created dmatrix
* \return 0 when success , -1 when failure happens
* /
XGB_DLL int XGDMatrixCreateFromMat(const float * data ,
,
ncol ,
float missing ,
DMatrixHandle * out ) ;
/*!
* \brief create matrix content from dense matrix
* \param data pointer to the data space
* \param nrow number of rows
* \param ncol number columns
* \param missing which value to represent missing value
* \param out created dmatrix
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixCreateFromMat(const float *data,
bst_ulong nrow,
bst_ulong ncol,
float missing,
DMatrixHandle *out);
-}
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixCreateFromMat"
xgboostMatrixCreateFromMat :: FloatArray -> CULong -> CULong -> CFloat -> (Ptr DMatrixHandle) -> IO CInt
/ * !
* \brief create a new dmatrix from sliced content of existing matrix
* \param handle instance of data matrix to be sliced
* \param idxset index set
* \param len length of index set
* \param out a sliced new matrix
* \return 0 when success , -1 when failure happens
* /
XGB_DLL int XGDMatrixSliceDMatrix(DMatrixHandle handle ,
const int * idxset ,
* out ) ;
/*!
* \brief create a new dmatrix from sliced content of existing matrix
* \param handle instance of data matrix to be sliced
* \param idxset index set
* \param len length of index set
* \param out a sliced new matrix
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixSliceDMatrix(DMatrixHandle handle,
const int *idxset,
bst_ulong len,
DMatrixHandle *out);
-}
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixSliceDMatrix"
xgboostMatrixSliceDMatrix :: DMatrixHandle -> (Ptr CInt) -> CULong -> (Ptr DMatrixHandle) -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixFree"
xgboostMatrixFree :: DMatrixHandle -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixSaveBinary"
xgboostMatrixSaveBinary :: DMatrixHandle -> CString -> CInt -> IO CInt
/ * !
* \brief set float vector to a content in info
* \param handle a instance of data matrix
* \param field field name , can be label , weight
* \param array pointer to float vector
* \param len length of array
* \return 0 when success , -1 when failure happens
* /
XGB_DLL ,
const char * field ,
const float * array ,
) ;
/*!
* \brief set float vector to a content in info
* \param handle a instance of data matrix
* \param field field name, can be label, weight
* \param array pointer to float vector
* \param len length of array
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixSetFloatInfo(DMatrixHandle handle,
const char *field,
const float *array,
bst_ulong len);
-}
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixSetFloatInfo"
xgboostMatrixSetFloatInfo :: DMatrixHandle -> CString -> FloatArray -> CULong -> IO CInt
/ * !
* \brief set uint32 vector to a content in info
* \param handle a instance of data matrix
* \param field field name
* \param array pointer to float vector
* \param len length of array
* \return 0 when success , -1 when failure happens
* /
XGB_DLL int XGDMatrixSetUIntInfo(DMatrixHandle handle ,
const char * field ,
const unsigned * array ,
) ;
/*!
* \brief set uint32 vector to a content in info
* \param handle a instance of data matrix
* \param field field name
* \param array pointer to float vector
* \param len length of array
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixSetUIntInfo(DMatrixHandle handle,
const char *field,
const unsigned *array,
bst_ulong len);
-}
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixSetUIntInfo"
xgboostMatrixSetUIntInfo :: DMatrixHandle -> CString -> (Ptr CUInt) -> CULong -> IO CInt
/ * !
* \brief set label of the training matrix
* \param handle a instance of data matrix
* \param group pointer to group size
* \param len length of array
* \return 0 when success , -1 when failure happens
* /
XGB_DLL int XGDMatrixSetGroup(DMatrixHandle handle ,
const unsigned * group ,
) ;
/*!
* \brief set label of the training matrix
* \param handle a instance of data matrix
* \param group pointer to group size
* \param len length of array
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixSetGroup(DMatrixHandle handle,
const unsigned *group,
bst_ulong len);
-}
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixSetGroup"
xgboostMatrixSetGroup :: DMatrixHandle -> (Ptr CUInt) -> CULong -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixGetFloatInfo"
xgboostMatrixGetFloatInfo :: DMatrixHandle -> CString -> (Ptr CULong) -> IO (Ptr FloatArray)
/ * !
* \brief get uint32 info vector from matrix
* \param handle a instance of data matrix
* \param field field name
* \param out_ptr pointer to the result
* \return 0 when success , -1 when failure happens
* /
XGB_DLL int XGDMatrixGetUIntInfo(const DMatrixHandle handle ,
const char * field ,
bst_ulong * out_len ,
const unsigned * * out_dptr ) ;
/*!
* \brief get uint32 info vector from matrix
* \param handle a instance of data matrix
* \param field field name
* \param out_ptr pointer to the result
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGDMatrixGetUIntInfo(const DMatrixHandle handle,
const char *field,
bst_ulong* out_len,
const unsigned **out_dptr);
-}
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixGetUIntInfo"
xgboostMatrixGetUIntInfo :: DMatrixHandle -> CString -> (Ptr CULong) -> IO (Ptr (Ptr CUInt))
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixNumRow"
xgboostMatrixNumRow :: DMatrixHandle -> (Ptr CULong) -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGDMatrixNumCol"
xgboostMatrixNumCol :: DMatrixHandle -> (Ptr CULong) -> IO CInt
/ * !
* \brief create xgboost learner
* \param dmats matrices that are set to be cached
* \param len length of dmats
* \param out handle to the result booster
* \return 0 when success , -1 when failure happens
* /
XGB_DLL int XGBoosterCreate(void * [ ] ,
,
BoosterHandle * out ) ;
/*!
* \brief create xgboost learner
* \param dmats matrices that are set to be cached
* \param len length of dmats
* \param out handle to the result booster
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterCreate(void* dmats[],
bst_ulong len,
BoosterHandle *out);
-}
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterCreate"
xgboostBoosterCreate :: (Ptr DMatrixHandle) -> CULong -> (Ptr BoosterHandle) -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterFree"
xgboostBoosterFree :: BoosterHandle -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterSetParam"
xgboostBoosterSetParam :: BoosterHandle -> CString -> CString -> IO CInt
/ * !
* \brief update the model in one round using dtrain
* \param handle handle
* \param iter current iteration rounds
* \param dtrain training data
* \return 0 when success , -1 when failure happens
* /
XGB_DLL int XGBoosterUpdateOneIter(BoosterHandle handle ,
int iter ,
) ;
/*!
* \brief update the model in one round using dtrain
* \param handle handle
* \param iter current iteration rounds
* \param dtrain training data
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterUpdateOneIter(BoosterHandle handle,
int iter,
DMatrixHandle dtrain);
-}
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterUpdateOneIter"
xgboostBoosterUpdateOneIter :: BoosterHandle -> CInt -> DMatrixHandle -> IO CInt
/ * !
* \brief update the model , by directly specify gradient and second order gradient ,
* this can be used to replace , to support customized loss function
* \param handle handle
* \param dtrain training data
* \param grad gradient statistics
* \param hess second order gradient statistics
* \param len length of grad / hess array
* \return 0 when success , -1 when failure happens
* /
XGB_DLL int XGBoosterBoostOneIter(BoosterHandle handle ,
,
float * grad ,
float * hess ,
) ;
/*!
* \brief update the model, by directly specify gradient and second order gradient,
* this can be used to replace UpdateOneIter, to support customized loss function
* \param handle handle
* \param dtrain training data
* \param grad gradient statistics
* \param hess second order gradient statistics
* \param len length of grad/hess array
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterBoostOneIter(BoosterHandle handle,
DMatrixHandle dtrain,
float *grad,
float *hess,
bst_ulong len);
-}
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterBoostOneIter"
xgboostBoosterBoostOneIter :: BoosterHandle -> DMatrixHandle -> FloatArray -> FloatArray -> CULong -> IO CInt
/ * !
* \brief get evaluation statistics for xgboost
* \param handle handle
* \param iter current iteration rounds
* \param dmats pointers to data to be evaluated
* \param evnames pointers to names of each data
* \param len length of dmats
* \param out_result the string containing evaluation statistics
* \return 0 when success , -1 when failure happens
* /
XGB_DLL int XGBoosterEvalOneIter(BoosterHandle handle ,
int iter ,
DMatrixHandle dmats [ ] ,
const char * evnames [ ] ,
,
const char * * out_result ) ;
/*!
* \brief get evaluation statistics for xgboost
* \param handle handle
* \param iter current iteration rounds
* \param dmats pointers to data to be evaluated
* \param evnames pointers to names of each data
* \param len length of dmats
* \param out_result the string containing evaluation statistics
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterEvalOneIter(BoosterHandle handle,
int iter,
DMatrixHandle dmats[],
const char *evnames[],
bst_ulong len,
const char **out_result);
-}
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterEvalOneIter"
xgboostBoosterEvalOneIter :: BoosterHandle -> CInt -> DMatrixHandle -> (Ptr CString) -> CULong -> (Ptr CString) -> IO CInt
/ * !
* \brief make prediction based on dmat
* \param handle handle
* \param dmat data matrix
* \param option_mask bit - mask of options taken in prediction , possible values
* 0 : normal prediction
* 1 : output margin instead of transformed value
* 2 : output leaf index of trees instead of leaf value , note leaf index is unique per tree
* \param ntree_limit limit number of trees used for prediction , this is only valid for boosted trees
* when the parameter is set to 0 , we will use all the trees
* \param out_len used to store length of returning result
* \param out_result used to set a pointer to array
* \return 0 when success , -1 when failure happens
* /
XGB_DLL int XGBoosterPredict(BoosterHandle handle ,
DMatrixHandle dmat ,
int option_mask ,
unsigned ntree_limit ,
bst_ulong * out_len ,
const float * * out_result ) ;
/*!
* \brief make prediction based on dmat
* \param handle handle
* \param dmat data matrix
* \param option_mask bit-mask of options taken in prediction, possible values
* 0:normal prediction
* 1:output margin instead of transformed value
* 2:output leaf index of trees instead of leaf value, note leaf index is unique per tree
* \param ntree_limit limit number of trees used for prediction, this is only valid for boosted trees
* when the parameter is set to 0, we will use all the trees
* \param out_len used to store length of returning result
* \param out_result used to set a pointer to array
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterPredict(BoosterHandle handle,
DMatrixHandle dmat,
int option_mask,
unsigned ntree_limit,
bst_ulong *out_len,
const float **out_result);
-}
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterPredict"
xgboostBoosterPredict :: BoosterHandle -> DMatrixHandle -> CInt -> CUInt -> (Ptr FloatArray) -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterLoadModel"
xgboostBoosterLoadModel :: BoosterHandle -> CString -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterSaveModel"
xgboostBoosterSaveModel :: BoosterHandle -> CString -> IO CInt
/ * !
* \brief load model from in memory buffer
* \param handle handle
* \param buf pointer to the buffer
* \param len the length of the buffer
* \return 0 when success , -1 when failure happens
* /
XGB_DLL int XGBoosterLoadModelFromBuffer(BoosterHandle handle ,
const void * buf ,
) ;
/*!
* \brief load model from in memory buffer
* \param handle handle
* \param buf pointer to the buffer
* \param len the length of the buffer
* \return 0 when success, -1 when failure happens
*/
XGB_DLL int XGBoosterLoadModelFromBuffer(BoosterHandle handle,
const void *buf,
bst_ulong len);
-}
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterLoadModelFromBuffer"
xgboostBoosterLoadModelFromBuffer :: BoosterHandle -> Ptr () -> CULong -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterGetModelRaw"
xgboostBoosterGetModelRaw :: BoosterHandle -> CULong -> (Ptr CString) -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterDumpModel"
xgboostBoosterDumpModel :: BoosterHandle -> CString -> CInt -> (Ptr CULong) -> (Ptr ModelDump) -> IO CInt
foreign import ccall unsafe "xgboost_wrapper.h XGBoosterDumpModelWithFeatures"
xgboostBoosterDumpModelWithFeatures :: BoosterHandle -> CInt -> (Ptr CString) -> (Ptr CString) -> CInt -> (Ptr CULong) -> (Ptr ModelDump) -> IO CInt
|
ab434ffe4dcc34bf5856b0e534d3b4ad421ad600a8f026cd18030679a5f2020b | OCamlPro/freeton_ocaml_sdk | main.ml | (**************************************************************************)
(* *)
Copyright ( c ) 2021 OCamlPro SAS
(* *)
(* All rights reserved. *)
(* This file is distributed under the terms of the GNU Lesser General *)
Public License version 2.1 , with the special exception on linking
(* described in the LICENSE.md file in the root directory. *)
(* *)
(* *)
(**************************************************************************)
(* If you delete or rename this file, you should add
'src/freeton_client_lwt/main.ml' to the 'skip' field in "drom.toml" *)
let main () = Printf.printf "Hello world!\n"
| null | https://raw.githubusercontent.com/OCamlPro/freeton_ocaml_sdk/42a0d95252ed19c647fa86e9728af15d557fc5e3/src/freeton_client_lwt/main.ml | ocaml | ************************************************************************
All rights reserved.
This file is distributed under the terms of the GNU Lesser General
described in the LICENSE.md file in the root directory.
************************************************************************
If you delete or rename this file, you should add
'src/freeton_client_lwt/main.ml' to the 'skip' field in "drom.toml" | Copyright ( c ) 2021 OCamlPro SAS
Public License version 2.1 , with the special exception on linking
let main () = Printf.printf "Hello world!\n"
|
3b06138cf697ea125db6d4f0c0c8267cf8d7cb8bc733e012191d661073c414d0 | iu-parfunc/HSBencher | Codespeed.hs | # LANGUAGE NamedFieldPuns , RecordWildCards , ScopedTypeVariables , CPP , BangPatterns #
# LANGUAGE TupleSections , DeriveDataTypeable #
# LANGUAGE TypeFamilies #
{-# LANGUAGE OverloadedStrings #-}
| CodeSpeed website upload of benchmark data .
--
-- This module must be used in conjunction with the main "hsbencher" package,
e.g. " import HSBencher " , and then " import HSBencher . Backend . CodeSpeed " and
-- add the plugin
module HSBencher.Backend.Codespeed
( -- * The plugin itself, what you probably want
defaultCodespeedPlugin
-- * Details and configuration options.
, CodespeedConfig(..)
, stdRetry , getTableId
, fusionSchema , resultToTuple
-- , uploadBenchResult
, CodespeedPlug(), CodespeedCmdLnFlag(..),
)
where
import Control.Monad.Reader
import Control.Concurrent (threadDelay)
import qualified Control.Exception as E
import Data.Maybe (isJust, fromJust, catMaybes, fromMaybe)
import Data.Dynamic
import Data.Default (Default(..))
import qualified Data.Set as S
import qualified Data.Map as M
import qualified Data.List as L
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.ByteString.Char8 as BS
import Data.Time.Clock
import Data.Time.Calendar
import Data.Time.Format ()
import Network.HTTP.Types (renderQuery, urlEncode, urlDecode)
import Network.HTTP (simpleHTTP, postRequestWithBody)
import Control.Monad.Trans.Resource (runResourceT)
( encodeStrict , toJSObject )
import HSBencher.Types
import HSBencher.Internal.Logging (log)
import Prelude hiding (log)
import System.IO (hPutStrLn, stderr)
import System.IO.Unsafe (unsafePerformIO)
import System.Console.GetOpt (getOpt, ArgOrder(Permute), OptDescr(Option), ArgDescr(..), usageInfo)
import System.Directory (doesFileExist, doesDirectoryExist, getAppUserDataDirectory,
createDirectory, renameFile, removeFile)
import System.FilePath ((</>),(<.>), splitExtension)
import System.IO.Unsafe (unsafePerformIO)
import System.Environment (getEnvironment)
import System.Exit
import Control.Concurrent.MVar
--------------------------------------------------------------------------------
-- | A default plugin. This binding provides future-proof way to get
-- a default instance of the plugin, in the eventuality that more
-- configuration options are added in the future.
defaultCodespeedPlugin :: CodespeedPlug
defaultCodespeedPlugin = CodespeedPlug
-- | This is the same as defaultCodespeedPlugin
instance Default CodespeedPlug where
def = defaultCodespeedPlugin
TODO : may need to grab stdRetry / retryIORequest from the Fusion plugin ..
| Configuration options for Codespeed uploading .
data CodespeedConfig =
CodespeedConfig { codespeedURL :: URL
, projName :: String }
deriving (Show,Read,Ord,Eq, Typeable)
-- | Note, the default config may not be complete and thus may have
-- some required fields to fill in, or errors will ensue.
instance Default CodespeedConfig where
def = CodespeedConfig
{ codespeedURL = error "incomplete CodespeedConfig: Must set Codespeed URL (--codespeed) to use this plugin!"
, projName = error "incomplete CodespeedConfig: Must set Codespeed --projname to use this plugin!"
}
| command line options provided by the user that initiaties benchmarking .
data CodespeedCmdLnFlag = CodespeedURL URL
| CodespeedProjName String
-- TODO: Authentication!
deriving (Show,Read,Ord,Eq, Typeable)
type URL = String
getDateTime :: IO String
getDateTime = do
utc <- getCurrentTime
return $ show utc
-- | Push the results from a single benchmark to the server.
uploadBenchResult :: BenchmarkResult -> BenchM ()
uploadBenchResult br = do
lift$ putStrLn " [codespeed] Begin upload of one benchmark result."
conf <- ask
-- Look up our configuration dynamically based the plugin type:
let codespeedConfig = getMyConf CodespeedPlug conf
-- lift$ putStrLn$ " [codespeed] Running with config: \n"++show conf
lift$ putStrLn$ " [codespeed] Running with plugin config: \n"++show codespeedConfig
let CodespeedConfig {codespeedURL} = codespeedConfig
contentType = "application/x-www-form-urlencoded"
-- contentType = "application/json"
addURL = (codespeedURL ++ "/result/add/json/")
-- Version that uses HTTP pkg:
let json = renderJSONResult codespeedConfig br
bod = urlEncode False $ BS.pack json
let req = postRequestWithBody addURL contentType $ BS.unpack bod
lift$ putStrLn$ " [codespeed] Uploading json: "++ json
lift$ putStrLn$ " [codespeed] URl-encoded json POST body: "++ BS.unpack bod
lift$ putStrLn$ " [codespeed] Submitting HTTP Post request: \n"++show req
resp <- lift$ simpleHTTP req
case resp of
Left err -> lift$ putStrLn$ " [codespeed] ERROR uploading: \n"++show err
Right x -> lift$ putStrLn$ " [codespeed] Got response from server:\n"++show x
return ()
renderJSONResult :: CodespeedConfig -> BenchmarkResult -> String
renderJSONResult CodespeedConfig{projName} benchRes =
-- _PROGNAME _VARIANT _ARGS _HOSTNAME _RUNID _CI_BUILD_ID _THREADS
-- _DATETIME _MINTIME _MEDIANTIME _MAXTIME _MINTIME_PRODUCTIVITY
_ MEDIANTIME_PRODUCTIVITY _ MAXTIME_PRODUCTIVITY _ ALLTIMES _
_ COMPILER _ COMPILE_FLAGS _
_ BENCH_FILE _ UNAME _ PROCESSOR _ TOPOLOGY _ GIT_BRANCH _ GIT_HASH
-- _GIT_DEPTH _WHO _ETC_ISSUE _LSPCI _FULL_LOG _MEDIANTIME_ALLOCRATE
_ _ ALLJITTIMES _ CUSTOM
simpleFormat
[
-- A working example:
( " project " , S " MyProject2 " )
, ( " executable " , S " myexe 04 32bits " )
-- , ("benchmark", S "float")
, ( " commitid " , S " 8 " )
-- , ("environment", S "cutter")
, ( " result_value " , D 2500.1 )
-- , ("branch", S "default")
("project", S projName)
, ("executable", S exec)
, ("benchmark", S bench)
, ("commitid", S _GIT_HASH)
, ( " environment " , S " 129 - 79 - 241 - 98 " ) -- Results in 400 / BAD REQUEST
, ( " environment " , S " 1297924198 " ) -- Results in 400 / BAD REQUEST
-- Apparently this is the error on the server:
-- Exception Value:
Expecting ' , ' delimiter : line 1 column 235 ( char 234 )
Exception Location : /opt / python/2.7.8 / lib / python2.7 / json / decoder.py in raw_decode , line 382
, ( " environment " , S " hello1297924198 " ) -- Results in 400 / BAD REQUEST
, ( " environment " , S " hello " ) -- Also 400 / BAD REQUEST
-- Seems to fail if the environment is not REGISTERED already
-- on the website. Does not create on demand?
, ("environment", S _HOSTNAME)
, ("result_value", D _MEDIANTIME)
, ("branch", S _GIT_BRANCH)
-- Plus add optional fields:
-- , ("revision_date", s "") -- Optional. Default is taken either
-- from VCS integration or from current date
-- , ("result_date", s "") -- Optional, default is current date
, ( " std_dev " , ( 1.11111 : : Double ) ) -- Optional . Default is blank
, ("max", D _MAXTIME) -- Optional. Default is blank
, ("min", D _MINTIME) -- Optional. Default is blank
: Question : are and min the * observed * max and min presumably ?
]
where
Populate the CodeSpeed fields using the HSBencher fields :
BenchmarkResult{..} = benchRes
exec = combine $ [_VARIANT] ++ if _THREADS==0 then []
else [show _THREADS ++ "T"]
bench = combine [_PROGNAME, unwords _ARGS]
| This is a hacky way to pack multiple fields into one field of the
-- destination schema. There is a tradeoff here between readability
-- and ease of dissection.
combine :: [String] -> String
combine fields =
let fields' = filter (not . null) fields in
L.concat (L.intersperse "|" fields')
data RHS = S String | D Double
-- | The Django-based codespeed server is a bit finicky in exactly
-- what JSON formattincg and URL encodings it accepts. Thus, rather
-- than using any of the existing frameworks, we just use a particular
-- format we know works.
simpleFormat :: [(String,RHS)] -> String
simpleFormat prs = "json=[{" ++ bod ++"}]"
where
bod = L.concat $ L.intersperse ", " $ L.map fn prs
fn (l,r) = show l ++ ": " ++ rhs r
rhs (S s) = show s
rhs (D d) = show d
| The type of Codespeed table plugins . Currently this is a singleton type ; there is
really only one Codespeed plugin .
data CodespeedPlug = CodespeedPlug
deriving (Eq,Show,Ord,Read)
instance Plugin CodespeedPlug where
-- These configs are stored in a dynamically typed list within the global BenchM config:
type PlugConf CodespeedPlug = CodespeedConfig
type PlugFlag CodespeedPlug = CodespeedCmdLnFlag
-- | Better be globally unique! Careful.
plugName _ = "codespeed"
plugCmdOpts _ = codespeed_cli_options
plugUploadRow p cfg row = runReaderT (uploadBenchResult row) cfg
plugInitialize p gconf = do
putStrLn " [codespeed] Codespeed table plugin initializing.. (which is a NOOP)"
return gconf
foldFlags p flgs cnf0 =
foldr ($) cnf0 (map doFlag flgs)
where
doFlag (CodespeedURL url) r = r { codespeedURL = url}
doFlag (CodespeedProjName nm) r = r { projName = nm }
theEnv :: [(String,String)]
theEnv = unsafePerformIO getEnvironment
-- | All the command line options understood by this plugin.
codespeed_cli_options :: (String, [OptDescr CodespeedCmdLnFlag])
codespeed_cli_options =
("Codespeed Table Options:",
[ Option [] ["codespeed"] (ReqArg CodespeedURL "URL")
"specify the root URL of the Codespeed installation"
, Option [] ["projname"] (ReqArg CodespeedProjName "NAME")
"specify which Codespeed Project receives the uploaded results"
])
| null | https://raw.githubusercontent.com/iu-parfunc/HSBencher/76782b75b3a4b276c45a2c159e0b4cb6bd8a2360/hsbencher-codespeed/HSBencher/Backend/Codespeed.hs | haskell | # LANGUAGE OverloadedStrings #
This module must be used in conjunction with the main "hsbencher" package,
add the plugin
* The plugin itself, what you probably want
* Details and configuration options.
, uploadBenchResult
------------------------------------------------------------------------------
| A default plugin. This binding provides future-proof way to get
a default instance of the plugin, in the eventuality that more
configuration options are added in the future.
| This is the same as defaultCodespeedPlugin
| Note, the default config may not be complete and thus may have
some required fields to fill in, or errors will ensue.
TODO: Authentication!
| Push the results from a single benchmark to the server.
Look up our configuration dynamically based the plugin type:
lift$ putStrLn$ " [codespeed] Running with config: \n"++show conf
contentType = "application/json"
Version that uses HTTP pkg:
_PROGNAME _VARIANT _ARGS _HOSTNAME _RUNID _CI_BUILD_ID _THREADS
_DATETIME _MINTIME _MEDIANTIME _MAXTIME _MINTIME_PRODUCTIVITY
_GIT_DEPTH _WHO _ETC_ISSUE _LSPCI _FULL_LOG _MEDIANTIME_ALLOCRATE
A working example:
, ("benchmark", S "float")
, ("environment", S "cutter")
, ("branch", S "default")
Results in 400 / BAD REQUEST
Results in 400 / BAD REQUEST
Apparently this is the error on the server:
Exception Value:
Results in 400 / BAD REQUEST
Also 400 / BAD REQUEST
Seems to fail if the environment is not REGISTERED already
on the website. Does not create on demand?
Plus add optional fields:
, ("revision_date", s "") -- Optional. Default is taken either
from VCS integration or from current date
, ("result_date", s "") -- Optional, default is current date
Optional . Default is blank
Optional. Default is blank
Optional. Default is blank
destination schema. There is a tradeoff here between readability
and ease of dissection.
| The Django-based codespeed server is a bit finicky in exactly
what JSON formattincg and URL encodings it accepts. Thus, rather
than using any of the existing frameworks, we just use a particular
format we know works.
These configs are stored in a dynamically typed list within the global BenchM config:
| Better be globally unique! Careful.
| All the command line options understood by this plugin. | # LANGUAGE NamedFieldPuns , RecordWildCards , ScopedTypeVariables , CPP , BangPatterns #
# LANGUAGE TupleSections , DeriveDataTypeable #
# LANGUAGE TypeFamilies #
| CodeSpeed website upload of benchmark data .
e.g. " import HSBencher " , and then " import HSBencher . Backend . CodeSpeed " and
module HSBencher.Backend.Codespeed
defaultCodespeedPlugin
, CodespeedConfig(..)
, stdRetry , getTableId
, fusionSchema , resultToTuple
, CodespeedPlug(), CodespeedCmdLnFlag(..),
)
where
import Control.Monad.Reader
import Control.Concurrent (threadDelay)
import qualified Control.Exception as E
import Data.Maybe (isJust, fromJust, catMaybes, fromMaybe)
import Data.Dynamic
import Data.Default (Default(..))
import qualified Data.Set as S
import qualified Data.Map as M
import qualified Data.List as L
import qualified Data.ByteString.Lazy.Char8 as B
import qualified Data.ByteString.Char8 as BS
import Data.Time.Clock
import Data.Time.Calendar
import Data.Time.Format ()
import Network.HTTP.Types (renderQuery, urlEncode, urlDecode)
import Network.HTTP (simpleHTTP, postRequestWithBody)
import Control.Monad.Trans.Resource (runResourceT)
( encodeStrict , toJSObject )
import HSBencher.Types
import HSBencher.Internal.Logging (log)
import Prelude hiding (log)
import System.IO (hPutStrLn, stderr)
import System.IO.Unsafe (unsafePerformIO)
import System.Console.GetOpt (getOpt, ArgOrder(Permute), OptDescr(Option), ArgDescr(..), usageInfo)
import System.Directory (doesFileExist, doesDirectoryExist, getAppUserDataDirectory,
createDirectory, renameFile, removeFile)
import System.FilePath ((</>),(<.>), splitExtension)
import System.IO.Unsafe (unsafePerformIO)
import System.Environment (getEnvironment)
import System.Exit
import Control.Concurrent.MVar
defaultCodespeedPlugin :: CodespeedPlug
defaultCodespeedPlugin = CodespeedPlug
instance Default CodespeedPlug where
def = defaultCodespeedPlugin
TODO : may need to grab stdRetry / retryIORequest from the Fusion plugin ..
| Configuration options for Codespeed uploading .
data CodespeedConfig =
CodespeedConfig { codespeedURL :: URL
, projName :: String }
deriving (Show,Read,Ord,Eq, Typeable)
instance Default CodespeedConfig where
def = CodespeedConfig
{ codespeedURL = error "incomplete CodespeedConfig: Must set Codespeed URL (--codespeed) to use this plugin!"
, projName = error "incomplete CodespeedConfig: Must set Codespeed --projname to use this plugin!"
}
| command line options provided by the user that initiaties benchmarking .
data CodespeedCmdLnFlag = CodespeedURL URL
| CodespeedProjName String
deriving (Show,Read,Ord,Eq, Typeable)
type URL = String
getDateTime :: IO String
getDateTime = do
utc <- getCurrentTime
return $ show utc
uploadBenchResult :: BenchmarkResult -> BenchM ()
uploadBenchResult br = do
lift$ putStrLn " [codespeed] Begin upload of one benchmark result."
conf <- ask
let codespeedConfig = getMyConf CodespeedPlug conf
lift$ putStrLn$ " [codespeed] Running with plugin config: \n"++show codespeedConfig
let CodespeedConfig {codespeedURL} = codespeedConfig
contentType = "application/x-www-form-urlencoded"
addURL = (codespeedURL ++ "/result/add/json/")
let json = renderJSONResult codespeedConfig br
bod = urlEncode False $ BS.pack json
let req = postRequestWithBody addURL contentType $ BS.unpack bod
lift$ putStrLn$ " [codespeed] Uploading json: "++ json
lift$ putStrLn$ " [codespeed] URl-encoded json POST body: "++ BS.unpack bod
lift$ putStrLn$ " [codespeed] Submitting HTTP Post request: \n"++show req
resp <- lift$ simpleHTTP req
case resp of
Left err -> lift$ putStrLn$ " [codespeed] ERROR uploading: \n"++show err
Right x -> lift$ putStrLn$ " [codespeed] Got response from server:\n"++show x
return ()
renderJSONResult :: CodespeedConfig -> BenchmarkResult -> String
renderJSONResult CodespeedConfig{projName} benchRes =
_ MEDIANTIME_PRODUCTIVITY _ MAXTIME_PRODUCTIVITY _ ALLTIMES _
_ COMPILER _ COMPILE_FLAGS _
_ BENCH_FILE _ UNAME _ PROCESSOR _ TOPOLOGY _ GIT_BRANCH _ GIT_HASH
_ _ ALLJITTIMES _ CUSTOM
simpleFormat
[
( " project " , S " MyProject2 " )
, ( " executable " , S " myexe 04 32bits " )
, ( " commitid " , S " 8 " )
, ( " result_value " , D 2500.1 )
("project", S projName)
, ("executable", S exec)
, ("benchmark", S bench)
, ("commitid", S _GIT_HASH)
Expecting ' , ' delimiter : line 1 column 235 ( char 234 )
Exception Location : /opt / python/2.7.8 / lib / python2.7 / json / decoder.py in raw_decode , line 382
, ("environment", S _HOSTNAME)
, ("result_value", D _MEDIANTIME)
, ("branch", S _GIT_BRANCH)
: Question : are and min the * observed * max and min presumably ?
]
where
Populate the CodeSpeed fields using the HSBencher fields :
BenchmarkResult{..} = benchRes
exec = combine $ [_VARIANT] ++ if _THREADS==0 then []
else [show _THREADS ++ "T"]
bench = combine [_PROGNAME, unwords _ARGS]
| This is a hacky way to pack multiple fields into one field of the
combine :: [String] -> String
combine fields =
let fields' = filter (not . null) fields in
L.concat (L.intersperse "|" fields')
data RHS = S String | D Double
simpleFormat :: [(String,RHS)] -> String
simpleFormat prs = "json=[{" ++ bod ++"}]"
where
bod = L.concat $ L.intersperse ", " $ L.map fn prs
fn (l,r) = show l ++ ": " ++ rhs r
rhs (S s) = show s
rhs (D d) = show d
| The type of Codespeed table plugins . Currently this is a singleton type ; there is
really only one Codespeed plugin .
data CodespeedPlug = CodespeedPlug
deriving (Eq,Show,Ord,Read)
instance Plugin CodespeedPlug where
type PlugConf CodespeedPlug = CodespeedConfig
type PlugFlag CodespeedPlug = CodespeedCmdLnFlag
plugName _ = "codespeed"
plugCmdOpts _ = codespeed_cli_options
plugUploadRow p cfg row = runReaderT (uploadBenchResult row) cfg
plugInitialize p gconf = do
putStrLn " [codespeed] Codespeed table plugin initializing.. (which is a NOOP)"
return gconf
foldFlags p flgs cnf0 =
foldr ($) cnf0 (map doFlag flgs)
where
doFlag (CodespeedURL url) r = r { codespeedURL = url}
doFlag (CodespeedProjName nm) r = r { projName = nm }
theEnv :: [(String,String)]
theEnv = unsafePerformIO getEnvironment
codespeed_cli_options :: (String, [OptDescr CodespeedCmdLnFlag])
codespeed_cli_options =
("Codespeed Table Options:",
[ Option [] ["codespeed"] (ReqArg CodespeedURL "URL")
"specify the root URL of the Codespeed installation"
, Option [] ["projname"] (ReqArg CodespeedProjName "NAME")
"specify which Codespeed Project receives the uploaded results"
])
|
fa53ac87fbd757b3357287a52c5dc32fdce19753fba2ed1917c2ea4405a8653e | sky-big/RabbitMQ | rabbit_channel_sup.erl | The contents of this file are subject to the Mozilla Public License
%% Version 1.1 (the "License"); you may not use this file except in
%% compliance with the License. You may obtain a copy of the License
%% at /
%%
Software distributed under the License is distributed on an " AS IS "
%% basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
%% the License for the specific language governing rights and
%% limitations under the License.
%%
The Original Code is RabbitMQ .
%%
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2007 - 2014 GoPivotal , Inc. All rights reserved .
%%
-module(rabbit_channel_sup).
-behaviour(supervisor2).
-export([start_link/1]).
-export([init/1]).
-include("rabbit.hrl").
%%----------------------------------------------------------------------------
-ifdef(use_specs).
-export_type([start_link_args/0]).
-type(start_link_args() ::
{'tcp', rabbit_net:socket(), rabbit_channel:channel_number(),
non_neg_integer(), pid(), string(), rabbit_types:protocol(),
rabbit_types:user(), rabbit_types:vhost(), rabbit_framing:amqp_table(),
pid()} |
{'direct', rabbit_channel:channel_number(), pid(), string(),
rabbit_types:protocol(), rabbit_types:user(), rabbit_types:vhost(),
rabbit_framing:amqp_table(), pid()}).
-spec(start_link/1 :: (start_link_args()) -> {'ok', pid(), {pid(), any()}}).
-endif.
%%----------------------------------------------------------------------------
%% rabbit_channel_sup监督进程的启动,然后在rabbit_channel_sup监督进程下启动rabbit_limiter进程,rabbit_writer进程,rabbit_channel进程
start_link({tcp, Sock, Channel, FrameMax, ReaderPid, ConnName, Protocol, User,
VHost, Capabilities, Collector}) ->
%% 先启动rabbit_channel_sup监督进程,然后在rabbit_channel_sup监督进程下启动rabbit_writer进程和rabbit_limiter进程
{ok, SupPid} = supervisor2:start_link(
?MODULE, {tcp, Sock, Channel, FrameMax,
ReaderPid, Protocol, {ConnName, Channel}}),
%% 从rabbit_channel_sup监督进程中取得rabbit_limiter进程的Pid
[LimiterPid] = supervisor2:find_child(SupPid, limiter),
从rabbit_channel_sup监督进程中取得rabbit_writer进程的Pid
[WriterPid] = supervisor2:find_child(SupPid, writer),
%% rabbit_channel_sup监督进程下启动rabbit_channel进程
{ok, ChannelPid} =
supervisor2:start_child(
SupPid,
{channel, {rabbit_channel, start_link,
[Channel, ReaderPid, WriterPid, ReaderPid, ConnName,
Protocol, User, VHost, Capabilities, Collector,
LimiterPid]},
intrinsic, ?MAX_WAIT, worker, [rabbit_channel]}),
{ok, AState} = rabbit_command_assembler:init(Protocol),
{ok, SupPid, {ChannelPid, AState}};
start_link({direct, Channel, ClientChannelPid, ConnPid, ConnName, Protocol,
User, VHost, Capabilities, Collector}) ->
{ok, SupPid} = supervisor2:start_link(
?MODULE, {direct, {ConnName, Channel}}),
[LimiterPid] = supervisor2:find_child(SupPid, limiter),
{ok, ChannelPid} =
supervisor2:start_child(
SupPid,
{channel, {rabbit_channel, start_link,
[Channel, ClientChannelPid, ClientChannelPid, ConnPid,
ConnName, Protocol, User, VHost, Capabilities, Collector,
LimiterPid]},
intrinsic, ?MAX_WAIT, worker, [rabbit_channel]}),
{ok, SupPid, {ChannelPid, none}}.
%%----------------------------------------------------------------------------
init(Type) ->
{ok, {{one_for_all, 0, 1}, child_specs(Type)}}.
%% 启动rabbit_writer进程
child_specs({tcp, Sock, Channel, FrameMax, ReaderPid, Protocol, Identity}) ->
[{writer, {rabbit_writer, start_link,
[Sock, Channel, FrameMax, Protocol, ReaderPid, Identity, true]},
intrinsic, ?MAX_WAIT, worker, [rabbit_writer]}
| child_specs({direct, Identity})];
%% 启动rabbit_limiter进程
child_specs({direct, Identity}) ->
[{limiter, {rabbit_limiter, start_link, [Identity]},
transient, ?MAX_WAIT, worker, [rabbit_limiter]}].
| null | https://raw.githubusercontent.com/sky-big/RabbitMQ/d7a773e11f93fcde4497c764c9fa185aad049ce2/src/rabbit_channel_sup.erl | erlang | Version 1.1 (the "License"); you may not use this file except in
compliance with the License. You may obtain a copy of the License
at /
basis, WITHOUT WARRANTY OF ANY KIND, either express or implied. See
the License for the specific language governing rights and
limitations under the License.
----------------------------------------------------------------------------
----------------------------------------------------------------------------
rabbit_channel_sup监督进程的启动,然后在rabbit_channel_sup监督进程下启动rabbit_limiter进程,rabbit_writer进程,rabbit_channel进程
先启动rabbit_channel_sup监督进程,然后在rabbit_channel_sup监督进程下启动rabbit_writer进程和rabbit_limiter进程
从rabbit_channel_sup监督进程中取得rabbit_limiter进程的Pid
rabbit_channel_sup监督进程下启动rabbit_channel进程
----------------------------------------------------------------------------
启动rabbit_writer进程
启动rabbit_limiter进程 | The contents of this file are subject to the Mozilla Public License
Software distributed under the License is distributed on an " AS IS "
The Original Code is RabbitMQ .
The Initial Developer of the Original Code is GoPivotal , Inc.
Copyright ( c ) 2007 - 2014 GoPivotal , Inc. All rights reserved .
-module(rabbit_channel_sup).
-behaviour(supervisor2).
-export([start_link/1]).
-export([init/1]).
-include("rabbit.hrl").
-ifdef(use_specs).
-export_type([start_link_args/0]).
-type(start_link_args() ::
{'tcp', rabbit_net:socket(), rabbit_channel:channel_number(),
non_neg_integer(), pid(), string(), rabbit_types:protocol(),
rabbit_types:user(), rabbit_types:vhost(), rabbit_framing:amqp_table(),
pid()} |
{'direct', rabbit_channel:channel_number(), pid(), string(),
rabbit_types:protocol(), rabbit_types:user(), rabbit_types:vhost(),
rabbit_framing:amqp_table(), pid()}).
-spec(start_link/1 :: (start_link_args()) -> {'ok', pid(), {pid(), any()}}).
-endif.
start_link({tcp, Sock, Channel, FrameMax, ReaderPid, ConnName, Protocol, User,
VHost, Capabilities, Collector}) ->
{ok, SupPid} = supervisor2:start_link(
?MODULE, {tcp, Sock, Channel, FrameMax,
ReaderPid, Protocol, {ConnName, Channel}}),
[LimiterPid] = supervisor2:find_child(SupPid, limiter),
从rabbit_channel_sup监督进程中取得rabbit_writer进程的Pid
[WriterPid] = supervisor2:find_child(SupPid, writer),
{ok, ChannelPid} =
supervisor2:start_child(
SupPid,
{channel, {rabbit_channel, start_link,
[Channel, ReaderPid, WriterPid, ReaderPid, ConnName,
Protocol, User, VHost, Capabilities, Collector,
LimiterPid]},
intrinsic, ?MAX_WAIT, worker, [rabbit_channel]}),
{ok, AState} = rabbit_command_assembler:init(Protocol),
{ok, SupPid, {ChannelPid, AState}};
start_link({direct, Channel, ClientChannelPid, ConnPid, ConnName, Protocol,
User, VHost, Capabilities, Collector}) ->
{ok, SupPid} = supervisor2:start_link(
?MODULE, {direct, {ConnName, Channel}}),
[LimiterPid] = supervisor2:find_child(SupPid, limiter),
{ok, ChannelPid} =
supervisor2:start_child(
SupPid,
{channel, {rabbit_channel, start_link,
[Channel, ClientChannelPid, ClientChannelPid, ConnPid,
ConnName, Protocol, User, VHost, Capabilities, Collector,
LimiterPid]},
intrinsic, ?MAX_WAIT, worker, [rabbit_channel]}),
{ok, SupPid, {ChannelPid, none}}.
init(Type) ->
{ok, {{one_for_all, 0, 1}, child_specs(Type)}}.
child_specs({tcp, Sock, Channel, FrameMax, ReaderPid, Protocol, Identity}) ->
[{writer, {rabbit_writer, start_link,
[Sock, Channel, FrameMax, Protocol, ReaderPid, Identity, true]},
intrinsic, ?MAX_WAIT, worker, [rabbit_writer]}
| child_specs({direct, Identity})];
child_specs({direct, Identity}) ->
[{limiter, {rabbit_limiter, start_link, [Identity]},
transient, ?MAX_WAIT, worker, [rabbit_limiter]}].
|
745996c2071838f15476a5158c3c1ec106c6cca3b41366891d7620d4177b9ab5 | purescript/purescript | AsHtml.hs |
| Functions for rendering generated documentation from PureScript code as
-- HTML.
module Language.PureScript.Docs.AsHtml (
HtmlOutput(..),
HtmlOutputModule(..),
HtmlRenderContext(..),
nullRenderContext,
packageAsHtml,
moduleAsHtml,
makeFragment,
renderMarkdown
) where
import Prelude
import Control.Category ((>>>))
import Control.Monad (unless)
import Data.Bifunctor (bimap)
import Data.Char (isUpper)
import Data.Either (isRight)
import qualified Data.List.NonEmpty as NE
import Data.Maybe (fromMaybe)
import Data.Foldable (for_)
import Data.String (fromString)
import Data.Text (Text)
import qualified Data.Text as T
import Text.Blaze.Html5 as H hiding (map)
import qualified Text.Blaze.Html5.Attributes as A
import qualified Cheapskate
import qualified Language.PureScript as P
import Language.PureScript.Docs.Types
import Language.PureScript.Docs.RenderedCode hiding (sp)
import qualified Language.PureScript.Docs.Render as Render
import qualified Language.PureScript.CST as CST
-- | The overall HTML output for a package: an index grouped by an
-- optional initial character, plus the rendered output for each module.
data HtmlOutput a = HtmlOutput
{ htmlIndex :: [(Maybe Char, a)]
, htmlModules :: [(P.ModuleName, HtmlOutputModule a)]
}
deriving (Show, Functor)
-- | Rendered output for a single module: the module's own (local)
-- declarations, plus one rendering per re-exporting source module.
data HtmlOutputModule a = HtmlOutputModule
{ htmlOutputModuleLocals :: a
, htmlOutputModuleReExports :: [(InPackage P.ModuleName, a)]
}
deriving (Show, Functor)
-- | Hooks controlling how hyperlinks are produced while rendering:
-- how to construct a documentation link for a name, how to turn such a
-- link into a URL, and how to link a declaration to its source span
-- (if at all).
data HtmlRenderContext = HtmlRenderContext
{ buildDocLink :: Namespace -> Text -> ContainingModule -> Maybe DocLink
, renderDocLink :: DocLink -> Text
, renderSourceLink :: P.SourceSpan -> Maybe Text
}
-- |
-- An HtmlRenderContext for when you don't want to render any links.
nullRenderContext :: HtmlRenderContext
nullRenderContext = HtmlRenderContext
{ buildDocLink = const (const (const Nothing))
, renderDocLink = const ""
, renderSourceLink = const Nothing
}
-- | Render every module of a package to HTML. The index component is
-- currently left empty; each module is rendered with the link context
-- chosen for it by the supplied lookup function.
packageAsHtml
  :: (InPackage P.ModuleName -> Maybe HtmlRenderContext)
  -> Package a
  -> HtmlOutput Html
packageAsHtml getHtmlCtx Package{..} =
  HtmlOutput [] (map (moduleAsHtml getHtmlCtx) pkgModules)
-- | Render a single module's documentation. Returns the module name
-- together with HTML for its local declarations and for each batch of
-- re-exported declarations.
moduleAsHtml
:: (InPackage P.ModuleName -> Maybe HtmlRenderContext)
-> Module
-> (P.ModuleName, HtmlOutputModule Html)
moduleAsHtml getHtmlCtx Module{..} = (modName, HtmlOutputModule modHtml reexports)
where
modHtml = do
-- Local declarations resolve links relative to this module.
let r = fromMaybe nullRenderContext $ getHtmlCtx (Local modName)
in do
for_ modComments renderMarkdown
for_ modDeclarations (declAsHtml r)
reexports =
flip map modReExports $ \(pkg, decls) ->
-- Re-exported declarations resolve links relative to the
-- package/module they originally come from.
let r = fromMaybe nullRenderContext $ getHtmlCtx pkg
in (pkg, foldMap (declAsHtml r) decls)
-- renderIndex :: LinksContext -> [(Maybe Char, Html)]
-- renderIndex LinksContext{..} = go
--   where
--   go = takeLocals
--        >>> groupIndex getIndex renderEntry
--        >>> map (second (ul . mconcat))
--
--   getIndex (_, title_) = do
--     c <- textHeadMay title_
--     guard (toUpper c `elem` ['A'..'Z'])
--     pure c
--
--   textHeadMay t =
--     case T.length t of
--       0 -> Nothing
--       _ -> Just (T.index t 0)
--
--   renderEntry (mn, title_) =
--     li $ do
--       let url = T.pack (filePathFor mn `relativeTo` "index") <> "#" <> title_
--       code $
--         a ! A.href (v url) $ text title_
--       sp
--       text ("(" <> P.runModuleName mn <> ")")
--
-- groupIndex :: Ord i => (a -> Maybe i) -> (a -> b) -> [a] -> [(Maybe i, [b])]
-- groupIndex f g =
--   map (second DList.toList) . M.toList . foldr go' M.empty . sortBy (comparing f)
--   where
--   go' x = insertOrAppend (f x) (g x)
--   insertOrAppend idx val m =
--     let cur = M.findWithDefault DList.empty idx m
--         new = DList.snoc cur val
--     in M.insert idx new m
-- | Render one top-level declaration: an anchored heading with an
-- optional "Source" link, followed by the (kind) signature, markdown
-- comments, and the child declarations grouped by kind.
declAsHtml :: HtmlRenderContext -> Declaration -> Html
declAsHtml r d@Declaration{..} = do
let declFragment = makeFragment (declInfoNamespace declInfo) declTitle
-- The id attribute is the fragment without its leading '#'.
H.div ! A.class_ "decl" ! A.id (v (T.drop 1 declFragment)) $ do
h3 ! A.class_ "decl__title clearfix" $ do
a ! A.class_ "decl__anchor" ! A.href (v declFragment) $ "#"
H.span $ text declTitle
text " " -- prevent browser from treating
-- declTitle + linkToSource as one word
for_ declSourceSpan (linkToSource r)
H.div ! A.class_ "decl__body" $ do
case declInfo of
AliasDeclaration fixity alias_ ->
renderAlias fixity alias_
_ -> do
pre ! A.class_ "decl__signature" $ do
-- A kind signature, when present, is shown above the
-- declaration itself.
for_ declKind $ \kindInfo -> do
code ! A.class_ "decl__kind" $ do
codeAsHtml r (Render.renderKindSig declTitle kindInfo)
code $ codeAsHtml r (Render.renderDeclaration d)
for_ declComments renderMarkdown
let (instances, dctors, members) = partitionChildren declChildren
unless (null dctors) $ do
h4 "Constructors"
renderChildren r dctors
unless (null members) $ do
h4 "Members"
renderChildren r members
unless (null instances) $ do
h4 "Instances"
renderChildren r instances
where
-- Render a "Source" link only when the render context can produce
-- a URL for the declaration's source span.
linkToSource :: HtmlRenderContext -> P.SourceSpan -> Html
linkToSource ctx srcspan =
maybe (return ()) go (renderSourceLink ctx srcspan)
where
go href =
H.span ! A.class_ "decl__source" $
a ! A.href (v href) $ text "Source"
-- | Render a list of child declarations (constructors, members or
-- instances) as an unordered list, each item anchored by its fragment
-- id and followed by its own markdown comments.
renderChildren :: HtmlRenderContext -> [ChildDeclaration] -> Html
renderChildren _ [] = return ()
renderChildren r xs = ul $ mapM_ item xs
where
item decl =
li ! A.id (v (T.drop 1 (fragment decl))) $ do
renderCode decl
for_ (cdeclComments decl) $ \coms ->
H.div ! A.class_ "decl__child_comments" $ renderMarkdown coms
fragment decl = makeFragment (childDeclInfoNamespace (cdeclInfo decl)) (cdeclTitle decl)
renderCode = code . codeAsHtml r . Render.renderChildDeclaration
-- | Render a RenderedCode value, turning each element into styled
-- HTML and resolving symbol links through the render context.
codeAsHtml :: HtmlRenderContext -> RenderedCode -> Html
codeAsHtml r = outputWith elemAsHtml
where
elemAsHtml e = case e of
Syntax x ->
withClass "syntax" (text x)
Keyword x ->
withClass "keyword" (text x)
Space ->
text " "
Symbol ns name link_ ->
case link_ of
Link mn ->
let
class_ =
if startsWithUpper name then "ctor" else "ident"
-- Operators are linked via their parenthesised (and, at
-- the type level, "type"-prefixed) form.
target
| isOp name =
if ns == TypeLevel
then "type (" <> name <> ")"
else "(" <> name <> ")"
| otherwise = name
in
linkToDecl ns target mn (withClass class_ (text name))
NoLink ->
text name
Role role ->
case role of
"nominal" -> renderRole describeNominal "decl__role_nominal"
"phantom" -> renderRole describePhantom "decl__role_phantom"
-- representational is intentionally not rendered
"representational" -> toHtml ("" :: Text)
x -> P.internalError $ "codeAsHtml: unknown value for role annotation: '" <> T.unpack x <> "'"
where
-- A role is rendered as a subscript with hover text explaining
-- it, linking out to the documentation page about roles.
renderRole hoverTextContent className =
H.a ! A.href (v docRepoRolePage) ! A.target (v "_blank") ! A.class_ "decl__role" $ do
H.abbr ! A.class_ "decl__role_hover" ! A.title (v hoverTextContent) $ do
H.sub ! A.class_ className $ do
toHtml ("" :: Text)
-- NOTE(review): this URL appears to have been stripped by the
-- dataset extraction — confirm against the upstream source.
docRepoRolePage =
""
describeNominal =
"The 'nominal' role means this argument may not change when coercing the type."
describePhantom =
"The 'phantom' role means this argument can change freely when coercing the type."
linkToDecl = linkToDeclaration r
startsWithUpper :: Text -> Bool
startsWithUpper str = not (T.null str) && isUpper (T.index str 0)
-- A name is an operator iff it parses as one.
isOp = isRight . runParser CST.parseOperator
runParser :: CST.Parser a -> Text -> Either String a
runParser p' =
bimap (CST.prettyPrintError . NE.head) snd
. CST.runTokenParser p'
. CST.lex
-- | Wrap some HTML in an anchor for the given documentation link,
-- using the fully qualified name as the anchor's title attribute.
renderLink :: HtmlRenderContext -> DocLink -> Html -> Html
renderLink r link_@DocLink{..} =
a ! A.href (v (renderDocLink r link_ <> fragmentFor link_))
! A.title (v fullyQualifiedName)
where
fullyQualifiedName =
P.runModuleName modName <> "." <> linkTitle
-- The defining module, regardless of where the link points.
modName = case linkLocation of
LocalModule m -> m
DepsModule _ _ m -> m
BuiltinModule m -> m
-- | Build the URL fragment for a declaration: "#t:" for type-level
-- names, "#v:" for value-level names, followed by the (currently
-- unescaped) title.
makeFragment :: Namespace -> Text -> Text
makeFragment ns title = prefix <> escape title
  where
  prefix = case ns of
    TypeLevel -> "#t:"
    ValueLevel -> "#v:"
  -- TODO
  escape = id
-- | The URL fragment identifying the target of a documentation link.
fragmentFor :: DocLink -> Text
fragmentFor link_ =
  makeFragment (linkNamespace link_) (linkTitle link_)
-- | Try to link a rendered name to its declaration: if the render
-- context can build a DocLink for it, wrap the HTML in an anchor;
-- otherwise leave the HTML unchanged.
linkToDeclaration ::
HtmlRenderContext ->
Namespace ->
Text ->
ContainingModule ->
Html ->
Html
linkToDeclaration r ns target containMn =
maybe id (renderLink r) (buildDocLink r ns target containMn)
-- | Render an operator fixity alias as a paragraph describing the
-- aliased name, its associativity and its precedence.
renderAlias :: P.Fixity -> FixityAlias -> Html
renderAlias (P.Fixity associativity precedence) alias_ =
p $ do
-- TODO: Render a link
toHtml $ "Operator alias for " <> P.showQualified showAliasName alias_ <> " "
em $
text ("(" <> associativityStr <> " / precedence " <> T.pack (show precedence) <> ")")
where
-- An alias targets either a data constructor (Left) or, at the
-- value/type level, an identifier or proper name (Right).
showAliasName (Left valueAlias) = P.runProperName valueAlias
showAliasName (Right typeAlias) = case typeAlias of
(Left identifier) -> P.runIdent identifier
(Right properName) -> P.runProperName properName
associativityStr = case associativity of
P.Infixl -> "left-associative"
P.Infixr -> "right-associative"
P.Infix -> "non-associative"
-- | Render Markdown to HTML. Safe for untrusted input. Relative links are
-- | removed.
renderMarkdown :: Text -> H.Html
renderMarkdown =
H.toMarkup . removeRelativeLinks . Cheapskate.markdown opts
where
-- Raw HTML in the input is escaped rather than passed through.
opts = Cheapskate.def { Cheapskate.allowRawHtml = False }
-- | Strip relative links from a Markdown document, replacing each one
-- with its link text; absolute links are kept as-is.
removeRelativeLinks :: Cheapskate.Doc -> Cheapskate.Doc
removeRelativeLinks = Cheapskate.walk go
where
go :: Cheapskate.Inlines -> Cheapskate.Inlines
go = (>>= stripRelatives)
stripRelatives :: Cheapskate.Inline -> Cheapskate.Inlines
stripRelatives (Cheapskate.Link contents_ href _)
| isRelativeURI href = contents_
stripRelatives other = pure other
-- | Tests for a ':' character in the first segment of a URI.
--
-- See Section 4.2 of RFC 3986:
-- https://tools.ietf.org/html/rfc3986#section-4.2
--
-- >>> isRelativeURI "http://example.com/" == False
-- >>> isRelativeURI "mailto:me@example.com" == False
-- >>> isRelativeURI "foo/bar" == True
-- >>> isRelativeURI "/bar" == True
-- >>> isRelativeURI "./bar" == True
isRelativeURI :: Text -> Bool
isRelativeURI =
T.takeWhile (/= '/') >>> T.all (/= ':')
-- Shorthand for converting Text to a blaze AttributeValue.
v :: Text -> AttributeValue
v = toValue
-- Wrap some HTML in a span carrying the given CSS class.
withClass :: String -> Html -> Html
withClass cls inner = (H.span ! A.class_ (fromString cls)) inner
-- | Split child declarations into (instances, data constructors,
-- type class members), preserving the input order within each group.
partitionChildren ::
  [ChildDeclaration] ->
  ([ChildDeclaration], [ChildDeclaration], [ChildDeclaration])
partitionChildren = foldr place ([], [], [])
  where
  -- Consing while folding from the right keeps each group in the
  -- order the children appeared in the input.
  place cd (instances, dctors, members) =
    case cdeclInfo cd of
      ChildInstance _ _      -> (cd : instances, dctors, members)
      ChildDataConstructor _ -> (instances, cd : dctors, members)
      ChildTypeClassMember _ -> (instances, dctors, cd : members)
| null | https://raw.githubusercontent.com/purescript/purescript/2c78eb614cb1f3556737900e57d0e7395158791e/src/Language/PureScript/Docs/AsHtml.hs | haskell | HTML.
|
where
go = takeLocals
getIndex (_, title_) = do
c <- textHeadMay title_
guard (toUpper c `elem` ['A'..'Z'])
pure c
textHeadMay t =
0 -> Nothing
_ -> Just (T.index t 0)
renderEntry (mn, title_) =
li $ do
let url = T.pack (filePathFor mn `relativeTo` "index") <> "#" <> title_
code $
sp
text ("(" <> P.runModuleName mn <> ")")
groupIndex f g =
where
go' x = insertOrAppend (f x) (g x)
insertOrAppend idx val m =
new = DList.snoc cur val
prevent browser from treating
representational is intentionally not rendered
TODO: Render a link
| Render Markdown to HTML. Safe for untrusted input. Relative links are
| removed.
See Section 4.2 of RFC 3986:
>>> isRelativeURI "/" == False
>>> isRelativeURI "mailto:" == False
>>> isRelativeURI "foo/bar" == True
>>> isRelativeURI "/bar" == True |
| Functions for rendering generated documentation from PureScript code as
module Language.PureScript.Docs.AsHtml (
HtmlOutput(..),
HtmlOutputModule(..),
HtmlRenderContext(..),
nullRenderContext,
packageAsHtml,
moduleAsHtml,
makeFragment,
renderMarkdown
) where
import Prelude
import Control.Category ((>>>))
import Control.Monad (unless)
import Data.Bifunctor (bimap)
import Data.Char (isUpper)
import Data.Either (isRight)
import qualified Data.List.NonEmpty as NE
import Data.Maybe (fromMaybe)
import Data.Foldable (for_)
import Data.String (fromString)
import Data.Text (Text)
import qualified Data.Text as T
import Text.Blaze.Html5 as H hiding (map)
import qualified Text.Blaze.Html5.Attributes as A
import qualified Cheapskate
import qualified Language.PureScript as P
import Language.PureScript.Docs.Types
import Language.PureScript.Docs.RenderedCode hiding (sp)
import qualified Language.PureScript.Docs.Render as Render
import qualified Language.PureScript.CST as CST
data HtmlOutput a = HtmlOutput
{ htmlIndex :: [(Maybe Char, a)]
, htmlModules :: [(P.ModuleName, HtmlOutputModule a)]
}
deriving (Show, Functor)
data HtmlOutputModule a = HtmlOutputModule
{ htmlOutputModuleLocals :: a
, htmlOutputModuleReExports :: [(InPackage P.ModuleName, a)]
}
deriving (Show, Functor)
data HtmlRenderContext = HtmlRenderContext
{ buildDocLink :: Namespace -> Text -> ContainingModule -> Maybe DocLink
, renderDocLink :: DocLink -> Text
, renderSourceLink :: P.SourceSpan -> Maybe Text
}
An HtmlRenderContext for when you do n't want to render any links .
nullRenderContext :: HtmlRenderContext
nullRenderContext = HtmlRenderContext
{ buildDocLink = const (const (const Nothing))
, renderDocLink = const ""
, renderSourceLink = const Nothing
}
packageAsHtml
:: (InPackage P.ModuleName -> Maybe HtmlRenderContext)
-> Package a
-> HtmlOutput Html
packageAsHtml getHtmlCtx Package{..} =
HtmlOutput indexFile modules
where
indexFile = []
modules = moduleAsHtml getHtmlCtx <$> pkgModules
moduleAsHtml
:: (InPackage P.ModuleName -> Maybe HtmlRenderContext)
-> Module
-> (P.ModuleName, HtmlOutputModule Html)
moduleAsHtml getHtmlCtx Module{..} = (modName, HtmlOutputModule modHtml reexports)
where
modHtml = do
let r = fromMaybe nullRenderContext $ getHtmlCtx (Local modName)
in do
for_ modComments renderMarkdown
for_ modDeclarations (declAsHtml r)
reexports =
flip map modReExports $ \(pkg, decls) ->
let r = fromMaybe nullRenderContext $ getHtmlCtx pkg
in (pkg, foldMap (declAsHtml r) decls)
renderIndex : : LinksContext - > [ ( Maybe , Html ) ]
renderIndex LinksContext { .. } = go
> > > renderEntry
> > > map ( second ( ul . ) )
case t of
a ! ( v url ) $ text title _
groupIndex : : i = > ( a - > Maybe i ) - > ( a - > b ) - > [ a ] - > [ ( Maybe i , [ b ] ) ]
map ( second DList.toList ) . M.toList . foldr go ' M.empty . ( comparing f )
let cur = M.findWithDefault DList.empty idx m
in M.insert idx new m
declAsHtml :: HtmlRenderContext -> Declaration -> Html
declAsHtml r d@Declaration{..} = do
let declFragment = makeFragment (declInfoNamespace declInfo) declTitle
H.div ! A.class_ "decl" ! A.id (v (T.drop 1 declFragment)) $ do
h3 ! A.class_ "decl__title clearfix" $ do
a ! A.class_ "decl__anchor" ! A.href (v declFragment) $ "#"
H.span $ text declTitle
declTitle + linkToSource as one word
for_ declSourceSpan (linkToSource r)
H.div ! A.class_ "decl__body" $ do
case declInfo of
AliasDeclaration fixity alias_ ->
renderAlias fixity alias_
_ -> do
pre ! A.class_ "decl__signature" $ do
for_ declKind $ \kindInfo -> do
code ! A.class_ "decl__kind" $ do
codeAsHtml r (Render.renderKindSig declTitle kindInfo)
code $ codeAsHtml r (Render.renderDeclaration d)
for_ declComments renderMarkdown
let (instances, dctors, members) = partitionChildren declChildren
unless (null dctors) $ do
h4 "Constructors"
renderChildren r dctors
unless (null members) $ do
h4 "Members"
renderChildren r members
unless (null instances) $ do
h4 "Instances"
renderChildren r instances
where
linkToSource :: HtmlRenderContext -> P.SourceSpan -> Html
linkToSource ctx srcspan =
maybe (return ()) go (renderSourceLink ctx srcspan)
where
go href =
H.span ! A.class_ "decl__source" $
a ! A.href (v href) $ text "Source"
renderChildren :: HtmlRenderContext -> [ChildDeclaration] -> Html
renderChildren _ [] = return ()
renderChildren r xs = ul $ mapM_ item xs
where
item decl =
li ! A.id (v (T.drop 1 (fragment decl))) $ do
renderCode decl
for_ (cdeclComments decl) $ \coms ->
H.div ! A.class_ "decl__child_comments" $ renderMarkdown coms
fragment decl = makeFragment (childDeclInfoNamespace (cdeclInfo decl)) (cdeclTitle decl)
renderCode = code . codeAsHtml r . Render.renderChildDeclaration
codeAsHtml :: HtmlRenderContext -> RenderedCode -> Html
codeAsHtml r = outputWith elemAsHtml
where
elemAsHtml e = case e of
Syntax x ->
withClass "syntax" (text x)
Keyword x ->
withClass "keyword" (text x)
Space ->
text " "
Symbol ns name link_ ->
case link_ of
Link mn ->
let
class_ =
if startsWithUpper name then "ctor" else "ident"
target
| isOp name =
if ns == TypeLevel
then "type (" <> name <> ")"
else "(" <> name <> ")"
| otherwise = name
in
linkToDecl ns target mn (withClass class_ (text name))
NoLink ->
text name
Role role ->
case role of
"nominal" -> renderRole describeNominal "decl__role_nominal"
"phantom" -> renderRole describePhantom "decl__role_phantom"
"representational" -> toHtml ("" :: Text)
x -> P.internalError $ "codeAsHtml: unknown value for role annotation: '" <> T.unpack x <> "'"
where
renderRole hoverTextContent className =
H.a ! A.href (v docRepoRolePage) ! A.target (v "_blank") ! A.class_ "decl__role" $ do
H.abbr ! A.class_ "decl__role_hover" ! A.title (v hoverTextContent) $ do
H.sub ! A.class_ className $ do
toHtml ("" :: Text)
docRepoRolePage =
""
describeNominal =
"The 'nominal' role means this argument may not change when coercing the type."
describePhantom =
"The 'phantom' role means this argument can change freely when coercing the type."
linkToDecl = linkToDeclaration r
startsWithUpper :: Text -> Bool
startsWithUpper str = not (T.null str) && isUpper (T.index str 0)
isOp = isRight . runParser CST.parseOperator
runParser :: CST.Parser a -> Text -> Either String a
runParser p' =
bimap (CST.prettyPrintError . NE.head) snd
. CST.runTokenParser p'
. CST.lex
renderLink :: HtmlRenderContext -> DocLink -> Html -> Html
renderLink r link_@DocLink{..} =
a ! A.href (v (renderDocLink r link_ <> fragmentFor link_))
! A.title (v fullyQualifiedName)
where
fullyQualifiedName =
P.runModuleName modName <> "." <> linkTitle
modName = case linkLocation of
LocalModule m -> m
DepsModule _ _ m -> m
BuiltinModule m -> m
makeFragment :: Namespace -> Text -> Text
makeFragment ns = (prefix <>) . escape
where
prefix = case ns of
TypeLevel -> "#t:"
ValueLevel -> "#v:"
TODO
escape = id
fragmentFor :: DocLink -> Text
fragmentFor l = makeFragment (linkNamespace l) (linkTitle l)
linkToDeclaration ::
HtmlRenderContext ->
Namespace ->
Text ->
ContainingModule ->
Html ->
Html
linkToDeclaration r ns target containMn =
maybe id (renderLink r) (buildDocLink r ns target containMn)
renderAlias :: P.Fixity -> FixityAlias -> Html
renderAlias (P.Fixity associativity precedence) alias_ =
p $ do
toHtml $ "Operator alias for " <> P.showQualified showAliasName alias_ <> " "
em $
text ("(" <> associativityStr <> " / precedence " <> T.pack (show precedence) <> ")")
where
showAliasName (Left valueAlias) = P.runProperName valueAlias
showAliasName (Right typeAlias) = case typeAlias of
(Left identifier) -> P.runIdent identifier
(Right properName) -> P.runProperName properName
associativityStr = case associativity of
P.Infixl -> "left-associative"
P.Infixr -> "right-associative"
P.Infix -> "non-associative"
renderMarkdown :: Text -> H.Html
renderMarkdown =
H.toMarkup . removeRelativeLinks . Cheapskate.markdown opts
where
opts = Cheapskate.def { Cheapskate.allowRawHtml = False }
removeRelativeLinks :: Cheapskate.Doc -> Cheapskate.Doc
removeRelativeLinks = Cheapskate.walk go
where
go :: Cheapskate.Inlines -> Cheapskate.Inlines
go = (>>= stripRelatives)
stripRelatives :: Cheapskate.Inline -> Cheapskate.Inlines
stripRelatives (Cheapskate.Link contents_ href _)
| isRelativeURI href = contents_
stripRelatives other = pure other
Tests for a ' : ' character in the first segment of a URI .
#section-4.2
> > > isRelativeURI " ./bar " = = True
isRelativeURI :: Text -> Bool
isRelativeURI =
T.takeWhile (/= '/') >>> T.all (/= ':')
v :: Text -> AttributeValue
v = toValue
withClass :: String -> Html -> Html
withClass className = H.span ! A.class_ (fromString className)
partitionChildren ::
[ChildDeclaration] ->
([ChildDeclaration], [ChildDeclaration], [ChildDeclaration])
partitionChildren =
reverseAll . foldl go ([], [], [])
where
go (instances, dctors, members) rcd =
case cdeclInfo rcd of
ChildInstance _ _ -> (rcd : instances, dctors, members)
ChildDataConstructor _ -> (instances, rcd : dctors, members)
ChildTypeClassMember _ -> (instances, dctors, rcd : members)
reverseAll (xs, ys, zs) = (reverse xs, reverse ys, reverse zs)
|
f338214d2030b569a4daaa66d2961d5b5dccbde38740d7cd8ed657d16b901fdc | TheRiver/L-MATH | spline-interpolation.lisp | (in-package #:l-math)
;;; L-MATH: a library for simple linear algebra.
;;; Copyright (C) 2009-2012
;;;
;;; This program is free software: you can redistribute it and/or modify
;;; it under the terms of the GNU General Public License as published by
;;; the Free Software Foundation, either version 3 of the License, or
;;; (at your option) any later version.
;;;
;;; This program is distributed in the hope that it will be useful,
;;; but WITHOUT ANY WARRANTY; without even the implied warranty of
;;; MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
;;; GNU General Public License for more details.
;;;
;;; You should have received a copy of the GNU General Public License
;;; along with this program. If not, see <http://www.gnu.org/licenses/>.
;;;
;;; Additional permission under GNU GPL version 3 section 7
;;;
;;; Linking this library statically or dynamically with other modules is
;;; making a combined work based on this library. Thus, the terms and
;;; conditions of the GNU General Public License cover the whole
;;; combination.
;;;
;;; As a special exception, the copyright holders of this library give you
;;; permission to link this library with independent modules to produce an
;;; executable, regardless of the license terms of these independent
;;; modules, and to copy and distribute the resulting executable under
;;; terms of your choice, provided that you also meet, for each linked
;;; independent module, the terms and conditions of the license of that
;;; module. An independent module is a module which is not derived from or
;;; based on this library. If you modify this library, you may extend this
;;; exception to your version of the library, but you are not obligated to
;;; do so. If you do not wish to do so, delete this exception statement
;;; from your version.
(defgeneric spline-interpolation (points &key degree parametrisation close)
(:documentation "Returns a b-spline that interpolates the given
points. This is based on the discussion in sections 9.1 and 7.8 of
Farin's Curves and Surfaces for Computer Aided Geometric
Design. PARAMETRISATION should be one of :uniform, :centripetal,
or :chord-length. If CLOSE is true, then this will produce a closed
b-spline.")
(:method ((points list) &key
(degree (dimension (first points)))
(parametrisation :centripetal)
close)
;; At least two points are needed for interpolation to make sense.
(when (or (null points)
(null (second points)))
(error 'l-math-error :format-control "There must be two or more points in order to interpolate."))
(let* ((points (if (and close ; Here we want to make sure that the points form a closed shape.
(not (equivalent (first (last points))
(first points))))
(append points (list (first points)))
points)))
(labels ((make-first-point (points)
;; Phantom control point before the first: for closed curves,
;; wrap around to the second-to-last point; otherwise
;; extrapolate linearly past the start.
(cond
(close
(nth (- (length points) 2) points))
(t
(- (* 2 (first points))
(second points)))))
(make-last-point (points)
;; Phantom control point after the last, symmetric to
;; MAKE-FIRST-POINT.
(cond
(close
(second points))
(t
(+ (- (nth (- (length points) 2) points))
(* 2 (first (last points)))))))
(converge (spline)
;; Iteratively nudge the interior control points until the
;; spline passes through the requested points (within the
;; tolerance used by EQUIVALENT).
(let ((differences (mapcar #'(lambda (par point)
;; DIFFERENCES is the difference between the wanted
;; points and those the spline passes through.
(- point (evaluate spline par)))
(domain-knots spline)
points)))
(cond
((some #'(lambda (value)
(not (equivalent 0 (norm value))))
differences)
;; So we know we still need to move things slightly.
(let ((points (loop
for point in (rest (butlast (spline-geometry spline)))
for difference in differences
for i from 0
collect (+ point difference))))
(converge (make-instance 'b-spline
:degree degree
:knots (b-spline-knots spline)
:points (append (cons (make-first-point points)
points)
(list (make-last-point points)))))))
(t
spline)))))
;; Initial guess: use the data points themselves as control
;; points, bracketed by the two phantom end points, then iterate.
(converge (make-instance 'b-spline
:uniform (eq parametrisation :uniform)
:centripetal (eq parametrisation :centripetal)
:chord-length (eq parametrisation :chord-length)
:degree degree
:points (append (cons (make-first-point points) points)
(list (make-last-point points)))))))))
| null | https://raw.githubusercontent.com/TheRiver/L-MATH/81f43d21ea0bc75f949fbdfb3701afbab4183d38/spline-interpolation.lisp | lisp | L-MATH: a library for simple linear algebra.
This program is free software: you can redistribute it and/or modify
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
along with this program. If not, see </>.
Linking this library statically or dynamically with other modules is
making a combined work based on this library. Thus, the terms and
combination.
As a special exception, the copyright holders of this library give you
permission to link this library with independent modules to produce an
executable, regardless of the license terms of these independent
modules, and to copy and distribute the resulting executable under
terms of your choice, provided that you also meet, for each linked
independent module, the terms and conditions of the license of that
module. An independent module is a module which is not derived from or
based on this library. If you modify this library, you may extend this
exception to your version of the library, but you are not obligated to
do so. If you do not wish to do so, delete this exception statement
from your version.
Here we want to make sure that the points form a closed shape.
DIFFERENCES is the difference between the wanted
points and those the spline passes through.
So we know we still need to move things slightly. | (in-package #:l-math)
Copyright ( C ) 2009 - 2012
it under the terms of the GNU General Public License as published by
the Free Software Foundation , either version 3 of the License , or
You should have received a copy of the GNU General Public License
Additional permission under GNU GPL version 3 section 7
conditions of the GNU General Public License cover the whole
(defgeneric spline-interpolation (points &key degree parametrisation close)
(:documentation "Returns a b-spline that interpolates the given
points. This is based on the discussion in sections 9.1 and 7.8 of
Farin's Curves and Surfaces for Computer Aided Geometric
Design. PARAMETRISATION should be one of :uniform, :centripetal,
or :chord-length. If CLOSE is true, then this will produce a closed
b-spline.")
(:method ((points list) &key
(degree (dimension (first points)))
(parametrisation :centripetal)
close)
(when (or (null points)
(null (second points)))
(error 'l-math-error :format-control "There must be two or more points in order to interpolate."))
(not (equivalent (first (last points))
(first points))))
(append points (list (first points)))
points)))
(labels ((make-first-point (points)
(cond
(close
(nth (- (length points) 2) points))
(t
(- (* 2 (first points))
(second points)))))
(make-last-point (points)
(cond
(close
(second points))
(t
(+ (- (nth (- (length points) 2) points))
(* 2 (first (last points)))))))
(converge (spline)
(let ((differences (mapcar #'(lambda (par point)
(- point (evaluate spline par)))
(domain-knots spline)
points)))
(cond
((some #'(lambda (value)
(not (equivalent 0 (norm value))))
differences)
(let ((points (loop
for point in (rest (butlast (spline-geometry spline)))
for difference in differences
for i from 0
collect (+ point difference))))
(converge (make-instance 'b-spline
:degree degree
:knots (b-spline-knots spline)
:points (append (cons (make-first-point points)
points)
(list (make-last-point points)))))))
(t
spline)))))
(converge (make-instance 'b-spline
:uniform (eq parametrisation :uniform)
:centripetal (eq parametrisation :centripetal)
:chord-length (eq parametrisation :chord-length)
:degree degree
:points (append (cons (make-first-point points) points)
(list (make-last-point points)))))))))
|
70a75513516ef8c9f054ca4d55280e635d89a0bcfd2695a87cca69e7b0ba9834 | gigasquid/hyperdimensional-playground | core_test.clj | (ns hyperdimensional-playground.core-test
(:require [clojure.test :refer :all]
[hyperdimensional-playground.core :refer :all]))
;; Default failing placeholder generated by the Leiningen project
;; template ("FIXME, I fail."); intended to be replaced with real
;; assertions, so the deliberate failure is left as-is.
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| null | https://raw.githubusercontent.com/gigasquid/hyperdimensional-playground/ee83b9b38467f3ae60b82e70ec78db6563dbf17a/test/hyperdimensional_playground/core_test.clj | clojure | (ns hyperdimensional-playground.core-test
(:require [clojure.test :refer :all]
[hyperdimensional-playground.core :refer :all]))
(deftest a-test
(testing "FIXME, I fail."
(is (= 0 1))))
| |
a92c332816497f47e0e9e5132de93b0e265e3745535f873a8f5afe7d0ed2891c | Frama-C/Frama-C-snapshot | function_Froms.ml | (**************************************************************************)
(* *)
This file is part of Frama - C.
(* *)
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
(* alternatives) *)
(* *)
(* you can redistribute it and/or modify it under the terms of the GNU *)
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
(* *)
(* It is distributed in the hope that it will be useful, *)
(* but WITHOUT ANY WARRANTY; without even the implied warranty of *)
(* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *)
(* GNU Lesser General Public License for more details. *)
(* *)
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
(* *)
(**************************************************************************)
open Locations
(* Dependencies of a computed value, split into data dependencies
   (zones whose values flow directly into the result) and indirect
   dependencies (zones involved in computing it, e.g. through
   addresses or conditions). *)
module Deps =
struct
type deps = {
data: Zone.t;
indirect: Zone.t;
}
(* Collapse the data/indirect distinction into a single zone. *)
let to_zone {data; indirect} = Zone.join data indirect
module DatatypeFromDeps = Datatype.Make(struct
type t = deps
let name = "Function_Froms.Deps.from_deps"
let hash fd =
Zone.hash fd.data + 37 * Zone.hash fd.indirect
(* Lexicographic comparison: data component first, then indirect. *)
let compare fd1 fd2 =
let c = Zone.compare fd1.data fd2.data in
if c <> 0 then c
else Zone.compare fd1.indirect fd2.indirect
let equal = Datatype.from_compare
(* Default printer shows only the union of both components. *)
let pretty fmt d = Zone.pretty fmt (to_zone d)
let reprs =
List.map (fun z -> {data = z; indirect = z}) Zone.reprs
let structural_descr =
Structural_descr.t_record [| Zone.packed_descr; Zone.packed_descr; |]
let rehash = Datatype.identity
let mem_project = Datatype.never_any_project
let varname _ = "da"
let internal_pretty_code = Datatype.undefined
let copy = Datatype.undefined
end)
include DatatypeFromDeps
(* Detailed printer distinguishing the direct and indirect components,
   omitting whichever is bottom. *)
let pretty_precise fmt {data; indirect} =
let bottom_data = Zone.is_bottom data in
let bottom_indirect = Zone.is_bottom indirect in
match bottom_indirect, bottom_data with
| true, true ->
Format.fprintf fmt "\\nothing"
| true, false ->
Format.fprintf fmt "direct: %a"
Zone.pretty data
| false, true ->
Format.fprintf fmt "indirect: %a"
Zone.pretty indirect
| false, false ->
Format.fprintf fmt "indirect: %a; direct: %a"
Zone.pretty indirect
Zone.pretty data
(* Injections of a plain zone into one of the two components. *)
let from_data_deps z = { data = z; indirect = Zone.bottom }
let from_indirect_deps z = { data = Zone.bottom; indirect = z }
let bottom = {
data = Zone.bottom;
indirect = Zone.bottom;
}
let top = {
data = Zone.top;
indirect = Zone.top;
}
(* Pointwise inclusion on both components. *)
let is_included fd1 fd2 =
Zone.is_included fd1.data fd2.data &&
Zone.is_included fd1.indirect fd2.indirect
(* Pointwise join, with a fast path when one argument is physically
   the shared [bottom] value. *)
let join fd1 fd2 =
if fd1 == bottom then fd2
else if fd2 == bottom then fd1
else {
data = Zone.join fd1.data fd2.data;
indirect = Zone.join fd1.indirect fd2.indirect
}
let _narrow fd1 fd2 = {
data = Zone.narrow fd1.data fd2.data;
indirect = Zone.narrow fd1.indirect fd2.indirect
}
(* Enlarge one component by an extra zone. *)
let add_data_dep fd data =
{ fd with data = Zone.join fd.data data }
let add_indirect_dep fd indirect =
{ fd with indirect = Zone.join fd.indirect indirect }
(* Apply [f] to both components. *)
let map f fd = {
data = f fd.data;
indirect = f fd.indirect;
}
end
(* Assignment status of a location together with its dependencies.
   [DepsBottom] is the bottom of the lattice; [Unassigned] means the
   location was not written; [AssignedFrom d] means it was written
   with dependencies [d]; [MaybeAssignedFrom d] means it may have
   been written with dependencies [d] (printed "(and SELF)"). *)
module DepsOrUnassigned = struct
type deps_or_unassigned =
| DepsBottom
| Unassigned
| AssignedFrom of Deps.t
| MaybeAssignedFrom of Deps.t
(* Datatype instance (hash, comparison, pretty-printing, descriptors)
   for [deps_or_unassigned]. *)
module DatatypeDeps = Datatype.Make(struct
type t = deps_or_unassigned
let name = "Function_Froms.Deps.deps"
let pretty fmt = function
| DepsBottom -> Format.pp_print_string fmt "DEPS_BOTTOM"
| Unassigned -> Format.pp_print_string fmt "UNASSIGNED"
| AssignedFrom fd -> Deps.pretty_precise fmt fd
| MaybeAssignedFrom fd ->
(* '(or UNASSIGNED)' would be a better pretty-printer, we use
'(and SELF)' only for compatibility reasons *)
Format.fprintf fmt "%a (and SELF)" Deps.pretty_precise fd
let hash = function
| DepsBottom -> 3
| Unassigned -> 17
| AssignedFrom fd -> 37 + 13 * Deps.hash fd
| MaybeAssignedFrom fd -> 57 + 123 * Deps.hash fd
(* Total order with DepsBottom < Unassigned < AssignedFrom <
   MaybeAssignedFrom, refined by Deps.compare within a constructor. *)
let compare d1 d2 = match d1, d2 with
| DepsBottom, DepsBottom
| Unassigned, Unassigned -> 0
| AssignedFrom fd1, AssignedFrom fd2
| MaybeAssignedFrom fd1, MaybeAssignedFrom fd2 ->
Deps.compare fd1 fd2
| DepsBottom, (Unassigned | AssignedFrom _ | MaybeAssignedFrom _)
| Unassigned, (AssignedFrom _ | MaybeAssignedFrom _)
| AssignedFrom _, MaybeAssignedFrom _ ->
-1
| (Unassigned | AssignedFrom _ | MaybeAssignedFrom _), DepsBottom
| (AssignedFrom _ | MaybeAssignedFrom _), Unassigned
| MaybeAssignedFrom _, AssignedFrom _ ->
1
let equal = Datatype.from_compare
let reprs = Unassigned :: List.map (fun r -> AssignedFrom r) Deps.reprs
let structural_descr =
let d = Deps.packed_descr in
Structural_descr.t_sum [| [| d |]; [| d |] |]
let rehash = Datatype.identity
let mem_project = Datatype.never_any_project
let varname _ = "d"
let internal_pretty_code = Datatype.undefined
let copy = Datatype.undefined
end)
let join d1 d2 = match d1, d2 with
| DepsBottom, d | d, DepsBottom -> d
| Unassigned, Unassigned -> Unassigned
| Unassigned, AssignedFrom fd | AssignedFrom fd, Unassigned ->
MaybeAssignedFrom fd
| Unassigned, (MaybeAssignedFrom _ as d)
| (MaybeAssignedFrom _ as d), Unassigned ->
d
| AssignedFrom fd1, AssignedFrom fd2 ->
AssignedFrom (Deps.join fd1 fd2)
| AssignedFrom fd1, MaybeAssignedFrom fd2
| MaybeAssignedFrom fd1, AssignedFrom fd2
| MaybeAssignedFrom fd1, MaybeAssignedFrom fd2 ->
MaybeAssignedFrom (Deps.join fd1 fd2)
let narrow _ _ = assert false (* not used yet *)
let is_included d1 d2 = match d1, d2 with
| DepsBottom, (DepsBottom | Unassigned | AssignedFrom _ |
MaybeAssignedFrom _)
| Unassigned, (Unassigned | AssignedFrom _ | MaybeAssignedFrom _) ->
true
| MaybeAssignedFrom fd1, (AssignedFrom fd2 | MaybeAssignedFrom fd2)
| AssignedFrom fd1, AssignedFrom fd2 ->
Deps.is_included fd1 fd2
| (Unassigned | AssignedFrom _ | MaybeAssignedFrom _), DepsBottom
| (AssignedFrom _ | MaybeAssignedFrom _), Unassigned
| AssignedFrom _, MaybeAssignedFrom _ ->
false
let bottom = DepsBottom
let top = MaybeAssignedFrom Deps.top
let default = Unassigned
include DatatypeDeps
let subst f d = match d with
| DepsBottom -> DepsBottom
| Unassigned -> Unassigned
| AssignedFrom fd ->
let fd' = f fd in
if fd == fd' then d else AssignedFrom fd'
| MaybeAssignedFrom fd ->
let fd' = f fd in
if fd == fd' then d else MaybeAssignedFrom fd'
let pretty_precise = pretty
let to_zone = function
| DepsBottom | Unassigned -> Zone.bottom
| AssignedFrom fd | MaybeAssignedFrom fd -> Deps.to_zone fd
let to_deps = function
| DepsBottom | Unassigned -> Deps.bottom
| AssignedFrom fd | MaybeAssignedFrom fd -> fd
let extract_data = function
| DepsBottom | Unassigned -> Zone.bottom
| AssignedFrom fd | MaybeAssignedFrom fd -> fd.Deps.data
let extract_indirect = function
| DepsBottom | Unassigned -> Zone.bottom
| AssignedFrom fd | MaybeAssignedFrom fd -> fd.Deps.indirect
let may_be_unassigned = function
| DepsBottom | AssignedFrom _ -> false
| Unassigned | MaybeAssignedFrom _ -> true
let compose d1 d2 =
match d1, d2 with
| DepsBottom, _ | _, DepsBottom ->
DepsBottom (* could indicate dead code. Not used in practice anyway *)
| Unassigned, _ -> d2
| AssignedFrom _, _ -> d1
| MaybeAssignedFrom _, Unassigned -> d1
| MaybeAssignedFrom d1, MaybeAssignedFrom d2 ->
MaybeAssignedFrom (Deps.join d1 d2)
| MaybeAssignedFrom d1, AssignedFrom d2 ->
AssignedFrom (Deps.join d1 d2)
(* for backwards compatibility *)
let pretty fmt fd =
match fd with
| DepsBottom -> Format.pp_print_string fmt "DEPS_BOTTOM"
| Unassigned -> Format.pp_print_string fmt "(SELF)"
| AssignedFrom d -> Zone.pretty fmt (Deps.to_zone d)
| MaybeAssignedFrom d ->
Format.fprintf fmt "%a (and SELF)" Zone.pretty (Deps.to_zone d)
end
module Memory = struct
* A From table is internally represented as a Lmap of [ DepsOrUnassigned ] .
However , the API mostly hides this fact , and exports access functions
that take or return [ Deps.t ] values . This way , the user needs not
understand the subtleties of DepsBottom / Unassigned / MaybeAssigned .
However, the API mostly hides this fact, and exports access functions
that take or return [Deps.t] values. This way, the user needs not
understand the subtleties of DepsBottom/Unassigned/MaybeAssigned. *)
include Lmap_bitwise.Make_bitwise(DepsOrUnassigned)
let () = imprecise_write_msg := "dependencies to update"
let pretty_skip = function
| DepsOrUnassigned.DepsBottom -> true
| DepsOrUnassigned.Unassigned -> true
| DepsOrUnassigned.AssignedFrom _ -> false
| DepsOrUnassigned.MaybeAssignedFrom _ -> false
let pretty =
pretty_generic_printer
~skip_v:pretty_skip ~pretty_v:DepsOrUnassigned.pretty ~sep:"FROM" ()
let pretty_ind_data =
pretty_generic_printer
~skip_v:pretty_skip ~pretty_v:DepsOrUnassigned.pretty_precise ~sep:"FROM"
()
* This is the auxiliary datastructure used to write the function [ find ] .
When we iterate over a offsetmap of value [ DepsOrUnassigned ] , we obtain
two things : ( 1 ) some dependencies ; ( 2 ) some intervals that may have not
been assigned , and that will appear as data dependencies ( once we know
the base we are iterating on ) .
When we iterate over a offsetmap of value [DepsOrUnassigned], we obtain
two things: (1) some dependencies; (2) some intervals that may have not
been assigned, and that will appear as data dependencies (once we know
the base we are iterating on). *)
type find_offsm = {
fo_itvs: Int_Intervals.t;
fo_deps: Deps.t;
}
(** Once the base is known, we can obtain something of type [Deps.t] *)
let convert_find_offsm base fp =
let z = Zone.inject base fp.fo_itvs in
Deps.add_data_dep fp.fo_deps z
let empty_find_offsm = {
fo_itvs = Int_Intervals.bottom;
fo_deps = Deps.bottom;
}
let join_find_offsm fp1 fp2 =
if fp1 == empty_find_offsm then fp2
else if fp2 == empty_find_offsm then fp1
else {
fo_itvs = Int_Intervals.join fp1.fo_itvs fp2.fo_itvs;
fo_deps = Deps.join fp1.fo_deps fp2.fo_deps;
}
(** Auxiliary function that collects the dependencies on some intervals of
an offsetmap. *)
let find_precise_offsetmap : Int_Intervals.t -> LOffset.t -> find_offsm =
let cache = Hptmap_sig.PersistentCache "Function_Froms.find_precise" in
let aux_find_offsm ib ie v =
(* If the interval can be unassigned, we collect its bound. We also
return the dependencies stored at this interval. *)
let default, v = match v with
| DepsOrUnassigned.DepsBottom -> false, Deps.bottom
| DepsOrUnassigned.Unassigned -> true, Deps.bottom
| DepsOrUnassigned.MaybeAssignedFrom v -> true, v
| DepsOrUnassigned.AssignedFrom v -> false, v
in
{ fo_itvs =
if default
then Int_Intervals.inject_bounds ib ie
else Int_Intervals.bottom;
fo_deps = v }
in
(* Partial application is important *)
LOffset.fold_join_itvs
~cache aux_find_offsm join_find_offsm empty_find_offsm
(** Collecting dependencies on a given zone. *)
let find_precise : t -> Zone.t -> Deps.t =
let both = find_precise_offsetmap in
let conv = convert_find_offsm in
(* We are querying a zone for which no dependency is stored. Hence, every
base is implicitly bound to [Unassigned]. *)
let empty_map z = Deps.from_data_deps z in
let join = Deps.join in
let empty = Deps.bottom in
(* Partial application is important *)
let f = fold_join_zone ~both ~conv ~empty_map ~join ~empty in
fun m z ->
match m with
| Top -> Deps.top
| Bottom -> Deps.bottom
| Map m -> try f z m with Abstract_interp.Error_Top -> Deps.top
let find z m =
Deps.to_zone (find_precise z m)
let add_binding_precise_loc ~exact access m loc v =
let aux_one_loc loc m =
let loc = Locations.valid_part access loc in
add_binding_loc ~exact m loc (DepsOrUnassigned.AssignedFrom v)
in
Precise_locs.fold aux_one_loc loc m
let bind_var vi v m =
let z = Locations.zone_of_varinfo vi in
add_binding ~exact:true m z (DepsOrUnassigned.AssignedFrom v)
let unbind_var vi m =
remove_base (Base.of_varinfo vi) m
let add_binding ~exact m z v =
add_binding ~exact m z (DepsOrUnassigned.AssignedFrom v)
let add_binding_loc ~exact m loc v =
add_binding_loc ~exact m loc (DepsOrUnassigned.AssignedFrom v)
let is_unassigned m =
LOffset.is_same_value m DepsOrUnassigned.Unassigned
(* Unassigned is a neutral value for compose, on both sides *)
let decide_compose m1 m2 =
if m1 == m2 || is_unassigned m1 then LOffset.ReturnRight
else if is_unassigned m2 then LOffset.ReturnLeft
else LOffset.Recurse
let compose_map =
let cache = Hptmap_sig.PersistentCache "Function_Froms.Memory.compose" in
(* Partial application is important because of the cache. Idempotent,
because [compose x x] is always equal to [x]. *)
map2 ~cache ~symmetric:false ~idempotent:true ~empty_neutral:true
decide_compose DepsOrUnassigned.compose
let compose m1 m2 = match m1, m2 with
| Top, _ | _, Top -> Top
| Map m1, Map m2 -> Map (compose_map m1 m2)
| Bottom, (Map _ | Bottom) | Map _, Bottom -> Bottom
(** Auxiliary function that substitutes the data right-hand part of a
dependency by a pre-existing From state. The returned result is a Deps.t:
the data part will be the data part of the complete result, the indirect
part will be added to the indirect part of the final result. *)
(* This function iterates simultaneously on a From memory, and on a zone.
It is cached. The definitions below are used to call the function that
does the recursive descent. *)
let substitute_data_deps =
Nothing left to substitute , return z unchanged
let empty_right z = Deps.from_data_deps z in
(* Zone to substitute is empty *)
let empty_left _ = Deps.bottom in
(* [b] is in the zone and substituted. Rewrite appropriately *)
let both b itvs offsm =
let fp = find_precise_offsetmap itvs offsm in
convert_find_offsm b fp
in
let join = Deps.join in
let empty = Deps.bottom in
let cache = Hptmap_sig.PersistentCache "From_compute.subst_data" in
let f_map =
Zone.fold2_join_heterogeneous
~cache ~empty_left ~empty_right ~both ~join ~empty
in
fun call_site_froms z ->
match call_site_froms with
| Bottom -> Deps.bottom
| Top -> Deps.top
| Map m ->
try f_map z (shape m)
with Abstract_interp.Error_Top -> Deps.top
(** Auxiliary function that substitutes the indirect right-hand part of a
dependency by a pre-existing From state. The returned result is a zone,
which will be added to the indirect part of the final result. *)
let substitute_indirect_deps =
Nothing left to substitute , z is directly an indirect dependency
let empty_right z = z in
(* Zone to substitute is empty *)
let empty_left _ = Zone.bottom in
let both b itvs offsm =
(* Both the found data and indirect dependencies are computed for indirect
dependencies: merge to a single zone *)
let fp = find_precise_offsetmap itvs offsm in
Deps.to_zone (convert_find_offsm b fp)
in
let join = Zone.join in
let empty = Zone.bottom in
let cache = Hptmap_sig.PersistentCache "From_compute.subst_indirect" in
let f_map =
Zone.fold2_join_heterogeneous
~cache ~empty_left ~empty_right ~both ~join ~empty
in
fun call_site_froms z ->
match call_site_froms with
| Bottom -> Zone.bottom
| Top -> Zone.top
| Map m ->
try f_map z (shape m)
with Abstract_interp.Error_Top -> Zone.top
let substitute call_site_froms deps =
let open Deps in
let { data; indirect } = deps in
(* depending directly on an indirect dependency -> indirect,
depending indirectly on a direct dependency -> indirect *)
let dirdeps = substitute_data_deps call_site_froms data in
let inddeps = substitute_indirect_deps call_site_froms indirect in
let dir = dirdeps.data in
let ind = Zone.(join dirdeps.indirect inddeps) in
{ data = dir; indirect = ind }
type return = Deps.t
let default_return = Deps.bottom
let top_return = Deps.top
let add_to_return ?start:(_start=0) ~size:_size ?(m=default_return) v =
Deps.join m v
(*
let start = Ival.of_int start in
let itvs = Int_Intervals.from_ival_size start size in
LOffset.add_iset ~exact:true itvs (DepsOrUnassigned.AssignedFrom v) m
*)
let top_return_size size =
add_to_return ~size Deps.top
let join_return = Deps.join
let collapse_return x = x
end
type froms =
{ deps_return : Memory.return;
deps_table : Memory.t }
let top = {
deps_return = Memory.top_return;
deps_table = Memory.top;
}
let join x y =
{ deps_return = Memory.join_return x.deps_return y.deps_return ;
deps_table = Memory.join x.deps_table y.deps_table }
let outputs { deps_table = t } =
match t with
| Memory.Top -> Locations.Zone.top
| Memory.Bottom -> Locations.Zone.bottom
| Memory.Map(m) ->
Memory.fold
(fun z v acc ->
let open DepsOrUnassigned in
match v with
| DepsBottom | Unassigned -> acc
| AssignedFrom _ | MaybeAssignedFrom _ -> Locations.Zone.join z acc)
m Locations.Zone.bottom
let inputs ?(include_self=false) t =
let aux b offm acc =
Memory.LOffset.fold
(fun itvs deps acc ->
let z = DepsOrUnassigned.to_zone deps in
let self = DepsOrUnassigned.may_be_unassigned deps in
let acc = Zone.join z acc in
match include_self, self, b with
| true, true, Some b ->
Zone.join acc (Zone.inject b itvs)
| _ -> acc
)
offm
acc
in
let return = Deps.to_zone t.deps_return in
let aux_table b = aux (Some b) in
match t.deps_table with
| Memory.Top -> Zone.top
| Memory.Bottom -> Zone.bottom
| Memory.Map m -> Memory.fold_base aux_table m return
let pretty fmt { deps_return = r ; deps_table = t } =
Format.fprintf fmt "%a@\n\\result FROM @[%a@]@\n"
Memory.pretty t
Deps.pretty r
(** same as pretty, but uses the type of the function to output more
precise information.
@raise Error if the given type is not a function type
*)
let pretty_with_type ~indirect typ fmt { deps_return = r; deps_table = t } =
let (rt_typ,_,_,_) = Cil.splitFunctionType typ in
if Memory.is_bottom t
then Format.fprintf fmt
"@[NON TERMINATING - NO EFFECTS@]"
else
let map_pretty =
if indirect
then Memory.pretty_ind_data
else Memory.pretty
in
if Cil.isVoidType rt_typ
then begin
if Memory.is_empty t
then Format.fprintf fmt "@[NO EFFECTS@]"
else map_pretty fmt t
end
else
let pp_space fmt =
if not (Memory.is_empty t) then
Format.fprintf fmt "@ "
in
Format.fprintf fmt "@[<v>%a%t@[\\result FROM @[%a@]@]@]"
map_pretty t pp_space
(if indirect then Deps.pretty_precise else Deps.pretty) r
let pretty_with_type_indirect = pretty_with_type ~indirect:true
let pretty_with_type = pretty_with_type ~indirect:false
let hash { deps_return = dr ; deps_table = dt } =
Memory.hash dt + 197 * Deps.hash dr
let equal
{ deps_return = dr ; deps_table = dt }
{ deps_return = dr' ; deps_table = dt' } =
Memory.equal dt dt'&& Deps.equal dr dr'
include Datatype.Make
(struct
type t = froms
let reprs =
List.fold_left
(fun acc o ->
List.fold_left
(fun acc m -> { deps_return = o; deps_table = m } :: acc)
acc
Memory.reprs)
[]
Deps.reprs
let structural_descr =
Structural_descr.t_record
[| Deps.packed_descr;
Memory.packed_descr |]
let name = "Function_Froms"
let hash = hash
let compare = Datatype.undefined
let equal = equal
let pretty = pretty
let internal_pretty_code = Datatype.undefined
let rehash = Datatype.identity
let copy = Datatype.undefined
let varname = Datatype.undefined
let mem_project = Datatype.never_any_project
end)
(*
Local Variables:
compile-command: "make -C ../../.."
End:
*)
| null | https://raw.githubusercontent.com/Frama-C/Frama-C-snapshot/639a3647736bf8ac127d00ebe4c4c259f75f9b87/src/plugins/value_types/function_Froms.ml | ocaml | ************************************************************************
alternatives)
you can redistribute it and/or modify it under the terms of the GNU
It is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU Lesser General Public License for more details.
************************************************************************
not used yet
could indicate dead code. Not used in practice anyway
for backwards compatibility
* Once the base is known, we can obtain something of type [Deps.t]
* Auxiliary function that collects the dependencies on some intervals of
an offsetmap.
If the interval can be unassigned, we collect its bound. We also
return the dependencies stored at this interval.
Partial application is important
* Collecting dependencies on a given zone.
We are querying a zone for which no dependency is stored. Hence, every
base is implicitly bound to [Unassigned].
Partial application is important
Unassigned is a neutral value for compose, on both sides
Partial application is important because of the cache. Idempotent,
because [compose x x] is always equal to [x].
* Auxiliary function that substitutes the data right-hand part of a
dependency by a pre-existing From state. The returned result is a Deps.t:
the data part will be the data part of the complete result, the indirect
part will be added to the indirect part of the final result.
This function iterates simultaneously on a From memory, and on a zone.
It is cached. The definitions below are used to call the function that
does the recursive descent.
Zone to substitute is empty
[b] is in the zone and substituted. Rewrite appropriately
* Auxiliary function that substitutes the indirect right-hand part of a
dependency by a pre-existing From state. The returned result is a zone,
which will be added to the indirect part of the final result.
Zone to substitute is empty
Both the found data and indirect dependencies are computed for indirect
dependencies: merge to a single zone
depending directly on an indirect dependency -> indirect,
depending indirectly on a direct dependency -> indirect
let start = Ival.of_int start in
let itvs = Int_Intervals.from_ival_size start size in
LOffset.add_iset ~exact:true itvs (DepsOrUnassigned.AssignedFrom v) m
* same as pretty, but uses the type of the function to output more
precise information.
@raise Error if the given type is not a function type
Local Variables:
compile-command: "make -C ../../.."
End:
| This file is part of Frama - C.
Copyright ( C ) 2007 - 2019
CEA ( Commissariat à l'énergie atomique et aux énergies
Lesser General Public License as published by the Free Software
Foundation , version 2.1 .
See the GNU Lesser General Public License version 2.1
for more details ( enclosed in the file licenses / LGPLv2.1 ) .
open Locations
module Deps =
struct
type deps = {
data: Zone.t;
indirect: Zone.t;
}
let to_zone {data; indirect} = Zone.join data indirect
module DatatypeFromDeps = Datatype.Make(struct
type t = deps
let name = "Function_Froms.Deps.from_deps"
let hash fd =
Zone.hash fd.data + 37 * Zone.hash fd.indirect
let compare fd1 fd2 =
let c = Zone.compare fd1.data fd2.data in
if c <> 0 then c
else Zone.compare fd1.indirect fd2.indirect
let equal = Datatype.from_compare
let pretty fmt d = Zone.pretty fmt (to_zone d)
let reprs =
List.map (fun z -> {data = z; indirect = z}) Zone.reprs
let structural_descr =
Structural_descr.t_record [| Zone.packed_descr; Zone.packed_descr; |]
let rehash = Datatype.identity
let mem_project = Datatype.never_any_project
let varname _ = "da"
let internal_pretty_code = Datatype.undefined
let copy = Datatype.undefined
end)
include DatatypeFromDeps
let pretty_precise fmt {data; indirect} =
let bottom_data = Zone.is_bottom data in
let bottom_indirect = Zone.is_bottom indirect in
match bottom_indirect, bottom_data with
| true, true ->
Format.fprintf fmt "\\nothing"
| true, false ->
Format.fprintf fmt "direct: %a"
Zone.pretty data
| false, true ->
Format.fprintf fmt "indirect: %a"
Zone.pretty indirect
| false, false ->
Format.fprintf fmt "indirect: %a; direct: %a"
Zone.pretty indirect
Zone.pretty data
let from_data_deps z = { data = z; indirect = Zone.bottom }
let from_indirect_deps z = { data = Zone.bottom; indirect = z }
let bottom = {
data = Zone.bottom;
indirect = Zone.bottom;
}
let top = {
data = Zone.top;
indirect = Zone.top;
}
let is_included fd1 fd2 =
Zone.is_included fd1.data fd2.data &&
Zone.is_included fd1.indirect fd2.indirect
let join fd1 fd2 =
if fd1 == bottom then fd2
else if fd2 == bottom then fd1
else {
data = Zone.join fd1.data fd2.data;
indirect = Zone.join fd1.indirect fd2.indirect
}
let _narrow fd1 fd2 = {
data = Zone.narrow fd1.data fd2.data;
indirect = Zone.narrow fd1.indirect fd2.indirect
}
let add_data_dep fd data =
{ fd with data = Zone.join fd.data data }
let add_indirect_dep fd indirect =
{ fd with indirect = Zone.join fd.indirect indirect }
let map f fd = {
data = f fd.data;
indirect = f fd.indirect;
}
end
module DepsOrUnassigned = struct
type deps_or_unassigned =
| DepsBottom
| Unassigned
| AssignedFrom of Deps.t
| MaybeAssignedFrom of Deps.t
module DatatypeDeps = Datatype.Make(struct
type t = deps_or_unassigned
let name = "Function_Froms.Deps.deps"
let pretty fmt = function
| DepsBottom -> Format.pp_print_string fmt "DEPS_BOTTOM"
| Unassigned -> Format.pp_print_string fmt "UNASSIGNED"
| AssignedFrom fd -> Deps.pretty_precise fmt fd
| MaybeAssignedFrom fd ->
' ( or UNASSIGNED ) ' would be a better pretty - printer , we use
' ( and SELF ) ' only for compatibility reasons
'(and SELF)' only for compatibility reasons *)
Format.fprintf fmt "%a (and SELF)" Deps.pretty_precise fd
let hash = function
| DepsBottom -> 3
| Unassigned -> 17
| AssignedFrom fd -> 37 + 13 * Deps.hash fd
| MaybeAssignedFrom fd -> 57 + 123 * Deps.hash fd
let compare d1 d2 = match d1, d2 with
| DepsBottom, DepsBottom
| Unassigned, Unassigned -> 0
| AssignedFrom fd1, AssignedFrom fd2
| MaybeAssignedFrom fd1, MaybeAssignedFrom fd2 ->
Deps.compare fd1 fd2
| DepsBottom, (Unassigned | AssignedFrom _ | MaybeAssignedFrom _)
| Unassigned, (AssignedFrom _ | MaybeAssignedFrom _)
| AssignedFrom _, MaybeAssignedFrom _ ->
-1
| (Unassigned | AssignedFrom _ | MaybeAssignedFrom _), DepsBottom
| (AssignedFrom _ | MaybeAssignedFrom _), Unassigned
| MaybeAssignedFrom _, AssignedFrom _ ->
1
let equal = Datatype.from_compare
let reprs = Unassigned :: List.map (fun r -> AssignedFrom r) Deps.reprs
let structural_descr =
let d = Deps.packed_descr in
Structural_descr.t_sum [| [| d |]; [| d |] |]
let rehash = Datatype.identity
let mem_project = Datatype.never_any_project
let varname _ = "d"
let internal_pretty_code = Datatype.undefined
let copy = Datatype.undefined
end)
let join d1 d2 = match d1, d2 with
| DepsBottom, d | d, DepsBottom -> d
| Unassigned, Unassigned -> Unassigned
| Unassigned, AssignedFrom fd | AssignedFrom fd, Unassigned ->
MaybeAssignedFrom fd
| Unassigned, (MaybeAssignedFrom _ as d)
| (MaybeAssignedFrom _ as d), Unassigned ->
d
| AssignedFrom fd1, AssignedFrom fd2 ->
AssignedFrom (Deps.join fd1 fd2)
| AssignedFrom fd1, MaybeAssignedFrom fd2
| MaybeAssignedFrom fd1, AssignedFrom fd2
| MaybeAssignedFrom fd1, MaybeAssignedFrom fd2 ->
MaybeAssignedFrom (Deps.join fd1 fd2)
let is_included d1 d2 = match d1, d2 with
| DepsBottom, (DepsBottom | Unassigned | AssignedFrom _ |
MaybeAssignedFrom _)
| Unassigned, (Unassigned | AssignedFrom _ | MaybeAssignedFrom _) ->
true
| MaybeAssignedFrom fd1, (AssignedFrom fd2 | MaybeAssignedFrom fd2)
| AssignedFrom fd1, AssignedFrom fd2 ->
Deps.is_included fd1 fd2
| (Unassigned | AssignedFrom _ | MaybeAssignedFrom _), DepsBottom
| (AssignedFrom _ | MaybeAssignedFrom _), Unassigned
| AssignedFrom _, MaybeAssignedFrom _ ->
false
let bottom = DepsBottom
let top = MaybeAssignedFrom Deps.top
let default = Unassigned
include DatatypeDeps
let subst f d = match d with
| DepsBottom -> DepsBottom
| Unassigned -> Unassigned
| AssignedFrom fd ->
let fd' = f fd in
if fd == fd' then d else AssignedFrom fd'
| MaybeAssignedFrom fd ->
let fd' = f fd in
if fd == fd' then d else MaybeAssignedFrom fd'
let pretty_precise = pretty
let to_zone = function
| DepsBottom | Unassigned -> Zone.bottom
| AssignedFrom fd | MaybeAssignedFrom fd -> Deps.to_zone fd
let to_deps = function
| DepsBottom | Unassigned -> Deps.bottom
| AssignedFrom fd | MaybeAssignedFrom fd -> fd
let extract_data = function
| DepsBottom | Unassigned -> Zone.bottom
| AssignedFrom fd | MaybeAssignedFrom fd -> fd.Deps.data
let extract_indirect = function
| DepsBottom | Unassigned -> Zone.bottom
| AssignedFrom fd | MaybeAssignedFrom fd -> fd.Deps.indirect
let may_be_unassigned = function
| DepsBottom | AssignedFrom _ -> false
| Unassigned | MaybeAssignedFrom _ -> true
let compose d1 d2 =
match d1, d2 with
| DepsBottom, _ | _, DepsBottom ->
| Unassigned, _ -> d2
| AssignedFrom _, _ -> d1
| MaybeAssignedFrom _, Unassigned -> d1
| MaybeAssignedFrom d1, MaybeAssignedFrom d2 ->
MaybeAssignedFrom (Deps.join d1 d2)
| MaybeAssignedFrom d1, AssignedFrom d2 ->
AssignedFrom (Deps.join d1 d2)
let pretty fmt fd =
match fd with
| DepsBottom -> Format.pp_print_string fmt "DEPS_BOTTOM"
| Unassigned -> Format.pp_print_string fmt "(SELF)"
| AssignedFrom d -> Zone.pretty fmt (Deps.to_zone d)
| MaybeAssignedFrom d ->
Format.fprintf fmt "%a (and SELF)" Zone.pretty (Deps.to_zone d)
end
module Memory = struct
* A From table is internally represented as a Lmap of [ DepsOrUnassigned ] .
However , the API mostly hides this fact , and exports access functions
that take or return [ Deps.t ] values . This way , the user needs not
understand the subtleties of DepsBottom / Unassigned / MaybeAssigned .
However, the API mostly hides this fact, and exports access functions
that take or return [Deps.t] values. This way, the user needs not
understand the subtleties of DepsBottom/Unassigned/MaybeAssigned. *)
include Lmap_bitwise.Make_bitwise(DepsOrUnassigned)
let () = imprecise_write_msg := "dependencies to update"
let pretty_skip = function
| DepsOrUnassigned.DepsBottom -> true
| DepsOrUnassigned.Unassigned -> true
| DepsOrUnassigned.AssignedFrom _ -> false
| DepsOrUnassigned.MaybeAssignedFrom _ -> false
let pretty =
pretty_generic_printer
~skip_v:pretty_skip ~pretty_v:DepsOrUnassigned.pretty ~sep:"FROM" ()
let pretty_ind_data =
pretty_generic_printer
~skip_v:pretty_skip ~pretty_v:DepsOrUnassigned.pretty_precise ~sep:"FROM"
()
* This is the auxiliary datastructure used to write the function [ find ] .
When we iterate over a offsetmap of value [ DepsOrUnassigned ] , we obtain
two things : ( 1 ) some dependencies ; ( 2 ) some intervals that may have not
been assigned , and that will appear as data dependencies ( once we know
the base we are iterating on ) .
When we iterate over a offsetmap of value [DepsOrUnassigned], we obtain
two things: (1) some dependencies; (2) some intervals that may have not
been assigned, and that will appear as data dependencies (once we know
the base we are iterating on). *)
type find_offsm = {
fo_itvs: Int_Intervals.t;
fo_deps: Deps.t;
}
let convert_find_offsm base fp =
let z = Zone.inject base fp.fo_itvs in
Deps.add_data_dep fp.fo_deps z
let empty_find_offsm = {
fo_itvs = Int_Intervals.bottom;
fo_deps = Deps.bottom;
}
let join_find_offsm fp1 fp2 =
if fp1 == empty_find_offsm then fp2
else if fp2 == empty_find_offsm then fp1
else {
fo_itvs = Int_Intervals.join fp1.fo_itvs fp2.fo_itvs;
fo_deps = Deps.join fp1.fo_deps fp2.fo_deps;
}
let find_precise_offsetmap : Int_Intervals.t -> LOffset.t -> find_offsm =
let cache = Hptmap_sig.PersistentCache "Function_Froms.find_precise" in
let aux_find_offsm ib ie v =
let default, v = match v with
| DepsOrUnassigned.DepsBottom -> false, Deps.bottom
| DepsOrUnassigned.Unassigned -> true, Deps.bottom
| DepsOrUnassigned.MaybeAssignedFrom v -> true, v
| DepsOrUnassigned.AssignedFrom v -> false, v
in
{ fo_itvs =
if default
then Int_Intervals.inject_bounds ib ie
else Int_Intervals.bottom;
fo_deps = v }
in
LOffset.fold_join_itvs
~cache aux_find_offsm join_find_offsm empty_find_offsm
let find_precise : t -> Zone.t -> Deps.t =
let both = find_precise_offsetmap in
let conv = convert_find_offsm in
let empty_map z = Deps.from_data_deps z in
let join = Deps.join in
let empty = Deps.bottom in
let f = fold_join_zone ~both ~conv ~empty_map ~join ~empty in
fun m z ->
match m with
| Top -> Deps.top
| Bottom -> Deps.bottom
| Map m -> try f z m with Abstract_interp.Error_Top -> Deps.top
let find z m =
Deps.to_zone (find_precise z m)
let add_binding_precise_loc ~exact access m loc v =
let aux_one_loc loc m =
let loc = Locations.valid_part access loc in
add_binding_loc ~exact m loc (DepsOrUnassigned.AssignedFrom v)
in
Precise_locs.fold aux_one_loc loc m
let bind_var vi v m =
let z = Locations.zone_of_varinfo vi in
add_binding ~exact:true m z (DepsOrUnassigned.AssignedFrom v)
let unbind_var vi m =
remove_base (Base.of_varinfo vi) m
let add_binding ~exact m z v =
add_binding ~exact m z (DepsOrUnassigned.AssignedFrom v)
let add_binding_loc ~exact m loc v =
add_binding_loc ~exact m loc (DepsOrUnassigned.AssignedFrom v)
let is_unassigned m =
LOffset.is_same_value m DepsOrUnassigned.Unassigned
let decide_compose m1 m2 =
if m1 == m2 || is_unassigned m1 then LOffset.ReturnRight
else if is_unassigned m2 then LOffset.ReturnLeft
else LOffset.Recurse
let compose_map =
let cache = Hptmap_sig.PersistentCache "Function_Froms.Memory.compose" in
map2 ~cache ~symmetric:false ~idempotent:true ~empty_neutral:true
decide_compose DepsOrUnassigned.compose
let compose m1 m2 = match m1, m2 with
| Top, _ | _, Top -> Top
| Map m1, Map m2 -> Map (compose_map m1 m2)
| Bottom, (Map _ | Bottom) | Map _, Bottom -> Bottom
let substitute_data_deps =
Nothing left to substitute , return z unchanged
let empty_right z = Deps.from_data_deps z in
let empty_left _ = Deps.bottom in
let both b itvs offsm =
let fp = find_precise_offsetmap itvs offsm in
convert_find_offsm b fp
in
let join = Deps.join in
let empty = Deps.bottom in
let cache = Hptmap_sig.PersistentCache "From_compute.subst_data" in
let f_map =
Zone.fold2_join_heterogeneous
~cache ~empty_left ~empty_right ~both ~join ~empty
in
fun call_site_froms z ->
match call_site_froms with
| Bottom -> Deps.bottom
| Top -> Deps.top
| Map m ->
try f_map z (shape m)
with Abstract_interp.Error_Top -> Deps.top
let substitute_indirect_deps =
Nothing left to substitute , z is directly an indirect dependency
let empty_right z = z in
let empty_left _ = Zone.bottom in
let both b itvs offsm =
let fp = find_precise_offsetmap itvs offsm in
Deps.to_zone (convert_find_offsm b fp)
in
let join = Zone.join in
let empty = Zone.bottom in
let cache = Hptmap_sig.PersistentCache "From_compute.subst_indirect" in
let f_map =
Zone.fold2_join_heterogeneous
~cache ~empty_left ~empty_right ~both ~join ~empty
in
fun call_site_froms z ->
match call_site_froms with
| Bottom -> Zone.bottom
| Top -> Zone.top
| Map m ->
try f_map z (shape m)
with Abstract_interp.Error_Top -> Zone.top
let substitute call_site_froms deps =
let open Deps in
let { data; indirect } = deps in
let dirdeps = substitute_data_deps call_site_froms data in
let inddeps = substitute_indirect_deps call_site_froms indirect in
let dir = dirdeps.data in
let ind = Zone.(join dirdeps.indirect inddeps) in
{ data = dir; indirect = ind }
type return = Deps.t
let default_return = Deps.bottom
let top_return = Deps.top
let add_to_return ?start:(_start=0) ~size:_size ?(m=default_return) v =
Deps.join m v
let top_return_size size =
add_to_return ~size Deps.top
let join_return = Deps.join
let collapse_return x = x
end
type froms =
{ deps_return : Memory.return;
deps_table : Memory.t }
let top = {
deps_return = Memory.top_return;
deps_table = Memory.top;
}
let join x y =
{ deps_return = Memory.join_return x.deps_return y.deps_return ;
deps_table = Memory.join x.deps_table y.deps_table }
let outputs { deps_table = t } =
match t with
| Memory.Top -> Locations.Zone.top
| Memory.Bottom -> Locations.Zone.bottom
| Memory.Map(m) ->
Memory.fold
(fun z v acc ->
let open DepsOrUnassigned in
match v with
| DepsBottom | Unassigned -> acc
| AssignedFrom _ | MaybeAssignedFrom _ -> Locations.Zone.join z acc)
m Locations.Zone.bottom
let inputs ?(include_self=false) t =
let aux b offm acc =
Memory.LOffset.fold
(fun itvs deps acc ->
let z = DepsOrUnassigned.to_zone deps in
let self = DepsOrUnassigned.may_be_unassigned deps in
let acc = Zone.join z acc in
match include_self, self, b with
| true, true, Some b ->
Zone.join acc (Zone.inject b itvs)
| _ -> acc
)
offm
acc
in
let return = Deps.to_zone t.deps_return in
let aux_table b = aux (Some b) in
match t.deps_table with
| Memory.Top -> Zone.top
| Memory.Bottom -> Zone.bottom
| Memory.Map m -> Memory.fold_base aux_table m return
let pretty fmt { deps_return = r ; deps_table = t } =
Format.fprintf fmt "%a@\n\\result FROM @[%a@]@\n"
Memory.pretty t
Deps.pretty r
let pretty_with_type ~indirect typ fmt { deps_return = r; deps_table = t } =
let (rt_typ,_,_,_) = Cil.splitFunctionType typ in
if Memory.is_bottom t
then Format.fprintf fmt
"@[NON TERMINATING - NO EFFECTS@]"
else
let map_pretty =
if indirect
then Memory.pretty_ind_data
else Memory.pretty
in
if Cil.isVoidType rt_typ
then begin
if Memory.is_empty t
then Format.fprintf fmt "@[NO EFFECTS@]"
else map_pretty fmt t
end
else
let pp_space fmt =
if not (Memory.is_empty t) then
Format.fprintf fmt "@ "
in
Format.fprintf fmt "@[<v>%a%t@[\\result FROM @[%a@]@]@]"
map_pretty t pp_space
(if indirect then Deps.pretty_precise else Deps.pretty) r
let pretty_with_type_indirect = pretty_with_type ~indirect:true
let pretty_with_type = pretty_with_type ~indirect:false
let hash { deps_return = dr ; deps_table = dt } =
Memory.hash dt + 197 * Deps.hash dr
let equal
{ deps_return = dr ; deps_table = dt }
{ deps_return = dr' ; deps_table = dt' } =
Memory.equal dt dt'&& Deps.equal dr dr'
include Datatype.Make
(struct
type t = froms
let reprs =
List.fold_left
(fun acc o ->
List.fold_left
(fun acc m -> { deps_return = o; deps_table = m } :: acc)
acc
Memory.reprs)
[]
Deps.reprs
let structural_descr =
Structural_descr.t_record
[| Deps.packed_descr;
Memory.packed_descr |]
let name = "Function_Froms"
let hash = hash
let compare = Datatype.undefined
let equal = equal
let pretty = pretty
let internal_pretty_code = Datatype.undefined
let rehash = Datatype.identity
let copy = Datatype.undefined
let varname = Datatype.undefined
let mem_project = Datatype.never_any_project
end)
|
c8b3458114eec5b2635fcd3d027677973ba1891806703aa56292d46646759bc6 | rixed/ramen | RamenOCamlCompiler.ml | open Batteries
open RamenLog
open RamenHelpersNoLog
open RamenHelpers
module C = RamenConf
module N = RamenName
module Files = RamenFiles
let max_simult_compilations = Atomic.Counter.make 4
let use_external_compiler = ref false
let warnings = "-8-58-26@5"
(* Mostly copied from ocaml source code driver/optmain.ml *)
module Backend = struct
let symbol_for_global' = Compilenv.symbol_for_global'
let closure_symbol = Compilenv.closure_symbol
let really_import_approx = Import_approx.really_import_approx
let import_symbol = Import_approx.import_symbol
let size_int = Arch.size_int
let big_endian = Arch.big_endian
let max_sensible_number_of_arguments = Proc.max_arguments_for_tailcalls - 1
end
Compiler accumulate some options in there so we have to manually clean
* it in between two compilations : - <
* it in between two compilations :-< *)
let reset () =
Clflags.objfiles := [] ;
Clflags.ccobjs := [] ;
Clflags.dllibs := [] ;
Clflags.all_ccopts := [] ;
Clflags.all_ppx := [] ;
Clflags.open_modules := [] ;
Clflags.dllpaths := []
(* Return a formatter outputting in the logger *)
let ppf () =
let oc =
match !logger.output with
| Syslog ->
(match RamenLog.syslog with
| None -> stdnull
| Some slog ->
let buf = Buffer.create 100 in
let write c =
if c = '\n' then (
Syslog.syslog slog `LOG_ERR (Buffer.contents buf);
Buffer.clear buf
) else Buffer.add_char buf c in
let output b s l =
for i = 0 to l-1 do
write (Bytes.get b (s + i))
done ;
l
and flush () = ()
and close () = () in
BatIO.create_out ~write ~output ~flush ~close)
| output ->
let tm = Unix.(gettimeofday () |> localtime) in
RamenLog.do_output output tm true in
BatFormat.formatter_of_output oc
let cannot_compile what status =
Printf.sprintf "Cannot compile %S: %s"
what status |>
failwith
let cannot_link what status =
Printf.sprintf "Cannot link %S: %s" what status |>
failwith
(* Takes a source file and produce an object file: *)
let compile_internal conf ~keep_temp_files what src_file obj_file =
let debug = !logger.log_level = Debug in
let backend = (module Backend : Backend_intf.S) in
C.info_or_test conf "Compiling %a" N.path_print_quoted src_file ;
reset () ;
Clflags.native_code := true ;
Clflags.binary_annotations := true ;
Clflags.annotations := true ;
Clflags.use_linscan := true ; (* *)
Clflags.debug := debug ;
Clflags.verbose := debug ;
Clflags.no_std_include := true ;
!logger.debug "Use bundled libraries from %a"
N.path_print_quoted conf.C.bundle_dir ;
Also include in incdir the directory where the obj_file will be ,
* since other modules ( params ... ) might have been compiled there
* already :
* since other modules (params...) might have been compiled there
* already: *)
let obj_dir = Files.dirname obj_file in
let inc_dirs =
obj_dir ::
List.map (fun d ->
N.path_cat [ conf.C.bundle_dir ; d ]
) RamenDepLibs.incdirs in
Clflags.include_dirs := (inc_dirs :> string list) ;
Clflags.dlcode := true ;
Clflags.keep_asm_file := keep_temp_files ;
if debug then (
Clflags.default_simplify_rounds := 1 ;
Clflags.(use_inlining_arguments_set o1_arguments)
) else (
(* equivalent to -O2: *)
Clflags.default_simplify_rounds := 2 ;
Clflags.(use_inlining_arguments_set o2_arguments) ;
Clflags.(use_inlining_arguments_set ~round:0 o1_arguments)
) ;
Clflags.compile_only := true ;
Clflags.link_everything := false ;
Warnings.parse_options false warnings ;
Clflags.output_name := Some (obj_file :> string) ;
Asmlink.reset () ;
try
Optcompile.implementation ~backend (ppf ()) (src_file :> string)
((Files.remove_ext src_file) :> string)
with exn ->
Location.report_exception (ppf ()) exn ;
cannot_compile what (Printexc.to_string exn)
let compile_external _conf ~keep_temp_files what (src_file : N.path) obj_file =
let debug = !logger.log_level = Debug in
let cmd =
Printf.sprintf
"env -i PATH=%s OCAMLPATH=%s \
nice -n 1 \
%s ocamlopt%s%s -O%d -linscan -thread -annot -w %s \
-o %s -package ramen -I %s -c %s"
(shell_quote RamenCompilConfig.build_path)
(shell_quote RamenCompilConfig.ocamlpath)
(shell_quote (RamenCompilConfig.ocamlfind :> string))
(if debug then " -g" else "")
(if keep_temp_files then " -S" else "")
(if debug then 0 else 2)
(shell_quote warnings)
(shell_quote (obj_file : N.path :> string))
(shell_quote ((Files.dirname obj_file) :> string))
(shell_quote (src_file :> string)) in
(* TODO: return an array of arguments and get rid of the shell *)
let cmd_name = "Compilation of "^ what in
match run_coprocess ~max_count:max_simult_compilations cmd_name cmd with
| None ->
cannot_compile what "Cannot run command"
| Some (Unix.WEXITED 0) ->
!logger.debug "Compiled %s with: %s" what cmd
| Some status ->
(* As this might well be an installation problem, makes this error
* report to the GUI: *)
cannot_compile what (string_of_process_status status)
let compile conf ?(keep_temp_files=false) what src_file obj_file =
Files.mkdir_all ~is_file:true obj_file ;
(if !use_external_compiler then compile_external else compile_internal)
conf ~keep_temp_files what src_file obj_file
(* Function to take some object files, a source file, and produce an
* executable: *)
let is_ocaml_objfile (fname : N.path) =
String.ends_with (fname :> string) ".cmx" ||
String.ends_with (fname :> string) ".cmxa"
let link_internal conf ~keep_temp_files
~what ~inc_dirs ~obj_files
~src_file ~(exec_file : N.path) =
let debug = !logger.log_level = Debug in
let backend = (module Backend : Backend_intf.S) in
C.info_or_test conf "Linking %a" N.path_print_quoted src_file ;
reset () ;
Clflags.native_code := true ;
Clflags.binary_annotations := true ;
Clflags.use_linscan := true ;
Clflags.debug := debug ;
Clflags.verbose := debug ;
Clflags.no_std_include := true ;
!logger.debug "Use bundled libraries from %a"
N.path_print_quoted conf.C.bundle_dir ;
let inc_dirs =
List.map (fun d ->
N.path_cat [ conf.C.bundle_dir ; d ]
) RamenDepLibs.incdirs @
Set.to_list inc_dirs in
Clflags.include_dirs := (inc_dirs :> string list) ;
Clflags.dlcode := true ;
Clflags.keep_asm_file := keep_temp_files ;
if debug then (
Clflags.default_simplify_rounds := 1 ;
Clflags.(use_inlining_arguments_set o1_arguments)
) else (
(* equivalent to -O2: *)
Clflags.default_simplify_rounds := 2 ;
Clflags.(use_inlining_arguments_set o2_arguments) ;
Clflags.(use_inlining_arguments_set ~round:0 o1_arguments)
) ;
Clflags.compile_only := false ;
Clflags.link_everything := false ;
Warnings.parse_options false warnings ;
Clflags.output_name := Some (exec_file :> string) ;
Internal compiler wants .o files elsewhere then in objfiles :
let objfiles, ccobjs =
List.fold_left (fun (mls, cs) obj_file ->
if is_ocaml_objfile obj_file then
obj_file :: mls, cs
else (* Let's assume it's then a legitimate object file *)
mls, obj_file :: cs
) ([], []) obj_files in
let objfiles = List.rev objfiles
and ccobjs = List.rev ccobjs in
Now add the bundled libs and finally the main cmx :
let cmx_file = Files.change_ext "cmx" src_file in
let objfiles =
List.map (fun d ->
N.path_cat [ conf.C.bundle_dir ; d ]
) RamenDepLibs.objfiles @
objfiles @ [ cmx_file ] in
!logger.debug "objfiles = %a" (List.print N.path_print) objfiles ;
!logger.debug "ccobjs = %a" (List.print N.path_print) ccobjs ;
Clflags.ccobjs := (ccobjs :> string list) ;
Asmlink.reset () ;
try
Optcompile.implementation ~backend (ppf ()) (src_file :> string)
((Files.remove_ext src_file) :> string) ;
(* Now link *)
Compmisc.init_path true ;
Asmlink.link (ppf ()) (objfiles :> string list) (exec_file :> string)
with exn ->
Location.report_exception (ppf ()) exn ;
cannot_link what (Printexc.to_string exn)
let link_external _conf ~keep_temp_files
~what ~inc_dirs ~obj_files
~(src_file : N.path) ~(exec_file : N.path) =
let debug = !logger.log_level = Debug in
let path = getenv ~def:"/usr/bin:/usr/sbin" "PATH"
and ocamlpath = getenv ~def:"" "OCAMLPATH" in
let cmd =
Printf.sprintf
"env -i PATH=%s OCAMLPATH=%s \
nice -n 1 \
ocamlfind ocamlopt%s%s %s -thread -annot \
-o %s -package ramen -linkpkg %s %s"
(shell_quote path)
(shell_quote ocamlpath)
(if debug then " -g" else "")
(if keep_temp_files then " -S" else "")
(IO.to_string
(Set.print ~first:"" ~last:"" ~sep:" " (fun oc (f : N.path) ->
Printf.fprintf oc "-I %s" (shell_quote (f :> string)))) inc_dirs)
(shell_quote (exec_file :> string))
(IO.to_string
(List.print ~first:"" ~last:"" ~sep:" " (fun oc (f : N.path) ->
Printf.fprintf oc "%s" (shell_quote (f :> string)))) obj_files)
(shell_quote (src_file :> string)) in
(* TODO: return an array of arguments and get rid of the shell *)
let cmd_name = "Compilation+Link of "^ what in
match run_coprocess ~max_count:max_simult_compilations cmd_name cmd with
| None ->
cannot_link what "Cannot run command"
| Some (Unix.WEXITED 0) ->
!logger.debug "Compiled %s with: %s" what cmd ;
| Some status ->
(* As this might well be an installation problem, makes this error
* report to the GUI: *)
cannot_link what (string_of_process_status status)
let link conf ?(keep_temp_files=false)
~what ~obj_files ~src_file ~exec_file =
Files.mkdir_all ~is_file:true exec_file ;
We have a few C libraries in bundle_dir / lib that will be searched by the
* C linker :
* C linker: *)
let inc_dirs =
Set.singleton (N.path_cat [ conf.C.bundle_dir ; N.path "lib" ]) in
Look for cmi files in the same dirs where the cmx are :
let inc_dirs, obj_files =
List.fold_left (fun (s, l) obj_file ->
if is_ocaml_objfile obj_file then
Set.add (Files.dirname obj_file) s,
Files.basename obj_file :: l
else
s, obj_file :: l
) (inc_dirs, []) obj_files in
(if !use_external_compiler then link_external else link_internal)
conf ~keep_temp_files
~what ~inc_dirs ~obj_files ~src_file ~exec_file
(* Helpers: *)
(* Accepts a filename (without directory) and change it into something valid
* as an ocaml compilation unit.
* If the passed file name is not supposed to have an extension (yet?), set
* has_extension to false so that any dot can be replaced. *)
let to_module_file_name =
let re = Str.regexp "[^a-zA-Z0-9_]" in
fun ?(has_extension=true) fname ->
let s, ext =
if has_extension then
Files.remove_ext fname, Files.ext fname
else
fname, "" in
let s =
if N.is_empty s then "_" else
(* Encode all chars not allowed in OCaml modules: *)
let s =
Str.global_substitute re (fun s ->
let c = Str.matched_string s in
assert (String.length c = 1) ;
let i = Char.code c.[0] in
"_" ^ string_of_int i ^ "_"
) (s :> string) in
(* Then make sure we start with a letter: *)
if Char.is_letter s.[0] then s else "m"^ s
in
Files.add_ext (N.path s) ext
(*$= to_module_file_name & ~printer:BatPervasives.identity
"br_46_20_46_11.0" \
((to_module_file_name ~has_extension:true (N.path "br.20.11.0")) :> string)
"br_46_20_46_11_46_0" \
((to_module_file_name ~has_extension:false (N.path "br.20.11.0")) :> string)
*)
(* Given a file name (with or without extension, as instructed), make it a
* valid module name: *)
let make_valid_for_module ?has_extension (fname : N.path) =
let dirname, basename =
try String.rsplit ~by:"/" (fname :> string) |>
fun (d, b) -> N.path d, N.path b
with Not_found -> N.path ".", fname in
let basename = to_module_file_name ?has_extension basename in
N.path_cat [ dirname ; basename ]
(* obj name must not conflict with any external module. *)
let with_code_file_for obj_name reuse_prev_files f =
assert (not (N.is_empty obj_name)) ;
let basename =
Files.(change_ext "ml" (basename obj_name)) in
(* Make sure this will result in a valid module name: *)
let basename = to_module_file_name basename in
let fname = N.path_cat [ Files.dirname obj_name ; basename ] in
Files.mkdir_all ~is_file:true fname ;
(* If keep-temp-file is set, reuse preexisting source code : *)
if reuse_prev_files &&
Files.check ~min_size:1 ~has_perms:0o400 fname = FileOk
then
!logger.info "Reusing source file %a" N.path_print_quoted fname
else
File.with_file_out ~mode:[`create; `text; `trunc] (fname :> string) f ;
fname
let make_valid_ocaml_identifier s =
let is_letter c = (c >= 'a' && c <= 'z') ||
(c >= 'A' && c <= 'Z')
and is_digit c = c >= '0' && c <= '9'
in
if s = "" then invalid_arg "make_valid_ocaml_identifier: empty" ;
String.fold_lefti (fun s i c ->
s ^ (
if is_letter c || c = '_' ||
(i > 0 && (c = '\'' || is_digit c))
then
if i > 0 then String.of_char c
else String.of_char (Char.lowercase c)
else
(if i > 0 then "'" else "x'") ^ string_of_int (Char.code c))
(* Here we use the single quote as an escape char, given the single
* quote is not usable in quoted identifiers on ramen's side. *)
) "" s
(* Test that [make_valid_ocaml_identifier] is a projection: *)
(*$Q make_valid_ocaml_identifier
Q.small_string (fun s -> s = "" || ( \
let f = make_valid_ocaml_identifier in \
let i1 = f s in let i2 = f i1 in i1 = i2))
*)
let module_name_of_file_name fname =
(Files.(basename fname |> remove_ext) :> string) |>
make_valid_ocaml_identifier |>
String.capitalize_ascii
| null | https://raw.githubusercontent.com/rixed/ramen/c45cb3fcde6ad1e350bbf45a4bee385dcb51c30d/src/RamenOCamlCompiler.ml | ocaml | Mostly copied from ocaml source code driver/optmain.ml
Return a formatter outputting in the logger
Takes a source file and produce an object file:
equivalent to -O2:
TODO: return an array of arguments and get rid of the shell
As this might well be an installation problem, makes this error
* report to the GUI:
Function to take some object files, a source file, and produce an
* executable:
equivalent to -O2:
Let's assume it's then a legitimate object file
Now link
TODO: return an array of arguments and get rid of the shell
As this might well be an installation problem, makes this error
* report to the GUI:
Helpers:
Accepts a filename (without directory) and change it into something valid
* as an ocaml compilation unit.
* If the passed file name is not supposed to have an extension (yet?), set
* has_extension to false so that any dot can be replaced.
Encode all chars not allowed in OCaml modules:
Then make sure we start with a letter:
$= to_module_file_name & ~printer:BatPervasives.identity
"br_46_20_46_11.0" \
((to_module_file_name ~has_extension:true (N.path "br.20.11.0")) :> string)
"br_46_20_46_11_46_0" \
((to_module_file_name ~has_extension:false (N.path "br.20.11.0")) :> string)
Given a file name (with or without extension, as instructed), make it a
* valid module name:
obj name must not conflict with any external module.
Make sure this will result in a valid module name:
If keep-temp-file is set, reuse preexisting source code :
Here we use the single quote as an escape char, given the single
* quote is not usable in quoted identifiers on ramen's side.
Test that [make_valid_ocaml_identifier] is a projection:
$Q make_valid_ocaml_identifier
Q.small_string (fun s -> s = "" || ( \
let f = make_valid_ocaml_identifier in \
let i1 = f s in let i2 = f i1 in i1 = i2))
| open Batteries
open RamenLog
open RamenHelpersNoLog
open RamenHelpers
module C = RamenConf
module N = RamenName
module Files = RamenFiles
let max_simult_compilations = Atomic.Counter.make 4
let use_external_compiler = ref false
let warnings = "-8-58-26@5"
module Backend = struct
let symbol_for_global' = Compilenv.symbol_for_global'
let closure_symbol = Compilenv.closure_symbol
let really_import_approx = Import_approx.really_import_approx
let import_symbol = Import_approx.import_symbol
let size_int = Arch.size_int
let big_endian = Arch.big_endian
let max_sensible_number_of_arguments = Proc.max_arguments_for_tailcalls - 1
end
Compiler accumulate some options in there so we have to manually clean
* it in between two compilations : - <
* it in between two compilations :-< *)
let reset () =
Clflags.objfiles := [] ;
Clflags.ccobjs := [] ;
Clflags.dllibs := [] ;
Clflags.all_ccopts := [] ;
Clflags.all_ppx := [] ;
Clflags.open_modules := [] ;
Clflags.dllpaths := []
let ppf () =
let oc =
match !logger.output with
| Syslog ->
(match RamenLog.syslog with
| None -> stdnull
| Some slog ->
let buf = Buffer.create 100 in
let write c =
if c = '\n' then (
Syslog.syslog slog `LOG_ERR (Buffer.contents buf);
Buffer.clear buf
) else Buffer.add_char buf c in
let output b s l =
for i = 0 to l-1 do
write (Bytes.get b (s + i))
done ;
l
and flush () = ()
and close () = () in
BatIO.create_out ~write ~output ~flush ~close)
| output ->
let tm = Unix.(gettimeofday () |> localtime) in
RamenLog.do_output output tm true in
BatFormat.formatter_of_output oc
let cannot_compile what status =
Printf.sprintf "Cannot compile %S: %s"
what status |>
failwith
let cannot_link what status =
Printf.sprintf "Cannot link %S: %s" what status |>
failwith
let compile_internal conf ~keep_temp_files what src_file obj_file =
let debug = !logger.log_level = Debug in
let backend = (module Backend : Backend_intf.S) in
C.info_or_test conf "Compiling %a" N.path_print_quoted src_file ;
reset () ;
Clflags.native_code := true ;
Clflags.binary_annotations := true ;
Clflags.annotations := true ;
Clflags.debug := debug ;
Clflags.verbose := debug ;
Clflags.no_std_include := true ;
!logger.debug "Use bundled libraries from %a"
N.path_print_quoted conf.C.bundle_dir ;
Also include in incdir the directory where the obj_file will be ,
* since other modules ( params ... ) might have been compiled there
* already :
* since other modules (params...) might have been compiled there
* already: *)
let obj_dir = Files.dirname obj_file in
let inc_dirs =
obj_dir ::
List.map (fun d ->
N.path_cat [ conf.C.bundle_dir ; d ]
) RamenDepLibs.incdirs in
Clflags.include_dirs := (inc_dirs :> string list) ;
Clflags.dlcode := true ;
Clflags.keep_asm_file := keep_temp_files ;
if debug then (
Clflags.default_simplify_rounds := 1 ;
Clflags.(use_inlining_arguments_set o1_arguments)
) else (
Clflags.default_simplify_rounds := 2 ;
Clflags.(use_inlining_arguments_set o2_arguments) ;
Clflags.(use_inlining_arguments_set ~round:0 o1_arguments)
) ;
Clflags.compile_only := true ;
Clflags.link_everything := false ;
Warnings.parse_options false warnings ;
Clflags.output_name := Some (obj_file :> string) ;
Asmlink.reset () ;
try
Optcompile.implementation ~backend (ppf ()) (src_file :> string)
((Files.remove_ext src_file) :> string)
with exn ->
Location.report_exception (ppf ()) exn ;
cannot_compile what (Printexc.to_string exn)
let compile_external _conf ~keep_temp_files what (src_file : N.path) obj_file =
let debug = !logger.log_level = Debug in
let cmd =
Printf.sprintf
"env -i PATH=%s OCAMLPATH=%s \
nice -n 1 \
%s ocamlopt%s%s -O%d -linscan -thread -annot -w %s \
-o %s -package ramen -I %s -c %s"
(shell_quote RamenCompilConfig.build_path)
(shell_quote RamenCompilConfig.ocamlpath)
(shell_quote (RamenCompilConfig.ocamlfind :> string))
(if debug then " -g" else "")
(if keep_temp_files then " -S" else "")
(if debug then 0 else 2)
(shell_quote warnings)
(shell_quote (obj_file : N.path :> string))
(shell_quote ((Files.dirname obj_file) :> string))
(shell_quote (src_file :> string)) in
let cmd_name = "Compilation of "^ what in
match run_coprocess ~max_count:max_simult_compilations cmd_name cmd with
| None ->
cannot_compile what "Cannot run command"
| Some (Unix.WEXITED 0) ->
!logger.debug "Compiled %s with: %s" what cmd
| Some status ->
cannot_compile what (string_of_process_status status)
let compile conf ?(keep_temp_files=false) what src_file obj_file =
Files.mkdir_all ~is_file:true obj_file ;
(if !use_external_compiler then compile_external else compile_internal)
conf ~keep_temp_files what src_file obj_file
let is_ocaml_objfile (fname : N.path) =
String.ends_with (fname :> string) ".cmx" ||
String.ends_with (fname :> string) ".cmxa"
let link_internal conf ~keep_temp_files
~what ~inc_dirs ~obj_files
~src_file ~(exec_file : N.path) =
let debug = !logger.log_level = Debug in
let backend = (module Backend : Backend_intf.S) in
C.info_or_test conf "Linking %a" N.path_print_quoted src_file ;
reset () ;
Clflags.native_code := true ;
Clflags.binary_annotations := true ;
Clflags.use_linscan := true ;
Clflags.debug := debug ;
Clflags.verbose := debug ;
Clflags.no_std_include := true ;
!logger.debug "Use bundled libraries from %a"
N.path_print_quoted conf.C.bundle_dir ;
let inc_dirs =
List.map (fun d ->
N.path_cat [ conf.C.bundle_dir ; d ]
) RamenDepLibs.incdirs @
Set.to_list inc_dirs in
Clflags.include_dirs := (inc_dirs :> string list) ;
Clflags.dlcode := true ;
Clflags.keep_asm_file := keep_temp_files ;
if debug then (
Clflags.default_simplify_rounds := 1 ;
Clflags.(use_inlining_arguments_set o1_arguments)
) else (
Clflags.default_simplify_rounds := 2 ;
Clflags.(use_inlining_arguments_set o2_arguments) ;
Clflags.(use_inlining_arguments_set ~round:0 o1_arguments)
) ;
Clflags.compile_only := false ;
Clflags.link_everything := false ;
Warnings.parse_options false warnings ;
Clflags.output_name := Some (exec_file :> string) ;
Internal compiler wants .o files elsewhere then in objfiles :
let objfiles, ccobjs =
List.fold_left (fun (mls, cs) obj_file ->
if is_ocaml_objfile obj_file then
obj_file :: mls, cs
mls, obj_file :: cs
) ([], []) obj_files in
let objfiles = List.rev objfiles
and ccobjs = List.rev ccobjs in
Now add the bundled libs and finally the main cmx :
let cmx_file = Files.change_ext "cmx" src_file in
let objfiles =
List.map (fun d ->
N.path_cat [ conf.C.bundle_dir ; d ]
) RamenDepLibs.objfiles @
objfiles @ [ cmx_file ] in
!logger.debug "objfiles = %a" (List.print N.path_print) objfiles ;
!logger.debug "ccobjs = %a" (List.print N.path_print) ccobjs ;
Clflags.ccobjs := (ccobjs :> string list) ;
Asmlink.reset () ;
try
Optcompile.implementation ~backend (ppf ()) (src_file :> string)
((Files.remove_ext src_file) :> string) ;
Compmisc.init_path true ;
Asmlink.link (ppf ()) (objfiles :> string list) (exec_file :> string)
with exn ->
Location.report_exception (ppf ()) exn ;
cannot_link what (Printexc.to_string exn)
let link_external _conf ~keep_temp_files
~what ~inc_dirs ~obj_files
~(src_file : N.path) ~(exec_file : N.path) =
let debug = !logger.log_level = Debug in
let path = getenv ~def:"/usr/bin:/usr/sbin" "PATH"
and ocamlpath = getenv ~def:"" "OCAMLPATH" in
let cmd =
Printf.sprintf
"env -i PATH=%s OCAMLPATH=%s \
nice -n 1 \
ocamlfind ocamlopt%s%s %s -thread -annot \
-o %s -package ramen -linkpkg %s %s"
(shell_quote path)
(shell_quote ocamlpath)
(if debug then " -g" else "")
(if keep_temp_files then " -S" else "")
(IO.to_string
(Set.print ~first:"" ~last:"" ~sep:" " (fun oc (f : N.path) ->
Printf.fprintf oc "-I %s" (shell_quote (f :> string)))) inc_dirs)
(shell_quote (exec_file :> string))
(IO.to_string
(List.print ~first:"" ~last:"" ~sep:" " (fun oc (f : N.path) ->
Printf.fprintf oc "%s" (shell_quote (f :> string)))) obj_files)
(shell_quote (src_file :> string)) in
let cmd_name = "Compilation+Link of "^ what in
match run_coprocess ~max_count:max_simult_compilations cmd_name cmd with
| None ->
cannot_link what "Cannot run command"
| Some (Unix.WEXITED 0) ->
!logger.debug "Compiled %s with: %s" what cmd ;
| Some status ->
cannot_link what (string_of_process_status status)
let link conf ?(keep_temp_files=false)
~what ~obj_files ~src_file ~exec_file =
Files.mkdir_all ~is_file:true exec_file ;
We have a few C libraries in bundle_dir / lib that will be searched by the
* C linker :
* C linker: *)
let inc_dirs =
Set.singleton (N.path_cat [ conf.C.bundle_dir ; N.path "lib" ]) in
Look for cmi files in the same dirs where the cmx are :
let inc_dirs, obj_files =
List.fold_left (fun (s, l) obj_file ->
if is_ocaml_objfile obj_file then
Set.add (Files.dirname obj_file) s,
Files.basename obj_file :: l
else
s, obj_file :: l
) (inc_dirs, []) obj_files in
(if !use_external_compiler then link_external else link_internal)
conf ~keep_temp_files
~what ~inc_dirs ~obj_files ~src_file ~exec_file
let to_module_file_name =
let re = Str.regexp "[^a-zA-Z0-9_]" in
fun ?(has_extension=true) fname ->
let s, ext =
if has_extension then
Files.remove_ext fname, Files.ext fname
else
fname, "" in
let s =
if N.is_empty s then "_" else
let s =
Str.global_substitute re (fun s ->
let c = Str.matched_string s in
assert (String.length c = 1) ;
let i = Char.code c.[0] in
"_" ^ string_of_int i ^ "_"
) (s :> string) in
if Char.is_letter s.[0] then s else "m"^ s
in
Files.add_ext (N.path s) ext
let make_valid_for_module ?has_extension (fname : N.path) =
let dirname, basename =
try String.rsplit ~by:"/" (fname :> string) |>
fun (d, b) -> N.path d, N.path b
with Not_found -> N.path ".", fname in
let basename = to_module_file_name ?has_extension basename in
N.path_cat [ dirname ; basename ]
let with_code_file_for obj_name reuse_prev_files f =
assert (not (N.is_empty obj_name)) ;
let basename =
Files.(change_ext "ml" (basename obj_name)) in
let basename = to_module_file_name basename in
let fname = N.path_cat [ Files.dirname obj_name ; basename ] in
Files.mkdir_all ~is_file:true fname ;
if reuse_prev_files &&
Files.check ~min_size:1 ~has_perms:0o400 fname = FileOk
then
!logger.info "Reusing source file %a" N.path_print_quoted fname
else
File.with_file_out ~mode:[`create; `text; `trunc] (fname :> string) f ;
fname
let make_valid_ocaml_identifier s =
let is_letter c = (c >= 'a' && c <= 'z') ||
(c >= 'A' && c <= 'Z')
and is_digit c = c >= '0' && c <= '9'
in
if s = "" then invalid_arg "make_valid_ocaml_identifier: empty" ;
String.fold_lefti (fun s i c ->
s ^ (
if is_letter c || c = '_' ||
(i > 0 && (c = '\'' || is_digit c))
then
if i > 0 then String.of_char c
else String.of_char (Char.lowercase c)
else
(if i > 0 then "'" else "x'") ^ string_of_int (Char.code c))
) "" s
let module_name_of_file_name fname =
(Files.(basename fname |> remove_ext) :> string) |>
make_valid_ocaml_identifier |>
String.capitalize_ascii
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.